Mirror of https://github.com/mapbox/tippecanoe.git, synced 2025-02-24 10:44:51 +00:00

commit 4756be6e2e: Merge branch 'master' into object-attributes

.gitignore (vendored) | 1
@@ -34,6 +34,7 @@ tippecanoe
 tile-join
 tippecanoe-decode
 tippecanoe-enumerate
+tippecanoe-json-tool
 unit

 # Vim
.travis.yml | 12

@@ -52,6 +52,13 @@ matrix:
     - os: linux
       compiler: clang
       env: CLANG_VERSION='3.8.0' BUILDTYPE=Debug CC="clang-3.8" CXX="clang++-3.8" CXXFLAGS="--coverage" CFLAGS="--coverage" LDFLAGS="--coverage"
+      after_script:
+        - mason install llvm-cov 3.9.1
+        - mason link llvm-cov 3.9.1
+        - which llvm-cov
+        - curl -S -f https://codecov.io/bash -o codecov
+        - chmod +x codecov
+        - ./codecov -x "llvm-cov gcov" -Z
       addons:
         apt:
           sources: ['ubuntu-toolchain-r-test' ]
@@ -101,8 +108,3 @@ script:
       BUILDTYPE=${BUILDTYPE} make fewer-tests; else
       BUILDTYPE=${BUILDTYPE} make test geobuf-test;
     fi
-  - if [ -n "${COVERAGE}" ]; then
-      /usr/bin/llvm-cov-3.5 -lp *.o;
-      pip install --user cpp-coveralls;
-      ~/.local/bin/coveralls --no-gcov -i ./ --exclude clipper;
-    fi
CHANGELOG.md | 9

@@ -1,3 +1,12 @@
+## 1.27.13
+
+* Allow filtering features by zoom level in conditional expressions
+* Lines in CSV input with empty geometry columns will be ignored
+
+## 1.27.12
+
+* Check integrity of sqlite3 file before decoding or tile-joining
+
 ## 1.27.11

 * Always include tile and layer in tippecanoe-decode, fixing corrupt JSON.
Makefile | 6

@@ -235,6 +235,12 @@ join-filter-test:
 	./tippecanoe-decode tests/feature-filter/out/filtered.mbtiles > tests/feature-filter/out/filtered.json.check
 	cmp tests/feature-filter/out/filtered.json.check tests/feature-filter/out/filtered.json.standard
 	rm -f tests/feature-filter/out/filtered.json.check tests/feature-filter/out/filtered.mbtiles tests/feature-filter/out/all.mbtiles
+	# Test zoom level filtering
+	./tippecanoe -r1 -z8 -f -o tests/feature-filter/out/places.mbtiles tests/ne_110m_populated_places/in.json
+	./tile-join -J tests/feature-filter/places-filter -f -o tests/feature-filter/out/places-filter.mbtiles tests/feature-filter/out/places.mbtiles
+	./tippecanoe-decode tests/feature-filter/out/places-filter.mbtiles > tests/feature-filter/out/places-filter.mbtiles.json.check
+	cmp tests/feature-filter/out/places-filter.mbtiles.json.check tests/feature-filter/out/places-filter.mbtiles.json.standard
+	rm -f tests/feature-filter/out/places.mbtiles tests/feature-filter/out/places-filter.mbtiles tests/feature-filter/out/places-filter.mbtiles.json.check

 json-tool-test: tippecanoe-json-tool
 	./tippecanoe-json-tool -e GEOID10 tests/join-population/tabblock_06001420.json | sort > tests/join-population/tabblock_06001420.json.sort
README.md | 25

@@ -7,7 +7,7 @@ Builds [vector tilesets](https://www.mapbox.com/developers/vector-tiles/) from l
 ![Mapbox Tippecanoe](https://user-images.githubusercontent.com/1951835/36568734-ede27ec0-17df-11e8-8c22-ffaaebb8daf4.JPG)

 [![Build Status](https://travis-ci.org/mapbox/tippecanoe.svg)](https://travis-ci.org/mapbox/tippecanoe)
-[![Coverage Status](https://coveralls.io/repos/mapbox/tippecanoe/badge.svg?branch=master&service=github)](https://coveralls.io/github/mapbox/tippecanoe?branch=master)
+[![Coverage Status](https://codecov.io/gh/mapbox/tippecanoe/branch/master/graph/badge.svg)](https://codecov.io/gh/mapbox/tippecanoe)

 Intent
 ------
@@ -35,10 +35,23 @@ Installation

 The easiest way to install tippecanoe on OSX is with [Homebrew](http://brew.sh/):

-```js
+```sh
 $ brew install tippecanoe
 ```

+On Ubuntu it will usually be easiest to build from the source repository:
+
+```sh
+$ git clone git@github.com:mapbox/tippecanoe.git
+$ cd tippecanoe
+$ make -j
+$ make install
+```
+
+See [Development](#development) below for how to upgrade your
+C++ compiler or install prerequisite packages if you get
+compiler errors.
+
 Usage
 -----

@@ -197,7 +210,7 @@ resolution is obtained than by using a smaller _maxzoom_ or _detail_.

 ### Filtering features by attributes

-* `-j` *filter* or `--feature-filter`=*filter*: Check features against a per-layer filter (as defined in the [Mapbox GL Style Specification](https://www.mapbox.com/mapbox-gl-js/style-spec/#types-filter)) and only include those that match. Any features in layers that have no filter specified will be passed through. Filters for the layer `"*"` apply to all layers.
+* `-j` *filter* or `--feature-filter`=*filter*: Check features against a per-layer filter (as defined in the [Mapbox GL Style Specification](https://www.mapbox.com/mapbox-gl-js/style-spec/#types-filter)) and only include those that match. Any features in layers that have no filter specified will be passed through. Filters for the layer `"*"` apply to all layers. The special variable `$zoom` refers to the current zoom level.
 * `-J` *filter-file* or `--feature-filter-file`=*filter-file*: Like `-j`, but read the filter from a file.

 Example: to find the Natural Earth countries with low `scalerank` but high `LABELRANK`:
@@ -206,6 +219,12 @@ Example: to find the Natural Earth countries with low `scalerank` but high `LABE
 tippecanoe -z5 -o filtered.mbtiles -j '{ "ne_10m_admin_0_countries": [ "all", [ "<", "scalerank", 3 ], [ ">", "LABELRANK", 5 ] ] }' ne_10m_admin_0_countries.geojson
 ```

+Example: to retain only major TIGER roads at low zoom levels:
+
+```
+./tippecanoe -o roads.mbtiles -j '{ "*": [ "any", [ ">=", "$zoom", 11 ], [ "in", "MTFCC", "S1100", "S1200" ] ] }' tl_2015_06001_roads.json
+```
+
 ### Dropping a fixed fraction of features by zoom level

 * `-r` _rate_ or `--drop-rate=`_rate_: Rate at which dots are dropped at zoom levels below basezoom (default 2.5).
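Since `-J` reads the same filter syntax from a file, the TIGER example above can also be kept in a standalone filter file. For illustration (the `road-filter` file name is arbitrary, not part of this commit):

```sh
$ cat > road-filter << 'EOF'
{ "*": [ "any", [ ">=", "$zoom", 11 ], [ "in", "MTFCC", "S1100", "S1200" ] ] }
EOF
$ ./tippecanoe -o roads.mbtiles -J road-filter tl_2015_06001_roads.json
```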
codecov.yml (new file) | 2

@@ -0,0 +1,2 @@
+ignore:
+  - "test"
decode.cpp | 6

@@ -274,6 +274,12 @@ void decode(char *fname, int z, unsigned x, unsigned y, std::set<std::string> const
 		fprintf(stderr, "%s: %s\n", fname, sqlite3_errmsg(db));
 		exit(EXIT_FAILURE);
 	}

+	char *err = NULL;
+	if (sqlite3_exec(db, "PRAGMA integrity_check;", NULL, NULL, &err) != SQLITE_OK) {
+		fprintf(stderr, "%s: integrity_check: %s\n", fname, err);
+		exit(EXIT_FAILURE);
+	}
 }

 if (z < 0) {
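This is the 1.27.12 change ("Check integrity of sqlite3 file before decoding or tile-joining"); the same guard is added to enumerate.cpp and tile-join.cpp below. As a standalone sketch of the pattern — the `open_checked` wrapper name is hypothetical, not part of the patch:

```cpp
#include <sqlite3.h>
#include <stdio.h>
#include <stdlib.h>

// Hypothetical wrapper around the guard added above: open an .mbtiles
// file and run PRAGMA integrity_check before trusting its contents.
sqlite3 *open_checked(const char *fname) {
	sqlite3 *db;
	if (sqlite3_open(fname, &db) != SQLITE_OK) {
		fprintf(stderr, "%s: %s\n", fname, sqlite3_errmsg(db));
		exit(EXIT_FAILURE);
	}

	char *err = NULL;
	// A NULL callback discards the pragma's row output; this catches files
	// that are malformed enough to make the scan itself fail.
	if (sqlite3_exec(db, "PRAGMA integrity_check;", NULL, NULL, &err) != SQLITE_OK) {
		fprintf(stderr, "%s: integrity_check: %s\n", fname, err);
		exit(EXIT_FAILURE);
	}

	return db;
}
```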
enumerate.cpp | 6

@@ -11,6 +11,12 @@ void enumerate(char *fname) {
 		exit(EXIT_FAILURE);
 	}

+	char *err = NULL;
+	if (sqlite3_exec(db, "PRAGMA integrity_check;", NULL, NULL, &err) != SQLITE_OK) {
+		fprintf(stderr, "%s: integrity_check: %s\n", fname, err);
+		exit(EXIT_FAILURE);
+	}
+
 	const char *sql = "SELECT zoom_level, tile_column, tile_row from tiles order by zoom_level, tile_column, tile_row;";

 	sqlite3_stmt *stmt;
geobuf.cpp | 2

@@ -387,7 +387,6 @@ void readFeature(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<s
 		sf.t = dv[i].type;
 		sf.full_keys = full_keys;
 		sf.full_values = full_values;
-		sf.m = sf.full_values.size();

 		auto tip = other.find("tippecanoe");
 		if (tip != other.end()) {
@@ -512,7 +511,6 @@
 	sf.seq = (*sst->layer_seq);
 	sf.geometry = dv;
 	sf.t = type;
-	sf.m = 0;

 	serialize_feature(sst, sf);
 }
geocsv.cpp | 11

@@ -61,10 +61,18 @@ void parse_geocsv(std::vector<struct serialization_state> &sst, std::string fnam
 		std::vector<std::string> line = csv_split(s.c_str());

 		if (line.size() != header.size()) {
-			fprintf(stderr, "%s:%zu: Mismatched column count: %zu in line, %zu in header\n", fname.c_str(), seq, line.size(), header.size());
+			fprintf(stderr, "%s:%zu: Mismatched column count: %zu in line, %zu in header\n", fname.c_str(), seq + 1, line.size(), header.size());
 			exit(EXIT_FAILURE);
 		}

+		if (line[loncol].empty() || line[latcol].empty()) {
+			static int warned = 0;
+			if (!warned) {
+				fprintf(stderr, "%s:%zu: null geometry (additional not reported)\n", fname.c_str(), seq + 1);
+				warned = 1;
+			}
+			continue;
+		}
+
 		double lon = atof(line[loncol].c_str());
 		double lat = atof(line[latcol].c_str());

@@ -108,7 +116,6 @@ void parse_geocsv(std::vector<struct serialization_state> &sst, std::string fnam
 		sf.t = 1; // POINT
 		sf.full_keys = full_keys;
 		sf.full_values = full_values;
-		sf.m = sf.full_values.size();

 		serialize_feature(&sst[0], sf);
 	}
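Two fixes land here: the mismatched-column error now reports `seq + 1` so the printed line number is 1-based, and rows whose longitude or latitude column is empty are skipped rather than being parsed by `atof` as coordinate 0,0, with the warning printed once per file via a function-local static. A condensed sketch of the skip, with the surrounding CSV handling stubbed out (the `has_geometry` helper is illustrative only):

```cpp
#include <stdio.h>
#include <string>
#include <vector>

// Sketch of the null-geometry skip above. "line" is one parsed CSV record;
// loncol and latcol are the geometry column indexes found in the header row.
bool has_geometry(const std::vector<std::string> &line, size_t loncol, size_t latcol,
		  const std::string &fname, size_t seq) {
	if (line[loncol].empty() || line[latcol].empty()) {
		static int warned = 0;  // warn on the first bad row only
		if (!warned) {
			fprintf(stderr, "%s:%zu: null geometry (additional not reported)\n", fname.c_str(), seq + 1);
			warned = 1;
		}
		return false;  // caller does `continue` to drop the row
	}
	return true;
}
```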
geojson.cpp | 1

@@ -193,7 +193,6 @@ int serialize_geojson_feature(struct serialization_state *sst, json_object *geom
 	sf.has_tippecanoe_maxzoom = (tippecanoe_maxzoom != -1);
 	sf.tippecanoe_maxzoom = tippecanoe_maxzoom;
 	sf.geometry = dv;
-	sf.m = m;
 	sf.feature_minzoom = 0; // Will be filled in during index merging
 	sf.seq = *(sst->layer_seq);

main.cpp | 2

@@ -2153,7 +2153,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo

 	std::atomic<unsigned> midx(0);
 	std::atomic<unsigned> midy(0);
-	int written = traverse_zooms(fd, size, meta, stringpool, &midx, &midy, maxzoom, minzoom, outdb, outdir, buffer, fname, tmpdir, gamma, full_detail, low_detail, min_detail, meta_off, pool_off, initial_x, initial_y, simplification, layermaps, prefilter, postfilter, attribute_accum);
+	int written = traverse_zooms(fd, size, meta, stringpool, &midx, &midy, maxzoom, minzoom, outdb, outdir, buffer, fname, tmpdir, gamma, full_detail, low_detail, min_detail, meta_off, pool_off, initial_x, initial_y, simplification, layermaps, prefilter, postfilter, attribute_accum, filter);

 	if (maxzoom != written) {
 		fprintf(stderr, "\n\n\n*** NOTE TILES ONLY COMPLETE THROUGH ZOOM %d ***\n\n\n", written);
man/tippecanoe.1 | 27

@@ -6,7 +6,7 @@ like these \[la]MADE_WITH.md\[ra]\&.
 [Mapbox Tippecanoe](\[la]https://user-images.githubusercontent.com/1951835/36568734-ede27ec0-17df-11e8-8c22-ffaaebb8daf4.JPG\[ra])
 .PP
 [Build Status](https://travis\-ci.org/mapbox/tippecanoe.svg) \[la]https://travis-ci.org/mapbox/tippecanoe\[ra]
-[Coverage Status](https://coveralls.io/repos/mapbox/tippecanoe/badge.svg?branch=master&service=github) \[la]https://coveralls.io/github/mapbox/tippecanoe?branch=master\[ra]
+[Coverage Status](https://codecov.io/gh/mapbox/tippecanoe/branch/master/graph/badge.svg) \[la]https://codecov.io/gh/mapbox/tippecanoe\[ra]
 .SH Intent
 .PP
 The goal of Tippecanoe is to enable making a scale\-independent view of your data,
@@ -35,6 +35,21 @@ The easiest way to install tippecanoe on OSX is with Homebrew \[la]http://brew.s
 $ brew install tippecanoe
 .fi
 .RE
+.PP
+On Ubuntu it will usually be easiest to build from the source repository:
+.PP
+.RS
+.nf
+$ git clone git@github.com:mapbox/tippecanoe.git
+$ cd tippecanoe
+$ make \-j
+$ make install
+.fi
+.RE
+.PP
+See Development \[la]#development\[ra] below for how to upgrade your
+C++ compiler or install prerequisite packages if you get
+compiler errors.
 .SH Usage
 .PP
 .RS
@@ -221,7 +236,7 @@ to specify how the named \fIattribute\fP is accumulated onto the attribute of th
 .SS Filtering features by attributes
 .RS
 .IP \(bu 2
-\fB\fC\-j\fR \fIfilter\fP or \fB\fC\-\-feature\-filter\fR=\fIfilter\fP: Check features against a per\-layer filter (as defined in the Mapbox GL Style Specification \[la]https://www.mapbox.com/mapbox-gl-js/style-spec/#types-filter\[ra]) and only include those that match. Any features in layers that have no filter specified will be passed through. Filters for the layer \fB\fC"*"\fR apply to all layers.
+\fB\fC\-j\fR \fIfilter\fP or \fB\fC\-\-feature\-filter\fR=\fIfilter\fP: Check features against a per\-layer filter (as defined in the Mapbox GL Style Specification \[la]https://www.mapbox.com/mapbox-gl-js/style-spec/#types-filter\[ra]) and only include those that match. Any features in layers that have no filter specified will be passed through. Filters for the layer \fB\fC"*"\fR apply to all layers. The special variable \fB\fC$zoom\fR refers to the current zoom level.
 .IP \(bu 2
 \fB\fC\-J\fR \fIfilter\-file\fP or \fB\fC\-\-feature\-filter\-file\fR=\fIfilter\-file\fP: Like \fB\fC\-j\fR, but read the filter from a file.
 .RE
@@ -233,6 +248,14 @@ Example: to find the Natural Earth countries with low \fB\fCscalerank\fR but hig
 tippecanoe \-z5 \-o filtered.mbtiles \-j '{ "ne_10m_admin_0_countries": [ "all", [ "<", "scalerank", 3 ], [ ">", "LABELRANK", 5 ] ] }' ne_10m_admin_0_countries.geojson
 .fi
 .RE
+.PP
+Example: to retain only major TIGER roads at low zoom levels:
+.PP
+.RS
+.nf
+\&./tippecanoe \-o roads.mbtiles \-j '{ "*": [ "any", [ ">=", "$zoom", 11 ], [ "in", "MTFCC", "S1100", "S1200" ] ] }' tl_2015_06001_roads.json
+.fi
+.RE
 .SS Dropping a fixed fraction of features by zoom level
 .RS
 .IP \(bu 2
mbtiles.cpp | 4

@@ -603,6 +603,10 @@ std::map<std::string, layermap_entry> merge_layermaps(std::vector<std::map<std::
 }

 void add_to_file_keys(std::map<std::string, type_and_string_stats> &file_keys, std::string const &attrib, type_and_string const &val) {
+	if (val.type == mvt_null) {
+		return;
+	}
+
 	auto fka = file_keys.find(attrib);
 	if (fka == file_keys.end()) {
 		file_keys.insert(std::pair<std::string, type_and_string_stats>(attrib, type_and_string_stats()));
plugin.cpp | 1

@@ -393,7 +393,6 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
 	sf.bbox[0] = sf.bbox[1] = LLONG_MAX;
 	sf.bbox[2] = sf.bbox[3] = LLONG_MIN;
 	sf.extent = 0;
-	sf.m = 0;
 	sf.metapos = 0;
 	sf.has_id = false;

serial.cpp | 73

@@ -219,20 +219,16 @@ void serialize_feature(FILE *geomfile, serial_feature *sf, long long *geompos, c
 		serialize_long_long(geomfile, sf->extent, geompos, fname);
 	}

-	serialize_int(geomfile, sf->m, geompos, fname);
-	if (sf->m != 0) {
 	serialize_long_long(geomfile, sf->metapos, geompos, fname);
-	}
-
-	if (sf->metapos < 0 && sf->m != sf->keys.size()) {
-		fprintf(stderr, "Internal error: feature said to have %lld attributes but only %lld found\n", (long long) sf->m, (long long) sf->keys.size());
-		exit(EXIT_FAILURE);
-	}
 	if (sf->metapos < 0) {
+		serialize_long_long(geomfile, sf->keys.size(), geompos, fname);
+
 		for (size_t i = 0; i < sf->keys.size(); i++) {
 			serialize_long_long(geomfile, sf->keys[i], geompos, fname);
 			serialize_long_long(geomfile, sf->values[i], geompos, fname);
 		}
 	}

 	if (include_minzoom) {
 		serialize_byte(geomfile, sf->feature_minzoom, geompos, fname);
@@ -285,19 +281,14 @@ serial_feature deserialize_feature(FILE *geoms, long long *geompos_in, char *met
 	sf.layer >>= 6;

 	sf.metapos = 0;
-	{
-		int m;
-		deserialize_int_io(geoms, &m, geompos_in);
-		sf.m = m;
-	}
-	if (sf.m != 0) {
 	deserialize_long_long_io(geoms, &sf.metapos, geompos_in);
-	}

 	if (sf.metapos >= 0) {
 		char *meta = metabase + sf.metapos + meta_off[sf.segment];
+		long long count;
+		deserialize_long_long(&meta, &count);

-		for (size_t i = 0; i < sf.m; i++) {
+		for (long long i = 0; i < count; i++) {
 			long long k, v;
 			deserialize_long_long(&meta, &k);
 			deserialize_long_long(&meta, &v);
@@ -305,7 +296,10 @@ serial_feature deserialize_feature(FILE *geoms, long long *geompos_in, char *met
 			sf.values.push_back(v);
 		}
 	} else {
-		for (size_t i = 0; i < sf.m; i++) {
+		long long count;
+		deserialize_long_long_io(geoms, &count, geompos_in);
+
+		for (long long i = 0; i < count; i++) {
 			long long k, v;
 			deserialize_long_long_io(geoms, &k, geompos_in);
 			deserialize_long_long_io(geoms, &v, geompos_in);
@@ -524,63 +518,17 @@ int serialize_feature(struct serialization_state *sst, serial_feature &sf) {
 			if (sst->include->count(sf.full_keys[i]) == 0) {
 				sf.full_keys.erase(sf.full_keys.begin() + i);
 				sf.full_values.erase(sf.full_values.begin() + i);
-				sf.m--;
 				continue;
 			}
 		} else if (sst->exclude->count(sf.full_keys[i]) != 0) {
 			sf.full_keys.erase(sf.full_keys.begin() + i);
 			sf.full_values.erase(sf.full_values.begin() + i);
-			sf.m--;
 			continue;
 		}

 		coerce_value(sf.full_keys[i], sf.full_values[i].type, sf.full_values[i].s, sst->attribute_types);
 	}

-	if (sst->filter != NULL) {
-		std::map<std::string, mvt_value> attributes;
-
-		for (size_t i = 0; i < sf.full_keys.size(); i++) {
-			std::string key = sf.full_keys[i];
-			mvt_value val = stringified_to_mvt_value(sf.full_values[i].type, sf.full_values[i].s.c_str());
-
-			attributes.insert(std::pair<std::string, mvt_value>(key, val));
-		}
-
-		if (sf.has_id) {
-			mvt_value v;
-			v.type = mvt_uint;
-			v.numeric_value.uint_value = sf.id;
-
-			attributes.insert(std::pair<std::string, mvt_value>("$id", v));
-		}
-
-		mvt_value v;
-		v.type = mvt_string;
-
-		if (sf.t == mvt_point) {
-			v.string_value = "Point";
-		} else if (sf.t == mvt_linestring) {
-			v.string_value = "LineString";
-		} else if (sf.t == mvt_polygon) {
-			v.string_value = "Polygon";
-		}
-
-		attributes.insert(std::pair<std::string, mvt_value>("$type", v));
-
-		if (!evaluate(attributes, sf.layername, sst->filter)) {
-			return 0;
-		}
-	}
-
 	for (ssize_t i = (ssize_t) sf.full_keys.size() - 1; i >= 0; i--) {
 		if (sf.full_values[i].type == mvt_null) {
 			sf.full_keys.erase(sf.full_keys.begin() + i);
 			sf.full_values.erase(sf.full_values.begin() + i);
-			sf.m--;
 		}
 	}

 	if (!sst->filters) {
 		for (size_t i = 0; i < sf.full_keys.size(); i++) {
 			type_and_string attrib;
@@ -600,6 +548,7 @@ int serialize_feature(struct serialization_state *sst, serial_feature &sf) {
 		}
 	} else {
 		sf.metapos = r->metapos;
+		serialize_long_long(r->metafile, sf.full_keys.size(), &r->metapos, sst->fname);
 		for (size_t i = 0; i < sf.full_keys.size(); i++) {
 			serialize_long_long(r->metafile, addpool(r->poolfile, r->treefile, sf.full_keys[i].c_str(), mvt_string), &r->metapos, sst->fname);
 			serialize_long_long(r->metafile, addpool(r->poolfile, r->treefile, sf.full_values[i].s.c_str(), sf.full_values[i].type), &r->metapos, sst->fname);
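Taken together, the serial.cpp changes drop the separate attribute count `m` from the feature encoding: `metapos` is now always written, and when it is negative (inline metadata) an explicit count precedes the key/value pairs, so deserialization recovers the count from the stream itself. A minimal sketch of the inline layout, using raw fixed-width `fwrite`/`fread` in place of tippecanoe's variable-length `serialize_long_long` helpers:

```cpp
#include <cstdio>
#include <cstdlib>
#include <vector>

// Stand-ins for serialize_long_long/deserialize_long_long_io; the real
// ones use a compact variable-length encoding, plain fixed-width here.
static void write_ll(FILE *f, long long v) {
	fwrite(&v, sizeof v, 1, f);
}

static long long read_ll(FILE *f) {
	long long v = 0;
	if (fread(&v, sizeof v, 1, f) != 1) {
		exit(EXIT_FAILURE);  // truncated stream
	}
	return v;
}

// Inline attribute layout after this change: an explicit count, then that
// many key/value string-pool offsets. The reader no longer needs a
// separately stored "m" field to know how many pairs follow.
void write_inline_attributes(FILE *f, std::vector<long long> const &keys,
			     std::vector<long long> const &values) {
	write_ll(f, (long long) keys.size());
	for (size_t i = 0; i < keys.size(); i++) {
		write_ll(f, keys[i]);
		write_ll(f, values[i]);
	}
}

void read_inline_attributes(FILE *f, std::vector<long long> &keys,
			    std::vector<long long> &values) {
	long long count = read_ll(f);
	for (long long i = 0; i < count; i++) {
		keys.push_back(read_ll(f));
		values.push_back(read_ll(f));
	}
}
```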
serial.hpp | 2

@@ -57,9 +57,9 @@ struct serial_feature {
 	unsigned long long index = 0;
 	long long extent = 0;

-	size_t m = 0;
 	std::vector<long long> keys{};
 	std::vector<long long> values{};
+	// If >= 0, metadata is external
 	long long metapos = 0;

 	// XXX This isn't serialized. Should it be here?
@@ -242,3 +242,4 @@ scalerank,natscale,labelrank,featurecla,name,namepar,namealt,diffascii,nameascii
 0,600,3,Admin-1 capital,Sydney,,,0,Sydney,0.00000000000,,,1.00000000000,1,Australia,AUS,Australia,AUS,New South Wales,AU,,-33.92001096720,151.18517980900,4.00000000000,0,Changed feature class.,4630000,3641422,2669348,12,12,2147714.00000000000,Sydney,Sydney1,1,0,1.7
 0,600,0,Admin-0 capital,Singapore,,,0,Singapore,1.00000000000,,,1.00000000000,1,Singapore,SGP,Singapore,SGP,,SG,,1.29303346649,103.85582067800,0.00000000000,0,,5183700,3289529,3314179,13,12,1880252.00000000000,Singapore,Singapore,1,5,2.1
 0,600,0,Admin-0 region capital,Hong Kong,,,0,Hong Kong,0.00000000000,,,1.00000000000,1,China,CHN,Hong Kong S.A.R.,HKG,,HK,,22.30498089500,114.18500931700,0.00000000000,0,,7206000,4551579,4549026,13,12,1819729.00000000000,Hong Kong,Hong Kong,1,0,3.0
+,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,,

Can't render this file because it has a wrong number of fields in line 245.

File diff suppressed because one or more lines are too long
tests/feature-filter/out/places-filter.mbtiles.json.standard (new file) | 6780

File diff suppressed because one or more lines are too long
tests/feature-filter/places-filter (new file) | 12

@@ -0,0 +1,12 @@
+{ "*": [
+	"any",
+	[ "all", [ "==", "SCALERANK", 0 ], [ ">=", "$zoom", 0 ] ],
+	[ "all", [ "==", "SCALERANK", 1 ], [ ">=", "$zoom", 1 ] ],
+	[ "all", [ "==", "SCALERANK", 2 ], [ ">=", "$zoom", 2 ] ],
+	[ "all", [ "==", "SCALERANK", 3 ], [ ">=", "$zoom", 3 ] ],
+	[ "all", [ "==", "SCALERANK", 4 ], [ ">=", "$zoom", 4 ] ],
+	[ "all", [ "==", "SCALERANK", 5 ], [ ">=", "$zoom", 5 ] ],
+	[ "all", [ "==", "SCALERANK", 6 ], [ ">=", "$zoom", 6 ] ],
+	[ "all", [ "==", "SCALERANK", 7 ], [ ">=", "$zoom", 7 ] ],
+	[ "all", [ ">=", "SCALERANK", 8 ], [ ">=", "$zoom", 8 ] ]
+] }

File diff suppressed because one or more lines are too long
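The filter above admits each place only once the zoom level reaches its SCALERANK, so more places appear as you zoom in. Following the Makefile recipe earlier in this diff, a hypothetical run against your own extract (input and output names are placeholders) would be:

```sh
$ tippecanoe -r1 -z8 -f -o places.mbtiles ne_110m_populated_places.json
$ tile-join -J places-filter -f -o places-filter.mbtiles places.mbtiles
$ tippecanoe-decode places-filter.mbtiles
```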
tile-join.cpp | 12

@@ -172,6 +172,12 @@ void handle(std::string message, int z, unsigned x, unsigned y, std::map<std::st

 			attributes.insert(std::pair<std::string, mvt_value>("$type", v));

+			mvt_value v2;
+			v2.type = mvt_uint;
+			v2.numeric_value.uint_value = z;
+
+			attributes.insert(std::pair<std::string, mvt_value>("$zoom", v2));
+
 			if (!evaluate(attributes, layer.name, filter)) {
 				continue;
 			}
@@ -440,6 +446,12 @@ struct reader *begin_reading(char *fname) {
 		exit(EXIT_FAILURE);
 	}

+	char *err = NULL;
+	if (sqlite3_exec(db, "PRAGMA integrity_check;", NULL, NULL, &err) != SQLITE_OK) {
+		fprintf(stderr, "%s: integrity_check: %s\n", fname, err);
+		exit(EXIT_FAILURE);
+	}
+
 	const char *sql = "SELECT zoom_level, tile_column, tile_row, tile_data from tiles order by zoom_level, tile_column, tile_row;";
 	sqlite3_stmt *stmt;
tile.cpp | 129

@@ -38,6 +38,7 @@
 #include "main.hpp"
 #include "write_json.hpp"
 #include "milo/dtoa_milo.h"
+#include "evaluator.hpp"

 extern "C" {
 #include "jsonpull/jsonpull.h"
@@ -77,7 +78,7 @@ bool draws_something(drawvec &geom) {
 	return false;
 }

-static int metacmp(int m1, const std::vector<long long> &keys1, const std::vector<long long> &values1, char *stringpool1, int m2, const std::vector<long long> &keys2, const std::vector<long long> &values2, char *stringpool2);
+static int metacmp(const std::vector<long long> &keys1, const std::vector<long long> &values1, char *stringpool1, const std::vector<long long> &keys2, const std::vector<long long> &values2, char *stringpool2);
 int coalindexcmp(const struct coalesce *c1, const struct coalesce *c2);

 struct coalesce {
@@ -90,7 +91,6 @@ struct coalesce {
 	unsigned long long index = 0;
 	long long original_seq = 0;
 	int type = 0;
-	int m = 0;
 	bool coalesced = false;
 	double spacing = 0;
 	bool has_id = false;
@@ -134,7 +134,7 @@ int coalcmp(const void *v1, const void *v2) {
 		}
 	}

-	cmp = metacmp(c1->m, c1->keys, c1->values, c1->stringpool, c2->m, c2->keys, c2->values, c2->stringpool);
+	cmp = metacmp(c1->keys, c1->values, c1->stringpool, c2->keys, c2->values, c2->stringpool);
 	if (cmp != 0) {
 		return cmp;
 	}
@@ -247,9 +247,9 @@ size_t tag_object(mvt_layer &layer, json_object *j) {
 	return layer.tag_value(tv);
 }

-void decode_meta(int m, std::vector<long long> const &metakeys, std::vector<long long> const &metavals, char *stringpool, mvt_layer &layer, mvt_feature &feature) {
-	int i;
-	for (i = 0; i < m; i++) {
+void decode_meta(std::vector<long long> const &metakeys, std::vector<long long> const &metavals, char *stringpool, mvt_layer &layer, mvt_feature &feature) {
+	size_t i;
+	for (i = 0; i < metakeys.size(); i++) {
 		int otype;
 		mvt_value key = retrieve_string(metakeys[i], stringpool, NULL);
 		mvt_value value = retrieve_string(metavals[i], stringpool, &otype);
@@ -273,9 +273,9 @@ void decode_meta(int m, std::vector<long long> const &metakeys, std::vector<long
 	}
 }

-static int metacmp(int m1, const std::vector<long long> &keys1, const std::vector<long long> &values1, char *stringpool1, int m2, const std::vector<long long> &keys2, const std::vector<long long> &values2, char *stringpool2) {
-	int i;
-	for (i = 0; i < m1 && i < m2; i++) {
+static int metacmp(const std::vector<long long> &keys1, const std::vector<long long> &values1, char *stringpool1, const std::vector<long long> &keys2, const std::vector<long long> &values2, char *stringpool2) {
+	size_t i;
+	for (i = 0; i < keys1.size() && i < keys2.size(); i++) {
 		mvt_value key1 = retrieve_string(keys1[i], stringpool1, NULL);
 		mvt_value key2 = retrieve_string(keys2[i], stringpool2, NULL);

@@ -302,16 +302,16 @@ static int metacmp(int m1, const std::vector<long long> &keys1, const std::vecto
 		}
 	}

-	if (m1 < m2) {
+	if (keys1.size() < keys2.size()) {
 		return -1;
-	} else if (m1 > m2) {
+	} else if (keys1.size() > keys2.size()) {
 		return 1;
 	} else {
 		return 0;
 	}
 }

-void rewrite(drawvec &geom, int z, int nextzoom, int maxzoom, long long *bbox, unsigned tx, unsigned ty, int buffer, int *within, long long *geompos, FILE **geomfile, const char *fname, signed char t, int layer, long long metastart, signed char feature_minzoom, int child_shards, int max_zoom_increment, long long seq, int tippecanoe_minzoom, int tippecanoe_maxzoom, int segment, unsigned *initial_x, unsigned *initial_y, int m, std::vector<long long> &metakeys, std::vector<long long> &metavals, bool has_id, unsigned long long id, unsigned long long index, long long extent) {
+void rewrite(drawvec &geom, int z, int nextzoom, int maxzoom, long long *bbox, unsigned tx, unsigned ty, int buffer, int *within, long long *geompos, FILE **geomfile, const char *fname, signed char t, int layer, long long metastart, signed char feature_minzoom, int child_shards, int max_zoom_increment, long long seq, int tippecanoe_minzoom, int tippecanoe_maxzoom, int segment, unsigned *initial_x, unsigned *initial_y, std::vector<long long> &metakeys, std::vector<long long> &metavals, bool has_id, unsigned long long id, unsigned long long index, long long extent) {
 	if (geom.size() > 0 && (nextzoom <= maxzoom || additional[A_EXTEND_ZOOMS])) {
 		int xo, yo;
 		int span = 1 << (nextzoom - z);
@@ -402,11 +402,10 @@ void rewrite(drawvec &geom, int z, int nextzoom, int maxzoom, long long *bbox, u
 				sf.geometry = geom2;
 				sf.index = index;
 				sf.extent = extent;
-				sf.m = m;
 				sf.feature_minzoom = feature_minzoom;

 				if (metastart < 0) {
-					for (int i = 0; i < m; i++) {
+					for (size_t i = 0; i < metakeys.size(); i++) {
 						sf.keys.push_back(metakeys[i]);
 						sf.values.push_back(metavals[i]);
 					}
@@ -429,7 +428,6 @@ struct partial {
 	long long layer = 0;
 	long long original_seq = 0;
 	unsigned long long index = 0;
-	int m = 0;
 	int segment = 0;
 	bool reduced = 0;
 	int z = 0;
@@ -1278,6 +1276,7 @@ struct write_tile_args {
 	bool still_dropping = false;
 	int wrote_zoom = 0;
 	size_t tiling_seg = 0;
+	struct json_object *filter = NULL;
 };

 bool clip_to_tile(serial_feature &sf, int z, long long buffer) {
@@ -1356,7 +1355,7 @@ bool clip_to_tile(serial_feature &sf, int z, long long buffer) {
 	return false;
 }

-serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase, long long *meta_off, int z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y, long long *original_features, long long *unclipped_features, int nextzoom, int maxzoom, int minzoom, int max_zoom_increment, size_t pass, size_t passes, std::atomic<long long> *along, long long alongminus, int buffer, int *within, bool *first_time, FILE **geomfile, long long *geompos, std::atomic<double> *oprogress, double todo, const char *fname, int child_shards) {
+serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase, long long *meta_off, int z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y, long long *original_features, long long *unclipped_features, int nextzoom, int maxzoom, int minzoom, int max_zoom_increment, size_t pass, size_t passes, std::atomic<long long> *along, long long alongminus, int buffer, int *within, bool *first_time, FILE **geomfile, long long *geompos, std::atomic<double> *oprogress, double todo, const char *fname, int child_shards, struct json_object *filter, const char *stringpool, long long *pool_off, std::vector<std::vector<std::string>> *layer_unmaps) {
 	while (1) {
 		serial_feature sf = deserialize_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y);
 		if (sf.t < 0) {
@@ -1383,7 +1382,7 @@ serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase,

 		if (*first_time && pass == 1) { /* only write out the next zoom once, even if we retry */
 			if (sf.tippecanoe_maxzoom == -1 || sf.tippecanoe_maxzoom >= nextzoom) {
-				rewrite(sf.geometry, z, nextzoom, maxzoom, sf.bbox, tx, ty, buffer, within, geompos, geomfile, fname, sf.t, sf.layer, sf.metapos, sf.feature_minzoom, child_shards, max_zoom_increment, sf.seq, sf.tippecanoe_minzoom, sf.tippecanoe_maxzoom, sf.segment, initial_x, initial_y, sf.m, sf.keys, sf.values, sf.has_id, sf.id, sf.index, sf.extent);
+				rewrite(sf.geometry, z, nextzoom, maxzoom, sf.bbox, tx, ty, buffer, within, geompos, geomfile, fname, sf.t, sf.layer, sf.metapos, sf.feature_minzoom, child_shards, max_zoom_increment, sf.seq, sf.tippecanoe_minzoom, sf.tippecanoe_maxzoom, sf.segment, initial_x, initial_y, sf.keys, sf.values, sf.has_id, sf.id, sf.index, sf.extent);
 			}
 		}

@@ -1398,10 +1397,82 @@ serial_feature next_feature(FILE *geoms, long long *geompos_in, char *metabase,
 			continue;
 		}

+		if (filter != NULL) {
+			std::map<std::string, mvt_value> attributes;
+			std::string layername = (*layer_unmaps)[sf.segment][sf.layer];
+
+			for (size_t i = 0; i < sf.keys.size(); i++) {
+				std::string key = stringpool + pool_off[sf.segment] + sf.keys[i] + 1;
+
+				serial_val sv;
+				sv.type = (stringpool + pool_off[sf.segment])[sf.values[i]];
+				sv.s = stringpool + pool_off[sf.segment] + sf.values[i] + 1;
+
+				mvt_value val = stringified_to_mvt_value(sv.type, sv.s.c_str());
+				attributes.insert(std::pair<std::string, mvt_value>(key, val));
+			}
+
+			for (size_t i = 0; i < sf.full_keys.size(); i++) {
+				std::string key = sf.full_keys[i];
+				mvt_value val = stringified_to_mvt_value(sf.full_values[i].type, sf.full_values[i].s.c_str());
+
+				attributes.insert(std::pair<std::string, mvt_value>(key, val));
+			}
+
+			if (sf.has_id) {
+				mvt_value v;
+				v.type = mvt_uint;
+				v.numeric_value.uint_value = sf.id;
+
+				attributes.insert(std::pair<std::string, mvt_value>("$id", v));
+			}
+
+			mvt_value v;
+			v.type = mvt_string;
+
+			if (sf.t == mvt_point) {
+				v.string_value = "Point";
+			} else if (sf.t == mvt_linestring) {
+				v.string_value = "LineString";
+			} else if (sf.t == mvt_polygon) {
+				v.string_value = "Polygon";
+			}
+
+			attributes.insert(std::pair<std::string, mvt_value>("$type", v));
+
+			mvt_value v2;
+			v2.type = mvt_uint;
+			v2.numeric_value.uint_value = z;
+
+			attributes.insert(std::pair<std::string, mvt_value>("$zoom", v2));
+
+			if (!evaluate(attributes, layername, filter)) {
+				continue;
+			}
+		}
+
 		if (sf.tippecanoe_minzoom == -1 && z < sf.feature_minzoom) {
 			sf.dropped = true;
 		}

+		// Remove nulls, now that the filter has run
+
+		for (ssize_t i = sf.keys.size() - 1; i >= 0; i--) {
+			int type = (stringpool + pool_off[sf.segment])[sf.values[i]];
+
+			if (type == mvt_null) {
+				sf.keys.erase(sf.keys.begin() + i);
+				sf.values.erase(sf.values.begin() + i);
+			}
+		}
+
+		for (ssize_t i = (ssize_t) sf.full_keys.size() - 1; i >= 0; i--) {
+			if (sf.full_values[i].type == mvt_null) {
+				sf.full_keys.erase(sf.full_keys.begin() + i);
+				sf.full_values.erase(sf.full_values.begin() + i);
+			}
+		}
+
 		return sf;
 	}
 }
@@ -1439,6 +1510,7 @@ struct run_prefilter_args {
 	char *stringpool = NULL;
 	long long *pool_off = NULL;
 	FILE *prefilter_fp = NULL;
+	struct json_object *filter = NULL;
 };

 void *run_prefilter(void *v) {
@@ -1446,7 +1518,7 @@ void *run_prefilter(void *v) {
 	json_writer state(rpa->prefilter_fp);

 	while (1) {
-		serial_feature sf = next_feature(rpa->geoms, rpa->geompos_in, rpa->metabase, rpa->meta_off, rpa->z, rpa->tx, rpa->ty, rpa->initial_x, rpa->initial_y, rpa->original_features, rpa->unclipped_features, rpa->nextzoom, rpa->maxzoom, rpa->minzoom, rpa->max_zoom_increment, rpa->pass, rpa->passes, rpa->along, rpa->alongminus, rpa->buffer, rpa->within, rpa->first_time, rpa->geomfile, rpa->geompos, rpa->oprogress, rpa->todo, rpa->fname, rpa->child_shards);
+		serial_feature sf = next_feature(rpa->geoms, rpa->geompos_in, rpa->metabase, rpa->meta_off, rpa->z, rpa->tx, rpa->ty, rpa->initial_x, rpa->initial_y, rpa->original_features, rpa->unclipped_features, rpa->nextzoom, rpa->maxzoom, rpa->minzoom, rpa->max_zoom_increment, rpa->pass, rpa->passes, rpa->along, rpa->alongminus, rpa->buffer, rpa->within, rpa->first_time, rpa->geomfile, rpa->geompos, rpa->oprogress, rpa->todo, rpa->fname, rpa->child_shards, rpa->filter, rpa->stringpool, rpa->pool_off, rpa->layer_unmaps);
 		if (sf.t < 0) {
 			break;
 		}
@@ -1477,7 +1549,7 @@ void *run_prefilter(void *v) {
 			tmp_feature.geometry[i].y += sy;
 		}

-		decode_meta(sf.m, sf.keys, sf.values, rpa->stringpool + rpa->pool_off[sf.segment], tmp_layer, tmp_feature);
+		decode_meta(sf.keys, sf.values, rpa->stringpool + rpa->pool_off[sf.segment], tmp_layer, tmp_feature);
 		tmp_layer.features.push_back(tmp_feature);

 		layer_to_geojson(tmp_layer, 0, 0, 0, false, true, false, true, sf.index, sf.seq, sf.extent, true, state);
@@ -1539,7 +1611,7 @@ void preserve_attribute(attribute_op op, std::map<std::string, accum_state> &att
 	// If the feature being merged into has this key as a metadata reference,
 	// promote it to a full_key so it can be modified

-	for (int i = 0; i < p.m; i++) {
+	for (size_t i = 0; i < p.keys.size(); i++) {
 		if (strcmp(key.c_str(), stringpool + pool_off[p.segment] + p.keys[i] + 1) == 0) {
 			serial_val sv;
 			sv.s = stringpool + pool_off[p.segment] + p.values[i] + 1;
@@ -1550,7 +1622,6 @@ void preserve_attribute(attribute_op op, std::map<std::string, accum_state> &att

 			p.keys.erase(p.keys.begin() + i);
 			p.values.erase(p.values.begin() + i);
-			p.m--;

 			break;
 		}
@@ -1622,7 +1693,7 @@ void preserve_attribute(attribute_op op, std::map<std::string, accum_state> &att
 }

 void preserve_attributes(std::map<std::string, attribute_op> const *attribute_accum, std::map<std::string, accum_state> &attribute_accum_state, serial_feature &sf, char *stringpool, long long *pool_off, partial &p) {
-	for (size_t i = 0; i < sf.m; i++) {
+	for (size_t i = 0; i < sf.keys.size(); i++) {
 		std::string key = stringpool + pool_off[sf.segment] + sf.keys[i] + 1;

 		serial_val sv;
@@ -1661,7 +1732,7 @@ bool find_partial(std::vector<partial> &partials, serial_feature &sf, ssize_t &o
 	return false;
 }

-long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, std::atomic<long long> *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, std::atomic<int> *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t tiling_seg, size_t pass, size_t passes, unsigned long long mingap, long long minextent, double fraction, const char *prefilter, const char *postfilter, write_tile_args *arg) {
+long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, std::atomic<long long> *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, std::atomic<int> *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t tiling_seg, size_t pass, size_t passes, unsigned long long mingap, long long minextent, double fraction, const char *prefilter, const char *postfilter, struct json_object *filter, write_tile_args *arg) {
 	int line_detail;
 	double merge_fraction = 1;
 	double mingap_fraction = 1;
@@ -1777,6 +1848,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 		rpa.layer_unmaps = layer_unmaps;
 		rpa.stringpool = stringpool;
 		rpa.pool_off = pool_off;
+		rpa.filter = filter;

 		if (pthread_create(&prefilter_writer, NULL, run_prefilter, &rpa) != 0) {
 			perror("pthread_create (prefilter writer)");
@@ -1796,7 +1868,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 			ssize_t which_partial = -1;

 			if (prefilter == NULL) {
-				sf = next_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y, &original_features, &unclipped_features, nextzoom, maxzoom, minzoom, max_zoom_increment, pass, passes, along, alongminus, buffer, within, &first_time, geomfile, geompos, &oprogress, todo, fname, child_shards);
+				sf = next_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y, &original_features, &unclipped_features, nextzoom, maxzoom, minzoom, max_zoom_increment, pass, passes, along, alongminus, buffer, within, &first_time, geomfile, geompos, &oprogress, todo, fname, child_shards, filter, stringpool, pool_off, layer_unmaps);
 			} else {
 				sf = parse_feature(prefilter_jp, z, tx, ty, layermaps, tiling_seg, layer_unmaps, postfilter != NULL);
 			}
@@ -1904,7 +1976,6 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 				partial p;
 				p.geoms.push_back(sf.geometry);
 				p.layer = sf.layer;
-				p.m = sf.m;
 				p.t = sf.t;
 				p.segment = sf.segment;
 				p.original_seq = sf.seq;
@@ -2051,7 +2122,6 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 					pgeoms[j].clear();
 					c.coalesced = false;
 					c.original_seq = original_seq;
-					c.m = partials[i].m;
 					c.stringpool = stringpool + pool_off[partials[i].segment];
 					c.keys = partials[i].keys;
 					c.values = partials[i].values;
@@ -2178,7 +2248,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 				feature.id = layer_features[x].id;
 				feature.has_id = layer_features[x].has_id;

-				decode_meta(layer_features[x].m, layer_features[x].keys, layer_features[x].values, layer_features[x].stringpool, layer, feature);
+				decode_meta(layer_features[x].keys, layer_features[x].values, layer_features[x].stringpool, layer, feature);
 				for (size_t a = 0; a < layer_features[x].full_keys.size(); a++) {
 					serial_val sv = layer_features[x].full_values[a];
 					mvt_value v = stringified_to_mvt_value(sv.type, sv.s.c_str());
@@ -2479,7 +2549,7 @@ void *run_thread(void *vargs) {

 			// fprintf(stderr, "%d/%u/%u\n", z, x, y);

-			long long len = write_tile(geom, &geompos, arg->metabase, arg->stringpool, z, x, y, z == arg->maxzoom ? arg->full_detail : arg->low_detail, arg->min_detail, arg->outdb, arg->outdir, arg->buffer, arg->fname, arg->geomfile, arg->minzoom, arg->maxzoom, arg->todo, arg->along, geompos, arg->gamma, arg->child_shards, arg->meta_off, arg->pool_off, arg->initial_x, arg->initial_y, arg->running, arg->simplification, arg->layermaps, arg->layer_unmaps, arg->tiling_seg, arg->pass, arg->passes, arg->mingap, arg->minextent, arg->fraction, arg->prefilter, arg->postfilter, arg);
+			long long len = write_tile(geom, &geompos, arg->metabase, arg->stringpool, z, x, y, z == arg->maxzoom ? arg->full_detail : arg->low_detail, arg->min_detail, arg->outdb, arg->outdir, arg->buffer, arg->fname, arg->geomfile, arg->minzoom, arg->maxzoom, arg->todo, arg->along, geompos, arg->gamma, arg->child_shards, arg->meta_off, arg->pool_off, arg->initial_x, arg->initial_y, arg->running, arg->simplification, arg->layermaps, arg->layer_unmaps, arg->tiling_seg, arg->pass, arg->passes, arg->mingap, arg->minextent, arg->fraction, arg->prefilter, arg->postfilter, arg->filter, arg);

 			if (len < 0) {
 				int *err = &arg->err;
@@ -2544,7 +2614,7 @@ void *run_thread(void *vargs) {
 	return NULL;
 }

-int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, std::atomic<unsigned> *midx, std::atomic<unsigned> *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry>> &layermaps, const char *prefilter, const char *postfilter, std::map<std::string, attribute_op> const *attribute_accum) {
+int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, std::atomic<unsigned> *midx, std::atomic<unsigned> *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry>> &layermaps, const char *prefilter, const char *postfilter, std::map<std::string, attribute_op> const *attribute_accum, struct json_object *filter) {
 	last_progress = 0;

 	// The existing layermaps are one table per input thread.
@@ -2733,6 +2803,7 @@ int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpo
 			args[thread].prefilter = prefilter;
 			args[thread].postfilter = postfilter;
 			args[thread].attribute_accum = attribute_accum;
+			args[thread].filter = filter;

 			args[thread].tasks = dispatches[thread].tasks;
 			args[thread].running = &running;
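The big addition to next_feature() replicates what serial.cpp used to do at input time, plus the `$zoom` pseudo-attribute and null cleanup. Evaluating here, once per tile, is what makes `$zoom` usable: the tiler knows which zoom it is writing, while the input parser does not. Condensed to its core, and assuming the `evaluate()` signature implied by the calls above (this `feature_passes` helper is illustrative, not part of the patch):

```cpp
#include <map>
#include <string>

#include "mvt.hpp"
#include "evaluator.hpp"

extern "C" {
#include "jsonpull/jsonpull.h"
}

// Sketch of the filter contract: add the $zoom pseudo-attribute for the
// tile being written, then let the GL-style filter decide. The attributes
// map is assumed to already hold the feature's own key/value pairs
// (and $id/$type, as in the diff above).
bool feature_passes(std::map<std::string, mvt_value> attributes,
		    std::string const &layername, json_object *filter, int z) {
	mvt_value zoom;
	zoom.type = mvt_uint;
	zoom.numeric_value.uint_value = z;
	attributes.insert(std::pair<std::string, mvt_value>("$zoom", zoom));

	return evaluate(attributes, layername, filter);
}
```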
tile.hpp | 3

@@ -7,6 +7,7 @@
 #include <atomic>
 #include <map>
 #include "mbtiles.hpp"
+#include "jsonpull/jsonpull.h"

 enum attribute_op {
 	op_sum,
@@ -20,7 +21,7 @@ enum attribute_op {

 long long write_tile(char **geom, char *metabase, char *stringpool, unsigned *file_bbox, int z, unsigned x, unsigned y, int detail, int min_detail, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, FILE **geomfile, int file_minzoom, int file_maxzoom, double todo, char *geomstart, long long along, double gamma, int nlayers);

-int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, std::atomic<unsigned> *midx, std::atomic<unsigned> *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry> > &layermap, const char *prefilter, const char *postfilter, std::map<std::string, attribute_op> const *attribute_accum);
+int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, std::atomic<unsigned> *midx, std::atomic<unsigned> *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry> > &layermap, const char *prefilter, const char *postfilter, std::map<std::string, attribute_op> const *attribute_accum, struct json_object *filter);

 int manage_gap(unsigned long long index, unsigned long long *previndex, double scale, double gamma, double *gap);
version.hpp | 2

@@ -1,6 +1,6 @@
 #ifndef VERSION_HPP
 #define VERSION_HPP

-#define VERSION "tippecanoe v1.27.11\n"
+#define VERSION "tippecanoe v1.27.13\n"

 #endif