Mirror of https://github.com/mapbox/tippecanoe.git (synced 2025-02-01 08:47:57 +00:00)
Fix dangling pointer. Defer tilestats generation until tiling if filtering.
commit 635429cd87 · parent eebc8f7639
geojson.cpp (18 changed lines)
@@ -95,7 +95,7 @@ static long long parse_geometry1(int t, json_object *j, long long *bbox, drawvec
 	return geom.size();
 }
 
-int serialize_geometry(json_object *geometry, json_object *properties, json_object *id, const char *reading, int line, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, const char *fname, int basezoom, int layer, double droprate, long long *file_bbox, json_object *tippecanoe, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, json_object *feature, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist) {
+int serialize_geometry(json_object *geometry, json_object *properties, json_object *id, const char *reading, int line, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, const char *fname, int basezoom, int layer, double droprate, long long *file_bbox, json_object *tippecanoe, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, json_object *feature, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
 	json_object *geometry_type = json_hash_get(geometry, "type");
 	if (geometry_type == NULL) {
 		static int warned = 0;

@@ -271,8 +271,10 @@ int serialize_geometry(json_object *geometry, json_object *properties, json_obje
 				attrib.type = metatype[m - 1];
 				attrib.string = metaval[m - 1];
 
-				auto fk = layermap->find(layername);
-				add_to_file_keys(fk->second.file_keys, metakey[m - 1], attrib);
+				if (!filters) {
+					auto fk = layermap->find(layername);
+					add_to_file_keys(fk->second.file_keys, metakey[m - 1], attrib);
+				}
 			}
 		}
 	}

@@ -466,7 +468,7 @@ void check_crs(json_object *j, const char *reading) {
 	}
 }
 
-void parse_json(json_pull *jp, const char *reading, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int basezoom, int layer, double droprate, long long *file_bbox, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist) {
+void parse_json(json_pull *jp, const char *reading, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int basezoom, int layer, double droprate, long long *file_bbox, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
 	long long found_hashes = 0;
 	long long found_features = 0;
 	long long found_geometries = 0;

@@ -534,7 +536,7 @@ void parse_json(json_pull *jp, const char *reading, volatile long long *layer_se
 			}
 			found_geometries++;
 
-			serialize_geometry(j, NULL, NULL, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, NULL, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist);
+			serialize_geometry(j, NULL, NULL, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, NULL, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist, filters);
 			json_free(j);
 			continue;
 		}

@@ -577,10 +579,10 @@ void parse_json(json_pull *jp, const char *reading, volatile long long *layer_se
 		if (geometries != NULL) {
 			size_t g;
 			for (g = 0; g < geometries->length; g++) {
-				serialize_geometry(geometries->array[g], properties, id, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, tippecanoe, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist);
+				serialize_geometry(geometries->array[g], properties, id, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, tippecanoe, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist, filters);
 			}
 		} else {
-			serialize_geometry(geometry, properties, id, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, tippecanoe, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist);
+			serialize_geometry(geometry, properties, id, reading, jp->line, layer_seq, progress_seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, basezoom, layer, droprate, file_bbox, tippecanoe, segment, initialized, initial_x, initial_y, readers, maxzoom, j, layermap, layername, uses_gamma, attribute_types, dist_sum, dist_count, want_dist, filters);
 		}
 
 		json_free(j);

@@ -592,7 +594,7 @@ void parse_json(json_pull *jp, const char *reading, volatile long long *layer_se
 void *run_parse_json(void *v) {
 	struct parse_json_args *pja = (struct parse_json_args *) v;
 
-	parse_json(pja->jp, pja->reading, pja->layer_seq, pja->progress_seq, pja->metapos, pja->geompos, pja->indexpos, pja->exclude, pja->include, pja->exclude_all, pja->metafile, pja->geomfile, pja->indexfile, pja->poolfile, pja->treefile, pja->fname, pja->basezoom, pja->layer, pja->droprate, pja->file_bbox, pja->segment, pja->initialized, pja->initial_x, pja->initial_y, pja->readers, pja->maxzoom, pja->layermap, *pja->layername, pja->uses_gamma, pja->attribute_types, pja->dist_sum, pja->dist_count, pja->want_dist);
+	parse_json(pja->jp, pja->reading, pja->layer_seq, pja->progress_seq, pja->metapos, pja->geompos, pja->indexpos, pja->exclude, pja->include, pja->exclude_all, pja->metafile, pja->geomfile, pja->indexfile, pja->poolfile, pja->treefile, pja->fname, pja->basezoom, pja->layer, pja->droprate, pja->file_bbox, pja->segment, pja->initialized, pja->initial_x, pja->initial_y, pja->readers, pja->maxzoom, pja->layermap, *pja->layername, pja->uses_gamma, pja->attribute_types, pja->dist_sum, pja->dist_count, pja->want_dist, pja->filters);
 
 	return NULL;
 }

@@ -42,12 +42,13 @@ struct parse_json_args {
 	double *dist_sum;
 	size_t *dist_count;
 	bool want_dist;
+	bool filters;
 };
 
 struct json_pull *json_begin_map(char *map, long long len);
 void json_end_map(struct json_pull *jp);
 
-void parse_json(json_pull *jp, const char *reading, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int basezoom, int layer, double droprate, long long *file_bbox, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist);
+void parse_json(json_pull *jp, const char *reading, volatile long long *layer_seq, volatile long long *progress_seq, long long *metapos, long long *geompos, long long *indexpos, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int basezoom, int layer, double droprate, long long *file_bbox, int segment, int *initialized, unsigned *initial_x, unsigned *initial_y, struct reader *readers, int maxzoom, std::map<std::string, layermap_entry> *layermap, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, double *dist_sum, size_t *dist_count, bool want_dist, bool filters);
 void *run_parse_json(void *v);
 
 #endif
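The geojson.cpp changes above thread a new `filters` flag into serialization and skip `add_to_file_keys` when it is set: if a prefilter or postfilter will run during tiling, the attributes that actually reach the tiles may differ from the input, so tilestats are collected later, from the filter output. A minimal sketch of that guard, using hypothetical simplified types (`layer_stats`, `attribute_stat`, `add_stat`) rather than tippecanoe's real structures:

// Minimal sketch of the deferral, with hypothetical simplified types
// (layer_stats, attribute_stat, add_stat), not tippecanoe's real structures.
#include <map>
#include <string>

struct attribute_stat {
	int type;
	std::string value;
};

struct layer_stats {
	std::map<std::string, attribute_stat> file_keys;  // per-attribute tilestats
};

static void add_stat(layer_stats &ls, const std::string &key, const attribute_stat &a) {
	ls.file_keys[key] = a;  // stand-in for add_to_file_keys()
}

void record_attribute(std::map<std::string, layer_stats> &layermap, const std::string &layername,
		      const std::string &key, const attribute_stat &attrib, bool filters) {
	if (!filters) {
		// No prefilter/postfilter: the attribute reaches the tile unchanged,
		// so it is safe to record tilestats while reading the input.
		auto fk = layermap.find(layername);
		if (fk != layermap.end()) {
			add_stat(fk->second, key, attrib);
		}
	}
	// Otherwise do nothing here: stats are gathered during tiling,
	// from the attributes the filters actually emit.
}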
main.cpp (17 changed lines)
@@ -373,7 +373,7 @@ void *run_sort(void *v) {
 	return NULL;
 }
 
-void do_read_parallel(char *map, long long len, long long initial_offset, const char *reading, struct reader *reader, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > *layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist) {
+void do_read_parallel(char *map, long long len, long long initial_offset, const char *reading, struct reader *reader, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > *layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
 	long long segs[CPUS + 1];
 	segs[0] = 0;
 	segs[CPUS] = len;

@@ -439,6 +439,7 @@ void do_read_parallel(char *map, long long len, long long initial_offset, const
 		pja[i].dist_sum = &(dist_sums[i]);
 		pja[i].dist_count = &(dist_counts[i]);
 		pja[i].want_dist = want_dist;
+		pja[i].filters = filters;
 
 		if (pthread_create(&pthreads[i], NULL, run_parse_json, &pja[i]) != 0) {
 			perror("pthread_create");

@@ -490,6 +491,7 @@ struct read_parallel_arg {
 	double *dist_sum;
 	size_t *dist_count;
 	bool want_dist;
+	bool filters;
 };
 
 void *run_read_parallel(void *v) {

@@ -511,7 +513,7 @@ void *run_read_parallel(void *v) {
 	}
 	madvise(map, rpa->len, MADV_RANDOM);  // sequential, but from several pointers at once
 
-	do_read_parallel(map, rpa->len, rpa->offset, rpa->reading, rpa->reader, rpa->progress_seq, rpa->exclude, rpa->include, rpa->exclude_all, rpa->fname, rpa->basezoom, rpa->source, rpa->nlayers, rpa->layermaps, rpa->droprate, rpa->initialized, rpa->initial_x, rpa->initial_y, rpa->maxzoom, rpa->layername, rpa->uses_gamma, rpa->attribute_types, rpa->separator, rpa->dist_sum, rpa->dist_count, rpa->want_dist);
+	do_read_parallel(map, rpa->len, rpa->offset, rpa->reading, rpa->reader, rpa->progress_seq, rpa->exclude, rpa->include, rpa->exclude_all, rpa->fname, rpa->basezoom, rpa->source, rpa->nlayers, rpa->layermaps, rpa->droprate, rpa->initialized, rpa->initial_x, rpa->initial_y, rpa->maxzoom, rpa->layername, rpa->uses_gamma, rpa->attribute_types, rpa->separator, rpa->dist_sum, rpa->dist_count, rpa->want_dist, rpa->filters);
 
 	madvise(map, rpa->len, MADV_DONTNEED);
 	if (munmap(map, rpa->len) != 0) {

@@ -528,7 +530,7 @@ void *run_read_parallel(void *v) {
 	return NULL;
 }
 
-void start_parsing(int fd, FILE *fp, long long offset, long long len, volatile int *is_parsing, pthread_t *parallel_parser, bool &parser_created, const char *reading, struct reader *reader, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > &layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist) {
+void start_parsing(int fd, FILE *fp, long long offset, long long len, volatile int *is_parsing, pthread_t *parallel_parser, bool &parser_created, const char *reading, struct reader *reader, volatile long long *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, char *fname, int basezoom, int source, int nlayers, std::vector<std::map<std::string, layermap_entry> > &layermaps, double droprate, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
 	// This has to kick off an intermediate thread to start the parser threads,
 	// so the main thread can get back to reading the next input stage while
 	// the intermediate thread waits for the completion of the parser threads.

@@ -570,6 +572,7 @@ void start_parsing(int fd, FILE *fp, long long offset, long long len, volatile i
 	rpa->dist_sum = dist_sum;
 	rpa->dist_count = dist_count;
 	rpa->want_dist = want_dist;
+	rpa->filters = filters;
 
 	if (pthread_create(parallel_parser, NULL, run_read_parallel, rpa) != 0) {
 		perror("pthread_create");

@@ -1257,7 +1260,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
 			}
 
 			if (map != NULL && map != MAP_FAILED && read_parallel_this) {
-				do_read_parallel(map, st.st_size - off, overall_offset, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, &layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, uses_gamma, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom);
+				do_read_parallel(map, st.st_size - off, overall_offset, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, &layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, uses_gamma, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
 				overall_offset += st.st_size - off;
 				checkdisk(reader, CPUS);
 

@@ -1333,7 +1336,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
 					}
 
 					fflush(readfp);
-					start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, &parallel_parser, parser_created, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom);
+					start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, &parallel_parser, parser_created, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
 
 					initial_offset += ahead;
 					overall_offset += ahead;

@@ -1370,7 +1373,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
 				fflush(readfp);
 
 				if (ahead > 0) {
-					start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, &parallel_parser, parser_created, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom);
+					start_parsing(readfd, readfp, initial_offset, ahead, &is_parsing, &parallel_parser, parser_created, reading.c_str(), reader, &progress_seq, exclude, include, exclude_all, fname, basezoom, layer, nlayers, layermaps, droprate, initialized, initial_x, initial_y, maxzoom, sources[layer].layer, gamma != 0, attribute_types, read_parallel_this, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
 
 					if (parser_created) {
 						if (pthread_join(parallel_parser, NULL) != 0) {

@@ -1387,7 +1390,7 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
 
 			long long layer_seq = overall_offset;
 			json_pull *jp = json_begin_file(fp);
-			parse_json(jp, reading.c_str(), &layer_seq, &progress_seq, &reader[0].metapos, &reader[0].geompos, &reader[0].indexpos, exclude, include, exclude_all, reader[0].metafile, reader[0].geomfile, reader[0].indexfile, reader[0].poolfile, reader[0].treefile, fname, basezoom, layer, droprate, reader[0].file_bbox, 0, &initialized[0], &initial_x[0], &initial_y[0], reader, maxzoom, &layermaps[0], sources[layer].layer, uses_gamma, attribute_types, &dist_sum, &dist_count, guess_maxzoom);
+			parse_json(jp, reading.c_str(), &layer_seq, &progress_seq, &reader[0].metapos, &reader[0].geompos, &reader[0].indexpos, exclude, include, exclude_all, reader[0].metafile, reader[0].geomfile, reader[0].indexfile, reader[0].poolfile, reader[0].treefile, fname, basezoom, layer, droprate, reader[0].file_bbox, 0, &initialized[0], &initial_x[0], &initial_y[0], reader, maxzoom, &layermaps[0], sources[layer].layer, uses_gamma, attribute_types, &dist_sum, &dist_count, guess_maxzoom, prefilter != NULL || postfilter != NULL);
 			json_end(jp);
 			overall_offset = layer_seq;
 			checkdisk(reader, CPUS);
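In main.cpp the flag is computed once per input source as `prefilter != NULL || postfilter != NULL` and copied into the per-thread argument structs before the parser threads start. A sketch of that plumbing under simplified, hypothetical names (`parse_args`, `run_parse`, `launch_parsers`); only the flag handling mirrors the diff:

// Sketch of the flag plumbing, with hypothetical simplified names
// (parse_args, run_parse, launch_parsers), not the real main.cpp structures.
#include <pthread.h>
#include <cstddef>
#include <cstdio>

struct parse_args {
	bool want_dist;
	bool filters;  // true when a prefilter or postfilter will run during tiling
};

static void *run_parse(void *v) {
	parse_args *a = (parse_args *) v;
	// ... parse one segment of the input; skip per-attribute stats if a->filters ...
	(void) a;
	return NULL;
}

void launch_parsers(parse_args *pja, pthread_t *threads, size_t n,
		    const char *prefilter, const char *postfilter, bool want_dist) {
	// Decide once, from the command-line options, and copy the decision
	// into every per-thread argument struct.
	bool filters = (prefilter != NULL) || (postfilter != NULL);
	for (size_t i = 0; i < n; i++) {
		pja[i].want_dist = want_dist;
		pja[i].filters = filters;
		if (pthread_create(&threads[i], NULL, run_parse, &pja[i]) != 0) {
			perror("pthread_create");
		}
	}
}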
plugin.cpp (12 changed lines)
@@ -82,6 +82,7 @@ static std::vector<mvt_geometry> to_feature(drawvec &geom) {
 	return out;
 }
 
+// Reads from the postfilter
 std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps, int extent) {
 	std::map<std::string, mvt_layer> ret;
 

@@ -277,7 +278,8 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
 	return final;
 }
 
-serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps) {
+// Reads from the prefilter
+serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps, bool postfilter) {
 	serial_feature sf;
 
 	while (1) {

@@ -470,7 +472,9 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
 				attrib.string = v.s;
 				attrib.type = v.type;
 
-				add_to_file_keys(fk->second.file_keys, std::string(properties->keys[i]->string), attrib);
+				if (!postfilter) {
+					add_to_file_keys(fk->second.file_keys, std::string(properties->keys[i]->string), attrib);
+				}
 			}
 		}
 

@@ -597,7 +601,7 @@ std::vector<mvt_layer> filter_layers(const char *filter, std::vector<mvt_layer>
 		exit(EXIT_FAILURE);
 	}
 
-	layers = parse_layers(read_from, z, x, y, layermaps, tiling_seg, layer_unmaps, extent);
+	std::vector<mvt_layer> nlayers = parse_layers(read_from, z, x, y, layermaps, tiling_seg, layer_unmaps, extent);
 
 	while (1) {
 		int stat_loc;

@@ -616,5 +620,5 @@ std::vector<mvt_layer> filter_layers(const char *filter, std::vector<mvt_layer>
 		exit(EXIT_FAILURE);
 	}
 
-	return layers;
+	return nlayers;
 }

@@ -1,3 +1,3 @@
 std::vector<mvt_layer> filter_layers(const char *filter, std::vector<mvt_layer> &layer, unsigned z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps, int extent);
 void setup_filter(const char *filter, int *write_to, int *read_from, pid_t *pid, unsigned z, unsigned x, unsigned y);
-serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps);
+serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::vector<std::map<std::string, layermap_entry>> *layermaps, size_t tiling_seg, std::vector<std::vector<std::string>> *layer_unmaps, bool filters);
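The filter_layers change is the dangling-pointer fix named in the commit message: instead of overwriting `layers` with the postfilter's output and returning it, the output now goes into a new local `nlayers`, leaving the original vector untouched for anything that still points into it. A small, hypothetical illustration of the hazard being avoided (not the real plugin code):

// Hypothetical illustration (not the real plugin code): reassigning a vector
// destroys its old elements, so pointers or iterators taken into it dangle.
#include <cstdio>
#include <string>
#include <vector>

int main() {
	std::vector<std::string> layers = {"water", "roads"};
	const std::string *first = &layers[0];  // something else still holds this

	// Unsafe shape: layers = filtered_output(); would free the old elements
	// while `first` (or another thread writing them out) still uses them.

	// Safe shape, as in the fix: build the result in a new vector and return
	// that, leaving `layers` and `first` intact until everyone is done.
	std::vector<std::string> nlayers = {"renamed"};

	printf("%s -> %s\n", first->c_str(), nlayers[0].c_str());
	return 0;
}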
tests/filter/rename (new executable file, 3 lines)
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+sed 's/"layer": "[^"]*"/"layer": "renamed"/'
(File diff suppressed because it is too large.)
tile.cpp (2 changed lines)
@@ -1532,7 +1532,7 @@ long long write_tile(FILE *geoms, long long *geompos_in, char *metabase, char *s
 			if (prefilter == NULL) {
 				sf = next_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y, &original_features, &unclipped_features, nextzoom, maxzoom, minzoom, max_zoom_increment, pass, passes, along, alongminus, buffer, within, &first_time, line_detail, geomfile, geompos, &oprogress, todo, fname, child_shards);
 			} else {
-				sf = parse_feature(prefilter_jp, z, tx, ty, layermaps, tiling_seg, layer_unmaps);
+				sf = parse_feature(prefilter_jp, z, tx, ty, layermaps, tiling_seg, layer_unmaps, postfilter != NULL);
 			}
 
 			if (sf.t < 0) {
@@ -96,11 +96,11 @@ void layer_to_geojson(FILE *fp, mvt_layer const &layer, unsigned z, unsigned x,
 			}
 
 			if (feat.tags[t] >= layer.keys.size()) {
-				fprintf(stderr, "Error: out of bounds feature key\n");
+				fprintf(stderr, "Error: out of bounds feature key (%u in %zu)\n", feat.tags[t], layer.keys.size());
 				exit(EXIT_FAILURE);
 			}
 			if (feat.tags[t + 1] >= layer.values.size()) {
-				fprintf(stderr, "Error: out of bounds feature value\n");
+				fprintf(stderr, "Error: out of bounds feature value (%u in %zu)\n", feat.tags[t + 1], layer.values.size());
 				exit(EXIT_FAILURE);
 			}
 
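The layer_to_geojson hunk makes the bounds-check messages actionable by printing the offending tag index and the size of the key or value table (`%u` for the index, `%zu` for the `size()`). A self-contained sketch of the same diagnostic pattern, with a hypothetical helper and made-up data:

// Sketch of the improved diagnostic, with a hypothetical checked_get helper;
// in the real code the index comes from feat.tags and the size from
// layer.keys / layer.values.
#include <cstdio>
#include <cstdlib>
#include <string>
#include <vector>

static const std::string &checked_get(const std::vector<std::string> &v, unsigned i, const char *what) {
	if (i >= v.size()) {
		// %u for the 32-bit tag index, %zu for the container size, so the
		// message says which index fell outside which range.
		fprintf(stderr, "Error: out of bounds feature %s (%u in %zu)\n", what, i, v.size());
		exit(EXIT_FAILURE);
	}
	return v[i];
}

int main() {
	std::vector<std::string> keys = {"name", "class"};
	printf("%s\n", checked_get(keys, 1, "key").c_str());
	// checked_get(keys, 7, "key") would print: Error: out of bounds feature key (7 in 2)
	return 0;
}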