Mirror of https://github.com/mapbox/tippecanoe.git (synced 2025-04-27)
Merge pull request #108 from mapbox/layer-name
Merge input files into one layer if a layer name is specified

commit 321c12da5e

README.md

@@ -54,7 +54,8 @@ Options
 
 ### Naming
 
-* -l _name_: Layer name (default "file" if source is file.json or output is file.mbtiles). Only works if there is only one layer.
+* -l _name_: Layer name (default "file" if source is file.json or output is file.mbtiles). If there are multiple input files
+specified, the files are all merged into the single named layer.
 * -n _name_: Human-readable name (default file.json)
 
 ### File control
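
For illustration (not from the diff): with this change, an invocation along the following lines puts the features of both input files into a single layer named "parks". The file and layer names here are made up, and -o is tippecanoe's usual output option:

    tippecanoe -o parks.mbtiles -l parks parks-north.json parks-south.json

Previously -l was honored only when there was a single input layer; now all inputs are merged into the one named layer.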

decode.cc (20 lines changed)

@@ -109,6 +109,20 @@ void handle(std::string message, int z, unsigned x, unsigned y, int describe) {
 		mapnik::vector::tile_layer layer = tile.layers(l);
 		int extent = layer.extent();
 
+		if (describe) {
+			if (l != 0) {
+				printf(",\n");
+			}
+
+			printf("{ \"type\": \"FeatureCollection\"");
+			printf(", \"properties\": { \"layer\": ");
+			printq(layer.name().c_str());
+			printf(" }");
+			printf(", \"features\": [\n");
+
+			within = 0;
+		}
+
 		for (int f = 0; f < layer.features_size(); f++) {
 			mapnik::vector::tile_feature feat = layer.features(f);
 			int px = 0, py = 0;

@@ -260,7 +274,7 @@ void handle(std::string message, int z, unsigned x, unsigned y, int describe) {
 				}
 
 				areas[i] = area;
-				if (areas[i] <= 0) {
+				if (areas[i] <= 0 || i == 0) {
 					outer++;
 				}
 

@@ -316,6 +330,10 @@ void handle(std::string message, int z, unsigned x, unsigned y, int describe) {
 
 			printf(" } }\n");
 		}
+
+		if (describe) {
+			printf("] }\n");
+		}
 	}
 
 	printf("] }\n");
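
The describe-mode hunks above wrap each layer of a decoded tile in its own GeoJSON FeatureCollection, labeled with the layer name and separated by commas. A minimal sketch of the resulting output shape (illustrative only: the layer names are invented and printq() is simplified to a plain quoted string):

#include <stdio.h>

int main(void) {
	/* Stand-ins for the layers of one decoded tile. */
	const char *layernames[] = {"roads", "parks"};
	int nlayers = 2;

	for (int l = 0; l < nlayers; l++) {
		if (l != 0) {
			printf(",\n");
		}

		/* Same framing as the describe branch added to handle(). */
		printf("{ \"type\": \"FeatureCollection\"");
		printf(", \"properties\": { \"layer\": \"%s\" }", layernames[l]);
		printf(", \"features\": [\n");

		/* ...the features of this layer would be printed here... */

		printf("] }\n");
	}

	return 0;
}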

geojson.c (257 lines changed)

@@ -598,6 +598,118 @@ int serialize_geometry(json_object *geometry, json_object *properties, const cha
 	return 1;
 }
 
+void parse_json(json_pull *jp, const char *reading, long long *seq, long long *metapos, long long *geompos, long long *indexpos, struct pool *exclude, struct pool *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int maxzoom, int layer, double droprate, unsigned *file_bbox) {
+	long long found_hashes = 0;
+	long long found_features = 0;
+	long long found_geometries = 0;
+
+	while (1) {
+		json_object *j = json_read(jp);
+		if (j == NULL) {
+			if (jp->error != NULL) {
+				fprintf(stderr, "%s:%d: %s\n", reading, jp->line, jp->error);
+			}
+
+			json_free(jp->root);
+			break;
+		}
+
+		if (j->type == JSON_HASH) {
+			found_hashes++;
+
+			if (found_hashes == 50 && found_features == 0 && found_geometries == 0) {
+				fprintf(stderr, "%s:%d: Warning: not finding any GeoJSON features or geometries in input yet after 50 objects.\n", reading, jp->line);
+			}
+		}
+
+		json_object *type = json_hash_get(j, "type");
+		if (type == NULL || type->type != JSON_STRING) {
+			continue;
+		}
+
+		if (found_features == 0) {
+			int i;
+			int is_geometry = 0;
+			for (i = 0; i < GEOM_TYPES; i++) {
+				if (strcmp(type->string, geometry_names[i]) == 0) {
+					is_geometry = 1;
+					break;
+				}
+			}
+
+			if (is_geometry) {
+				if (j->parent != NULL) {
+					if (j->parent->type == JSON_ARRAY) {
+						if (j->parent->parent->type == JSON_HASH) {
+							json_object *geometries = json_hash_get(j->parent->parent, "geometries");
+							if (geometries != NULL) {
+								// Parent of Parent must be a GeometryCollection
+								is_geometry = 0;
+							}
+						}
+					} else if (j->parent->type == JSON_HASH) {
+						json_object *geometry = json_hash_get(j->parent, "geometry");
+						if (geometry != NULL) {
+							// Parent must be a Feature
+							is_geometry = 0;
+						}
+					}
+				}
+			}
+
+			if (is_geometry) {
+				if (found_features != 0 && found_geometries == 0) {
+					fprintf(stderr, "%s:%d: Warning: found a mixture of features and bare geometries\n", reading, jp->line);
+				}
+				found_geometries++;
+
+				serialize_geometry(j, NULL, reading, jp, seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox, NULL);
+				json_free(j);
+				continue;
+			}
+		}
+
+		if (strcmp(type->string, "Feature") != 0) {
+			continue;
+		}
+
+		if (found_features == 0 && found_geometries != 0) {
+			fprintf(stderr, "%s:%d: Warning: found a mixture of features and bare geometries\n", reading, jp->line);
+		}
+		found_features++;
+
+		json_object *geometry = json_hash_get(j, "geometry");
+		if (geometry == NULL) {
+			fprintf(stderr, "%s:%d: feature with no geometry\n", reading, jp->line);
+			json_free(j);
+			continue;
+		}
+
+		json_object *properties = json_hash_get(j, "properties");
+		if (properties == NULL || (properties->type != JSON_HASH && properties->type != JSON_NULL)) {
+			fprintf(stderr, "%s:%d: feature without properties hash\n", reading, jp->line);
+			json_free(j);
+			continue;
+		}
+
+		json_object *tippecanoe = json_hash_get(j, "tippecanoe");
+
+		json_object *geometries = json_hash_get(geometry, "geometries");
+		if (geometries != NULL) {
+			int g;
+			for (g = 0; g < geometries->length; g++) {
+				serialize_geometry(geometries->array[g], properties, reading, jp, seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox, tippecanoe);
+			}
+		} else {
+			serialize_geometry(geometry, properties, reading, jp, seq, metapos, geompos, indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox, tippecanoe);
+		}
+
+		json_free(j);
+
+		/* XXX check for any non-features in the outer object */
+	}
+}
+
 int read_json(int argc, char **argv, char *fname, const char *layername, int maxzoom, int minzoom, sqlite3 *outdb, struct pool *exclude, struct pool *include, int exclude_all, double droprate, int buffer, const char *tmpdir, double gamma, char *prevent, char *additional) {
 	int ret = EXIT_SUCCESS;
 

@@ -684,140 +796,50 @@ int read_json(int argc, char **argv, char *fname, const char *layername, int max
 	unsigned midx = 0, midy = 0;
 	long long seq = 0;
 
-	int nlayers = argc;
-	if (nlayers == 0) {
+	int nlayers;
+	if (layername != NULL) {
 		nlayers = 1;
+	} else {
+		nlayers = argc;
+		if (nlayers == 0) {
+			nlayers = 1;
+		}
 	}
 
-	int layer;
-	for (layer = 0; layer < nlayers; layer++) {
+	int nsources = argc;
+	if (nsources == 0) {
+		nsources = 1;
+	}
+
+	int source;
+	for (source = 0; source < nsources; source++) {
 		json_pull *jp;
 		const char *reading;
 		FILE *fp;
-		long long found_hashes = 0;
-		long long found_features = 0;
-		long long found_geometries = 0;
 
-		if (layer >= argc) {
+		if (source >= argc) {
 			reading = "standard input";
 			fp = stdin;
 		} else {
-			reading = argv[layer];
-			fp = fopen(argv[layer], "r");
+			reading = argv[source];
+			fp = fopen(argv[source], "r");
 			if (fp == NULL) {
-				perror(argv[layer]);
+				perror(argv[source]);
 				continue;
 			}
 		}
 
 		jp = json_begin_file(fp);
 
-		while (1) {
-			json_object *j = json_read(jp);
-			if (j == NULL) {
-				if (jp->error != NULL) {
-					fprintf(stderr, "%s:%d: %s\n", reading, jp->line, jp->error);
-				}
-
-				json_free(jp->root);
-				break;
-			}
-
-			if (j->type == JSON_HASH) {
-				found_hashes++;
-
-				if (found_hashes == 50 && found_features == 0 && found_geometries == 0) {
-					fprintf(stderr, "%s:%d: Warning: not finding any GeoJSON features or geometries in input yet after 50 objects.\n", reading, jp->line);
-				}
-			}
-
-			json_object *type = json_hash_get(j, "type");
-			if (type == NULL || type->type != JSON_STRING) {
-				continue;
-			}
-
-			if (found_features == 0) {
-				int i;
-				int is_geometry = 0;
-				for (i = 0; i < GEOM_TYPES; i++) {
-					if (strcmp(type->string, geometry_names[i]) == 0) {
-						is_geometry = 1;
-						break;
-					}
-				}
-
-				if (is_geometry) {
-					if (j->parent != NULL) {
-						if (j->parent->type == JSON_ARRAY) {
-							if (j->parent->parent->type == JSON_HASH) {
-								json_object *geometries = json_hash_get(j->parent->parent, "geometries");
-								if (geometries != NULL) {
-									// Parent of Parent must be a GeometryCollection
-									is_geometry = 0;
-								}
-							}
-						} else if (j->parent->type == JSON_HASH) {
-							json_object *geometry = json_hash_get(j->parent, "geometry");
-							if (geometry != NULL) {
-								// Parent must be a Feature
-								is_geometry = 0;
-							}
-						}
-					}
-				}
-
-				if (is_geometry) {
-					if (found_features != 0 && found_geometries == 0) {
-						fprintf(stderr, "%s:%d: Warning: found a mixture of features and bare geometries\n", reading, jp->line);
-					}
-					found_geometries++;
-
-					serialize_geometry(j, NULL, reading, jp, &seq, &metapos, &geompos, &indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox, NULL);
-					json_free(j);
-					continue;
-				}
-			}
-
-			if (strcmp(type->string, "Feature") != 0) {
-				continue;
-			}
-
-			if (found_features == 0 && found_geometries != 0) {
-				fprintf(stderr, "%s:%d: Warning: found a mixture of features and bare geometries\n", reading, jp->line);
-			}
-			found_features++;
-
-			json_object *geometry = json_hash_get(j, "geometry");
-			if (geometry == NULL) {
-				fprintf(stderr, "%s:%d: feature with no geometry\n", reading, jp->line);
-				json_free(j);
-				continue;
-			}
-
-			json_object *properties = json_hash_get(j, "properties");
-			if (properties == NULL || (properties->type != JSON_HASH && properties->type != JSON_NULL)) {
-				fprintf(stderr, "%s:%d: feature without properties hash\n", reading, jp->line);
-				json_free(j);
-				continue;
-			}
-
-			json_object *tippecanoe = json_hash_get(j, "tippecanoe");
-
-			json_object *geometries = json_hash_get(geometry, "geometries");
-			if (geometries != NULL) {
-				int g;
-				for (g = 0; g < geometries->length; g++) {
-					serialize_geometry(geometries->array[g], properties, reading, jp, &seq, &metapos, &geompos, &indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox, tippecanoe);
-				}
-			} else {
-				serialize_geometry(geometry, properties, reading, jp, &seq, &metapos, &geompos, &indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox, tippecanoe);
-			}
-
-			json_free(j);
-
-			/* XXX check for any non-features in the outer object */
-		}
+		int layer;
+		if (nlayers == 1) {
+			layer = 0;
+		} else {
+			layer = source;
+		}
+
+		parse_json(jp, reading, &seq, &metapos, &geompos, &indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox);
 
 		json_end(jp);
 		fclose(fp);
 	}
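
In short, read_json now loops over input sources rather than over layers and chooses a layer index per source: with an explicit layer name (or a single input) everything goes to layer 0, otherwise each file gets its own layer. A standalone sketch of that mapping (hypothetical helper, not code from the diff):

#include <stdio.h>

/* Mirrors the nlayers/source logic in read_json above: an explicit layer
 * name collapses all sources into one layer; otherwise each input file
 * (or standard input, when there are no file arguments) is its own layer. */
static int layer_for_source(int source, int argc, int have_layername) {
	int nlayers;
	if (have_layername) {
		nlayers = 1;
	} else {
		nlayers = argc;
		if (nlayers == 0) {
			nlayers = 1;
		}
	}

	return (nlayers == 1) ? 0 : source;
}

int main(void) {
	int source;

	/* Three input files with -l: every source maps to layer 0. */
	for (source = 0; source < 3; source++) {
		printf("with -l: source %d -> layer %d\n", source, layer_for_source(source, 3, 1));
	}

	/* Three input files without -l: one layer per file. */
	for (source = 0; source < 3; source++) {
		printf("without -l: source %d -> layer %d\n", source, layer_for_source(source, 3, 0));
	}

	return 0;
}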

@@ -862,7 +884,7 @@ int read_json(int argc, char **argv, char *fname, const char *layername, int max
 
 	char *layernames[nlayers];
 	for (i = 0; i < nlayers; i++) {
-		if (argc <= 1 && layername != NULL) {
+		if (layername != NULL) {
 			layernames[i] = strdup(layername);
 		} else {
 			char *src = argv[i];

@@ -887,7 +909,6 @@ int read_json(int argc, char **argv, char *fname, const char *layername, int max
 			if (cp != NULL) {
 				*cp = '\0';
 			}
-			layername = trunc;
 
 			char *out = trunc;
 			for (cp = trunc; *cp; cp++) {

@@ -1132,7 +1153,7 @@ int read_json(int argc, char **argv, char *fname, const char *layername, int max
 		midlon = maxlon;
 	}
 
-	mbtiles_write_metadata(outdb, fname, layernames, minzoom, maxzoom, minlat, minlon, maxlat, maxlon, midlat, midlon, file_keys, nlayers); // XXX layers
+	mbtiles_write_metadata(outdb, fname, layernames, minzoom, maxzoom, minlat, minlon, maxlat, maxlon, midlat, midlon, file_keys, nlayers);
 
 	for (i = 0; i < nlayers; i++) {
 		pool_free_strings(&file_keys1[i]);

geometry.cc (11 lines changed)

@@ -344,6 +344,7 @@ drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double
 	long long pixel = (1 << (32 - detail - z)) * 2;
 
 	*reduced = true;
+	bool included_last_outer = false;
 
 	for (unsigned i = 0; i < geom.size(); i++) {
 		if (geom[i].op == VT_MOVETO) {

@@ -361,7 +362,7 @@ drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double
 			}
 			area = area / 2;
 
-			if (fabs(area) <= pixel * pixel) {
+			if (fabs(area) <= pixel * pixel || (area < 0 && !included_last_outer)) {
 				// printf("area is only %f vs %lld so using square\n", area, pixel * pixel);
 
 				*accum_area += area;

@@ -376,6 +377,10 @@ drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double
 
 					*accum_area -= pixel * pixel;
 				}
+
+				if (area >= 0) {
+					included_last_outer = false;
+				}
 			} else {
 				// printf("area is %f so keeping instead of %lld\n", area, pixel * pixel);
 

@@ -384,6 +389,10 @@ drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double
 				}
 
 				*reduced = false;
+
+				if (area >= 0) {
+					included_last_outer = true;
+				}
 			}
 
 			i = j - 1;
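
The effect of included_last_outer is that an inner ring is now also collapsed when the outer ring it belongs to was itself reduced to the tiny-polygon placeholder. A compact restatement of the per-ring decision (sketch only; it follows this function's convention that a ring with area >= 0 is an outer ring, and omits the accumulated-area bookkeeping):

#include <math.h>
#include <stdbool.h>
#include <stdio.h>

/* Returns true if the ring should be reduced to the placeholder square,
 * updating whether the most recent outer ring survived, as in the diff. */
static bool reduce_this_ring(double area, long long pixel, bool *included_last_outer) {
	if (fabs(area) <= (double) pixel * pixel || (area < 0 && !*included_last_outer)) {
		if (area >= 0) {
			*included_last_outer = false; /* the outer ring was dropped */
		}
		return true;
	}

	if (area >= 0) {
		*included_last_outer = true; /* the outer ring is kept at this zoom */
	}
	return false;
}

int main(void) {
	bool included_last_outer = false;
	long long pixel = 4;

	/* A tiny outer ring is reduced, so its larger hole is now reduced too. */
	printf("outer reduced: %d\n", reduce_this_ring(10.0, pixel, &included_last_outer));
	printf("inner reduced: %d\n", reduce_this_ring(-100.0, pixel, &included_last_outer));

	return 0;
}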

man/tippecanoe.1

@@ -54,7 +54,8 @@ it encounters.
 .SS Naming
 .RS
 .IP \(bu 2
-\-l \fIname\fP: Layer name (default "file" if source is file.json or output is file.mbtiles). Only works if there is only one layer.
+\-l \fIname\fP: Layer name (default "file" if source is file.json or output is file.mbtiles). If there are multiple input files
+specified, the files are all merged into the single named layer.
 .IP \(bu 2
 \-n \fIname\fP: Human\-readable name (default file.json)
 .RE

tile.cc (4 lines changed)

@@ -246,6 +246,10 @@ mapnik::vector::tile create_tile(char **layernames, int line_detail, std::vector
 
 	int i;
 	for (i = 0; i < nlayers; i++) {
+		if (features[i].size() == 0) {
+			continue;
+		}
+
 		mapnik::vector::tile_layer *layer = tile.add_layers();
 
 		layer->set_name(layernames[i]);