Mirror of https://github.com/mapbox/tippecanoe.git (synced 2025-04-13 13:53:11 +00:00)

commit 72cf2c7238 (parent 5d46fe7876)
Use shared_ptr for all references to JSON objects
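Note (not part of the commit itself): the hunks below replace every owning and borrowing use of json_object * with std::shared_ptr<json_object>, while the json_pull parser handle stays a raw pointer. A minimal sketch of what a caller looks like after the change, assuming the jsonpull API shown in the hunks below; the helper name dump_root_type is made up.

    #include <cstdio>
    #include <memory>
    #include "jsonpull/jsonpull.hpp"

    void dump_root_type(FILE *f) {
        json_pull *jp = json_begin_file(f);                    // parser handle is still a raw pointer
        std::shared_ptr<json_object> o = json_read_tree(jp);   // tree nodes are now shared_ptr
        if (o != nullptr && o->type == JSON_HASH) {
            fprintf(stderr, "root is a hash with %zu keys\n", o->keys.size());
        }
        json_free(o);   // still called explicitly; after this commit it takes the shared_ptr
        json_end(jp);
    }

Per the jsonpull.cpp hunks further down, json_free() now clears and disconnects the node rather than deleting it, leaving the actual deallocation to the reference count.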
@@ -174,7 +174,7 @@ sqlite3 *dirmeta2tmp(const char *fname) {
}
json_pull *jp = json_begin_file(f);
-json_object *o = json_read_tree(jp);
+std::shared_ptr<json_object> o = json_read_tree(jp);
if (o->type != JSON_HASH) {
fprintf(stderr, "%s: bad metadata format\n", name.c_str());
@@ -5,7 +5,7 @@
#include "mvt.hpp"
#include "evaluator.hpp"
-int compare(mvt_value one, json_object *two, bool &fail) {
+int compare(mvt_value one, std::shared_ptr<json_object> two, bool &fail) {
if (one.type == mvt_string) {
if (two->type != JSON_STRING) {
fail = true;

@@ -69,7 +69,7 @@ int compare(mvt_value one, json_object *two, bool &fail) {
exit(EXIT_FAILURE);
}
-bool eval(std::map<std::string, mvt_value> const &feature, json_object *f) {
+bool eval(std::map<std::string, mvt_value> const &feature, std::shared_ptr<json_object> f) {
if (f == NULL || f->type != JSON_ARRAY) {
fprintf(stderr, "Filter is not an array: %s\n", json_stringify(f).c_str());
exit(EXIT_FAILURE);

@@ -269,14 +269,14 @@ bool eval(std::map<std::string, mvt_value> const &feature, json_object *f) {
exit(EXIT_FAILURE);
}
-bool evaluate(std::map<std::string, mvt_value> const &feature, std::string const &layer, json_object *filter) {
+bool evaluate(std::map<std::string, mvt_value> const &feature, std::string const &layer, std::shared_ptr<json_object> filter) {
if (filter == NULL || filter->type != JSON_HASH) {
fprintf(stderr, "Error: filter is not a hash: %s\n", json_stringify(filter).c_str());
exit(EXIT_FAILURE);
}
bool ok = true;
-json_object *f;
+std::shared_ptr<json_object> f;
f = json_hash_get(filter, layer.c_str());
if (ok && f != NULL) {

@@ -291,7 +291,7 @@ bool evaluate(std::map<std::string, mvt_value> const &feature, std::string const
return ok;
}
-json_object *read_filter(const char *fname) {
+std::shared_ptr<json_object> read_filter(const char *fname) {
FILE *fp = fopen(fname, "r");
if (fp == NULL) {
perror(fname);

@@ -299,7 +299,7 @@ json_object *read_filter(const char *fname) {
}
json_pull *jp = json_begin_file(fp);
-json_object *filter = json_read_tree(jp);
+std::shared_ptr<json_object> filter = json_read_tree(jp);
if (filter == NULL) {
fprintf(stderr, "%s: %s\n", fname, jp->error.c_str());
exit(EXIT_FAILURE);

@@ -310,9 +310,9 @@ json_object *read_filter(const char *fname) {
return filter;
}
-json_object *parse_filter(const char *s) {
+std::shared_ptr<json_object> parse_filter(const char *s) {
json_pull *jp = json_begin_string(s);
-json_object *filter = json_read_tree(jp);
+std::shared_ptr<json_object> filter = json_read_tree(jp);
if (filter == NULL) {
fprintf(stderr, "Could not parse filter %s\n", s);
fprintf(stderr, "%s\n", jp->error.c_str());
@@ -6,8 +6,8 @@
#include "jsonpull/jsonpull.hpp"
#include "mvt.hpp"
-bool evaluate(std::map<std::string, mvt_value> const &feature, std::string const &layer, json_object *filter);
-json_object *parse_filter(const char *s);
-json_object *read_filter(const char *fname);
+bool evaluate(std::map<std::string, mvt_value> const &feature, std::string const &layer, std::shared_ptr<json_object> filter);
+std::shared_ptr<json_object> parse_filter(const char *s);
+std::shared_ptr<json_object> read_filter(const char *fname);
#endif
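Sketch (not in the commit) of calling the updated filter API declared above. It assumes these declarations live in evaluator.hpp, which the .cpp hunk earlier includes; the layer name and the filter string are made-up examples in Mapbox GL filter syntax.

    #include <map>
    #include <memory>
    #include <string>
    #include "jsonpull/jsonpull.hpp"
    #include "mvt.hpp"
    #include "evaluator.hpp"

    bool keep_feature(std::map<std::string, mvt_value> const &attrs) {
        // A layer-keyed filter; real ones come from -j/-J on the command line.
        std::shared_ptr<json_object> filter = parse_filter("{ \"mylayer\": [ \"all\" ] }");
        return evaluate(attrs, "mylayer", filter);  // true if this feature passes the layer's filter
    }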
@@ -391,22 +391,22 @@ void readFeature(protozero::pbf_reader &pbf, size_t dim, double e, std::vector<s
auto tip = other.find("tippecanoe");
if (tip != other.end()) {
json_pull *jp = json_begin_string(tip->second.s.c_str());
-json_object *o = json_read_tree(jp);
+std::shared_ptr<json_object> o = json_read_tree(jp);
if (o != NULL) {
-json_object *min = json_hash_get(o, "minzoom");
+std::shared_ptr<json_object> min = json_hash_get(o, "minzoom");
if (min != NULL && (min->type == JSON_STRING || min->type == JSON_NUMBER)) {
sf.has_tippecanoe_minzoom = true;
sf.tippecanoe_minzoom = atoi(min->string.c_str());
}
-json_object *max = json_hash_get(o, "maxzoom");
+std::shared_ptr<json_object> max = json_hash_get(o, "maxzoom");
if (max != NULL && (max->type == JSON_STRING || max->type == JSON_NUMBER)) {
sf.has_tippecanoe_maxzoom = true;
sf.tippecanoe_maxzoom = atoi(max->string.c_str());
}
-json_object *tlayer = json_hash_get(o, "layer");
+std::shared_ptr<json_object> tlayer = json_hash_get(o, "layer");
if (tlayer != NULL && (tlayer->type == JSON_STRING || tlayer->type == JSON_NUMBER)) {
sf.layername = tlayer->string;
}
geojson.cpp (38 changed lines)
@@ -38,8 +38,8 @@
#include "read_json.hpp"
#include "mvt.hpp"
-int serialize_geojson_feature(struct serialization_state *sst, json_object *geometry, json_object *properties, json_object *id, int layer, json_object *tippecanoe, json_object *feature, std::string layername) {
-json_object *geometry_type = json_hash_get(geometry, "type");
+int serialize_geojson_feature(struct serialization_state *sst, std::shared_ptr<json_object> geometry, std::shared_ptr<json_object> properties, std::shared_ptr<json_object> id, int layer, std::shared_ptr<json_object> tippecanoe, std::shared_ptr<json_object> feature, std::string layername) {
+std::shared_ptr<json_object> geometry_type = json_hash_get(geometry, "type");
if (geometry_type == NULL) {
static int warned = 0;
if (!warned) {

@@ -57,7 +57,7 @@ int serialize_geojson_feature(struct serialization_state *sst, json_object *geom
return 0;
}
-json_object *coordinates = json_hash_get(geometry, "coordinates");
+std::shared_ptr<json_object> coordinates = json_hash_get(geometry, "coordinates");
if (coordinates == NULL || coordinates->type != JSON_ARRAY) {
fprintf(stderr, "%s:%d: feature without coordinates array\n", sst->fname, sst->line);
json_context(feature);

@@ -81,7 +81,7 @@ int serialize_geojson_feature(struct serialization_state *sst, json_object *geom
std::string tippecanoe_layername;
if (tippecanoe != NULL) {
-json_object *min = json_hash_get(tippecanoe, "minzoom");
+std::shared_ptr<json_object> min = json_hash_get(tippecanoe, "minzoom");
if (min != NULL && min->type == JSON_NUMBER) {
tippecanoe_minzoom = min->number;
}

@@ -89,7 +89,7 @@ int serialize_geojson_feature(struct serialization_state *sst, json_object *geom
tippecanoe_minzoom = atoi(min->string.c_str());
}
-json_object *max = json_hash_get(tippecanoe, "maxzoom");
+std::shared_ptr<json_object> max = json_hash_get(tippecanoe, "maxzoom");
if (max != NULL && max->type == JSON_NUMBER) {
tippecanoe_maxzoom = max->number;
}

@@ -97,7 +97,7 @@ int serialize_geojson_feature(struct serialization_state *sst, json_object *geom
tippecanoe_maxzoom = atoi(max->string.c_str());
}
-json_object *ln = json_hash_get(tippecanoe, "layer");
+std::shared_ptr<json_object> ln = json_hash_get(tippecanoe, "layer");
if (ln != NULL && (ln->type == JSON_STRING || ln->type == JSON_NUMBER)) {
tippecanoe_layername = ln->string;
}

@@ -214,12 +214,12 @@ int serialize_geojson_feature(struct serialization_state *sst, json_object *geom
return serialize_feature(sst, sf);
}
-void check_crs(json_object *j, const char *reading) {
-json_object *crs = json_hash_get(j, "crs");
+void check_crs(std::shared_ptr<json_object> j, const char *reading) {
+std::shared_ptr<json_object> crs = json_hash_get(j, "crs");
if (crs != NULL) {
-json_object *properties = json_hash_get(crs, "properties");
+std::shared_ptr<json_object> properties = json_hash_get(crs, "properties");
if (properties != NULL) {
-json_object *name = json_hash_get(properties, "name");
+std::shared_ptr<json_object> name = json_hash_get(properties, "name");
if (name->type == JSON_STRING) {
if (name->string != projection->alias) {
if (!quiet) {

@@ -238,7 +238,7 @@ void parse_json(struct serialization_state *sst, json_pull *jp, int layer, std::
long long found_geometries = 0;
while (1) {
-json_object *j = json_read(jp);
+std::shared_ptr<json_object> j = json_read(jp);
if (j == NULL) {
if (jp->error.size() != 0) {
fprintf(stderr, "%s:%zu: %s\n", sst->fname, jp->line, jp->error.c_str());

@@ -259,7 +259,7 @@ void parse_json(struct serialization_state *sst, json_pull *jp, int layer, std::
}
}
-json_object *type = json_hash_get(j, "type");
+std::shared_ptr<json_object> type = json_hash_get(j, "type");
if (type == NULL || type->type != JSON_STRING) {
continue;
}

@@ -278,14 +278,14 @@ void parse_json(struct serialization_state *sst, json_pull *jp, int layer, std::
if (j->parent != NULL) {
if (j->parent->type == JSON_ARRAY && j->parent->parent != NULL) {
if (j->parent->parent->type == JSON_HASH) {
-json_object *geometries = json_hash_get(j->parent->parent, "geometries");
+std::shared_ptr<json_object> geometries = json_hash_get(j->parent->parent, "geometries");
if (geometries != NULL) {
// Parent of Parent must be a GeometryCollection
is_geometry = 0;
}
}
} else if (j->parent->type == JSON_HASH) {
-json_object *geometry = json_hash_get(j->parent, "geometry");
+std::shared_ptr<json_object> geometry = json_hash_get(j->parent, "geometry");
if (geometry != NULL) {
// Parent must be a Feature
is_geometry = 0;

@@ -320,7 +320,7 @@ void parse_json(struct serialization_state *sst, json_pull *jp, int layer, std::
}
found_features++;
-json_object *geometry = json_hash_get(j, "geometry");
+std::shared_ptr<json_object> geometry = json_hash_get(j, "geometry");
if (geometry == NULL) {
fprintf(stderr, "%s:%zu: feature with no geometry\n", sst->fname, jp->line);
json_context(j);

@@ -328,7 +328,7 @@ void parse_json(struct serialization_state *sst, json_pull *jp, int layer, std::
continue;
}
-json_object *properties = json_hash_get(j, "properties");
+std::shared_ptr<json_object> properties = json_hash_get(j, "properties");
if (properties == NULL || (properties->type != JSON_HASH && properties->type != JSON_NULL)) {
fprintf(stderr, "%s:%zu: feature without properties hash\n", sst->fname, jp->line);
json_context(j);

@@ -336,10 +336,10 @@ void parse_json(struct serialization_state *sst, json_pull *jp, int layer, std::
continue;
}
-json_object *tippecanoe = json_hash_get(j, "tippecanoe");
-json_object *id = json_hash_get(j, "id");
+std::shared_ptr<json_object> tippecanoe = json_hash_get(j, "tippecanoe");
+std::shared_ptr<json_object> id = json_hash_get(j, "id");
-json_object *geometries = json_hash_get(geometry, "geometries");
+std::shared_ptr<json_object> geometries = json_hash_get(geometry, "geometries");
if (geometries != NULL && geometries->type == JSON_ARRAY) {
size_t g;
for (g = 0; g < geometries->array.size(); g++) {
@@ -96,8 +96,8 @@ static inline int read_wrap(json_pull *j) {
return c;
}
-static json_object *fabricate_object(json_pull *jp, json_object *parent, json_type type) {
-json_object *o = new json_object;
+static std::shared_ptr<json_object> fabricate_object(json_pull *jp, std::shared_ptr<json_object> parent, json_type type) {
+std::shared_ptr<json_object> o = std::make_shared<json_object>();
if (o == NULL) {
perror("Out of memory");
exit(EXIT_FAILURE);

@@ -108,9 +108,9 @@ static json_object *fabricate_object(json_pull *jp, json_object *parent, json_ty
return o;
}
-static json_object *add_object(json_pull *j, json_type type) {
-json_object *c = j->container;
-json_object *o = fabricate_object(j, c, type);
+static std::shared_ptr<json_object> add_object(json_pull *j, json_type type) {
+std::shared_ptr<json_object> c = j->container;
+std::shared_ptr<json_object> o = fabricate_object(j, c, type);
if (c != NULL) {
if (c->type == JSON_ARRAY) {

@@ -119,7 +119,6 @@ static json_object *add_object(json_pull *j, json_type type) {
c->expect = JSON_COMMA;
} else {
j->error = "Expected a comma, not a list item";
-delete o;
return NULL;
}
} else if (c->type == JSON_HASH) {

@@ -129,7 +128,6 @@ static json_object *add_object(json_pull *j, json_type type) {
} else if (c->expect == JSON_KEY) {
if (type != JSON_STRING) {
j->error = "Hash key is not a string";
-delete o;
return NULL;
}

@@ -138,7 +136,6 @@ static json_object *add_object(json_pull *j, json_type type) {
c->expect = JSON_COLON;
} else {
j->error = "Expected a comma or colon";
-delete o;
return NULL;
}
}

@@ -153,7 +150,7 @@ static json_object *add_object(json_pull *j, json_type type) {
return o;
}
-json_object *json_hash_get(json_object *o, std::string const &s) {
+std::shared_ptr<json_object> json_hash_get(std::shared_ptr<json_object> o, std::string const &s) {
if (o == NULL || o->type != JSON_HASH) {
return NULL;
}

@@ -170,7 +167,7 @@ json_object *json_hash_get(json_object *o, std::string const &s) {
return NULL;
}
-json_object *json_read_separators(json_pull *j, json_separator_callback cb, void *state) {
+std::shared_ptr<json_object> json_read_separators(json_pull *j, json_separator_callback cb, void *state) {
int c;
// In case there is an error at the top level

@@ -215,7 +212,7 @@ again:
/////////////////////////// Arrays
if (c == '[') {
-json_object *o = add_object(j, JSON_ARRAY);
+std::shared_ptr<json_object> o = add_object(j, JSON_ARRAY);
if (o == NULL) {
return NULL;
}

@@ -245,7 +242,7 @@ again:
}
}
-json_object *ret = j->container;
+std::shared_ptr<json_object> ret = j->container;
j->container = ret->parent;
return ret;
}

@@ -253,7 +250,7 @@ again:
/////////////////////////// Hashes
if (c == '{') {
-json_object *o = add_object(j, JSON_HASH);
+std::shared_ptr<json_object> o = add_object(j, JSON_HASH);
if (o == NULL) {
return NULL;
}

@@ -283,7 +280,7 @@ again:
}
}
-json_object *ret = j->container;
+std::shared_ptr<json_object> ret = j->container;
j->container = ret->parent;
return ret;
}

@@ -448,7 +445,7 @@ again:
}
}
-json_object *n = add_object(j, JSON_NUMBER);
+std::shared_ptr<json_object> n = add_object(j, JSON_NUMBER);
if (n != NULL) {
n->number = atof(val.c_str());
n->string = val;

@@ -578,7 +575,7 @@ again:
return NULL;
}
-json_object *s = add_object(j, JSON_STRING);
+std::shared_ptr<json_object> s = add_object(j, JSON_STRING);
if (s != NULL) {
s->string = val;
}

@@ -589,12 +586,12 @@ again:
return NULL;
}
-json_object *json_read(json_pull *j) {
+std::shared_ptr<json_object> json_read(json_pull *j) {
return json_read_separators(j, NULL, NULL);
}
-json_object *json_read_tree(json_pull *p) {
-json_object *j;
+std::shared_ptr<json_object> json_read_tree(json_pull *p) {
+std::shared_ptr<json_object> j;
while ((j = json_read(p)) != NULL) {
if (j->parent == NULL) {

@@ -605,7 +602,7 @@ json_object *json_read_tree(json_pull *p) {
return NULL;
}
-void json_free(json_object *o) {
+void json_free(std::shared_ptr<json_object> o) {
size_t i;
if (o == NULL) {

@@ -615,7 +612,7 @@ void json_free(json_object *o) {
// Free any data linked from here
if (o->type == JSON_ARRAY) {
-std::vector<json_object *> a = o->array;
+std::vector<std::shared_ptr<json_object> > a = o->array;
size_t n = o->array.size();
o->array.resize(0);

@@ -624,8 +621,8 @@ void json_free(json_object *o) {
json_free(a[i]);
}
} else if (o->type == JSON_HASH) {
-std::vector<json_object *> k = o->keys;
-std::vector<json_object *> v = o->values;
+std::vector<std::shared_ptr<json_object> > k = o->keys;
+std::vector<std::shared_ptr<json_object> > v = o->values;
size_t n = o->keys.size();
o->keys.resize(0);

@@ -638,11 +635,9 @@ void json_free(json_object *o) {
}
json_disconnect(o);
-delete o;
}
-static void json_disconnect_parser(json_object *o) {
+static void json_disconnect_parser(std::shared_ptr<json_object> o) {
if (o->type == JSON_HASH) {
size_t i;
for (i = 0; i < o->keys.size(); i++) {

@@ -659,7 +654,7 @@ static void json_disconnect_parser(json_object *o) {
o->parser = NULL;
}
-void json_disconnect(json_object *o) {
+void json_disconnect(std::shared_ptr<json_object> o) {
// Expunge references to this as an array element
// or a hash key or value.

@@ -695,9 +690,6 @@ void json_disconnect(json_object *o) {
if (i < o->parent->keys.size()) {
if (o->parent->keys[i] != NULL && o->parent->keys[i]->type == JSON_NULL) {
if (o->parent->values[i] != NULL && o->parent->values[i]->type == JSON_NULL) {
-delete o->parent->keys[i];
-delete o->parent->values[i];
o->parent->keys.erase(o->parent->keys.begin() + i);
o->parent->values.erase(o->parent->values.begin() + i);
}

@@ -714,7 +706,7 @@ void json_disconnect(json_object *o) {
o->parent = NULL;
}
-static void json_print_one(std::string &val, json_object *o) {
+static void json_print_one(std::string &val, std::shared_ptr<json_object> o) {
if (o == NULL) {
val.append("...");
} else if (o->type == JSON_STRING) {

@@ -752,7 +744,7 @@ static void json_print_one(std::string &val, json_object *o) {
}
}
-static void json_print(std::string &val, json_object *o) {
+static void json_print(std::string &val, std::shared_ptr<json_object> o) {
if (o == NULL) {
// Hash value in incompletely read hash
val.append("...");

@@ -784,7 +776,7 @@ static void json_print(std::string &val, json_object *o) {
}
}
-std::string json_stringify(json_object *o) {
+std::string json_stringify(std::shared_ptr<json_object> o) {
std::string val;
json_print(val, o);
return val;
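Aside (an observation, not text from the commit): fabricate_object now builds nodes with std::make_shared and the explicit delete calls on the error paths are gone. Since std::make_shared reports allocation failure by throwing std::bad_alloc rather than returning a null pointer, the retained "Out of memory" check above appears to be unreachable. A minimal sketch of the new allocation pattern; the helper make_null_node is hypothetical.

    #include <memory>
    #include "jsonpull/jsonpull.hpp"

    // Hypothetical helper, only to illustrate the new allocation pattern.
    std::shared_ptr<json_object> make_null_node() {
        std::shared_ptr<json_object> o = std::make_shared<json_object>();
        o->type = JSON_NULL;   // fields as declared in jsonpull.hpp
        o->parent = nullptr;
        o->parser = nullptr;
        return o;              // no matching delete: the last shared_ptr dropped destroys the node
    }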
@@ -3,6 +3,7 @@
#include <string>
#include <vector>
+#include <memory>
typedef enum json_type {
// These types can be returned by json_read()

@@ -28,15 +29,15 @@ struct json_object;
struct json_object {
json_type type;
-struct json_object *parent;
+std::shared_ptr<struct json_object> parent;
struct json_pull *parser;
std::string string;
double number;
-std::vector<json_object *> array;
-std::vector<json_object *> keys;
-std::vector<json_object *> values;
+std::vector<std::shared_ptr<json_object>> array;
+std::vector<std::shared_ptr<json_object>> keys;
+std::vector<std::shared_ptr<json_object>> values;
int expect;
};

@@ -51,8 +52,8 @@ typedef struct json_pull {
ssize_t buffer_tail;
ssize_t buffer_head;
-json_object *container;
-json_object *root;
+std::shared_ptr<json_object> container;
+std::shared_ptr<json_object> root;
} json_pull;
json_pull *json_begin_file(FILE *f);

@@ -63,14 +64,14 @@ void json_end(json_pull *p);
typedef void (*json_separator_callback)(json_type type, json_pull *j, void *state);
-json_object *json_read_tree(json_pull *j);
-json_object *json_read(json_pull *j);
-json_object *json_read_separators(json_pull *j, json_separator_callback cb, void *state);
-void json_free(json_object *j);
-void json_disconnect(json_object *j);
+std::shared_ptr<json_object> json_read_tree(json_pull *j);
+std::shared_ptr<json_object> json_read(json_pull *j);
+std::shared_ptr<json_object> json_read_separators(json_pull *j, json_separator_callback cb, void *state);
+void json_free(std::shared_ptr<json_object> j);
+void json_disconnect(std::shared_ptr<json_object> j);
-json_object *json_hash_get(json_object *o, std::string const &key);
+std::shared_ptr<json_object> json_hash_get(std::shared_ptr<json_object> o, std::string const &key);
-std::string json_stringify(json_object *o);
+std::string json_stringify(std::shared_ptr<json_object> o);
#endif
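For orientation (a sketch, not part of the commit): with the declarations above, a hash node owns its children through the parallel keys/values vectors of shared_ptr, an array node through array, and every child also points back at its parent through a shared_ptr. Walking a hash looks like this; print_keys is a made-up helper.

    #include <cstdio>
    #include <memory>
    #include "jsonpull/jsonpull.hpp"

    // Sketch only: iterate the parallel keys/values vectors of a parsed hash.
    void print_keys(std::shared_ptr<json_object> const &o) {
        if (o != nullptr && o->type == JSON_HASH) {
            for (size_t i = 0; i < o->keys.size(); i++) {
                if (o->keys[i]->type == JSON_STRING) {
                    fprintf(stderr, "key: %s\n", o->keys[i]->string.c_str());
                }
            }
        }
    }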
jsontool.cpp (22 changed lines)
@@ -136,12 +136,12 @@ std::string sort_quote(const char *s) {
return ret;
}
-void out(std::string const &s, int type, json_object *properties) {
+void out(std::string const &s, int type, std::shared_ptr<json_object> properties) {
if (extract != NULL) {
std::string extracted = sort_quote("null");
bool found = false;
-json_object *o = json_hash_get(properties, extract);
+std::shared_ptr<json_object> o = json_hash_get(properties, extract);
if (o != NULL) {
found = true;
if (o->type == JSON_STRING || o->type == JSON_NUMBER) {

@@ -200,7 +200,7 @@ void out(std::string const &s, int type, json_object *properties) {
std::string prev_joinkey;
-void join_csv(json_object *j) {
+void join_csv(std::shared_ptr<json_object> j) {
if (header.size() == 0) {
std::string s = csv_getline(csvfile);
if (s.size() == 0) {

@@ -226,8 +226,8 @@ void join_csv(json_object *j) {
}
}
-json_object *properties = json_hash_get(j, "properties");
-json_object *key = NULL;
+std::shared_ptr<json_object> properties = json_hash_get(j, "properties");
+std::shared_ptr<json_object> key = NULL;
if (properties != NULL) {
key = json_hash_get(properties, header[0].c_str());

@@ -315,8 +315,8 @@ void join_csv(json_object *j) {
{
// This knows more about the structure of JSON objects than it ought to
-json_object *ko = new json_object;
-json_object *vo = new json_object;
+std::shared_ptr<json_object> ko = std::make_shared<json_object>();
+std::shared_ptr<json_object> vo = std::make_shared<json_object>();
if (ko == NULL || vo == NULL) {
perror("malloc");
exit(EXIT_FAILURE);

@@ -343,7 +343,7 @@ void process(FILE *fp, const char *fname) {
json_pull *jp = json_begin_file(fp);
while (1) {
-json_object *j = json_read(jp);
+std::shared_ptr<json_object> j = json_read(jp);
if (j == NULL) {
if (jp->error.size() != 0) {
fprintf(stderr, "%s:%zu: %s\n", fname, jp->line, jp->error.c_str());

@@ -353,7 +353,7 @@ void process(FILE *fp, const char *fname) {
break;
}
-json_object *type = json_hash_get(j, "type");
+std::shared_ptr<json_object> type = json_hash_get(j, "type");
if (type == NULL || type->type != JSON_STRING) {
continue;
}

@@ -376,14 +376,14 @@ void process(FILE *fp, const char *fname) {
if (j->parent != NULL) {
if (j->parent->type == JSON_ARRAY && j->parent->parent != NULL) {
if (j->parent->parent->type == JSON_HASH) {
-json_object *geometries = json_hash_get(j->parent->parent, "geometries");
+std::shared_ptr<json_object> geometries = json_hash_get(j->parent->parent, "geometries");
if (geometries != NULL) {
// Parent of Parent must be a GeometryCollection
is_geometry = 0;
}
}
} else if (j->parent->type == JSON_HASH) {
-json_object *geometry = json_hash_get(j->parent, "geometry");
+std::shared_ptr<json_object> geometry = json_hash_get(j->parent, "geometry");
if (geometry != NULL) {
// Parent must be a Feature
is_geometry = 0;
main.cpp (18 changed lines)
@@ -390,7 +390,7 @@ void *run_sort(void *v) {
return NULL;
}
-void do_read_parallel(char *map, long long len, long long initial_offset, const char *reading, std::vector<struct reader> *readers, std::atomic<long long> *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, json_object *filter, int basezoom, int source, std::vector<std::map<std::string, layermap_entry> > *layermaps, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
+void do_read_parallel(char *map, long long len, long long initial_offset, const char *reading, std::vector<struct reader> *readers, std::atomic<long long> *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, std::shared_ptr<json_object> filter, int basezoom, int source, std::vector<std::map<std::string, layermap_entry> > *layermaps, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
long long segs[CPUS + 1];
segs[0] = 0;
segs[CPUS] = len;

@@ -579,7 +579,7 @@ struct read_parallel_arg {
std::set<std::string> *exclude = NULL;
std::set<std::string> *include = NULL;
int exclude_all = 0;
-json_object *filter = NULL;
+std::shared_ptr<json_object> filter = NULL;
int maxzoom = 0;
int basezoom = 0;
int source = 0;

@@ -632,7 +632,7 @@ void *run_read_parallel(void *v) {
return NULL;
}
-void start_parsing(int fd, STREAM *fp, long long offset, long long len, std::atomic<int> *is_parsing, pthread_t *parallel_parser, bool &parser_created, const char *reading, std::vector<struct reader> *readers, std::atomic<long long> *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, json_object *filter, int basezoom, int source, std::vector<std::map<std::string, layermap_entry> > &layermaps, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
+void start_parsing(int fd, STREAM *fp, long long offset, long long len, std::atomic<int> *is_parsing, pthread_t *parallel_parser, bool &parser_created, const char *reading, std::vector<struct reader> *readers, std::atomic<long long> *progress_seq, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, std::shared_ptr<json_object> filter, int basezoom, int source, std::vector<std::map<std::string, layermap_entry> > &layermaps, int *initialized, unsigned *initial_x, unsigned *initial_y, int maxzoom, std::string layername, bool uses_gamma, std::map<std::string, int> const *attribute_types, int separator, double *dist_sum, size_t *dist_count, bool want_dist, bool filters) {
// This has to kick off an intermediate thread to start the parser threads,
// so the main thread can get back to reading the next input stage while
// the intermediate thread waits for the completion of the parser threads.

@@ -1134,7 +1134,7 @@ void choose_first_zoom(long long *file_bbox, std::vector<struct reader> &readers
}
}
-int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzoom, int basezoom, double basezoom_marker_width, sqlite3 *outdb, const char *outdir, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, json_object *filter, double droprate, int buffer, const char *tmpdir, double gamma, int read_parallel, int forcetable, const char *attribution, bool uses_gamma, long long *file_bbox, const char *prefilter, const char *postfilter, const char *description, bool guess_maxzoom, std::map<std::string, int> const *attribute_types, const char *pgm, std::map<std::string, attribute_op> const *attribute_accum, std::map<std::string, std::string> const &attribute_descriptions) {
+int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzoom, int basezoom, double basezoom_marker_width, sqlite3 *outdb, const char *outdir, std::set<std::string> *exclude, std::set<std::string> *include, int exclude_all, std::shared_ptr<json_object> filter, double droprate, int buffer, const char *tmpdir, double gamma, int read_parallel, int forcetable, const char *attribution, bool uses_gamma, long long *file_bbox, const char *prefilter, const char *postfilter, const char *description, bool guess_maxzoom, std::map<std::string, int> const *attribute_types, const char *pgm, std::map<std::string, attribute_op> const *attribute_accum, std::map<std::string, std::string> const &attribute_descriptions) {
int ret = EXIT_SUCCESS;
std::vector<struct reader> readers;

@@ -2376,7 +2376,7 @@ void set_attribute_accum(std::map<std::string, attribute_op> &attribute_accum, c
void parse_json_source(const char *arg, struct source &src) {
json_pull *jp = json_begin_string(arg);
-json_object *o = json_read_tree(jp);
+std::shared_ptr<json_object> o = json_read_tree(jp);
if (o == NULL) {
fprintf(stderr, "%s: -L%s: %s\n", *av, arg, jp->error.c_str());

@@ -2388,7 +2388,7 @@ void parse_json_source(const char *arg, struct source &src) {
exit(EXIT_FAILURE);
}
-json_object *fname = json_hash_get(o, "file");
+std::shared_ptr<json_object> fname = json_hash_get(o, "file");
if (fname == NULL || fname->type != JSON_STRING) {
fprintf(stderr, "%s: -L%s: requires \"file\": filename\n", *av, arg);
exit(EXIT_FAILURE);

@@ -2396,12 +2396,12 @@ void parse_json_source(const char *arg, struct source &src) {
src.file = fname->string;
-json_object *layer = json_hash_get(o, "layer");
+std::shared_ptr<json_object> layer = json_hash_get(o, "layer");
if (layer != NULL && layer->type == JSON_STRING) {
src.layer = layer->string;
}
-json_object *description = json_hash_get(o, "description");
+std::shared_ptr<json_object> description = json_hash_get(o, "description");
if (description != NULL && description->type == JSON_STRING) {
src.description = description->string;
}

@@ -2451,7 +2451,7 @@ int main(int argc, char **argv) {
int exclude_all = 0;
int read_parallel = 0;
int files_open_at_start;
-json_object *filter = NULL;
+std::shared_ptr<json_object> filter = NULL;
for (i = 0; i < 256; i++) {
prevent[i] = 0;
plugin.cpp (44 changed lines)
@@ -95,7 +95,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
json_pull *jp = json_begin_file(f);
while (1) {
-json_object *j = json_read(jp);
+std::shared_ptr<json_object> j = json_read(jp);
if (j == NULL) {
if (jp->error.size() != 0) {
fprintf(stderr, "Filter output:%zu: %s\n", jp->line, jp->error.c_str());

@@ -109,7 +109,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
break;
}
-json_object *type = json_hash_get(j, "type");
+std::shared_ptr<json_object> type = json_hash_get(j, "type");
if (type == NULL || type->type != JSON_STRING) {
continue;
}

@@ -117,7 +117,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
continue;
}
-json_object *geometry = json_hash_get(j, "geometry");
+std::shared_ptr<json_object> geometry = json_hash_get(j, "geometry");
if (geometry == NULL) {
fprintf(stderr, "Filter output:%zu: filtered feature with no geometry\n", jp->line);
json_context(j);

@@ -125,7 +125,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
exit(EXIT_FAILURE);
}
-json_object *properties = json_hash_get(j, "properties");
+std::shared_ptr<json_object> properties = json_hash_get(j, "properties");
if (properties == NULL || (properties->type != JSON_HASH && properties->type != JSON_NULL)) {
fprintf(stderr, "Filter output:%zu: feature without properties hash\n", jp->line);
json_context(j);

@@ -133,7 +133,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
exit(EXIT_FAILURE);
}
-json_object *geometry_type = json_hash_get(geometry, "type");
+std::shared_ptr<json_object> geometry_type = json_hash_get(geometry, "type");
if (geometry_type == NULL) {
fprintf(stderr, "Filter output:%zu: null geometry (additional not reported)\n", jp->line);
json_context(j);

@@ -146,7 +146,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
exit(EXIT_FAILURE);
}
-json_object *coordinates = json_hash_get(geometry, "coordinates");
+std::shared_ptr<json_object> coordinates = json_hash_get(geometry, "coordinates");
if (coordinates == NULL || coordinates->type != JSON_ARRAY) {
fprintf(stderr, "Filter output:%zu: feature without coordinates array\n", jp->line);
json_context(j);

@@ -166,8 +166,8 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
}
std::string layername = "unknown";
-json_object *tippecanoe = json_hash_get(j, "tippecanoe");
-json_object *layer = NULL;
+std::shared_ptr<json_object> tippecanoe = json_hash_get(j, "tippecanoe");
+std::shared_ptr<json_object> layer = NULL;
if (tippecanoe != NULL) {
layer = json_hash_get(tippecanoe, "layer");
if (layer != NULL && layer->type == JSON_STRING) {

@@ -214,7 +214,7 @@ std::vector<mvt_layer> parse_layers(int fd, int z, unsigned x, unsigned y, std::
feature.type = mb_geometry[t];
feature.geometry = to_feature(dv);
-json_object *id = json_hash_get(j, "id");
+std::shared_ptr<json_object> id = json_hash_get(j, "id");
if (id != NULL) {
feature.id = atoll(id->string.c_str());
feature.has_id = true;

@@ -299,7 +299,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
serial_feature sf;
while (1) {
-json_object *j = json_read(jp);
+std::shared_ptr<json_object> j = json_read(jp);
if (j == NULL) {
if (jp->error.size() != 0) {
fprintf(stderr, "Filter output:%zu: %s\n", jp->line, jp->error.c_str());

@@ -314,7 +314,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
return sf;
}
-json_object *type = json_hash_get(j, "type");
+std::shared_ptr<json_object> type = json_hash_get(j, "type");
if (type == NULL || type->type != JSON_STRING) {
continue;
}

@@ -322,7 +322,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
continue;
}
-json_object *geometry = json_hash_get(j, "geometry");
+std::shared_ptr<json_object> geometry = json_hash_get(j, "geometry");
if (geometry == NULL) {
fprintf(stderr, "Filter output:%zu: filtered feature with no geometry\n", jp->line);
json_context(j);

@@ -330,7 +330,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
exit(EXIT_FAILURE);
}
-json_object *properties = json_hash_get(j, "properties");
+std::shared_ptr<json_object> properties = json_hash_get(j, "properties");
if (properties == NULL || (properties->type != JSON_HASH && properties->type != JSON_NULL)) {
fprintf(stderr, "Filter output:%zu: feature without properties hash\n", jp->line);
json_context(j);

@@ -338,7 +338,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
exit(EXIT_FAILURE);
}
-json_object *geometry_type = json_hash_get(geometry, "type");
+std::shared_ptr<json_object> geometry_type = json_hash_get(geometry, "type");
if (geometry_type == NULL) {
fprintf(stderr, "Filter output:%zu: null geometry (additional not reported)\n", jp->line);
json_context(j);

@@ -351,7 +351,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
exit(EXIT_FAILURE);
}
-json_object *coordinates = json_hash_get(geometry, "coordinates");
+std::shared_ptr<json_object> coordinates = json_hash_get(geometry, "coordinates");
if (coordinates == NULL || coordinates->type != JSON_ARRAY) {
fprintf(stderr, "Filter output:%zu: feature without coordinates array\n", jp->line);
json_context(j);

@@ -401,29 +401,29 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
sf.has_id = false;
std::string layername = "unknown";
-json_object *tippecanoe = json_hash_get(j, "tippecanoe");
+std::shared_ptr<json_object> tippecanoe = json_hash_get(j, "tippecanoe");
if (tippecanoe != NULL) {
-json_object *layer = json_hash_get(tippecanoe, "layer");
+std::shared_ptr<json_object> layer = json_hash_get(tippecanoe, "layer");
if (layer != NULL && layer->type == JSON_STRING) {
layername = layer->string;
}
-json_object *index = json_hash_get(tippecanoe, "index");
+std::shared_ptr<json_object> index = json_hash_get(tippecanoe, "index");
if (index != NULL && index->type == JSON_NUMBER) {
sf.index = index->number;
}
-json_object *sequence = json_hash_get(tippecanoe, "sequence");
+std::shared_ptr<json_object> sequence = json_hash_get(tippecanoe, "sequence");
if (sequence != NULL && sequence->type == JSON_NUMBER) {
sf.seq = sequence->number;
}
-json_object *extent = json_hash_get(tippecanoe, "extent");
+std::shared_ptr<json_object> extent = json_hash_get(tippecanoe, "extent");
if (extent != NULL && sequence->type == JSON_NUMBER) {
sf.extent = extent->number;
}
-json_object *dropped = json_hash_get(tippecanoe, "dropped");
+std::shared_ptr<json_object> dropped = json_hash_get(tippecanoe, "dropped");
if (dropped != NULL && dropped->type == JSON_TRUE) {
sf.dropped = true;
}

@@ -446,7 +446,7 @@ serial_feature parse_feature(json_pull *jp, int z, unsigned x, unsigned y, std::
}
}
-json_object *id = json_hash_get(j, "id");
+std::shared_ptr<json_object> id = json_hash_get(j, "id");
if (id != NULL) {
sf.id = atoll(id->string.c_str());
sf.has_id = true;
@@ -30,7 +30,7 @@ int mb_geometry[GEOM_TYPES] = {
VT_POINT, VT_POINT, VT_LINE, VT_LINE, VT_POLYGON, VT_POLYGON,
};
-void json_context(json_object *j) {
+void json_context(std::shared_ptr<json_object> j) {
std::string s = json_stringify(j);
if (s.size() >= 500) {

@@ -41,7 +41,7 @@ void json_context(json_object *j) {
fprintf(stderr, "In JSON object %s\n", s.c_str());
}
-void parse_geometry(int t, json_object *j, drawvec &out, int op, const char *fname, int line, json_object *feature) {
+void parse_geometry(int t, std::shared_ptr<json_object> j, drawvec &out, int op, const char *fname, int line, std::shared_ptr<json_object> feature) {
if (j == NULL || j->type != JSON_ARRAY) {
fprintf(stderr, "%s:%d: expected array for type %d\n", fname, line, t);
json_context(feature);

@@ -103,7 +103,7 @@ void parse_geometry(int t, json_object *j, drawvec &out, int op, const char *fna
}
}
-void canonicalize(json_object *o) {
+void canonicalize(std::shared_ptr<json_object> o) {
if (o->type == JSON_NUMBER) {
std::string s;
long long v;

@@ -128,7 +128,7 @@ void canonicalize(json_object *o) {
}
}
-void stringify_value(json_object *value, int &type, std::string &stringified, const char *reading, int line, json_object *feature) {
+void stringify_value(std::shared_ptr<json_object> value, int &type, std::string &stringified, const char *reading, int line, std::shared_ptr<json_object> feature) {
if (value != NULL) {
int vt = value->type;
std::string val;

@@ -10,7 +10,7 @@ extern const char *geometry_names[GEOM_TYPES];
extern int geometry_within[GEOM_TYPES];
extern int mb_geometry[GEOM_TYPES];
-void json_context(json_object *j);
-void parse_geometry(int t, json_object *j, drawvec &out, int op, const char *fname, int line, json_object *feature);
+void json_context(std::shared_ptr<json_object> j);
+void parse_geometry(int t, std::shared_ptr<json_object> j, drawvec &out, int op, const char *fname, int line, std::shared_ptr<json_object> feature);
-void stringify_value(json_object *value, int &type, std::string &stringified, const char *reading, int line, json_object *feature);
+void stringify_value(std::shared_ptr<json_object> value, int &type, std::string &stringified, const char *reading, int line, std::shared_ptr<json_object> feature);
@@ -163,7 +163,7 @@ struct serialization_state {
std::set<std::string> *exclude = NULL;
std::set<std::string> *include = NULL;
int exclude_all = 0;
-json_object *filter = NULL;
+std::shared_ptr<json_object> filter = NULL;
};
int serialize_feature(struct serialization_state *sst, serial_feature &sf);
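A likely motivation for the shared_ptr member here and in the similar argument structs elsewhere in this commit (read_parallel_arg, write_tile_args, run_prefilter_args): every struct that copies the filter shares ownership of one parsed tree, so no caller has to track which copy is responsible for freeing it. A hypothetical sketch, not code from the diff:

    #include <memory>
    #include "jsonpull/jsonpull.hpp"

    // Hypothetical stand-in for the argument structs in this commit,
    // only to illustrate the shared-ownership point.
    struct worker_args {
        std::shared_ptr<json_object> filter = nullptr;
    };

    void hand_out(std::shared_ptr<json_object> const &filter) {
        worker_args a, b;
        a.filter = filter;  // both copies refer to the same parsed tree
        b.filter = filter;  // no per-copy json_free bookkeeping is needed
    }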
@@ -67,7 +67,7 @@ void aprintf(std::string *buf, const char *format, ...) {
free(tmp);
}
-void handle(std::string message, int z, unsigned x, unsigned y, std::map<std::string, layermap_entry> &layermap, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, int ifmatched, mvt_tile &outtile, json_object *filter) {
+void handle(std::string message, int z, unsigned x, unsigned y, std::map<std::string, layermap_entry> &layermap, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, int ifmatched, mvt_tile &outtile, std::shared_ptr<json_object> filter) {
mvt_tile tile;
int features_added = 0;
bool was_compressed;

@@ -472,7 +472,7 @@ struct arg {
std::set<std::string> *keep_layers = NULL;
std::set<std::string> *remove_layers = NULL;
int ifmatched = 0;
-json_object *filter = NULL;
+std::shared_ptr<json_object> filter = NULL;
};
void *join_worker(void *v) {

@@ -517,7 +517,7 @@ void *join_worker(void *v) {
return NULL;
}
-void handle_tasks(std::map<zxy, std::vector<std::string>> &tasks, std::vector<std::map<std::string, layermap_entry>> &layermaps, sqlite3 *outdb, const char *outdir, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, int ifmatched, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, json_object *filter) {
+void handle_tasks(std::map<zxy, std::vector<std::string>> &tasks, std::vector<std::map<std::string, layermap_entry>> &layermaps, sqlite3 *outdb, const char *outdir, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, int ifmatched, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, std::shared_ptr<json_object> filter) {
pthread_t pthreads[CPUS];
std::vector<arg> args;

@@ -573,12 +573,12 @@ void handle_tasks(std::map<zxy, std::vector<std::string>> &tasks, std::vector<st
}
}
-void handle_vector_layers(json_object *vector_layers, std::map<std::string, layermap_entry> &layermap, std::map<std::string, std::string> &attribute_descriptions) {
+void handle_vector_layers(std::shared_ptr<json_object> vector_layers, std::map<std::string, layermap_entry> &layermap, std::map<std::string, std::string> &attribute_descriptions) {
if (vector_layers != NULL && vector_layers->type == JSON_ARRAY) {
for (size_t i = 0; i < vector_layers->array.size(); i++) {
if (vector_layers->array[i]->type == JSON_HASH) {
-json_object *id = json_hash_get(vector_layers->array[i], "id");
-json_object *desc = json_hash_get(vector_layers->array[i], "description");
+std::shared_ptr<json_object> id = json_hash_get(vector_layers->array[i], "id");
+std::shared_ptr<json_object> desc = json_hash_get(vector_layers->array[i], "description");
if (id != NULL && desc != NULL && id->type == JSON_STRING && desc->type == JSON_STRING) {
std::string sid = id->string;

@@ -592,7 +592,7 @@ void handle_vector_layers(json_object *vector_layers, std::map<std::string, laye
}
}
-json_object *fields = json_hash_get(vector_layers->array[i], "fields");
+std::shared_ptr<json_object> fields = json_hash_get(vector_layers->array[i], "fields");
if (fields != NULL && fields->type == JSON_HASH) {
for (size_t j = 0; j < fields->keys.size(); j++) {
if (fields->keys[j]->type == JSON_STRING && fields->values[j]->type) {

@@ -612,7 +612,7 @@ void handle_vector_layers(json_object *vector_layers, std::map<std::string, laye
}
}
-void decode(struct reader *readers, std::map<std::string, layermap_entry> &layermap, sqlite3 *outdb, const char *outdir, struct stats *st, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, int ifmatched, std::string &attribution, std::string &description, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, std::string &name, json_object *filter, std::map<std::string, std::string> &attribute_descriptions) {
+void decode(struct reader *readers, std::map<std::string, layermap_entry> &layermap, sqlite3 *outdb, const char *outdir, struct stats *st, std::vector<std::string> &header, std::map<std::string, std::vector<std::string>> &mapping, std::set<std::string> &exclude, int ifmatched, std::string &attribution, std::string &description, std::set<std::string> &keep_layers, std::set<std::string> &remove_layers, std::string &name, std::shared_ptr<json_object> filter, std::map<std::string, std::string> &attribute_descriptions) {
std::vector<std::map<std::string, layermap_entry>> layermaps;
for (size_t i = 0; i < CPUS; i++) {
layermaps.push_back(std::map<std::string, layermap_entry>());

@@ -794,10 +794,10 @@ void decode(struct reader *readers, std::map<std::string, layermap_entry> &layer
if (s != NULL) {
json_pull *jp = json_begin_string((const char *) s);
-json_object *o = json_read_tree(jp);
+std::shared_ptr<json_object> o = json_read_tree(jp);
if (o != NULL && o->type == JSON_HASH) {
-json_object *vector_layers = json_hash_get(o, "vector_layers");
+std::shared_ptr<json_object> vector_layers = json_hash_get(o, "vector_layers");
handle_vector_layers(vector_layers, layermap, attribute_descriptions);
json_free(o);

@@ -832,7 +832,7 @@ int main(int argc, char **argv) {
char *csv = NULL;
int force = 0;
int ifmatched = 0;
-json_object *filter = NULL;
+std::shared_ptr<json_object> filter = NULL;
CPUS = sysconf(_SC_NPROCESSORS_ONLN);
tile.cpp (10 changed lines)
@@ -1209,7 +1209,7 @@ struct write_tile_args {
bool still_dropping = false;
int wrote_zoom = 0;
size_t tiling_seg = 0;
-struct json_object *filter = NULL;
+std::shared_ptr<json_object> filter = NULL;
};
bool clip_to_tile(serial_feature &sf, int z, long long buffer) {

@@ -1288,7 +1288,7 @@ bool clip_to_tile(serial_feature &sf, int z, long long buffer) {
return false;
}
-serial_feature next_feature(FILE *geoms, std::atomic<long long> *geompos_in, char *metabase, long long *meta_off, int z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y, long long *original_features, long long *unclipped_features, int nextzoom, int maxzoom, int minzoom, int max_zoom_increment, size_t pass, size_t passes, std::atomic<long long> *along, long long alongminus, int buffer, int *within, bool *first_time, FILE **geomfile, std::atomic<long long> *geompos, std::atomic<double> *oprogress, double todo, const char *fname, int child_shards, struct json_object *filter, const char *stringpool, long long *pool_off, std::vector<std::vector<std::string>> *layer_unmaps) {
+serial_feature next_feature(FILE *geoms, std::atomic<long long> *geompos_in, char *metabase, long long *meta_off, int z, unsigned tx, unsigned ty, unsigned *initial_x, unsigned *initial_y, long long *original_features, long long *unclipped_features, int nextzoom, int maxzoom, int minzoom, int max_zoom_increment, size_t pass, size_t passes, std::atomic<long long> *along, long long alongminus, int buffer, int *within, bool *first_time, FILE **geomfile, std::atomic<long long> *geompos, std::atomic<double> *oprogress, double todo, const char *fname, int child_shards, std::shared_ptr<json_object> filter, const char *stringpool, long long *pool_off, std::vector<std::vector<std::string>> *layer_unmaps) {
while (1) {
serial_feature sf = deserialize_feature(geoms, geompos_in, metabase, meta_off, z, tx, ty, initial_x, initial_y);
if (sf.t < 0) {

@@ -1443,7 +1443,7 @@ struct run_prefilter_args {
char *stringpool = NULL;
long long *pool_off = NULL;
FILE *prefilter_fp = NULL;
-struct json_object *filter = NULL;
+std::shared_ptr<json_object> filter = NULL;
};
void *run_prefilter(void *v) {

@@ -1665,7 +1665,7 @@ bool find_partial(std::vector<partial> &partials, serial_feature &sf, ssize_t &o
return false;
}
-long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, std::atomic<long long> *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, std::atomic<int> *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t tiling_seg, size_t pass, size_t passes, unsigned long long mingap, long long minextent, double fraction, const char *prefilter, const char *postfilter, struct json_object *filter, write_tile_args *arg) {
+long long write_tile(FILE *geoms, std::atomic<long long> *geompos_in, char *metabase, char *stringpool, int z, unsigned tx, unsigned ty, int detail, int min_detail, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, FILE **geomfile, int minzoom, int maxzoom, double todo, std::atomic<long long> *along, long long alongminus, double gamma, int child_shards, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, std::atomic<int> *running, double simplification, std::vector<std::map<std::string, layermap_entry>> *layermaps, std::vector<std::vector<std::string>> *layer_unmaps, size_t tiling_seg, size_t pass, size_t passes, unsigned long long mingap, long long minextent, double fraction, const char *prefilter, const char *postfilter, std::shared_ptr<json_object> filter, write_tile_args *arg) {
int line_detail;
double merge_fraction = 1;
double mingap_fraction = 1;

@@ -2559,7 +2559,7 @@ void *run_thread(void *vargs) {
return NULL;
}
-int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, std::atomic<unsigned> *midx, std::atomic<unsigned> *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry>> &layermaps, const char *prefilter, const char *postfilter, std::map<std::string, attribute_op> const *attribute_accum, struct json_object *filter) {
+int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, std::atomic<unsigned> *midx, std::atomic<unsigned> *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry>> &layermaps, const char *prefilter, const char *postfilter, std::map<std::string, attribute_op> const *attribute_accum, std::shared_ptr<json_object> filter) {
last_progress = 0;
// The existing layermaps are one table per input thread.
tile.hpp (2 changed lines)
@@ -21,7 +21,7 @@ enum attribute_op {
long long write_tile(char **geom, char *metabase, char *stringpool, unsigned *file_bbox, int z, unsigned x, unsigned y, int detail, int min_detail, int basezoom, sqlite3 *outdb, const char *outdir, double droprate, int buffer, const char *fname, FILE **geomfile, int file_minzoom, int file_maxzoom, double todo, char *geomstart, long long along, double gamma, int nlayers);
-int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, std::atomic<unsigned> *midx, std::atomic<unsigned> *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry> > &layermap, const char *prefilter, const char *postfilter, std::map<std::string, attribute_op> const *attribute_accum, struct json_object *filter);
+int traverse_zooms(int *geomfd, off_t *geom_size, char *metabase, char *stringpool, std::atomic<unsigned> *midx, std::atomic<unsigned> *midy, int &maxzoom, int minzoom, sqlite3 *outdb, const char *outdir, int buffer, const char *fname, const char *tmpdir, double gamma, int full_detail, int low_detail, int min_detail, long long *meta_off, long long *pool_off, unsigned *initial_x, unsigned *initial_y, double simplification, std::vector<std::map<std::string, layermap_entry> > &layermap, const char *prefilter, const char *postfilter, std::map<std::string, attribute_op> const *attribute_accum, std::shared_ptr<json_object> filter);
int manage_gap(unsigned long long index, unsigned long long *previndex, double scale, double gamma, double *gap);