Compare commits


11 Commits

9 changed files with 503 additions and 184 deletions

View File

@@ -10,7 +10,6 @@
* Output of `decode` utility is now in GeoJSON format
* Tile generation with a minzoom spends less time on unused lower zoom levels
* Bare geometries without a Feature wrapper are accepted
* Default tile resolution is 4096 units at all zooms since renderers assume it
## 1.2.0

View File

@@ -67,8 +67,8 @@ Options
* -z _zoom_: Base (maxzoom) zoom level (default 14)
* -Z _zoom_: Lowest (minzoom) zoom level (default 0)
* -d _detail_: Detail at base zoom level (default 12, for tile resolution of 4096)
* -D _detail_: Detail at lower zoom levels (default 12, for tile resolution of 4096)
* -d _detail_: Detail at base zoom level (default 12 at -z14 or higher, or 13 at -z13 or lower; detail beyond 13 causes rendering problems in Mapbox GL)
* -D _detail_: Detail at lower zoom levels (default 10, for tile resolution of 1024)
* -m _detail_: Minimum detail that it will try if tiles are too big at regular detail (default 7)
* -b _pixels_: Buffer size where features are duplicated from adjacent tiles. Units are "screen pixels"--1/256th of the tile width or height. (default 5)
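
The detail values above are bit counts: a detail of d gives a tile resolution of 2^d coordinate units per tile edge, which is where the 4096 and 1024 figures come from. A minimal sketch of the relationship (the variable names mirror the globals in geojson.c, but the program itself is only illustrative):

#include <stdio.h>

int main(void) {
	int full_detail = 12; /* -d: detail at the base (max) zoom; 1 << 12 = 4096 units */
	int low_detail = 10;  /* -D: detail at lower zoom levels;   1 << 10 = 1024 units */
	int min_detail = 7;   /* -m: lowest detail tried when a tile is still too big */

	printf("-d %d -> %d units per tile edge\n", full_detail, 1 << full_detail);
	printf("-D %d -> %d units per tile edge\n", low_detail, 1 << low_detail);
	printf("-m %d -> %d units per tile edge\n", min_detail, 1 << min_detail);
	return 0;
}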

View File

@@ -295,12 +295,6 @@ void handle(std::string message, int z, unsigned x, unsigned y, int describe) {
}
printf("[ %f, %f ]", rings[i][j].lon, rings[i][j].lat);
} else {
if (j != 0) {
printf(", ");
}
printf("[ %f, %f ]", rings[i][0].lon, rings[i][0].lat);
}
}

geojson.c
View File

@@ -17,6 +17,7 @@
#include <limits.h>
#include <sqlite3.h>
#include <stdarg.h>
#include <pthread.h>
#include "jsonpull.h"
#include "tile.h"
@@ -26,7 +27,7 @@
#include "version.h"
#include "memfile.h"
int low_detail = 12;
int low_detail = 10;
int full_detail = -1;
int min_detail = 7;
int quiet = 0;
@@ -116,9 +117,9 @@ void serialize_string(FILE *out, const char *s, long long *fpos, const char *fna
*fpos += len + 1;
}
void parse_geometry(int t, json_object *j, unsigned *bbox, long long *fpos, FILE *out, int op, const char *fname, json_pull *source, long long *wx, long long *wy, int *initialized) {
void parse_geometry(int t, json_object *j, unsigned *bbox, long long *fpos, FILE *out, int op, const char *fname, int line, long long *wx, long long *wy, int *initialized) {
if (j == NULL || j->type != JSON_ARRAY) {
fprintf(stderr, "%s:%d: expected array for type %d\n", fname, source->line, t);
fprintf(stderr, "%s:%d: expected array for type %d\n", fname, line, t);
return;
}
@@ -134,7 +135,7 @@ void parse_geometry(int t, json_object *j, unsigned *bbox, long long *fpos, FILE
}
}
parse_geometry(within, j->array[i], bbox, fpos, out, op, fname, source, wx, wy, initialized);
parse_geometry(within, j->array[i], bbox, fpos, out, op, fname, line, wx, wy, initialized);
}
} else {
if (j->length >= 2 && j->array[0]->type == JSON_NUMBER && j->array[1]->type == JSON_NUMBER) {
@@ -147,7 +148,7 @@ void parse_geometry(int t, json_object *j, unsigned *bbox, long long *fpos, FILE
static int warned = 0;
if (!warned) {
fprintf(stderr, "%s:%d: ignoring dimensions beyond two\n", fname, source->line);
fprintf(stderr, "%s:%d: ignoring dimensions beyond two\n", fname, line);
warned = 1;
}
}
@@ -181,7 +182,7 @@ void parse_geometry(int t, json_object *j, unsigned *bbox, long long *fpos, FILE
*wx = x;
*wy = y;
} else {
fprintf(stderr, "%s:%d: malformed point\n", fname, source->line);
fprintf(stderr, "%s:%d: malformed point\n", fname, line);
}
}
@@ -415,12 +416,12 @@ long long addpool(struct memfile *poolfile, struct memfile *treefile, char *s, c
return off;
}
int serialize_geometry(json_object *geometry, json_object *properties, const char *reading, json_pull *jp, long long *seq, long long *metapos, long long *geompos, long long *indexpos, struct pool *exclude, struct pool *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, const char *fname, int maxzoom, int layer, double droprate, unsigned *file_bbox, json_object *tippecanoe) {
int serialize_geometry(json_object *geometry, json_object *properties, const char *reading, int line, long long *seq, long long *metapos, long long *geompos, long long *indexpos, struct pool *exclude, struct pool *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, const char *fname, int maxzoom, int layer, double droprate, unsigned *file_bbox, json_object *tippecanoe) {
json_object *geometry_type = json_hash_get(geometry, "type");
if (geometry_type == NULL) {
static int warned = 0;
if (!warned) {
fprintf(stderr, "%s:%d: null geometry (additional not reported)\n", reading, jp->line);
fprintf(stderr, "%s:%d: null geometry (additional not reported)\n", reading, line);
warned = 1;
}
@@ -428,13 +429,13 @@ int serialize_geometry(json_object *geometry, json_object *properties, const cha
}
if (geometry_type->type != JSON_STRING) {
fprintf(stderr, "%s:%d: geometry without type\n", reading, jp->line);
fprintf(stderr, "%s:%d: geometry without type\n", reading, line);
return 0;
}
json_object *coordinates = json_hash_get(geometry, "coordinates");
if (coordinates == NULL || coordinates->type != JSON_ARRAY) {
fprintf(stderr, "%s:%d: feature without coordinates array\n", reading, jp->line);
fprintf(stderr, "%s:%d: feature without coordinates array\n", reading, line);
return 0;
}
@@ -445,7 +446,7 @@ int serialize_geometry(json_object *geometry, json_object *properties, const cha
}
}
if (t >= GEOM_TYPES) {
fprintf(stderr, "%s:%d: Can't handle geometry type %s\n", reading, jp->line, geometry_type->string);
fprintf(stderr, "%s:%d: Can't handle geometry type %s\n", reading, line, geometry_type->string);
return 0;
}
@@ -511,7 +512,7 @@ int serialize_geometry(json_object *geometry, json_object *properties, const cha
} else if (properties->values[i] != NULL && (properties->values[i]->type == JSON_NULL)) {
;
} else {
fprintf(stderr, "%s:%d: Unsupported property type for %s\n", reading, jp->line, properties->keys[i]->string);
fprintf(stderr, "%s:%d: Unsupported property type for %s\n", reading, line, properties->keys[i]->string);
continue;
}
}
@@ -538,7 +539,7 @@ int serialize_geometry(json_object *geometry, json_object *properties, const cha
serialize_long_long(geomfile, metastart, geompos, fname);
long long wx = initial_x, wy = initial_y;
parse_geometry(t, coordinates, bbox, geompos, geomfile, VT_MOVETO, fname, jp, &wx, &wy, &initialized);
parse_geometry(t, coordinates, bbox, geompos, geomfile, VT_MOVETO, fname, line, &wx, &wy, &initialized);
serialize_byte(geomfile, VT_END, geompos, fname);
/*
@@ -598,6 +599,345 @@ int serialize_geometry(json_object *geometry, json_object *properties, const cha
return 1;
}
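/*
 * Hand-off between the parser threads and the serializer: parsed features are
 * pushed onto a doubly-linked queue protected by gq_lock and announced with
 * gq_notify, and a single consumer thread (run_queue, below) pops them and
 * calls serialize_geometry(), so the temporary output files are only written
 * from one thread. json_lock serializes calls into the JSON parser and into
 * json_free()/json_disconnect(), which this code does not treat as thread-safe.
 */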
pthread_mutex_t json_lock = PTHREAD_MUTEX_INITIALIZER;
pthread_mutex_t gq_lock = PTHREAD_MUTEX_INITIALIZER;
pthread_cond_t gq_notify;
struct geometry_queue {
json_object *geometry;
json_object *properties;
json_object *tippecanoe;
json_object *to_free;
int line;
struct geometry_queue *next;
struct geometry_queue *prev;
} *gq_head = NULL, *gq_tail = NULL;
void enqueue_geometry(json_object *geometry, json_object *properties, json_object *tippecanoe, json_object *to_free, int line) {
struct geometry_queue *gq = malloc(sizeof(struct geometry_queue));
if (gq == NULL) {
perror("malloc");
exit(EXIT_FAILURE);
}
gq->geometry = geometry;
gq->properties = properties;
gq->tippecanoe = tippecanoe;
gq->to_free = to_free;
gq->next = NULL;
gq->prev = NULL;
gq->line = line;
if (pthread_mutex_lock(&gq_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
}
if (gq_head == NULL) {
gq_head = gq_tail = gq;
} else {
gq->prev = gq_tail;
gq_tail->next = gq;
gq_tail = gq;
}
if (pthread_cond_signal(&gq_notify) != 0) {
perror("pthread_cond_signal");
exit(EXIT_FAILURE);
}
if (pthread_mutex_unlock(&gq_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
}
}
struct run_queue_args {
const char *reading;
long long *seq;
long long *metapos;
long long *geompos;
long long *indexpos;
struct pool *exclude;
struct pool *include;
int exclude_all;
FILE *metafile;
FILE *geomfile;
FILE *indexfile;
struct memfile *poolfile;
struct memfile *treefile;
const char *fname;
int maxzoom;
int layer;
double droprate;
unsigned *file_bbox;
};
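/*
 * Consumer thread: blocks on gq_notify, drains the queue, and serializes each
 * dequeued geometry. An entry whose geometry and to_free are both NULL is the
 * shutdown signal.
 */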
void *run_queue(void *v) {
struct run_queue_args *a = v;
while (1) {
if (pthread_mutex_lock(&gq_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
}
// Wait until something has been queued; re-checking the queue guards against
// spurious wakeups and against signals that arrived before this thread
// started waiting.
while (gq_head == NULL) {
if (pthread_cond_wait(&gq_notify, &gq_lock) != 0) {
perror("pthread_cond_wait");
exit(EXIT_FAILURE);
}
}
while (gq_head != NULL) {
struct geometry_queue *gq = gq_head;
gq_head = gq_head->next;
if (gq_head != NULL) {
gq_head->prev = NULL;
}
if (pthread_mutex_unlock(&gq_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
}
// Magic value to signal end of queue
if (gq->geometry == NULL && gq->to_free == NULL) {
return NULL;
}
if (gq->geometry != NULL) {
serialize_geometry(gq->geometry, gq->properties, a->reading, gq->line, a->seq, a->metapos, a->geompos, a->indexpos, a->exclude, a->include, a->exclude_all, a->metafile, a->geomfile, a->indexfile, a->poolfile, a->treefile, a->fname, a->maxzoom, a->layer, a->droprate, a->file_bbox, gq->tippecanoe);
}
if (gq->to_free != NULL) {
if (pthread_mutex_lock(&json_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
}
json_free(gq->to_free);
if (pthread_mutex_unlock(&json_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
}
}
free(gq);
if (pthread_mutex_lock(&gq_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
}
}
if (pthread_mutex_unlock(&gq_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
}
}
return NULL;
}
void parse_json(json_pull *jp, const char *reading) { // , long long *seq, long long *metapos, long long *geompos, long long *indexpos, struct pool *exclude, struct pool *include, int exclude_all, FILE *metafile, FILE *geomfile, FILE *indexfile, struct memfile *poolfile, struct memfile *treefile, char *fname, int maxzoom, int layer, double droprate, unsigned *file_bbox) {
long long found_hashes = 0;
long long found_features = 0;
long long found_geometries = 0;
while (1) {
if (pthread_mutex_lock(&json_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
}
json_object *j = json_read(jp);
if (pthread_mutex_unlock(&json_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
}
if (j == NULL) {
if (jp->error != NULL) {
fprintf(stderr, "%s:%d: %s\n", reading, jp->line, jp->error);
}
if (pthread_mutex_lock(&json_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
}
json_free(jp->root);
if (pthread_mutex_unlock(&json_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
}
break;
}
if (j->type == JSON_HASH) {
found_hashes++;
if (found_hashes == 50 && found_features == 0 && found_geometries == 0) {
fprintf(stderr, "%s:%d: Warning: not finding any GeoJSON features or geometries in input yet after 50 objects.\n", reading, jp->line);
}
}
json_object *type = json_hash_get(j, "type");
if (type == NULL || type->type != JSON_STRING) {
continue;
}
if (found_features == 0) {
int i;
int is_geometry = 0;
for (i = 0; i < GEOM_TYPES; i++) {
if (strcmp(type->string, geometry_names[i]) == 0) {
is_geometry = 1;
break;
}
}
if (is_geometry) {
if (j->parent != NULL) {
if (j->parent->type == JSON_ARRAY) {
if (j->parent->parent->type == JSON_HASH) {
json_object *geometries = json_hash_get(j->parent->parent, "geometries");
if (geometries != NULL) {
// Parent of Parent must be a GeometryCollection
is_geometry = 0;
}
}
} else if (j->parent->type == JSON_HASH) {
json_object *geometry = json_hash_get(j->parent, "geometry");
if (geometry != NULL) {
// Parent must be a Feature
is_geometry = 0;
}
}
}
}
if (is_geometry) {
if (found_features != 0 && found_geometries == 0) {
fprintf(stderr, "%s:%d: Warning: found a mixture of features and bare geometries\n", reading, jp->line);
}
found_geometries++;
if (pthread_mutex_lock(&json_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
}
json_disconnect(j);
if (pthread_mutex_unlock(&json_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
}
enqueue_geometry(j, NULL, NULL, j, jp->line);
continue;
}
}
if (strcmp(type->string, "Feature") != 0) {
continue;
}
if (pthread_mutex_lock(&json_lock) != 0) {
perror("pthread_mutex_lock");
exit(EXIT_FAILURE);
}
json_disconnect(j);
if (pthread_mutex_unlock(&json_lock) != 0) {
perror("pthread_mutex_unlock");
exit(EXIT_FAILURE);
}
if (found_features == 0 && found_geometries != 0) {
fprintf(stderr, "%s:%d: Warning: found a mixture of features and bare geometries\n", reading, jp->line);
}
found_features++;
json_object *geometry = json_hash_get(j, "geometry");
if (geometry == NULL) {
fprintf(stderr, "%s:%d: feature with no geometry\n", reading, jp->line);
json_free(j); // Already disconnected, so don't need lock
continue;
}
json_object *properties = json_hash_get(j, "properties");
if (properties == NULL || (properties->type != JSON_HASH && properties->type != JSON_NULL)) {
fprintf(stderr, "%s:%d: feature without properties hash\n", reading, jp->line);
json_free(j); // Already disconnected, so don't need lock
continue;
}
json_object *tippecanoe = json_hash_get(j, "tippecanoe");
json_object *geometries = json_hash_get(geometry, "geometries");
if (geometries != NULL) {
int g;
for (g = 0; g < geometries->length; g++) {
enqueue_geometry(geometries->array[g], properties, tippecanoe, NULL, jp->line);
}
enqueue_geometry(NULL, NULL, NULL, j, jp->line);
} else {
enqueue_geometry(geometry, properties, tippecanoe, j, jp->line);
}
/* XXX check for any non-features in the outer object */
}
}
struct parser_args {
json_pull *jp;
const char *reading;
};
void *run_parser(void *vargs) {
struct parser_args *a = vargs;
parse_json(a->jp, a->reading);
return NULL;
}
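/*
 * json_map_read() is a read callback that feeds the JSON parser from an
 * mmapped buffer instead of a FILE, which is what allows the input to be cut
 * into byte ranges and parsed by several threads at once.
 */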
struct jsonmap {
char *map;
long long off;
long long end;
};
int json_map_read(struct json_pull *jp, char *buffer, int n) {
struct jsonmap *jm = jp->source;
if (jm->off + n >= jm->end) {
n = jm->end - jm->off;
}
memcpy(buffer, jm->map + jm->off, n);
jm->off += n;
return n;
}
struct json_pull *json_begin_map(char *map, long long len) {
struct jsonmap *jm = malloc(sizeof(struct jsonmap));
jm->map = map;
jm->off = 0;
jm->end = len;
return json_begin(json_map_read, jm);
}
int read_json(int argc, char **argv, char *fname, const char *layername, int maxzoom, int minzoom, sqlite3 *outdb, struct pool *exclude, struct pool *include, int exclude_all, double droprate, int buffer, const char *tmpdir, double gamma, char *prevent, char *additional) {
int ret = EXIT_SUCCESS;
@@ -693,133 +1033,134 @@ int read_json(int argc, char **argv, char *fname, const char *layername, int max
for (layer = 0; layer < nlayers; layer++) {
json_pull *jp;
const char *reading;
FILE *fp;
long long found_hashes = 0;
long long found_features = 0;
long long found_geometries = 0;
int fd;
if (layer >= argc) {
reading = "standard input";
fp = stdin;
fd = 0;
} else {
reading = argv[layer];
fp = fopen(argv[layer], "r");
if (fp == NULL) {
fd = open(argv[layer], O_RDONLY);
if (fd < 0) {
perror(argv[layer]);
continue;
}
}
jp = json_begin_file(fp);
while (1) {
json_object *j = json_read(jp);
if (j == NULL) {
if (jp->error != NULL) {
fprintf(stderr, "%s:%d: %s\n", reading, jp->line, jp->error);
}
json_free(jp->root);
break;
}
if (j->type == JSON_HASH) {
found_hashes++;
if (found_hashes == 50 && found_features == 0 && found_geometries == 0) {
fprintf(stderr, "%s:%d: Warning: not finding any GeoJSON features or geometries in input yet after 50 objects.\n", reading, jp->line);
}
}
json_object *type = json_hash_get(j, "type");
if (type == NULL || type->type != JSON_STRING) {
continue;
}
if (found_features == 0) {
int i;
int is_geometry = 0;
for (i = 0; i < GEOM_TYPES; i++) {
if (strcmp(type->string, geometry_names[i]) == 0) {
is_geometry = 1;
break;
}
}
if (is_geometry) {
if (j->parent != NULL) {
if (j->parent->type == JSON_ARRAY) {
if (j->parent->parent->type == JSON_HASH) {
json_object *geometries = json_hash_get(j->parent->parent, "geometries");
if (geometries != NULL) {
// Parent of Parent must be a GeometryCollection
is_geometry = 0;
}
}
} else if (j->parent->type == JSON_HASH) {
json_object *geometry = json_hash_get(j->parent, "geometry");
if (geometry != NULL) {
// Parent must be a Feature
is_geometry = 0;
}
}
}
}
if (is_geometry) {
if (found_features != 0 && found_geometries == 0) {
fprintf(stderr, "%s:%d: Warning: found a mixture of features and bare geometries\n", reading, jp->line);
}
found_geometries++;
serialize_geometry(j, NULL, reading, jp, &seq, &metapos, &geompos, &indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox, NULL);
json_free(j);
continue;
}
}
if (strcmp(type->string, "Feature") != 0) {
continue;
}
if (found_features == 0 && found_geometries != 0) {
fprintf(stderr, "%s:%d: Warning: found a mixture of features and bare geometries\n", reading, jp->line);
}
found_features++;
json_object *geometry = json_hash_get(j, "geometry");
if (geometry == NULL) {
fprintf(stderr, "%s:%d: feature with no geometry\n", reading, jp->line);
json_free(j);
continue;
}
json_object *properties = json_hash_get(j, "properties");
if (properties == NULL || (properties->type != JSON_HASH && properties->type != JSON_NULL)) {
fprintf(stderr, "%s:%d: feature without properties hash\n", reading, jp->line);
json_free(j);
continue;
}
json_object *tippecanoe = json_hash_get(j, "tippecanoe");
json_object *geometries = json_hash_get(geometry, "geometries");
if (geometries != NULL) {
int g;
for (g = 0; g < geometries->length; g++) {
serialize_geometry(geometries->array[g], properties, reading, jp, &seq, &metapos, &geompos, &indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox, tippecanoe);
}
} else {
serialize_geometry(geometry, properties, reading, jp, &seq, &metapos, &geompos, &indexpos, exclude, include, exclude_all, metafile, geomfile, indexfile, poolfile, treefile, fname, maxzoom, layer, droprate, file_bbox, tippecanoe);
}
json_free(j);
/* XXX check for any non-features in the outer object */
if (pthread_cond_init(&gq_notify, NULL) != 0) {
perror("pthread_cond_init");
exit(EXIT_FAILURE);
}
json_end(jp);
fclose(fp);
struct run_queue_args a;
a.reading = reading;
a.seq = &seq; // XXX sequence for sorting vs sequence for progress
a.metapos = &metapos;
a.geompos = &geompos;
a.indexpos = &indexpos;
a.exclude = exclude;
a.include = include;
a.exclude_all = exclude_all;
a.metafile = metafile;
a.geomfile = geomfile;
a.indexfile = indexfile;
a.poolfile = poolfile;
a.treefile = treefile;
a.fname = fname;
a.maxzoom = maxzoom;
a.layer = layer;
a.droprate = droprate;
a.file_bbox = file_bbox;
pthread_t reader;
if (pthread_create(&reader, NULL, run_queue, &a) != 0) {
perror("pthread_create");
exit(EXIT_FAILURE);
}
struct stat st;
char *map = NULL;
off_t off;
if (fstat(fd, &st) == 0) {
off = lseek(fd, 0, SEEK_CUR);
if (off >= 0) {
map = mmap(NULL, st.st_size - off, PROT_READ, MAP_PRIVATE, fd, off);
}
}
if (map != NULL && map != MAP_FAILED) {
int split = 1;
if (split) {
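// Cut the mapped input into THREADS byte ranges, advancing each cut point to
// the next newline so no range starts in the middle of a line, then parse
// each range in its own thread.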
#define THREADS 10
long long segs[THREADS + 1];
pthread_t pthreads[THREADS];
struct parser_args parser_args[THREADS];
segs[0] = 0;
segs[THREADS] = st.st_size - off;
int i;
for (i = 1; i < THREADS; i++) {
segs[i] = (st.st_size - off) * i / THREADS;
while (segs[i] < st.st_size - off && map[segs[i]] != '\n') {
segs[i]++;
}
printf("%d %lld\n", i, segs[i]);
}
for (i = 0; i < THREADS; i++) {
parser_args[i].jp = jp = json_begin_map(map + segs[i], segs[i + 1] - segs[i]);
parser_args[i].reading = reading;
if (pthread_create(&pthreads[i], NULL, run_parser, &parser_args[i]) != 0) {
perror("pthread_create");
exit(EXIT_FAILURE);
}
}
for (i = 0; i < THREADS; i++) {
void *retval;
if (pthread_join(pthreads[i], &retval) != 0) {
perror("pthread_join");
}
free(parser_args[i].jp->source);
json_end(parser_args[i].jp);
}
} else {
jp = json_begin_map(map, st.st_size - off);
parse_json(jp, reading);
free(jp->source);
json_end(jp);
}
munmap(map, st.st_size - off);
close(fd);
} else {
FILE *fp = fdopen(fd, "r");
if (fp == NULL) {
perror(argv[layer]);
close(fd);
continue;
}
jp = json_begin_file(fp);
parse_json(jp, reading);
json_end(jp);
fclose(fp);
}
enqueue_geometry(NULL, NULL, NULL, NULL, 0); // Shutdown message for the reader
void *retval;
if (pthread_join(reader, &retval) != 0) {
perror("pthread_join");
exit(EXIT_FAILURE);
}
}
fclose(metafile);
@@ -1274,7 +1615,15 @@ int main(int argc, char **argv) {
}
if (full_detail <= 0) {
full_detail = 12;
if (maxzoom >= 14) {
// ~0.5m accuracy at z14
full_detail = 12;
} else {
// as good as we can get without breaking GL
full_detail = 13;
}
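// Unconditionally overrides the choices above so that maxzoom + detail = 26.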
full_detail = 26 - maxzoom;
}
if (full_detail < min_detail || low_detail < min_detail) {

View File

@@ -98,6 +98,7 @@ drawvec remove_noop(drawvec geom, int type, int shift) {
}
if (geom[i].op == VT_CLOSEPATH) {
fprintf(stderr, "Shouldn't happen\n");
out.push_back(geom[i]);
} else { /* moveto or lineto */
out.push_back(geom[i]);
@@ -309,36 +310,6 @@ drawvec clean_or_clip_poly(drawvec &geom, int z, int detail, int buffer, bool cl
return out;
}
drawvec close_poly(drawvec &geom) {
drawvec out;
for (unsigned i = 0; i < geom.size(); i++) {
if (geom[i].op == VT_MOVETO) {
unsigned j;
for (j = i + 1; j < geom.size(); j++) {
if (geom[j].op != VT_LINETO) {
break;
}
}
if (j - 1 > i) {
if (geom[j - 1].x != geom[i].x || geom[j - 1].y != geom[i].y) {
fprintf(stderr, "Internal error: polygon not closed\n");
}
}
for (unsigned n = i; n < j - 1; n++) {
out.push_back(geom[n]);
}
out.push_back(draw(VT_CLOSEPATH, 0, 0));
i = j - 1;
}
}
return out;
}
drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double *accum_area) {
drawvec out;
long long pixel = (1 << (32 - detail - z)) * 2;

View File

@@ -21,7 +21,6 @@ void to_tile_scale(drawvec &geom, int z, int detail);
drawvec remove_noop(drawvec geom, int type, int shift);
drawvec clip_point(drawvec &geom, int z, int detail, long long buffer);
drawvec clean_or_clip_poly(drawvec &geom, int z, int detail, int buffer, bool clip);
drawvec close_poly(drawvec &geom);
drawvec reduce_tiny_poly(drawvec &geom, int z, int detail, bool *reduced, double *accum_area);
drawvec clip_lines(drawvec &geom, int z, int detail, long long buffer);
int quick_check(long long *bbox, int z, int detail, long long buffer);

View File

@@ -332,20 +332,17 @@ again:
/////////////////////////// Comma
if (c == ',') {
if (j->container == NULL) {
j->error = "Found comma at top level";
return NULL;
}
if (j->container != NULL) {
if (j->container->expect != JSON_COMMA) {
j->error = "Found unexpected comma";
return NULL;
}
if (j->container->expect != JSON_COMMA) {
j->error = "Found unexpected comma";
return NULL;
}
if (j->container->type == JSON_HASH) {
j->container->expect = JSON_KEY;
} else {
j->container->expect = JSON_ITEM;
if (j->container->type == JSON_HASH) {
j->container->expect = JSON_KEY;
} else {
j->container->expect = JSON_ITEM;
}
}
if (cb != NULL) {
@@ -568,11 +565,19 @@ void json_free(json_object *o) {
free(o->string);
}
json_disconnect(o);
free(o);
}
void json_disconnect(json_object *o) {
// Expunge references to this as an array element
// or a hash key or value.
if (o->parent != NULL) {
if (o->parent->type == JSON_ARRAY) {
int i;
for (i = 0; i < o->parent->length; i++) {
if (o->parent->array[i] == o) {
break;
@@ -586,6 +591,8 @@ void json_free(json_object *o) {
}
if (o->parent->type == JSON_HASH) {
int i;
for (i = 0; i < o->parent->length; i++) {
if (o->parent->keys[i] == o) {
o->parent->keys[i] = fabricate_object(o->parent, JSON_NULL);
@@ -612,5 +619,5 @@ void json_free(json_object *o) {
}
}
free(o);
o->parent = NULL;
}

View File

@@ -59,5 +59,6 @@ json_object *json_read_tree(json_pull *j);
json_object *json_read(json_pull *j);
json_object *json_read_separators(json_pull *j, json_separator_callback cb, void *state);
void json_free(json_object *j);
void json_disconnect(json_object *j);
json_object *json_hash_get(json_object *o, const char *s);

View File

@@ -656,7 +656,6 @@ long long write_tile(char **geoms, char *metabase, char *stringpool, int z, unsi
// Scaling may have made the polygon degenerate.
// Give Clipper a chance to try to fix it.
geom = clean_or_clip_poly(geom, 0, 0, 0, false);
geom = close_poly(geom);
}
if (t == VT_POINT || to_feature(geom, NULL)) {