mirror of
https://github.com/mapbox/tippecanoe.git
synced 2025-04-08 11:34:12 +00:00
Implement FlatGeobuf Polygon type parsing [#2]
This commit is contained in:
parent
a3dec5116d
commit
8a5a2496c3
2
Makefile
2
Makefile
@ -47,7 +47,7 @@ C = $(wildcard *.c) $(wildcard *.cpp)
|
||||
INCLUDES = -I/usr/local/include -I.
|
||||
LIBS = -L/usr/local/lib
|
||||
|
||||
tippecanoe: geojson.o jsonpull/jsonpull.o tile.o pool.o mbtiles.o geometry.o projection.o memfile.o mvt.o serial.o main.o text.o dirtiles.o plugin.o read_json.o write_json.o geobuf.o evaluator.o geocsv.o csv.o geojson-loop.o
|
||||
tippecanoe: geojson.o jsonpull/jsonpull.o tile.o pool.o mbtiles.o geometry.o projection.o memfile.o mvt.o serial.o main.o text.o dirtiles.o plugin.o read_json.o write_json.o geobuf.o flatgeobuf.o evaluator.o geocsv.o csv.o geojson-loop.o
|
||||
$(CXX) $(PG) $(LIBS) $(FINAL_FLAGS) $(CXXFLAGS) -o $@ $^ $(LDFLAGS) -lm -lz -lsqlite3 -lpthread
|
||||
|
||||
tippecanoe-enumerate: enumerate.o
|
||||
|
109
flatgeobuf.cpp
Normal file
109
flatgeobuf.cpp
Normal file
@ -0,0 +1,109 @@
|
||||
#include <stdio.h>

#include <algorithm>
#include <cstdint>
#include <cstdlib>
#include <iostream>
#include <stdexcept>
#include <string>
#include <vector>

#include "flatgeobuf/feature_generated.h"
#include "flatgeobuf/header_generated.h"
#include "projection.hpp"
#include "serial.hpp"
|
||||
using namespace std;
|
||||
|
||||
struct NodeItem {
|
||||
double minX;
|
||||
double minY;
|
||||
double maxX;
|
||||
double maxY;
|
||||
uint64_t offset;
|
||||
};
|
||||
|
||||
// copied from https://github.com/flatgeobuf/flatgeobuf/blob/master/src/cpp/packedrtree.cpp#L365
|
||||
uint64_t PackedRTreeSize(const uint64_t numItems, const uint16_t nodeSize)
|
||||
{
|
||||
if (nodeSize < 2)
|
||||
throw std::invalid_argument("Node size must be at least 2");
|
||||
if (numItems == 0)
|
||||
throw std::invalid_argument("Number of items must be greater than 0");
|
||||
const uint16_t nodeSizeMin = std::min(std::max(nodeSize, static_cast<uint16_t>(2)), static_cast<uint16_t>(65535));
|
||||
// limit so that resulting size in bytes can be represented by uint64_t
|
||||
if (numItems > static_cast<uint64_t>(1) << 56)
|
||||
throw std::overflow_error("Number of items must be less than 2^56");
|
||||
uint64_t n = numItems;
|
||||
uint64_t numNodes = n;
|
||||
do {
|
||||
n = (n + nodeSizeMin - 1) / nodeSizeMin;
|
||||
numNodes += n;
|
||||
} while (n != 1);
|
||||
return numNodes * sizeof(NodeItem);
|
||||
}
|
||||
|
||||
drawvec readGeometry(const FlatGeobuf::Geometry *geometry, FlatGeobuf::GeometryType geometry_type) {
|
||||
// if it is a GeometryCollection, parse Parts, ignore XY
|
||||
auto xy = geometry->xy();
|
||||
auto ends = geometry->ends();
|
||||
size_t current_end = 0;
|
||||
|
||||
drawvec dv;
|
||||
for (unsigned int i = 0; i < xy->size(); i+=2) {
|
||||
long long x, y;
|
||||
projection->project(xy->Get(i), xy->Get(i+1), 32, &x, &y);
|
||||
if (i == 0 || (ends != nullptr && current_end < ends->size() && i == ends->Get(current_end))) {
|
||||
dv.push_back(draw(VT_MOVETO, x, y));
|
||||
if (i > 0) current_end++;
|
||||
} else {
|
||||
dv.push_back(draw(VT_LINETO, x, y));
|
||||
}
|
||||
}
|
||||
|
||||
return dv;
|
||||
}
|
||||
|
||||
void parse_flatgeobuf(std::vector<struct serialization_state> *sst, const char *src, int layer, std::string layername) {
|
||||
auto header_size = flatbuffers::GetPrefixedSize((const uint8_t *)src + 8);
|
||||
auto header = FlatGeobuf::GetSizePrefixedHeader(src + 8);
|
||||
auto features_count = header->features_count();
|
||||
auto node_size = header->index_node_size();
|
||||
|
||||
auto index_size = PackedRTreeSize(features_count,node_size);
|
||||
|
||||
auto h_geometry_type = header->geometry_type();
|
||||
|
||||
switch (h_geometry_type) {
|
||||
case FlatGeobuf::GeometryType::Unknown :
|
||||
case FlatGeobuf::GeometryType::Point :
|
||||
case FlatGeobuf::GeometryType::LineString :
|
||||
case FlatGeobuf::GeometryType::Polygon :
|
||||
case FlatGeobuf::GeometryType::MultiPoint :
|
||||
case FlatGeobuf::GeometryType::MultiLineString :
|
||||
case FlatGeobuf::GeometryType::MultiPolygon :
|
||||
case FlatGeobuf::GeometryType::GeometryCollection :
|
||||
break;
|
||||
default:
|
||||
fprintf(stderr, "flatgeobuf has unsupported geometry type\n");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
const char* start = src + 8 + 4 + header_size + index_size;
|
||||
for (size_t i = 0; i < features_count; i++) {
|
||||
|
||||
serial_feature sf;
|
||||
|
||||
auto my_sst = &(*sst)[0];
|
||||
|
||||
auto feature_size = flatbuffers::GetPrefixedSize((const uint8_t *)start);
|
||||
auto feature = FlatGeobuf::GetSizePrefixedFeature(start);
|
||||
drawvec dv = readGeometry(feature->geometry(), h_geometry_type);
|
||||
|
||||
sf.layer = layer;
|
||||
sf.layername = layername;
|
||||
sf.segment = my_sst->segment;
|
||||
sf.has_id = false;
|
||||
sf.has_tippecanoe_minzoom = false;
|
||||
sf.has_tippecanoe_maxzoom = false;
|
||||
sf.feature_minzoom = false;
|
||||
sf.seq = (*my_sst->layer_seq);
|
||||
sf.geometry = dv;
|
||||
sf.t = 4;
|
||||
|
||||
serialize_feature(my_sst, sf);
|
||||
start += 4 + feature_size;
|
||||
}
|
||||
}
|
@ -1,2 +1,6 @@
|
||||
#include "flatgeobuf/feature_generated.h"
|
||||
#include "flatgeobuf/header_generated.h"
|
||||
#ifndef FLATGEOBUF_HPP
|
||||
#define FLATGEOBUF_HPP
|
||||
|
||||
void parse_flatgeobuf(std::vector<struct serialization_state> *sst, const char *s, int layer, std::string layername);
|
||||
|
||||
#endif
|
70
main.cpp
70
main.cpp
@ -1354,6 +1354,76 @@ int read_input(std::vector<source> &sources, char *fname, int maxzoom, int minzo
|
||||
}
|
||||
size_t layer = a->second.id;
|
||||
|
||||
// geobuf
|
||||
if (sources[source].format == "fgb" || (sources[source].file.size() > 4 && sources[source].file.substr(sources[source].file.size() - 4) == std::string(".fgb"))) {
|
||||
struct stat st;
|
||||
if (fstat(fd, &st) != 0) {
|
||||
perror("fstat");
|
||||
perror(sources[source].file.c_str());
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
char *map = (char *) mmap(NULL, st.st_size, PROT_READ, MAP_PRIVATE, fd, 0);
|
||||
if (map == MAP_FAILED) {
|
||||
fprintf(stderr, "%s: mmap: %s: %s\n", *av, reading.c_str(), strerror(errno));
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
std::atomic<long long> layer_seq[CPUS];
|
||||
double dist_sums[CPUS];
|
||||
size_t dist_counts[CPUS];
|
||||
std::vector<struct serialization_state> sst;
|
||||
sst.resize(CPUS);
|
||||
|
||||
for (size_t i = 0; i < CPUS; i++) {
|
||||
layer_seq[i] = overall_offset;
|
||||
dist_sums[i] = 0;
|
||||
dist_counts[i] = 0;
|
||||
|
||||
sst[i].fname = reading.c_str();
|
||||
sst[i].line = 0;
|
||||
sst[i].layer_seq = &layer_seq[i];
|
||||
sst[i].progress_seq = &progress_seq;
|
||||
sst[i].readers = &readers;
|
||||
sst[i].segment = i;
|
||||
sst[i].initial_x = &initial_x[i];
|
||||
sst[i].initial_y = &initial_y[i];
|
||||
sst[i].initialized = &initialized[i];
|
||||
sst[i].dist_sum = &dist_sums[i];
|
||||
sst[i].dist_count = &dist_counts[i];
|
||||
sst[i].want_dist = guess_maxzoom;
|
||||
sst[i].maxzoom = maxzoom;
|
||||
sst[i].filters = prefilter != NULL || postfilter != NULL;
|
||||
sst[i].uses_gamma = uses_gamma;
|
||||
sst[i].layermap = &layermaps[i];
|
||||
sst[i].exclude = exclude;
|
||||
sst[i].include = include;
|
||||
sst[i].exclude_all = exclude_all;
|
||||
sst[i].basezoom = basezoom;
|
||||
sst[i].attribute_types = attribute_types;
|
||||
}
|
||||
|
||||
parse_flatgeobuf(&sst, map, st.st_size, layer, sources[layer].layer);
|
||||
|
||||
for (size_t i = 0; i < CPUS; i++) {
|
||||
dist_sum += dist_sums[i];
|
||||
dist_count += dist_counts[i];
|
||||
}
|
||||
|
||||
if (munmap(map, st.st_size) != 0) {
|
||||
perror("munmap source file");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
if (close(fd) != 0) {
|
||||
perror("close");
|
||||
exit(EXIT_FAILURE);
|
||||
}
|
||||
|
||||
overall_offset = layer_seq[0];
|
||||
checkdisk(&readers);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (sources[source].format == "geobuf" || (sources[source].file.size() > 7 && sources[source].file.substr(sources[source].file.size() - 7) == std::string(".geobuf"))) {
|
||||
struct stat st;
|
||||
if (fstat(fd, &st) != 0) {
|
||||
|
Loading…
x
Reference in New Issue
Block a user