var layerCount = 0
for (var layer in featuresByLayer) {
  var gridFeatures = aggregateCells(
    featuresByLayer[layer],
    tile,
    z + options._depth,
    options.aggregations[layer],
    options.postAggregations[layer])
  aggregatedLayers[layer] = tileFromFeatureCollection(gridFeatures, tile)
  layerCount++
}
// serialize, compress, and save the tile
if (layerCount) {
  var buff = vtpbf.fromGeojsonVt(aggregatedLayers)
  zlib.gzip(buff, function (err, zipped) {
    if (err) { return next(err) }
    db.putTile(tile[0], tile[1], tile[2], zipped, next)
  })
} else {
  next()
}
}
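
// A minimal, self-contained sketch (hypothetical layer name, tile coordinates, and
// input `featureCollection`, not taken from the project above) of the same
// serialize -> gzip pattern: cut a tile with geojson-vt, encode it with vt-pbf,
// then compress it before storage.
var geojsonvt = require('geojson-vt')
var vtpbf = require('vt-pbf')
var zlib = require('zlib')

var tileIndex = geojsonvt(featureCollection, { maxZoom: 14 })
var exampleTile = tileIndex.getTile(14, 8192, 5461)
if (exampleTile) {
  var encoded = vtpbf.fromGeojsonVt({ aggregated: exampleTile })
  zlib.gzip(encoded, function (err, zipped) {
    if (err) throw err
    // `zipped` is the gzipped protobuf that the code above hands to db.putTile(z, x, y, ...)
  })
}
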
for (const feature of tile.features) {
  for (const property in feature.tags) {
    fields[property] = typeof feature.tags[property];
  }
}
if (options.storeClusterExpansionZoom) {
  for (const feature of tile.features) {
    if (feature.tags.cluster_id) {
      feature.tags['clusterExpansionZoom'] = clustered.getClusterExpansionZoom(feature.tags.cluster_id);
    }
  }
}
// Convert to PBF and compress before insertion
compressedTiles.push(
  gzip(VTpbf.fromGeojsonVt({'geojsonLayer': tile}, {version: options.tileSpecVersion, extent: options.extent})).then((compressed) => {
    if (compressed.length > 500000) {
      return Promise.reject(new Error(`Tile z:${z}, x:${x}, y:${y} greater than 500KB compressed. Try increasing radius or max zoom, or try including fewer cluster properties.`));
    }
    statements.push(
      db.run(
        'INSERT INTO tiles (zoom_level, tile_column, tile_row, tile_data) VALUES(?, ?, ?, ?)',
        z, x, zoomDimension - 1 - y, compressed));
    return Promise.resolve();
  })
);
}
}
}
// Complete metadata table by adding layer definition
const vectorJson = {
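
// A minimal sketch (assumed inputs and option values) tying the ideas above together:
// supercluster produces the geojson-vt style tile, clusters get a clusterExpansionZoom
// tag, and the MBTiles row index is flipped to the TMS scheme, which is what
// `zoomDimension - 1 - y` (i.e. 2^z - 1 - y) does in the INSERT above.
const Supercluster = require('supercluster');
const vtpbf = require('vt-pbf');
const zlib = require('zlib');

const index = new Supercluster({ radius: 40, maxZoom: 14 });
index.load(points); // `points` is an assumed array of GeoJSON Point features

const z = 3, x = 2, y = 1;
const tile = index.getTile(z, x, y);
if (tile) {
  for (const feature of tile.features) {
    if (feature.tags.cluster_id) {
      feature.tags.clusterExpansionZoom = index.getClusterExpansionZoom(feature.tags.cluster_id);
    }
  }
  const compressed = zlib.gzipSync(vtpbf.fromGeojsonVt({ geojsonLayer: tile }));
  const tmsRow = Math.pow(2, z) - 1 - y; // row index as stored in the MBTiles `tiles` table
  // `compressed` and `tmsRow` would then be written to the tiles table as above
}
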
let x = parseInt(p.x);
let y = parseInt(p.y);
let z = parseInt(p.z);
let format = p.format;
let tileData = await queryTile(z, x, y);
logger.debug(JSON.stringify(tileData));
let features = tileData.map(dbObjToGeoJSON);
try {
  // Send a vector tile
  if (format === 'pbf') {
    if (features.length > 0) {
      const tileIndex = gvt(fc(features), {indexMaxZoom: z, maxZoom: z});
      const tile = tileIndex.getTile(z, x, y);
      if (tile) { // getTile can return null when no features fall inside this tile
        const buf = vtpbf.fromGeojsonVt({ 'geojsonLayer': tile });
        res.writeHead(200, {
          'Content-Type': 'application/x-protobuf'
        });
        res.end(buf);
      } else {
        res.status(404).send();
      }
    } else {
      res.status(404).send();
    }
  // Send a GeoJSON array
  } else if (format === 'json') {
    res.json(features);
  } else {
    res.boom.badRequest('format must be pbf or json');
  }
} catch (err) {
  logger.error(err);
  res.boom.badImplementation('server error!');
if (!this._geoJSONIndex) {
  return callback(null, null); // we couldn't load the file
}
const geoJSONTile = this._geoJSONIndex.getTile(canonical.z, canonical.x, canonical.y);
if (!geoJSONTile) {
  return callback(null, null); // nothing in the given tile
}
const geojsonWrapper = new GeoJSONWrapper(geoJSONTile.features);
// Encode the geojson-vt tile into binary vector tile form. This
// is a convenience that allows `FeatureIndex` to operate the same way
// across `VectorTileSource` and `GeoJSONSource` data.
let pbf = vtpbf(geojsonWrapper);
if (pbf.byteOffset !== 0 || pbf.byteLength !== pbf.buffer.byteLength) {
  // Compatibility with node Buffer (https://github.com/mapbox/pbf/issues/35)
  pbf = new Uint8Array(pbf);
}
callback(null, {
  vectorTile: geojsonWrapper,
  rawData: pbf.buffer
});
}
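
// A hedged sanity check (not part of the source above): the encoded Uint8Array can be
// decoded again with @mapbox/vector-tile to confirm the round trip. The layer name is
// whatever the wrapper exposes, so it is looked up rather than hard-coded.
const Protobuf = require('pbf');
const { VectorTile } = require('@mapbox/vector-tile');

const decoded = new VectorTile(new Protobuf(pbf)); // `pbf` is the Uint8Array produced above
const layerName = Object.keys(decoded.layers)[0];
const firstFeature = decoded.layers[layerName].feature(0);
console.log(firstFeature.toGeoJSON(canonical.x, canonical.y, canonical.z).geometry.type);
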
.then(function (layers) {
  var pbfOptions = {};
  for (var i in layers) {
    var layer = layers[i];
    if (layer.pbf) {
      // construct the GeoJSONWrapper here, so that we can set the version on it
      pbfOptions[layer.name] = new GeoJSONWrapper(layer.pbf.features);
      pbfOptions[layer.name].name = layer.name;
      pbfOptions[layer.name].version = 2;
    }
  }
  if (Object.keys(pbfOptions).length === 0) {
    return undefined;
  }
  // we use fromVectorTileJs instead of fromGeojsonVt because we constructed the GeoJSONWrapper ourselves
  var buff = vtpbf.fromVectorTileJs({layers: pbfOptions});
  if (buff) {
    buff = Buffer.from(buff.buffer);
  }
  return buff;
});
};
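
// A hedged note on the trade-off above: when no hand-built wrappers are needed, the
// same spec version can be passed through fromGeojsonVt's options argument instead
// (as the {version: 2} calls elsewhere on this page show), for example:
var simpleBuff = vtpbf.fromGeojsonVt({ someLayer: someGeojsonVtTile }, { version: 2 }); // names are placeholders
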
app.get('/:layer/:z/:x/:y.mvt', function (req, res) {
  var layer = req.params['layer'];
  if (!tileIndexes.hasOwnProperty(layer)) {
    return send404(res);
  }
  var z = +req.params['z'];
  var x = +req.params['x'];
  var y = +req.params['y'];
  var tile = tileIndexes[layer].getTile(z, x, y);
  if (!tile || !tile.features) {
    return res.json({});
  }
  var buff = vtpbf.fromGeojsonVt({ 'geojsonLayer': tile });
  res.send(Buffer.from(buff)); // wrap the Uint8Array so Express sends raw bytes, not JSON
});
app.listen(httpPort, function () { return console.info("Tile service is listening on port " + httpPort); });
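
// A hedged sketch (file names and layer keys are assumptions, not from the source)
// of how the `tileIndexes` map used by the route above could be built at startup.
var fs = require('fs');
var geojsonvt = require('geojson-vt');

var tileIndexes = {
  parcels: geojsonvt(JSON.parse(fs.readFileSync('data/parcels.geojson', 'utf8')), { maxZoom: 14 })
};
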
static async geoJSONs2VTPBF(geojsons, zoomLevel, column, row, extent) {
  const tiles = {};
  const layerNames = Object.keys(geojsons);
  await Utils.asyncForEach(layerNames, async (layerName) => {
    const tile = await DataConverter.geoJSON2MVTLayer(geojsons[layerName]);
    DataConverter.convertTileCoords(tile, zoomLevel, column, row, extent);
    tiles[layerName] = tile;
  });
  const buffer = vtpbf.fromGeojsonVt(tiles, {version: 2});
  const binBuffer = Buffer.from(buffer);
  return binBuffer;
}
app.get('/:layer/:z/:x/:y.mvt', (req, res) => {
  const { tile } = getTile(req, res);
  if (!tile || !tile.features) {
    return;
  }
  // Notice that the source-layer (for Mapbox GL) is set to 'all'
  res.send(Buffer.from(vtpbf.fromGeojsonVt({ all: tile })));
});
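
// A hedged client-side sketch (assumed `map` instance, source id, and tile URL):
// the style layer's 'source-layer' must match the key passed to vtpbf.fromGeojsonVt,
// which is 'all' in the handler above.
map.addSource('points', {
  type: 'vector',
  tiles: ['http://localhost:3000/points/{z}/{x}/{y}.mvt']
});
map.addLayer({
  id: 'points-circles',
  type: 'circle',
  source: 'points',
  'source-layer': 'all'
});
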
geopackage.features(req.layer, req.params.tableName, tileParams, tileBuffer, function (err, featureCollection) {
  if (err) return next(err);
  const tileIndex = geojsonvt(featureCollection, {buffer: tileBuffer * 8, maxZoom: tileParams.z});
  const tile = tileIndex.getTile(tileParams.z, tileParams.x, tileParams.y);
  const vectorTile = vtpbf.fromGeojsonVt({ [table.name]: tile || { features: [] } });
  res.contentType('application/x-protobuf');
  res.send(Buffer.from(vectorTile));
});
} else {
  features: features
};
var tileBuffer = 8;
var tileIndex = geojsonvt(featureCollection, {buffer: tileBuffer * 8, maxZoom: z});
var layer = {};
var tile = tileIndex.getTile(z, x, y);
var gjvt = {};
if (tile) {
  gjvt[table] = tile;
} else {
  gjvt[table] = {features: []};
}
return vtpbf.fromGeojsonVt(gjvt);
});
}