Diffstat (limited to 'export_mvt.py')
-rw-r--r-- | export_mvt.py | 98
1 file changed, 91 insertions, 7 deletions
diff --git a/export_mvt.py b/export_mvt.py
index a929b78..d19909c 100644
--- a/export_mvt.py
+++ b/export_mvt.py
@@ -20,7 +20,7 @@
 
 # pylint: disable=invalid-name, missing-module-docstring, fixme
 
-from os import O_RDONLY, O_WRONLY, O_CREAT, O_EXCL, O_CLOEXEC, O_DIRECTORY, F_OK
+from os import O_RDONLY, O_WRONLY, O_CREAT, O_EXCL, O_TRUNC, O_CLOEXEC, O_DIRECTORY, F_OK
 import os
 from errno import EAGAIN
 import json
@@ -29,7 +29,7 @@
 from pathlib import Path
 import shutil
 import tempfile
 from typing import Any, Iterator, Optional
-from time import monotonic as time_monotonic
+from time import monotonic as time_monotonic, time_ns
 import brotli
 from osgeo import gdal, ogr, osr
@@ -274,9 +274,89 @@ def compress_brotli(path : str,
     os.close(fd_in)
     return size_in, size_out
 
+def getLayerMetadata(layers : dict[str,Any],
+                     sources : dict[str,Any],
+                     license_info: dict[str,str|dict[str,str]],
+                     last_modified : dict[str,int],
+                     last_updated : int) -> dict[str,int|dict[int|str|dict[str,str]]]:
+    """Return a dictionary suitable for metadata.json"""
+    layers2 = {}
+    for k, v in layers.items():
+        layers2[k] = x = {}
+        if 'description' in v:
+            x['description'] = v['description']
+        source_paths = []
+        for src in v.get('sources', []):
+            if 'source' not in src or src['source'] is None:
+                continue
+            if 'path' not in src['source']:
+                continue
+            source_path = src['source']['path']
+            if source_path is not None:
+                source_paths.append(source_path)
+        if len(source_paths) > 0:
+            # remove duplicates but preserve order
+            x['source_files'] = list(dict.fromkeys(source_paths))
+
+    source_files = {}
+    for source_path in { p for v in layers2.values() for p in v.get('source_files', []) }:
+        source_files[source_path] = x = {}
+        if source_path in sources and 'url' in sources[source_path]:
+            x['url'] = sources[source_path]['url']
+        if source_path not in license_info:
+            logging.warning('Source path %s lacks license information', source_path)
+        else:
+            license_info0 = license_info[source_path]
+            for k in ('description', 'copyright', 'product_url'):
+                if k in license_info0:
+                    x[k] = license_info0[k]
+            if 'license' in license_info0:
+                if isinstance(license_info0['license'], str):
+                    x['license'] = { 'name': license_info0['license'] }
+                elif isinstance(license_info0['license'], dict):
+                    x['license'] = license_info0['license'].copy()
+        if source_path not in last_modified:
+            logging.warning('Source path %s lacks last_modified value', source_path)
+        else:
+            x['last_modified'] = last_modified[source_path]
+
+    return {
+        'layers': layers2,
+        'source_files': source_files,
+        'last_updated': last_updated
+    }
+
+def exportMetadata(basedir : Path, data : dict[str,Any],
+                   dir_fd : Optional[int] = None,
+                   compress : bool = False) -> None:
+    """Generate metadata.json"""
+    data = json.dumps(data, ensure_ascii=False, separators=(',',':')).encode('utf-8')
+    path = basedir.joinpath('metadata.json')
+    flags = O_WRONLY|O_CREAT|O_TRUNC|O_CLOEXEC
+
+    fd = os.open(str(path), flags, mode=0o644, dir_fd=dir_fd)
+    try:
+        write_all(fd, data)
+    finally:
+        os.close(fd)
+
+    if not compress:
+        return
+
+    compressor = brotli.Compressor(mode=brotli.MODE_GENERIC, quality=11)
+    fd = os.open(str(path.with_suffix('.json.br')), flags, mode=0o644, dir_fd=dir_fd)
+    try:
+        write_all(fd, compressor.process(data))
+        write_all(fd, compressor.finish())
+    finally:
+        os.close(fd)
+
 # pylint: disable-next=too-many-branches, too-many-statements
 def exportMVT(ds : gdal.Dataset,
               layers : dict[str,dict[str,Any]],
+              sources : dict[str,Any],
+              license_info: dict[str,str|dict[str,str]],
+              last_modified : dict[str,int],
               dst : Path,
               drvname : str = 'MVT',
               default_options : dict[str,Any]|None = None,
@@ -321,6 +401,7 @@ def exportMVT(ds : gdal.Dataset,
     start = time_monotonic()
     os.mkdir(dbname, mode=0o700, dir_fd=dir_fd)
     basedir = Path(f'/proc/self/fd/{dir_fd}')
+    creation_time = time_ns()
     dso = createMVT(drv, path=str(basedir.joinpath(mvtname)),
                     default_options=default_options,
                     options = {
@@ -406,11 +487,14 @@ def exportMVT(ds : gdal.Dataset,
                  format_bytes(size_min_z), format_bytes(size_max_z), format_bytes(size_tot_z),
                  format_bytes(round(size_tot_z/tile_count)))
 
-    try:
-        # OpenLayers doesn't make use of that file so delete it
-        os.unlink(str(Path(mvtname).joinpath('metadata.json')), dir_fd=dir_fd)
-    except FileNotFoundError:
-        pass
+    exportMetadata(basedir=Path(mvtname),
+                   data=getLayerMetadata({k:layers[v] for k,(v,_) in export_layers.items()},
+                                         sources=sources,
+                                         license_info=license_info,
+                                         last_modified=last_modified,
+                                         last_updated=creation_time // 1000000),
+                   dir_fd=dir_fd,
+                   compress=compress)
 
     try:
         # atomically exchange paths
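As an aside, a minimal sketch of how a consumer could read back the metadata.json that the new exportMetadata() writes. This is not part of the patch; the 'tiles/metadata.json' path is only an assumed example, and the field names follow getLayerMetadata() above.

# Illustrative only: read the metadata.json produced by exportMetadata().
# 'tiles/metadata.json' is an assumed location, not defined by the patch.
import json

with open('tiles/metadata.json', encoding='utf-8') as f:
    meta = json.load(f)

# last_updated is creation_time // 1000000, i.e. milliseconds since the epoch
print('generated at (ms):', meta['last_updated'])

for name, layer in meta['layers'].items():
    print(name, '-', layer.get('description', 'no description'))
    for src in layer.get('source_files', []):
        info = meta['source_files'].get(src, {})
        license_name = info.get('license', {}).get('name', 'unknown')
        print('   ', src,
              '| last_modified:', info.get('last_modified'),
              '| license:', license_name)

When compress=True, exportMetadata() also writes the same bytes Brotli-compressed as metadata.json.br, so a web server can serve the pre-compressed copy alongside the plain file.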