 common.py        |   2
 common_gdal.py   |   6
 config.yml       |  20
 export_mvt.py    |   5
 export_raster.py | 251
 import_source.py |  23
 webmap-import    |  77
 7 files changed, 343 insertions(+), 41 deletions(-)
diff --git a/common.py b/common.py
index 74cd748..cbdc43c 100644
--- a/common.py
+++ b/common.py
@@ -115,7 +115,7 @@ def parse_config(path : Optional[Path] = None,
for name, layerdefs in layers.items():
if isinstance(layerdefs, dict) and 'sources' not in layerdefs:
layers[name] = { 'sources': [layerdefs] }
- for k in ('description', 'create', 'publish'):
+ for k in ('description', 'create', 'publish', 'type'):
if k in layerdefs:
layers[name][k] = layerdefs.pop(k)
layerdefs = layers[name]
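
For reference, a runnable sketch of what the hoisting above does to a single-source layer definition (layer content abridged from the nvk:kskog entry further down; not part of the patch):

    layers = {
        'nvk:kskog': {                        # bare dict, no 'sources' key
            'description': 'Sannolikt och potentiell kontinuitetsskog',
            'type': 'raster',                 # the newly hoisted key
            'source': {'path': 'nvk/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.zip'},
        },
    }
    for name, layerdefs in layers.items():
        if isinstance(layerdefs, dict) and 'sources' not in layerdefs:
            layers[name] = {'sources': [layerdefs]}
            for k in ('description', 'create', 'publish', 'type'):
                if k in layerdefs:
                    layers[name][k] = layerdefs.pop(k)
    assert layers['nvk:kskog']['type'] == 'raster'
    assert 'type' not in layers['nvk:kskog']['sources'][0]
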
diff --git a/common_gdal.py b/common_gdal.py
index 7333f58..b91f8c5 100644
--- a/common_gdal.py
+++ b/common_gdal.py
@@ -59,7 +59,7 @@ def gdalSetOpenExArgs(option_dict : Optional[dict[str, Any]] = {},
flags : int = 0) -> tuple[dict[str, int|list[str]], gdal.Driver]:
"""Return a pair kwargs and driver to use with gdal.OpenEx()."""
- kwargs = { 'nOpenFlags': gdal.OF_VECTOR | flags }
+ kwargs = { 'nOpenFlags': flags }
fmt = option_dict.get('format', None)
if fmt is None:
@@ -68,8 +68,10 @@ def gdalSetOpenExArgs(option_dict : Optional[dict[str, Any]] = {},
drv = gdal.GetDriverByName(fmt)
if drv is None:
raise RuntimeError(f'Unknown driver name "{fmt}"')
- if not gdalGetMetadataItem(drv, gdal.DCAP_VECTOR):
+ if flags & gdal.OF_VECTOR and not gdalGetMetadataItem(drv, gdal.DCAP_VECTOR):
raise RuntimeError(f'Driver "{drv.ShortName}" has no vector capabilities')
+ if flags & gdal.OF_RASTER and not gdalGetMetadataItem(drv, gdal.DCAP_RASTER):
+ raise RuntimeError(f'Driver "{drv.ShortName}" has no raster capabilities')
kwargs['allowed_drivers'] = [ drv.ShortName ]
oo = option_dict.get('open-options', None)
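
Callers are now expected to pass the open mode themselves; a minimal sketch of the raster-mode call this change enables (the file name is a placeholder):

    from osgeo import gdal
    from common_gdal import gdalSetOpenExArgs

    kwargs, drv = gdalSetOpenExArgs({'format': 'GTiff'},
                                    flags=gdal.OF_RASTER | gdal.OF_READONLY)
    # a driver lacking DCAP_RASTER would have raised RuntimeError above;
    # kwargs holds nOpenFlags plus allowed_drivers=['GTiff']
    ds = gdal.OpenEx('placeholder.tif', **kwargs)
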
diff --git a/config.yml b/config.yml
index 17987d5..f39b453 100644
--- a/config.yml
+++ b/config.yml
@@ -80,6 +80,8 @@ layer-groups:
- 'sks:sumpskog'
- 'nvk:skyddsvard_statlig_skog'
- 'lst:pagaende_naturreservatsbildning'
+ kskog:
+ - 'nvk:kskog'
# Global GDAL/OGR configuration options, cf. https://gdal.org/user/configoptions.html and
@@ -189,7 +191,7 @@ downloads:
- path: nvk/Skyddsvarda_statliga_skogar.zip
url: https://geodata.naturvardsverket.se/nedladdning/skog/Skyddsvarda_statliga_skogar.zip
- - path: nvk/k-skog/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.zip
+ - path: nvk/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.zip
url: https://geodata.naturvardsverket.se/nedladdning/Kartering_av_kontinuitetsskog/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.zip
max-size: 268435456 # 256MiB
@@ -480,10 +482,10 @@ license-info:
# https://www.skogsstyrelsen.se/e-tjanster-och-kartor/karttjanster/geodatatjanster/villkor/
name: CC0 1.0 Universiell
url: https://creativecommons.org/publicdomain/zero/1.0/deed.sv
- nvk/k-skog/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.zip:
+ nvk/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.zip:
description: "Sannolikt och potentiell kontinuitetsskog (preciserad 2024)"
- #product_url: https://geodatakatalogen.naturvardsverket.se/geonetwork/srv/swe/catalog.search#/metadata/2b9d4c76-3b0e-4a55-a16c-51513da84558
- product_url: https://geodata.naturvardsverket.se/nedladdning/Kartering_av_kontinuitetsskog/Leverans-PM_Sannolikt_och_potentiell_kontinuitetsskog_i_Boreal_Region_2024.pdf
+ product_url: https://geodatakatalogen.naturvardsverket.se/geonetwork/srv/swe/catalog.search#/metadata/2b9d4c76-3b0e-4a55-a16c-51513da84558
+ #product_url: https://geodata.naturvardsverket.se/nedladdning/Kartering_av_kontinuitetsskog/Leverans-PM_Sannolikt_och_potentiell_kontinuitetsskog_i_Boreal_Region_2024.pdf
copyright: © Naturvårdsverket
license: Inga begränsningar
mrr/mineralrattigheter.zip:
@@ -5439,12 +5441,18 @@ layers:
'nvk:kskog':
description: "Sannolikt och potentiell kontinuitetsskog (preciserad 2024)"
+ type: raster
source:
- path: nvk/k-skog/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.zip
+ path: nvk/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.zip
unar:
format: zip
patterns:
- 'Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.*'
import:
path: Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024/Sannolikt_och_potentiell_kontinuitetsskog_BorealRegion_2024.tif
- format: GeoTIFF
+ format: GTiff
+ publish:
+ 1: "Sannolikt kontinuitetsskog (preciserad)"
+ 2: "Sannolikt påverkad kontinuitetsskog (preciserad)"
+ 3: "Sannolikt kontinuitetsskog i fjällen (grövre precisering)"
+ 4: "Potentiell kontinuitetsskog (2015)"
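
The new publish block maps what are presumably the raster's class (pixel) values 1–4 to display names. A sketch of the shape that processRaster() below accepts, mirroring its validation (not part of the patch):

    publish = {
        1: 'Sannolikt kontinuitetsskog (preciserad)',
        2: 'Sannolikt påverkad kontinuitetsskog (preciserad)',
        3: 'Sannolikt kontinuitetsskog i fjällen (grövre precisering)',
        4: 'Potentiell kontinuitetsskog (2015)',
    }
    assert isinstance(publish, dict) and len(publish) >= 1
    assert all(isinstance(v, str) for v in publish.values())
    # exportMetadata() keys the per-class entries by str(value)
    entries = {str(value): desc for value, desc in publish.items()}
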
diff --git a/export_mvt.py b/export_mvt.py
index 8d90129..e50620f 100644
--- a/export_mvt.py
+++ b/export_mvt.py
@@ -450,7 +450,8 @@ def exportMVT(ds : gdal.Dataset,
drvname : str = 'MVT',
default_options : dict[str,Any]|None = None,
tile_extension : str = '.pbf',
- compress : bool = False) -> None:
+ compress : bool = False,
+ compress_metadata : bool = False) -> None:
"""Export some layers to MVT."""
drv = gdal.GetDriverByName(drvname)
if drv is None:
@@ -588,7 +589,7 @@ def exportMVT(ds : gdal.Dataset,
last_modified=last_modified,
last_updated=creation_time // 1000000),
dir_fd=dir_fd,
- compress=compress)
+ compress=compress_metadata)
try:
# atomically exchange paths
diff --git a/export_raster.py b/export_raster.py
new file mode 100644
index 0000000..32dff56
--- /dev/null
+++ b/export_raster.py
@@ -0,0 +1,251 @@
+#!/usr/bin/python3
+
+#----------------------------------------------------------------------
+# Backend utilities for the Klimatanalys Norr project (Cloud Optimized GeoTIFF generator)
+# Copyright © 2025 Guilhem Moulin <info@guilhem.se>
+#
+# This program is free software: you can redistribute it and/or modify
+# it under the terms of the GNU General Public License as published by
+# the Free Software Foundation, either version 3 of the License, or
+# (at your option) any later version.
+#
+# This program is distributed in the hope that it will be useful,
+# but WITHOUT ANY WARRANTY; without even the implied warranty of
+# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+# GNU General Public License for more details.
+#
+# You should have received a copy of the GNU General Public License
+# along with this program. If not, see <https://www.gnu.org/licenses/>.
+#----------------------------------------------------------------------
+
+# pylint: disable=invalid-name, missing-module-docstring
+
+from os import O_RDONLY, O_CLOEXEC, O_DIRECTORY, F_OK
+import os
+import sys
+import logging
+from pathlib import Path
+from typing import Any
+import shutil
+import tempfile
+from time import time_ns
+
+from osgeo import gdal, ogr, osr
+from osgeo.gdalconst import (
+ OF_RASTER as GDAL_OF_RASTER,
+ OF_READONLY as GDAL_OF_READONLY,
+ OF_VERBOSE_ERROR as GDAL_OF_VERBOSE_ERROR,
+)
+
+from common import BadConfiguration
+from import_source import importSource0
+from common_gdal import (
+ gdalSetOpenExArgs,
+ gdalVersionMin,
+ getSpatialFilterFromGeometry,
+)
+from export_mvt import exportMetadata, getLayerMetadata
+from rename_exchange import rename_exchange
+
+def processRaster(layername : str,
+ layerdef : dict[str,Any],
+ sources : dict[str,Any],
+ license_info: dict[str,str|dict[str,str]],
+ last_modified : dict[str,int],
+ dst : Path,
+ cachedir : Path|None = None,
+ extent : ogr.Geometry|None = None,
+ compress_metadata : bool = False) -> None:
+ """Process a raster file."""
+ source = layerdef['sources']
+ assert layerdef['type'] == 'raster'
+ if len(source) != 1:
+ raise BadConfiguration(f'{layername} has {len(source)} != 1 sources')
+ publish = layerdef.get('publish', None)
+ if publish is None or len(publish) < 1:
+ raise BadConfiguration(f'{layername} has no export definition')
+ if not isinstance(publish, dict) or not all(isinstance(l, str) for l in publish.values()):
+ raise BadConfiguration(f'{layername} has invalid export definition {publish}')
+ source = source[0]
+
+ if sys.stderr.isatty():
+ from tqdm import tqdm # pylint: disable=import-outside-toplevel
+ progress = tqdm
+ else:
+ progress = None
+
+ if len(last_modified) < 1:
+ last_modified_ns = None
+ else:
+ last_modified_ns = max(last_modified.values()) * 1000000
+ try:
+ st = os.stat(str(dst))
+        if last_modified_ns is not None and st.st_mtime_ns <= last_modified_ns:
+ logging.info('Output directory "%s" is up to date, skipping', str(dst))
+ return
+ except (OSError, ValueError):
+ #logging.warning('Could not stat(%s)', str(dst))
+ pass
+
+ # use a sibling temporary directory to make sure we can atomically rename/exchange
+ # directories
+ tmpdir = tempfile.mkdtemp(prefix='.tmp.' + dst.name + '-', dir=dst.parent)
+    logging.debug('Using "%s" as temporary directory for raster', tmpdir)
+
+ dir_fd = os.open(tmpdir, O_RDONLY|O_CLOEXEC|O_DIRECTORY)
+ try:
+ creation_time = time_ns()
+ os.mkdir(dst.name, mode=0o755, dir_fd=dir_fd)
+
+ source['import'] |= {
+ '_progress': progress,
+ '_dest': Path(f'/proc/self/fd/{dir_fd}').joinpath(dst.name).joinpath(dst.name + '.tiff')
+ }
+ importSource0(None, **source['source'], args=source['import'],
+ cachedir=cachedir,
+ extent=extent,
+ callback=_processRaster2)
+
+ exportMetadata(basedir=Path(dst.name),
+ data=getLayerMetadata({str(i):layerdef | {'description':desc}
+ for i,desc in publish.items()},
+ sources=sources,
+ license_info=license_info,
+ last_modified=last_modified,
+ last_updated=creation_time // 1000000),
+ dir_fd=dir_fd, # pylint: disable=duplicate-code
+ compress=compress_metadata)
+
+ if last_modified_ns is not None:
+ os.utime(dst.name, ns=(last_modified_ns, last_modified_ns),
+ dir_fd=dir_fd, follow_symlinks=False)
+
+ try:
+ # atomically exchange paths
+ rename_exchange(dst.name, dst, olddirfd=dir_fd)
+ except FileNotFoundError:
+ # dst doesn't exist, use normal os.rename() instead
+ os.rename(dst.name, dst, src_dir_fd=dir_fd)
+
+ finally:
+ if progress is not None and '_pbar' in source['import']:
+ source['import'].pop('_pbar').close()
+ if os.access(dst.name, F_OK, dir_fd=dir_fd, follow_symlinks=False):
+ logging.debug('rmtree("%s/%s")', tmpdir, dst.name)
+ shutil.rmtree(dst.name, dir_fd=dir_fd)
+
+ logging.debug('rmdir("%s")', tmpdir)
+ os.rmdir(tmpdir)
+
+ try:
+ os.close(dir_fd) # pylint: disable=duplicate-code
+ except (OSError, ValueError):
+ logging.exception('Could not close directory')
+
+def _processRaster2(_ : None, path : str, args : dict[str,Any],
+ basedir : Path|None, extent : ogr.Geometry|None) -> gdal.Dataset:
+ kwargs, _ = gdalSetOpenExArgs(args, flags=GDAL_OF_RASTER|GDAL_OF_READONLY|GDAL_OF_VERBOSE_ERROR)
+ path2 = path if basedir is None else str(basedir.joinpath(path))
+
+ logging.debug('OpenEx(%s, %s)', path2, str(kwargs))
+ ds = gdal.OpenEx(path2, **kwargs) # pylint: disable=duplicate-code
+ if ds is None:
+ raise RuntimeError(f'Could not open {path2}')
+
+ if ds.RasterCount != 1:
+ raise NotImplementedError(f'Input raster {path2} has {ds.RasterCount} != 1 bands')
+ rb = ds.GetRasterBand(1)
+
+ gt = ds.GetGeoTransform()
+ xs = ds.RasterXSize
+ ys = ds.RasterYSize
+
+ srs = ds.GetSpatialRef()
+    srs.SetAxisMappingStrategy(osr.OAMS_TRADITIONAL_GIS_ORDER) # force x,y
+ ulx, uly = gdal.ApplyGeoTransform(gt, 0, 0)
+ lrx, lry = gdal.ApplyGeoTransform(gt, xs, ys)
+ assert ulx <= lrx
+ assert uly >= lry
+
+ extent = getSpatialFilterFromGeometry(extent, srs)
+ ct = osr.CoordinateTransformation(extent.GetSpatialReference(), srs)
+ extent = extent.GetEnvelope()
+ ulxy = ct.TransformPoint(extent[0], extent[3])
+ lrxy = ct.TransformPoint(extent[1], extent[2])
+ assert ulxy[0] <= lrxy[0]
+ assert ulxy[1] >= lrxy[1]
+
+ ulx2 = max(ulx, ulxy[0])
+ uly2 = min(uly, ulxy[1])
+ lrx2 = min(lrx, lrxy[0])
+ lry2 = max(lry, lrxy[1])
+ assert ulx2 < lrx2
+ assert lry2 < uly2
+
+    # don't care about overviews here, GDAL will take the ceiling when sizing
+    # (the source width is not even divisible by 2)
+ r = (lrx2 - ulx2) % abs(gt[1])
+ if r != 0:
+ # extend X boundaries to preserve xres
+ d = abs(gt[1]) - r
+ if ulxy[0] < ulx:
+ ulx2 -= d
+ else:
+            lrx2 += d
+ assert (lrx2 - ulx2) % abs(gt[1]) == 0
+
+ r = (uly2 - lry2) % abs(gt[5])
+ if r != 0:
+ # extend Y boundaries to preserve yres
+ d = abs(gt[5]) - r
+ if lrxy[1] < lry:
+            uly2 += d
+ else:
+ lry2 -= d
+ assert (uly2 - lry2) % abs(gt[5]) == 0
+
+ # see https://gdal.org/en/stable/drivers/raster/cog.html
+ creationOptions = [
+ 'BLOCKSIZE=256',
+ 'COMPRESS=LZW',
+ 'OVERVIEWS=IGNORE_EXISTING',
+ ]
+ if (rb.GetColorInterpretation() in (gdal.GCI_PaletteIndex, gdal.GCI_GrayIndex)
+ and rb.DataType == gdal.GDT_Byte):
+        # 8-bit gray or palette: assume categorical values, don't interpolate
+ creationOptions.append('RESAMPLING=NEAREST')
+ if gdalVersionMin(maj=3, min=11):
+ creationOptions.append('INTERLEAVE=BAND')
+ if gdalVersionMin(maj=3, min=8):
+ creationOptions.append('STATISTICS=YES')
+
+ warpOptions = {
+ 'format': 'COG',
+ # preserve source SRS and resolution
+ 'outputBounds': (ulx2, lry2, lrx2, uly2),
+ 'setColorInterpretation': True,
+ 'creationOptions': creationOptions,
+ }
+
+ if args.get('_progress', None) is None:
+ callback = pbar = None
+ else:
+ callback = _gdal_callback
+ pbar = args['_pbar'] = args['_progress'](
+ total=100,
+ leave=False,
+ bar_format='{l_bar}{bar}| [{elapsed}<{remaining}]',
+ )
+
+ logging.debug('warp(%s, ds, %s)', args['_dest'],
+ ', '.join([str(k) + '=' + (f'\'{v}\'' if isinstance(v,str) else str(v))
+ for k,v in warpOptions.items()]))
+ return gdal.Warp(args['_dest'], ds,
+ **warpOptions,
+ callback=callback,
+ callback_data=pbar,
+ )
+
+def _gdal_callback(info, _message, pbar):
+ pbar.update(info * 100 - pbar.n)
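
A worked check of the boundary-snapping arithmetic above, with made-up numbers (10 m pixels and a clip window whose width leaves a 4.5 m remainder):

    g = 10.0                    # abs(gt[1]), the pixel width
    ulx2, lrx2 = 0.0, 1234.5    # clipped extent, not grid-aligned
    r = (lrx2 - ulx2) % g       # r = 4.5
    if r != 0:
        d = g - r               # d = 5.5
        lrx2 += d               # extend the right edge to 1240.0
    assert (lrx2 - ulx2) % g == 0
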
diff --git a/import_source.py b/import_source.py
index 0431486..d5931ad 100644
--- a/import_source.py
+++ b/import_source.py
@@ -26,7 +26,7 @@ import re
from fnmatch import fnmatchcase
from pathlib import Path
from datetime import datetime, timedelta, UTC
-from typing import Any, Final, Iterator, Optional
+from typing import Any, Callable, Final, Iterator, Optional
import traceback
from enum import Enum, unique as enum_unique
from hashlib import sha256
@@ -37,6 +37,7 @@ from osgeo.gdalconst import (
OF_ALL as GDAL_OF_ALL,
OF_READONLY as GDAL_OF_READONLY,
OF_UPDATE as GDAL_OF_UPDATE,
+ OF_VECTOR as GDAL_OF_VECTOR,
OF_VERBOSE_ERROR as GDAL_OF_VERBOSE_ERROR,
DCAP_CREATE as GDAL_DCAP_CREATE,
)
@@ -56,7 +57,8 @@ def openOutputDS(def_dict : dict[str, Any]) -> gdal.Dataset:
create-options is a non-empty dictionary."""
path = def_dict['path']
- kwargs, drv = gdalSetOpenExArgs(def_dict, flags=GDAL_OF_UPDATE|GDAL_OF_VERBOSE_ERROR)
+ kwargs, drv = gdalSetOpenExArgs(def_dict,
+ flags=GDAL_OF_VECTOR|GDAL_OF_UPDATE|GDAL_OF_VERBOSE_ERROR)
try:
logging.debug('OpenEx(%s, %s)', path, str(kwargs))
return gdal.OpenEx(path, **kwargs)
@@ -485,10 +487,11 @@ def importSources(dso : gdal.Dataset, lyr : ogr.Layer,
clearLayer(dso, lyr) # TODO conditional (only if not new)?
for source in sources:
- _importSource(lyr, **source['source'],
+ importSource0(lyr, **source['source'],
args=source['import'],
cachedir=cachedir,
- extent=extent)
+ extent=extent,
+ callback=_importSource2)
# force the PG driver to call EndCopy() to detect errors and trigger a
# rollback if needed
@@ -550,15 +553,17 @@ def importSources(dso : gdal.Dataset, lyr : ogr.Layer,
return rv
# pylint: disable-next=dangerous-default-value
-def _importSource(lyr : ogr.Layer,
+def importSource0(lyr : ogr.Layer|None = None,
path : str = '/nonexistent',
unar : dict[str,Any]|None = None,
args : dict[str,Any] = {},
cachedir : Path|None = None,
- extent : ogr.Geometry|None = None) -> None:
+ extent : ogr.Geometry|None = None,
+ callback : Callable[[ogr.Layer|None, str, dict[str,Any], Path|None,
+ ogr.Geometry|None], None]|None = None) -> None:
"""Import a source layer"""
if unar is None:
- return _importSource2(lyr, path, args=args, basedir=cachedir, extent=extent)
+ return callback(lyr, path, args=args, basedir=cachedir, extent=extent)
ds_srcpath = Path(args['path'])
if ds_srcpath.is_absolute():
@@ -574,7 +579,7 @@ def _importSource(lyr : ogr.Layer,
fmt=unar.get('format', None),
patterns=unar.get('patterns', None),
exact_matches=[ds_srcpath])
- return _importSource2(lyr, ds_srcpath, args=args, basedir=Path(tmpdir), extent=extent)
+ return callback(lyr, ds_srcpath, args=args, basedir=Path(tmpdir), extent=extent)
def setFieldMapValue(fld : ogr.FieldDefn,
idx : int,
@@ -613,7 +618,7 @@ def _importSource2(lyr_dst : ogr.Layer, path : str, args : dict[str,Any],
calling StartTransaction() https://github.com/OSGeo/gdal/issues/3403
while we want a single transaction for the entire destination layer,
including truncation, source imports, and metadata changes."""
- kwargs, _ = gdalSetOpenExArgs(args, flags=GDAL_OF_READONLY|GDAL_OF_VERBOSE_ERROR)
+ kwargs, _ = gdalSetOpenExArgs(args, flags=GDAL_OF_VECTOR|GDAL_OF_READONLY|GDAL_OF_VERBOSE_ERROR)
path2 = path if basedir is None else str(basedir.joinpath(path))
logging.debug('OpenEx(%s, %s)', path2, str(kwargs))
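
The net effect of the refactor: importSource0() owns the optional archive extraction and path resolution, then hands off to a callback, _importSource2 for vector layers or _processRaster2 for rasters. A stub callback illustrates the seam (file names are placeholders):

    from import_source import importSource0

    def show_path(lyr, path, args, basedir, extent):
        # called with the resolved path; for archived sources, basedir
        # points at the temporary extraction directory
        print(lyr, basedir, path)

    importSource0(None, path='placeholder.zip',
                  unar={'format': 'zip'},
                  args={'path': 'inner/data.tif'},  # member inside the archive
                  callback=show_path)
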
diff --git a/webmap-import b/webmap-import
index e5a1426..5be25ca 100755
--- a/webmap-import
+++ b/webmap-import
@@ -70,6 +70,7 @@ from import_source import (
ImportStatus
)
from export_mvt import exportMVT
+from export_raster import processRaster
def setFieldIf(cond : bool,
attrName : str,
@@ -607,6 +608,10 @@ def main() -> NoReturn:
help='optional directory for Mapbox Vector Tiles (MVT)')
parser.add_argument('--mvt-compress', default=False, action='store_true',
help='whether to compress Mapbox Vector Tiles (MVT) files')
+ parser.add_argument('--rasterdir', default=None,
+ help='optional directory for raster files')
+ parser.add_argument('--metadata-compress', default=False, action='store_true',
+ help='whether to compress metadata.json files')
parser.add_argument('--force', default=False, action='store_true',
help='import even if no new changes were detected')
parser.add_argument('groupname', nargs='*', help='group layer name(s) to process')
@@ -631,13 +636,6 @@ def main() -> NoReturn:
logging.debug('gdal.SetConfigOption(%s, %s)', pszKey, pszValue)
gdal.SetConfigOption(pszKey, pszValue)
- # open output dataset (possibly create it first)
- dso = openOutputDS(config['dataset'])
-
- validate_schema(layers,
- drvo=dso.GetDriver(),
- lco_defaults=config['dataset'].get('create-layer-options', None))
-
# get configured Spatial Reference System and extent
srs = getSRS(config.get('SRS', None))
extent = getExtent(config.get('extent', None), srs=srs)
@@ -648,19 +646,6 @@ def main() -> NoReturn:
logging.debug('flock("%s", LOCK_EX)', args.lockfile)
flock(lock_fd, LOCK_EX)
- # create all output layers before starting the transaction
- for layername, layerdef in layers.items():
- lyr = dso.GetLayerByName(layername)
- if lyr is not None:
- # TODO dso.DeleteLayer(layername) if --overwrite and
- # dso.TestCapability(ogr.ODsCDeleteLayer)
- # (Sets OVERWRITE=YES for PostgreSQL and GPKG.)
- continue
- if not dso.TestCapability(ogr.ODsCCreateLayer):
- raise RuntimeError(f'Output driver {dso.GetDriver().ShortName} does not '
- 'support layer creation')
- createOutputLayer(dso, layername, srs=srs, options=layerdef.get('create', None))
-
if args.mvtdir is not None:
args.mvtdir = Path(args.mvtdir)
if args.mvtdir == Path(): # make sure it's not curdir as we don't want to exchange it
@@ -675,6 +660,55 @@ def main() -> NoReturn:
sourcePathLocks = lockSourcePaths(layerdefs=layers.values(),
lockdir=args.lockdir_sources)
+ # special handling for raster layers
+ if any(l.get('type') == 'raster' for l in layers.values()):
+ if not all(l.get('type') == 'raster' for l in layers.values()):
+ raise NotImplementedError('Mix of raster and vector layers is not supported')
+ if args.rasterdir is None:
+ raise RuntimeError('Missing required value for --rasterdir')
+ if len(layers) != 1:
+ raise RuntimeError('Raster layers need to be processed one at a time')
+ args.rasterdir = Path(args.rasterdir)
+ if args.rasterdir == Path(): # make sure it's not curdir as we don't want to exchange it
+ raise RuntimeError('Invalid value for --rasterdir')
+ args.rasterdir.parent.mkdir(parents=True, exist_ok=True)
+ last_modified = getLastMTimes(layerdefs=layers.values(), basedir=args.cachedir)
+ rv = 0
+ for layername, layerdef in layers.items():
+ try:
+ processRaster(layername, layerdef,
+ sources=parse_config_dl(config.get('downloads', [])),
+ license_info=config.get('license-info', {}),
+ last_modified=last_modified,
+ dst=args.rasterdir,
+ cachedir=args.cachedir,
+ extent=extent,
+ compress_metadata=args.metadata_compress)
+ except Exception: # pylint: disable=broad-exception-caught
+ rv = 1
+ traceback.print_exc()
+ sys.exit(rv)
+
+ # open output dataset (possibly create it first)
+ dso = openOutputDS(config['dataset'])
+
+ validate_schema(layers,
+ drvo=dso.GetDriver(),
+ lco_defaults=config['dataset'].get('create-layer-options', None))
+
+ # create all output layers before starting the transaction
+ for layername, layerdef in layers.items():
+ lyr = dso.GetLayerByName(layername)
+ if lyr is not None:
+ # TODO dso.DeleteLayer(layername) if --overwrite and
+ # dso.TestCapability(ogr.ODsCDeleteLayer)
+ # (Sets OVERWRITE=YES for PostgreSQL and GPKG.)
+ continue
+ if not dso.TestCapability(ogr.ODsCCreateLayer):
+ raise RuntimeError(f'Output driver {dso.GetDriver().ShortName} does not '
+ 'support layer creation')
+ createOutputLayer(dso, layername, srs=srs, options=layerdef.get('create', None))
+
if (dso.TestCapability(ogr.ODsCTransactions) and
# we need SAVEPOINT support
dso.GetDriver().ShortName in ('PostgreSQL', 'SQLite', 'GPKG')):
@@ -747,7 +781,8 @@ def main() -> NoReturn:
last_modified=last_modified,
dst=args.mvtdir,
default_options=config.get('vector-tiles', None),
- compress=args.mvt_compress)
+ compress=args.mvt_compress,
+ compress_metadata=args.metadata_compress)
if dsoTransaction:
dsoTransaction = False
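
With the raster branch in place, a raster layer is processed on its own (the checks above enforce exactly one layer per run). An illustrative invocation, with placeholder paths and assuming the pre-existing --cachedir option:

    $ ./webmap-import --cachedir /var/cache/webmap \
          --rasterdir tiles/nvk-kskog \
          --metadata-compress \
          'nvk:kskog'
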