path: root/webmap-import
author     Guilhem Moulin <guilhem@fripost.org>    2025-07-23 09:56:59 +0200
committer  Guilhem Moulin <guilhem@fripost.org>    2025-07-23 15:21:20 +0200
commit     7ffa6c549efcf2c85d56b4402110e5846a724f5f (patch)
tree       697fd2594bbb77d9e80f73e34c62002ae2f68b8b /webmap-import
parent     91abd89d67748a1e057d1299698d506613ee0f9f (diff)
Add logic to export raster files (as COG).

Raster data is not stored in the PostGIS database.  Instead, the mtime of
the target directory is used to determine whether the COG is up to date.

Add a new flag --metadata-compress for JSON metadata compression (which
also applies to MVT metadata), and --rasterdir for the target raster
directory.
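The up-to-date check described above boils down to comparing the target
directory's mtime against the newest source file.  A minimal sketch of that
idea, using hypothetical names (the actual logic lives in
export_raster.processRaster and may differ):

    from pathlib import Path

    def cog_is_current(target_dir: Path, sources: list[Path]) -> bool:
        # Hypothetical helper: treat the COG as up to date when the target
        # directory exists and is at least as new as every source file.
        if not target_dir.is_dir():
            return False
        newest_source = max((src.stat().st_mtime for src in sources), default=0.0)
        return target_dir.stat().st_mtime >= newest_source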
Diffstat (limited to 'webmap-import')
-rwxr-xr-x    webmap-import    77
1 file changed, 56 insertions(+), 21 deletions(-)
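For orientation before the patch itself: a COG export with optional gzip
compression of the JSON metadata could look roughly like the sketch below.
The file names, creation options, and the .gz suffix are illustrative
assumptions, not the behaviour of export_raster:

    import gzip
    import json
    from pathlib import Path
    from osgeo import gdal

    gdal.UseExceptions()

    def export_cog(src_path: str, dst_dir: Path, metadata: dict,
                   compress_metadata: bool = False) -> None:
        # Illustrative only: translate the source raster to a Cloud Optimized
        # GeoTIFF and write its JSON metadata next to it, optionally gzipped.
        dst_dir.mkdir(parents=True, exist_ok=True)
        gdal.Translate(str(dst_dir / 'raster.tif'), src_path, format='COG',
                       creationOptions=['COMPRESS=DEFLATE'])
        meta_path = dst_dir / 'metadata.json'
        if compress_metadata:
            with gzip.open(str(meta_path) + '.gz', 'wt', encoding='utf-8') as fp:
                json.dump(metadata, fp)
        else:
            meta_path.write_text(json.dumps(metadata), encoding='utf-8')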
diff --git a/webmap-import b/webmap-import
index e5a1426..5be25ca 100755
--- a/webmap-import
+++ b/webmap-import
@@ -70,6 +70,7 @@ from import_source import (
     ImportStatus
 )
 from export_mvt import exportMVT
+from export_raster import processRaster
 
 def setFieldIf(cond : bool,
                attrName : str,
@@ -607,6 +608,10 @@ def main() -> NoReturn:
                         help='optional directory for Mapbox Vector Tiles (MVT)')
     parser.add_argument('--mvt-compress', default=False, action='store_true',
                         help='whether to compress Mapbox Vector Tiles (MVT) files')
+    parser.add_argument('--rasterdir', default=None,
+                        help='optional directory for raster files')
+    parser.add_argument('--metadata-compress', default=False, action='store_true',
+                        help='whether to compress metadata.json files')
     parser.add_argument('--force', default=False, action='store_true',
                         help='import even if no new changes were detected')
     parser.add_argument('groupname', nargs='*', help='group layer name(s) to process')
@@ -631,13 +636,6 @@ def main() -> NoReturn:
         logging.debug('gdal.SetConfigOption(%s, %s)', pszKey, pszValue)
         gdal.SetConfigOption(pszKey, pszValue)
 
-    # open output dataset (possibly create it first)
-    dso = openOutputDS(config['dataset'])
-
-    validate_schema(layers,
-                    drvo=dso.GetDriver(),
-                    lco_defaults=config['dataset'].get('create-layer-options', None))
-
     # get configured Spatial Reference System and extent
     srs = getSRS(config.get('SRS', None))
     extent = getExtent(config.get('extent', None), srs=srs)
@@ -648,19 +646,6 @@ def main() -> NoReturn:
         logging.debug('flock("%s", LOCK_EX)', args.lockfile)
         flock(lock_fd, LOCK_EX)
 
-    # create all output layers before starting the transaction
-    for layername, layerdef in layers.items():
-        lyr = dso.GetLayerByName(layername)
-        if lyr is not None:
-            # TODO dso.DeleteLayer(layername) if --overwrite and
-            # dso.TestCapability(ogr.ODsCDeleteLayer)
-            # (Sets OVERWRITE=YES for PostgreSQL and GPKG.)
-            continue
-        if not dso.TestCapability(ogr.ODsCCreateLayer):
-            raise RuntimeError(f'Output driver {dso.GetDriver().ShortName} does not '
-                               'support layer creation')
-        createOutputLayer(dso, layername, srs=srs, options=layerdef.get('create', None))
-
     if args.mvtdir is not None:
         args.mvtdir = Path(args.mvtdir)
         if args.mvtdir == Path(): # make sure it's not curdir as we don't want to exchange it
@@ -675,6 +660,55 @@ def main() -> NoReturn:
         sourcePathLocks = lockSourcePaths(layerdefs=layers.values(),
                                           lockdir=args.lockdir_sources)
 
+    # special handling for raster layers
+    if any(l.get('type') == 'raster' for l in layers.values()):
+        if not all(l.get('type') == 'raster' for l in layers.values()):
+            raise NotImplementedError('Mix of raster and vector layers is not supported')
+        if args.rasterdir is None:
+            raise RuntimeError('Missing required value for --rasterdir')
+        if len(layers) != 1:
+            raise RuntimeError('Raster layers need to be processed one at a time')
+        args.rasterdir = Path(args.rasterdir)
+        if args.rasterdir == Path(): # make sure it's not curdir as we don't want to exchange it
+            raise RuntimeError('Invalid value for --rasterdir')
+        args.rasterdir.parent.mkdir(parents=True, exist_ok=True)
+        last_modified = getLastMTimes(layerdefs=layers.values(), basedir=args.cachedir)
+        rv = 0
+        for layername, layerdef in layers.items():
+            try:
+                processRaster(layername, layerdef,
+                              sources=parse_config_dl(config.get('downloads', [])),
+                              license_info=config.get('license-info', {}),
+                              last_modified=last_modified,
+                              dst=args.rasterdir,
+                              cachedir=args.cachedir,
+                              extent=extent,
+                              compress_metadata=args.metadata_compress)
+            except Exception: # pylint: disable=broad-exception-caught
+                rv = 1
+                traceback.print_exc()
+        sys.exit(rv)
+
+    # open output dataset (possibly create it first)
+    dso = openOutputDS(config['dataset'])
+
+    validate_schema(layers,
+                    drvo=dso.GetDriver(),
+                    lco_defaults=config['dataset'].get('create-layer-options', None))
+
+    # create all output layers before starting the transaction
+    for layername, layerdef in layers.items():
+        lyr = dso.GetLayerByName(layername)
+        if lyr is not None:
+            # TODO dso.DeleteLayer(layername) if --overwrite and
+            # dso.TestCapability(ogr.ODsCDeleteLayer)
+            # (Sets OVERWRITE=YES for PostgreSQL and GPKG.)
+            continue
+        if not dso.TestCapability(ogr.ODsCCreateLayer):
+            raise RuntimeError(f'Output driver {dso.GetDriver().ShortName} does not '
+                               'support layer creation')
+        createOutputLayer(dso, layername, srs=srs, options=layerdef.get('create', None))
+
     if (dso.TestCapability(ogr.ODsCTransactions) and
         # we need SAVEPOINT support
         dso.GetDriver().ShortName in ('PostgreSQL', 'SQLite', 'GPKG')):
@@ -747,7 +781,8 @@ def main() -> NoReturn:
                   last_modified=last_modified,
                   dst=args.mvtdir,
                   default_options=config.get('vector-tiles', None),
-                  compress=args.mvt_compress)
+                  compress=args.mvt_compress,
+                  compress_metadata=args.metadata_compress)
 
         if dsoTransaction:
             dsoTransaction = False
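A note on the flow visible in the last two hunks: the openOutputDS() /
validate_schema() / createOutputLayer() block was moved below the new raster
branch, so a raster-only run exits via sys.exit(rv) without ever opening the
PostGIS output dataset, consistent with raster data not being stored there.
The new --metadata-compress flag is threaded into both processRaster() and
exportMVT() via the compress_metadata keyword argument.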