Diffstat (limited to 'webmap-import')
-rwxr-xr-x  webmap-import | 158 ++++++++++++++++++++++++++++++++++++++++----------------------
1 file changed, 100 insertions(+), 58 deletions(-)
diff --git a/webmap-import b/webmap-import
index c86e7a2..b4552d5 100755
--- a/webmap-import
+++ b/webmap-import
@@ -47,12 +47,12 @@ from osgeo import gdalconst
import common
from common import (
BadConfiguration,
+ parse_config_dl,
escape_identifier,
escape_literal_str,
getSourcePathLockFileName
)
from common_gdal import (
- gdalVersionMin,
gdalGetMetadataItem,
getSRS,
getExtent,
@@ -69,6 +69,7 @@ from import_source import (
ImportStatus
)
from export_mvt import exportMVT
+from export_raster import processRaster
def setFieldIf(cond : bool,
attrName : str,
@@ -97,21 +98,10 @@ def validate_schema(layers : dict[str, Any],
(We need the output dataset's driver to determine its capabilities
regarding constraints.)"""
- # Cf. https://github.com/OSGeo/gdal/blob/master/NEWS.md
- if gdalVersionMin(maj=3, min=7):
- # list of capability flags supported by the CreateField() API
- drvoFieldDefnFlags = drvo.GetMetadataItem(gdalconst.DMD_CREATION_FIELD_DEFN_FLAGS)
- drvoFieldDefnFlags = drvoFieldDefnFlags.split(' ') if drvoFieldDefnFlags is not None else []
- drvoSupportsFieldComment = 'Comment' in drvoFieldDefnFlags
- # GetTZFlag()/SetTZFlag() and OGR_TZFLAG_* constants added in 3.8.0
- hasTZFlagSupport = gdalVersionMin(maj=3, min=8)
- else:
- # list of flags supported by the OGRLayer::AlterFieldDefn() API
- drvoFieldDefnFlags = drvo.GetMetadataItem(gdalconst.DMD_ALTER_FIELD_DEFN_FLAGS)
- drvoFieldDefnFlags = drvoFieldDefnFlags.split(' ') if drvoFieldDefnFlags is not None else []
- # GetComment()/SetComment() added in 3.7.0
- drvoSupportsFieldComment = False
- hasTZFlagSupport = False
+ # list of capability flags supported by the CreateField() API
+ drvoFieldDefnFlags = drvo.GetMetadataItem(gdalconst.DMD_CREATION_FIELD_DEFN_FLAGS)
+ drvoFieldDefnFlags = drvoFieldDefnFlags.split(' ') if drvoFieldDefnFlags is not None else []
+ drvoSupportsFieldComment = 'Comment' in drvoFieldDefnFlags
# cache driver capabilities
drvoSupportsFieldWidthPrecision = 'WidthPrecision' in drvoFieldDefnFlags
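
For context, a minimal sketch of the capability probe the simplified block
performs, now that GDAL >= 3.7 is assumed unconditionally (the GPKG driver
name is only an example):

    from osgeo import gdal, gdalconst

    gdal.UseExceptions()
    drv = gdal.GetDriverByName('GPKG')  # illustrative driver
    # DMD_CREATION_FIELD_DEFN_FLAGS is a space-separated list of flags
    # such as 'WidthPrecision Nullable Default Unique Comment' (driver-dependent)
    flags = drv.GetMetadataItem(gdalconst.DMD_CREATION_FIELD_DEFN_FLAGS)
    flags = flags.split(' ') if flags is not None else []
    supports_comment = 'Comment' in flags
    supports_width_precision = 'WidthPrecision' in flags
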
@@ -175,11 +165,7 @@ def validate_schema(layers : dict[str, Any],
elif k2 == 'subtype':
fld_def2['SubType'] = parseSubFieldType(v)
elif k2 == 'tz':
- if hasTZFlagSupport:
- fld_def2['TZFlag'] = parseTimeZone(v)
- else:
- logging.debug('Ignoring TZ="%s" on field "%s" (OGR v%s is too old)',
- v, fld_name, gdal.__version__)
+ fld_def2['TZFlag'] = parseTimeZone(v)
elif k2 == 'width' and v is not None and isinstance(v, int):
setFieldIf(drvoSupportsFieldWidthPrecision,
'Width', v, fld_def2, fld_name, drvo.ShortName)
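
parseTimeZone() itself is not part of this diff; the hypothetical stand-in
below only illustrates the OGR timezone-flag encoding it presumably targets
(0 = unknown, 1 = local time, otherwise TZFLAG_UTC plus the UTC offset in
15-minute units, so 100 means UTC and 108 means +02:00):

    from osgeo import ogr

    def parse_tz_sketch(value):
        # hypothetical stand-in for parseTimeZone(), for illustration only
        if value is None or value == 'unknown':
            return ogr.TZFLAG_UNKNOWN
        if value == 'local':
            return ogr.TZFLAG_LOCALTIME
        if value in ('UTC', 'Z'):
            return ogr.TZFLAG_UTC
        sign = -1 if value.startswith('-') else 1
        hours, _, minutes = value.lstrip('+-').partition(':')
        return ogr.TZFLAG_UTC + sign * (60 * int(hours) + int(minutes or 0)) // 15
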
@@ -288,7 +274,7 @@ def processOutputLayer(ds : gdal.Dataset,
# setup output field mapping in the sources dictionary
setOutputFieldMap(lyr.GetLayerDefn(), sources)
- return importSources(dso=ds, lyr=lyr, sources=sources,
+ return importSources(lyr=lyr, sources=sources,
cachedir=cachedir, extent=extent,
dsoTransaction=dsTransaction,
lyrcache=lyrcache,
@@ -388,12 +374,10 @@ def validateCacheLayer(ds : gdal.Dataset, name : str) -> bool:
logging.warning('Table "%s" does not exist', name)
return False
-# if not (lyr.TestCapability(ogr.OLCRandomWrite) and
-# gdalVersionMin(maj=3, min=7) and
-# lyr.TestCapability(ogr.OLCUpdateFeature)):
-# logging.warning('Layer "%s" does not support OLCUpdateFeature capability, '
-# 'ignoring cache', name)
-# return None
+ if not (lyr.TestCapability(ogr.OLCRandomWrite) and lyr.TestCapability(ogr.OLCUpdateFeature)):
+ logging.warning('Layer "%s" lacks OLCRandomWrite and/or OLCUpdateFeature '
+ 'capability, ignoring cache', name)
+ return False
defn = lyr.GetLayerDefn()
fields = [
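
The restored check boils down to this layer-level capability probe (dataset
path and layer name invented; ogr.OLCUpdateFeature requires GDAL >= 3.7):

    from osgeo import gdal, ogr

    ds = gdal.OpenEx('cache.gpkg', gdal.OF_VECTOR | gdal.OF_UPDATE)  # illustrative
    lyr = ds.GetLayerByName('layercache')
    usable = (lyr is not None
              and lyr.TestCapability(ogr.OLCRandomWrite)     # random rewrites OK
              and lyr.TestCapability(ogr.OLCUpdateFeature))  # UpdateFeature() OK
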
@@ -422,11 +406,10 @@ def validateCacheLayer(ds : gdal.Dataset, name : str) -> bool:
logging.warning('Layer cache "%s" has %d > 0 geometry field(s): %s',
name, n, ', '.join(geomFieldNames))
- if gdalVersionMin(maj=3, min=5):
- style = lyr.GetStyleTable()
- if style is not None:
- logging.warning('Layer cache "%s" has a style table "%s"',
- name, style.GetLastStyleName())
+ style = lyr.GetStyleTable()
+ if style is not None:
+ logging.warning('Layer cache "%s" has a style table "%s"',
+ name, style.GetLastStyleName())
return True
def areSourceFilesNewer(layername : str,
@@ -480,9 +463,7 @@ def areSourceFilesNewer(layername : str,
# https://gdal.org/en/stable/api/python/vector_api.html#osgeo.ogr.Feature.GetFieldAsDateTime
# [ year, month, day, hour, minute, second, timezone flag ]
dt = feature.GetFieldAsDateTime(1)
- if not gdalVersionMin(maj=3, min=8):
- tz = None # assume local time
- elif dt[6] == ogr.TZFLAG_UNKNOWN:
+ if dt[6] == ogr.TZFLAG_UNKNOWN:
logging.warning('Datetime specified with unknown timezone in layer cache\'s '
'field #%d "%s", assuming local time', 1,
feature.GetDefnRef().GetFieldDefn(1).GetName())
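
Per the documentation linked above, the timezone comes back as a flag in the
last tuple slot. A sketch of turning it into a Python tzinfo with the same
local-time fallback ('feature' stands for the cache feature read above):

    from datetime import datetime, timedelta, timezone
    from osgeo import ogr

    def tzinfo_from_flag(flag):
        # TZFLAG_UNKNOWN / TZFLAG_LOCALTIME carry no explicit zone: return
        # None and let the caller assume local time, as the code above does
        if flag in (ogr.TZFLAG_UNKNOWN, ogr.TZFLAG_LOCALTIME):
            return None
        # other values encode the UTC offset in 15-minute units (100 == UTC)
        return timezone(timedelta(minutes=(flag - ogr.TZFLAG_UTC) * 15))

    y, mo, d, h, mi, s, flag = feature.GetFieldAsDateTime(1)
    dt = datetime(y, mo, d, h, mi, int(s),  # int() drops fractional seconds
                  tzinfo=tzinfo_from_flag(flag))
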
@@ -524,6 +505,26 @@ def areSourceFilesNewer(layername : str,
source_path, dt.astimezone().isoformat(timespec='seconds'))
return ret
+def getLastMTimes(layerdefs : dict[str,Any], basedir : Optional[Path] = None) -> dict[str,int]:
+ """Return a directing mapping source paths to their last modification time
+ (as a timestamp in milliseconds)."""
+ ret = {}
+ for layerdef in layerdefs:
+ for source in layerdef['sources']:
+ source_path = source['source']['path']
+ if source_path in ret:
+ continue
+ path = source_path if basedir is None else str(basedir.joinpath(source_path))
+ try:
+ st = os.stat(path)
+ if not S_ISREG(st.st_mode):
+ raise FileNotFoundError
+ ret[source_path] = st.st_mtime_ns // 1000000
+ except (OSError, ValueError):
+ #logging.warning('Could not stat(%s)', path)
+ pass
+ return ret
+
def lockSourcePaths(layerdefs : dict[str,Any], lockdir: str) -> dict[str,int]:
"""Place shared locks on each source path and return their respective file
descriptors. We could do that one layerdef at a time (one output layer at a
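
In miniature, the flock() pattern this relies on (the lock-file path is
invented): readers hold LOCK_SH so that a writer taking LOCK_EX on the same
file must wait, and vice versa:

    import os
    from fcntl import flock, LOCK_SH, LOCK_UN

    fd = os.open('/run/lock/webmap/source.lock', os.O_RDONLY | os.O_CREAT, 0o664)
    flock(fd, LOCK_SH)  # blocks while another process holds LOCK_EX
    try:
        pass            # read the source file here
    finally:
        flock(fd, LOCK_UN)
        os.close(fd)
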
@@ -586,6 +587,10 @@ def main() -> NoReturn:
help='optional directory for Mapbox Vector Tiles (MVT)')
parser.add_argument('--mvt-compress', default=False, action='store_true',
help='whether to compress Mapbox Vector Tiles (MVT) files')
+ parser.add_argument('--rasterdir', default=None,
+ help='optional directory for raster files')
+ parser.add_argument('--metadata-compress', default=False, action='store_true',
+ help='whether to compress metadata.json files')
parser.add_argument('--force', default=False, action='store_true',
help='import even if no new changes were detected')
parser.add_argument('groupname', nargs='*', help='group layer name(s) to process')
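
With the two new options, a raster-only group might be imported with
something like the following (paths and group name are made up):

    webmap-import --rasterdir=/srv/webmap/raster --metadata-compress rasters
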
@@ -610,13 +615,6 @@ def main() -> NoReturn:
logging.debug('gdal.SetConfigOption(%s, %s)', pszKey, pszValue)
gdal.SetConfigOption(pszKey, pszValue)
- # open output dataset (possibly create it first)
- dso = openOutputDS(config['dataset'])
-
- validate_schema(layers,
- drvo=dso.GetDriver(),
- lco_defaults=config['dataset'].get('create-layer-options', None))
-
# get configured Spatial Reference System and extent
srs = getSRS(config.get('SRS', None))
extent = getExtent(config.get('extent', None), srs=srs)
@@ -627,19 +625,6 @@ def main() -> NoReturn:
logging.debug('flock("%s", LOCK_EX)', args.lockfile)
flock(lock_fd, LOCK_EX)
- # create all output layers before starting the transaction
- for layername, layerdef in layers.items():
- lyr = dso.GetLayerByName(layername)
- if lyr is not None:
- # TODO dso.DeleteLayer(layername) if --overwrite and
- # dso.TestCapability(ogr.ODsCDeleteLayer)
- # (Sets OVERWRITE=YES for PostgreSQL and GPKG.)
- continue
- if not dso.TestCapability(ogr.ODsCCreateLayer):
- raise RuntimeError(f'Output driver {dso.GetDriver().ShortName} does not '
- 'support layer creation')
- createOutputLayer(dso, layername, srs=srs, options=layerdef.get('create', None))
-
if args.mvtdir is not None:
args.mvtdir = Path(args.mvtdir)
if args.mvtdir == Path(): # make sure it's not curdir as we don't want to exchange it
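
The curdir guard works because pathlib normalizes the empty string to '.',
so an empty --mvtdir (or, below, --rasterdir) is caught as well:

    from pathlib import Path

    assert Path('') == Path('.') == Path()  # all three normalize to curdir
    assert Path('tiles') != Path()
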
@@ -654,6 +639,55 @@ def main() -> NoReturn:
sourcePathLocks = lockSourcePaths(layerdefs=layers.values(),
lockdir=args.lockdir_sources)
+ # special handling for raster layers
+ if any(l.get('type') == 'raster' for l in layers.values()):
+ if not all(l.get('type') == 'raster' for l in layers.values()):
+ raise NotImplementedError('Mix of raster and vector layers is not supported')
+ if args.rasterdir is None:
+ raise RuntimeError('Missing required value for --rasterdir')
+ if len(layers) != 1:
+ raise RuntimeError('Raster layers need to be processed one at a time')
+ args.rasterdir = Path(args.rasterdir)
+ if args.rasterdir == Path(): # make sure it's not curdir as we don't want to exchange it
+ raise RuntimeError('Invalid value for --rasterdir')
+ args.rasterdir.parent.mkdir(parents=True, exist_ok=True)
+ last_modified = getLastMTimes(layerdefs=layers.values(), basedir=args.cachedir)
+ rv = 0
+ for layername, layerdef in layers.items():
+ try:
+ processRaster(layername, layerdef,
+ sources=parse_config_dl(config.get('downloads', [])),
+ license_info=config.get('license-info', {}),
+ last_modified=last_modified,
+ dst=args.rasterdir,
+ cachedir=args.cachedir,
+ extent=extent,
+ compress_metadata=args.metadata_compress)
+ except Exception: # pylint: disable=broad-exception-caught
+ rv = 1
+ traceback.print_exc()
+ sys.exit(rv)
+
+ # open output dataset (possibly create it first)
+ dso = openOutputDS(config['dataset'])
+
+ validate_schema(layers,
+ drvo=dso.GetDriver(),
+ lco_defaults=config['dataset'].get('create-layer-options', None))
+
+ # create all output layers before starting the transaction
+ for layername, layerdef in layers.items():
+ lyr = dso.GetLayerByName(layername)
+ if lyr is not None:
+ # TODO dso.DeleteLayer(layername) if --overwrite and
+ # dso.TestCapability(ogr.ODsCDeleteLayer)
+ # (Sets OVERWRITE=YES for PostgreSQL and GPKG.)
+ continue
+ if not dso.TestCapability(ogr.ODsCCreateLayer):
+ raise RuntimeError(f'Output driver {dso.GetDriver().ShortName} does not '
+ 'support layer creation')
+ createOutputLayer(dso, layername, srs=srs, options=layerdef.get('create', None))
+
if (dso.TestCapability(ogr.ODsCTransactions) and
# we need SAVEPOINT support
dso.GetDriver().ShortName in ('PostgreSQL', 'SQLite', 'GPKG')):
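
For reference, a sketch of the dataset-level transaction pattern this
condition gates, against an illustrative GeoPackage (the driver whitelist
exists because the code additionally needs SAVEPOINT support):

    from osgeo import gdal, ogr

    ds = gdal.OpenEx('output.gpkg', gdal.OF_VECTOR | gdal.OF_UPDATE)  # illustrative
    if ds.TestCapability(ogr.ODsCTransactions):
        ds.StartTransaction()
        try:
            ...  # create layers and import features here
            ds.CommitTransaction()
        except Exception:
            ds.RollbackTransaction()
            raise
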
@@ -703,6 +737,9 @@ def main() -> NoReturn:
elapsed = time_monotonic() - start
logging.info('Processed %d destination layers in %s', n, common.format_time(elapsed))
+ # get mtimes before releasing the source locks
+ last_modified = getLastMTimes(layerdefs=layers.values(), basedir=args.cachedir)
+
if sourcePathLocks is not None:
releaseSourcePathLocks(sourcePathLocks)
@@ -716,10 +753,15 @@ def main() -> NoReturn:
logging.info('Skipping MVT export for group %s (no changes)',
', '.join(args.groupname) if args.groupname is not None else '*')
else:
- exportMVT(dso, layers=export_layers,
+ exportMVT(dso,
+ layers=export_layers,
+ sources=parse_config_dl(config.get('downloads', [])),
+ license_info=config.get('license-info', {}),
+ last_modified=last_modified,
dst=args.mvtdir,
default_options=config.get('vector-tiles', None),
- compress=args.mvt_compress)
+ compress=args.mvt_compress,
+ compress_metadata=args.metadata_compress)
if dsoTransaction:
dsoTransaction = False