Diffstat (limited to 'webmap-import')
-rwxr-xr-x  webmap-import  128
1 file changed, 71 insertions(+), 57 deletions(-)
diff --git a/webmap-import b/webmap-import
index e5a1426..b4552d5 100755
--- a/webmap-import
+++ b/webmap-import
@@ -53,7 +53,6 @@ from common import (
getSourcePathLockFileName
)
from common_gdal import (
- gdalVersionMin,
gdalGetMetadataItem,
getSRS,
getExtent,
@@ -70,6 +69,7 @@ from import_source import (
ImportStatus
)
from export_mvt import exportMVT
+from export_raster import processRaster
def setFieldIf(cond : bool,
attrName : str,
@@ -98,21 +98,10 @@ def validate_schema(layers : dict[str, Any],
(We need the driver of the output dataset to determine capability on
constraints.)"""
- # Cf. https://github.com/OSGeo/gdal/blob/master/NEWS.md
- if gdalVersionMin(maj=3, min=7):
- # list of capability flags supported by the CreateField() API
- drvoFieldDefnFlags = drvo.GetMetadataItem(gdalconst.DMD_CREATION_FIELD_DEFN_FLAGS)
- drvoFieldDefnFlags = drvoFieldDefnFlags.split(' ') if drvoFieldDefnFlags is not None else []
- drvoSupportsFieldComment = 'Comment' in drvoFieldDefnFlags
- # GetTZFlag()/SetTZFlag() and OGR_TZFLAG_* constants added in 3.8.0
- hasTZFlagSupport = gdalVersionMin(maj=3, min=8)
- else:
- # list of flags supported by the OGRLayer::AlterFieldDefn() API
- drvoFieldDefnFlags = drvo.GetMetadataItem(gdalconst.DMD_ALTER_FIELD_DEFN_FLAGS)
- drvoFieldDefnFlags = drvoFieldDefnFlags.split(' ') if drvoFieldDefnFlags is not None else []
- # GetComment()/SetComment() added in 3.7.0
- drvoSupportsFieldComment = False
- hasTZFlagSupport = False
+ # list of capability flags supported by the CreateField() API
+ drvoFieldDefnFlags = drvo.GetMetadataItem(gdalconst.DMD_CREATION_FIELD_DEFN_FLAGS)
+ drvoFieldDefnFlags = drvoFieldDefnFlags.split(' ') if drvoFieldDefnFlags is not None else []
+ drvoSupportsFieldComment = 'Comment' in drvoFieldDefnFlags
# cache driver capabilities
drvoSupportsFieldWidthPrecision = 'WidthPrecision' in drvoFieldDefnFlags
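With the version guard gone, validate_schema() relies on GDAL builds that expose the CreateField() capability list (3.7+). A minimal standalone sketch of that probe, assuming a GPKG output driver (any OGR driver name would do):

    from osgeo import gdal, gdalconst

    gdal.UseExceptions()
    drv = gdal.GetDriverByName('GPKG')  # assumed example driver, not from the patch
    flags = drv.GetMetadataItem(gdalconst.DMD_CREATION_FIELD_DEFN_FLAGS)
    flags = flags.split(' ') if flags is not None else []
    print('Comment' in flags)         # field comments supported on creation?
    print('WidthPrecision' in flags)  # width/precision honoured on creation?
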
@@ -176,11 +165,7 @@ def validate_schema(layers : dict[str, Any],
elif k2 == 'subtype':
fld_def2['SubType'] = parseSubFieldType(v)
elif k2 == 'tz':
- if hasTZFlagSupport:
- fld_def2['TZFlag'] = parseTimeZone(v)
- else:
- logging.debug('Ignoring TZ="%s" on field "%s" (OGR v%s is too old)',
- v, fld_name, gdal.__version__)
+ fld_def2['TZFlag'] = parseTimeZone(v)
elif k2 == 'width' and v is not None and isinstance(v, int):
setFieldIf(drvoSupportsFieldWidthPrecision,
'Width', v, fld_def2, fld_name, drvo.ShortName)
@@ -289,7 +274,7 @@ def processOutputLayer(ds : gdal.Dataset,
# setup output field mapping in the sources dictionary
setOutputFieldMap(lyr.GetLayerDefn(), sources)
- return importSources(dso=ds, lyr=lyr, sources=sources,
+ return importSources(lyr=lyr, sources=sources,
cachedir=cachedir, extent=extent,
dsoTransaction=dsTransaction,
lyrcache=lyrcache,
@@ -389,12 +374,10 @@ def validateCacheLayer(ds : gdal.Dataset, name : str) -> bool:
logging.warning('Table "%s" does not exist', name)
return False
-# if not (lyr.TestCapability(ogr.OLCRandomWrite) and
-# gdalVersionMin(maj=3, min=7) and
-# lyr.TestCapability(ogr.OLCUpdateFeature)):
-# logging.warning('Layer "%s" does not support OLCUpdateFeature capability, '
-# 'ignoring cache', name)
-# return None
+ if not (lyr.TestCapability(ogr.OLCRandomWrite) and lyr.TestCapability(ogr.OLCUpdateFeature)):
+ logging.warning('Layer "%s" does not support OLCUpdateFeature capability, '
+ 'ignoring cache', name)
+ return False
defn = lyr.GetLayerDefn()
fields = [
@@ -423,11 +406,10 @@ def validateCacheLayer(ds : gdal.Dataset, name : str) -> bool:
logging.warning('Layer cache "%s" has %d > 0 geometry field(s): %s',
name, n, ', '.join(geomFieldNames))
- if gdalVersionMin(maj=3, min=5):
- style = lyr.GetStyleTable()
- if style is not None:
- logging.warning('Layer cache "%s" has a style table "%s"',
- name, style.GetLastStyleName())
+ style = lyr.GetStyleTable()
+ if style is not None:
+ logging.warning('Layer cache "%s" has a style table "%s"',
+ name, style.GetLastStyleName())
return True
def areSourceFilesNewer(layername : str,
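The cache validation above now unconditionally requires the OLCRandomWrite and OLCUpdateFeature capabilities. A rough sketch of the same probe against an arbitrary datasource, with made-up path and layer names:

    from osgeo import gdal, ogr

    gdal.UseExceptions()
    ds = gdal.OpenEx('cache.gpkg', gdal.OF_VECTOR | gdal.OF_UPDATE)  # hypothetical path
    lyr = ds.GetLayerByName('layercache')  # hypothetical layer name
    if lyr is None or not (lyr.TestCapability(ogr.OLCRandomWrite) and
                           lyr.TestCapability(ogr.OLCUpdateFeature)):
        print('layer missing or not updatable in place; the cache would be ignored')
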
@@ -481,9 +463,7 @@ def areSourceFilesNewer(layername : str,
# https://gdal.org/en/stable/api/python/vector_api.html#osgeo.ogr.Feature.GetFieldAsDateTime
# [ year, month, day, hour, minute, second, timezone flag ]
dt = feature.GetFieldAsDateTime(1)
- if not gdalVersionMin(maj=3, min=8):
- tz = None # assume local time
- elif dt[6] == ogr.TZFLAG_UNKNOWN:
+ if dt[6] == ogr.TZFLAG_UNKNOWN:
logging.warning('Datetime specified with unknown timezone in layer cache\'s '
'field #%d "%s", assuming local time', 1,
feature.GetDefnRef().GetFieldDefn(1).GetName())
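For reference, GetFieldAsDateTime() returns [year, month, day, hour, minute, second, tzflag], and with the version guard removed the flag is always interpreted via the OGR TZFLAG constants (GDAL 3.8+). A hedged helper sketching that interpretation; the offset arithmetic follows OGR's 15-minute encoding around TZFLAG_UTC (= 100), and subseconds are dropped:

    from datetime import datetime, timedelta, timezone
    from osgeo import ogr

    def ogr_datetime(dt: list) -> datetime:
        """Convert an OGR [y, m, d, H, M, S, tzflag] list to a datetime."""
        year, month, day, hour, minute, second, tzflag = dt
        if tzflag in (ogr.TZFLAG_UNKNOWN, ogr.TZFLAG_LOCALTIME):
            tz = None  # naive value, assumed local time (as the warning above does)
        else:
            # other values encode a UTC offset in 15-minute steps around TZFLAG_UTC
            tz = timezone(timedelta(minutes=15 * (tzflag - ogr.TZFLAG_UTC)))
        return datetime(year, month, day, hour, minute, int(second), tzinfo=tz)
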
@@ -607,6 +587,10 @@ def main() -> NoReturn:
help='optional directory for Mapbox Vector Tiles (MVT)')
parser.add_argument('--mvt-compress', default=False, action='store_true',
help='whether to compress Mapbox Vector Tiles (MVT) files')
+ parser.add_argument('--rasterdir', default=None,
+ help='optional directory for raster files')
+ parser.add_argument('--metadata-compress', default=False, action='store_true',
+ help='whether to compress metadata.json files')
parser.add_argument('--force', default=False, action='store_true',
help='import even if no new changes were detected')
parser.add_argument('groupname', nargs='*', help='group layer name(s) to process')
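A toy reproduction of just the two new options (not the script's full parser), to make their defaults and types explicit:

    import argparse
    from pathlib import Path

    parser = argparse.ArgumentParser()
    parser.add_argument('--rasterdir', default=None,
                        help='optional directory for raster files')
    parser.add_argument('--metadata-compress', default=False, action='store_true',
                        help='whether to compress metadata.json files')
    args = parser.parse_args(['--rasterdir', 'rasters', '--metadata-compress'])
    assert Path(args.rasterdir) == Path('rasters') and args.metadata_compress
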
@@ -631,13 +615,6 @@ def main() -> NoReturn:
logging.debug('gdal.SetConfigOption(%s, %s)', pszKey, pszValue)
gdal.SetConfigOption(pszKey, pszValue)
- # open output dataset (possibly create it first)
- dso = openOutputDS(config['dataset'])
-
- validate_schema(layers,
- drvo=dso.GetDriver(),
- lco_defaults=config['dataset'].get('create-layer-options', None))
-
# get configured Spatial Reference System and extent
srs = getSRS(config.get('SRS', None))
extent = getExtent(config.get('extent', None), srs=srs)
@@ -648,19 +625,6 @@ def main() -> NoReturn:
logging.debug('flock("%s", LOCK_EX)', args.lockfile)
flock(lock_fd, LOCK_EX)
- # create all output layers before starting the transaction
- for layername, layerdef in layers.items():
- lyr = dso.GetLayerByName(layername)
- if lyr is not None:
- # TODO dso.DeleteLayer(layername) if --overwrite and
- # dso.TestCapability(ogr.ODsCDeleteLayer)
- # (Sets OVERWRITE=YES for PostgreSQL and GPKG.)
- continue
- if not dso.TestCapability(ogr.ODsCCreateLayer):
- raise RuntimeError(f'Output driver {dso.GetDriver().ShortName} does not '
- 'support layer creation')
- createOutputLayer(dso, layername, srs=srs, options=layerdef.get('create', None))
-
if args.mvtdir is not None:
args.mvtdir = Path(args.mvtdir)
if args.mvtdir == Path(): # make sure it's not curdir as we don't want to exchange it
@@ -675,6 +639,55 @@ def main() -> NoReturn:
sourcePathLocks = lockSourcePaths(layerdefs=layers.values(),
lockdir=args.lockdir_sources)
+ # special handling for raster layers
+ if any(l.get('type') == 'raster' for l in layers.values()):
+ if not all(l.get('type') == 'raster' for l in layers.values()):
+ raise NotImplementedError('Mix of raster and vector layers is not supported')
+ if args.rasterdir is None:
+ raise RuntimeError('Missing required value for --rasterdir')
+ if len(layers) != 1:
+ raise RuntimeError('Raster layers need to be processed one at a time')
+ args.rasterdir = Path(args.rasterdir)
+ if args.rasterdir == Path(): # make sure it's not curdir as we don't want to exchange it
+ raise RuntimeError('Invalid value for --rasterdir')
+ args.rasterdir.parent.mkdir(parents=True, exist_ok=True)
+ last_modified = getLastMTimes(layerdefs=layers.values(), basedir=args.cachedir)
+ rv = 0
+ for layername, layerdef in layers.items():
+ try:
+ processRaster(layername, layerdef,
+ sources=parse_config_dl(config.get('downloads', [])),
+ license_info=config.get('license-info', {}),
+ last_modified=last_modified,
+ dst=args.rasterdir,
+ cachedir=args.cachedir,
+ extent=extent,
+ compress_metadata=args.metadata_compress)
+ except Exception: # pylint: disable=broad-exception-caught
+ rv = 1
+ traceback.print_exc()
+ sys.exit(rv)
+
+ # open output dataset (possibly create it first)
+ dso = openOutputDS(config['dataset'])
+
+ validate_schema(layers,
+ drvo=dso.GetDriver(),
+ lco_defaults=config['dataset'].get('create-layer-options', None))
+
+ # create all output layers before starting the transaction
+ for layername, layerdef in layers.items():
+ lyr = dso.GetLayerByName(layername)
+ if lyr is not None:
+ # TODO dso.DeleteLayer(layername) if --overwrite and
+ # dso.TestCapability(ogr.ODsCDeleteLayer)
+ # (Sets OVERWRITE=YES for PostgreSQL and GPKG.)
+ continue
+ if not dso.TestCapability(ogr.ODsCCreateLayer):
+ raise RuntimeError(f'Output driver {dso.GetDriver().ShortName} does not '
+ 'support layer creation')
+ createOutputLayer(dso, layername, srs=srs, options=layerdef.get('create', None))
+
if (dso.TestCapability(ogr.ODsCTransactions) and
# we need SAVEPOINT support
dso.GetDriver().ShortName in ('PostgreSQL', 'SQLite', 'GPKG')):
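The trailing context shows the transaction gate: SAVEPOINT support is only assumed for the PostgreSQL, SQLite and GPKG drivers. A minimal sketch of the same probe, assuming a hypothetical GeoPackage output:

    from osgeo import gdal, ogr

    gdal.UseExceptions()
    dso = gdal.OpenEx('output.gpkg', gdal.OF_VECTOR | gdal.OF_UPDATE)  # hypothetical path
    use_txn = (dso.TestCapability(ogr.ODsCTransactions) and
               dso.GetDriver().ShortName in ('PostgreSQL', 'SQLite', 'GPKG'))
    if use_txn:
        dso.StartTransaction()
        # ... per-layer imports, each wrapped in its own SAVEPOINT, would go here ...
        dso.CommitTransaction()
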
@@ -747,7 +760,8 @@ def main() -> NoReturn:
last_modified=last_modified,
dst=args.mvtdir,
default_options=config.get('vector-tiles', None),
- compress=args.mvt_compress)
+ compress=args.mvt_compress,
+ compress_metadata=args.metadata_compress)
if dsoTransaction:
dsoTransaction = False