author    Guilhem Moulin <guilhem@fripost.org>  2025-05-25 12:54:50 +0200
committer Guilhem Moulin <guilhem@fripost.org>  2025-05-25 13:05:18 +0200
commit    527fdfd1e46d66007758861dd0decf9c03043d0a (patch)
tree      3e1106a117f38cde44d84f11e0ad39c8958baac4 /webmap-cgi
parent    b9b9eef91e5c33e6938b64e4e60f066c36201deb (diff)
CGI: Take a list of feature IDs to query to avoid pounding the backend.
application() returns an iterator, so we serve responses immediately as they are received from the server (no buffering).
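
As a rough illustration of the new request format (the endpoint URL, the Content-Type header and the layer/group names below are made up; only the keys layer_group, layer and fid come from the patch), a client now POSTs a JSON list of feature references and reads the streamed JSON array back:

    import json
    import urllib.request

    # One entry per feature to look up; the response preserves this order.
    body = json.dumps([
        {"layer_group": "group-a", "layer": "layer-1", "fid": 1234},
        {"layer_group": "group-a", "layer": "layer-1", "fid": 5678},
    ]).encode("utf-8")

    req = urllib.request.Request(
        "https://example.org/webmap-cgi",            # hypothetical endpoint
        data=body,
        headers={"Content-Type": "application/json"},
    )
    with urllib.request.urlopen(req) as resp:
        # The CGI streams b'[', the GeoJSON objects separated by b',', then b']',
        # so the whole body parses as a single JSON array.
        features = json.load(resp)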
Diffstat (limited to 'webmap-cgi')
-rwxr-xr-x  webmap-cgi  59
1 file changed, 43 insertions, 16 deletions
diff --git a/webmap-cgi b/webmap-cgi
index 616b2fd..c32a389 100755
--- a/webmap-cgi
+++ b/webmap-cgi
@@ -63,11 +63,15 @@ def get_query(layername : str) -> bytes:
query += 'pretty_bool=>\'f\'),'
query += '\'UTF8\') AS "GeoJSON" '
query += 'FROM ('
- query += 'SELECT l.* '
+ query += 'SELECT l.*, %s AS layer_group, %s AS layer '
query += 'FROM ' + common.escape_identifier(SCHEMA_NAME)
query += '.' + common.escape_identifier(layername) + ' l '
query += 'WHERE l.ogc_fid = %s'
query += ') m'
+ # The query never returns more than one row since we filter on a single FID.
+ # Don't try to be clever and batch queries in an IN set or ANY as we
+ # want to preserve the order in the response (so the feature(s)
+ # exactly under the cursor are returned first).
return query.encode('utf-8')
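
To make the ordering remark above concrete, here is a hedged sketch of the batched variant the comment rules out (cur and fids are placeholders, not part of the patch); without an explicit ORDER BY, PostgreSQL may return the matching rows in any order, so the "feature under the cursor first" ordering of the request would be lost:

    # Per-FID queries, as in the patch: one round trip per feature,
    # but results come back in the order the client requested them.
    for fid in fids:
        cur.execute(query, params=(layer_group, layer, fid))
        row = cur.fetchone()

    # Hypothetical batched alternative: a single round trip, but the row
    # order is unspecified unless an ORDER BY clause is added.
    # cur.execute(batched_query, params=(fids,))  # ... WHERE l.ogc_fid = ANY(%s)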
@@ -99,26 +103,48 @@ def application(env, start_response) -> Iterator[bytes]:
first = True
try:
body = json_load(env['wsgi.input'])
- if not isinstance(body, dict):
+ if not isinstance(body, list):
raise ValueError
start_response(STATUS_OK, [CONTENT_TYPE_JSON])
+ if not body:
+ yield b'[]'
+ return
+
+ if len(body) > MAX_FEATURE_COUNT:
+ logging.warning('Query has too many feature requests (%d), truncating to %d',
+ len(body), MAX_FEATURE_COUNT)
+ body = body[:MAX_FEATURE_COUNT]
+
# pylint: disable-next=no-member
with PG_CONN.cursor(binary=True, scrollable=False, withhold=False) as cur:
- if not isinstance(body, dict):
- raise ValueError
- mvt = body.get('mvt', None)
- layername = body.get('layer', None)
- if not isinstance(mvt, str) or not isinstance(layername, str):
- raise ValueError
- query = QUERY_MAP[TABLE_MAP[(mvt, layername)]]
- fid = body.get('fid', None)
- if not isinstance(fid, int):
- raise ValueError
- cur.execute(query, params=(fid,))
- resp = cur.fetchone()
- if resp is not None:
+ for item in body:
+ if not isinstance(item, dict):
+ raise ValueError
+ layer_group = item.get('layer_group', None)
+ layer = item.get('layer', None)
+ if not isinstance(layer_group, str) or not isinstance(layer, str):
+ raise ValueError
+ query = QUERY_MAP[TABLE_MAP[(layer_group, layer)]]
+ fid = item.get('fid', None)
+ if not isinstance(fid, int):
+ raise ValueError
+ cur.execute(query, params=(layer_group, layer, fid))
+ resp = cur.fetchone()
+ if resp is None:
+ continue # no match for this tuple
+ if first:
+ yield b'['
+ first = False
+ else:
+ yield b','
yield resp[0]
+ # the query never returns more than one row since we filter on a single FID
+ if first:
+ yield b'[]' # no match, empty response
+ first = False
+ else:
+ yield b']'
except (JSONDecodeError, LookupError, UnicodeDecodeError, ValueError) as exc:
logging.exception('Invalid request body')
@@ -170,8 +196,9 @@ PG_CONN.execute( # pylint: disable=no-member
'SET statement_timeout TO 15000', # 15s
prepare=False)
-del sys.modules['common']
+# drop functions and modules we don't need anymore
del common
+del sys.modules['common']
del get_query
del get_table_map
del os_path