diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 2cb8f54fd..a9266caec 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -24,6 +24,7 @@ jobs: - python-version: '3.12' env: PYGEOAPI_CONFIG: "$(pwd)/pygeoapi-config.yml" + DOCKER_API_VERSION: "1.44" services: postgres: @@ -42,7 +43,6 @@ jobs: docker pull elasticsearch:8.17.0 & docker pull opensearchproject/opensearch:2.18.0 & docker pull mongo:8.0.4 & - docker pull ghcr.io/cgs-earth/sensorthings-action:0.1.0 & docker pull postgis/postgis:14-3.2 & - name: Clear up GitHub runner diskspace run: | @@ -96,7 +96,7 @@ jobs: with: mongodb-version: '8.0.4' - name: Install and run SensorThingsAPI - uses: cgs-earth/sensorthings-action@v0.1.0 + uses: cgs-earth/sensorthings-action@v0.1.2 - name: Install sqlite and gpkg dependencies uses: awalsh128/cache-apt-pkgs-action@v1.4.3 with: @@ -126,6 +126,7 @@ jobs: pip3 install -r requirements-provider.txt pip3 install -r requirements-manager.txt pip3 install -r requirements-django.txt + pip3 install -r requirements-pubsub.txt pip3 install . pip3 install GDAL==`gdal-config --version` - name: setup test data ⚙️ diff --git a/Dockerfile b/Dockerfile index b9992faea..833f7a3ca 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,7 +5,7 @@ # Francesco Bartoli # Angelos Tzotsos # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2019 Just van den Broecke # Copyright (c) 2025 Francesco Bartoli # Copyright (c) 2025 Angelos Tzotsos @@ -34,7 +34,7 @@ # # ================================================================= -FROM ubuntu:noble-20251013 +FROM ubuntu:noble LABEL maintainer="Just van den Broecke " @@ -133,7 +133,7 @@ ADD . 
/pygeoapi RUN python3 -m venv --system-site-packages /venv \ && /venv/bin/python3 -m pip install --no-cache-dir -r requirements-docker.txt \ && /venv/bin/python3 -m pip install --no-cache-dir -r requirements-admin.txt \ - && /venv/bin/python3 -m pip install --no-cache-dir gunicorn \ + && /venv/bin/python3 -m pip install --no-cache-dir "gunicorn<24" \ && /venv/bin/python3 -m pip install --no-cache-dir -e . # Set default config and entrypoint for Docker Image diff --git a/SECURITY.md b/SECURITY.md index ec9a04f14..87520abc4 100644 --- a/SECURITY.md +++ b/SECURITY.md @@ -13,5 +13,5 @@ The pygeoapi Project Steering Committee (PSC) will release patches for security | Version | Supported | | ------- | ------------------ | -| 0.10.x | :white_check_mark: | -| < 0.10 | :x: | +| 0.2x | :white_check_mark: | +| < 0.20 | :x: | diff --git a/docker/entrypoint.sh b/docker/entrypoint.sh index 91be8913e..0b0f47301 100755 --- a/docker/entrypoint.sh +++ b/docker/entrypoint.sh @@ -46,6 +46,9 @@ fi if [[ -z "$PYGEOAPI_OPENAPI" ]]; then export PYGEOAPI_OPENAPI="${PYGEOAPI_HOME}/local.openapi.yml" fi +if [[ -z "$PYGEOAPI_ASYNCAPI" ]]; then + export PYGEOAPI_ASYNCAPI="${PYGEOAPI_HOME}/local.asyncapi.yml" +fi # gunicorn env settings with defaults SCRIPT_NAME=${SCRIPT_NAME:=/} @@ -87,6 +90,11 @@ echo "Trying to generate openapi.yml" echo "openapi.yml generated continue to pygeoapi" +echo "Trying to generate asyncapi.yml" +/venv/bin/pygeoapi asyncapi generate ${PYGEOAPI_CONFIG} --output-file ${PYGEOAPI_ASYNCAPI} + +[[ $? 
-ne 0 ]] && echo "asyncapi.yml could not be generated; skipping" + start_gunicorn() { # SCRIPT_NAME should not have value '/' [[ "${SCRIPT_NAME}" = '/' ]] && export SCRIPT_NAME="" && echo "make SCRIPT_NAME empty from /" diff --git a/docs/source/administration.rst b/docs/source/administration.rst index fcebd291b..c947365b3 100644 --- a/docs/source/administration.rst +++ b/docs/source/administration.rst @@ -24,12 +24,6 @@ To generate the OpenAPI document, run the following: This will dump the OpenAPI document as YAML to your system's ``stdout``. To save to a file on disk, run: -.. code-block:: bash - - pygeoapi openapi generate /path/to/my-pygeoapi-config.yml > /path/to/my-pygeoapi-openapi.yml - -You can also write to a file explicitly via the ``--output-file`` option: - .. code-block:: bash pygeoapi openapi generate /path/to/my-pygeoapi-config.yml --output-file /path/to/my-pygeoapi-openapi.yml @@ -38,7 +32,7 @@ To generate the OpenAPI document as JSON, run: .. code-block:: bash - pygeoapi openapi generate /path/to/my-pygeoapi-config.yml -f json > /path/to/my-pygeoapi-openapi.json + pygeoapi openapi generate /path/to/my-pygeoapi-config.yml --format json --output-file /path/to/my-pygeoapi-openapi.json .. note:: Generate as YAML or JSON? If your OpenAPI YAML definition is slow to render as JSON, @@ -83,6 +77,11 @@ In UNIX: # or if OpenAPI JSON export PYGEOAPI_OPENAPI=/path/to/my-pygeoapi-openapi.json + # if your server supports AsyncAPI and Pub/Sub + export PYGEOAPI_ASYNCAPI=/path/to/my-pygeoapi-asyncapi.yml + # or if AsyncAPI JSON + export PYGEOAPI_ASYNCAPI=/path/to/my-pygeoapi-asyncapi.json + In Windows: .. code-block:: bat @@ -92,6 +91,14 @@ In Windows: # or if OpenAPI JSON set PYGEOAPI_OPENAPI=/path/to/my-pygeoapi-openapi.json + # if your server supports AsyncAPI and Pub/Sub + set PYGEOAPI_ASYNCAPI=/path/to/my-pygeoapi-asyncapi.yml + # or if AsyncAPI JSON + set PYGEOAPI_ASYNCAPI=/path/to/my-pygeoapi-asyncapi.json + +.. 
note:: + + More information on AsyncAPI and Pub/Sub can be found at :ref:`pubsub`. Summary ------- diff --git a/docs/source/conf.py b/docs/source/conf.py index a3f0ca7ed..1b96113a2 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -112,7 +112,7 @@ def __getattr__(cls, name): # built documents. # # The short X.Y version. -version = '0.23.dev0' +version = '0.24.dev0' # The full version, including alpha/beta/rc tags. release = version diff --git a/docs/source/configuration.rst b/docs/source/configuration.rst index 11c39255b..d718e85e6 100644 --- a/docs/source/configuration.rst +++ b/docs/source/configuration.rst @@ -14,6 +14,7 @@ file whatever you wish; typical filenames end with ``.yml``. pygeoapi configuration contains the following core sections: - ``server``: server-wide settings +- ``pubsub``: Publish-Subscribe settings (optional) - ``logging``: logging configuration - ``metadata``: server-wide metadata (contact, licensing, etc.) - ``resources``: dataset collections, processes and stac-collections offered by the server @@ -90,6 +91,23 @@ For more information related to API design rules (the ``api_rules`` property in url_prefix: 'v{api_major}' # adds a /v1 prefix to all URL paths version_header: X-API-Version # add a response header of this name with the API version +``pubsub`` +^^^^^^^^^^ + +The ``pubsub`` section provides directives for enabling publication of CloudEvent messages on item-based transactions + + +.. code-block:: yaml + + pubsub: + name: MQTT + broker: + url: mqtt://localhost:1883 + channel: my/service/topic + +.. seealso:: + :ref:`pubsub` for more information on Publish-Subscribe capabilities + ``logging`` ^^^^^^^^^^^ @@ -225,6 +243,14 @@ default. 
begin: 2000-10-30T18:24:39Z # start datetime in RFC3339 end: 2007-10-30T08:57:29Z # end datetime in RFC3339 trs: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian # TRS + resolution: P1D # ISO 8601 duration + default: 2000-10-30T18:24:39Z # default time + # additional extents can be added as desired (1..n) + foo: + url: https://example.org/def # required URL of the extent + range: [0, 10] # required overall range/extent + units: °C # optional units + values: [0, 2, 5, 5, 10] # optional, enumeration of values providers: # list of 1..n required connections information - type: feature # underlying data geospatial type. Allowed values are: feature, coverage, record, tile, edr name: CSV # required: plugin name or import path. See Plugins section for more information. diff --git a/docs/source/index.rst b/docs/source/index.rst index 4caa2efcc..4579a27b3 100644 --- a/docs/source/index.rst +++ b/docs/source/index.rst @@ -39,6 +39,7 @@ reference documentation on all aspects of the project. openapi publishing/index transactions + pubsub admin-api security plugins diff --git a/docs/source/plugins.rst b/docs/source/plugins.rst index 4d02699ca..4b9a86922 100644 --- a/docs/source/plugins.rst +++ b/docs/source/plugins.rst @@ -30,10 +30,15 @@ The core pygeoapi plugin registry can be found in ``pygeoapi.plugin.PLUGINS``. Each plugin type implements its relevant base class as the API contract: -* data providers: ``pygeoapi.provider.base`` -* output formats: ``pygeoapi.formatter.base`` -* processes: ``pygeoapi.process.base`` -* process_manager: ``pygeoapi.process.manager.base`` +* data providers: + + * features/records/maps: ``pygeoapi.provider.base.BaseProvider`` + * edr: ``pygeoapi.provider.base_edr.BaseEDRProvider`` + * tiles: ``pygeoapi.provider.tile.BaseTileProvider`` + +* output formats: ``pygeoapi.formatter.base.BaseFormatter`` +* processes: ``pygeoapi.process.base.BaseProcessor`` +* process_manager: ``pygeoapi.process.manager.base.BaseManager`` .. 
todo:: link PLUGINS to API doc @@ -150,7 +155,7 @@ option 2 above). Example: custom pygeoapi vector data provider --------------------------------------------- -Lets consider the steps for a vector data provider plugin: +Let's consider the steps for a vector data provider plugin: Python code ^^^^^^^^^^^ @@ -223,7 +228,7 @@ Each base class documents the functions, arguments and return types required for Example: custom pygeoapi raster data provider --------------------------------------------- -Lets consider the steps for a raster data provider plugin: +Let's consider the steps for a raster data provider plugin: Python code ^^^^^^^^^^^ @@ -278,6 +283,51 @@ Each base class documents the functions, arguments and return types required for .. _example-custom-pygeoapi-processing-plugin: +Example: custom pygeoapi EDR data provider +------------------------------------------ + +Let's consider the steps for an EDR data provider plugin: + +Python code +^^^^^^^^^^^ + +The below template provides a minimal example (let's call the file ``mycooledrdata.py``): + +.. code-block:: python + + from pygeoapi.provider.base_edr import BaseEDRProvider + + class MyCoolEDRDataProvider(BaseEDRProvider): + + def __init__(self, provider_def): + """Inherit from the parent class""" + + super().__init__(provider_def) + + self.covjson = {...} + + def get_instances(self): + return ['foo', 'bar'] + + def get_instance(self, instance): + return instance in self.get_instances() + + def position(self, **kwargs): + return self.covjson + + def trajectory(self, **kwargs): + return self.covjson + + +For brevity, the ``position`` function returns ``self.covjson`` which is a +dictionary of a CoverageJSON representation. ``get_instances`` returns a list +of instances associated with the collection/plugin, and ``get_instance`` returns +a boolean of whether a given instance exists/is valid. EDR query types are subject +to the query functions defined in the plugin. 
In the example above, the plugin +implements ``position`` and ``trajectory`` queries, which will be advertised as +supported query types. + + Example: custom pygeoapi processing plugin ------------------------------------------ @@ -360,6 +410,7 @@ Below is a sample process definition as a Python dictionary: 'it back as output. Intended to demonstrate a simple ' 'process with a single literal input.', 'jobControlOptions': ['sync-execute', 'async-execute'], # whether the process can be executed in sync or async mode + 'outputTransmission': ['value', 'reference'], # whether the process can return inline data or URL references 'keywords': ['hello world', 'example', 'echo'], # keywords associated with the process 'links': [{ # a list of 1..n # link objects relevant to the process 'type': 'text/html', diff --git a/docs/source/publishing/ogcapi-coverages.rst b/docs/source/publishing/ogcapi-coverages.rst index f277f7c5b..d2016b22f 100644 --- a/docs/source/publishing/ogcapi-coverages.rst +++ b/docs/source/publishing/ogcapi-coverages.rst @@ -89,11 +89,20 @@ The `Xarray`_ provider plugin reads and extracts `NetCDF`_ and `Zarr`_ data. format: name: zarr mimetype: application/zip + options: + zarr: + consolidated: true + squeeze: true + .. note:: `Zarr`_ files are directories with files and subdirectories. Therefore a zip file is returned upon request for said format. +.. note:: + + ``options.zarr`` is a custom property that can be used to set `Zarr-specific open options`_. + .. note:: When referencing `NetCDF`_ or `Zarr`_ data stored in an S3 bucket, be sure to provide the full S3 URL. Any parameters required to open the dataset @@ -155,3 +164,4 @@ Data access examples .. _`Zarr`: https://zarr.readthedocs.io/en/stable .. _`GDAL raster driver short name`: https://gdal.org/drivers/raster/index.html .. _`pyproj.CRS.from_user_input`: https://pyproj4.github.io/pyproj/stable/api/crs/coordinate_system.html#pyproj.crs.CoordinateSystem.from_user_input +.. 
_`Zarr-specific open options`: https://docs.xarray.dev/en/stable/generated/xarray.open_zarr.html diff --git a/docs/source/publishing/ogcapi-edr.rst b/docs/source/publishing/ogcapi-edr.rst index edd205d17..faf7fb37d 100644 --- a/docs/source/publishing/ogcapi-edr.rst +++ b/docs/source/publishing/ogcapi-edr.rst @@ -90,11 +90,15 @@ The `xarray-edr`_ provider plugin reads and extracts `NetCDF`_ and `Zarr`_ data a zip file is returned upon request for said format. .. note:: + + ``options.zarr`` is a custom property that can be used to set `Zarr-specific open options`_. + +.. note:: + When referencing data stored in an S3 bucket, be sure to provide the full S3 URL. Any parameters required to open the dataset using fsspec can be added to the config file under `options` and `s3`, as shown above. - SensorThingsEDR ^^^^^^^^^^^^^^^ @@ -143,3 +147,4 @@ Data access examples .. _`NetCDF`: https://en.wikipedia.org/wiki/NetCDF .. _`Zarr`: https://zarr.readthedocs.io/en/stable .. _`OGC Environmental Data Retrieval (EDR) (API)`: https://ogcapi.ogc.org/edr +.. _`Zarr-specific open options`: https://docs.xarray.dev/en/stable/generated/xarray.open_zarr.html diff --git a/docs/source/publishing/ogcapi-features.rst b/docs/source/publishing/ogcapi-features.rst index 3a1c2e9e5..df40d27bd 100644 --- a/docs/source/publishing/ogcapi-features.rst +++ b/docs/source/publishing/ogcapi-features.rst @@ -625,6 +625,19 @@ Must have PostGIS installed. id_field: osm_id table: hotosm_bdi_waterways geom_field: foo_geom + count: true # Optional; Default true; Enable/disable count for improved performance. + +This can be represented as a connection dictionary or as a connection string as follows: + +.. 
code-block:: yaml + + providers: + - type: feature + name: PostgreSQL + data: postgresql://postgres:postgres@127.0.0.1:3010/test + id_field: osm_id + table: hotosm_bdi_waterways + geom_field: foo_geom A number of database connection options can be also configured in the provider in order to adjust properly the sqlalchemy engine client. These are optional and if not specified, the default from the engine will be used. Please see also `SQLAlchemy docs `_. @@ -662,6 +675,7 @@ These are optional and if not specified, the default from the engine will be use id_field: osm_id table: hotosm_bdi_waterways geom_field: foo_geom + count: true # Optional; Default true; Enable/disable count for improved performance. The PostgreSQL provider is also able to connect to Cloud SQL databases. @@ -677,6 +691,7 @@ The PostgreSQL provider is also able to connect to Cloud SQL databases. password: postgres id_field: id table: states + count: true # Optional; Default true; Enable/disable count for improved performance. This is what a configuration for `Google Cloud SQL`_ connection looks like. The ``host`` block contains the necessary socket connection information. diff --git a/docs/source/publishing/ogcapi-maps.rst b/docs/source/publishing/ogcapi-maps.rst index 679e445c9..6924e3a39 100644 --- a/docs/source/publishing/ogcapi-maps.rst +++ b/docs/source/publishing/ogcapi-maps.rst @@ -136,5 +136,9 @@ Data visualization examples * http://localhost:5000/collections/foo/map?bbox-crs=http%3A%2F%2Fwww.opengis.net%2Fdef%2Fcrs%2FEPSG%2F0%2F3857&bbox=4.022369384765626%2C50.690447870569436%2C4.681549072265626%2C51.00260125274477&width=800&height=600&transparent +* map with vertical subset (``extents.vertical`` must be set in resource level config) + + * http://localhost:5000/collections/foo/map?bbox=-142,42,-52,84&subset=vertical(435) + .. _`OGC API - Maps`: https://ogcapi.ogc.org/maps .. 
_`see website`: https://mapserver.org/mapscript/index.html diff --git a/docs/source/pubsub.rst b/docs/source/pubsub.rst new file mode 100644 index 000000000..739a50d93 --- /dev/null +++ b/docs/source/pubsub.rst @@ -0,0 +1,162 @@ +.. _pubsub: + +Publish-Subscribe integration (Pub/Sub) +======================================= + +pygeoapi supports Publish-Subscribe (Pub/Sub) integration by implementing +the `OGC API Publish-Subscribe Workflow - Part 1: Core`_ (draft) specification. + +Pub/Sub integration can be enabled by defining a broker that pygeoapi can use to +publish notifications on given topics using CloudEvents (as per the specification). + +When enabled, core functionality of Pub/Sub includes: + +- providing an AsyncAPI document (JSON and HTML) +- providing the following links on the OGC API landing page: + + - the broker link (``rel=hub`` link relation) + - the AsyncAPI JSON link (``rel=service-desc`` link relation and ``type=application/asyncapi+json`` media type) + - the AsyncAPI HTML link (``rel=service-doc`` link relation and ``type=text/html`` media type) + +- sending a notification message on the following events: + + - feature or record transactions (create, replace, update, delete) + - process executions/job creation + +AsyncAPI +-------- + +`AsyncAPI`_ is the event-driven equivalent to :ref:`openapi` + +The official AsyncAPI specification can be found on the `AsyncAPI`_ website. pygeoapi supports AsyncAPI version 3.0.0. + +AsyncAPI is an optional capability in pygeoapi. 
To enable AsyncAPI, the following steps are required: + +- defining a ``pubsub`` section in configuration (see :ref:`configuration` and :ref:`brokers` for more information) +- generating an AsyncAPI document +- setting the ``PYGEOAPI_ASYNCAPI`` environment variable + +Creating the AsyncAPI document +^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ + +The AsyncAPI document is a YAML or JSON configuration which is generated from the pygeoapi configuration, and describes the server information, channels and the message payloads structures. + +To generate the AsyncAPI document, run the following: + +.. code-block:: bash + + pygeoapi asyncapi generate /path/to/my-pygeoapi-config.yml + +This will dump the AsyncAPI document as YAML to your system's ``stdout``. To save to a file on disk, run: + +.. code-block:: bash + + pygeoapi asyncapi generate /path/to/my-pygeoapi-config.yml --output-file /path/to/my-pygeoapi-asyncapi.yml + +To generate the AsyncAPI document as JSON, run: + +.. code-block:: bash + + pygeoapi asyncapi generate /path/to/my-pygeoapi-config.yml --format json --output-file /path/to/my-pygeoapi-asyncapi.json + +.. note:: + Generate as YAML or JSON? If your AsyncAPI YAML definition is slow to render as JSON, + saving as JSON to disk will help with performance at run-time. + +.. note:: + The AsyncAPI document provides detailed information on query parameters, and dataset + property names and their data types. Whenever you make changes to your pygeoapi configuration, + always refresh the accompanying AsyncAPI document. + +Validating the AsyncAPI document +-------------------------------- + +To ensure your AsyncAPI document is valid, pygeoapi provides a validation +utility that can be run as follows: + +.. code-block:: bash + + pygeoapi asyncapi validate /path/to/my-pygeoapi-asyncapi.yml + +.. _brokers: + +Brokers +------- + +The following protocols are supported: + +.. note:: + + Pub/Sub client dependencies will vary based on the selected broker. 
``requirements-pubsub.txt`` contains all requirements for supported brokers, as a reference point. + + +MQTT +^^^^ + +Example directive: + +.. code-block:: yaml + + pubsub: + name: MQTT + broker: + url: mqtt://localhost:1883 + channel: messages/a/data # optional + hidden: false # default + +Kafka +^^^^^ + +Example directive: + +.. code-block:: yaml + + pubsub: + name: Kafka + broker: + url: tcp://localhost:9092 + channel: messages-a-data + # if using authentication: + # sasl_mechanism: PLAIN # default PLAIN + # sasl_security_protocol: SASL_PLAINTEXT # default SASL_PLAINTEXT + hidden: true # default false + +HTTP +^^^^ + +Example directive: + +.. code-block:: yaml + + pubsub: + name: HTTP + broker: + url: https://ntfy.sh + channel: messages-a-data # optional + hidden: true # default false + +Additional information +---------------------- + +.. note:: + + For any Pub/Sub endpoints requiring authentication, encode the ``url`` value as follows: + + * ``mqtt://username:password@localhost:1883`` + * ``https://username:password@localhost`` + * ``tcp://username:password@localhost:9092`` + + As with any section of the pygeoapi configuration, environment variables may be used as needed, for example + to set username/password information in a URL. If ``pubsub.broker.url`` contains authentication, and + ``pubsub.broker.hidden`` is ``false``, the authentication information will be stripped from the URL + before displaying it on the landing page. + +.. note:: + + If a ``channel`` is defined, it is used as a prefix to the relevant OGC API endpoint used. + + If a ``channel`` is not defined, only the relevant OGC API endpoint is used. + + +.. _`OGC API Publish-Subscribe Workflow - Part 1: Core`: https://docs.ogc.org/DRAFTS/25-030.html +.. 
_`AsyncAPI`: https://www.asyncapi.com diff --git a/locale/en/LC_MESSAGES/messages.po b/locale/en/LC_MESSAGES/messages.po index 17a2698e9..34ad8c485 100644 --- a/locale/en/LC_MESSAGES/messages.po +++ b/locale/en/LC_MESSAGES/messages.po @@ -736,3 +736,21 @@ msgstr "" msgid "Instances" msgstr "" + +msgid "Pub/Sub Notifications" +msgstr "" + +msgid "Pub/Sub broker" +msgstr "" + +msgid "Subscribe to notifications from this service" +msgstr "" + +msgid "AsyncAPI Definition" +msgstr "" + +msgid "The AsyncAPI document as HTML" +msgstr "" + +msgid "The AsyncAPI document as JSON" +msgstr "" diff --git a/pygeoapi-config_indoorFeature.yml b/pygeoapi-config_indoorFeature.yml new file mode 100644 index 000000000..83ab14a49 --- /dev/null +++ b/pygeoapi-config_indoorFeature.yml @@ -0,0 +1,87 @@ +server: + bind: + host: 0.0.0.0 + port: 5000 + url: http://localhost:5000 + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + language: en-US + cors: true + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' + limits: + default_items: 20 + max_items: 50 + pretty_print: true + # templates: + # path: /home/csm/venv/IndoorGML_API/pygeoapi/templates + # static: /home/csm/venv/IndoorGML_API/pygeoapi/static/folder # css/js/img + + +logging: + level: DEBUG + #logfile: /pygeoapi/log.py # Or a path on your MacBook like ./pygeoapi.log + +metadata: + identification: + title: IndoorFeatures API Demonstration + description: Developing IndoorGML and IndoorJSON support for OGC API Standards. + keywords: + - IndoorGML + - PNU + - AIST + - OGC + terms_of_service: https://creativecommons.org/licenses/by/4.0/ # Add this line! 
+ url: http://localhost:5000 + provider: + name: PNU STEMLab and AIST IPRI + url: https://github.com/STEMLab + license: + name: MIT + url: https://opensource.org/licenses/MIT + contact: + name: Taehoon KIM + email: kim.taehoon@aist.go.jp + url: https://github.com/STEMLab/IndoorGML_API + +resources: + pusan_national_university: + type: collection + itemType: indoorfeature + title: + en: Open IndoorFeature sample data + fr: Ouvrir les données d'exemple IndoorFeature + description: + en: Sample IndoorFeature data from Pusan National University + fr: Données d'échantillon IndoorFeature de l'Université Nationale de Pusan + keywords: + en: + - indoorGML + - pnu-201 + fr: + - indoorGML + - pnu-201 + links: + - type: text/html + rel: canonical + title: information + href: https://github.com/STEMLab/IndoorGML_API + hreflang: en-CA + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + providers: + - type: indoorfeature + name: CSV + data: data/sample/sample_indoor.json + id_field: id + geometry: + x_field: long + y_field: lat + + hello-world: + type: process + processor: + name: HelloWorld diff --git a/pygeoapi/__init__.py b/pygeoapi/__init__.py index f62d8c5a2..e906bc2c3 100644 --- a/pygeoapi/__init__.py +++ b/pygeoapi/__init__.py @@ -3,7 +3,7 @@ # Authors: Tom Kralidis # Ricardo Garcia Silva # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2023 Ricardo Garcia Silva # Copyright (c) 2025 Angelos Tzotsos # @@ -30,7 +30,7 @@ # # ================================================================= -__version__ = '0.23.dev0' +__version__ = '0.24.dev0' import click try: @@ -38,6 +38,8 @@ from importlib.metadata import entry_points except ImportError: from importlib_metadata import entry_points + +from pygeoapi.asyncapi import asyncapi from pygeoapi.config import config from pygeoapi.openapi import openapi @@ -110,3 +112,4 @@ def serve(ctx, server): cli.add_command(config) cli.add_command(openapi) 
+cli.add_command(asyncapi) diff --git a/pygeoapi/api/__init__.py b/pygeoapi/api/__init__.py index 2a7067b04..815567798 100644 --- a/pygeoapi/api/__init__.py +++ b/pygeoapi/api/__init__.py @@ -7,8 +7,8 @@ # Colin Blackburn # Ricardo Garcia Silva # -# Copyright (c) 2025 Tom Kralidis -# Copyright (c) 2025 Francesco Bartoli +# Copyright (c) 2026 Tom Kralidis +# Copyright (c) 2026 Francesco Bartoli # Copyright (c) 2022 John A Stevenson and Colin Blackburn # Copyright (c) 2023 Ricardo Garcia Silva # @@ -40,7 +40,7 @@ Returns content from plugins and sets responses. """ -from collections import ChainMap, OrderedDict +from collections import ChainMap from copy import deepcopy from datetime import datetime from functools import partial @@ -56,60 +56,19 @@ import pytz from pygeoapi import __version__, l10n -from pygeoapi.crs import DEFAULT_STORAGE_CRS, get_supported_crs_list +from pygeoapi.api.collection import gen_collection, OGC_RELTYPES_BASE +from pygeoapi.formats import FORMAT_TYPES, F_GZIP, F_HTML, F_JSON, F_JSONLD from pygeoapi.linked_data import jsonldify, jsonldify_collection from pygeoapi.log import setup_logger from pygeoapi.plugin import load_plugin from pygeoapi.process.manager.base import get_manager -from pygeoapi.provider.base import ( - ProviderConnectionError, ProviderGenericError, ProviderTypeError) - -# --- DATABASE IMPORTS --- -import functools - -# --- ENSURE THESE HELPER FUNCTIONS EXIST BEFORE "class API" --- -# (If they are missing, paste them in. If they exist, leave them alone.) 
- -def gzip(f): - """ - Decorator to gzip response - """ - @functools.wraps(f) - def view_func(*args, **kwargs): - # Unpack all 3 return values - headers, status, content = f(*args, **kwargs) - - # Only pass headers and content to apply_gzip - content = apply_gzip(headers, content) - - # Return all 3 values - return headers, status, content - return view_func - -def pre_process(f): - """ - Decorator to pre-process request - """ - @functools.wraps(f) - def view_func(*args, **kwargs): - return f(*args, **kwargs) - return view_func - -def jsonldify(f): - """ - Decorator to inject JSON-LD into response - """ - @functools.wraps(f) - def view_func(*args, **kwargs): - return f(*args, **kwargs) - return view_func +from pygeoapi.provider import filter_providers_by_type, get_provider_by_type +from pygeoapi.provider.base import ProviderGenericError, ProviderTypeError from pygeoapi.util import ( - TEMPLATESDIR, UrlPrefetcher, dategetter, - filter_dict_by_key_value, filter_providers_by_type, get_api_rules, - get_base_url, get_provider_by_type, get_provider_default, get_typed_value, - render_j2_template, to_json, get_choice_from_headers, get_from_headers, - get_dataset_formatters + TEMPLATESDIR, UrlPrefetcher, filter_dict_by_key_value, get_api_rules, + get_base_url, get_typed_value, render_j2_template, to_json, + get_choice_from_headers, get_from_headers ) LOGGER = logging.getLogger(__name__) @@ -121,26 +80,6 @@ def view_func(*args, **kwargs): } CHARSET = ['utf-8'] -F_JSON = 'json' -F_COVERAGEJSON = 'json' -F_HTML = 'html' -F_JSONLD = 'jsonld' -F_GZIP = 'gzip' -F_PNG = 'png' -F_JPEG = 'jpeg' -F_MVT = 'mvt' -F_NETCDF = 'NetCDF' - -#: Formats allowed for ?f= requests (order matters for complex MIME types) -FORMAT_TYPES = OrderedDict(( - (F_HTML, 'text/html'), - (F_JSONLD, 'application/ld+json'), - (F_JSON, 'application/json'), - (F_PNG, 'image/png'), - (F_JPEG, 'image/jpeg'), - (F_MVT, 'application/vnd.mapbox-vector-tile'), - (F_NETCDF, 'application/x-netcdf'), -)) #: Locale used 
for system responses (e.g. exceptions) SYSTEM_LOCALE = l10n.Locale('en', 'US') @@ -154,8 +93,6 @@ def view_func(*args, **kwargs): 'http://www.opengis.net/spec/ogcapi-common-1/1.0/conf/oas30' ] -OGC_RELTYPES_BASE = 'http://www.opengis.net/def/rel/ogc/1.0' - def all_apis() -> dict: """ @@ -167,7 +104,7 @@ def all_apis() -> dict: """ from . import (coverages, environmental_data_retrieval, itemtypes, maps, - processes, tiles, stac, indoorgml) + processes, pubsub, tiles, stac, indoorgml) return { 'coverage': coverages, @@ -175,6 +112,7 @@ def all_apis() -> dict: 'itemtypes': itemtypes, 'map': maps, 'process': processes, + 'pubsub': pubsub, 'tile': tiles, 'stac': stac, 'indoorgml': indoorgml @@ -550,7 +488,7 @@ def get_response_headers(self, force_lang: l10n.Locale | None = None, if F_GZIP in FORMAT_TYPES: if force_encoding: headers['Content-Encoding'] = force_encoding - elif F_GZIP in get_from_headers(self._headers, 'accept-encoding'): + elif F_GZIP in get_from_headers(self._headers, 'accept-encoding'): # noqa headers['Content-Encoding'] = F_GZIP return headers @@ -572,21 +510,25 @@ def get_request_headers(self, headers: dict) -> dict: class API: """API object""" - def __init__(self, config: dict, openapi: dict) -> Self | None: + def __init__(self, config: dict, openapi: dict, + asyncapi: dict = {}) -> Self | None: """ constructor :param config: configuration dict :param openapi: openapi dict + :param asyncapi: asyncapi dict :returns: `pygeoapi.API` instance """ self.config = config self.openapi = openapi + self.asyncapi = asyncapi self.api_headers = get_api_rules(self.config).response_headers self.base_url = get_base_url(self.config) self.prefetcher = UrlPrefetcher() + self.pubsub_client = None CHARSET[0] = config['server'].get('encoding', 'utf-8') if config['server'].get('gzip'): @@ -614,6 +556,10 @@ def __init__(self, config: dict, openapi: dict) -> Self | None: self.manager = get_manager(self.config) LOGGER.info('Process manager plugin loaded') + if 
self.config.get('pubsub') is not None: + LOGGER.debug('Loading PubSub client') + self.pubsub_client = load_plugin('pubsub', self.config['pubsub']) + def get_exception(self, status: int, headers: dict, format_: str | None, code: str, description: str) -> Tuple[dict, int, str]: """ @@ -629,6 +575,7 @@ def get_exception(self, status: int, headers: dict, format_: str | None, """ exception_info = sys.exc_info() + LOGGER.error( description, exc_info=exception_info if exception_info[0] is not None else None @@ -750,27 +697,54 @@ def landing_page(api: API, 'title': l10n.translate('Collections', request.locale), 'href': api.get_collections_url() }, { - 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/processes', + 'rel': f'{OGC_RELTYPES_BASE}/processes', 'type': FORMAT_TYPES[F_JSON], 'title': l10n.translate('Processes', request.locale), 'href': f"{api.base_url}/processes" }, { - 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/job-list', + 'rel': f'{OGC_RELTYPES_BASE}/job-list', 'type': FORMAT_TYPES[F_JSON], 'title': l10n.translate('Jobs', request.locale), 'href': f"{api.base_url}/jobs" }, { - 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/tiling-schemes', + 'rel': f'{OGC_RELTYPES_BASE}/tiling-schemes', 'type': FORMAT_TYPES[F_JSON], 'title': l10n.translate('The list of supported tiling schemes as JSON', request.locale), # noqa 'href': f"{api.base_url}/TileMatrixSets?f=json" }, { - 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/tiling-schemes', + 'rel': f'{OGC_RELTYPES_BASE}/tiling-schemes', 'type': FORMAT_TYPES[F_HTML], 'title': l10n.translate('The list of supported tiling schemes as HTML', request.locale), # noqa 'href': f"{api.base_url}/TileMatrixSets?f=html" }] + if api.pubsub_client is not None and not api.pubsub_client.hidden: + LOGGER.debug('Adding PubSub broker link') + pubsub_link = { + 'rel': 'hub', + 'type': 'application/json', + 'title': l10n.translate('Pub/Sub broker', request.locale), + 'href': api.pubsub_client.broker_safe_url + } + if api.pubsub_client.channel is 
not None: + pubsub_link['channel'] = api.pubsub_client.channel + + fcm['links'].append(pubsub_link) + + if api.asyncapi: + fcm['links'].append({ + 'rel': 'service-doc', + 'type': 'text/html', + 'title': l10n.translate('The AsyncAPI definition as HTML', request.locale), # noqa + 'href': f'{api.base_url}/asyncapi?f=html' + }) + fcm['links'].append({ + 'rel': 'service-desc', + 'type': 'application/asyncapi+json', + 'title': l10n.translate('The AsyncAPI definition as JSON', request.locale), # noqa + 'href': f'{api.base_url}/asyncapi?f=json' + }) + headers = request.get_response_headers(**api.api_headers) if request.format == F_HTML: # render @@ -787,6 +761,14 @@ def landing_page(api: API, 'tile'): fcm['tile'] = True + if api.pubsub_client is not None and not api.pubsub_client.hidden: + fcm['pubsub'] = { + 'name': api.pubsub_client.name, + 'url': api.pubsub_client.broker_safe_url, + 'channel': api.pubsub_client.channel, + 'asyncapi': api.asyncapi + } + content = render_j2_template( api.tpl_config, api.config['server']['templates'], 'landing_page.html', fcm, request.locale) @@ -835,6 +817,41 @@ def openapi_(api: API, request: APIRequest) -> Tuple[dict, int, str]: return headers, HTTPStatus.OK, api.openapi +def asyncapi_(api: API, request: APIRequest) -> Tuple[dict, int, str]: + """ + Provide AsyncAPI document + + :param request: A request object + + :returns: tuple of headers, status code, content + """ + + headers = request.get_response_headers(**api.api_headers) + + if not api.asyncapi: + msg = 'AsyncAPI not supported/configured' + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, headers, request.format, + 'NoApplicableCode', msg) + + if request.format == F_HTML: + template = 'asyncapi.html' + + path = f'{api.base_url}/asyncapi' + data = { + 'asyncapi-document-path': path + } + content = render_j2_template( + api.tpl_config, api.config['server']['templates'], template, data, + request.locale) + + return headers, HTTPStatus.OK, content + + headers['Content-Type'] 
= 'application/asyncapi+json' + + return headers, HTTPStatus.OK, to_json(api.asyncapi, api.pretty_print) + + def conformance(api: API, request: APIRequest) -> Tuple[dict, int, str]: """ Provide conformance definition @@ -846,7 +863,7 @@ def conformance(api: API, request: APIRequest) -> Tuple[dict, int, str]: apis_dict = all_apis() - conformance_list = CONFORMANCE_CLASSES + conformance_list = list(CONFORMANCE_CLASSES) for key, value in api.config['resources'].items(): if value['type'] == 'process': @@ -864,6 +881,9 @@ def conformance(api: API, request: APIRequest) -> Tuple[dict, int, str]: conformance_list.extend( apis_dict['itemtypes'].CONFORMANCE_CLASSES_RECORDS) + if api.pubsub_client is not None: + conformance_list.extend(apis_dict['pubsub'].CONFORMANCE_CLASSES) + conformance = { 'conformsTo': sorted(list(set(conformance_list))) } @@ -879,10 +899,9 @@ def conformance(api: API, request: APIRequest) -> Tuple[dict, int, str]: return headers, HTTPStatus.OK, to_json(conformance, api.pretty_print) -@gzip -@pre_process @jsonldify -def describe_collections(self, request: APIRequest, dataset=None) -> Tuple[dict, int, str]: +def describe_collections(api: API, request: APIRequest, + dataset: str | None = None) -> Tuple[dict, int, str]: """ Provide collection metadata @@ -892,280 +911,96 @@ def describe_collections(self, request: APIRequest, dataset=None) -> Tuple[dict, :returns: tuple of headers, status code, content """ - headers = request.get_response_headers(**self.api_headers) + headers = request.get_response_headers(**api.api_headers) fcm = { 'collections': [], 'links': [] } - # --- PART 1: Standard YAML Config Loop --- - collections = filter_dict_by_key_value(self.config['resources'], 'type', 'collection') - - # [MODIFICATION 1] COMMENTED OUT: Early 404 Check - # We disable this so we can check the Database later if YAML fails. 
- # if all([dataset is not None, dataset not in collections.keys()]): - # msg = 'Collection not found' - # return self.get_exception( - # HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) + collections = filter_dict_by_key_value(api.config['resources'], + 'type', 'collection') + if all([dataset is not None, dataset not in collections.keys()]): + msg = 'Collection not found' + return api.get_exception( + HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) + if dataset is not None: - collections_dict = { - k: v for k, v in collections.items() if k == dataset - } + collections_dict = {dataset: api.config['resources'][dataset]} else: collections_dict = collections - LOGGER.debug('Creating collections from Config') - - # Track if we found the requested dataset in the YAML config - found_in_yaml = False - + LOGGER.debug('Creating collections') for k, v in collections_dict.items(): if v.get('visibility', 'default') == 'hidden': LOGGER.debug(f'Skipping hidden layer: {k}') continue - - collection_data = get_provider_default(v['providers']) - collection_data_type = collection_data['type'] - - collection_data_format = None - if 'format' in collection_data: - collection_data_format = collection_data['format'] - - is_vector_tile = (collection_data_type == 'tile' and - collection_data_format['name'] not - in [F_PNG, F_JPEG]) - - collection = { - 'id': k, - 'title': l10n.translate(v['title'], request.locale), - 'description': l10n.translate(v['description'], request.locale), - 'keywords': l10n.translate(v['keywords'], request.locale), - 'links': [] - } - - bbox = v['extents']['spatial']['bbox'] - if not isinstance(bbox[0], list): bbox = [bbox] - collection['extent'] = {'spatial': {'bbox': bbox}} - if 'crs' in v['extents']['spatial']: - collection['extent']['spatial']['crs'] = v['extents']['spatial']['crs'] - - t_ext = v.get('extents', {}).get('temporal', {}) - if t_ext: - begins = dategetter('begin', t_ext) - ends = dategetter('end', t_ext) - 
collection['extent']['temporal'] = {'interval': [[begins, ends]]} - if 'trs' in t_ext: - collection['extent']['temporal']['trs'] = t_ext['trs'] - - # Standard Links - for link in l10n.translate(v.get('links', []), request.locale): - lnk = {'type': link['type'], 'rel': link['rel'], 'href': l10n.translate(link['href'], request.locale)} - if 'hreflang' in link: - lnk['hreflang'] = l10n.translate(link['hreflang'], request.locale) - collection['links'].append(lnk) - - # Root Links - collection['links'].append({ - 'type': FORMAT_TYPES[F_JSON], 'rel': 'root', - 'title': l10n.translate('The landing page of this server as JSON', request.locale), - 'href': f"{self.base_url}?f={F_JSON}" - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_HTML], 'rel': 'root', - 'title': l10n.translate('The landing page of this server as HTML', request.locale), - 'href': f"{self.base_url}?f={F_HTML}" - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_JSON], 'rel': request.get_linkrel(F_JSON), - 'title': l10n.translate('This document as JSON', request.locale), - 'href': f'{self.get_collections_url()}/{k}?f={F_JSON}' - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_JSONLD], 'rel': request.get_linkrel(F_JSONLD), - 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), - 'href': f'{self.get_collections_url()}/{k}?f={F_JSONLD}' - }) - collection['links'].append({ - 'type': FORMAT_TYPES[F_HTML], 'rel': request.get_linkrel(F_HTML), - 'title': l10n.translate('This document as HTML', request.locale), - 'href': f'{self.get_collections_url()}/{k}?f={F_HTML}' - }) - - # Add Item Type Links - if is_vector_tile or collection_data_type in ['feature', 'record']: - collection['itemType'] = collection_data_type - collection['links'].append({ - 'type': 'application/geo+json', 'rel': 'items', - 'title': l10n.translate('Items as GeoJSON', request.locale), - 'href': f'{self.get_collections_url()}/{k}/items?f={F_JSON}' - }) - collection['links'].append({ - 'type': 
FORMAT_TYPES[F_HTML], 'rel': 'items', - 'title': l10n.translate('Items as HTML', request.locale), - 'href': f'{self.get_collections_url()}/{k}/items?f={F_HTML}' - }) - - # Add Queryables - collection['links'].append({ - 'type': 'application/schema+json', - 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/queryables', - 'title': l10n.translate('Queryables for this collection as JSON', request.locale), - 'href': f'{self.get_collections_url()}/{k}/queryables?f={F_JSON}' - }) - - if dataset is not None and k == dataset: - fcm = collection - found_in_yaml = True - break - - fcm['collections'].append(collection) - - - # --- [MODIFICATION 2] PART 2: Database Query --- - # Only run if we are listing ALL, or if we looked for one and missed it. - if not dataset or (dataset and not found_in_yaml): - LOGGER.debug("get collections in db") - # 1. Initialize Provider - from pygeoapi.provider.postgresql_indoordb import PostgresIndoorDB - provider = PostgresIndoorDB() try: - # 2. Fetch Data from DB - provider.connect() - db_collections_list = provider.get_collections_list() - - # 3. Filter for specific dataset (if requested) - if dataset: - filtered_list = [c for c in db_collections_list if c['id'] == dataset] - db_collections_list = filtered_list - - # 4. 
Loop through Provider Data and build Metadata - for item in db_collections_list: - - c_id = item['id'] - c_title = item.get('title', c_id) - c_item_type = item.get('itemType', 'feature') - - collection = { - 'id': c_id, - 'title': c_title, - 'description': f'IndoorGML data for {c_title}', - 'itemType': c_item_type, - 'keywords': [], # Empty list to satisfy legacy requirements - 'links': [] - } - - # Manual Extents (Global Default) - - collection['links'].append({ - 'type': 'application/schema+json', - 'rel': 'http://www.opengis.net/def/rel/ogc/1.0/queryables', - 'title': l10n.translate('Queryables for this collection as JSON', request.locale), - 'href': f'{self.get_collections_url()}/{c_id}/queryables?f={F_JSON}' - }) - - LOGGER.debug(f'Adding links for DB collection {c_id}') - - # --- Link Generation (Standard) --- - collection['links'].append({ - 'type': FORMAT_TYPES[F_JSON], 'rel': 'root', - 'title': l10n.translate('The landing page of this server as JSON', request.locale), - 'href': f"{self.base_url}?f={F_JSON}" - }) - - collection['links'].append({ - 'type': 'application/json', 'rel': 'self', - 'title': l10n.translate('This document as JSON', request.locale), - 'href': f'{self.get_collections_url()}/{c_id}?f={F_JSON}' - }) - - collection['links'].append({ - 'type': 'application/geo+json', 'rel': 'items', - 'title': l10n.translate('Items as GeoJSON', request.locale), - 'href': f'{self.get_collections_url()}/{c_id}/items?f={F_JSON}' - }) - - collection['links'].append({ - 'type': FORMAT_TYPES[F_HTML], 'rel': 'items', - 'title': l10n.translate('Items as HTML', request.locale), - 'href': f'{self.get_collections_url()}/{c_id}/items?f={F_HTML}' - }) - - # 5. 
Append to main response object - if dataset is not None and c_id == dataset: - fcm = collection - found_in_yaml = True # Mark as found in DB - break - - if isinstance(fcm, dict) and 'collections' in fcm: - fcm['collections'].append(collection) - - except Exception as e: - LOGGER.error(f"Provider Error: {e}") - # We log errors but try not to crash the whole list if one DB call fails - finally: - provider.disconnect() - - # --- [MODIFICATION 3] Final 404 Check --- - # If we checked BOTH Config and DB and still found nothing: - if dataset is not None and not found_in_yaml: - msg = 'Collection not found' - return self.get_exception( - HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) - + fcm['collections'].append( + gen_collection(api, request, k, request.locale)) + except Exception as err: + LOGGER.warning(f'Error generating collection {k}: {err}') + if dataset is None: + LOGGER.debug('Skipping failed dataset') + else: + return api.get_exception( + HTTPStatus.INTERNAL_SERVER_ERROR, headers, request.format, + 'NoApplicableCode', 'Error generating collection') - # --- PART 3: Final Response Generation --- + if dataset is not None: + fcm = fcm['collections'][0] if dataset is None: + # TODO: translate fcm['links'].append({ 'type': FORMAT_TYPES[F_JSON], 'rel': request.get_linkrel(F_JSON), 'title': l10n.translate('This document as JSON', request.locale), # noqa - 'href': f'{self.get_collections_url()}?f={F_JSON}' + 'href': f'{api.get_collections_url()}?f={F_JSON}' }) fcm['links'].append({ 'type': FORMAT_TYPES[F_JSONLD], 'rel': request.get_linkrel(F_JSONLD), 'title': l10n.translate('This document as RDF (JSON-LD)', request.locale), # noqa - 'href': f'{self.get_collections_url()}?f={F_JSONLD}' + 'href': f'{api.get_collections_url()}?f={F_JSONLD}' }) fcm['links'].append({ 'type': FORMAT_TYPES[F_HTML], 'rel': request.get_linkrel(F_HTML), 'title': l10n.translate('This document as HTML', request.locale), # noqa - 'href': f'{self.get_collections_url()}?f={F_HTML}' + 
'href': f'{api.get_collections_url()}?f={F_HTML}' }) if request.format == F_HTML: # render - fcm['base_url'] = self.base_url - fcm['collections_path'] = self.get_collections_url() + fcm['base_url'] = api.base_url + fcm['collections_path'] = api.get_collections_url() if dataset is not None: - tpl_config = self.get_dataset_templates(dataset) - content = render_j2_template(self.tpl_config, tpl_config, + tpl_config = api.get_dataset_templates(dataset) + content = render_j2_template(api.tpl_config, tpl_config, 'collections/collection.html', fcm, request.locale) else: content = render_j2_template( - self.tpl_config, self.config['server']['templates'], + api.tpl_config, api.config['server']['templates'], 'collections/index.html', fcm, request.locale) return headers, HTTPStatus.OK, content if request.format == F_JSONLD: - jsonld = self.fcmld.copy() + jsonld = api.fcmld.copy() if dataset is not None: - jsonld['dataset'] = jsonldify_collection(self, fcm, + jsonld['dataset'] = jsonldify_collection(api, fcm, request.locale) else: jsonld['dataset'] = [ - jsonldify_collection(self, c, request.locale) + jsonldify_collection(api, c, request.locale) for c in fcm.get('collections', []) ] - return headers, HTTPStatus.OK, to_json(jsonld, self.pretty_print) + return headers, HTTPStatus.OK, to_json(jsonld, api.pretty_print) - return headers, HTTPStatus.OK, to_json(fcm, self.pretty_print) + return headers, HTTPStatus.OK, to_json(fcm, api.pretty_print) def get_collection_schema(api: API, request: Union[APIRequest, Any], dataset: str) -> Tuple[dict, int, str]: @@ -1198,9 +1033,14 @@ def get_collection_schema(api: API, request: Union[APIRequest, Any], p = load_plugin('provider', get_provider_by_type( api.config['resources'][dataset]['providers'], 'coverage')) # noqa except ProviderTypeError: - LOGGER.debug('Loading record provider') - p = load_plugin('provider', get_provider_by_type( - api.config['resources'][dataset]['providers'], 'record')) + try: + LOGGER.debug('Loading record provider') 
+ p = load_plugin('provider', get_provider_by_type( + api.config['resources'][dataset]['providers'], 'record')) + except ProviderTypeError: + LOGGER.debug('Loading edr provider') + p = load_plugin('provider', get_provider_by_type( + api.config['resources'][dataset]['providers'], 'edr')) except ProviderGenericError as err: LOGGER.error(err) return api.get_exception( diff --git a/pygeoapi/api/admin.py b/pygeoapi/api/admin.py index bf485515b..a971e1f25 100644 --- a/pygeoapi/api/admin.py +++ b/pygeoapi/api/admin.py @@ -3,7 +3,7 @@ # Authors: Tom Kralidis # Benjamin Webb # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2023 Benjamin Webb # # Permission is hereby granted, free of charge, to any person @@ -39,8 +39,9 @@ from jsonpatch import make_patch from jsonschema.exceptions import ValidationError -from pygeoapi.api import API, APIRequest, F_HTML +from pygeoapi.api import API, APIRequest from pygeoapi.config import get_config, validate_config +from pygeoapi.formats import F_HTML from pygeoapi.openapi import get_oas from pygeoapi.util import to_json, render_j2_template, yaml_dump diff --git a/pygeoapi/api/collection.py b/pygeoapi/api/collection.py new file mode 100644 index 000000000..ce6f3bbda --- /dev/null +++ b/pygeoapi/api/collection.py @@ -0,0 +1,486 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# Francesco Bartoli +# Sander Schaminee +# John A Stevenson +# Colin Blackburn +# Ricardo Garcia Silva +# +# Copyright (c) 2026 Tom Kralidis +# Copyright (c) 2026 Francesco Bartoli +# Copyright (c) 2022 John A Stevenson and Colin Blackburn +# Copyright (c) 2023 Ricardo Garcia Silva +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, 
sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + +from copy import deepcopy +import logging + +from pygeoapi import l10n +from pygeoapi.formats import (F_JSON, F_JSONLD, F_HTML, F_JPEG, + F_PNG, FORMAT_TYPES) +from pygeoapi.crs import DEFAULT_STORAGE_CRS, get_supported_crs_list +from pygeoapi.plugin import load_plugin +from pygeoapi.provider import get_provider_by_type, get_provider_default +from pygeoapi.provider.base import ProviderConnectionError, ProviderTypeError +from pygeoapi.util import dategetter, get_dataset_formatters + +LOGGER = logging.getLogger(__name__) + +OGC_RELTYPES_BASE = 'http://www.opengis.net/def/rel/ogc/1.0' + + +def gen_collection(api, request, dataset: str, + locale_: str) -> dict: + """ + Generate OGC API Collection description + + :param api: `APIRequest` object + :param dataset: `str` of dataset name + :param locale_: `str` of requested locale + + :returns: `dict` of OGC API Collection description + """ + + config = api.config['resources'][dataset] + + data = { + 'id': dataset, + 'links': [] + } + + collection_data = get_provider_default(config['providers']) + collection_data_type = collection_data['type'] + + collection_data_format = None + + 
if 'format' in collection_data: + collection_data_format = collection_data['format'] + + is_vector_tile = (collection_data_type == 'tile' and + collection_data_format['name'] not + in [F_PNG, F_JPEG]) + + data.update({ + 'title': l10n.translate(config['title'], locale_), + 'description': l10n.translate(config['description'], locale_), + 'keywords': l10n.translate(config['keywords'], locale_), + }) + + extents = deepcopy(config['extents']) + + bbox = extents['spatial']['bbox'] + LOGGER.debug('Setting spatial extents from configuration') + # The output should be an array of bbox, so if the user only + # provided a single bbox, wrap it in a array. + if not isinstance(bbox[0], list): + bbox = [bbox] + + data['extent'] = { + 'spatial': { + 'bbox': bbox + } + } + + if 'crs' in extents['spatial']: + data['extent']['spatial']['crs'] = extents['spatial']['crs'] + + t_ext = extents.get('temporal', {}) + if t_ext: + LOGGER.debug('Setting temporal extents from configuration') + begins = dategetter('begin', t_ext) + ends = dategetter('end', t_ext) + data['extent']['temporal'] = { + 'interval': [[begins, ends]] + } + if 'trs' in t_ext: + data['extent']['temporal']['trs'] = t_ext['trs'] + if 'resolution' in t_ext: + data['extent']['temporal']['grid'] = { + 'resolution': t_ext['resolution'] + } + if 'default' in t_ext: + data['extent']['temporal']['default'] = t_ext['default'] + + _ = extents.pop('spatial', None) + _ = extents.pop('temporal', None) + + for ek, ev in extents.items(): + LOGGER.debug(f'Adding extent {ek}') + data['extent'][ek] = { + 'definition': ev['url'], + 'interval': [ev['range']] + } + if 'units' in ev: + data['extent'][ek]['unit'] = ev['units'] + + if 'values' in ev: + data['extent'][ek]['grid'] = { + 'cellsCount': len(ev['values']), + 'coordinates': ev['values'] + } + + LOGGER.debug('Processing configured collection links') + for link in l10n.translate(config.get('links', []), locale_): + lnk = { + 'type': link['type'], + 'rel': link['rel'], + 'title': 
l10n.translate(link['title'], locale_), + 'href': l10n.translate(link['href'], locale_), + } + if 'hreflang' in link: + lnk['hreflang'] = l10n.translate( + link['hreflang'], locale_) + content_length = link.get('length', 0) + + if lnk['rel'] == 'enclosure' and content_length == 0: + # Issue HEAD request for enclosure links without length + lnk_headers = api.prefetcher.get_headers(lnk['href']) + content_length = int(lnk_headers.get('content-length', 0)) + content_type = lnk_headers.get('content-type', lnk['type']) + if content_length == 0: + # Skip this (broken) link + LOGGER.debug(f"Enclosure {lnk['href']} is invalid") + continue + if content_type != lnk['type']: + # Update content type if different from specified + lnk['type'] = content_type + LOGGER.debug( + f"Fixed media type for enclosure {lnk['href']}") + + if content_length > 0: + lnk['length'] = content_length + + data['links'].append(lnk) + + # TODO: provide translations + LOGGER.debug('Adding JSON and HTML link relations') + data['links'].extend([{ + 'type': FORMAT_TYPES[F_JSON], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as JSON', locale_), # noqa + 'href': f"{api.base_url}?f={F_JSON}" + }, { + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'root', + 'title': l10n.translate('The landing page of this server as HTML', locale_), # noqa + 'href': f"{api.base_url}?f={F_HTML}" + }, { + 'type': FORMAT_TYPES[F_JSON], + 'rel': request.get_linkrel(F_JSON), + 'title': l10n.translate('This document as JSON', locale_), + 'href': f'{api.get_collections_url()}/{dataset}?f={F_JSON}' + }, { + 'type': FORMAT_TYPES[F_JSONLD], + 'rel': request.get_linkrel(F_JSONLD), + 'title': l10n.translate('This document as RDF (JSON-LD)', locale_), + 'href': f'{api.get_collections_url()}/{dataset}?f={F_JSONLD}' + }, { + 'type': FORMAT_TYPES[F_HTML], + 'rel': request.get_linkrel(F_HTML), + 'title': l10n.translate('This document as HTML', locale_), + 'href': f'{api.get_collections_url()}/{dataset}?f={F_HTML}' + }]) 
+ + if collection_data_type == 'record': + data['links'].extend([{ + 'type': FORMAT_TYPES[F_JSON], + 'rel': f'{OGC_RELTYPES_BASE}/ogc-catalog', + 'title': l10n.translate('Record catalogue as JSON', locale_), + 'href': f'{api.get_collections_url()}/{dataset}?f={F_JSON}' + }, { + 'type': FORMAT_TYPES[F_HTML], + 'rel': f'{OGC_RELTYPES_BASE}/ogc-catalog', + 'title': l10n.translate('Record catalogue as HTML', locale_), + 'href': f'{api.get_collections_url()}/{dataset}?f={F_HTML}' + }]) + + if collection_data_type in ['feature', 'coverage', 'record']: + data['links'].extend([{ + 'type': 'application/schema+json', + 'rel': f'{OGC_RELTYPES_BASE}/schema', + 'title': l10n.translate('Schema of collection in JSON', locale_), + 'href': f'{api.get_collections_url()}/{dataset}/schema?f={F_JSON}' + }, { + 'type': FORMAT_TYPES[F_HTML], + 'rel': f'{OGC_RELTYPES_BASE}/schema', + 'title': l10n.translate('Schema of collection in HTML', locale_), + 'href': f'{api.get_collections_url()}/{dataset}/schema?f={F_HTML}' + }]) + + if is_vector_tile or collection_data_type in ['feature', 'record']: + # TODO: translate + data['itemType'] = collection_data_type + LOGGER.debug('Adding feature/record based links') + data['links'].extend([{ + 'type': 'application/schema+json', + 'rel': f'{OGC_RELTYPES_BASE}/queryables', + 'title': l10n.translate('Queryables for this collection as JSON', locale_), # noqa + 'href': f'{api.get_collections_url()}/{dataset}/queryables?f={F_JSON}' # noqa + }, { + 'type': FORMAT_TYPES[F_HTML], + 'rel': f'{OGC_RELTYPES_BASE}/queryables', + 'title': l10n.translate('Queryables for this collection as HTML', locale_), # noqa + 'href': f'{api.get_collections_url()}/{dataset}/queryables?f={F_HTML}' # noqa + }, { + 'type': 'application/geo+json', + 'rel': 'items', + 'title': l10n.translate('Items as GeoJSON', locale_), + 'href': f'{api.get_collections_url()}/{dataset}/items?f={F_JSON}' + }, { + 'type': FORMAT_TYPES[F_JSONLD], + 'rel': 'items', + 'title': l10n.translate('Items as 
RDF (GeoJSON-LD)', locale_), + 'href': f'{api.get_collections_url()}/{dataset}/items?f={F_JSONLD}' + }, { + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'items', + 'title': l10n.translate('Items as HTML', locale_), # noqa + 'href': f'{api.get_collections_url()}/{dataset}/items?f={F_HTML}' + }]) + + for key, value in get_dataset_formatters(config).items(): + data['links'].append({ + 'type': value.mimetype, + 'rel': 'items', + 'title': l10n.translate(f'Items as {key}', locale_), # noqa + 'href': f'{api.get_collections_url()}/{dataset}/items?f={value.f}' # noqa + }) + + # OAPIF Part 2 - list supported CRSs and StorageCRS + if collection_data_type in ['edr', 'feature']: + data['crs'] = get_supported_crs_list(collection_data) + data['storageCrs'] = collection_data.get('storage_crs', DEFAULT_STORAGE_CRS) # noqa + if 'storage_crs_coordinate_epoch' in collection_data: + data['storageCrsCoordinateEpoch'] = collection_data.get('storage_crs_coordinate_epoch') # noqa + + elif collection_data_type == 'coverage': + LOGGER.debug('Adding coverage based links') + data['links'].append({ + 'type': 'application/prs.coverage+json', + 'rel': f'{OGC_RELTYPES_BASE}/coverage', + 'title': l10n.translate('Coverage data', locale_), + 'href': f'{api.get_collections_url()}/{dataset}/coverage?f={F_JSON}' # noqa + }) + if collection_data_format is not None: + title_ = l10n.translate('Coverage data as', locale_) + title_ = f"{title_} {collection_data_format['name']}" + data['links'].append({ + 'type': collection_data_format['mimetype'], + 'rel': f'{OGC_RELTYPES_BASE}/coverage', + 'title': title_, + 'href': f"{api.get_collections_url()}/{dataset}/coverage?f={collection_data_format['name']}" # noqa + }) + if dataset is not None: + LOGGER.debug('Creating extended coverage metadata') + try: + provider_def = get_provider_by_type( + api.config['resources'][dataset]['providers'], + 'coverage') + p = load_plugin('provider', provider_def) + except ProviderConnectionError: + raise + except ProviderTypeError: + 
pass + else: + data['extent']['spatial']['grid'] = [{ + 'cellsCount': p._coverage_properties['width'], + 'resolution': p._coverage_properties['resx'] + }, { + 'cellsCount': p._coverage_properties['height'], + 'resolution': p._coverage_properties['resy'] + }] + if 'time_range' in p._coverage_properties: + data['extent']['temporal'] = { + 'interval': [p._coverage_properties['time_range']] + } + if 'restime' in p._coverage_properties: + data['extent']['temporal']['grid'] = { + 'resolution': p._coverage_properties['restime'] + } + if 'uad' in p._coverage_properties: + data['extent'].update(p._coverage_properties['uad']) + + elif collection_data_type == 'indoorfeature': + data['itemType'] = collection_data_type + data['links'].append({ + "href": f"{api.config['server']['url']}/collections/{dataset}?f=json", + "rel": "self", "type": "application/json", "title": "Metadata" + }) + data['links'].append({ + "href": f"{api.config['server']['url']}/collections/{dataset}/items?f=json", + "rel": "items", "type": "application/geo+json", "title": "IndoorGML Features" + }) + + try: + tile = get_provider_by_type(config['providers'], 'tile') + p = load_plugin('provider', tile) + except ProviderConnectionError: + raise + except ProviderTypeError: + tile = None + + if tile: + LOGGER.debug('Adding tile links') + data['links'].extend([{ + 'type': FORMAT_TYPES[F_JSON], + 'rel': f'{OGC_RELTYPES_BASE}/tilesets-{p.tile_type}', + 'title': l10n.translate('Tiles as JSON', locale_), + 'href': f'{api.get_collections_url()}/{dataset}/tiles?f={F_JSON}' + }, { + 'type': FORMAT_TYPES[F_HTML], + 'rel': f'{OGC_RELTYPES_BASE}/tilesets-{p.tile_type}', + 'title': l10n.translate('Tiles as HTML', locale_), + 'href': f'{api.get_collections_url()}/{dataset}/tiles?f={F_HTML}' + }]) + + try: + map_ = get_provider_by_type(config['providers'], 'map') + p = load_plugin('provider', map_) + except ProviderTypeError: + map_ = None + + if map_: + LOGGER.debug('Adding map links') + + map_mimetype = 
map_['format']['mimetype'] + map_format = map_['format']['name'] + + title_ = l10n.translate('Map as', locale_) + title_ = f'{title_} {map_format}' + + data['links'].append({ + 'type': map_mimetype, + 'rel': f'{OGC_RELTYPES_BASE}/map', + 'title': title_, + 'href': f'{api.get_collections_url()}/{dataset}/map?f={map_format}' + }) + + if p._fields: + schema_reltype = f'{OGC_RELTYPES_BASE}/schema', + schema_links = [s for s in data['links'] if + schema_reltype in s] + + if not schema_links: + title_ = l10n.translate('Schema of collection in JSON', locale_) # noqa + data['links'].append({ + 'type': 'application/schema+json', + 'rel': f'{OGC_RELTYPES_BASE}/schema', + 'title': title_, + 'href': f'{api.get_collections_url()}/{dataset}/schema?f=json' # noqa + }) + title_ = l10n.translate('Schema of collection in HTML', locale_) # noqa + data['links'].append({ + 'type': 'text/html', + 'rel': f'{OGC_RELTYPES_BASE}/schema', + 'title': title_, + 'href': f'{api.get_collections_url()}/{dataset}/schema?f=html' # noqa + }) + + try: + edr = get_provider_by_type(config['providers'], 'edr') + p = load_plugin('provider', edr) + except ProviderConnectionError: + raise + except ProviderTypeError: + edr = None + + if edr: + # TODO: translate + LOGGER.debug('Adding EDR links') + data['data_queries'] = {} + parameters = p.get_fields() + if parameters: + data['parameter_names'] = {} + for key, value in parameters.items(): + data['parameter_names'][key] = { + 'id': key, + 'type': 'Parameter', + 'name': value['title'], + 'observedProperty': { + 'label': { + 'id': key, + 'en': value['title'] + }, + }, + 'unit': { + 'label': { + 'en': value['title'] + }, + 'symbol': { + 'value': value['x-ogc-unit'], + 'type': 'http://www.opengis.net/def/uom/UCUM/' + } + } + } + + data['parameter_names'][key].update({ + 'description': value['description']} + if 'description' in value else {} + ) + + for qt in p.get_query_types(): + data_query = { + 'link': { + 'href': 
f'{api.get_collections_url()}/{dataset}/{qt}', + 'rel': 'data', + 'variables': { + 'query_type': qt + } + } + } + + if request.format is not None and request.format == 'json': + data_query['link']['type'] = 'application/vnd.cov+json' + + data['data_queries'][qt] = data_query + + title1 = l10n.translate('query for this collection as JSON', locale_) # noqa + title1 = f'{qt} {title1}' + title2 = l10n.translate('query for this collection as HTML', locale_) # noqa + title2 = f'{qt} {title2}' + + data['links'].extend([{ + 'type': 'application/json', + 'rel': 'data', + 'title': title1, + 'href': f'{api.get_collections_url()}/{dataset}/{qt}?f={F_JSON}' # noqa + }, { + 'type': FORMAT_TYPES[F_HTML], + 'rel': 'data', + 'title': title2, + 'href': f'{api.get_collections_url()}/{dataset}/{qt}?f={F_HTML}' # noqa + }]) + + for key, value in get_dataset_formatters(config).items(): + title3 = f'{qt} query for this collection as {key}' + data['links'].append({ + 'type': value.mimetype, + 'rel': 'data', + 'title': title3, + 'href': f'{api.get_collections_url()}/{dataset}/{qt}?f={value.f}' # noqa + }) + + return data diff --git a/pygeoapi/api/coverages.py b/pygeoapi/api/coverages.py index 38b66ef7c..67755407d 100644 --- a/pygeoapi/api/coverages.py +++ b/pygeoapi/api/coverages.py @@ -8,7 +8,7 @@ # Ricardo Garcia Silva # Bernhard Mallinger # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2025 Francesco Bartoli # Copyright (c) 2022 John A Stevenson and Colin Blackburn # Copyright (c) 2023 Ricardo Garcia Silva @@ -37,20 +37,21 @@ # # ================================================================= - +from copy import deepcopy import logging from http import HTTPStatus from typing import Tuple from pygeoapi import l10n +from pygeoapi.formats import F_JSON +from pygeoapi.openapi import get_oas_30_parameters from pygeoapi.plugin import load_plugin from pygeoapi.provider.base import ProviderGenericError, ProviderTypeError -from pygeoapi.util import ( - 
filter_dict_by_key_value, get_provider_by_type, to_json -) +from pygeoapi.provider import get_provider_by_type +from pygeoapi.util import filter_dict_by_key_value, to_json from . import ( - APIRequest, API, F_JSON, SYSTEM_LOCALE, validate_bbox, validate_datetime, + APIRequest, API, SYSTEM_LOCALE, validate_bbox, validate_datetime, validate_subset ) @@ -216,8 +217,8 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, for k, v in get_visible_collections(cfg).items(): try: - load_plugin('provider', get_provider_by_type( - collections[k]['providers'], 'coverage')) + p = load_plugin('provider', get_provider_by_type( + collections[k]['providers'], 'coverage')) except ProviderTypeError: LOGGER.debug('collection is not coverage based') continue @@ -226,6 +227,11 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, title = l10n.translate(v['title'], locale) description = l10n.translate(v['description'], locale) + parameters = get_oas_30_parameters(cfg, locale) + + coll_properties = deepcopy(parameters)['properties'] + coll_properties['schema']['items']['enum'] = list(p.fields.keys()) + paths[coverage_path] = { 'get': { 'summary': f'Get {title} coverage', @@ -236,7 +242,9 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, {'$ref': '#/components/parameters/lang'}, {'$ref': '#/components/parameters/f'}, {'$ref': '#/components/parameters/bbox'}, - {'$ref': '#/components/parameters/bbox-crs'} + {'$ref': '#/components/parameters/bbox-crs'}, + {'$ref': f"{OPENAPI_YAML['oacov']}#/components/parameters/subset"}, # noqa + coll_properties ], 'responses': { '200': {'$ref': f"{OPENAPI_YAML['oapif-1']}#/components/responses/Features"}, # noqa diff --git a/pygeoapi/api/environmental_data_retrieval.py b/pygeoapi/api/environmental_data_retrieval.py index 96eacb426..6f2ed36bb 100644 --- a/pygeoapi/api/environmental_data_retrieval.py +++ b/pygeoapi/api/environmental_data_retrieval.py @@ -49,19 +49,19 
@@ from pygeoapi import l10n from pygeoapi.api import evaluate_limit +from pygeoapi.formats import F_COVERAGEJSON, F_HTML, F_JSON, F_JSONLD from pygeoapi.formatter.base import FormatterSerializationError from pygeoapi.crs import (create_crs_transform_spec, set_content_crs_header) from pygeoapi.openapi import get_oas_30_parameters from pygeoapi.plugin import load_plugin, PLUGINS +from pygeoapi.provider import filter_providers_by_type, get_provider_by_type from pygeoapi.provider.base import ( ProviderGenericError, ProviderItemNotFoundError) -from pygeoapi.util import ( - filter_providers_by_type, get_dataset_formatters, get_provider_by_type, - get_typed_value, render_j2_template, to_json, filter_dict_by_key_value -) +from pygeoapi.util import (get_dataset_formatters, get_typed_value, + render_j2_template, to_json, + filter_dict_by_key_value) -from . import (APIRequest, API, F_COVERAGEJSON, F_HTML, F_JSON, F_JSONLD, - validate_datetime, validate_bbox) +from . import APIRequest, API, validate_datetime, validate_bbox LOGGER = logging.getLogger(__name__) @@ -113,13 +113,14 @@ def get_collection_edr_instances(api: API, request: APIRequest, if instance_id is not None: try: - instances = [p.get_instance(instance_id)] + if p.get_instance(instance_id): + instances = [instance_id] except ProviderItemNotFoundError: msg = 'Instance not found' return api.get_exception( HTTPStatus.NOT_FOUND, headers, request.format, 'NotFound', msg) else: - instances = p.instances() + instances = p.get_instances() for instance in instances: instance_dict = { @@ -149,13 +150,18 @@ def get_collection_edr_instances(api: API, request: APIRequest, for qt in p.get_query_types(): if qt == 'instances': continue + data_query = { 'link': { - 'href': f'{uri}/instances/{instance}/{qt}', + 'href': f'{uri}/instances/{instance}/{qt}?f={request.format}', # noqa 'rel': 'data', 'title': f'{qt} query' } } + + if request.format is not None and request.format == 'json': + data_query['link']['type'] = 
'application/vnd.cov+json' + instance_dict['data_queries'][qt] = data_query data['instances'].append(instance_dict) @@ -369,6 +375,15 @@ def get_collection_edr_query(api: API, request: APIRequest, within = request.params.get('within') within_units = request.params.get('within-units') + corridor_width = width_units = None + corridor_height = height_units = None + if query_type == 'corridor': + LOGGER.debug('Processing corridor width / height / units parameters') + corridor_width = request.params.get('corridor-width') + width_units = request.params.get('width-units') + corridor_height = request.params.get('corridor-height') + height_units = request.params.get('height-units') + LOGGER.debug('Processing z parameter') try: z = get_typed_value(request.params.get('z')) @@ -408,6 +423,10 @@ def get_collection_edr_query(api: API, request: APIRequest, bbox=bbox, within=within, within_units=within_units, + corridor_width=corridor_width, + width_units=width_units, + corridor_height=corridor_height, + height_units=height_units, limit=limit, location_id=location_id, crs_transform_spec=crs_transform_spec @@ -481,6 +500,7 @@ def get_collection_edr_query(api: API, request: APIRequest, headers['Content-Disposition'] = cd else: + headers['Content-Type'] = 'application/vnd.cov+json' content = to_json(data, api.pretty_print) return headers, HTTPStatus.OK, content diff --git a/pygeoapi/api/itemtypes.py b/pygeoapi/api/itemtypes.py index 20936f759..db0a6e6fb 100644 --- a/pygeoapi/api/itemtypes.py +++ b/pygeoapi/api/itemtypes.py @@ -49,26 +49,25 @@ from pygeoapi import l10n from pygeoapi.api import evaluate_limit +from pygeoapi.api.pubsub import publish_message from pygeoapi.crs import (DEFAULT_CRS, DEFAULT_STORAGE_CRS, create_crs_transform_spec, get_supported_crs_list, modify_pygeofilter, transform_bbox, set_content_crs_header) +from pygeoapi.formats import F_JSON, FORMAT_TYPES, F_HTML, F_JSONLD from pygeoapi.formatter.base import FormatterSerializationError from pygeoapi.linked_data 
import geojson2jsonld from pygeoapi.openapi import get_oas_30_parameters from pygeoapi.plugin import load_plugin, PLUGINS +from pygeoapi.provider import filter_providers_by_type, get_provider_by_type from pygeoapi.provider.base import ( - ProviderGenericError, ProviderTypeError, SchemaType) + ProviderGenericError, ProviderItemNotFoundError, + ProviderTypeError, SchemaType) -from pygeoapi.util import (filter_providers_by_type, to_json, - filter_dict_by_key_value, str2bool, - get_provider_by_type, render_j2_template, - get_dataset_formatters) +from pygeoapi.util import (to_json, filter_dict_by_key_value, str2bool, + render_j2_template, get_dataset_formatters) -from . import ( - APIRequest, API, SYSTEM_LOCALE, F_JSON, FORMAT_TYPES, F_HTML, F_JSONLD, - validate_bbox, validate_datetime -) +from . import APIRequest, API, SYSTEM_LOCALE, validate_bbox, validate_datetime LOGGER = logging.getLogger(__name__) @@ -750,6 +749,9 @@ def manage_collection_item( collections = filter_dict_by_key_value(api.config['resources'], 'type', 'collection') + http_status = HTTPStatus.OK + payload = None + if dataset not in collections.keys(): msg = 'Collection not found' return api.get_exception( @@ -795,7 +797,8 @@ def manage_collection_item( if action == 'create': LOGGER.debug('Creating item') try: - identifier = p.create(request.data) + payload = request.data + identifier = p.create(payload) except TypeError as err: msg = str(err) return api.get_exception( @@ -808,12 +811,13 @@ def manage_collection_item( headers['Location'] = f'{api.get_collections_url()}/{dataset}/items/{identifier}' # noqa - return headers, HTTPStatus.CREATED, '' + http_status = HTTPStatus.CREATED if action == 'update': LOGGER.debug('Updating item') try: - _ = p.update(identifier, request.data) + payload = request.data + _ = p.update(identifier, payload) except TypeError as err: msg = str(err) return api.get_exception( @@ -824,10 +828,17 @@ def manage_collection_item( err.http_status_code, headers, request.format, 
err.ogc_exception_code, err.message) - return headers, HTTPStatus.NO_CONTENT, '' + http_status = HTTPStatus.NO_CONTENT if action == 'delete': LOGGER.debug('Deleting item') + try: + _ = p.get(identifier) + except ProviderItemNotFoundError as err: + return api.get_exception( + err.http_status_code, headers, request.format, + err.ogc_exception_code, err.message) + try: _ = p.delete(identifier) except ProviderGenericError as err: @@ -835,7 +846,14 @@ def manage_collection_item( err.http_status_code, headers, request.format, err.ogc_exception_code, err.message) - return headers, HTTPStatus.OK, '' + http_status = HTTPStatus.OK + + if api.pubsub_client is not None: + LOGGER.debug('Publishing message') + publish_message(api.pubsub_client, api.base_url, action, dataset, + identifier, payload) + + return headers, http_status, '' def get_collection_item(api: API, request: APIRequest, @@ -1024,21 +1042,6 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, from pygeoapi.openapi import OPENAPI_YAML, get_visible_collections - properties = { - 'name': 'properties', - 'in': 'query', - 'description': 'The properties that should be included for each feature. 
The parameter value is a comma-separated list of property names.', # noqa - 'required': False, - 'style': 'form', - 'explode': False, - 'schema': { - 'type': 'array', - 'items': { - 'type': 'string' - } - } - } - limit = { 'name': 'limit', 'in': 'query', @@ -1093,8 +1096,9 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, title = l10n.translate(v['title'], locale) description = l10n.translate(v['description'], locale) - coll_properties = deepcopy(properties) + oas_30_parameters = get_oas_30_parameters(cfg, locale) + coll_properties = deepcopy(oas_30_parameters)['properties'] coll_properties['schema']['items']['enum'] = list(p.fields.keys()) coll_limit = _derive_limit( @@ -1103,7 +1107,7 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, ) dataset_formatters = get_dataset_formatters(v) - coll_f_parameter = deepcopy(get_oas_30_parameters(cfg, locale))['f'] # noqa + coll_f_parameter = deepcopy(oas_30_parameters)['f'] for key, value in dataset_formatters.items(): coll_f_parameter['schema']['enum'].append(value.f) diff --git a/pygeoapi/api/maps.py b/pygeoapi/api/maps.py index d8df6d354..8712e4c94 100644 --- a/pygeoapi/api/maps.py +++ b/pygeoapi/api/maps.py @@ -8,7 +8,7 @@ # Ricardo Garcia Silva # Bernhard Mallinger # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2025 Francesco Bartoli # Copyright (c) 2022 John A Stevenson and Colin Blackburn # Copyright (c) 2023 Ricardo Garcia Silva @@ -44,15 +44,16 @@ from typing import Tuple from pygeoapi.crs import transform_bbox +from pygeoapi.formats import F_JSON, FORMAT_TYPES from pygeoapi.openapi import get_oas_30_parameters from pygeoapi.plugin import load_plugin -from pygeoapi.provider.base import ProviderGenericError -from pygeoapi.util import ( - get_provider_by_type, to_json, filter_providers_by_type, - filter_dict_by_key_value +from pygeoapi.provider import filter_providers_by_type, get_provider_by_type +from 
pygeoapi.provider.base import ( + ProviderGenericError, ProviderInvalidDataError ) +from pygeoapi.util import to_json, filter_dict_by_key_value -from . import APIRequest, API, validate_datetime +from . import APIRequest, API, validate_datetime, validate_subset LOGGER = logging.getLogger(__name__) @@ -68,7 +69,7 @@ def get_collection_map(api: API, request: APIRequest, dataset: str, style: str | None = None ) -> Tuple[dict, int, str]: """ - Returns a subset of a collection map + Returns an image of a collection map :param request: A request object :param dataset: dataset name @@ -167,10 +168,58 @@ def get_collection_map(api: API, request: APIRequest, HTTPStatus.BAD_REQUEST, headers, request.format, 'InvalidParameterValue', msg) + if 'subset' in request.params: + # TODO get subsets from provider + subsets = deepcopy(api.config['resources'][dataset]['extents']) + subsets.pop('spatial', None) # bbox + subsets.pop('temporal', None) # datetime + LOGGER.debug('Processing subset parameter') + try: + query_args['subsets'] = validate_subset( + request.params['subset'] or '') + except (AttributeError, ValueError) as err: + msg = f'Invalid subset: {err}' + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, format_, + 'InvalidParameterValue', msg) + + for sk in query_args['subsets'].keys(): + if sk not in subsets.keys(): + msg = f'Subset not found; valid values are {subsets}' + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, format_, + 'InvalidParameterValue', msg) + + if request.params.get('properties'): + try: + fields = p.get_fields() or {} + except NotImplementedError: + msg = 'No properties implemented' + headers['Content-Type'] = FORMAT_TYPES[F_JSON] + return api.get_exception( + HTTPStatus.NOT_IMPLEMENTED, headers, format_, + 'InvalidParameterValue', msg) + + LOGGER.debug('Processing properties parameter') + properties = request.params.get('properties') or [] + if isinstance(properties, str): + properties = properties.split(',') + + if properties 
and not any((fld in properties) + for fld in fields.keys()): + msg = f'Invalid property; valid property names are {list(fields.keys())}' # noqa + headers['Content-Type'] = FORMAT_TYPES[F_JSON] + return api.get_exception( + HTTPStatus.BAD_REQUEST, headers, request.format, + 'InvalidParameterValue', msg) + + query_args['select_properties'] = properties + LOGGER.debug('Generating map') try: data = p.query(**query_args) - except ProviderGenericError as err: + except (ProviderGenericError, ProviderInvalidDataError) as err: + headers['Content-Type'] = FORMAT_TYPES[F_JSON] return api.get_exception( err.http_status_code, headers, request.format, err.ogc_exception_code, err.message) @@ -197,7 +246,7 @@ def get_collection_map_legend(api: API, request: APIRequest, dataset: str, style: str | None = None ) -> Tuple[dict, int, str]: """ - Returns a subset of a collection map legend + Returns an image of a collection map legend :param request: A request object :param dataset: dataset name @@ -279,6 +328,9 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, if map_extension: mp = load_plugin('provider', map_extension) + coll_properties = deepcopy(parameters)['properties'] + coll_properties['schema']['items']['enum'] = list(mp.fields.keys()) + map_f = deepcopy(parameters['f']) map_f['schema']['enum'] = [map_extension['format']['name']] map_f['schema']['default'] = map_extension['format']['name'] @@ -293,6 +345,7 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, 'parameters': [ {'$ref': '#/components/parameters/bbox'}, {'$ref': f"{OPENAPI_YAML['oapif-1']}#/components/parameters/datetime"}, # noqa + {'$ref': f"{OPENAPI_YAML['oamaps']}#/components/parameters/subset"}, # noqa { 'name': 'width', 'in': 'query', @@ -342,6 +395,9 @@ def get_oas_30(cfg: dict, locale: str) -> tuple[list[dict[str, str]], dict[str, } } } + if coll_properties['schema']['items']['enum']: + paths[pth]['get']['parameters'].append(coll_properties) + if 
mp.time_field is not None: paths[pth]['get']['parameters'].append( {'$ref': f"{OPENAPI_YAML['oapif-1']}#/components/parameters/datetime"}) # noqa diff --git a/pygeoapi/api/processes.py b/pygeoapi/api/processes.py index 39a165ea0..8fdde5b61 100644 --- a/pygeoapi/api/processes.py +++ b/pygeoapi/api/processes.py @@ -9,12 +9,12 @@ # Bernhard Mallinger # Francesco Martinelli # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2025 Francesco Bartoli # Copyright (c) 2022 John A Stevenson and Colin Blackburn # Copyright (c) 2023 Ricardo Garcia Silva # Copyright (c) 2024 Bernhard Mallinger -# Copyright (c) 2024 Francesco Martinelli +# Copyright (c) 2026 Francesco Martinelli # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -49,7 +49,9 @@ from typing import Tuple from pygeoapi import l10n +from pygeoapi.formats import FORMAT_TYPES, F_HTML, F_JSON, F_JSONLD from pygeoapi.api import evaluate_limit +from pygeoapi.api.pubsub import publish_message from pygeoapi.process.base import ( JobNotFoundError, JobResultNotFoundError, @@ -60,9 +62,7 @@ json_serial, render_j2_template, JobStatus, RequestedProcessExecutionMode, to_json, DATETIME_FORMAT) -from . import ( - APIRequest, API, SYSTEM_LOCALE, F_JSON, FORMAT_TYPES, F_HTML, F_JSONLD, -) +from . 
import APIRequest, API, SYSTEM_LOCALE LOGGER = logging.getLogger(__name__) @@ -130,11 +130,16 @@ def describe_processes(api: API, request: APIRequest, p2.pop('outputs') p2.pop('example', None) - p2['jobControlOptions'] = ['sync-execute'] - if api.manager.is_async: + jco = p.metadata.get('jobControlOptions', ['sync-execute']) + p2['jobControlOptions'] = jco + + if api.manager.is_async and 'async-execute' not in jco: + LOGGER.debug('Adding async capability') p2['jobControlOptions'].append('async-execute') - p2['outputTransmission'] = ['value'] + p2['outputTransmission'] = p.metadata.get( + 'outputTransmission', ['value']) + p2['links'] = p2.get('links', []) jobs_url = f"{api.base_url}/jobs" @@ -518,18 +523,32 @@ def execute_process(api: API, request: APIRequest, else: http_status = HTTPStatus.OK - if mime_type == 'application/json' or requested_response == 'document': - response2 = to_json(response, api.pretty_print) + if mime_type == 'application/json': + if requested_response == 'document': + pretty_print_ = api.pretty_print + else: # raw + pretty_print_ = False + response2 = to_json(response, pretty_print_) else: response2 = response - if execution_mode == RequestedProcessExecutionMode.respond_async: + if (headers.get('Preference-Applied', '') == RequestedProcessExecutionMode.respond_async.value): # noqa LOGGER.debug('Asynchronous mode detected, returning statusInfo') response2 = { 'jobID': job_id, 'type': 'process', 'status': status.value } + response2 = to_json(response2, pretty_print_) + + if api.pubsub_client is not None: + LOGGER.debug('Publishing message') + try: + publish_message(api.pubsub_client, api.base_url, 'process', + process_id, job_id, response2) + except Exception as err: + msg = f'Could not publish message {err}' + LOGGER.warning(msg) return headers, http_status, response2 @@ -705,11 +724,11 @@ def get_oas_30(cfg: dict, locale: str 'externalDocs': {} } for link in p.metadata.get('links', []): - if link['type'] == 'information': + if 
link.get('rel', '') == 'information': translated_link = l10n.translate(link, locale) tag['externalDocs']['description'] = translated_link[ - 'type'] - tag['externalDocs']['url'] = translated_link['url'] + 'rel'] + tag['externalDocs']['url'] = translated_link['href'] break if len(tag['externalDocs']) == 0: del tag['externalDocs'] @@ -745,7 +764,7 @@ def get_oas_30(cfg: dict, locale: str 'description': 'Indicates client preferences, including whether the client is capable of asynchronous processing.', # noqa 'schema': { 'type': 'string', - 'enum': ['respond-async'] + 'enum': [] } }], 'responses': { @@ -769,6 +788,12 @@ def get_oas_30(cfg: dict, locale: str } } + jco = p.metadata.get('jobControlOptions', ['sync-execute']) + if 'sync-execute' in jco: + paths[f'{process_name_path}/execution']['post']['parameters'][0]['schema']['enum'].append('respond-sync') # noqa + if 'async-execute' in jco: + paths[f'{process_name_path}/execution']['post']['parameters'][0]['schema']['enum'].append('respond-async') # noqa + try: first_key = list(p.metadata['outputs'])[0] p_output = p.metadata['outputs'][first_key] diff --git a/pygeoapi/api/pubsub.py b/pygeoapi/api/pubsub.py new file mode 100644 index 000000000..fdc528e1e --- /dev/null +++ b/pygeoapi/api/pubsub.py @@ -0,0 +1,128 @@ +# ================================================================= + +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of 
the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + +from datetime import datetime, UTC +import json +import logging +import uuid +from typing import Union + +LOGGER = logging.getLogger(__name__) + +CONFORMANCE_CLASSES = [ + 'https://www.opengis.net/spec/ogcapi-pubsub-1/1.0/conf/message-payload-cloudevents-json', # noqa + 'https://www.opengis.net/spec/ogcapi-pubsub-1/1.0/conf/discovery' +] + + +def publish_message(pubsub_client, url: str, action: str, + resource: str = None, item: str = None, + data: dict = None) -> bool: + """ + Publish broker message + + :param pubsub_client: `pygeoapi.pubsub.BasePubSubClient` instance + :param url: `str` of server base URL + :param action: `str` of action trigger name (create, update, delete) + :param resource: `str` of resource identifier + :param item: `str` of item identifier + :param data: `dict` of data payload + + :returns: `bool` of whether message publishing was successful + """ + + if action in ['create', 'update']: + channel = f'collections/{resource}' + data_ = data + media_type = 'application/geo+json' + type_ = f'org.ogc.api.collection.item.{action}' + elif action == 'delete': + channel = f'collections/{resource}' + data_ = item + media_type = 'text/plain' + type_ = f'org.ogc.api.collection.item.{action}' + elif action == 'process': + channel = f'processes/{resource}' + media_type = 'application/json' + data_ = data + type_ = 'org.ogc.api.job.result' + + if pubsub_client.channel is not None: + channel 
= f'{pubsub_client.channel}/{channel}' + + message = generate_ogc_cloudevent(type_, media_type, url, + channel, data_) + LOGGER.debug(f'Message: {message}') + + try: + pubsub_client.connect() + pubsub_client.pub(channel, json.dumps(message)) + except Exception as err: + raise RuntimeError(err) + + +def generate_ogc_cloudevent(type_: str, media_type: str, source: str, + subject: str, data: Union[dict, str]) -> dict: + """ + Generate CloudEvent + + :param type_: `str` of CloudEvents type + :param source: `str` of source + :param subject: `str` of subject + :param media_type: `str` of media type + :param data: `str` or `dict` of data + + :returns: `dict` of OGC CloudEvent payload + """ + + try: + data2 = json.loads(data) + except Exception: + if isinstance(data, bytes): + data2 = data.decode('utf-8') + else: + data2 = data + + message = { + 'specversion': '1.0', + 'type': type_, + 'source': source, + 'subject': subject, + 'id': str(uuid.uuid4()), + 'time': datetime.now(UTC).strftime('%Y-%m-%dT%H:%M:%SZ'), + 'datacontenttype': media_type, + # 'dataschema': 'TODO', + 'data': data2 + } + + return message + + +def get_oas_30(cfg, locale_): + return [], {} diff --git a/pygeoapi/api/stac.py b/pygeoapi/api/stac.py index 37dfbedc9..ebf86b6e8 100644 --- a/pygeoapi/api/stac.py +++ b/pygeoapi/api/stac.py @@ -8,7 +8,7 @@ # Ricardo Garcia Silva # Bernhard Mallinger # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2025 Francesco Bartoli # Copyright (c) 2022 John A Stevenson and Colin Blackburn # Copyright (c) 2023 Ricardo Garcia Silva @@ -47,19 +47,19 @@ from shapely import from_geojson from pygeoapi import l10n +from pygeoapi.formats import FORMAT_TYPES, F_JSON, F_HTML from pygeoapi import api as ogc_api from pygeoapi.api import itemtypes as itemtypes_api from pygeoapi.plugin import load_plugin +from pygeoapi.provider import get_provider_by_type from pygeoapi.provider.base import ( ProviderConnectionError, ProviderNotFoundError, 
ProviderTypeError ) -from pygeoapi.util import ( - filter_dict_by_key_value, get_current_datetime, get_provider_by_type, - render_j2_template, to_json -) +from pygeoapi.util import (filter_dict_by_key_value, get_current_datetime, + render_j2_template, to_json) -from . import APIRequest, API, FORMAT_TYPES, F_JSON, F_HTML +from . import APIRequest, API LOGGER = logging.getLogger(__name__) diff --git a/pygeoapi/api/tiles.py b/pygeoapi/api/tiles.py index 2e31a6faf..afdde22b1 100644 --- a/pygeoapi/api/tiles.py +++ b/pygeoapi/api/tiles.py @@ -8,7 +8,7 @@ # Ricardo Garcia Silva # Bernhard Mallinger # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2025 Francesco Bartoli # Copyright (c) 2022 John A Stevenson and Colin Blackburn # Copyright (c) 2023 Ricardo Garcia Silva @@ -43,22 +43,19 @@ from typing import Tuple from pygeoapi import l10n +from pygeoapi.formats import FORMAT_TYPES, F_JSON, F_HTML, F_JSONLD from pygeoapi.plugin import load_plugin from pygeoapi.models.provider.base import (TilesMetadataFormat, TileMatrixSetEnum) +from pygeoapi.provider import get_provider_by_type, filter_providers_by_type from pygeoapi.provider.base import ( ProviderGenericError, ProviderTypeError ) from pygeoapi.provider.tile import ProviderTileNotFoundError -from pygeoapi.util import ( - get_provider_by_type, to_json, filter_dict_by_key_value, - filter_providers_by_type, render_j2_template -) +from pygeoapi.util import to_json, filter_dict_by_key_value, render_j2_template -from . import ( - APIRequest, API, FORMAT_TYPES, F_JSON, F_HTML, SYSTEM_LOCALE, F_JSONLD -) +from . 
import APIRequest, API, SYSTEM_LOCALE LOGGER = logging.getLogger(__name__) diff --git a/pygeoapi/asyncapi.py b/pygeoapi/asyncapi.py new file mode 100644 index 000000000..055cc1ba0 --- /dev/null +++ b/pygeoapi/asyncapi.py @@ -0,0 +1,292 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +import os +import json +import logging +from pathlib import Path +from urllib.parse import urlparse + +import click +from jsonschema import validate as jsonschema_validate +import yaml + +from pygeoapi import __version__, l10n +from pygeoapi.models.openapi import OAPIFormat +from pygeoapi.util import to_json, yaml_load, remove_url_auth + +LOGGER = logging.getLogger(__name__) + +THISDIR = os.path.dirname(os.path.realpath(__file__)) + + +def gen_asyncapi(cfg: dict) -> dict: + """ + Generate an AsyncAPI document + + :param cfg: `dict` of pygeoapi configuration + + :returns: `dict` of AsyncAPI document + """ + + server_locales = l10n.get_locales(cfg) + locale_ = server_locales[0] + + LOGGER.debug('Generating AsyncAPI document') + + title = l10n.translate(cfg['metadata']['identification']['title'], locale_) # noqa + description = l10n.translate(cfg['metadata']['identification']['description'], locale_) # noqa + tags = l10n.translate(cfg['metadata']['identification']['keywords'], locale_) # noqa + + u = cfg['pubsub']['broker']['url'] + up = urlparse(u) + protocol = up.scheme + url = remove_url_auth(u).replace(f'{protocol}://', '') + + a = { + 'asyncapi': '3.0.0', + 'id': cfg['server']['url'], + 'defaultContentType': 'application/json', + 'info': { + 'version': __version__, + 'title': title, + 'description': description, + 'license': { + 'name': cfg['metadata']['license']['name'], + 'url': cfg['metadata']['license']['url'] + }, + 'contact': { + 'name': cfg['metadata']['contact']['name'], + 'email': cfg['metadata']['contact']['email'] + }, + 'tags': [{'name': tag} for tag in tags], + 'externalDocs': { + 'url': cfg['metadata']['identification']['url'] + }, + }, + 'servers': { + 'default': { + 'host': url, + 'protocol': protocol, + 'description': description + } + }, + 'channels': {}, + 'operations': {} + } + if cfg['metadata']['contact']['url'].startswith('http'): + a['info']['contact']['url'] = 
cfg['metadata']['contact']['url'] + + if cfg['pubsub']['broker'].get('channel') is not None: + channel_prefix = cfg['pubsub']['broker']['channel'] + else: + channel_prefix = '' + + LOGGER.debug('Generating channels foreach collection') + for key, value in cfg['resources'].items(): + if value['type'] not in ['collection']: + LOGGER.debug('Skipping') + continue + + title = l10n.translate(value['title'], locale_) + channel_address = f'{channel_prefix}/collections/{key}' + + channel = { + 'description': title, + 'address': channel_address, + 'messages': { + 'DefaultMessage': { + 'payload': { + '$ref': 'https://raw.githubusercontent.com/wmo-im/wis2-monitoring-events/refs/heads/main/schemas/cloudevents-v1.0.2.yaml' # noqa + } + } + } + } + + operation = { + f'publish-{key}': { + 'action': 'send', + 'channel': { + '$ref': f'#/channels/notify-{key}' + } + }, + f'consume-{key}': { + 'action': 'receive', + 'channel': { + '$ref': f'#/channels/notify-{key}' + } + } + } + + a['channels'][f'notify-{key}'] = channel + a['operations'].update(operation) + + return a + + +def get_asyncapi(cfg, version='3.0'): + """ + Stub to generate AsyncAPI Document + + :param cfg: configuration object + :param version: version of AsyncAPI (default 3.0) + + :returns: AsyncAPI definition YAML dict + """ + + if version == '3.0': + return gen_asyncapi(cfg) + else: + raise RuntimeError('AsyncAPI version not supported') + + +def validate_asyncapi_document(instance_dict): + """ + Validate an AsyncAPI document against the AsyncAPI schema + + :param instance_dict: dict of AsyncAPI instance + + :returns: `bool` of validation + """ + + schema_file = os.path.join( + THISDIR, 'resources', 'schemas', 'asyncapi', 'asyncapi-3.0.0.json') + + LOGGER.debug(f'Validating against {schema_file}') + with open(schema_file) as fh2: + schema_dict = json.load(fh2) + jsonschema_validate(instance_dict, schema_dict) + + return True + + +def generate_asyncapi_document(cfg: dict, output_format: OAPIFormat): + """ + Generate an 
AsyncAPI document from the configuration file + + :param cfg: `dict` of configuration + :param output_format: output format for AsyncAPI document + + :returns: content of the AsyncAPI document in the output + format requested + """ + + pretty_print = cfg['server'].get('pretty_print', False) + + if output_format == 'yaml': + content = yaml.safe_dump(get_asyncapi(cfg), default_flow_style=False) + else: + content = to_json(get_asyncapi(cfg), pretty=pretty_print) + return content + + +def load_asyncapi_document() -> dict: + """ + Open AsyncAPI document from `PYGEOAPI_ASYNCAPI` environment variable + + :returns: `dict` of AsyncAPI document + """ + + pygeoapi_asyncapi = os.environ.get('PYGEOAPI_ASYNCAPI') + + if pygeoapi_asyncapi is None: + LOGGER.debug('PYGEOAPI_ASYNCAPI environment not set') + return {} + + if not os.path.exists(pygeoapi_asyncapi): + msg = (f'AsyncAPI document {pygeoapi_asyncapi} does not exist. ' + 'Please generate before starting pygeoapi') + LOGGER.warning(msg) + return {} + + with open(pygeoapi_asyncapi, encoding='utf8') as ff: + if pygeoapi_asyncapi.endswith(('.yaml', '.yml')): + asyncapi_ = yaml_load(ff) + else: # JSON string, do not transform + asyncapi_ = ff.read() + + return asyncapi_ + + +@click.group() +def asyncapi(): + """AsyncAPI management""" + pass + + +@click.command() +@click.pass_context +@click.argument('config_file', type=click.File(encoding='utf-8')) +@click.option('--format', '-f', 'format_', type=click.Choice(['json', 'yaml']), + default='yaml', help='output format (json|yaml)') +@click.option('--output-file', '-of', type=click.File('w', encoding='utf-8'), + help='Name of output file') +def generate(ctx, config_file, output_file, format_='yaml'): + """Generate AsyncAPI Document""" + + if config_file is None: + raise click.ClickException('--config/-c required') + + if isinstance(config_file, Path): + with config_file.open(mode='r') as cf: + cfg = yaml_load(cf) + else: + cfg = yaml_load(config_file) + + if 'pubsub' not in cfg: + 
click.echo('pubsub not configured; aborting') + ctx.exit(1) + + content = generate_asyncapi_document(cfg, format_) + + if output_file is None: + click.echo(content) + else: + click.echo(f'Generating {output_file.name}') + output_file.write(content) + click.echo('Done') + + +@click.command() +@click.pass_context +@click.argument('asyncapi_file', type=click.File()) +def validate(ctx, asyncapi_file): + """Validate AsyncAPI Document""" + + if asyncapi_file is None: + raise click.ClickException('--asyncapi/-o required') + + click.echo(f'Validating {asyncapi_file.name}') + instance = yaml_load(asyncapi_file) + validate_asyncapi_document(instance) + click.echo('Valid AsyncAPI document') + + +asyncapi.add_command(generate) +asyncapi.add_command(validate) diff --git a/pygeoapi/crs.py b/pygeoapi/crs.py index efeab27fa..188414fa1 100644 --- a/pygeoapi/crs.py +++ b/pygeoapi/crs.py @@ -70,7 +70,7 @@ class CrsTransformSpec: def get_srid(crs: Union[str, pyproj.CRS]) -> Union[int, None]: """ - Helper function to attempt to exctract an ESPG SRID from + Helper function to attempt to extract an EPSG SRID from a `pyproj.CRS` object. :param crs: `pyproj.CRS` object @@ -278,7 +278,8 @@ def crs_transform_feature(feature: dict, transform_func: Callable): ) -def transform_bbox(bbox: list, from_crs: str, to_crs: str) -> list: +def transform_bbox(bbox: list, from_crs: Union[str, pyproj.CRS], + to_crs: Union[str, pyproj.CRS]) -> list: """ helper function to transform a bounding box (bbox) from a source to a target CRS. CRSs in URI str format. @@ -286,7 +287,7 @@ def transform_bbox(bbox: list, from_crs: str, to_crs: str) -> list: :param bbox: list of coordinates in 'from_crs' projection :param from_crs: CRS to transform from - :param to_crs: CRSto transform to + :param to_crs: CRS to transform to :raises `CRSError`: Error raised if no CRS could be identified from an URI. 
diff --git a/pygeoapi/django_/urls.py b/pygeoapi/django_/urls.py index 37a19d9ef..108705d08 100644 --- a/pygeoapi/django_/urls.py +++ b/pygeoapi/django_/urls.py @@ -8,7 +8,7 @@ # Copyright (c) 2025 Francesco Bartoli # Copyright (c) 2022 Luca Delucchi # Copyright (c) 2022 Krishna Lodha -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -71,6 +71,7 @@ def apply_slash_rule(url: str): urlpatterns = [ path('', views.landing_page, name='landing-page'), path(apply_slash_rule('openapi/'), views.openapi, name='openapi'), + path(apply_slash_rule('asyncapi/'), views.asyncapi, name='asyncapi'), path( apply_slash_rule('conformance/'), views.conformance, diff --git a/pygeoapi/django_/views.py b/pygeoapi/django_/views.py index 620581f11..976d4236d 100644 --- a/pygeoapi/django_/views.py +++ b/pygeoapi/django_/views.py @@ -8,7 +8,7 @@ # Copyright (c) 2025 Francesco Bartoli # Copyright (c) 2022 Luca Delucchi # Copyright (c) 2022 Krishna Lodha -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -79,6 +79,18 @@ def openapi(request: HttpRequest) -> HttpResponse: return execute_from_django(core_api.openapi_, request) +def asyncapi(request: HttpRequest) -> HttpResponse: + """ + AsyncAPI endpoint + + :request Django HTTP Request + + :returns: Django HTTP Response + """ + + return execute_from_django(core_api.asyncapi_, request) + + def conformance(request: HttpRequest) -> HttpResponse: """ OGC API conformance endpoint diff --git a/pygeoapi/flask_app.py b/pygeoapi/flask_app.py index cb15b1777..108773d55 100644 --- a/pygeoapi/flask_app.py +++ b/pygeoapi/flask_app.py @@ -3,7 +3,7 @@ # Authors: Tom Kralidis # Norman Barker # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # 
Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -47,6 +47,7 @@ import pygeoapi.api.stac as stac_api import pygeoapi.api.tiles as tiles_api import pygeoapi.api.indoorgml as indoorgml +from pygeoapi.asyncapi import load_asyncapi_document from pygeoapi.openapi import load_openapi_document from pygeoapi.config import get_config from pygeoapi.util import get_mimetype, get_api_rules, filter_dict_by_key_value @@ -54,6 +55,7 @@ CONFIG = get_config() OPENAPI = load_openapi_document() +ASYNCAPI = load_asyncapi_document() API_RULES = get_api_rules(CONFIG) @@ -92,7 +94,7 @@ APP.config['JSONIFY_PRETTYPRINT_REGULAR'] = CONFIG['server'].get( 'pretty_print', True) -api_ = API(CONFIG, OPENAPI) +api_ = API(CONFIG, OPENAPI, ASYNCAPI) OGC_SCHEMAS_LOCATION = CONFIG['server'].get('ogc_schemas_location') @@ -181,6 +183,18 @@ def openapi(): return execute_from_flask(core_api.openapi_, request) + +@BLUEPRINT.route('/asyncapi') +def asyncapi(): + """ + AsyncAPI endpoint + + :returns: HTTP response + """ + + return execute_from_flask(core_api.asyncapi_, request) + + @BLUEPRINT.route('/conformance') def conformance(): """ @@ -231,6 +245,7 @@ def collections(collection_id: str | None = None): if collection_id is None: if request.method == 'GET': # Returns list (Merged YAML + DB from our previous step) + print("call desribe_collection") return execute_from_flask(core_api.describe_collections, request) elif request.method == 'POST': @@ -241,7 +256,6 @@ def collections(collection_id: str | None = None): else: # Check if it exists in the standard YAML config resource = api_.config['resources'].get(collection_id) - # LOGIC: Route to IndoorGML handler if: # A. It is explicitly marked 'indoorfeature' in Config # B. 
It is NOT in Config (meaning it exists only in the DB) diff --git a/pygeoapi/formats.py b/pygeoapi/formats.py new file mode 100644 index 000000000..3ad1f481e --- /dev/null +++ b/pygeoapi/formats.py @@ -0,0 +1,51 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +from collections import OrderedDict + +F_JSON = 'json' +F_COVERAGEJSON = 'json' +F_HTML = 'html' +F_JSONLD = 'jsonld' +F_GZIP = 'gzip' +F_PNG = 'png' +F_JPEG = 'jpeg' +F_MVT = 'mvt' +F_NETCDF = 'NetCDF' + +#: Formats allowed for ?f= requests (order matters for complex MIME types) +FORMAT_TYPES = OrderedDict(( + (F_HTML, 'text/html'), + (F_JSONLD, 'application/ld+json'), + (F_JSON, 'application/json'), + (F_PNG, 'image/png'), + (F_JPEG, 'image/jpeg'), + (F_MVT, 'application/vnd.mapbox-vector-tile'), + (F_NETCDF, 'application/x-netcdf'), +)) diff --git a/pygeoapi/openapi.py b/pygeoapi/openapi.py index c3016c828..09cfee333 100644 --- a/pygeoapi/openapi.py +++ b/pygeoapi/openapi.py @@ -4,7 +4,7 @@ # Authors: Francesco Bartoli # Authors: Ricardo Garcia Silva # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2025 Francesco Bartoli # Copyright (c) 2023 Ricardo Garcia Silva # @@ -56,7 +56,8 @@ 'oapif-1': 'https://schemas.opengis.net/ogcapi/features/part1/1.0/openapi/ogcapi-features-1.yaml', # noqa 'oapif-2': 'https://schemas.opengis.net/ogcapi/features/part2/1.0/openapi/ogcapi-features-2.yaml', # noqa 'oapip': 'https://schemas.opengis.net/ogcapi/processes/part1/1.0/openapi', - 'oacov': 'https://raw.githubusercontent.com/tomkralidis/ogcapi-coverages-1/fix-cis/yaml-unresolved', # noqa + 'oacov': 'https://raw.githubusercontent.com/opengeospatial/ogcapi-coverages/refs/heads/master/standard/openapi/ogcapi-coverages-1.yaml', # noqa + 'oamaps': 'https://schemas.opengis.net/ogcapi/maps/part1/1.0/openapi/ogcapi-maps-1.yaml', # noqa 'oapir': 'https://raw.githubusercontent.com/opengeospatial/ogcapi-records/master/core/openapi', # noqa 'oaedr': 'https://schemas.opengis.net/ogcapi/edr/1.0/openapi', # noqa 'oapit': 'https://schemas.opengis.net/ogcapi/tiles/part1/1.0/openapi/ogcapi-tiles-1.yaml', # noqa @@ -537,6 +538,11 @@ def get_oas_30(cfg: dict, fail_on_invalid_collection: 
bool = True) -> dict: try: sub_tags, sub_paths = api_module.get_oas_30(cfg, locale_) + + if not sub_tags and not sub_paths: + LOGGER.debug('Empty content from {api_name}; skipping') + continue + oas['paths'].update(sub_paths['paths']) oas['tags'].extend(sub_tags) except Exception as err: @@ -666,6 +672,20 @@ def get_oas_30_parameters(cfg: dict, locale_: str): 'style': 'form', 'explode': False }, + 'properties': { + 'name': 'properties', + 'in': 'query', + 'description': 'The properties that should be included. The parameter value is a comma-separated list of property names.', # noqa + 'required': False, + 'style': 'form', + 'explode': False, + 'schema': { + 'type': 'array', + 'items': { + 'type': 'string' + } + } + }, 'vendorSpecificParameters': { 'name': 'vendorSpecificParameters', 'in': 'query', @@ -716,10 +736,32 @@ def get_admin(cfg: dict) -> dict: schema_dict = get_config_schema() paths = {} - - res_eg_key = next(iter(cfg['resources'])) + if cfg['resources']: + res_eg_key = next(iter(cfg['resources'])) + else: + res_eg_key = 'example' res_eg = { res_eg_key: cfg['resources'][res_eg_key] + } if cfg['resources'] else { + 'example': { + 'type': 'collection', + 'title': 'Example', + 'description': 'Example', + 'keywords': ['example'], + 'links': [], + 'linked-data': {}, + 'extents': { + 'spatial': { + 'bbox': [-180, -90, 180, 90], + 'crs': 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + }, + 'temporal': { + 'begin': '2000-10-30T18:24:39Z', + 'end': '2007-10-30T08:57:29Z', + 'trs': 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' # noqa + } + } + } } if 'extents' in res_eg[res_eg_key]: res_eg_eg_key = 'extents' @@ -824,7 +866,7 @@ def get_admin(cfg: dict) -> dict: 'description': 'Adds resource to configuration', 'content': { 'application/json': { - 'example': {'new-collection': cfg['resources'][res_eg_key]}, # noqa + 'example': {'new-collection': cfg['resources'][res_eg_key] if cfg['resources'] else res_eg['example'] }, # noqa 'schema': 
schema_dict['properties']['resources']['patternProperties']['^.*$'] # noqa } }, @@ -957,6 +999,7 @@ def validate_openapi_document(instance_dict: dict) -> bool: schema_file = SCHEMASDIR / 'openapi' / 'openapi-3.0.x.json' + LOGGER.debug(f'Validating against {schema_file}') with schema_file.open() as fh2: schema_dict = json.load(fh2) jsonschema_validate(instance_dict, schema_dict) diff --git a/pygeoapi/plugin.py b/pygeoapi/plugin.py index 54f71be0e..32292c895 100644 --- a/pygeoapi/plugin.py +++ b/pygeoapi/plugin.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -85,6 +85,11 @@ 'MongoDB': 'pygeoapi.process.manager.mongodb_.MongoDBManager', 'TinyDB': 'pygeoapi.process.manager.tinydb_.TinyDBManager', 'PostgreSQL': 'pygeoapi.process.manager.postgresql.PostgreSQLManager' + }, + 'pubsub': { + 'HTTP': 'pygeoapi.pubsub.http.HTTPPubSubClient', + 'Kafka': 'pygeoapi.pubsub.kafka.KafkaPubSubClient', + 'MQTT': 'pygeoapi.pubsub.mqtt.MQTTPubSubClient' } } diff --git a/pygeoapi/process/base.py b/pygeoapi/process/base.py index 87c05a3cd..9e2136476 100644 --- a/pygeoapi/process/base.py +++ b/pygeoapi/process/base.py @@ -82,7 +82,7 @@ def execute(self, data: dict, outputs: Optional[dict] = None The value of any key may be an object and include the property `transmissionMode` - defaults to `value`. 
:returns: tuple of MIME type and process response - (string or bytes, or dict) + (string, bytes, list or dict) """ raise NotImplementedError() diff --git a/pygeoapi/process/manager/base.py b/pygeoapi/process/manager/base.py index d3d3285b0..fadb90fbd 100644 --- a/pygeoapi/process/manager/base.py +++ b/pygeoapi/process/manager/base.py @@ -6,7 +6,7 @@ # # Copyright (c) 2024 Tom Kralidis # (c) 2023 Ricardo Garcia Silva -# (c) 2024 Francesco Martinelli +# (c) 2026 Francesco Martinelli # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -277,6 +277,9 @@ def _execute_handler_sync(self, p: BaseProcessor, job_id: str, current_status = JobStatus.running jfmt, outputs = p.execute(data_dict, **extra_execute_parameters) + if isinstance(outputs, bytes): + outputs = outputs.decode('utf-8') + if requested_response == RequestedResponse.document.value: outputs = { 'outputs': [outputs] @@ -299,6 +302,10 @@ def _execute_handler_sync(self, p: BaseProcessor, job_id: str, mode = 'wb' data = outputs encoding = None + elif isinstance(outputs, str): + mode = 'w' + data = outputs + encoding = None with job_filename.open(mode=mode, encoding=encoding) as fh: fh.write(data) @@ -393,9 +400,10 @@ def execute_process( 'requested_response': requested_response } + job_control_options = processor.metadata.get( + 'jobControlOptions', []) + if execution_mode == RequestedProcessExecutionMode.respond_async: - job_control_options = processor.metadata.get( - 'jobControlOptions', []) # client wants async - do we support it? 
process_supports_async = ( ProcessExecutionMode.async_execute.value in job_control_options @@ -414,17 +422,30 @@ def execute_process( 'Preference-Applied': ( RequestedProcessExecutionMode.wait.value) } - elif execution_mode == RequestedProcessExecutionMode.wait: - # client wants sync - pygeoapi implicitly supports sync mode - LOGGER.debug('Synchronous execution') - handler = self._execute_handler_sync - response_headers = { - 'Preference-Applied': RequestedProcessExecutionMode.wait.value} - else: # client has no preference - # according to OAPI - Processes spec we ought to respond with sync - LOGGER.debug('Synchronous execution') - handler = self._execute_handler_sync - response_headers = None + else: # client has no preference or clients wants sync + # do we support sync? + process_supports_sync = ( + ProcessExecutionMode.sync_execute.value in job_control_options + ) + if not process_supports_sync: + LOGGER.debug('Asynchronous execution') + handler = self._execute_handler_async + response_headers = { + 'Preference-Applied': ( + RequestedProcessExecutionMode.respond_async.value) + } + else: + # according to OAPI - Processes spec we ought to + # respond with sync + LOGGER.debug('Synchronous execution') + handler = self._execute_handler_sync + if execution_mode == RequestedProcessExecutionMode.wait: + response_headers = None + else: + response_headers = { + 'Preference-Applied': ( + RequestedProcessExecutionMode.wait.value) + } # Add Job before returning any response. 
current_status = JobStatus.accepted diff --git a/pygeoapi/process/manager/mongodb_.py b/pygeoapi/process/manager/mongodb_.py index 44bce6dbe..06e6d909a 100644 --- a/pygeoapi/process/manager/mongodb_.py +++ b/pygeoapi/process/manager/mongodb_.py @@ -1,8 +1,10 @@ # ================================================================= # # Authors: Alexander Pilz +# Tom Kralidis # # Copyright (c) 2023 Alexander Pilz +# Copyright (c) 2026 Alexander Pilz # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -32,11 +34,11 @@ from pymongo import MongoClient -from pygeoapi.api import FORMAT_TYPES, F_JSON, F_JSONLD from pygeoapi.process.base import ( JobNotFoundError, JobResultNotFoundError, ) +from pygeoapi.formats import FORMAT_TYPES, F_JSON, F_JSONLD from pygeoapi.process.manager.base import BaseManager LOGGER = logging.getLogger(__name__) diff --git a/pygeoapi/process/manager/postgresql.py b/pygeoapi/process/manager/postgresql.py index bf5033eef..05dc408ee 100644 --- a/pygeoapi/process/manager/postgresql.py +++ b/pygeoapi/process/manager/postgresql.py @@ -1,8 +1,10 @@ # ================================================================= # # Authors: Francesco Martinelli +# Tom Kralidis # # Copyright (c) 2024 Francesco Martinelli +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -46,17 +48,18 @@ from typing import Any, Tuple from sqlalchemy import insert, update, delete -from sqlalchemy.engine import make_url from sqlalchemy.orm import Session -from pygeoapi.api import FORMAT_TYPES, F_JSON, F_JSONLD from pygeoapi.process.base import ( JobNotFoundError, JobResultNotFoundError, ProcessorGenericError ) +from pygeoapi.formats import FORMAT_TYPES, F_JSON, F_JSONLD from pygeoapi.process.manager.base import BaseManager -from pygeoapi.provider.sql import get_engine, get_table_model +from 
pygeoapi.provider.sql import ( + get_engine, get_table_model, store_db_parameters +) from pygeoapi.util import JobStatus @@ -66,13 +69,15 @@ class PostgreSQLManager(BaseManager): """PostgreSQL Manager""" + default_port = 5432 + def __init__(self, manager_def: dict): """ Initialize object :param manager_def: manager definition - :returns: `pygeoapi.process.manager.postgresqs.PostgreSQLManager` + :returns: `pygeoapi.process.manager.postgresql.PostgreSQLManager` """ super().__init__(manager_def) @@ -81,30 +86,18 @@ def __init__(self, manager_def: dict): self.supports_subscribing = True self.connection = manager_def['connection'] - try: - self.db_search_path = tuple(self.connection.get('search_path', - ['public'])) - except Exception: - self.db_search_path = ('public',) - - try: - LOGGER.debug('Connecting to database') - if isinstance(self.connection, str): - _url = make_url(self.connection) - self._engine = get_engine( - 'postgresql+psycopg2', - _url.host, - _url.port, - _url.database, - _url.username, - _url.password) - else: - self._engine = get_engine('postgresql+psycopg2', - **self.connection) - except Exception as err: - msg = 'Test connecting to DB failed' - LOGGER.error(f'{msg}: {err}') - raise ProcessorGenericError(msg) + options = manager_def.get('options', {}) + store_db_parameters(self, manager_def['connection'], options) + self._engine = get_engine( + 'postgresql+psycopg2', + self.db_host, + self.db_port, + self.db_name, + self.db_user, + self._db_password, + self.db_conn, + **self.db_options + ) try: LOGGER.debug('Getting table model') diff --git a/pygeoapi/process/manager/tinydb_.py b/pygeoapi/process/manager/tinydb_.py index b04d29a49..c15e9d36a 100644 --- a/pygeoapi/process/manager/tinydb_.py +++ b/pygeoapi/process/manager/tinydb_.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and 
associated documentation @@ -37,7 +37,7 @@ import tinydb from filelock import FileLock -from pygeoapi.api import FORMAT_TYPES, F_JSON, F_JSONLD +from pygeoapi.formats import FORMAT_TYPES, F_JSON, F_JSONLD from pygeoapi.process.base import ( JobNotFoundError, JobResultNotFoundError, diff --git a/pygeoapi/provider/__init__.py b/pygeoapi/provider/__init__.py index 7595c47ca..1ebd319cf 100644 --- a/pygeoapi/provider/__init__.py +++ b/pygeoapi/provider/__init__.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2019 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -27,4 +27,63 @@ # # ================================================================= -"""Provider module containing the plugins wrapping data sources""" +import logging + +from pygeoapi.provider.base import ProviderTypeError + +LOGGER = logging.getLogger(__name__) + + +def filter_providers_by_type(providers: list, type: str) -> dict: + """ + helper function to filter a list of providers by type + + :param providers: ``list`` + :param type: str + + :returns: filtered ``dict`` provider + """ + + providers_ = {provider['type']: provider for provider in providers} + return providers_.get(type) + + +def get_provider_by_type(providers: list, provider_type: str) -> dict: + """ + helper function to load a provider by a provider type + + :param providers: ``list`` of providers + :param provider_type: type of provider (e.g. 
feature) + + :returns: provider based on type + """ + + LOGGER.debug(f'Searching for provider type {provider_type}') + try: + p = (next(d for i, d in enumerate(providers) + if d['type'] == provider_type)) + except (RuntimeError, StopIteration): + raise ProviderTypeError('Invalid provider type requested') + + return p + + +def get_provider_default(providers: list) -> dict: + """ + helper function to get a resource's default provider + + :param providers: ``list`` of providers + + :returns: filtered ``dict`` + """ + + try: + default = (next(d for i, d in enumerate(providers) if 'default' in d + and d['default'])) + LOGGER.debug('found default provider type') + except StopIteration: + LOGGER.debug('no default provider type. Returning first provider') + default = providers[0] + + LOGGER.debug(f"Default provider: {default['type']}") + return default diff --git a/pygeoapi/provider/base.py b/pygeoapi/provider/base.py index 0c58d90cd..3dd4c5a18 100644 --- a/pygeoapi/provider/base.py +++ b/pygeoapi/provider/base.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -57,6 +57,8 @@ def __init__(self, provider_def): :returns: pygeoapi.provider.base.BaseProvider """ + from pygeoapi.util import str2bool + try: self.name = provider_def['name'] self.type = provider_def['type'] @@ -65,6 +67,7 @@ def __init__(self, provider_def): raise RuntimeError('name/type/data are required') self.editable = provider_def.get('editable', False) + self.count = str2bool(provider_def.get('count', True)) self.options = provider_def.get('options') self.id_field = provider_def.get('id_field') self.uri_field = provider_def.get('uri_field') @@ -285,7 +288,7 @@ def _load_and_prepare_item(self, item, identifier=None, msg = 'record already exists' LOGGER.error(msg) - raise ProviderInvalidDataError(msg) + raise 
ProviderInvalidDataError(user_msg=msg) except ProviderItemNotFoundError: LOGGER.debug('record does not exist') diff --git a/pygeoapi/provider/base_edr.py b/pygeoapi/provider/base_edr.py index 69669fe6e..01b1602b6 100644 --- a/pygeoapi/provider/base_edr.py +++ b/pygeoapi/provider/base_edr.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2021 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -55,8 +55,6 @@ def __init__(self, provider_def): BaseProvider.__init__(self, provider_def) -# self.instances = [] - def __init_subclass__(cls, **kwargs): super().__init_subclass__(**kwargs) @@ -80,6 +78,15 @@ def __init_subclass__(cls, **kwargs): 'but requests will be routed to a feature provider' ) + def get_instances(self): + """ + Get a list of instance identifiers + + :returns: `list` of instance identifiers + """ + + return NotImplementedError() + def get_instance(self, instance): """ Validate instance identifier diff --git a/pygeoapi/provider/esri.py b/pygeoapi/provider/esri.py index 85cc69d9e..c84b6e1f1 100644 --- a/pygeoapi/provider/esri.py +++ b/pygeoapi/provider/esri.py @@ -32,7 +32,7 @@ import logging from requests import Session, codes -from pygeoapi.crs import crs_transform, get_srid +from pygeoapi.crs import get_srid from pygeoapi.provider.base import (BaseProvider, ProviderConnectionError, ProviderTypeError, ProviderQueryError) from pygeoapi.util import format_datetime @@ -60,7 +60,7 @@ def __init__(self, provider_def): super().__init__(provider_def) self.url = f'{self.data}/query' - self.crs = get_srid(self.storage_crs) + self.srid = get_srid(self.storage_crs) self.username = provider_def.get('username') self.password = provider_def.get('password') self.token_url = provider_def.get('token_service', ARCGIS_URL) @@ -68,6 +68,11 @@ def __init__(self, provider_def): self.token = None self.session = Session() + 
self.using_deafult_id = any( + kw == self.id_field + for kw in ['OBJECTID', 'objectid', 'fid'] + ) + self.login() self.get_fields() @@ -80,13 +85,17 @@ def get_fields(self): if not self._fields: # Load fields - params = {'f': 'pjson'} - resp = self.get_response(self.data, params=params) + try: + resp = self.get_response(self.data, params={'f': 'pjson'}) + except ProviderConnectionError as err: + msg = f'Could not access resource {self.data}: {err}' + LOGGER.error(msg) + return {} if resp.get('error') is not None: msg = f"Connection error: {resp['error']['message']}" LOGGER.error(msg) - raise ProviderConnectionError(msg) + return {} try: # Verify Feature/Map Service supports required capabilities @@ -108,10 +117,10 @@ def get_fields(self): return self._fields - @crs_transform def query(self, offset=0, limit=10, resulttype='results', bbox=[], datetime_=None, properties=[], sortby=[], - select_properties=[], skip_geometry=False, q=None, **kwargs): + select_properties=[], skip_geometry=False, + crs_transform_spec=None, **kwargs): """ ESRI query @@ -124,7 +133,7 @@ def query(self, offset=0, limit=10, resulttype='results', :param sortby: list of dicts (property, order) :param select_properties: list of property names :param skip_geometry: bool of whether to skip geometry (default False) - :param q: full-text search term(s) + :param crs_transform_spec: `CrsTransformSpec` instance, optional :returns: `dict` of GeoJSON FeatureCollection """ @@ -133,7 +142,7 @@ def query(self, offset=0, limit=10, resulttype='results', params = { 'f': 'geoJSON', - 'outSR': self.crs, + 'outSR': self._get_srid(crs_transform_spec), 'outFields': self._make_fields(select_properties), 'where': self._make_where(properties, datetime_) } @@ -166,12 +175,12 @@ def query(self, offset=0, limit=10, resulttype='results', return fc - @crs_transform - def get(self, identifier, **kwargs): + def get(self, identifier, crs_transform_spec=None, **kwargs): """ Query ESRI by id :param identifier: feature id + 
:param crs_transform_spec: `CrsTransformSpec` instance, optional :returns: dict of single GeoJSON feature """ @@ -179,17 +188,28 @@ def get(self, identifier, **kwargs): LOGGER.debug(f'Fetching item: {identifier}') params = { 'f': 'geoJSON', - 'outSR': self.crs, - 'objectIds': identifier, + 'outSR': self._get_srid(crs_transform_spec), 'outFields': self._make_fields() } - resp = self.get_response(self.url, params=params) + if self.using_deafult_id: + params['objectIds'] = identifier + else: + params['where'] = self._make_where( + [(self.id_field, identifier)] + ) + LOGGER.debug('Returning item') - return resp['features'].pop() + [feature] = self._make_features( + self.get_response(params=params) + ) + + return feature def login(self): - # Generate token from username and password + """ + Generate login token from username and password + """ if self.token is None: if None in [self.username, self.password]: @@ -211,7 +231,17 @@ def login(self): 'X-Esri-Authorization': f'Bearer {self.token}' }) - def get_response(self, url, **kwargs): + def get_response(self, url: str = None, **kwargs): + """ + Get response from ESRI service + + :param url: `str` of ESRI service URL if not using default + + :returns: `dict` of ESRI response + """ + if url is None: + url = self.url + # Form URL for GET request LOGGER.debug('Sending query') with self.session.get(url, **kwargs) as r: @@ -314,9 +344,22 @@ def _get_count(self, params): params['returnCountOnly'] = 'true' params['f'] = 'pjson' - response = self.get_response(self.url, params=params) + response = self.get_response(params=params) return response.get('count', 0) + def _get_srid(self, crs_transform_spec): + """ + Get SRID from CrsTransformSpec + + :param crs_transform_spec: `CrsTransformSpec` instance + + :returns: `int` of SRID + """ + if crs_transform_spec is not None: + return get_srid(crs_transform_spec.target_crs) + + return self.srid + def _get_all(self, params, hits_): """ Get all features from query args @@ -329,7 +372,9 @@ 
def _get_all(self, params, hits_): params = deepcopy(params) # Return feature collection - features = self.get_response(self.url, params=params).get('features') + features = self._make_features( + self.get_response(params=params) + ) step = len(features) # Query if values are less than expected @@ -338,7 +383,9 @@ def _get_all(self, params, hits_): params['resultOffset'] += step params['resultRecordCount'] += step - fs = self.get_response(self.url, params=params).get('features') + fs = self._make_features( + self.get_response(params=params) + ) if len(fs) != 0: features.extend(fs) else: @@ -346,7 +393,27 @@ def _get_all(self, params, hits_): return features + def _make_features(self, feature_collection: dict = {}): + """ + Make a feature from features list + + :param features: `dict` of features + + :returns: `dict` of single feature + """ + features = feature_collection.get('features', []) + + for feature in features: + if not self.using_deafult_id: + feature['id'] = \ + feature['properties'][self.id_field] + + return features + def __exit__(self, **kwargs): + """ + Exit and close session + """ self.session.close() def __repr__(self): diff --git a/pygeoapi/provider/parquet.py b/pygeoapi/provider/parquet.py index 0f4ab3de1..8d69e9940 100644 --- a/pygeoapi/provider/parquet.py +++ b/pygeoapi/provider/parquet.py @@ -1,8 +1,10 @@ # ================================================================= # # Authors: Leo Ghignone +# Colton Loftus # -# Copyright (c) 2024 Leo Ghignone +# Copyright (c) 2026 Leo Ghignone +# Copyright (c) 2026 Colton Loftus # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -36,6 +38,7 @@ import pyarrow import pyarrow.compute as pc import pyarrow.dataset +import pyarrow.types as pat import s3fs from pygeoapi.crs import crs_transform @@ -60,7 +63,41 @@ def arrow_to_pandas_type(arrow_type): return pd_type +def has_geoparquet_bbox_column( + pyarrow_geo_metadata: dict, 
primary_geometry_column_name: str +) -> bool: + """ + Check if the metadata on the parquet dataset + indicates there is a geoparquet bbox column + + :param pyarrow_geo_metadata: dict serialized version of the 'geo' + key within the pyarrow metadata json + :param primary_geometry_column_name: name of the primary geometry column + where the geometry is stored as specified in the 'geo' metadata + + :returns: bool whether or not the dataset has a geoparquet bbox column + """ + primary_column = pyarrow_geo_metadata.get('primary_column') + if primary_column is None: + return False + + columns = pyarrow_geo_metadata.get('columns') + if columns is None: + return False + + geometry_column_metadata = columns.get(primary_geometry_column_name) + if geometry_column_metadata is None: + return False + + geometry_covering = geometry_column_metadata.get('covering') + if geometry_covering is None: + return False + + return geometry_covering.get('bbox') is not None + + class ParquetProvider(BaseProvider): + def __init__(self, provider_def): """ Initialize object @@ -85,48 +122,107 @@ def __init__(self, provider_def): # Source url is required self.source = self.data.get('source') if not self.source: - msg = "Need explicit 'source' attr " \ - "in data field of provider config" + msg = 'Need explicit "source" attr in data' \ + ' field of provider config' LOGGER.error(msg) - raise Exception(msg) + raise ProviderGenericError(msg) # Manage AWS S3 sources if self.source.startswith('s3'): self.source = self.source.split('://', 1)[1] self.fs = s3fs.S3FileSystem(default_cache_type='none') else: + # If none, pyarrow will attempt to auto-detect self.fs = None # Build pyarrow dataset pointing to the data self.ds = pyarrow.dataset.dataset(self.source, filesystem=self.fs) + if not self.id_field: + LOGGER.info( + 'No "id_field" specified in parquet provider config' + ' will use pandas index as the identifier' + ) + else: + id_type = self.ds.schema.field(self.id_field).type + if ( + 
pat.is_integer(id_type) + or pat.is_decimal(id_type) + or pat.is_float_value(id_type) + ): + LOGGER.warning( + f'id_field is of type {id_type},' + ' and not numeric; this is harder to query and' + ' may cause slow full scans' + ) + LOGGER.debug('Grabbing field information') self.get_fields() # Must be set to visualise queryables - # Column names for bounding box data. - if None in [self.x_field, self.y_field]: + # Get the CRS of the data + if b'geo' in self.ds.schema.metadata: + geo_metadata = json.loads(self.ds.schema.metadata[b'geo']) + + geom_column = geo_metadata['primary_column'] + + if geom_column: + self.has_geometry = True + + # if the CRS is not set default to EPSG:4326, per geoparquet spec + self.crs = geo_metadata['columns'][geom_column].get('crs') \ + or 'OGC:CRS84' + + # self.bbox_filterable indicates whether or not + # we can resolve a bbox request + # against the data, either by using an explicit + # bbox column or by using x_field and y_field + # columns + self.bbox_filterable = \ + has_geoparquet_bbox_column(geo_metadata, geom_column) + if self.bbox_filterable: + # Whether or not the data has the geoparquet + # standardized bbox column + self.has_bbox_column = True + # if there is a bbox column we + # don't need to parse the x_fields and y_fields + # and can just return early + return + else: + self.has_bbox_column = False + else: self.has_geometry = False + self.has_bbox_column = False + + for field_name, field_value in [ + ('x_field', self.x_field), + ('y_field', self.y_field) + ]: + if not field_value: + LOGGER.warning( + f'No geometry for {self.source};' + f'missing {field_name} in parquet provider config' + ) + self.bbox_filterable = False + self.has_bbox_column = False + return + + # If there is not a geoparquet bbox column, + # then we fall back to reading fields for minx, maxx, miny, maxy + # as direct column names; these can be set and use regardless of + # whether or not there is 'geo' metadata + if isinstance(self.x_field, str): + 
self.minx = self.x_field + self.maxx = self.x_field else: - self.has_geometry = True - if isinstance(self.x_field, str): - self.minx = self.x_field - self.maxx = self.x_field - else: - self.minx, self.maxx = self.x_field + self.minx, self.maxx = self.x_field - if isinstance(self.y_field, str): - self.miny = self.y_field - self.maxy = self.y_field - else: - self.miny, self.maxy = self.y_field - self.bb = [self.minx, self.miny, self.maxx, self.maxy] + if isinstance(self.y_field, str): + self.miny = self.y_field + self.maxy = self.y_field + else: + self.miny, self.maxy = self.y_field - # Get the CRS of the data - geo_metadata = json.loads(self.ds.schema.metadata[b'geo']) - geom_column = geo_metadata['primary_column'] - # if the CRS is not set default to EPSG:4326, per geoparquet spec - self.crs = (geo_metadata['columns'][geom_column].get('crs') - or 'OGC:CRS84') + self.bbox_filterable = True def _read_parquet(self, return_scanner=False, **kwargs): """ @@ -134,7 +230,10 @@ def _read_parquet(self, return_scanner=False, **kwargs): :returns: generator of RecordBatch with the queried values """ - scanner = pyarrow.dataset.Scanner.from_dataset(self.ds, **kwargs) + scanner = self.ds.scanner( + use_threads=True, + **kwargs + ) batches = scanner.to_batches() if return_scanner: return batches, scanner @@ -149,12 +248,19 @@ def get_fields(self): """ if not self._fields: - - for field_name, field_type in zip(self.ds.schema.names, - self.ds.schema.types): + for field_name, field_type in zip( + self.ds.schema.names, self.ds.schema.types + ): # Geometry is managed as a special case by pygeoapi if field_name == 'geometry': continue + # if we find the geoparquet bbox column and the + # type is a struct of any type, either double or + # float, then we skip it since it isn't + # meant to be a queryable field, rather just metadata + if field_name == 'bbox' and 'struct' in str(field_type): + self.bbox_filterable = True + continue field_type = str(field_type) converted_type = None @@ 
-213,28 +319,44 @@ def query( :returns: dict of 0..n GeoJSON features """ - result = None try: - filter = pc.scalar(True) + filter_ = pc.scalar(True) + if bbox: - if self.has_geometry is False: - msg = ( - 'Dataset does not have a geometry field, ' - 'querying by bbox is not supported.' + if not self.has_geometry: + raise ProviderQueryError( + ( + 'Dataset does not have a geometry field, ' + 'querying by bbox is not supported.' + ) + ) + + if not self.bbox_filterable: + raise ProviderQueryError( + ( + 'Dataset does not have a proper bbox metadata, ' + 'querying by bbox is not supported.' + ) ) - raise ProviderQueryError(msg) - LOGGER.debug('processing bbox parameter') - if any(b is None for b in bbox): - msg = 'Dataset does not support bbox filtering' - raise ProviderQueryError(msg) minx, miny, maxx, maxy = [float(b) for b in bbox] - filter = ( - (pc.field(self.minx) > pc.scalar(minx)) - & (pc.field(self.miny) > pc.scalar(miny)) - & (pc.field(self.maxx) < pc.scalar(maxx)) - & (pc.field(self.maxy) < pc.scalar(maxy)) - ) + + if self.has_bbox_column: + # GeoParquet bbox column is a struct + # with xmin, ymin, xmax, ymax + filter_ = filter_ & ( + (pc.field('bbox', 'xmin') >= pc.scalar(minx)) + & (pc.field('bbox', 'ymin') >= pc.scalar(miny)) + & (pc.field('bbox', 'xmax') <= pc.scalar(maxx)) + & (pc.field('bbox', 'ymax') <= pc.scalar(maxy)) + ) + else: + filter_ = ( + (pc.field(self.minx) >= pc.scalar(minx)) + & (pc.field(self.miny) >= pc.scalar(miny)) + & (pc.field(self.maxx) <= pc.scalar(maxx)) + & (pc.field(self.maxy) <= pc.scalar(maxy)) + ) if datetime_ is not None: if self.time_field is None: @@ -248,13 +370,13 @@ def query( begin, end = datetime_.split('/') if begin != '..': begin = isoparse(begin) - filter = filter & (timefield >= begin) + filter_ = filter_ & (timefield >= begin) if end != '..': end = isoparse(end) - filter = filter & (timefield <= end) + filter_ = filter_ & (timefield <= end) else: target_time = isoparse(datetime_) - filter = filter & (timefield 
== target_time) + filter_ = filter_ & (timefield == target_time) if properties: LOGGER.debug('processing properties') @@ -263,7 +385,7 @@ def query( pd_type = arrow_to_pandas_type(field.type) expr = pc.field(name) == pc.scalar(pd_type(value)) - filter = filter & expr + filter_ = filter_ & expr if len(select_properties) == 0: select_properties = self.ds.schema.names @@ -279,11 +401,11 @@ def query( # Make response based on resulttype specified if resulttype == 'hits': LOGGER.debug('hits only specified') - result = self._response_feature_hits(filter) + return self._response_feature_hits(filter_) elif resulttype == 'results': LOGGER.debug('results specified') - result = self._response_feature_collection( - filter, offset, limit, columns=select_properties + return self._response_feature_collection( + filter_, offset, limit, columns=select_properties ) else: LOGGER.error(f'Invalid resulttype: {resulttype}') @@ -298,8 +420,6 @@ def query( LOGGER.error(err) raise ProviderGenericError(err) - return result - @crs_transform def get(self, identifier, **kwargs): """ @@ -309,22 +429,22 @@ def get(self, identifier, **kwargs): :returns: a single feature """ - result = None try: LOGGER.debug(f'Fetching identifier {identifier}') id_type = arrow_to_pandas_type( - self.ds.schema.field(self.id_field).type) + self.ds.schema.field(self.id_field).type + ) batches = self._read_parquet( filter=( - pc.field(self.id_field) == pc.scalar(id_type(identifier)) - ) + pc.field(self.id_field) == pc.scalar(id_type(identifier) + )) ) for batch in batches: if batch.num_rows > 0: - assert ( - batch.num_rows == 1 - ), f'Multiple items found with ID {identifier}' + assert batch.num_rows == 1, ( + f'Multiple items found with ID {identifier}' + ) row = batch.to_pandas() break else: @@ -335,10 +455,14 @@ def get(self, identifier, **kwargs): else: geom = [None] gdf = gpd.GeoDataFrame(row, geometry=geom) + # If there is an id field, set it as index + # instead of the default numeric index + if self.id_field 
in gdf.columns: + gdf = gdf.set_index(self.id_field, drop=False) LOGGER.debug('results computed') # Grab the collection from geopandas geo_interface - result = gdf.__geo_interface__['features'][0] + return gdf.__geo_interface__['features'][0] except RuntimeError as err: LOGGER.error(err) @@ -353,13 +477,11 @@ def get(self, identifier, **kwargs): LOGGER.error(err) raise ProviderGenericError(err) - return result - def __repr__(self): return f' {self.data}' - def _response_feature_collection(self, filter, offset, limit, - columns=None): + def _response_feature_collection(self, filter, offset, + limit, columns=None): """ Assembles output from query as GeoJSON FeatureCollection structure. @@ -426,6 +548,10 @@ def _response_feature_collection(self, filter, offset, limit, geom = gpd.GeoSeries.from_wkb(rp['geometry'], crs=self.crs) gdf = gpd.GeoDataFrame(rp, geometry=geom) + # If there is an id_field in the data, set it as index + # instead of the default numerical index + if self.id_field in gdf.columns: + gdf = gdf.set_index(self.id_field, drop=False) LOGGER.debug('results computed') result = gdf.__geo_interface__ @@ -446,8 +572,9 @@ def _response_feature_hits(self, filter): """ try: - scanner = pyarrow.dataset.Scanner.from_dataset(self.ds, - filter=filter) + scanner = pyarrow.dataset.Scanner.from_dataset( + self.ds, filter=filter + ) return { 'type': 'FeatureCollection', 'numberMatched': scanner.count_rows(), diff --git a/pygeoapi/provider/postgresql_indoordb.py b/pygeoapi/provider/postgresql_indoordb.py index 7f5cdf38f..317dc511b 100644 --- a/pygeoapi/provider/postgresql_indoordb.py +++ b/pygeoapi/provider/postgresql_indoordb.py @@ -142,7 +142,7 @@ def get_collection(self, collection_id: str): except Exception as e: LOGGER.debug(e) raise e - + def post_collection(self, collection): """ Creates a new collection. 
diff --git a/pygeoapi/provider/sql.py b/pygeoapi/provider/sql.py index cba5abaea..19cc35ca8 100644 --- a/pygeoapi/provider/sql.py +++ b/pygeoapi/provider/sql.py @@ -39,25 +39,12 @@ # # ================================================================= -# Testing local postgis with docker: -# docker run --name "postgis" \ -# -v postgres_data:/var/lib/postgresql -p 5432:5432 \ -# -e ALLOW_IP_RANGE=0.0.0.0/0 \ -# -e POSTGRES_USER=postgres \ -# -e POSTGRES_PASS=postgres \ -# -e POSTGRES_DBNAME=test \ -# -d -t kartoza/postgis - -# Import dump: -# gunzip < tests/data/hotosm_bdi_waterways.sql.gz | -# psql -U postgres -h 127.0.0.1 -p 5432 test - from copy import deepcopy from datetime import datetime from decimal import Decimal import functools import logging -from typing import Optional +from typing import Optional, Any from geoalchemy2 import Geometry # noqa - this isn't used explicitly but is needed to process Geometry columns from geoalchemy2.functions import ST_MakeEnvelope, ST_Intersects @@ -73,7 +60,7 @@ desc, delete ) -from sqlalchemy.engine import URL +from sqlalchemy.engine import URL, Engine from sqlalchemy.exc import ( ConstraintColumnNotFoundError, InvalidRequestError, @@ -82,6 +69,7 @@ from sqlalchemy.ext.automap import automap_base from sqlalchemy.orm import Session, load_only from sqlalchemy.sql.expression import and_ +from sqlalchemy.schema import Table from pygeoapi.crs import get_transform_from_spec, get_srid from pygeoapi.provider.base import ( @@ -135,8 +123,8 @@ def __init__( LOGGER.debug(f'Configured Storage CRS: {self.storage_crs}') # Read table information from database - options = provider_def.get('options', {}) - self._store_db_parameters(provider_def['data'], options) + options = provider_def.get('options', {}) | extra_conn_args + store_db_parameters(self, provider_def['data'], options) self._engine = get_engine( driver_name, self.db_host, @@ -144,13 +132,13 @@ def __init__( self.db_name, self.db_user, self._db_password, - **self.db_options | 
extra_conn_args + self.db_conn, + **self.db_options ) self.table_model = get_table_model( self.table, self.id_field, self.db_search_path, self._engine ) - LOGGER.debug(f'DB connection: {repr(self._engine.url)}') self.get_fields() def query( @@ -212,18 +200,20 @@ def query( .options(selected_properties) ) - matched = results.count() - - LOGGER.debug(f'Found {matched} result(s)') - LOGGER.debug('Preparing response') response = { 'type': 'FeatureCollection', 'features': [], - 'numberMatched': matched, 'numberReturned': 0 } + if self.count or resulttype == 'hits': + matched = results.count() + response['numberMatched'] = matched + LOGGER.debug(f'Found {matched} result(s)') + else: + LOGGER.debug('Count disabled') + if resulttype == 'hits' or not results: return response @@ -424,22 +414,6 @@ def delete(self, identifier): return result.rowcount > 0 - def _store_db_parameters(self, parameters, options): - self.db_user = parameters.get('user') - self.db_host = parameters.get('host') - self.db_port = parameters.get('port', self.default_port) - self.db_name = parameters.get('dbname') - # db_search_path gets converted to a tuple here in order to ensure it - # is hashable - which allows us to use functools.cache() when - # reflecting the table definition from the DB - self.db_search_path = tuple(parameters.get('search_path', ['public'])) - self._db_password = parameters.get('password') - self.db_options = { - k: v - for k, v in options.items() - if not isinstance(v, dict) - } - def _sqlalchemy_to_feature(self, item, crs_transform_out=None, select_properties=[]): """ @@ -600,6 +574,48 @@ def _select_properties_clause(self, select_properties, skip_geometry): return selected_properties_clause +def store_db_parameters( + self: GenericSQLProvider | Any, + connection_data: str | dict[str], + options: dict[str, str] +) -> None: + """ + Store database connection parameters + + :self: instance of provider or manager class + :param connection_data: connection string or dict of 
connection params + :param options: additional connection options + + :returns: None + """ + if isinstance(connection_data, str): + self.db_conn = connection_data + connection_data = {} + else: + self.db_conn = None + # OR + self.db_user = connection_data.get('user') + self.db_host = connection_data.get('host') + self.db_port = connection_data.get('port', self.default_port) + self.db_name = ( + connection_data.get('dbname') or connection_data.get('database') + ) + self.db_query = connection_data.get('query') + self._db_password = connection_data.get('password') + # db_search_path gets converted to a tuple here in order to ensure it + # is hashable - which allows us to use functools.cache() when + # reflecting the table definition from the DB + self.db_search_path = tuple( + connection_data.get('search_path') or + options.pop('search_path', ['public']) + ) + self.db_options = { + k: v + for k, v in options.items() + if not isinstance(v, dict) + } + + @functools.cache def get_engine( driver_name: str, @@ -608,20 +624,38 @@ def get_engine( database: str, user: str, password: str, + conn_str: Optional[str] = None, **connect_args -): - """Create SQL Alchemy engine.""" - conn_str = URL.create( - drivername=driver_name, - username=user, - password=password, - host=host, - port=int(port), - database=database - ) +) -> Engine: + """ + Get SQL Alchemy engine. 
+ + :param driver_name: database driver name + :param host: database host + :param port: database port + :param database: database name + :param user: database user + :param password: database password + :param conn_str: optional connection URL + :param connect_args: custom connection arguments to pass to create_engine() + + :returns: SQL Alchemy engine + """ + if conn_str is None: + conn_str = URL.create( + drivername=driver_name, + username=user, + password=password, + host=host, + port=int(port), + database=database + ) + engine = create_engine( conn_str, connect_args=connect_args, pool_pre_ping=True ) + + LOGGER.debug(f'Created engine for {repr(engine.url)}.') return engine @@ -630,14 +664,25 @@ def get_table_model( table_name: str, id_field: str, db_search_path: tuple[str], - engine -): - """Reflect table.""" + engine: Engine +) -> Table: + """ + Reflect table using SQLAlchemy Automap. + + :param table_name: name of table to reflect + :param id_field: name of primary key field + :param db_search_path: tuple of database schemas to search for the table + :param engine: SQLAlchemy engine to use for reflection + + :returns: SQLAlchemy model of the reflected table + """ + LOGGER.debug('Reflecting table definition from database') metadata = MetaData() # Look for table in the first schema in the search path schema = db_search_path[0] try: + LOGGER.debug(f'Looking for table {table_name} in schema {schema}') metadata.reflect( bind=engine, schema=schema, only=[table_name], views=True ) diff --git a/pygeoapi/provider/tinydb_.py b/pygeoapi/provider/tinydb_.py index f2fbba228..5453f70c8 100644 --- a/pygeoapi/provider/tinydb_.py +++ b/pygeoapi/provider/tinydb_.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -448,7 +448,7 @@ def __repr__(self): return f' {self.data}' -def 
bbox_intersects(record_geometry, input_bbox): +def bbox_intersects(record_geometry, input_bbox) -> bool: """ Manual bbox intersection calculation @@ -458,7 +458,15 @@ def bbox_intersects(record_geometry, input_bbox): :returns: `bool` of whether the record_bbox intersects input_bbox """ - bbox1 = list(shape(record_geometry).bounds) + if record_geometry is None: + LOGGER.debug('Record geometry is none; skipping') + return False + + try: + bbox1 = list(shape(record_geometry).bounds) + except Exception as err: + LOGGER.debug(f'Invalid geometry: {err}') + return False bbox2 = [float(c) for c in input_bbox.split(',')] diff --git a/pygeoapi/provider/wms_facade.py b/pygeoapi/provider/wms_facade.py index b4f2495f1..e96f3c244 100644 --- a/pygeoapi/provider/wms_facade.py +++ b/pygeoapi/provider/wms_facade.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2022 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -66,7 +66,7 @@ def __init__(self, provider_def): def query(self, style=None, bbox=[-180, -90, 180, 90], width=500, height=300, crs=4326, datetime_=None, transparent=True, - bbox_crs=4326, format_='png'): + bbox_crs=4326, format_='png', **kwargs): """ Generate map diff --git a/pygeoapi/provider/xarray_.py b/pygeoapi/provider/xarray_.py index 6dbd9060f..36353243a 100644 --- a/pygeoapi/provider/xarray_.py +++ b/pygeoapi/provider/xarray_.py @@ -4,7 +4,7 @@ # Authors: Tom Kralidis # # Copyright (c) 2020 Gregory Petrochenkov -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -61,9 +61,14 @@ def __init__(self, provider_def): super().__init__(provider_def) + open_options = {} + squeeze = provider_def.get('options', {}).get('squeeze', False) + zarr_options = provider_def.get('options', 
{}).get('zarr', {}) + try: if provider_def['data'].endswith('.zarr'): open_func = xarray.open_zarr + open_options = zarr_options else: if '*' in self.data: LOGGER.debug('Detected multi file dataset') @@ -84,7 +89,7 @@ def __init__(self, provider_def): data_to_open = self.data try: - self._data = open_func(data_to_open) + self._data = open_func(data_to_open, **open_options) except ValueError as err: # Manage non-cf-compliant time dimensions if 'time' in str(err): @@ -92,6 +97,10 @@ def __init__(self, provider_def): else: raise err + if squeeze: + LOGGER.debug('Squeezing data') + self._data = self._data.squeeze() + if provider_def.get('storage_crs') is None: self.storage_crs = self._parse_storage_crs() @@ -117,6 +126,11 @@ def get_fields(self): elif dtype.name.startswith('str'): dtype = 'string' + if value.attrs.get('units') is None: + msg = f'Field {key} missing units, will be skipped' + LOGGER.warning(msg) + continue + self._fields[key] = { 'type': dtype, 'title': value.attrs.get('long_name'), @@ -240,19 +254,21 @@ def query(self, properties=[], subsets={}, bbox=[], bbox_crs=4326, data.coords[self.x_field].values[-1], data.coords[self.y_field].values[-1] ], - "driver": "xarray", - "height": data.sizes[self.y_field], - "width": data.sizes[self.x_field], - "variables": {var_name: var.attrs - for var_name, var in data.variables.items()} + 'driver': 'xarray', + 'height': data.sizes[self.y_field], + 'width': data.sizes[self.x_field], + 'variables': { + var_name: var.attrs + for var_name, var in data.variables.items() + } } if self.time_field is not None: out_meta['time'] = [ _to_datetime_string(data.coords[self.time_field].values[0]), - _to_datetime_string(data.coords[self.time_field].values[-1]), + _to_datetime_string(data.coords[self.time_field].values[-1]) ] - out_meta["time_steps"] = data.sizes[self.time_field] + out_meta['time_steps'] = data.sizes[self.time_field] LOGGER.debug('Serializing data in memory') if format_ == 'json': @@ -386,25 +402,30 @@ def 
gen_covjson(self, metadata, data, fields): try: for key, value in selected_fields.items(): LOGGER.debug(f'Adding range {key}') - cj['ranges'][key] = { + range = { 'type': 'NdArray', 'dataType': value['type'], 'axisNames': [ 'y', 'x' ], - 'shape': [metadata['height'], - metadata['width']] + 'shape': [ + metadata['height'], metadata['width'] + ], + 'values': [ + None if np.isnan(v) else v + for v in data[key].values.flatten() + ] } - cj['ranges'][key]['values'] = [ - None if np.isnan(v) else v - for v in data[key].values.flatten() - ] if self.time_field is not None: - cj['ranges'][key]['axisNames'].append('t') - cj['ranges'][key]['shape'].append(metadata['time_steps']) + LOGGER.debug(f'Adding time axis to range {key}') + range['axisNames'].insert(0, 't') + range['shape'].insert(0, metadata['time_steps']) + + cj['ranges'][key] = range + except IndexError as err: - LOGGER.warning(err) + LOGGER.error(err) raise ProviderQueryError('Invalid query parameter') LOGGER.debug('Returning data') @@ -675,11 +696,11 @@ def _get_zarr_data(data): def _convert_float32_to_float64(data): """ - Converts DataArray values of float32 to float64 - :param data: Xarray dataset of coverage data + Converts DataArray values of float32 to float64 + :param data: Xarray dataset of coverage data - :returns: Xarray dataset of coverage data - """ + :returns: Xarray dataset of coverage data + """ for var_name in data.variables: if data[var_name].dtype == 'float32': diff --git a/pygeoapi/provider/xarray_edr.py b/pygeoapi/provider/xarray_edr.py index a2403b014..c7af0c5c4 100644 --- a/pygeoapi/provider/xarray_edr.py +++ b/pygeoapi/provider/xarray_edr.py @@ -36,7 +36,7 @@ from pygeoapi.provider.xarray_ import ( _to_datetime_string, _convert_float32_to_float64, - XarrayProvider, + XarrayProvider ) LOGGER = logging.getLogger(__name__) @@ -73,10 +73,9 @@ def position(self, **kwargs): query_params = {} + LOGGER.debug('Query type: position') LOGGER.debug(f'Query parameters: {kwargs}') - LOGGER.debug(f"Query 
type: {kwargs.get('query_type')}") - wkt = kwargs.get('wkt') if wkt is not None: LOGGER.debug('Processing WKT') @@ -115,7 +114,10 @@ def position(self, **kwargs): try: if select_properties: - self._fields = {k: v for k, v in self._fields.items() if k in select_properties} # noqa + self._fields = { + k: v for k, v in self._fields.items() + if k in select_properties + } data = self._data[[*select_properties]] else: data = self._data @@ -156,12 +158,12 @@ def position(self, **kwargs): bbox = wkt.bounds out_meta = { 'bbox': [bbox[0], bbox[1], bbox[2], bbox[3]], - "time": time, - "driver": "xarray", - "height": height, - "width": width, - "time_steps": time_steps, - "variables": {var_name: var.attrs + 'time': time, + 'driver': 'xarray', + 'height': height, + 'width': width, + 'time_steps': time_steps, + 'variables': {var_name: var.attrs for var_name, var in data.variables.items()} } @@ -183,12 +185,11 @@ def cube(self, **kwargs): query_params = {} + LOGGER.debug('Query type: cube') LOGGER.debug(f'Query parameters: {kwargs}') - LOGGER.debug(f"Query type: {kwargs.get('query_type')}") - bbox = kwargs.get('bbox') - xmin, ymin, xmax, ymax = self._configure_bbox(bbox) + xmin, ymin, xmax, ymax = self._configure_bbox() if len(bbox) == 4: query_params[self.x_field] = slice(bbox[xmin], bbox[xmax]) @@ -208,15 +209,17 @@ def cube(self, **kwargs): if datetime_ is not None: query_params[self.time_field] = self._make_datetime(datetime_) + fields = { + field: self.fields[field] + for field in select_properties + if field in self.fields + } if select_properties else self.fields + LOGGER.debug(f'query parameters: {query_params}') try: - if select_properties: - self._fields = {k: v for k, v in self._fields.items() if k in select_properties} # noqa - data = self._data[[*select_properties]] - else: - data = self._data - data = data.sel(query_params) - data = _convert_float32_to_float64(data) + data = _convert_float32_to_float64( + self._data[[*fields]].sel(query_params) + ) except KeyError: 
raise ProviderNoDataError() @@ -231,16 +234,18 @@ def cube(self, **kwargs): data.coords[self.x_field].values[-1], data.coords[self.y_field].values[-1] ], - "time": time, - "driver": "xarray", - "height": height, - "width": width, - "time_steps": time_steps, - "variables": {var_name: var.attrs - for var_name, var in data.variables.items()} + 'time': time, + 'driver': 'xarray', + 'height': height, + 'width': width, + 'time_steps': time_steps, + 'variables': { + var_name: var.attrs + for var_name, var in data.variables.items() + } } - return self.gen_covjson(out_meta, data, self.fields) + return self.gen_covjson(out_meta, data, fields) def _make_datetime(self, datetime_): """ @@ -300,7 +305,7 @@ def _parse_time_metadata(self, data, kwargs): time_steps = kwargs.get('limit') return time, time_steps - def _configure_bbox(self, bbox): + def _configure_bbox(self): xmin, ymin, xmax, ymax = 0, 1, 2, 3 if self._data[self.x_field][0] > self._data[self.x_field][-1]: xmin, xmax = xmax, xmin diff --git a/pygeoapi/pubsub/__init__.py b/pygeoapi/pubsub/__init__.py new file mode 100644 index 000000000..87d5c01b8 --- /dev/null +++ b/pygeoapi/pubsub/__init__.py @@ -0,0 +1,30 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. 
+# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. +# +# ================================================================= + +"""Pub/Sub module""" diff --git a/pygeoapi/pubsub/base.py b/pygeoapi/pubsub/base.py new file mode 100644 index 000000000..6523649a1 --- /dev/null +++ b/pygeoapi/pubsub/base.py @@ -0,0 +1,107 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +import logging +import random +from urllib.parse import urlparse + +from pygeoapi.error import GenericError +from pygeoapi.util import remove_url_auth + +LOGGER = logging.getLogger(__name__) + + +class BasePubSubClient: + """Base Pub/Sub client""" + + def __init__(self, publisher_def: dict): + """ + Initialize object + + :param publisher_def: publisher definition + + :returns: pygeoapi.pubsub.base.BasePubSubClient + """ + + self.type = 'pubsub' + + try: + self.name = publisher_def['name'] + self.broker = publisher_def['broker']['url'] + except KeyError: + raise RuntimeError('name and broker.url are required') + + self.broker_url = urlparse(self.broker) + self.broker_safe_url = remove_url_auth(self.broker) + + self.hidden = publisher_def['broker'].get('hidden', False) + self.channel = publisher_def['broker'].get('channel') + self.client_id = f'pygeoapi-pubsub-{random.randint(0, 1000)}' + + def connect(self) -> None: + """ + Connect to a Pub/Sub broker + + :returns: None + """ + + raise NotImplementedError() + + def pub(self, channel: str, message: str) -> bool: + """ + Publish a message to a broker/channel + + :param channel: `str` of channel + :param message: `str` of message + + :returns: `bool` of publish result + """ + + raise NotImplementedError() + + def __repr__(self): + return f' {self.broker_safe_url}' + + +class PubSubClientConnectionError(GenericError): + """Pub/Sub client connection error""" + + default_msg = 'Pub/Sub client connection error (check logs)' + + +class PubSubClientSubscriptionError(GenericError): + """Pub/Sub client subscription error""" + + default_msg = 'Pub/Sub client subscription error (check logs)' + + +class PubSubClientPublishError(GenericError): + """Pub/Sub client publish error""" + + default_msg = 'Pub/Sub client publish error (check logs)' diff --git a/pygeoapi/pubsub/http.py b/pygeoapi/pubsub/http.py new file mode 100644 index 
000000000..c19accc7d --- /dev/null +++ b/pygeoapi/pubsub/http.py @@ -0,0 +1,105 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# Angelos Tzotsos +# +# Copyright (c) 2026 Tom Kralidis +# Copyright (c) 2025 Angelos Tzotsos +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +import logging + +import requests + +from pygeoapi.pubsub.base import BasePubSubClient, PubSubClientConnectionError + +LOGGER = logging.getLogger(__name__) + + +class HTTPPubSubClient(BasePubSubClient): + """HTTP client""" + + def __init__(self, publisher_def): + """ + Initialize object + + :param publisher_def: provider definition + + :returns: pygeoapi.pubsub.http.HTTPPubSubClient + """ + + super().__init__(publisher_def) + self.name = 'HTTP' + self.type = 'http' + self.auth = None + + msg = f'Initializing to broker {self.broker_safe_url} with id {self.client_id}' # noqa + LOGGER.debug(msg) + + if None not in [self.broker_url.username, self.broker_url.password]: + LOGGER.debug('Setting credentials') + self.auth = ( + self.broker_url.username, + self.broker_url.password + ) + + def connect(self) -> None: + """ + Connect to an HTTP broker + + :returns: None + """ + + LOGGER.debug('No connection to HTTP') + pass + + def pub(self, channel: str, message: str, qos: int = 1) -> bool: + """ + Publish a message to a broker/channel + + :param channel: `str` of topic + :param message: `str` of message + + :returns: `bool` of publish result + """ + + LOGGER.debug(f'Publishing to broker {self.broker_safe_url}') + LOGGER.debug(f'Channel: {channel}') + LOGGER.debug(f'Message: {message}') + LOGGER.debug('Sanitizing channel for HTTP') + channel = channel.replace('/', '-') + channel = channel.replace(':', '-') + LOGGER.debug(f'Sanitized channel for HTTP: {channel}') + + url = f'{self.broker}/{channel}' + + try: + response = requests.post(url, auth=self.auth, json=message) + response.raise_for_status() + except Exception as err: + raise PubSubClientConnectionError(err) + + def __repr__(self): + return f' {self.broker_safe_url}' diff --git a/pygeoapi/pubsub/kafka.py b/pygeoapi/pubsub/kafka.py new file mode 100644 index 000000000..20033dea8 --- /dev/null +++ b/pygeoapi/pubsub/kafka.py @@ -0,0 +1,109 @@ +# 
================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +import logging + +from kafka import errors, KafkaProducer + +from pygeoapi.pubsub.base import BasePubSubClient, PubSubClientConnectionError +from pygeoapi.util import to_json + +LOGGER = logging.getLogger(__name__) + + +class KafkaPubSubClient(BasePubSubClient): + """Kafka client""" + + def __init__(self, publisher_def): + """ + Initialize object + + :param publisher_def: provider definition + + :returns: pygeoapi.pubsub.kafka.KafkaPubSubClient + """ + + super().__init__(publisher_def) + self.name = 'Kafka' + self.type = 'kafka' + self.sasl_mechanism = publisher_def.get('sasl.mechanism', 'PLAIN') + self.security_protocol = publisher_def.get('security.protocol', 'SASL_SSL') # noqa + + msg = f'Initializing to broker {self.broker_safe_url} with id {self.client_id}' # noqa + LOGGER.debug(msg) + + def connect(self) -> None: + """ + Connect to an Kafka broker + + :returns: None + """ + + args = { + 'bootstrap_servers': f'{self.broker_url.hostname}:{self.broker_url.port}', # noqa + 'client_id': self.client_id, + 'value_serializer': lambda v: to_json(v).encode('utf-8') + } + if None not in [self.broker_url.username, self.broker_url.password]: + args.update({ + 'security.protocol': self.security_protocol, + 'sasl.mechanism': self.sasl_mechanism, + 'sasl.username': self.broker_url.username, + 'sasl.password': self.broker_url.password + }) + + LOGGER.debug('Creating Kafka producer') + try: + self.producer = KafkaProducer(**args) + except errors.NoBrokersAvailable as err: + raise PubSubClientConnectionError(err) + + def pub(self, channel: str, message: str) -> bool: + """ + Publish a message to a broker/channel + + :param channel: `str` of topic + :param message: `str` of message + + :returns: `bool` of publish result + """ + + LOGGER.debug(f'Publishing to broker {self.broker_safe_url}') + LOGGER.debug(f'Channel: {channel}') + LOGGER.debug(f'Message: {message}') + LOGGER.debug('Sanitizing channel for 
Kafka') + channel = channel.replace('/', '-') + channel = channel.replace(':', '-') + LOGGER.debug(f'Sanitized channel for Kafka: {channel}') + + self.producer.send(channel, value=message) + self.producer.flush() + + def __repr__(self): + return f' {self.broker_safe_url}' diff --git a/pygeoapi/pubsub/mqtt.py b/pygeoapi/pubsub/mqtt.py new file mode 100644 index 000000000..0f88d670b --- /dev/null +++ b/pygeoapi/pubsub/mqtt.py @@ -0,0 +1,121 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +import logging + +from paho.mqtt import client as mqtt_client + +from pygeoapi.pubsub.base import BasePubSubClient, PubSubClientConnectionError + +LOGGER = logging.getLogger(__name__) + + +class MQTTPubSubClient(BasePubSubClient): + """MQTT client""" + + def __init__(self, publisher_def): + """ + Initialize object + + :param publisher_def: provider definition + + :returns: pygeoapi.pubsub.mqtt.MQTTPubSubClient + """ + + super().__init__(publisher_def) + self.type = 'mqtt' + self.port = self.broker_url.port + + self.userdata = {} + + msg = f'Connecting to broker {self.broker_safe_url} with id {self.client_id}' # noqa + LOGGER.debug(msg) + self.conn = mqtt_client.Client(mqtt_client.CallbackAPIVersion.VERSION2, + client_id=self.client_id) + + self.conn.enable_logger(logger=LOGGER) + + if None not in [self.broker_url.username, self.broker_url.password]: + LOGGER.debug('Setting credentials') + self.conn.username_pw_set( + self.broker_url.username, + self.broker_url.password) + + if self.port is None: + if self.broker_url.scheme == 'mqtts': + self.port = 8883 + else: + self.port = 1883 + + if self.broker_url.scheme == 'mqtts': + self.conn.tls_set(tls_version=2) + + def connect(self) -> None: + """ + Connect to an MQTT broker + + :returns: None + """ + + try: + self.conn.connect(self.broker_url.hostname, self.port) + LOGGER.debug('Connected to broker') + except Exception as err: + raise PubSubClientConnectionError(err) + + def pub(self, channel: str, message: str, qos: int = 1) -> bool: + """ + Publish a message to a broker/channel + + :param channel: `str` of channel + :param message: `str` of message + + :returns: `bool` of publish result + """ + + LOGGER.debug(f'Publishing to broker {self.broker_safe_url}') + LOGGER.debug(f'Channel: {channel}') + LOGGER.debug(f'Message: {message}') + + result = self.conn.publish(channel, message, qos) + LOGGER.debug(f'Result: {result}') + + # TODO: investigate 
implication + # result.wait_for_publish() + + if result.is_published: + LOGGER.debug('Message published') + return True + else: + msg = f'Publishing error code: {result[1]}' + LOGGER.warning(msg) + return False + + def __repr__(self): + return f' {self.broker_safe_url}' diff --git a/pygeoapi/resources/schemas/asyncapi/asyncapi-3.0.0.json b/pygeoapi/resources/schemas/asyncapi/asyncapi-3.0.0.json new file mode 100644 index 000000000..5f63927db --- /dev/null +++ b/pygeoapi/resources/schemas/asyncapi/asyncapi-3.0.0.json @@ -0,0 +1,9077 @@ +{ + "$id": "http://asyncapi.com/definitions/3.0.0/asyncapi.json", + "$schema": "http://json-schema.org/draft-07/schema", + "title": "AsyncAPI 3.0.0 schema.", + "type": "object", + "required": [ + "asyncapi", + "info" + ], + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "asyncapi": { + "type": "string", + "const": "3.0.0", + "description": "The AsyncAPI specification version of this document." + }, + "id": { + "type": "string", + "description": "A unique id representing the application.", + "format": "uri" + }, + "info": { + "$ref": "http://asyncapi.com/definitions/3.0.0/info.json" + }, + "servers": { + "$ref": "http://asyncapi.com/definitions/3.0.0/servers.json" + }, + "defaultContentType": { + "type": "string", + "description": "Default content type to use when encoding/decoding a message's payload." 
+ }, + "channels": { + "$ref": "http://asyncapi.com/definitions/3.0.0/channels.json" + }, + "operations": { + "$ref": "http://asyncapi.com/definitions/3.0.0/operations.json" + }, + "components": { + "$ref": "http://asyncapi.com/definitions/3.0.0/components.json" + } + }, + "definitions": { + "http://asyncapi.com/definitions/3.0.0/specificationExtension.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json", + "description": "Any property starting with x- is valid.", + "additionalProperties": true, + "additionalItems": true + }, + "http://asyncapi.com/definitions/3.0.0/info.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/info.json", + "description": "The object provides metadata about the API. The metadata can be used by the clients if needed.", + "allOf": [ + { + "type": "object", + "required": [ + "version", + "title" + ], + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "title": { + "type": "string", + "description": "A unique and precise title of the API." + }, + "version": { + "type": "string", + "description": "A semantic version number of the API." + }, + "description": { + "type": "string", + "description": "A longer description of the API. Should be different from the title. CommonMark is allowed." + }, + "termsOfService": { + "type": "string", + "description": "A URL to the Terms of Service for the API. MUST be in the format of a URL.", + "format": "uri" + }, + "contact": { + "$ref": "http://asyncapi.com/definitions/3.0.0/contact.json" + }, + "license": { + "$ref": "http://asyncapi.com/definitions/3.0.0/license.json" + }, + "tags": { + "type": "array", + "description": "A list of tags for application API documentation control. 
Tags can be used for logical grouping of applications.", + "items": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/tag.json" + } + ] + }, + "uniqueItems": true + }, + "externalDocs": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/externalDocs.json" + } + ] + } + } + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/infoExtensions.json" + } + ], + "examples": [ + { + "title": "AsyncAPI Sample App", + "version": "1.0.1", + "description": "This is a sample app.", + "termsOfService": "https://asyncapi.org/terms/", + "contact": { + "name": "API Support", + "url": "https://www.asyncapi.org/support", + "email": "support@asyncapi.org" + }, + "license": { + "name": "Apache 2.0", + "url": "https://www.apache.org/licenses/LICENSE-2.0.html" + }, + "externalDocs": { + "description": "Find more info here", + "url": "https://www.asyncapi.org" + }, + "tags": [ + { + "name": "e-commerce" + } + ] + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/contact.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/contact.json", + "type": "object", + "description": "Contact information for the exposed API.", + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The identifying name of the contact person/organization." 
+ }, + "url": { + "type": "string", + "description": "The URL pointing to the contact information.", + "format": "uri" + }, + "email": { + "type": "string", + "description": "The email address of the contact person/organization.", + "format": "email" + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "examples": [ + { + "name": "API Support", + "url": "https://www.example.com/support", + "email": "support@example.com" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/license.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/license.json", + "type": "object", + "required": [ + "name" + ], + "additionalProperties": false, + "properties": { + "name": { + "type": "string", + "description": "The name of the license type. It's encouraged to use an OSI compatible license." + }, + "url": { + "type": "string", + "description": "The URL pointing to the license.", + "format": "uri" + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "examples": [ + { + "name": "Apache 2.0", + "url": "https://www.apache.org/licenses/LICENSE-2.0.html" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/Reference.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/Reference.json", + "type": "object", + "description": "A simple object to allow referencing other components in the specification, internally and externally.", + "required": [ + "$ref" + ], + "properties": { + "$ref": { + "description": "The reference string.", + "$ref": "http://asyncapi.com/definitions/3.0.0/ReferenceObject.json" + } + }, + "examples": [ + { + "$ref": "#/components/schemas/Pet" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/ReferenceObject.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/ReferenceObject.json", + "type": "string", + "format": "uri-reference" + }, + 
"http://asyncapi.com/definitions/3.0.0/tag.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/tag.json", + "type": "object", + "description": "Allows adding metadata to a single tag.", + "additionalProperties": false, + "required": [ + "name" + ], + "properties": { + "name": { + "type": "string", + "description": "The name of the tag." + }, + "description": { + "type": "string", + "description": "A short description for the tag. CommonMark syntax can be used for rich text representation." + }, + "externalDocs": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/externalDocs.json" + } + ] + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "examples": [ + { + "name": "user", + "description": "User-related messages" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/externalDocs.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/externalDocs.json", + "type": "object", + "additionalProperties": false, + "description": "Allows referencing an external resource for extended documentation.", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string", + "description": "A short description of the target documentation. CommonMark syntax can be used for rich text representation." + }, + "url": { + "type": "string", + "description": "The URL for the target documentation. 
This MUST be in the form of an absolute URL.", + "format": "uri" + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "examples": [ + { + "description": "Find more info here", + "url": "https://example.com" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/infoExtensions.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/infoExtensions.json", + "type": "object", + "description": "The object that lists all the extensions of Info", + "properties": { + "x-x": { + "$ref": "http://asyncapi.com/extensions/x/0.1.0/schema.json" + }, + "x-linkedin": { + "$ref": "http://asyncapi.com/extensions/linkedin/0.1.0/schema.json" + } + } + }, + "http://asyncapi.com/extensions/x/0.1.0/schema.json": { + "$id": "http://asyncapi.com/extensions/x/0.1.0/schema.json", + "type": "string", + "description": "This extension allows you to provide the Twitter username of the account representing the team/company of the API.", + "example": [ + "sambhavgupta75", + "AsyncAPISpec" + ] + }, + "http://asyncapi.com/extensions/linkedin/0.1.0/schema.json": { + "$id": "http://asyncapi.com/extensions/linkedin/0.1.0/schema.json", + "type": "string", + "pattern": "^http(s)?://(www\\.)?linkedin\\.com.*$", + "description": "This extension allows you to provide the Linkedin profile URL of the account representing the team/company of the API.", + "example": [ + "https://www.linkedin.com/company/asyncapi/", + "https://www.linkedin.com/in/sambhavgupta0705/" + ] + }, + "http://asyncapi.com/definitions/3.0.0/servers.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/servers.json", + "description": "An object representing multiple servers.", + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/server.json" + } + ] + }, + "examples": [ + { + "development": { + "host": 
"localhost:5672", + "description": "Development AMQP broker.", + "protocol": "amqp", + "protocolVersion": "0-9-1", + "tags": [ + { + "name": "env:development", + "description": "This environment is meant for developers to run their own tests." + } + ] + }, + "staging": { + "host": "rabbitmq-staging.in.mycompany.com:5672", + "description": "RabbitMQ broker for the staging environment.", + "protocol": "amqp", + "protocolVersion": "0-9-1", + "tags": [ + { + "name": "env:staging", + "description": "This environment is a replica of the production environment." + } + ] + }, + "production": { + "host": "rabbitmq.in.mycompany.com:5672", + "description": "RabbitMQ broker for the production environment.", + "protocol": "amqp", + "protocolVersion": "0-9-1", + "tags": [ + { + "name": "env:production", + "description": "This environment is the live environment available for final users." + } + ] + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/server.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/server.json", + "type": "object", + "description": "An object representing a message broker, a server or any other kind of computer program capable of sending and/or receiving data.", + "required": [ + "host", + "protocol" + ], + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "host": { + "type": "string", + "description": "The server host name. It MAY include the port. This field supports Server Variables. Variable substitutions will be made when a variable is named in {braces}." + }, + "pathname": { + "type": "string", + "description": "The path to a resource in the host. This field supports Server Variables. Variable substitutions will be made when a variable is named in {braces}." + }, + "title": { + "type": "string", + "description": "A human-friendly title for the server." 
+ }, + "summary": { + "type": "string", + "description": "A brief summary of the server." + }, + "description": { + "type": "string", + "description": "A longer description of the server. CommonMark is allowed." + }, + "protocol": { + "type": "string", + "description": "The protocol this server supports for connection." + }, + "protocolVersion": { + "type": "string", + "description": "An optional string describing the server. CommonMark syntax MAY be used for rich text representation." + }, + "variables": { + "$ref": "http://asyncapi.com/definitions/3.0.0/serverVariables.json" + }, + "security": { + "$ref": "http://asyncapi.com/definitions/3.0.0/securityRequirements.json" + }, + "tags": { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/tag.json" + } + ] + }, + "uniqueItems": true + }, + "externalDocs": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/externalDocs.json" + } + ] + }, + "bindings": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/serverBindingsObject.json" + } + ] + } + }, + "examples": [ + { + "host": "kafka.in.mycompany.com:9092", + "description": "Production Kafka broker.", + "protocol": "kafka", + "protocolVersion": "3.2" + }, + { + "host": "rabbitmq.in.mycompany.com:5672", + "pathname": "/production", + "protocol": "amqp", + "description": "Production RabbitMQ broker (uses the `production` vhost)." 
+ } + ] + }, + "http://asyncapi.com/definitions/3.0.0/serverVariables.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/serverVariables.json", + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/serverVariable.json" + } + ] + } + }, + "http://asyncapi.com/definitions/3.0.0/serverVariable.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/serverVariable.json", + "type": "object", + "description": "An object representing a Server Variable for server URL template substitution.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "enum": { + "type": "array", + "description": "An enumeration of string values to be used if the substitution options are from a limited set.", + "items": { + "type": "string" + }, + "uniqueItems": true + }, + "default": { + "type": "string", + "description": "The default value to use for substitution, and to send, if an alternate value is not supplied." + }, + "description": { + "type": "string", + "description": "An optional description for the server variable. CommonMark syntax MAY be used for rich text representation." + }, + "examples": { + "type": "array", + "description": "An array of examples of the server variable.", + "items": { + "type": "string" + } + } + }, + "examples": [ + { + "host": "rabbitmq.in.mycompany.com:5672", + "pathname": "/{env}", + "protocol": "amqp", + "description": "RabbitMQ broker. Use the `env` variable to point to either `production` or `staging`.", + "variables": { + "env": { + "description": "Environment to connect to. 
It can be either `production` or `staging`.", + "enum": [ + "production", + "staging" + ] + } + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/securityRequirements.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/securityRequirements.json", + "description": "An array representing security requirements.", + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/SecurityScheme.json" + } + ] + } + }, + "http://asyncapi.com/definitions/3.0.0/SecurityScheme.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/SecurityScheme.json", + "description": "Defines a security scheme that can be used by the operations.", + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/userPassword.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/apiKey.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/X509.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/symmetricEncryption.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/asymmetricEncryption.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/HTTPSecurityScheme.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/oauth2Flows.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/openIdConnect.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/SaslSecurityScheme.json" + } + ], + "examples": [ + { + "type": "userPassword" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/userPassword.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/userPassword.json", + "type": "object", + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "userPassword" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + 
"additionalProperties": false, + "examples": [ + { + "type": "userPassword" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/apiKey.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/apiKey.json", + "type": "object", + "required": [ + "type", + "in" + ], + "properties": { + "type": { + "type": "string", + "description": "The type of the security scheme", + "enum": [ + "apiKey" + ] + }, + "in": { + "type": "string", + "description": " The location of the API key.", + "enum": [ + "user", + "password" + ] + }, + "description": { + "type": "string", + "description": "A short description for security scheme. CommonMark syntax MAY be used for rich text representation." + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false, + "examples": [ + { + "type": "apiKey", + "in": "user" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/X509.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/X509.json", + "type": "object", + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "X509" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false, + "examples": [ + { + "type": "X509" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/symmetricEncryption.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/symmetricEncryption.json", + "type": "object", + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "enum": [ + "symmetricEncryption" + ] + }, + "description": { + "type": "string" + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false, + "examples": [ + { + "type": 
"symmetricEncryption" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/asymmetricEncryption.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/asymmetricEncryption.json", + "type": "object", + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "description": "The type of the security scheme.", + "enum": [ + "asymmetricEncryption" + ] + }, + "description": { + "type": "string", + "description": "A short description for security scheme." + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false + }, + "http://asyncapi.com/definitions/3.0.0/HTTPSecurityScheme.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/HTTPSecurityScheme.json", + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/NonBearerHTTPSecurityScheme.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/BearerHTTPSecurityScheme.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/APIKeyHTTPSecurityScheme.json" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/NonBearerHTTPSecurityScheme.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/NonBearerHTTPSecurityScheme.json", + "not": { + "type": "object", + "properties": { + "scheme": { + "type": "string", + "description": "A short description for security scheme.", + "enum": [ + "bearer" + ] + } + } + }, + "type": "object", + "required": [ + "scheme", + "type" + ], + "properties": { + "scheme": { + "type": "string", + "description": "The name of the HTTP Authorization scheme to be used in the Authorization header as defined in RFC7235." + }, + "description": { + "type": "string", + "description": "A short description for security scheme." 
+ }, + "type": { + "type": "string", + "description": "The type of the security scheme.", + "enum": [ + "http" + ] + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false + }, + "http://asyncapi.com/definitions/3.0.0/BearerHTTPSecurityScheme.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/BearerHTTPSecurityScheme.json", + "type": "object", + "required": [ + "type", + "scheme" + ], + "properties": { + "scheme": { + "type": "string", + "description": "The name of the HTTP Authorization scheme to be used in the Authorization header as defined in RFC7235.", + "enum": [ + "bearer" + ] + }, + "bearerFormat": { + "type": "string", + "description": "A hint to the client to identify how the bearer token is formatted. Bearer tokens are usually generated by an authorization server, so this information is primarily for documentation purposes." + }, + "type": { + "type": "string", + "description": "The type of the security scheme.", + "enum": [ + "http" + ] + }, + "description": { + "type": "string", + "description": "A short description for security scheme. CommonMark syntax MAY be used for rich text representation." + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false + }, + "http://asyncapi.com/definitions/3.0.0/APIKeyHTTPSecurityScheme.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/APIKeyHTTPSecurityScheme.json", + "type": "object", + "required": [ + "type", + "name", + "in" + ], + "properties": { + "type": { + "type": "string", + "description": "The type of the security scheme.", + "enum": [ + "httpApiKey" + ] + }, + "name": { + "type": "string", + "description": "The name of the header, query or cookie parameter to be used." 
+ }, + "in": { + "type": "string", + "description": "The location of the API key", + "enum": [ + "header", + "query", + "cookie" + ] + }, + "description": { + "type": "string", + "description": "A short description for security scheme. CommonMark syntax MAY be used for rich text representation." + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false, + "examples": [ + { + "type": "httpApiKey", + "name": "api_key", + "in": "header" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/oauth2Flows.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/oauth2Flows.json", + "type": "object", + "description": "Allows configuration of the supported OAuth Flows.", + "required": [ + "type", + "flows" + ], + "properties": { + "type": { + "type": "string", + "description": "The type of the security scheme.", + "enum": [ + "oauth2" + ] + }, + "description": { + "type": "string", + "description": "A short description for security scheme." 
+ }, + "flows": { + "type": "object", + "properties": { + "implicit": { + "description": "Configuration for the OAuth Implicit flow.", + "allOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/oauth2Flow.json" + }, + { + "required": [ + "authorizationUrl", + "availableScopes" + ] + }, + { + "not": { + "required": [ + "tokenUrl" + ] + } + } + ] + }, + "password": { + "description": "Configuration for the OAuth Resource Owner Protected Credentials flow.", + "allOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/oauth2Flow.json" + }, + { + "required": [ + "tokenUrl", + "availableScopes" + ] + }, + { + "not": { + "required": [ + "authorizationUrl" + ] + } + } + ] + }, + "clientCredentials": { + "description": "Configuration for the OAuth Client Credentials flow.", + "allOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/oauth2Flow.json" + }, + { + "required": [ + "tokenUrl", + "availableScopes" + ] + }, + { + "not": { + "required": [ + "authorizationUrl" + ] + } + } + ] + }, + "authorizationCode": { + "description": "Configuration for the OAuth Authorization Code flow.", + "allOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/oauth2Flow.json" + }, + { + "required": [ + "authorizationUrl", + "tokenUrl", + "availableScopes" + ] + } + ] + } + }, + "additionalProperties": false + }, + "scopes": { + "type": "array", + "description": "List of the needed scope names.", + "items": { + "type": "string" + } + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + } + }, + "http://asyncapi.com/definitions/3.0.0/oauth2Flow.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/oauth2Flow.json", + "type": "object", + "description": "Configuration details for a supported OAuth Flow", + "properties": { + "authorizationUrl": { + "type": "string", + "format": "uri", + "description": "The authorization URL to be used for this flow. 
This MUST be in the form of an absolute URL." + }, + "tokenUrl": { + "type": "string", + "format": "uri", + "description": "The token URL to be used for this flow. This MUST be in the form of an absolute URL." + }, + "refreshUrl": { + "type": "string", + "format": "uri", + "description": "The URL to be used for obtaining refresh tokens. This MUST be in the form of an absolute URL." + }, + "availableScopes": { + "$ref": "http://asyncapi.com/definitions/3.0.0/oauth2Scopes.json", + "description": "The available scopes for the OAuth2 security scheme. A map between the scope name and a short description for it." + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false, + "examples": [ + { + "authorizationUrl": "https://example.com/api/oauth/dialog", + "tokenUrl": "https://example.com/api/oauth/token", + "availableScopes": { + "write:pets": "modify pets in your account", + "read:pets": "read your pets" + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/oauth2Scopes.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/oauth2Scopes.json", + "type": "object", + "additionalProperties": { + "type": "string" + } + }, + "http://asyncapi.com/definitions/3.0.0/openIdConnect.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/openIdConnect.json", + "type": "object", + "required": [ + "type", + "openIdConnectUrl" + ], + "properties": { + "type": { + "type": "string", + "description": "The type of the security scheme.", + "enum": [ + "openIdConnect" + ] + }, + "description": { + "type": "string", + "description": "A short description for security scheme. CommonMark syntax MAY be used for rich text representation." + }, + "openIdConnectUrl": { + "type": "string", + "format": "uri", + "description": "OpenId Connect URL to discover OAuth2 configuration values. This MUST be in the form of an absolute URL." 
+ }, + "scopes": { + "type": "array", + "description": "List of the needed scope names. An empty array means no scopes are needed.", + "items": { + "type": "string" + } + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false + }, + "http://asyncapi.com/definitions/3.0.0/SaslSecurityScheme.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/SaslSecurityScheme.json", + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/SaslPlainSecurityScheme.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/SaslScramSecurityScheme.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/SaslGssapiSecurityScheme.json" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/SaslPlainSecurityScheme.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/SaslPlainSecurityScheme.json", + "type": "object", + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "description": "The type of the security scheme. Valid values", + "enum": [ + "plain" + ] + }, + "description": { + "type": "string", + "description": "A short description for security scheme." + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false, + "examples": [ + { + "type": "scramSha512" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/SaslScramSecurityScheme.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/SaslScramSecurityScheme.json", + "type": "object", + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "description": "The type of the security scheme.", + "enum": [ + "scramSha256", + "scramSha512" + ] + }, + "description": { + "type": "string", + "description": "A short description for security scheme." 
+ } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false, + "examples": [ + { + "type": "scramSha512" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/SaslGssapiSecurityScheme.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/SaslGssapiSecurityScheme.json", + "type": "object", + "required": [ + "type" + ], + "properties": { + "type": { + "type": "string", + "description": "The type of the security scheme.", + "enum": [ + "gssapi" + ] + }, + "description": { + "type": "string", + "description": "A short description for security scheme." + } + }, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": false, + "examples": [ + { + "type": "scramSha512" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/serverBindingsObject.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/serverBindingsObject.json", + "type": "object", + "description": "Map describing protocol-specific definitions for a server.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "http": {}, + "ws": {}, + "amqp": {}, + "amqp1": {}, + "mqtt": { + "properties": { + "bindingVersion": { + "enum": [ + "0.2.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/mqtt/0.2.0/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/mqtt/0.2.0/server.json" + } + } + ] + }, + "kafka": { + "properties": { + 
"bindingVersion": { + "enum": [ + "0.5.0", + "0.4.0", + "0.3.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.5.0/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.5.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.5.0/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.4.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.4.0/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.3.0/server.json" + } + } + ] + }, + "anypointmq": {}, + "nats": {}, + "jms": { + "properties": { + "bindingVersion": { + "enum": [ + "0.0.1" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/jms/0.0.1/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.0.1" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/jms/0.0.1/server.json" + } + } + ] + }, + "sns": {}, + "sqs": {}, + "stomp": {}, + "redis": {}, + "ibmmq": { + "properties": { + "bindingVersion": { + "enum": [ + "0.1.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/ibmmq/0.1.0/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + 
"properties": { + "bindingVersion": { + "const": "0.1.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/ibmmq/0.1.0/server.json" + } + } + ] + }, + "solace": { + "properties": { + "bindingVersion": { + "enum": [ + "0.4.0", + "0.3.0", + "0.2.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/solace/0.4.0/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.4.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/solace/0.4.0/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/solace/0.3.0/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/solace/0.2.0/server.json" + } + } + ] + }, + "googlepubsub": {}, + "pulsar": { + "properties": { + "bindingVersion": { + "enum": [ + "0.1.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/pulsar/0.1.0/server.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.1.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/pulsar/0.1.0/server.json" + } + } + ] + } + } + }, + "http://asyncapi.com/bindings/mqtt/0.2.0/server.json": { + "$id": "http://asyncapi.com/bindings/mqtt/0.2.0/server.json", + "title": "Server Schema", + "description": "This object contains information about the server representation in 
MQTT.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "clientId": { + "type": "string", + "description": "The client identifier." + }, + "cleanSession": { + "type": "boolean", + "description": "Whether to create a persistent connection or not. When 'false', the connection will be persistent. This is called clean start in MQTTv5." + }, + "lastWill": { + "type": "object", + "description": "Last Will and Testament configuration.", + "properties": { + "topic": { + "type": "string", + "description": "The topic where the Last Will and Testament message will be sent." + }, + "qos": { + "type": "integer", + "enum": [ + 0, + 1, + 2 + ], + "description": "Defines how hard the broker/client will try to ensure that the Last Will and Testament message is received. Its value MUST be either 0, 1 or 2." + }, + "message": { + "type": "string", + "description": "Last Will message." + }, + "retain": { + "type": "boolean", + "description": "Whether the broker should retain the Last Will and Testament message or not." + } + } + }, + "keepAlive": { + "type": "integer", + "description": "Interval in seconds of the longest period of time the broker and the client can endure without sending a message." + }, + "sessionExpiryInterval": { + "oneOf": [ + { + "type": "integer", + "minimum": 0 + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + ], + "description": "Interval time in seconds or a Schema Object containing the definition of the interval. The broker maintains a session for a disconnected client until this interval expires." 
+ }, + "maximumPacketSize": { + "oneOf": [ + { + "type": "integer", + "minimum": 1, + "maximum": 4294967295 + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + ], + "description": "Number of bytes or a Schema Object representing the Maximum Packet Size the Client is willing to accept." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.2.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "clientId": "guest", + "cleanSession": true, + "lastWill": { + "topic": "/last-wills", + "qos": 2, + "message": "Guest gone offline.", + "retain": false + }, + "keepAlive": 60, + "sessionExpiryInterval": 120, + "maximumPacketSize": 1024, + "bindingVersion": "0.2.0" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/schema.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "The Schema Object allows the definition of input and output data types. These types can be objects, but also primitives and arrays. This object is a superset of the JSON Schema Specification Draft 07. 
The empty schema (which allows any instance to validate) MAY be represented by the boolean value true and a schema which allows no instance to validate MAY be represented by the boolean value false.", + "allOf": [ + { + "$ref": "http://json-schema.org/draft-07/schema#" + }, + { + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "additionalProperties": { + "anyOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "type": "boolean" + } + ], + "default": {} + }, + "items": { + "anyOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "type": "array", + "minItems": 1, + "items": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + } + } + ], + "default": {} + }, + "allOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + } + }, + "oneOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + } + }, + "anyOf": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + } + }, + "not": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + "default": {} + }, + "propertyNames": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + "contains": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + "discriminator": { + "type": "string", + "description": "Adds support for polymorphism. 
The discriminator is the schema property name that is used to differentiate between other schema that inherit this schema. The property name used MUST be defined at this schema and it MUST be in the required property list. When used, the value MUST be the name of this schema or any schema that inherits it. See Composition and Inheritance for more details." + }, + "externalDocs": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/externalDocs.json" + } + ] + }, + "deprecated": { + "type": "boolean", + "description": "Specifies that a schema is deprecated and SHOULD be transitioned out of usage. Default value is false.", + "default": false + } + } + } + ] + }, + "http://json-schema.org/draft-07/schema": { + "$id": "http://json-schema.org/draft-07/schema", + "title": "Core schema meta-schema", + "definitions": { + "schemaArray": { + "type": "array", + "minItems": 1, + "items": { + "$ref": "#" + } + }, + "nonNegativeInteger": { + "type": "integer", + "minimum": 0 + }, + "nonNegativeIntegerDefault0": { + "allOf": [ + { + "$ref": "#/definitions/nonNegativeInteger" + }, + { + "default": 0 + } + ] + }, + "simpleTypes": { + "enum": [ + "array", + "boolean", + "integer", + "null", + "number", + "object", + "string" + ] + }, + "stringArray": { + "type": "array", + "items": { + "type": "string" + }, + "uniqueItems": true, + "default": [] + } + }, + "type": [ + "object", + "boolean" + ], + "properties": { + "$id": { + "type": "string", + "format": "uri-reference" + }, + "$schema": { + "type": "string", + "format": "uri" + }, + "$ref": { + "type": "string", + "format": "uri-reference" + }, + "$comment": { + "type": "string" + }, + "title": { + "type": "string" + }, + "description": { + "type": "string" + }, + "default": true, + "readOnly": { + "type": "boolean", + "default": false + }, + "writeOnly": { + "type": "boolean", + "default": false + }, + "examples": { + "type": "array", + 
"items": true + }, + "multipleOf": { + "type": "number", + "exclusiveMinimum": 0 + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "number" + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "number" + }, + "maxLength": { + "$ref": "#/definitions/nonNegativeInteger" + }, + "minLength": { + "$ref": "#/definitions/nonNegativeIntegerDefault0" + }, + "pattern": { + "type": "string", + "format": "regex" + }, + "additionalItems": { + "$ref": "#" + }, + "items": { + "anyOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/schemaArray" + } + ], + "default": true + }, + "maxItems": { + "$ref": "#/definitions/nonNegativeInteger" + }, + "minItems": { + "$ref": "#/definitions/nonNegativeIntegerDefault0" + }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "contains": { + "$ref": "#" + }, + "maxProperties": { + "$ref": "#/definitions/nonNegativeInteger" + }, + "minProperties": { + "$ref": "#/definitions/nonNegativeIntegerDefault0" + }, + "required": { + "$ref": "#/definitions/stringArray" + }, + "additionalProperties": { + "$ref": "#" + }, + "definitions": { + "type": "object", + "additionalProperties": { + "$ref": "#" + }, + "default": {} + }, + "properties": { + "type": "object", + "additionalProperties": { + "$ref": "#" + }, + "default": {} + }, + "patternProperties": { + "type": "object", + "additionalProperties": { + "$ref": "#" + }, + "propertyNames": { + "format": "regex" + }, + "default": {} + }, + "dependencies": { + "type": "object", + "additionalProperties": { + "anyOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/stringArray" + } + ] + } + }, + "propertyNames": { + "$ref": "#" + }, + "const": true, + "enum": { + "type": "array", + "items": true, + "minItems": 1, + "uniqueItems": true + }, + "type": { + "anyOf": [ + { + "$ref": "#/definitions/simpleTypes" + }, + { + "type": "array", + "items": { + "$ref": "#/definitions/simpleTypes" + }, + "minItems": 1, + "uniqueItems": true + } 
+ ] + }, + "format": { + "type": "string" + }, + "contentMediaType": { + "type": "string" + }, + "contentEncoding": { + "type": "string" + }, + "if": { + "$ref": "#" + }, + "then": { + "$ref": "#" + }, + "else": { + "$ref": "#" + }, + "allOf": { + "$ref": "#/definitions/schemaArray" + }, + "anyOf": { + "$ref": "#/definitions/schemaArray" + }, + "oneOf": { + "$ref": "#/definitions/schemaArray" + }, + "not": { + "$ref": "#" + } + }, + "default": true + }, + "http://asyncapi.com/bindings/kafka/0.5.0/server.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.5.0/server.json", + "title": "Server Schema", + "description": "This object contains server connection information to a Kafka broker. This object contains additional information not possible to represent within the core AsyncAPI specification.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "schemaRegistryUrl": { + "type": "string", + "description": "API URL for the Schema Registry used when producing Kafka messages (if a Schema Registry was used)." + }, + "schemaRegistryVendor": { + "type": "string", + "description": "The vendor of the Schema Registry and Kafka serdes library that should be used." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.5.0" + ], + "description": "The version of this binding." + } + }, + "examples": [ + { + "schemaRegistryUrl": "https://my-schema-registry.com", + "schemaRegistryVendor": "confluent", + "bindingVersion": "0.5.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.4.0/server.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.4.0/server.json", + "title": "Server Schema", + "description": "This object contains server connection information to a Kafka broker. 
This object contains additional information not possible to represent within the core AsyncAPI specification.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "schemaRegistryUrl": { + "type": "string", + "description": "API URL for the Schema Registry used when producing Kafka messages (if a Schema Registry was used)." + }, + "schemaRegistryVendor": { + "type": "string", + "description": "The vendor of the Schema Registry and Kafka serdes library that should be used." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.4.0" + ], + "description": "The version of this binding." + } + }, + "examples": [ + { + "schemaRegistryUrl": "https://my-schema-registry.com", + "schemaRegistryVendor": "confluent", + "bindingVersion": "0.4.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.3.0/server.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.3.0/server.json", + "title": "Server Schema", + "description": "This object contains server connection information to a Kafka broker. This object contains additional information not possible to represent within the core AsyncAPI specification.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "schemaRegistryUrl": { + "type": "string", + "description": "API URL for the Schema Registry used when producing Kafka messages (if a Schema Registry was used)." + }, + "schemaRegistryVendor": { + "type": "string", + "description": "The vendor of the Schema Registry and Kafka serdes library that should be used." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding." 
+ } + }, + "examples": [ + { + "schemaRegistryUrl": "https://my-schema-registry.com", + "schemaRegistryVendor": "confluent", + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/jms/0.0.1/server.json": { + "$id": "http://asyncapi.com/bindings/jms/0.0.1/server.json", + "title": "Server Schema", + "description": "This object contains configuration for describing a JMS broker as an AsyncAPI server. This objects only contains configuration that can not be provided in the AsyncAPI standard server object.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "required": [ + "jmsConnectionFactory" + ], + "properties": { + "jmsConnectionFactory": { + "type": "string", + "description": "The classname of the ConnectionFactory implementation for the JMS Provider." + }, + "properties": { + "type": "array", + "items": { + "$ref": "http://asyncapi.com/bindings/jms/0.0.1/server.json#/definitions/property" + }, + "description": "Additional properties to set on the JMS ConnectionFactory implementation for the JMS Provider." + }, + "clientID": { + "type": "string", + "description": "A client identifier for applications that use this JMS connection factory. If the Client ID Policy is set to 'Restricted' (the default), then configuring a Client ID on the ConnectionFactory prevents more than one JMS client from using a connection from this factory." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.0.1" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." 
+ } + }, + "definitions": { + "property": { + "type": "object", + "required": [ + "name", + "value" + ], + "properties": { + "name": { + "type": "string", + "description": "The name of a property" + }, + "value": { + "type": [ + "string", + "boolean", + "number", + "null" + ], + "description": "The name of a property" + } + } + } + }, + "examples": [ + { + "jmsConnectionFactory": "org.apache.activemq.ActiveMQConnectionFactory", + "properties": [ + { + "name": "disableTimeStampsByDefault", + "value": false + } + ], + "clientID": "my-application-1", + "bindingVersion": "0.0.1" + } + ] + }, + "http://asyncapi.com/bindings/ibmmq/0.1.0/server.json": { + "$id": "http://asyncapi.com/bindings/ibmmq/0.1.0/server.json", + "title": "IBM MQ server bindings object", + "description": "This object contains server connection information about the IBM MQ server, referred to as an IBM MQ queue manager. This object contains additional connectivity information not possible to represent within the core AsyncAPI specification.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "groupId": { + "type": "string", + "description": "Defines a logical group of IBM MQ server objects. This is necessary to specify multi-endpoint configurations used in high availability deployments. If omitted, the server object is not part of a group." + }, + "ccdtQueueManagerName": { + "type": "string", + "default": "*", + "description": "The name of the IBM MQ queue manager to bind to in the CCDT file." + }, + "cipherSpec": { + "type": "string", + "description": "The recommended cipher specification used to establish a TLS connection between the client and the IBM MQ queue manager. More information on SSL/TLS cipher specifications supported by IBM MQ can be found on this page in the IBM MQ Knowledge Center." 
+ }, + "multiEndpointServer": { + "type": "boolean", + "default": false, + "description": "If 'multiEndpointServer' is 'true' then multiple connections can be workload balanced and applications should not make assumptions as to where messages are processed. Where message ordering, or affinity to specific message resources is necessary, a single endpoint ('multiEndpointServer' = 'false') may be required." + }, + "heartBeatInterval": { + "type": "integer", + "minimum": 0, + "maximum": 999999, + "default": 300, + "description": "The recommended value (in seconds) for the heartbeat sent to the queue manager during periods of inactivity. A value of zero means that no heart beats are sent. A value of 1 means that the client will use the value defined by the queue manager. More information on heart beat interval can be found on this page in the IBM MQ Knowledge Center." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.1.0" + ], + "description": "The version of this binding." + } + }, + "examples": [ + { + "groupId": "PRODCLSTR1", + "cipherSpec": "ANY_TLS12_OR_HIGHER", + "bindingVersion": "0.1.0" + }, + { + "groupId": "PRODCLSTR1", + "bindingVersion": "0.1.0" + } + ] + }, + "http://asyncapi.com/bindings/solace/0.4.0/server.json": { + "$id": "http://asyncapi.com/bindings/solace/0.4.0/server.json", + "title": "Solace server bindings object", + "description": "This object contains server connection information about the Solace broker. This object contains additional connectivity information not possible to represent within the core AsyncAPI specification.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "msgVpn": { + "type": "string", + "description": "The name of the Virtual Private Network to connect to on the Solace broker." 
+ }, + "clientName": { + "type": "string", + "minLength": 1, + "maxLength": 160, + "description": "A unique client name to use to register to the appliance. If specified, it must be a valid Topic name, and a maximum of 160 bytes in length when encoded as UTF-8." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.4.0" + ], + "description": "The version of this binding." + } + }, + "examples": [ + { + "msgVpn": "ProdVPN", + "bindingVersion": "0.4.0" + } + ] + }, + "http://asyncapi.com/bindings/solace/0.3.0/server.json": { + "$id": "http://asyncapi.com/bindings/solace/0.3.0/server.json", + "title": "Solace server bindings object", + "description": "This object contains server connection information about the Solace broker. This object contains additional connectivity information not possible to represent within the core AsyncAPI specification.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "msgVpn": { + "type": "string", + "description": "The name of the Virtual Private Network to connect to on the Solace broker." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding." + } + }, + "examples": [ + { + "msgVpn": "ProdVPN", + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/solace/0.2.0/server.json": { + "$id": "http://asyncapi.com/bindings/solace/0.2.0/server.json", + "title": "Solace server bindings object", + "description": "This object contains server connection information about the Solace broker. 
This object contains additional connectivity information not possible to represent within the core AsyncAPI specification.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "msvVpn": { + "type": "string", + "description": "The name of the Virtual Private Network to connect to on the Solace broker." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.2.0" + ], + "description": "The version of this binding." + } + }, + "examples": [ + { + "msgVpn": "ProdVPN", + "bindingVersion": "0.2.0" + } + ] + }, + "http://asyncapi.com/bindings/pulsar/0.1.0/server.json": { + "$id": "http://asyncapi.com/bindings/pulsar/0.1.0/server.json", + "title": "Server Schema", + "description": "This object contains server information of Pulsar broker, which covers cluster and tenant admin configuration. This object contains additional information not possible to represent within the core AsyncAPI specification.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "tenant": { + "type": "string", + "description": "The pulsar tenant. If omitted, 'public' MUST be assumed." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.1.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." 
+ } + }, + "examples": [ + { + "tenant": "contoso", + "bindingVersion": "0.1.0" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/channels.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/channels.json", + "type": "object", + "description": "An object containing all the Channel Object definitions the Application MUST use during runtime.", + "additionalProperties": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/channel.json" + } + ] + }, + "examples": [ + { + "userSignedUp": { + "address": "user.signedup", + "messages": { + "userSignedUp": { + "$ref": "#/components/messages/userSignedUp" + } + } + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/channel.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/channel.json", + "type": "object", + "description": "Describes a shared communication channel.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "address": { + "type": [ + "string", + "null" + ], + "description": "An optional string representation of this channel's address. The address is typically the \"topic name\", \"routing key\", \"event type\", or \"path\". When `null` or absent, it MUST be interpreted as unknown. This is useful when the address is generated dynamically at runtime or can't be known upfront. It MAY contain Channel Address Expressions." + }, + "messages": { + "$ref": "http://asyncapi.com/definitions/3.0.0/channelMessages.json" + }, + "parameters": { + "$ref": "http://asyncapi.com/definitions/3.0.0/parameters.json" + }, + "title": { + "type": "string", + "description": "A human-friendly title for the channel." + }, + "summary": { + "type": "string", + "description": "A brief summary of the channel." 
+ }, + "description": { + "type": "string", + "description": "A longer description of the channel. CommonMark is allowed." + }, + "servers": { + "type": "array", + "description": "The references of the servers on which this channel is available. If absent or empty then this channel must be available on all servers.", + "items": { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + "uniqueItems": true + }, + "tags": { + "type": "array", + "description": "A list of tags for logical grouping of channels.", + "items": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/tag.json" + } + ] + }, + "uniqueItems": true + }, + "externalDocs": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/externalDocs.json" + } + ] + }, + "bindings": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/channelBindingsObject.json" + } + ] + } + }, + "examples": [ + { + "address": "users.{userId}", + "title": "Users channel", + "description": "This channel is used to exchange messages about user events.", + "messages": { + "userSignedUp": { + "$ref": "#/components/messages/userSignedUp" + }, + "userCompletedOrder": { + "$ref": "#/components/messages/userCompletedOrder" + } + }, + "parameters": { + "userId": { + "$ref": "#/components/parameters/userId" + } + }, + "servers": [ + { + "$ref": "#/servers/rabbitmqInProd" + }, + { + "$ref": "#/servers/rabbitmqInStaging" + } + ], + "bindings": { + "amqp": { + "is": "queue", + "queue": { + "exclusive": true + } + } + }, + "tags": [ + { + "name": "user", + "description": "User-related messages" + } + ], + "externalDocs": { + "description": "Find more info here", + "url": "https://example.com" + } + } + ] + }, + 
"http://asyncapi.com/definitions/3.0.0/channelMessages.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/channelMessages.json", + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageObject.json" + } + ] + }, + "description": "A map of the messages that will be sent to this channel by any application at any time. **Every message sent to this channel MUST be valid against one, and only one, of the message objects defined in this map.**" + }, + "http://asyncapi.com/definitions/3.0.0/messageObject.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/messageObject.json", + "type": "object", + "description": "Describes a message received on a given channel and operation.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "contentType": { + "type": "string", + "description": "The content type to use when encoding/decoding a message's payload. The value MUST be a specific media type (e.g. application/json). When omitted, the value MUST be the one specified on the defaultContentType field." + }, + "headers": { + "$ref": "http://asyncapi.com/definitions/3.0.0/anySchema.json" + }, + "payload": { + "$ref": "http://asyncapi.com/definitions/3.0.0/anySchema.json" + }, + "correlationId": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/correlationId.json" + } + ] + }, + "tags": { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/tag.json" + } + ] + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the message." 
+ }, + "name": { + "type": "string", + "description": "Name of the message." + }, + "title": { + "type": "string", + "description": "A human-friendly title for the message." + }, + "description": { + "type": "string", + "description": "A longer description of the message. CommonMark is allowed." + }, + "externalDocs": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/externalDocs.json" + } + ] + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "examples": { + "type": "array", + "description": "List of examples.", + "items": { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageExampleObject.json" + } + }, + "bindings": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageBindingsObject.json" + } + ] + }, + "traits": { + "type": "array", + "description": "A list of traits to apply to the message object. Traits MUST be merged using traits merge mechanism. 
The resulting object MUST be a valid Message Object.", + "items": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageTrait.json" + }, + { + "type": "array", + "items": [ + { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageTrait.json" + } + ] + }, + { + "type": "object", + "additionalItems": true + } + ] + } + ] + } + } + }, + "examples": [ + { + "messageId": "userSignup", + "name": "UserSignup", + "title": "User signup", + "summary": "Action to sign a user up.", + "description": "A longer description", + "contentType": "application/json", + "tags": [ + { + "name": "user" + }, + { + "name": "signup" + }, + { + "name": "register" + } + ], + "headers": { + "type": "object", + "properties": { + "correlationId": { + "description": "Correlation ID set by application", + "type": "string" + }, + "applicationInstanceId": { + "description": "Unique identifier for a given instance of the publishing application", + "type": "string" + } + } + }, + "payload": { + "type": "object", + "properties": { + "user": { + "$ref": "#/components/schemas/userCreate" + }, + "signup": { + "$ref": "#/components/schemas/signup" + } + } + }, + "correlationId": { + "description": "Default Correlation ID", + "location": "$message.header#/correlationId" + }, + "traits": [ + { + "$ref": "#/components/messageTraits/commonHeaders" + } + ], + "examples": [ + { + "name": "SimpleSignup", + "summary": "A simple UserSignup example message", + "headers": { + "correlationId": "my-correlation-id", + "applicationInstanceId": "myInstanceId" + }, + "payload": { + "user": { + "someUserKey": "someUserValue" + }, + "signup": { + "someSignupKey": "someSignupValue" + } + } + } + ] + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/anySchema.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/anySchema.json", + "if": 
{ + "required": [ + "schema" + ] + }, + "then": { + "$ref": "http://asyncapi.com/definitions/3.0.0/multiFormatSchema.json" + }, + "else": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + "description": "An object representing either a schema or a multiFormatSchema based on the existence of the 'schema' property. If the property 'schema' is present, use the multi-format schema. Use the default AsyncAPI Schema otherwise." + }, + "http://asyncapi.com/definitions/3.0.0/multiFormatSchema.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/multiFormatSchema.json", + "description": "The Multi Format Schema Object represents a schema definition. It differs from the Schema Object in that it supports multiple schema formats or languages (e.g., JSON Schema, Avro, etc.).", + "type": "object", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "if": { + "not": { + "type": "object" + } + }, + "then": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + "else": { + "properties": { + "schemaFormat": { + "description": "A string containing the name of the schema format that is used to define the information. If schemaFormat is missing, it MUST default to application/vnd.aai.asyncapi+json;version={{asyncapi}} where {{asyncapi}} matches the AsyncAPI Version String. In such a case, this would make the Multi Format Schema Object equivalent to the Schema Object. When using Reference Object within the schema, the schemaFormat of the resource being referenced MUST match the schemaFormat of the schema that contains the initial reference. 
For example, if you reference Avro schema, then schemaFormat of referencing resource and the resource being reference MUST match.", + "anyOf": [ + { + "type": "string" + }, + { + "description": "All the schema formats tooling MUST support", + "enum": [ + "application/schema+json;version=draft-07", + "application/schema+yaml;version=draft-07", + "application/vnd.aai.asyncapi;version=3.0.0", + "application/vnd.aai.asyncapi+json;version=3.0.0", + "application/vnd.aai.asyncapi+yaml;version=3.0.0" + ] + }, + { + "description": "All the schema formats tools are RECOMMENDED to support", + "enum": [ + "application/vnd.oai.openapi;version=3.0.0", + "application/vnd.oai.openapi+json;version=3.0.0", + "application/vnd.oai.openapi+yaml;version=3.0.0", + "application/vnd.apache.avro;version=1.9.0", + "application/vnd.apache.avro+json;version=1.9.0", + "application/vnd.apache.avro+yaml;version=1.9.0", + "application/raml+yaml;version=1.0" + ] + } + ] + } + }, + "allOf": [ + { + "if": { + "not": { + "description": "If no schemaFormat has been defined, default to schema or reference", + "required": [ + "schemaFormat" + ] + } + }, + "then": { + "properties": { + "schema": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + } + } + } + }, + { + "if": { + "description": "If schemaFormat has been defined check if it's one of the AsyncAPI Schema Object formats", + "required": [ + "schemaFormat" + ], + "properties": { + "schemaFormat": { + "enum": [ + "application/vnd.aai.asyncapi;version=2.0.0", + "application/vnd.aai.asyncapi+json;version=2.0.0", + "application/vnd.aai.asyncapi+yaml;version=2.0.0", + "application/vnd.aai.asyncapi;version=2.1.0", + "application/vnd.aai.asyncapi+json;version=2.1.0", + "application/vnd.aai.asyncapi+yaml;version=2.1.0", + "application/vnd.aai.asyncapi;version=2.2.0", + "application/vnd.aai.asyncapi+json;version=2.2.0", + "application/vnd.aai.asyncapi+yaml;version=2.2.0", + "application/vnd.aai.asyncapi;version=2.3.0", + 
"application/vnd.aai.asyncapi+json;version=2.3.0", + "application/vnd.aai.asyncapi+yaml;version=2.3.0", + "application/vnd.aai.asyncapi;version=2.4.0", + "application/vnd.aai.asyncapi+json;version=2.4.0", + "application/vnd.aai.asyncapi+yaml;version=2.4.0", + "application/vnd.aai.asyncapi;version=2.5.0", + "application/vnd.aai.asyncapi+json;version=2.5.0", + "application/vnd.aai.asyncapi+yaml;version=2.5.0", + "application/vnd.aai.asyncapi;version=2.6.0", + "application/vnd.aai.asyncapi+json;version=2.6.0", + "application/vnd.aai.asyncapi+yaml;version=2.6.0", + "application/vnd.aai.asyncapi;version=3.0.0", + "application/vnd.aai.asyncapi+json;version=3.0.0", + "application/vnd.aai.asyncapi+yaml;version=3.0.0" + ] + } + } + }, + "then": { + "properties": { + "schema": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + } + } + } + }, + { + "if": { + "required": [ + "schemaFormat" + ], + "properties": { + "schemaFormat": { + "enum": [ + "application/schema+json;version=draft-07", + "application/schema+yaml;version=draft-07" + ] + } + } + }, + "then": { + "properties": { + "schema": { + "$ref": "http://json-schema.org/draft-07/schema" + } + } + } + }, + { + "if": { + "required": [ + "schemaFormat" + ], + "properties": { + "schemaFormat": { + "enum": [ + "application/vnd.oai.openapi;version=3.0.0", + "application/vnd.oai.openapi+json;version=3.0.0", + "application/vnd.oai.openapi+yaml;version=3.0.0" + ] + } + } + }, + "then": { + "properties": { + "schema": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/openapiSchema_3_0.json" + } + ] + } + } + } + }, + { + "if": { + "required": [ + "schemaFormat" + ], + "properties": { + "schemaFormat": { + "enum": [ + "application/vnd.apache.avro;version=1.9.0", + "application/vnd.apache.avro+json;version=1.9.0", + "application/vnd.apache.avro+yaml;version=1.9.0" + ] + } + } + }, + "then": { + "properties": { + "schema": { + 
"oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/avroSchema_v1.json" + } + ] + } + } + } + } + ] + } + }, + "http://asyncapi.com/definitions/3.0.0/openapiSchema_3_0.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/openapiSchema_3_0.json", + "type": "object", + "definitions": { + "ExternalDocumentation": { + "type": "object", + "required": [ + "url" + ], + "properties": { + "description": { + "type": "string" + }, + "url": { + "type": "string", + "format": "uri-reference" + } + }, + "patternProperties": { + "^x-": {} + }, + "additionalProperties": false + }, + "Discriminator": { + "type": "object", + "required": [ + "propertyName" + ], + "properties": { + "propertyName": { + "type": "string" + }, + "mapping": { + "type": "object", + "additionalProperties": { + "type": "string" + } + } + } + }, + "Reference": { + "type": "object", + "required": [ + "$ref" + ], + "patternProperties": { + "^\\$ref$": { + "type": "string", + "format": "uri-reference" + } + } + }, + "XML": { + "type": "object", + "properties": { + "name": { + "type": "string" + }, + "namespace": { + "type": "string", + "format": "uri" + }, + "prefix": { + "type": "string" + }, + "attribute": { + "type": "boolean", + "default": false + }, + "wrapped": { + "type": "boolean", + "default": false + } + }, + "patternProperties": { + "^x-": {} + }, + "additionalProperties": false + } + }, + "properties": { + "title": { + "type": "string" + }, + "multipleOf": { + "type": "number", + "exclusiveMinimum": 0 + }, + "maximum": { + "type": "number" + }, + "exclusiveMaximum": { + "type": "boolean", + "default": false + }, + "minimum": { + "type": "number" + }, + "exclusiveMinimum": { + "type": "boolean", + "default": false + }, + "maxLength": { + "type": "integer", + "minimum": 0 + }, + "minLength": { + "type": "integer", + "minimum": 0, + "default": 0 + }, + "pattern": { + "type": "string", + "format": "regex" + }, + 
"maxItems": { + "type": "integer", + "minimum": 0 + }, + "minItems": { + "type": "integer", + "minimum": 0, + "default": 0 + }, + "uniqueItems": { + "type": "boolean", + "default": false + }, + "maxProperties": { + "type": "integer", + "minimum": 0 + }, + "minProperties": { + "type": "integer", + "minimum": 0, + "default": 0 + }, + "required": { + "type": "array", + "items": { + "type": "string" + }, + "minItems": 1, + "uniqueItems": true + }, + "enum": { + "type": "array", + "items": true, + "minItems": 1, + "uniqueItems": false + }, + "type": { + "type": "string", + "enum": [ + "array", + "boolean", + "integer", + "number", + "object", + "string" + ] + }, + "not": { + "oneOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/Reference" + } + ] + }, + "allOf": { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/Reference" + } + ] + } + }, + "oneOf": { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/Reference" + } + ] + } + }, + "anyOf": { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/Reference" + } + ] + } + }, + "items": { + "oneOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/Reference" + } + ] + }, + "properties": { + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/Reference" + } + ] + } + }, + "additionalProperties": { + "oneOf": [ + { + "$ref": "#" + }, + { + "$ref": "#/definitions/Reference" + }, + { + "type": "boolean" + } + ], + "default": true + }, + "description": { + "type": "string" + }, + "format": { + "type": "string" + }, + "default": true, + "nullable": { + "type": "boolean", + "default": false + }, + "discriminator": { + "$ref": "#/definitions/Discriminator" + }, + "readOnly": { + "type": "boolean", + "default": false + }, + "writeOnly": { + "type": "boolean", + "default": false + }, + "example": true, + "externalDocs": { + 
"$ref": "#/definitions/ExternalDocumentation" + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "xml": { + "$ref": "#/definitions/XML" + } + }, + "patternProperties": { + "^x-": true + }, + "additionalProperties": false + }, + "http://asyncapi.com/definitions/3.0.0/avroSchema_v1.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/avroSchema_v1.json", + "definitions": { + "avroSchema": { + "title": "Avro Schema", + "description": "Root Schema", + "oneOf": [ + { + "$ref": "#/definitions/types" + } + ] + }, + "types": { + "title": "Avro Types", + "description": "Allowed Avro types", + "oneOf": [ + { + "$ref": "#/definitions/primitiveType" + }, + { + "$ref": "#/definitions/primitiveTypeWithMetadata" + }, + { + "$ref": "#/definitions/customTypeReference" + }, + { + "$ref": "#/definitions/avroRecord" + }, + { + "$ref": "#/definitions/avroEnum" + }, + { + "$ref": "#/definitions/avroArray" + }, + { + "$ref": "#/definitions/avroMap" + }, + { + "$ref": "#/definitions/avroFixed" + }, + { + "$ref": "#/definitions/avroUnion" + } + ] + }, + "primitiveType": { + "title": "Primitive Type", + "description": "Basic type primitives.", + "type": "string", + "enum": [ + "null", + "boolean", + "int", + "long", + "float", + "double", + "bytes", + "string" + ] + }, + "primitiveTypeWithMetadata": { + "title": "Primitive Type With Metadata", + "description": "A primitive type with metadata attached.", + "type": "object", + "properties": { + "type": { + "$ref": "#/definitions/primitiveType" + } + }, + "required": [ + "type" + ] + }, + "customTypeReference": { + "title": "Custom Type", + "description": "Reference to a ComplexType", + "not": { + "$ref": "#/definitions/primitiveType" + }, + "type": "string", + "pattern": "^[A-Za-z_][A-Za-z0-9_]*(\\.[A-Za-z_][A-Za-z0-9_]*)*$" + }, + "avroUnion": { + "title": "Union", + "description": "A Union of types", + "type": "array", + "items": { + "$ref": "#/definitions/avroSchema" + }, + "minItems": 1 + }, + "avroField": { + 
"title": "Field", + "description": "A field within a Record", + "type": "object", + "properties": { + "name": { + "$ref": "#/definitions/name" + }, + "type": { + "$ref": "#/definitions/types" + }, + "doc": { + "type": "string" + }, + "default": true, + "order": { + "enum": [ + "ascending", + "descending", + "ignore" + ] + }, + "aliases": { + "type": "array", + "items": { + "$ref": "#/definitions/name" + } + } + }, + "required": [ + "name", + "type" + ] + }, + "avroRecord": { + "title": "Record", + "description": "A Record", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "record" + }, + "name": { + "$ref": "#/definitions/name" + }, + "namespace": { + "$ref": "#/definitions/namespace" + }, + "doc": { + "type": "string" + }, + "aliases": { + "type": "array", + "items": { + "$ref": "#/definitions/name" + } + }, + "fields": { + "type": "array", + "items": { + "$ref": "#/definitions/avroField" + } + } + }, + "required": [ + "type", + "name", + "fields" + ] + }, + "avroEnum": { + "title": "Enum", + "description": "An enumeration", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "enum" + }, + "name": { + "$ref": "#/definitions/name" + }, + "namespace": { + "$ref": "#/definitions/namespace" + }, + "doc": { + "type": "string" + }, + "aliases": { + "type": "array", + "items": { + "$ref": "#/definitions/name" + } + }, + "symbols": { + "type": "array", + "items": { + "$ref": "#/definitions/name" + } + } + }, + "required": [ + "type", + "name", + "symbols" + ] + }, + "avroArray": { + "title": "Array", + "description": "An array", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "array" + }, + "name": { + "$ref": "#/definitions/name" + }, + "namespace": { + "$ref": "#/definitions/namespace" + }, + "doc": { + "type": "string" + }, + "aliases": { + "type": "array", + "items": { + "$ref": "#/definitions/name" + } + }, + "items": { + "$ref": "#/definitions/types" + } + }, + "required": 
[ + "type", + "items" + ] + }, + "avroMap": { + "title": "Map", + "description": "A map of values", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "map" + }, + "name": { + "$ref": "#/definitions/name" + }, + "namespace": { + "$ref": "#/definitions/namespace" + }, + "doc": { + "type": "string" + }, + "aliases": { + "type": "array", + "items": { + "$ref": "#/definitions/name" + } + }, + "values": { + "$ref": "#/definitions/types" + } + }, + "required": [ + "type", + "values" + ] + }, + "avroFixed": { + "title": "Fixed", + "description": "A fixed sized array of bytes", + "type": "object", + "properties": { + "type": { + "type": "string", + "const": "fixed" + }, + "name": { + "$ref": "#/definitions/name" + }, + "namespace": { + "$ref": "#/definitions/namespace" + }, + "doc": { + "type": "string" + }, + "aliases": { + "type": "array", + "items": { + "$ref": "#/definitions/name" + } + }, + "size": { + "type": "number" + } + }, + "required": [ + "type", + "name", + "size" + ] + }, + "name": { + "type": "string", + "pattern": "^[A-Za-z_][A-Za-z0-9_]*$" + }, + "namespace": { + "type": "string", + "pattern": "^([A-Za-z_][A-Za-z0-9_]*(\\.[A-Za-z_][A-Za-z0-9_]*)*)*$" + } + }, + "description": "Json-Schema definition for Avro AVSC files.", + "oneOf": [ + { + "$ref": "#/definitions/avroSchema" + } + ], + "title": "Avro Schema Definition" + }, + "http://asyncapi.com/definitions/3.0.0/correlationId.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/correlationId.json", + "type": "object", + "description": "An object that specifies an identifier at design time that can used for message tracing and correlation.", + "required": [ + "location" + ], + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A optional description of the correlation ID. 
GitHub Flavored Markdown is allowed." + }, + "location": { + "type": "string", + "description": "A runtime expression that specifies the location of the correlation ID", + "pattern": "^\\$message\\.(header|payload)#(\\/(([^\\/~])|(~[01]))*)*" + } + }, + "examples": [ + { + "description": "Default Correlation ID", + "location": "$message.header#/correlationId" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/messageExampleObject.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/messageExampleObject.json", + "type": "object", + "additionalProperties": false, + "anyOf": [ + { + "required": [ + "payload" + ] + }, + { + "required": [ + "headers" + ] + } + ], + "properties": { + "name": { + "type": "string", + "description": "Machine readable name of the message example." + }, + "summary": { + "type": "string", + "description": "A brief summary of the message example." + }, + "headers": { + "type": "object", + "description": "Example of the application headers. It MUST be a map of key-value pairs." + }, + "payload": { + "type": [ + "number", + "string", + "boolean", + "object", + "array", + "null" + ], + "description": "Example of the message payload. It can be of any type." 
+ } + } + }, + "http://asyncapi.com/definitions/3.0.0/messageBindingsObject.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/messageBindingsObject.json", + "type": "object", + "description": "Map describing protocol-specific definitions for a message.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "http": { + "properties": { + "bindingVersion": { + "enum": [ + "0.2.0", + "0.3.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/http/0.3.0/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/http/0.2.0/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/http/0.3.0/message.json" + } + } + ] + }, + "ws": {}, + "amqp": { + "properties": { + "bindingVersion": { + "enum": [ + "0.3.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/amqp/0.3.0/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/amqp/0.3.0/message.json" + } + } + ] + }, + "amqp1": {}, + "mqtt": { + "properties": { + "bindingVersion": { + "enum": [ + "0.2.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": 
{ + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/mqtt/0.2.0/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/mqtt/0.2.0/message.json" + } + } + ] + }, + "kafka": { + "properties": { + "bindingVersion": { + "enum": [ + "0.5.0", + "0.4.0", + "0.3.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.5.0/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.5.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.5.0/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.4.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.4.0/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.3.0/message.json" + } + } + ] + }, + "anypointmq": { + "properties": { + "bindingVersion": { + "enum": [ + "0.0.1" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/anypointmq/0.0.1/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.0.1" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/anypointmq/0.0.1/message.json" + } + } + ] + }, + "nats": {}, + "jms": { + "properties": { + 
"bindingVersion": { + "enum": [ + "0.0.1" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/jms/0.0.1/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.0.1" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/jms/0.0.1/message.json" + } + } + ] + }, + "sns": {}, + "sqs": {}, + "stomp": {}, + "redis": {}, + "ibmmq": { + "properties": { + "bindingVersion": { + "enum": [ + "0.1.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/ibmmq/0.1.0/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.1.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/ibmmq/0.1.0/message.json" + } + } + ] + }, + "solace": {}, + "googlepubsub": { + "properties": { + "bindingVersion": { + "enum": [ + "0.2.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/googlepubsub/0.2.0/message.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/googlepubsub/0.2.0/message.json" + } + } + ] + } + } + }, + "http://asyncapi.com/bindings/http/0.3.0/message.json": { + "$id": "http://asyncapi.com/bindings/http/0.3.0/message.json", + "title": "HTTP message bindings object", + "description": "This object contains information about the message representation in HTTP.", + 
"type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "headers": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "\tA Schema object containing the definitions for HTTP-specific headers. This schema MUST be of type 'object' and have a 'properties' key." + }, + "statusCode": { + "type": "number", + "description": "The HTTP response status code according to [RFC 9110](https://httpwg.org/specs/rfc9110.html#overview.of.status.codes). `statusCode` is only relevant for messages referenced by the [Operation Reply Object](https://www.asyncapi.com/docs/reference/specification/v3.0.0#operationReplyObject), as it defines the status code for the response. In all other cases, this value can be safely ignored." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding. If omitted, \"latest\" MUST be assumed." + } + }, + "examples": [ + { + "headers": { + "type": "object", + "properties": { + "Content-Type": { + "type": "string", + "enum": [ + "application/json" + ] + } + } + }, + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/http/0.2.0/message.json": { + "$id": "http://asyncapi.com/bindings/http/0.2.0/message.json", + "title": "HTTP message bindings object", + "description": "This object contains information about the message representation in HTTP.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "headers": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "\tA Schema object containing the definitions for HTTP-specific headers. This schema MUST be of type 'object' and have a 'properties' key." 
+ }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.2.0" + ], + "description": "The version of this binding. If omitted, \"latest\" MUST be assumed." + } + }, + "examples": [ + { + "headers": { + "type": "object", + "properties": { + "Content-Type": { + "type": "string", + "enum": [ + "application/json" + ] + } + } + }, + "bindingVersion": "0.2.0" + } + ] + }, + "http://asyncapi.com/bindings/amqp/0.3.0/message.json": { + "$id": "http://asyncapi.com/bindings/amqp/0.3.0/message.json", + "title": "AMQP message bindings object", + "description": "This object contains information about the message representation in AMQP.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "contentEncoding": { + "type": "string", + "description": "A MIME encoding for the message content." + }, + "messageType": { + "type": "string", + "description": "Application-specific message type." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding. If omitted, \"latest\" MUST be assumed." + } + }, + "examples": [ + { + "contentEncoding": "gzip", + "messageType": "user.signup", + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/mqtt/0.2.0/message.json": { + "$id": "http://asyncapi.com/bindings/mqtt/0.2.0/message.json", + "title": "MQTT message bindings object", + "description": "This object contains information about the message representation in MQTT.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "payloadFormatIndicator": { + "type": "integer", + "enum": [ + 0, + 1 + ], + "description": "1 indicates that the payload is UTF-8 encoded character data. 
0 indicates that the payload format is unspecified.", + "default": 0 + }, + "correlationData": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + ], + "description": "Correlation Data is used by the sender of the request message to identify which request the response message is for when it is received." + }, + "contentType": { + "type": "string", + "description": "String describing the content type of the message payload. This should not conflict with the contentType field of the associated AsyncAPI Message object." + }, + "responseTopic": { + "oneOf": [ + { + "type": "string", + "format": "uri-template", + "minLength": 1 + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + ], + "description": "The topic (channel URI) to be used for a response message." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.2.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "bindingVersion": "0.2.0" + }, + { + "contentType": "application/json", + "correlationData": { + "type": "string", + "format": "uuid" + }, + "responseTopic": "application/responses", + "bindingVersion": "0.2.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.5.0/message.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.5.0/message.json", + "title": "Message Schema", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "key": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + } + ], + "description": "The message key." 
+ }, + "schemaIdLocation": { + "type": "string", + "description": "If a Schema Registry is used when performing this operation, tells where the id of schema is stored.", + "enum": [ + "header", + "payload" + ] + }, + "schemaIdPayloadEncoding": { + "type": "string", + "description": "Number of bytes or vendor specific values when schema id is encoded in payload." + }, + "schemaLookupStrategy": { + "type": "string", + "description": "Freeform string for any naming strategy class to use. Clients should default to the vendor default if not supplied." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.5.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "key": { + "type": "string", + "enum": [ + "myKey" + ] + }, + "schemaIdLocation": "payload", + "schemaIdPayloadEncoding": "apicurio-new", + "schemaLookupStrategy": "TopicIdStrategy", + "bindingVersion": "0.5.0" + }, + { + "key": { + "$ref": "path/to/user-create.avsc#/UserCreate" + }, + "schemaIdLocation": "payload", + "schemaIdPayloadEncoding": "4", + "bindingVersion": "0.5.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.4.0/message.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.4.0/message.json", + "title": "Message Schema", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "key": { + "anyOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/avroSchema_v1.json" + } + ], + "description": "The message key." 
+ }, + "schemaIdLocation": { + "type": "string", + "description": "If a Schema Registry is used when performing this operation, tells where the id of schema is stored.", + "enum": [ + "header", + "payload" + ] + }, + "schemaIdPayloadEncoding": { + "type": "string", + "description": "Number of bytes or vendor specific values when schema id is encoded in payload." + }, + "schemaLookupStrategy": { + "type": "string", + "description": "Freeform string for any naming strategy class to use. Clients should default to the vendor default if not supplied." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.4.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "key": { + "type": "string", + "enum": [ + "myKey" + ] + }, + "schemaIdLocation": "payload", + "schemaIdPayloadEncoding": "apicurio-new", + "schemaLookupStrategy": "TopicIdStrategy", + "bindingVersion": "0.4.0" + }, + { + "key": { + "$ref": "path/to/user-create.avsc#/UserCreate" + }, + "schemaIdLocation": "payload", + "schemaIdPayloadEncoding": "4", + "bindingVersion": "0.4.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.3.0/message.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.3.0/message.json", + "title": "Message Schema", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "key": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "The message key." + }, + "schemaIdLocation": { + "type": "string", + "description": "If a Schema Registry is used when performing this operation, tells where the id of schema is stored.", + "enum": [ + "header", + "payload" + ] + }, + "schemaIdPayloadEncoding": { + "type": "string", + "description": "Number of bytes or vendor specific values when schema id is encoded in payload." 
+ }, + "schemaLookupStrategy": { + "type": "string", + "description": "Freeform string for any naming strategy class to use. Clients should default to the vendor default if not supplied." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "key": { + "type": "string", + "enum": [ + "myKey" + ] + }, + "schemaIdLocation": "payload", + "schemaIdPayloadEncoding": "apicurio-new", + "schemaLookupStrategy": "TopicIdStrategy", + "bindingVersion": "0.3.0" + }, + { + "key": { + "$ref": "path/to/user-create.avsc#/UserCreate" + }, + "schemaIdLocation": "payload", + "schemaIdPayloadEncoding": "4", + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/anypointmq/0.0.1/message.json": { + "$id": "http://asyncapi.com/bindings/anypointmq/0.0.1/message.json", + "title": "Anypoint MQ message bindings object", + "description": "This object contains configuration for describing an Anypoint MQ message as an AsyncAPI message. This objects only contains configuration that can not be provided in the AsyncAPI standard message object.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "headers": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + ], + "description": "A Schema object containing the definitions for Anypoint MQ-specific headers (protocol headers). This schema MUST be of type 'object' and have a 'properties' key. Examples of Anypoint MQ protocol headers are 'messageId' and 'messageGroupId'." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.0.1" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." 
+ } + }, + "examples": [ + { + "headers": { + "type": "object", + "properties": { + "messageId": { + "type": "string" + } + } + }, + "bindingVersion": "0.0.1" + } + ] + }, + "http://asyncapi.com/bindings/jms/0.0.1/message.json": { + "$id": "http://asyncapi.com/bindings/jms/0.0.1/message.json", + "title": "Message Schema", + "description": "This object contains configuration for describing a JMS message as an AsyncAPI message. This objects only contains configuration that can not be provided in the AsyncAPI standard message object.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "headers": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "A Schema object containing the definitions for JMS headers (protocol headers). This schema MUST be of type 'object' and have a 'properties' key. Examples of JMS protocol headers are 'JMSMessageID', 'JMSTimestamp', and 'JMSCorrelationID'." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.0.1" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "headers": { + "type": "object", + "required": [ + "JMSMessageID" + ], + "properties": { + "JMSMessageID": { + "type": [ + "string", + "null" + ], + "description": "A unique message identifier. This may be set by your JMS Provider on your behalf." + }, + "JMSTimestamp": { + "type": "integer", + "description": "The time the message was sent. This may be set by your JMS Provider on your behalf. The time the message was sent. The value of the timestamp is the amount of time, measured in milliseconds, that has elapsed since midnight, January 1, 1970, UTC." 
+ }, + "JMSDeliveryMode": { + "type": "string", + "enum": [ + "PERSISTENT", + "NON_PERSISTENT" + ], + "default": "PERSISTENT", + "description": "Denotes the delivery mode for the message. This may be set by your JMS Provider on your behalf." + }, + "JMSPriority": { + "type": "integer", + "default": 4, + "description": "The priority of the message. This may be set by your JMS Provider on your behalf." + }, + "JMSExpires": { + "type": "integer", + "description": "The time at which the message expires. This may be set by your JMS Provider on your behalf. A value of zero means that the message does not expire. Any non-zero value is the amount of time, measured in milliseconds, that has elapsed since midnight, January 1, 1970, UTC, at which the message will expire." + }, + "JMSType": { + "type": [ + "string", + "null" + ], + "description": "The type of message. Some JMS providers use a message repository that contains the definitions of messages sent by applications. The 'JMSType' header field may reference a message's definition in the provider's repository. The JMS API does not define a standard message definition repository, nor does it define a naming policy for the definitions it contains. Some messaging systems require that a message type definition for each application message be created and that each message specify its type. In order to work with such JMS providers, JMS clients should assign a value to 'JMSType', whether the application makes use of it or not. This ensures that the field is properly set for those providers that require it." + }, + "JMSCorrelationID": { + "type": [ + "string", + "null" + ], + "description": "The correlation identifier of the message. A client can use the 'JMSCorrelationID' header field to link one message with another. A typical use is to link a response message with its request message. 
Since each message sent by a JMS provider is assigned a message ID value, it is convenient to link messages via message ID, such message ID values must start with the 'ID:' prefix. Conversely, application-specified values must not start with the 'ID:' prefix; this is reserved for provider-generated message ID values." + }, + "JMSReplyTo": { + "type": "string", + "description": "The queue or topic that the message sender expects replies to." + } + } + }, + "bindingVersion": "0.0.1" + } + ] + }, + "http://asyncapi.com/bindings/ibmmq/0.1.0/message.json": { + "$id": "http://asyncapi.com/bindings/ibmmq/0.1.0/message.json", + "title": "IBM MQ message bindings object", + "description": "This object contains information about the message representation in IBM MQ.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "type": { + "type": "string", + "enum": [ + "string", + "jms", + "binary" + ], + "default": "string", + "description": "The type of the message." + }, + "headers": { + "type": "string", + "description": "Defines the IBM MQ message headers to include with this message. More than one header can be specified as a comma separated list. Supporting information on IBM MQ message formats can be found on this [page](https://www.ibm.com/docs/en/ibm-mq/9.2?topic=mqmd-format-mqchar8) in the IBM MQ Knowledge Center." + }, + "description": { + "type": "string", + "description": "Provides additional information for application developers: describes the message type or format." + }, + "expiry": { + "type": "integer", + "minimum": 0, + "default": 0, + "description": "The recommended setting the client should use for the TTL (Time-To-Live) of the message. This is a period of time expressed in milliseconds and set by the application that puts the message. 
'expiry' values are API dependant e.g., MQI and JMS use different units of time and default values for 'unlimited'. General information on IBM MQ message expiry can be found on this [page](https://www.ibm.com/docs/en/ibm-mq/9.2?topic=mqmd-expiry-mqlong) in the IBM MQ Knowledge Center." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.1.0" + ], + "description": "The version of this binding." + } + }, + "oneOf": [ + { + "properties": { + "type": { + "const": "binary" + } + } + }, + { + "properties": { + "type": { + "const": "jms" + } + }, + "not": { + "required": [ + "headers" + ] + } + }, + { + "properties": { + "type": { + "const": "string" + } + }, + "not": { + "required": [ + "headers" + ] + } + } + ], + "examples": [ + { + "type": "string", + "bindingVersion": "0.1.0" + }, + { + "type": "jms", + "description": "JMS stream message", + "bindingVersion": "0.1.0" + } + ] + }, + "http://asyncapi.com/bindings/googlepubsub/0.2.0/message.json": { + "$id": "http://asyncapi.com/bindings/googlepubsub/0.2.0/message.json", + "title": "Cloud Pub/Sub Channel Schema", + "description": "This object contains information about the message representation for Google Cloud Pub/Sub.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "bindingVersion": { + "type": "string", + "enum": [ + "0.2.0" + ], + "description": "The version of this binding." 
+ }, + "attributes": { + "type": "object" + }, + "orderingKey": { + "type": "string" + }, + "schema": { + "type": "object", + "additionalItems": false, + "properties": { + "name": { + "type": "string" + } + }, + "required": [ + "name" + ] + } + }, + "examples": [ + { + "schema": { + "name": "projects/your-project-id/schemas/your-avro-schema-id" + } + }, + { + "schema": { + "name": "projects/your-project-id/schemas/your-protobuf-schema-id" + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/messageTrait.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/messageTrait.json", + "type": "object", + "description": "Describes a trait that MAY be applied to a Message Object. This object MAY contain any property from the Message Object, except payload and traits.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "contentType": { + "type": "string", + "description": "The content type to use when encoding/decoding a message's payload. The value MUST be a specific media type (e.g. application/json). When omitted, the value MUST be the one specified on the defaultContentType field." + }, + "headers": { + "$ref": "http://asyncapi.com/definitions/3.0.0/anySchema.json" + }, + "correlationId": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/correlationId.json" + } + ] + }, + "tags": { + "type": "array", + "items": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/tag.json" + } + ] + }, + "uniqueItems": true + }, + "summary": { + "type": "string", + "description": "A brief summary of the message." + }, + "name": { + "type": "string", + "description": "Name of the message." 
+ }, + "title": { + "type": "string", + "description": "A human-friendly title for the message." + }, + "description": { + "type": "string", + "description": "A longer description of the message. CommonMark is allowed." + }, + "externalDocs": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/externalDocs.json" + } + ] + }, + "deprecated": { + "type": "boolean", + "default": false + }, + "examples": { + "type": "array", + "description": "List of examples.", + "items": { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageExampleObject.json" + } + }, + "bindings": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageBindingsObject.json" + } + ] + } + }, + "examples": [ + { + "contentType": "application/json" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/parameters.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/parameters.json", + "type": "object", + "additionalProperties": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/parameter.json" + } + ] + }, + "description": "JSON objects describing re-usable channel parameters.", + "examples": [ + { + "address": "user/{userId}/signedup", + "parameters": { + "userId": { + "description": "Id of the user." + } + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/parameter.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/parameter.json", + "description": "Describes a parameter included in a channel address.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "description": { + "type": "string", + "description": "A brief description of the parameter. 
This could contain examples of use. GitHub Flavored Markdown is allowed." + }, + "enum": { + "description": "An enumeration of string values to be used if the substitution options are from a limited set.", + "type": "array", + "items": { + "type": "string" + } + }, + "default": { + "description": "The default value to use for substitution, and to send, if an alternate value is not supplied.", + "type": "string" + }, + "examples": { + "description": "An array of examples of the parameter value.", + "type": "array", + "items": { + "type": "string" + } + }, + "location": { + "type": "string", + "description": "A runtime expression that specifies the location of the parameter value", + "pattern": "^\\$message\\.(header|payload)#(\\/(([^\\/~])|(~[01]))*)*" + } + }, + "examples": [ + { + "address": "user/{userId}/signedup", + "parameters": { + "userId": { + "description": "Id of the user.", + "location": "$message.payload#/user/id" + } + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/channelBindingsObject.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/channelBindingsObject.json", + "type": "object", + "description": "Map describing protocol-specific definitions for a channel.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "http": {}, + "ws": { + "properties": { + "bindingVersion": { + "enum": [ + "0.1.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/websockets/0.1.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.1.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/websockets/0.1.0/channel.json" + } + } + ] + }, + "amqp": { + 
"properties": { + "bindingVersion": { + "enum": [ + "0.3.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/amqp/0.3.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/amqp/0.3.0/channel.json" + } + } + ] + }, + "amqp1": {}, + "mqtt": {}, + "kafka": { + "properties": { + "bindingVersion": { + "enum": [ + "0.5.0", + "0.4.0", + "0.3.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.5.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.5.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.5.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.4.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.4.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.3.0/channel.json" + } + } + ] + }, + "anypointmq": { + "properties": { + "bindingVersion": { + "enum": [ + "0.0.1" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/anypointmq/0.0.1/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + 
"bindingVersion": { + "const": "0.0.1" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/anypointmq/0.0.1/channel.json" + } + } + ] + }, + "nats": {}, + "jms": { + "properties": { + "bindingVersion": { + "enum": [ + "0.0.1" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/jms/0.0.1/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.0.1" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/jms/0.0.1/channel.json" + } + } + ] + }, + "sns": { + "properties": { + "bindingVersion": { + "enum": [ + "0.1.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.1.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/channel.json" + } + } + ] + }, + "sqs": { + "properties": { + "bindingVersion": { + "enum": [ + "0.2.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/channel.json" + } + } + ] + }, + "stomp": {}, + "redis": {}, + "ibmmq": { + "properties": { + "bindingVersion": { + "enum": [ + "0.1.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion 
specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/ibmmq/0.1.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.1.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/ibmmq/0.1.0/channel.json" + } + } + ] + }, + "solace": {}, + "googlepubsub": { + "properties": { + "bindingVersion": { + "enum": [ + "0.2.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/googlepubsub/0.2.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/googlepubsub/0.2.0/channel.json" + } + } + ] + }, + "pulsar": { + "properties": { + "bindingVersion": { + "enum": [ + "0.1.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/pulsar/0.1.0/channel.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.1.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/pulsar/0.1.0/channel.json" + } + } + ] + } + } + }, + "http://asyncapi.com/bindings/websockets/0.1.0/channel.json": { + "$id": "http://asyncapi.com/bindings/websockets/0.1.0/channel.json", + "title": "WebSockets channel bindings object", + "description": "When using WebSockets, the channel represents the connection. Unlike other protocols that support multiple virtual channels (topics, routing keys, etc.) 
per connection, WebSockets doesn't support virtual channels or, put it another way, there's only one channel and its characteristics are strongly related to the protocol used for the handshake, i.e., HTTP.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "method": { + "type": "string", + "enum": [ + "GET", + "POST" + ], + "description": "The HTTP method to use when establishing the connection. Its value MUST be either 'GET' or 'POST'." + }, + "query": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + ], + "description": "A Schema object containing the definitions for each query parameter. This schema MUST be of type 'object' and have a 'properties' key." + }, + "headers": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + ], + "description": "A Schema object containing the definitions of the HTTP headers to use when establishing the connection. This schema MUST be of type 'object' and have a 'properties' key." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.1.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." 
+ } + }, + "examples": [ + { + "method": "POST", + "bindingVersion": "0.1.0" + } + ] + }, + "http://asyncapi.com/bindings/amqp/0.3.0/channel.json": { + "$id": "http://asyncapi.com/bindings/amqp/0.3.0/channel.json", + "title": "AMQP channel bindings object", + "description": "This object contains information about the channel representation in AMQP.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "is": { + "type": "string", + "enum": [ + "queue", + "routingKey" + ], + "description": "Defines what type of channel is it. Can be either 'queue' or 'routingKey' (default)." + }, + "exchange": { + "type": "object", + "properties": { + "name": { + "type": "string", + "maxLength": 255, + "description": "The name of the exchange. It MUST NOT exceed 255 characters long." + }, + "type": { + "type": "string", + "enum": [ + "topic", + "direct", + "fanout", + "default", + "headers" + ], + "description": "The type of the exchange. Can be either 'topic', 'direct', 'fanout', 'default' or 'headers'." + }, + "durable": { + "type": "boolean", + "description": "Whether the exchange should survive broker restarts or not." + }, + "autoDelete": { + "type": "boolean", + "description": "Whether the exchange should be deleted when the last queue is unbound from it." + }, + "vhost": { + "type": "string", + "default": "/", + "description": "The virtual host of the exchange. Defaults to '/'." + } + }, + "description": "When is=routingKey, this object defines the exchange properties." + }, + "queue": { + "type": "object", + "properties": { + "name": { + "type": "string", + "maxLength": 255, + "description": "The name of the queue. It MUST NOT exceed 255 characters long." + }, + "durable": { + "type": "boolean", + "description": "Whether the queue should survive broker restarts or not." 
+ }, + "exclusive": { + "type": "boolean", + "description": "Whether the queue should be used only by one connection or not." + }, + "autoDelete": { + "type": "boolean", + "description": "Whether the queue should be deleted when the last consumer unsubscribes." + }, + "vhost": { + "type": "string", + "default": "/", + "description": "The virtual host of the queue. Defaults to '/'." + } + }, + "description": "When is=queue, this object defines the queue properties." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "oneOf": [ + { + "properties": { + "is": { + "const": "routingKey" + } + }, + "required": [ + "exchange" + ], + "not": { + "required": [ + "queue" + ] + } + }, + { + "properties": { + "is": { + "const": "queue" + } + }, + "required": [ + "queue" + ], + "not": { + "required": [ + "exchange" + ] + } + } + ], + "examples": [ + { + "is": "routingKey", + "exchange": { + "name": "myExchange", + "type": "topic", + "durable": true, + "autoDelete": false, + "vhost": "/" + }, + "bindingVersion": "0.3.0" + }, + { + "is": "queue", + "queue": { + "name": "my-queue-name", + "durable": true, + "exclusive": true, + "autoDelete": false, + "vhost": "/" + }, + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.5.0/channel.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.5.0/channel.json", + "title": "Channel Schema", + "description": "This object contains information about the channel representation in Kafka.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "topic": { + "type": "string", + "description": "Kafka topic name if different from channel name." 
+ }, + "partitions": { + "type": "integer", + "minimum": 1, + "description": "Number of partitions configured on this topic." + }, + "replicas": { + "type": "integer", + "minimum": 1, + "description": "Number of replicas configured on this topic." + }, + "topicConfiguration": { + "description": "Topic configuration properties that are relevant for the API.", + "type": "object", + "additionalProperties": true, + "properties": { + "cleanup.policy": { + "description": "The [`cleanup.policy`](https://kafka.apache.org/documentation/#topicconfigs_cleanup.policy) configuration option.", + "type": "array", + "items": { + "type": "string", + "enum": [ + "compact", + "delete" + ] + } + }, + "retention.ms": { + "description": "The [`retention.ms`](https://kafka.apache.org/documentation/#topicconfigs_retention.ms) configuration option.", + "type": "integer", + "minimum": -1 + }, + "retention.bytes": { + "description": "The [`retention.bytes`](https://kafka.apache.org/documentation/#topicconfigs_retention.bytes) configuration option.", + "type": "integer", + "minimum": -1 + }, + "delete.retention.ms": { + "description": "The [`delete.retention.ms`](https://kafka.apache.org/documentation/#topicconfigs_delete.retention.ms) configuration option.", + "type": "integer", + "minimum": 0 + }, + "max.message.bytes": { + "description": "The [`max.message.bytes`](https://kafka.apache.org/documentation/#topicconfigs_max.message.bytes) configuration option.", + "type": "integer", + "minimum": 0 + }, + "confluent.key.schema.validation": { + "description": "It shows whether the schema validation for the message key is enabled. Vendor specific config. For more details: (https://docs.confluent.io/platform/current/installation/configuration/topic-configs.html#confluent-key-schema-validation)", + "type": "boolean" + }, + "confluent.key.subject.name.strategy": { + "description": "The name of the schema lookup strategy for the message key. Vendor specific config. 
For more details: (https://docs.confluent.io/platform/current/installation/configuration/topic-configs.html#confluent-key-subject-name-strategy)", + "type": "string" + }, + "confluent.value.schema.validation": { + "description": "It shows whether the schema validation for the message value is enabled. Vendor specific config. For more details: (https://docs.confluent.io/platform/current/installation/configuration/topic-configs.html#confluent-value-schema-validation)", + "type": "boolean" + }, + "confluent.value.subject.name.strategy": { + "description": "The name of the schema lookup strategy for the message value. Vendor specific config. For more details: (https://docs.confluent.io/platform/current/installation/configuration/topic-configs.html#confluent-value-subject-name-strategy)", + "type": "string" + } + } + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.5.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "topic": "my-specific-topic", + "partitions": 20, + "replicas": 3, + "bindingVersion": "0.5.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.4.0/channel.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.4.0/channel.json", + "title": "Channel Schema", + "description": "This object contains information about the channel representation in Kafka.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "topic": { + "type": "string", + "description": "Kafka topic name if different from channel name." + }, + "partitions": { + "type": "integer", + "minimum": 1, + "description": "Number of partitions configured on this topic." + }, + "replicas": { + "type": "integer", + "minimum": 1, + "description": "Number of replicas configured on this topic." 
+ }, + "topicConfiguration": { + "description": "Topic configuration properties that are relevant for the API.", + "type": "object", + "additionalProperties": false, + "properties": { + "cleanup.policy": { + "description": "The [`cleanup.policy`](https://kafka.apache.org/documentation/#topicconfigs_cleanup.policy) configuration option.", + "type": "array", + "items": { + "type": "string", + "enum": [ + "compact", + "delete" + ] + } + }, + "retention.ms": { + "description": "The [`retention.ms`](https://kafka.apache.org/documentation/#topicconfigs_retention.ms) configuration option.", + "type": "integer", + "minimum": -1 + }, + "retention.bytes": { + "description": "The [`retention.bytes`](https://kafka.apache.org/documentation/#topicconfigs_retention.bytes) configuration option.", + "type": "integer", + "minimum": -1 + }, + "delete.retention.ms": { + "description": "The [`delete.retention.ms`](https://kafka.apache.org/documentation/#topicconfigs_delete.retention.ms) configuration option.", + "type": "integer", + "minimum": 0 + }, + "max.message.bytes": { + "description": "The [`max.message.bytes`](https://kafka.apache.org/documentation/#topicconfigs_max.message.bytes) configuration option.", + "type": "integer", + "minimum": 0 + } + } + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.4.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." 
+ } + }, + "examples": [ + { + "topic": "my-specific-topic", + "partitions": 20, + "replicas": 3, + "bindingVersion": "0.4.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.3.0/channel.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.3.0/channel.json", + "title": "Channel Schema", + "description": "This object contains information about the channel representation in Kafka.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "topic": { + "type": "string", + "description": "Kafka topic name if different from channel name." + }, + "partitions": { + "type": "integer", + "minimum": 1, + "description": "Number of partitions configured on this topic." + }, + "replicas": { + "type": "integer", + "minimum": 1, + "description": "Number of replicas configured on this topic." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "topic": "my-specific-topic", + "partitions": 20, + "replicas": 3, + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/anypointmq/0.0.1/channel.json": { + "$id": "http://asyncapi.com/bindings/anypointmq/0.0.1/channel.json", + "title": "Anypoint MQ channel bindings object", + "description": "This object contains configuration for describing an Anypoint MQ exchange, queue, or FIFO queue as an AsyncAPI channel. 
This objects only contains configuration that can not be provided in the AsyncAPI standard channel object.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "destination": { + "type": "string", + "description": "The destination (queue or exchange) name for this channel. SHOULD only be specified if the channel name differs from the actual destination name, such as when the channel name is not a valid destination name in Anypoint MQ. Defaults to the channel name." + }, + "destinationType": { + "type": "string", + "enum": [ + "exchange", + "queue", + "fifo-queue" + ], + "default": "queue", + "description": "The type of destination. SHOULD be specified to document the messaging model (publish/subscribe, point-to-point, strict message ordering) supported by this channel." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.0.1" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "destination": "user-signup-exchg", + "destinationType": "exchange", + "bindingVersion": "0.0.1" + } + ] + }, + "http://asyncapi.com/bindings/jms/0.0.1/channel.json": { + "$id": "http://asyncapi.com/bindings/jms/0.0.1/channel.json", + "title": "Channel Schema", + "description": "This object contains configuration for describing a JMS queue, or FIFO queue as an AsyncAPI channel. This objects only contains configuration that can not be provided in the AsyncAPI standard channel object.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "destination": { + "type": "string", + "description": "The destination (queue) name for this channel. 
SHOULD only be specified if the channel name differs from the actual destination name, such as when the channel name is not a valid destination name according to the JMS Provider. Defaults to the channel name." + }, + "destinationType": { + "type": "string", + "enum": [ + "queue", + "fifo-queue" + ], + "default": "queue", + "description": "The type of destination. SHOULD be specified to document the messaging model (point-to-point, or strict message ordering) supported by this channel." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.0.1" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "destination": "user-signed-up", + "destinationType": "fifo-queue", + "bindingVersion": "0.0.1" + } + ] + }, + "http://asyncapi.com/bindings/sns/0.1.0/channel.json": { + "$id": "http://asyncapi.com/bindings/sns/0.1.0/channel.json", + "title": "Channel Schema", + "description": "This object contains information about the channel representation in SNS.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "name": { + "type": "string", + "description": "The name of the topic. Can be different from the channel name to allow flexibility around AWS resource naming limitations." + }, + "ordering": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/channel.json#/definitions/ordering" + }, + "policy": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/channel.json#/definitions/policy" + }, + "tags": { + "type": "object", + "description": "Key-value pairs that represent AWS tags on the topic." 
+ }, + "bindingVersion": { + "type": "string", + "description": "The version of this binding.", + "default": "latest" + } + }, + "required": [ + "name" + ], + "definitions": { + "ordering": { + "type": "object", + "description": "By default, we assume an unordered SNS topic. This field allows configuration of a FIFO SNS Topic.", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "type": { + "type": "string", + "description": "Defines the type of SNS Topic.", + "enum": [ + "standard", + "FIFO" + ] + }, + "contentBasedDeduplication": { + "type": "boolean", + "description": "True to turn on de-duplication of messages for a channel." + } + }, + "required": [ + "type" + ] + }, + "policy": { + "type": "object", + "description": "The security policy for the SNS Topic.", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "statements": { + "type": "array", + "description": "An array of statement objects, each of which controls a permission for this topic", + "items": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/channel.json#/definitions/statement" + } + } + }, + "required": [ + "statements" + ] + }, + "statement": { + "type": "object", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "effect": { + "type": "string", + "enum": [ + "Allow", + "Deny" + ] + }, + "principal": { + "description": "The AWS account or resource ARN that this statement applies to.", + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + }, + "action": { + "description": "The SNS permission being allowed or denied e.g. 
sns:Publish", + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + } + }, + "required": [ + "effect", + "principal", + "action" + ] + } + }, + "examples": [ + { + "name": "my-sns-topic", + "policy": { + "statements": [ + { + "effect": "Allow", + "principal": "*", + "action": "SNS:Publish" + } + ] + } + } + ] + }, + "http://asyncapi.com/bindings/sqs/0.2.0/channel.json": { + "$id": "http://asyncapi.com/bindings/sqs/0.2.0/channel.json", + "title": "Channel Schema", + "description": "This object contains information about the channel representation in SQS.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "queue": { + "description": "A definition of the queue that will be used as the channel.", + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/channel.json#/definitions/queue" + }, + "deadLetterQueue": { + "description": "A definition of the queue that will be used for un-processable messages.", + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/channel.json#/definitions/queue" + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.1.0", + "0.2.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed.", + "default": "latest" + } + }, + "required": [ + "queue" + ], + "definitions": { + "queue": { + "type": "object", + "description": "A definition of a queue.", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "name": { + "type": "string", + "description": "The name of the queue. When an SNS Operation Binding Object references an SQS queue by name, the identifier should be the one in this field." 
+ }, + "fifoQueue": { + "type": "boolean", + "description": "Is this a FIFO queue?", + "default": false + }, + "deduplicationScope": { + "type": "string", + "enum": [ + "queue", + "messageGroup" + ], + "description": "Specifies whether message deduplication occurs at the message group or queue level. Valid values are messageGroup and queue (default).", + "default": "queue" + }, + "fifoThroughputLimit": { + "type": "string", + "enum": [ + "perQueue", + "perMessageGroupId" + ], + "description": "Specifies whether the FIFO queue throughput quota applies to the entire queue or per message group. Valid values are perQueue (default) and perMessageGroupId.", + "default": "perQueue" + }, + "deliveryDelay": { + "type": "integer", + "description": "The number of seconds to delay before a message sent to the queue can be received. used to create a delay queue.", + "minimum": 0, + "maximum": 900, + "default": 0 + }, + "visibilityTimeout": { + "type": "integer", + "description": "The length of time, in seconds, that a consumer locks a message - hiding it from reads - before it is unlocked and can be read again.", + "minimum": 0, + "maximum": 43200, + "default": 30 + }, + "receiveMessageWaitTime": { + "type": "integer", + "description": "Determines if the queue uses short polling or long polling. Set to zero the queue reads available messages and returns immediately. 
Set to a non-zero integer, long polling waits the specified number of seconds for messages to arrive before returning.", + "default": 0 + }, + "messageRetentionPeriod": { + "type": "integer", + "description": "How long to retain a message on the queue in seconds, unless deleted.", + "minimum": 60, + "maximum": 1209600, + "default": 345600 + }, + "redrivePolicy": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/channel.json#/definitions/redrivePolicy" + }, + "policy": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/channel.json#/definitions/policy" + }, + "tags": { + "type": "object", + "description": "Key-value pairs that represent AWS tags on the queue." + } + }, + "required": [ + "name", + "fifoQueue" + ] + }, + "redrivePolicy": { + "type": "object", + "description": "Prevent poison pill messages by moving un-processable messages to an SQS dead letter queue.", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "deadLetterQueue": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/channel.json#/definitions/identifier" + }, + "maxReceiveCount": { + "type": "integer", + "description": "The number of times a message is delivered to the source queue before being moved to the dead-letter queue.", + "default": 10 + } + }, + "required": [ + "deadLetterQueue" + ] + }, + "identifier": { + "type": "object", + "description": "The SQS queue to use as a dead letter queue (DLQ).", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "arn": { + "type": "string", + "description": "The target is an ARN. 
For example, for SQS, the identifier may be an ARN, which will be of the form: arn:aws:sqs:{region}:{account-id}:{queueName}" + }, + "name": { + "type": "string", + "description": "The endpoint is identified by a name, which corresponds to an identifying field called 'name' of a binding for that protocol on this publish Operation Object. For example, if the protocol is 'sqs' then the name refers to the name field sqs binding." + } + } + }, + "policy": { + "type": "object", + "description": "The security policy for the SQS Queue", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "statements": { + "type": "array", + "description": "An array of statement objects, each of which controls a permission for this queue.", + "items": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/channel.json#/definitions/statement" + } + } + }, + "required": [ + "statements" + ] + }, + "statement": { + "type": "object", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "effect": { + "type": "string", + "enum": [ + "Allow", + "Deny" + ] + }, + "principal": { + "description": "The AWS account or resource ARN that this statement applies to.", + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + }, + "action": { + "description": "The SQS permission being allowed or denied e.g. 
sqs:ReceiveMessage", + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + } + }, + "required": [ + "effect", + "principal", + "action" + ] + } + }, + "examples": [ + { + "queue": { + "name": "myQueue", + "fifoQueue": true, + "deduplicationScope": "messageGroup", + "fifoThroughputLimit": "perMessageGroupId", + "deliveryDelay": 15, + "visibilityTimeout": 60, + "receiveMessageWaitTime": 0, + "messageRetentionPeriod": 86400, + "redrivePolicy": { + "deadLetterQueue": { + "arn": "arn:aws:SQS:eu-west-1:0000000:123456789" + }, + "maxReceiveCount": 15 + }, + "policy": { + "statements": [ + { + "effect": "Deny", + "principal": "arn:aws:iam::123456789012:user/dec.kolakowski", + "action": [ + "sqs:SendMessage", + "sqs:ReceiveMessage" + ] + } + ] + }, + "tags": { + "owner": "AsyncAPI.NET", + "platform": "AsyncAPIOrg" + } + }, + "deadLetterQueue": { + "name": "myQueue_error", + "deliveryDelay": 0, + "visibilityTimeout": 0, + "receiveMessageWaitTime": 0, + "messageRetentionPeriod": 604800 + } + } + ] + }, + "http://asyncapi.com/bindings/ibmmq/0.1.0/channel.json": { + "$id": "http://asyncapi.com/bindings/ibmmq/0.1.0/channel.json", + "title": "IBM MQ channel bindings object", + "description": "This object contains information about the channel representation in IBM MQ. Each channel corresponds to a Queue or Topic within IBM MQ.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "destinationType": { + "type": "string", + "enum": [ + "topic", + "queue" + ], + "default": "topic", + "description": "Defines the type of AsyncAPI channel." 
+ }, + "queue": { + "type": "object", + "description": "Defines the properties of a queue.", + "properties": { + "objectName": { + "type": "string", + "maxLength": 48, + "description": "Defines the name of the IBM MQ queue associated with the channel." + }, + "isPartitioned": { + "type": "boolean", + "default": false, + "description": "Defines if the queue is a cluster queue and therefore partitioned. If 'true', a binding option MAY be specified when accessing the queue. More information on binding options can be found on this page in the IBM MQ Knowledge Center." + }, + "exclusive": { + "type": "boolean", + "default": false, + "description": "Specifies if it is recommended to open the queue exclusively." + } + }, + "required": [ + "objectName" + ] + }, + "topic": { + "type": "object", + "description": "Defines the properties of a topic.", + "properties": { + "string": { + "type": "string", + "maxLength": 10240, + "description": "The value of the IBM MQ topic string to be used." + }, + "objectName": { + "type": "string", + "maxLength": 48, + "description": "The name of the IBM MQ topic object." + }, + "durablePermitted": { + "type": "boolean", + "default": true, + "description": "Defines if the subscription may be durable." + }, + "lastMsgRetained": { + "type": "boolean", + "default": false, + "description": "Defines if the last message published will be made available to new subscriptions." + } + } + }, + "maxMsgLength": { + "type": "integer", + "minimum": 0, + "maximum": 104857600, + "description": "The maximum length of the physical message (in bytes) accepted by the Topic or Queue. Messages produced that are greater in size than this value may fail to be delivered. More information on the maximum message length can be found on this [page](https://www.ibm.com/support/knowledgecenter/SSFKSJ_latest/com.ibm.mq.ref.dev.doc/q097520_.html) in the IBM MQ Knowledge Center." 
+ }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.1.0" + ], + "description": "The version of this binding." + } + }, + "oneOf": [ + { + "properties": { + "destinationType": { + "const": "topic" + } + }, + "not": { + "required": [ + "queue" + ] + } + }, + { + "properties": { + "destinationType": { + "const": "queue" + } + }, + "required": [ + "queue" + ], + "not": { + "required": [ + "topic" + ] + } + } + ], + "examples": [ + { + "destinationType": "topic", + "topic": { + "objectName": "myTopicName" + }, + "bindingVersion": "0.1.0" + }, + { + "destinationType": "queue", + "queue": { + "objectName": "myQueueName", + "exclusive": true + }, + "bindingVersion": "0.1.0" + } + ] + }, + "http://asyncapi.com/bindings/googlepubsub/0.2.0/channel.json": { + "$id": "http://asyncapi.com/bindings/googlepubsub/0.2.0/channel.json", + "title": "Cloud Pub/Sub Channel Schema", + "description": "This object contains information about the channel representation for Google Cloud Pub/Sub.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "bindingVersion": { + "type": "string", + "enum": [ + "0.2.0" + ], + "description": "The version of this binding." 
+ }, + "labels": { + "type": "object" + }, + "messageRetentionDuration": { + "type": "string" + }, + "messageStoragePolicy": { + "type": "object", + "additionalProperties": false, + "properties": { + "allowedPersistenceRegions": { + "type": "array", + "items": { + "type": "string" + } + } + } + }, + "schemaSettings": { + "type": "object", + "additionalItems": false, + "properties": { + "encoding": { + "type": "string" + }, + "firstRevisionId": { + "type": "string" + }, + "lastRevisionId": { + "type": "string" + }, + "name": { + "type": "string" + } + }, + "required": [ + "encoding", + "name" + ] + } + }, + "required": [ + "schemaSettings" + ], + "examples": [ + { + "labels": { + "label1": "value1", + "label2": "value2" + }, + "messageRetentionDuration": "86400s", + "messageStoragePolicy": { + "allowedPersistenceRegions": [ + "us-central1", + "us-east1" + ] + }, + "schemaSettings": { + "encoding": "json", + "name": "projects/your-project-id/schemas/your-schema" + } + } + ] + }, + "http://asyncapi.com/bindings/pulsar/0.1.0/channel.json": { + "$id": "http://asyncapi.com/bindings/pulsar/0.1.0/channel.json", + "title": "Channel Schema", + "description": "This object contains information about the channel representation in Pulsar, which covers namespace and topic level admin configuration. This object contains additional information not possible to represent within the core AsyncAPI specification.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "required": [ + "namespace", + "persistence" + ], + "properties": { + "namespace": { + "type": "string", + "description": "The namespace, the channel is associated with." + }, + "persistence": { + "type": "string", + "enum": [ + "persistent", + "non-persistent" + ], + "description": "persistence of the topic in Pulsar." 
+ }, + "compaction": { + "type": "integer", + "minimum": 0, + "description": "Topic compaction threshold given in MB" + }, + "geo-replication": { + "type": "array", + "description": "A list of clusters the topic is replicated to.", + "items": { + "type": "string" + } + }, + "retention": { + "type": "object", + "additionalProperties": false, + "properties": { + "time": { + "type": "integer", + "minimum": 0, + "description": "Time given in Minutes. `0` = Disable message retention." + }, + "size": { + "type": "integer", + "minimum": 0, + "description": "Size given in MegaBytes. `0` = Disable message retention." + } + } + }, + "ttl": { + "type": "integer", + "description": "TTL in seconds for the specified topic" + }, + "deduplication": { + "type": "boolean", + "description": "Whether deduplication of events is enabled or not." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.1.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "namespace": "ns1", + "persistence": "persistent", + "compaction": 1000, + "retention": { + "time": 15, + "size": 1000 + }, + "ttl": 360, + "geo-replication": [ + "us-west", + "us-east" + ], + "deduplication": true, + "bindingVersion": "0.1.0" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/operations.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/operations.json", + "type": "object", + "description": "Holds a dictionary with all the operations this application MUST implement.", + "additionalProperties": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operation.json" + } + ] + }, + "examples": [ + { + "onUserSignUp": { + "title": "User sign up", + "summary": "Action to sign a user up.", + "description": "A longer description", + "channel": { + "$ref": "#/channels/userSignup" + }, + "action": "send", + "tags": [ + { + "name": "user" + }, + { + "name": 
"signup" + }, + { + "name": "register" + } + ], + "bindings": { + "amqp": { + "ack": false + } + }, + "traits": [ + { + "$ref": "#/components/operationTraits/kafka" + } + ] + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/operation.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/operation.json", + "type": "object", + "description": "Describes a specific operation.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "required": [ + "action", + "channel" + ], + "properties": { + "action": { + "type": "string", + "description": "Allowed values are send and receive. Use send when it's expected that the application will send a message to the given channel, and receive when the application should expect receiving messages from the given channel.", + "enum": [ + "send", + "receive" + ] + }, + "channel": { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + "messages": { + "type": "array", + "description": "A list of $ref pointers pointing to the supported Message Objects that can be processed by this operation. It MUST contain a subset of the messages defined in the channel referenced in this operation. Every message processed by this operation MUST be valid against one, and only one, of the message objects referenced in this list. Please note the messages property value MUST be a list of Reference Objects and, therefore, MUST NOT contain Message Objects. 
However, it is RECOMMENDED that parsers (or other software) dereference this property for a better development experience.", + "items": { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + }, + "reply": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operationReply.json" + } + ] + }, + "traits": { + "type": "array", + "description": "A list of traits to apply to the operation object. Traits MUST be merged using traits merge mechanism. The resulting object MUST be a valid Operation Object.", + "items": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operationTrait.json" + } + ] + } + }, + "title": { + "type": "string", + "description": "A human-friendly title for the operation." + }, + "summary": { + "type": "string", + "description": "A brief summary of the operation." + }, + "description": { + "type": "string", + "description": "A longer description of the operation. CommonMark is allowed." 
+ }, + "security": { + "$ref": "http://asyncapi.com/definitions/3.0.0/securityRequirements.json" + }, + "tags": { + "type": "array", + "description": "A list of tags for logical grouping and categorization of operations.", + "items": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/tag.json" + } + ] + }, + "uniqueItems": true + }, + "externalDocs": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/externalDocs.json" + } + ] + }, + "bindings": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operationBindingsObject.json" + } + ] + } + }, + "examples": [ + { + "title": "User sign up", + "summary": "Action to sign a user up.", + "description": "A longer description", + "channel": { + "$ref": "#/channels/userSignup" + }, + "action": "send", + "security": [ + { + "petstore_auth": [ + "write:pets", + "read:pets" + ] + } + ], + "tags": [ + { + "name": "user" + }, + { + "name": "signup" + }, + { + "name": "register" + } + ], + "bindings": { + "amqp": { + "ack": false + } + }, + "traits": [ + { + "$ref": "#/components/operationTraits/kafka" + } + ], + "messages": [ + { + "$ref": "/components/messages/userSignedUp" + } + ], + "reply": { + "address": { + "location": "$message.header#/replyTo" + }, + "channel": { + "$ref": "#/channels/userSignupReply" + }, + "messages": [ + { + "$ref": "/components/messages/userSignedUpReply" + } + ] + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/operationReply.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/operationReply.json", + "type": "object", + "description": "Describes the reply part that MAY be applied to an Operation Object. 
If an operation implements the request/reply pattern, the reply object represents the response message.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "address": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operationReplyAddress.json" + } + ] + }, + "channel": { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + "messages": { + "type": "array", + "description": "A list of $ref pointers pointing to the supported Message Objects that can be processed by this operation as reply. It MUST contain a subset of the messages defined in the channel referenced in this operation reply. Every message processed by this operation MUST be valid against one, and only one, of the message objects referenced in this list. Please note the messages property value MUST be a list of Reference Objects and, therefore, MUST NOT contain Message Objects. 
However, it is RECOMMENDED that parsers (or other software) dereference this property for a better development experience.", + "items": { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + } + } + }, + "http://asyncapi.com/definitions/3.0.0/operationReplyAddress.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/operationReplyAddress.json", + "type": "object", + "description": "An object that specifies where an operation has to send the reply", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "required": [ + "location" + ], + "properties": { + "location": { + "type": "string", + "description": "A runtime expression that specifies the location of the reply address.", + "pattern": "^\\$message\\.(header|payload)#(\\/(([^\\/~])|(~[01]))*)*" + }, + "description": { + "type": "string", + "description": "An optional description of the address. CommonMark is allowed." + } + }, + "examples": [ + { + "description": "Consumer inbox", + "location": "$message.header#/replyTo" + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/operationTrait.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/operationTrait.json", + "type": "object", + "description": "Describes a trait that MAY be applied to an Operation Object. 
This object MAY contain any property from the Operation Object, except the action, channel and traits ones.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "title": { + "description": "A human-friendly title for the operation.", + "$ref": "http://asyncapi.com/definitions/3.0.0/operation.json#/properties/title" + }, + "summary": { + "description": "A short summary of what the operation is about.", + "$ref": "http://asyncapi.com/definitions/3.0.0/operation.json#/properties/summary" + }, + "description": { + "description": "A verbose explanation of the operation. CommonMark syntax can be used for rich text representation.", + "$ref": "http://asyncapi.com/definitions/3.0.0/operation.json#/properties/description" + }, + "security": { + "description": "A declaration of which security schemes are associated with this operation. Only one of the security scheme objects MUST be satisfied to authorize an operation. 
In cases where Server Security also applies, it MUST also be satisfied.", + "$ref": "http://asyncapi.com/definitions/3.0.0/operation.json#/properties/security" + }, + "tags": { + "description": "A list of tags for logical grouping and categorization of operations.", + "$ref": "http://asyncapi.com/definitions/3.0.0/operation.json#/properties/tags" + }, + "externalDocs": { + "description": "Additional external documentation for this operation.", + "$ref": "http://asyncapi.com/definitions/3.0.0/operation.json#/properties/externalDocs" + }, + "bindings": { + "description": "A map where the keys describe the name of the protocol and the values describe protocol-specific definitions for the operation.", + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operationBindingsObject.json" + } + ] + } + }, + "examples": [ + { + "bindings": { + "amqp": { + "ack": false + } + } + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/operationBindingsObject.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/operationBindingsObject.json", + "type": "object", + "description": "Map describing protocol-specific definitions for an operation.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "http": { + "properties": { + "bindingVersion": { + "enum": [ + "0.2.0", + "0.3.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/http/0.3.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/http/0.2.0/operation.json" + } + }, + { + "if": 
{ + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/http/0.3.0/operation.json" + } + } + ] + }, + "ws": {}, + "amqp": { + "properties": { + "bindingVersion": { + "enum": [ + "0.3.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/amqp/0.3.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/amqp/0.3.0/operation.json" + } + } + ] + }, + "amqp1": {}, + "mqtt": { + "properties": { + "bindingVersion": { + "enum": [ + "0.2.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/mqtt/0.2.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/mqtt/0.2.0/operation.json" + } + } + ] + }, + "kafka": { + "properties": { + "bindingVersion": { + "enum": [ + "0.5.0", + "0.4.0", + "0.3.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.5.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.5.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.5.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { 
+ "bindingVersion": { + "const": "0.4.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.4.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/kafka/0.3.0/operation.json" + } + } + ] + }, + "anypointmq": {}, + "nats": { + "properties": { + "bindingVersion": { + "enum": [ + "0.1.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/nats/0.1.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.1.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/nats/0.1.0/operation.json" + } + } + ] + }, + "jms": {}, + "sns": { + "properties": { + "bindingVersion": { + "enum": [ + "0.1.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.1.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/operation.json" + } + } + ] + }, + "sqs": { + "properties": { + "bindingVersion": { + "enum": [ + "0.2.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": 
"http://asyncapi.com/bindings/sqs/0.2.0/operation.json" + } + } + ] + }, + "stomp": {}, + "redis": {}, + "ibmmq": {}, + "solace": { + "properties": { + "bindingVersion": { + "enum": [ + "0.4.0", + "0.3.0", + "0.2.0" + ] + } + }, + "allOf": [ + { + "description": "If no bindingVersion specified, use the latest binding", + "if": { + "not": { + "required": [ + "bindingVersion" + ] + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/solace/0.4.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.4.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/solace/0.4.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.3.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/solace/0.3.0/operation.json" + } + }, + { + "if": { + "required": [ + "bindingVersion" + ], + "properties": { + "bindingVersion": { + "const": "0.2.0" + } + } + }, + "then": { + "$ref": "http://asyncapi.com/bindings/solace/0.2.0/operation.json" + } + } + ] + }, + "googlepubsub": {} + } + }, + "http://asyncapi.com/bindings/http/0.3.0/operation.json": { + "$id": "http://asyncapi.com/bindings/http/0.3.0/operation.json", + "title": "HTTP operation bindings object", + "description": "This object contains information about the operation representation in HTTP.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "method": { + "type": "string", + "enum": [ + "GET", + "PUT", + "POST", + "PATCH", + "DELETE", + "HEAD", + "OPTIONS", + "CONNECT", + "TRACE" + ], + "description": "When 'type' is 'request', this is the HTTP method, otherwise it MUST be ignored. 
Its value MUST be one of 'GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'HEAD', 'OPTIONS', 'CONNECT', and 'TRACE'." + }, + "query": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "A Schema object containing the definitions for each query parameter. This schema MUST be of type 'object' and have a properties key." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "query": { + "type": "object", + "required": [ + "companyId" + ], + "properties": { + "companyId": { + "type": "number", + "minimum": 1, + "description": "The Id of the company." + } + }, + "additionalProperties": false + }, + "bindingVersion": "0.3.0" + }, + { + "method": "GET", + "query": { + "type": "object", + "required": [ + "companyId" + ], + "properties": { + "companyId": { + "type": "number", + "minimum": 1, + "description": "The Id of the company." + } + }, + "additionalProperties": false + }, + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/http/0.2.0/operation.json": { + "$id": "http://asyncapi.com/bindings/http/0.2.0/operation.json", + "title": "HTTP operation bindings object", + "description": "This object contains information about the operation representation in HTTP.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "method": { + "type": "string", + "enum": [ + "GET", + "PUT", + "POST", + "PATCH", + "DELETE", + "HEAD", + "OPTIONS", + "CONNECT", + "TRACE" + ], + "description": "When 'type' is 'request', this is the HTTP method, otherwise it MUST be ignored. Its value MUST be one of 'GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'HEAD', 'OPTIONS', 'CONNECT', and 'TRACE'." 
+ }, + "query": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "A Schema object containing the definitions for each query parameter. This schema MUST be of type 'object' and have a properties key." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.2.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "query": { + "type": "object", + "required": [ + "companyId" + ], + "properties": { + "companyId": { + "type": "number", + "minimum": 1, + "description": "The Id of the company." + } + }, + "additionalProperties": false + }, + "bindingVersion": "0.2.0" + }, + { + "method": "GET", + "query": { + "type": "object", + "required": [ + "companyId" + ], + "properties": { + "companyId": { + "type": "number", + "minimum": 1, + "description": "The Id of the company." + } + }, + "additionalProperties": false + }, + "bindingVersion": "0.2.0" + } + ] + }, + "http://asyncapi.com/bindings/amqp/0.3.0/operation.json": { + "$id": "http://asyncapi.com/bindings/amqp/0.3.0/operation.json", + "title": "AMQP operation bindings object", + "description": "This object contains information about the operation representation in AMQP.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "expiration": { + "type": "integer", + "minimum": 0, + "description": "TTL (Time-To-Live) for the message. It MUST be greater than or equal to zero." + }, + "userId": { + "type": "string", + "description": "Identifies the user who has sent the message." + }, + "cc": { + "type": "array", + "items": { + "type": "string" + }, + "description": "The routing keys the message should be routed to at the time of publishing." + }, + "priority": { + "type": "integer", + "description": "A priority for the message." 
+ }, + "deliveryMode": { + "type": "integer", + "enum": [ + 1, + 2 + ], + "description": "Delivery mode of the message. Its value MUST be either 1 (transient) or 2 (persistent)." + }, + "mandatory": { + "type": "boolean", + "description": "Whether the message is mandatory or not." + }, + "bcc": { + "type": "array", + "items": { + "type": "string" + }, + "description": "Like cc but consumers will not receive this information." + }, + "timestamp": { + "type": "boolean", + "description": "Whether the message should include a timestamp or not." + }, + "ack": { + "type": "boolean", + "description": "Whether the consumer should ack the message or not." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding. If omitted, \"latest\" MUST be assumed." + } + }, + "examples": [ + { + "expiration": 100000, + "userId": "guest", + "cc": [ + "user.logs" + ], + "priority": 10, + "deliveryMode": 2, + "mandatory": false, + "bcc": [ + "external.audit" + ], + "timestamp": true, + "ack": false, + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/mqtt/0.2.0/operation.json": { + "$id": "http://asyncapi.com/bindings/mqtt/0.2.0/operation.json", + "title": "MQTT operation bindings object", + "description": "This object contains information about the operation representation in MQTT.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "qos": { + "type": "integer", + "enum": [ + 0, + 1, + 2 + ], + "description": "Defines the Quality of Service (QoS) levels for the message flow between client and server. Its value MUST be either 0 (At most once delivery), 1 (At least once delivery), or 2 (Exactly once delivery)." + }, + "retain": { + "type": "boolean", + "description": "Whether the broker should retain the message or not." 
+ }, + "messageExpiryInterval": { + "oneOf": [ + { + "type": "integer", + "minimum": 0, + "maximum": 4294967295 + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + } + ], + "description": "Lifetime of the message in seconds" + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.2.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "qos": 2, + "retain": true, + "messageExpiryInterval": 60, + "bindingVersion": "0.2.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.5.0/operation.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.5.0/operation.json", + "title": "Operation Schema", + "description": "This object contains information about the operation representation in Kafka.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "groupId": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "Id of the consumer group." + }, + "clientId": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "Id of the consumer inside a consumer group." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.5.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." 
+ } + }, + "examples": [ + { + "groupId": { + "type": "string", + "enum": [ + "myGroupId" + ] + }, + "clientId": { + "type": "string", + "enum": [ + "myClientId" + ] + }, + "bindingVersion": "0.5.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.4.0/operation.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.4.0/operation.json", + "title": "Operation Schema", + "description": "This object contains information about the operation representation in Kafka.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "groupId": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "Id of the consumer group." + }, + "clientId": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "Id of the consumer inside a consumer group." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.4.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "groupId": { + "type": "string", + "enum": [ + "myGroupId" + ] + }, + "clientId": { + "type": "string", + "enum": [ + "myClientId" + ] + }, + "bindingVersion": "0.4.0" + } + ] + }, + "http://asyncapi.com/bindings/kafka/0.3.0/operation.json": { + "$id": "http://asyncapi.com/bindings/kafka/0.3.0/operation.json", + "title": "Operation Schema", + "description": "This object contains information about the operation representation in Kafka.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "groupId": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "Id of the consumer group." 
+ }, + "clientId": { + "$ref": "http://asyncapi.com/definitions/3.0.0/schema.json", + "description": "Id of the consumer inside a consumer group." + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." + } + }, + "examples": [ + { + "groupId": { + "type": "string", + "enum": [ + "myGroupId" + ] + }, + "clientId": { + "type": "string", + "enum": [ + "myClientId" + ] + }, + "bindingVersion": "0.3.0" + } + ] + }, + "http://asyncapi.com/bindings/nats/0.1.0/operation.json": { + "$id": "http://asyncapi.com/bindings/nats/0.1.0/operation.json", + "title": "NATS operation bindings object", + "description": "This object contains information about the operation representation in NATS.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "queue": { + "type": "string", + "description": "Defines the name of the queue to use. It MUST NOT exceed 255 characters.", + "maxLength": 255 + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.1.0" + ], + "description": "The version of this binding. If omitted, 'latest' MUST be assumed." 
+ } + }, + "examples": [ + { + "queue": "MyCustomQueue", + "bindingVersion": "0.1.0" + } + ] + }, + "http://asyncapi.com/bindings/sns/0.1.0/operation.json": { + "$id": "http://asyncapi.com/bindings/sns/0.1.0/operation.json", + "title": "Operation Schema", + "description": "This object contains information about the operation representation in SNS.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "topic": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/operation.json#/definitions/identifier", + "description": "Often we can assume that the SNS Topic is the channel name-we provide this field in case the you need to supply the ARN, or the Topic name is not the channel name in the AsyncAPI document." + }, + "consumers": { + "type": "array", + "description": "The protocols that listen to this topic and their endpoints.", + "items": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/operation.json#/definitions/consumer" + }, + "minItems": 1 + }, + "deliveryPolicy": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/operation.json#/definitions/deliveryPolicy", + "description": "Policy for retries to HTTP. The field is the default for HTTP receivers of the SNS Topic which may be overridden by a specific consumer." + }, + "bindingVersion": { + "type": "string", + "description": "The version of this binding.", + "default": "latest" + } + }, + "required": [ + "consumers" + ], + "definitions": { + "identifier": { + "type": "object", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "url": { + "type": "string", + "description": "The endpoint is a URL." + }, + "email": { + "type": "string", + "description": "The endpoint is an email address." 
+ }, + "phone": { + "type": "string", + "description": "The endpoint is a phone number." + }, + "arn": { + "type": "string", + "description": "The target is an ARN. For example, for SQS, the identifier may be an ARN, which will be of the form: arn:aws:sqs:{region}:{account-id}:{queueName}" + }, + "name": { + "type": "string", + "description": "The endpoint is identified by a name, which corresponds to an identifying field called 'name' of a binding for that protocol on this publish Operation Object. For example, if the protocol is 'sqs' then the name refers to the name field sqs binding. We don't use $ref because we are referring, not including." + } + } + }, + "consumer": { + "type": "object", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "protocol": { + "description": "The protocol that this endpoint receives messages by.", + "type": "string", + "enum": [ + "http", + "https", + "email", + "email-json", + "sms", + "sqs", + "application", + "lambda", + "firehose" + ] + }, + "endpoint": { + "description": "The endpoint messages are delivered to.", + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/operation.json#/definitions/identifier" + }, + "filterPolicy": { + "type": "object", + "description": "Only receive a subset of messages from the channel, determined by this policy. Depending on the FilterPolicyScope, a map of either a message attribute or message body to an array of possible matches. 
The match may be a simple string for an exact match, but it may also be an object that represents a constraint and values for that constraint.", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "additionalProperties": { + "oneOf": [ + { + "type": "array", + "items": { + "type": "string" + } + }, + { + "type": "string" + }, + { + "type": "object" + } + ] + } + }, + "filterPolicyScope": { + "type": "string", + "description": "Determines whether the FilterPolicy applies to MessageAttributes or MessageBody.", + "enum": [ + "MessageAttributes", + "MessageBody" + ], + "default": "MessageAttributes" + }, + "rawMessageDelivery": { + "type": "boolean", + "description": "If true AWS SNS attributes are removed from the body, and for SQS, SNS message attributes are copied to SQS message attributes. If false the SNS attributes are included in the body." + }, + "redrivePolicy": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/operation.json#/definitions/redrivePolicy" + }, + "deliveryPolicy": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/operation.json#/definitions/deliveryPolicy", + "description": "Policy for retries to HTTP. The parameter is for that SNS Subscription and overrides any policy on the SNS Topic." + }, + "displayName": { + "type": "string", + "description": "The display name to use with an SNS subscription" + } + }, + "required": [ + "protocol", + "endpoint", + "rawMessageDelivery" + ] + }, + "deliveryPolicy": { + "type": "object", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "minDelayTarget": { + "type": "integer", + "description": "The minimum delay for a retry in seconds." + }, + "maxDelayTarget": { + "type": "integer", + "description": "The maximum delay for a retry in seconds." 
+ }, + "numRetries": { + "type": "integer", + "description": "The total number of retries, including immediate, pre-backoff, backoff, and post-backoff retries." + }, + "numNoDelayRetries": { + "type": "integer", + "description": "The number of immediate retries (with no delay)." + }, + "numMinDelayRetries": { + "type": "integer", + "description": "The number of immediate retries (with delay)." + }, + "numMaxDelayRetries": { + "type": "integer", + "description": "The number of post-backoff phase retries, with the maximum delay between retries." + }, + "backoffFunction": { + "type": "string", + "description": "The algorithm for backoff between retries.", + "enum": [ + "arithmetic", + "exponential", + "geometric", + "linear" + ] + }, + "maxReceivesPerSecond": { + "type": "integer", + "description": "The maximum number of deliveries per second, per subscription." + } + } + }, + "redrivePolicy": { + "type": "object", + "description": "Prevent poison pill messages by moving un-processable messages to an SQS dead letter queue.", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "deadLetterQueue": { + "$ref": "http://asyncapi.com/bindings/sns/0.1.0/operation.json#/definitions/identifier", + "description": "The SQS queue to use as a dead letter queue (DLQ)." 
+ }, + "maxReceiveCount": { + "type": "integer", + "description": "The number of times a message is delivered to the source queue before being moved to the dead-letter queue.", + "default": 10 + } + }, + "required": [ + "deadLetterQueue" + ] + } + }, + "examples": [ + { + "topic": { + "name": "someTopic" + }, + "consumers": [ + { + "protocol": "sqs", + "endpoint": { + "name": "someQueue" + }, + "filterPolicy": { + "store": [ + "asyncapi_corp" + ], + "event": [ + { + "anything-but": "order_cancelled" + } + ], + "customer_interests": [ + "rugby", + "football", + "baseball" + ] + }, + "filterPolicyScope": "MessageAttributes", + "rawMessageDelivery": false, + "redrivePolicy": { + "deadLetterQueue": { + "arn": "arn:aws:SQS:eu-west-1:0000000:123456789" + }, + "maxReceiveCount": 25 + }, + "deliveryPolicy": { + "minDelayTarget": 10, + "maxDelayTarget": 100, + "numRetries": 5, + "numNoDelayRetries": 2, + "numMinDelayRetries": 3, + "numMaxDelayRetries": 5, + "backoffFunction": "linear", + "maxReceivesPerSecond": 2 + } + } + ] + } + ] + }, + "http://asyncapi.com/bindings/sqs/0.2.0/operation.json": { + "$id": "http://asyncapi.com/bindings/sqs/0.2.0/operation.json", + "title": "Operation Schema", + "description": "This object contains information about the operation representation in SQS.", + "type": "object", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "queues": { + "type": "array", + "description": "Queue objects that are either the endpoint for an SNS Operation Binding Object, or the deadLetterQueue of the SQS Operation Binding Object.", + "items": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/operation.json#/definitions/queue" + } + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.1.0", + "0.2.0" + ], + "description": "The version of this binding. 
If omitted, 'latest' MUST be assumed.", + "default": "latest" + } + }, + "required": [ + "queues" + ], + "definitions": { + "queue": { + "type": "object", + "description": "A definition of a queue.", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "$ref": { + "type": "string", + "description": "Allows for an external definition of a queue. The referenced structure MUST be in the format of a Queue. If there are conflicts between the referenced definition and this Queue's definition, the behavior is undefined." + }, + "name": { + "type": "string", + "description": "The name of the queue. When an SNS Operation Binding Object references an SQS queue by name, the identifier should be the one in this field." + }, + "fifoQueue": { + "type": "boolean", + "description": "Is this a FIFO queue?", + "default": false + }, + "deduplicationScope": { + "type": "string", + "enum": [ + "queue", + "messageGroup" + ], + "description": "Specifies whether message deduplication occurs at the message group or queue level. Valid values are messageGroup and queue (default).", + "default": "queue" + }, + "fifoThroughputLimit": { + "type": "string", + "enum": [ + "perQueue", + "perMessageGroupId" + ], + "description": "Specifies whether the FIFO queue throughput quota applies to the entire queue or per message group. Valid values are perQueue (default) and perMessageGroupId.", + "default": "perQueue" + }, + "deliveryDelay": { + "type": "integer", + "description": "The number of seconds to delay before a message sent to the queue can be received. 
Used to create a delay queue.", + "minimum": 0, + "maximum": 900, + "default": 0 + }, + "visibilityTimeout": { + "type": "integer", + "description": "The length of time, in seconds, that a consumer locks a message - hiding it from reads - before it is unlocked and can be read again.", + "minimum": 0, + "maximum": 43200, + "default": 30 + }, + "receiveMessageWaitTime": { + "type": "integer", + "description": "Determines if the queue uses short polling or long polling. Set to zero the queue reads available messages and returns immediately. Set to a non-zero integer, long polling waits the specified number of seconds for messages to arrive before returning.", + "default": 0 + }, + "messageRetentionPeriod": { + "type": "integer", + "description": "How long to retain a message on the queue in seconds, unless deleted.", + "minimum": 60, + "maximum": 1209600, + "default": 345600 + }, + "redrivePolicy": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/operation.json#/definitions/redrivePolicy" + }, + "policy": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/operation.json#/definitions/policy" + }, + "tags": { + "type": "object", + "description": "Key-value pairs that represent AWS tags on the queue." 
+ } + }, + "required": [ + "name" + ] + }, + "redrivePolicy": { + "type": "object", + "description": "Prevent poison pill messages by moving un-processable messages to an SQS dead letter queue.", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "deadLetterQueue": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/operation.json#/definitions/identifier" + }, + "maxReceiveCount": { + "type": "integer", + "description": "The number of times a message is delivered to the source queue before being moved to the dead-letter queue.", + "default": 10 + } + }, + "required": [ + "deadLetterQueue" + ] + }, + "identifier": { + "type": "object", + "description": "The SQS queue to use as a dead letter queue (DLQ).", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "arn": { + "type": "string", + "description": "The target is an ARN. For example, for SQS, the identifier may be an ARN, which will be of the form: arn:aws:sqs:{region}:{account-id}:{queueName}" + }, + "name": { + "type": "string", + "description": "The endpoint is identified by a name, which corresponds to an identifying field called 'name' of a binding for that protocol on this publish Operation Object. For example, if the protocol is 'sqs' then the name refers to the name field sqs binding." 
+ } + } + }, + "policy": { + "type": "object", + "description": "The security policy for the SQS Queue", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "statements": { + "type": "array", + "description": "An array of statement objects, each of which controls a permission for this queue.", + "items": { + "$ref": "http://asyncapi.com/bindings/sqs/0.2.0/operation.json#/definitions/statement" + } + } + }, + "required": [ + "statements" + ] + }, + "statement": { + "type": "object", + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "effect": { + "type": "string", + "enum": [ + "Allow", + "Deny" + ] + }, + "principal": { + "description": "The AWS account or resource ARN that this statement applies to.", + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + }, + "action": { + "description": "The SQS permission being allowed or denied e.g. 
sqs:ReceiveMessage", + "oneOf": [ + { + "type": "string" + }, + { + "type": "array", + "items": { + "type": "string" + } + } + ] + } + }, + "required": [ + "effect", + "principal", + "action" + ] + } + }, + "examples": [ + { + "queues": [ + { + "name": "myQueue", + "fifoQueue": true, + "deduplicationScope": "messageGroup", + "fifoThroughputLimit": "perMessageGroupId", + "deliveryDelay": 10, + "redrivePolicy": { + "deadLetterQueue": { + "name": "myQueue_error" + }, + "maxReceiveCount": 15 + }, + "policy": { + "statements": [ + { + "effect": "Deny", + "principal": "arn:aws:iam::123456789012:user/dec.kolakowski", + "action": [ + "sqs:SendMessage", + "sqs:ReceiveMessage" + ] + } + ] + } + }, + { + "name": "myQueue_error", + "deliveryDelay": 10 + } + ] + } + ] + }, + "http://asyncapi.com/bindings/solace/0.4.0/operation.json": { + "$id": "http://asyncapi.com/bindings/solace/0.4.0/operation.json", + "title": "Solace operation bindings object", + "description": "This object contains information about the operation representation in Solace.", + "type": "object", + "additionalProperties": false, + "properties": { + "bindingVersion": { + "type": "string", + "enum": [ + "0.4.0" + ], + "description": "The version of this binding. If omitted, \"latest\" MUST be assumed." + }, + "destinations": { + "description": "The list of Solace destinations referenced in the operation.", + "type": "array", + "items": { + "type": "object", + "properties": { + "deliveryMode": { + "type": "string", + "enum": [ + "direct", + "persistent" + ] + } + }, + "oneOf": [ + { + "properties": { + "destinationType": { + "type": "string", + "const": "queue", + "description": "If the type is queue, then the subscriber can bind to the queue. The queue subscribes to the given topicSubscriptions. If no topicSubscriptions are provied, the queue will subscribe to the topic as represented by the channel name." 
+ }, + "queue": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the queue" + }, + "topicSubscriptions": { + "type": "array", + "description": "The list of topics that the queue subscribes to.", + "items": { + "type": "string" + } + }, + "accessType": { + "type": "string", + "enum": [ + "exclusive", + "nonexclusive" + ] + }, + "maxTtl": { + "type": "string", + "description": "The maximum TTL to apply to messages to be spooled." + }, + "maxMsgSpoolUsage": { + "type": "string", + "description": "The maximum amount of message spool that the given queue may use" + } + } + } + } + }, + { + "properties": { + "destinationType": { + "type": "string", + "const": "topic", + "description": "If the type is topic, then the subscriber subscribes to the given topicSubscriptions. If no topicSubscriptions are provided, the client will subscribe to the topic as represented by the channel name." + }, + "topicSubscriptions": { + "type": "array", + "description": "The list of topics that the client subscribes to.", + "items": { + "type": "string" + } + } + } + } + ] + } + }, + "timeToLive": { + "type": "integer", + "description": "Interval in milliseconds or a Schema Object containing the definition of the lifetime of the message." + }, + "priority": { + "type": "integer", + "minimum": 0, + "maximum": 255, + "description": "The valid priority value range is 0-255 with 0 as the lowest priority and 255 as the highest or a Schema Object containing the definition of the priority." + }, + "dmqEligible": { + "type": "boolean", + "description": "Set the message to be eligible to be moved to a Dead Message Queue. The default value is false." 
+ } + }, + "examples": [ + { + "bindingVersion": "0.4.0", + "destinations": [ + { + "destinationType": "queue", + "queue": { + "name": "sampleQueue", + "topicSubscriptions": [ + "samples/*" + ], + "accessType": "nonexclusive" + } + }, + { + "destinationType": "topic", + "topicSubscriptions": [ + "samples/*" + ] + } + ] + } + ] + }, + "http://asyncapi.com/bindings/solace/0.3.0/operation.json": { + "$id": "http://asyncapi.com/bindings/solace/0.3.0/operation.json", + "title": "Solace operation bindings object", + "description": "This object contains information about the operation representation in Solace.", + "type": "object", + "additionalProperties": false, + "properties": { + "destinations": { + "description": "The list of Solace destinations referenced in the operation.", + "type": "array", + "items": { + "type": "object", + "properties": { + "deliveryMode": { + "type": "string", + "enum": [ + "direct", + "persistent" + ] + } + }, + "oneOf": [ + { + "properties": { + "destinationType": { + "type": "string", + "const": "queue", + "description": "If the type is queue, then the subscriber can bind to the queue. The queue subscribes to the given topicSubscriptions. If no topicSubscriptions are provied, the queue will subscribe to the topic as represented by the channel name." + }, + "queue": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the queue" + }, + "topicSubscriptions": { + "type": "array", + "description": "The list of topics that the queue subscribes to.", + "items": { + "type": "string" + } + }, + "accessType": { + "type": "string", + "enum": [ + "exclusive", + "nonexclusive" + ] + }, + "maxTtl": { + "type": "string", + "description": "The maximum TTL to apply to messages to be spooled." 
+ }, + "maxMsgSpoolUsage": { + "type": "string", + "description": "The maximum amount of message spool that the given queue may use" + } + } + } + } + }, + { + "properties": { + "destinationType": { + "type": "string", + "const": "topic", + "description": "If the type is topic, then the subscriber subscribes to the given topicSubscriptions. If no topicSubscriptions are provided, the client will subscribe to the topic as represented by the channel name." + }, + "topicSubscriptions": { + "type": "array", + "description": "The list of topics that the client subscribes to.", + "items": { + "type": "string" + } + } + } + } + ] + } + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.3.0" + ], + "description": "The version of this binding. If omitted, \"latest\" MUST be assumed." + } + }, + "examples": [ + { + "bindingVersion": "0.3.0", + "destinations": [ + { + "destinationType": "queue", + "queue": { + "name": "sampleQueue", + "topicSubscriptions": [ + "samples/*" + ], + "accessType": "nonexclusive" + } + }, + { + "destinationType": "topic", + "topicSubscriptions": [ + "samples/*" + ] + } + ] + } + ] + }, + "http://asyncapi.com/bindings/solace/0.2.0/operation.json": { + "$id": "http://asyncapi.com/bindings/solace/0.2.0/operation.json", + "title": "Solace operation bindings object", + "description": "This object contains information about the operation representation in Solace.", + "type": "object", + "additionalProperties": false, + "properties": { + "destinations": { + "description": "The list of Solace destinations referenced in the operation.", + "type": "array", + "items": { + "type": "object", + "properties": { + "deliveryMode": { + "type": "string", + "enum": [ + "direct", + "persistent" + ] + } + }, + "oneOf": [ + { + "properties": { + "destinationType": { + "type": "string", + "const": "queue", + "description": "If the type is queue, then the subscriber can bind to the queue. The queue subscribes to the given topicSubscriptions. 
If no topicSubscriptions are provied, the queue will subscribe to the topic as represented by the channel name." + }, + "queue": { + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "The name of the queue" + }, + "topicSubscriptions": { + "type": "array", + "description": "The list of topics that the queue subscribes to.", + "items": { + "type": "string" + } + }, + "accessType": { + "type": "string", + "enum": [ + "exclusive", + "nonexclusive" + ] + } + } + } + } + }, + { + "properties": { + "destinationType": { + "type": "string", + "const": "topic", + "description": "If the type is topic, then the subscriber subscribes to the given topicSubscriptions. If no topicSubscriptions are provided, the client will subscribe to the topic as represented by the channel name." + }, + "topicSubscriptions": { + "type": "array", + "description": "The list of topics that the client subscribes to.", + "items": { + "type": "string" + } + } + } + } + ] + } + }, + "bindingVersion": { + "type": "string", + "enum": [ + "0.2.0" + ], + "description": "The version of this binding. If omitted, \"latest\" MUST be assumed." + } + }, + "examples": [ + { + "bindingVersion": "0.2.0", + "destinations": [ + { + "destinationType": "queue", + "queue": { + "name": "sampleQueue", + "topicSubscriptions": [ + "samples/*" + ], + "accessType": "nonexclusive" + } + }, + { + "destinationType": "topic", + "topicSubscriptions": [ + "samples/*" + ] + } + ] + } + ] + }, + "http://asyncapi.com/definitions/3.0.0/components.json": { + "$id": "http://asyncapi.com/definitions/3.0.0/components.json", + "type": "object", + "description": "An object to hold a set of reusable objects for different aspects of the AsyncAPI specification. 
All objects defined within the components object will have no effect on the API unless they are explicitly referenced from properties outside the components object.", + "additionalProperties": false, + "patternProperties": { + "^x-[\\w\\d\\.\\x2d_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/specificationExtension.json" + } + }, + "properties": { + "schemas": { + "type": "object", + "description": "An object to hold reusable Schema Object. If this is a Schema Object, then the schemaFormat will be assumed to be 'application/vnd.aai.asyncapi+json;version=asyncapi' where the version is equal to the AsyncAPI Version String.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "$ref": "http://asyncapi.com/definitions/3.0.0/anySchema.json" + } + } + }, + "servers": { + "type": "object", + "description": "An object to hold reusable Server Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/server.json" + } + ] + } + } + }, + "channels": { + "type": "object", + "description": "An object to hold reusable Channel Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/channel.json" + } + ] + } + } + }, + "serverVariables": { + "type": "object", + "description": "An object to hold reusable Server Variable Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/serverVariable.json" + } + ] + } + } + }, + "operations": { + "type": "object", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": 
"http://asyncapi.com/definitions/3.0.0/operation.json" + } + ] + } + } + }, + "messages": { + "type": "object", + "description": "An object to hold reusable Message Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageObject.json" + } + ] + } + } + }, + "securitySchemes": { + "type": "object", + "description": "An object to hold reusable Security Scheme Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/SecurityScheme.json" + } + ] + } + } + }, + "parameters": { + "type": "object", + "description": "An object to hold reusable Parameter Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/parameter.json" + } + ] + } + } + }, + "correlationIds": { + "type": "object", + "description": "An object to hold reusable Correlation ID Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/correlationId.json" + } + ] + } + } + }, + "operationTraits": { + "type": "object", + "description": "An object to hold reusable Operation Trait Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operationTrait.json" + } + ] + } + } + }, + "messageTraits": { + "type": "object", + "description": "An object to hold reusable Message Trait Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": 
"http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageTrait.json" + } + ] + } + } + }, + "replies": { + "type": "object", + "description": "An object to hold reusable Operation Reply Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operationReply.json" + } + ] + } + } + }, + "replyAddresses": { + "type": "object", + "description": "An object to hold reusable Operation Reply Address Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operationReplyAddress.json" + } + ] + } + } + }, + "serverBindings": { + "type": "object", + "description": "An object to hold reusable Server Bindings Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/serverBindingsObject.json" + } + ] + } + } + }, + "channelBindings": { + "type": "object", + "description": "An object to hold reusable Channel Bindings Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/channelBindingsObject.json" + } + ] + } + } + }, + "operationBindings": { + "type": "object", + "description": "An object to hold reusable Operation Bindings Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/operationBindingsObject.json" + } + ] + } + } + }, + "messageBindings": { + "type": "object", + "description": "An object to 
hold reusable Message Bindings Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/messageBindingsObject.json" + } + ] + } + } + }, + "tags": { + "type": "object", + "description": "An object to hold reusable Tag Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/tag.json" + } + ] + } + } + }, + "externalDocs": { + "type": "object", + "description": "An object to hold reusable External Documentation Objects.", + "patternProperties": { + "^[\\w\\d\\.\\-_]+$": { + "oneOf": [ + { + "$ref": "http://asyncapi.com/definitions/3.0.0/Reference.json" + }, + { + "$ref": "http://asyncapi.com/definitions/3.0.0/externalDocs.json" + } + ] + } + } + } + }, + "examples": [ + { + "components": { + "schemas": { + "Category": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + } + }, + "Tag": { + "type": "object", + "properties": { + "id": { + "type": "integer", + "format": "int64" + }, + "name": { + "type": "string" + } + } + }, + "AvroExample": { + "schemaFormat": "application/vnd.apache.avro+json;version=1.9.0", + "schema": { + "$ref": "path/to/user-create.avsc#/UserCreate" + } + } + }, + "servers": { + "development": { + "host": "{stage}.in.mycompany.com:{port}", + "description": "RabbitMQ broker", + "protocol": "amqp", + "protocolVersion": "0-9-1", + "variables": { + "stage": { + "$ref": "#/components/serverVariables/stage" + }, + "port": { + "$ref": "#/components/serverVariables/port" + } + } + } + }, + "serverVariables": { + "stage": { + "default": "demo", + "description": "This value is assigned by the service provider, in this example `mycompany.com`" + }, + "port": { + "enum": [ + "5671", + "5672" + 
], + "default": "5672" + } + }, + "channels": { + "user/signedup": { + "subscribe": { + "message": { + "$ref": "#/components/messages/userSignUp" + } + } + } + }, + "messages": { + "userSignUp": { + "summary": "Action to sign a user up.", + "description": "Multiline description of what this action does.\nHere you have another line.\n", + "tags": [ + { + "name": "user" + }, + { + "name": "signup" + } + ], + "headers": { + "type": "object", + "properties": { + "applicationInstanceId": { + "description": "Unique identifier for a given instance of the publishing application", + "type": "string" + } + } + }, + "payload": { + "type": "object", + "properties": { + "user": { + "$ref": "#/components/schemas/userCreate" + }, + "signup": { + "$ref": "#/components/schemas/signup" + } + } + } + } + }, + "parameters": { + "userId": { + "description": "Id of the user." + } + }, + "correlationIds": { + "default": { + "description": "Default Correlation ID", + "location": "$message.header#/correlationId" + } + }, + "messageTraits": { + "commonHeaders": { + "headers": { + "type": "object", + "properties": { + "my-app-header": { + "type": "integer", + "minimum": 0, + "maximum": 100 + } + } + } + } + } + } + } + ] + } + }, + "description": "!!Auto generated!! \n Do not manually edit. 
" +} \ No newline at end of file diff --git a/pygeoapi/resources/schemas/config/pygeoapi-config-0.x.yml b/pygeoapi/resources/schemas/config/pygeoapi-config-0.x.yml index f48f615ff..19c18f5dc 100644 --- a/pygeoapi/resources/schemas/config/pygeoapi-config-0.x.yml +++ b/pygeoapi/resources/schemas/config/pygeoapi-config-0.x.yml @@ -167,6 +167,33 @@ properties: - mimetype - encoding - map + pubsub: + type: object + description: Pub/Sub settings for event driven notifications + properties: + name: + type: string + description: name of pubsub client + broker: + type: object + description: broker definition + properties: + url: + type: string + format: uri + description: URL of broker + channel: + type: string + description: channel to subscribe to + hidden: + type: boolean + default: false + description: whether to hide broker link on API responses + required: + - url + required: + - name + - broker logging: type: object description: logging definitions @@ -415,7 +442,7 @@ properties: - href extents: type: object - description: spatial and temporal extents + description: spatial and temporal extents. 
Note that adding custom named dimensions is also possible properties: spatial: type: object @@ -450,6 +477,34 @@ properties: type: string description: temporal reference system of features default: 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + resolution: + type: string + description: temporal resolution + default: + type: string + description: default time value + patternProperties: + "^(?!spatial$|temporal$).*": + type: object + description: additional custom dimensions + properties: + range: + type: array + description: The overall range of the dimension + minItems: 2 + url: + type: string + format: uri + description: A URI to a description of the dimension + units: + type: string + description: Units of the dimension + values: + type: array + description: enumerated list of values + required: + - url + - range required: - spatial limits: @@ -567,7 +622,6 @@ properties: http://www.opengis.net/def/crs/OGC/1.3/CRS84 storage_crs_coordinate_epoch: type: number - format: uri description: |- point in time at which coordinates in the spatial feature collection are referenced to the dynamic coordinate reference system in `storageCrs`, that may be used to retrieve features from a diff --git a/pygeoapi/starlette_app.py b/pygeoapi/starlette_app.py index dfe059f8a..ffb5b656f 100644 --- a/pygeoapi/starlette_app.py +++ b/pygeoapi/starlette_app.py @@ -5,7 +5,7 @@ # Abdulazeez Abdulazeez Adeshina # # Copyright (c) 2025 Francesco Bartoli -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2022 Abdulazeez Abdulazeez Adeshina # # Permission is hereby granted, free of charge, to any person @@ -59,6 +59,7 @@ import pygeoapi.api.processes as processes_api import pygeoapi.api.stac as stac_api import pygeoapi.api.tiles as tiles_api +from pygeoapi.asyncapi import load_asyncapi_document from pygeoapi.openapi import load_openapi_document from pygeoapi.config import get_config from pygeoapi.util import get_api_rules @@ -69,6 +70,7 @@ raise 
RuntimeError('PYGEOAPI_OPENAPI environment variable not set') OPENAPI = load_openapi_document() +ASYNCAPI = load_asyncapi_document() if CONFIG['server'].get('admin'): import pygeoapi.api.admin as admin_api @@ -86,7 +88,7 @@ API_RULES = get_api_rules(CONFIG) -api_ = API(CONFIG, OPENAPI) +api_ = API(CONFIG, OPENAPI, ASYNCAPI) def call_api_threadsafe( @@ -167,6 +169,17 @@ async def openapi(request: Request) -> Response: return await execute_from_starlette(core_api.openapi_, request) +async def asyncapi(request: Request) -> Response: + """ + AsyncAPI endpoint + + :param request: Starlette Request instance + + :returns: Starlette HTTP Response + """ + return await execute_from_starlette(core_api.asyncapi_, request) + + async def conformance(request: Request) -> Response: """ OGC API conformance endpoint @@ -699,6 +712,7 @@ async def __call__(self, scope: Scope, api_routes = [ Route('/', landing_page), Route('/openapi', openapi), + Route('/asyncapi', asyncapi), Route('/conformance', conformance), Route('/TileMatrixSets/{tileMatrixSetId}', get_tilematrix_set), Route('/TileMatrixSets', get_tilematrix_sets), diff --git a/pygeoapi/static/workSpace/viewer_demo.js b/pygeoapi/static/workSpace/viewer_demo.js index 0106400b6..1bd07b816 100644 --- a/pygeoapi/static/workSpace/viewer_demo.js +++ b/pygeoapi/static/workSpace/viewer_demo.js @@ -840,6 +840,7 @@ document.getElementById("api-get-collections").addEventListener("click", async ( const data = await api.getIndoorCollections(); renderCollections(data.filtered); + console.log(data.filtered); apiLog.textContent = JSON.stringify(data.raw, null, 2); } catch (err) { apiLog.textContent = "Error: " + err.message; diff --git a/pygeoapi/templates/asyncapi.html b/pygeoapi/templates/asyncapi.html new file mode 100644 index 000000000..bc1915150 --- /dev/null +++ b/pygeoapi/templates/asyncapi.html @@ -0,0 +1,24 @@ + + + + AsyncAPI UI - {{ config['metadata']['identification']['title'] }} + + + +
+ + + + diff --git a/pygeoapi/templates/collections/collection.html b/pygeoapi/templates/collections/collection.html index d56fcb210..bc123ac65 100644 --- a/pygeoapi/templates/collections/collection.html +++ b/pygeoapi/templates/collections/collection.html @@ -166,13 +166,6 @@

{% trans %}Storage CRS{% endtrans %}

} )); - {# if this collection has a map representation, add it to the map #} - {% for link in data['links'] %} - {% if link['rel'] == 'http://www.opengis.net/def/rel/ogc/1.0/map' and link['href'] %} - L.imageOverlay.ogcapi("{{ data['base_url'] }}", {collection: "{{ data['id'] }}", "opacity": .7, "transparent": true}).addTo(map); - {% endif %} - {% endfor %} - var bbox_layer = L.polygon([ ['{{ data['extent']['spatial']['bbox'][0][1] }}', '{{ data['extent']['spatial']['bbox'][0][0] }}'], ['{{ data['extent']['spatial']['bbox'][0][3] }}', '{{ data['extent']['spatial']['bbox'][0][0] }}'], @@ -180,6 +173,16 @@

{% trans %}Storage CRS{% endtrans %}

['{{ data['extent']['spatial']['bbox'][0][1] }}', '{{ data['extent']['spatial']['bbox'][0][2] }}'] ]); + {# if this collection has a map representation, add it to the map #} + {% for link in data['links'] %} + {% if link['rel'] == 'http://www.opengis.net/def/rel/ogc/1.0/map' and link['href'] %} + L.imageOverlay.ogcapi("{{ data['base_url'] }}", {collection: "{{ data['id'] }}", "opacity": .7, "transparent": true}).addTo(map); + bbox_layer.setStyle({ + fillOpacity: 0 + }); + {% endif %} + {% endfor %} + map.addLayer(bbox_layer); map.fitBounds(bbox_layer.getBounds(), {maxZoom: 10}); diff --git a/pygeoapi/templates/collections/items/index.html b/pygeoapi/templates/collections/items/index.html index e2cabb827..3d325c445 100644 --- a/pygeoapi/templates/collections/items/index.html +++ b/pygeoapi/templates/collections/items/index.html @@ -33,28 +33,30 @@

{% for l in data['links'] if l.rel == 'collection' %} {{ l['title'] }} {% en {% if data['itemtype'] == 'record' %}
-
-
- - -
-
- - -
-
- - +
+
+
+ + +
+
+ + +
+
+ + +
-
-
-
- - - - +
+
+ + + + +
-
+
{% endif %} @@ -190,6 +192,7 @@

{% for l in data['links'] if l.rel == 'collection' %} {{ l['title'] }} {% en document.getElementById("q").addEventListener("keydown", function(event) { if (event.key === "Enter") { + event.preventDefault(); submitForm(); } }); @@ -205,12 +208,12 @@

{% for l in data['links'] if l.rel == 'collection' %} {{ l['title'] }} {% en if (q) { query_string.push('q=' + encodeURIComponent(q)); } - if (datetime_begin) { + if (datetime_begin !== "") { datetime.push(datetime_begin + 'T00:00:00Z'); } else { datetime.push('..'); } - if (datetime_end) { + if (datetime_end !== "") { datetime.push(datetime_end + 'T23:59:59Z'); } else { datetime.push('..'); @@ -286,6 +289,9 @@

{% for l in data['links'] if l.rel == 'collection' %} {{ l['title'] }} {% en setRectangle(map.getBounds().pad(-0.95)); } } + + var form = document.getElementById("searchForm"); + form.addEventListener("submit", submitForm); {% endif %} {% endif %} diff --git a/pygeoapi/util.py b/pygeoapi/util.py index a4b3ad0c5..c91a4c3e0 100644 --- a/pygeoapi/util.py +++ b/pygeoapi/util.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -65,8 +65,7 @@ from pygeoapi import l10n from pygeoapi.models import config as config_models from pygeoapi.plugin import load_plugin, PLUGINS -from pygeoapi.provider.base import ProviderTypeError - +from pygeoapi.provider import get_provider_default LOGGER = logging.getLogger(__name__) @@ -249,7 +248,7 @@ def str2bool(value: Union[bool, str]) -> bool: def to_json(dict_: dict, pretty: bool = False) -> str: """ - Serialize dict to json + Serialize dict to JSON :param dict_: `dict` of JSON representation :param pretty: `bool` of whether to prettify JSON (default is `False`) @@ -527,61 +526,6 @@ def filter_dict_by_key_value(dict_: dict, key: str, value: str) -> dict: return {k: v for (k, v) in dict_.items() if v[key] == value} -def filter_providers_by_type(providers: list, type: str) -> dict: - """ - helper function to filter a list of providers by type - - :param providers: ``list`` - :param type: str - - :returns: filtered ``dict`` provider - """ - - providers_ = {provider['type']: provider for provider in providers} - return providers_.get(type) - - -def get_provider_by_type(providers: list, provider_type: str) -> dict: - """ - helper function to load a provider by a provider type - - :param providers: ``list`` of providers - :param provider_type: type of provider (e.g. 
feature) - - :returns: provider based on type - """ - - LOGGER.debug(f'Searching for provider type {provider_type}') - try: - p = (next(d for i, d in enumerate(providers) - if d['type'] == provider_type)) - except (RuntimeError, StopIteration): - raise ProviderTypeError('Invalid provider type requested') - - return p - - -def get_provider_default(providers: list) -> dict: - """ - helper function to get a resource's default provider - - :param providers: ``list`` of providers - - :returns: filtered ``dict`` - """ - - try: - default = (next(d for i, d in enumerate(providers) if 'default' in d - and d['default'])) - LOGGER.debug('found default provider type') - except StopIteration: - LOGGER.debug('no default provider type. Returning first provider') - default = providers[0] - - LOGGER.debug(f"Default provider: {default['type']}") - return default - - class ProcessExecutionMode(Enum): sync_execute = 'sync-execute' async_execute = 'async-execute' @@ -792,3 +736,15 @@ def get_dataset_formatters(dataset: dict) -> dict: dataset_formatters[df2.name] = df2 return dataset_formatters + + +def remove_url_auth(url: str) -> str: + """ + Provide a RFC1738 URL without embedded authentication + :param url: RFC1738 URL + :returns: RFC1738 URL without authentication + """ + + u = urlparse(url) + auth = f'{u.username}:{u.password}@' + return url.replace(auth, '') diff --git a/requirements-pubsub.txt b/requirements-pubsub.txt new file mode 100644 index 000000000..1e32c725e --- /dev/null +++ b/requirements-pubsub.txt @@ -0,0 +1,2 @@ +kafka-python +paho-mqtt diff --git a/setup.py b/setup.py index 161941e21..efd7dd99c 100644 --- a/setup.py +++ b/setup.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -84,26 +84,7 @@ def finalize_options(self): def run(self): import subprocess - errno = 
subprocess.call(['pytest', 'tests/test_api.py']) - raise SystemExit(errno) - - -class PyCoverage(Command): - user_options = [] - - def initialize_options(self): - pass - - def finalize_options(self): - pass - - def run(self): - import subprocess - - errno = subprocess.call(['coverage', 'run', '--source=pygeoapi', - '-m', 'unittest', - 'pygeoapi.tests.run_tests']) - errno = subprocess.call(['coverage', 'report', '-m']) + errno = subprocess.call(['pytest', 'tests/api/test_api.py']) raise SystemExit(errno) @@ -169,14 +150,12 @@ def get_package_version(): 'Environment :: Console', 'Intended Audience :: Developers', 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: MIT License', 'Operating System :: OS Independent', 'Programming Language :: Python', 'Topic :: Scientific/Engineering :: GIS' ], cmdclass={ 'test': PyTest, - 'coverage': PyCoverage, 'cleanbuild': PyCleanBuild } ) diff --git a/start.sh b/start.sh index 7b8490983..7090be992 100755 --- a/start.sh +++ b/start.sh @@ -2,7 +2,7 @@ # 1. Define the Environment Variables export PYTHONPATH=$(pwd) -export PYGEOAPI_CONFIG=$(pwd)/pygeoapi-config.yml +export PYGEOAPI_CONFIG=$(pwd)/pygeoapi-config_indoorFeature.yml export PYGEOAPI_OPENAPI=$(pwd)/openAPI/IndoorFeature_openAPI.yaml # 2. 
Generate the OpenAPI Document diff --git a/tests/api/test_api.py b/tests/api/test_api.py index 3625eac94..bee15c1b5 100644 --- a/tests/api/test_api.py +++ b/tests/api/test_api.py @@ -4,9 +4,11 @@ # John A Stevenson # Colin Blackburn # Bernhard Mallinger +# Francesco Bartoli # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2022 John A Stevenson and Colin Blackburn +# Copyright (c) 2026 Francesco Bartoli # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -39,11 +41,11 @@ import pytest from pygeoapi.api import ( - API, APIRequest, FORMAT_TYPES, F_HTML, F_JSON, F_JSONLD, F_GZIP, - __version__, validate_bbox, validate_datetime, evaluate_limit, - validate_subset, landing_page, openapi_, conformance, describe_collections, - get_collection_schema, -) + API, APIRequest, CONFORMANCE_CLASSES, __version__, validate_bbox, + validate_datetime, evaluate_limit, validate_subset, landing_page, openapi_, + conformance, describe_collections, get_collection_schema) + +from pygeoapi.formats import FORMAT_TYPES, F_GZIP, F_JSON, F_JSONLD, F_HTML from pygeoapi.util import yaml_load, get_api_rules, get_base_url from tests.util import (get_test_file_path, mock_api_request, mock_flask, @@ -77,6 +79,13 @@ def config_hidden_resources(): return yaml_load(fh) +@pytest.fixture() +def config_failing_collection(): + filename = 'pygeoapi-test-config-failing-collection.yml' + with open(get_test_file_path(filename)) as fh: + return yaml_load(fh) + + @pytest.fixture() def enclosure_api(config_enclosure, openapi): """ Returns an API instance with a collection with enclosure links. 
""" @@ -96,6 +105,11 @@ def api_hidden_resources(config_hidden_resources, openapi): return API(config_hidden_resources, openapi) +@pytest.fixture() +def api_failing_collection(config_failing_collection, openapi): + return API(config_failing_collection, openapi) + + def test_apirequest(api_): # Test without (valid) locales with pytest.raises(ValueError): @@ -567,6 +581,37 @@ def test_conformance(config, api_): assert rsp_headers['Content-Language'] == 'en-US' +def test_conformance_does_not_mutate_global_list(config, api_): + """Test conformance method does not mutate CONFORMANCE_CLASSES. + + This test verifies that the global CONFORMANCE_CLASSES list is not + mutated by calls to the conformance function. The base conformance + classes should remain unchanged after multiple calls. + """ + + # Store the original length and content of the global list + original_length = len(CONFORMANCE_CLASSES) + original_classes = list(CONFORMANCE_CLASSES) + + req = mock_api_request() + + # Make multiple calls to conformance + for _ in range(3): + conformance(api_, req) + + # The global list should NOT have been mutated + assert len(CONFORMANCE_CLASSES) == original_length, ( + f'Global CONFORMANCE_CLASSES was mutated! ' + f'Original length: {original_length}, ' + f'Current length: {len(CONFORMANCE_CLASSES)}. ' + f'The conformance() function should create a copy of the list ' + f'before extending it.' 
+ ) + assert CONFORMANCE_CLASSES == original_classes, ( + 'Global CONFORMANCE_CLASSES content was modified' + ) + + def test_describe_collections(config, api_): req = mock_api_request({"f": "html"}) rsp_headers, code, response = describe_collections(api_, req) @@ -592,6 +637,7 @@ def test_describe_collections(config, api_): assert collection['title'] == 'Observations' assert collection['description'] == 'My cool observations' assert len(collection['links']) == 15 + assert collection['extent'] == { 'spatial': { 'bbox': [[-180, -90, 180, 90]], @@ -601,7 +647,11 @@ def test_describe_collections(config, api_): 'interval': [ ['2000-10-30T18:24:39+00:00', '2007-10-30T08:57:29+00:00'] ], - 'trs': 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian' + 'trs': 'http://www.opengis.net/def/uom/ISO-8601/0/Gregorian', + 'grid': { + 'resolution': 'P1D' + }, + 'default': '2000-10-30T18:24:39+00:00' } } @@ -656,6 +706,28 @@ def test_describe_collections(config, api_): assert collection['storageCrs'] == 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' # noqa assert collection['storageCrsCoordinateEpoch'] == 2017.23 + # test custom extents + rsp_headers, code, response = describe_collections( + api_, req, 'mapserver_world_map') + + collection = json.loads(response) + + assert collection['extent'] == { + 'spatial': { + 'bbox': [[-180, -90, 180, 90]], + 'crs': 'http://www.opengis.net/def/crs/OGC/1.3/CRS84' + }, + 'custom-extent': { + 'definition': 'https://example.org/custom-extent', + 'interval': [[0, 10]], + 'unit': '°C', + 'grid': { + 'cellsCount': 3, + 'coordinates': [0, 5, 10] + } + } + } + def test_describe_collections_hidden_resources( config_hidden_resources, api_hidden_resources): @@ -669,6 +741,22 @@ def test_describe_collections_hidden_resources( assert len(collections['collections']) == 1 +def test_describe_collections_failing_collection( + config_failing_collection, api_failing_collection): + req = mock_api_request({}) + rsp_headers, code, response = 
describe_collections(api_failing_collection, req) # noqa + assert code == HTTPStatus.OK + + assert len(config_failing_collection['resources']) == 3 + + collections = json.loads(response) + assert len(collections['collections']) == 2 + + req = mock_api_request({}) + rsp_headers, code, response = describe_collections(api_failing_collection, req, 'cmip5') # noqa + assert code == HTTPStatus.INTERNAL_SERVER_ERROR + + def test_describe_collections_json_ld(config, api_): req = mock_api_request({'f': 'jsonld'}) rsp_headers, code, response = describe_collections(api_, req, 'obs') diff --git a/tests/api/test_environmental_data_retrieval.py b/tests/api/test_environmental_data_retrieval.py index 8b7d665e1..8d028a7c2 100644 --- a/tests/api/test_environmental_data_retrieval.py +++ b/tests/api/test_environmental_data_retrieval.py @@ -5,7 +5,7 @@ # Colin Blackburn # Bernhard Mallinger # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2022 John A Stevenson and Colin Blackburn # # Permission is hereby granted, free of charge, to any person @@ -81,6 +81,7 @@ def test_get_collection_edr_query(config, api_): rsp_headers, code, response = get_collection_edr_query( api_, req, 'icoads-sst', None, 'position') assert code == HTTPStatus.OK + assert rsp_headers['Content-Type'] == 'application/vnd.cov+json' data = json.loads(response) diff --git a/tests/api/test_itemtypes.py b/tests/api/test_itemtypes.py index dd3fb9431..e5fceaefc 100644 --- a/tests/api/test_itemtypes.py +++ b/tests/api/test_itemtypes.py @@ -5,7 +5,7 @@ # Colin Blackburn # Francesco Bartoli # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2022 John A Stevenson and Colin Blackburn # Copyright (c) 2025 Francesco Bartoli # @@ -42,12 +42,12 @@ import pyproj from shapely.geometry import Point -from pygeoapi.api import (API, FORMAT_TYPES, F_GZIP, F_HTML, F_JSONLD, - apply_gzip) +from pygeoapi.api import API, apply_gzip from pygeoapi.api.itemtypes import ( 
get_collection_queryables, get_collection_item, get_collection_items, manage_collection_item) from pygeoapi.crs import get_crs +from pygeoapi.formats import FORMAT_TYPES, F_GZIP, F_HTML, F_JSONLD from pygeoapi.util import yaml_load from tests.util import get_test_file_path, mock_api_request diff --git a/tests/api/test_maps.py b/tests/api/test_maps.py index fb043dfeb..b13228d2f 100644 --- a/tests/api/test_maps.py +++ b/tests/api/test_maps.py @@ -5,7 +5,7 @@ # Colin Blackburn # Bernhard Mallinger # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2022 John A Stevenson and Colin Blackburn # # Permission is hereby granted, free of charge, to any person @@ -51,6 +51,18 @@ def test_get_collection_map(config, api_): assert isinstance(response, bytes) assert response[1:4] == b'PNG' + req = mock_api_request({'subset': 'foo("bar")'}) + rsp_headers, code, response = get_collection_map( + api_, req, 'mapserver_world_map') + + assert code == HTTPStatus.BAD_REQUEST + + req = mock_api_request({'properties': 'foo,bar'}) + rsp_headers, code, response = get_collection_map( + api_, req, 'mapserver_world_map') + + assert code == HTTPStatus.NOT_IMPLEMENTED + def test_map_crs_transform(config, api_): # Florida in EPSG:4326 @@ -58,9 +70,11 @@ def test_map_crs_transform(config, api_): 'bbox': '-88.374023,24.826625,-78.112793,31.015279', # crs is 4326 by implicit since it is the default } + req = mock_api_request(params) _, code, floridaIn4326 = get_collection_map( api_, req, 'mapserver_world_map') + assert code == HTTPStatus.OK # Area that isn't florida in the ocean; used to make sure @@ -73,6 +87,7 @@ def test_map_crs_transform(config, api_): req = mock_api_request(params) _, code, florida4326InWrongCRS = get_collection_map( api_, req, 'mapserver_world_map') + assert code == HTTPStatus.OK assert florida4326InWrongCRS != floridaIn4326 @@ -82,8 +97,10 @@ def test_map_crs_transform(config, api_): 'bbox': 
'-9837751.2884,2854464.3843,-8695476.3377,3634733.5690', 'bbox-crs': 'http://www.opengis.net/def/crs/EPSG/0/3857' } + req = mock_api_request(params) _, code, floridaProjectedIn3857 = get_collection_map( api_, req, 'mapserver_world_map') + assert code == HTTPStatus.OK assert floridaIn4326 == floridaProjectedIn3857 diff --git a/tests/api/test_processes.py b/tests/api/test_processes.py index fbe27ed20..9d837747c 100644 --- a/tests/api/test_processes.py +++ b/tests/api/test_processes.py @@ -5,7 +5,7 @@ # Colin Blackburn # Bernhard Mallinger # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2022 John A Stevenson and Colin Blackburn # # Permission is hereby granted, free of charge, to any person @@ -37,12 +37,28 @@ import time from unittest import mock -from pygeoapi.api import FORMAT_TYPES, F_HTML, F_JSON +import pytest + +from pygeoapi.api import API from pygeoapi.api.processes import ( describe_processes, execute_process, delete_job, get_job_result, get_jobs ) +from pygeoapi.formats import FORMAT_TYPES, F_HTML, F_JSON +from pygeoapi.util import yaml_load + +from tests.util import get_test_file_path, mock_api_request + -from tests.util import mock_api_request +@pytest.fixture() +def config_process_metadata() -> dict: + """ Returns a pygeoapi configuration with process metadata.""" + with open(get_test_file_path('pygeoapi-test-config-process-metadata.yml')) as fh: # noqa + return yaml_load(fh) + + +@pytest.fixture() +def api_process_metadata(config_process_metadata, openapi): + return API(config_process_metadata, openapi) def test_describe_processes(config, api_): @@ -143,8 +159,8 @@ def test_describe_processes(config, api_): # Test describe doesn't crash if example is missing req = mock_api_request() - processor = api_.manager.get_processor("hello-world") - example = processor.metadata.pop("example") + processor = api_.manager.get_processor('hello-world') + example = processor.metadata.pop('example') rsp_headers, code, response = 
describe_processes(api_, req) processor.metadata['example'] = example data = json.loads(response) @@ -152,6 +168,23 @@ def test_describe_processes(config, api_): assert len(data['processes']) == 2 +def test_describe_processes_metadata(config_process_metadata, + api_process_metadata): + + req = mock_api_request({'limit': 1}) + # Test for description of single processes + rsp_headers, code, response = describe_processes( + api_process_metadata, req, 'echo') + data = json.loads(response) + assert code == HTTPStatus.OK + assert len(data['jobControlOptions']) == 2 + assert 'sync-execute' in data['jobControlOptions'] + assert 'async-execute' in data['jobControlOptions'] + assert len(data['outputTransmission']) == 2 + assert 'value' in data['outputTransmission'] + assert 'reference' in data['outputTransmission'] + + def test_execute_process(config, api_): req_body_0 = { 'inputs': { @@ -204,6 +237,11 @@ def test_execute_process(config, api_): }, 'response': 'document' } + req_body_9 = { + 'inputs': { + 'name': 'Test document' + } + } cleanup_jobs = set() @@ -326,8 +364,10 @@ def test_execute_process(config, api_): req = mock_api_request(data=req_body_1, HTTP_Prefer='respond-async') rsp_headers, code, response = execute_process(api_, req, 'hello-world') - assert 'Location' in rsp_headers + response = json.loads(response) assert code == HTTPStatus.CREATED + + assert 'Location' in rsp_headers assert isinstance(response, dict) assert 'jobID' in response assert 'type' in response @@ -364,6 +404,12 @@ def test_execute_process(config, api_): assert 'outputs' in response assert isinstance(response['outputs'], list) + req = mock_api_request(data=req_body_9) + rsp_headers, code, response = execute_process(api_, req, 'hello-world') + + response2 = '{"id":"echo","value":"Hello Test document!"}' + assert response == response2 + # Cleanup time.sleep(2) # Allow time for any outstanding async jobs for _, job_id in cleanup_jobs: diff --git a/tests/api/test_pubsub.py 
b/tests/api/test_pubsub.py new file mode 100644 index 000000000..243c4661d --- /dev/null +++ b/tests/api/test_pubsub.py @@ -0,0 +1,106 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +from copy import deepcopy +import json + +import pytest + +from pygeoapi.api import API, landing_page +from pygeoapi.util import yaml_load + +from tests.util import get_test_file_path, mock_api_request + + +@pytest.fixture() +def config(): + with open(get_test_file_path('pygeoapi-test-config-pubsub.yml')) as fh: + return yaml_load(fh) + + +def test_landing_page(config, openapi, asyncapi): + api_ = API(config, openapi, asyncapi) + + broker_link = None + + req = mock_api_request() + rsp_headers, code, response = landing_page(api_, req) + + content = json.loads(response) + + assert len(content['links']) == 15 + + for link in content['links']: + if link.get('rel') == 'hub': + broker_link = link + + assert broker_link is not None + assert broker_link['href'] == 'mqtt://localhost:1883' + assert broker_link['channel'] == 'my/channel' + + config2 = deepcopy(config) + config2['pubsub']['broker']['hidden'] = True + + api_ = API(config2, openapi) + + broker_link = None + + req = mock_api_request() + rsp_headers, code, response = landing_page(api_, req) + + content = json.loads(response) + + assert len(content['links']) == 12 + + for link in content['links']: + if link.get('rel') == 'hub': + broker_link = link + + assert broker_link is None + + config2 = deepcopy(config) + config2['pubsub']['broker'].pop('channel', None) + + api_ = API(config2, openapi, asyncapi) + + broker_link = None + + req = mock_api_request() + rsp_headers, code, response = landing_page(api_, req) + + content = json.loads(response) + + assert len(content['links']) == 15 + + for link in content['links']: + if link.get('rel') == 'hub': + broker_link = link + + assert broker_link is not None + assert 'channel' not in broker_link diff --git a/tests/api/test_stac.py b/tests/api/test_stac.py index dea4de7bb..1da63919b 100644 --- a/tests/api/test_stac.py +++ b/tests/api/test_stac.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright 
(c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -31,8 +31,8 @@ import pytest -from pygeoapi.api import FORMAT_TYPES, F_JSON from pygeoapi.api.stac import search, landing_page +from pygeoapi.formats import FORMAT_TYPES, F_JSON from pygeoapi.util import yaml_load from tests.util import get_test_file_path, mock_api_request diff --git a/tests/api/test_tiles.py b/tests/api/test_tiles.py index d804f6a21..c463abf10 100644 --- a/tests/api/test_tiles.py +++ b/tests/api/test_tiles.py @@ -5,7 +5,7 @@ # Colin Blackburn # Bernhard Mallinger # -# Copyright (c) 2024 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # Copyright (c) 2022 John A Stevenson and Colin Blackburn # Copyright (c) 2025 Joana Simoes # @@ -37,12 +37,12 @@ from http import HTTPStatus import pytest -from pygeoapi.api import FORMAT_TYPES, F_HTML from pygeoapi.api.tiles import ( get_collection_tiles, tilematrixset, tilematrixsets, get_collection_tiles_metadata, get_collection_tiles_data ) +from pygeoapi.formats import FORMAT_TYPES, F_HTML from pygeoapi.models.provider.base import TileMatrixSetEnum from tests.util import mock_api_request diff --git a/tests/conftest.py b/tests/conftest.py index d17a7107d..b9eef7b63 100644 --- a/tests/conftest.py +++ b/tests/conftest.py @@ -52,6 +52,12 @@ def openapi(): return yaml_load(fh) +@pytest.fixture() +def asyncapi(): + with open(get_test_file_path('pygeoapi-test-asyncapi.yml')) as fh: + return yaml_load(fh) + + @pytest.fixture() def api_(config, openapi): return API(config, openapi) diff --git a/tests/data/README.md b/tests/data/README.md index 96b2fed8f..7363fb500 100644 --- a/tests/data/README.md +++ b/tests/data/README.md @@ -106,3 +106,8 @@ This directory provides test data to demonstrate functionality. 
- source: [Canadian National Water Data Archive](https://www.canada.ca/en/environment-climate-change/services/water-overview/quantity/monitoring/survey/data-products-services/national-archive-hydat.html) as extracted from the [MSC GeoMet OGC API](https://eccc-msc.github.io/open-data/msc-geomet/web-services_en/#ogc-api-features) service - URL: https://www.canada.ca/en/environment-climate-change/services/water-overview/quantity/monitoring/survey/data-products-services/national-archive-hydat.html - License: https://eccc-msc.github.io/open-data/licence/readme_en + +### `dutch-nationaalgeoregister/sample-records.tinydb` +- source: Dutch National Georegister +- URL: https://nationaalgeoregister.nl +- License: CC0: https://creativecommons.org/share-your-work/public-domain/cc0 diff --git a/tests/data/dutch-nationaalgeoregister/sample-records.tinydb b/tests/data/dutch-nationaalgeoregister/sample-records.tinydb new file mode 100644 index 000000000..51069154f --- /dev/null +++ b/tests/data/dutch-nationaalgeoregister/sample-records.tinydb @@ -0,0 +1,376 @@ +{ + "_default": { + "1": { + "id": "35149dfb-31d3-431c-a8bc-12a4034dac48", + "conformsTo": [ + "http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/record-core" + ], + "type": "Feature", + "time": { + "interval": [ + null, + null + ] + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 4.690751953125, + 52.358740234375 + ], + [ + 4.690751953125, + 52.6333984375 + ], + [ + 5.020341796875, + 52.6333984375 + ], + [ + 5.020341796875, + 52.358740234375 + ], + [ + 4.690751953125, + 52.358740234375 + ] + ] + ] + }, + "properties": { + "created": "2021-12-08Z", + "updated": "2025-06-16T12:39:57Z", + "type": "dataset", + "title": "Kaartboeck 1635", + "description": "Data uit kaartboeken van de periode 1635 tot 1775. 
De kaartboeken werden door het waterschap gebruikt om er op toe te zien dat de eigenaren geen water in beslag namen door demping.\nDe percelen op de kaart zijn naar de huidige maatstaven vrij nauwkeurig gemeten en voorzien van een administratie met de eigenaren. bijzondere locaties van molens werven en beroepen worden in de boeken vermeld. Alle 97 kaarten aan een geven een zeer gedetailleerd beeld van de Voorzaan, Nieuwe Haven en de Achterzaan. De bladen Oost en West van de zaan zijn vrij nauwkeurig. De bladen aan de Voorzaan zijn een schetsmatige weergave van de situatie. De kaart van de Nieuwe Haven si weer nauwkeurig te noemen.", + "contacts": [ + { + "name": "Team Geo", + "organization": "Gemeente Zaanstad", + "phones": [ + { + "value": "14 075" + } + ], + "emails": [ + { + "value": "geo-informatie@zaanstad.nl" + } + ], + "addresses": [ + { + "deliveryPoint": [ + "Stadhuisplein 100" + ], + "city": [ + "Zaanstad" + ], + "administrativeArea": [ + "Noord-Holland" + ], + "postalCode": [ + "1506 MZ" + ], + "country": [ + "Netherlands" + ] + } + ], + "roles": [ + "custodian" + ] + } + ], + "externalIds": [ + { + "scheme": "default", + "value": "35149dfb-31d3-431c-a8bc-12a4034dac48" + } + ], + "themes": [ + { + "concepts": [ + { + "id": "ARGEOLOGIE" + }, + { + "id": "MONUMENTEN" + }, + { + "id": "KADASTER" + }, + { + "id": "KAARTBOEK" + }, + { + "id": "KAARTBOECK" + }, + { + "id": "HISTORIE" + } + ] + } + ], + "_metadata-anytext": "35149dfb-31d3-431c-a8bc-12a4034dac48 Team Geo Gemeente Zaanstad Informatie Beheer en Techniek 14 075 Stadhuisplein 100 Zaanstad Noord-Holland 1506 MZ Netherlands geo-informatie@zaanstad.nl ISO 19115 Nederlands metadata profiel op ISO 19115 voor geografie 1.3.1 28992 EPSG Kaartboeck 1635 56d832e1-7e33-4e60-a01a-6bdcd716aaa0 Eigenaren en percelen langs de Zaan en Voorzaan tussen 1635 en 1775 Data uit kaartboeken van de periode 1635 tot 1775. 
De kaartboeken werden door het waterschap gebruikt om er op toe te zien dat de eigenaren geen water in beslag namen door demping.\nDe percelen op de kaart zijn naar de huidige maatstaven vrij nauwkeurig gemeten en voorzien van een administratie met de eigenaren. bijzondere locaties van molens werven en beroepen worden in de boeken vermeld. Alle 97 kaarten aan een geven een zeer gedetailleerd beeld van de Voorzaan, Nieuwe Haven en de Achterzaan. De bladen Oost en West van de zaan zijn vrij nauwkeurig. De bladen aan de Voorzaan zijn een schetsmatige weergave van de situatie. De kaart van de Nieuwe Haven si weer nauwkeurig te noemen. Archeologisch onderzoek Kleij, Piet Gemeente Zaanstad, monumenten en argeologie Inhoudelijk contactpersoon 14 075 Stadhuisplein 100 Zaanstad Noord-Holland 1506 MZ Nederland geo-informatie@zaanstad.nl WWW:LINK-1.0-http--link Profielsite (intranet) https://geo.zaanstad.nl:443/geonetwork/srv/eng/resources.get?uuid=35149dfb-31d3-431c-a8bc-12a4034dac48&fname=monumenten-kaartboeck_s.png thumbnail png https://geo.zaanstad.nl:443/geonetwork/srv/eng/resources.get?uuid=35149dfb-31d3-431c-a8bc-12a4034dac48&fname=monumenten-kaartboeck.jpg large_thumbnail jpg ARGEOLOGIE MONUMENTEN KADASTER KAARTBOEK KAARTBOECK HISTORIE Niet geschikt voor commercieel gebruik Niet voor commercieel gebruik en naamsvermelding verplicht; Gemeente Zaanstad https://creativecommons.org/licenses/by-nc/4.0/ Geen beperkingen bekend dut WMS 1.1.1 OGC:WMS geo:kaartboeck geo:kaartboeck OGC:WFS geo:kaartboeck download download De dataset is afkomstig uit diverse kaartboeken uit verschillende periode." 
+ }, + "links": [ + { + "href": "https://maps-intern.zaanstad.gem.local/geoserver/wms?SERVICE=WMS", + "rel": "item", + "title": "geo:kaartboeck", + "type": "OGC:WMS" + }, + { + "href": "https://maps-intern.zaanstad.gem.local/geoserver/wfs?SERVICE=WFS", + "rel": "item", + "title": "geo:kaartboeck", + "type": "OGC:WFS" + }, + { + "href": "https://maps-intern.zaanstad.gem.local/geoserver/wfs?SERVICE=WFS&version=1.0.0&request=GetFeature&typeName=geo:kaartboeck&outputFormat=csv", + "rel": "item", + "type": "download" + }, + { + "href": "https://maps-intern.zaanstad.gem.local/geoserver/wfs?SERVICE=WFS&version=1.0.0&request=GetFeature&typeName=geo:kaartboeck&outputFormat=shape-zip", + "rel": "item", + "type": "download" + } + ] + }, + "2": { + "id": "ffffffaa-4087-59ec-9ea7-8416f58e99dd", + "conformsTo": [ + "http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/record-core" + ], + "type": "Feature", + "time": { + "interval": [ + null, + null + ] + }, + "geometry": { + "type": "Polygon", + "coordinates": [ + [ + [ + 4.4552947, + 52.3348457 + ], + [ + 4.4552947, + 53.388444 + ], + [ + 7.135964, + 53.388444 + ], + [ + 7.135964, + 52.3348457 + ], + [ + 4.4552947, + 52.3348457 + ] + ] + ] + }, + "properties": { + "created": "2022-06-01Z", + "updated": "2025-06-16T12:39:57Z", + "type": "dataset", + "title": "Diepteligging onderkant keileem (t.o.v. 
NAP)", + "description": "Diepteligging van de onderkant (basis) van keileem in Drenthe, in meters ten opzichte van NAP.", + "contacts": [ + { + "name": "Team Gis/Cartografie", + "organization": "Provincie Drenthe", + "phones": [ + { + "value": "0592-365555" + } + ], + "emails": [ + { + "value": "post@drenthe.nl" + } + ], + "addresses": [ + { + "deliveryPoint": [ + "Westerbrink 1" + ], + "city": [ + "Assen" + ], + "administrativeArea": [ + "Drenthe" + ], + "postalCode": [ + "9400AC" + ], + "country": [ + "Nederland" + ] + } + ], + "links": [ + { + "href": "https://www.provincie.drenthe.nl", + "rel": null, + "title": null, + "description": null + } + ], + "roles": [ + "pointOfContact" + ] + } + ], + "externalIds": [ + { + "scheme": "default", + "value": "ffffffaa-4087-59ec-9ea7-8416f58e99dd" + } + ], + "themes": [ + { + "concepts": [ + { + "id": "beleidsinstrument" + }, + { + "id": "bodem" + }, + { + "id": "grondwaterstand" + }, + { + "id": "landbouw" + }, + { + "id": "landbouwgrond" + }, + { + "id": "waterhuishouding" + } + ], + "scheme": null + } + ], + "_metadata-anytext": "ffffffaa-4087-59ec-9ea7-8416f58e99dd Team Gis/Cartografie Provincie Drenthe Auteur 0592-365555 0592-365777 Westerbrink 1 Assen Drenthe 9400AC Nederland post@drenthe.nl ISO 19115 Nederlandse metadata profiel op ISO 19115 voor geografie 1.3 28992 EPSG Diepteligging onderkant keileem (t.o.v. NAP) GBI.KEILEEM_DIEPTE_ONDER_NAP_R 3d30357b-1bf0-5ccc-8a40-4b980f2ba493 Diepteligging van de onderkant (basis) van keileem in Drenthe, in meters ten opzichte van NAP. 
Milieu, energie en klimaat Team Natuur en Water Provincie Drenthe Auteur 0592-365555 0592-365777 Westerbrink 1 Assen Drenthe 9400AC Nederland post@drenthe.nl https://kaartportaal.drenthe.nl/portal/sharing/rest/content/items/d547218a21fd4c7ca666e01e4c33f237/info/thumbnail/thumbnail1651583291057.png thumbnail beleidsinstrument bodem grondwaterstand landbouw landbouwgrond waterhuishouding Trefwoordenlijst Provincie Drenthe dataset niet gebruiken bij een schaal kleiner dan Geen beperkingen http://creativecommons.org/publicdomain/mark/1.0/deed.nl Provincie Drenthe zie rapportage Team Gis/Cartografie Provincie Drenthe Auteur 0592-365555 0592-365777 Westerbrink 1 Assen Drenthe 9400AC Nederland post@drenthe.nl Gratis Neem contact op met Provincie Drenthe OGC:WMS 0 Diepteligging onderkant keileem (t.o.v. NAP) Productieproces en achtergrondinformatie zijn beschreven in de rapportage van TNO (TNO 2013 R10107). zie rapportage" + }, + "links": [ + { + "href": "https://kaartportaal.drenthe.nl/server/services/GDB_actueel/GBI_KEILEEM_DIEPTE_ONDER_NAP_R/MapServer/WMSServer", + "rel": "item", + "title": "0", + "type": "OGC:WMS" + } + ] + }, + "3": { + "id": "59352e7f-3792-4e17-bd73-9bba84a98890", + "conformsTo": [ + "http://www.opengis.net/spec/ogcapi-records-1/1.0/conf/record-core" + ], + "type": "Feature", + "time": { + "interval": [ + null, + null + ] + }, + "geometry": null, + "properties": { + "created": "2021-06-30Z", + "updated": "2025-06-16T12:39:57Z", + "type": "dataset", + "title": "Clusters geluid - wegen gecumuleerd", + "description": "Clusters (omtreklijn) gebaseerd op gemeentegrenzen. Per cluster zijn de aantallen woningen en gevoelige bestemmingen per GES-score geteld. 
Bij de gevoelige bestemmingen is onderscheid gemaakt in 3 categorien: Ziekenhuizen, Scholen en dagverblijven voor jeugd, Verpleeg en verzorgingshuizen.", + "contacts": [ + { + "name": "GIS", + "organization": "Provincie Utrecht", + "emails": [ + { + "value": "GIS@provincie-utrecht.nl" + } + ], + "links": [ + { + "href": "http://www.provincie-utrecht.nl", + "rel": null, + "title": null, + "description": null + } + ], + "roles": [ + "pointOfContact" + ] + } + ], + "externalIds": [ + { + "scheme": "default", + "value": "59352e7f-3792-4e17-bd73-9bba84a98890" + } + ], + "themes": [ + { + "concepts": [ + { + "id": "GELUIDHINDER" + }, + { + "id": "GELUIDSZONES" + }, + { + "id": "PROVINCIALE WEGEN" + }, + { + "id": "VERKEERSLAWAAI" + }, + { + "id": "WET GELUIDHINDER" + } + ], + "scheme": null + }, + { + "concepts": [ + { + "id": "Informatief" + } + ], + "scheme": null + } + ], + "_metadata-anytext": "59352e7f-3792-4e17-bd73-9bba84a98890 GIS Provincie Utrecht Technisch verantwoordelijk GIS@provincie-utrecht.nl ISO 19115 Nederlands metadata profiel op ISO 19115 voor geografie 2.0.0 https://www.opengis.net/def/crs/EPSG/0/28992 EPSG Clusters geluid - wegen gecumuleerd milieu.mgkp_cluster_gl_wegen_cumulati d5e50c1f-37a3-4687-a146-99c0820f5e8c Clusters (omtreklijn) gebaseerd op gemeentegrenzen. Per cluster zijn de aantallen woningen en gevoelige bestemmingen per GES-score geteld. Bij de gevoelige bestemmingen is onderscheid gemaakt in 3 categorien: Ziekenhuizen, Scholen en dagverblijven voor jeugd, Verpleeg en verzorgingshuizen. 
milieubelasting koppelen aan gezondheidseffecten zodat het gebruikt kan worden voor andere beleidsvelden oa Europese Richtlijn Omgevingslawaai Bosch, Hans van den Provincie Utrecht Technisch verantwoordelijk GIS@provincie-utrecht.nl Janssen, Geert Provincie Utrecht Inhoudelijk verantwoordelijk gis@provincie-utrecht.nl https://services.geodata-utrecht.nl/geoserver/m01_4_overlast_hinder_mgkp/wms?request=GetMap&service=WMS&SRS=EPSG:28992&CRS=EPSG:28992&bbox=112656.06,436541.64,173834.94,482076.36&width=600&height=446&format=image/png&styles=&layers=Clusters_geluid_-_wegen_gecumuleerd GELUIDHINDER GELUIDSZONES PROVINCIALE WEGEN VERKEERSLAWAAI WET GELUIDHINDER Interprovinciale thesaurus Informatief Typering Geen gebruiksbeperkingen geen beperkingen https://creativecommons.org/publicdomain/mark/1.0/deed.nl SDE Feature Class 10.4.1 GIS Provincie Utrecht Technisch verantwoordelijk GIS@provincie-utrecht.nl landingpage OGC:WFS Clusters_geluid_-_wegen_gecumuleerd OGC:WMS Clusters_geluid_-_wegen_gecumuleerd De clusters zijn de gemeentegrenzen. Per gemeente zijn de aantallen belaste woningen en gevoelige bestemmingen per GES klasse bepaald." 
+ }, + "links": [ + { + "href": "https://download.geodata-utrecht.nl/download/vector/59352e7f-3792-4e17-bd73-9bba84a98890", + "rel": "item", + "type": "landingpage" + }, + { + "href": "https://services.geodata-utrecht.nl/geoserver/m01_4_overlast_hinder_mgkp/wfs", + "rel": "item", + "title": "Clusters_geluid_-_wegen_gecumuleerd", + "type": "OGC:WFS" + }, + { + "href": "https://services.geodata-utrecht.nl/geoserver/m01_4_overlast_hinder_mgkp/wms", + "rel": "item", + "title": "Clusters_geluid_-_wegen_gecumuleerd", + "type": "OGC:WMS" + } + ] + } + } +} diff --git a/tests/data/parquet/geoparquet1.1/data-polygon-encoding_wkb_no_bbox.parquet b/tests/data/parquet/geoparquet1.1/data-polygon-encoding_wkb_no_bbox.parquet new file mode 100644 index 000000000..cce77baad Binary files /dev/null and b/tests/data/parquet/geoparquet1.1/data-polygon-encoding_wkb_no_bbox.parquet differ diff --git a/tests/data/parquet/geoparquet1.1/nyc_subset_overture.parquet b/tests/data/parquet/geoparquet1.1/nyc_subset_overture.parquet new file mode 100644 index 000000000..9abe3f071 Binary files /dev/null and b/tests/data/parquet/geoparquet1.1/nyc_subset_overture.parquet differ diff --git a/tests/data/random.parquet b/tests/data/parquet/naive/random.parquet similarity index 100% rename from tests/data/random.parquet rename to tests/data/parquet/naive/random.parquet diff --git a/tests/data/random_nocrs.parquet b/tests/data/parquet/naive/random_nocrs.parquet similarity index 100% rename from tests/data/random_nocrs.parquet rename to tests/data/parquet/naive/random_nocrs.parquet diff --git a/tests/data/random_nogeom.parquet b/tests/data/parquet/naive/random_nogeom.parquet similarity index 100% rename from tests/data/random_nogeom.parquet rename to tests/data/parquet/naive/random_nogeom.parquet diff --git a/tests/other/test_openapi.py b/tests/other/test_openapi.py index 49f4f70ba..0832ec433 100644 --- a/tests/other/test_openapi.py +++ b/tests/other/test_openapi.py @@ -43,6 +43,14 @@ def config(): return 
yaml_load(fh) +@pytest.fixture() +def config_admin_empty_resources(): + with open( + get_test_file_path('pygeoapi-test-config-admin-empty-resources.yml') + ) as fh: + return yaml_load(fh) + + @pytest.fixture() def config_hidden_resources(): filename = 'pygeoapi-test-config-hidden-resources.yml' @@ -131,3 +139,8 @@ def test_hidden_resources(config_hidden_resources): assert '/collections/obs' not in openapi_doc['paths'] assert '/collections/obs/items' not in openapi_doc['paths'] + + +def test_admin_empty_resources(config_admin_empty_resources): + openapi_doc = get_oas(config_admin_empty_resources) + assert '/admin/config' in openapi_doc['paths'] diff --git a/tests/other/test_util.py b/tests/other/test_util.py index 6d3d70aa6..4e54840c6 100644 --- a/tests/other/test_util.py +++ b/tests/other/test_util.py @@ -38,6 +38,7 @@ from pygeoapi import util from pygeoapi.api import __version__ +from pygeoapi.provider import get_provider_by_type, get_provider_default from pygeoapi.provider.base import ProviderTypeError from ..util import get_test_file_path @@ -181,25 +182,25 @@ def test_filter_dict_by_key_value(config): def test_get_provider_by_type(config): - p = util.get_provider_by_type(config['resources']['obs']['providers'], - 'feature') + p = get_provider_by_type(config['resources']['obs']['providers'], + 'feature') assert isinstance(p, dict) assert p['type'] == 'feature' assert p['name'] == 'CSV' with pytest.raises(ProviderTypeError): - p = util.get_provider_by_type(config['resources']['obs']['providers'], - 'something-else') + p = get_provider_by_type(config['resources']['obs']['providers'], + 'something-else') def test_get_provider_default(config): - pd = util.get_provider_default(config['resources']['obs']['providers']) + pd = get_provider_default(config['resources']['obs']['providers']) assert pd['type'] == 'feature' assert pd['name'] == 'CSV' - pd = util.get_provider_default(config['resources']['obs']['providers']) + pd = 
get_provider_default(config['resources']['obs']['providers']) def test_read_data(): diff --git a/tests/provider/test_esri_provider.py b/tests/provider/test_esri_provider.py index b90b6cf1e..65870de79 100644 --- a/tests/provider/test_esri_provider.py +++ b/tests/provider/test_esri_provider.py @@ -35,20 +35,34 @@ TIME_FIELD = 'Date_Time' +BASE_URL = 'https://sampleserver6.arcgisonline.com/arcgis/rest/services' + @pytest.fixture() def config(): - # National Hurricane Center () + # National Hurricane Center # source: ESRI, NOAA/National Weather Service return { 'name': 'ESRI', 'type': 'feature', - 'data': 'https://sampleserver6.arcgisonline.com/arcgis/rest/services/Hurricanes/MapServer/0', # noqa + 'data': f'{BASE_URL}/Hurricanes/MapServer/0', 'id_field': 'OBJECTID', 'time_field': TIME_FIELD } +@pytest.fixture() +def config_alt_id(): + # Emergency Facilities + # source: ESRI + return { + 'name': 'ESRI', + 'type': 'feature', + 'data': f'{BASE_URL}/EmergencyFacilities/FeatureServer/0', + 'id_field': 'facilityid' + } + + def test_query(config): p = ESRIServiceProvider(config) @@ -179,3 +193,11 @@ def test_get(config): result = p.get(6) assert result['id'] == 6 assert result['properties']['EVENTID'] == 'Alberto' + + +def test_alternative_id_field(config_alt_id): + p = ESRIServiceProvider(config_alt_id) + + result = p.get('F0234') + assert result['id'] == 'F0234' + assert result['properties']['facname'] == 'Redlands Community Hospital' diff --git a/tests/provider/test_filesystem_provider.py b/tests/provider/test_filesystem_provider.py index 6e70efa1d..f1cfffcf0 100644 --- a/tests/provider/test_filesystem_provider.py +++ b/tests/provider/test_filesystem_provider.py @@ -54,7 +54,7 @@ def test_query(config): r = p.get_data_path(baseurl, urlpath, dirpath) - assert len(r['links']) == 12 + assert len(r['links']) == 14 r = p.get_data_path(baseurl, urlpath, '/poi_portugal') diff --git a/tests/provider/test_mysql_provider.py b/tests/provider/test_mysql_provider.py index 
0f470d750..1ac10e1c1 100644 --- a/tests/provider/test_mysql_provider.py +++ b/tests/provider/test_mysql_provider.py @@ -37,44 +37,45 @@ PASSWORD = os.environ.get('MYSQL_PASSWORD', 'mysql') -""" -For local testing, a MySQL database can be spun up with docker -compose as follows: - -services: - - mysql: - image: mysql:8 - ports: - - 3306:3306 - environment: - MYSQL_ROOT_PASSWORD: mysql - MYSQL_USER: pygeoapi - MYSQL_PASSWORD: mysql - MYSQL_DATABASE: test_geo_app - volumes: - - ./tests/data/mysql_data.sql:/docker-entrypoint-initdb.d/init.sql:ro -""" - - -@pytest.fixture() -def config(): - return { +# Testing local MySQL with docker: +''' +docker run --name mysql-test \ + -e MYSQL_ROOT_PASSWORD=mysql \ + -e MYSQL_USER=pygeoapi \ + -e MYSQL_PASSWORD=mysql \ + -e MYSQL_DATABASE=test_geo_app \ + -p 3306:3306 \ + -v ./tests/data/mysql_data.sql:/docker-entrypoint-initdb.d/init.sql:ro \ + -d mysql:8 +''' + + +@pytest.fixture(params=['default', 'connection_string']) +def config(request): + config_ = { 'name': 'MySQL', 'type': 'feature', - 'data': { + 'options': {'connect_timeout': 10}, + 'id_field': 'locationID', + 'table': 'location', + 'geom_field': 'locationCoordinates' + } + if request.param == 'default': + config_['data'] = { 'host': 'localhost', 'dbname': 'test_geo_app', 'user': 'root', 'port': 3306, 'password': PASSWORD, 'search_path': ['test_geo_app'] - }, - 'options': {'connect_timeout': 10}, - 'id_field': 'locationID', - 'table': 'location', - 'geom_field': 'locationCoordinates' - } + } + elif request.param == 'connection_string': + config_['data'] = ( + f'mysql+pymysql://root:{PASSWORD}@localhost:3306/test_geo_app' + ) + config_['options']['search_path'] = ['test_geo_app'] + + return config_ def test_valid_connection_options(config): @@ -87,7 +88,8 @@ def test_valid_connection_options(config): 'keepalives', 'keepalives_idle', 'keepalives_count', - 'keepalives_interval' + 'keepalives_interval', + 'search_path' ] diff --git a/tests/provider/test_parquet_provider.py 
b/tests/provider/test_parquet_provider.py index 736e3dff4..6d45a51d7 100644 --- a/tests/provider/test_parquet_provider.py +++ b/tests/provider/test_parquet_provider.py @@ -5,6 +5,7 @@ # # Copyright (c) 2024 Leo Ghignone # Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Colton Loftus # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -29,6 +30,8 @@ # # ================================================================= +from copy import copy + import pytest from pygeoapi.provider.base import ProviderItemNotFoundError @@ -36,15 +39,6 @@ from ..util import get_test_file_path -path = get_test_file_path( - 'data/random.parquet') - -path_nogeom = get_test_file_path( - 'data/random_nogeom.parquet') - -path_nocrs = get_test_file_path( - 'data/random_nocrs.parquet') - @pytest.fixture() def config_parquet(): @@ -52,13 +46,12 @@ def config_parquet(): 'name': 'Parquet', 'type': 'feature', 'data': { - 'source_type': 'Parquet', - 'source': path, + 'source': get_test_file_path('data/parquet/naive/random.parquet'), }, 'id_field': 'id', 'time_field': 'time', 'x_field': 'lon', - 'y_field': 'lat', + 'y_field': 'lat' } @@ -68,8 +61,8 @@ def config_parquet_nogeom_notime(): 'name': 'ParquetNoGeomNoTime', 'type': 'feature', 'data': { - 'source_type': 'Parquet', - 'source': path_nogeom, + 'source': get_test_file_path( + 'data/parquet/naive/random_nogeom.parquet') }, 'id_field': 'id' } @@ -81,162 +74,267 @@ def config_parquet_nocrs(): 'name': 'ParquetNoCrs', 'type': 'feature', 'data': { - 'source_type': 'Parquet', - 'source': path_nocrs, + 'source': get_test_file_path( + 'data/parquet/naive/random_nocrs.parquet') }, 'id_field': 'id', 'time_field': 'time', 'x_field': 'lon', - 'y_field': 'lat', + 'y_field': 'lat' + } + + +@pytest.fixture +def geoparquet_no_bbox(): + # Data originating from + # https://github.com/opengeospatial/geoparquet/blob/main/test_data/data-polygon-encoding_wkb.parquet + + # As CSV: + # 
"col","geometry" + # 0,"POLYGON ((30 10, 40 40, 20 40, 10 20, 30 10))" + # 1,"POLYGON ((35 10, 45 45, 15 40, 10 20, 35 10), (20 30, 35 35, 30 20, 20 30))" # noqa + # 2,"POLYGON EMPTY" + # 3, + return { + 'name': 'GeoparquetNoBbox', + 'type': 'feature', + 'data': { + 'source': get_test_file_path( + 'data/parquet/geoparquet1.1/data-polygon-encoding_wkb_no_bbox.parquet' # noqa + ) + } + } + + +@pytest.fixture +def geoparquet_with_bbox(): + # Geneated with the overture python CLI + # overturemaps download --bbox=-74,40.98,-73.98,41 -f geoparquet --type=building -o nyc_subset_overture.parquet # noqa + return { + 'name': 'GeoparquetWithBbox', + 'type': 'feature', + 'data': { + 'source': get_test_file_path( + 'data/parquet/geoparquet1.1/nyc_subset_overture.parquet' + ) + } } -def test_get_fields(config_parquet): - """Testing field types""" - - p = ParquetProvider(config_parquet) - results = p.get_fields() - assert results['lat']['type'] == 'number' - assert results['lon']['format'] == 'double' - assert results['time']['format'] == 'date-time' - - -def test_get(config_parquet): - """Testing query for a specific object""" - - p = ParquetProvider(config_parquet) - result = p.get('42') - assert result['id'] == '42' - assert result['properties']['lon'] == 4.947447 - - -def test_get_not_existing_feature_raise_exception( - config_parquet -): - """Testing query for a not existing object""" - p = ParquetProvider(config_parquet) - with pytest.raises(ProviderItemNotFoundError): - p.get(-1) - - -def test_query_hits(config_parquet): - """Testing query on entire collection for hits""" - - p = ParquetProvider(config_parquet) - feature_collection = p.query(resulttype='hits') - assert feature_collection.get('type') == 'FeatureCollection' - features = feature_collection.get('features') - assert len(features) == 0 - hits = feature_collection.get('numberMatched') - assert hits is not None - assert hits == 100 - - -def test_query_bbox_hits(config_parquet): - """Testing query for a valid JSON 
object with geometry""" - - p = ParquetProvider(config_parquet) - feature_collection = p.query( - bbox=[100, -50, 150, 0], - resulttype='hits') - assert feature_collection.get('type') == 'FeatureCollection' - features = feature_collection.get('features') - assert len(features) == 0 - hits = feature_collection.get('numberMatched') - assert hits is not None - assert hits == 6 - - -def test_query_with_limit(config_parquet): - """Testing query for a valid JSON object with geometry""" - - p = ParquetProvider(config_parquet) - feature_collection = p.query(limit=2, resulttype='results') - assert feature_collection.get('type') == 'FeatureCollection' - features = feature_collection.get('features') - assert len(features) == 2 - hits = feature_collection.get('numberMatched') - assert hits > 2 - feature = features[0] - properties = feature.get('properties') - assert properties is not None - geometry = feature.get('geometry') - assert geometry is not None - - -def test_query_with_offset(config_parquet): - """Testing query for a valid JSON object with geometry""" - - p = ParquetProvider(config_parquet) - feature_collection = p.query(offset=20, limit=10, resulttype='results') - assert feature_collection.get('type') == 'FeatureCollection' - features = feature_collection.get('features') - assert len(features) == 10 - hits = feature_collection.get('numberMatched') - assert hits > 30 - feature = features[0] - properties = feature.get('properties') - assert properties is not None - assert feature['id'] == '21' - assert properties['lat'] == 66.264988 - geometry = feature.get('geometry') - assert geometry is not None - - -def test_query_with_property(config_parquet): - """Testing query for a valid JSON object with property filter""" - - p = ParquetProvider(config_parquet) - feature_collection = p.query( - resulttype='results', - properties=[('lon', -12.855022)]) - assert feature_collection.get('type') == 'FeatureCollection' - features = feature_collection.get('features') - assert 
len(features) == 1 - for feature in features: - assert feature['properties']['lon'] == -12.855022 - - -def test_query_with_skip_geometry(config_parquet): - """Testing query for a valid JSON object with property filter""" - - p = ParquetProvider(config_parquet) - feature_collection = p.query(skip_geometry=True) - for feature in feature_collection['features']: - assert feature.get('geometry') is None - - -def test_query_with_datetime(config_parquet): - """Testing query for a valid JSON object with time""" - - p = ParquetProvider(config_parquet) - feature_collection = p.query( - datetime_='2022-05-01T00:00:00Z/2022-05-31T23:59:59Z') - assert feature_collection.get('type') == 'FeatureCollection' - features = feature_collection.get('features') - assert len(features) == 7 - for feature in feature_collection['features']: - time = feature['properties'][config_parquet['time_field']] - assert time.year == 2022 - assert time.month == 5 - - -def test_query_nogeom(config_parquet_nogeom_notime): - """Testing query for a valid JSON object without geometry""" - - p = ParquetProvider(config_parquet_nogeom_notime) - feature_collection = p.query(resulttype='results') - assert feature_collection.get('type') == 'FeatureCollection' - assert len(feature_collection.get('features')) > 0 - for feature in feature_collection['features']: - assert feature.get('geometry') is None - - -def test_query_nocrs(config_parquet_nocrs): - """Testing a parquet provider without CRS""" - - p = ParquetProvider(config_parquet_nocrs) - results = p.get_fields() - assert results['lat']['type'] == 'number' - assert results['lon']['format'] == 'double' - assert results['time']['format'] == 'date-time' +class TestParquetProviderWithNaiveOrMissingGeometry: + """Tests for parquet that do not comply to geoparquet standard""" + + def test_get_fields(self, config_parquet): + """Testing field types""" + + p = ParquetProvider(config_parquet) + assert p.bbox_filterable + assert p.has_geometry + assert not 
p.has_bbox_column + results = p.get_fields() + assert results['lat']['type'] == 'number' + assert results['lon']['format'] == 'double' + assert results['time']['format'] == 'date-time' + + def test_get(self, config_parquet): + """Testing query for a specific object""" + + p = ParquetProvider(config_parquet) + result = p.get('42') + assert result['id'] == '42' + assert result['properties']['lon'] == 4.947447 + + def test_get_not_existing_feature_raise_exception( + self, config_parquet + ): + """Testing query for a not existing object""" + p = ParquetProvider(config_parquet) + with pytest.raises(ProviderItemNotFoundError): + p.get(-1) + + def test_query_hits(self, config_parquet): + """Testing query on entire collection for hits""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query(resulttype='hits') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 0 + hits = feature_collection.get('numberMatched') + assert hits is not None + assert hits == 100 + + def test_query_bbox_hits(self, config_parquet): + """Testing query for a valid JSON object with geometry""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query( + bbox=[100, -50, 150, 0], + resulttype='hits') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 0 + hits = feature_collection.get('numberMatched') + assert hits is not None + assert hits == 6 + + def test_query_with_limit(self, config_parquet): + """Testing query for a valid JSON object with geometry""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query(limit=2, resulttype='results') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 2 + hits = feature_collection.get('numberMatched') + assert hits > 2 + feature = features[0] + properties = 
feature.get('properties') + assert properties is not None + geometry = feature.get('geometry') + assert geometry is not None + + def test_query_with_offset(self, config_parquet): + """Testing query for a valid JSON object with geometry""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query(offset=20, limit=10, resulttype='results') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 10 + hits = feature_collection.get('numberMatched') + assert hits > 30 + feature = features[0] + properties = feature.get('properties') + assert properties is not None + assert feature['id'] == '21' + assert properties['lat'] == 66.264988 + geometry = feature.get('geometry') + assert geometry is not None + + def test_query_with_property(self, config_parquet): + """Testing query for a valid JSON object with property filter""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query( + resulttype='results', + properties=[('lon', -12.855022)]) + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 1 + for feature in features: + assert feature['properties']['lon'] == -12.855022 + + def test_query_with_skip_geometry(self, config_parquet): + """Testing query for a valid JSON object with property filter""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query(skip_geometry=True) + for feature in feature_collection['features']: + assert feature.get('geometry') is None + + def test_query_with_datetime(self, config_parquet): + """Testing query for a valid JSON object with time""" + + p = ParquetProvider(config_parquet) + feature_collection = p.query( + datetime_='2022-05-01T00:00:00Z/2022-05-31T23:59:59Z') + assert feature_collection.get('type') == 'FeatureCollection' + features = feature_collection.get('features') + assert len(features) == 7 + for feature in 
feature_collection['features']: + time = feature['properties'][config_parquet['time_field']] + assert time.year == 2022 + assert time.month == 5 + + def test_query_nogeom(self, config_parquet_nogeom_notime): + """Testing query for a valid JSON object without geometry""" + + p = ParquetProvider(config_parquet_nogeom_notime) + assert not p.has_geometry + assert not p.bbox_filterable + feature_collection = p.query(resulttype='results') + assert feature_collection.get('type') == 'FeatureCollection' + assert len(feature_collection.get('features')) > 0 + for feature in feature_collection['features']: + assert feature.get('geometry') is None + + def test_query_nocrs(self, config_parquet_nocrs): + """Testing a parquet provider without CRS""" + + p = ParquetProvider(config_parquet_nocrs) + assert p.bbox_filterable + assert p.has_geometry + assert not p.has_bbox_column + results = p.get_fields() + assert results['lat']['type'] == 'number' + assert results['lon']['format'] == 'double' + assert results['time']['format'] == 'date-time' + + +class TestParquetProviderWithGeoparquetMetadata: + + def test_file_without_bbox_without_id_specified(self, geoparquet_no_bbox): + + p = ParquetProvider(geoparquet_no_bbox) + assert not p.bbox_filterable + assert not p.has_bbox_column + assert p.id_field is None + results = p.get_fields() + assert results['col']['type'] == 'integer' + + feature_collection = p.query(resulttype='results') + assert feature_collection.get('type') == 'FeatureCollection' + assert feature_collection['features'][0]['geometry']['coordinates'] == ( # noqa + ( + ((30, 10), (40, 40), (20, 40), (10, 20), (30, 10)),) + ) + assert feature_collection['features'][0]['properties']['col'] == 0 + + def test_file_without_bbox_with_id_specified(self, geoparquet_no_bbox): + config = copy(geoparquet_no_bbox) + config['id_field'] = 'col' + + p = ParquetProvider( + config + ) + results = p.get_fields() + assert p.id_field == 'col' + assert results['col']['type'] == 'integer' + + 
feature_collection = p.query(resulttype='results') + assert feature_collection.get('type') == 'FeatureCollection' + assert feature_collection['features'][0]['geometry']['coordinates'] == ( # noqa + (((30, 10), (40, 40), (20, 40), (10, 20), (30, 10)),) + ) + assert feature_collection['features'][0]['properties']['col'] == 0 + assert feature_collection['features'][0]['id'] == '0' + + def test_get_by_id(self, geoparquet_no_bbox): + + config = copy(geoparquet_no_bbox) + config['id_field'] = 'col' + p = ParquetProvider( + config + ) + + feature = p.get('2') + assert feature.get('type') == 'Feature' + assert feature['geometry'] is None + + def test_file_with_bbox(self, geoparquet_with_bbox): + + p = ParquetProvider(geoparquet_with_bbox) + assert p.has_bbox_column + assert p.bbox_filterable + assert p.has_geometry + + hits = p.query(resulttype='hits')['numberMatched'] + assert hits == 679 + + huge_bbox = p.query(bbox=[-90, -90, 90, 90], resulttype='hits')[ + 'numberMatched' + ] + dataset_bounds = p.query(bbox=[-74.1, 40.97, -73.95, 41.1], + resulttype='hits')['numberMatched'] + assert huge_bbox == dataset_bounds diff --git a/tests/provider/test_postgresql_provider.py b/tests/provider/test_postgresql_provider.py index c8cb8140e..eb0b8760c 100644 --- a/tests/provider/test_postgresql_provider.py +++ b/tests/provider/test_postgresql_provider.py @@ -37,7 +37,17 @@ # ================================================================= # Needs to be run like: python3 -m pytest -# See pygeoapi/provider/postgresql.py for instructions on setting up +# Testing local postgis with docker: +''' +docker run --name postgis \ + --rm \ + -p 5432:5432 \ + -e ALLOW_IP_RANGE=0.0.0.0/0 \ + -e POSTGRES_USER=postgres \ + -e POSTGRES_PASS=postgres \ + -e POSTGRES_DBNAME=test \ + -d -t kartoza/postgis +''' # test database in Docker from http import HTTPStatus @@ -69,44 +79,58 @@ PASSWORD = os.environ.get('POSTGRESQL_PASSWORD', 'postgres') -@pytest.fixture() -def config(): - return { 
+@pytest.fixture(params=['default', 'connection_string']) +def config(request): + config_ = { 'name': 'PostgreSQL', 'type': 'feature', - 'data': {'host': '127.0.0.1', - 'dbname': 'test', - 'user': 'postgres', - 'password': PASSWORD, - 'search_path': ['osm', 'public'] - }, - 'options': { - 'connect_timeout': 10 - }, + 'options': {'connect_timeout': 10}, 'id_field': 'osm_id', 'table': 'hotosm_bdi_waterways', 'geom_field': 'foo_geom' } + if request.param == 'default': + config_['data'] = { + 'host': '127.0.0.1', + 'dbname': 'test', + 'user': 'postgres', + 'password': PASSWORD, + 'search_path': ['osm', 'public'] + } + elif request.param == 'connection_string': + config_['data'] = ( + f'postgresql://postgres:{PASSWORD}@127.0.0.1:5432/test' + ) + config_['options']['search_path'] = ['osm', 'public'] + return config_ -@pytest.fixture() -def config_types(): - return { + +@pytest.fixture(params=['default', 'connection_string']) +def config_types(request): + config_ = { 'name': 'PostgreSQL', 'type': 'feature', - 'data': {'host': '127.0.0.1', - 'dbname': 'test', - 'user': 'postgres', - 'password': PASSWORD, - 'search_path': ['public'] - }, - 'options': { - 'connect_timeout': 10 - }, + 'options': {'connect_timeout': 10}, 'id_field': 'id', 'table': 'foo', 'geom_field': 'the_geom' } + if request.param == 'default': + config_['data'] = { + 'host': '127.0.0.1', + 'dbname': 'test', + 'user': 'postgres', + 'password': PASSWORD, + 'search_path': ['public', 'osm'] + } + elif request.param == 'connection_string': + config_['data'] = ( + f'postgresql://postgres:{PASSWORD}@127.0.0.1:5432/test' + ) + config_['options']['search_path'] = ['public', 'osm'] + + return config_ @pytest.fixture() @@ -148,14 +172,20 @@ def test_valid_connection_options(config): for key in keys: assert key in ['connect_timeout', 'tcp_user_timeout', 'keepalives', 'keepalives_idle', 'keepalives_count', - 'keepalives_interval'] + 'keepalives_interval', 'search_path'] def test_schema_path_search(config): - 
config['data']['search_path'] = ['public', 'osm'] + if isinstance(config['data'], dict): + config['data']['search_path'] = ['public', 'osm'] + else: + config['options']['search_path'] = ['public', 'osm'] PostgreSQLProvider(config) - config['data']['search_path'] = ['public', 'notosm'] + if isinstance(config['data'], dict): + config['data']['search_path'] = ['public', 'notosm'] + else: + config['options']['search_path'] = ['public', 'notosm'] with pytest.raises(ProviderQueryError): PostgreSQLProvider(config) @@ -189,13 +219,13 @@ def test_query_materialised_view(config): provider = PostgreSQLProvider(config_materialised_view) # Only ID, width and depth properties should be available - assert set(provider.get_fields().keys()) == {"osm_id", "width", "depth"} + assert set(provider.get_fields().keys()) == {'osm_id', 'width', 'depth'} def test_query_with_property_filter(config): """Test query valid features when filtering by property""" p = PostgreSQLProvider(config) - feature_collection = p.query(properties=[("waterway", "stream")]) + feature_collection = p.query(properties=[('waterway', 'stream')]) features = feature_collection.get('features') stream_features = list( filter(lambda feature: feature['properties']['waterway'] == 'stream', @@ -246,19 +276,19 @@ def test_query_with_config_properties(config): feature = result.get('features')[0] properties = feature.get('properties') for property_name in properties.keys(): - assert property_name in config["properties"] + assert property_name in config['properties'] -@pytest.mark.parametrize("property_filter, expected", [ +@pytest.mark.parametrize('property_filter, expected', [ ([], 14776), - ([("waterway", "stream")], 13930), - ([("waterway", "this does not exist")], 0), + ([('waterway', 'stream')], 13930), + ([('waterway', 'this does not exist')], 0), ]) def test_query_hits_with_property_filter(config, property_filter, expected): """Test query resulttype=hits""" provider = PostgreSQLProvider(config) - results = 
provider.query(properties=property_filter, resulttype="hits") - assert results["numberMatched"] == expected + results = provider.query(properties=property_filter, resulttype='hits') + assert results['numberMatched'] == expected def test_query_bbox(config): @@ -337,7 +367,7 @@ def test_get_with_config_properties(config): result = provider.get(80835483) properties = result.get('properties') for property_name in properties.keys(): - assert property_name in config["properties"] + assert property_name in config['properties'] def test_get_not_existing_item_raise_exception(config): @@ -376,7 +406,7 @@ def test_query_cql(config, cql, expected_ids): assert feature_collection.get('type') == 'FeatureCollection' features = feature_collection.get('features') - ids = [feature["id"] for feature in features] + ids = [feature['id'] for feature in features] assert ids == expected_ids @@ -385,7 +415,7 @@ def test_query_cql_properties_bbox_filters(config): # Arrange properties = [('waterway', 'stream')] bbox = [29, -2.8, 29.2, -2.9] - filterq = parse("osm_id BETWEEN 80800000 AND 80900000") + filterq = parse('osm_id BETWEEN 80800000 AND 80900000') expected_ids = [80835470] # Act @@ -395,7 +425,7 @@ def test_query_cql_properties_bbox_filters(config): bbox=bbox) # Assert - ids = [feature["id"] for feature in feature_collection.get('features')] + ids = [feature['id'] for feature in feature_collection.get('features')] assert ids == expected_ids @@ -457,9 +487,9 @@ def test_instantiation(config): provider = PostgreSQLProvider(config) # Assert - assert provider.name == "PostgreSQL" - assert provider.table == "hotosm_bdi_waterways" - assert provider.id_field == "osm_id" + assert provider.name == 'PostgreSQL' + assert provider.table == 'hotosm_bdi_waterways' + assert provider.id_field == 'osm_id' @pytest.mark.parametrize('bad_data, exception, match', [ @@ -484,8 +514,14 @@ def test_instantiation_with_bad_config(config, bad_data, exception, match): def 
test_instantiation_with_bad_credentials(config): # Arrange - config['data'].update({'user': 'bad_user'}) - match = r'Could not connect to .*bad_user:\*\*\*@' + if isinstance(config['data'], dict): + config['data'].update({'user': 'bad_user'}) + match = r'Could not connect to .*bad_user:\*\*\*@' + + else: + config['data'] = config['data'].replace('postgres:', 'bad_user:') + match = r'Could not connect to .*bad_user:\*\*\*@' + # Make sure we don't use a cached connection in the tests postgresql_provider_module._ENGINE_STORE = {} @@ -505,7 +541,7 @@ def test_engine_and_table_model_stores(config): # Same database connection details, but different table different_table = config.copy() - different_table.update(table="hotosm_bdi_drains") + different_table.update(table='hotosm_bdi_drains') provider2 = PostgreSQLProvider(different_table) assert repr(provider2._engine) == repr(provider0._engine) assert provider2._engine is provider0._engine @@ -515,7 +551,11 @@ def test_engine_and_table_model_stores(config): # and also a different table_model, as two databases may have different # tables with the same name different_host = config.copy() - different_host["data"]["host"] = "localhost" + if isinstance(config['data'], dict): + different_host['data']['host'] = 'localhost' + else: + different_host['data'] = config['data'].replace( + '127.0.0.1', 'localhost') provider3 = PostgreSQLProvider(different_host) assert provider3._engine is not provider0._engine assert provider3.table_model is not provider0.table_model @@ -584,7 +624,7 @@ def test_get_collection_items_postgresql_cql_invalid_filter_language(pg_api_): assert error_response['description'] == 'Invalid filter language' -@pytest.mark.parametrize("bad_cql", [ +@pytest.mark.parametrize('bad_cql', [ 'id IN (1, ~)', 'id EATS (1, 2)', # Valid CQL relations only 'id IN (1, 2' # At some point this may return UnexpectedEOF @@ -664,7 +704,7 @@ def test_get_collection_items_postgresql_cql_json_invalid_filter_language(pg_api """ # Arrange 
# CQL should never be parsed - cql = {"in": {"value": {"property": "id"}, "list": [1, 2]}} + cql = {'in': {'value': {'property': 'id'}, 'list': [1, 2]}} headers = {'CONTENT_TYPE': 'application/query-cql-json'} # Act @@ -681,9 +721,9 @@ def test_get_collection_items_postgresql_cql_json_invalid_filter_language(pg_api assert error_response['description'] == 'Bad CQL JSON' -@pytest.mark.parametrize("bad_cql", [ +@pytest.mark.parametrize('bad_cql', [ # Valid CQL relations only - {"eats": {"value": {"property": "id"}, "list": [1, 2]}}, + {'eats': {'value': {'property': 'id'}, 'list': [1, 2]}}, # At some point this may return UnexpectedEOF '{"in": {"value": {"property": "id"}, "list": [1, 2}}' ]) @@ -892,6 +932,13 @@ def test_transaction_basic_workflow(pg_api_, data): identifier=123) assert code == HTTPStatus.OK + # delete again (item should not be in backend) + req = mock_api_request(data=data) + headers, code, content = manage_collection_item( + pg_api_, req, action='delete', dataset='hot_osm_waterways', + identifier=123) + assert code == HTTPStatus.NOT_FOUND + def test_transaction_create_handles_invalid_input_data(pg_api_, data): data_parsed = json.loads(data) @@ -901,3 +948,38 @@ def test_transaction_create_handles_invalid_input_data(pg_api_, data): headers, code, content = manage_collection_item( pg_api_, req, action='create', dataset='hot_osm_waterways') assert 'generic error' in content + + +def test_provider_count_default_value(config): + # Arrange + provider = PostgreSQLProvider(config) + + # Act + results = provider.query() + + # Assert + assert results['numberMatched'] == 14776 + + +def test_provider_count_false(config): + # Arrange + config['count'] = 'false' + provider = PostgreSQLProvider(config) + + # Act + results = provider.query() + + # Assert + assert 'numberMatched' not in results + + +def test_provider_count_false_with_resulttype_hits(config): + # Arrange + config['count'] = 'false' + provider = PostgreSQLProvider(config) + + # Act + results = 
provider.query(resulttype='hits') + + # Assert + assert results['numberMatched'] == 14776 diff --git a/tests/provider/test_tinydb_catalogue_provider.py b/tests/provider/test_tinydb_catalogue_provider.py index b3f8f3346..92cecf22b 100644 --- a/tests/provider/test_tinydb_catalogue_provider.py +++ b/tests/provider/test_tinydb_catalogue_provider.py @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2025 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -38,6 +38,7 @@ from ..util import get_test_file_path path = get_test_file_path('tests/data/open.canada.ca/sample-records.tinydb') +path2 = get_test_file_path('tests/data/dutch-nationaalgeoregister/sample-records.tinydb') # noqa @pytest.fixture() @@ -90,6 +91,19 @@ def config(tmp_path): } +@pytest.fixture() +def config2(tmp_path): + tmp_file = tmp_path / 'sample-records.tinydb' + shutil.copy(path2, tmp_file) + return { + 'name': 'TinyDBCatalogue', + 'type': 'feature', + 'data': tmp_file, + 'id_field': 'externalId', + 'time_field': 'created' + } + + def test_domains(config): p = TinyDBCatalogueProvider(config) @@ -173,6 +187,12 @@ def test_query(config): assert results['features'][0]['id'] == '8a09413a-0a01-4aab-8925-720d987deb20' # noqa +def test_query_no_geom(config2): + p = TinyDBCatalogueProvider(config2) + results = p.query(bbox=[-180, -90, 180, 90]) + assert len(results['features']) == 2 + + def test_get(config): p = TinyDBCatalogueProvider(config) diff --git a/tests/provider/test_xarray_zarr_provider.py b/tests/provider/test_xarray_zarr_provider.py index 9b1bdb660..1b66f6a07 100644 --- a/tests/provider/test_xarray_zarr_provider.py +++ b/tests/provider/test_xarray_zarr_provider.py @@ -73,7 +73,7 @@ def config_no_time(tmp_path): def test_provider(config): p = XarrayProvider(config) - assert len(p.fields) == 4 + assert len(p.fields) == 3 assert len(p.axes) == 3 assert p.axes == 
['lon', 'lat', 'time'] @@ -82,7 +82,7 @@ def test_schema(config): p = XarrayProvider(config) assert isinstance(p.fields, dict) - assert len(p.fields) == 4 + assert len(p.fields) == 3 assert p.fields['analysed_sst']['title'] == 'analysed sea surface temperature' # noqa @@ -107,7 +107,7 @@ def test_numpy_json_serial(): def test_no_time(config_no_time): p = XarrayProvider(config_no_time) - assert len(p.fields) == 4 + assert len(p.fields) == 3 assert p.axes == ['lon', 'lat'] coverage = p.query(format='json') diff --git a/tests/pygeoapi-test-asyncapi.yml b/tests/pygeoapi-test-asyncapi.yml new file mode 100644 index 000000000..1e7d1292a --- /dev/null +++ b/tests/pygeoapi-test-asyncapi.yml @@ -0,0 +1,56 @@ +asyncapi: 3.0.0 +channels: + notify-canada-metadata: + address: collections/canada-metadata + description: Open Canada sample data + messages: + DefaultMessage: + payload: + $ref: https://raw.githubusercontent.com/wmo-im/wis2-monitoring-events/refs/heads/main/schemas/cloudevents-v1.0.2.yaml + notify-heif-sample: + address: collections/heif-sample + description: HEIF sample + messages: + DefaultMessage: + payload: + $ref: https://raw.githubusercontent.com/wmo-im/wis2-monitoring-events/refs/heads/main/schemas/cloudevents-v1.0.2.yaml +defaultContentType: application/json +id: http://localhost:5000 +info: + contact: + email: you@example.org + name: Lastname, Firstname + description: pygeoapi provides an API to geospatial data https://github.com + externalDocs: + url: https://github.com/geopython/demo.pygeoapi.io/tree/master/services/pygeoapi_cite + license: + name: CC-BY 4.0 license + url: https://creativecommons.org/licenses/by/4.0/ + tags: + - name: geospatial + - name: data + - name: api + title: pygeoapi default instance + version: 0.23.dev0 +operations: + consume-canada-metadata: + action: receive + channel: + $ref: '#/channels/notify-canada-metadata' + consume-heif-sample: + action: receive + channel: + $ref: '#/channels/notify-heif-sample' + publish-canada-metadata: 
+ action: send + channel: + $ref: '#/channels/notify-canada-metadata' + publish-heif-sample: + action: send + channel: + $ref: '#/channels/notify-heif-sample' +servers: + default: + description: pygeoapi provides an API to geospatial data https://github.com + host: localhost:1883 + protocol: mqtt diff --git a/tests/pygeoapi-test-config-admin-empty-resources.yml b/tests/pygeoapi-test-config-admin-empty-resources.yml new file mode 100644 index 000000000..202022ad4 --- /dev/null +++ b/tests/pygeoapi-test-config-admin-empty-resources.yml @@ -0,0 +1,103 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2019 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +server: + bind: + host: 0.0.0.0 + port: 5000 + url: http://localhost:5000/ + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + gzip: false + languages: + # First language is the default language + - en-US + - fr-CA + cors: true + pretty_print: true + limits: + default_items: 10 + max_items: 10 + # templates: /path/to/templates + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' + manager: + name: TinyDB + connection: /tmp/pygeoapi-test-process-manager.db + output_dir: /tmp + admin: true + +logging: + level: DEBUG + #logfile: /tmp/pygeoapi.log + +metadata: + identification: + title: + en: pygeoapi default instance + fr: instance par défaut de pygeoapi + description: + en: pygeoapi provides an API to geospatial data + fr: pygeoapi fournit une API aux données géospatiales + keywords: + en: + - geospatial + - data + - api + fr: + - géospatiale + - données + - api + keywords_type: theme + terms_of_service: https://creativecommons.org/licenses/by/4.0/ + url: http://example.org + license: + name: CC-BY 4.0 license + url: https://creativecommons.org/licenses/by/4.0/ + provider: + name: Organization Name + url: https://pygeoapi.io + contact: + name: Lastname, Firstname + position: Position Title + address: Mailing Address + city: City + stateorprovince: Administrative Area + postalcode: Zip or Postal Code + country: Country + phone: +xx-xxx-xxx-xxxx + fax: +xx-xxx-xxx-xxxx + email: you@example.org + url: Contact URL + hours: Hours of Service + instructions: During hours of service. Off on weekends. 
+ role: pointOfContact + +resources: {} diff --git a/tests/pygeoapi-test-config-failing-collection.yml b/tests/pygeoapi-test-config-failing-collection.yml new file mode 100644 index 000000000..8baddbd90 --- /dev/null +++ b/tests/pygeoapi-test-config-failing-collection.yml @@ -0,0 +1,206 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +server: + bind: + host: 0.0.0.0 + port: 5000 + url: http://localhost:5000/ + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + gzip: false + languages: + # First language is the default language + - en-US + - fr-CA + cors: true + pretty_print: true + limits: + default_items: 10 + max_items: 10 + # templates: /path/to/templates + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' + manager: + name: TinyDB + connection: /tmp/pygeoapi-test-process-manager.db + output_dir: /tmp + +logging: + level: DEBUG + #logfile: /tmp/pygeoapi.log + +metadata: + identification: + title: + en: pygeoapi default instance + fr: instance par défaut de pygeoapi + description: + en: pygeoapi provides an API to geospatial data + fr: pygeoapi fournit une API aux données géospatiales + keywords: + en: + - geospatial + - data + - api + fr: + - géospatiale + - données + - api + keywords_type: theme + terms_of_service: https://creativecommons.org/licenses/by/4.0/ + url: http://example.org + license: + name: CC-BY 4.0 license + url: https://creativecommons.org/licenses/by/4.0/ + provider: + name: Organization Name + url: https://pygeoapi.io + contact: + name: Lastname, Firstname + position: Position Title + address: Mailing Address + city: City + stateorprovince: Administrative Area + postalcode: Zip or Postal Code + country: Country + phone: +xx-xxx-xxx-xxxx + fax: +xx-xxx-xxx-xxxx + email: you@example.org + url: Contact URL + hours: Hours of Service + instructions: During hours of service. Off on weekends. 
+ role: pointOfContact + +resources: + obs: + type: collection + title: + en: Observations + fr: Observations + description: + en: My cool observations + fr: Mes belles observations + keywords: + - observations + - monitoring + links: + - type: text/csv + rel: canonical + title: data + href: https://github.com/mapserver/mapserver/blob/branch-7-0/msautotest/wxs/data/obs.csv + hreflang: en-US + - type: text/csv + rel: alternate + title: data + href: https://raw.githubusercontent.com/mapserver/mapserver/branch-7-0/msautotest/wxs/data/obs.csv + hreflang: en-US + linked-data: + context: + - schema: https://schema.org/ + stn_id: + "@id": schema:identifier + "@type": schema:Text + datetime: + "@type": schema:DateTime + "@id": schema:observationDate + value: + "@type": schema:Number + "@id": schema:QuantitativeValue + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + begin: 2000-10-30T18:24:39Z + end: 2007-10-30T08:57:29Z + trs: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + providers: + - type: feature + name: CSV + data: tests/data/obs.csv + id_field: id + geometry: + x_field: long + y_field: lat + + cmip5: + type: collection + title: CMIP5 sample + description: CMIP5 sample + keywords: + - cmip5 + - climate + extents: + spatial: + bbox: [-150,40,-45,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + links: + - type: text/html + rel: canonical + title: information + href: https://open.canada.ca/data/en/dataset/eddd6eaf-34d7-4452-a994-3d928115a68b + hreflang: en-CA + providers: + - type: coverage + name: xarray + data: tests/data/CMIP5_rcp8.5_annual_abs_latlon1x1_PCP_pctl25_P1Y.nc404 + x_field: lon + y_field: lat + time_field: time + format: + name: NetCDF + mimetype: application/x-netcdf + + objects: + type: collection + title: GeoJSON objects + description: GeoJSON geometry types for GeoSparql and Schema Geometry conversion. 
+ keywords: + - shapes + links: + - type: text/html + rel: canonical + title: data source + href: https://en.wikipedia.org/wiki/GeoJSON + hreflang: en-US + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + begin: null + end: null # or empty (either means open ended) + providers: + - type: feature + name: GeoJSON + data: tests/data/items.geojson + id_field: fid + uri_field: uri diff --git a/tests/pygeoapi-test-config-process-metadata.yml b/tests/pygeoapi-test-config-process-metadata.yml new file mode 100644 index 000000000..1d8ddb8fd --- /dev/null +++ b/tests/pygeoapi-test-config-process-metadata.yml @@ -0,0 +1,106 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +server: + bind: + host: 0.0.0.0 + port: 5000 + url: http://localhost:5000/ + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + gzip: false + languages: + # First language is the default language + - en-US + - fr-CA + cors: true + pretty_print: true + limits: + default_items: 10 + max_items: 10 + # templates: /path/to/templates + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' + manager: + name: TinyDB + connection: /tmp/pygeoapi-test-process-manager.db + output_dir: /tmp + +logging: + level: DEBUG + #logfile: /tmp/pygeoapi.log + +metadata: + identification: + title: + en: pygeoapi default instance + fr: instance par défaut de pygeoapi + description: + en: pygeoapi provides an API to geospatial data + fr: pygeoapi fournit une API aux données géospatiales + keywords: + en: + - geospatial + - data + - api + fr: + - géospatiale + - données + - api + keywords_type: theme + terms_of_service: https://creativecommons.org/licenses/by/4.0/ + url: http://example.org + license: + name: CC-BY 4.0 license + url: https://creativecommons.org/licenses/by/4.0/ + provider: + name: Organization Name + url: https://pygeoapi.io + contact: + name: Lastname, Firstname + position: Position Title + address: Mailing Address + city: City + stateorprovince: Administrative Area + postalcode: Zip or Postal Code + country: Country + phone: +xx-xxx-xxx-xxxx + fax: +xx-xxx-xxx-xxxx + email: you@example.org + url: Contact URL + hours: Hours of Service + instructions: During hours of service. Off on weekends. 
+ role: pointOfContact + +resources: + echo: + type: process + processor: + name: Echo diff --git a/tests/pygeoapi-test-config-pubsub.yml b/tests/pygeoapi-test-config-pubsub.yml new file mode 100644 index 000000000..51ed71d16 --- /dev/null +++ b/tests/pygeoapi-test-config-pubsub.yml @@ -0,0 +1,458 @@ +# ================================================================= +# +# Authors: Tom Kralidis +# +# Copyright (c) 2026 Tom Kralidis +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation +# files (the "Software"), to deal in the Software without +# restriction, including without limitation the rights to use, +# copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the +# Software is furnished to do so, subject to the following +# conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. 
+# +# ================================================================= + +server: + bind: + host: 0.0.0.0 + port: 5000 + url: http://localhost:5000/ + mimetype: application/json; charset=UTF-8 + encoding: utf-8 + gzip: false + languages: + # First language is the default language + - en-US + - fr-CA + cors: true + pretty_print: true + limits: + default_items: 10 + max_items: 10 + # templates: /path/to/templates + map: + url: https://tile.openstreetmap.org/{z}/{x}/{y}.png + attribution: '© OpenStreetMap contributors' + manager: + name: TinyDB + connection: /tmp/pygeoapi-test-process-manager.db + output_dir: /tmp + +pubsub: + name: MQTT + broker: + url: mqtt://everyone:everyone@localhost:1883 + channel: my/channel + +logging: + level: DEBUG + #logfile: /tmp/pygeoapi.log + +metadata: + identification: + title: + en: pygeoapi default instance + fr: instance par défaut de pygeoapi + description: + en: pygeoapi provides an API to geospatial data + fr: pygeoapi fournit une API aux données géospatiales + keywords: + en: + - geospatial + - data + - api + fr: + - géospatiale + - données + - api + keywords_type: theme + terms_of_service: https://creativecommons.org/licenses/by/4.0/ + url: http://example.org + license: + name: CC-BY 4.0 license + url: https://creativecommons.org/licenses/by/4.0/ + provider: + name: Organization Name + url: https://pygeoapi.io + contact: + name: Lastname, Firstname + position: Position Title + address: Mailing Address + city: City + stateorprovince: Administrative Area + postalcode: Zip or Postal Code + country: Country + phone: +xx-xxx-xxx-xxxx + fax: +xx-xxx-xxx-xxxx + email: you@example.org + url: Contact URL + hours: Hours of Service + instructions: During hours of service. Off on weekends. 
+ role: pointOfContact + +resources: + obs: + type: collection + title: + en: Observations + fr: Observations + description: + en: My cool observations + fr: Mes belles observations + keywords: + - observations + - monitoring + links: + - type: text/csv + rel: canonical + title: data + href: https://github.com/mapserver/mapserver/blob/branch-7-0/msautotest/wxs/data/obs.csv + hreflang: en-US + - type: text/csv + rel: alternate + title: data + href: https://raw.githubusercontent.com/mapserver/mapserver/branch-7-0/msautotest/wxs/data/obs.csv + hreflang: en-US + linked-data: + context: + - schema: https://schema.org/ + stn_id: + "@id": schema:identifier + "@type": schema:Text + datetime: + "@type": schema:DateTime + "@id": schema:observationDate + value: + "@type": schema:Number + "@id": schema:QuantitativeValue + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + begin: 2000-10-30T18:24:39Z + end: 2007-10-30T08:57:29Z + trs: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + providers: + - type: feature + name: CSV + data: tests/data/obs.csv + crs: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + - http://www.opengis.net/def/crs/EPSG/0/4326 + - http://www.opengis.net/def/crs/EPSG/0/3857 + - http://www.opengis.net/def/crs/EPSG/0/28992 + storage_crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + id_field: id + geometry: + x_field: long + y_field: lat + + norway_pop: + type: collection + title: Norwegian urban areas + description: Most populated Norwegian urban areas + keywords: + - population + - Norway + links: + - type: text/html + rel: canonical + title: information + href: https://www.ssb.no/statbank/table/04859/ + hreflang: nb-NO + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + providers: + - type: feature + name: CSV + data: tests/data/norwegian_urban_areas.csv + id_field: id + geometry: + x_field: easting + y_field: northing + crs: + - 
http://www.opengis.net/def/crs/EPSG/0/4258 + - http://www.opengis.net/def/crs/EPSG/0/25833 + storage_crs: http://www.opengis.net/def/crs/EPSG/0/25833 + + cmip5: + type: collection + title: CMIP5 sample + description: CMIP5 sample + keywords: + - cmip5 + - climate + extents: + spatial: + bbox: [-150,40,-45,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + links: + - type: text/html + rel: canonical + title: information + href: https://open.canada.ca/data/en/dataset/eddd6eaf-34d7-4452-a994-3d928115a68b + hreflang: en-CA + providers: + - type: coverage + name: xarray + data: tests/data/CMIP5_rcp8.5_annual_abs_latlon1x1_PCP_pctl25_P1Y.nc + x_field: lon + y_field: lat + time_field: time + format: + name: NetCDF + mimetype: application/x-netcdf + + naturalearth/lakes: + type: collection + title: + en: Large Lakes + fr: Grands Lacs + description: + en: lakes of the world, public domain + fr: lacs du monde, domaine public + keywords: + - lakes + links: + - type: text/html + rel: canonical + title: information + href: http://www.naturalearthdata.com/ + hreflang: en-US + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + begin: 2011-11-11T11:11:11Z + end: null # or empty (either means open ended) + providers: + - type: feature + name: GeoJSON + data: tests/data/ne_110m_lakes.geojson + id_field: id + crs: + - http://www.opengis.net/def/crs/OGC/1.3/CRS84 + storage_crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + storage_crs_coordinate_epoch: 2017.23 + - type: tile + name: MVT-tippecanoe + # data: http://localhost:9000/ne_110m_lakes/{z}/{x}/{y} + data: tests/data/tiles/ne_110m_lakes + options: + bounds: [[-124.953634,-16.536406],[109.929807,66.969298]] + zoom: + min: 0 + max: 11 + schemes: + - WorldCRS84Quad + format: + name: pbf + mimetype: application/vnd.mapbox-vector-tile + + gdps-temperature: + type: collection + title: Global Deterministic Prediction System sample + description: Global Deterministic 
Prediction System sample + keywords: + - gdps + - global + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + links: + - type: text/html + rel: canonical + title: information + href: https://eccc-msc.github.io/open-data/msc-data/nwp_gdps/readme_gdps_en + hreflang: en-CA + providers: + - type: coverage + name: rasterio + data: tests/data/CMC_glb_TMP_TGL_2_latlon.15x.15_2020081000_P000.grib2 + format: + name: GRIB + mimetype: application/x-grib2 + + icoads-sst: + type: collection + title: International Comprehensive Ocean-Atmosphere Data Set (ICOADS) + description: International Comprehensive Ocean-Atmosphere Data Set (ICOADS) + keywords: + - icoads + - sst + - air temperature + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + links: + - type: text/html + rel: canonical + title: information + href: https://psl.noaa.gov/data/gridded/data.coads.1deg.html + hreflang: en-US + providers: + - type: edr + name: xarray-edr + data: tests/data/coads_sst.nc + format: + name: NetCDF + mimetype: application/x-netcdf + + usgs-prism: + type: collection + title: Parameter-elevation Regressions on Independent Slopes Model (PRISM) + description: PRISM Monthly Climate Data for the Continental United States + keywords: + - temperature + - precipitation + extents: + spatial: + bbox: [-125.020836, 24.104166, -66.520836, 49.9375] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + links: + - type: text/html + rel: canonical + title: information + href: https://cida.usgs.gov/thredds/catalog.html?dataset=cida.usgs.gov/prism_v2 + hreflang: en-US + providers: + - type: edr + name: xarray-edr + data: s3://mdmf/gdp/PRISM_v2.zarr + format: + name: zarr + mimetype: application/zip + options: + s3: + anon: true + requester_pays: false + client_kwargs: + endpoint_url: https://usgs.osn.mghpcc.org/ + + objects: + type: collection + title: GeoJSON objects + description: GeoJSON geometry types for 
GeoSparql and Schema Geometry conversion. + keywords: + - shapes + links: + - type: text/html + rel: canonical + title: data source + href: https://en.wikipedia.org/wiki/GeoJSON + hreflang: en-US + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + temporal: + begin: null + end: null # or empty (either means open ended) + providers: + - type: feature + name: GeoJSON + data: tests/data/items.geojson + id_field: fid + uri_field: uri + + mapserver_world_map: + type: collection + title: MapServer demo WMS world map + description: MapServer demo WMS world map + keywords: + - MapServer + - world map + links: + - type: text/html + rel: canonical + title: information + href: https://demo.mapserver.org + hreflang: en-US + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + custom-extent: + url: https://example.org/custom-extent + units: °C + range: [0, 10] + values: [0, 5, 10] + providers: + - type: map + name: WMSFacade + data: https://demo.mapserver.org/cgi-bin/msautotest + options: + layer: world_latlong + style: default + format: + name: png + mimetype: image/png + + canada-metadata: + type: collection + title: + en: Open Canada sample data + fr: Exemple de donn\u00e9es Canada Ouvert + description: + en: Sample metadata records from open.canada.ca + fr: Exemples d'enregistrements de m\u00e9tadonn\u00e9es sur ouvert.canada.ca + keywords: + en: + - canada + - open data + fr: + - canada + - donn\u00e9es ouvertes + links: + - type: text/html + rel: canonical + title: information + href: https://open.canada.ca/en/open-data + hreflang: en-CA + - type: text/html + rel: alternate + title: informations + href: https://ouvert.canada.ca/fr/donnees-ouvertes + hreflang: fr-CA + extents: + spatial: + bbox: [-180,-90,180,90] + crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + providers: + - type: record + name: TinyDBCatalogue + data: tests/data/open.canada.ca/sample-records.tinydb + 
id_field: externalId + time_field: created + title_field: title + + hello-world: + type: process + processor: + name: HelloWorld + + pygeometa-metadata-validate: + type: process + processor: + name: pygeometa.pygeoapi_plugin.PygeometaMetadataValidateProcessor diff --git a/tests/pygeoapi-test-config.yml b/tests/pygeoapi-test-config.yml index 6efe0312f..e64afbf28 100644 --- a/tests/pygeoapi-test-config.yml +++ b/tests/pygeoapi-test-config.yml @@ -2,7 +2,7 @@ # # Authors: Tom Kralidis # -# Copyright (c) 2019 Tom Kralidis +# Copyright (c) 2026 Tom Kralidis # # Permission is hereby granted, free of charge, to any person # obtaining a copy of this software and associated documentation @@ -142,6 +142,8 @@ resources: begin: 2000-10-30T18:24:39Z end: 2007-10-30T08:57:29Z trs: http://www.opengis.net/def/uom/ISO-8601/0/Gregorian + resolution: P1D + default: 2000-10-30T18:24:39Z providers: - type: feature name: CSV @@ -387,6 +389,11 @@ resources: spatial: bbox: [-180,-90,180,90] crs: http://www.opengis.net/def/crs/OGC/1.3/CRS84 + custom-extent: + url: https://example.org/custom-extent + units: °C + range: [0, 10] + values: [0, 5, 10] providers: - type: map name: WMSFacade