EOX GitLab Instance

Commit 8b24ba8b authored by Mussab Abdalla's avatar Mussab Abdalla
Browse files

Merge branch 'staging' into 3D_terrain-support

parents da59d621 393e604c
[bumpversion]
current_version = 1.3.5
current_version = 1.3.11
commit = True
tag = True
parse = (?P<major>\d+)\.(?P<minor>\d+)\.(?P<patch>\d+)(\-(?P<release>[a-z]+)\.(?P<build>\d+))?
......@@ -40,16 +40,10 @@ replace = :release-{new_version}
search = release-{current_version}
replace = release-{new_version}
[bumpversion:file:chart/values.yaml]
search = tag: "release-{current_version}"
replace = tag: "release-{new_version}"
[bumpversion:file:chart/Chart.yaml]
[bumpversion:glob:chart/Chart.yaml]
search = appVersion: {current_version}
replace = appVersion: {new_version}
[bumpversion:glob:client/package*.json]
search = "name": "VSClient",
"version": "{current_version}"
replace = "name": "VSClient",
"version": "{new_version}"
[bumpversion:file:.gitlab-ci.yml]
search = LATEST_VERSION: {current_version}
replace = LATEST_VERSION: {new_version}
......@@ -6,3 +6,6 @@ __pycache__
tmp/
*.code-workspace
*.egg-info/
*db.env
*django.env
*obs.env
\ No newline at end of file
......@@ -38,9 +38,6 @@ build-tag:
- IMAGE_8="$CI_REGISTRY_IMAGE/pvs_shibauth"
- docker pull "$IMAGE_8":latest || true
- docker build --cache-from "$IMAGE_8":latest -t "$IMAGE_8":dev -t "$IMAGE_8":$CI_COMMIT_TAG shibauth/
# - cd ./testing && ./gitlab_test.sh
# - if [ $? -ne 0 ]; then exit 1; fi # actually fail build
# - cd -
- docker push "$IMAGE_1":$CI_COMMIT_TAG
- docker push "$IMAGE_2":$CI_COMMIT_TAG
- docker push "$IMAGE_3":$CI_COMMIT_TAG
......@@ -86,7 +83,6 @@ build-master-staging:
- docker build --cache-from "$IMAGE_8":latest -t "$IMAGE_8":dev -t "$IMAGE_8":"$TAG_USED" shibauth/
- cd ./testing && ./gitlab_test.sh
- if [ $? -ne 0 ]; then exit 1; fi # actually fail build
# - cd -
- docker push "$IMAGE_1":"$TAG_USED"
- docker push "$IMAGE_2":"$TAG_USED"
- docker push "$IMAGE_3":"$TAG_USED"
......@@ -104,7 +100,9 @@ build-master-staging:
when: always
expire_in: 4 week
paths:
- emg-pvs_cache.txt
- emg-pvs_database.txt
- emg-pvs_ingestor.txt
- emg-pvs_preprocessor.txt
- emg-pvs_registrar.txt
- emg-pvs_renderer.txt
......@@ -150,17 +148,15 @@ build:
when: always
expire_in: 4 week
paths:
# - emg-pvs_cache.txt
- emg-pvs_cache.txt
# - emg-pvs_client.txt
- emg-pvs_database.txt
# - emg-pvs_fluentd.txt
# - emg-pvs_ingestor.txt
- emg-pvs_ingestor.txt
- emg-pvs_preprocessor.txt
# - emg-pvs_redis.txt
- emg-pvs_registrar.txt
- emg-pvs_renderer.txt
# - emg-pvs_seeder.txt
# - emg-pvs_sftp.txt
review-docs:
image: python:3.8-slim
......@@ -221,7 +217,8 @@ pages:
- cd ../user-guide && make build
- mv _build/html/ ../../public/user
variables:
DOCS_VERSIONS: (1.3.4|1.2.0|1.1.1|1.0.0)
LATEST_VERSION: 1.3.11
DOCS_VERSIONS: ($LATEST_VERSION|1.2.0|1.1.1|1.0.0)
artifacts:
paths:
- public
......
......@@ -214,7 +214,7 @@ docker network create -d overlay vhr18-extnet
docker network create -d overlay emg-extnet
docker network create -d overlay dem-extnet
```
Add following .env files with credentials to the cloned copy of the repository /env folder: `vhr18_db.env`, `vhr18_obs.env`, `vhr18_django.env`.
Add the following .env files with credentials to the cloned copy of the repository config/<stack>/ folder: `vhr18_db.env`, `vhr18_obs.env`, `vhr18_django.env`.
create docker secrets:
......@@ -225,6 +225,7 @@ Sensitive environment variables are not included in the .env files, and must be
printf "<OS_PASSWORD_DOWNLOAD>" | docker secret create OS_PASSWORD_DOWNLOAD -
printf "<DJANGO_PASSWORD>" | docker secret create DJANGO_PASSWORD -
printf "<OS_PASSWORD>" | docker secret create OS_PASSWORD -
printf "<DJANGO_SECRET_KEY>" | docker secret create DJANGO_SECRET_KEY -
# configs creation
printf "<user>:<password>:<UID>:<GID>" | docker config create sftp_users_<name> -
......@@ -236,10 +237,10 @@ docker secret create BASIC_AUTH_USERS_AUTH auth_list.txt
docker secret create BASIC_AUTH_USERS_APIAUTH auth_list_api.txt
```
In case **shibauth** service will be used, for production deployment, two more secrets need to be created for each stack, where **shibauth** is deployed. These ensure that the SP is recognized and its identity confirmed by the IDP. They are configured as **stack-name-capitalized_SHIB_KEY** and **stack-name-capitalized_SHIB_CERT**. In order to create them, use the attached **keygen.sh** command-line tool in */config* folder.
Currently all deployments use the same certificates for **shibauth** service. If more need to be created, for each new stack, two more secrets need to be created, where **shibauth** is deployed. These ensure that the SP is recognized and its identity confirmed by the IDP. They are configured as **stack-name-capitalized_SHIB_KEY** and **stack-name-capitalized_SHIB_CERT**. In order to create them, use the attached **keygen.sh** command-line tool in */config* folder.
```bash
SPURL="https://emg.pass.copernicus.eu" # service initial access point made accessible by traefik
./config/keygen.sh -h $SPURL -y 20 -e https://$SPURL/shibboleth -n sp-signing -f
./config/keygen.sh -h $SPURL -y 20 -e $SPURL/shibboleth -n sp-signing -f
docker secret create EMG_SHIB_CERT sp-signing-cert.pem
docker secret create EMG_SHIB_KEY sp-signing-key.pem
```
......@@ -280,7 +281,7 @@ services:
target: /node_modules/eoxc
```
```
patch _cloned_eoxc/node_modules/backbone.marionette/lib/core/backbone.marionette.js vs/client/eoxc_marionette.patch
patch _cloned_eoxc/node_modules/backbone.marionette/lib/core/backbone.marionette.js client/eoxc_marionette.patch
```
Tear down the stack including data:
......@@ -330,13 +331,17 @@ You will log in into`/home/eox/data` directory which contains the 2 logging dir
# Documentation
Documentation `user-guide` and `operator-guide` is built with each commit in CI step `pages` - for master, staging and tags and is deployed to our Gitlab pages on https://esa.pages.eox.at/prism/vs/<user|operator>/<branch>/index.html or in CI step `review-docs` - deployed to https://esa.pages.eox.at/-/prism/vs/-/jobs/$CI_JOB_ID/artifacts/public/master/index.html
## Installation
If you want to build it locally, do the following:
```bash
python3 -m pip install sphinx recommonmark sphinx-autobuild
```
## Generate html and synchronize with client/html/user-guide
## Generate html
```bash
make html
......@@ -344,7 +349,7 @@ make html
# For watched html automatic building
make html-watch
# For pdf output and sync it to client/html/
# For pdf output
make latexpdf
# To shrink size of pdf
gs -sDEVICE=pdfwrite -dPDFSETTINGS=/ebook -dPrinted=false -q -o View-Server_-_User-Guide_small.pdf View-Server_-_User-Guide.pdf
......
......@@ -31,7 +31,7 @@ LABEL name="prism view server cache" \
vendor="EOX IT Services GmbH <https://eox.at>" \
license="MIT Copyright (C) 2019 EOX IT Services GmbH <https://eox.at>" \
type="prism view server cache" \
version="1.3.5"
version="1.3.11"
USER root
ADD install.sh \
......
CREATE OR REPLACE VIEW mapcache_items AS
SELECT
product_eoobject."begin_time" AS "begin_time",
product_eoobject."end_time" AS "end_time",
date_trunc('second', product_eoobject."begin_time") AS "begin_time",
CASE
WHEN date_trunc('second', product_eoobject."end_time") = product_eoobject."end_time"
THEN product_eoobject."end_time"
ELSE date_trunc('second', product_eoobject."end_time") + '1 second'::interval
END AS "end_time",
product_eoobject."footprint" AS "footprint",
CONCAT(to_char(product_eoobject."begin_time", 'YYYY-MM-DD"T"HH24:MI:SS"Z"'), CONCAT('/', to_char(product_eoobject."end_time", 'YYYY-MM-DD"T"HH24:MI:SS"Z"'))) AS "interval",
to_char(product_eoobject."begin_time", 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '/' || to_char(
CASE
WHEN date_trunc('second', product_eoobject."end_time") = product_eoobject."end_time"
THEN product_eoobject."end_time"
ELSE date_trunc('second', product_eoobject."end_time") + '1 second'::interval
END, 'YYYY-MM-DD"T"HH24:MI:SS"Z"'
) AS "interval",
collection_eoobject.identifier AS "collection"
FROM "coverages_product"
INNER JOIN "coverages_product_collections" ON ("coverages_product"."eoobject_ptr_id" = "coverages_product_collections"."product_id")
......
......@@ -7,11 +7,11 @@ type: application
# This is the chart version. This version number should be incremented each time you make changes
# to the chart and its templates, including the app version.
# Versions are expected to follow Semantic Versioning (https://semver.org/)
version: 1.0.3
version: 1.2.2
# This is the version number of the application being deployed. This version number should be
# incremented each time you make changes to the application.
appVersion: 1.3.5
appVersion: 1.3.11
maintainers:
- name: EOX IT Services GmbH
......
......@@ -39,27 +39,18 @@ var config = {
"searchEnabled" : true,
"searchDebounceTime": 500,
"language": "en",
"timeDomain": [
"2019-01-01T00:00:00Z",
"2020-12-31T23:59:59Z",
],
"timeDomain": {{ .Values.config.client.time_domain | default ( list "2019-01-01T00:00:00Z" "2020-12-31T23:59:59Z" ) | toJson }},
"constrainTimeDomain": true,
"displayTimeDomain": [
"2019-01-01T00:00:00Z",
"2020-12-31T23:59:59Z",
],
"displayInterval": "P1096D",
"selectedTimeDomain": [
"2020-08-01T00:00:00Z",
"2020-08-31T23:59:59Z",
],
"selectableInterval": "P1096D",
"displayTimeDomain": {{ .Values.config.client.display_time_domain | default ( list "2019-01-01T00:00:00Z" "2020-12-31T23:59:59Z" ) | toJson }},
"displayInterval": {{ .Values.config.client.display_interval | default "P1096D" | quote }},
"selectedTimeDomain": {{ .Values.config.client.selected_time_domain | default ( list "2020-08-01T00:00:00Z" "2020-08-31T23:59:59Z" ) | toJson }},
"selectableInterval": {{ .Values.config.client.selectable_interval | default "P1096D" | quote }},
"timeSliderControls": true,
"enableSingleLayerMode": false,
"maxTooltips": 1,
"center": [12, 49],
"zoom": 5,
"maxZoom": 17,
"center": {{ .Values.config.client.center | default ( list 12 49 ) | toJson }},
"zoom": {{ .Values.config.client.zoom | default 5 }},
"maxZoom": {{ .Values.config.client.max_zoom | default 17 }},
"footprintFillColor": "rgba(0, 0, 0, 0.2)",
"footprintStrokeColor": "rgba(0, 0, 0, 1)",
"filterFillColor": "rgba(0, 165, 255, 0)",
......
......@@ -85,6 +85,7 @@
{{- end -}}
# Check if the collection exists in the database and initialize the database only if not
{{- if .Values.config.collections }}
if python3 manage.py id check {{ index (keys .Values.config.collections) 0 | quote }}; then
echo "Initialize database"
......@@ -96,6 +97,14 @@ if python3 manage.py id check {{ index (keys .Values.config.collections) 0 | quo
echo "Initializing collection {{ index (keys .Values.config.collections) 0 | squote }}."
{{- if hasKey .Values.config.products "coverages" -}}
# import coverage types
echo {{ toJson .Values.config.products.coverages | squote }} | python3 manage.py coveragetype import --in --traceback
{{- end -}}
{{- range $product_type_name, $product_type := .Values.config.products.types | default dict }}
#
......@@ -143,3 +152,4 @@ if python3 manage.py id check {{ index (keys .Values.config.collections) 0 | quo
else
echo "Using existing database"
fi
{{- end }}
......@@ -53,6 +53,7 @@
<threaded_fetching>true</threaded_fetching>
<!-- Cache -->
{{- if .Values.config.objectStorage.cache.type }}
{{- if eq .Values.config.objectStorage.cache.type "swift" }}
<cache name="cache" type="swift">
<auth_url>{{ .Values.config.objectStorage.cache.auth_url_short }}</auth_url>
......@@ -87,6 +88,11 @@
<dbfile>/tmp/cache.db</dbfile>
</cache>
{{- end }}
{{- else }}
<cache name="cache" type="sqlite3">
<dbfile>/tmp/cache.db</dbfile>
</cache>
{{- end }}
{{- define "mapcache-layerid" -}}{{ .collection_name }}{{ if .level_name }}_{{ .level_name }}{{ end }}{{ if .sub_type_name }}__{{ .sub_type_name }}{{ end }}{{- end }}
......@@ -128,11 +134,17 @@
<source>{{ $layer_id }}</source>
<cache>cache</cache>
<!-- Grids -->
{{- range $grid := (get $.Values.config.cache.tilesets $layer_id | default dict ).grids }}
{{- $grids := (get $.Values.config.cache.tilesets $layer_id | default dict ).grids }}
{{- if $grids }}
{{- range $grid := $grids }}
<grid max-cached-zoom="{{ $grid.zoom }}" out-of-zoom-strategy="reassemble">{{ $grid.name }}</grid>
{{- end }}
{{- else }}
<grid max-cached-zoom="10" out-of-zoom-strategy="reassemble">WGS84</grid>
{{- end }}
<format>mixed</format>
<metatile>1 1</metatile>
<metabuffer>1</metabuffer>
<expires>{{ $.Values.config.cache.expires | default 3600 }}</expires>
<dimensions>
<assembly_type>stack</assembly_type>
......@@ -141,7 +153,7 @@
<dimension type="postgresql" name="time" default="2017/2019" time="true" unit="ISO8601">
<connection>host={{ $.Release.Name }}-database user={{ $.Values.config.database.DB_USER }} password={{ $.Values.config.database.DB_PW }} dbname={{ $.Values.config.database.DB_NAME }} port={{ $.Values.config.database.DB_PORT | default 5432 }}</connection>
<list_query>SELECT to_char(MIN(mapcache_items.begin_time), 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '/' || to_char(MAX(mapcache_items.end_time), 'YYYY-MM-DD"T"HH24:MI:SS"Z"') FROM mapcache_items WHERE mapcache_items.collection = '{{ $collection_name }}';</list_query>
<validate_query>SELECT * FROM (SELECT to_char(mapcache_items.begin_time, 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '/' || to_char(mapcache_items.end_time, 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS "interval" FROM mapcache_items WHERE (mapcache_items.collection = '{{ $collection_name }}' AND ((mapcache_items."begin_time" &lt; to_timestamp(:end_timestamp) AND mapcache_items."end_time" &gt; to_timestamp(:start_timestamp)) or (mapcache_items."begin_time" = mapcache_items."end_time" AND mapcache_items."begin_time" &lt;= to_timestamp(:end_timestamp) AND mapcache_items."end_time" &gt;= to_timestamp(:start_timestamp)))) AND mapcache_items."footprint" &amp;&amp; ST_MakeEnvelope(:minx, :miny, :maxx, :maxy, 4326) ORDER BY mapcache_items."end_time" DESC LIMIT 20) AS sub ORDER BY interval ASC;</validate_query>
<validate_query>SELECT * FROM (SELECT to_char(mapcache_items.begin_time, 'YYYY-MM-DD"T"HH24:MI:SS"Z"') || '/' || to_char(mapcache_items.end_time, 'YYYY-MM-DD"T"HH24:MI:SS"Z"') AS "interval" FROM mapcache_items WHERE (mapcache_items.collection = '{{ $collection_name }}' AND ((mapcache_items."begin_time" &lt; to_timestamp(:end_timestamp) AND mapcache_items."end_time" &gt; to_timestamp(:start_timestamp)) or (mapcache_items."begin_time" = mapcache_items."end_time" AND mapcache_items."begin_time" &lt;= to_timestamp(:end_timestamp) AND mapcache_items."end_time" &gt;= to_timestamp(:start_timestamp)))) AND mapcache_items."footprint" &amp;&amp; ST_Transform(ST_MakeEnvelope(:minx, :miny, :maxx, :maxy, CAST(SUBSTRING(:gridsrs, 6) AS INTEGER)), 4326) ORDER BY mapcache_items."end_time" DESC LIMIT 20) AS sub ORDER BY interval ASC;</validate_query>
</dimension>
</dimensions>
</tileset>
......
apiVersion: v1
kind: Secret
metadata:
name: django-secret-key
type: Opaque
data:
# Example:
DJANGO_SECRET_KEY: {{ .Values.config.django.DJANGO_SECRET_KEY | default (randAlphaNum 36) | b64enc }}
......@@ -50,16 +50,10 @@ spec:
- name: {{ $key }}
value: {{ $value | quote }}
{{- end }}
{{- range $key, $value := .Values.config.objectStorage.data }}
- name: {{ $key }}
value: {{ $value | quote }}
{{- end }}
{{- range $key, $value := .Values.config.redis }}
- name: {{ $key }}
value: {{ $value | quote }}
{{- end }}
- name: DJANGO_SECRET_KEY
value: {{ randAlphaNum 36 | quote }}
- name: REDIS_HOST
value: {{ .Release.Name }}-redis-master
- name: INIT_SCRIPTS
......@@ -74,6 +68,9 @@ spec:
value: {{ .Release.Name }}-database:{{ .Values.config.database.DB_PORT }} {{ .Release.Name }}-redis-master:{{ .Values.config.redis.REDIS_PORT }}
- name: DEBUG
value: "false"
envFrom:
- secretRef:
name: django-secret-key
volumeMounts:
- mountPath: /init-db.sh
name: init-db
......
......@@ -68,12 +68,6 @@ spec:
- name: {{ $key }}
value: {{ $value | quote }}
{{- end }}
{{- range $key, $value := .Values.config.objectStorage.data }}
- name: {{ $key }}
value: {{ $value | quote }}
{{- end }}
- name: DJANGO_SECRET_KEY
value: {{ randAlphaNum 36 | quote }}
- name: INIT_SCRIPTS
value: /configure.sh /init-db.sh /initialized.sh
- name: INSTALL_DIR
......@@ -84,6 +78,9 @@ spec:
value: /wait-initialized.sh {{- range $script := .Values.config.startup_scripts }} {{ $script }}{{ end }}
- name: WAIT_SERVICES
value: {{ .Release.Name }}-database:{{ .Values.config.database.DB_PORT }}
envFrom:
- secretRef:
name: django-secret-key
volumeMounts:
- mountPath: /init-db.sh
name: init-db
......
......@@ -17,19 +17,21 @@ config:
DJANGO_PASSWORD: djangopw
DJANGO_USER: djangouser
objectStorage:
download:
type: swift
username: "username"
password: "password"
tenant_name: "tenant_name"
tenant_id: "tenant_id"
region_name: "region_name"
auth_url: "auth_url"
auth_url_short: "auth_url_short"
auth_version: "auth_version"
user_domain_name: "user_domain_name"
data:
# swift:
download: {}
# example:
# type: swift
# username: "username"
# password: "password"
# tenant_name: "tenant_name"
# tenant_id: "tenant_id"
# region_name: "region_name"
# auth_url: "auth_url"
# auth_url_short: "auth_url_short"
# auth_version: "auth_version"
# user_domain_name: "user_domain_name"
data: {}
# examples:
# swift-data: the key is the storage name
# type: swift
# username: "username"
# password: "password"
......@@ -40,20 +42,20 @@ config:
# auth_url_short: "auth_url_short"
# auth_version: "auth_version"
# user_domain_name: "user_domain_name"
# s3:
# s3-data:
# type: S3
# bucket: "bucket"
# endpoint_url: "endpoint_url"
# access_key_id: "access_key_id"
# secret_access_key: "secret_access_key"
# region: "region"
cache:
type: S3
bucket: "bucket"
endpoint_url: "endpoint_url"
access_key_id: "access_key_id"
secret_access_key: "secret_access_key"
region: "region"
cache: {}
# type: S3
# bucket: "bucket"
# endpoint_url: "endpoint_url"
# access_key_id: "access_key_id"
# secret_access_key: "secret_access_key"
# region: "region"
redis:
REDIS_PORT: "6379"
REDIS_PREPROCESS_QUEUE_KEY: preprocess_queue
......@@ -67,48 +69,28 @@ config:
REDIS_REGISTER_SUCCESS_KEY: register-success_set
REDIS_SEED_QUEUE_KEY: seed_queue
client:
layers: {}
# VHR_IMAGE_2018_Level_1:
layers: []
# example:
# - id: VHR_IMAGE_2018_Level_1
# display_color: '#eb3700'
# title: VHR Image 2018 Level 1
# layer: VHR_IMAGE_2018_Level_1__TRUE_COLOR
# sub_layers:
# VHR_IMAGE_2018_Level_1__TRUE_COLOR:
# - id: VHR_IMAGE_2018_Level_1__TRUE_COLOR
# label: VHR Image 2018 True Color
# VHR_IMAGE_2018_Level_1__masked_validity:
# - id: VHR_IMAGE_2018_Level_1__masked_validity
# label: VHR Image 2018 True Color with masked validity
# VHR_IMAGE_2018_Level_1__FALSE_COLOR:
# - id: VHR_IMAGE_2018_Level_1__FALSE_COLOR
# label: VHR Image 2018 False Color
# VHR_IMAGE_2018_Level_1__NDVI:
# label: VHR Image 2018 NDVI
# VHR_IMAGE_2018_Level_3:
# display_color: '#eb3700'
# title: VHR Image 2018 Level 3
# layer: VHR_IMAGE_2018_Level_3__TRUE_COLOR
# sub_layers:
# VHR_IMAGE_2018_Level_3__TRUE_COLOR:
# label: VHR Image 2018 True Color
# VHR_IMAGE_2018_Level_3__masked_validity:
# label: VHR Image 2018 True Color with masked validity
# VHR_IMAGE_2018_Level_3__FALSE_COLOR:
# label: VHR Image 2018 False Color
# VHR_IMAGE_2018_Level_3__NDVI:
# - id: VHR_IMAGE_2018_Level_1__NDVI
# label: VHR Image 2018 NDVI
overlay_layers: {}
# example:
# VHR_IMAGE_2018_Level_3__outlines:
# display_color: '#187465'
# title: VHR Image 2018 Level 3 Outlines
# layer: VHR_IMAGE_2018_Level_3__outlines
# VHR_IMAGE_2018_Level_3__masked_validity__Full:
# display_color: '#187465'
# title: VHR Image 2018 Level 3 True Color with masked validity Full Coverage
# layer: VHR_IMAGE_2018_Level_3__masked_validity__Full
# VHR_IMAGE_2018_Level_3__Full:
# display_color: '#187465'
# title: VHR Image 2018 Level 3 True Color Full Coverage
# layer: VHR_IMAGE_2018_Level_3__Full
# cache related options
cache:
metadata:
title: PRISM Data Access Service (PASS) developed by EOX
......@@ -143,39 +125,15 @@ config:
expires: 3600
key: /{tileset}/{grid}/{dim}/{z}/{x}/{y}.{ext}
tilesets: {}
# example:
# VHR_IMAGE_2018:
# title: VHR Image 2018 True Color
# abstract: VHR Image 2018 True Color
# VHR_IMAGE_2018__TRUE_COLOR:
# title: VHR Image 2018 True Color
# abstract: VHR Image 2018 True Color
# VHR_IMAGE_2018__FALSE_COLOR:
# title: VHR Image 2018 False Color
# abstract: VHR Image 2018 False Color
# VHR_IMAGE_2018__NDVI:
# title: VHR Image 2018 NDVI
# abstract: VHR Image 2018 NDVI
# style: earth
# VHR_IMAGE_2018_Level_1__TRUE_COLOR:
# title: VHR Image 2018 Level 1 True Color
# abstract: VHR Image 2018 Level 1 True Color
# VHR_IMAGE_2018_Level_1__FALSE_COLOR:
# title: VHR Image 2018 Level 1 False Color
# abstract: VHR Image 2018 Level 1 False Color
# VHR_IMAGE_2018_Level_1__NDVI:
# title: VHR Image 2018 Level 1 NDVI
# abstract: VHR Image 2018 Level 1 NDVI
# style: earth
# VHR_IMAGE_2018_Level_1__TRUE_COLOR:
# title: VHR Image 2018 Level 3 True Color
# abstract: VHR Image 2018 Level 3 True Color
# VHR_IMAGE_2018_Level_1__FALSE_COLOR:
# title: VHR Image 2018 Level 3 False Color
# abstract: VHR Image 2018 Level 3 False Color
# VHR_IMAGE_2018_Level_1__NDVI:
# title: VHR Image 2018 Level 3 NDVI
# abstract: VHR Image 2018 Level 3 NDVI
# style: earth
# grids:
# - name: GoogleMapsCompatible
# zoom: 15
# - name: WGS84
# - zoom: 14
preprocessor:
metadata_glob: '*GSC*.xml'
......@@ -203,45 +161,45 @@ config:
- BIGTIFF=IF_SAFER
- OVERVIEWS=AUTO
types: {}
# example:
# PH1B: # just to pass validation
# nested: true
registrar:
schemes:
- type: gsc
schemes: []
# example:
# - type: gsc
# filter: sample/filter
# kwargs: {}
# mapping of collection name to objects
collections: {}
# VHR_IMAGE_2018:
# product_types:
# - PL00
# - DM02
# - KS03
# - KS04
# - PH1A
# - PH1B
# - SP06
# - SP07
# - SW00
# - TR00
# product_levels:
# - Level_1
# - Level_3
# coverage_types:
# - RGBNir
# example
# VHR_IMAGE_2018:
# product_types:
# example:
# - PL00
# product_levels:
# example:
# - Level_1
# - Level_3
# coverage_types:
# example:
# - RGBNir
products:
type_extractor:
xpath:
- /gsc:report/gsc:opt_metadata/gml:using/eop:EarthObservationEquipment/eop:platform/eop:Platform/eop:shortName/text()
- /gsc:report/gsc:sar_metadata/gml:using/eop:EarthObservationEquipment/eop:platform/eop:Platform/eop:shortName/text()
namespace_map:
namespace_map: null
level_extractor:
xpath:
namespace_map:
xpath: null
namespace_map: null
types: {}
# # example:
# PL00:
# coverages:
# PL00: RGBNir
......@@ -281,7 +239,23 @@ config:
# validity:
# validity: true
coverages: {}
coverages: []
# # example:
# - name: grayscale
# data_type: Uint16
# bands:
# - identifier: gray
# name: gray
# definition: http://www.opengis.net/def/property/OGC/0/Radiance
# description: Gray Channel
# gdal_interpretation: GrayBand
# nil_values:
# - reason: http://www.opengis.net/def/nil/OGC/0/unknown
# value: 0
# uom: W.m-2.Sr-1
# significant_figures: 5
# allowed_value_ranges:
# - [0, 65535]
# only RGBNir? SAR? complete list with all options here?
database:
......@@ -309,6 +283,11 @@ redis:
preprocessor:
replicaCount: 1