From b03ca0fb0a9d91ffb47f9ec7ecc9132aef36c1c4 Mon Sep 17 00:00:00 2001 From: Elizabeth Paige Harper Date: Tue, 9 Apr 2024 12:26:06 -0400 Subject: [PATCH] environment documentation and docker-compose updates --- .../main/kotlin/vdi/component/env/EnvKey.kt | 4 +- docker-compose.yml | 49 +++++- env-vars.adoc | 153 +++++++++++------- example.full.env | 30 ++++ example.min.env | 11 -- 5 files changed, 174 insertions(+), 73 deletions(-) diff --git a/components/env/src/main/kotlin/vdi/component/env/EnvKey.kt b/components/env/src/main/kotlin/vdi/component/env/EnvKey.kt index 0643d244..a8a9b906 100644 --- a/components/env/src/main/kotlin/vdi/component/env/EnvKey.kt +++ b/components/env/src/main/kotlin/vdi/component/env/EnvKey.kt @@ -651,12 +651,12 @@ object EnvKey { object Reconciler { /** - * Whether the reconciler should be enabled at all. + * Whether the full dataset reconciliation daemon should be enabled at all. * * Type: Boolean * Required: no */ - const val Enabled = "RECONCILER_ENABLED" + const val Enabled = "RECONCILER_FULL_ENABLED" /** * Run interval for the full reconciliation process. diff --git a/docker-compose.yml b/docker-compose.yml index ba0da0f5..29ad92a9 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -29,7 +29,6 @@ services: labels: - "com.centurylinklabs.watchtower.enable=${VDI_POSTGRES_WATCHTOWER:-false}" - kafka: image: veupathdb/apache-kafka:${VDI_KAFKA_TAG:-latest} healthcheck: @@ -123,12 +122,18 @@ services: UPDATE_META_HANDLER_WORKER_POOL_SIZE: ${UPDATE_META_HANDLER_WORKER_POOL_SIZE} UPDATE_META_HANDLER_WORK_QUEUE_SIZE: ${UPDATE_META_HANDLER_WORK_QUEUE_SIZE} + # Reconciliation Handler + RECONCILIATION_HANDLER_KAFKA_CONSUMER_CLIENT_ID: ${RECONCILIATION_HANDLER_KAFKA_CONSUMER_CLIENT_ID} + RECONCILIATION_HANDLER_WORKER_POOL_SIZE: ${RECONCILIATION_HANDLER_WORKER_POOL_SIZE} + RECONCILIATION_HANDLER_WORK_QUEUE_SIZE: ${RECONCILIATION_HANDLER_WORK_QUEUE_SIZE} + # Cache DB CACHE_DB_USERNAME: ${CACHE_DB_USERNAME:?} CACHE_DB_PASSWORD: ${CACHE_DB_PASSWORD:?} CACHE_DB_NAME: ${CACHE_DB_NAME:?} CACHE_DB_HOST: ${CACHE_DB_HOST:?} - CACHE_DB_PORT: ${CACHE_DB_PORT:-5432} + CACHE_DB_PORT: ${CACHE_DB_PORT} + CACHE_DB_POOL_SIZE: ${CACHE_DB_POOL_SIZE} # Kafka KAFKA_SERVERS: ${KAFKA_SERVERS:?} @@ -141,6 +146,7 @@ services: KAFKA_TOPIC_SHARE_TRIGGERS: ${KAFKA_TOPIC_SHARE_TRIGGERS} KAFKA_TOPIC_SOFT_DELETE_TRIGGERS: ${KAFKA_TOPIC_SOFT_DELETE_TRIGGERS} KAFKA_TOPIC_UPDATE_META_TRIGGERS: ${KAFKA_TOPIC_UPDATE_META_TRIGGERS} + KAFKA_TOPIC_RECONCILIATION_TRIGGERS: ${KAFKA_TOPIC_RECONCILIATION_TRIGGERS} KAFKA_MESSAGE_KEY_HARD_DELETE_TRIGGERS: ${KAFKA_MESSAGE_KEY_HARD_DELETE_TRIGGERS} KAFKA_MESSAGE_KEY_IMPORT_TRIGGERS: ${KAFKA_MESSAGE_KEY_IMPORT_TRIGGERS} @@ -148,6 +154,7 @@ services: KAFKA_MESSAGE_KEY_SHARE_TRIGGERS: ${KAFKA_MESSAGE_KEY_SHARE_TRIGGERS} KAFKA_MESSAGE_KEY_SOFT_DELETE_TRIGGERS: ${KAFKA_MESSAGE_KEY_SOFT_DELETE_TRIGGERS} KAFKA_MESSAGE_KEY_UPDATE_META_TRIGGERS: ${KAFKA_MESSAGE_KEY_UPDATE_META_TRIGGERS} + KAFKA_MESSAGE_KEY_RECONCILIATION_TRIGGERS: ${KAFKA_MESSAGE_KEY_RECONCILIATION_TRIGGERS} # RabbitMQ GLOBAL_RABBIT_HOST: ${GLOBAL_RABBIT_HOST:?} @@ -178,7 +185,7 @@ services: S3_BUCKET_NAME: ${S3_BUCKET_NAME:?} # Reconciler - RECONCILER_ENABLED: ${RECONCILER_ENABLED} + RECONCILER_FULL_ENABLED: ${RECONCILER_FULL_ENABLED} RECONCILER_FULL_RUN_INTERVAL: ${RECONCILER_FULL_RUN_INTERVAL} RECONCILER_SLIM_RUN_INTERVAL: ${RECONCILER_SLIM_RUN_INTERVAL} @@ -379,6 +386,12 @@ services: SITE_BUILD: ${SITE_BUILD:?} DATASET_INSTALL_ROOT: ${DATASET_DIRECTORY_TARGET_PATH:?} + IMPORT_SCRIPT_MAX_DURATION: 
${PLUGIN_HANDLER_NOOP_IMPORT_SCRIPT_MAX_DURATION} + INSTALL_DATA_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_NOOP_INSTALL_DATA_SCRIPT_MAX_DURATION} + INSTALL_META_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_NOOP_INSTALL_META_SCRIPT_MAX_DURATION} + UNINSTALL_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_NOOP_UNINSTALL_SCRIPT_MAX_DURATION} + CHECK_COMPAT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_NOOP_CHECK_COMPAT_SCRIPT_MAX_DURATION} + DB_CONNECTION_ENABLED_AMOEBA: ${DB_CONNECTION_ENABLED_AMOEBA} DB_CONNECTION_NAME_AMOEBA: ${DB_CONNECTION_NAME_AMOEBA:?} DB_CONNECTION_LDAP_AMOEBA: ${DB_CONNECTION_LDAP_AMOEBA:?} @@ -518,6 +531,12 @@ services: SITE_BUILD: ${SITE_BUILD:?} DATASET_INSTALL_ROOT: ${DATASET_DIRECTORY_TARGET_PATH:?} + IMPORT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_GENELIST_IMPORT_SCRIPT_MAX_DURATION} + INSTALL_DATA_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_GENELIST_INSTALL_DATA_SCRIPT_MAX_DURATION} + INSTALL_META_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_GENELIST_INSTALL_META_SCRIPT_MAX_DURATION} + UNINSTALL_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_GENELIST_UNINSTALL_SCRIPT_MAX_DURATION} + CHECK_COMPAT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_GENELIST_CHECK_COMPAT_SCRIPT_MAX_DURATION} + DB_CONNECTION_ENABLED_AMOEBA: ${DB_CONNECTION_ENABLED_AMOEBA} DB_CONNECTION_NAME_AMOEBA: ${DB_CONNECTION_NAME_AMOEBA:?} DB_CONNECTION_LDAP_AMOEBA: ${DB_CONNECTION_LDAP_AMOEBA:?} @@ -657,7 +676,11 @@ services: SITE_BUILD: ${SITE_BUILD:?} DATASET_INSTALL_ROOT: ${DATASET_DIRECTORY_TARGET_PATH:?} - IMPORT_SCRIPT_MAX_DURATION: 6h + IMPORT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_ISASIMPLE_IMPORT_SCRIPT_MAX_DURATION} + INSTALL_DATA_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_ISASIMPLE_INSTALL_DATA_SCRIPT_MAX_DURATION} + INSTALL_META_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_ISASIMPLE_INSTALL_META_SCRIPT_MAX_DURATION} + UNINSTALL_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_ISASIMPLE_UNINSTALL_SCRIPT_MAX_DURATION} + CHECK_COMPAT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_ISASIMPLE_CHECK_COMPAT_SCRIPT_MAX_DURATION} DB_CONNECTION_ENABLED_AMOEBA: ${DB_CONNECTION_ENABLED_AMOEBA} DB_CONNECTION_NAME_AMOEBA: ${DB_CONNECTION_NAME_AMOEBA:?} @@ -798,6 +821,12 @@ services: SITE_BUILD: ${SITE_BUILD:?} DATASET_INSTALL_ROOT: ${DATASET_DIRECTORY_TARGET_PATH:?} + IMPORT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIGWIG_IMPORT_SCRIPT_MAX_DURATION} + INSTALL_DATA_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIGWIG_INSTALL_DATA_SCRIPT_MAX_DURATION} + INSTALL_META_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIGWIG_INSTALL_META_SCRIPT_MAX_DURATION} + UNINSTALL_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIGWIG_UNINSTALL_SCRIPT_MAX_DURATION} + CHECK_COMPAT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIGWIG_CHECK_COMPAT_SCRIPT_MAX_DURATION} + DB_CONNECTION_ENABLED_AMOEBA: ${DB_CONNECTION_ENABLED_AMOEBA} DB_CONNECTION_NAME_AMOEBA: ${DB_CONNECTION_NAME_AMOEBA:?} DB_CONNECTION_LDAP_AMOEBA: ${DB_CONNECTION_LDAP_AMOEBA:?} @@ -937,6 +966,12 @@ services: SITE_BUILD: ${SITE_BUILD:?} DATASET_INSTALL_ROOT: ${DATASET_DIRECTORY_TARGET_PATH:?} + IMPORT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIOM_IMPORT_SCRIPT_MAX_DURATION} + INSTALL_DATA_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIOM_INSTALL_DATA_SCRIPT_MAX_DURATION} + INSTALL_META_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIOM_INSTALL_META_SCRIPT_MAX_DURATION} + UNINSTALL_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIOM_UNINSTALL_SCRIPT_MAX_DURATION} + CHECK_COMPAT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_BIOM_CHECK_COMPAT_SCRIPT_MAX_DURATION} + DB_CONNECTION_ENABLED_AMOEBA: ${DB_CONNECTION_ENABLED_AMOEBA} DB_CONNECTION_NAME_AMOEBA: ${DB_CONNECTION_NAME_AMOEBA:?} DB_CONNECTION_LDAP_AMOEBA: 
${DB_CONNECTION_LDAP_AMOEBA:?} @@ -1076,6 +1111,12 @@ services: SITE_BUILD: ${SITE_BUILD:?} DATASET_INSTALL_ROOT: ${DATASET_DIRECTORY_TARGET_PATH:?} + IMPORT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_RNASEQ_IMPORT_SCRIPT_MAX_DURATION} + INSTALL_DATA_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_RNASEQ_INSTALL_DATA_SCRIPT_MAX_DURATION} + INSTALL_META_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_RNASEQ_INSTALL_META_SCRIPT_MAX_DURATION} + UNINSTALL_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_RNASEQ_UNINSTALL_SCRIPT_MAX_DURATION} + CHECK_COMPAT_SCRIPT_MAX_DURATION: ${PLUGIN_HANDLER_RNASEQ_CHECK_COMPAT_SCRIPT_MAX_DURATION} + DB_CONNECTION_ENABLED_AMOEBA: ${DB_CONNECTION_ENABLED_AMOEBA} DB_CONNECTION_NAME_AMOEBA: ${DB_CONNECTION_NAME_AMOEBA:?} DB_CONNECTION_LDAP_AMOEBA: ${DB_CONNECTION_LDAP_AMOEBA:?} diff --git a/env-vars.adoc b/env-vars.adoc index 4bdc38e6..da121f88 100644 --- a/env-vars.adoc +++ b/env-vars.adoc @@ -22,29 +22,24 @@ application databases or plugin handler service configurations. [source, shell] ---- -# Rest Service Configuration -VDI_SERVICE_HTTP_PORT=80 +DATASET_DIRECTORY_SOURCE_PATH=/var/www/Common/userDatasets/vdi_datasets_feat_s/ +DATASET_DIRECTORY_TARGET_PATH=/datasets -AUTH_SECRET_KEY=someSecretKey - -ADMIN_AUTH_TOKEN=someAdminAuthToken - -SITE_BUILD=build-65 - -LDAP_SERVER=some.ldap.server:1234,other.ldap.server:2313 +AUTH_SECRET_KEY= +ADMIN_AUTH_TOKEN= +LDAP_SERVER= ORACLE_BASE_DN=ou=applications,dc=apidb,dc=org -ACCT_DB_TNS_NAME=some-tns-name -ACCT_DB_USER=someUser -ACCT_DB_PASS=somePassword +ACCT_DB_TNS_NAME=acctdbn +ACCT_DB_USER= +ACCT_DB_PASS= ACCT_DB_POOL_SIZE=5 -USER_DB_TNS_NAME=some-tns-name -USER_DB_USER=someUser -USER_DB_PASS=somePassword +USER_DB_TNS_NAME=apicommn +USER_DB_USER= +USER_DB_PASS= USER_DB_POOL_SIZE=5 -# External RabbitMQ Connection GLOBAL_RABBIT_USERNAME=someUser GLOBAL_RABBIT_PASSWORD=somePassword GLOBAL_RABBIT_HOST=rabbit-external @@ -52,21 +47,11 @@ GLOBAL_RABBIT_VDI_EXCHANGE_NAME=vdi-bucket-notifications GLOBAL_RABBIT_VDI_QUEUE_NAME=vdi-bucket-notifications GLOBAL_RABBIT_VDI_ROUTING_KEY=vdi-bucket-notifications -# Handler Configurations -IMPORT_HANDLER_KAFKA_CONSUMER_CLIENT_ID=import-handler -UPDATE_META_HANDLER_KAFKA_CONSUMER_CLIENT_ID=update-meta-handler -INSTALL_DATA_HANDLER_KAFKA_CONSUMER_CLIENT_ID=install-data-handler -SHARE_HANDLER_KAFKA_CONSUMER_CLIENT_ID=share-handler -SOFT_DELETE_HANDLER_KAFKA_CONSUMER_CLIENT_ID=soft-delete-handler -HARD_DELETE_HANDLER_KAFKA_CONSUMER_CLIENT_ID=hard-delete-handler - -# Internal Kafka Connection KAFKA_SERVERS=kafka:9092 KAFKA_PRODUCER_CLIENT_ID=vdi-event-router KAFKA_CONSUMER_GROUP_ID=vdi-kafka-consumers KAFKA_ADVERTISED_LISTENERS=PLAINTEXT://kafka:9092 -# External MinIO Connection S3_HOST=minio-external S3_PORT=9000 S3_USE_HTTPS=true @@ -74,13 +59,24 @@ S3_ACCESS_TOKEN=someToken S3_SECRET_KEY=someSecretKey S3_BUCKET_NAME=some-other-bucket -# Internal Cache DB Connection CACHE_DB_USERNAME=someUser CACHE_DB_PASSWORD=somePassword CACHE_DB_NAME=vdi CACHE_DB_HOST=cache-db -CACHE_DB_PORT=5432 -CACHE_DB_POOL_SIZE=5 + +SITE_BUILD=build-65 + +PLUGIN_HANDLER_NOOP_NAME=noop +PLUGIN_HANDLER_NOOP_DISPLAY_NAME="Example Plugin" +PLUGIN_HANDLER_NOOP_VERSION=1.0 +PLUGIN_HANDLER_NOOP_ADDRESS=plugin-example:80 + +DB_CONNECTION_ENABLED_{SOME_PROJECT}=true +DB_CONNECTION_NAME_{SOME_PROJECT}=ProjectDB +DB_CONNECTION_LDAP_{SOME_PROJECT}=dbTnsName +DB_CONNECTION_PASS_{SOME_PROJECT}=someDBPass +DB_CONNECTION_DATA_SCHEMA_{SOME_PROJECT}=vdi_datasets_dev_n +DB_CONNECTION_CONTROL_SCHEMA_{SOME_PROJECT}=vdi_control_dev_n ---- === Modules @@ -92,7 +88,7 @@ 
CACHE_DB_POOL_SIZE=5 | Req. | Name | Type | Description | -| VDI_SERVICE_HTTP_PORT +| SERVER_PORT | uint16 | Port exposed and used by the VDI REST API service. @@ -117,6 +113,11 @@ authentication. | String | Auth token value used to authenticate requests to administration endpoints. +| +| ENABLE_CORS +| boolean +| Enable cross origin requests (used for development) + | | MAX_UPLOAD_FILE_SIZE | uint64 @@ -146,26 +147,6 @@ authentication. | ACCT_DB_POOL_SIZE | uint8 | - -| :exclamation: -| USER_DB_TNS_NAME -| String -| - -| :exclamation: -| USER_DB_USER -| String -| - -| :exclamation: -| USER_DB_PASS -| String -| - -| -| USER_DB_POOL_SIZE -| uint8 -| |=== ==== Dataset Reinstaller @@ -202,7 +183,7 @@ authentication. | uint16 | Size the worker pool job queue is allowed to fill to before blocking. -| :exclamation: +| | HARD_DELETE_HANDLER_KAFKA_CONSUMER_CLIENT_ID | String | Kafka client ID for the `KafkaConsumer` that will be used to receive messages @@ -227,7 +208,7 @@ from the VDI Kafka instance. + | uint16 | Size the worker pool job queue is allowed to fill to before blocking. -| :exclamation: +| | IMPORT_HANDLER_KAFKA_CONSUMER_CLIENT_ID | String | Kafka client ID for the `KafkaConsumer` that will be used to receive messages @@ -252,7 +233,7 @@ from the VDI Kafka instance. + | uint16 | Size the worker pool job queue is allowed to fill to before blocking. -| :exclamation: +| | INSTALL_DATA_HANDLER_KAFKA_CONSUMER_CLIENT_ID | String | Kafka client ID for the `KafkaConsumer` that will be used to receive messages @@ -285,6 +266,55 @@ VDI system shutdown signal. |=== +==== Dataset Reconciler + +[%header, cols="1,3m,3m,8"] +|=== +| Req. | Name | Type | Description + +| +| RECONCILER_FULL_ENABLED +| boolean +| Whether the full dataset reconciliation process is enabled. + +| +| RECONCILER_FULL_RUN_INTERVAL +| Duration +| Interval at which the full reconciliation process will run. + +| +| RECONCILER_SLIM_RUN_INTERVAL +| Duration +| Interval at which the slim reconciliation process will run. +|=== + + +==== Reconciliation Event Handler + +[%header, cols="1,3m,3m,8"] +|=== +| Req. | Name | Type | Description + +| +| RECONCILIATION_HANDLER_WORKER_POOL_SIZE +| uint8 +| Number of workers to use while processing reconciliation events. + +| +| RECONCILIATION_HANDLER_WORK_QUEUE_SIZE +| Duration +| Size the worker pool job queue is allowed to fill to before blocking. + +| +| RECONCILIATION_HANDLER_KAFKA_CONSUMER_CLIENT_ID +| String +| Kafka client ID for the `KafkaConsumer` that will be used to receive messages +from the VDI Kafka instance. + + +*THIS VALUE MUST BE UNIQUE ACROSS ALL KAFKA CLIENT IDS* +|=== + + ==== Share Trigger Handler [%header, cols="1,3m,3m,8"] @@ -301,7 +331,7 @@ VDI system | uint16 | Size the worker pool job queue is allowed to fill to before blocking. -| :exclamation: +| | SHARE_HANDLER_KAFKA_CONSUMER_CLIENT_ID | String | Kafka client ID for the `KafkaConsumer` that will be used to receive messages @@ -326,7 +356,7 @@ from the VDI Kafka instance. + | uint16 | Size the worker pool job queue is allowed to fill to before blocking. -| :exclamation: +| | SOFT_DELETE_HANDLER_KAFKA_CONSUMER_CLIENT_ID | String | Kafka client ID for the `KafkaConsumer` that will be used to receive messages @@ -351,7 +381,7 @@ from the VDI Kafka instance. + | uint16 | Size the worker pool job queue is allowed to fill to before blocking. 
-| :exclamation: +| | UPDATE_META_HANDLER_KAFKA_CONSUMER_CLIENT_ID | String | Kafka client ID for the `KafkaConsumer` that will be used to receive messages @@ -841,6 +871,12 @@ An update-meta event is the creation or overwriting of the dataset metadata object in MinIO. These events will trigger a call to the plugin handler server to install or update the metadata for the dataset in the target application databases. + +| +| KAFKA_TOPIC_RECONCILIATION_TRIGGERS +| String +| Name of the reconciliation trigger topic that messages will be routed to for +events fired by the dataset reconciler. |=== ===== Message Keys @@ -882,6 +918,11 @@ value will be ignored by the VDI service. | KAFKA_MESSAGE_KEY_UPDATE_META_TRIGGERS | String | Message key for update-meta trigger events. + +| +| KAFKA_MESSAGE_KEY_RECONCILIATION_TRIGGERS +| String +| Message key for reconciliation trigger events. |=== ==== Rabbit diff --git a/example.full.env b/example.full.env index 715a3567..7cf1e1a2 100644 --- a/example.full.env +++ b/example.full.env @@ -446,6 +446,11 @@ PLUGIN_HANDLER_NOOP_VERSION=1.0 PLUGIN_HANDLER_NOOP_ADDRESS=plugin-example:80 PLUGIN_HANDLER_NOOP_PROJECT_IDS= PLUGIN_HANDLER_NOOP_CUSTOM_PATH= +PLUGIN_HANDLER_NOOP_IMPORT_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_NOOP_INSTALL_DATA_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_NOOP_INSTALL_META_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_NOOP_UNINSTALL_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_NOOP_CHECK_COMPAT_SCRIPT_MAX_DURATION=1h # GeneList Plugin PLUGIN_HANDLER_GENELIST_NAME=genelist @@ -454,6 +459,11 @@ PLUGIN_HANDLER_GENELIST_VERSION=1.0 PLUGIN_HANDLER_GENELIST_ADDRESS=plugin-genelist:80 PLUGIN_HANDLER_GENELIST_PROJECT_IDS=AmoebaDB,CryptoDB,FungiDB,GiardiaDB,HostDB,MicrosporidiaDB,PiroplasmaDB,PlasmoDB,ToxoDB,TrichDB,TriTrypDB,VectorBase PLUGIN_HANDLER_GENELIST_CUSTOM_PATH= +PLUGIN_HANDLER_GENELIST_IMPORT_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_GENELIST_INSTALL_DATA_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_GENELIST_INSTALL_META_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_GENELIST_UNINSTALL_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_GENELIST_CHECK_COMPAT_SCRIPT_MAX_DURATION=1h # ISASimple Plugin PLUGIN_HANDLER_ISASIMPLE_NAME=isasimple @@ -462,6 +472,11 @@ PLUGIN_HANDLER_ISASIMPLE_VERSION=1.0 PLUGIN_HANDLER_ISASIMPLE_ADDRESS=plugin-isasimple:80 PLUGIN_HANDLER_ISASIMPLE_PROJECT_IDS=ClinEpiDB PLUGIN_HANDLER_ISASIMPLE_CUSTOM_PATH= +PLUGIN_HANDLER_ISASIMPLE_IMPORT_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_ISASIMPLE_INSTALL_DATA_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_ISASIMPLE_INSTALL_META_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_ISASIMPLE_UNINSTALL_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_ISASIMPLE_CHECK_COMPAT_SCRIPT_MAX_DURATION=1h # BigWig Plugin PLUGIN_HANDLER_BIGWIG_NAME=bigwigfiles @@ -470,6 +485,11 @@ PLUGIN_HANDLER_BIGWIG_VERSION=1.0 PLUGIN_HANDLER_BIGWIG_ADDRESS=plugin-bigwig:80 PLUGIN_HANDLER_BIGWIG_PROJECT_IDS=AmoebaDB,CryptoDB,FungiDB,GiardiaDB,HostDB,MicrosporidiaDB,PiroplasmaDB,PlasmoDB,ToxoDB,TrichDB,TriTrypDB,VectorBase PLUGIN_HANDLER_BIGWIG_CUSTOM_PATH= +PLUGIN_HANDLER_BIGWIG_IMPORT_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_BIGWIG_INSTALL_DATA_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_BIGWIG_INSTALL_META_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_BIGWIG_UNINSTALL_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_BIGWIG_CHECK_COMPAT_SCRIPT_MAX_DURATION=1h # BIOM Plugin PLUGIN_HANDLER_BIOM_NAME=biom @@ -478,6 +498,11 @@ PLUGIN_HANDLER_BIOM_VERSION=1.0 PLUGIN_HANDLER_BIOM_ADDRESS=plugin-biom:80 PLUGIN_HANDLER_BIOM_PROJECT_IDS=MicrobiomeDB PLUGIN_HANDLER_BIOM_CUSTOM_PATH= 
+PLUGIN_HANDLER_BIOM_IMPORT_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_BIOM_INSTALL_DATA_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_BIOM_INSTALL_META_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_BIOM_UNINSTALL_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_BIOM_CHECK_COMPAT_SCRIPT_MAX_DURATION=1h # RNAseq Plugin PLUGIN_HANDLER_RNASEQ_NAME=rnaseq @@ -486,6 +511,11 @@ PLUGIN_HANDLER_RNASEQ_VERSION=1.0 PLUGIN_HANDLER_RNASEQ_ADDRESS=plugin-rnaseq:80 PLUGIN_HANDLER_RNASEQ_PROJECT_IDS=AmoebaDB,CryptoDB,FungiDB,GiardiaDB,HostDB,MicrosporidiaDB,PiroplasmaDB,PlasmoDB,ToxoDB,TrichDB,TriTrypDB,VectorBase PLUGIN_HANDLER_RNASEQ_CUSTOM_PATH= +PLUGIN_HANDLER_RNASEQ_IMPORT_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_RNASEQ_INSTALL_DATA_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_RNASEQ_INSTALL_META_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_RNASEQ_UNINSTALL_SCRIPT_MAX_DURATION=1h +PLUGIN_HANDLER_RNASEQ_CHECK_COMPAT_SCRIPT_MAX_DURATION=1h # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # # diff --git a/example.min.env b/example.min.env index f3575d8a..eec8941b 100644 --- a/example.min.env +++ b/example.min.env @@ -1,15 +1,4 @@ # Image Versions -VDI_SERVICE_TAG=latest -VDI_CACHE_DB_TAG=latest -VDI_KAFKA_TAG=3.4.0 - -VDI_PLUGIN_EXAMPLE_TAG=latest -VDI_PLUGIN_GENELIST_TAG=latest -VDI_PLUGIN_ISASIMPLE_TAG=latest -VDI_PLUGIN_BIGWIG_TAG=latest -VDI_PLUGIN_BIOM_TAG=latest -VDI_PLUGIN_RNASEQ_TAG=latest - DATASET_DIRECTORY_SOURCE_PATH=/var/www/Common/userDatasets/vdi_datasets_feat_s/ DATASET_DIRECTORY_TARGET_PATH=/datasets
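
---

Side note on consuming the new settings: the `*_SCRIPT_MAX_DURATION` and `RECONCILER_FULL_*` values introduced above are ordinary environment variables, so a deployment only needs to export them before the containers start. The following is a minimal, self-contained Kotlin sketch of how such values could be read and parsed. It is illustration only, not the actual VDI configuration code: the helper names and the fallback defaults are made up for this example, and the real service resolves these keys through the `EnvKey` constants touched in this patch.

[source, kotlin]
----
import kotlin.time.Duration
import kotlin.time.Duration.Companion.hours

// Illustrative helpers only; names and defaults are placeholders, not VDI API.
fun envFlag(key: String, default: Boolean): Boolean =
  System.getenv(key)?.toBooleanStrictOrNull() ?: default

fun envDuration(key: String, default: Duration): Duration =
  System.getenv(key)?.let { Duration.parseOrNull(it) } ?: default

fun main() {
  // Renamed by this patch from RECONCILER_ENABLED. The fallback value here is
  // a placeholder for the sketch, not the service's documented default.
  val fullReconcilerEnabled = envFlag("RECONCILER_FULL_ENABLED", default = true)

  // One of the new per-plugin timeout variables; "1h" mirrors the values
  // added to example.full.env above.
  val importTimeout = envDuration(
    "PLUGIN_HANDLER_RNASEQ_IMPORT_SCRIPT_MAX_DURATION",
    default = 1.hours,
  )

  println("full reconciler enabled: $fullReconcilerEnabled")
  println("rnaseq import script timeout: $importTimeout")
}
----

Since both docker-compose.yml and EnvKey.kt switch from `RECONCILER_ENABLED` to `RECONCILER_FULL_ENABLED` in this patch, existing deployment `.env` files that still set the old name should be updated to the new one.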