47 changes: 47 additions & 0 deletions .github/workflows/hdc-pipeline-consumer.yml
@@ -0,0 +1,47 @@
name: HDC ci/cd pipeline consumer

permissions:
  contents: write
  issues: write
  pull-requests: write

on:
  push:
    branches:
      - main
    paths:
      - 'consumer/**'
  pull_request:
    branches:
      - main
    paths:
      - 'consumer/**'

jobs:
  run_tests_hdc:
    uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/run_tests.yml@main
    with:
      min_coverage_percent: 0
      coverage_target: 'consumer'
      poetry_directory: 'consumer'
      pytest_directory: 'consumer'
    secrets: inherit

  build_and_publish_hdc:
    needs: [run_tests_hdc]
    uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/build_and_publish.yml@main
    with:
      matrix_config: '["consumer"]'
      service_name: 'consumer'
      path_to_dockerfile: './consumer/Dockerfile'
      path_to_pyproject: './consumer/pyproject.toml'
      registry_subpath: 'queue'
    secrets: inherit

  deploy_hdc:
    needs: [build_and_publish_hdc]
    uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/trigger_deployment.yml@main
    with:
      hdc_service_name: 'queue-consumer'
      pyproject_folder: 'consumer'
    secrets: inherit
47 changes: 47 additions & 0 deletions .github/workflows/hdc-pipeline-producer.yml
@@ -0,0 +1,47 @@
name: HDC ci/cd pipeline producer

permissions:
  contents: write
  issues: write
  pull-requests: write

on:
  push:
    branches:
      - main
    paths:
      - 'producer/**'
  pull_request:
    branches:
      - main
    paths:
      - 'producer/**'

jobs:
  run_tests_hdc:
    uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/run_tests.yml@main
    with:
      min_coverage_percent: 0
      coverage_target: 'producer'
      poetry_directory: 'producer'
      pytest_directory: 'producer'
    secrets: inherit

  build_and_publish_hdc:
    needs: [run_tests_hdc]
    uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/build_and_publish.yml@main
    with:
      matrix_config: '["producer"]'
      service_name: 'producer'
      path_to_dockerfile: './producer/Dockerfile'
      path_to_pyproject: './producer/pyproject.toml'
      registry_subpath: 'queue'
    secrets: inherit

  deploy_hdc:
    needs: [build_and_publish_hdc]
    uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/trigger_deployment.yml@main
    with:
      hdc_service_name: 'queue-producer'
      pyproject_folder: 'producer'
    secrets: inherit
47 changes: 47 additions & 0 deletions .github/workflows/hdc-pipeline-socketio.yml
@@ -0,0 +1,47 @@
name: HDC ci/cd pipeline socketio

permissions:
  contents: write
  issues: write
  pull-requests: write

on:
  push:
    branches:
      - main
    paths:
      - 'socketio/**'
  pull_request:
    branches:
      - main
    paths:
      - 'socketio/**'

jobs:
  run_tests_hdc:
    uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/run_tests.yml@main
    with:
      min_coverage_percent: 0
      coverage_target: 'socketio'
      poetry_directory: 'socketio'
      pytest_directory: 'socketio'
    secrets: inherit

  build_and_publish_hdc:
    needs: [run_tests_hdc]
    uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/build_and_publish.yml@main
    with:
      matrix_config: '["socketio"]'
      service_name: 'socketio'
      path_to_dockerfile: './socketio/Dockerfile'
      path_to_pyproject: './socketio/pyproject.toml'
      registry_subpath: 'queue'
    secrets: inherit

  deploy_hdc:
    needs: [build_and_publish_hdc]
    uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/trigger_deployment.yml@main
    with:
      hdc_service_name: 'queue-socketio'
      pyproject_folder: 'socketio'
    secrets: inherit
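All three pipeline files share the same three-stage shape (run tests, build and publish, trigger deployment) and differ only in the service directory. Keeping one workflow per service is a deliberate design choice: each file scopes its paths filter to its own directory, so a push touching only 'consumer/**' runs only the consumer pipeline, while 'secrets: inherit' forwards the caller's secrets to the reusable workflows in pilot-hdc-ci-tools.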
18 changes: 9 additions & 9 deletions .pre-commit-config.yaml
@@ -67,15 +67,15 @@ repos:
        '--max-line-length=120',
      ]

  # - repo: https://github.com/PyCQA/docformatter
  #   rev: v1.7.5
  #   hooks:
  #     - id: docformatter
  #       args: [
  #         '--wrap-summaries=120',
  #         '--wrap-descriptions=120',
  #         '--in-place',
  #       ]
  - repo: https://github.com/PyCQA/docformatter
    rev: v1.7.7
    hooks:
      - id: docformatter
        args: [
          '--wrap-summaries=120',
          '--wrap-descriptions=120',
          '--in-place',
        ]

  - repo: https://github.com/Lucas-C/pre-commit-hooks
    rev: v1.5.5
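This re-enables the previously commented-out docformatter hook and bumps it from v1.7.5 to v1.7.7. Assuming a standard pre-commit setup, the hook can be exercised on its own with 'pre-commit run docformatter --all-files' to apply the 120-column docstring wrapping across the repository.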
10 changes: 3 additions & 7 deletions consumer/.env.schema
@@ -1,10 +1,3 @@
CONFIG_CENTER_ENABLED=false

# If config center has been enabled
VAULT_URL=
VAULT_CRT=
VAULT_TOKEN=
CONFIG_CENTER_BASE_URL=
data_lake=
data_transfer_image =
bids_validate_image =
@@ -36,3 +29,6 @@ PROJECT_SERVICE=
KAFKA_URL=
REDIS_HOST=
REDIS_PORT=
REDIS_PASSWORD=
S3_ACCESS_KEY=
S3_SECRET_KEY=
2 changes: 1 addition & 1 deletion consumer/Dockerfile
@@ -1,4 +1,4 @@
FROM docker-registry.ebrains.eu/hdc-services-image/base-image:python-3.10.12-v2 AS consumer-image
FROM docker-registry.ebrains.eu/hdc-services-image/base-image:python-3.10.14-v1 AS consumer-image

ENV PYTHONDONTWRITEBYTECODE=true \
    PYTHONIOENCODING=UTF-8 \
41 changes: 3 additions & 38 deletions consumer/config.py
@@ -6,40 +6,11 @@

import logging
from functools import lru_cache
from typing import Any
from typing import Dict
from typing import Optional

from common import VaultClient
from pydantic import BaseSettings
from pydantic import Extra


class VaultConfig(BaseSettings):
    """Store vault related configuration."""

    APP_NAME: str = 'service_queue'
    CONFIG_CENTER_ENABLED: bool = False

    VAULT_URL: Optional[str]
    VAULT_CRT: Optional[str]
    VAULT_TOKEN: Optional[str]

    class Config:
        env_file = '.env'
        env_file_encoding = 'utf-8'


def load_vault_settings(settings: BaseSettings) -> Dict[str, Any]:
    config = VaultConfig()

    if not config.CONFIG_CENTER_ENABLED:
        return {}

    client = VaultClient(config.VAULT_URL, config.VAULT_CRT, config.VAULT_TOKEN)
    return client.get_from_vault(config.APP_NAME)


class Settings(BaseSettings):
    """Store service configuration settings."""

@@ -52,11 +23,6 @@ class Settings(BaseSettings):
    LOGGING_LEVEL: int = logging.INFO
    LOGGING_FORMAT: str = 'json'

    CONFIG_CENTER_ENABLED: bool = False
    VAULT_URL: str
    VAULT_CRT: str
    VAULT_TOKEN: str

    # greenroom queue
    gm_queue_endpoint: str
    gm_username: str
@@ -94,6 +60,8 @@ class Settings(BaseSettings):
    S3_HOST: str
    S3_PORT: str = ''
    S3_INTERNAL_HTTPS: str
    S3_ACCESS_KEY: str
    S3_SECRET_KEY: str
    DATAOPS_SERVICE: str
    DATASET_SERVICE: str
    QUEUE_SERVICE: str
@@ -104,6 +72,7 @@
    KAFKA_URL: str
    REDIS_HOST: str
    REDIS_PORT: str
    REDIS_PASSWORD: str
    ATLAS_HOST: str
    ATLAS_PORT: str

@@ -112,10 +81,6 @@ class Config:
        env_file_encoding = 'utf-8'
        extra = Extra.allow

        @classmethod
        def customise_sources(cls, init_settings, env_settings, file_secret_settings):
            return env_settings, load_vault_settings, init_settings, file_secret_settings


@lru_cache(1)
def get_settings():
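With the Vault-backed customise_sources hook removed, settings resolution falls back to pydantic's default source order: init arguments, then process environment variables, then the .env file. A minimal, self-contained sketch of that behaviour, assuming pydantic v1 as used in this repo (field names and values below are illustrative only, not the full Settings class):

import os

from pydantic import BaseSettings


class DemoSettings(BaseSettings):
    """Stand-in for the trimmed-down Settings in consumer/config.py."""

    REDIS_HOST: str
    REDIS_PORT: str
    REDIS_PASSWORD: str

    class Config:
        env_file = '.env'
        env_file_encoding = 'utf-8'


# Process environment variables now take precedence over .env entries;
# there is no Vault-backed source any more.
os.environ['REDIS_HOST'] = 'localhost'
os.environ['REDIS_PORT'] = '6379'
os.environ['REDIS_PASSWORD'] = 'example-password'

print(DemoSettings().REDIS_HOST)  # -> localhost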
2 changes: 1 addition & 1 deletion consumer/exceptions.py
@@ -6,12 +6,12 @@

from abc import ABCMeta
from abc import abstractmethod
from collections.abc import Sequence
from http.client import CONFLICT
from http.client import INTERNAL_SERVER_ERROR
from http.client import NOT_FOUND
from http.client import SERVICE_UNAVAILABLE
from http.client import UNPROCESSABLE_ENTITY
from typing import Sequence

from pydantic.error_wrappers import ErrorList

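Importing Sequence from collections.abc instead of typing follows PEP 585: the typing aliases for container ABCs are deprecated since Python 3.9, and the annotation behaves identically at runtime.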
20 changes: 16 additions & 4 deletions consumer/job.py
@@ -31,10 +31,6 @@ def create_job(self, job_name, container_image, volume_path, command, args, pipe
        volume_mount = client.V1VolumeMount(mount_path=volume_path, name=ConfigClass.NFS_MOUNT)

        env = [
            client.V1EnvVar(name='CONFIG_CENTER_ENABLED', value=str(ConfigClass.CONFIG_CENTER_ENABLED)),
            client.V1EnvVar(name='VAULT_URL', value=ConfigClass.VAULT_URL),
            client.V1EnvVar(name='VAULT_CRT', value=ConfigClass.VAULT_CRT),
            client.V1EnvVar(name='VAULT_TOKEN', value=ConfigClass.VAULT_TOKEN),
            client.V1EnvVar(name='GREEN_ZONE_LABEL', value=ConfigClass.GREEN_ZONE_LABEL),
            client.V1EnvVar(name='CORE_ZONE_LABEL', value=ConfigClass.CORE_ZONE_LABEL),
            client.V1EnvVar(name='RDS_DBNAME', value=ConfigClass.RDS_DBNAME),
@@ -43,16 +39,20 @@
            client.V1EnvVar(name='S3_HOST', value=ConfigClass.S3_HOST),
            client.V1EnvVar(name='S3_PORT', value=ConfigClass.S3_PORT),
            client.V1EnvVar(name='S3_INTERNAL_HTTPS', value=ConfigClass.S3_INTERNAL_HTTPS),
            client.V1EnvVar(name='S3_ACCESS_KEY', value=ConfigClass.S3_ACCESS_KEY),
            client.V1EnvVar(name='S3_SECRET_KEY', value=ConfigClass.S3_SECRET_KEY),
            client.V1EnvVar(name='DATAOPS_SERVICE', value=ConfigClass.DATAOPS_SERVICE),
            client.V1EnvVar(name='PROJECT_SERVICE', value=ConfigClass.PROJECT_SERVICE),
            client.V1EnvVar(name='KAFKA_URL', value=ConfigClass.KAFKA_URL),
            client.V1EnvVar(name='REDIS_HOST', value=ConfigClass.REDIS_HOST),
            client.V1EnvVar(name='REDIS_PORT', value=ConfigClass.REDIS_PORT),
            client.V1EnvVar(name='REDIS_PASSWORD', value=ConfigClass.REDIS_PASSWORD),
            client.V1EnvVar(name='ATLAS_HOST', value=ConfigClass.ATLAS_HOST),
            client.V1EnvVar(name='ATLAS_PORT', value=ConfigClass.ATLAS_PORT),
            client.V1EnvVar(name='APPROVAL_SERVICE', value=ConfigClass.APPROVAL_SERVICE),
            client.V1EnvVar(name='NOTIFICATION_SERVICE', value=ConfigClass.NOTIFICATION_SERVICE),
            client.V1EnvVar(name='DATASET_SERVICE', value=ConfigClass.DATASET_SERVICE),
            client.V1EnvVar(name='METADATA_SERVICE', value=ConfigClass.METADATA_SERVICE),
        ]

        container = client.V1Container(
@@ -88,6 +88,18 @@ def bids_validate_job_obj(self, job_name, container_image, volume_path, command,

        return job

    def share_dataset_version_job_obj(
        self, job_name, container_image, volume_path, command, args, version_id, destination_project_code
    ):
        anno = {
            'version_id': version_id,
            'destination_project_code': destination_project_code,
        }

        job = self.create_job(job_name, container_image, volume_path, command, args, ConfigClass.copy_pipeline, anno)

        return job

    def copy_folder_job_obj(self, job_name, container_image, volume_path, command, args, project_code, event_payload):
        anno = {
            'source_geid': args[1],
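For reference, a hypothetical call site for the new share_dataset_version_job_obj helper; every argument value below is an illustrative assumption, not taken from this PR:

# `job_factory` stands in for an instance of the class in consumer/job.py
# that defines create_job; all values here are made-up examples.
k8s_job = job_factory.share_dataset_version_job_obj(
    job_name='share-dataset-version-demo',
    container_image='docker-registry.example/queue/data-transfer:latest',
    volume_path='/data/vre-storage',
    command=['python3'],
    args=['scripts/share_version.py', 'demo-source-geid'],
    version_id='1.0',
    destination_project_code='demoproject',
)
# Internally, create_job receives ConfigClass.copy_pipeline as the pipeline
# argument and the version metadata via the anno dict.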