diff --git a/.env.example b/.env.example index 86f0c77..76206e8 100644 --- a/.env.example +++ b/.env.example @@ -1,13 +1,7 @@ -# Vault configuration # is sensitive/secret -VAULT_TOKEN= RDS_PASSWORD= REDIS_PASSWORD= -# contains defaults but can be overriden -VAULT_URL= -VAULT_CRT= - # Microservices # needs to be set (no defaults) QUEUE_SERVICE= @@ -37,7 +31,6 @@ RDS_ECHO_SQL_QUERIES=false # Service configuration # contains defaults but can be overriden -CONFIG_CENTER_ENABLED=false APP_NAME=dataops_service VERSION=2.0.0 PORT=5063 diff --git a/.github/workflows/hdc-pipeline.yml b/.github/workflows/hdc-pipeline.yml new file mode 100644 index 0000000..d03b142 --- /dev/null +++ b/.github/workflows/hdc-pipeline.yml @@ -0,0 +1,37 @@ +name: HDC ci/cd pipeline + +permissions: + contents: write + issues: write + pull-requests: write + +on: + push: + branches: + - main + pull_request: + branches: + - main + +jobs: + run_tests_hdc: + uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/run_tests.yml@main + with: + min_coverage_percent: 92 + coverage_target: 'dataops' + secrets: inherit + + build_and_publish_hdc: + needs: [run_tests_hdc] + uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/build_and_publish.yml@main + with: + matrix_config: '["alembic","dataops"]' + service_name: 'dataops' + secrets: inherit + + deploy_hdc: + needs: [build_and_publish_hdc] + uses: PilotDataPlatform/pilot-hdc-ci-tools/.github/workflows/trigger_deployment.yml@main + with: + hdc_service_name: 'dataops' + secrets: inherit diff --git a/.gitignore b/.gitignore index f661348..425ecd0 100644 --- a/.gitignore +++ b/.gitignore @@ -36,6 +36,11 @@ tests/logs *.manifest *.spec +# pyenv +# For a library or package, you might want to ignore these files since the code is +# intended to run in multiple environments; otherwise, check them in: +.python-version + # Installer logs pip-log.txt pip-delete-this-directory.txt diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 9ffd5fa..96be327 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -67,15 +67,15 @@ repos: '--max-line-length=120', ] -# - repo: https://github.com/PyCQA/docformatter -# rev: v1.7.5 -# hooks: -# - id: docformatter -# args: [ -# '--wrap-summaries=120', -# '--wrap-descriptions=120', -# '--in-place', -# ] + - repo: https://github.com/PyCQA/docformatter + rev: v1.7.7 + hooks: + - id: docformatter + args: [ + '--wrap-summaries=120', + '--wrap-descriptions=120', + '--in-place', + ] - repo: https://github.com/Lucas-C/pre-commit-hooks rev: v1.5.5 diff --git a/Jenkinsfile b/Jenkinsfile deleted file mode 100644 index 4ce19de..0000000 --- a/Jenkinsfile +++ /dev/null @@ -1,124 +0,0 @@ -pipeline { - agent { label 'small' } - environment { - imagename = "ghcr.io/pilotdataplatform/dataops" - imagename_staging = "ghcr.io/pilotdataplatform/dataops" - commit = sh(returnStdout: true, script: 'git describe --always').trim() - registryCredential = 'pilot-ghcr' - dockerImage = '' - } - - stages { - stage('Git clone for dev') { - when {branch "develop"} - steps{ - script { - git branch: "develop", - url: 'https://github.com/PilotDataPlatform/dataops.git', - credentialsId: 'lzhao' - } - } - } -/** it will be handled by github actions - stage('DEV: Run unit tests') { - when { branch 'develop' } - steps { - withCredentials([ - usernamePassword(credentialsId: 'readonly', usernameVariable: 'PIP_USERNAME', passwordVariable: 'PIP_PASSWORD'), - string(credentialsId:'VAULT_TOKEN', variable: 'VAULT_TOKEN'), - string(credentialsId:'VAULT_URL', 
variable: 'VAULT_URL'), - file(credentialsId:'VAULT_CRT', variable: 'VAULT_CRT') - ]) { - sh """ - pip install --user poetry==1.1.12 - ${HOME}/.local/bin/poetry config virtualenvs.in-project true - ${HOME}/.local/bin/poetry config http-basic.pilot ${PIP_USERNAME} ${PIP_PASSWORD} - ${HOME}/.local/bin/poetry install --no-root --no-interaction - ${HOME}/.local/bin/poetry run pytest --verbose -c tests/pytest.ini - """ - } - } - } -**/ - stage('DEV Build and push image') { - when {branch "develop"} - steps{ - script { - docker.withRegistry('https://ghcr.io', registryCredential) { - customImage = docker.build('$imagename:alembic-$commit-CAC', '--target alembic-image .') - customImage.push() - } - docker.withRegistry('https://ghcr.io', registryCredential) { - customImage = docker.build('$imagename:dataops-$commit-CAC', '--target dataops-image .') - customImage.push() - } - } - } - } - stage('DEV Remove image') { - when {branch "develop"} - steps{ - sh 'docker rmi $imagename:alembic-$commit-CAC' - sh 'docker rmi $imagename:dataops-$commit-CAC' - } - } - - stage('DEV Deploy') { - when {branch "develop"} - steps{ - build(job: "/VRE-IaC/UpdateAppVersion", parameters: [ - [$class: 'StringParameterValue', name: 'TF_TARGET_ENV', value: 'dev' ], - [$class: 'StringParameterValue', name: 'TARGET_RELEASE', value: 'dataops-utility' ], - [$class: 'StringParameterValue', name: 'NEW_APP_VERSION', value: "$commit" ] - ]) - } - } -/** - stage('Git clone staging') { - when {branch "main"} - steps{ - script { - git branch: "main", - url: 'https://github.com/PilotDataPlatform/dataops.git', - credentialsId: 'lzhao' - } - } - } - - stage('STAGING Building and push image') { - when {branch "main"} - steps{ - script { - docker.withRegistry('https://ghcr.io', registryCredential) { - customImage = docker.build("$imagename_staging:${env.commit}", ".") - customImage.push() - } - } - } - } - - stage('STAGING Remove image') { - when {branch "main"} - steps{ - sh "docker rmi $imagename_staging:$commit" - } - } - - stage('STAGING Deploy') { - when {branch "main"} - steps{ - build(job: "/VRE-IaC/Staging-UpdateAppVersion", parameters: [ - [$class: 'StringParameterValue', name: 'TF_TARGET_ENV', value: 'staging' ], - [$class: 'StringParameterValue', name: 'TARGET_RELEASE', value: 'dataops-utility' ], - [$class: 'StringParameterValue', name: 'NEW_APP_VERSION', value: "$commit" ] - ]) - } - } -**/ - } - post { - failure { - slackSend color: '#FF0000', message: "Build Failed! - ${env.JOB_NAME} ${env.commit} (<${env.BUILD_URL}|Open>)", channel: 'jenkins-dev-staging-monitor' - } - } -} diff --git a/dataops/components/archive_preview/views.py b/dataops/components/archive_preview/views.py index b917c29..4057fa2 100644 --- a/dataops/components/archive_preview/views.py +++ b/dataops/components/archive_preview/views.py @@ -4,7 +4,6 @@ # Version 3.0 (the "License") available at https://www.gnu.org/licenses/agpl-3.0.en.html. # You may not use this file except in compliance with the License. 
-from typing import Union from uuid import UUID from fastapi import APIRouter @@ -23,7 +22,7 @@ @router.get('', summary='Get a archive preview given file id', response_model=ArchivePreviewResponseSchema) async def get_archive_preview( - file_id: Union[UUID, str], archive_preview_crud: ArchivePreviewCRUD = Depends(get_archive_preview_crud) + file_id: UUID | str, archive_preview_crud: ArchivePreviewCRUD = Depends(get_archive_preview_crud) ) -> ArchivePreviewResponseSchema: """Get an archive preview by id or code.""" diff --git a/dataops/components/cache.py b/dataops/components/cache.py index 7c274b9..03c4069 100644 --- a/dataops/components/cache.py +++ b/dataops/components/cache.py @@ -4,9 +4,6 @@ # Version 3.0 (the "License") available at https://www.gnu.org/licenses/agpl-3.0.en.html. # You may not use this file except in compliance with the License. -from typing import Optional -from typing import Union - from redis.asyncio.client import Redis @@ -16,12 +13,12 @@ class Cache: def __init__(self, redis: Redis) -> None: self.redis = redis - async def set(self, key: str, value: Union[str, bytes]) -> bool: + async def set(self, key: str, value: str | bytes) -> bool: """Set the value for the key.""" return await self.redis.set(key, value) - async def get(self, key: str) -> Optional[bytes]: + async def get(self, key: str) -> bytes | None: """Return the value for the key or None if the key doesn't exist.""" return await self.redis.get(key) diff --git a/dataops/components/crud.py b/dataops/components/crud.py index 25c73d4..de3afae 100644 --- a/dataops/components/crud.py +++ b/dataops/components/crud.py @@ -7,8 +7,6 @@ from datetime import timedelta from typing import Any from typing import Optional -from typing import Type -from typing import Union from uuid import UUID from pydantic import BaseModel @@ -35,7 +33,7 @@ class CRUD: """Base CRUD class for managing database models.""" session: AsyncSession - model: Type[DBModel] + model: type[DBModel] db_error_codes: dict[str, ServiceException] = { '23503': NotFound(), # missing foreign key '23505': AlreadyExists(), # duplicated entry @@ -65,7 +63,7 @@ def select_query(self) -> Select: """Create base select.""" return select(self.model) - async def execute(self, statement: Executable, **kwds: Any) -> Union[CursorResult, Result]: + async def execute(self, statement: Executable, **kwds: Any) -> CursorResult | Result: """Execute a statement and return buffered result.""" return await self.session.execute(statement, **kwds) @@ -75,7 +73,7 @@ async def scalars(self, statement: Executable, **kwds: Any) -> ScalarResult: return await self.session.scalars(statement, **kwds) - async def _create_one(self, statement: Executable) -> Union[UUID, str]: + async def _create_one(self, statement: Executable) -> UUID | str: """Execute a statement to create one entry.""" try: @@ -155,7 +153,7 @@ async def set_by_key(self, key: str, content: str, expire: int = Optional[None]) async def mget_by_prefix(self, prefix: str) -> bytes: """Query to find key with pattern and retrieve respective record.""" - query = '{}:*'.format(prefix) + query = f'{prefix}:*' keys = await self.__instance.keys(query) return await self.__instance.mget(keys) @@ -169,7 +167,7 @@ async def unlink_by_key(self, key: str) -> int: async def mdele_by_prefix(self, prefix: str) -> list: """Query to find key with pattern and delete respective record.""" - query = '{}:*'.format(prefix) + query = f'{prefix}:*' keys = await self.__instance.keys(query) results = [] for key in keys: diff --git 
a/dataops/components/exceptions.py b/dataops/components/exceptions.py index 12a6e69..9ca2673 100644 --- a/dataops/components/exceptions.py +++ b/dataops/components/exceptions.py @@ -6,6 +6,7 @@ from abc import ABCMeta from abc import abstractmethod +from collections.abc import Sequence from http.client import BAD_REQUEST from http.client import CONFLICT from http.client import INTERNAL_SERVER_ERROR @@ -13,7 +14,6 @@ from http.client import SERVICE_UNAVAILABLE from http.client import UNAUTHORIZED from http.client import UNPROCESSABLE_ENTITY -from typing import Sequence from pydantic.error_wrappers import ErrorList diff --git a/dataops/components/resource_lock/cache.py b/dataops/components/resource_lock/cache.py index e6f56ca..075769a 100644 --- a/dataops/components/resource_lock/cache.py +++ b/dataops/components/resource_lock/cache.py @@ -4,8 +4,6 @@ # Version 3.0 (the "License") available at https://www.gnu.org/licenses/agpl-3.0.en.html. # You may not use this file except in compliance with the License. -from typing import List - from dataops.components.cache import Cache from dataops.components.resource_lock.schemas import ResourceLockBulkResponseSchema from dataops.components.resource_lock.schemas import ResourceLockResponseSchema @@ -15,15 +13,15 @@ class ResourceLockerCache(Cache): """Manages resource operation locking and unlocking and their respective lock statuses.""" - def str_to_int_list(self, input_: bytes) -> List[int]: + def str_to_int_list(self, input_: bytes) -> list[int]: """Return decoded strings as a list of integers.""" return [int(x) for x in input_.decode('utf-8').split(',')] - def int_list_to_str(self, input_: List[int]) -> str: + def int_list_to_str(self, input_: list[int]) -> str: """Return joined list as a string with delimiter.""" return ','.join([str(x) for x in input_]) - async def perform_bulk_lock(self, keys: List[str], operation: str) -> ResourceLockBulkResponseSchema: + async def perform_bulk_lock(self, keys: list[str], operation: str) -> ResourceLockBulkResponseSchema: """Perform bulk lock for multiple keys. If one of the lock attempts fails, the locking attempts of the following keys are stopped. 
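The hunk above preserves `perform_bulk_lock`'s early-abort contract: once one key fails to lock, locking attempts for the remaining keys stop. A minimal standalone sketch of one way to realize that contract, assuming an illustrative `try_lock` stub rather than the service's actual cache helper (the sort mirrors the `keys = sorted(keys)` visible in the unlock hunk below):

    import asyncio


    async def try_lock(key: str, operation: str) -> bool:
        """Illustrative stub: every key except 'busy' can be locked."""
        return key != 'busy'


    async def perform_bulk_lock(keys: list[str], operation: str) -> list[tuple[str, bool]]:
        keys_status: list[tuple[str, bool]] = []
        locked = True
        for key in sorted(keys):  # deterministic acquisition order
            if locked:
                locked = await try_lock(key, operation)
            keys_status.append((key, locked))  # after the first failure, no further attempts
        return keys_status


    print(asyncio.run(perform_bulk_lock(['b', 'busy', 'a'], 'read')))
    # [('a', True), ('b', True), ('busy', False)]

The returned pairs line up with ResourceLockBulkResponseSchema.keys_status (list[tuple[str, bool]]), whose is_successful() is true only when every status is true.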
@@ -46,7 +44,7 @@ async def perform_bulk_lock(self, keys: List[str], operation: str) -> ResourceLo return ResourceLockBulkResponseSchema(keys_status=status) - async def perform_bulk_unlock(self, keys: List[str], operation: str) -> ResourceLockBulkResponseSchema: + async def perform_bulk_unlock(self, keys: list[str], operation: str) -> ResourceLockBulkResponseSchema: """Perform bulk unlock for multiple keys.""" keys = sorted(keys) diff --git a/dataops/components/resource_lock/schemas.py b/dataops/components/resource_lock/schemas.py index 07b1db9..eb01ee0 100644 --- a/dataops/components/resource_lock/schemas.py +++ b/dataops/components/resource_lock/schemas.py @@ -6,9 +6,6 @@ from enum import Enum from enum import unique -from typing import List -from typing import Optional -from typing import Tuple from pydantic import BaseModel @@ -34,7 +31,7 @@ class ResourceLockCreateSchema(BaseModel): class ResourceLockBulkCreateSchema(BaseModel): """Schema for bulk creating resource lock by keys and operations.""" - resource_keys: List[str] + resource_keys: list[str] operation: ResourceLockOperationSchema @@ -42,13 +39,13 @@ class ResourceLockResponseSchema(BaseModel): """Schema for key and status of locked resource in response.""" key: str - status: Optional[str] = False + status: str | None = False class ResourceLockBulkResponseSchema(BaseModel): """Schema for status of operation locking for multiple keys.""" - keys_status: List[Tuple[str, bool]] + keys_status: list[tuple[str, bool]] def is_successful(self) -> bool: """Return true if all statuses are true.""" diff --git a/dataops/components/resource_lock/views.py b/dataops/components/resource_lock/views.py index 355f25c..b902a70 100644 --- a/dataops/components/resource_lock/views.py +++ b/dataops/components/resource_lock/views.py @@ -15,7 +15,6 @@ from dataops.components.resource_lock.schemas import ResourceLockBulkResponseSchema from dataops.components.resource_lock.schemas import ResourceLockCreateSchema from dataops.components.resource_lock.schemas import ResourceLockResponseSchema -from dataops.logger import logger router = APIRouter(prefix='/resource/lock', tags=['Resource Locking']) diff --git a/dataops/components/resource_operations/crud/base.py b/dataops/components/resource_operations/crud/base.py index 9d21525..fb0a363 100644 --- a/dataops/components/resource_operations/crud/base.py +++ b/dataops/components/resource_operations/crud/base.py @@ -5,7 +5,6 @@ # You may not use this file except in compliance with the License. from time import time -from typing import List from uuid import UUID from dataops.components.crud import RedisCRUD @@ -38,7 +37,7 @@ async def validate_base(self, item_id: str) -> bool: logger.exception(f'Source/destination is not a valid resource type: {item_id}') raise InvalidInput() - async def validate_targets(self, targets: List[ResourceOperationTargetSchema]) -> ItemFilter: + async def validate_targets(self, targets: list[ResourceOperationTargetSchema]) -> ItemFilter: """Validate resource type and archive status of target ids involved in resource operation.""" fetched = [] for target in targets: diff --git a/dataops/components/resource_operations/filtering.py b/dataops/components/resource_operations/filtering.py index 1c6ad72..c7e7582 100644 --- a/dataops/components/resource_operations/filtering.py +++ b/dataops/components/resource_operations/filtering.py @@ -5,9 +5,6 @@ # You may not use this file except in compliance with the License. 
from typing import Any -from typing import Dict -from typing import List -from typing import Set from dataops.components.resource_operations.schemas import ResourceType @@ -29,16 +26,16 @@ def name(self) -> str: class ItemFilter(list): """Filter metadata items.""" - def __init__(self, items: List[Dict[str, Any]]) -> None: + def __init__(self, items: list[dict[str, Any]]) -> None: super().__init__([Item(item) for item in items]) @property - def ids(self) -> Set[str]: + def ids(self) -> set[str]: """Return item ids.""" return {item.id for item in self} @property - def names(self) -> List[str]: + def names(self) -> list[str]: """Return item names.""" return [item.name for item in self] @@ -48,14 +45,14 @@ def target_type(self) -> str: item_type = self[0]['type'] if len(self) == 1 else 'batch' return item_type - def _get_by_resource_type(self, resource_type: ResourceType) -> List[Item]: + def _get_by_resource_type(self, resource_type: ResourceType) -> list[Item]: """Validates source type against defined resource types.""" return [source for source in self if source['type'] == resource_type] - def filter_folders(self) -> List[Item]: + def filter_folders(self) -> list[Item]: """Returns items with folder resource type.""" return self._get_by_resource_type(ResourceType.FOLDER) - def filter_files(self) -> List[Item]: + def filter_files(self) -> list[Item]: """Returns items with file resource type.""" return self._get_by_resource_type(ResourceType.FILE) diff --git a/dataops/components/resource_operations/schemas.py b/dataops/components/resource_operations/schemas.py index 1a07a3e..33d49f6 100644 --- a/dataops/components/resource_operations/schemas.py +++ b/dataops/components/resource_operations/schemas.py @@ -6,8 +6,6 @@ from enum import Enum from enum import unique -from typing import List -from typing import Optional from uuid import UUID from pydantic import BaseModel @@ -52,10 +50,10 @@ class Config: class ResourceOperationPayloadSchema(BaseSchema): """Schema of payload for resource operation request.""" - request_info: Optional[dict] - targets: List[ResourceOperationTargetSchema] + request_info: dict | None + targets: list[ResourceOperationTargetSchema] source: str - destination: Optional[str] + destination: str | None class ResourceOperationSchema(BaseSchema): @@ -88,4 +86,4 @@ class FileStatusSchema(BaseSchema): class ResourceOperationResponseSchema(BaseModel): """Schema of resource operation request response.""" - operation_info: List[FileStatusSchema] + operation_info: list[FileStatusSchema] diff --git a/dataops/components/task_dispatch/crud.py b/dataops/components/task_dispatch/crud.py index e3529a0..ec0946f 100644 --- a/dataops/components/task_dispatch/crud.py +++ b/dataops/components/task_dispatch/crud.py @@ -6,8 +6,6 @@ import json import time -from typing import Optional -from typing import Union from dataops.components.crud import RedisCRUD from dataops.components.exceptions import AlreadyExists @@ -28,10 +26,10 @@ async def get_job( code: str, action: str, operator: str, - sorting: Optional[bool] = False, + sorting: bool | None = False, ) -> list: """Get job records in Redis for a respective session.""" - key = 'dataaction:{}:{}:{}:{}:{}:{}'.format(session_id, label, job_id, action, code, operator) + key = f'dataaction:{session_id}:{label}:{job_id}:{action}:{code}:{operator}' value = await self.mget_by_prefix(key) value_decode = [json.loads(record.decode('utf-8')) for record in value] if value else [] session_jobs = sort_by_update_time(value_decode) if sorting else value_decode @@ 
-47,18 +45,19 @@ async def check_job_id(self, entry: BaseSchema): logger.exception(f'Job id already exists: {job["job_id"]}') raise AlreadyExists() - async def set_job(self, entry: Union[BaseSchema, dict]) -> dict: + async def set_job(self, entry: BaseSchema | dict) -> dict: """Create new or update existing job record (key,value) in Redis for a respective session.""" job = dict(entry) - key = 'dataaction:{}:{}:{}:{}:{}:{}:{}'.format( - job['session_id'], - job['label'], - job['job_id'], - job['action'], - job['code'], - job['operator'], - job['source'], + key = ( + f'dataaction' + f':{job["session_id"]}' + f':{job["label"]}' + f':{job["job_id"]}' + f':{job["action"]}' + f':{job["code"]}' + f':{job["operator"]}' + f':{job["source"]}' ) value = { @@ -107,8 +106,14 @@ async def update_job( async def delete_job(self, entry: BaseSchema) -> list: """Delete existing job for a respective session.""" job = entry.dict() - key = 'dataaction:{}:{}:{}:{}:{}:{}'.format( - job['session_id'], job['label'], job['job_id'], job['action'], job['code'], job['operator'] + key = ( + f'dataaction' + f':{job["session_id"]}' + f':{job["label"]}' + f':{job["job_id"]}' + f':{job["action"]}' + f':{job["code"]}' + f':{job["operator"]}' ) value = await self.mdele_by_prefix(key) return value diff --git a/dataops/components/task_stream/parsing.py b/dataops/components/task_stream/parsing.py index e970072..74f4812 100644 --- a/dataops/components/task_stream/parsing.py +++ b/dataops/components/task_stream/parsing.py @@ -5,7 +5,6 @@ # You may not use this file except in compliance with the License. import ast -from typing import Union from dataops.components.task_stream.schemas import SSETaskStreamSchema from dataops.components.task_stream.schemas import TaskStreamCreateSchema @@ -42,7 +41,7 @@ def parse_file_status(self, results: list) -> list[TaskStreamCreateSchema]: return status def filter_parsed_status( - self, file_statuses: list[TaskStreamCreateSchema], params: Union[TaskStreamRetrieveSchema, SSETaskStreamSchema] + self, file_statuses: list[TaskStreamCreateSchema], params: TaskStreamRetrieveSchema | SSETaskStreamSchema ) -> list[dict]: """Filters parsed file statuses.""" filtered_statuses = [] diff --git a/dataops/components/task_stream/schemas.py b/dataops/components/task_stream/schemas.py index fd56022..13fac55 100644 --- a/dataops/components/task_stream/schemas.py +++ b/dataops/components/task_stream/schemas.py @@ -5,8 +5,6 @@ # You may not use this file except in compliance with the License. 
import re -from typing import Optional -from typing import Union from uuid import UUID from uuid import uuid4 @@ -22,17 +20,17 @@ class TaskStreamRetrieveSchema(BaseModel): """Schema for retrieving file status.""" session_id: str - container_code: Optional[str] - container_type: Optional[str] - action_type: Optional[str] - target_names: Optional[str] - job_id: Optional[UUID] + container_code: str | None + container_type: str | None + action_type: str | None + target_names: str | None + job_id: UUID | None class SSETaskStreamSchema(TaskStreamRetrieveSchema): """Schema for stream SSE timeout.""" - request_timeout: Optional[int] + request_timeout: int | None class TaskStreamCreateSchema(BaseSchema): @@ -45,7 +43,7 @@ class TaskStreamCreateSchema(BaseSchema): container_type: str action_type: str status: str - job_id: Optional[UUID] = uuid4() + job_id: UUID | None = uuid4() entry_id: str = None @validator('session_id') @@ -97,4 +95,4 @@ class TaskStreamResponseSchema(BaseModel): """Response schema for writing and retrieving file status.""" total: int = 0 - stream_info: Union[dict, list, list[dict]] = {} + stream_info: dict | list | list[dict] = {} diff --git a/dataops/components/task_stream/views.py b/dataops/components/task_stream/views.py index c9dcbfd..31f95de 100644 --- a/dataops/components/task_stream/views.py +++ b/dataops/components/task_stream/views.py @@ -19,7 +19,6 @@ from dataops.components.task_stream.schemas import TaskStreamResponseSchema from dataops.components.task_stream.schemas import TaskStreamRetrieveSchema from dataops.dependencies.auth import AuthManager -from dataops.logger import logger router = APIRouter(prefix='/task-stream', tags=['Task Streaming']) diff --git a/dataops/config.py b/dataops/config.py index e1efda0..99b0a1d 100644 --- a/dataops/config.py +++ b/dataops/config.py @@ -7,45 +7,16 @@ import base64 import logging from functools import lru_cache -from typing import Any -from typing import Dict -from typing import Optional -from common import VaultClient from pydantic import BaseSettings from pydantic import Extra -class VaultConfig(BaseSettings): - """Store vault related configuration.""" - - APP_NAME: str = 'dataops' - CONFIG_CENTER_ENABLED: bool = False - - VAULT_URL: Optional[str] - VAULT_CRT: Optional[str] - VAULT_TOKEN: Optional[str] - - class Config: - env_file = '.env' - env_file_encoding = 'utf-8' - - -def load_vault_settings(settings: BaseSettings) -> Dict[str, Any]: - config = VaultConfig() - - if not config.CONFIG_CENTER_ENABLED: - return {} - - client = VaultClient(config.VAULT_URL, config.VAULT_CRT, config.VAULT_TOKEN) - return client.get_from_vault(config.APP_NAME) - - class Settings(BaseSettings): """Store service configuration settings.""" APP_NAME: str = 'dataops_service' - VERSION = '2.5.0a0' + VERSION = '2.5.8' PORT: int = 5063 HOST: str = '127.0.0.1' WORKERS: int = 1 @@ -95,10 +66,6 @@ class Config: env_file_encoding = 'utf-8' extra = Extra.allow - @classmethod - def customise_sources(cls, init_settings, env_settings, file_secret_settings): - return env_settings, load_vault_settings, init_settings, file_secret_settings - @lru_cache(1) def get_settings(): diff --git a/migrations/env.py b/migrations/env.py index 148560c..70d764f 100644 --- a/migrations/env.py +++ b/migrations/env.py @@ -6,7 +6,6 @@ import logging from logging.config import fileConfig -from typing import Optional from urllib.parse import urlparse from alembic import context @@ -32,7 +31,7 @@ ) -def include_name(name: Optional[str], type_: str, parent_names: dict[str, 
Optional[str]]) -> bool: +def include_name(name: str | None, type_: str, parent_names: dict[str, str | None]) -> bool: """Consider only tables from desired schema.""" if type_ == 'schema': diff --git a/migrations/versions/5b4ff5276434_alter_archive_preview_table.py b/migrations/versions/5b4ff5276434_alter_archive_preview_table.py index d756a38..948e95c 100644 --- a/migrations/versions/5b4ff5276434_alter_archive_preview_table.py +++ b/migrations/versions/5b4ff5276434_alter_archive_preview_table.py @@ -3,8 +3,7 @@ # Licensed under the GNU AFFERO GENERAL PUBLIC LICENSE, # Version 3.0 (the "License") available at https://www.gnu.org/licenses/agpl-3.0.en.html. # You may not use this file except in compliance with the License. - -"""add index to file_id, alter archive_preview to jsonb. +"""Add index to file_id, alter archive_preview to jsonb. Revision ID: 5b4ff5276434 Revises: ebd1730ac381 diff --git a/migrations/versions/ebd1730ac381_add_archive_preview_model.py b/migrations/versions/ebd1730ac381_add_archive_preview_model.py index 89fdbea..ae4ecd2 100644 --- a/migrations/versions/ebd1730ac381_add_archive_preview_model.py +++ b/migrations/versions/ebd1730ac381_add_archive_preview_model.py @@ -3,7 +3,6 @@ # Licensed under the GNU AFFERO GENERAL PUBLIC LICENSE, # Version 3.0 (the "License") available at https://www.gnu.org/licenses/agpl-3.0.en.html. # You may not use this file except in compliance with the License. - """add_archive_preview_model. Revision ID: ebd1730ac381 diff --git a/poetry.lock b/poetry.lock index 3693f0a..6bd2d8f 100644 --- a/poetry.lock +++ b/poetry.lock @@ -1845,37 +1845,58 @@ files = [ [[package]] name = "pydantic" -version = "1.8.2" -description = "Data validation and settings management using python 3.6 type hinting" -optional = false -python-versions = ">=3.6.1" -files = [ - {file = "pydantic-1.8.2-cp36-cp36m-macosx_10_9_x86_64.whl", hash = "sha256:05ddfd37c1720c392f4e0d43c484217b7521558302e7069ce8d318438d297739"}, - {file = "pydantic-1.8.2-cp36-cp36m-manylinux1_i686.whl", hash = "sha256:a7c6002203fe2c5a1b5cbb141bb85060cbff88c2d78eccbc72d97eb7022c43e4"}, - {file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_i686.whl", hash = "sha256:589eb6cd6361e8ac341db97602eb7f354551482368a37f4fd086c0733548308e"}, - {file = "pydantic-1.8.2-cp36-cp36m-manylinux2014_x86_64.whl", hash = "sha256:10e5622224245941efc193ad1d159887872776df7a8fd592ed746aa25d071840"}, - {file = "pydantic-1.8.2-cp36-cp36m-win_amd64.whl", hash = "sha256:99a9fc39470010c45c161a1dc584997f1feb13f689ecf645f59bb4ba623e586b"}, - {file = "pydantic-1.8.2-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:a83db7205f60c6a86f2c44a61791d993dff4b73135df1973ecd9eed5ea0bda20"}, - {file = "pydantic-1.8.2-cp37-cp37m-manylinux1_i686.whl", hash = "sha256:41b542c0b3c42dc17da70554bc6f38cbc30d7066d2c2815a94499b5684582ecb"}, - {file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_i686.whl", hash = "sha256:ea5cb40a3b23b3265f6325727ddfc45141b08ed665458be8c6285e7b85bd73a1"}, - {file = "pydantic-1.8.2-cp37-cp37m-manylinux2014_x86_64.whl", hash = "sha256:18b5ea242dd3e62dbf89b2b0ec9ba6c7b5abaf6af85b95a97b00279f65845a23"}, - {file = "pydantic-1.8.2-cp37-cp37m-win_amd64.whl", hash = "sha256:234a6c19f1c14e25e362cb05c68afb7f183eb931dd3cd4605eafff055ebbf287"}, - {file = "pydantic-1.8.2-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:021ea0e4133e8c824775a0cfe098677acf6fa5a3cbf9206a376eed3fc09302cd"}, - {file = "pydantic-1.8.2-cp38-cp38-manylinux1_i686.whl", hash = "sha256:e710876437bc07bd414ff453ac8ec63d219e7690128d925c6e82889d674bb505"}, - {file = 
"pydantic-1.8.2-cp38-cp38-manylinux2014_i686.whl", hash = "sha256:ac8eed4ca3bd3aadc58a13c2aa93cd8a884bcf21cb019f8cfecaae3b6ce3746e"}, - {file = "pydantic-1.8.2-cp38-cp38-manylinux2014_x86_64.whl", hash = "sha256:4a03cbbe743e9c7247ceae6f0d8898f7a64bb65800a45cbdc52d65e370570820"}, - {file = "pydantic-1.8.2-cp38-cp38-win_amd64.whl", hash = "sha256:8621559dcf5afacf0069ed194278f35c255dc1a1385c28b32dd6c110fd6531b3"}, - {file = "pydantic-1.8.2-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:8b223557f9510cf0bfd8b01316bf6dd281cf41826607eada99662f5e4963f316"}, - {file = "pydantic-1.8.2-cp39-cp39-manylinux1_i686.whl", hash = "sha256:244ad78eeb388a43b0c927e74d3af78008e944074b7d0f4f696ddd5b2af43c62"}, - {file = "pydantic-1.8.2-cp39-cp39-manylinux2014_i686.whl", hash = "sha256:05ef5246a7ffd2ce12a619cbb29f3307b7c4509307b1b49f456657b43529dc6f"}, - {file = "pydantic-1.8.2-cp39-cp39-manylinux2014_x86_64.whl", hash = "sha256:54cd5121383f4a461ff7644c7ca20c0419d58052db70d8791eacbbe31528916b"}, - {file = "pydantic-1.8.2-cp39-cp39-win_amd64.whl", hash = "sha256:4be75bebf676a5f0f87937c6ddb061fa39cbea067240d98e298508c1bda6f3f3"}, - {file = "pydantic-1.8.2-py3-none-any.whl", hash = "sha256:fec866a0b59f372b7e776f2d7308511784dace622e0992a0b59ea3ccee0ae833"}, - {file = "pydantic-1.8.2.tar.gz", hash = "sha256:26464e57ccaafe72b7ad156fdaa4e9b9ef051f69e175dbbb463283000c05ab7b"}, -] - -[package.dependencies] -typing-extensions = ">=3.7.4.3" +version = "1.10.19" +description = "Data validation and settings management using python type hints" +optional = false +python-versions = ">=3.7" +files = [ + {file = "pydantic-1.10.19-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:a415b9e95fa602b10808113967f72b2da8722061265d6af69268c111c254832d"}, + {file = "pydantic-1.10.19-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:11965f421f7eb026439d4eb7464e9182fe6d69c3d4d416e464a4485d1ba61ab6"}, + {file = "pydantic-1.10.19-cp310-cp310-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:f5bb81fcfc6d5bff62cd786cbd87480a11d23f16d5376ad2e057c02b3b44df96"}, + {file = "pydantic-1.10.19-cp310-cp310-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:83ee8c9916689f8e6e7d90161e6663ac876be2efd32f61fdcfa3a15e87d4e413"}, + {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_i686.whl", hash = "sha256:0399094464ae7f28482de22383e667625e38e1516d6b213176df1acdd0c477ea"}, + {file = "pydantic-1.10.19-cp310-cp310-musllinux_1_1_x86_64.whl", hash = "sha256:8b2cf5e26da84f2d2dee3f60a3f1782adedcee785567a19b68d0af7e1534bd1f"}, + {file = "pydantic-1.10.19-cp310-cp310-win_amd64.whl", hash = "sha256:1fc8cc264afaf47ae6a9bcbd36c018d0c6b89293835d7fb0e5e1a95898062d59"}, + {file = "pydantic-1.10.19-cp311-cp311-macosx_10_9_x86_64.whl", hash = "sha256:d7a8a1dd68bac29f08f0a3147de1885f4dccec35d4ea926e6e637fac03cdb4b3"}, + {file = "pydantic-1.10.19-cp311-cp311-macosx_11_0_arm64.whl", hash = "sha256:07d00ca5ef0de65dd274005433ce2bb623730271d495a7d190a91c19c5679d34"}, + {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:ad57004e5d73aee36f1e25e4e73a4bc853b473a1c30f652dc8d86b0a987ffce3"}, + {file = "pydantic-1.10.19-cp311-cp311-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:dce355fe7ae53e3090f7f5fa242423c3a7b53260747aa398b4b3aaf8b25f41c3"}, + {file = "pydantic-1.10.19-cp311-cp311-musllinux_1_1_i686.whl", hash = "sha256:0d32227ea9a3bf537a2273fd2fdb6d64ab4d9b83acd9e4e09310a777baaabb98"}, + {file = 
"pydantic-1.10.19-cp311-cp311-musllinux_1_1_x86_64.whl", hash = "sha256:e351df83d1c9cffa53d4e779009a093be70f1d5c6bb7068584086f6a19042526"}, + {file = "pydantic-1.10.19-cp311-cp311-win_amd64.whl", hash = "sha256:d8d72553d2f3f57ce547de4fa7dc8e3859927784ab2c88343f1fc1360ff17a08"}, + {file = "pydantic-1.10.19-cp312-cp312-macosx_10_9_x86_64.whl", hash = "sha256:d5b5b7c6bafaef90cbb7dafcb225b763edd71d9e22489647ee7df49d6d341890"}, + {file = "pydantic-1.10.19-cp312-cp312-macosx_11_0_arm64.whl", hash = "sha256:570ad0aeaf98b5e33ff41af75aba2ef6604ee25ce0431ecd734a28e74a208555"}, + {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:0890fbd7fec9e151c7512941243d830b2d6076d5df159a2030952d480ab80a4e"}, + {file = "pydantic-1.10.19-cp312-cp312-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:ec5c44e6e9eac5128a9bfd21610df3b8c6b17343285cc185105686888dc81206"}, + {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_i686.whl", hash = "sha256:6eb56074b11a696e0b66c7181da682e88c00e5cebe6570af8013fcae5e63e186"}, + {file = "pydantic-1.10.19-cp312-cp312-musllinux_1_1_x86_64.whl", hash = "sha256:9d7d48fbc5289efd23982a0d68e973a1f37d49064ccd36d86de4543aff21e086"}, + {file = "pydantic-1.10.19-cp312-cp312-win_amd64.whl", hash = "sha256:fd34012691fbd4e67bdf4accb1f0682342101015b78327eaae3543583fcd451e"}, + {file = "pydantic-1.10.19-cp37-cp37m-macosx_10_9_x86_64.whl", hash = "sha256:4a5d5b877c7d3d9e17399571a8ab042081d22fe6904416a8b20f8af5909e6c8f"}, + {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:9c46f58ef2df958ed2ea7437a8be0897d5efe9ee480818405338c7da88186fb3"}, + {file = "pydantic-1.10.19-cp37-cp37m-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:6d8a38a44bb6a15810084316ed69c854a7c06e0c99c5429f1d664ad52cec353c"}, + {file = "pydantic-1.10.19-cp37-cp37m-musllinux_1_1_i686.whl", hash = "sha256:a82746c6d6e91ca17e75f7f333ed41d70fce93af520a8437821dec3ee52dfb10"}, + {file = "pydantic-1.10.19-cp37-cp37m-musllinux_1_1_x86_64.whl", hash = "sha256:566bebdbe6bc0ac593fa0f67d62febbad9f8be5433f686dc56401ba4aab034e3"}, + {file = "pydantic-1.10.19-cp37-cp37m-win_amd64.whl", hash = "sha256:22a1794e01591884741be56c6fba157c4e99dcc9244beb5a87bd4aa54b84ea8b"}, + {file = "pydantic-1.10.19-cp38-cp38-macosx_10_9_x86_64.whl", hash = "sha256:076c49e24b73d346c45f9282d00dbfc16eef7ae27c970583d499f11110d9e5b0"}, + {file = "pydantic-1.10.19-cp38-cp38-macosx_11_0_arm64.whl", hash = "sha256:5d4320510682d5a6c88766b2a286d03b87bd3562bf8d78c73d63bab04b21e7b4"}, + {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:7e66aa0fa7f8aa9d0a620361834f6eb60d01d3e9cea23ca1a92cda99e6f61dac"}, + {file = "pydantic-1.10.19-cp38-cp38-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:d216f8d0484d88ab72ab45d699ac669fe031275e3fa6553e3804e69485449fa0"}, + {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_i686.whl", hash = "sha256:9f28a81978e936136c44e6a70c65bde7548d87f3807260f73aeffbf76fb94c2f"}, + {file = "pydantic-1.10.19-cp38-cp38-musllinux_1_1_x86_64.whl", hash = "sha256:d3449633c207ec3d2d672eedb3edbe753e29bd4e22d2e42a37a2c1406564c20f"}, + {file = "pydantic-1.10.19-cp38-cp38-win_amd64.whl", hash = "sha256:7ea24e8614f541d69ea72759ff635df0e612b7dc9d264d43f51364df310081a3"}, + {file = "pydantic-1.10.19-cp39-cp39-macosx_10_9_x86_64.whl", hash = 
"sha256:573254d844f3e64093f72fcd922561d9c5696821ff0900a0db989d8c06ab0c25"}, + {file = "pydantic-1.10.19-cp39-cp39-macosx_11_0_arm64.whl", hash = "sha256:ff09600cebe957ecbb4a27496fe34c1d449e7957ed20a202d5029a71a8af2e35"}, + {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:4739c206bfb6bb2bdc78dcd40bfcebb2361add4ceac6d170e741bb914e9eff0f"}, + {file = "pydantic-1.10.19-cp39-cp39-manylinux_2_5_i686.manylinux1_i686.manylinux_2_17_i686.manylinux2014_i686.whl", hash = "sha256:0bfb5b378b78229119d66ced6adac2e933c67a0aa1d0a7adffbe432f3ec14ce4"}, + {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_i686.whl", hash = "sha256:7f31742c95e3f9443b8c6fa07c119623e61d76603be9c0d390bcf7e888acabcb"}, + {file = "pydantic-1.10.19-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:c6444368b651a14c2ce2fb22145e1496f7ab23cbdb978590d47c8d34a7bc0289"}, + {file = "pydantic-1.10.19-cp39-cp39-win_amd64.whl", hash = "sha256:945407f4d08cd12485757a281fca0e5b41408606228612f421aa4ea1b63a095d"}, + {file = "pydantic-1.10.19-py3-none-any.whl", hash = "sha256:2206a1752d9fac011e95ca83926a269fb0ef5536f7e053966d058316e24d929f"}, + {file = "pydantic-1.10.19.tar.gz", hash = "sha256:fea36c2065b7a1d28c6819cc2e93387b43dd5d3cf5a1e82d8132ee23f36d1f10"}, +] + +[package.dependencies] +typing-extensions = ">=4.2.0" [package.extras] dotenv = ["python-dotenv (>=0.10.4)"] @@ -2407,13 +2428,13 @@ files = [ [[package]] name = "typing-extensions" -version = "4.0.0" -description = "Backported and Experimental Type Hints for Python 3.6+" +version = "4.12.2" +description = "Backported and Experimental Type Hints for Python 3.8+" optional = false -python-versions = ">=3.6" +python-versions = ">=3.8" files = [ - {file = "typing_extensions-4.0.0-py3-none-any.whl", hash = "sha256:829704698b22e13ec9eaf959122315eabb370b0884400e9818334d8b677023d9"}, - {file = "typing_extensions-4.0.0.tar.gz", hash = "sha256:2cdf80e4e04866a9b3689a51869016d36db0814d84b8d8a568d22781d45d27ed"}, + {file = "typing_extensions-4.12.2-py3-none-any.whl", hash = "sha256:04e5ca0351e0f3f85c6853954072df659d0d13fac324d0072316b67d7794700d"}, + {file = "typing_extensions-4.12.2.tar.gz", hash = "sha256:1a7ead55c7e559dd4dee8856e3a88b41225abfe1ce8df57b7c13915fe121ffb8"}, ] [[package]] @@ -2811,4 +2832,4 @@ testing = ["func-timeout", "jaraco.itertools", "pytest (>=4.6)", "pytest-black ( [metadata] lock-version = "2.0" python-versions = "^3.9" -content-hash = "e0450f491b6bad7141ddb73c0326e975f5d2743a2091857bc2474fbefc1234c1" +content-hash = "135c5d8702bdafc8143fbdf27e6736e401529370ebf6315adf350e36850520d1" diff --git a/pyproject.toml b/pyproject.toml index 6145968..c7af547 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,11 @@ [tool.poetry] name = "dataops" -version = "2.5.5" +version = "2.5.8" description = "" authors = ["Indoc Research"] [tool.poetry.dependencies] -python = "^3.9" +python = ">=3.10,<3.11" alembic = "^1.8.0" async-asgi-testclient = "^1.4.10" asyncpg = "^0.25.0" @@ -32,7 +32,7 @@ opentelemetry-instrumentation-requests = "^0.30b1" opentelemetry-instrumentation-sqlalchemy = "^0.30b1" pilot-platform-common = "0.3.0" psycopg2-binary = "2.9.3" -pydantic = "1.8.2" +pydantic = "1.10.19" pyjwt = "2.6.0" python-dotenv = "0.19.1" python-multipart = "0.0.5" @@ -42,7 +42,6 @@ six = "1.16.0" SQLAlchemy = "1.4.27" sse-starlette = "1.1.6" testcontainers = "3.4.2" -typing_extensions = "4.0.0" urllib3 = "1.25.11" uvicorn = {version = "0.17.6", extras = ["standard"]} zipp = "3.6.0" diff --git 
a/tests/fixtures/components/resource_operations.py b/tests/fixtures/components/resource_operations.py index 09a65ef..712f9fb 100644 --- a/tests/fixtures/components/resource_operations.py +++ b/tests/fixtures/components/resource_operations.py @@ -7,8 +7,8 @@ import random import time import uuid +from collections.abc import Callable from typing import Any -from typing import Callable import pytest diff --git a/tests/fixtures/db.py b/tests/fixtures/db.py index 73f352f..b644c53 100644 --- a/tests/fixtures/db.py +++ b/tests/fixtures/db.py @@ -35,10 +35,10 @@ def chdir(directory: Path) -> None: def project_root() -> Path: path = Path(__file__) - while path.name != 'dataops': + while not (path / 'pyproject.toml').is_file(): path = path.parent - yield path + return path @pytest.fixture(scope='session')
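Two cross-cutting changes in this patch deserve a note. The move from typing.Union/Optional/List to PEP 604 unions and builtin generics is what forces the `python = ">=3.10,<3.11"` pin in pyproject.toml: annotations such as `UUID | str` and `list[str]` only evaluate natively from Python 3.10 on, and the pydantic bump to 1.10.19 goes with it, since 1.8.2 predates support for `types.UnionType` fields. Separately, the task_dispatch hunks rebuild the same Redis key layout with f-strings; a standalone sketch of that scheme, with illustrative argument values (`set_job` appends a trailing `:{source}` segment, and `get_job` uses the key as a `:*` prefix query):

    from uuid import UUID, uuid4


    def job_key(session_id: str, label: str, job_id: UUID, action: str, code: str, operator: str) -> str:
        """Key layout used by get_job/delete_job in task_dispatch/crud.py."""
        return f'dataaction:{session_id}:{label}:{job_id}:{action}:{code}:{operator}'


    print(job_key('sess-1', 'Container', uuid4(), 'data_transfer', 'project1', 'admin'))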
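The tests/fixtures/db.py change is also worth pinning down: the old walk stopped at a directory literally named 'dataops', which breaks when the repository is checked out under any other name, while the new walk stops at the first ancestor containing pyproject.toml; the switch from yield to return is consistent with there being no teardown step. A standalone sketch of the new lookup; the root-reached guard is added here for illustration only, while the fixture in the diff relies on the marker file being present:

    from pathlib import Path


    def project_root() -> Path:
        """Walk upward until a directory containing pyproject.toml is found."""
        path = Path(__file__).resolve()
        while not (path / 'pyproject.toml').is_file():
            if path == path.parent:  # reached the filesystem root without finding the marker
                raise FileNotFoundError('pyproject.toml not found in any parent directory')
            path = path.parent
        return path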