diff --git a/tests/conftest.py b/tests/conftest.py
index c000d9f..c1da77f 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -143,6 +143,9 @@ def cleanup(self) -> None:
         self.ec2_client.revoke_security_group_ingress(**self.vpc_sg_rule_params)
 
     def delete(self) -> None:
+        # never used (AWS tests skipped)
+        if not hasattr(self, "rds_client"):
+            return
         self.rds_client.delete_db_instance(
             DBInstanceIdentifier=self.db_name,
             SkipFinalSnapshot=True,
@@ -189,20 +192,33 @@ def cleanup(self) -> bool:
         return True
 
     def delete(self) -> None:
+        # never used (AWS tests skipped)
+        if not hasattr(self, "s3_client"):
+            return
         exists = self.cleanup()
         if exists:
             self.s3_client.delete_bucket(Bucket=self.bucket_name)
 
 
 @pytest.fixture(scope="session")
-def rds_testing_instance() -> RDSTestingInstance:
-    return RDSTestingInstance("decodecloudintegrationtestsworkerapi")
+def rds_testing_instance() -> Generator[RDSTestingInstance, Any, None]:
+    # tests themselves must create the instance by calling instance.create();
+    # this way, if no test that needs the DB is run, no RDS instance is created
+    # instance.delete() only deletes the RDS instance if it was created
+    instance = RDSTestingInstance("decodecloudintegrationtestsworkerapi")
+    yield instance
+    instance.delete()
 
 
 @pytest.fixture(scope="session")
-def s3_testing_bucket() -> S3TestingBucket:
+def s3_testing_bucket() -> Generator[S3TestingBucket, Any, None]:
+    # tests themselves must create the bucket by calling bucket.create();
+    # this way, if no test that needs the bucket is run, no S3 bucket is created
+    # bucket.delete() only deletes the S3 bucket if it was created
     bucket_suffix = datetime.datetime.now(datetime.UTC).strftime("%Y%m%d%H%M%S")
-    return S3TestingBucket(bucket_suffix)
+    bucket = S3TestingBucket(bucket_suffix)
+    yield bucket
+    bucket.delete()
 
 
 @pytest.mark.aws
diff --git a/tests/integration/conftest.py b/tests/integration/conftest.py
index 8bb0c4c..fd58219 100644
--- a/tests/integration/conftest.py
+++ b/tests/integration/conftest.py
@@ -48,8 +48,8 @@ def env(
         s3_testing_bucket.create()
     yield env
     if env == "aws":
-        rds_testing_instance.delete()
-        s3_testing_bucket.delete()
+        rds_testing_instance.cleanup()
+        s3_testing_bucket.cleanup()
 
 
 @pytest.fixture(scope="session")
@@ -103,9 +103,6 @@ def queue(
         queue = RDSJobQueue(rds_testing_instance.db_url)
     queue.create(err_on_exists=True)
     yield queue
-    queue.delete()
-    if env == "aws":
-        rds_testing_instance.cleanup()
 
 
 @pytest.fixture(scope="session", autouse=True)
diff --git a/tests/integration/endpoints/test_jobs.py b/tests/integration/endpoints/test_jobs.py
index 275cad0..3243c44 100644
--- a/tests/integration/endpoints/test_jobs.py
+++ b/tests/integration/endpoints/test_jobs.py
@@ -9,6 +9,7 @@
 import requests
 from fastapi.testclient import TestClient
 
+from tests.conftest import RDSTestingInstance
 from tests.integration.endpoints.conftest import EndpointParams, _TestEndpoint
 from workerfacing_api.core.filesystem import FileSystem, LocalFilesystem, S3Filesystem
 from workerfacing_api.core.queue import RDSJobQueue
@@ -60,8 +61,16 @@ def passing_params(self) -> list[EndpointParams]:
         return [EndpointParams("get", params={"memory": 1})]
 
     @pytest.fixture(scope="function", autouse=True)
-    def cleanup_queue(self, queue: RDSJobQueue) -> None:
-        queue.delete()
+    def cleanup_queue(
+        self,
+        queue: RDSJobQueue,
+        env: str,
+        rds_testing_instance: RDSTestingInstance,
+    ) -> None:
+        if env == "local":
+            queue.delete()
+        else:
+            rds_testing_instance.cleanup()
         queue.create()
 
     @pytest.fixture(scope="function")
diff --git a/tests/unit/core/test_filesystem.py b/tests/unit/core/test_filesystem.py
index 504d11d..80f847e 100644
--- a/tests/unit/core/test_filesystem.py
+++ b/tests/unit/core/test_filesystem.py
@@ -240,7 +240,7 @@ def base_filesystem(
         yield S3Filesystem(
             s3_testing_bucket.s3_client, s3_testing_bucket.bucket_name
         )
-        s3_testing_bucket.delete()
+        s3_testing_bucket.cleanup()
 
     @pytest.fixture(scope="class", autouse=True)
     def data_file1(
diff --git a/tests/unit/core/test_queue.py b/tests/unit/core/test_queue.py
index 32d2b7d..5d527f7 100644
--- a/tests/unit/core/test_queue.py
+++ b/tests/unit/core/test_queue.py
@@ -168,6 +168,7 @@ def base_queue(
         queue_path = str(tmpdir_factory.mktemp("queue") / "queue.pkl")
         base_queue = LocalJobQueue(queue_path)
         yield base_queue
+        base_queue.delete()
 
 
 class TestSQSQueue(_TestJobQueue):
@@ -294,8 +295,7 @@ class TestRDSLocalQueue(_TestRDSQueue):
     def base_queue(
         self, tmpdir_factory: pytest.TempdirFactory
     ) -> Generator[RDSJobQueue, Any, None]:
-        base_queue = RDSJobQueue(f"sqlite:///{tmpdir_factory.mktemp('queue')}/local.db")
-        yield base_queue
+        yield RDSJobQueue(f"sqlite:///{tmpdir_factory.mktemp('queue')}/local.db")
 
 
 @pytest.mark.aws
@@ -304,5 +304,6 @@ class TestRDSAWSQueue(_TestRDSQueue):
     def base_queue(
         self, rds_testing_instance: RDSTestingInstance
    ) -> Generator[RDSJobQueue, Any, None]:
+        rds_testing_instance.create()
         yield RDSJobQueue(rds_testing_instance.db_url)
         rds_testing_instance.cleanup()
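
Note: the fixtures above follow a lazy create/teardown pattern: the session fixture only yields the helper object, a test that actually needs the AWS resource calls `create()` itself, and `delete()` is a no-op when `create()` never ran. Below is a minimal, self-contained sketch of that pattern; `LazyResource` and `lazy_resource` are illustrative stand-ins for `RDSTestingInstance` / `S3TestingBucket` and are not part of the diff.

```python
import pytest
from collections.abc import Generator
from typing import Any


class LazyResource:
    """Illustrative stand-in for RDSTestingInstance / S3TestingBucket."""

    def create(self) -> None:
        # The real helpers would provision the AWS resource here and keep
        # the boto3 client on the instance.
        self.client = object()

    def delete(self) -> None:
        # Mirrors the guard added in tests/conftest.py: if create() was never
        # called, there is no client attribute and tear-down is a no-op.
        if not hasattr(self, "client"):
            return
        del self.client


@pytest.fixture(scope="session")
def lazy_resource() -> Generator[LazyResource, Any, None]:
    # The session fixture only hands out the helper; a test that needs the
    # resource calls create() itself, so a run that skips the AWS-marked
    # tests never provisions (or tries to delete) anything.
    resource = LazyResource()
    yield resource
    resource.delete()


def test_uses_resource(lazy_resource: LazyResource) -> None:
    # The test, not the fixture, is responsible for creating the resource.
    lazy_resource.create()
    assert hasattr(lazy_resource, "client")
```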