Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion README.md
Original file line number Diff line number Diff line change
Expand Up @@ -82,7 +82,7 @@ Run them with `poetry run pytest`.

Note that tests marked with `aws` are skipped by default, to avoid the need for an AWS setup.
They are however run in the GitHub Action.
For this to work, they must have been run once locally with an account with sufficient permissions (`poetry run pytest -m "aws"`), since for security reasons, the AWS account used on GitHub does not have permissions to create RDS instances.
You can run them locally by adding `-m 'aws or not(aws)'` to the `pytest` command.

### Add/modify runnable applications
#### Dockerize the application
Expand Down
49 changes: 35 additions & 14 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@
REGION_NAME: BucketLocationConstraintType = "eu-central-1"


@pytest.fixture(scope="module")
@pytest.fixture(scope="session")
def monkeypatch_module() -> Generator[pytest.MonkeyPatch, Any, None]:
with pytest.MonkeyPatch.context() as mp:
yield mp
Expand All @@ -38,14 +38,16 @@ def enqueueing_func(monkeypatch_module: pytest.MonkeyPatch) -> MagicMock:
class RDSTestingInstance:
def __init__(self, db_name: str):
self.db_name = db_name

def create(self) -> None:
self.rds_client = boto3.client("rds", REGION_NAME)
self.ec2_client = boto3.client("ec2", REGION_NAME)
self.add_ingress_rule()
self.db_url = self.create_db_url()
self.engine = self.get_engine()
self.delete_db_tables()

@property
def engine(self) -> Engine:
def get_engine(self) -> Engine:
for _ in range(5):
try:
engine = create_engine(self.db_url)
Expand Down Expand Up @@ -117,12 +119,14 @@ def create_db_url(self) -> str:
DBName=self.db_name,
DBInstanceIdentifier=self.db_name,
AllocatedStorage=20,
DBInstanceClass="db.t3.micro",
DBInstanceClass="db.t4g.micro",
Engine="postgres",
MasterUsername=user,
MasterUserPassword=password,
DeletionProtection=False,
BackupRetentionPeriod=0,
MultiAZ=False,
EnablePerformanceInsights=False,
)
break
except self.rds_client.exceptions.DBInstanceAlreadyExistsFault:
Expand All @@ -143,19 +147,34 @@ def cleanup(self) -> None:
self.delete_db_tables()
self.ec2_client.revoke_security_group_ingress(**self.vpc_sg_rule_params)

def delete(self) -> None:
self.rds_client.delete_db_instance(
DBInstanceIdentifier=self.db_name,
SkipFinalSnapshot=True,
DeleteAutomatedBackups=True,
)


class S3TestingBucket:
def __init__(self, bucket_name_suffix: str):
# S3 bucket names must be globally unique - avoid collisions by adding suffix
self.bucket_name = f"{TEST_BUCKET_PREFIX}-{bucket_name_suffix}"
self.region_name: BucketLocationConstraintType = REGION_NAME

def create(self) -> None:
self.s3_client = boto3.client(
"s3",
region_name=self.region_name,
# required for pre-signing URLs to work
endpoint_url=f"https://s3.{self.region_name}.amazonaws.com",
)
self.initialize_bucket()
exists = self.cleanup()
if not exists:
self.s3_client.create_bucket(
Bucket=self.bucket_name,
CreateBucketConfiguration={"LocationConstraint": self.region_name},
)
self.s3_client.get_waiter("bucket_exists").wait(Bucket=self.bucket_name)

def cleanup(self) -> bool:
"""Returns True if bucket exists and all objects are deleted."""
Expand All @@ -174,19 +193,21 @@ def cleanup(self) -> bool:
s3_bucket.objects.all().delete()
return True

def initialize_bucket(self) -> None:
def delete(self) -> None:
exists = self.cleanup()
if not exists:
self.s3_client.create_bucket(
Bucket=self.bucket_name,
CreateBucketConfiguration={"LocationConstraint": self.region_name},
)
self.s3_client.get_waiter("bucket_exists").wait(Bucket=self.bucket_name)
if exists:
self.s3_client.delete_bucket(Bucket=self.bucket_name)


@pytest.fixture(scope="session")
def rds_testing_instance() -> RDSTestingInstance:
return RDSTestingInstance("decodecloudintegrationtestsuserapi")


@pytest.fixture(scope="session")
def bucket_suffix() -> str:
return datetime.datetime.now(datetime.UTC).strftime("%Y%m%d%H%M%S")
def s3_testing_bucket() -> S3TestingBucket:
bucket_suffix = datetime.datetime.now(datetime.UTC).strftime("%Y%m%d%H%M%S")
return S3TestingBucket(bucket_suffix)


@pytest.mark.aws
Expand Down
53 changes: 32 additions & 21 deletions tests/integration/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -31,50 +31,62 @@
from tests.conftest import REGION_NAME, RDSTestingInstance, S3TestingBucket


@pytest.fixture(scope="module")
@pytest.fixture(scope="session")
def username() -> str:
return "test_user"


@pytest.fixture(scope="module")
@pytest.fixture(scope="session")
def user_email() -> str:
return "user@example.com"


@pytest.fixture(scope="module")
@pytest.fixture(scope="session")
def base_user_dir() -> str:
return "test_user_dir"


@pytest.fixture(scope="module")
@pytest.fixture(scope="session")
def internal_api_key_secret() -> str:
return "test_internal_api_key"


@pytest.fixture(scope="module")
@pytest.fixture(scope="session")
def application() -> dict[str, str]:
return {"application": "app", "version": "latest", "entrypoint": "test"}


@pytest.fixture(
scope="module",
scope="session",
params=["local", pytest.param("aws", marks=pytest.mark.aws)],
)
def env(request: pytest.FixtureRequest) -> str:
return cast(str, request.param)
def env(
request: pytest.FixtureRequest,
rds_testing_instance: RDSTestingInstance,
s3_testing_bucket: S3TestingBucket,
) -> Generator[str, Any, None]:
env = cast(str, request.param)
if env == "aws":
rds_testing_instance.create()
s3_testing_bucket.create()
yield env
if env == "aws":
rds_testing_instance.delete()
s3_testing_bucket.delete()


@pytest.fixture
def db_session(env: str) -> Generator[Session, Any, None]:
def db_session(
env: str, rds_testing_instance: RDSTestingInstance
) -> Generator[Session, Any, None]:
if env == "local":
rel_test_db_path = "./test_app.db"
shutil.rmtree(rel_test_db_path, ignore_errors=True)
engine = create_engine(
f"sqlite:///{rel_test_db_path}", connect_args={"check_same_thread": False}
)
elif env == "aws":
rds_instance = RDSTestingInstance("decodecloudintegrationtestsuserapi")
engine = rds_instance.engine
engine = rds_testing_instance.engine
else:
raise NotImplementedError

Expand All @@ -85,15 +97,15 @@ def db_session(env: str) -> Generator[Session, Any, None]:
if env == "local":
os.remove(rel_test_db_path)
elif env == "aws":
rds_instance.cleanup()
rds_testing_instance.cleanup()


@pytest.fixture
def base_filesystem(
env: str,
base_user_dir: str,
monkeypatch_module: pytest.MonkeyPatch,
bucket_suffix: str,
s3_testing_bucket: S3TestingBucket,
) -> Generator[FileSystem, Any, None]:
if env == "local":
base_user_dir = f"./{base_user_dir}"
Expand All @@ -120,17 +132,16 @@ def base_filesystem(
shutil.rmtree(base_user_dir, ignore_errors=True)

elif env == "aws":
testing_bucket = S3TestingBucket(bucket_suffix)
# Update settings to use the actual unique bucket name created by S3TestingBucket
monkeypatch_module.setattr(
settings,
"s3_bucket",
testing_bucket.bucket_name,
s3_testing_bucket.bucket_name,
)
yield S3Filesystem(
base_user_dir, testing_bucket.s3_client, testing_bucket.bucket_name
base_user_dir, s3_testing_bucket.s3_client, s3_testing_bucket.bucket_name
)
testing_bucket.cleanup()
s3_testing_bucket.cleanup()

else:
raise NotImplementedError
Expand Down Expand Up @@ -163,7 +174,7 @@ def override_filesystem_dep(
)


@pytest.fixture(autouse=True, scope="module")
@pytest.fixture(autouse=True, scope="session")
def override_auth(
monkeypatch_module: pytest.MonkeyPatch, username: str, user_email: str
) -> None:
Expand All @@ -174,7 +185,7 @@ def override_auth(
)


@pytest.fixture(scope="module", autouse=True)
@pytest.fixture(scope="session", autouse=True)
def override_internal_api_key_secret(
monkeypatch_module: pytest.MonkeyPatch, internal_api_key_secret: str
) -> None:
Expand All @@ -185,7 +196,7 @@ def override_internal_api_key_secret(
)


@pytest.fixture(scope="module", autouse=True)
@pytest.fixture(scope="session", autouse=True)
def override_email_sender(monkeypatch_module: pytest.MonkeyPatch) -> None:
monkeypatch_module.setitem(
app.dependency_overrides, # type: ignore
Expand All @@ -194,7 +205,7 @@ def override_email_sender(monkeypatch_module: pytest.MonkeyPatch) -> None:
)


@pytest.fixture(scope="module", autouse=True)
@pytest.fixture(scope="session", autouse=True)
def override_application_config(
monkeypatch_module: pytest.MonkeyPatch, application: dict[str, str]
) -> None:
Expand Down
8 changes: 4 additions & 4 deletions tests/unit/core/test_filesystem.py
Original file line number Diff line number Diff line change
Expand Up @@ -263,15 +263,15 @@ def mock_aws_(self, request: pytest.FixtureRequest) -> bool:

@pytest.fixture(scope="class")
def filesystem(
self, base_dir: str, mock_aws_: bool, bucket_suffix: str
self, base_dir: str, mock_aws_: bool, s3_testing_bucket: S3TestingBucket
) -> Generator[S3Filesystem, Any, None]:
context_manager = mock_aws if mock_aws_ else nullcontext
with context_manager():
testing_bucket = S3TestingBucket(bucket_suffix)
s3_testing_bucket.create()
yield S3Filesystem(
base_dir, testing_bucket.s3_client, testing_bucket.bucket_name
base_dir, s3_testing_bucket.s3_client, s3_testing_bucket.bucket_name
)
testing_bucket.cleanup()
s3_testing_bucket.delete()

@pytest.fixture
def data_file1(
Expand Down