4 changes: 4 additions & 0 deletions .env
@@ -0,0 +1,4 @@
CRYPKIT_POSTGRES_HOST=localhost
CRYPKIT_POSTGRES_USERNAME=test
CRYPKIT_POSTGRES_PASSWORD=test
CRYPKIT_POSTGRES_DATABASE_NAME=test
6 changes: 3 additions & 3 deletions .github/workflows/python-app.yaml
@@ -12,7 +12,7 @@ permissions:
contents: read

jobs:
test:
unit-test:

runs-on: ubuntu-latest

@@ -32,5 +32,5 @@ jobs:
- name: Run linting
run: make lint

- name: Run tests
run: poetry run pytest
- name: Run unit tests
run: poetry run pytest tests/unit
2 changes: 1 addition & 1 deletion .gitignore
@@ -128,7 +128,7 @@ celerybeat.pid
*.sage.py

# Environments
.env
#.env
.venv
env/
venv/
19 changes: 18 additions & 1 deletion README.md
@@ -26,11 +26,28 @@ I analysed the assignment and decided to do the following things:
- Generate openapi documentation using FastAPI
- Think about further improvements

## Local setup

You need to install Poetry and then install the project dependencies with it:

```shell
pip install poetry
poetry install
```

## How to run tests

There are multiple test levels.

For unit tests you don't need anything special. Just run them in your IDE or with `poetry run pytest tests/unit`.

For integration tests you need the Docker Compose stack running, so run `docker compose up --build -d` before running them.

## Further improvements

We can do:

- Run integration tests in GitHub CI/CD using services
- Load testing with Locust (see the sketch after this list)
- Add authentication and authorization (for example with Keycloak and JWT tokens)
- Add rate limiting
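To make the Locust item above concrete, here is a minimal, hypothetical locustfile sketch; the `/cryptocurrencies` endpoint path and the `CrypkitUser` name are assumptions for illustration, not taken from this PR:

```python
# Hypothetical locustfile.py sketching the "Load testing with Locust" idea.
# The /cryptocurrencies path is an assumed endpoint name, not from the PR.
from locust import HttpUser, between, task


class CrypkitUser(HttpUser):
    wait_time = between(0.5, 2)  # simulated users pause 0.5-2 s between tasks

    @task
    def list_cryptocurrencies(self) -> None:
        self.client.get("/cryptocurrencies")
```

Assuming the API is served locally (port 8000 is also an assumption), it could be run against the compose stack with `locust -f locustfile.py --host http://localhost:8000`.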
105 changes: 105 additions & 0 deletions alembic.ini
@@ -0,0 +1,105 @@
# A generic, single database configuration.

[alembic]
# path to migration scripts
script_location = alembic

# template used to generate migration file names; The default value is %%(rev)s_%%(slug)s
# Uncomment the line below if you want the files to be prepended with date and time
# see https://alembic.sqlalchemy.org/en/latest/tutorial.html#editing-the-ini-file
# for all available tokens
# file_template = %%(year)d_%%(month).2d_%%(day).2d_%%(hour).2d%%(minute).2d-%%(rev)s_%%(slug)s

# sys.path path, will be prepended to sys.path if present.
# defaults to the current working directory.
prepend_sys_path = .

# timezone to use when rendering the date within the migration file
# as well as the filename.
# If specified, requires the python-dateutil library that can be
# installed by adding `alembic[tz]` to the pip requirements
# string value is passed to dateutil.tz.gettz()
# leave blank for localtime
# timezone =

# max length of characters to apply to the
# "slug" field
# truncate_slug_length = 40

# set to 'true' to run the environment during
# the 'revision' command, regardless of autogenerate
# revision_environment = false

# set to 'true' to allow .pyc and .pyo files without
# a source .py file to be detected as revisions in the
# versions/ directory
# sourceless = false

# version location specification; This defaults
# to alembic/versions. When using multiple version
# directories, initial revisions must be specified with --version-path.
# The path separator used here should be the separator specified by "version_path_separator" below.
# version_locations = %(here)s/bar:%(here)s/bat:alembic/versions

# version path separator; As mentioned above, this is the character used to split
# version_locations. The default within new alembic.ini files is "os", which uses os.pathsep.
# If this key is omitted entirely, it falls back to the legacy behavior of splitting on spaces and/or commas.
# Valid values for version_path_separator are:
#
# version_path_separator = :
# version_path_separator = ;
# version_path_separator = space
version_path_separator = os # Use os.pathsep. Default configuration used for new projects.

# the output encoding used when revision files
# are written from script.py.mako
# output_encoding = utf-8

;sqlalchemy.url = driver://user:pass@localhost/dbname


[post_write_hooks]
# post_write_hooks defines scripts or Python functions that are run
# on newly generated revision scripts. See the documentation for further
# detail and examples

# format using "black" - use the console_scripts runner, against the "black" entrypoint
# hooks = black
# black.type = console_scripts
# black.entrypoint = black
# black.options = -l 79 REVISION_SCRIPT_FILENAME

# Logging configuration
[loggers]
keys = root,sqlalchemy,alembic

[handlers]
keys = console

[formatters]
keys = generic

[logger_root]
level = WARN
handlers = console
qualname =

[logger_sqlalchemy]
level = WARN
handlers =
qualname = sqlalchemy.engine

[logger_alembic]
level = INFO
handlers =
qualname = alembic

[handler_console]
class = StreamHandler
args = (sys.stderr,)
level = NOTSET
formatter = generic

[formatter_generic]
format = %(levelname)-5.5s [%(name)s] %(message)s
datefmt = %H:%M:%S
1 change: 1 addition & 0 deletions alembic/README
@@ -0,0 +1 @@
Generic single-database configuration.
78 changes: 78 additions & 0 deletions alembic/env.py
@@ -0,0 +1,78 @@
import asyncio
from logging.config import fileConfig

from sqlalchemy import Connection
from sqlalchemy.ext.asyncio import create_async_engine

from alembic import context
from crypkit.adapters.driven.repository.config import PostgresSettings
from crypkit.adapters.driven.repository.schema import metadata

# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config

# Interpret the config file for Python logging.
# This line sets up loggers basically.
if config.config_file_name is not None:
fileConfig(config.config_file_name)

target_metadata = metadata


def run_migrations_offline() -> None:
"""Run migrations in 'offline' mode.

This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.

Calls to context.execute() here emit the given string to the
script output.

"""
settings = PostgresSettings()
context.configure(
url=settings.to_url(),
target_metadata=target_metadata,
literal_binds=True,
dialect_opts={"paramstyle": "named"},
)

with context.begin_transaction():
context.run_migrations()


def do_run_migrations(connection: Connection) -> None:
"""Execute migrations."""
context.configure(
connection=connection,
target_metadata=target_metadata,
compare_type=True,
)
with context.begin_transaction():
context.run_migrations()


async def run_migrations_online() -> None:
"""Run migrations in 'online' mode.

In this scenario we need to create an Engine
and associate a connection with the context.

"""
settings = PostgresSettings()

engine = create_async_engine(
url=settings.to_url(),
)

async with engine.connect() as conn:
await conn.run_sync(do_run_migrations)


if context.is_offline_mode():
run_migrations_offline()
else:
asyncio.run(run_migrations_online())
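With this `env.py` in place, migrations are normally applied from the command line with `alembic upgrade head`; a minimal programmatic equivalent (a sketch, not part of the PR) looks like:

```python
# Sketch: apply all pending migrations up to "head" using the repo's alembic.ini.
from alembic import command
from alembic.config import Config

alembic_cfg = Config("alembic.ini")  # picks up script_location = alembic
command.upgrade(alembic_cfg, "head")
```

Because `run_migrations_online()` calls `asyncio.run()`, this should be invoked from a synchronous context (a plain script or a make target), not from inside an already running event loop.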
24 changes: 24 additions & 0 deletions alembic/script.py.mako
@@ -0,0 +1,24 @@
"""${message}

Revision ID: ${up_revision}
Revises: ${down_revision | comma,n}
Create Date: ${create_date}

"""
from alembic import op
import sqlalchemy as sa
${imports if imports else ""}

# revision identifiers, used by Alembic.
revision = ${repr(up_revision)}
down_revision = ${repr(down_revision)}
branch_labels = ${repr(branch_labels)}
depends_on = ${repr(depends_on)}


def upgrade() -> None:
${upgrades if upgrades else "pass"}


def downgrade() -> None:
${downgrades if downgrades else "pass"}
36 changes: 36 additions & 0 deletions alembic/versions/0001_init.py
@@ -0,0 +1,36 @@
"""init

Revision ID: 97c3eeffdbe6
Revises:
Create Date: 2025-03-29 22:46:05.403489

"""

import sqlalchemy as sa
from sqlalchemy.dialects import postgresql

from alembic import op

# revision identifiers, used by Alembic.
revision = "97c3eeffdbe6"
down_revision = None
branch_labels = None
depends_on = None


def upgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.create_table(
"cryptocurrencies",
sa.Column("id", sa.UUID(), nullable=False),
sa.Column("symbol", sa.String(), nullable=False),
sa.Column("metadata", postgresql.JSONB(none_as_null=True, astext_type=sa.Text()), nullable=False),
sa.PrimaryKeyConstraint("id"),
)
# ### end Alembic commands ###


def downgrade() -> None:
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table("cryptocurrencies")
# ### end Alembic commands ###
File renamed without changes.
Empty file.
Empty file.
16 changes: 16 additions & 0 deletions crypkit/adapters/driven/repository/config.py
@@ -0,0 +1,16 @@
from pydantic_settings import BaseSettings, SettingsConfigDict


class PostgresSettings(BaseSettings):
"""Initialize Postgres configuration."""

host: str
port: int = 5432
username: str
password: str
database_name: str
max_connections: int = 5
model_config = SettingsConfigDict(env_prefix="CRYPKIT_POSTGRES_")

def to_url(self) -> str:
return f"postgresql+asyncpg://{self.username}:{self.password}@{self.host}:{self.port}/{self.database_name}"
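As a quick illustration (a sketch, not part of the PR) of how these settings resolve, assuming the `CRYPKIT_POSTGRES_*` variables from the `.env` file above are exported:

```python
# Sketch: PostgresSettings reads CRYPKIT_POSTGRES_* variables (per env_prefix)
# and builds the asyncpg URL. The values mirror the .env file added in this PR.
import os

from crypkit.adapters.driven.repository.config import PostgresSettings

os.environ.update(
    {
        "CRYPKIT_POSTGRES_HOST": "localhost",
        "CRYPKIT_POSTGRES_USERNAME": "test",
        "CRYPKIT_POSTGRES_PASSWORD": "test",
        "CRYPKIT_POSTGRES_DATABASE_NAME": "test",
    }
)

settings = PostgresSettings()  # port and max_connections fall back to their defaults
print(settings.to_url())
# postgresql+asyncpg://test:test@localhost:5432/test
```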
12 changes: 12 additions & 0 deletions crypkit/adapters/driven/repository/schema.py
@@ -0,0 +1,12 @@
from sqlalchemy import Column, MetaData, String, Table
from sqlalchemy.dialects.postgresql import JSONB, UUID

metadata = MetaData()

CryptoCurrencyTable = Table(
"cryptocurrencies",
metadata,
Column("id", UUID, nullable=False, primary_key=True),
Column("symbol", String, nullable=False),
Column("metadata", JSONB(none_as_null=True), nullable=False),
)
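For context, here is a minimal sketch (not part of the PR, and assuming SQLAlchemy 2.x with asyncpg and a running Postgres from the compose stack) of how this table could be used through the async engine:

```python
# Sketch: insert and read back a row from CryptoCurrencyTable via SQLAlchemy Core.
import asyncio
import uuid

from sqlalchemy import insert, select
from sqlalchemy.ext.asyncio import create_async_engine

from crypkit.adapters.driven.repository.config import PostgresSettings
from crypkit.adapters.driven.repository.schema import CryptoCurrencyTable


async def main() -> None:
    engine = create_async_engine(PostgresSettings().to_url())
    async with engine.begin() as conn:  # single transaction, committed on exit
        await conn.execute(
            insert(CryptoCurrencyTable).values(
                id=uuid.uuid4(),  # assumes the 2.x UUID type accepting uuid objects
                symbol="BTC",
                metadata={"name": "Bitcoin"},  # stored in the JSONB column
            )
        )
        rows = (await conn.execute(select(CryptoCurrencyTable))).all()
        print(rows)
    await engine.dispose()


asyncio.run(main())
```

The repository adapter in the PR presumably wraps this kind of Core usage behind its port interface; the snippet only shows the table in isolation.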