Skip to content

Commit

Permalink
[DOP-19992] - split settings to different classes
Browse files Browse the repository at this point in the history
  • Loading branch information
maxim-lixakov committed Nov 20, 2024
1 parent 9ede4d5 commit 77d3109
Show file tree
Hide file tree
Showing 15 changed files with 85 additions and 29 deletions.
14 changes: 10 additions & 4 deletions .env.docker
Original file line number Diff line number Diff line change
Expand Up @@ -7,14 +7,19 @@ SYNCMASTER__SERVER__DEBUG=true
# Logging
SYNCMASTER__LOGGING__SETUP=True
SYNCMASTER__LOGGING__PRESET=colored
SYNCMASTER__LOG_URL_TEMPLATE=https://grafana.example.com?correlation_id={{ correlation_id }}&run_id={{ run.id }}

# Session
SYNCMASTER__SERVER__SESSION__SECRET_KEY=session_secret_key

# Encrypt / Decrypt credentials data
SYNCMASTER__CRYPTO_KEY=UBgPTioFrtH2unlC4XFDiGf5sYfzbdSf_VgiUSaQc94=

# Worker settings
SYNCMASTER__WORKER__LOG_URL_TEMPLATE=https://grafana.example.com?correlation_id={{ correlation_id }}&run_id={{ run.id }}

# Scheduler settings
SYNCMASTER__SCHEDULER__TRANSFER_FETCHING_TIMEOUT_SECONDS=200

# Session
SYNCMASTER__SERVER__SESSION__SECRET_KEY=session_secret_key

# Postgres
SYNCMASTER__DATABASE__URL=postgresql+asyncpg://syncmaster:changeme@db:5432/syncmaster

Expand All @@ -26,6 +31,7 @@ SYNCMASTER__AUTH__CLIENT_ID=manually_created
SYNCMASTER__AUTH__CLIENT_SECRET=generated_by_keycloak
SYNCMASTER__AUTH__REDIRECT_URI=http://localhost:8000/v1/auth/callback
SYNCMASTER__AUTH__SCOPE=email
SYNCMASTER__AUTH__VERIFY_SSL=False
SYNCMASTER__AUTH__PROVIDER=syncmaster.backend.providers.auth.keycloak_provider.KeycloakAuthProvider

# Dummy Auth
Expand Down
8 changes: 7 additions & 1 deletion .env.local
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,12 @@ export SYNCMASTER__SERVER__DEBUG=true
# Logging
export SYNCMASTER__LOGGING__SETUP=True
export SYNCMASTER__LOGGING__PRESET=colored
export SYNCMASTER__LOG_URL_TEMPLATE="https://grafana.example.com?correlation_id={{ correlation_id }}&run_id={{ run.id }}"

# Worker settings
export SYNCMASTER__WORKER__LOG_URL_TEMPLATE="https://grafana.example.com?correlation_id={{ correlation_id }}&run_id={{ run.id }}"

# Scheduler settings
export SYNCMASTER__SCHEDULER__TRANSFER_FETCHING_TIMEOUT_SECONDS=200

# Session
export SYNCMASTER__SERVER__SESSION__SECRET_KEY=session_secret_key
Expand All @@ -25,6 +30,7 @@ export SYNCMASTER__AUTH__CLIENT_ID=manually_created
export SYNCMASTER__AUTH__CLIENT_SECRET=generated_by_keycloak
export SYNCMASTER__AUTH__REDIRECT_URI=http://localhost:8000/auth/callback
export SYNCMASTER__AUTH__SCOPE=email
export SYNCMASTER__AUTH__VERIFY_SSL=False
export SYNCMASTER__AUTH__PROVIDER=syncmaster.backend.providers.auth.keycloak_provider.KeycloakAuthProvider

# Dummy Auth
Expand Down
2 changes: 1 addition & 1 deletion .readthedocs.yaml
Original file line number Diff line number Diff line change
Expand Up @@ -17,7 +17,7 @@ build:
- VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH python -m poetry install --no-root --all-extras --with docs --without dev,test
- VIRTUAL_ENV=$READTHEDOCS_VIRTUALENV_PATH python -m poetry show -v
- python -m pip list -v
- SYNCMASTER__DATABASE__URL=postgresql+psycopg://fake:fake@127.0.0.1:5432/fake SYNCMASTER__SERVER__SESSION__SECRET_KEY=session_secret_key SYNCMASTER__BROKER__URL=amqp://fake:faket@fake:5672/ SYNCMASTER__CRYPTO_KEY=crypto_key SYNCMASTER__AUTH__ACCESS_TOKEN__SECRET_KEY=fakepython python -m syncmaster.backend.export_openapi_schema docs/_static/openapi.json
- SYNCMASTER__DATABASE__URL=postgresql+psycopg://fake:fake@127.0.0.1:5432/fake SYNCMASTER__SERVER__SESSION__SECRET_KEY=session_secret_key SYNCMASTER__BROKER__URL=amqp://fake:faket@fake:5672/ SYNCMASTER__CRYPTO_KEY=crypto_key SYNCMASTER__AUTH__ACCESS_TOKEN__SECRET_KEY=fakepython python -m syncmaster.backend.export_openapi_schema docs/_static/openapi.json

sphinx:
configuration: docs/conf.py
2 changes: 1 addition & 1 deletion docker/Dockerfile.worker
Original file line number Diff line number Diff line change
Expand Up @@ -39,7 +39,7 @@ COPY ./syncmaster/ /app/syncmaster/

FROM base as test

Check warning on line 40 in docker/Dockerfile.worker

View workflow job for this annotation

GitHub Actions / Oracle tests / Run Oracle tests

The 'as' keyword should match the case of the 'from' keyword

FromAsCasing: 'as' and 'FROM' keywords' casing do not match More info: https://docs.docker.com/go/dockerfile/rule/from-as-casing/

Check warning on line 40 in docker/Dockerfile.worker

View workflow job for this annotation

GitHub Actions / HDFS tests / Run HDFS tests

The 'as' keyword should match the case of the 'from' keyword

FromAsCasing: 'as' and 'FROM' keywords' casing do not match More info: https://docs.docker.com/go/dockerfile/rule/from-as-casing/

Check warning on line 40 in docker/Dockerfile.worker

View workflow job for this annotation

GitHub Actions / S3 tests / Run S3 tests

The 'as' keyword should match the case of the 'from' keyword

FromAsCasing: 'as' and 'FROM' keywords' casing do not match More info: https://docs.docker.com/go/dockerfile/rule/from-as-casing/

Check warning on line 40 in docker/Dockerfile.worker

View workflow job for this annotation

GitHub Actions / Hive tests / Run Hive tests

The 'as' keyword should match the case of the 'from' keyword

FromAsCasing: 'as' and 'FROM' keywords' casing do not match More info: https://docs.docker.com/go/dockerfile/rule/from-as-casing/

Check warning on line 40 in docker/Dockerfile.worker

View workflow job for this annotation

GitHub Actions / Scheduler tests / Run Scheduler tests

The 'as' keyword should match the case of the 'from' keyword

FromAsCasing: 'as' and 'FROM' keywords' casing do not match More info: https://docs.docker.com/go/dockerfile/rule/from-as-casing/

ENV SYNCMASTER__CREATE_SPARK_SESSION_FUNCTION=tests.spark.get_worker_spark_session
ENV SYNCMASTER__WORKER__CREATE_SPARK_SESSION_FUNCTION=tests.spark.get_worker_spark_session

# CI runs tests in the worker container, so we need backend dependencies too
RUN poetry install --no-root --all-extras --with test --without docs,dev
Expand Down
2 changes: 1 addition & 1 deletion syncmaster/backend/api/v1/runs.py
Original file line number Diff line number Diff line change
Expand Up @@ -167,5 +167,5 @@ async def stop_run(

async with unit_of_work:
run = await unit_of_work.run.stop(run_id=run_id)
# TODO: add immdiate stop transfer after stop Run
# TODO: add immediate stop transfer after stop Run
return ReadRunSchema.from_orm(run)
4 changes: 2 additions & 2 deletions syncmaster/scheduler/__main__.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,7 +3,7 @@
import asyncio
import logging

from syncmaster.backend.settings import BackendSettings as Settings
from syncmaster.scheduler.settings import SchedulerSettings as Settings
from syncmaster.scheduler.transfer_fetcher import TransferFetcher
from syncmaster.scheduler.transfer_job_manager import TransferJobManager

Expand All @@ -30,7 +30,7 @@ async def main():
transfer_fetcher.last_updated_at = max(t.updated_at for t in transfers)
logger.info("Scheduler state has been updated. Last updated at: %s", transfer_fetcher.last_updated_at)

await asyncio.sleep(settings.SCHEDULER_TRANSFER_FETCHING_TIMEOUT)
await asyncio.sleep(settings.TRANSFER_FETCHING_TIMEOUT_SECONDS)


if __name__ == "__main__":
Expand Down
28 changes: 28 additions & 0 deletions syncmaster/scheduler/settings/__init__.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,28 @@
# SPDX-FileCopyrightText: 2023-2024 MTS PJSC
# SPDX-License-Identifier: Apache-2.0
from pydantic import Field

from syncmaster.settings import SyncmasterSettings


class SchedulerSettings(SyncmasterSettings):
    """Scheduler-specific settings (transfer polling and job scheduling).

    Values can be overridden via environment variables; each field's
    ``alias`` defines the variable name (prefixed with ``SYNCMASTER__``
    by the base settings class — TODO confirm prefix handling in
    ``SyncmasterSettings``).

    Examples
    --------

    .. code-block:: bash

        # NOTE: variable names must include the _SECONDS suffix to match the aliases below
        SYNCMASTER__SCHEDULER__TRANSFER_FETCHING_TIMEOUT_SECONDS=200
        SYNCMASTER__SCHEDULER__MISFIRE_GRACE_TIME_SECONDS=300
    """

    # Pause between consecutive fetches of transfers from the database.
    TRANSFER_FETCHING_TIMEOUT_SECONDS: int = Field(
        180,
        description="Timeout for fetching transfers in seconds",
        alias="SCHEDULER__TRANSFER_FETCHING_TIMEOUT_SECONDS",
    )
    # How late a scheduled job may fire before it is skipped
    # (passed as misfire_grace_time when jobs are added/modified).
    MISFIRE_GRACE_TIME_SECONDS: int = Field(
        300,
        description="Grace time for misfired jobs in seconds",
        alias="SCHEDULER__MISFIRE_GRACE_TIME_SECONDS",
    )
2 changes: 1 addition & 1 deletion syncmaster/scheduler/transfer_fetcher.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,8 +2,8 @@
# SPDX-License-Identifier: Apache-2.0
from sqlalchemy import select

from syncmaster.backend.settings import BackendSettings as Settings
from syncmaster.db.models import Transfer
from syncmaster.scheduler.settings import SchedulerSettings as Settings
from syncmaster.scheduler.utils import get_async_session


Expand Down
6 changes: 3 additions & 3 deletions syncmaster/scheduler/transfer_job_manager.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,9 +7,9 @@
from kombu.exceptions import KombuError

from syncmaster.backend.services.unit_of_work import UnitOfWork
from syncmaster.backend.settings import BackendSettings as Settings
from syncmaster.db.models import RunType, Status, Transfer
from syncmaster.exceptions.run import CannotConnectToTaskQueueError
from syncmaster.scheduler.settings import SchedulerSettings as Settings
from syncmaster.scheduler.utils import get_async_session
from syncmaster.schemas.v1.connections.connection import ReadAuthDataSchema
from syncmaster.worker.config import celery
Expand All @@ -35,15 +35,15 @@ def update_jobs(self, transfers: list[Transfer]) -> None:
self.scheduler.modify_job(
job_id=job_id,
trigger=CronTrigger.from_crontab(transfer.schedule),
misfire_grace_time=self.settings.SCHEDULER_MISFIRE_GRACE_TIME,
misfire_grace_time=self.settings.MISFIRE_GRACE_TIME_SECONDS,
args=(transfer.id,),
)
else:
self.scheduler.add_job(
func=TransferJobManager.send_job_to_celery,
id=job_id,
trigger=CronTrigger.from_crontab(transfer.schedule),
misfire_grace_time=self.settings.SCHEDULER_MISFIRE_GRACE_TIME,
misfire_grace_time=self.settings.MISFIRE_GRACE_TIME_SECONDS,
args=(transfer.id,),
)

Expand Down
2 changes: 1 addition & 1 deletion syncmaster/scheduler/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
# SPDX-License-Identifier: Apache-2.0
from sqlalchemy.ext.asyncio import AsyncSession, async_sessionmaker, create_async_engine

from syncmaster.backend.settings import BackendSettings as Settings
from syncmaster.scheduler.settings import SchedulerSettings as Settings


def get_async_session(settings: Settings) -> AsyncSession:
Expand Down
6 changes: 1 addition & 5 deletions syncmaster/settings/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -41,12 +41,8 @@ class SyncmasterSettings(BaseSettings):
SYNCMASTER__SERVER__DEBUG=True
"""

crypto_key: str

# TODO: move settings to corresponding classes (scheduler also)
TZ: str = "UTC"
SCHEDULER_TRANSFER_FETCHING_TIMEOUT: int = 180 # seconds
SCHEDULER_MISFIRE_GRACE_TIME: int = 300 # seconds
crypto_key: str = Field(description="Key for encrypt/decrypt credentials data")

database: DatabaseSettings = Field(description=":ref:`Database settings <backend-configuration-database>`")
broker: RabbitMQSettings = Field(description=":ref:`Broker settings <backend-configuration-broker>`")
Expand Down
22 changes: 17 additions & 5 deletions syncmaster/worker/settings/__init__.py
Original file line number Diff line number Diff line change
@@ -1,5 +1,6 @@
# SPDX-FileCopyrightText: 2023-2024 MTS PJSC
# SPDX-License-Identifier: Apache-2.0
from pydantic import Field
from pydantic.types import ImportString

from syncmaster.settings import SyncmasterSettings
Expand All @@ -13,10 +14,21 @@ class WorkerSettings(SyncmasterSettings):
.. code-block:: bash
SYNCMASTER__WORKER__LOGGING__PRESET=colored
SYNCMASTER__WORKER__LOG_URL_TEMPLATE="https://grafana.example.com?correlation_id={{ correlation_id }}&run_id={{ run.id }}"
"""

CORRELATION_CELERY_HEADER_ID: str = "CORRELATION_CELERY_HEADER_ID"
LOG_URL_TEMPLATE: str = ""

CREATE_SPARK_SESSION_FUNCTION: ImportString = "syncmaster.worker.spark.get_worker_spark_session"
LOG_URL_TEMPLATE: str = Field(
"",
description="URL template for logging",
alias="SYNCMASTER__WORKER__LOG_URL_TEMPLATE",
)
CORRELATION_CELERY_HEADER_ID: str = Field(
"CORRELATION_CELERY_HEADER_ID",
description="Header ID for correlation in Celery",
alias="SYNCMASTER__WORKER__CORRELATION_CELERY_HEADER_ID",
)
CREATE_SPARK_SESSION_FUNCTION: ImportString = Field(
"syncmaster.worker.spark.get_worker_spark_session",
description="Function to create Spark session for worker",
alias="SYNCMASTER__WORKER__CREATE_SPARK_SESSION_FUNCTION",
)
6 changes: 6 additions & 0 deletions tests/conftest.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
from syncmaster.backend.settings.auth.jwt import JWTSettings
from syncmaster.backend.utils.jwt import sign_jwt
from syncmaster.db.models import Base
from syncmaster.scheduler.settings import SchedulerSettings
from tests.mocks import UserTestRoles
from tests.settings import TestSettings
from tests.utils import prepare_new_database, run_async_migrations
Expand Down Expand Up @@ -70,6 +71,11 @@ def settings(request: pytest.FixtureRequest) -> Settings:
return Settings.parse_obj(request.param)


@pytest.fixture(scope="session", params=[{}])
def scheduler_settings(request: pytest.FixtureRequest) -> SchedulerSettings:
    """Session-scoped :class:`SchedulerSettings` built from the fixture param dict."""
    raw_settings = request.param
    return SchedulerSettings.parse_obj(raw_settings)


@pytest.fixture(scope="session")
def test_settings():
return TestSettings()
Expand Down
1 change: 0 additions & 1 deletion tests/test_unit/test_connections/test_delete_connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -38,7 +38,6 @@ async def test_maintainer_plus_can_delete_connection(
assert deleted_connection.is_deleted


# TODO: rename tests with simple_user to new group role name
async def test_groupless_user_cannot_delete_connection(
client: AsyncClient,
group_connection: MockConnection,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -4,13 +4,16 @@
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from syncmaster.backend.settings import BackendSettings as Settings
from syncmaster.scheduler.settings import SchedulerSettings as Settings
from syncmaster.scheduler.transfer_job_manager import TransferJobManager


@pytest_asyncio.fixture
async def transfer_job_manager(session: AsyncSession, settings: Settings) -> AsyncGenerator[TransferJobManager, None]:
transfer_job_manager = TransferJobManager(settings)
async def transfer_job_manager(
session: AsyncSession,
scheduler_settings: Settings,
) -> AsyncGenerator[TransferJobManager, None]:
transfer_job_manager = TransferJobManager(scheduler_settings)
transfer_job_manager.scheduler.start()

yield transfer_job_manager
Expand Down

0 comments on commit 77d3109

Please sign in to comment.