Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[DOP-19900] Add MSSQL API schema #125

Merged
merged 2 commits into from
Nov 12, 2024
Merged
Show file tree
Hide file tree
Changes from 1 commit
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions README.rst
Original file line number Diff line number Diff line change
Expand Up @@ -36,6 +36,7 @@ List of currently supported connections:
* Clickhouse
* Postgres
* Oracle
* MSSQL
* HDFS
* S3

Expand Down
1 change: 1 addition & 0 deletions docs/changelog/next_release/125.feature.rst
Original file line number Diff line number Diff line change
@@ -0,0 +1 @@
Add MSSQL API schema
3 changes: 2 additions & 1 deletion syncmaster/backend/api/v1/connections.py
Original file line number Diff line number Diff line change
Expand Up @@ -21,6 +21,7 @@
CLICKHOUSE_TYPE,
HDFS_TYPE,
HIVE_TYPE,
MSSQL_TYPE,
ORACLE_TYPE,
POSTGRES_TYPE,
S3_TYPE,
Expand All @@ -38,7 +39,7 @@

router = APIRouter(tags=["Connections"], responses=get_error_responses())

CONNECTION_TYPES = ORACLE_TYPE, POSTGRES_TYPE, CLICKHOUSE_TYPE, HIVE_TYPE, S3_TYPE, HDFS_TYPE
CONNECTION_TYPES = ORACLE_TYPE, POSTGRES_TYPE, CLICKHOUSE_TYPE, HIVE_TYPE, MSSQL_TYPE, S3_TYPE, HDFS_TYPE


@router.get("/connections")
Expand Down
2 changes: 2 additions & 0 deletions syncmaster/schemas/v1/connection_types.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
ORACLE_TYPE = Literal["oracle"]
POSTGRES_TYPE = Literal["postgres"]
CLICKHOUSE_TYPE = Literal["clickhouse"]
MSSQL_TYPE = Literal["mssql"]
S3_TYPE = Literal["s3"]
HDFS_TYPE = Literal["hdfs"]

Expand All @@ -16,5 +17,6 @@ class ConnectionType(str, Enum):
HIVE = "hive"
ORACLE = "oracle"
CLICKHOUSE = "clickhouse"
MSSQL = "mssql"
S3 = "s3"
HDFS = "hdfs"
14 changes: 14 additions & 0 deletions syncmaster/schemas/v1/connections/connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -27,6 +27,14 @@
UpdateHiveAuthSchema,
UpdateHiveConnectionSchema,
)
from syncmaster.schemas.v1.connections.mssql import (
CreateMSSQLAuthSchema,
CreateMSSQLConnectionSchema,
ReadMSSQLAuthSchema,
ReadMSSQLConnectionSchema,
UpdateMSSQLAuthSchema,
UpdateMSSQLConnectionSchema,
)
from syncmaster.schemas.v1.connections.oracle import (
CreateOracleAuthSchema,
CreateOracleConnectionSchema,
Expand Down Expand Up @@ -60,13 +68,15 @@
| ReadOracleConnectionSchema
| ReadPostgresConnectionSchema
| ReadClickhouseConnectionSchema
| ReadMSSQLConnectionSchema
| S3ReadConnectionSchema
)
CreateConnectionDataSchema = (
CreateHiveConnectionSchema
| CreateOracleConnectionSchema
| CreatePostgresConnectionSchema
| CreateClickhouseConnectionSchema
| CreateMSSQLConnectionSchema
| HDFSCreateConnectionSchema
| S3CreateConnectionSchema
)
Expand All @@ -77,12 +87,14 @@
| UpdateOracleConnectionSchema
| UpdatePostgresConnectionSchema
| UpdateClickhouseConnectionSchema
| UpdateMSSQLConnectionSchema
)
ReadConnectionAuthDataSchema = (
ReadHiveAuthSchema
| ReadOracleAuthSchema
| ReadPostgresAuthSchema
| ReadClickhouseAuthSchema
| ReadMSSQLAuthSchema
| S3ReadAuthSchema
| HDFSReadAuthSchema
)
Expand All @@ -91,6 +103,7 @@
| CreateOracleAuthSchema
| CreatePostgresAuthSchema
| CreateClickhouseAuthSchema
| CreateMSSQLAuthSchema
| S3CreateAuthSchema
| HDFSCreateAuthSchema
)
Expand All @@ -99,6 +112,7 @@
| UpdateOracleAuthSchema
| UpdatePostgresAuthSchema
| UpdateClickhouseAuthSchema
| UpdateMSSQLAuthSchema
| S3UpdateAuthSchema
| HDFSUpdateAuthSchema
)
Expand Down
47 changes: 47 additions & 0 deletions syncmaster/schemas/v1/connections/mssql.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
# SPDX-FileCopyrightText: 2023-2024 MTS PJSC
# SPDX-License-Identifier: Apache-2.0
from pydantic import BaseModel, Field, SecretStr

from syncmaster.schemas.v1.connection_types import MSSQL_TYPE


class MSSQLBaseSchema(BaseModel):
    """Common base for all MSSQL API schemas.

    Pins the discriminator field ``type`` to the literal ``"mssql"`` so pydantic
    can route payloads in the tagged unions built from these schemas.
    """

    type: MSSQL_TYPE

    class Config:
        # Allow building the schema from ORM/attribute objects, not only dicts.
        # NOTE(review): legacy `class Config` style — pydantic v2 prefers
        # `model_config = ConfigDict(from_attributes=True)`; kept as-is to match
        # the rest of the file. TODO confirm project pydantic version.
        from_attributes = True


class ReadMSSQLConnectionSchema(MSSQLBaseSchema):
    """Connection data returned to API clients for an MSSQL connection."""

    host: str
    port: int
    database: str
    # Extra driver options; defaults to an empty dict when none were stored.
    additional_params: dict = Field(default_factory=dict)


class ReadMSSQLAuthSchema(MSSQLBaseSchema):
    """Auth data returned to API clients — exposes only the user name, never the password."""

    user: str


class UpdateMSSQLConnectionSchema(MSSQLBaseSchema):
    """Partial-update payload for an MSSQL connection; all fields are optional."""

    host: str | None = None
    port: int | None = None
    database: str | None = None
    # NOTE(review): inconsistent with the sibling fields — the annotation allows
    # None but the default is {} via default_factory, so an omitted field arrives
    # as {} rather than None. If the service distinguishes "not provided" from
    # "reset to empty", this should probably default to None — TODO confirm
    # against the update handler before changing.
    additional_params: dict | None = Field(default_factory=dict)


class UpdateMSSQLAuthSchema(MSSQLBaseSchema):
    """Partial-update payload for MSSQL credentials; both fields are optional."""

    # Removed the spurious `# noqa: F722` — F722 flags syntax errors inside
    # forward-reference (string) annotations and can never fire on a plain
    # `str | None` annotation, so the suppression was dead weight.
    user: str | None = None
    # SecretStr keeps the password masked in repr()/logs.
    password: SecretStr | None = None


class CreateMSSQLConnectionSchema(MSSQLBaseSchema):
    """Payload for creating a new MSSQL connection; host, port and database are required."""

    host: str
    port: int
    database: str
    # Extra driver options; optional — defaults to an empty dict.
    additional_params: dict = Field(default_factory=dict)


class CreateMSSQLAuthSchema(MSSQLBaseSchema):
    """Credentials payload for creating an MSSQL connection."""

    user: str
    # SecretStr keeps the password masked in repr()/logs.
    password: SecretStr
7 changes: 7 additions & 0 deletions syncmaster/schemas/v1/transfers/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -9,6 +9,7 @@
from syncmaster.schemas.v1.transfers.db import (
ClickhouseReadTransferSourceAndTarget,
HiveReadTransferSourceAndTarget,
MSSQLReadTransferSourceAndTarget,
OracleReadTransferSourceAndTarget,
PostgresReadTransferSourceAndTarget,
)
Expand All @@ -33,6 +34,7 @@
| HiveReadTransferSourceAndTarget
| OracleReadTransferSourceAndTarget
| ClickhouseReadTransferSourceAndTarget
| MSSQLReadTransferSourceAndTarget
| S3ReadTransferSource
)

Expand All @@ -42,6 +44,7 @@
| HiveReadTransferSourceAndTarget
| OracleReadTransferSourceAndTarget
| ClickhouseReadTransferSourceAndTarget
| MSSQLReadTransferSourceAndTarget
| S3ReadTransferTarget
)

Expand All @@ -51,6 +54,7 @@
| HiveReadTransferSourceAndTarget
| OracleReadTransferSourceAndTarget
| ClickhouseReadTransferSourceAndTarget
| MSSQLReadTransferSourceAndTarget
| S3CreateTransferSource
)

Expand All @@ -60,6 +64,7 @@
| HiveReadTransferSourceAndTarget
| OracleReadTransferSourceAndTarget
| ClickhouseReadTransferSourceAndTarget
| MSSQLReadTransferSourceAndTarget
| S3CreateTransferTarget
)

Expand All @@ -69,6 +74,7 @@
| HiveReadTransferSourceAndTarget
| OracleReadTransferSourceAndTarget
| ClickhouseReadTransferSourceAndTarget
| MSSQLReadTransferSourceAndTarget
| S3CreateTransferSource
| None
)
Expand All @@ -79,6 +85,7 @@
| HiveReadTransferSourceAndTarget
| OracleReadTransferSourceAndTarget
| ClickhouseReadTransferSourceAndTarget
| MSSQLReadTransferSourceAndTarget
| S3CreateTransferTarget
| None
)
Expand Down
5 changes: 5 additions & 0 deletions syncmaster/schemas/v1/transfers/db.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,6 +7,7 @@
from syncmaster.schemas.v1.connection_types import (
CLICKHOUSE_TYPE,
HIVE_TYPE,
MSSQL_TYPE,
ORACLE_TYPE,
POSTGRES_TYPE,
)
Expand All @@ -30,3 +31,7 @@ class PostgresReadTransferSourceAndTarget(ReadDBTransfer):

class ClickhouseReadTransferSourceAndTarget(ReadDBTransfer):
type: CLICKHOUSE_TYPE


class MSSQLReadTransferSourceAndTarget(ReadDBTransfer):
    """Transfer source/target params for MSSQL; narrows the union discriminator ``type`` to "mssql"."""

    type: MSSQL_TYPE
Original file line number Diff line number Diff line change
@@ -1,3 +1,5 @@
from collections.abc import AsyncGenerator

import pytest_asyncio
from sqlalchemy.ext.asyncio import AsyncSession

Expand All @@ -7,7 +9,10 @@


@pytest_asyncio.fixture
async def group_connections(group_connection: MockConnection, session: AsyncSession) -> list[MockConnection]:
async def group_connections(
group_connection: MockConnection,
session: AsyncSession,
) -> AsyncGenerator[list[MockConnection], None]:
connection = group_connection.connection

# start with the connection from group_connection fixture
Expand All @@ -17,13 +22,30 @@ async def group_connections(group_connection: MockConnection, session: AsyncSess
# since group_connection already created a connection, we start from index 1
for conn_type in connection_types[1:]:

new_data = { # TODO: create different dicts
new_data = {
**connection.data,
"type": conn_type.value,
"cluster": "cluster",
"bucket": "bucket",
}

if conn_type in [ConnectionType.HDFS, ConnectionType.HIVE]:
new_data.update(
{
"cluster": "cluster",
},
)
elif conn_type == ConnectionType.S3:
new_data.update(
{
"bucket": "bucket",
},
)
elif conn_type in [ConnectionType.ORACLE, ConnectionType.CLICKHOUSE, ConnectionType.MSSQL]:
IlyasDevelopment marked this conversation as resolved.
Show resolved Hide resolved
new_data.update(
{
"database": "database",
},
)

new_connection = Connection(
group_id=connection.group_id,
name=f"{connection.name}_{conn_type.value}",
Expand Down
4 changes: 2 additions & 2 deletions tests/test_unit/test_connections/test_create_connection.py
Original file line number Diff line number Diff line change
Expand Up @@ -282,7 +282,7 @@ async def test_check_fields_validation_on_create_connection(
"context": {
"discriminator": "'type'",
"tag": "POSTGRESQL",
"expected_tags": "'hive', 'oracle', 'postgres', 'clickhouse', 'hdfs', 's3'",
"expected_tags": "'hive', 'oracle', 'postgres', 'clickhouse', 'mssql', 'hdfs', 's3'",
},
"input": {
"type": "POSTGRESQL",
Expand All @@ -292,7 +292,7 @@ async def test_check_fields_validation_on_create_connection(
"database_name": "postgres",
},
"location": ["body", "connection_data"],
"message": "Input tag 'POSTGRESQL' found using 'type' does not match any of the expected tags: 'hive', 'oracle', 'postgres', 'clickhouse', 'hdfs', 's3'",
"message": "Input tag 'POSTGRESQL' found using 'type' does not match any of the expected tags: 'hive', 'oracle', 'postgres', 'clickhouse', 'mssql', 'hdfs', 's3'",
"code": "union_tag_invalid",
},
],
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,80 @@
import pytest
from httpx import AsyncClient
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from syncmaster.db.models import AuthData, Connection
from syncmaster.db.repositories.utils import decrypt_auth_data
from syncmaster.settings import Settings
from tests.mocks import MockGroup, UserTestRoles

pytestmark = [pytest.mark.asyncio, pytest.mark.backend, pytest.mark.mssql]


async def test_developer_plus_can_create_mssql_connection(
    client: AsyncClient,
    group: MockGroup,
    session: AsyncSession,
    settings: Settings,
    role_developer_plus: UserTestRoles,
):
    """A group member with Developer role or higher can create an MSSQL connection.

    Verifies the HTTP response body, the persisted ``Connection`` row, and that
    the stored ``AuthData`` decrypts back to the submitted user name (the
    password is never echoed back in the response).
    """
    # Arrange
    user = group.get_member_of_role(role_developer_plus)

    # Act
    result = await client.post(
        "v1/connections",
        headers={"Authorization": f"Bearer {user.token}"},
        json={
            "group_id": group.id,
            "name": "New connection",
            "description": "",
            "connection_data": {
                "type": "mssql",
                "host": "127.0.0.1",
                "port": 1433,
                "database": "database_name",
            },
            "auth_data": {
                "type": "mssql",
                "user": "user",
                "password": "secret",
            },
        },
    )

    # Assert
    # Check the HTTP status FIRST: if creation failed, the DB lookups below
    # would return None and raise AttributeError on `connection.id`, masking
    # the real failure behind an unrelated traceback.
    assert result.status_code == 200

    connection = (
        await session.scalars(
            select(Connection).filter_by(
                name="New connection",
            ),
        )
    ).first()
    # Guard against a silent miss from .first() before dereferencing below.
    assert connection is not None

    creds = (
        await session.scalars(
            select(AuthData).filter_by(
                connection_id=connection.id,
            ),
        )
    ).one()

    decrypted = decrypt_auth_data(creds.value, settings=settings)
    assert result.json() == {
        "id": connection.id,
        "name": connection.name,
        "description": connection.description,
        "group_id": connection.group_id,
        "connection_data": {
            "type": connection.data["type"],
            "host": connection.data["host"],
            "port": connection.data["port"],
            "database": connection.data["database"],
            "additional_params": connection.data["additional_params"],
        },
        "auth_data": {
            "type": decrypted["type"],
            "user": decrypted["user"],
        },
    }
Loading
Loading