Skip to content

Commit

Permalink
Merge branch 'main' into 1.7.latest
Browse files Browse the repository at this point in the history
  • Loading branch information
benc-db committed Mar 19, 2024
2 parents 9d5c592 + c3781f8 commit 54a92e7
Show file tree
Hide file tree
Showing 12 changed files with 59 additions and 21 deletions.
1 change: 0 additions & 1 deletion .python-version

This file was deleted.

9 changes: 9 additions & 0 deletions CHANGELOG.md
Original file line number Diff line number Diff line change
@@ -1,3 +1,12 @@
## dbt-databricks 1.7.10 (Mar 19, 2024)

### Fixes

- Fix a corner case of `insert into` in which NULL was emitted where DEFAULT should be used ([607](https://github.com/databricks/dbt-databricks/pull/607))
- Fix integration tests that were leaving behind schemas after running ([613](https://github.com/databricks/dbt-databricks/pull/613))
- Fix performance issue associated with persist docs by turning off incremental catalog generation (thanks @mikealfare!) ([615](https://github.com/databricks/dbt-databricks/pull/615))
- Pin protobuf to < 5 to stop incompatibility breaks ([616](https://github.com/databricks/dbt-databricks/pull/616))

## dbt-databricks 1.7.9 (Mar 5, 2024)

### Fixes
Expand Down
2 changes: 1 addition & 1 deletion dbt/adapters/databricks/__version__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
version: str = "1.7.9"
version: str = "1.7.10"
2 changes: 1 addition & 1 deletion dbt/adapters/databricks/impl.py
Original file line number Diff line number Diff line change
Expand Up @@ -123,7 +123,7 @@ class DatabricksAdapter(SparkAdapter):
_capabilities = CapabilityDict(
{
Capability.TableLastModifiedMetadata: CapabilitySupport(support=Support.Full),
Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.Full),
Capability.SchemaMetadataByRelations: CapabilitySupport(support=Support.NotImplemented),
}
)

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -60,7 +60,7 @@
{%- if dest_col in source_columns -%}
{%- do common_columns.append(dest_col) -%}
{%- else -%}
{%- do common_columns.append('NULL') -%}
{%- do common_columns.append('DEFAULT') -%}
{%- endif -%}
{%- endfor -%}
{%- set dest_cols_csv = dest_columns | join(', ') -%}
Expand Down
3 changes: 2 additions & 1 deletion requirements.txt
Original file line number Diff line number Diff line change
Expand Up @@ -2,4 +2,5 @@ databricks-sql-connector>=2.9.3, <3.0.0
dbt-spark~=1.7.1
databricks-sdk==0.17.0
keyring>=23.13.0
pandas<2.2.0
pandas<2.2.0
protobuf<5.0.0
1 change: 1 addition & 0 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -59,6 +59,7 @@ def _get_plugin_version() -> str:
"databricks-sdk==0.17.0",
"keyring>=23.13.0",
"pandas<2.2.0",
"protobuf<5.0.0",
],
zip_safe=False,
classifiers=[
Expand Down
33 changes: 33 additions & 0 deletions tests/functional/adapter/aliases/test_aliases.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,12 +29,45 @@ def macros(self):


class TestDatabricksSameAliasDifferentSchemas(BaseSameAliasDifferentSchemas):
    """Same-alias/different-schemas base tests, plus Databricks schema teardown."""

    @pytest.fixture(autouse=True)
    def clean_up(self, project):
        # Let the test run first; afterwards drop every schema it created so
        # no residue is left in the test catalog.
        yield
        schema_names = (
            f"{project.test_schema}_schema_a",
            f"{project.test_schema}_schema_b",
            project.test_schema,
        )
        with project.adapter.connection_named("__test"):
            for schema_name in schema_names:
                relation = project.adapter.Relation.create(
                    database=project.database, schema=schema_name
                )
                project.adapter.drop_schema(relation)

    @pytest.fixture(scope="class")
    def macros(self):
        # Databricks-specific macro overrides used by this test class.
        return macro_override


class TestDatabricksSameAliasDifferentDatabases(BaseSameAliasDifferentDatabases):
    """Same-alias/different-databases base tests, plus Databricks schema teardown."""

    @pytest.fixture(autouse=True)
    def clean_up(self, project):
        # Teardown runs after the test body; drop both schemas the test used.
        yield
        schema_names = (
            f"{project.test_schema}_{project.test_schema}_alt",
            project.test_schema,
        )
        with project.adapter.connection_named("__test"):
            for schema_name in schema_names:
                relation = project.adapter.Relation.create(
                    database=project.database, schema=schema_name
                )
                project.adapter.drop_schema(relation)

    @pytest.fixture(scope="class")
    def macros(self):
        # Databricks-specific macro overrides used by this test class.
        return macro_override
8 changes: 6 additions & 2 deletions tests/functional/adapter/dbt_clone/test_dbt_clone.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,7 @@
import pytest


class TestClonePossible(BaseClonePossible):
class CleanupMixin:
@pytest.fixture(autouse=True)
def clean_up(self, project):
yield
Expand All @@ -20,7 +20,11 @@ def clean_up(self, project):
project.adapter.drop_schema(relation)


class TestCloneSameTargetAndState(BaseClone):
class TestClonePossible(BaseClonePossible, CleanupMixin):
    # All test cases are inherited from BaseClonePossible; CleanupMixin
    # contributes the autouse fixture that drops leftover schemas afterwards.
    pass


class TestCloneSameTargetAndState(BaseClone, CleanupMixin):
def test_clone_same_target_and_state(self, project, other_schema):
project.create_test_schema(other_schema)
self.run_and_save_state(project.project_root)
Expand Down
13 changes: 0 additions & 13 deletions tests/functional/adapter/dbt_debug/test_dbt_debug.py

This file was deleted.

Original file line number Diff line number Diff line change
Expand Up @@ -30,6 +30,10 @@ def test_run_incremental_sync_all_columns(self, project):


class TestIncrementalOnSchemaChangeAppend(BaseIncrementalOnSchemaChange):
@pytest.fixture(scope="class")
def project_config_update(self):
return {"models": {"+incremental_strategy": "append"}}

@pytest.fixture(scope="class")
def models(self):
return {
Expand Down
2 changes: 1 addition & 1 deletion tests/unit/macros/relations/test_incremental_macros.py
Original file line number Diff line number Diff line change
Expand Up @@ -87,7 +87,7 @@ def test_insert_into_sql_impl__matching_columns(self, template):

def test_insert_into_sql_impl__target_has_extra_columns(self, template):
    # Target has columns (a, b) but the source only supplies b; the macro
    # must fill the missing column with DEFAULT rather than NULL.
    rendered = self.render_insert_into(
        template, dest_columns=["a", "b"], source_columns=["b"]
    )
    assert rendered == (
        "insert into table target (a, b)\nselect DEFAULT, b from source"
    )

def test_insert_into_sql_impl__source_has_extra_columns(self, template):
Expand Down

0 comments on commit 54a92e7

Please sign in to comment.