Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Update to 1.9 #598

Open
wants to merge 5 commits into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion dbt/adapters/sqlserver/__version__.py
Original file line number Diff line number Diff line change
@@ -1 +1 @@
version = "1.8.7"
# Adapter package version — keep in sync with the version in
# dbt/include/sqlserver/dbt_project.yml and the dbt_version in setup.py.
version = "1.9.0"
27 changes: 26 additions & 1 deletion dbt/adapters/sqlserver/sqlserver_relation.py
Original file line number Diff line number Diff line change
@@ -1,7 +1,7 @@
from dataclasses import dataclass, field
from typing import Optional, Type

from dbt.adapters.base.relation import BaseRelation
from dbt.adapters.base.relation import BaseRelation, EventTimeFilter
from dbt.adapters.utils import classproperty
from dbt_common.exceptions import DbtRuntimeError

Expand Down Expand Up @@ -49,3 +49,28 @@ def __post_init__(self):

def relation_max_name_length(self):
    """Return the maximum number of characters allowed in a relation name.

    Delegates to the module-level ``MAX_CHARACTERS_IN_IDENTIFIER`` constant
    (defined elsewhere in this module; presumably SQL Server's identifier
    length limit — confirm against the constant's definition).
    """
    return MAX_CHARACTERS_IN_IDENTIFIER

def _render_event_time_filtered(self, event_time_filter: EventTimeFilter) -> str:
"""
Returns "" if start and end are both None
"""
filter = ""
if event_time_filter.start and event_time_filter.end:
filter = (
f"{event_time_filter.field_name} >="
f" cast('{event_time_filter.start}' as datetimeoffset)"
f" and {event_time_filter.field_name} <"
f" cast('{event_time_filter.end}' as datetimeoffset)"
)
elif event_time_filter.start:
filter = (
f"{event_time_filter.field_name} >="
f" cast('{event_time_filter.start}' as datetimeoffset)"
)
elif event_time_filter.end:
filter = (
f"{event_time_filter.field_name} <"
f" cast('{event_time_filter.end}' as datetimeoffset)"
)

return filter
2 changes: 1 addition & 1 deletion dbt/include/sqlserver/dbt_project.yml
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
name: dbt_sqlserver
version: 1.8.0
version: 1.9.0
config-version: 2

macro-paths: ["macros"]
Original file line number Diff line number Diff line change
@@ -0,0 +1,31 @@
{% macro sqlserver__get_incremental_microbatch_sql(arg_dict) %}
{#-- Microbatch incremental strategy for SQL Server: delete the target rows
     that fall inside the current batch's event-time window, then re-insert
     that window from the temp relation. arg_dict is supplied by dbt's
     incremental materialization. --#}
{%- set target = arg_dict["target_relation"] -%}
{%- set source = arg_dict["temp_relation"] -%}
{%- set dest_columns = arg_dict["dest_columns"] -%}
{#-- Start from any caller-supplied predicates; batch-window predicates are
     appended below. --#}
{%- set incremental_predicates = [] if arg_dict.get('incremental_predicates') is none else arg_dict.get('incremental_predicates') -%}

{#-- Add additional incremental_predicates to filter for batch --#}
{#-- Bounds are the internal per-batch configs set by dbt; each is cast to
     datetimeoffset to compare against the event_time column. The window is
     half-open: start inclusive, end exclusive. --#}
{% if model.config.get("__dbt_internal_microbatch_event_time_start") -%}
{{ log("incremental append event start time > DBT_INTERNAL_TARGET." ~ model.config.event_time ~ " >= cast('" ~ model.config.__dbt_internal_microbatch_event_time_start ~ "' as datetimeoffset)") }}
{% do incremental_predicates.append("DBT_INTERNAL_TARGET." ~ model.config.event_time ~ " >= cast('" ~ model.config.__dbt_internal_microbatch_event_time_start ~ "' as datetimeoffset)") %}
{% endif %}
{% if model.config.__dbt_internal_microbatch_event_time_end -%}
{{ log("incremental append event end time < DBT_INTERNAL_TARGET." ~ model.config.event_time ~ " < cast('" ~ model.config.__dbt_internal_microbatch_event_time_end ~ "' as datetimeoffset)") }}
{% do incremental_predicates.append("DBT_INTERNAL_TARGET." ~ model.config.event_time ~ " < cast('" ~ model.config.__dbt_internal_microbatch_event_time_end ~ "' as datetimeoffset)") %}
{% endif %}
{#-- Write the combined predicate list back so downstream consumers of
     arg_dict see the batch-window filters too. --#}
{% do arg_dict.update({'incremental_predicates': incremental_predicates}) %}

{#-- NOTE(review): if incremental_predicates ends up empty (no batch bounds
     and no user predicates) this renders "where ( )", which is invalid SQL —
     presumably microbatch always supplies at least one bound; confirm. --#}
delete DBT_INTERNAL_TARGET from {{ target }} AS DBT_INTERNAL_TARGET
where (
{% for predicate in incremental_predicates %}
{%- if not loop.first %}and {% endif -%} {{ predicate }}
{% endfor %}
);

{#-- Re-insert the batch's rows from the temp relation, column list quoted
     via get_quoted_csv. --#}
{%- set dest_cols_csv = get_quoted_csv(dest_columns | map(attribute="name")) -%}
insert into {{ target }} ({{ dest_cols_csv }})
(
    select {{ dest_cols_csv }}
    from {{ source }}
)
{% endmacro %}
2 changes: 1 addition & 1 deletion dev_requirements.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@

dbt-tests-adapter>=1.8.0, <1.9.0
dbt-tests-adapter>=1.9.0

ruff
black==24.8.0
Expand Down
8 changes: 4 additions & 4 deletions setup.py
Original file line number Diff line number Diff line change
Expand Up @@ -8,7 +8,7 @@

package_name = "dbt-sqlserver"
authors_list = ["Mikael Ene", "Anders Swanson", "Sam Debruyn", "Cor Zuurmond", "Cody Scott"]
dbt_version = "1.8"
dbt_version = "1.9"
description = """A Microsoft SQL Server adapter plugin for dbt"""

this_directory = os.path.abspath(os.path.dirname(__file__))
Expand Down Expand Up @@ -66,10 +66,10 @@ def run(self):
packages=find_namespace_packages(include=["dbt", "dbt.*"]),
include_package_data=True,
install_requires=[
"dbt-fabric>=1.8.0,<1.9.0",
"dbt-core>=1.8.0,<1.9.0",
"dbt-fabric>=1.9.0,<1.10.0",
"dbt-core>=1.9.0,<1.10.0",
"dbt-common>=1.0,<2.0",
"dbt-adapters>=1.1.1,<2.0",
"dbt-adapters>=1.11.0,<2.0",
],
cmdclass={
"verify": VerifyVersionCommand,
Expand Down
3 changes: 1 addition & 2 deletions tests/functional/adapter/dbt/test_empty.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,11 @@
import pytest
from dbt.tests.adapter.empty._models import model_input_sql, schema_sources_yml

# switch for 1.9
# from dbt.tests.adapter.empty import _models
from dbt.tests.adapter.empty.test_empty import ( # MetadataWithEmptyFlag
BaseTestEmpty,
BaseTestEmptyInlineSourceRef,
model_input_sql,
schema_sources_yml,
)
from dbt.tests.util import run_dbt

Expand Down
7 changes: 0 additions & 7 deletions tests/functional/adapter/dbt/test_incremental.py
Original file line number Diff line number Diff line change
@@ -1,8 +1,5 @@
import pytest
from dbt.tests.adapter.incremental import fixtures
from dbt.tests.adapter.incremental.test_incremental_merge_exclude_columns import (
BaseMergeExcludeColumns,
)
from dbt.tests.adapter.incremental.test_incremental_on_schema_change import (
BaseIncrementalOnSchemaChange,
)
Expand Down Expand Up @@ -76,10 +73,6 @@
"""


class TestIncrementalMergeExcludeColumns(BaseMergeExcludeColumns):
pass


class TestIncrementalOnSchemaChange(BaseIncrementalOnSchemaChange):
@pytest.fixture(scope="class")
def models(self):
Expand Down
Original file line number Diff line number Diff line change
@@ -0,0 +1,47 @@
import pytest
from dbt.tests.adapter.incremental.test_incremental_microbatch import BaseMicrobatch

# Microbatch model under test: incremental with the 'microbatch' strategy,
# batching by day on the `event_time` column, beginning 2020-01-01
# (no unique_id configured).
_microbatch_model_no_unique_id_sql_datetime = """
{{ config(materialized='incremental', incremental_strategy='microbatch',
event_time='event_time', batch_size='day', begin='2020-01-01 00:00:00') }}
select * from {{ ref('input_model') }}
"""

# Input model feeding the microbatch model: three rows whose string-typed
# `event_time` values land on consecutive days, one row per daily batch.
_input_model_sql_datetime = """
{{ config(materialized='table', event_time='event_time') }}
select 1 as id, '2020-01-01 00:00:00' as event_time
union all
select 2 as id, '2020-01-02 00:00:00' as event_time
union all
select 3 as id, '2020-01-03 00:00:00' as event_time
"""


class TestSQLServerMicrobatchDateTime(BaseMicrobatch):
    """Microbatch test variant using a datetime column as the event_time.

    Exercises the adapter's microbatch strategy against datetime-typed
    event-time data when batch boundaries are passed in as UTC strings.
    """

    @pytest.fixture(scope="class")
    def microbatch_model_sql(self) -> str:
        """SQL for the microbatch model under test."""
        return _microbatch_model_no_unique_id_sql_datetime

    @pytest.fixture(scope="class")
    def input_model_sql(self) -> str:
        """SQL defining the microbatch model's input, with its {{ config(..) }}.

        event_time is a required configuration of this input.
        """
        return _input_model_sql_datetime

    @pytest.fixture(scope="class")
    def insert_two_rows_sql(self, project) -> str:
        """Statement that appends two later-dated rows to the input model."""
        schema_relation = project.adapter.Relation.create(
            database=project.database, schema=project.test_schema
        )
        new_rows = "(4, '2020-01-04 00:00:00'), (5, '2020-01-05 00:00:00')"
        return f"insert into {schema_relation}.input_model (id, event_time) values {new_rows}"