Commit 510c247

Merge branch 'main' into fix/databricks-oauth-shared-connection

2 parents: 8fe0816 + 41a9d29

16 files changed

Lines changed: 49 additions & 38 deletions

File tree

pyproject.toml

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,7 +24,7 @@ dependencies = [
2424
"requests",
2525
"rich[jupyter]",
2626
"ruamel.yaml",
27-
"sqlglot~=30.0.1",
27+
"sqlglot~=30.2.1",
2828
"tenacity",
2929
"time-machine",
3030
"json-stream"

sqlmesh/core/config/connection.py

Lines changed: 8 additions & 15 deletions
Original file line numberDiff line numberDiff line change
@@ -2343,35 +2343,28 @@ def init(cursor: t.Any) -> None:
23432343
return init
23442344

23452345

2346+
_CONNECTION_CONFIG_EXCLUDE: t.Set[t.Type[ConnectionConfig]] = {
2347+
ConnectionConfig, # type: ignore[type-abstract]
2348+
BaseDuckDBConnectionConfig, # type: ignore[type-abstract]
2349+
}
2350+
23462351
CONNECTION_CONFIG_TO_TYPE = {
23472352
# Map all subclasses of ConnectionConfig to the value of their `type_` field.
23482353
tpe.all_field_infos()["type_"].default: tpe
2349-
for tpe in subclasses(
2350-
__name__,
2351-
ConnectionConfig,
2352-
exclude={ConnectionConfig, BaseDuckDBConnectionConfig},
2353-
)
2354+
for tpe in subclasses(__name__, ConnectionConfig, exclude=_CONNECTION_CONFIG_EXCLUDE)
23542355
}
23552356

23562357
DIALECT_TO_TYPE = {
23572358
tpe.all_field_infos()["type_"].default: tpe.DIALECT
2358-
for tpe in subclasses(
2359-
__name__,
2360-
ConnectionConfig,
2361-
exclude={ConnectionConfig, BaseDuckDBConnectionConfig},
2362-
)
2359+
for tpe in subclasses(__name__, ConnectionConfig, exclude=_CONNECTION_CONFIG_EXCLUDE)
23632360
}
23642361

23652362
INIT_DISPLAY_INFO_TO_TYPE = {
23662363
tpe.all_field_infos()["type_"].default: (
23672364
tpe.DISPLAY_ORDER,
23682365
tpe.DISPLAY_NAME,
23692366
)
2370-
for tpe in subclasses(
2371-
__name__,
2372-
ConnectionConfig,
2373-
exclude={ConnectionConfig, BaseDuckDBConnectionConfig},
2374-
)
2367+
for tpe in subclasses(__name__, ConnectionConfig, exclude=_CONNECTION_CONFIG_EXCLUDE)
23752368
}
23762369

23772370

sqlmesh/core/config/scheduler.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -144,9 +144,10 @@ def get_default_catalog_per_gateway(self, context: GenericContext) -> t.Dict[str
144144
return default_catalogs_per_gateway
145145

146146

147-
SCHEDULER_CONFIG_TO_TYPE = {
147+
SCHEDULER_CONFIG_TO_TYPE: t.Dict[str, t.Type[SchedulerConfig]] = {
148148
tpe.all_field_infos()["type_"].default: tpe
149149
for tpe in subclasses(__name__, BaseConfig, exclude={BaseConfig})
150+
if issubclass(tpe, SchedulerConfig)
150151
}
151152

152153

sqlmesh/core/dialect.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -14,6 +14,7 @@
1414
from sqlglot.dialects.dialect import DialectType
1515
from sqlglot.dialects import DuckDB, Snowflake, TSQL
1616
import sqlglot.dialects.athena as athena
17+
import sqlglot.generators.athena as athena_generators
1718
from sqlglot.parsers.athena import AthenaTrinoParser
1819
from sqlglot.helper import seq_get
1920
from sqlglot.optimizer.normalize_identifiers import normalize_identifiers
@@ -1048,8 +1049,8 @@ def extend_sqlglot() -> None:
10481049
if dialect == athena.Athena:
10491050
tokenizers.add(athena._TrinoTokenizer)
10501051
parsers.add(AthenaTrinoParser)
1051-
generators.add(athena._TrinoGenerator)
1052-
generators.add(athena._HiveGenerator)
1052+
generators.add(athena_generators.AthenaTrinoGenerator)
1053+
generators.add(athena_generators._HiveGenerator)
10531054

10541055
if hasattr(dialect, "Tokenizer"):
10551056
tokenizers.add(dialect.Tokenizer)

sqlmesh/core/linter/rules/builtin.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -318,4 +318,5 @@ def check_model(self, model: Model) -> t.Optional[RuleViolation]:
318318
return None
319319

320320

321-
BUILTIN_RULES = RuleSet(subclasses(__name__, Rule, exclude={Rule}))
321+
_RULE_EXCLUDE: t.Set[t.Type[Rule]] = {Rule} # type: ignore[type-abstract]
322+
BUILTIN_RULES = RuleSet(subclasses(__name__, Rule, exclude=_RULE_EXCLUDE))

sqlmesh/core/loader.py

Lines changed: 2 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -840,7 +840,8 @@ def _load_linting_rules(self) -> RuleSet:
840840
if os.path.getsize(path):
841841
self._track_file(path)
842842
module = import_python_file(path, self.config_path)
843-
module_rules = subclasses(module.__name__, Rule, exclude={Rule})
843+
_rule_exclude: t.Set[t.Type[Rule]] = {Rule} # type: ignore[type-abstract]
844+
module_rules = subclasses(module.__name__, Rule, exclude=_rule_exclude)
844845
for user_rule in module_rules:
845846
user_rules[user_rule.name] = user_rule
846847

sqlmesh/core/macros.py

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -294,7 +294,9 @@ def evaluate_macros(
294294
return node
295295

296296
transformed = exp.replace_tree(
297-
expression.copy(), evaluate_macros, prune=lambda n: isinstance(n, exp.Lambda)
297+
expression.copy(),
298+
evaluate_macros, # type: ignore[arg-type]
299+
prune=lambda n: isinstance(n, exp.Lambda),
298300
)
299301

300302
if changed:

sqlmesh/core/renderer.py

Lines changed: 1 addition & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -331,7 +331,7 @@ def _resolve_table(
331331
deployability_index: t.Optional[DeployabilityIndex] = None,
332332
) -> exp.Table:
333333
table = exp.replace_tables(
334-
exp.maybe_parse(table_name, into=exp.Table, dialect=self._dialect),
334+
t.cast(exp.Table, exp.maybe_parse(table_name, into=exp.Table, dialect=self._dialect)),
335335
{
336336
**self._to_table_mapping((snapshots or {}).values(), deployability_index),
337337
**(table_mapping or {}),

sqlmesh/core/table_diff.py

Lines changed: 3 additions & 2 deletions
Original file line numberDiff line numberDiff line change
@@ -255,12 +255,13 @@ def __init__(
255255
self.source_alias = source_alias
256256
self.target_alias = target_alias
257257

258+
cols: t.List[str] = ensure_list(skip_columns)
258259
self.skip_columns = {
259260
normalize_identifiers(
260-
exp.parse_identifier(t.cast(str, col)),
261+
exp.parse_identifier(col),
261262
dialect=self.model_dialect or self.dialect,
262263
).name
263-
for col in ensure_list(skip_columns)
264+
for col in cols
264265
}
265266

266267
self._on = on

sqlmesh/core/test/definition.py

Lines changed: 11 additions & 4 deletions
Original file line numberDiff line numberDiff line change
@@ -612,20 +612,27 @@ def _concurrent_render_context(self) -> t.Iterator[None]:
612612
- Globally patch the SQLGlot dialect so that any date/time nodes are evaluated at the `execution_time` during generation
613613
"""
614614
import time_machine
615+
from sqlglot.generator import _DISPATCH_CACHE
615616

616617
lock_ctx: AbstractContextManager = (
617618
self.CONCURRENT_RENDER_LOCK if self.concurrency else nullcontext()
618619
)
619620
time_ctx: AbstractContextManager = nullcontext()
620621
dialect_patch_ctx: AbstractContextManager = nullcontext()
622+
dispatch_patch_ctx: AbstractContextManager = nullcontext()
621623

622624
if self._execution_time:
625+
generator_class = self._test_adapter_dialect.generator_class
623626
time_ctx = time_machine.travel(self._execution_time, tick=False)
624-
dialect_patch_ctx = patch.dict(
625-
self._test_adapter_dialect.generator_class.TRANSFORMS, self._transforms
626-
)
627+
dialect_patch_ctx = patch.dict(generator_class.TRANSFORMS, self._transforms)
628+
629+
# sqlglot caches a dispatch table per generator class, so we need to patch
630+
# it as well to ensure the overridden transforms are actually used
631+
dispatch = _DISPATCH_CACHE.get(generator_class)
632+
if dispatch is not None:
633+
dispatch_patch_ctx = patch.dict(dispatch, self._transforms)
627634

628-
with lock_ctx, time_ctx, dialect_patch_ctx:
635+
with lock_ctx, time_ctx, dialect_patch_ctx, dispatch_patch_ctx:
629636
yield
630637

631638
def _execute(self, query: exp.Query | str) -> pd.DataFrame:

Comments (0)