# sqlglot.dialects.tsql
"""T-SQL (SQL Server) dialect helpers: time-format mappings, query-hint OPTION
metadata, and builder/generator functions shared by the TSQL dialect class."""

from __future__ import annotations

import datetime
import re
import typing as t
from functools import partial, reduce

from sqlglot import exp, generator, parser, tokens, transforms
from sqlglot.dialects.dialect import (
    Dialect,
    NormalizationStrategy,
    any_value_to_max_sql,
    date_delta_sql,
    datestrtodate_sql,
    generatedasidentitycolumnconstraint_sql,
    max_or_greatest,
    min_or_least,
    build_date_delta,
    rename_func,
    trim_sql,
    timestrtotime_sql,
)
from sqlglot.helper import seq_get
from sqlglot.parser import build_coalesce
from sqlglot.time import format_time
from sqlglot.tokens import TokenType

if t.TYPE_CHECKING:
    from sqlglot._typing import E

# T-SQL dateparts that DATENAME renders as *full* names (weekday/month),
# mapped to the corresponding strftime specifiers (%A = weekday name,
# %B = month name). Merged over TIME_MAPPING by _build_formatted_time.
FULL_FORMAT_TIME_MAPPING = {
    "weekday": "%A",
    "dw": "%A",
    "w": "%A",
    "month": "%B",
    "mm": "%B",
    "m": "%B",
}

# Normalizes T-SQL datepart spellings/abbreviations (e.g. "yy", "qq", "wk")
# to a canonical interval unit name for DATEADD/DATEDIFF.
DATE_DELTA_INTERVAL = {
    "year": "year",
    "yyyy": "year",
    "yy": "year",
    "quarter": "quarter",
    "qq": "quarter",
    "q": "quarter",
    "month": "month",
    "mm": "month",
    "m": "month",
    "week": "week",
    "ww": "week",
    "wk": "week",
    "day": "day",
    "dd": "day",
    "d": "day",
}


# Detects date/time tokens (d, m, y, h, s runs) inside a FORMAT() format
# string; used by _build_format to tell number formats from time formats.
DATE_FMT_RE = re.compile("([dD]{1,2})|([mM]{1,2})|([yY]{1,4})|([hH]{1,2})|([sS]{1,2})")

# N = Numeric, C = Currency: FORMAT() specifiers treated as numeric formats
# even though they would also match DATE_FMT_RE-free checks.
TRANSPILE_SAFE_NUMBER_FMT = {"N", "C"}

# Base date used when an integer literal is given where a DATETIME is
# expected (integer N means N days after 1900-01-01).
DEFAULT_START_DATE = datetime.date(1900, 1, 1)

# Expression types under which a boolean is generated as a BIT literal
# (1/0) instead of a comparison such as (1 = 1).
BIT_TYPES = {exp.EQ, exp.NEQ, exp.Is, exp.In, exp.Select, exp.Alias}

# Supported OPTION (...) query hints and their allowed argument keywords.
# Unsupported options:
# - OPTIMIZE FOR ( @variable_name { UNKNOWN | = <literal_constant> } [ , ...n ] )
# - TABLE HINT
OPTIONS: parser.OPTIONS_TYPE = {
    **dict.fromkeys(
        (
            "DISABLE_OPTIMIZED_PLAN_FORCING",
            "FAST",
            "IGNORE_NONCLUSTERED_COLUMNSTORE_INDEX",
            "LABEL",
            "MAXDOP",
            "MAXRECURSION",
            "MAX_GRANT_PERCENT",
            "MIN_GRANT_PERCENT",
            "NO_PERFORMANCE_SPOOL",
            "QUERYTRACEON",
            "RECOMPILE",
        ),
        tuple(),
    ),
    "CONCAT": ("UNION",),
    "DISABLE": ("EXTERNALPUSHDOWN", "SCALEOUTEXECUTION"),
    "EXPAND": ("VIEWS",),
    "FORCE": ("EXTERNALPUSHDOWN", "ORDER", "SCALEOUTEXECUTION"),
    "HASH": ("GROUP", "JOIN", "UNION"),
    "KEEP": ("PLAN",),
    "KEEPFIXED": ("PLAN",),
    "LOOP": ("JOIN",),
    "MERGE": ("JOIN", "UNION"),
    "OPTIMIZE": (("FOR", "UNKNOWN"),),
    "ORDER": ("GROUP",),
    "PARAMETERIZATION": ("FORCED", "SIMPLE"),
    "ROBUST": ("PLAN",),
    "USE": ("PLAN",),
}

# Options whose value must be rendered with an '=' sign in OPTION (...).
OPTIONS_THAT_REQUIRE_EQUAL = ("MAX_GRANT_PERCENT", "MIN_GRANT_PERCENT", "LABEL")


def _build_formatted_time(
    exp_class: t.Type[E], full_format_mapping: t.Optional[bool] = None
) -> t.Callable[[t.List], E]:
    """Return a builder for DATENAME/DATEPART-style calls.

    The produced builder expects exactly two arguments (datepart, value),
    casts the value to DATETIME, and translates the datepart into a
    strftime-style format string. When ``full_format_mapping`` is truthy,
    FULL_FORMAT_TIME_MAPPING overrides TIME_MAPPING so weekday/month parts
    map to their full-name specifiers.
    """

    def _builder(args: t.List) -> E:
        assert len(args) == 2

        return exp_class(
            this=exp.cast(args[1], exp.DataType.Type.DATETIME),
            format=exp.Literal.string(
                format_time(
                    args[0].name.lower(),
                    (
                        {**TSQL.TIME_MAPPING, **FULL_FORMAT_TIME_MAPPING}
                        if full_format_mapping
                        else TSQL.TIME_MAPPING
                    ),
                )
            ),
        )

    return _builder


def _build_format(args: t.List) -> exp.NumberToStr | exp.TimeToStr:
    """Build FORMAT(value, fmt[, culture]) as NumberToStr or TimeToStr.

    A format counts as numeric when it is a transpile-safe number specifier
    (N/C) or contains no date/time tokens; otherwise the format string is
    converted via the dialect's time mappings (single-character formats use
    FORMAT_TIME_MAPPING, longer ones TIME_MAPPING).
    """
    this = seq_get(args, 0)
    fmt = seq_get(args, 1)
    culture = seq_get(args, 2)

    number_fmt = fmt and (fmt.name in TRANSPILE_SAFE_NUMBER_FMT or not DATE_FMT_RE.search(fmt.name))

    if number_fmt:
        return exp.NumberToStr(this=this, format=fmt, culture=culture)

    if fmt:
        fmt = exp.Literal.string(
            format_time(fmt.name, TSQL.FORMAT_TIME_MAPPING)
            if len(fmt.name) == 1
            else format_time(fmt.name, TSQL.TIME_MAPPING)
        )

    return exp.TimeToStr(this=this, format=fmt, culture=culture)


def _build_eomonth(args: t.List) -> exp.LastDay:
    """Build EOMONTH(date[, month_lag]) as LastDay, applying the optional
    month offset with a DateAdd around the date argument."""
    date = exp.TsOrDsToDate(this=seq_get(args, 0))
    month_lag = seq_get(args, 1)

    if month_lag is None:
        this: exp.Expression = date
    else:
        unit = DATE_DELTA_INTERVAL.get("month")
        this = exp.DateAdd(this=date, expression=month_lag, unit=unit and exp.var(unit))

    return exp.LastDay(this=this)


def _build_hashbytes(args: t.List) -> exp.Expression:
    """Map HASHBYTES('<algo>', data) onto the dedicated hash expressions
    (MD5 / SHA / SHA2), falling back to an anonymous HASHBYTES call for
    algorithms that have no dedicated expression."""
    kind, data = args
    # A non-string first argument (e.g. a variable) can't be resolved here.
    kind = kind.name.upper() if kind.is_string else ""

    if kind == "MD5":
        args.pop(0)
        return exp.MD5(this=data)
    if kind in ("SHA", "SHA1"):
        args.pop(0)
        return exp.SHA(this=data)
    if kind == "SHA2_256":
        return exp.SHA2(this=data, length=exp.Literal.number(256))
    if kind == "SHA2_512":
        return exp.SHA2(this=data, length=exp.Literal.number(512))

    # Unknown algorithm: keep the original call untouched.
    return exp.func("HASHBYTES", *args)


# Datepart names that are emitted through DATEPART(...) instead of
# FORMAT(...) when generating TimeToStr (see _format_sql).
DATEPART_ONLY_FORMATS = {"DW", "HOUR", "QUARTER"}


def _format_sql(self: TSQL.Generator, expression: exp.NumberToStr | exp.TimeToStr) -> str:
    """Generate FORMAT(...) for NumberToStr/TimeToStr expressions.

    Time formats are mapped back through INVERSE_TIME_MAPPING; formats in
    DATEPART_ONLY_FORMATS are emitted as DATEPART(name, value) instead.
    """
    fmt = expression.args["format"]

    if not isinstance(expression, exp.NumberToStr):
        if fmt.is_string:
            mapped_fmt = format_time(fmt.name, TSQL.INVERSE_TIME_MAPPING)

            name = (mapped_fmt or "").upper()
            if name in DATEPART_ONLY_FORMATS:
                return self.func("DATEPART", name, expression.this)

            fmt_sql = self.sql(exp.Literal.string(mapped_fmt))
        else:
            fmt_sql = self.format_time(expression) or self.sql(fmt)
    else:
        # Numeric formats are passed through verbatim.
        fmt_sql = self.sql(fmt)

    return self.func("FORMAT", expression.this, fmt_sql, expression.args.get("culture"))


def _string_agg_sql(self: TSQL.Generator, expression: exp.GroupConcat) -> str:
    """Generate GroupConcat as STRING_AGG(expr, separator)[ WITHIN GROUP (...)].

    DISTINCT is flagged as unsupported (T-SQL's STRING_AGG has no DISTINCT);
    an inner ORDER BY is rendered as a trailing WITHIN GROUP clause. The
    separator defaults to ','.
    """
    this = expression.this
    distinct = expression.find(exp.Distinct)
    if distinct:
        # exp.Distinct can appear below an exp.Order or an exp.GroupConcat expression
        self.unsupported("T-SQL STRING_AGG doesn't support DISTINCT.")
        this = distinct.pop().expressions[0]

    order = ""
    if isinstance(expression.this, exp.Order):
        if expression.this.this:
            this = expression.this.this.pop()
        # Order has a leading space
        order = f" WITHIN GROUP ({self.sql(expression.this)[1:]})"

    separator = expression.args.get("separator") or exp.Literal.string(",")
    return f"STRING_AGG({self.format_args(this, separator)}){order}"
223def _build_date_delta( 224 exp_class: t.Type[E], unit_mapping: t.Optional[t.Dict[str, str]] = None 225) -> t.Callable[[t.List], E]: 226 def _builder(args: t.List) -> E: 227 unit = seq_get(args, 0) 228 if unit and unit_mapping: 229 unit = exp.var(unit_mapping.get(unit.name.lower(), unit.name)) 230 231 start_date = seq_get(args, 1) 232 if start_date and start_date.is_number: 233 # Numeric types are valid DATETIME values 234 if start_date.is_int: 235 adds = DEFAULT_START_DATE + datetime.timedelta(days=int(start_date.this)) 236 start_date = exp.Literal.string(adds.strftime("%F")) 237 else: 238 # We currently don't handle float values, i.e. they're not converted to equivalent DATETIMEs. 239 # This is not a problem when generating T-SQL code, it is when transpiling to other dialects. 240 return exp_class(this=seq_get(args, 2), expression=start_date, unit=unit) 241 242 return exp_class( 243 this=exp.TimeStrToTime(this=seq_get(args, 2)), 244 expression=exp.TimeStrToTime(this=start_date), 245 unit=unit, 246 ) 247 248 return _builder 249 250 251def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression: 252 """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.""" 253 alias = expression.args.get("alias") 254 255 if ( 256 isinstance(expression, (exp.CTE, exp.Subquery)) 257 and isinstance(alias, exp.TableAlias) 258 and not alias.columns 259 ): 260 from sqlglot.optimizer.qualify_columns import qualify_outputs 261 262 # We keep track of the unaliased column projection indexes instead of the expressions 263 # themselves, because the latter are going to be replaced by new nodes when the aliases 264 # are added and hence we won't be able to reach these newly added Alias parents 265 query = expression.this 266 unaliased_column_indexes = ( 267 i for i, c in enumerate(query.selects) if isinstance(c, exp.Column) and not c.alias 268 ) 269 270 qualify_outputs(query) 271 272 # Preserve the quoting information of columns for newly added Alias 
nodes 273 query_selects = query.selects 274 for select_index in unaliased_column_indexes: 275 alias = query_selects[select_index] 276 column = alias.this 277 if isinstance(column.this, exp.Identifier): 278 alias.args["alias"].set("quoted", column.this.quoted) 279 280 return expression 281 282 283# https://learn.microsoft.com/en-us/sql/t-sql/functions/datetimefromparts-transact-sql?view=sql-server-ver16#syntax 284def _build_datetimefromparts(args: t.List) -> exp.TimestampFromParts: 285 return exp.TimestampFromParts( 286 year=seq_get(args, 0), 287 month=seq_get(args, 1), 288 day=seq_get(args, 2), 289 hour=seq_get(args, 3), 290 min=seq_get(args, 4), 291 sec=seq_get(args, 5), 292 milli=seq_get(args, 6), 293 ) 294 295 296# https://learn.microsoft.com/en-us/sql/t-sql/functions/timefromparts-transact-sql?view=sql-server-ver16#syntax 297def _build_timefromparts(args: t.List) -> exp.TimeFromParts: 298 return exp.TimeFromParts( 299 hour=seq_get(args, 0), 300 min=seq_get(args, 1), 301 sec=seq_get(args, 2), 302 fractions=seq_get(args, 3), 303 precision=seq_get(args, 4), 304 ) 305 306 307def _build_with_arg_as_text( 308 klass: t.Type[exp.Expression], 309) -> t.Callable[[t.List[exp.Expression]], exp.Expression]: 310 def _parse(args: t.List[exp.Expression]) -> exp.Expression: 311 this = seq_get(args, 0) 312 313 if this and not this.is_string: 314 this = exp.cast(this, exp.DataType.Type.TEXT) 315 316 expression = seq_get(args, 1) 317 kwargs = {"this": this} 318 319 if expression: 320 kwargs["expression"] = expression 321 322 return klass(**kwargs) 323 324 return _parse 325 326 327# https://learn.microsoft.com/en-us/sql/t-sql/functions/parsename-transact-sql?view=sql-server-ver16 328def _build_parsename(args: t.List) -> exp.SplitPart | exp.Anonymous: 329 # PARSENAME(...) 
will be stored into exp.SplitPart if: 330 # - All args are literals 331 # - The part index (2nd arg) is <= 4 (max valid value, otherwise TSQL returns NULL) 332 if len(args) == 2 and all(isinstance(arg, exp.Literal) for arg in args): 333 this = args[0] 334 part_index = args[1] 335 split_count = len(this.name.split(".")) 336 if split_count <= 4: 337 return exp.SplitPart( 338 this=this, 339 delimiter=exp.Literal.string("."), 340 part_index=exp.Literal.number(split_count + 1 - part_index.to_py()), 341 ) 342 343 return exp.Anonymous(this="PARSENAME", expressions=args) 344 345 346def _build_json_query(args: t.List, dialect: Dialect) -> exp.JSONExtract: 347 if len(args) == 1: 348 # The default value for path is '$'. As a result, if you don't provide a 349 # value for path, JSON_QUERY returns the input expression. 350 args.append(exp.Literal.string("$")) 351 352 return parser.build_extract_json_with_path(exp.JSONExtract)(args, dialect) 353 354 355def _json_extract_sql( 356 self: TSQL.Generator, expression: exp.JSONExtract | exp.JSONExtractScalar 357) -> str: 358 json_query = self.func("JSON_QUERY", expression.this, expression.expression) 359 json_value = self.func("JSON_VALUE", expression.this, expression.expression) 360 return self.func("ISNULL", json_query, json_value) 361 362 363def _timestrtotime_sql(self: TSQL.Generator, expression: exp.TimeStrToTime): 364 sql = timestrtotime_sql(self, expression) 365 if expression.args.get("zone"): 366 # If there is a timezone, produce an expression like: 367 # CAST('2020-01-01 12:13:14-08:00' AS DATETIMEOFFSET) AT TIME ZONE 'UTC' 368 # If you dont have AT TIME ZONE 'UTC', wrapping that expression in another cast back to DATETIME2 just drops the timezone information 369 return self.sql(exp.AtTimeZone(this=sql, zone=exp.Literal.string("UTC"))) 370 return sql 371 372 373class TSQL(Dialect): 374 SUPPORTS_SEMI_ANTI_JOIN = False 375 LOG_BASE_FIRST = False 376 TYPED_DIVISION = True 377 CONCAT_COALESCE = True 378 NORMALIZATION_STRATEGY = 
NormalizationStrategy.CASE_INSENSITIVE 379 380 TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'" 381 382 TIME_MAPPING = { 383 "year": "%Y", 384 "dayofyear": "%j", 385 "day": "%d", 386 "dy": "%d", 387 "y": "%Y", 388 "week": "%W", 389 "ww": "%W", 390 "wk": "%W", 391 "hour": "%h", 392 "hh": "%I", 393 "minute": "%M", 394 "mi": "%M", 395 "n": "%M", 396 "second": "%S", 397 "ss": "%S", 398 "s": "%-S", 399 "millisecond": "%f", 400 "ms": "%f", 401 "weekday": "%W", 402 "dw": "%W", 403 "month": "%m", 404 "mm": "%M", 405 "m": "%-M", 406 "Y": "%Y", 407 "YYYY": "%Y", 408 "YY": "%y", 409 "MMMM": "%B", 410 "MMM": "%b", 411 "MM": "%m", 412 "M": "%-m", 413 "dddd": "%A", 414 "dd": "%d", 415 "d": "%-d", 416 "HH": "%H", 417 "H": "%-H", 418 "h": "%-I", 419 "ffffff": "%f", 420 "yyyy": "%Y", 421 "yy": "%y", 422 } 423 424 CONVERT_FORMAT_MAPPING = { 425 "0": "%b %d %Y %-I:%M%p", 426 "1": "%m/%d/%y", 427 "2": "%y.%m.%d", 428 "3": "%d/%m/%y", 429 "4": "%d.%m.%y", 430 "5": "%d-%m-%y", 431 "6": "%d %b %y", 432 "7": "%b %d, %y", 433 "8": "%H:%M:%S", 434 "9": "%b %d %Y %-I:%M:%S:%f%p", 435 "10": "mm-dd-yy", 436 "11": "yy/mm/dd", 437 "12": "yymmdd", 438 "13": "%d %b %Y %H:%M:ss:%f", 439 "14": "%H:%M:%S:%f", 440 "20": "%Y-%m-%d %H:%M:%S", 441 "21": "%Y-%m-%d %H:%M:%S.%f", 442 "22": "%m/%d/%y %-I:%M:%S %p", 443 "23": "%Y-%m-%d", 444 "24": "%H:%M:%S", 445 "25": "%Y-%m-%d %H:%M:%S.%f", 446 "100": "%b %d %Y %-I:%M%p", 447 "101": "%m/%d/%Y", 448 "102": "%Y.%m.%d", 449 "103": "%d/%m/%Y", 450 "104": "%d.%m.%Y", 451 "105": "%d-%m-%Y", 452 "106": "%d %b %Y", 453 "107": "%b %d, %Y", 454 "108": "%H:%M:%S", 455 "109": "%b %d %Y %-I:%M:%S:%f%p", 456 "110": "%m-%d-%Y", 457 "111": "%Y/%m/%d", 458 "112": "%Y%m%d", 459 "113": "%d %b %Y %H:%M:%S:%f", 460 "114": "%H:%M:%S:%f", 461 "120": "%Y-%m-%d %H:%M:%S", 462 "121": "%Y-%m-%d %H:%M:%S.%f", 463 } 464 465 FORMAT_TIME_MAPPING = { 466 "y": "%B %Y", 467 "d": "%m/%d/%Y", 468 "H": "%-H", 469 "h": "%-I", 470 "s": "%Y-%m-%d %H:%M:%S", 471 "D": "%A,%B,%Y", 472 "f": "%A,%B,%Y %-I:%M 
%p", 473 "F": "%A,%B,%Y %-I:%M:%S %p", 474 "g": "%m/%d/%Y %-I:%M %p", 475 "G": "%m/%d/%Y %-I:%M:%S %p", 476 "M": "%B %-d", 477 "m": "%B %-d", 478 "O": "%Y-%m-%dT%H:%M:%S", 479 "u": "%Y-%M-%D %H:%M:%S%z", 480 "U": "%A, %B %D, %Y %H:%M:%S%z", 481 "T": "%-I:%M:%S %p", 482 "t": "%-I:%M", 483 "Y": "%a %Y", 484 } 485 486 class Tokenizer(tokens.Tokenizer): 487 IDENTIFIERS = [("[", "]"), '"'] 488 QUOTES = ["'", '"'] 489 HEX_STRINGS = [("0x", ""), ("0X", "")] 490 VAR_SINGLE_TOKENS = {"@", "$", "#"} 491 492 KEYWORDS = { 493 **tokens.Tokenizer.KEYWORDS, 494 "CLUSTERED INDEX": TokenType.INDEX, 495 "DATETIME2": TokenType.DATETIME, 496 "DATETIMEOFFSET": TokenType.TIMESTAMPTZ, 497 "DECLARE": TokenType.DECLARE, 498 "EXEC": TokenType.COMMAND, 499 "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT, 500 "IMAGE": TokenType.IMAGE, 501 "MONEY": TokenType.MONEY, 502 "NONCLUSTERED INDEX": TokenType.INDEX, 503 "NTEXT": TokenType.TEXT, 504 "OPTION": TokenType.OPTION, 505 "OUTPUT": TokenType.RETURNING, 506 "PRINT": TokenType.COMMAND, 507 "PROC": TokenType.PROCEDURE, 508 "REAL": TokenType.FLOAT, 509 "ROWVERSION": TokenType.ROWVERSION, 510 "SMALLDATETIME": TokenType.DATETIME, 511 "SMALLMONEY": TokenType.SMALLMONEY, 512 "SQL_VARIANT": TokenType.VARIANT, 513 "SYSTEM_USER": TokenType.CURRENT_USER, 514 "TOP": TokenType.TOP, 515 "TIMESTAMP": TokenType.ROWVERSION, 516 "TINYINT": TokenType.UTINYINT, 517 "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER, 518 "UPDATE STATISTICS": TokenType.COMMAND, 519 "XML": TokenType.XML, 520 } 521 KEYWORDS.pop("/*+") 522 523 COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END} 524 525 class Parser(parser.Parser): 526 SET_REQUIRES_ASSIGNMENT_DELIMITER = False 527 LOG_DEFAULTS_TO_LN = True 528 ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False 529 STRING_ALIASES = True 530 NO_PAREN_IF_COMMANDS = False 531 532 QUERY_MODIFIER_PARSERS = { 533 **parser.Parser.QUERY_MODIFIER_PARSERS, 534 TokenType.OPTION: lambda self: ("options", self._parse_options()), 535 } 536 537 FUNCTIONS = 
{ 538 **parser.Parser.FUNCTIONS, 539 "CHARINDEX": lambda args: exp.StrPosition( 540 this=seq_get(args, 1), 541 substr=seq_get(args, 0), 542 position=seq_get(args, 2), 543 ), 544 "COUNT": lambda args: exp.Count( 545 this=seq_get(args, 0), expressions=args[1:], big_int=False 546 ), 547 "COUNT_BIG": lambda args: exp.Count( 548 this=seq_get(args, 0), expressions=args[1:], big_int=True 549 ), 550 "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL), 551 "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL), 552 "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True), 553 "DATEPART": _build_formatted_time(exp.TimeToStr), 554 "DATETIMEFROMPARTS": _build_datetimefromparts, 555 "EOMONTH": _build_eomonth, 556 "FORMAT": _build_format, 557 "GETDATE": exp.CurrentTimestamp.from_arg_list, 558 "HASHBYTES": _build_hashbytes, 559 "ISNULL": build_coalesce, 560 "JSON_QUERY": _build_json_query, 561 "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar), 562 "LEN": _build_with_arg_as_text(exp.Length), 563 "LEFT": _build_with_arg_as_text(exp.Left), 564 "RIGHT": _build_with_arg_as_text(exp.Right), 565 "PARSENAME": _build_parsename, 566 "REPLICATE": exp.Repeat.from_arg_list, 567 "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)), 568 "SYSDATETIME": exp.CurrentTimestamp.from_arg_list, 569 "SUSER_NAME": exp.CurrentUser.from_arg_list, 570 "SUSER_SNAME": exp.CurrentUser.from_arg_list, 571 "SYSTEM_USER": exp.CurrentUser.from_arg_list, 572 "TIMEFROMPARTS": _build_timefromparts, 573 } 574 575 JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"} 576 577 RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - { 578 TokenType.TABLE, 579 *parser.Parser.TYPE_TOKENS, 580 } 581 582 STATEMENT_PARSERS = { 583 **parser.Parser.STATEMENT_PARSERS, 584 TokenType.DECLARE: lambda self: self._parse_declare(), 585 } 586 587 RANGE_PARSERS = { 588 **parser.Parser.RANGE_PARSERS, 589 TokenType.DCOLON: 
lambda self, this: self.expression( 590 exp.ScopeResolution, 591 this=this, 592 expression=self._parse_function() or self._parse_var(any_token=True), 593 ), 594 } 595 596 # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL 597 COLUMN_OPERATORS = { 598 **parser.Parser.COLUMN_OPERATORS, 599 TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to) 600 if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED 601 else self.expression(exp.ScopeResolution, this=this, expression=to), 602 } 603 604 def _parse_dcolon(self) -> t.Optional[exp.Expression]: 605 # We want to use _parse_types() if the first token after :: is a known type, 606 # otherwise we could parse something like x::varchar(max) into a function 607 if self._match_set(self.TYPE_TOKENS, advance=False): 608 return self._parse_types() 609 610 return self._parse_function() or self._parse_types() 611 612 def _parse_options(self) -> t.Optional[t.List[exp.Expression]]: 613 if not self._match(TokenType.OPTION): 614 return None 615 616 def _parse_option() -> t.Optional[exp.Expression]: 617 option = self._parse_var_from_options(OPTIONS) 618 if not option: 619 return None 620 621 self._match(TokenType.EQ) 622 return self.expression( 623 exp.QueryOption, this=option, expression=self._parse_primary_or_var() 624 ) 625 626 return self._parse_wrapped_csv(_parse_option) 627 628 def _parse_projections(self) -> t.List[exp.Expression]: 629 """ 630 T-SQL supports the syntax alias = expression in the SELECT's projection list, 631 so we transform all parsed Selects to convert their EQ projections into Aliases. 
632 633 See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax 634 """ 635 return [ 636 ( 637 exp.alias_(projection.expression, projection.this.this, copy=False) 638 if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column) 639 else projection 640 ) 641 for projection in super()._parse_projections() 642 ] 643 644 def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback: 645 """Applies to SQL Server and Azure SQL Database 646 COMMIT [ { TRAN | TRANSACTION } 647 [ transaction_name | @tran_name_variable ] ] 648 [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ] 649 650 ROLLBACK { TRAN | TRANSACTION } 651 [ transaction_name | @tran_name_variable 652 | savepoint_name | @savepoint_variable ] 653 """ 654 rollback = self._prev.token_type == TokenType.ROLLBACK 655 656 self._match_texts(("TRAN", "TRANSACTION")) 657 this = self._parse_id_var() 658 659 if rollback: 660 return self.expression(exp.Rollback, this=this) 661 662 durability = None 663 if self._match_pair(TokenType.WITH, TokenType.L_PAREN): 664 self._match_text_seq("DELAYED_DURABILITY") 665 self._match(TokenType.EQ) 666 667 if self._match_text_seq("OFF"): 668 durability = False 669 else: 670 self._match(TokenType.ON) 671 durability = True 672 673 self._match_r_paren() 674 675 return self.expression(exp.Commit, this=this, durability=durability) 676 677 def _parse_transaction(self) -> exp.Transaction | exp.Command: 678 """Applies to SQL Server and Azure SQL Database 679 BEGIN { TRAN | TRANSACTION } 680 [ { transaction_name | @tran_name_variable } 681 [ WITH MARK [ 'description' ] ] 682 ] 683 """ 684 if self._match_texts(("TRAN", "TRANSACTION")): 685 transaction = self.expression(exp.Transaction, this=self._parse_id_var()) 686 if self._match_text_seq("WITH", "MARK"): 687 transaction.set("mark", self._parse_string()) 688 689 return transaction 690 691 return self._parse_as_command(self._prev) 692 693 def _parse_returns(self) -> 
exp.ReturnsProperty: 694 table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS) 695 returns = super()._parse_returns() 696 returns.set("table", table) 697 return returns 698 699 def _parse_convert( 700 self, strict: bool, safe: t.Optional[bool] = None 701 ) -> t.Optional[exp.Expression]: 702 this = self._parse_types() 703 self._match(TokenType.COMMA) 704 args = [this, *self._parse_csv(self._parse_assignment)] 705 convert = exp.Convert.from_arg_list(args) 706 convert.set("safe", safe) 707 convert.set("strict", strict) 708 return convert 709 710 def _parse_user_defined_function( 711 self, kind: t.Optional[TokenType] = None 712 ) -> t.Optional[exp.Expression]: 713 this = super()._parse_user_defined_function(kind=kind) 714 715 if ( 716 kind == TokenType.FUNCTION 717 or isinstance(this, exp.UserDefinedFunction) 718 or self._match(TokenType.ALIAS, advance=False) 719 ): 720 return this 721 722 expressions = self._parse_csv(self._parse_function_parameter) 723 return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions) 724 725 def _parse_id_var( 726 self, 727 any_token: bool = True, 728 tokens: t.Optional[t.Collection[TokenType]] = None, 729 ) -> t.Optional[exp.Expression]: 730 is_temporary = self._match(TokenType.HASH) 731 is_global = is_temporary and self._match(TokenType.HASH) 732 733 this = super()._parse_id_var(any_token=any_token, tokens=tokens) 734 if this: 735 if is_global: 736 this.set("global", True) 737 elif is_temporary: 738 this.set("temporary", True) 739 740 return this 741 742 def _parse_create(self) -> exp.Create | exp.Command: 743 create = super()._parse_create() 744 745 if isinstance(create, exp.Create): 746 table = create.this.this if isinstance(create.this, exp.Schema) else create.this 747 if isinstance(table, exp.Table) and table.this.args.get("temporary"): 748 if not create.args.get("properties"): 749 create.set("properties", exp.Properties(expressions=[])) 750 751 
create.args["properties"].append("expressions", exp.TemporaryProperty()) 752 753 return create 754 755 def _parse_if(self) -> t.Optional[exp.Expression]: 756 index = self._index 757 758 if self._match_text_seq("OBJECT_ID"): 759 self._parse_wrapped_csv(self._parse_string) 760 if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP): 761 return self._parse_drop(exists=True) 762 self._retreat(index) 763 764 return super()._parse_if() 765 766 def _parse_unique(self) -> exp.UniqueColumnConstraint: 767 if self._match_texts(("CLUSTERED", "NONCLUSTERED")): 768 this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self) 769 else: 770 this = self._parse_schema(self._parse_id_var(any_token=False)) 771 772 return self.expression(exp.UniqueColumnConstraint, this=this) 773 774 def _parse_partition(self) -> t.Optional[exp.Partition]: 775 if not self._match_text_seq("WITH", "(", "PARTITIONS"): 776 return None 777 778 def parse_range(): 779 low = self._parse_bitwise() 780 high = self._parse_bitwise() if self._match_text_seq("TO") else None 781 782 return ( 783 self.expression(exp.PartitionRange, this=low, expression=high) if high else low 784 ) 785 786 partition = self.expression( 787 exp.Partition, expressions=self._parse_wrapped_csv(parse_range) 788 ) 789 790 self._match_r_paren() 791 792 return partition 793 794 def _parse_declare(self) -> exp.Declare | exp.Command: 795 index = self._index 796 expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem)) 797 798 if not expressions or self._curr: 799 self._retreat(index) 800 return self._parse_as_command(self._prev) 801 802 return self.expression(exp.Declare, expressions=expressions) 803 804 def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]: 805 var = self._parse_id_var() 806 if not var: 807 return None 808 809 value = None 810 self._match(TokenType.ALIAS) 811 if self._match(TokenType.TABLE): 812 data_type = self._parse_schema() 813 else: 814 data_type = self._parse_types() 815 
if self._match(TokenType.EQ): 816 value = self._parse_bitwise() 817 818 return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value) 819 820 class Generator(generator.Generator): 821 LIMIT_IS_TOP = True 822 QUERY_HINTS = False 823 RETURNING_END = False 824 NVL2_SUPPORTED = False 825 ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False 826 LIMIT_FETCH = "FETCH" 827 COMPUTED_COLUMN_WITH_TYPE = False 828 CTE_RECURSIVE_KEYWORD_REQUIRED = False 829 ENSURE_BOOLS = True 830 NULL_ORDERING_SUPPORTED = None 831 SUPPORTS_SINGLE_ARG_CONCAT = False 832 TABLESAMPLE_SEED_KEYWORD = "REPEATABLE" 833 SUPPORTS_SELECT_INTO = True 834 JSON_PATH_BRACKETED_KEY_SUPPORTED = False 835 SUPPORTS_TO_NUMBER = False 836 SET_OP_MODIFIERS = False 837 COPY_PARAMS_EQ_REQUIRED = True 838 PARSE_JSON_NAME = None 839 EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False 840 841 EXPRESSIONS_WITHOUT_NESTED_CTES = { 842 exp.Create, 843 exp.Delete, 844 exp.Insert, 845 exp.Intersect, 846 exp.Except, 847 exp.Merge, 848 exp.Select, 849 exp.Subquery, 850 exp.Union, 851 exp.Update, 852 } 853 854 SUPPORTED_JSON_PATH_PARTS = { 855 exp.JSONPathKey, 856 exp.JSONPathRoot, 857 exp.JSONPathSubscript, 858 } 859 860 TYPE_MAPPING = { 861 **generator.Generator.TYPE_MAPPING, 862 exp.DataType.Type.BOOLEAN: "BIT", 863 exp.DataType.Type.DECIMAL: "NUMERIC", 864 exp.DataType.Type.DATETIME: "DATETIME2", 865 exp.DataType.Type.DOUBLE: "FLOAT", 866 exp.DataType.Type.INT: "INTEGER", 867 exp.DataType.Type.ROWVERSION: "ROWVERSION", 868 exp.DataType.Type.TEXT: "VARCHAR(MAX)", 869 exp.DataType.Type.TIMESTAMP: "DATETIME2", 870 exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET", 871 exp.DataType.Type.UTINYINT: "TINYINT", 872 exp.DataType.Type.VARIANT: "SQL_VARIANT", 873 } 874 875 TYPE_MAPPING.pop(exp.DataType.Type.NCHAR) 876 TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR) 877 878 TRANSFORMS = { 879 **generator.Generator.TRANSFORMS, 880 exp.AnyValue: any_value_to_max_sql, 881 exp.ArrayToString: rename_func("STRING_AGG"), 882 
exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY", 883 exp.DateAdd: date_delta_sql("DATEADD"), 884 exp.DateDiff: date_delta_sql("DATEDIFF"), 885 exp.CTE: transforms.preprocess([qualify_derived_table_outputs]), 886 exp.CurrentDate: rename_func("GETDATE"), 887 exp.CurrentTimestamp: rename_func("GETDATE"), 888 exp.DateStrToDate: datestrtodate_sql, 889 exp.Extract: rename_func("DATEPART"), 890 exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql, 891 exp.GroupConcat: _string_agg_sql, 892 exp.If: rename_func("IIF"), 893 exp.JSONExtract: _json_extract_sql, 894 exp.JSONExtractScalar: _json_extract_sql, 895 exp.LastDay: lambda self, e: self.func("EOMONTH", e.this), 896 exp.Max: max_or_greatest, 897 exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this), 898 exp.Min: min_or_least, 899 exp.NumberToStr: _format_sql, 900 exp.Repeat: rename_func("REPLICATE"), 901 exp.Select: transforms.preprocess( 902 [ 903 transforms.eliminate_distinct_on, 904 transforms.eliminate_semi_and_anti_joins, 905 transforms.eliminate_qualify, 906 transforms.unnest_generate_date_array_using_recursive_cte, 907 ] 908 ), 909 exp.Stddev: rename_func("STDEV"), 910 exp.StrPosition: lambda self, e: self.func( 911 "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position") 912 ), 913 exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]), 914 exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this), 915 exp.SHA2: lambda self, e: self.func( 916 "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this 917 ), 918 exp.TemporaryProperty: lambda self, e: "", 919 exp.TimeStrToTime: _timestrtotime_sql, 920 exp.TimeToStr: _format_sql, 921 exp.Trim: trim_sql, 922 exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True), 923 exp.TsOrDsDiff: date_delta_sql("DATEDIFF"), 924 } 925 926 TRANSFORMS.pop(exp.ReturnsProperty) 927 928 PROPERTIES_LOCATION = { 929 **generator.Generator.PROPERTIES_LOCATION, 930 
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            """Render scope resolution using T-SQL's `scope::member` syntax."""
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            """Generate a SELECT, patching OFFSET-related requirements.

            T-SQL requires an ORDER BY for OFFSET and disallows combining TOP
            with OFFSET, so both are adjusted on the AST before generation.
            """
            if expression.args.get("offset"):
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                limit = expression.args.get("limit")
                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Generate CONVERT / TRY_CONVERT, including the optional style argument."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Generate one OPTION(...) entry; some options need an `=` before the value."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Map LATERAL joins to CROSS APPLY / OUTER APPLY; plain LATERAL is unsupported."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART to PARSENAME.

            Only possible for literal arguments with a '.' delimiter and at most
            4 parts, because PARSENAME counts parts from the right of a
            dot-separated object name.
            """
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            # PARSENAME indexes parts right-to-left, hence the index inversion
            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Generate TIMEFROMPARTS, dropping nanoseconds and defaulting missing args to 0."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Generate DATETIMEFROMPARTS, dropping unsupported zone/nano arguments."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            """T-SQL has no boolean literals: emit bit 1/0 in value contexts,
            otherwise an always-true/false comparison."""
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            """`IS TRUE/FALSE` has no T-SQL equivalent; compare against the bit value."""
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Prefix the created object's name with '#' when it's a temporary table."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE, handling several T-SQL-specific rewrites:

            - CREATE VIEW: strip the catalog and move bubbled-up CTEs back into the query
            - CTAS: rewritten as SELECT * INTO target FROM (...) AS temp
            - IF NOT EXISTS: emulated via IF NOT EXISTS (...) EXEC('<statement>')
            - CREATE OR REPLACE: rewritten as CREATE OR ALTER
            """
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with", with_.pop())

            sql = super().create_sql(expression)

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table))

                if like_property:
                    # CREATE TABLE ... LIKE copies structure only, so select no rows
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)

            if exists:
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql = self.sql(exp.Literal.string(sql))
                if kind == "SCHEMA":
                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("table_name").eq(table.name),
                        exp.column("table_schema").eq(table.db) if table.db else None,
                        exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                    )
                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        def count_sql(self, expression: exp.Count) -> str:
            """COUNT vs COUNT_BIG depending on whether a bigint count was parsed."""
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            # T-SQL requires the ROWS keyword after OFFSET <n>
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Generate the temporal-table FOR SYSTEM_TIME clause."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            """RETURNS [<table_var>] <type>, for table-valued function definitions."""
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            """RETURNING is expressed as OUTPUT [... INTO target] in T-SQL."""
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            """BEGIN TRANSACTION [name] [WITH MARK 'description']."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            """COMMIT TRANSACTION [name] [WITH (DELAYED_DURABILITY = ON|OFF)]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            """ROLLBACK TRANSACTION [name]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            """Restore the '#'/'##' prefix for (global) temporary object names."""
            identifier = super().identifier_sql(expression)

            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier

        def constraint_sql(self, expression: exp.Constraint) -> str:
            this = self.sql(expression, "this")
            expressions = self.expressions(expression, flat=True, sep=" ")
            return f"CONSTRAINT {this} {expressions}"

        def length_sql(self, expression: exp.Length) -> str:
            return self._uncast_text(expression, "LEN")

        def right_sql(self, expression: exp.Right) -> str:
            return self._uncast_text(expression, "RIGHT")

        def left_sql(self, expression: exp.Left) -> str:
            return self._uncast_text(expression, "LEFT")

        def _uncast_text(self, expression: exp.Expression, name: str) -> str:
            """Drop a redundant CAST(... AS TEXT) inserted when these functions were parsed."""
            this = expression.this
            if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT):
                this_sql = self.sql(this, "this")
            else:
                this_sql = self.sql(this)
            expression_sql = self.sql(expression, "expression")
            return self.func(name, this_sql, expression_sql if expression_sql else None)

        def partition_sql(self, expression: exp.Partition) -> str:
            return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))"

        def alter_sql(self, expression: exp.Alter) -> str:
            """ALTER ... RENAME becomes an `EXEC sp_rename` call in T-SQL."""
            action = seq_get(expression.args.get("actions") or [], 0)
            if isinstance(action, exp.AlterRename):
                return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'"
            return super().alter_sql(expression)

        def drop_sql(self, expression: exp.Drop) -> str:
            # View names cannot be catalog-qualified in DROP VIEW
            if expression.args["kind"] == "VIEW":
                expression.this.set("catalog", None)
            return super().drop_sql(expression)

        def declare_sql(self, expression: exp.Declare) -> str:
            return f"DECLARE {self.expressions(expression, flat=True)}"

        def declareitem_sql(self, expression: exp.DeclareItem) -> str:
            """Generate `@var AS <type>[ = default]`; table variables get `AS TABLE (...)`."""
            variable = self.sql(expression, "this")
            default = self.sql(expression, "default")
            default = f" = {default}" if default else ""

            kind = self.sql(expression, "kind")
            if isinstance(expression.args.get("kind"), exp.Schema):
                kind = f"TABLE {kind}"

            return f"{variable} AS {kind}{default}"

        def options_modifier(self, expression: exp.Expression) -> str:
            options = self.expressions(expression, key="options")
            return f" OPTION{self.wrap(options)}" if options else ""

        def dpipe_sql(self, expression: exp.DPipe) -> str:
            # T-SQL concatenates strings with '+', so fold a || b || c into nested Adds
            return self.sql(
                reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten())
            )
def qualify_derived_table_outputs(expression: exp.Expression) -> exp.Expression:
    """Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.

    Only acts on CTE/Subquery nodes that carry a `TableAlias` without an explicit
    column list; any other expression is returned unchanged. The alias-quoting of
    the original columns is preserved on the newly created Alias nodes.
    """
    table_alias = expression.args.get("alias")

    needs_qualification = (
        isinstance(expression, (exp.CTE, exp.Subquery))
        and isinstance(table_alias, exp.TableAlias)
        and not table_alias.columns
    )
    if not needs_qualification:
        return expression

    from sqlglot.optimizer.qualify_columns import qualify_outputs

    # We keep track of the unaliased column projection indexes instead of the expressions
    # themselves, because the latter are going to be replaced by new nodes when the aliases
    # are added and hence we won't be able to reach these newly added Alias parents
    query = expression.this
    plain_column_positions = [
        position
        for position, projection in enumerate(query.selects)
        if isinstance(projection, exp.Column) and not projection.alias
    ]

    qualify_outputs(query)

    # Preserve the quoting information of columns for newly added Alias nodes
    aliased_selects = query.selects
    for position in plain_column_positions:
        alias_node = aliased_selects[position]
        column = alias_node.this
        if isinstance(column.this, exp.Identifier):
            alias_node.args["alias"].set("quoted", column.this.quoted)

    return expression
# Ensures all (unnamed) output columns are aliased for CTEs and Subqueries.
class TSQL(Dialect):
    """Dialect definition for Microsoft SQL Server / Azure SQL (Transact-SQL)."""

    SUPPORTS_SEMI_ANTI_JOIN = False
    LOG_BASE_FIRST = False
    TYPED_DIVISION = True
    CONCAT_COALESCE = True
    NORMALIZATION_STRATEGY = NormalizationStrategy.CASE_INSENSITIVE

    TIME_FORMAT = "'yyyy-mm-dd hh:mm:ss'"

    # Maps T-SQL date-part / format tokens to strftime-style directives
    TIME_MAPPING = {
        "year": "%Y",
        "dayofyear": "%j",
        "day": "%d",
        "dy": "%d",
        "y": "%Y",
        "week": "%W",
        "ww": "%W",
        "wk": "%W",
        "hour": "%h",
        "hh": "%I",
        "minute": "%M",
        "mi": "%M",
        "n": "%M",
        "second": "%S",
        "ss": "%S",
        "s": "%-S",
        "millisecond": "%f",
        "ms": "%f",
        "weekday": "%W",
        "dw": "%W",
        "month": "%m",
        "mm": "%M",
        "m": "%-M",
        "Y": "%Y",
        "YYYY": "%Y",
        "YY": "%y",
        "MMMM": "%B",
        "MMM": "%b",
        "MM": "%m",
        "M": "%-m",
        "dddd": "%A",
        "dd": "%d",
        "d": "%-d",
        "HH": "%H",
        "H": "%-H",
        "h": "%-I",
        "ffffff": "%f",
        "yyyy": "%Y",
        "yy": "%y",
    }

    # Maps the numeric CONVERT() "style" codes to strftime-style formats.
    # See the style argument of CAST/CONVERT in the T-SQL documentation.
    CONVERT_FORMAT_MAPPING = {
        "0": "%b %d %Y %-I:%M%p",
        "1": "%m/%d/%y",
        "2": "%y.%m.%d",
        "3": "%d/%m/%y",
        "4": "%d.%m.%y",
        "5": "%d-%m-%y",
        "6": "%d %b %y",
        "7": "%b %d, %y",
        "8": "%H:%M:%S",
        "9": "%b %d %Y %-I:%M:%S:%f%p",
        "10": "mm-dd-yy",
        "11": "yy/mm/dd",
        "12": "yymmdd",
        "13": "%d %b %Y %H:%M:ss:%f",
        "14": "%H:%M:%S:%f",
        "20": "%Y-%m-%d %H:%M:%S",
        "21": "%Y-%m-%d %H:%M:%S.%f",
        "22": "%m/%d/%y %-I:%M:%S %p",
        "23": "%Y-%m-%d",
        "24": "%H:%M:%S",
        "25": "%Y-%m-%d %H:%M:%S.%f",
        "100": "%b %d %Y %-I:%M%p",
        "101": "%m/%d/%Y",
        "102": "%Y.%m.%d",
        "103": "%d/%m/%Y",
        "104": "%d.%m.%Y",
        "105": "%d-%m-%Y",
        "106": "%d %b %Y",
        "107": "%b %d, %Y",
        "108": "%H:%M:%S",
        "109": "%b %d %Y %-I:%M:%S:%f%p",
        "110": "%m-%d-%Y",
        "111": "%Y/%m/%d",
        "112": "%Y%m%d",
        "113": "%d %b %Y %H:%M:%S:%f",
        "114": "%H:%M:%S:%f",
        "120": "%Y-%m-%d %H:%M:%S",
        "121": "%Y-%m-%d %H:%M:%S.%f",
    }

    # Maps single-letter FORMAT() standard date/time specifiers to strftime formats
    FORMAT_TIME_MAPPING = {
        "y": "%B %Y",
        "d": "%m/%d/%Y",
        "H": "%-H",
        "h": "%-I",
        "s": "%Y-%m-%d %H:%M:%S",
        "D": "%A,%B,%Y",
        "f": "%A,%B,%Y %-I:%M %p",
        "F": "%A,%B,%Y %-I:%M:%S %p",
        "g": "%m/%d/%Y %-I:%M %p",
        "G": "%m/%d/%Y %-I:%M:%S %p",
        "M": "%B %-d",
        "m": "%B %-d",
        "O": "%Y-%m-%dT%H:%M:%S",
        "u": "%Y-%M-%D %H:%M:%S%z",
        "U": "%A, %B %D, %Y %H:%M:%S%z",
        "T": "%-I:%M:%S %p",
        "t": "%-I:%M",
        "Y": "%a %Y",
    }

    class Tokenizer(tokens.Tokenizer):
        # T-SQL delimits identifiers with [brackets] or double quotes
        IDENTIFIERS = [("[", "]"), '"']
        QUOTES = ["'", '"']
        HEX_STRINGS = [("0x", ""), ("0X", "")]
        # @variables, $names and #temp-table names tokenize as single vars
        VAR_SINGLE_TOKENS = {"@", "$", "#"}

        KEYWORDS = {
            **tokens.Tokenizer.KEYWORDS,
            "CLUSTERED INDEX": TokenType.INDEX,
            "DATETIME2": TokenType.DATETIME,
            "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
            "DECLARE": TokenType.DECLARE,
            "EXEC": TokenType.COMMAND,
            "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
            "IMAGE": TokenType.IMAGE,
            "MONEY": TokenType.MONEY,
            "NONCLUSTERED INDEX": TokenType.INDEX,
            "NTEXT": TokenType.TEXT,
            "OPTION": TokenType.OPTION,
            "OUTPUT": TokenType.RETURNING,
            "PRINT": TokenType.COMMAND,
            "PROC": TokenType.PROCEDURE,
            "REAL": TokenType.FLOAT,
            "ROWVERSION": TokenType.ROWVERSION,
            "SMALLDATETIME": TokenType.DATETIME,
            "SMALLMONEY": TokenType.SMALLMONEY,
            "SQL_VARIANT": TokenType.VARIANT,
            "SYSTEM_USER": TokenType.CURRENT_USER,
            "TOP": TokenType.TOP,
            "TIMESTAMP": TokenType.ROWVERSION,
            "TINYINT": TokenType.UTINYINT,
            "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER,
            "UPDATE STATISTICS": TokenType.COMMAND,
            "XML": TokenType.XML,
        }
        # T-SQL has no Oracle-style /*+ hint */ comments
        KEYWORDS.pop("/*+")

        COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}

    class Parser(parser.Parser):
        SET_REQUIRES_ASSIGNMENT_DELIMITER = False
        LOG_DEFAULTS_TO_LN = True
        ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
        STRING_ALIASES = True
        NO_PAREN_IF_COMMANDS = False
        QUERY_MODIFIER_PARSERS = {
            **parser.Parser.QUERY_MODIFIER_PARSERS,
            TokenType.OPTION: lambda self: ("options", self._parse_options()),
        }

        FUNCTIONS = {
            **parser.Parser.FUNCTIONS,
            # CHARINDEX(substr, string[, start]): args are reordered into StrPosition
            "CHARINDEX": lambda args: exp.StrPosition(
                this=seq_get(args, 1),
                substr=seq_get(args, 0),
                position=seq_get(args, 2),
            ),
            "COUNT": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=False
            ),
            "COUNT_BIG": lambda args: exp.Count(
                this=seq_get(args, 0), expressions=args[1:], big_int=True
            ),
            "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
            "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
            "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
            "DATEPART": _build_formatted_time(exp.TimeToStr),
            "DATETIMEFROMPARTS": _build_datetimefromparts,
            "EOMONTH": _build_eomonth,
            "FORMAT": _build_format,
            "GETDATE": exp.CurrentTimestamp.from_arg_list,
            "HASHBYTES": _build_hashbytes,
            "ISNULL": build_coalesce,
            "JSON_QUERY": _build_json_query,
            "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
            "LEN": _build_with_arg_as_text(exp.Length),
            "LEFT": _build_with_arg_as_text(exp.Left),
            "RIGHT": _build_with_arg_as_text(exp.Right),
            "PARSENAME": _build_parsename,
            "REPLICATE": exp.Repeat.from_arg_list,
            "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
            "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
            "SUSER_NAME": exp.CurrentUser.from_arg_list,
            "SUSER_SNAME": exp.CurrentUser.from_arg_list,
            "SYSTEM_USER": exp.CurrentUser.from_arg_list,
            "TIMEFROMPARTS": _build_timefromparts,
        }

        JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

        RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
            TokenType.TABLE,
            *parser.Parser.TYPE_TOKENS,
        }

        STATEMENT_PARSERS = {
            **parser.Parser.STATEMENT_PARSERS,
            TokenType.DECLARE: lambda self: self._parse_declare(),
        }

        RANGE_PARSERS = {
            **parser.Parser.RANGE_PARSERS,
            TokenType.DCOLON: lambda self, this: self.expression(
                exp.ScopeResolution,
                this=this,
                expression=self._parse_function() or self._parse_var(any_token=True),
            ),
        }

        # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
        COLUMN_OPERATORS = {
            **parser.Parser.COLUMN_OPERATORS,
            TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
            if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
            else self.expression(exp.ScopeResolution, this=this, expression=to),
        }

        def _parse_dcolon(self) -> t.Optional[exp.Expression]:
            # We want to use _parse_types() if the first token after :: is a known type,
            # otherwise we could parse something like x::varchar(max) into a function
            if self._match_set(self.TYPE_TOKENS, advance=False):
                return self._parse_types()

            return self._parse_function() or self._parse_types()

        def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
            """Parse a trailing OPTION(<opt> [= <value>], ...) query hint clause."""
            if not self._match(TokenType.OPTION):
                return None

            def _parse_option() -> t.Optional[exp.Expression]:
                option = self._parse_var_from_options(OPTIONS)
                if not option:
                    return None

                self._match(TokenType.EQ)
                return self.expression(
                    exp.QueryOption, this=option, expression=self._parse_primary_or_var()
                )

            return self._parse_wrapped_csv(_parse_option)

        def _parse_projections(self) -> t.List[exp.Expression]:
            """
            T-SQL supports the syntax alias = expression in the SELECT's projection list,
            so we transform all parsed Selects to convert their EQ projections into Aliases.

            See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
            """
            return [
                (
                    exp.alias_(projection.expression, projection.this.this, copy=False)
                    if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                    else projection
                )
                for projection in super()._parse_projections()
            ]

        def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
            """Applies to SQL Server and Azure SQL Database
            COMMIT [ { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable ] ]
                [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

            ROLLBACK { TRAN | TRANSACTION }
                [ transaction_name | @tran_name_variable
                | savepoint_name | @savepoint_variable ]
            """
            rollback = self._prev.token_type == TokenType.ROLLBACK

            self._match_texts(("TRAN", "TRANSACTION"))
            this = self._parse_id_var()

            if rollback:
                return self.expression(exp.Rollback, this=this)

            durability = None
            if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
                self._match_text_seq("DELAYED_DURABILITY")
                self._match(TokenType.EQ)

                if self._match_text_seq("OFF"):
                    durability = False
                else:
                    self._match(TokenType.ON)
                    durability = True

                self._match_r_paren()

            return self.expression(exp.Commit, this=this, durability=durability)

        def _parse_transaction(self) -> exp.Transaction | exp.Command:
            """Applies to SQL Server and Azure SQL Database
            BEGIN { TRAN | TRANSACTION }
            [ { transaction_name | @tran_name_variable }
            [ WITH MARK [ 'description' ] ]
            ]
            """
            if self._match_texts(("TRAN", "TRANSACTION")):
                transaction = self.expression(exp.Transaction, this=self._parse_id_var())
                if self._match_text_seq("WITH", "MARK"):
                    transaction.set("mark", self._parse_string())

                return transaction

            return self._parse_as_command(self._prev)

        def _parse_returns(self) -> exp.ReturnsProperty:
            # A table variable name may precede the type in RETURNS @var TABLE (...)
            table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
            returns = super()._parse_returns()
            returns.set("table", table)
            return returns

        def _parse_convert(
            self, strict: bool, safe: t.Optional[bool] = None
        ) -> t.Optional[exp.Expression]:
            """Parse CONVERT(<type>, <expr>[, <style>]) / TRY_CONVERT arguments."""
            this = self._parse_types()
            self._match(TokenType.COMMA)
            args = [this, *self._parse_csv(self._parse_assignment)]
            convert = exp.Convert.from_arg_list(args)
            convert.set("safe", safe)
            convert.set("strict", strict)
            return convert

        def _parse_user_defined_function(
            self, kind: t.Optional[TokenType] = None
        ) -> t.Optional[exp.Expression]:
            this = super()._parse_user_defined_function(kind=kind)

            if (
                kind == TokenType.FUNCTION
                or isinstance(this, exp.UserDefinedFunction)
                or self._match(TokenType.ALIAS, advance=False)
            ):
                return this

            # Procedure parameters may appear without parentheses
            expressions = self._parse_csv(self._parse_function_parameter)
            return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

        def _parse_id_var(
            self,
            any_token: bool = True,
            tokens: t.Optional[t.Collection[TokenType]] = None,
        ) -> t.Optional[exp.Expression]:
            # '#name' marks a temporary object, '##name' a global temporary one
            is_temporary = self._match(TokenType.HASH)
            is_global = is_temporary and self._match(TokenType.HASH)

            this = super()._parse_id_var(any_token=any_token, tokens=tokens)
            if this:
                if is_global:
                    this.set("global", True)
                elif is_temporary:
                    this.set("temporary", True)

            return this

        def _parse_create(self) -> exp.Create | exp.Command:
            create = super()._parse_create()

            if isinstance(create, exp.Create):
                # Tag CREATE statements on '#'-prefixed names with TemporaryProperty
                table = create.this.this if isinstance(create.this, exp.Schema) else create.this
                if isinstance(table, exp.Table) and table.this.args.get("temporary"):
                    if not create.args.get("properties"):
                        create.set("properties", exp.Properties(expressions=[]))

                    create.args["properties"].append("expressions", exp.TemporaryProperty())

            return create

        def _parse_if(self) -> t.Optional[exp.Expression]:
            index = self._index

            # Recognize the IF OBJECT_ID(...) IS NOT NULL DROP ... existence idiom
            if self._match_text_seq("OBJECT_ID"):
                self._parse_wrapped_csv(self._parse_string)
                if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                    return self._parse_drop(exists=True)
            self._retreat(index)

            return super()._parse_if()

        def _parse_unique(self) -> exp.UniqueColumnConstraint:
            # UNIQUE may be followed by CLUSTERED/NONCLUSTERED in T-SQL
            if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
                this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
            else:
                this = self._parse_schema(self._parse_id_var(any_token=False))

            return self.expression(exp.UniqueColumnConstraint, this=this)

        def _parse_partition(self) -> t.Optional[exp.Partition]:
            """Parse WITH (PARTITIONS(<n> [TO <m>], ...)) on partitioned index/table DDL."""
            if not self._match_text_seq("WITH", "(", "PARTITIONS"):
                return None

            def parse_range():
                low = self._parse_bitwise()
                high = self._parse_bitwise() if self._match_text_seq("TO") else None

                return (
                    self.expression(exp.PartitionRange, this=low, expression=high) if high else low
                )

            partition = self.expression(
                exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
            )

            self._match_r_paren()

            return partition

        def _parse_declare(self) -> exp.Declare | exp.Command:
            index = self._index
            expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

            # Fall back to an opaque Command if the DECLARE body couldn't be fully parsed
            if not expressions or self._curr:
                self._retreat(index)
                return self._parse_as_command(self._prev)

            return self.expression(exp.Declare, expressions=expressions)

        def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
            """Parse one `@var [AS] <type>[ = default]` item of a DECLARE statement."""
            var = self._parse_id_var()
            if not var:
                return None

            value = None
            self._match(TokenType.ALIAS)
            if self._match(TokenType.TABLE):
                # Table variable: the "type" is a column schema
                data_type = self._parse_schema()
            else:
                data_type = self._parse_types()
                if self._match(TokenType.EQ):
                    value = self._parse_bitwise()

            return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)

    class Generator(generator.Generator):
        LIMIT_IS_TOP = True
        QUERY_HINTS = False
        RETURNING_END = False
        NVL2_SUPPORTED = False
        ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
        LIMIT_FETCH = "FETCH"
        COMPUTED_COLUMN_WITH_TYPE = False
        CTE_RECURSIVE_KEYWORD_REQUIRED = False
        ENSURE_BOOLS = True
        NULL_ORDERING_SUPPORTED = None
        SUPPORTS_SINGLE_ARG_CONCAT = False
        TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
        SUPPORTS_SELECT_INTO = True
        JSON_PATH_BRACKETED_KEY_SUPPORTED = False
        SUPPORTS_TO_NUMBER = False
        SET_OP_MODIFIERS = False
        COPY_PARAMS_EQ_REQUIRED = True
        PARSE_JSON_NAME = None
        EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False

        EXPRESSIONS_WITHOUT_NESTED_CTES = {
            exp.Create,
            exp.Delete,
            exp.Insert,
            exp.Intersect,
            exp.Except,
            exp.Merge,
            exp.Select,
            exp.Subquery,
            exp.Union,
            exp.Update,
        }

        SUPPORTED_JSON_PATH_PARTS = {
            exp.JSONPathKey,
            exp.JSONPathRoot,
            exp.JSONPathSubscript,
        }

        TYPE_MAPPING = {
            **generator.Generator.TYPE_MAPPING,
            exp.DataType.Type.BOOLEAN: "BIT",
            exp.DataType.Type.DECIMAL: "NUMERIC",
            exp.DataType.Type.DATETIME: "DATETIME2",
            exp.DataType.Type.DOUBLE: "FLOAT",
            exp.DataType.Type.INT: "INTEGER",
            exp.DataType.Type.ROWVERSION: "ROWVERSION",
            exp.DataType.Type.TEXT: "VARCHAR(MAX)",
            exp.DataType.Type.TIMESTAMP: "DATETIME2",
            exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
            exp.DataType.Type.UTINYINT: "TINYINT",
            exp.DataType.Type.VARIANT: "SQL_VARIANT",
        }

        # NCHAR/NVARCHAR are kept as-is (native T-SQL types), so drop the base mappings
        TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
        TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

        TRANSFORMS = {
            **generator.Generator.TRANSFORMS,
            exp.AnyValue: any_value_to_max_sql,
            exp.ArrayToString: rename_func("STRING_AGG"),
            exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
            exp.DateAdd: date_delta_sql("DATEADD"),
            exp.DateDiff: date_delta_sql("DATEDIFF"),
            exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
            exp.CurrentDate: rename_func("GETDATE"),
            exp.CurrentTimestamp: rename_func("GETDATE"),
            exp.DateStrToDate: datestrtodate_sql,
            exp.Extract: rename_func("DATEPART"),
            exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
            exp.GroupConcat: _string_agg_sql,
            exp.If: rename_func("IIF"),
            exp.JSONExtract: _json_extract_sql,
            exp.JSONExtractScalar: _json_extract_sql,
            exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
            exp.Max: max_or_greatest,
            exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
            exp.Min: min_or_least,
            exp.NumberToStr: _format_sql,
            exp.Repeat: rename_func("REPLICATE"),
            exp.Select: transforms.preprocess(
                [
                    transforms.eliminate_distinct_on,
                    transforms.eliminate_semi_and_anti_joins,
                    transforms.eliminate_qualify,
                    transforms.unnest_generate_date_array_using_recursive_cte,
                ]
            ),
            exp.Stddev: rename_func("STDEV"),
            exp.StrPosition: lambda self, e: self.func(
                "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position")
            ),
            exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
            exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
            exp.SHA2: lambda self, e: self.func(
                "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
            ),
            exp.TemporaryProperty: lambda self, e: "",
            exp.TimeStrToTime: _timestrtotime_sql,
            exp.TimeToStr: _format_sql,
            exp.Trim: trim_sql,
            exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
            exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
        }

        # RETURNS is handled by returnsproperty_sql below
        TRANSFORMS.pop(exp.ReturnsProperty)

        PROPERTIES_LOCATION = {
            **generator.Generator.PROPERTIES_LOCATION,
            exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
        }

        def scope_resolution(self, rhs: str, scope_name: str) -> str:
            """Render scope resolution using T-SQL's `scope::member` syntax."""
            return f"{scope_name}::{rhs}"

        def select_sql(self, expression: exp.Select) -> str:
            """Generate a SELECT, patching OFFSET-related requirements.

            T-SQL requires an ORDER BY for OFFSET and disallows combining TOP
            with OFFSET, so both are adjusted on the AST before generation.
            """
            if expression.args.get("offset"):
                if not expression.args.get("order"):
                    # ORDER BY is required in order to use OFFSET in a query, so we use
                    # a noop order by, since we don't really care about the order.
                    # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                    expression.order_by(exp.select(exp.null()).subquery(), copy=False)

                limit = expression.args.get("limit")
                if isinstance(limit, exp.Limit):
                    # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                    # we replace here because otherwise TOP would be generated in select_sql
                    limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

            return super().select_sql(expression)

        def convert_sql(self, expression: exp.Convert) -> str:
            """Generate CONVERT / TRY_CONVERT, including the optional style argument."""
            name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
            return self.func(
                name, expression.this, expression.expression, expression.args.get("style")
            )

        def queryoption_sql(self, expression: exp.QueryOption) -> str:
            """Generate one OPTION(...) entry; some options need an `=` before the value."""
            option = self.sql(expression, "this")
            value = self.sql(expression, "expression")
            if value:
                optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
                return f"{option} {optional_equal_sign}{value}"
            return option

        def lateral_op(self, expression: exp.Lateral) -> str:
            """Map LATERAL joins to CROSS APPLY / OUTER APPLY; plain LATERAL is unsupported."""
            cross_apply = expression.args.get("cross_apply")
            if cross_apply is True:
                return "CROSS APPLY"
            if cross_apply is False:
                return "OUTER APPLY"

            # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
            self.unsupported("LATERAL clause is not supported.")
            return "LATERAL"

        def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
            """Transpile SPLIT_PART to PARSENAME.

            Only possible for literal arguments with a '.' delimiter and at most
            4 parts, because PARSENAME counts parts from the right of a
            dot-separated object name.
            """
            this = expression.this
            split_count = len(this.name.split("."))
            delimiter = expression.args.get("delimiter")
            part_index = expression.args.get("part_index")

            if (
                not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
                or (delimiter and delimiter.name != ".")
                or not part_index
                or split_count > 4
            ):
                self.unsupported(
                    "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
                )
                return ""

            # PARSENAME indexes parts right-to-left, hence the index inversion
            return self.func(
                "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
            )

        def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
            """Generate TIMEFROMPARTS, dropping nanoseconds and defaulting missing args to 0."""
            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

            if expression.args.get("fractions") is None:
                expression.set("fractions", exp.Literal.number(0))
            if expression.args.get("precision") is None:
                expression.set("precision", exp.Literal.number(0))

            return rename_func("TIMEFROMPARTS")(self, expression)

        def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
            """Generate DATETIMEFROMPARTS, dropping unsupported zone/nano arguments."""
            zone = expression.args.get("zone")
            if zone is not None:
                zone.pop()
                self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

            nano = expression.args.get("nano")
            if nano is not None:
                nano.pop()
                self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

            if expression.args.get("milli") is None:
                expression.set("milli", exp.Literal.number(0))

            return rename_func("DATETIMEFROMPARTS")(self, expression)

        def setitem_sql(self, expression: exp.SetItem) -> str:
            this = expression.this
            if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
                # T-SQL does not use '=' in SET command, except when the LHS is a variable.
                return f"{self.sql(this.left)} {self.sql(this.right)}"

            return super().setitem_sql(expression)

        def boolean_sql(self, expression: exp.Boolean) -> str:
            """T-SQL has no boolean literals: emit bit 1/0 in value contexts,
            otherwise an always-true/false comparison."""
            if type(expression.parent) in BIT_TYPES or isinstance(
                expression.find_ancestor(exp.Values, exp.Select), exp.Values
            ):
                return "1" if expression.this else "0"

            return "(1 = 1)" if expression.this else "(1 = 0)"

        def is_sql(self, expression: exp.Is) -> str:
            """`IS TRUE/FALSE` has no T-SQL equivalent; compare against the bit value."""
            if isinstance(expression.expression, exp.Boolean):
                return self.binary(expression, "=")
            return self.binary(expression, "IS")

        def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
            """Prefix the created object's name with '#' when it's a temporary table."""
            sql = self.sql(expression, "this")
            properties = expression.args.get("properties")

            if sql[:1] != "#" and any(
                isinstance(prop, exp.TemporaryProperty)
                for prop in (properties.expressions if properties else [])
            ):
                sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

            return sql

        def create_sql(self, expression: exp.Create) -> str:
            """Generate CREATE, handling several T-SQL-specific rewrites:

            - CREATE VIEW: strip the catalog and move bubbled-up CTEs back into the query
            - CTAS: rewritten as SELECT * INTO target FROM (...) AS temp
            - IF NOT EXISTS: emulated via IF NOT EXISTS (...) EXEC('<statement>')
            - CREATE OR REPLACE: rewritten as CREATE OR ALTER
            """
            kind = expression.kind
            exists = expression.args.pop("exists", None)

            like_property = expression.find(exp.LikeProperty)
            if like_property:
                ctas_expression = like_property.this
            else:
                ctas_expression = expression.expression

            if kind == "VIEW":
                expression.this.set("catalog", None)
                with_ = expression.args.get("with")
                if ctas_expression and with_:
                    # We've already preprocessed the Create expression to bubble up any nested CTEs,
                    # but CREATE VIEW actually requires the WITH clause to come after it so we need
                    # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                    ctas_expression.set("with", with_.pop())

            sql = super().create_sql(expression)

            table = expression.find(exp.Table)

            # Convert CTAS statement to SELECT .. INTO ..
            if kind == "TABLE" and ctas_expression:
                if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                    ctas_expression = ctas_expression.subquery()

                select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
                select_into.set("into", exp.Into(this=table))

                if like_property:
                    # CREATE TABLE ... LIKE copies structure only, so select no rows
                    select_into.limit(0, copy=False)

                sql = self.sql(select_into)

            if exists:
                identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
                sql = self.sql(exp.Literal.string(sql))
                if kind == "SCHEMA":
                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})"""
                elif kind == "TABLE":
                    assert table
                    where = exp.and_(
                        exp.column("table_name").eq(table.name),
                        exp.column("table_schema").eq(table.db) if table.db else None,
                        exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                    )
                    sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})"""
                elif kind == "INDEX":
                    index = self.sql(exp.Literal.string(expression.this.text("this")))
                    sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})"""
            elif expression.args.get("replace"):
                sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

            return self.prepend_ctes(expression, sql)

        def count_sql(self, expression: exp.Count) -> str:
            """COUNT vs COUNT_BIG depending on whether a bigint count was parsed."""
            func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
            return rename_func(func_name)(self, expression)

        def offset_sql(self, expression: exp.Offset) -> str:
            # T-SQL requires the ROWS keyword after OFFSET <n>
            return f"{super().offset_sql(expression)} ROWS"

        def version_sql(self, expression: exp.Version) -> str:
            """Generate the temporal-table FOR SYSTEM_TIME clause."""
            name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
            this = f"FOR {name}"
            expr = expression.expression
            kind = expression.text("kind")
            if kind in ("FROM", "BETWEEN"):
                args = expr.expressions
                sep = "TO" if kind == "FROM" else "AND"
                expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
            else:
                expr_sql = self.sql(expr)

            expr_sql = f" {expr_sql}" if expr_sql else ""
            return f"{this} {kind}{expr_sql}"

        def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
            """RETURNS [<table_var>] <type>, for table-valued function definitions."""
            table = expression.args.get("table")
            table = f"{table} " if table else ""
            return f"RETURNS {table}{self.sql(expression, 'this')}"

        def returning_sql(self, expression: exp.Returning) -> str:
            """RETURNING is expressed as OUTPUT [... INTO target] in T-SQL."""
            into = self.sql(expression, "into")
            into = self.seg(f"INTO {into}") if into else ""
            return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

        def transaction_sql(self, expression: exp.Transaction) -> str:
            """BEGIN TRANSACTION [name] [WITH MARK 'description']."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            mark = self.sql(expression, "mark")
            mark = f" WITH MARK {mark}" if mark else ""
            return f"BEGIN TRANSACTION{this}{mark}"

        def commit_sql(self, expression: exp.Commit) -> str:
            """COMMIT TRANSACTION [name] [WITH (DELAYED_DURABILITY = ON|OFF)]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            durability = expression.args.get("durability")
            durability = (
                f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
                if durability is not None
                else ""
            )
            return f"COMMIT TRANSACTION{this}{durability}"

        def rollback_sql(self, expression: exp.Rollback) -> str:
            """ROLLBACK TRANSACTION [name]."""
            this = self.sql(expression, "this")
            this = f" {this}" if this else ""
            return f"ROLLBACK TRANSACTION{this}"

        def identifier_sql(self, expression: exp.Identifier) -> str:
            """Restore the '#'/'##' prefix for (global) temporary object names."""
            identifier = super().identifier_sql(expression)

            if expression.args.get("global"):
                identifier = f"##{identifier}"
            elif expression.args.get("temporary"):
                identifier = f"#{identifier}"

            return identifier
identifier 1182 1183 def constraint_sql(self, expression: exp.Constraint) -> str: 1184 this = self.sql(expression, "this") 1185 expressions = self.expressions(expression, flat=True, sep=" ") 1186 return f"CONSTRAINT {this} {expressions}" 1187 1188 def length_sql(self, expression: exp.Length) -> str: 1189 return self._uncast_text(expression, "LEN") 1190 1191 def right_sql(self, expression: exp.Right) -> str: 1192 return self._uncast_text(expression, "RIGHT") 1193 1194 def left_sql(self, expression: exp.Left) -> str: 1195 return self._uncast_text(expression, "LEFT") 1196 1197 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1198 this = expression.this 1199 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1200 this_sql = self.sql(this, "this") 1201 else: 1202 this_sql = self.sql(this) 1203 expression_sql = self.sql(expression, "expression") 1204 return self.func(name, this_sql, expression_sql if expression_sql else None) 1205 1206 def partition_sql(self, expression: exp.Partition) -> str: 1207 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1208 1209 def alter_sql(self, expression: exp.Alter) -> str: 1210 action = seq_get(expression.args.get("actions") or [], 0) 1211 if isinstance(action, exp.AlterRename): 1212 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1213 return super().alter_sql(expression) 1214 1215 def drop_sql(self, expression: exp.Drop) -> str: 1216 if expression.args["kind"] == "VIEW": 1217 expression.this.set("catalog", None) 1218 return super().drop_sql(expression) 1219 1220 def declare_sql(self, expression: exp.Declare) -> str: 1221 return f"DECLARE {self.expressions(expression, flat=True)}" 1222 1223 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1224 variable = self.sql(expression, "this") 1225 default = self.sql(expression, "default") 1226 default = f" = {default}" if default else "" 1227 1228 kind = self.sql(expression, "kind") 1229 if 
isinstance(expression.args.get("kind"), exp.Schema): 1230 kind = f"TABLE {kind}" 1231 1232 return f"{variable} AS {kind}{default}" 1233 1234 def options_modifier(self, expression: exp.Expression) -> str: 1235 options = self.expressions(expression, key="options") 1236 return f" OPTION{self.wrap(options)}" if options else "" 1237 1238 def dpipe_sql(self, expression: exp.DPipe) -> str: 1239 return self.sql( 1240 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1241 )
Whether the base argument comes first in the LOG function. Possible values: True (base first), False (base last), None (the two-argument form of LOG is not supported).
Whether the behavior of `a / b` depends on the types of `a` and `b`. False means `a / b` is always float division; True means `a / b` is integer division when both `a` and `b` are integers.
A NULL argument to CONCAT yields NULL by default, but in some dialects it yields an empty string instead.
Specifies the strategy according to which identifiers should be normalized.
Associates this dialect's time formats with their equivalent Python strftime
formats.
Inherited Members
- sqlglot.dialects.dialect.Dialect
- Dialect
- INDEX_OFFSET
- WEEK_OFFSET
- UNNEST_COLUMN_ONLY
- ALIAS_POST_TABLESAMPLE
- TABLESAMPLE_SIZE_IS_PERCENT
- IDENTIFIERS_CAN_START_WITH_DIGIT
- DPIPE_IS_STRING_CONCAT
- STRICT_STRING_CONCAT
- SUPPORTS_USER_DEFINED_TYPES
- COPY_PARAMS_ARE_CSV
- NORMALIZE_FUNCTIONS
- NULL_ORDERING
- SAFE_DIVISION
- HEX_LOWERCASE
- DATE_FORMAT
- DATEINT_FORMAT
- FORMAT_MAPPING
- UNESCAPED_SEQUENCES
- PSEUDOCOLUMNS
- PREFER_CTE_ALIAS_COLUMN
- FORCE_EARLY_ALIAS_REF_EXPANSION
- EXPAND_ALIAS_REFS_EARLY_ONLY_IN_GROUP_BY
- SUPPORTS_ORDER_BY_ALL
- HAS_DISTINCT_ARRAY_CONSTRUCTORS
- SUPPORTS_FIXED_SIZE_ARRAYS
- STRICT_JSON_PATH_SYNTAX
- ON_CONDITION_EMPTY_BEFORE_ERROR
- ARRAY_AGG_INCLUDES_NULLS
- REGEXP_EXTRACT_DEFAULT_GROUP
- SET_OP_DISTINCT_BY_DEFAULT
- CREATABLE_KIND_MAPPING
- DATE_PART_MAPPING
- TYPE_TO_EXPRESSIONS
- ANNOTATORS
- get_or_raise
- format_time
- settings
- normalize_identifier
- case_sensitive
- can_identify
- quote_identifier
- to_json_path
- parse
- parse_into
- generate
- transpile
- tokenize
- tokenizer
- jsonpath_tokenizer
- parser
- generator
class Tokenizer(tokens.Tokenizer):
    """T-SQL tokenizer: bracketed identifiers, 0x hex literals and T-SQL-specific keywords."""

    # T-SQL quotes identifiers with [brackets] or double quotes.
    IDENTIFIERS = [("[", "]"), '"']
    QUOTES = ["'", '"']
    # Hex literals are written 0xABCD with no closing delimiter.
    HEX_STRINGS = [("0x", ""), ("0X", "")]
    # @, $ and # may appear inside single tokens (e.g. @variable, #temp_table).
    VAR_SINGLE_TOKENS = {"@", "$", "#"}

    KEYWORDS = {
        **tokens.Tokenizer.KEYWORDS,
        "CLUSTERED INDEX": TokenType.INDEX,
        "DATETIME2": TokenType.DATETIME,
        "DATETIMEOFFSET": TokenType.TIMESTAMPTZ,
        "DECLARE": TokenType.DECLARE,
        "EXEC": TokenType.COMMAND,
        "FOR SYSTEM_TIME": TokenType.TIMESTAMP_SNAPSHOT,
        "IMAGE": TokenType.IMAGE,
        "MONEY": TokenType.MONEY,
        "NONCLUSTERED INDEX": TokenType.INDEX,
        "NTEXT": TokenType.TEXT,
        "OPTION": TokenType.OPTION,
        "OUTPUT": TokenType.RETURNING,
        "PRINT": TokenType.COMMAND,
        "PROC": TokenType.PROCEDURE,
        "REAL": TokenType.FLOAT,
        "ROWVERSION": TokenType.ROWVERSION,
        "SMALLDATETIME": TokenType.DATETIME,
        "SMALLMONEY": TokenType.SMALLMONEY,
        "SQL_VARIANT": TokenType.VARIANT,
        "SYSTEM_USER": TokenType.CURRENT_USER,
        "TOP": TokenType.TOP,
        # T-SQL's legacy TIMESTAMP type is a row-version marker, not a datetime.
        "TIMESTAMP": TokenType.ROWVERSION,
        "TINYINT": TokenType.UTINYINT,
        "UNIQUEIDENTIFIER": TokenType.UNIQUEIDENTIFIER,
        "UPDATE STATISTICS": TokenType.COMMAND,
        "XML": TokenType.XML,
    }
    # T-SQL has no /*+ ... */ optimizer-hint comment syntax.
    KEYWORDS.pop("/*+")

    COMMANDS = {*tokens.Tokenizer.COMMANDS, TokenType.END}
Inherited Members
- sqlglot.tokens.Tokenizer
- Tokenizer
- SINGLE_TOKENS
- BIT_STRINGS
- BYTE_STRINGS
- RAW_STRINGS
- HEREDOC_STRINGS
- UNICODE_STRINGS
- IDENTIFIER_ESCAPES
- STRING_ESCAPES
- HEREDOC_TAG_IS_IDENTIFIER
- HEREDOC_STRING_ALTERNATIVE
- STRING_ESCAPES_ALLOWED_IN_RAW_STRINGS
- NESTED_COMMENTS
- WHITE_SPACE
- COMMAND_PREFIX_TOKENS
- NUMERIC_LITERALS
- COMMENTS
- dialect
- reset
- tokenize
- tokenize_rs
- size
- sql
- tokens
class Parser(parser.Parser):
    """T-SQL parser: dialect-specific functions, DECLARE/OPTION handling, ::, and temp tables."""

    SET_REQUIRES_ASSIGNMENT_DELIMITER = False
    LOG_DEFAULTS_TO_LN = True
    ALTER_TABLE_ADD_REQUIRED_FOR_EACH_COLUMN = False
    STRING_ALIASES = True
    NO_PAREN_IF_COMMANDS = False

    QUERY_MODIFIER_PARSERS = {
        **parser.Parser.QUERY_MODIFIER_PARSERS,
        TokenType.OPTION: lambda self: ("options", self._parse_options()),
    }

    FUNCTIONS = {
        **parser.Parser.FUNCTIONS,
        # CHARINDEX(substr, string[, start]) -> StrPosition(this=string, substr=substr)
        "CHARINDEX": lambda args: exp.StrPosition(
            this=seq_get(args, 1),
            substr=seq_get(args, 0),
            position=seq_get(args, 2),
        ),
        "COUNT": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=False
        ),
        "COUNT_BIG": lambda args: exp.Count(
            this=seq_get(args, 0), expressions=args[1:], big_int=True
        ),
        "DATEADD": build_date_delta(exp.DateAdd, unit_mapping=DATE_DELTA_INTERVAL),
        "DATEDIFF": _build_date_delta(exp.DateDiff, unit_mapping=DATE_DELTA_INTERVAL),
        "DATENAME": _build_formatted_time(exp.TimeToStr, full_format_mapping=True),
        "DATEPART": _build_formatted_time(exp.TimeToStr),
        "DATETIMEFROMPARTS": _build_datetimefromparts,
        "EOMONTH": _build_eomonth,
        "FORMAT": _build_format,
        "GETDATE": exp.CurrentTimestamp.from_arg_list,
        "HASHBYTES": _build_hashbytes,
        "ISNULL": build_coalesce,
        "JSON_QUERY": _build_json_query,
        "JSON_VALUE": parser.build_extract_json_with_path(exp.JSONExtractScalar),
        "LEN": _build_with_arg_as_text(exp.Length),
        "LEFT": _build_with_arg_as_text(exp.Left),
        "RIGHT": _build_with_arg_as_text(exp.Right),
        "PARSENAME": _build_parsename,
        "REPLICATE": exp.Repeat.from_arg_list,
        "SQUARE": lambda args: exp.Pow(this=seq_get(args, 0), expression=exp.Literal.number(2)),
        "SYSDATETIME": exp.CurrentTimestamp.from_arg_list,
        "SUSER_NAME": exp.CurrentUser.from_arg_list,
        "SUSER_SNAME": exp.CurrentUser.from_arg_list,
        "SYSTEM_USER": exp.CurrentUser.from_arg_list,
        "TIMEFROMPARTS": _build_timefromparts,
    }

    JOIN_HINTS = {"LOOP", "HASH", "MERGE", "REMOTE"}

    RETURNS_TABLE_TOKENS = parser.Parser.ID_VAR_TOKENS - {
        TokenType.TABLE,
        *parser.Parser.TYPE_TOKENS,
    }

    STATEMENT_PARSERS = {
        **parser.Parser.STATEMENT_PARSERS,
        TokenType.DECLARE: lambda self: self._parse_declare(),
    }

    RANGE_PARSERS = {
        **parser.Parser.RANGE_PARSERS,
        TokenType.DCOLON: lambda self, this: self.expression(
            exp.ScopeResolution,
            this=this,
            expression=self._parse_function() or self._parse_var(any_token=True),
        ),
    }

    # The DCOLON (::) operator serves as a scope resolution (exp.ScopeResolution) operator in T-SQL
    COLUMN_OPERATORS = {
        **parser.Parser.COLUMN_OPERATORS,
        TokenType.DCOLON: lambda self, this, to: self.expression(exp.Cast, this=this, to=to)
        if isinstance(to, exp.DataType) and to.this != exp.DataType.Type.USERDEFINED
        else self.expression(exp.ScopeResolution, this=this, expression=to),
    }

    def _parse_dcolon(self) -> t.Optional[exp.Expression]:
        # We want to use _parse_types() if the first token after :: is a known type,
        # otherwise we could parse something like x::varchar(max) into a function
        if self._match_set(self.TYPE_TOKENS, advance=False):
            return self._parse_types()

        return self._parse_function() or self._parse_types()

    def _parse_options(self) -> t.Optional[t.List[exp.Expression]]:
        """Parse a trailing OPTION(...) hint list into QueryOption expressions, or None."""
        if not self._match(TokenType.OPTION):
            return None

        def _parse_option() -> t.Optional[exp.Expression]:
            option = self._parse_var_from_options(OPTIONS)
            if not option:
                return None

            # The '=' between an option name and its value is optional.
            self._match(TokenType.EQ)
            return self.expression(
                exp.QueryOption, this=option, expression=self._parse_primary_or_var()
            )

        return self._parse_wrapped_csv(_parse_option)

    def _parse_projections(self) -> t.List[exp.Expression]:
        """
        T-SQL supports the syntax alias = expression in the SELECT's projection list,
        so we transform all parsed Selects to convert their EQ projections into Aliases.

        See: https://learn.microsoft.com/en-us/sql/t-sql/queries/select-clause-transact-sql?view=sql-server-ver16#syntax
        """
        return [
            (
                exp.alias_(projection.expression, projection.this.this, copy=False)
                if isinstance(projection, exp.EQ) and isinstance(projection.this, exp.Column)
                else projection
            )
            for projection in super()._parse_projections()
        ]

    def _parse_commit_or_rollback(self) -> exp.Commit | exp.Rollback:
        """Applies to SQL Server and Azure SQL Database
        COMMIT [ { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable ] ]
            [ WITH ( DELAYED_DURABILITY = { OFF | ON } ) ]

        ROLLBACK { TRAN | TRANSACTION }
            [ transaction_name | @tran_name_variable
            | savepoint_name | @savepoint_variable ]
        """
        rollback = self._prev.token_type == TokenType.ROLLBACK

        self._match_texts(("TRAN", "TRANSACTION"))
        this = self._parse_id_var()

        if rollback:
            return self.expression(exp.Rollback, this=this)

        durability = None
        if self._match_pair(TokenType.WITH, TokenType.L_PAREN):
            self._match_text_seq("DELAYED_DURABILITY")
            self._match(TokenType.EQ)

            if self._match_text_seq("OFF"):
                durability = False
            else:
                self._match(TokenType.ON)
                durability = True

            self._match_r_paren()

        return self.expression(exp.Commit, this=this, durability=durability)

    def _parse_transaction(self) -> exp.Transaction | exp.Command:
        """Applies to SQL Server and Azure SQL Database
        BEGIN { TRAN | TRANSACTION }
        [ { transaction_name | @tran_name_variable }
        [ WITH MARK [ 'description' ] ]
        ]
        """
        if self._match_texts(("TRAN", "TRANSACTION")):
            transaction = self.expression(exp.Transaction, this=self._parse_id_var())
            if self._match_text_seq("WITH", "MARK"):
                transaction.set("mark", self._parse_string())

            return transaction

        # Not a transaction statement (e.g. BEGIN ... END block): fall back to a raw command.
        return self._parse_as_command(self._prev)

    def _parse_returns(self) -> exp.ReturnsProperty:
        """Parse RETURNS, capturing the optional table-variable name of a TVF."""
        table = self._parse_id_var(any_token=False, tokens=self.RETURNS_TABLE_TOKENS)
        returns = super()._parse_returns()
        returns.set("table", table)
        return returns

    def _parse_convert(
        self, strict: bool, safe: t.Optional[bool] = None
    ) -> t.Optional[exp.Expression]:
        """Parse CONVERT/TRY_CONVERT: the target type comes first, then the value [, style]."""
        this = self._parse_types()
        self._match(TokenType.COMMA)
        args = [this, *self._parse_csv(self._parse_assignment)]
        convert = exp.Convert.from_arg_list(args)
        convert.set("safe", safe)
        convert.set("strict", strict)
        return convert

    def _parse_user_defined_function(
        self, kind: t.Optional[TokenType] = None
    ) -> t.Optional[exp.Expression]:
        """Parse a UDF definition, also accepting T-SQL's unparenthesized parameter lists."""
        this = super()._parse_user_defined_function(kind=kind)

        if (
            kind == TokenType.FUNCTION
            or isinstance(this, exp.UserDefinedFunction)
            or self._match(TokenType.ALIAS, advance=False)
        ):
            return this

        # Procedure parameters may appear without surrounding parentheses.
        expressions = self._parse_csv(self._parse_function_parameter)
        return self.expression(exp.UserDefinedFunction, this=this, expressions=expressions)

    def _parse_id_var(
        self,
        any_token: bool = True,
        tokens: t.Optional[t.Collection[TokenType]] = None,
    ) -> t.Optional[exp.Expression]:
        """Parse an identifier, flagging #local and ##global temporary-name prefixes."""
        is_temporary = self._match(TokenType.HASH)
        is_global = is_temporary and self._match(TokenType.HASH)

        this = super()._parse_id_var(any_token=any_token, tokens=tokens)
        if this:
            if is_global:
                this.set("global", True)
            elif is_temporary:
                this.set("temporary", True)

        return this

    def _parse_create(self) -> exp.Create | exp.Command:
        """Parse CREATE; #-prefixed table names are marked with a TemporaryProperty."""
        create = super()._parse_create()

        if isinstance(create, exp.Create):
            table = create.this.this if isinstance(create.this, exp.Schema) else create.this
            if isinstance(table, exp.Table) and table.this.args.get("temporary"):
                if not create.args.get("properties"):
                    create.set("properties", exp.Properties(expressions=[]))

                create.args["properties"].append("expressions", exp.TemporaryProperty())

        return create

    def _parse_if(self) -> t.Optional[exp.Expression]:
        """Parse IF, recognizing the IF OBJECT_ID(...) IS NOT NULL DROP ... idiom."""
        index = self._index

        if self._match_text_seq("OBJECT_ID"):
            self._parse_wrapped_csv(self._parse_string)
            if self._match_text_seq("IS", "NOT", "NULL") and self._match(TokenType.DROP):
                return self._parse_drop(exists=True)
        # Not the drop-if-exists idiom: rewind and parse a regular IF.
        self._retreat(index)

        return super()._parse_if()

    def _parse_unique(self) -> exp.UniqueColumnConstraint:
        """Parse UNIQUE, optionally prefixed by CLUSTERED/NONCLUSTERED."""
        if self._match_texts(("CLUSTERED", "NONCLUSTERED")):
            this = self.CONSTRAINT_PARSERS[self._prev.text.upper()](self)
        else:
            this = self._parse_schema(self._parse_id_var(any_token=False))

        return self.expression(exp.UniqueColumnConstraint, this=this)

    def _parse_partition(self) -> t.Optional[exp.Partition]:
        """Parse WITH (PARTITIONS(...)) including low TO high ranges."""
        if not self._match_text_seq("WITH", "(", "PARTITIONS"):
            return None

        def parse_range():
            low = self._parse_bitwise()
            high = self._parse_bitwise() if self._match_text_seq("TO") else None

            return (
                self.expression(exp.PartitionRange, this=low, expression=high) if high else low
            )

        partition = self.expression(
            exp.Partition, expressions=self._parse_wrapped_csv(parse_range)
        )

        self._match_r_paren()

        return partition

    def _parse_declare(self) -> exp.Declare | exp.Command:
        """Parse DECLARE; fall back to a raw command if the item list can't be parsed fully."""
        index = self._index
        expressions = self._try_parse(partial(self._parse_csv, self._parse_declareitem))

        if not expressions or self._curr:
            self._retreat(index)
            return self._parse_as_command(self._prev)

        return self.expression(exp.Declare, expressions=expressions)

    def _parse_declareitem(self) -> t.Optional[exp.DeclareItem]:
        """Parse one DECLARE item: @var [AS] <type or TABLE schema> [= default]."""
        var = self._parse_id_var()
        if not var:
            return None

        value = None
        self._match(TokenType.ALIAS)  # optional AS keyword
        if self._match(TokenType.TABLE):
            data_type = self._parse_schema()
        else:
            data_type = self._parse_types()
            if self._match(TokenType.EQ):
                value = self._parse_bitwise()

        return self.expression(exp.DeclareItem, this=var, kind=data_type, default=value)
Parser consumes a list of tokens produced by the Tokenizer and produces a parsed syntax tree.
Arguments:
- error_level: The desired error level. Default: ErrorLevel.IMMEDIATE
- error_message_context: The amount of context to capture from a query string when displaying the error message (in number of characters). Default: 100
- max_errors: Maximum number of error messages to include in a raised ParseError. This is only relevant if error_level is ErrorLevel.RAISE. Default: 3
Inherited Members
- sqlglot.parser.Parser
- Parser
- NO_PAREN_FUNCTIONS
- STRUCT_TYPE_TOKENS
- NESTED_TYPE_TOKENS
- ENUM_TYPE_TOKENS
- AGGREGATE_TYPE_TOKENS
- TYPE_TOKENS
- SIGNED_TO_UNSIGNED_TYPE_TOKEN
- SUBQUERY_PREDICATES
- RESERVED_TOKENS
- DB_CREATABLES
- CREATABLES
- ALTERABLES
- INTERVAL_VARS
- ALIAS_TOKENS
- ARRAY_CONSTRUCTORS
- COMMENT_TABLE_ALIAS_TOKENS
- UPDATE_ALIAS_TOKENS
- TRIM_TYPES
- FUNC_TOKENS
- CONJUNCTION
- ASSIGNMENT
- DISJUNCTION
- EQUALITY
- COMPARISON
- BITWISE
- TERM
- FACTOR
- EXPONENT
- TIMES
- TIMESTAMPS
- SET_OPERATIONS
- JOIN_METHODS
- JOIN_SIDES
- JOIN_KINDS
- LAMBDAS
- EXPRESSION_PARSERS
- UNARY_PARSERS
- STRING_PARSERS
- NUMERIC_PARSERS
- PRIMARY_PARSERS
- PLACEHOLDER_PARSERS
- PROPERTY_PARSERS
- CONSTRAINT_PARSERS
- ALTER_PARSERS
- ALTER_ALTER_PARSERS
- SCHEMA_UNNAMED_CONSTRAINTS
- NO_PAREN_FUNCTION_PARSERS
- INVALID_FUNC_NAME_TOKENS
- FUNCTIONS_WITH_ALIASED_ARGS
- KEY_VALUE_DEFINITIONS
- FUNCTION_PARSERS
- SET_PARSERS
- SHOW_PARSERS
- TYPE_LITERAL_PARSERS
- TYPE_CONVERTERS
- DDL_SELECT_TOKENS
- PRE_VOLATILE_TOKENS
- TRANSACTION_KIND
- TRANSACTION_CHARACTERISTICS
- CONFLICT_ACTIONS
- CREATE_SEQUENCE
- ISOLATED_LOADING_OPTIONS
- USABLES
- CAST_ACTIONS
- SCHEMA_BINDING_OPTIONS
- KEY_CONSTRAINT_OPTIONS
- INSERT_ALTERNATIVES
- CLONE_KEYWORDS
- HISTORICAL_DATA_PREFIX
- HISTORICAL_DATA_KIND
- OPCLASS_FOLLOW_KEYWORDS
- OPTYPE_FOLLOW_TOKENS
- TABLE_INDEX_HINT_TOKENS
- VIEW_ATTRIBUTES
- WINDOW_ALIAS_TOKENS
- WINDOW_BEFORE_PAREN_TOKENS
- WINDOW_SIDES
- JSON_KEY_VALUE_SEPARATOR_TOKENS
- FETCH_TOKENS
- ADD_CONSTRAINT_TOKENS
- DISTINCT_TOKENS
- NULL_TOKENS
- UNNEST_OFFSET_ALIAS_TOKENS
- SELECT_START_TOKENS
- COPY_INTO_VARLEN_OPTIONS
- IS_JSON_PREDICATE_KIND
- ODBC_DATETIME_LITERALS
- ON_CONDITION_TOKENS
- PRIVILEGE_FOLLOW_TOKENS
- DESCRIBE_STYLES
- OPERATION_MODIFIERS
- STRICT_CAST
- PREFIXED_PIVOT_COLUMNS
- IDENTIFY_PIVOT_STRINGS
- TABLESAMPLE_CSV
- DEFAULT_SAMPLING_METHOD
- TRIM_PATTERN_FIRST
- MODIFIERS_ATTACHED_TO_SET_OP
- SET_OP_MODIFIERS
- JSON_ARROWS_REQUIRE_JSON_TYPE
- COLON_IS_VARIANT_EXTRACT
- VALUES_FOLLOWED_BY_PAREN
- SUPPORTS_IMPLICIT_UNNEST
- INTERVAL_SPANS
- SUPPORTS_PARTITION_SELECTION
- error_level
- error_message_context
- max_errors
- dialect
- reset
- parse
- parse_into
- check_errors
- raise_error
- expression
- validate_expression
- errors
- sql
class Generator(generator.Generator):
    """T-SQL generator: TOP/OFFSET-FETCH limits, BIT booleans, T-SQL type and function names."""

    LIMIT_IS_TOP = True
    QUERY_HINTS = False
    RETURNING_END = False
    NVL2_SUPPORTED = False
    ALTER_TABLE_INCLUDE_COLUMN_KEYWORD = False
    LIMIT_FETCH = "FETCH"
    COMPUTED_COLUMN_WITH_TYPE = False
    CTE_RECURSIVE_KEYWORD_REQUIRED = False
    ENSURE_BOOLS = True
    NULL_ORDERING_SUPPORTED = None
    SUPPORTS_SINGLE_ARG_CONCAT = False
    TABLESAMPLE_SEED_KEYWORD = "REPEATABLE"
    SUPPORTS_SELECT_INTO = True
    JSON_PATH_BRACKETED_KEY_SUPPORTED = False
    SUPPORTS_TO_NUMBER = False
    SET_OP_MODIFIERS = False
    COPY_PARAMS_EQ_REQUIRED = True
    PARSE_JSON_NAME = None
    EXCEPT_INTERSECT_SUPPORT_ALL_CLAUSE = False

    # T-SQL requires CTEs on the outermost statement, so nested ones are bubbled up.
    EXPRESSIONS_WITHOUT_NESTED_CTES = {
        exp.Create,
        exp.Delete,
        exp.Insert,
        exp.Intersect,
        exp.Except,
        exp.Merge,
        exp.Select,
        exp.Subquery,
        exp.Union,
        exp.Update,
    }

    SUPPORTED_JSON_PATH_PARTS = {
        exp.JSONPathKey,
        exp.JSONPathRoot,
        exp.JSONPathSubscript,
    }

    TYPE_MAPPING = {
        **generator.Generator.TYPE_MAPPING,
        exp.DataType.Type.BOOLEAN: "BIT",
        exp.DataType.Type.DECIMAL: "NUMERIC",
        exp.DataType.Type.DATETIME: "DATETIME2",
        exp.DataType.Type.DOUBLE: "FLOAT",
        exp.DataType.Type.INT: "INTEGER",
        exp.DataType.Type.ROWVERSION: "ROWVERSION",
        exp.DataType.Type.TEXT: "VARCHAR(MAX)",
        exp.DataType.Type.TIMESTAMP: "DATETIME2",
        exp.DataType.Type.TIMESTAMPTZ: "DATETIMEOFFSET",
        exp.DataType.Type.UTINYINT: "TINYINT",
        exp.DataType.Type.VARIANT: "SQL_VARIANT",
    }

    # NCHAR/NVARCHAR are native T-SQL types, so render them as-is.
    TYPE_MAPPING.pop(exp.DataType.Type.NCHAR)
    TYPE_MAPPING.pop(exp.DataType.Type.NVARCHAR)

    TRANSFORMS = {
        **generator.Generator.TRANSFORMS,
        exp.AnyValue: any_value_to_max_sql,
        exp.ArrayToString: rename_func("STRING_AGG"),
        exp.AutoIncrementColumnConstraint: lambda *_: "IDENTITY",
        exp.DateAdd: date_delta_sql("DATEADD"),
        exp.DateDiff: date_delta_sql("DATEDIFF"),
        exp.CTE: transforms.preprocess([qualify_derived_table_outputs]),
        exp.CurrentDate: rename_func("GETDATE"),
        exp.CurrentTimestamp: rename_func("GETDATE"),
        exp.DateStrToDate: datestrtodate_sql,
        exp.Extract: rename_func("DATEPART"),
        exp.GeneratedAsIdentityColumnConstraint: generatedasidentitycolumnconstraint_sql,
        exp.GroupConcat: _string_agg_sql,
        exp.If: rename_func("IIF"),
        exp.JSONExtract: _json_extract_sql,
        exp.JSONExtractScalar: _json_extract_sql,
        exp.LastDay: lambda self, e: self.func("EOMONTH", e.this),
        exp.Max: max_or_greatest,
        exp.MD5: lambda self, e: self.func("HASHBYTES", exp.Literal.string("MD5"), e.this),
        exp.Min: min_or_least,
        exp.NumberToStr: _format_sql,
        exp.Repeat: rename_func("REPLICATE"),
        exp.Select: transforms.preprocess(
            [
                transforms.eliminate_distinct_on,
                transforms.eliminate_semi_and_anti_joins,
                transforms.eliminate_qualify,
                transforms.unnest_generate_date_array_using_recursive_cte,
            ]
        ),
        exp.Stddev: rename_func("STDEV"),
        exp.StrPosition: lambda self, e: self.func(
            "CHARINDEX", e.args.get("substr"), e.this, e.args.get("position")
        ),
        exp.Subquery: transforms.preprocess([qualify_derived_table_outputs]),
        exp.SHA: lambda self, e: self.func("HASHBYTES", exp.Literal.string("SHA1"), e.this),
        exp.SHA2: lambda self, e: self.func(
            "HASHBYTES", exp.Literal.string(f"SHA2_{e.args.get('length', 256)}"), e.this
        ),
        exp.TemporaryProperty: lambda self, e: "",
        exp.TimeStrToTime: _timestrtotime_sql,
        exp.TimeToStr: _format_sql,
        exp.Trim: trim_sql,
        exp.TsOrDsAdd: date_delta_sql("DATEADD", cast=True),
        exp.TsOrDsDiff: date_delta_sql("DATEDIFF"),
    }

    # RETURNS needs custom handling (see returnsproperty_sql).
    TRANSFORMS.pop(exp.ReturnsProperty)

    PROPERTIES_LOCATION = {
        **generator.Generator.PROPERTIES_LOCATION,
        exp.VolatileProperty: exp.Properties.Location.UNSUPPORTED,
    }

    def scope_resolution(self, rhs: str, scope_name: str) -> str:
        """Render scope resolution as scope::member."""
        return f"{scope_name}::{rhs}"

    def select_sql(self, expression: exp.Select) -> str:
        """Render SELECT, adapting OFFSET usage to T-SQL's ORDER BY/FETCH requirements."""
        if expression.args.get("offset"):
            if not expression.args.get("order"):
                # ORDER BY is required in order to use OFFSET in a query, so we use
                # a noop order by, since we don't really care about the order.
                # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
                expression.order_by(exp.select(exp.null()).subquery(), copy=False)

            limit = expression.args.get("limit")
            if isinstance(limit, exp.Limit):
                # TOP and OFFSET can't be combined, we need use FETCH instead of TOP
                # we replace here because otherwise TOP would be generated in select_sql
                limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

        return super().select_sql(expression)

    def convert_sql(self, expression: exp.Convert) -> str:
        """Render CONVERT or TRY_CONVERT with an optional style argument."""
        name = "TRY_CONVERT" if expression.args.get("safe") else "CONVERT"
        return self.func(
            name, expression.this, expression.expression, expression.args.get("style")
        )

    def queryoption_sql(self, expression: exp.QueryOption) -> str:
        """Render a single OPTION(...) entry, adding '=' only where T-SQL requires it."""
        option = self.sql(expression, "this")
        value = self.sql(expression, "expression")
        if value:
            optional_equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
            return f"{option} {optional_equal_sign}{value}"
        return option

    def lateral_op(self, expression: exp.Lateral) -> str:
        """Render LATERAL as CROSS APPLY / OUTER APPLY; plain LATERAL is unsupported."""
        cross_apply = expression.args.get("cross_apply")
        if cross_apply is True:
            return "CROSS APPLY"
        if cross_apply is False:
            return "OUTER APPLY"

        # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
        self.unsupported("LATERAL clause is not supported.")
        return "LATERAL"

    def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
        """Render SPLIT_PART via PARSENAME (literal '.'-delimited values only, max 4 parts)."""
        this = expression.this
        split_count = len(this.name.split("."))
        delimiter = expression.args.get("delimiter")
        part_index = expression.args.get("part_index")

        if (
            not all(isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index))
            or (delimiter and delimiter.name != ".")
            or not part_index
            or split_count > 4
        ):
            self.unsupported(
                "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
            )
            return ""

        # PARSENAME numbers parts from the right, SPLIT_PART from the left.
        return self.func(
            "PARSENAME", this, exp.Literal.number(split_count + 1 - part_index.to_py())
        )

    def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
        """Render TIMEFROMPARTS, dropping unsupported nanoseconds and zero-filling optional args."""
        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

        if expression.args.get("fractions") is None:
            expression.set("fractions", exp.Literal.number(0))
        if expression.args.get("precision") is None:
            expression.set("precision", exp.Literal.number(0))

        return rename_func("TIMEFROMPARTS")(self, expression)

    def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
        """Render DATETIMEFROMPARTS, dropping unsupported zone/nanosecond args."""
        zone = expression.args.get("zone")
        if zone is not None:
            zone.pop()
            self.unsupported("Time zone is not supported in DATETIMEFROMPARTS.")

        nano = expression.args.get("nano")
        if nano is not None:
            nano.pop()
            self.unsupported("Specifying nanoseconds is not supported in DATETIMEFROMPARTS.")

        if expression.args.get("milli") is None:
            expression.set("milli", exp.Literal.number(0))

        return rename_func("DATETIMEFROMPARTS")(self, expression)

    def setitem_sql(self, expression: exp.SetItem) -> str:
        """Render a SET item, omitting '=' unless the LHS is a variable."""
        this = expression.this
        if isinstance(this, exp.EQ) and not isinstance(this.left, exp.Parameter):
            # T-SQL does not use '=' in SET command, except when the LHS is a variable.
            return f"{self.sql(this.left)} {self.sql(this.right)}"

        return super().setitem_sql(expression)

    def boolean_sql(self, expression: exp.Boolean) -> str:
        """Render booleans as BIT literals (1/0) where T-SQL expects a value, else as comparisons."""
        if type(expression.parent) in BIT_TYPES or isinstance(
            expression.find_ancestor(exp.Values, exp.Select), exp.Values
        ):
            return "1" if expression.this else "0"

        return "(1 = 1)" if expression.this else "(1 = 0)"

    def is_sql(self, expression: exp.Is) -> str:
        """Render IS against a boolean as '=' since T-SQL booleans are BIT values."""
        if isinstance(expression.expression, exp.Boolean):
            return self.binary(expression, "=")
        return self.binary(expression, "IS")

    def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
        """Prefix temporary-table names with '#' (inside brackets when quoted)."""
        sql = self.sql(expression, "this")
        properties = expression.args.get("properties")

        if sql[:1] != "#" and any(
            isinstance(prop, exp.TemporaryProperty)
            for prop in (properties.expressions if properties else [])
        ):
            sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

        return sql

    def create_sql(self, expression: exp.Create) -> str:
        """Render CREATE, rewriting CTAS to SELECT INTO and IF NOT EXISTS to EXEC guards."""
        kind = expression.kind
        exists = expression.args.pop("exists", None)

        like_property = expression.find(exp.LikeProperty)
        if like_property:
            ctas_expression = like_property.this
        else:
            ctas_expression = expression.expression

        if kind == "VIEW":
            expression.this.set("catalog", None)
            with_ = expression.args.get("with")
            if ctas_expression and with_:
                # We've already preprocessed the Create expression to bubble up any nested CTEs,
                # but CREATE VIEW actually requires the WITH clause to come after it so we need
                # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
                ctas_expression.set("with", with_.pop())

        sql = super().create_sql(expression)

        table = expression.find(exp.Table)

        # Convert CTAS statement to SELECT .. INTO ..
        if kind == "TABLE" and ctas_expression:
            if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
                ctas_expression = ctas_expression.subquery()

            select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
            select_into.set("into", exp.Into(this=table))

            if like_property:
                # LIKE only copies the schema, so copy no rows.
                select_into.limit(0, copy=False)

            sql = self.sql(select_into)

        if exists:
            # Emulate IF NOT EXISTS by probing catalog views and EXEC'ing the statement string.
            identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
            sql = self.sql(exp.Literal.string(sql))
            if kind == "SCHEMA":
                sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})"""
            elif kind == "TABLE":
                assert table
                where = exp.and_(
                    exp.column("table_name").eq(table.name),
                    exp.column("table_schema").eq(table.db) if table.db else None,
                    exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
                )
                sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})"""
            elif kind == "INDEX":
                index = self.sql(exp.Literal.string(expression.this.text("this")))
                sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})"""
        elif expression.args.get("replace"):
            sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

        return self.prepend_ctes(expression, sql)

    def count_sql(self, expression: exp.Count) -> str:
        """Render COUNT or COUNT_BIG depending on the parsed 'big_int' flag."""
        func_name = "COUNT_BIG" if expression.args.get("big_int") else "COUNT"
        return rename_func(func_name)(self, expression)

    def offset_sql(self, expression: exp.Offset) -> str:
        """Append the mandatory ROWS keyword to OFFSET."""
        return f"{super().offset_sql(expression)} ROWS"

    def version_sql(self, expression: exp.Version) -> str:
        """Render FOR SYSTEM_TIME temporal clauses (AS OF / FROM..TO / BETWEEN..AND)."""
        name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
        this = f"FOR {name}"
        expr = expression.expression
        kind = expression.text("kind")
        if kind in ("FROM", "BETWEEN"):
            args = expr.expressions
            sep = "TO" if kind == "FROM" else "AND"
            expr_sql = f"{self.sql(seq_get(args, 0))} {sep} {self.sql(seq_get(args, 1))}"
        else:
            expr_sql = self.sql(expr)

        expr_sql = f" {expr_sql}" if expr_sql else ""
        return f"{this} {kind}{expr_sql}"

    def returnsproperty_sql(self, expression: exp.ReturnsProperty) -> str:
        """Render RETURNS, including the optional table variable name for TVFs."""
        table = expression.args.get("table")
        table = f"{table} " if table else ""
        return f"RETURNS {table}{self.sql(expression, 'this')}"

    def returning_sql(self, expression: exp.Returning) -> str:
        """Render RETURNING as T-SQL's OUTPUT [... INTO ...] clause."""
        into = self.sql(expression, "into")
        into = self.seg(f"INTO {into}") if into else ""
        return f"{self.seg('OUTPUT')} {self.expressions(expression, flat=True)}{into}"

    def transaction_sql(self, expression: exp.Transaction) -> str:
        """Render BEGIN TRANSACTION [name] [WITH MARK 'description']."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        mark = self.sql(expression, "mark")
        mark = f" WITH MARK {mark}" if mark else ""
        return f"BEGIN TRANSACTION{this}{mark}"

    def commit_sql(self, expression: exp.Commit) -> str:
        """Render COMMIT TRANSACTION [name] [WITH (DELAYED_DURABILITY = ON|OFF)]."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        durability = expression.args.get("durability")
        durability = (
            f" WITH (DELAYED_DURABILITY = {'ON' if durability else 'OFF'})"
            if durability is not None
            else ""
        )
        return f"COMMIT TRANSACTION{this}{durability}"

    def rollback_sql(self, expression: exp.Rollback) -> str:
        """Render ROLLBACK TRANSACTION [name]."""
        this = self.sql(expression, "this")
        this = f" {this}" if this else ""
        return f"ROLLBACK TRANSACTION{this}"

    def identifier_sql(self, expression: exp.Identifier) -> str:
        """Render identifiers, prefixing global temps with '##' and local temps with '#'."""
        identifier = super().identifier_sql(expression)

        if expression.args.get("global"):
            identifier = f"##{identifier}"
        elif expression.args.get("temporary"):
            identifier = f"#{identifier}"

        return
identifier 1182 1183 def constraint_sql(self, expression: exp.Constraint) -> str: 1184 this = self.sql(expression, "this") 1185 expressions = self.expressions(expression, flat=True, sep=" ") 1186 return f"CONSTRAINT {this} {expressions}" 1187 1188 def length_sql(self, expression: exp.Length) -> str: 1189 return self._uncast_text(expression, "LEN") 1190 1191 def right_sql(self, expression: exp.Right) -> str: 1192 return self._uncast_text(expression, "RIGHT") 1193 1194 def left_sql(self, expression: exp.Left) -> str: 1195 return self._uncast_text(expression, "LEFT") 1196 1197 def _uncast_text(self, expression: exp.Expression, name: str) -> str: 1198 this = expression.this 1199 if isinstance(this, exp.Cast) and this.is_type(exp.DataType.Type.TEXT): 1200 this_sql = self.sql(this, "this") 1201 else: 1202 this_sql = self.sql(this) 1203 expression_sql = self.sql(expression, "expression") 1204 return self.func(name, this_sql, expression_sql if expression_sql else None) 1205 1206 def partition_sql(self, expression: exp.Partition) -> str: 1207 return f"WITH (PARTITIONS({self.expressions(expression, flat=True)}))" 1208 1209 def alter_sql(self, expression: exp.Alter) -> str: 1210 action = seq_get(expression.args.get("actions") or [], 0) 1211 if isinstance(action, exp.AlterRename): 1212 return f"EXEC sp_rename '{self.sql(expression.this)}', '{action.this.name}'" 1213 return super().alter_sql(expression) 1214 1215 def drop_sql(self, expression: exp.Drop) -> str: 1216 if expression.args["kind"] == "VIEW": 1217 expression.this.set("catalog", None) 1218 return super().drop_sql(expression) 1219 1220 def declare_sql(self, expression: exp.Declare) -> str: 1221 return f"DECLARE {self.expressions(expression, flat=True)}" 1222 1223 def declareitem_sql(self, expression: exp.DeclareItem) -> str: 1224 variable = self.sql(expression, "this") 1225 default = self.sql(expression, "default") 1226 default = f" = {default}" if default else "" 1227 1228 kind = self.sql(expression, "kind") 1229 if 
isinstance(expression.args.get("kind"), exp.Schema): 1230 kind = f"TABLE {kind}" 1231 1232 return f"{variable} AS {kind}{default}" 1233 1234 def options_modifier(self, expression: exp.Expression) -> str: 1235 options = self.expressions(expression, key="options") 1236 return f" OPTION{self.wrap(options)}" if options else "" 1237 1238 def dpipe_sql(self, expression: exp.DPipe) -> str: 1239 return self.sql( 1240 reduce(lambda x, y: exp.Add(this=x, expression=y), expression.flatten()) 1241 )
Generator converts a given syntax tree to the corresponding SQL string.
Arguments:
- pretty: Whether to format the produced SQL string. Default: False.
- identify: Determines when an identifier should be quoted. Possible values are: False (default): Never quote, except in cases where it's mandatory by the dialect. True or 'always': Always quote. 'safe': Only quote identifiers that are case insensitive.
- normalize: Whether to normalize identifiers to lowercase. Default: False.
- pad: The pad size in a formatted string. For example, this affects the indentation of a projection in a query, relative to its nesting level. Default: 2.
- indent: The indentation size in a formatted string. For example, this affects the indentation of subqueries and filters under a WHERE clause. Default: 2.
- normalize_functions: How to normalize function names. Possible values are: "upper" or True (default): Convert names to uppercase. "lower": Convert names to lowercase. False: Disables function name normalization.
- unsupported_level: Determines the generator's behavior when it encounters unsupported expressions. Default: ErrorLevel.WARN.
- max_unsupported: Maximum number of unsupported messages to include in a raised UnsupportedError. This is only relevant if unsupported_level is ErrorLevel.RAISE. Default: 3.
- leading_comma: Whether the comma is leading or trailing in select expressions. This is only relevant when generating in pretty mode. Default: False.
- max_text_width: The max number of characters in a segment before creating new lines in pretty mode. The default is on the smaller end because the length only represents a segment and not the true line length. Default: 80.
- comments: Whether to preserve comments in the output SQL code. Default: True
def select_sql(self, expression: exp.Select) -> str:
    """Generate SELECT, compensating for T-SQL's OFFSET/TOP restrictions."""
    if expression.args.get("offset"):
        # OFFSET is only legal after ORDER BY, so inject a no-op ordering
        # when the query has none; the actual order is irrelevant here.
        # See: https://www.microsoftpressstore.com/articles/article.aspx?p=2314819
        if not expression.args.get("order"):
            expression.order_by(exp.select(exp.null()).subquery(), copy=False)

        # TOP cannot be combined with OFFSET, so rewrite the limit as a
        # FETCH FIRST before the parent generator would emit TOP.
        limit = expression.args.get("limit")
        if isinstance(limit, exp.Limit):
            limit.replace(exp.Fetch(direction="FIRST", count=limit.expression))

    return super().select_sql(expression)
def queryoption_sql(self, expression: exp.QueryOption) -> str:
    """Render one OPTION(...) hint, adding '=' only for options that need it."""
    option = self.sql(expression, "this")
    value = self.sql(expression, "expression")
    if not value:
        return option

    equal_sign = "= " if option in OPTIONS_THAT_REQUIRE_EQUAL else ""
    return f"{option} {equal_sign}{value}"
def lateral_op(self, expression: exp.Lateral) -> str:
    """Map LATERAL onto CROSS APPLY / OUTER APPLY; bare LATERAL is unsupported."""
    cross_apply = expression.args.get("cross_apply")
    if cross_apply is False:
        return "OUTER APPLY"
    if cross_apply is True:
        return "CROSS APPLY"

    # TODO: perhaps we can check if the parent is a Join and transpile it appropriately
    self.unsupported("LATERAL clause is not supported.")
    return "LATERAL"
def splitpart_sql(self: TSQL.Generator, expression: exp.SplitPart) -> str:
    """Transpile SPLIT_PART to PARSENAME; only '.'-delimited literal values qualify."""
    this = expression.this
    delimiter = expression.args.get("delimiter")
    part_index = expression.args.get("part_index")
    num_parts = len(this.name.split("."))

    all_literals = all(
        isinstance(arg, exp.Literal) for arg in (this, delimiter, part_index)
    )
    if (
        not all_literals
        or (delimiter and delimiter.name != ".")
        or not part_index
        or num_parts > 4
    ):
        self.unsupported(
            "SPLIT_PART can be transpiled to PARSENAME only for '.' delimiter and literal values"
        )
        return ""

    # PARSENAME counts parts from the right, SPLIT_PART from the left.
    return self.func(
        "PARSENAME", this, exp.Literal.number(num_parts + 1 - part_index.to_py())
    )
def timefromparts_sql(self, expression: exp.TimeFromParts) -> str:
    """Render TIMEFROMPARTS: drop unsupported nanoseconds and fill in the
    fractions/precision arguments T-SQL requires."""
    nano = expression.args.get("nano")
    if nano is not None:
        nano.pop()
        self.unsupported("Specifying nanoseconds is not supported in TIMEFROMPARTS.")

    for arg in ("fractions", "precision"):
        if expression.args.get(arg) is None:
            expression.set(arg, exp.Literal.number(0))

    return rename_func("TIMEFROMPARTS")(self, expression)
def timestampfromparts_sql(self, expression: exp.TimestampFromParts) -> str:
    """Render DATETIMEFROMPARTS: drop the unsupported zone/nano arguments
    and fill in the milliseconds argument T-SQL requires."""
    for arg, message in (
        ("zone", "Time zone is not supported in DATETIMEFROMPARTS."),
        ("nano", "Specifying nanoseconds is not supported in DATETIMEFROMPARTS."),
    ):
        value = expression.args.get(arg)
        if value is not None:
            value.pop()
            self.unsupported(message)

    if expression.args.get("milli") is None:
        expression.set("milli", exp.Literal.number(0))

    return rename_func("DATETIMEFROMPARTS")(self, expression)
def setitem_sql(self, expression: exp.SetItem) -> str:
    """Render a SET item; T-SQL omits '=' unless the LHS is a variable."""
    item = expression.this
    lhs_is_variable = isinstance(item, exp.EQ) and isinstance(item.left, exp.Parameter)
    if isinstance(item, exp.EQ) and not lhs_is_variable:
        return f"{self.sql(item.left)} {self.sql(item.right)}"

    return super().setitem_sql(expression)
def createable_sql(self, expression: exp.Create, locations: t.DefaultDict) -> str:
    """Render the created object's name, prefixing '#' for temporary tables."""
    sql = self.sql(expression, "this")
    properties = expression.args.get("properties")
    prop_list = properties.expressions if properties else []

    is_temporary = any(isinstance(prop, exp.TemporaryProperty) for prop in prop_list)
    if is_temporary and sql[:1] != "#":
        # Keep the '#' inside the brackets for quoted identifiers.
        sql = f"[#{sql[1:]}" if sql.startswith("[") else f"#{sql}"

    return sql
def create_sql(self, expression: exp.Create) -> str:
    """Generate CREATE, handling CTAS -> SELECT INTO, IF NOT EXISTS guards,
    and CREATE OR REPLACE -> CREATE OR ALTER."""
    kind = expression.kind
    exists = expression.args.pop("exists", None)

    like_property = expression.find(exp.LikeProperty)
    ctas_expression = like_property.this if like_property else expression.expression

    if kind == "VIEW":
        expression.this.set("catalog", None)
        with_ = expression.args.get("with")
        if ctas_expression and with_:
            # We've already preprocessed the Create expression to bubble up any nested CTEs,
            # but CREATE VIEW actually requires the WITH clause to come after it so we need
            # to amend the AST by moving the CTEs to the CREATE VIEW statement's query.
            ctas_expression.set("with", with_.pop())

    sql = super().create_sql(expression)

    table = expression.find(exp.Table)

    # Convert CTAS statement to SELECT .. INTO ..
    if kind == "TABLE" and ctas_expression:
        if isinstance(ctas_expression, exp.UNWRAPPED_QUERIES):
            ctas_expression = ctas_expression.subquery()

        select_into = exp.select("*").from_(exp.alias_(ctas_expression, "temp", table=True))
        select_into.set("into", exp.Into(this=table))

        if like_property:
            # LIKE only copies the schema, so select no rows.
            select_into.limit(0, copy=False)

        sql = self.sql(select_into)

    if exists:
        identifier = self.sql(exp.Literal.string(exp.table_name(table) if table else ""))
        sql = self.sql(exp.Literal.string(sql))
        if kind == "SCHEMA":
            sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.schemata WHERE schema_name = {identifier}) EXEC({sql})"""
        elif kind == "TABLE":
            assert table
            where = exp.and_(
                exp.column("table_name").eq(table.name),
                exp.column("table_schema").eq(table.db) if table.db else None,
                exp.column("table_catalog").eq(table.catalog) if table.catalog else None,
            )
            sql = f"""IF NOT EXISTS (SELECT * FROM information_schema.tables WHERE {where}) EXEC({sql})"""
        elif kind == "INDEX":
            index = self.sql(exp.Literal.string(expression.this.text("this")))
            sql = f"""IF NOT EXISTS (SELECT * FROM sys.indexes WHERE object_id = object_id({identifier}) AND name = {index}) EXEC({sql})"""
    elif expression.args.get("replace"):
        sql = sql.replace("CREATE OR REPLACE ", "CREATE OR ALTER ", 1)

    return self.prepend_ctes(expression, sql)
def version_sql(self, expression: exp.Version) -> str:
    """Render FOR SYSTEM_TIME temporal clauses."""
    name = "SYSTEM_TIME" if expression.name == "TIMESTAMP" else expression.name
    kind = expression.text("kind")
    expr = expression.expression

    if kind in ("FROM", "BETWEEN"):
        # These variants take two bounds joined by TO / AND respectively.
        bounds = expr.expressions
        sep = "TO" if kind == "FROM" else "AND"
        expr_sql = f"{self.sql(seq_get(bounds, 0))} {sep} {self.sql(seq_get(bounds, 1))}"
    else:
        expr_sql = self.sql(expr)

    expr_sql = f" {expr_sql}" if expr_sql else ""
    return f"FOR {name} {kind}{expr_sql}"
def commit_sql(self, expression: exp.Commit) -> str:
    """Render COMMIT TRANSACTION with an optional DELAYED_DURABILITY option."""
    this = self.sql(expression, "this")
    this = f" {this}" if this else ""

    durability = expression.args.get("durability")
    if durability is None:
        durability_sql = ""
    else:
        setting = "ON" if durability else "OFF"
        durability_sql = f" WITH (DELAYED_DURABILITY = {setting})"

    return f"COMMIT TRANSACTION{this}{durability_sql}"
def identifier_sql(self, expression: exp.Identifier) -> str:
    """Render an identifier, prefixing '##' (global) or '#' (local) temp names."""
    identifier = super().identifier_sql(expression)

    if expression.args.get("global"):
        return f"##{identifier}"
    if expression.args.get("temporary"):
        return f"#{identifier}"
    return identifier
def declareitem_sql(self, expression: exp.DeclareItem) -> str:
    """Render one DECLARE item: '<var> AS <kind>[ = <default>]'."""
    variable = self.sql(expression, "this")

    default = self.sql(expression, "default")
    default = f" = {default}" if default else ""

    kind = self.sql(expression, "kind")
    if isinstance(expression.args.get("kind"), exp.Schema):
        # Table variables spell out their schema: DECLARE @t AS TABLE (...)
        kind = f"TABLE {kind}"

    return f"{variable} AS {kind}{default}"
Inherited Members
- sqlglot.generator.Generator
- Generator
- IGNORE_NULLS_IN_FUNC
- LOCKING_READS_SUPPORTED
- WRAP_DERIVED_VALUES
- CREATE_FUNCTION_RETURN_AS
- MATCHED_BY_SOURCE
- SINGLE_STRING_INTERVAL
- INTERVAL_ALLOWS_PLURAL_FORM
- LIMIT_ONLY_LITERALS
- RENAME_TABLE_WITH_DB
- GROUPINGS_SEP
- INDEX_ON
- JOIN_HINTS
- TABLE_HINTS
- QUERY_HINT_SEP
- IS_BOOL_ALLOWED
- DUPLICATE_KEY_UPDATE_WITH_SET
- EXTRACT_ALLOWS_QUOTES
- TZ_TO_WITH_TIME_ZONE
- VALUES_AS_TABLE
- UNNEST_WITH_ORDINALITY
- AGGREGATE_FILTER_SUPPORTED
- SEMI_ANTI_JOIN_WITH_SIDE
- SUPPORTS_TABLE_COPY
- TABLESAMPLE_REQUIRES_PARENS
- TABLESAMPLE_SIZE_IS_ROWS
- TABLESAMPLE_KEYWORDS
- TABLESAMPLE_WITH_METHOD
- COLLATE_IS_FUNC
- DATA_TYPE_SPECIFIERS_ALLOWED
- LAST_DAY_SUPPORTS_DATE_PART
- SUPPORTS_TABLE_ALIAS_COLUMNS
- UNPIVOT_ALIASES_ARE_IDENTIFIERS
- JSON_KEY_VALUE_PAIR_SEP
- INSERT_OVERWRITE
- SUPPORTS_UNLOGGED_TABLES
- SUPPORTS_CREATE_TABLE_LIKE
- LIKE_PROPERTY_INSIDE_SCHEMA
- MULTI_ARG_DISTINCT
- JSON_TYPE_REQUIRED_FOR_EXTRACTION
- JSON_PATH_SINGLE_QUOTE_ESCAPE
- CAN_IMPLEMENT_ARRAY_ANY
- COPY_PARAMS_ARE_WRAPPED
- COPY_HAS_INTO_KEYWORD
- STAR_EXCEPT
- HEX_FUNC
- WITH_PROPERTIES_PREFIX
- QUOTE_JSON_PATH
- PAD_FILL_PATTERN_IS_REQUIRED
- SUPPORTS_EXPLODING_PROJECTIONS
- ARRAY_CONCAT_IS_VAR_LEN
- SUPPORTS_CONVERT_TIMEZONE
- TIME_PART_SINGULARS
- TOKEN_MAPPING
- STRUCT_DELIMITER
- PARAMETER_TOKEN
- NAMED_PLACEHOLDER_TOKEN
- RESERVED_KEYWORDS
- WITH_SEPARATED_COMMENTS
- EXCLUDE_COMMENTS
- UNWRAPPED_INTERVAL_VALUES
- PARAMETERIZABLE_TEXT_TYPES
- SENTINEL_LINE_BREAK
- pretty
- identify
- normalize
- pad
- unsupported_level
- max_unsupported
- leading_comma
- max_text_width
- comments
- dialect
- normalize_functions
- unsupported_messages
- generate
- preprocess
- unsupported
- sep
- seg
- pad_comment
- maybe_comment
- wrap
- no_identify
- normalize_func
- indent
- sql
- uncache_sql
- cache_sql
- characterset_sql
- column_parts
- column_sql
- columnposition_sql
- columndef_sql
- columnconstraint_sql
- computedcolumnconstraint_sql
- autoincrementcolumnconstraint_sql
- compresscolumnconstraint_sql
- generatedasidentitycolumnconstraint_sql
- generatedasrowcolumnconstraint_sql
- periodforsystemtimeconstraint_sql
- notnullcolumnconstraint_sql
- transformcolumnconstraint_sql
- primarykeycolumnconstraint_sql
- uniquecolumnconstraint_sql
- sequenceproperties_sql
- clone_sql
- describe_sql
- heredoc_sql
- prepend_ctes
- with_sql
- cte_sql
- tablealias_sql
- bitstring_sql
- hexstring_sql
- bytestring_sql
- unicodestring_sql
- rawstring_sql
- datatypeparam_sql
- datatype_sql
- directory_sql
- delete_sql
- set_operation
- set_operations
- fetch_sql
- filter_sql
- hint_sql
- indexparameters_sql
- index_sql
- hex_sql
- lowerhex_sql
- inputoutputformat_sql
- national_sql
- properties_sql
- root_properties
- properties
- with_properties
- locate_properties
- property_name
- property_sql
- likeproperty_sql
- fallbackproperty_sql
- journalproperty_sql
- freespaceproperty_sql
- checksumproperty_sql
- mergeblockratioproperty_sql
- datablocksizeproperty_sql
- blockcompressionproperty_sql
- isolatedloadingproperty_sql
- partitionboundspec_sql
- partitionedofproperty_sql
- lockingproperty_sql
- withdataproperty_sql
- withsystemversioningproperty_sql
- insert_sql
- introducer_sql
- kill_sql
- pseudotype_sql
- objectidentifier_sql
- onconflict_sql
- rowformatdelimitedproperty_sql
- withtablehint_sql
- indextablehint_sql
- historicaldata_sql
- table_parts
- table_sql
- tablesample_sql
- pivot_sql
- tuple_sql
- update_sql
- values_sql
- var_sql
- into_sql
- from_sql
- groupingsets_sql
- rollup_sql
- cube_sql
- group_sql
- having_sql
- connect_sql
- prior_sql
- join_sql
- lambda_sql
- lateral_sql
- limit_sql
- set_sql
- pragma_sql
- lock_sql
- literal_sql
- escape_str
- loaddata_sql
- null_sql
- order_sql
- withfill_sql
- cluster_sql
- distribute_sql
- sort_sql
- ordered_sql
- matchrecognizemeasure_sql
- matchrecognize_sql
- query_modifiers
- offset_limit_modifiers
- after_limit_modifiers
- schema_sql
- schema_columns_sql
- star_sql
- parameter_sql
- sessionparameter_sql
- placeholder_sql
- subquery_sql
- qualify_sql
- unnest_sql
- prewhere_sql
- where_sql
- window_sql
- partition_by_sql
- windowspec_sql
- withingroup_sql
- between_sql
- bracket_offset_expressions
- bracket_sql
- all_sql
- any_sql
- exists_sql
- case_sql
- nextvaluefor_sql
- extract_sql
- trim_sql
- convert_concat_args
- concat_sql
- concatws_sql
- check_sql
- foreignkey_sql
- primarykey_sql
- if_sql
- matchagainst_sql
- jsonkeyvalue_sql
- jsonpath_sql
- json_path_part
- formatjson_sql
- jsonobject_sql
- jsonobjectagg_sql
- jsonarray_sql
- jsonarrayagg_sql
- jsoncolumndef_sql
- jsonschema_sql
- jsontable_sql
- openjsoncolumndef_sql
- openjson_sql
- in_sql
- in_unnest_op
- interval_sql
- return_sql
- reference_sql
- anonymous_sql
- paren_sql
- neg_sql
- not_sql
- alias_sql
- pivotalias_sql
- aliases_sql
- atindex_sql
- attimezone_sql
- fromtimezone_sql
- add_sql
- and_sql
- or_sql
- xor_sql
- connector_sql
- bitwiseand_sql
- bitwiseleftshift_sql
- bitwisenot_sql
- bitwiseor_sql
- bitwiserightshift_sql
- bitwisexor_sql
- cast_sql
- currentdate_sql
- collate_sql
- command_sql
- comment_sql
- mergetreettlaction_sql
- mergetreettl_sql
- altercolumn_sql
- alterdiststyle_sql
- altersortkey_sql
- alterrename_sql
- renamecolumn_sql
- alterset_sql
- add_column_sql
- droppartition_sql
- addconstraint_sql
- distinct_sql
- ignorenulls_sql
- respectnulls_sql
- havingmax_sql
- intdiv_sql
- div_sql
- overlaps_sql
- distance_sql
- dot_sql
- eq_sql
- propertyeq_sql
- escape_sql
- glob_sql
- gt_sql
- gte_sql
- ilike_sql
- ilikeany_sql
- like_sql
- likeany_sql
- similarto_sql
- lt_sql
- lte_sql
- mod_sql
- mul_sql
- neq_sql
- nullsafeeq_sql
- nullsafeneq_sql
- slice_sql
- sub_sql
- trycast_sql
- try_sql
- log_sql
- use_sql
- binary
- function_fallback_sql
- func
- format_args
- too_wide
- format_time
- expressions
- op_expressions
- naked_property
- tag_sql
- token_sql
- userdefinedfunction_sql
- joinhint_sql
- kwarg_sql
- when_sql
- merge_sql
- tochar_sql
- tonumber_sql
- dictproperty_sql
- dictrange_sql
- dictsubproperty_sql
- duplicatekeyproperty_sql
- distributedbyproperty_sql
- oncluster_sql
- clusteredbyproperty_sql
- anyvalue_sql
- querytransform_sql
- indexconstraintoption_sql
- checkcolumnconstraint_sql
- indexcolumnconstraint_sql
- nvl2_sql
- comprehension_sql
- columnprefix_sql
- opclass_sql
- predict_sql
- forin_sql
- refresh_sql
- toarray_sql
- tsordstotime_sql
- tsordstotimestamp_sql
- tsordstodate_sql
- unixdate_sql
- lastday_sql
- dateadd_sql
- arrayany_sql
- struct_sql
- partitionrange_sql
- truncatetable_sql
- copyparameter_sql
- credentials_sql
- copy_sql
- semicolon_sql
- datadeletionproperty_sql
- maskingpolicycolumnconstraint_sql
- gapfill_sql
- scoperesolution_sql
- parsejson_sql
- rand_sql
- changes_sql
- pad_sql
- summarize_sql
- explodinggenerateseries_sql
- arrayconcat_sql
- converttimezone_sql
- json_sql
- jsonvalue_sql
- conditionalinsert_sql
- multitableinserts_sql
- oncondition_sql
- jsonexists_sql
- arrayagg_sql
- apply_sql
- grant_sql
- grantprivilege_sql
- grantprincipal_sql
- columns_sql
- overlay_sql