diff --git a/querybuilder/drivers/sql/tokenizer.py b/querybuilder/drivers/sql/tokenizer.py
index 574949f2145867a07f9b2edb9a4c5ebea40302d5..99beabee1407abe6f13c62f513350cf7d7eda6db 100644
--- a/querybuilder/drivers/sql/tokenizer.py
+++ b/querybuilder/drivers/sql/tokenizer.py
@@ -280,7 +280,7 @@ class Tokenizer:
             where,
             (groupby, TkInd(), having),
             orderby,
-            (offset, limit),
+            (limit, offset),
         )

     @__call__.register(dql.SetCombination)
diff --git a/querybuilder/drivers/sqlite/tokenizer.py b/querybuilder/drivers/sqlite/tokenizer.py
index 139730b78e90eda6dcfc99300be63174aafb044d..e686f16f3bb41122bb62fdb688eec0ea024386c2 100644
--- a/querybuilder/drivers/sqlite/tokenizer.py
+++ b/querybuilder/drivers/sqlite/tokenizer.py
@@ -110,6 +110,38 @@ class Tokenizer(sqlTokenizer):
         # ⟶ fallback to INTERSECT / EXCEPT / UNION
         return super().__call__(obj, combinator=combinator, all=all or None)

+    @__call__.register(qb.queries.dql.Select)
+    def _(
+        self,
+        obj: qb.queries.dql.Select,
+        /,
+        *,
+        columns: tuple[TkTree, ...],
+        distinct: TkTree = (),
+        from_: TkTree = (),
+        where: TkTree = (),
+        groupby: TkTree = (),
+        having: TkTree = (),
+        orderby: TkTree = (),
+        offset: TkTree = (),
+        limit: TkTree = (),
+    ) -> TkTree:
+        # TODO: It is probably better to have a clause limit+offset, so that this is handled there
+        if offset and not limit:
+            limit = (self.tokenize_keyword("LIMIT"), self.tokenize_constant(int, -1))
+        return super().__call__(
+            obj,
+            columns=columns,
+            distinct=distinct,
+            from_=from_,
+            where=where,
+            groupby=groupby,
+            having=having,
+            orderby=orderby,
+            offset=offset,
+            limit=limit,
+        )
+
     # TCL
     @__call__.register(qb.queries.tcl.Start)
     def _(
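
Note on the SQLite override above (not part of the patch): SQLite accepts `OFFSET` only as part of a `LIMIT ... OFFSET ...` clause, and a negative limit is treated as "no limit". Together with the `sql` driver fix that now emits `LIMIT` before `OFFSET`, padding in `LIMIT -1` lets a query with an offset but no limit still tokenize to valid SQLite. A minimal sketch using the standard-library `sqlite3` module (independent of the querybuilder API; the table and values are made up) illustrating that underlying behaviour:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.execute("CREATE TABLE t (x)")
conn.executemany("INSERT INTO t VALUES (?)", [(i,) for i in range(10)])

# A bare OFFSET is not valid SQLite and raises sqlite3.OperationalError:
#   conn.execute("SELECT x FROM t ORDER BY x OFFSET 5")

# A negative LIMIT means "no limit", so LIMIT -1 OFFSET n skips n rows
# without capping the result set -- the form the tokenizer now emits.
rows = conn.execute("SELECT x FROM t ORDER BY x LIMIT -1 OFFSET 5").fetchall()
print(rows)  # [(5,), (6,), (7,), (8,), (9,)]
```

The TODO in the patch already points at a cleaner long-term home for this: a combined limit+offset clause, so the padding is handled in one place rather than in the driver override.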