From f83e6795373e9d91ea515031807637866b20b810 Mon Sep 17 00:00:00 2001
From: Bruno Guillon <bruno.guillon@uca.fr>
Date: Wed, 12 Jun 2024 17:27:31 +0200
Subject: [PATCH 1/2] Fix LIMIT+OFFSET bug (clause order, plus SQLite "LIMIT
 -1" workaround for OFFSET without LIMIT)

---
 querybuilder/drivers/sql/tokenizer.py    |  2 +-
 querybuilder/drivers/sqlite/tokenizer.py | 32 ++++++++++++++++++++++++
 2 files changed, 33 insertions(+), 1 deletion(-)

diff --git a/querybuilder/drivers/sql/tokenizer.py b/querybuilder/drivers/sql/tokenizer.py
index 574949f2..99beabee 100644
--- a/querybuilder/drivers/sql/tokenizer.py
+++ b/querybuilder/drivers/sql/tokenizer.py
@@ -280,7 +280,7 @@ class Tokenizer:
             where,
             (groupby, TkInd(), having),
             orderby,
-            (offset, limit),
+            (limit, offset),
         )
 
     @__call__.register(dql.SetCombination)
diff --git a/querybuilder/drivers/sqlite/tokenizer.py b/querybuilder/drivers/sqlite/tokenizer.py
index 139730b7..1dbd3aa0 100644
--- a/querybuilder/drivers/sqlite/tokenizer.py
+++ b/querybuilder/drivers/sqlite/tokenizer.py
@@ -110,6 +110,38 @@ class Tokenizer(sqlTokenizer):
         # ⟶  fallback to INTERSECT / EXCEPT / UNION
         return super().__call__(obj, combinator=combinator, all=all or None)
 
+    @__call__.register(qb.queries.dql.Select)
+    def _(
+        self,
+        obj: dql.Select,
+        /,
+        *,
+        columns: tuple[TkTree, ...],
+        distinct: TkTree = (),
+        from_: TkTree = (),
+        where: TkTree = (),
+        groupby: TkTree = (),
+        having: TkTree = (),
+        orderby: TkTree = (),
+        offset: TkTree = (),
+        limit: TkTree = (),
+    ) -> TkTree:
+        # TODO: consider a combined LIMIT+OFFSET clause so this workaround can live there
+        if offset and not limit:
+            limit = (self.tokenize_keyword("LIMIT"), self.tokenize_constant(int, -1))
+        return super().__call__(
+            obj,
+            columns=columns,
+            distinct=distinct,
+            from_=from_,
+            where=where,
+            groupby=groupby,
+            having=having,
+            orderby=orderby,
+            offset=offset,
+            limit=limit,
+        )
+
     # TCL
     @__call__.register(qb.queries.tcl.Start)
     def _(
-- 
GitLab


From d30f09acbcdd317f3509db2a99b7b6f917883438 Mon Sep 17 00:00:00 2001
From: Bruno Guillon <bruno.guillon@uca.fr>
Date: Fri, 14 Jun 2024 22:33:49 +0200
Subject: [PATCH 2/2] ruff

---
 querybuilder/drivers/sqlite/tokenizer.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/querybuilder/drivers/sqlite/tokenizer.py b/querybuilder/drivers/sqlite/tokenizer.py
index 1dbd3aa0..e686f16f 100644
--- a/querybuilder/drivers/sqlite/tokenizer.py
+++ b/querybuilder/drivers/sqlite/tokenizer.py
@@ -113,7 +113,7 @@ class Tokenizer(sqlTokenizer):
     @__call__.register(qb.queries.dql.Select)
     def _(
         self,
-        obj: dql.Select,
+        obj: qb.queries.dql.Select,
         /,
         *,
         columns: tuple[TkTree, ...],
-- 
GitLab