diff --git a/querybuilder/drivers/sql/tokenizer.py b/querybuilder/drivers/sql/tokenizer.py
index 85dde0a74464820a22c460d73871ce4ad20f496d..427aca7a2812252edfcf80e647c18ecff18e184b 100644
--- a/querybuilder/drivers/sql/tokenizer.py
+++ b/querybuilder/drivers/sql/tokenizer.py
@@ -248,7 +248,8 @@ class Tokenizer:
         columns = (TkInd(),) + self.tokenize_list(tcomma, columns, accolate=True)
 
         return (
-            (select, columns),
+            (select, TkInd(), columns),
+            TkInd(),
             from_,
             where,
             (groupby, TkInd(), having),
diff --git a/querybuilder/tests/drivers/sql/test_tokenizer.py b/querybuilder/tests/drivers/sql/test_tokenizer.py
index e2afa87d16f9d9431d80b292c18b4fe86aeebf43..f2ec9d30512ded5cf9656835097e505a00fd8949 100644
--- a/querybuilder/tests/drivers/sql/test_tokenizer.py
+++ b/querybuilder/tests/drivers/sql/test_tokenizer.py
@@ -412,6 +412,7 @@ class TestTokenizer:
                     TkInd(),
                     TkStr(qbtoken.Token.Keyword, "DISTINCT").to_seq(),
                 ),
+                TkInd(),
                 (
                     TkInd(),
                     columns[0] + sepseq,
@@ -419,6 +420,7 @@ class TestTokenizer:
                     columns[2],
                 ),
             ),
+            TkInd(),
             from_,
             where,
             (
diff --git a/querybuilder/utils/logger.py b/querybuilder/utils/logger.py
index f9ed408644a2bcf272338443aacaf289d0dc7120..dceb8bd4e90abc00aa5fafaab12a09dcc270ccb4 100644
--- a/querybuilder/utils/logger.py
+++ b/querybuilder/utils/logger.py
@@ -19,7 +19,7 @@ import querybuilder.utils.typing as qbtyping
 import querybuilder.formatting.formatter as qbformatter
 from querybuilder.formatting import token as qbtoken
 from querybuilder.queries.queries import Query
-from querybuilder.formatting.tokentree import TkStr
+from querybuilder.formatting.tokentree import TkStr, TkSeq
 import querybuilder
 
 
@@ -96,12 +96,16 @@ class Logger:
         self.query_log.append(query)
 
         tokens = query.tokenize(self.tokenizer)
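+        # Trailing token sequences to emit after the query's own tokens.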
+        finaltokens: tuple[TkSeq, ...] = ()
         if parameters:
-            finaltokens = (
+            finaltokens += (
                 TkStr(qbtoken.Punctuation.Linebreak, "\n").to_seq(),
                 TkStr(qbtoken.Comment.Args, f"  --  ↖{parameters}").to_seq(),
             )
-            tokens = (tokens, finaltokens)
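+        # Terminate the logged statement with a semicolon, with or without parameters.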
+        finaltokens += (TkStr(qbtoken.Punctuation.Semicolon, ";").to_seq(),)
+        tokens = (tokens, finaltokens)
         self.formatter.format(tokens, self.file)
         self.file.write("\n")