Commit b0c570aa authored by Bruno Guillon

logger: suffixing queries with ';', indentation of linebreaks within queries

parent 90dc4165
Pipeline #849138 passed
@@ -248,7 +248,8 @@ class Tokenizer:
         columns = (TkInd(),) + self.tokenize_list(tcomma, columns, accolate=True)
         return (
-            (select, columns),
+            (select, TkInd(), columns),
+            TkInd(),
             from_,
             where,
             (groupby, TkInd(), having),
......
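(Context, not part of the diff.) Per the commit message, the extra TkInd() entries make line breaks emitted inside a query pick up indentation. The idea can be illustrated with a toy renderer; this is a hypothetical sketch with made-up names (render, "IND"), not querybuilder's actual formatter:

    # Toy sketch only: an indent marker raises the indentation level for the rest
    # of the current (sub)sequence, so later line breaks inside it are indented.
    def render(tokens, depth=0):
        out = []
        for tok in tokens:
            if isinstance(tok, tuple):      # nested token sequence
                out.append(render(tok, depth))
            elif tok == "IND":              # stand-in for TkInd()
                depth += 1
            elif tok == "\n":               # line break honours the current depth
                out.append("\n" + "    " * depth)
            else:
                out.append(tok)
        return "".join(out)

    print(render(("SELECT", ("IND", "\n", "a,", "\n", "b"), "\n", "FROM t")))
    # SELECT
    #     a,
    #     b
    # FROM t

In the hunk above, placing an indent marker next to the column list, (select, TkInd(), columns), presumably serves the same purpose for the rendered SQL.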
@@ -412,6 +412,7 @@ class TestTokenizer:
                     TkInd(),
                     TkStr(qbtoken.Token.Keyword, "DISTINCT").to_seq(),
                 ),
+                TkInd(),
                 (
                     TkInd(),
                     columns[0] + sepseq,
@@ -419,6 +420,7 @@
                     columns[2],
                 ),
             ),
+            TkInd(),
             from_,
             where,
             (
......
@@ -19,7 +19,7 @@ import querybuilder.utils.typing as qbtyping
 import querybuilder.formatting.formatter as qbformatter
 from querybuilder.formatting import token as qbtoken
 from querybuilder.queries.queries import Query
-from querybuilder.formatting.tokentree import TkStr
+from querybuilder.formatting.tokentree import TkStr, TkSeq
 import querybuilder
@@ -96,12 +96,14 @@ class Logger:
         self.query_log.append(query)
         tokens = query.tokenize(self.tokenizer)
+        finaltokens: tuple[TkSeq, ...] = ()
         if parameters:
-            finaltokens = (
+            finaltokens += (
                 TkStr(qbtoken.Punctuation.Linebreak, "\n").to_seq(),
                 TkStr(qbtoken.Comment.Args, f" -- ↖{parameters}").to_seq(),
             )
-            tokens = (tokens, finaltokens)
+        finaltokens += (TkStr(qbtoken.Punctuation.Semicolon, ";").to_seq(),)
+        tokens = (tokens, finaltokens)
         self.formatter.format(tokens, self.file)
         self.file.write("\n")
......
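Assembled from the hunk above for readability, the logging path after this commit reads roughly as follows; the def line is an assumption, since the method name and signature are outside the displayed context:

    def log(self, query, parameters=None):  # assumed signature, not shown in the hunk
        self.query_log.append(query)
        tokens = query.tokenize(self.tokenizer)
        finaltokens: tuple[TkSeq, ...] = ()
        if parameters:
            # echo the bound parameters as a trailing SQL comment on their own line
            finaltokens += (
                TkStr(qbtoken.Punctuation.Linebreak, "\n").to_seq(),
                TkStr(qbtoken.Comment.Args, f" -- ↖{parameters}").to_seq(),
            )
        # every logged query is now terminated with ';', with or without parameters
        finaltokens += (TkStr(qbtoken.Punctuation.Semicolon, ";").to_seq(),)
        tokens = (tokens, finaltokens)
        self.formatter.format(tokens, self.file)
        self.file.write("\n")

Before this commit, finaltokens and the (tokens, finaltokens) pairing existed only inside the if parameters: branch and no ';' was appended at all; now every logged query is written with the terminator.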