diff --git a/declearn/dataset/_split_data.py b/declearn/dataset/_split_data.py
index 017c9c08d0a3866f99573ae7b9bf02dfc8837473..1729c2ddea23edb0719a9181c0d2e66d94cab5e6 100644
--- a/declearn/dataset/_split_data.py
+++ b/declearn/dataset/_split_data.py
@@ -144,7 +144,8 @@ def split_data(
 ) -> None:
     """Randomly split a dataset into shards.
 
-    The resulting folder structure is :
+    The resulting folder structure is:
+
         folder/
         └─── data*/
             └─── client*/
diff --git a/declearn/model/haiku/_vector.py b/declearn/model/haiku/_vector.py
index e1f106f141848d5686297c23289d610e025ec1a2..7797ee62c8f09d1d47a52280d847dc870865989a 100644
--- a/declearn/model/haiku/_vector.py
+++ b/declearn/model/haiku/_vector.py
@@ -53,10 +53,10 @@ class JaxNumpyVector(Vector):
     Notes
     -----
     - A `JaxnumpyVector` can be operated with either a:
-      - scalar value
-      - `NumpyVector` that has similar specifications
-      - `JaxNumpyVector` that has similar specifications
-      => resulting in a `JaxNumpyVector` in each of these cases.
+        - scalar value
+        - `NumpyVector` that has similar specifications
+        - `JaxNumpyVector` that has similar specifications
+        - resulting in a `JaxNumpyVector` in each of these cases.
     - The wrapped arrays may be placed on any device (CPU, GPU...)
       and may not be all on the same device.
     - The device-placement of the initial `JaxNumpyVector`'s data
diff --git a/declearn/model/tensorflow/_vector.py b/declearn/model/tensorflow/_vector.py
index 401bbb0665ae95b80a78f0bbb55fa90feb16a3af..079aa17016af9f0947c85d9eb8130c33ca94d914 100644
--- a/declearn/model/tensorflow/_vector.py
+++ b/declearn/model/tensorflow/_vector.py
@@ -96,10 +96,10 @@ class TensorflowVector(Vector):
     Notes
     -----
     - A `TensorflowVector` can be operated with either a:
-      - scalar value
-      - `NumpyVector` that has similar specifications
-      - `TensorflowVector` that has similar specifications
-      => resulting in a `TensorflowVector` in each of these cases.
+        - scalar value
+        - `NumpyVector` that has similar specifications
+        - `TensorflowVector` that has similar specifications
+        - resulting in a `TensorflowVector` in each of these cases.
     - The wrapped tensors may be placed on any device (CPU, GPU...)
       and may not be all on the same device.
     - The device-placement of the initial `TensorflowVector`'s data
diff --git a/declearn/model/torch/_vector.py b/declearn/model/torch/_vector.py
index 662aaa100fde401335d9f80657f32b505986614e..91d93be33bb24ce58592732876f549b91a880b4c 100644
--- a/declearn/model/torch/_vector.py
+++ b/declearn/model/torch/_vector.py
@@ -48,10 +48,10 @@ class TorchVector(Vector):
     Notes
     -----
     - A `TorchVector` can be operated with either a:
-      - scalar value
-      - `NumpyVector` that has similar specifications
-      - `TorchVector` that has similar specifications
-      => resulting in a `TorchVector` in each of these cases.
+        - scalar value
+        - `NumpyVector` that has similar specifications
+        - `TorchVector` that has similar specifications
+        - resulting in a `TorchVector` in each of these cases.
     - The wrapped tensors may be placed on any device (CPU, GPU...)
       and may not be all on the same device.
     - The device-placement of the initial `TorchVector`'s data
diff --git a/docs/quickstart.md b/docs/quickstart.md
index be711050f18ecd067b2898ade1bdca86d082b035..f2c4d8f681d06434a4046c1bc630858e3920a076 100644
--- a/docs/quickstart.md
+++ b/docs/quickstart.md
@@ -52,8 +52,8 @@ declearn-quickrun --config "examples/mnist_quickrun/config.toml"
 
 **To better understand the details** of what happens under the hood you can
 look at what the key element of the declearn process are in
-[section 1.2.](#12-python-script). To understand how to use the quickrun mode
-in practice, see [section 2.1.](#21-quickrun-on-your-problem).
+[section 1.2](#12-python-script). To understand how to use the quickrun mode
+in practice, see [section 2.1](#21-quickrun-on-your-problem).
 
 ### 1.2. Python script