`
+ sponsors.forEach(function (sponsor) {
+ html += `
+
+
+
+ `
+ });
+ html += '
'
+
+root = Path(__file__).parent.parent
+src = root / "src"
+
+for path in sorted(src.rglob("*.py")):
+ module_path = path.relative_to(src).with_suffix("")
+ doc_path = path.relative_to(src / "mkdocstrings").with_suffix(".md")
+ full_doc_path = Path("reference", doc_path)
+
+ parts = tuple(module_path.parts)
+
+ if parts[-1] == "__init__":
+ parts = parts[:-1]
+ doc_path = doc_path.with_name("index.md")
+ full_doc_path = full_doc_path.with_name("index.md")
+ elif parts[-1].startswith("_"):
+ continue
+
+ nav_parts = [f"{mod_symbol} {part}" for part in parts]
+ nav[tuple(nav_parts)] = doc_path.as_posix()
+
+ with mkdocs_gen_files.open(full_doc_path, "w") as fd:
+ ident = ".".join(parts)
+ fd.write(f"---\ntitle: {ident}\n---\n\n::: {ident}")
+
+ mkdocs_gen_files.set_edit_path(full_doc_path, ".." / path.relative_to(root))
+
+with mkdocs_gen_files.open("reference/SUMMARY.md", "w") as nav_file:
+ nav_file.writelines(nav.build_literate_nav())
diff --git a/scripts/get_version.py b/scripts/get_version.py
new file mode 100644
index 00000000..f4a30a8c
--- /dev/null
+++ b/scripts/get_version.py
@@ -0,0 +1,27 @@
+"""Get current project version from Git tags or changelog."""
+
+import re
+from contextlib import suppress
+from pathlib import Path
+
+from pdm.backend.hooks.version import SCMVersion, Version, default_version_formatter, get_version_from_scm
+
+_root = Path(__file__).parent.parent
+_changelog = _root / "CHANGELOG.md"
+_changelog_version_re = re.compile(r"^## \[(\d+\.\d+\.\d+)\].*$")
+_default_scm_version = SCMVersion(Version("0.0.0"), None, False, None, None) # noqa: FBT003
+
+
+def get_version() -> str:
+ """Get current project version from Git tags or changelog."""
+ scm_version = get_version_from_scm(_root) or _default_scm_version
+ if scm_version.version <= Version("0.1"): # Missing Git tags?
+ with suppress(OSError, StopIteration): # noqa: SIM117
+ with _changelog.open("r", encoding="utf8") as file:
+ match = next(filter(None, map(_changelog_version_re.match, file)))
+ scm_version = scm_version._replace(version=Version(match.group(1)))
+ return default_version_formatter(scm_version)
+
+
+if __name__ == "__main__":
+ print(get_version())
diff --git a/scripts/insiders.py b/scripts/insiders.py
new file mode 100644
index 00000000..a7da99bc
--- /dev/null
+++ b/scripts/insiders.py
@@ -0,0 +1,206 @@
+"""Functions related to Insiders funding goals."""
+
+from __future__ import annotations
+
+import json
+import logging
+import os
+import posixpath
+from dataclasses import dataclass
+from datetime import date, datetime, timedelta
+from itertools import chain
+from pathlib import Path
+from typing import TYPE_CHECKING, cast
+from urllib.error import HTTPError
+from urllib.parse import urljoin
+from urllib.request import urlopen
+
+import yaml
+
+if TYPE_CHECKING:
+ from collections.abc import Iterable
+
+logger = logging.getLogger(f"mkdocs.logs.{__name__}")
+
+
+def human_readable_amount(amount: int) -> str: # noqa: D103
+ str_amount = str(amount)
+ if len(str_amount) >= 4: # noqa: PLR2004
+ return f"{str_amount[: len(str_amount) - 3]},{str_amount[-3:]}"
+ return str_amount
+
+
+@dataclass
+class Project:
+ """Class representing an Insiders project."""
+
+ name: str
+ url: str
+
+
+@dataclass
+class Feature:
+ """Class representing an Insiders feature."""
+
+ name: str
+ ref: str | None
+ since: date | None
+ project: Project | None
+
+ def url(self, rel_base: str = "..") -> str | None: # noqa: D102
+ if not self.ref:
+ return None
+ if self.project:
+ rel_base = self.project.url
+ return posixpath.join(rel_base, self.ref.lstrip("/"))
+
+ def render(self, rel_base: str = "..", *, badge: bool = False) -> None: # noqa: D102
+ new = ""
+ if badge:
+ recent = self.since and date.today() - self.since <= timedelta(days=60) # noqa: DTZ011
+ if recent:
+ ft_date = self.since.strftime("%B %d, %Y") # type: ignore[union-attr]
+ new = f' :material-alert-decagram:{{ .new-feature .vibrate title="Added on {ft_date}" }}'
+ project = f"[{self.project.name}]({self.project.url}) — " if self.project else ""
+ feature = f"[{self.name}]({self.url(rel_base)})" if self.ref else self.name
+ print(f"- [{'x' if self.since else ' '}] {project}{feature}{new}")
+
+
+@dataclass
+class Goal:
+ """Class representing an Insiders goal."""
+
+ name: str
+ amount: int
+ features: list[Feature]
+ complete: bool = False
+
+ @property
+ def human_readable_amount(self) -> str: # noqa: D102
+ return human_readable_amount(self.amount)
+
+ def render(self, rel_base: str = "..") -> None: # noqa: D102
+ print(f"#### $ {self.human_readable_amount} — {self.name}\n")
+ if self.features:
+ for feature in self.features:
+ feature.render(rel_base)
+ print("")
+ else:
+ print("There are no features in this goal for this project. ")
+ print(
+ "[See the features in this goal **for all Insiders projects.**]"
+ f"(https://pawamoy.github.io/insiders/#{self.amount}-{self.name.lower().replace(' ', '-')})",
+ )
+
+
+def load_goals(data: str, funding: int = 0, project: Project | None = None) -> dict[int, Goal]:
+ """Load goals from JSON data.
+
+ Parameters:
+ data: The JSON data.
+ funding: The current total funding, per month.
+ origin: The origin of the data (URL).
+
+ Returns:
+ A dictionaries of goals, keys being their target monthly amount.
+ """
+ goals_data = yaml.safe_load(data)["goals"]
+ return {
+ amount: Goal(
+ name=goal_data["name"],
+ amount=amount,
+ complete=funding >= amount,
+ features=[
+ Feature(
+ name=feature_data["name"],
+ ref=feature_data.get("ref"),
+ since=feature_data.get("since") and datetime.strptime(feature_data["since"], "%Y/%m/%d").date(), # noqa: DTZ007
+ project=project,
+ )
+ for feature_data in goal_data["features"]
+ ],
+ )
+ for amount, goal_data in goals_data.items()
+ }
+
+
+def _load_goals_from_disk(path: str, funding: int = 0) -> dict[int, Goal]:
+ project_dir = os.getenv("MKDOCS_CONFIG_DIR", ".")
+ try:
+ data = Path(project_dir, path).read_text()
+ except OSError as error:
+ raise RuntimeError(f"Could not load data from disk: {path}") from error
+ return load_goals(data, funding)
+
+
+def _load_goals_from_url(source_data: tuple[str, str, str], funding: int = 0) -> dict[int, Goal]:
+ project_name, project_url, data_fragment = source_data
+ data_url = urljoin(project_url, data_fragment)
+ try:
+ with urlopen(data_url) as response: # noqa: S310
+ data = response.read()
+ except HTTPError as error:
+ raise RuntimeError(f"Could not load data from network: {data_url}") from error
+ return load_goals(data, funding, project=Project(name=project_name, url=project_url))
+
+
+def _load_goals(source: str | tuple[str, str, str], funding: int = 0) -> dict[int, Goal]:
+ if isinstance(source, str):
+ return _load_goals_from_disk(source, funding)
+ return _load_goals_from_url(source, funding)
+
+
+def funding_goals(source: str | list[str | tuple[str, str, str]], funding: int = 0) -> dict[int, Goal]:
+ """Load funding goals from a given data source.
+
+ Parameters:
+ source: The data source (local file path or URL).
+ funding: The current total funding, per month.
+
+ Returns:
+        A dictionary of goals, keys being their target monthly amount.
+ """
+ if isinstance(source, str):
+ return _load_goals_from_disk(source, funding)
+ goals = {}
+ for src in source:
+ source_goals = _load_goals(src, funding)
+ for amount, goal in source_goals.items():
+ if amount not in goals:
+ goals[amount] = goal
+ else:
+ goals[amount].features.extend(goal.features)
+ return {amount: goals[amount] for amount in sorted(goals)}
+
+
+def feature_list(goals: Iterable[Goal]) -> list[Feature]:
+ """Extract feature list from funding goals.
+
+ Parameters:
+ goals: A list of funding goals.
+
+ Returns:
+ A list of features.
+ """
+ return list(chain.from_iterable(goal.features for goal in goals))
+
+
+def load_json(url: str) -> str | list | dict: # noqa: D103
+ with urlopen(url) as response: # noqa: S310
+ return json.loads(response.read().decode())
+
+
+data_source = globals()["data_source"]
+sponsor_url = "https://github.com/sponsors/pawamoy"
+data_url = "https://raw.githubusercontent.com/pawamoy/sponsors/main"
+numbers: dict[str, int] = load_json(f"{data_url}/numbers.json") # type: ignore[assignment]
+sponsors: list[dict] = load_json(f"{data_url}/sponsors.json") # type: ignore[assignment]
+current_funding = numbers["total"]
+sponsors_count = numbers["count"]
+goals = funding_goals(data_source, funding=current_funding)
+ongoing_goals = [goal for goal in goals.values() if not goal.complete]
+unreleased_features = sorted(
+ (ft for ft in feature_list(ongoing_goals) if ft.since),
+ key=lambda ft: cast(date, ft.since),
+ reverse=True,
+)
diff --git a/scripts/make b/scripts/make
new file mode 120000
index 00000000..c2eda0df
--- /dev/null
+++ b/scripts/make
@@ -0,0 +1 @@
+make.py
\ No newline at end of file
diff --git a/scripts/make.py b/scripts/make.py
new file mode 100755
index 00000000..3d427296
--- /dev/null
+++ b/scripts/make.py
@@ -0,0 +1,191 @@
+#!/usr/bin/env python3
+"""Management commands."""
+
+from __future__ import annotations
+
+import os
+import shutil
+import subprocess
+import sys
+from contextlib import contextmanager
+from pathlib import Path
+from textwrap import dedent
+from typing import TYPE_CHECKING, Any
+
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+
+
+PYTHON_VERSIONS = os.getenv("PYTHON_VERSIONS", "3.9 3.10 3.11 3.12 3.13 3.14").split()
+
+
+def shell(cmd: str, *, capture_output: bool = False, **kwargs: Any) -> str | None:
+ """Run a shell command."""
+ if capture_output:
+ return subprocess.check_output(cmd, shell=True, text=True, **kwargs) # noqa: S602
+ subprocess.run(cmd, shell=True, check=True, stderr=subprocess.STDOUT, **kwargs) # noqa: S602
+ return None
+
+
+@contextmanager
+def environ(**kwargs: str) -> Iterator[None]:
+ """Temporarily set environment variables."""
+ original = dict(os.environ)
+ os.environ.update(kwargs)
+ try:
+ yield
+ finally:
+ os.environ.clear()
+ os.environ.update(original)
+
+
+def uv_install(venv: Path) -> None:
+ """Install dependencies using uv."""
+ with environ(UV_PROJECT_ENVIRONMENT=str(venv), PYO3_USE_ABI3_FORWARD_COMPATIBILITY="1"):
+ if "CI" in os.environ:
+ shell("uv sync --no-editable")
+ else:
+ shell("uv sync")
+
+
+def setup() -> None:
+ """Setup the project."""
+ if not shutil.which("uv"):
+ raise ValueError("make: setup: uv must be installed, see https://github.com/astral-sh/uv")
+
+ print("Installing dependencies (default environment)")
+ default_venv = Path(".venv")
+ if not default_venv.exists():
+ shell("uv venv")
+ uv_install(default_venv)
+
+ if PYTHON_VERSIONS:
+ for version in PYTHON_VERSIONS:
+ print(f"\nInstalling dependencies (python{version})")
+ venv_path = Path(f".venvs/{version}")
+ if not venv_path.exists():
+ shell(f"uv venv --python {version} {venv_path}")
+ with environ(UV_PROJECT_ENVIRONMENT=str(venv_path.resolve())):
+ uv_install(venv_path)
+
+
+def run(version: str, cmd: str, *args: str, **kwargs: Any) -> None:
+ """Run a command in a virtual environment."""
+ kwargs = {"check": True, **kwargs}
+ uv_run = ["uv", "run", "--no-sync"]
+ if version == "default":
+ with environ(UV_PROJECT_ENVIRONMENT=".venv"):
+ subprocess.run([*uv_run, cmd, *args], **kwargs) # noqa: S603, PLW1510
+ else:
+ with environ(UV_PROJECT_ENVIRONMENT=f".venvs/{version}", MULTIRUN="1"):
+ subprocess.run([*uv_run, cmd, *args], **kwargs) # noqa: S603, PLW1510
+
+
+def multirun(cmd: str, *args: str, **kwargs: Any) -> None:
+ """Run a command for all configured Python versions."""
+ if PYTHON_VERSIONS:
+ for version in PYTHON_VERSIONS:
+ run(version, cmd, *args, **kwargs)
+ else:
+ run("default", cmd, *args, **kwargs)
+
+
+def allrun(cmd: str, *args: str, **kwargs: Any) -> None:
+ """Run a command in all virtual environments."""
+ run("default", cmd, *args, **kwargs)
+ if PYTHON_VERSIONS:
+ multirun(cmd, *args, **kwargs)
+
+
+def clean() -> None:
+ """Delete build artifacts and cache files."""
+ paths_to_clean = ["build", "dist", "htmlcov", "site", ".coverage*", ".pdm-build"]
+ for path in paths_to_clean:
+ shutil.rmtree(path, ignore_errors=True)
+
+ cache_dirs = {".cache", ".pytest_cache", ".mypy_cache", ".ruff_cache", "__pycache__"}
+ for dirpath in Path(".").rglob("*/"):
+ if dirpath.parts[0] not in (".venv", ".venvs") and dirpath.name in cache_dirs:
+ shutil.rmtree(dirpath, ignore_errors=True)
+
+
+def vscode() -> None:
+ """Configure VSCode to work on this project."""
+ shutil.copytree("config/vscode", ".vscode", dirs_exist_ok=True)
+
+
+def main() -> int:
+ """Main entry point."""
+ args = list(sys.argv[1:])
+ if not args or args[0] == "help":
+ if len(args) > 1:
+ run("default", "duty", "--help", args[1])
+ else:
+ print(
+ dedent(
+ """
+ Available commands
+ help Print this help. Add task name to print help.
+ setup Setup all virtual environments (install dependencies).
+ run Run a command in the default virtual environment.
+ multirun Run a command for all configured Python versions.
+ allrun Run a command in all virtual environments.
+ 3.x Run a command in the virtual environment for Python 3.x.
+ clean Delete build artifacts and cache files.
+ vscode Configure VSCode to work on this project.
+ """,
+ ),
+ flush=True,
+ )
+ if os.path.exists(".venv"):
+ print("\nAvailable tasks", flush=True)
+ run("default", "duty", "--list")
+ return 0
+
+ while args:
+ cmd = args.pop(0)
+
+ if cmd == "run":
+ run("default", *args)
+ return 0
+
+ if cmd == "multirun":
+ multirun(*args)
+ return 0
+
+ if cmd == "allrun":
+ allrun(*args)
+ return 0
+
+ if cmd.startswith("3."):
+ run(cmd, *args)
+ return 0
+
+ opts = []
+ while args and (args[0].startswith("-") or "=" in args[0]):
+ opts.append(args.pop(0))
+
+ if cmd == "clean":
+ clean()
+ elif cmd == "setup":
+ setup()
+ elif cmd == "vscode":
+ vscode()
+ elif cmd == "check":
+ multirun("duty", "check-quality", "check-types", "check-docs")
+ run("default", "duty", "check-api")
+ elif cmd in {"check-quality", "check-docs", "check-types", "test"}:
+ multirun("duty", cmd, *opts)
+ else:
+ run("default", "duty", cmd, *opts)
+
+ return 0
+
+
+if __name__ == "__main__":
+ try:
+ sys.exit(main())
+ except subprocess.CalledProcessError as process:
+ if process.output:
+ print(process.output, file=sys.stderr)
+ sys.exit(process.returncode)
diff --git a/scripts/multirun.sh b/scripts/multirun.sh
deleted file mode 100755
index a55d1746..00000000
--- a/scripts/multirun.sh
+++ /dev/null
@@ -1,26 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-PYTHON_VERSIONS="${PYTHON_VERSIONS-3.7 3.8 3.9 3.10 3.11}"
-
-restore_previous_python_version() {
- if pdm use -f "$1" &>/dev/null; then
- echo "> Restored previous Python version: ${1##*/}"
- fi
-}
-
-if [ -n "${PYTHON_VERSIONS}" ]; then
- old_python_version="$(pdm config python.path)"
- echo "> Currently selected Python version: ${old_python_version##*/}"
- trap "restore_previous_python_version ${old_python_version}" EXIT
- for python_version in ${PYTHON_VERSIONS}; do
- if pdm use -f "python${python_version}" &>/dev/null; then
- echo "> pdm run $@ (python${python_version})"
- pdm run "$@"
- else
- echo "> pdm use -f python${python_version}: Python interpreter not available?" >&2
- fi
- done
-else
- pdm run "$@"
-fi
diff --git a/scripts/setup.sh b/scripts/setup.sh
deleted file mode 100755
index 188eaebc..00000000
--- a/scripts/setup.sh
+++ /dev/null
@@ -1,38 +0,0 @@
-#!/usr/bin/env bash
-set -e
-
-PYTHON_VERSIONS="${PYTHON_VERSIONS-3.7 3.8 3.9 3.10 3.11}"
-
-install_with_pipx() {
- if ! command -v "$1" &>/dev/null; then
- if ! command -v pipx &>/dev/null; then
- python3 -m pip install --user pipx
- fi
- pipx install "$1"
- fi
-}
-
-install_with_pipx pdm
-
-restore_previous_python_version() {
- if pdm use -f "$1" &>/dev/null; then
- echo "> Restored previous Python version: ${1##*/}"
- fi
-}
-
-if [ -n "${PYTHON_VERSIONS}" ]; then
- if old_python_version="$(pdm config python.path 2>/dev/null)"; then
- echo "> Currently selected Python version: ${old_python_version##*/}"
- trap "restore_previous_python_version ${old_python_version}" EXIT
- fi
- for python_version in ${PYTHON_VERSIONS}; do
- if pdm use -f "python${python_version}" &>/dev/null; then
- echo "> Using Python ${python_version} interpreter"
- pdm install
- else
- echo "> pdm use -f python${python_version}: Python interpreter not available?" >&2
- fi
- done
-else
- pdm install
-fi
diff --git a/src/mkdocstrings/__init__.py b/src/mkdocstrings/__init__.py
new file mode 100644
index 00000000..03550f9b
--- /dev/null
+++ b/src/mkdocstrings/__init__.py
@@ -0,0 +1,4 @@
+"""mkdocstrings package.
+
+Automatic documentation from sources, for MkDocs.
+"""
diff --git a/src/mkdocstrings/_download.py b/src/mkdocstrings/_download.py
new file mode 100644
index 00000000..b9af327d
--- /dev/null
+++ b/src/mkdocstrings/_download.py
@@ -0,0 +1,77 @@
+import base64
+import gzip
+import os
+import re
+import urllib.parse
+import urllib.request
+from collections.abc import Mapping
+from typing import BinaryIO, Optional
+
+from mkdocstrings.loggers import get_logger
+
+log = get_logger(__name__)
+
+# Regex pattern for an environment variable in the form ${ENV_VAR}.
+ENV_VAR_PATTERN = re.compile(r"\$\{([A-Za-z_][A-Za-z0-9_]*)\}")
+
+
+def download_url_with_gz(url: str) -> bytes:
+ url, auth_header = _extract_auth_from_url(url)
+
+ req = urllib.request.Request( # noqa: S310
+ url,
+ headers={"Accept-Encoding": "gzip", "User-Agent": "mkdocstrings/0.15.0", **auth_header},
+ )
+ with urllib.request.urlopen(req) as resp: # noqa: S310
+ content: BinaryIO = resp
+ if "gzip" in resp.headers.get("content-encoding", ""):
+ content = gzip.GzipFile(fileobj=resp) # type: ignore[assignment]
+ return content.read()
+
+
+def _expand_env_vars(credential: str, url: str, env: Optional[Mapping[str, str]] = None) -> str:
+ """A safe implementation of environment variable substitution.
+
+ It only supports the following forms: `${ENV_VAR}`.
+    Neither `$ENV_VAR` nor `%ENV_VAR` is supported.
+ """
+ if env is None:
+ env = os.environ
+
+ def replace_func(match: re.Match) -> str:
+ try:
+ return env[match.group(1)]
+ except KeyError:
+ log.warning("Environment variable '%s' is not set, but is used in inventory URL %s", match.group(1), url)
+ return match.group(0)
+
+ return re.sub(ENV_VAR_PATTERN, replace_func, credential)
+
+
+# Implementation adapted from PDM: https://github.com/pdm-project/pdm.
+def _extract_auth_from_url(url: str) -> tuple[str, dict[str, str]]:
+ """Extract credentials from the URL if present, and return the URL and the appropriate auth header for the credentials."""
+ if "@" not in url:
+ return url, {}
+
+ scheme, netloc, *rest = urllib.parse.urlparse(url)
+ auth, host = netloc.split("@", 1)
+ auth = _expand_env_vars(credential=auth, url=url)
+ auth_header = _create_auth_header(credential=auth, url=url)
+
+ url = urllib.parse.urlunparse((scheme, host, *rest))
+ return url, auth_header
+
+
+def _create_auth_header(credential: str, url: str) -> dict[str, str]:
+ """Create the Authorization header for basic or bearer authentication, depending on credential."""
+ if ":" not in credential:
+ # We assume that the user is using a token.
+ log.debug("Using bearer token authentication for %s", url)
+ return {"Authorization": f"Bearer {credential}"}
+
+ # Else, we assume that the user is using user:password.
+ user, pwd = credential.split(":", 1)
+ log.debug("Using basic authentication for %s", url)
+ credentials = base64.encodebytes(f"{user}:{pwd}".encode()).decode().strip()
+ return {"Authorization": f"Basic {credentials}"}
diff --git a/src/mkdocstrings/debug.py b/src/mkdocstrings/debug.py
new file mode 100644
index 00000000..b5da78f2
--- /dev/null
+++ b/src/mkdocstrings/debug.py
@@ -0,0 +1,109 @@
+"""Debugging utilities."""
+
+from __future__ import annotations
+
+import os
+import platform
+import sys
+from dataclasses import dataclass
+from importlib import metadata
+
+
+@dataclass
+class Variable:
+ """Dataclass describing an environment variable."""
+
+ name: str
+ """Variable name."""
+ value: str
+ """Variable value."""
+
+
+@dataclass
+class Package:
+ """Dataclass describing a Python package."""
+
+ name: str
+ """Package name."""
+ version: str
+ """Package version."""
+
+
+@dataclass
+class Environment:
+ """Dataclass to store environment information."""
+
+ interpreter_name: str
+ """Python interpreter name."""
+ interpreter_version: str
+ """Python interpreter version."""
+ interpreter_path: str
+ """Path to Python executable."""
+ platform: str
+ """Operating System."""
+ packages: list[Package]
+ """Installed packages."""
+ variables: list[Variable]
+ """Environment variables."""
+
+
+def _interpreter_name_version() -> tuple[str, str]:
+ if hasattr(sys, "implementation"):
+ impl = sys.implementation.version
+ version = f"{impl.major}.{impl.minor}.{impl.micro}"
+ kind = impl.releaselevel
+ if kind != "final":
+ version += kind[0] + str(impl.serial)
+ return sys.implementation.name, version
+ return "", "0.0.0"
+
+
+def get_version(dist: str = "mkdocstrings") -> str:
+ """Get version of the given distribution.
+
+ Parameters:
+ dist: A distribution name.
+
+ Returns:
+ A version number.
+ """
+ try:
+ return metadata.version(dist)
+ except metadata.PackageNotFoundError:
+ return "0.0.0"
+
+
+def get_debug_info() -> Environment:
+ """Get debug/environment information.
+
+ Returns:
+ Environment information.
+ """
+ py_name, py_version = _interpreter_name_version()
+ packages = ["mkdocstrings"]
+ variables = ["PYTHONPATH", *[var for var in os.environ if var.startswith("MKDOCSTRINGS")]]
+ return Environment(
+ interpreter_name=py_name,
+ interpreter_version=py_version,
+ interpreter_path=sys.executable,
+ platform=platform.platform(),
+ variables=[Variable(var, val) for var in variables if (val := os.getenv(var))],
+ packages=[Package(pkg, get_version(pkg)) for pkg in packages],
+ )
+
+
+def print_debug_info() -> None:
+ """Print debug/environment information."""
+ info = get_debug_info()
+ print(f"- __System__: {info.platform}")
+ print(f"- __Python__: {info.interpreter_name} {info.interpreter_version} ({info.interpreter_path})")
+ print("- __Environment variables__:")
+ for var in info.variables:
+ print(f" - `{var.name}`: `{var.value}`")
+ print("- __Installed packages__:")
+ for pkg in info.packages:
+ print(f" - `{pkg.name}` v{pkg.version}")
+
+
+if __name__ == "__main__":
+ print_debug_info()
diff --git a/src/mkdocstrings/extension.py b/src/mkdocstrings/extension.py
index be0c48bb..ea38b83f 100644
--- a/src/mkdocstrings/extension.py
+++ b/src/mkdocstrings/extension.py
@@ -1,7 +1,7 @@
"""This module holds the code of the Markdown extension responsible for matching "autodoc" instructions.
The extension is composed of a Markdown [block processor](https://python-markdown.github.io/extensions/api/#blockparser)
-that matches indented blocks starting with a line like '::: identifier'.
+that matches indented blocks starting with a line like `::: identifier`.
For each of these blocks, it uses a [handler][mkdocstrings.handlers.base.BaseHandler] to collect documentation about
the given identifier and render it with Jinja templates.
@@ -12,37 +12,37 @@
```yaml
::: some.identifier
handler: python
- selection:
+ options:
option1: value1
option2:
- - value2a
- - value2b
- rendering:
+ - value2a
+ - value2b
option_x: etc
```
"""
+
+from __future__ import annotations
+
import re
-from collections import ChainMap
-from typing import Any, MutableSequence, Tuple
+from typing import TYPE_CHECKING, Any
from warnings import warn
from xml.etree.ElementTree import Element
import yaml
from jinja2.exceptions import TemplateNotFound
-from markdown import Markdown
-from markdown.blockparser import BlockParser
from markdown.blockprocessors import BlockProcessor
from markdown.extensions import Extension
from markdown.treeprocessors import Treeprocessor
-from mkdocs_autorefs.plugin import AutorefsPlugin
+from mkdocs.exceptions import PluginError
from mkdocstrings.handlers.base import BaseHandler, CollectionError, CollectorItem, Handlers
from mkdocstrings.loggers import get_logger
-try:
- from mkdocs.exceptions import PluginError # New in MkDocs 1.2
-except ImportError:
- PluginError = SystemExit # noqa: WPS440
+if TYPE_CHECKING:
+ from collections.abc import MutableSequence
+
+ from markdown import Markdown
+ from mkdocs_autorefs import AutorefsPlugin
log = get_logger(__name__)
@@ -61,26 +61,26 @@ class AutoDocProcessor(BlockProcessor):
regex = re.compile(r"^(?P{result.text}')
+ # From the maintainer of codehilite, the codehilite CSS class, as defined by the user,
+ # should never be added to inline code, because codehilite does not support inline code.
+ # See https://github.com/Python-Markdown/markdown/issues/1220#issuecomment-1692160297.
+ css_class = "" if self._highlighter == "codehilite" else kwargs["css_class"]
+ return Markup(f'{result.text}')
return Markup(result)
@@ -133,20 +148,36 @@ def __init__(self, md: Markdown, id_prefix: str):
super().__init__(md)
self.id_prefix = id_prefix
- def run(self, root: Element): # noqa: D102 (ignore missing docstring)
- if not self.id_prefix:
- return
- for el in root.iter():
- id_attr = el.get("id")
- if id_attr:
- el.set("id", self.id_prefix + id_attr)
+ def run(self, root: Element) -> None: # noqa: D102 (ignore missing docstring)
+ if self.id_prefix:
+ self._prefix_ids(root)
+ def _prefix_ids(self, root: Element) -> None:
+ index = len(root)
+ for el in reversed(root): # Reversed mainly for the ability to mutate during iteration.
+ index -= 1
+
+ self._prefix_ids(el)
href_attr = el.get("href")
+
+ if id_attr := el.get("id"):
+ if el.tag == "a" and not href_attr:
+ # An anchor with id and no href is used by autorefs:
+ # leave it untouched and insert a copy with updated id after it.
+ new_el = copy.deepcopy(el)
+ new_el.set("id", self.id_prefix + id_attr)
+ root.insert(index + 1, new_el)
+ else:
+ # Anchors with id and href are not used by autorefs:
+ # update in place.
+ el.set("id", self.id_prefix + id_attr)
+
+ # Always update hrefs, names and labels-for:
+ # there will always be a corresponding id.
if href_attr and href_attr.startswith("#"):
el.set("href", "#" + self.id_prefix + href_attr[1:])
- name_attr = el.get("name")
- if name_attr:
+ if name_attr := el.get("name"):
el.set("name", self.id_prefix + name_attr)
if el.tag == "label":
@@ -174,7 +205,7 @@ def __init__(self, md: Markdown, shift_by: int):
super().__init__(md)
self.shift_by = shift_by
- def run(self, root: Element): # noqa: D102 (ignore missing docstring)
+ def run(self, root: Element) -> None: # noqa: D102 (ignore missing docstring)
if not self.shift_by:
return
for el in root.iter():
@@ -191,20 +222,21 @@ class _HeadingReportingTreeprocessor(Treeprocessor):
name = "mkdocstrings_headings_list"
regex = re.compile(r"[Hh][1-6]")
- headings: List[Element]
+ headings: list[Element]
"""The list (the one passed in the initializer) that is used to record the heading elements (by appending to it)."""
- def __init__(self, md: Markdown, headings: List[Element]):
+ def __init__(self, md: Markdown, headings: list[Element]):
super().__init__(md)
self.headings = headings
- def run(self, root: Element):
+ def run(self, root: Element) -> None:
+ permalink_class = self.md.treeprocessors["toc"].permalink_class # type: ignore[attr-defined]
for el in root.iter():
if self.regex.fullmatch(el.tag):
- el = copy.copy(el)
+ el = copy.copy(el) # noqa: PLW2901
# 'toc' extension's first pass (which we require to build heading stubs/ids) also edits the HTML.
# Undo the permalink edit so we can pass this heading to the outer pass of the 'toc' extension.
- if len(el) > 0 and el[-1].get("class") == self.md.treeprocessors["toc"].permalink_class: # noqa: WPS507
+ if len(el) > 0 and el[-1].get("class") == permalink_class:
del el[-1]
self.headings.append(el)
@@ -215,17 +247,18 @@ class ParagraphStrippingTreeprocessor(Treeprocessor):
name = "mkdocstrings_strip_paragraph"
strip = False
- def run(self, root: Element): # noqa: D102 (ignore missing docstring)
+ def run(self, root: Element) -> Element | None: # noqa: D102 (ignore missing docstring)
if self.strip and len(root) == 1 and root[0].tag == "p":
# Turn the single element into the root element and inherit its tag name (it's significant!)
root[0].tag = root.tag
return root[0]
+ return None
class MkdocstringsInnerExtension(Extension):
"""Extension that should always be added to Markdown sub-documents that handlers request (and *only* them)."""
- def __init__(self, headings: List[Element]):
+ def __init__(self, headings: list[Element]):
"""Initialize the object.
Arguments:
diff --git a/src/mkdocstrings/inventory.py b/src/mkdocstrings/inventory.py
index 6c1b8558..fb2d0018 100644
--- a/src/mkdocstrings/inventory.py
+++ b/src/mkdocstrings/inventory.py
@@ -3,17 +3,28 @@
# Credits to Brian Skinn and the sphobjinv project:
# https://github.com/bskinn/sphobjinv
+from __future__ import annotations
+
import re
import zlib
from textwrap import dedent
-from typing import BinaryIO, Collection, List, Optional
+from typing import TYPE_CHECKING, BinaryIO
+
+if TYPE_CHECKING:
+ from collections.abc import Collection
class InventoryItem:
"""Inventory item."""
def __init__(
- self, name: str, domain: str, role: str, uri: str, priority: str = "1", dispname: Optional[str] = None
+ self,
+ name: str,
+ domain: str,
+ role: str,
+ uri: str,
+ priority: int = 1,
+ dispname: str | None = None,
):
"""Initialize the object.
@@ -22,14 +33,14 @@ def __init__(
domain: The item domain, like 'python' or 'crystal'.
role: The item role, like 'class' or 'method'.
uri: The item URI.
- priority: The item priority. It can help for inventory suggestions.
+ priority: The item priority. Only used internally by mkdocstrings and Sphinx.
dispname: The item display name.
"""
self.name: str = name
self.domain: str = domain
self.role: str = role
self.uri: str = uri
- self.priority: str = priority
+ self.priority: int = priority
self.dispname: str = dispname or name
def format_sphinx(self) -> str:
@@ -46,10 +57,10 @@ def format_sphinx(self) -> str:
uri = uri[: -len(self.name)] + "$"
return f"{self.name} {self.domain}:{self.role} {self.priority} {uri} {dispname}"
- sphinx_item_regex = re.compile(r"^(.+?)\s+(\S+):(\S+)\s+(-?\d+)\s+(\S+)\s+(.*)$")
+ sphinx_item_regex = re.compile(r"^(.+?)\s+(\S+):(\S+)\s+(-?\d+)\s+(\S+)\s*(.*)$")
@classmethod
- def parse_sphinx(cls, line: str) -> "InventoryItem":
+ def parse_sphinx(cls, line: str) -> InventoryItem:
"""Parse a line from a Sphinx v2 inventory file and return an `InventoryItem` from it."""
match = cls.sphinx_item_regex.search(line)
if not match:
@@ -59,13 +70,13 @@ def parse_sphinx(cls, line: str) -> "InventoryItem":
uri = uri[:-1] + name
if dispname == "-":
dispname = name
- return cls(name, domain, role, uri, priority, dispname)
+ return cls(name, domain, role, uri, int(priority), dispname)
class Inventory(dict):
"""Inventory of collected and rendered objects."""
- def __init__(self, items: Optional[List[InventoryItem]] = None, project: str = "project", version: str = "0.0.0"):
+ def __init__(self, items: list[InventoryItem] | None = None, project: str = "project", version: str = "0.0.0"):
"""Initialize the object.
Arguments:
@@ -80,15 +91,33 @@ def __init__(self, items: Optional[List[InventoryItem]] = None, project: str = "
self.project = project
self.version = version
- def register(self, *args: str, **kwargs: str):
+ def register(
+ self,
+ name: str,
+ domain: str,
+ role: str,
+ uri: str,
+ priority: int = 1,
+ dispname: str | None = None,
+ ) -> None:
"""Create and register an item.
Arguments:
- *args: Arguments passed to [InventoryItem][mkdocstrings.inventory.InventoryItem].
- **kwargs: Keyword arguments passed to [InventoryItem][mkdocstrings.inventory.InventoryItem].
+ name: The item name.
+ domain: The item domain, like 'python' or 'crystal'.
+ role: The item role, like 'class' or 'method'.
+ uri: The item URI.
+ priority: The item priority. Only used internally by mkdocstrings and Sphinx.
+ dispname: The item display name.
"""
- item = InventoryItem(*args, **kwargs)
- self[item.name] = item
+ self[name] = InventoryItem(
+ name=name,
+ domain=domain,
+ role=role,
+ uri=uri,
+ priority=priority,
+ dispname=dispname,
+ )
def format_sphinx(self) -> bytes:
"""Format this inventory as a Sphinx `objects.inv` file.
@@ -103,17 +132,20 @@ def format_sphinx(self) -> bytes:
# Project: {self.project}
# Version: {self.version}
# The remainder of this file is compressed using zlib.
- """
+ """,
)
.lstrip()
.encode("utf8")
)
- lines = [item.format_sphinx().encode("utf8") for item in self.values()]
+ lines = [
+ item.format_sphinx().encode("utf8")
+ for item in sorted(self.values(), key=lambda item: (item.domain, item.name))
+ ]
return header + zlib.compress(b"\n".join(lines) + b"\n", 9)
@classmethod
- def parse_sphinx(cls, in_file: BinaryIO, *, domain_filter: Collection[str] = ()) -> "Inventory":
+ def parse_sphinx(cls, in_file: BinaryIO, *, domain_filter: Collection[str] = ()) -> Inventory:
"""Parse a Sphinx v2 inventory file and return an `Inventory` from it.
Arguments:
@@ -121,7 +153,7 @@ def parse_sphinx(cls, in_file: BinaryIO, *, domain_filter: Collection[str] = ())
domain_filter: A collection of domain values to allow (and filter out all other ones).
Returns:
- An `Inventory` containing the collected `InventoryItem`s.
+ An inventory containing the collected items.
"""
for _ in range(4):
in_file.readline()
diff --git a/src/mkdocstrings/loggers.py b/src/mkdocstrings/loggers.py
index d2722616..89f3d7f8 100644
--- a/src/mkdocstrings/loggers.py
+++ b/src/mkdocstrings/loggers.py
@@ -1,28 +1,51 @@
"""Logging functions."""
+from __future__ import annotations
+
import logging
from contextlib import suppress
from pathlib import Path
-from typing import Any, Callable, MutableMapping, Optional, Sequence, Tuple
-
-from jinja2.runtime import Context
-from mkdocs.utils import warning_filter
+from typing import TYPE_CHECKING, Any, Callable
try:
from jinja2 import pass_context
except ImportError: # TODO: remove once Jinja2 < 3.1 is dropped
- from jinja2 import contextfunction as pass_context # noqa: WPS440
+ from jinja2 import contextfunction as pass_context # type: ignore[attr-defined,no-redef]
try:
import mkdocstrings_handlers
except ImportError:
TEMPLATES_DIRS: Sequence[Path] = ()
else:
- TEMPLATES_DIRS = tuple(mkdocstrings_handlers.__path__) # noqa: WPS609
+ TEMPLATES_DIRS = tuple(mkdocstrings_handlers.__path__)
+
+
+if TYPE_CHECKING:
+ from collections.abc import MutableMapping, Sequence
+
+ from jinja2.runtime import Context
class LoggerAdapter(logging.LoggerAdapter):
- """A logger adapter to prefix messages."""
+ """A logger adapter to prefix messages.
+
+ This adapter also adds an additional parameter to logging methods
+ called `once`: if `True`, the message will only be logged once.
+
+ Examples:
+ In Python code:
+
+ >>> logger = get_logger("myplugin")
+ >>> logger.debug("This is a debug message.")
+ >>> logger.info("This is an info message.", once=True)
+
+ In Jinja templates (logger available in context as `log`):
+
+ ```jinja
+ {{ log.debug("This is a debug message.") }}
+ {{ log.info("This is an info message.", once=True) }}
+ ```
+ """
def __init__(self, prefix: str, logger: logging.Logger):
"""Initialize the object.
@@ -33,8 +56,9 @@ def __init__(self, prefix: str, logger: logging.Logger):
"""
super().__init__(logger, {})
self.prefix = prefix
+ self._logged: set[tuple[LoggerAdapter, str]] = set()
- def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> Tuple[str, Any]:
+ def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> tuple[str, Any]:
"""Process the message.
Arguments:
@@ -46,11 +70,32 @@ def process(self, msg: str, kwargs: MutableMapping[str, Any]) -> Tuple[str, Any]
"""
return f"{self.prefix}: {msg}", kwargs
+ def log(self, level: int, msg: object, *args: object, **kwargs: object) -> None:
+ """Log a message.
+
+ Arguments:
+ level: The logging level.
+ msg: The message.
+ *args: Additional arguments passed to parent method.
+ **kwargs: Additional keyword arguments passed to parent method.
+ """
+ if kwargs.pop("once", False):
+ if (key := (self, str(msg))) in self._logged:
+ return
+ self._logged.add(key)
+ super().log(level, msg, *args, **kwargs) # type: ignore[arg-type]
+
class TemplateLogger:
"""A wrapper class to allow logging in templates.
- Attributes:
+ The logging methods provided by this class all accept
+ two parameters:
+
+ - `msg`: The message to log.
+ - `once`: If `True`, the message will only be logged once.
+
+ Methods:
debug: Function to log a DEBUG message.
info: Function to log an INFO message.
warning: Function to log a WARNING message.
@@ -71,6 +116,27 @@ def __init__(self, logger: LoggerAdapter):
self.critical = get_template_logger_function(logger.critical)
+class _Lazy:
+ unset = object()
+
+ def __init__(self, func: Callable, *args: Any, **kwargs: Any):
+ self.func = func
+ self.args = args
+ self.kwargs = kwargs
+ self.result = self.unset
+
+ def __call__(self):
+ if self.result is self.unset:
+ self.result = self.func(*self.args, **self.kwargs)
+ return self.result
+
+ def __str__(self) -> str:
+ return str(self())
+
+ def __repr__(self) -> str:
+ return repr(self())
+
+
def get_template_logger_function(logger_func: Callable) -> Callable:
"""Create a wrapper function that automatically receives the Jinja template context.
@@ -82,18 +148,18 @@ def get_template_logger_function(logger_func: Callable) -> Callable:
"""
@pass_context
- def wrapper(context: Context, msg: Optional[str] = None) -> str:
+ def wrapper(context: Context, msg: str | None = None, *args: Any, **kwargs: Any) -> str:
"""Log a message.
Arguments:
context: The template context, automatically provided by Jinja.
msg: The message to log.
+ **kwargs: Additional arguments passed to the logger function.
Returns:
An empty string.
"""
- template_path = get_template_path(context)
- logger_func(f"{template_path}: {msg or 'Rendering'}")
+ logger_func(f"%s: {msg or 'Rendering'}", _Lazy(get_template_path, context), *args, **kwargs)
return ""
return wrapper
@@ -130,14 +196,17 @@ def get_logger(name: str) -> LoggerAdapter:
A logger configured to work well in MkDocs.
"""
logger = logging.getLogger(f"mkdocs.plugins.{name}")
- logger.addFilter(warning_filter)
return LoggerAdapter(name.split(".", 1)[0], logger)
-def get_template_logger() -> TemplateLogger:
+def get_template_logger(handler_name: str | None = None) -> TemplateLogger:
"""Return a logger usable in templates.
+ Parameters:
+ handler_name: The name of the handler.
+
Returns:
A template logger.
"""
- return TemplateLogger(get_logger("mkdocstrings.templates"))
+ handler_name = handler_name or "base"
+ return TemplateLogger(get_logger(f"mkdocstrings_handlers.{handler_name}.templates"))
diff --git a/src/mkdocstrings/plugin.py b/src/mkdocstrings/plugin.py
index 34edcc06..9cda9696 100644
--- a/src/mkdocstrings/plugin.py
+++ b/src/mkdocstrings/plugin.py
@@ -12,69 +12,64 @@
during the [`on_serve` event hook](https://www.mkdocs.org/user-guide/plugins/#on_serve).
"""
-import collections
-import functools
-import gzip
+from __future__ import annotations
+
import os
-from concurrent import futures
-from typing import Any, BinaryIO, Callable, Iterable, List, Mapping, Optional, Tuple
-from urllib import request
-from warnings import warn
+import sys
+from collections.abc import Iterable, Mapping
+from typing import TYPE_CHECKING, Any, Callable, TypeVar
+from warnings import catch_warnings, simplefilter
from mkdocs.config import Config
-from mkdocs.config.config_options import Type as MkType
-from mkdocs.livereload import LiveReloadServer
+from mkdocs.config import config_options as opt
from mkdocs.plugins import BasePlugin
from mkdocs.utils import write_file
-from mkdocs_autorefs.plugin import AutorefsPlugin
+from mkdocs_autorefs import AutorefsConfig, AutorefsPlugin
from mkdocstrings.extension import MkdocstringsExtension
from mkdocstrings.handlers.base import BaseHandler, Handlers
from mkdocstrings.loggers import get_logger
+if sys.version_info < (3, 10):
+ from typing_extensions import ParamSpec
+else:
+ from typing import ParamSpec
+
+if TYPE_CHECKING:
+ from jinja2.environment import Environment
+ from mkdocs.config.defaults import MkDocsConfig
+
+
log = get_logger(__name__)
-SELECTION_OPTS_KEY: str = "selection"
-"""The name of the selection parameter in YAML configuration blocks."""
-RENDERING_OPTS_KEY: str = "rendering"
-"""The name of the rendering parameter in YAML configuration blocks."""
+InventoryImportType = list[tuple[str, Mapping[str, Any]]]
+InventoryLoaderType = Callable[..., Iterable[tuple[str, str]]]
-InventoryImportType = List[Tuple[str, Mapping[str, Any]]]
-InventoryLoaderType = Callable[..., Iterable[Tuple[str, str]]]
+P = ParamSpec("P")
+R = TypeVar("R")
-class MkdocstringsPlugin(BasePlugin):
- """An `mkdocs` plugin.
+def list_to_tuple(function: Callable[P, R]) -> Callable[P, R]:
+    """Decorator to convert lists to tuples in the arguments."""
- This plugin defines the following event hooks:
+ def wrapper(*args: P.args, **kwargs: P.kwargs) -> R:
+ safe_args = [tuple(item) if isinstance(item, list) else item for item in args]
+ if kwargs:
+ kwargs = {key: tuple(value) if isinstance(value, list) else value for key, value in kwargs.items()} # type: ignore[assignment]
+ return function(*safe_args, **kwargs) # type: ignore[arg-type]
- - `on_config`
- - `on_env`
- - `on_post_build`
- - `on_serve`
+ return wrapper
- Check the [Developing Plugins](https://www.mkdocs.org/user-guide/plugins/#developing-plugins) page of `mkdocs`
- for more information about its plugin system.
- """
- config_scheme: Tuple[Tuple[str, MkType]] = (
- ("watch", MkType(list, default=[])), # type: ignore
- ("handlers", MkType(dict, default={})),
- ("default_handler", MkType(str, default="python")),
- ("custom_templates", MkType(str, default=None)),
- ("enable_inventory", MkType(bool, default=None)),
- )
- """
- The configuration options of `mkdocstrings`, written in `mkdocs.yml`.
+class PluginConfig(Config):
+ """The configuration options of `mkdocstrings`, written in `mkdocs.yml`."""
- Available options are:
+ handlers = opt.Type(dict, default={})
+ """
+ Global configuration of handlers.
- - **`watch` (deprecated)**: A list of directories to watch. Only used when serving the documentation with mkdocs.
- Whenever a file changes in one of directories, the whole documentation is built again, and the browser refreshed.
- Deprecated in favor of the now built-in `watch` feature of MkDocs.
- - **`default_handler`**: The default handler to use. The value is the name of the handler module. Default is "python".
- - **`handlers`**: Global configuration of handlers. You can set global configuration per handler, applied everywhere,
- but overridable in each "autodoc" instruction. Example:
+ You can set global configuration per handler, applied everywhere,
+ but overridable in each "autodoc" instruction. Example:
```yaml
plugins:
@@ -82,20 +77,46 @@ class MkdocstringsPlugin(BasePlugin):
handlers:
python:
options:
- selection_opt: true
- rendering_opt: "value"
+ option1: true
+ option2: "value"
rust:
options:
- selection_opt: 2
+ option9: 2
```
"""
+ default_handler = opt.Type(str, default="python")
+ """The default handler to use. The value is the name of the handler module. Default is "python"."""
+ custom_templates = opt.Optional(opt.Dir(exists=True))
+ """Location of custom templates to use when rendering API objects.
+
+ Value should be the path of a directory relative to the MkDocs configuration file.
+ """
+ enable_inventory = opt.Optional(opt.Type(bool))
+ """Whether to enable object inventory creation."""
+ enabled = opt.Type(bool, default=True)
+ """Whether to enable the plugin. Default is true. If false, *mkdocstrings* will not collect or render anything."""
+
+
+class MkdocstringsPlugin(BasePlugin[PluginConfig]):
+ """An `mkdocs` plugin.
+
+ This plugin defines the following event hooks:
+
+ - `on_config`
+ - `on_env`
+ - `on_post_build`
+
+ Check the [Developing Plugins](https://www.mkdocs.org/user-guide/plugins/#developing-plugins) page of `mkdocs`
+ for more information about its plugin system.
+ """
+
css_filename = "assets/_mkdocstrings.css"
def __init__(self) -> None:
"""Initialize the object."""
super().__init__()
- self._handlers: Optional[Handlers] = None
+ self._handlers: Handlers | None = None
@property
def handlers(self) -> Handlers:
@@ -111,31 +132,7 @@ def handlers(self) -> Handlers:
raise RuntimeError("The plugin hasn't been initialized with a config yet")
return self._handlers
- # TODO: remove once watch feature is removed
- def on_serve(self, server: LiveReloadServer, builder: Callable, **kwargs: Any): # noqa: W0613 (unused arguments)
- """Watch directories.
-
- Hook for the [`on_serve` event](https://www.mkdocs.org/user-guide/plugins/#on_serve).
- In this hook, we add the directories specified in the plugin's configuration to the list of directories
- watched by `mkdocs`. Whenever a change occurs in one of these directories, the documentation is built again
- and the site reloaded.
-
- Arguments:
- server: The `livereload` server instance.
- builder: The function to build the site.
- **kwargs: Additional arguments passed by MkDocs.
- """
- if self.config["watch"]:
- warn(
- "mkdocstrings' watch feature is deprecated in favor of MkDocs' watch feature, "
- "see https://www.mkdocs.org/user-guide/configuration/#watch.",
- DeprecationWarning,
- )
- for element in self.config["watch"]:
- log.debug(f"Adding directory '{element}' to watcher")
- server.watch(element, builder)
-
- def on_config(self, config: Config, **kwargs: Any) -> Config: # noqa: W0613 (unused arguments)
+ def on_config(self, config: MkDocsConfig) -> MkDocsConfig | None:
"""Instantiate our Markdown extension.
Hook for the [`on_config` event](https://www.mkdocs.org/user-guide/plugins/#on_config).
@@ -147,64 +144,52 @@ def on_config(self, config: Config, **kwargs: Any) -> Config: # noqa: W0613 (un
Arguments:
config: The MkDocs config object.
- **kwargs: Additional arguments passed by MkDocs.
Returns:
The modified config.
"""
+ if not self.plugin_enabled:
+ log.debug("Plugin is not enabled. Skipping.")
+ return config
log.debug("Adding extension to the list")
- theme_name = None
- if config["theme"].name is None:
- theme_name = os.path.dirname(config["theme"].dirs[0])
- else:
- theme_name = config["theme"].name
-
- to_import: InventoryImportType = []
- for handler_name, conf in self.config["handlers"].items():
- for import_item in conf.pop("import", ()):
- if isinstance(import_item, str):
- import_item = {"url": import_item}
- to_import.append((handler_name, import_item))
-
- extension_config = {
- "site_name": config["site_name"],
- "config_file_path": config["config_file_path"],
- "theme_name": theme_name,
- "mdx": config["markdown_extensions"],
- "mdx_configs": config["mdx_configs"],
- "mkdocstrings": self.config,
- }
- self._handlers = Handlers(extension_config)
-
- try: # noqa: WPS229
+ handlers = Handlers(
+ default=self.config.default_handler,
+ handlers_config=self.config.handlers,
+ theme=config.theme.name or os.path.dirname(config.theme.dirs[0]),
+ custom_templates=self.config.custom_templates,
+ mdx=config.markdown_extensions,
+ mdx_config=config.mdx_configs,
+ inventory_project=config.site_name,
+ inventory_version="0.0.0", # TODO: Find a way to get actual version.
+ tool_config=config,
+ )
+
+ handlers._download_inventories()
+
+ autorefs: AutorefsPlugin
+ try:
# If autorefs plugin is explicitly enabled, just use it.
- autorefs = config["plugins"]["autorefs"]
- log.debug(f"Picked up existing autorefs instance {autorefs!r}")
+ autorefs = config.plugins["autorefs"] # type: ignore[assignment]
+ log.debug("Picked up existing autorefs instance %r", autorefs)
except KeyError:
# Otherwise, add a limited instance of it that acts only on what's added through `register_anchor`.
autorefs = AutorefsPlugin()
+ autorefs.config = AutorefsConfig()
autorefs.scan_toc = False
- config["plugins"]["autorefs"] = autorefs
- log.debug(f"Added a subdued autorefs instance {autorefs!r}")
- # Add collector-based fallback in either case.
- autorefs.get_fallback_anchor = self.handlers.get_anchors
-
- mkdocstrings_extension = MkdocstringsExtension(extension_config, self.handlers, autorefs)
- config["markdown_extensions"].append(mkdocstrings_extension)
-
- config["extra_css"].insert(0, self.css_filename) # So that it has lower priority than user files.
-
- self._inv_futures = []
- if to_import:
- inv_loader = futures.ThreadPoolExecutor(4)
- for handler_name, import_item in to_import: # noqa: WPS440
- future = inv_loader.submit(
- self._load_inventory, self.get_handler(handler_name).load_inventory, **import_item
- )
- self._inv_futures.append(future)
- inv_loader.shutdown(wait=False)
+ config.plugins["autorefs"] = autorefs
+ log.debug("Added a subdued autorefs instance %r", autorefs)
+ # YORE: Bump 1: Remove block.
+ with catch_warnings():
+ simplefilter("ignore", category=DeprecationWarning)
+ autorefs.get_fallback_anchor = handlers.get_anchors
+
+ mkdocstrings_extension = MkdocstringsExtension(handlers, autorefs)
+ config.markdown_extensions.append(mkdocstrings_extension) # type: ignore[arg-type]
+
+ config.extra_css.insert(0, self.css_filename) # So that it has lower priority than user files.
+ self._handlers = handlers
return config
@property
@@ -214,12 +199,21 @@ def inventory_enabled(self) -> bool:
Returns:
Whether the inventory is enabled.
"""
- inventory_enabled = self.config["enable_inventory"]
+ inventory_enabled = self.config.enable_inventory
if inventory_enabled is None:
inventory_enabled = any(handler.enable_inventory for handler in self.handlers.seen_handlers)
return inventory_enabled
- def on_env(self, env, config: Config, **kwargs):
+ @property
+ def plugin_enabled(self) -> bool:
+ """Tell if the plugin is enabled or not.
+
+ Returns:
+ Whether the plugin is enabled.
+ """
+ return self.config.enabled
+
+ def on_env(self, env: Environment, config: MkDocsConfig, *args: Any, **kwargs: Any) -> None: # noqa: ARG002
"""Extra actions that need to happen after all Markdown rendering and before HTML rendering.
Hook for the [`on_env` event](https://www.mkdocs.org/user-guide/plugins/#on_env).
@@ -227,25 +221,27 @@ def on_env(self, env, config: Config, **kwargs):
- Write mkdocstrings' extra files into the site dir.
- Gather results from background inventory download tasks.
"""
+ if not self.plugin_enabled:
+ return
+
if self._handlers:
css_content = "\n".join(handler.extra_css for handler in self.handlers.seen_handlers)
- write_file(css_content.encode("utf-8"), os.path.join(config["site_dir"], self.css_filename))
+ write_file(css_content.encode("utf-8"), os.path.join(config.site_dir, self.css_filename))
if self.inventory_enabled:
log.debug("Creating inventory file objects.inv")
inv_contents = self.handlers.inventory.format_sphinx()
- write_file(inv_contents, os.path.join(config["site_dir"], "objects.inv"))
+ write_file(inv_contents, os.path.join(config.site_dir, "objects.inv"))
- if self._inv_futures:
- log.debug(f"Waiting for {len(self._inv_futures)} inventory download(s)")
- futures.wait(self._inv_futures, timeout=30)
- for page, identifier in collections.ChainMap(*(fut.result() for fut in self._inv_futures)).items():
- config["plugins"]["autorefs"].register_url(page, identifier)
- self._inv_futures = []
+ register = config.plugins["autorefs"].register_url # type: ignore[attr-defined]
+ for identifier, url in self._handlers._yield_inventory_items():
+ register(identifier, url)
def on_post_build(
- self, config: Config, **kwargs: Any
- ) -> None: # noqa: W0613,R0201 (unused arguments, cannot be static)
+ self,
+ config: MkDocsConfig, # noqa: ARG002
+ **kwargs: Any, # noqa: ARG002
+ ) -> None:
"""Teardown the handlers.
Hook for the [`on_post_build` event](https://www.mkdocs.org/user-guide/plugins/#on_post_build).
@@ -253,14 +249,14 @@ def on_post_build(
For example, a handler could open a subprocess in the background and keep it open
to feed it "autodoc" instructions and get back JSON data. If so, it should then close the subprocess at some point:
- the proper place to do this is in the collector's `teardown` method, which is indirectly called by this hook.
+ the proper place to do this is in the handler's `teardown` method, which is indirectly called by this hook.
Arguments:
config: The MkDocs config object.
**kwargs: Additional arguments passed by MkDocs.
"""
- for future in self._inv_futures:
- future.cancel()
+ if not self.plugin_enabled:
+ return
if self._handlers:
log.debug("Tearing handlers down")
@@ -276,26 +272,3 @@ def get_handler(self, handler_name: str) -> BaseHandler:
An instance of a subclass of [`BaseHandler`][mkdocstrings.handlers.base.BaseHandler].
"""
return self.handlers.get_handler(handler_name)
-
- @classmethod
- @functools.lru_cache(maxsize=None)
- def _load_inventory(cls, loader: InventoryLoaderType, url: str, **kwargs: Any) -> Mapping[str, str]:
- """Download and process inventory files using a handler.
-
- Arguments:
- loader: A function returning a sequence of pairs (identifier, url).
- url: The URL to download and process.
- **kwargs: Extra arguments to pass to the loader.
-
- Returns:
- A mapping from identifier to absolute URL.
- """
- log.debug(f"Downloading inventory from {url!r}")
- req = request.Request(url, headers={"Accept-Encoding": "gzip", "User-Agent": "mkdocstrings/0.15.0"})
- with request.urlopen(req) as resp: # noqa: S310 (URL audit OK: comes from a checked-in config)
- content: BinaryIO = resp
- if "gzip" in resp.headers.get("content-encoding", ""):
- content = gzip.GzipFile(fileobj=resp) # type: ignore[assignment]
- result = dict(loader(content, url=url, **kwargs))
- log.debug(f"Loaded inventory from {url!r}: {len(result)} items")
- return result
diff --git a/tests/conftest.py b/tests/conftest.py
index 7025b8fd..74688fe7 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -1,26 +1,29 @@
"""Configuration for the pytest test suite."""
+from __future__ import annotations
+
from collections import ChainMap
+from typing import TYPE_CHECKING, Any
import pytest
from markdown.core import Markdown
-from mkdocs import config
+from mkdocs.config.defaults import MkDocsConfig
+
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+ from pathlib import Path
-try:
- from mkdocs.config.defaults import get_schema
-except ImportError:
+ from mkdocs import config
- def get_schema(): # noqa: WPS440
- """Fallback for old versions of MkDocs."""
- return config.DEFAULT_SCHEMA
+ from mkdocstrings.plugin import MkdocstringsPlugin
@pytest.fixture(name="mkdocs_conf")
-def fixture_mkdocs_conf(request, tmp_path):
+def fixture_mkdocs_conf(request: pytest.FixtureRequest, tmp_path: Path) -> Iterator[config.Config]:
"""Yield a MkDocs configuration object."""
- conf = config.Config(schema=get_schema())
- while hasattr(request, "_parent_request") and hasattr(request._parent_request, "_parent_request"): # noqa: WPS437
- request = request._parent_request # noqa: WPS437
+ conf = MkDocsConfig()
+ while hasattr(request, "_parent_request") and hasattr(request._parent_request, "_parent_request"):
+ request = request._parent_request
conf_dict = {
"site_name": "foo",
@@ -30,7 +33,7 @@ def fixture_mkdocs_conf(request, tmp_path):
**getattr(request, "param", {}),
}
# Re-create it manually as a workaround for https://github.com/mkdocs/mkdocs/issues/2289
- mdx_configs = dict(ChainMap(*conf_dict.get("markdown_extensions", [])))
+ mdx_configs: dict[str, Any] = dict(ChainMap(*conf_dict.get("markdown_extensions", [])))
conf.load_dict(conf_dict)
assert conf.validate() == ([], [])
@@ -45,14 +48,12 @@ def fixture_mkdocs_conf(request, tmp_path):
@pytest.fixture(name="plugin")
-def fixture_plugin(mkdocs_conf):
+def fixture_plugin(mkdocs_conf: config.Config) -> MkdocstringsPlugin:
"""Return a plugin instance."""
- plugin = mkdocs_conf["plugins"]["mkdocstrings"]
- plugin.md = Markdown(extensions=mkdocs_conf["markdown_extensions"], extension_configs=mkdocs_conf["mdx_configs"])
- return plugin
+ return mkdocs_conf["plugins"]["mkdocstrings"]
@pytest.fixture(name="ext_markdown")
-def fixture_ext_markdown(plugin):
+def fixture_ext_markdown(mkdocs_conf: MkDocsConfig) -> Markdown:
"""Return a Markdown instance with MkdocstringsExtension."""
- return plugin.md
+ return Markdown(extensions=mkdocs_conf["markdown_extensions"], extension_configs=mkdocs_conf["mdx_configs"])
diff --git a/tests/fixtures/builtin.py b/tests/fixtures/builtin.py
deleted file mode 100644
index cab198e3..00000000
--- a/tests/fixtures/builtin.py
+++ /dev/null
@@ -1,2 +0,0 @@
-def func(foo=print):
- """test"""
diff --git a/tests/fixtures/headings_many.py b/tests/fixtures/headings_many.py
new file mode 100644
index 00000000..fa643a48
--- /dev/null
+++ b/tests/fixtures/headings_many.py
@@ -0,0 +1,10 @@
+def heading_1():
+ """## Heading one"""
+
+
+def heading_2():
+ """### Heading two"""
+
+
+def heading_3():
+ """#### Heading three"""
diff --git a/tests/fixtures/markdown_anchors.py b/tests/fixtures/markdown_anchors.py
new file mode 100644
index 00000000..74cea744
--- /dev/null
+++ b/tests/fixtures/markdown_anchors.py
@@ -0,0 +1,16 @@
+"""Module docstring.
+
+[](){#anchor}
+
+Paragraph.
+
+[](){#heading-anchor-1}
+[](){#heading-anchor-2}
+[](){#heading-anchor-3}
+## Heading
+
+[](#has-href1)
+[](#has-href2){#with-id}
+
+Paragraph.
+"""
\ No newline at end of file
diff --git a/tests/fixtures/nesting.py b/tests/fixtures/nesting.py
new file mode 100644
index 00000000..92f7a9ee
--- /dev/null
+++ b/tests/fixtures/nesting.py
@@ -0,0 +1,10 @@
+class Class:
+ """A class.
+
+ ## ::: tests.fixtures.nesting.Class.method
+ options:
+ show_root_heading: true
+ """
+
+ def method(self) -> None:
+ """A method."""
diff --git a/tests/test_download.py b/tests/test_download.py
new file mode 100644
index 00000000..95dc0233
--- /dev/null
+++ b/tests/test_download.py
@@ -0,0 +1,103 @@
+"""Tests for the internal mkdocstrings _download module."""
+
+from __future__ import annotations
+
+import logging
+from typing import TYPE_CHECKING
+
+import pytest
+
+from mkdocstrings import _download
+
+if TYPE_CHECKING:
+ from collections.abc import Mapping
+
+
+@pytest.mark.parametrize(
+ ("credential", "expected", "env"),
+ [
+ ("USER", "USER", {"USER": "testuser"}),
+ ("$USER", "$USER", {"USER": "testuser"}),
+ ("${USER", "${USER", {"USER": "testuser"}),
+ ("$USER}", "$USER}", {"USER": "testuser"}),
+ ("${TOKEN}", "testtoken", {"TOKEN": "testtoken"}),
+ ("${USER}:${PASSWORD}", "${USER}:testpass", {"PASSWORD": "testpass"}),
+ ("${USER}:${PASSWORD}", "testuser:testpass", {"USER": "testuser", "PASSWORD": "testpass"}),
+ (
+ "user_prefix_${USER}_user_$uffix:pwd_prefix_${PASSWORD}_pwd_${uffix",
+ "user_prefix_testuser_user_$uffix:pwd_prefix_testpass_pwd_${uffix",
+ {"USER": "testuser", "PASSWORD": "testpass"},
+ ),
+ ],
+)
+def test_expand_env_vars(credential: str, expected: str, env: Mapping[str, str]) -> None:
+ """Test expanding environment variables."""
+ assert _download._expand_env_vars(credential, url="https://test.example.com", env=env) == expected
+
+
+def test_expand_env_vars_with_missing_env_var(caplog: pytest.LogCaptureFixture) -> None:
+ """Test expanding environment variables with a missing environment variable."""
+ caplog.set_level(logging.WARNING, logger="mkdocs.plugins.mkdocstrings._download")
+
+ credential = "${USER}"
+ env: dict[str, str] = {}
+ assert _download._expand_env_vars(credential, url="https://test.example.com", env=env) == "${USER}"
+
+ output = caplog.records[0].getMessage()
+ assert "'USER' is not set" in output
+
+
+@pytest.mark.parametrize(
+ ("url", "expected_url"),
+ [
+ ("http://host/path", "http://host/path"),
+ ("http://token@host/path", "http://host/path"),
+ ("http://${token}@host/path", "http://host/path"),
+ ("http://username:password@host/path", "http://host/path"),
+ ("http://username:${PASSWORD}@host/path", "http://host/path"),
+ ("http://${USERNAME}:${PASSWORD}@host/path", "http://host/path"),
+ ("http://prefix${USERNAME}suffix:prefix${PASSWORD}suffix@host/path", "http://host/path"),
+ ],
+)
+def test_extract_auth_from_url(monkeypatch: pytest.MonkeyPatch, url: str, expected_url: str) -> None:
+ """Test extracting the auth part from the URL."""
+ monkeypatch.setattr(_download, "_create_auth_header", lambda *args, **kwargs: {})
+ result_url, _result_auth_header = _download._extract_auth_from_url(url)
+ assert result_url == expected_url
+
+
+def test_create_auth_header_basic_auth() -> None:
+ """Test creating the Authorization header for basic authentication."""
+ auth_header = _download._create_auth_header(credential="testuser:testpass", url="https://test.example.com")
+ assert auth_header == {"Authorization": "Basic dGVzdHVzZXI6dGVzdHBhc3M="}
+
+
+def test_create_auth_header_bearer_auth() -> None:
+ """Test creating the Authorization header for bearer token authentication."""
+ auth_header = _download._create_auth_header(credential="token123", url="https://test.example.com")
+ assert auth_header == {"Authorization": "Bearer token123"}
+
+
+@pytest.mark.parametrize(
+ ("var", "match"),
+ [
+ ("${var}", "var"),
+ ("${VAR}", "VAR"),
+ ("${_}", "_"),
+ ("${_VAR}", "_VAR"),
+ ("${VAR123}", "VAR123"),
+ ("${VAR123_}", "VAR123_"),
+ ("VAR", None),
+ ("$1VAR", None),
+ ("${1VAR}", None),
+ ("${}", None),
+ ("${ }", None),
+ ],
+)
+def test_env_var_pattern(var: str, match: str | None) -> None:
+ """Test the environment variable regex pattern."""
+ _match = _download.ENV_VAR_PATTERN.match(var)
+ if _match is None:
+ assert match is _match
+ else:
+ assert _match.group(1) == match
diff --git a/tests/test_extension.py b/tests/test_extension.py
index df388723..d7e5b88a 100644
--- a/tests/test_extension.py
+++ b/tests/test_extension.py
@@ -1,13 +1,22 @@
"""Tests for the extension module."""
+
+from __future__ import annotations
+
import re
import sys
from textwrap import dedent
+from typing import TYPE_CHECKING
import pytest
+if TYPE_CHECKING:
+ from markdown import Markdown
+
+ from mkdocstrings.plugin import MkdocstringsPlugin
+
@pytest.mark.parametrize("ext_markdown", [{"markdown_extensions": [{"footnotes": {}}]}], indirect=["ext_markdown"])
-def test_multiple_footnotes(ext_markdown):
+def test_multiple_footnotes(ext_markdown: Markdown) -> None:
"""Assert footnotes don't get added to subsequent docstrings."""
output = ext_markdown.convert(
dedent(
@@ -29,15 +38,15 @@ def test_multiple_footnotes(ext_markdown):
assert output.count("Top footnote") == 1
-def test_markdown_heading_level(ext_markdown):
+def test_markdown_heading_level(ext_markdown: Markdown) -> None:
"""Assert that Markdown headings' level doesn't exceed heading_level."""
- output = ext_markdown.convert("::: tests.fixtures.headings\n rendering:\n show_root_heading: true")
+ output = ext_markdown.convert("::: tests.fixtures.headings\n options:\n show_root_heading: true")
assert ">Foo" in output
assert ">Bar" in output
assert ">Baz" in output
-def test_keeps_preceding_text(ext_markdown):
+def test_keeps_preceding_text(ext_markdown: Markdown) -> None:
"""Assert that autodoc is recognized in the middle of a block and preceding text is kept."""
output = ext_markdown.convert("**preceding**\n::: tests.fixtures.headings")
assert "preceding" in output
@@ -45,21 +54,21 @@ def test_keeps_preceding_text(ext_markdown):
assert ":::" not in output
-def test_reference_inside_autodoc(ext_markdown):
+def test_reference_inside_autodoc(ext_markdown: Markdown) -> None:
"""Assert cross-reference Markdown extension works correctly."""
output = ext_markdown.convert("::: tests.fixtures.cross_reference")
assert re.search(r"Link to <.*something\.Else.*>something\.Else<.*>\.", output)
@pytest.mark.skipif(sys.version_info < (3, 8), reason="typing.Literal requires Python 3.8")
-def test_quote_inside_annotation(ext_markdown):
+def test_quote_inside_annotation(ext_markdown: Markdown) -> None:
"""Assert that inline highlighting doesn't double-escape HTML."""
output = ext_markdown.convert("::: tests.fixtures.string_annotation.Foo")
assert ";hi&" in output
assert "&" not in output
-def test_html_inside_heading(ext_markdown):
+def test_html_inside_heading(ext_markdown: Markdown) -> None:
"""Assert that headings don't double-escape HTML."""
output = ext_markdown.convert("::: tests.fixtures.html_tokens")
assert "'<" in output
@@ -75,7 +84,7 @@ def test_html_inside_heading(ext_markdown):
],
indirect=["ext_markdown"],
)
-def test_no_double_toc(ext_markdown, expect_permalink):
+def test_no_double_toc(ext_markdown: Markdown, expect_permalink: str) -> None:
"""Assert that the 'toc' extension doesn't apply its modification twice."""
output = ext_markdown.convert(
dedent(
@@ -83,67 +92,161 @@ def test_no_double_toc(ext_markdown, expect_permalink):
# aa
::: tests.fixtures.headings
- rendering:
+ options:
show_root_toc_entry: false
# bb
- """
- )
+ """,
+ ),
)
assert output.count(expect_permalink) == 5
assert 'id="tests.fixtures.headings--foo"' in output
- assert ext_markdown.toc_tokens == [ # noqa: E1101 (the member gets populated only with 'toc' extension)
+ assert ext_markdown.toc_tokens == [ # type: ignore[attr-defined] # the member gets populated only with 'toc' extension
{
"level": 1,
"id": "aa",
+ "html": "aa",
"name": "aa",
+ "data-toc-label": "",
"children": [
{
"level": 2,
"id": "tests.fixtures.headings--foo",
+ "html": "Foo",
"name": "Foo",
+ "data-toc-label": "",
"children": [
{
"level": 4,
"id": "tests.fixtures.headings--bar",
+ "html": "Bar",
"name": "Bar",
+ "data-toc-label": "",
"children": [
- {"level": 6, "id": "tests.fixtures.headings--baz", "name": "Baz", "children": []}
+ {
+ "level": 6,
+ "id": "tests.fixtures.headings--baz",
+ "html": "Baz",
+ "name": "Baz",
+ "data-toc-label": "",
+ "children": [],
+ },
],
- }
+ },
],
- }
+ },
],
},
- {"level": 1, "id": "bb", "name": "bb", "children": []},
+ {
+ "level": 1,
+ "id": "bb",
+ "html": "bb",
+ "name": "bb",
+ "data-toc-label": "",
+ "children": [],
+ },
]
-def test_use_custom_handler(ext_markdown):
+def test_use_custom_handler(ext_markdown: Markdown) -> None:
"""Assert that we use the custom handler declared in an individual autodoc instruction."""
with pytest.raises(ModuleNotFoundError):
ext_markdown.convert("::: tests.fixtures.headings\n handler: not_here")
-def test_dont_register_every_identifier_as_anchor(plugin):
+def test_register_every_identifier_alias(plugin: MkdocstringsPlugin, ext_markdown: Markdown) -> None:
"""Assert that we don't preemptively register all identifiers of a rendered object."""
- handler = plugin._handlers.get_handler("python") # noqa: WPS437
- ids = {"id1", "id2", "id3"}
- handler.get_anchors = lambda _: ids
- plugin.md.convert("::: tests.fixtures.headings")
- autorefs = plugin.md.parser.blockprocessors["mkdocstrings"]._autorefs # noqa: WPS219,WPS437
+ handler = plugin._handlers.get_handler("python") # type: ignore[union-attr]
+ ids = ("id1", "id2", "id3")
+ # TODO: Remove line when Python handler removes its `get_anchors` method.
+ handler.get_anchors = lambda _: ids # type: ignore[union-attr]
+ handler.get_aliases = lambda _: ids # type: ignore[method-assign]
+ autorefs = ext_markdown.parser.blockprocessors["mkdocstrings"]._autorefs # type: ignore[attr-defined]
+
+ class Page:
+ url = "foo"
+
+ autorefs.current_page = Page()
+ ext_markdown.convert("::: tests.fixtures.headings")
for identifier in ids:
- assert identifier not in autorefs._url_map # noqa: WPS437
- assert identifier not in autorefs._abs_url_map # noqa: WPS437
-
-
-def test_use_deprecated_yaml_keys(ext_markdown):
- """Check that using the deprecated 'selection' and 'rendering' YAML keys emits a deprecation warning."""
- with pytest.warns(DeprecationWarning, match="single 'options' YAML key"):
- assert "h1" not in ext_markdown.convert("::: tests.fixtures.headings\n rendering:\n heading_level: 2")
+ assert identifier in autorefs._secondary_url_map
-def test_use_new_options_yaml_key(ext_markdown):
- """Check that using the new 'options' YAML key works as expected."""
+def test_use_options_yaml_key(ext_markdown: Markdown) -> None:
+ """Check that using the 'options' YAML key works as expected."""
assert "h1" in ext_markdown.convert("::: tests.fixtures.headings\n options:\n heading_level: 1")
assert "h1" not in ext_markdown.convert("::: tests.fixtures.headings\n options:\n heading_level: 2")
+
+
+def test_use_yaml_options_after_blank_line(ext_markdown: Markdown) -> None:
+ """Check that YAML options are detected even after a blank line."""
+ assert "h1" not in ext_markdown.convert("::: tests.fixtures.headings\n\n options:\n heading_level: 2")
+
+
+@pytest.mark.parametrize("ext_markdown", [{"markdown_extensions": [{"admonition": {}}]}], indirect=["ext_markdown"])
+def test_removing_duplicated_headings(ext_markdown: Markdown) -> None:
+ """Assert duplicated headings are removed from the output."""
+ output = ext_markdown.convert(
+ dedent(
+ """
+ ::: tests.fixtures.headings_many.heading_1
+
+ !!! note
+
+ ::: tests.fixtures.headings_many.heading_2
+
+ ::: tests.fixtures.headings_many.heading_3
+ """,
+ ),
+ )
+ assert output.count(">Heading one<") == 1
+ assert output.count(">Heading two<") == 1
+ assert output.count(">Heading three<") == 1
+ assert output.count('class="mkdocstrings') == 0
+
+
+def _assert_contains_in_order(items: list[str], string: str) -> None:
+ index = 0
+ for item in items:
+ assert item in string[index:]
+ index = string.index(item, index) + len(item)
+
+
+@pytest.mark.parametrize("ext_markdown", [{"markdown_extensions": [{"attr_list": {}}]}], indirect=["ext_markdown"])
+def test_backup_of_anchors(ext_markdown: Markdown) -> None:
+    """Assert that anchors with an id and no `href` are backed up, while anchors with an `href` are only updated."""
+ output = ext_markdown.convert("::: tests.fixtures.markdown_anchors")
+
+ # Anchors with id and no href have been backed up and updated.
+ _assert_contains_in_order(
+ [
+ 'id="anchor"',
+ 'id="tests.fixtures.markdown_anchors--anchor"',
+ 'id="heading-anchor-1"',
+ 'id="tests.fixtures.markdown_anchors--heading-anchor-1"',
+ 'id="heading-anchor-2"',
+ 'id="tests.fixtures.markdown_anchors--heading-anchor-2"',
+ 'id="heading-anchor-3"',
+ 'id="tests.fixtures.markdown_anchors--heading-anchor-3"',
+ ],
+ output,
+ )
+
+ # Anchors with href and with or without id have been updated but not backed up.
+ _assert_contains_in_order(
+ [
+ 'id="tests.fixtures.markdown_anchors--with-id"',
+ ],
+ output,
+ )
+ assert 'id="with-id"' not in output
+
+ _assert_contains_in_order(
+ [
+ 'href="#tests.fixtures.markdown_anchors--has-href1"',
+ 'href="#tests.fixtures.markdown_anchors--has-href2"',
+ ],
+ output,
+ )
+ assert 'href="#has-href1"' not in output
+ assert 'href="#has-href2"' not in output
diff --git a/tests/test_handlers.py b/tests/test_handlers.py
index cfe04cd8..cea80657 100644
--- a/tests/test_handlers.py
+++ b/tests/test_handlers.py
@@ -1,20 +1,31 @@
"""Tests for the handlers.base module."""
+from __future__ import annotations
+
+from textwrap import dedent
+from typing import TYPE_CHECKING
+
import pytest
+from jinja2.exceptions import TemplateNotFound
from markdown import Markdown
from mkdocstrings.handlers.base import Highlighter
+if TYPE_CHECKING:
+ from pathlib import Path
+
+ from mkdocstrings.plugin import MkdocstringsPlugin
+
@pytest.mark.parametrize("extension_name", ["codehilite", "pymdownx.highlight"])
-def test_highlighter_without_pygments(extension_name):
+def test_highlighter_without_pygments(extension_name: str) -> None:
"""Assert that it's possible to disable Pygments highlighting.
Arguments:
extension_name: The "user-chosen" Markdown extension for syntax highlighting.
"""
configs = {extension_name: {"use_pygments": False, "css_class": "hiiii"}}
- md = Markdown(extensions=configs, extension_configs=configs)
+ md = Markdown(extensions=[extension_name], extension_configs=configs)
hl = Highlighter(md)
assert (
hl.highlight("import foo", language="python")
@@ -22,25 +33,105 @@ def test_highlighter_without_pygments(extension_name):
)
assert (
hl.highlight("import foo", language="python", inline=True)
- == 'import foo'
+ == f'import foo'
)
@pytest.mark.parametrize("extension_name", [None, "codehilite", "pymdownx.highlight"])
@pytest.mark.parametrize("inline", [False, True])
-def test_highlighter_basic(extension_name, inline):
+def test_highlighter_basic(extension_name: str | None, inline: bool) -> None:
"""Assert that Pygments syntax highlighting works.
Arguments:
extension_name: The "user-chosen" Markdown extension for syntax highlighting.
         inline: Whether to perform inline highlighting.
"""
- configs = {}
- if extension_name:
- configs[extension_name] = {}
- md = Markdown(extensions=configs, extension_configs=configs)
+ md = Markdown(extensions=[extension_name], extension_configs={extension_name: {}}) if extension_name else Markdown()
hl = Highlighter(md)
actual = hl.highlight("import foo", language="python", inline=inline)
assert "import" in actual
assert "import foo" not in actual # Highlighting has split it up.
+
+
+def test_extended_templates(tmp_path: Path, plugin: MkdocstringsPlugin) -> None:
+ """Test the extended templates functionality.
+
+ Parameters:
+ tmp_path: Temporary folder.
+ plugin: Instance of our plugin.
+ """
+ handler = plugin._handlers.get_handler("python") # type: ignore[union-attr]
+
+ # monkeypatch Jinja env search path
+ search_paths = [
+ base_theme := tmp_path / "base_theme",
+ base_fallback_theme := tmp_path / "base_fallback_theme",
+ extended_theme := tmp_path / "extended_theme",
+ extended_fallback_theme := tmp_path / "extended_fallback_theme",
+ ]
+ handler.env.loader.searchpath = search_paths # type: ignore[union-attr]
+
+ # assert "new" template is not found
+ with pytest.raises(expected_exception=TemplateNotFound):
+ handler.env.get_template("new.html")
+
+ # check precedence: base theme, base fallback theme, extended theme, extended fallback theme
+ # start with last one and go back up
+ handler.env.cache = None
+
+ extended_fallback_theme.mkdir()
+ extended_fallback_theme.joinpath("new.html").write_text("extended fallback new")
+ assert handler.env.get_template("new.html").render() == "extended fallback new"
+
+ extended_theme.mkdir()
+ extended_theme.joinpath("new.html").write_text("extended new")
+ assert handler.env.get_template("new.html").render() == "extended new"
+
+ base_fallback_theme.mkdir()
+ base_fallback_theme.joinpath("new.html").write_text("base fallback new")
+ assert handler.env.get_template("new.html").render() == "base fallback new"
+
+ base_theme.mkdir()
+ base_theme.joinpath("new.html").write_text("base new")
+ assert handler.env.get_template("new.html").render() == "base new"
+
+
+@pytest.mark.parametrize(
+ "ext_markdown",
+ [{"markdown_extensions": [{"toc": {"permalink": True}}]}],
+ indirect=["ext_markdown"],
+)
+def test_nested_autodoc(ext_markdown: Markdown) -> None:
+ """Assert that nested autodocs render well and do not mess up the TOC."""
+ output = ext_markdown.convert(
+ dedent(
+ """
+ # ::: tests.fixtures.nesting.Class
+ options:
+ members: false
+ show_root_heading: true
+ """,
+ ),
+ )
+ assert 'id="tests.fixtures.nesting.Class"' in output
+ assert 'id="tests.fixtures.nesting.Class.method"' in output
+ assert ext_markdown.toc_tokens == [ # type: ignore[attr-defined]
+ {
+ "level": 1,
+ "id": "tests.fixtures.nesting.Class",
+ "html": "",
+ "name": "Class",
+ "data-toc-label": "Class",
+ "children": [
+ {
+ "level": 2,
+ "id": "tests.fixtures.nesting.Class.method",
+ "html": "",
+ "name": "method",
+ "data-toc-label": "method",
+ "children": [],
+ },
+ ],
+ },
+ ]
diff --git a/tests/test_inventory.py b/tests/test_inventory.py
index 471ed941..ecbb3cd2 100644
--- a/tests/test_inventory.py
+++ b/tests/test_inventory.py
@@ -1,5 +1,7 @@
"""Tests for the inventory module."""
+from __future__ import annotations
+
import sys
from io import BytesIO
from os.path import join
@@ -22,7 +24,7 @@
Inventory([InventoryItem(name="object_path", domain="py", role="obj", uri="page_url#other_anchor")]),
],
)
-def test_sphinx_load_inventory_file(our_inv):
+def test_sphinx_load_inventory_file(our_inv: Inventory) -> None:
"""Perform the 'live' inventory load test."""
buffer = BytesIO(our_inv.format_sphinx())
sphinx_inv = sphinx.InventoryFile.load(buffer, "", join)
@@ -35,10 +37,14 @@ def test_sphinx_load_inventory_file(our_inv):
@pytest.mark.skipif(sys.version_info < (3, 7), reason="using plugins that require Python 3.7")
-def test_sphinx_load_mkdocstrings_inventory_file():
+def test_sphinx_load_mkdocstrings_inventory_file() -> None:
"""Perform the 'live' inventory load test on mkdocstrings own inventory."""
mkdocs_config = load_config()
- build(mkdocs_config)
+ mkdocs_config["plugins"].run_event("startup", command="build", dirty=False)
+ try:
+ build(mkdocs_config)
+ finally:
+ mkdocs_config["plugins"].run_event("shutdown")
own_inv = mkdocs_config["plugins"]["mkdocstrings"].handlers.inventory
with open("site/objects.inv", "rb") as fp:
diff --git a/tests/test_loggers.py b/tests/test_loggers.py
new file mode 100644
index 00000000..1644c0f0
--- /dev/null
+++ b/tests/test_loggers.py
@@ -0,0 +1,64 @@
+"""Tests for the loggers module."""
+
+from unittest.mock import MagicMock
+
+import pytest
+
+from mkdocstrings.loggers import get_logger, get_template_logger
+
+
+@pytest.mark.parametrize(
+ "kwargs",
+ [
+ {},
+ {"once": False},
+ {"once": True},
+ ],
+)
+def test_logger(kwargs: dict, caplog: pytest.LogCaptureFixture) -> None:
+ """Test logger methods.
+
+ Parameters:
+ kwargs: Keyword arguments passed to the logger methods.
+ """
+ logger = get_logger("mkdocstrings.test")
+ caplog.set_level(0)
+ for _ in range(2):
+ logger.debug("Debug message", **kwargs)
+ logger.info("Info message", **kwargs)
+ logger.warning("Warning message", **kwargs)
+ logger.error("Error message", **kwargs)
+ logger.critical("Critical message", **kwargs)
+ if kwargs.get("once", False):
+ assert len(caplog.records) == 5
+ else:
+ assert len(caplog.records) == 10
+
+
+@pytest.mark.parametrize(
+ "kwargs",
+ [
+ {},
+ {"once": False},
+ {"once": True},
+ ],
+)
+def test_template_logger(kwargs: dict, caplog: pytest.LogCaptureFixture) -> None:
+ """Test template logger methods.
+
+ Parameters:
+ kwargs: Keyword arguments passed to the template logger methods.
+ """
+ logger = get_template_logger()
+ mock = MagicMock()
+ caplog.set_level(0)
+ for _ in range(2):
+ logger.debug(mock, "Debug message", **kwargs)
+ logger.info(mock, "Info message", **kwargs)
+ logger.warning(mock, "Warning message", **kwargs)
+ logger.error(mock, "Error message", **kwargs)
+ logger.critical(mock, "Critical message", **kwargs)
+ if kwargs.get("once", False):
+ assert len(caplog.records) == 5
+ else:
+ assert len(caplog.records) == 10
diff --git a/tests/test_plugin.py b/tests/test_plugin.py
new file mode 100644
index 00000000..3342e2aa
--- /dev/null
+++ b/tests/test_plugin.py
@@ -0,0 +1,71 @@
+"""Tests for the mkdocstrings plugin."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from mkdocs.commands.build import build
+from mkdocs.config import load_config
+
+from mkdocstrings.plugin import MkdocstringsPlugin
+
+if TYPE_CHECKING:
+ from pathlib import Path
+
+
+def test_disabling_plugin(tmp_path: Path) -> None:
+ """Test disabling plugin."""
+ docs_dir = tmp_path / "docs"
+ site_dir = tmp_path / "site"
+ docs_dir.mkdir()
+ site_dir.mkdir()
+ docs_dir.joinpath("index.md").write_text("::: mkdocstrings")
+
+ mkdocs_config = load_config()
+ mkdocs_config["docs_dir"] = str(docs_dir)
+ mkdocs_config["site_dir"] = str(site_dir)
+ mkdocs_config["plugins"]["mkdocstrings"].config["enabled"] = False
+ mkdocs_config["plugins"].run_event("startup", command="build", dirty=False)
+ try:
+ build(mkdocs_config)
+ finally:
+ mkdocs_config["plugins"].run_event("shutdown")
+
+ # make sure the instruction was not processed
+ assert "::: mkdocstrings" in site_dir.joinpath("index.html").read_text()
+
+
+def test_plugin_default_config(tmp_path: Path) -> None:
+ """Test default config options are set for Plugin."""
+ config_file_path = tmp_path / "mkdocs.yml"
+ plugin = MkdocstringsPlugin()
+ errors, warnings = plugin.load_config({}, config_file_path=str(config_file_path))
+ assert errors == []
+ assert warnings == []
+ assert plugin.config == {
+ "handlers": {},
+ "default_handler": "python",
+ "custom_templates": None,
+ "enable_inventory": None,
+ "enabled": True,
+ }
+
+
+def test_plugin_config_custom_templates(tmp_path: Path) -> None:
+ """Test custom_templates option is relative to config file."""
+ config_file_path = tmp_path / "mkdocs.yml"
+ options = {"custom_templates": "docs/templates"}
+ template_dir = tmp_path / options["custom_templates"]
+ # Path must exist or config validation will fail.
+ template_dir.mkdir(parents=True)
+ plugin = MkdocstringsPlugin()
+ errors, warnings = plugin.load_config(options, config_file_path=str(config_file_path))
+ assert errors == []
+ assert warnings == []
+ assert plugin.config == {
+ "handlers": {},
+ "default_handler": "python",
+ "custom_templates": str(template_dir),
+ "enable_inventory": None,
+ "enabled": True,
+ }