diff --git a/.gitattributes b/.gitattributes
index d0f6ad06464..c8acd10815a 100644
--- a/.gitattributes
+++ b/.gitattributes
@@ -51,6 +51,8 @@ tests/roots/test-pycode/cp_1251_coded.py dos
# Non UTF-8 encodings
tests/roots/test-pycode/cp_1251_coded.py working-tree-encoding=windows-1251
+tests/roots/test-root/wrongenc.inc working-tree-encoding=latin-1
+tests/roots/test-warnings/wrongenc.inc working-tree-encoding=latin-1
# Generated files
# https://github.com/github/linguist/blob/master/docs/overrides.md
@@ -62,4 +64,5 @@ tests/roots/test-pycode/cp_1251_coded.py working-tree-encoding=windows-1251
tests/js/fixtures/**/*.js generated
sphinx/search/minified-js/*.js generated
+sphinx/search/_stopwords/ generated
sphinx/themes/bizstyle/static/css3-mediaqueries.js generated
diff --git a/.github/workflows/builddoc.yml b/.github/workflows/builddoc.yml
index 7f8471deecb..aa982884afc 100644
--- a/.github/workflows/builddoc.yml
+++ b/.github/workflows/builddoc.yml
@@ -21,22 +21,22 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[docs]
+ run: uv pip install . --group docs
- name: Render the documentation
run: >
sphinx-build
diff --git a/.github/workflows/create-release.yml b/.github/workflows/create-release.yml
index 6f3ebf264a8..8279f552fbe 100644
--- a/.github/workflows/create-release.yml
+++ b/.github/workflows/create-release.yml
@@ -27,31 +27,27 @@ jobs:
attestations: write # for actions/attest
id-token: write # for actions/attest & PyPI trusted publishing
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install build dependencies (pypa/build, twine)
- run: |
- uv pip install build "twine>=5.1"
- # resolution fails without betterproto
- uv pip install pypi-attestations==0.0.21 betterproto==2.0.0b6
+ run: uv pip install --group package
- name: Build distribution
run: python -m build
- name: Check distribution
- run: |
- twine check dist/*
+ run: twine check dist/*
- name: Create Sigstore attestations for built distributions
uses: actions/attest@v1
@@ -90,39 +86,10 @@ jobs:
name: attestation-bundles
path: /tmp/attestation-bundles/
- - name: Mint PyPI API token
- id: mint-token
- uses: actions/github-script@v7
- with:
- # language=JavaScript
- script: |
- // retrieve the ambient OIDC token
- const oidc_request_token = process.env.ACTIONS_ID_TOKEN_REQUEST_TOKEN;
- const oidc_request_url = process.env.ACTIONS_ID_TOKEN_REQUEST_URL;
- const oidc_resp = await fetch(`${oidc_request_url}&audience=pypi`, {
- headers: {Authorization: `bearer ${oidc_request_token}`},
- });
- const oidc_token = (await oidc_resp.json()).value;
-
- // exchange the OIDC token for an API token
- const mint_resp = await fetch('https://pypi.org/_/oidc/github/mint-token', {
- method: 'post',
- body: `{"token": "${oidc_token}"}` ,
- headers: {'Content-Type': 'application/json'},
- });
- const api_token = (await mint_resp.json()).token;
-
- // mask the newly minted API token, so that we don't accidentally leak it
- core.setSecret(api_token)
- core.setOutput('api-token', api_token)
-
- name: Upload to PyPI
env:
TWINE_NON_INTERACTIVE: "true"
- TWINE_USERNAME: "__token__"
- TWINE_PASSWORD: "${{ steps.mint-token.outputs.api-token }}"
- run: |
- twine upload dist/* --attestations
+ run: twine upload dist/* --attestations
github-release:
runs-on: ubuntu-latest
@@ -132,12 +99,12 @@ jobs:
permissions:
contents: write # for softprops/action-gh-release to create GitHub release
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Get release version
id: get_version
- uses: actions/github-script@v7
+ uses: actions/github-script@v8
with:
script: core.setOutput('version', context.ref.replace("refs/tags/v", ""))
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
index a3b5cf7ae52..f4f950a05cc 100644
--- a/.github/workflows/lint.yml
+++ b/.github/workflows/lint.yml
@@ -23,7 +23,7 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
@@ -42,20 +42,20 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install ".[lint,test]"
+ run: uv pip install -r pyproject.toml --group package --group test --group types
- name: Type check with mypy
run: mypy
@@ -63,20 +63,20 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install ".[lint,test]"
+ run: uv pip install -r pyproject.toml --group package --group test --group types
- name: Type check with pyright
run: pyright
@@ -84,20 +84,20 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install --upgrade sphinx-lint
+ run: uv pip install --group lint
- name: Lint documentation with sphinx-lint
run: make doclinter
@@ -105,21 +105,39 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install --upgrade twine build
+ run: uv pip install --group package
- name: Lint with twine
run: |
python -m build .
twine check dist/*
+
+ prettier:
+ runs-on: ubuntu-latest
+
+ steps:
+ - uses: actions/checkout@v5
+ - name: Set up Node.js
+ uses: actions/setup-node@v5
+ with:
+ node-version: "20"
+ cache: "npm"
+ - run: >
+ npx prettier@3.5
+ --check
+ "sphinx/themes/**/*.js"
+ "!sphinx/themes/bizstyle/static/css3-mediaqueries*.js"
+ "tests/js/**/*.{js,mjs}"
+ "!tests/js/fixtures/**"
diff --git a/.github/workflows/lock.yml b/.github/workflows/lock.yml
index e9d58e4896a..04831b72dcc 100644
--- a/.github/workflows/lock.yml
+++ b/.github/workflows/lock.yml
@@ -15,7 +15,7 @@ jobs:
issues: write
pull-requests: write
steps:
- - uses: actions/github-script@v7
+ - uses: actions/github-script@v8
with:
retries: 3
# language=JavaScript
diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml
index 1758254c633..26fa5fb5ab0 100644
--- a/.github/workflows/main.yml
+++ b/.github/workflows/main.yml
@@ -37,16 +37,19 @@ jobs:
- "3.11"
- "3.12"
- "3.13"
+ - "3.13t"
+ - "3.14"
+ - "3.14t"
docutils:
- "0.20"
- - "0.21"
-# include:
-# # test every supported Docutils version for the latest supported Python
-# - python: "3.13"
-# docutils: "0.20"
+ - "0.22"
+ include:
+ # test every supported Docutils version for the latest supported Python
+ - python: "3.14"
+ docutils: "0.21"
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Mount the test roots as read-only
@@ -54,7 +57,7 @@ jobs:
mkdir -p ./tests/roots-read-only
sudo mount -v --bind --read-only ./tests/roots ./tests/roots-read-only
- name: Set up Python ${{ matrix.python }}
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: ${{ matrix.python }}
- name: Check Python version
@@ -62,14 +65,18 @@ jobs:
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
+ env:
+ UV_PYTHON: "python${{ matrix.python }}"
- name: Install Docutils ${{ matrix.docutils }}
run: uv pip install --upgrade "docutils~=${{ matrix.docutils }}.0"
+ env:
+ UV_PYTHON: "python${{ matrix.python }}"
- name: Test with pytest
run: python -m pytest -n logical --dist=worksteal -vv --durations 25
env:
@@ -83,13 +90,13 @@ jobs:
fail-fast: false
matrix:
python:
- - "3.14"
+ - "3.15"
docutils:
- "0.20"
- - "0.21"
+ - "0.22"
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python }} (deadsnakes)
@@ -103,7 +110,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- python -m pip install .[test]
+ python -m pip install . --group test
- name: Install Docutils ${{ matrix.docutils }}
run: python -m pip install --upgrade "docutils~=${{ matrix.docutils }}.0"
- name: Test with pytest
@@ -111,38 +118,6 @@ jobs:
env:
PYTHONWARNINGS: "error" # treat all warnings as errors
- free-threaded:
- runs-on: ubuntu-latest
- name: Python ${{ matrix.python }} (free-threaded)
- timeout-minutes: 15
- strategy:
- fail-fast: false
- matrix:
- python:
- - "3.13"
-
- steps:
- - uses: actions/checkout@v4
- with:
- persist-credentials: false
- - name: Set up Python ${{ matrix.python }} (deadsnakes)
- uses: deadsnakes/action@v3.2.0
- with:
- python-version: ${{ matrix.python }}
- nogil: true
- - name: Check Python version
- run: python --version --version
- - name: Install graphviz
- run: sudo apt-get install --no-install-recommends --yes graphviz
- - name: Install dependencies
- run: |
- python -m pip install --upgrade pip
- python -m pip install .[test]
- - name: Test with pytest
- run: python -m pytest -n logical --dist=worksteal -vv --durations 25
- env:
- PYTHONWARNINGS: "error" # treat all warnings as errors
-
deadsnakes-free-threaded:
runs-on: ubuntu-latest
name: Python ${{ matrix.python }} (free-threaded)
@@ -151,10 +126,10 @@ jobs:
fail-fast: false
matrix:
python:
- - "3.14"
+ - "3.15"
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python ${{ matrix.python }} (deadsnakes)
@@ -169,7 +144,7 @@ jobs:
- name: Install dependencies
run: |
python -m pip install --upgrade pip
- python -m pip install .[test]
+ python -m pip install . --group test
- name: Test with pytest
run: python -m pytest -n logical --dist=worksteal -vv --durations 25
env:
@@ -181,11 +156,19 @@ jobs:
timeout-minutes: 15
steps:
- - uses: actions/checkout@v4
+ # https://github.com/actions/runner-images/issues/8755
+ # On standard runners, the D: drive is much faster.
+ - name: Set %TMP% and %TEMP% to D:\\Temp
+ run: |
+ mkdir "D:\\Tmp"
+ echo "TMP=D:\\Tmp" >> $env:GITHUB_ENV
+ echo "TEMP=D:\\Tmp" >> $env:GITHUB_ENV
+
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Check Python version
@@ -193,12 +176,12 @@ jobs:
- name: Install graphviz
run: choco install --no-progress graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
- name: Test with pytest
run: python -m pytest -vv --durations 25
env:
@@ -210,11 +193,11 @@ jobs:
timeout-minutes: 15
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Check Python version
@@ -222,12 +205,12 @@ jobs:
- name: Install graphviz
run: brew install graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
- name: Test with pytest
run: python -m pytest -vv --durations 25
env:
@@ -245,11 +228,11 @@ jobs:
mkdir /tmp/epubcheck && cd /tmp/epubcheck
wget --no-verbose https://github.com/w3c/epubcheck/releases/download/v${EPUBCHECK_VERSION}/epubcheck-${EPUBCHECK_VERSION}.zip
unzip epubcheck-${EPUBCHECK_VERSION}.zip
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Check Python version
@@ -257,12 +240,12 @@ jobs:
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
- name: Install Docutils' HEAD
run: uv pip install "docutils @ git+https://repo.or.cz/docutils.git#subdirectory=docutils"
- name: Test with pytest
@@ -278,11 +261,11 @@ jobs:
timeout-minutes: 15
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Check Python version
@@ -290,13 +273,13 @@ jobs:
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
run: |
- uv pip install .[test] --resolution lowest-direct
+ uv pip install . --group test --resolution lowest-direct
uv pip install alabaster==1.0.0
- name: Test with pytest
run: python -m pytest -n logical --dist=worksteal -vv --durations 25
@@ -311,22 +294,24 @@ jobs:
image: ghcr.io/sphinx-doc/sphinx-ci
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Check Python version
run: python --version --version
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test]
+ run: uv pip install . --group test
+ - name: Install Docutils' HEAD
+ run: uv pip install "docutils @ git+https://repo.or.cz/docutils.git#subdirectory=docutils"
- name: Test with pytest
run: python -m pytest -vv --durations 25
env:
@@ -340,11 +325,11 @@ jobs:
timeout-minutes: 15
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Check Python version
@@ -352,12 +337,12 @@ jobs:
- name: Install graphviz
run: sudo apt-get install --no-install-recommends --yes graphviz
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install .[test] pytest-cov
+ run: uv pip install . --group test pytest-cov
- name: Test with pytest
run: python -m pytest -vv --cov . --cov-append --cov-config pyproject.toml
env:
diff --git a/.github/workflows/nodejs.yml b/.github/workflows/nodejs.yml
index 84727288fde..267c5e6372a 100644
--- a/.github/workflows/nodejs.yml
+++ b/.github/workflows/nodejs.yml
@@ -33,11 +33,11 @@ jobs:
node-version: "20"
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Use Node.js ${{ env.node-version }}
- uses: actions/setup-node@v4
+ uses: actions/setup-node@v5
with:
node-version: ${{ env.node-version }}
cache: "npm"
diff --git a/.github/workflows/transifex.yml b/.github/workflows/transifex.yml
index 09437cb7ece..af89ce146f7 100644
--- a/.github/workflows/transifex.yml
+++ b/.github/workflows/transifex.yml
@@ -23,11 +23,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Install transifex client
@@ -36,12 +36,12 @@ jobs:
curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash
shell: bash
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install --upgrade babel jinja2
+ run: uv pip install --group translations
- name: Extract translations from source code
run: python utils/babel_runner.py extract
- name: Push translations to transifex.com
@@ -59,11 +59,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- - uses: actions/checkout@v4
+ - uses: actions/checkout@v5
with:
persist-credentials: false
- name: Set up Python
- uses: actions/setup-python@v5
+ uses: actions/setup-python@v6
with:
python-version: "3"
- name: Install transifex client
@@ -72,12 +72,12 @@ jobs:
curl -o- https://raw.githubusercontent.com/transifex/cli/master/install.sh | bash
shell: bash
- name: Install uv
- uses: astral-sh/setup-uv@v5
+ uses: astral-sh/setup-uv@v6
with:
version: latest
enable-cache: false
- name: Install dependencies
- run: uv pip install --upgrade babel jinja2
+ run: uv pip install --group translations
- name: Extract translations from source code
run: python utils/babel_runner.py extract
- name: Pull translations from transifex.com
diff --git a/.gitignore b/.gitignore
index 35fd23178f5..5a50535097e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -3,6 +3,7 @@
*.so
*.swp
+.auto/
.dir-locals.el
.cache/
.idea
diff --git a/.prettierrc.toml b/.prettierrc.toml
new file mode 100644
index 00000000000..1799612bfdd
--- /dev/null
+++ b/.prettierrc.toml
@@ -0,0 +1,2 @@
+# https://prettier.io/docs/options
+experimentalOperatorPosition = "start"
diff --git a/.ruff.toml b/.ruff.toml
index f82928eca65..8011e7ffc55 100644
--- a/.ruff.toml
+++ b/.ruff.toml
@@ -9,6 +9,9 @@ extend-exclude = [
"tests/roots/test-pycode/cp_1251_coded.py", # Not UTF-8
]
+[per-file-target-version]
+"tests/roots/test-ext-autodoc/target/pep695.py" = "py312"
+
[format]
preview = true
quote-style = "single"
diff --git a/AUTHORS.rst b/AUTHORS.rst
index ff92ab7eab7..707c77aec04 100644
--- a/AUTHORS.rst
+++ b/AUTHORS.rst
@@ -63,6 +63,7 @@ Contributors
* Hong Xu -- svg support in imgmath extension and various bug fixes
* Horst Gutmann -- internationalization support
* Hugo van Kemenade -- support FORCE_COLOR and NO_COLOR
+* Ian Hunt-Isaak -- typealias reference improvement
* Ian Lee -- quickstart improvements
* Jacob Mason -- websupport library (GSOC project)
* James Addison -- linkcheck and HTML search improvements
@@ -83,10 +84,12 @@ Contributors
* Louis Maddox -- better docstrings
* Łukasz Langa -- partial support for autodoc
* Marco Buttu -- doctest extension (pyversion option)
+* Mark Ostroth -- semantic HTML contributions
* Martin Hans -- autodoc improvements
* Martin Larralde -- additional napoleon admonitions
* Martin Liška -- option directive and role improvements
* Martin Mahner -- nature theme
+* Martin Matouš -- initial support for PEP 695
* Matthew Fernandez -- todo extension fix
* Matthew Woodcraft -- text output improvements
* Matthias Geier -- style improvements
@@ -102,11 +105,16 @@ Contributors
* Slawek Figiel -- additional warning suppression
* Stefan Seefeld -- toctree improvements
* Stefan van der Walt -- autosummary extension
+* Steve Piercy -- documentation improvements
+* Szymon Karpinski -- intersphinx improvements
* \T. Powers -- HTML output improvements
* Taku Shimizu -- epub3 builder
+* Tamika Nomara -- bug fixes
* Thomas Lamb -- linkcheck builder
* Thomas Waldmann -- apidoc module fixes
+* Till Hoffmann -- doctest option to exit after first failed test
* Tim Hoffmann -- theme improvements
+* Victor Wheeler -- documentation improvements
* Vince Salvino -- JavaScript search improvements
* Will Maier -- directory HTML builder
* Zac Hatfield-Dodds -- doctest reporting improvements, intersphinx performance
diff --git a/CHANGES.rst b/CHANGES.rst
index c257b3b11b1..cd36d83957b 100644
--- a/CHANGES.rst
+++ b/CHANGES.rst
@@ -4,17 +4,143 @@ Release 8.3.0 (in development)
Dependencies
------------
+* #13786: Support `Docutils 0.22`_. Patch by Adam Turner.
+
+ .. _Docutils 0.22: https://docutils.sourceforge.io/RELEASE-NOTES.html#release-0-22-2026-07-29
+
Incompatible changes
--------------------
+* #13639: :py:meth:`!SphinxComponentRegistry.create_source_parser` no longer
+ has an *app* parameter, instead taking *config* and *env*.
+ Patch by Adam Turner.
+
Deprecated
----------
+* 13627: Deprecate remaining public :py:attr:`!.app` attributes,
+ including ``builder.app``, ``env.app``, ``events.app``,
+ and ``SphinxTransform.`app``.
+ Patch by Adam Turner.
+* #13637: Deprecate the :py:meth:`!set_application` method
+ of :py:class:`~sphinx.parsers.Parser` objects.
+ Patch by Adam Turner.
+* #13644: Deprecate the :py:attr:`!Parser.config` and :py:attr:`!env` attributes.
+ Patch by Adam Turner.
+* #13665: Deprecate support for non-UTF 8 source encodings,
+ scheduled for removal in Sphinx 10.
+ Patch by Adam Turner.
+* #13679: Non-decodable characters in source files will raise an error in Sphinx 9.
+ Currently, such bytes are replaced with '?' along with logging a warning.
+ Patch by Adam Turner.
+* #13682: Deprecate :py:mod:`!sphinx.io`.
+ Sphinx no longer uses the :py:mod:`!sphinx.io` classes,
+ having replaced them with standard Python I/O.
+ The entire :py:mod:`!sphinx.io` module will be removed in Sphinx 10.
+ Patch by Adam Turner.
+
Features added
--------------
+* #13332: Add :confval:`doctest_fail_fast` option to exit after the first failed
+ test.
+ Patch by Till Hoffmann.
+* #13439: linkcheck: Permit warning on every redirect with
+ ``linkcheck_allowed_redirects = {}``.
+ Patch by Adam Turner and James Addison.
+* #13497: Support C domain objects in the table of contents.
+* #13500: LaTeX: add support for ``fontawesome6`` package.
+ Patch by Jean-François B.
+* #13509: autodoc: Detect :py:func:`typing_extensions.overload `
+ and :py:func:`~typing.final` decorators.
+ Patch by Spencer Brown.
+* #13535: html search: Update to the latest version of Snowball (v3.0.1).
+ Patch by Adam Turner.
+* #13647: LaTeX: allow more cases of table nesting.
+ Patch by Jean-François B.
+* #13657: LaTeX: support CSS3 length units.
+ Patch by Jean-François B.
+* #13684: intersphinx: Add a file-based cache for remote inventories.
+ The location of the cache directory must not be relied upon externally,
+ as it may change without notice or warning in future releases.
+ Patch by Adam Turner.
+* #13805: LaTeX: add support for ``fontawesome7`` package.
+ Patch by Jean-François B.
+* #13508: Initial support for :pep:`695` type aliases.
+ Patch by Martin Matouš, Jeremy Maitin-Shepard, and Adam Turner.
+
Bugs fixed
----------
+* #13926: multiple py:type directives for the same canonical type no
+ longer result in spurious duplicate object description warnings.
+ Patch by Jeremy Maitin-Shepard.
+* #1327: LaTeX: tables using longtable raise error if
+ :rst:dir:`tabularcolumns` specifies automatic widths
+ (``L``, ``R``, ``C``, or ``J``).
+ Patch by Jean-François B.
+* #3447: LaTeX: when assigning longtable class to table for PDF, it may render
+ "horizontally" and overflow in right margin.
+ Patch by Jean-François B.
+* #8828: LaTeX: adding a footnote to a longtable cell causes table to occupy
+ full width.
+ Patch by Jean-François B.
+* #11498: LaTeX: Table in cell fails to build if it has many rows.
+ Patch by Jean-François B.
+* #11515: LaTeX: longtable does not allow nested table.
+ Patch by Jean-François B.
+* #11973: LaTeX: links in table captions do not work in PDF.
+ Patch by Jean-François B.
+* #12821: LaTeX: URLs/links in section titles should render in PDF.
+ Patch by Jean-François B.
+* #13369: Correctly parse and cross-reference unpacked type annotations.
+ Patch by Alicia Garcia-Raboso.
+* #13528: Add tilde ``~`` prefix support for :rst:role:`py:deco`.
+ Patch by Shengyu Zhang and Adam Turner.
+* #13597: LaTeX: table nested in a merged cell leads to invalid LaTeX mark-up
+ and PDF cannot be built.
+ Patch by Jean-François B.
+* #13619: LaTeX: possible duplicated footnotes in PDF from object signatures
+ (typically if :confval:`latex_show_urls` ``= 'footnote'``).
+ Patch by Jean-François B.
+* #13635: LaTeX: if a cell contains a table, row coloring is turned off for
+ the next table cells.
+ Patch by Jean-François B.
+* #13685: gettext: Correctly ignore trailing backslashes.
+ Patch by Bénédikt Tran.
+* #13712: intersphinx: Don't add "v" prefix to non-numeric versions.
+ Patch by Szymon Karpinski.
+* #13688: HTML builder: Replace ```` with
+ ```` for attribute type annotations
+ to improve `semantic HTML structure
+ `__.
+ Patch by Mark Ostroth.
+* #13812 (discussion): LaTeX: long :rst:dir:`confval` value does not wrap at
+ spaces in PDF.
+ Patch by Jean-François B.
+* #10785: Autodoc: Allow type aliases defined in the project to be properly
+ cross-referenced when used as type annotations. This makes it possible
+ for objects documented as ``:py:data:`` to be hyperlinked in function signatures.
+* #13858: doctest: doctest blocks are now correctly added to a group defined by the
+ configuration variable ``doctest_test_doctest_blocks``.
+* #13885: Coverage builder: Fix TypeError when warning about missing modules.
+ Patch by Damien Ayers.
+* #13929: Duplicate equation label warnings now have a new warning
+ sub-type, ``ref.equation``.
+ Patch by Jared Dillard.
+* #13935: autoclass: parent class members no longer considered
+ directly defined in certain cases, depending on autodoc processing
+ order.
+ Patch by Jeremy Maitin-Shepard.
+* #13939: LaTeX: page break can separate admonition title from contents.
+ Patch by Jean-François B.
+* #14004: Fix :confval:`autodoc_type_aliases` when they appear in PEP 604
+ union syntax (``Alias | Type``).
+ Patch by Tamika Nomara.
+* #14059: LaTeX: Footnotes cause pdflatex error with French language
+ (since late June 2025 upstream change to LaTeX ``babel-french``).
+ Patch by Jean-François B.
+
+
Testing
-------
diff --git a/doc/changes/5.3.rst b/doc/changes/5.3.rst
index b2a2e5a78f1..171b0792bbe 100644
--- a/doc/changes/5.3.rst
+++ b/doc/changes/5.3.rst
@@ -8,7 +8,10 @@ Release 5.3.0 (released Oct 16, 2022)
* #10759: LaTeX: add :confval:`latex_table_style` and support the
``'booktabs'``, ``'borderless'``, and ``'colorrows'`` styles.
- (thanks to Stefan Wiehler for initial pull requests #6666, #6671)
+ (thanks to Stefan Wiehler for initial pull requests #6666, #6671).
+ Using the ``'booktabs'`` style solves #6740 (Removing LaTeX
+ column borders for automatic colspec).
+ Patch by Jean-François B.
* #10840: One can cross-reference including an option value like
``:option:`--module=foobar```, ``:option:`--module[=foobar]```,
or ``:option:`--module foobar```.
diff --git a/doc/changes/7.3.rst b/doc/changes/7.3.rst
index b544a722041..c9395c18c4a 100644
--- a/doc/changes/7.3.rst
+++ b/doc/changes/7.3.rst
@@ -86,7 +86,7 @@ Dependencies
* #11858: Increase the minimum supported version of Alabaster to 0.7.14.
Patch by Adam Turner.
-* #11411: Support `Docutils 0.21`_. Patch by Adam Turner.
+* #12267: Support `Docutils 0.21`_. Patch by Adam Turner.
.. _Docutils 0.21: https://docutils.sourceforge.io/RELEASE-NOTES.html#release-0-21-2024-04-09
* #12012: Use ``types-docutils`` instead of ``docutils-stubs``.
diff --git a/doc/conf.py b/doc/conf.py
index 9cf2f9b4856..ef28f92ff1a 100644
--- a/doc/conf.py
+++ b/doc/conf.py
@@ -185,7 +185,11 @@
('js:func', 'number'),
('js:func', 'string'),
('py:attr', 'srcline'),
+ # sphinx.application.Sphinx.connect
('py:class', '_AutodocProcessDocstringListener'),
+ # sphinx.application.Sphinx.connect
+ ('py:class', '_AutodocProcessSignatureListener'),
+ ('py:class', '_AutodocSkipMemberListener'), # sphinx.application.Sphinx.connect
('py:class', '_ConfigRebuild'), # sphinx.application.Sphinx.add_config_value
# sphinx.application.Sphinx.add_html_math_renderer
('py:class', '_MathsBlockRenderers'),
@@ -231,6 +235,7 @@
('py:class', 'pygments.lexer.Lexer'),
('py:class', 'sphinx.directives.ObjDescT'),
('py:class', 'sphinx.domains.IndexEntry'),
+ # sphinx.application.Sphinx.add_autodocumenter
('py:class', 'sphinx.ext.autodoc.Documenter'),
('py:class', 'sphinx.errors.NoUri'),
('py:class', 'sphinx.roles.XRefRole'),
@@ -297,14 +302,12 @@ def linkify_issues_in_changelog(
) -> None:
"""Linkify issue references like #123 in changelog to GitHub."""
if docname == 'changes':
+ linkified_changelog = re.sub(r'(?:PR)?#([0-9]+)\b', _linkify, source[0])
+ source[0] = linkified_changelog
- def linkify(match: re.Match[str]) -> str:
- url = 'https://github.com/sphinx-doc/sphinx/issues/' + match[1]
- return f'`{match[0]} <{url}>`_'
-
- linkified_changelog = re.sub(r'(?:PR)?#([0-9]+)\b', linkify, source[0])
- source[0] = linkified_changelog
+def _linkify(match: re.Match[str], /) -> str:
+ return f'`{match[0]} `__'
REDIRECT_TEMPLATE = """
diff --git a/doc/development/html_themes/templating.rst b/doc/development/html_themes/templating.rst
index e7c1d11f453..77b43882f86 100644
--- a/doc/development/html_themes/templating.rst
+++ b/doc/development/html_themes/templating.rst
@@ -6,6 +6,32 @@
Templating
==========
+What Is Templating?
+-------------------
+
+Templating is a method of generating HTML pages by combining static templates
+with variable data.
+The template files contain the static parts of the desired HTML output
+and include special syntax describing how variable content will be inserted.
+For example, this can be used to insert the current date in the footer of each page,
+or to surround the main content of the document with a scaffold of HTML for layout
+and formatting purposes.
+Doing so only requires an understanding of HTML and the templating syntax.
+Knowledge of Python can be helpful, but is not required.
+
+Templating uses an inheritance mechanism which allows child templates files
+(e.g. in a theme) to override as much (or as little) of their 'parents' as desired.
+Likewise, content authors can use their own local templates to override as much (or
+as little) of the theme templates as desired.
+
+The result is that the Sphinx core, without needing to be changed, provides basic
+HTML generation, independent of the structure and appearance of the final output,
+while granting a great deal of flexibility to theme and content authors.
+
+
+Sphinx Templating
+-----------------
+
Sphinx uses the `Jinja `_ templating engine
for its HTML templates. Jinja is a text-based engine, inspired by Django
templates, so anyone having used Django will already be familiar with it. It
diff --git a/doc/development/tutorials/examples/autodoc_intenum.py b/doc/development/tutorials/examples/autodoc_intenum.py
index 2dd8d6324e6..bb36ea0e6bf 100644
--- a/doc/development/tutorials/examples/autodoc_intenum.py
+++ b/doc/development/tutorials/examples/autodoc_intenum.py
@@ -4,6 +4,7 @@
from typing import TYPE_CHECKING
from sphinx.ext.autodoc import ClassDocumenter, bool_option
+from sphinx.ext.autodoc._generate import _docstring_source_name
if TYPE_CHECKING:
from typing import Any
@@ -11,47 +12,53 @@
from docutils.statemachine import StringList
from sphinx.application import Sphinx
+ from sphinx.ext.autodoc import Documenter
from sphinx.util.typing import ExtensionMetadata
class IntEnumDocumenter(ClassDocumenter):
objtype = 'intenum'
directivetype = ClassDocumenter.objtype
- priority = 10 + ClassDocumenter.priority
+ priority = 25
option_spec = dict(ClassDocumenter.option_spec)
option_spec['hex'] = bool_option
@classmethod
def can_document_member(
- cls, member: Any, membername: str, isattr: bool, parent: Any
+ cls, member: Any, membername: str, isattr: bool, parent: Documenter
) -> bool:
try:
return issubclass(member, IntEnum)
except TypeError:
return False
- def add_directive_header(self, sig: str) -> None:
- super().add_directive_header(sig)
- self.add_line(' :final:', self.get_sourcename())
+ def add_line(self, line: str, source: str = '', *lineno: int, indent: str) -> None:
+ """Append one line of generated reST to the output."""
+ analyzer_source = '' if self.analyzer is None else self.analyzer.srcname
+ source_name = _docstring_source_name(props=self.props, source=analyzer_source)
+ if line.strip(): # not a blank line
+ self.result.append(indent + line, source_name, *lineno)
+ else:
+ self.result.append('', source_name, *lineno)
- def add_content(
- self,
- more_content: StringList | None,
- ) -> None:
- super().add_content(more_content)
+ def add_directive_header(self, *, indent: str) -> None:
+ super().add_directive_header(indent=indent)
+ self.add_line(' :final:', indent=indent)
- source_name = self.get_sourcename()
- enum_object: IntEnum = self.object
+ def add_content(self, more_content: StringList | None, *, indent: str) -> None:
+ super().add_content(more_content, indent=indent)
+
+ enum_object: IntEnum = self.props._obj
use_hex = self.options.hex
- self.add_line('', source_name)
+ self.add_line('', indent=indent)
for the_member_name, enum_member in enum_object.__members__.items(): # type: ignore[attr-defined]
the_member_value = enum_member.value
if use_hex:
the_member_value = hex(the_member_value)
- self.add_line(f'**{the_member_name}**: {the_member_value}', source_name)
- self.add_line('', source_name)
+ self.add_line(f'**{the_member_name}**: {the_member_value}', indent=indent)
+ self.add_line('', indent=indent)
def setup(app: Sphinx) -> ExtensionMetadata:
diff --git a/doc/development/tutorials/examples/recipe.py b/doc/development/tutorials/examples/recipe.py
index 9848629216a..da52fa2df67 100644
--- a/doc/development/tutorials/examples/recipe.py
+++ b/doc/development/tutorials/examples/recipe.py
@@ -165,7 +165,7 @@ def add_recipe(self, signature, ingredients):
name,
signature,
'Recipe',
- self.env.docname,
+ self.env.current_document.docname,
anchor,
0,
))
diff --git a/doc/development/tutorials/examples/todo.py b/doc/development/tutorials/examples/todo.py
index a8aa1ec4a1d..c9993eda198 100644
--- a/doc/development/tutorials/examples/todo.py
+++ b/doc/development/tutorials/examples/todo.py
@@ -44,7 +44,7 @@ def run(self):
self.env.todo_all_todos = []
self.env.todo_all_todos.append({
- 'docname': self.env.docname,
+ 'docname': self.env.current_document.docname,
'lineno': self.lineno,
'todo': todo_node.deepcopy(),
'target': targetnode,
diff --git a/doc/development/tutorials/extending_build.rst b/doc/development/tutorials/extending_build.rst
index 4d3606a0a33..9894d656fed 100644
--- a/doc/development/tutorials/extending_build.rst
+++ b/doc/development/tutorials/extending_build.rst
@@ -143,7 +143,7 @@ Looking first at the ``TodolistDirective`` directive:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 24-27
+ :pyobject: TodolistDirective
It's very simple, creating and returning an instance of our ``todolist`` node
class. The ``TodolistDirective`` directive itself has neither content nor
@@ -153,7 +153,7 @@ directive:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 30-53
+ :pyobject: TodoDirective
Several important things are covered here. First, as you can see, we're now
subclassing the :class:`~sphinx.util.docutils.SphinxDirective` helper class
@@ -168,16 +168,16 @@ new unique integer on each call and therefore leads to unique target names. The
target node is instantiated without any text (the first two arguments).
On creating admonition node, the content body of the directive are parsed using
-``self.state.nested_parse``. The first argument gives the content body, and
-the second one gives content offset. The third argument gives the parent node
-of parsed result, in our case the ``todo`` node. Following this, the ``todo``
-node is added to the environment. This is needed to be able to create a list of
-all todo entries throughout the documentation, in the place where the author
-puts a ``todolist`` directive. For this case, the environment attribute
-``todo_all_todos`` is used (again, the name should be unique, so it is prefixed
-by the extension name). It does not exist when a new environment is created, so
-the directive must check and create it if necessary. Various information about
-the todo entry's location are stored along with a copy of the node.
+``self.parse_content_to_nodes()``.
+Following this, the ``todo`` node is added to the environment.
+This is needed to be able to create a list of all todo entries throughout
+the documentation, in the place where the author puts a ``todolist`` directive.
+For this case, the environment attribute ``todo_all_todos`` is used
+(again, the name should be unique, so it is prefixed by the extension name).
+It does not exist when a new environment is created, so the directive must
+check and create it if necessary.
+Various information about the todo entry's location are stored along with
+a copy of the node.
In the last line, the nodes that should be put into the doctree are returned:
the target node and the admonition node.
@@ -211,7 +211,7 @@ the :event:`env-purge-doc` event:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 56-61
+ :pyobject: purge_todos
Since we store information from source files in the environment, which is
persistent, it may become out of date when the source file changes. Therefore,
@@ -229,7 +229,7 @@ to be merged:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 64-68
+ :pyobject: merge_todos
The other handler belongs to the :event:`doctree-resolved` event:
@@ -237,12 +237,13 @@ The other handler belongs to the :event:`doctree-resolved` event:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 71-113
+ :pyobject: process_todo_nodes
-The :event:`doctree-resolved` event is emitted at the end of :ref:`phase 3
-(resolving) ` and allows custom resolving to be done. The handler
-we have written for this event is a bit more involved. If the
-``todo_include_todos`` config value (which we'll describe shortly) is false,
+The :event:`doctree-resolved` event is emitted for each document that is
+about to be written at the end of :ref:`phase 3 (resolving) `
+and allows custom resolving to be done on that document.
+The handler we have written for this event is a bit more involved.
+If the ``todo_include_todos`` config value (which we'll describe shortly) is false,
all ``todo`` and ``todolist`` nodes are removed from the documents. If not,
``todo`` nodes just stay where and how they are. ``todolist`` nodes are
replaced by a list of todo entries, complete with backlinks to the location
@@ -266,17 +267,17 @@ the other parts of our extension. Let's look at our ``setup`` function:
.. literalinclude:: examples/todo.py
:language: python
:linenos:
- :lines: 116-
+ :pyobject: setup
The calls in this function refer to the classes and functions we added earlier.
What the individual calls do is the following:
* :meth:`~Sphinx.add_config_value` lets Sphinx know that it should recognize the
- new *config value* ``todo_include_todos``, whose default value should be
- ``False`` (this also tells Sphinx that it is a boolean value).
+ new *config value* ``todo_include_todos``, whose default value is ``False``
+ (which also tells Sphinx that it is a boolean value).
- If the third argument was ``'html'``, HTML documents would be full rebuild if the
- config value changed its value. This is needed for config values that
+ If the third argument was ``'html'``, HTML documents would be fully rebuilt
+ if the config value changed its value. This is needed for config values that
influence reading (build :ref:`phase 1 (reading) `).
* :meth:`~Sphinx.add_node` adds a new *node class* to the build system. It also
diff --git a/doc/extdev/deprecated.rst b/doc/extdev/deprecated.rst
index ad05b054d99..484f52cb7e7 100644
--- a/doc/extdev/deprecated.rst
+++ b/doc/extdev/deprecated.rst
@@ -22,6 +22,36 @@ The following is a list of deprecated interfaces.
- Removed
- Alternatives
+ * - ``sphinx.io`` (entire module)
+ - 8.3
+ - 10.0
+ - ``docutils.io`` or standard Python I/O
+
+ * - ``sphinx.builders.Builder.app``
+ - 8.3
+ - 10.0
+ - N/A
+
+ * - ``sphinx.environment.BuildEnvironment.app``
+ - 8.3
+ - 10.0
+ - N/A
+
+ * - ``sphinx.transforms.Transform.app``
+ - 8.3
+ - 10.0
+ - N/A
+
+ * - ``sphinx.transforms.post_transforms.SphinxPostTransform.app``
+ - 8.3
+ - 10.0
+ - N/A
+
+ * - ``sphinx.events.EventManager.app``
+ - 8.3
+ - 10.0
+ - N/A
+
* - ``sphinx.builders.singlehtml.SingleFileHTMLBuilder.fix_refuris``
- 8.2
- 10.0
diff --git a/doc/extdev/event_callbacks.rst b/doc/extdev/event_callbacks.rst
index 04eae51be1d..aec9a47e848 100644
--- a/doc/extdev/event_callbacks.rst
+++ b/doc/extdev/event_callbacks.rst
@@ -70,8 +70,8 @@ Below is an overview of the core event that happens during a build.
14. apply post-transforms (by priority): docutils.document -> docutils.document
15. event.doctree-resolved(app, doctree, docname)
- In the event that any reference nodes fail to resolve, the following may emit:
- - event.missing-reference(env, node, contnode)
- - event.warn-missing-reference(domain, node)
+ - event.missing-reference(app, env, node, contnode)
+ - event.warn-missing-reference(app, domain, node)
16. Generate output files
17. event.build-finished(app, exception)
diff --git a/doc/extdev/markupapi.rst b/doc/extdev/markupapi.rst
index 7aa632446da..184bd2bd8e4 100644
--- a/doc/extdev/markupapi.rst
+++ b/doc/extdev/markupapi.rst
@@ -173,9 +173,9 @@ The methods are used as follows:
def run(self) -> list[Node]:
container = docutils.nodes.Element()
# either
- nested_parse_with_titles(self.state, self.result, container)
+ nested_parse_with_titles(self.state, self.result, container, self.content_offset)
# or
- self.state.nested_parse(self.result, 0, container)
+ self.state.nested_parse(self.result, self.content_offset, container)
parsed = container.children
return parsed
diff --git a/doc/internals/contributing.rst b/doc/internals/contributing.rst
index 4b8ca84a945..90d7600866d 100644
--- a/doc/internals/contributing.rst
+++ b/doc/internals/contributing.rst
@@ -138,6 +138,10 @@ These are the basic steps needed to start developing on Sphinx.
#. Wait for a core developer or contributor to review your changes.
+ You may be asked to address comments on the review. If so, please avoid
+ force pushing to the branch. Sphinx uses the *squash merge* strategy when
+ merging PRs, so follow-up commits will all be combined.
+
Coding style
~~~~~~~~~~~~
@@ -201,7 +205,7 @@ You can also test by installing dependencies in your local environment:
.. code-block:: shell
- pip install .[test]
+ pip install . --group test
To run JavaScript tests, use :program:`npm`:
@@ -337,13 +341,15 @@ Updating generated files
------------------------
* JavaScript stemming algorithms in :file:`sphinx/search/non-minified-js/*.js`
- are generated using `snowball `_
- by cloning the repository, executing ``make dist_libstemmer_js`` and then
- unpacking the tarball which is generated in :file:`dist` directory.
+ and stopword files in :file:`sphinx/search/_stopwords/`
+ are generated from the `Snowball project`_
+ by running :file:`utils/generate_snowball.py`.
Minified files in :file:`sphinx/search/minified-js/*.js` are generated from
- non-minified ones using :program:`uglifyjs` (installed via npm), with ``-m``
- option to enable mangling.
+ non-minified ones using :program:`uglifyjs` (installed via npm).
+ See :file:`sphinx/search/minified-js/README.rst`.
+
+ .. _Snowball project: https://snowballstem.org/
* The :file:`searchindex.js` files found in
the :file:`tests/js/fixtures/*` directories
diff --git a/doc/latex.rst b/doc/latex.rst
index fce61480941..edb2f0c18cb 100644
--- a/doc/latex.rst
+++ b/doc/latex.rst
@@ -500,7 +500,7 @@ Keys that don't need to be overridden unless in special cases are:
.. hint::
If the key value is set to
- :code-tex:`r'\\newcommand\sphinxbackoftitlepage{}\\sphinxmaketitle'`, then ```` will be
typeset on back of title page (``'manual'`` docclass only).
@@ -1006,18 +1006,20 @@ The color used in the above example is available from having passed the
``iconpackage``
- The name of the LaTeX package used for icons in the admonition titles. It
- defaults to ``fontawesome5`` or to fall-back ``fontawesome``. In case
- neither one is available the option value will automatically default to
- ``none``, which means that no attempt at loading a package is done.
- Independently of this setting, arbitrary LaTeX code can be associated to
- each admonition type via ``div._icon-title`` keys which are
- described in the :ref:`additionalcss` section. If these keys are not
- used, Sphinx will either apply its default choices of icons (if
- ``fontawesome{5,}`` is available) or not draw the icon at all. Notice that
- if fall-back ``fontawesome`` is used the common icon for :dudir:`caution`
- and :dudir:`danger` will default to "bolt" not "radiation", which is only
- found in ``fontawesome5``.
+ The name of the LaTeX package used for rendering icons in the admonition
+ titles. Its default is set dynamically to either ``fontawesome7``,
+ ``fontawesome6``,
+ ``fontawesome5``, ``fontawesome``, or ``none``, in decreasing order of
+ priority and depending on whether
+ packages with those names exist in the used LaTeX installation. The LaTeX
+ code for each admonition icon will use ``\faIcon`` command if with
+ ``fontawesome{5,6,7}`` and
+ ``\faicon`` if with ``fontawesome``.
+ If no "Font Awesome" related package is found (or if the option is set
+ forcefully to ``none``) the icons are silently dropped. User can set this
+ option to some specific package and must configure then the
+ ``div.note_title-icon`` and similar keys to use then that LaTeX package
+ interface (see the :ref:`additionalcss` section about this).
.. versionadded:: 7.4.0
@@ -1410,17 +1412,21 @@ The next keys, for admonitions, :dudir:`topic`, contents_, and
(it applies only to the icon, not to the title of the admonition).
- ``div._title-icon``: the LaTeX code responsible for producing the
- icon. For example, the default for :dudir:`note` is
- ``div.note_title-icon=\faIcon{info-circle}``. This uses a command from the
- LaTeX ``fontawesome5`` package, which is loaded automatically if available.
-
- If neither ``fontawesome5`` nor fall-back ``fontawesome`` (for which the
- associated command is :code-tex:`\\faicon`, not :code-tex:`\\faIcon`) are
- found, or if the ``iconpackage`` key of :ref:`'sphinxsetup'
- ` is set to load some other user-chosen package, or no
- package at all, all the ``title-icons`` default to empty LaTeX code. It is
- up to user to employ this interface to inject the icon (or anything else)
- into the PDF output.
+ icon for the given ````.
+ For example the default for :dudir:`note` is
+ ``div.note_title-icon=\faIcon{info-circle}`` with ``fontawesome5``, but
+ ``div.note_title-icon=\faIcon{circle-info}`` with ``fontawesome6``
+ and ``fontawesome7``.
+ If you want to modify the icons used by Sphinx, employ in these keys
+ the ``\faIcon`` LaTeX command if one of ``fontawesome5``, ``6`` or ``7`` is
+ on your LaTeX installation.
+ If your system only provides the
+ ``fontawesome`` package use its command ``\faicon`` (not ``\faIcon``)
+ in order to modify the choice of icons. The ``iconpackage`` key of
+ ``'sphinxsetup'`` can be used to force usage of one among
+ ``fontawesome{,5,6,7}`` or be the name of some other icon-providing package.
+ In that latter case you must configure the ``div._title-icon`` keys
+ to use the LaTeX commands appropriate to that custom icon package.
.. note::
@@ -1694,7 +1700,7 @@ Macros
.. hint::
If adding to preamble the loading of ``tocloft`` package, also add to
- preamble :code-tex:`\\renewcommand\sphinxtableofcontentshook{}` else it
+ preamble :code-tex:`\\renewcommand\\sphinxtableofcontentshook{}` else it
will reset :code-tex:`\\l@section` and :code-tex:`\\l@subsection`
cancelling ``tocloft`` customization.
diff --git a/doc/man/sphinx-build.rst b/doc/man/sphinx-build.rst
index 63af7e49b4c..6815d10a424 100644
--- a/doc/man/sphinx-build.rst
+++ b/doc/man/sphinx-build.rst
@@ -58,6 +58,9 @@ Options
*info*
Build Texinfo files and run them through :program:`makeinfo`.
+ *help*
+ Output a list of valid builder targets, and exit.
+
.. note::
The default output directory locations when using *make-mode*
@@ -272,13 +275,13 @@ Options
From Sphinx 8.1, :option:`!--keep-going` is always enabled.
Previously, it was only applicable whilst using :option:`--fail-on-warning`,
which by default exited :program:`sphinx-build` on the first warning.
- Using :option:`!--keep-going` runs :program:`!sphinx-build` to completion
+ Using :option:`!--keep-going` runs :program:`sphinx-build` to completion
and exits with exit status 1 if errors are encountered.
.. versionadded:: 1.8
.. versionchanged:: 8.1
:program:`sphinx-build` no longer exits on the first warning,
- meaning that in effect :option:`!--fail-on-warning` is always enabled.
+ meaning that in effect :option:`!--keep-going` is always enabled.
The option is retained for compatibility, but may be removed at some
later date.
diff --git a/doc/tutorial/first-steps.rst b/doc/tutorial/first-steps.rst
index fd5c631353e..dccf1838de3 100644
--- a/doc/tutorial/first-steps.rst
+++ b/doc/tutorial/first-steps.rst
@@ -73,6 +73,7 @@ shown right after the corresponding link, in parentheses. You can change that
behavior by adding the following code at the end of your ``conf.py``:
.. code-block:: python
+ :caption: docs/source/conf.py
# EPUB options
epub_show_urls = 'footnote'
diff --git a/doc/usage/configuration.rst b/doc/usage/configuration.rst
index 75e08d7654b..ff903fa4f6c 100644
--- a/doc/usage/configuration.rst
+++ b/doc/usage/configuration.rst
@@ -1157,6 +1157,9 @@ Options for source files
The recommended encoding is ``'utf-8-sig'``.
.. versionadded:: 0.5
+ .. deprecated:: 8.3
+ Support for source encodings other than UTF-8 is deprecated.
+ Sphinx 10 will only support UTF-8 files.
.. confval:: source_suffix
:type: :code-py:`dict[str, str] | Sequence[str] | str`
@@ -1391,6 +1394,7 @@ Options for warning control
* ``ref.any``
* ``ref.citation``
* ``ref.doc``
+ * ``ref.equation``
* ``ref.footnote``
* ``ref.keyword``
* ``ref.numref``
@@ -3083,7 +3087,7 @@ These options influence LaTeX output.
the :code-tex:`\\rowcolors` LaTeX command becomes a no-op
(this command has limitations and has never correctly
supported all types of tables Sphinx produces in LaTeX).
- Please update your project to use the
+ Please use the
:ref:`latex table color configuration ` keys instead.
To customise the styles for a table,
@@ -3096,7 +3100,7 @@ These options influence LaTeX output.
The latter two can be combined with any of the first three.
The ``standard`` class produces tables with
both horizontal and vertical lines
- (as has been the default so far with Sphinx).
+ (as had been the default prior to Sphinx 6.0.0).
A single-row multi-column merged cell will obey the row colour,
if it is set.
@@ -3642,7 +3646,6 @@ and which failures and redirects it ignores.
.. confval:: linkcheck_allowed_redirects
:type: :code-py:`dict[str, str]`
- :default: :code-py:`{}`
A dictionary that maps a pattern of the source URI
to a pattern of the canonical URI.
@@ -3668,6 +3671,11 @@ and which failures and redirects it ignores.
.. versionadded:: 4.1
+ .. versionchanged:: 8.3
+ Setting :confval:`!linkcheck_allowed_redirects` to an empty dictionary
+ may now be used to warn on all redirects encountered
+ by the *linkcheck* builder.
+
.. confval:: linkcheck_anchors
:type: :code-py:`bool`
:default: :code-py:`True`
diff --git a/doc/usage/domains/index.rst b/doc/usage/domains/index.rst
index cc3f272646c..acb0f2ee97b 100644
--- a/doc/usage/domains/index.rst
+++ b/doc/usage/domains/index.rst
@@ -35,6 +35,71 @@ easier to write.
This section describes what the domains that are included with Sphinx provide.
The domain API is documented as well, in the section :ref:`domain-api`.
+Built-in domains
+----------------
+
+The following domains are included within Sphinx:
+
+.. toctree::
+ :maxdepth: 1
+
+ standard
+ c
+ cpp
+ javascript
+ mathematics
+ python
+ restructuredtext
+
+
+Third-party domains
+-------------------
+
+Several third-party domains are available as extensions, including:
+
+* `Ada `__
+* `Antlr4 `__
+* `Bazel `__
+* `BibTex `__
+* `Bison/YACC `__
+* `Chapel `__
+* `CMake `__
+* `Common Lisp `__
+* `Erlang `__
+* `Fortran `__
+* `GraphQL `__
+* `Go `__
+* `HTTP `__
+* `Hy `__
+* `Lua `__
+* `MATLAB `__
+* `PHP `__
+* `Ruby `__
+* `Rust `__
+* `Verilog `__
+* `VHDL `__
+* `Visual Basic `__
+
+Other domains may be found on the Python Package Index
+(via the `Framework :: Sphinx :: Domain`__ classifier),
+`GitHub `__, or
+`GitLab `__.
+
+__ https://pypi.org/search/?c=Framework+%3A%3A+Sphinx+%3A%3A+Domain
+
+.. NOTE: The following all seem unmaintained, last released 2018 or earlier.
+ The links are preserved in this comment for reference.
+
+ * `CoffeeScript `__
+ * `DotNET `__
+ * `dqn `__
+ * `Jinja `__
+ * `JSON `__
+ * `Lasso `__
+ * `Operation `__
+ * `Scala `__
+ * `Lua `__
+
.. _basic-domain-markup:
@@ -174,40 +239,3 @@ In short:
component of the target.
For example, ``:py:meth:`~queue.Queue.get``` will
refer to ``queue.Queue.get`` but only display ``get`` as the link text.
-
-Built-in domains
-----------------
-
-The following domains are included within Sphinx:
-
-.. toctree::
- :maxdepth: 1
-
- standard
- c
- cpp
- javascript
- mathematics
- python
- restructuredtext
-
-More domains
-------------
-
-There are several third-party domains available as extensions, including:
-
-* `Ada `__
-* `Chapel `__
-* `CoffeeScript `__
-* `Common Lisp `__
-* `dqn `__
-* `Erlang `__
-* `Go `__
-* `HTTP `__
-* `Jinja `__
-* `Lasso `__
-* `MATLAB `__
-* `Operation `__
-* `PHP `__
-* `Ruby `__
-* `Scala `__
diff --git a/doc/usage/extensions/autodoc.rst b/doc/usage/extensions/autodoc.rst
index 1b873f0d819..925d1450acf 100644
--- a/doc/usage/extensions/autodoc.rst
+++ b/doc/usage/extensions/autodoc.rst
@@ -966,6 +966,38 @@ Automatically document attributes or data
``:no-value:`` has no effect.
+Automatically document type aliases
+^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+.. rst:directive:: autotype
+
+ .. versionadded:: 8.3
+
+ Document a :pep:`695` type alias (the :keyword:`type` statement).
+ By default, the directive only inserts the docstring of the alias itself:
+
+ The directive can also contain content of its own,
+ which will be inserted into the resulting non-auto directive source
+ after the docstring (but before any automatic member documentation).
+
+ Therefore, you can also mix automatic and non-automatic member documentation.
+
+ .. rubric:: Options
+
+ .. rst:directive:option:: no-index
+ :type:
+
+ Do not generate an index entry for the documented class
+ or any auto-documented members.
+
+ .. rst:directive:option:: no-index-entry
+ :type:
+
+ Do not generate an index entry for the documented class
+ or any auto-documented members.
+ Unlike ``:no-index:``, cross-references are still created.
+
+
Configuration
-------------
@@ -1379,7 +1411,7 @@ autodoc provides the following additional events:
``'(parameter_1, parameter_2)'``, or ``None`` if introspection didn't
succeed and signature wasn't specified in the directive.
:param return_annotation: function return annotation as a string of the form
- ``' -> annotation'``, or ``None`` if there is no return annotation
+ ``'annotation'``, or ``''`` if there is no return annotation.
The :mod:`sphinx.ext.autodoc` module provides factory functions for commonly
needed docstring processing in event :event:`autodoc-process-docstring`:
diff --git a/doc/usage/extensions/autosectionlabel.rst b/doc/usage/extensions/autosectionlabel.rst
index 1e9e1dba722..161b285a290 100644
--- a/doc/usage/extensions/autosectionlabel.rst
+++ b/doc/usage/extensions/autosectionlabel.rst
@@ -8,6 +8,9 @@
.. versionadded:: 1.4
+.. role:: code-py(code)
+ :language: Python
+
By default, cross-references to sections use labels (see :rst:role:`ref`).
This extension allows you to instead refer to sections by their title.
diff --git a/doc/usage/extensions/autosummary.rst b/doc/usage/extensions/autosummary.rst
index 456faee1830..c84dcb60eff 100644
--- a/doc/usage/extensions/autosummary.rst
+++ b/doc/usage/extensions/autosummary.rst
@@ -412,3 +412,27 @@ the title of a page.
Stub pages are generated also based on these directives.
.. _`escape filter`: https://jinja.palletsprojects.com/en/3.0.x/templates/#jinja-filters.escape
+
+Autolink role
+-------------
+
+.. rst:role:: autolink
+
+ The ``:autolink:`` role functions as ``:py:obj:`` when the referenced *name*
+ can be resolved to a Python object, and otherwise it becomes simple emphasis.
+
+ There are some known design flaws.
+ For example, in the case of multiple objects having the same name,
+ :rst:role:`!autolink` could resolve to the wrong object.
+ It will fail silently if the referenced object is not found,
+ for example due to a spelling mistake or renaming.
+ This is sometimes unwanted behaviour.
+
+ Some users choose to configure their :confval:`default_role` to ``autolink``
+ for 'smart' referencing using the default interpreted text role (```content```).
+
+ .. seealso::
+
+ :rst:role:`any`
+
+ :rst:role:`py:obj`
diff --git a/doc/usage/extensions/doctest.rst b/doc/usage/extensions/doctest.rst
index 60c67827967..10e8f67dfe2 100644
--- a/doc/usage/extensions/doctest.rst
+++ b/doc/usage/extensions/doctest.rst
@@ -452,3 +452,11 @@ The doctest extension uses the following configuration values:
Also, removal of ```` and ``# doctest:`` options only works in
:rst:dir:`doctest` blocks, though you may set :confval:`trim_doctest_flags`
to achieve that in all code blocks with Python console content.
+
+.. confval:: doctest_fail_fast
+ :type: :code-py:`bool`
+ :default: :code-py:`False`
+
+ Exit when the first failure is encountered.
+
+ .. versionadded:: 8.3
diff --git a/doc/usage/extensions/math.rst b/doc/usage/extensions/math.rst
index 6fa8ab851f8..fb41d66d8fb 100644
--- a/doc/usage/extensions/math.rst
+++ b/doc/usage/extensions/math.rst
@@ -318,14 +318,25 @@ Sphinx but is set to automatically include it from a third-party site.
This has been renamed to :confval:`mathjax2_config`.
:confval:`mathjax_config` is still supported for backwards compatibility.
-:mod:`sphinx.ext.jsmath` -- Render math via JavaScript
-------------------------------------------------------
+:mod:`sphinxcontrib.jsmath` -- Render math via JavaScript
+---------------------------------------------------------
-.. module:: sphinx.ext.jsmath
+.. module:: sphinxcontrib.jsmath
:synopsis: Render math using JavaScript via JSMath.
This extension works just as the MathJax extension does, but uses the older
-package jsMath_. It provides this config value:
+package jsMath_. jsMath is no longer actively developed, but it has the
+advantage that the size of the JavaScript package is much smaller than
+MathJax.
+
+.. versionadded:: 0.5
+ The :mod:`!sphinx.ext.jsmath` extension.
+.. versionchanged:: 2.0
+ :mod:`!sphinx.ext.jsmath` was moved to :mod:`sphinxcontrib.jsmath`.
+.. versionremoved:: 4.0
+ The alias from :mod:`!sphinx.ext.jsmath` to :mod:`sphinxcontrib.jsmath`.
+
+Config value:
.. confval:: jsmath_path
:type: :code-py:`str`
@@ -337,7 +348,7 @@ package jsMath_. It provides this config value:
The path can be absolute or relative; if it is relative, it is relative to
the ``_static`` directory of the built docs.
- For example, if you put JSMath into the static path of the Sphinx docs, this
+ For example, if you put jsMath into the static path of the Sphinx docs, this
value would be ``jsMath/easy/load.js``. If you host more than one
Sphinx documentation set on one server, it is advisable to install jsMath in
a shared location.
@@ -347,5 +358,5 @@ package jsMath_. It provides this config value:
.. _dvisvgm: https://dvisvgm.de/
.. _dvisvgm FAQ: https://dvisvgm.de/FAQ
.. _MathJax: https://www.mathjax.org/
-.. _jsMath: https://www.math.union.edu/~dpvc/jsmath/
+.. _jsMath: https://www.math.union.edu/~dpvc/jsMath/
.. _LaTeX preview package: https://www.gnu.org/software/auctex/preview-latex.html
diff --git a/doc/usage/installation.rst b/doc/usage/installation.rst
index 8b0aca1cab3..27adf3ab676 100644
--- a/doc/usage/installation.rst
+++ b/doc/usage/installation.rst
@@ -56,7 +56,7 @@ Run the following command::
Or, if writing documentation for a Python package,
place the dependencies in the `pyproject.toml file`__::
- $ pip install .[docs]
+ $ pip install . --group docs
__ https://pip.pypa.io/en/stable/reference/requirements-file-format/
__ https://packaging.python.org/en/latest/guides/writing-pyproject-toml/#dependencies-optional-dependencies
diff --git a/doc/usage/referencing.rst b/doc/usage/referencing.rst
index 2597c9ce597..571d3c798bc 100644
--- a/doc/usage/referencing.rst
+++ b/doc/usage/referencing.rst
@@ -136,8 +136,8 @@ There is also a way to directly link to documents:
.. rst:role:: doc
- Link to the specified document; the document name can be specified in
- absolute or relative fashion. For example, if the reference
+ Link to the specified document; the document name can be a relative or absolute
+ path and is always case-sensitive, even on Windows. For example, if the reference
``:doc:`parrot``` occurs in the document ``sketches/index``, then the link
refers to ``sketches/parrot``. If the reference is ``:doc:`/people``` or
``:doc:`../people```, the link refers to ``people``.
diff --git a/doc/usage/restructuredtext/basics.rst b/doc/usage/restructuredtext/basics.rst
index 5d60ea81de4..8f408f45e38 100644
--- a/doc/usage/restructuredtext/basics.rst
+++ b/doc/usage/restructuredtext/basics.rst
@@ -208,11 +208,39 @@ Hyperlinks
External links
~~~~~~~~~~~~~~
-Use ```Link text `_`` for inline web links. If the
-link text should be the web address, you don't need special markup at all, the
-parser finds links and mail addresses in ordinary text.
+URLs and email addresses in text are automatically linked and do not need
+explicit markup at all.
+For example, https://domain.invalid/ is written with no special markup
+in the source of this document, and is recognised as an external hyperlink.
-.. important:: There must be a space between the link text and the opening \< for the URL.
+To create text with a link, the best approach is generally to put the URL
+below the paragraph as follows (:duref:`ref `)::
+
+ This is a paragraph that contains `a link`_.
+
+ .. _a link: https://domain.invalid/
+
+This keeps the paragraph more readable in source code.
+
+Alternatively, you can embed the URL within the prose for an 'inline link'.
+This can lead to longer lines, but has the benefit of keeping the link text
+and the URL pointed to in the same place.
+This uses the following syntax: ```Link text `__``
+(:duref:`ref `).
+
+.. important::
+
+ There must be a space between the link text
+ and the opening angle bracket ('``<``') for the URL.
+
+.. tip::
+
+ Use two trailing underscores when embedding the URL.
+ Technically, a single underscore works as well,
+ but that would create a named reference instead of an anonymous one.
+ Named references typically do not have a benefit when the URL is embedded.
+ Moreover, they have the disadvantage that you must make sure that you
+ do not use the same "Link text" for another link in your document.
You can also separate the link and the target definition (:duref:`ref
`), like this::
@@ -618,10 +646,11 @@ configurations:
Source encoding
---------------
-Since the easiest way to include special characters like em dashes or copyright
-signs in reStructuredText is to directly write them as Unicode characters, one has to
-specify an encoding. Sphinx assumes source files to be encoded in UTF-8 by
-default; you can change this with the :confval:`source_encoding` config value.
+Sphinx supports source files that are encoded in UTF-8.
+This means that the full range of Unicode__ characters may be used
+directly in reStructuredText.
+
+__ https://www.unicode.org/
Gotchas
diff --git a/doc/usage/restructuredtext/directives.rst b/doc/usage/restructuredtext/directives.rst
index 33269b522a6..4b1a2042df9 100644
--- a/doc/usage/restructuredtext/directives.rst
+++ b/doc/usage/restructuredtext/directives.rst
@@ -473,8 +473,8 @@ and the generic :rst:dir:`admonition` directive.
.. seealso::
- Module :py:mod:`zipfile`
- Documentation of the :py:mod:`zipfile` standard module.
+ Python's :py:mod:`zipfile` module
+ Documentation of Python's standard :py:mod:`zipfile` module.
`GNU tar manual, Basic Tar Format `_
Documentation for tar archive files, including GNU tar extensions.
@@ -537,7 +537,8 @@ Describing changes between versions
pair: changes; in version
pair: removed; in version
-.. rst:directive:: .. versionadded:: version [brief explanation]
+.. rst:directive:: .. version-added:: version [brief explanation]
+ .. versionadded:: version [brief explanation]
This directive documents the version of the project
which added the described feature.
@@ -551,56 +552,75 @@ Describing changes between versions
There must be no blank line between the directive head and the explanation;
this is to make these blocks visually continuous in the markup.
+ .. version-changed:: 8.3
+ The :rst:dir:`versionadded` directive was renamed to :rst:dir:`version-added`.
+ The previous name is retained as an alias.
+
Example::
- .. versionadded:: 2.5
+ .. version-added:: 2.5
The *spam* parameter.
- .. versionadded:: 2.5
+ .. version-added:: 2.5
The *spam* parameter.
-.. rst:directive:: .. versionchanged:: version [brief explanation]
+.. rst:directive:: .. version-changed:: version [brief explanation]
+ .. versionchanged:: version [brief explanation]
- Similar to :rst:dir:`versionadded`, but describes when and what changed in
+ Similar to :rst:dir:`version-added`, but describes when and what changed in
the named feature in some way (new parameters, changed side effects, etc.).
+ .. version-changed:: 8.3
+ The :rst:dir:`versionchanged` directive was renamed to :rst:dir:`version-changed`.
+ The previous name is retained as an alias.
+
Example::
- .. versionchanged:: 2.8
+ .. version-changed:: 2.8
The *spam* parameter is now of type *boson*.
- .. versionchanged:: 2.8
+ .. version-changed:: 2.8
The *spam* parameter is now of type *boson*.
-.. rst:directive:: .. deprecated:: version [brief explanation]
+.. rst:directive:: .. version-deprecated:: version [brief explanation]
+ .. deprecated:: version [brief explanation]
- Similar to :rst:dir:`versionadded`, but describes when the feature was
+ Similar to :rst:dir:`version-added`, but describes when the feature was
deprecated.
A *brief* explanation can also be given,
for example to tell the reader what to use instead.
+ .. version-changed:: 8.3
+ The :rst:dir:`deprecated` directive was renamed to :rst:dir:`version-deprecated`.
+ The previous name is retained as an alias
+
Example::
- .. deprecated:: 3.1
+ .. version-deprecated:: 3.1
Use :py:func:`spam` instead.
- .. deprecated:: 3.1
+ .. version-deprecated:: 3.1
Use :py:func:`!spam` instead.
-.. rst:directive:: .. versionremoved:: version [brief explanation]
+.. rst:directive:: .. version-removed:: version [brief explanation]
+ .. versionremoved:: version [brief explanation]
- Similar to :rst:dir:`versionadded`, but describes when the feature was removed.
+ Similar to :rst:dir:`version-added`, but describes when the feature was removed.
An explanation may be provided to tell the reader what to use instead,
or why the feature was removed.
- .. versionadded:: 7.3
+ .. version-added:: 7.3
+
+ .. version-changed:: 8.3
+ The :rst:dir:`versionremoved` directive was renamed to :rst:dir:`version-removed`.
+ The previous name is retained as an alias.
Example::
- .. versionremoved:: 4.0
+ .. version-removed:: 4.0
The :py:func:`spam` function is more flexible, and should be used instead.
- .. versionremoved:: 4.0
+ .. version-removed:: 4.0
The :py:func:`!spam` function is more flexible, and should be used instead.
@@ -971,7 +991,7 @@ __ https://pygments.org/docs/lexers
:type: text
Explicitly specify the encoding of the file.
- This overwrites the default encoding (:confval:`source_encoding`).
+ This overwrites the default encoding (UTF-8).
For example:
.. code-block:: rst
@@ -1472,11 +1492,20 @@ Check the :confval:`latex_table_style`.
complex contents such as multiple paragraphs, blockquotes, lists, literal
blocks, will render correctly to LaTeX output.
+.. versionchanged:: 8.3.0
+ The partial support of the LaTeX builder for nesting a table in another
+ has been extended.
+ Formerly Sphinx would raise an error if ``longtable`` class was specified
+ for a table containing a nested table, and some cases would not raise an
+ error at Sphinx level but fail at LaTeX level during PDF build. This is a
+ complex topic in LaTeX rendering and the output can sometimes be improved
+ via the :rst:dir:`tabularcolumns` directive.
+
.. rst:directive:: .. tabularcolumns:: column spec
- This directive influences only the LaTeX output for the next table in
- source. The mandatory argument is a column specification (known as an
- "alignment preamble" in LaTeX idiom). Please refer to a LaTeX
+ This directive influences only the LaTeX output, and only for the next
+ table in source. The mandatory argument is a column specification (known
+ as an "alignment preamble" in LaTeX idiom). Please refer to a LaTeX
documentation, such as the `wiki page`_, for basics of such a column
specification.
@@ -1484,52 +1513,85 @@ Check the :confval:`latex_table_style`.
.. versionadded:: 0.3
+ Sphinx renders tables with at most 30 rows using ``tabulary`` (or
+ ``tabular`` if at least one cell contains either a code-block or a nested
+ table), and those with more rows with ``longtable``. The advantage of
+ using ``tabulary`` is that it tries to compute automatically (internally to
+ LaTeX) suitable column widths.
+
+ The ``tabulary`` algorithm often works well, but in some cases when a cell
+ contains long paragraphs, the column will be given a large width and other
+ columns whose cells contain only single words may end up too narrow. The
+ :rst:dir:`tabularcolumns` can help solve this via providing to LaTeX a
+ custom "alignment preamble" (aka "colspec"). For example ``lJJ`` will be
+ suitable for a three-columns table whose first column contains only single
+ words and the other two have cells with long paragraphs.
+
.. note::
- :rst:dir:`tabularcolumns` conflicts with ``:widths:`` option of table
- directives. If both are specified, ``:widths:`` option will be ignored.
+ Of course, a fully automated solution would be better, and it is still
+ hoped for, but it is an intrinsic aspect of ``tabulary``, and the latter
+ is in use by Sphinx ever since ``0.3``... It looks as if solving the
+ problem of squeezed columns could require substantial changes to that
+ LaTeX package. And no good alternative appears to exist, as of 2025.
- Sphinx will render tables with more than 30 rows with ``longtable``.
- Besides the ``l``, ``r``, ``c`` and ``p{width}`` column specifiers, one can
- also use ``\X{a}{b}`` (new in version 1.5) which configures the column
- width to be a fraction ``a/b`` of the total line width and ``\Y{f}`` (new
- in version 1.6) where ``f`` is a decimal: for example ``\Y{0.2}`` means that
- the column will occupy ``0.2`` times the line width.
+ .. hint::
- When this directive is used for a table with at most 30 rows, Sphinx will
- render it with ``tabulary``. One can then use specific column types ``L``
- (left), ``R`` (right), ``C`` (centered) and ``J`` (justified). They have
- the effect of a ``p{width}`` (i.e. each cell is a LaTeX ``\parbox``) with
- the specified internal text alignment and an automatically computed
- ``width``.
+ A way to solve the issue for all tables at once, is to inject in the
+ LaTeX preamble (see :confval:`latex_elements`) a command such as
+ ``\setlength{\tymin}{1cm}`` which causes all columns to be at least
+ ``1cm`` wide (not counting inter-column whitespace). Currently, Sphinx
+ configures ``\tymin`` to allow room for three characters at least.
- .. warning::
+ Here is a more sophisticated "colspec", for a 4-columns table:
- - Cells that contain list-like elements such as object descriptions,
- blockquotes or any kind of lists are not compatible with the ``LRCJ``
- column types. The column type must then be some ``p{width}`` with an
- explicit ``width`` (or ``\X{a}{b}`` or ``\Y{f}``).
+ .. code-block:: latex
- - Literal blocks do not work with ``tabulary`` at all. Sphinx will
- fall back to ``tabular`` or ``longtable`` environments and generate a
- suitable column specification.
+ .. tabularcolumns:: >{\raggedright}\Y{.4}>{\centering}\Y{.1}>{\sphinxcolorblend{!95!red}\centering\noindent\bfseries\color{red}}\Y{.12}>{\raggedright\arraybackslash}\Y{.38}
-In absence of the :rst:dir:`tabularcolumns` directive, and for a table with at
-most 30 rows and no problematic cells as described in the above warning,
-Sphinx uses ``tabulary`` and the ``J`` column-type for every column.
+ This is used in Sphinx own PDF docs at :ref:`dev-deprecated-apis`.
+ Regarding column widths, this "colspec" achieves the same as would
+ ``:widths:`` option set to ``40 10 12 38`` but it injects extra effects.
-.. versionchanged:: 1.6
+ .. note::
- Formerly, the ``L`` column-type was used (text is flushed-left). To revert
- to this, include ``\newcolumntype{T}{L}`` in the LaTeX preamble, as in fact
- Sphinx uses ``T`` and sets it by default to be an alias of ``J``.
+ In case both :rst:dir:`tabularcolumns` and ``:widths:`` option of table
+ directives are used, ``:widths:`` option will be ignored by the LaTeX
+ builder. Of course it is obeyed by other builders.
-.. hint::
+ Literal blocks do not work at all with ``tabulary`` and Sphinx will then
+ fall back to ``tabular`` LaTeX environment. It will employ the
+ :rst:dir:`tabularcolumns` specification in that case only if it contains no
+ usage of the ``tabulary`` specific column types (which are ``L``, ``R``,
+ ``C`` and ``J``).
+
+ Besides the LaTeX ``l``, ``r``, ``c`` and ``p{width}`` column specifiers,
+ and the ``tabulary`` specific ``L``, ``R``, ``C`` and ``J``, one can also
+ use (with all table types) ``\X{a}{b}`` which configures the column width
+ to be a fraction ``a/b`` of the total line width and ``\Y{f}`` where ``f``
+ is a decimal: for example ``\Y{0.2}`` means that the column will occupy
+ ``0.2`` times the line width.
+
+.. versionchanged:: 1.6
- A frequent issue with ``tabulary`` is that columns with little contents
- appear to be "squeezed". One can add to the LaTeX preamble for example
- ``\setlength{\tymin}{40pt}`` to ensure a minimal column width of ``40pt``,
- the ``tabulary`` default of ``10pt`` being too small.
+ Sphinx uses ``J`` (justified) by default with ``tabulary``, not ``L``
+ (flushed-left). To revert, include ``\newcolumntype{T}{L}`` in the LaTeX
+ preamble, as in fact Sphinx uses ``T`` and sets it by default to be an
+ alias of ``J``.
+
+.. versionchanged:: 8.3.0
+
+ Formerly, Sphinx did not use ``tabulary`` if the table had at least one
+ cell containing "problematic" elements such as lists, object descriptions,
+ blockquotes (etc...) because such contents are not out-of-the-box
+ compatible with ``tabulary``. At ``8.3.0`` a technique, which was already
+ in use for merged cells, was extended to such cases, and the sole
+ "problematic" contents are code-blocks and nested tables. So tables
+ containing (only) cells with mutliple paragraphs, bullet or enumerated
+ lists, or line blocks, will now better fit to their contents (if not
+ rendered by ``longtable``). Cells with object descriptions or admonitions
+ will still have a tendency to induce the table to fill the full text area
+ width, but columns in that table with no such contents will be tighter.
.. hint::
diff --git a/pyproject.toml b/pyproject.toml
index 9645f148dd3..e3418ab98a2 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -1,5 +1,5 @@
[build-system]
-requires = ["flit_core>=3.11"]
+requires = ["flit_core>=3.12"]
build-backend = "flit_core.buildapi"
# project metadata
@@ -39,6 +39,7 @@ classifiers = [
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
"Programming Language :: Python :: 3.14",
+ "Programming Language :: Python :: 3.15",
"Programming Language :: Python :: Implementation :: CPython",
"Programming Language :: Python :: Implementation :: PyPy",
"Framework :: Sphinx",
@@ -75,7 +76,7 @@ dependencies = [
"sphinxcontrib-serializinghtml>=1.1.9",
"Jinja2>=3.1",
"Pygments>=2.17",
- "docutils>=0.20,<0.22",
+ "docutils>=0.20,<0.23",
"snowballstemmer>=2.2",
"babel>=2.13",
"alabaster>=0.7.14",
@@ -84,38 +85,10 @@ dependencies = [
"roman-numerals-py>=1.0.0",
"packaging>=23.0",
"colorama>=0.4.6; sys_platform == 'win32'",
+ "ipython>=9.6.0",
]
dynamic = ["version"]
-[project.optional-dependencies]
-docs = [
- "sphinxcontrib-websupport",
-]
-lint = [
- "ruff==0.9.9",
- "mypy==1.15.0",
- "sphinx-lint>=0.9",
- "types-colorama==0.4.15.20240311",
- "types-defusedxml==0.7.0.20240218",
- "types-docutils==0.21.0.20241128",
- "types-Pillow==10.2.0.20240822",
- "types-Pygments==2.19.0.20250219",
- "types-requests==2.32.0.20250301", # align with requests
- "types-urllib3==1.26.25.14",
- "pyright==1.1.396",
- "pytest>=8.0",
- "pypi-attestations==0.0.21",
- "betterproto==2.0.0b6",
-]
-test = [
- "pytest>=8.0",
- "pytest-xdist[psutil]>=3.4",
- "defusedxml>=0.7.1", # for secure XML/HTML parsing
- "cython>=3.0",
- "setuptools>=70.0", # for Cython compilation
- "typing_extensions>=4.9", # for typing_extensions.Unpack
-]
-
[[project.authors]]
name = "Adam Turner"
email = "aa-turner@users.noreply.github.com"
@@ -130,6 +103,50 @@ sphinx-quickstart = "sphinx.cmd.quickstart:main"
sphinx-apidoc = "sphinx.ext.apidoc:main"
sphinx-autogen = "sphinx.ext.autosummary.generate:main"
+[dependency-groups]
+docs = [
+ "sphinxcontrib-websupport",
+]
+lint = [
+ "ruff==0.14.0",
+ "sphinx-lint>=0.9",
+]
+package = [
+ "betterproto==2.0.0b6", # resolution fails without betterproto
+ "build",
+ "pypi-attestations==0.0.27",
+ "twine>=6.1",
+]
+test = [
+ "pytest>=8.0",
+ "pytest-xdist[psutil]>=3.4",
+ "cython>=3.0", # for Cython compilation
+ "defusedxml>=0.7.1", # for secure XML/HTML parsing
+ "setuptools>=70.0", # for Cython compilation
+ "typing_extensions>=4.9", # for typing_extensions.Unpack
+]
+translations = [
+ "babel>=2.13",
+ "Jinja2>=3.1",
+]
+types = [
+ "mypy==1.18.2",
+ "pyrefly",
+ "pyright==1.1.406",
+ "ty",
+ { include-group = "type-stubs" },
+]
+type-stubs = [
+ # align with versions used elsewhere
+ "types-colorama==0.4.15.20250801",
+ "types-defusedxml==0.7.0.20250822",
+ "types-docutils==0.21.0.20250525",
+ "types-Pillow==10.2.0.20240822",
+ "types-Pygments==2.19.0.20250809",
+ "types-requests==2.32.4.20250809",
+ "types-urllib3==1.26.25.14",
+]
+
[tool.flit.module]
name = "sphinx"
@@ -156,7 +173,7 @@ exclude = [
[tool.mypy]
files = [
"doc/conf.py",
- "doc/development/tutorials/examples/autodoc_intenum.py",
+# "doc/development/tutorials/examples/autodoc_intenum.py",
"doc/development/tutorials/examples/helloworld.py",
"sphinx",
"tests",
@@ -203,57 +220,32 @@ ignore_missing_imports = true
[[tool.mypy.overrides]]
module = [
# tests/
- "tests.test_addnodes",
- "tests.test_application",
- "tests.test_events",
- "tests.test_highlighting",
- "tests.test_project",
"tests.test_versioning",
# tests/test_builders
"tests.test_builders.test_build",
"tests.test_builders.test_build_html",
"tests.test_builders.test_build_html_5_output",
- "tests.test_builders.test_build_html_assets",
- "tests.test_builders.test_build_html_maths",
- "tests.test_builders.test_build_html_numfig",
- "tests.test_builders.test_build_html_tocdepth",
- "tests.test_builders.test_build_html_toctree",
"tests.test_builders.test_build_linkcheck",
- "tests.test_builders.test_build_warnings",
# tests/test_directives
"tests.test_directives.test_directive_code",
"tests.test_directives.test_directives_no_typesetting",
- # tests/test_environment
- "tests.test_environment.test_environment",
+ # tests/test_ext_autodoc
+ "tests.test_ext_autodoc.test_ext_autodoc_autoclass",
+ # tests/test_ext_autosummary
+ "tests.test_ext_autosummary.test_ext_autosummary_imports",
+ # tests/test_ext_intersphinx
+ "tests.test_ext_intersphinx.test_ext_intersphinx_cache",
+ # tests/test_ext_napoleon
+ "tests.test_ext_napoleon.test_ext_napoleon",
# tests/test_extensions
- "tests.test_extensions.test_ext_autodoc_autoclass",
- "tests.test_extensions.test_ext_autosummary_imports",
- "tests.test_extensions.test_ext_imgconverter",
- "tests.test_extensions.test_ext_intersphinx_cache",
"tests.test_extensions.test_ext_math",
- "tests.test_extensions.test_ext_napoleon",
- "tests.test_extensions.test_ext_todo",
- "tests.test_extensions.test_ext_viewcode",
- # tests/test_intl
- "tests.test_intl.test_catalogs",
- "tests.test_intl.test_locale",
# tests/test_markup
"tests.test_markup.test_markup",
- "tests.test_markup.test_parser",
# tests/test_theming
"tests.test_theming.test_templating",
"tests.test_theming.test_theming",
# tests/test_transforms
- "tests.test_transforms.test_transforms_move_module_targets",
"tests.test_transforms.test_transforms_post_transforms_images",
- "tests.test_transforms.test_transforms_reorder_nodes",
- # tests/test_util
- "tests.test_util.test_util",
- "tests.test_util.test_util_display",
- "tests.test_util.test_util_docutils",
- "tests.test_util.test_util_images",
- "tests.test_util.test_util_inventory",
- "tests.test_util.test_util_matching",
# tests/test_writers
"tests.test_writers.test_docutilsconf",
]
@@ -262,14 +254,12 @@ disallow_untyped_defs = false
[[tool.mypy.overrides]]
module = [
# tests/
- "tests.test_quickstart",
"tests.test_search",
# tests/test_builders
"tests.test_builders.test_build_latex",
# tests/test_config
"tests.test_config.test_config",
# tests/test_directives
- "tests.test_directives.test_directive_only",
"tests.test_directives.test_directive_other",
"tests.test_directives.test_directive_patch",
# tests/test_domains
@@ -284,25 +274,25 @@ module = [
"tests.test_domains.test_domain_std",
# tests/test_environment
"tests.test_environment.test_environment_toctree",
+ # tests/test_ext_autodoc
+ "tests.test_ext_autodoc.test_ext_autodoc",
+ "tests.test_ext_autodoc.test_ext_autodoc_events",
+ "tests.test_ext_autodoc.test_ext_autodoc_mock",
+ # tests/test_ext_autosummary
+ "tests.test_ext_autosummary.test_ext_autosummary",
+ # tests/test_ext_intersphinx
+ "tests.test_ext_intersphinx.test_ext_intersphinx",
+ # tests/test_ext_napoleon
+ "tests.test_ext_napoleon.test_ext_napoleon_docstring",
# tests/test_extensions
"tests.test_extensions.test_ext_apidoc",
- "tests.test_extensions.test_ext_autodoc",
- "tests.test_extensions.test_ext_autodoc_events",
- "tests.test_extensions.test_ext_autodoc_mock",
- "tests.test_extensions.test_ext_autosummary",
"tests.test_extensions.test_ext_doctest",
"tests.test_extensions.test_ext_inheritance_diagram",
- "tests.test_extensions.test_ext_intersphinx",
- "tests.test_extensions.test_ext_napoleon_docstring",
# tests/test_intl
"tests.test_intl.test_intl",
- # tests/test_pycode
- "tests.test_pycode.test_pycode",
- "tests.test_pycode.test_pycode_ast",
# tests/test_transforms
"tests.test_transforms.test_transforms_post_transforms",
# tests/test_util
- "tests.test_util.test_util_fileutil",
"tests.test_util.test_util_i18n",
"tests.test_util.test_util_inspect",
"tests.test_util.test_util_logging",
@@ -313,7 +303,6 @@ check_untyped_defs = false
disable_error_code = [
"annotation-unchecked",
]
-disallow_incomplete_defs = false
disallow_untyped_calls = false
disallow_untyped_defs = false
@@ -422,3 +411,6 @@ reportUnusedFunction = "none"
reportUnusedImport = "none"
reportUnusedVariable = "none"
reportWildcardImportFromLibrary = "none"
+
+[tool.uv]
+default-groups = "all"
diff --git a/pyrefly.toml b/pyrefly.toml
new file mode 100644
index 00000000000..88ccae4d84c
--- /dev/null
+++ b/pyrefly.toml
@@ -0,0 +1,27 @@
+# Configuration file for Pyrefly_.
+# n.b. Pyrefly is early in development.
+# Sphinx's current primary/reference type-checker is mypy.
+#
+# .. _Pyrefly: https://pyrefly.org/en/docs/configuration/
+
+project_includes = [
+ "doc/conf.py",
+ "doc/development/tutorials/examples/autodoc_intenum.py",
+ "doc/development/tutorials/examples/helloworld.py",
+ "sphinx",
+ "tests",
+ "utils",
+]
+project_excludes = [
+ "**/tests/roots*",
+]
+python_version = "3.11"
+replace_imports_with_any = [
+ "imagesize",
+ "pyximport",
+ "snowballstemmer",
+]
+
+# https://pyrefly.org/en/docs/error-kinds/
+[errors]
+implicitly-defined-attribute = false # many false positives
diff --git a/sphinx/__init__.py b/sphinx/__init__.py
index b70b6db47a6..79df3e09df3 100644
--- a/sphinx/__init__.py
+++ b/sphinx/__init__.py
@@ -5,13 +5,7 @@
from __future__ import annotations
-import warnings
-
-# work around flit error in parsing annotated assignments
-try:
- from sphinx.util._pathlib import _StrPath
-except ImportError:
- from pathlib import Path as _StrPath # type: ignore[assignment]
+from sphinx.util._pathlib import _StrPath
TYPE_CHECKING = False
if TYPE_CHECKING:
@@ -20,13 +14,6 @@
__version__: Final = '8.3.0'
__display_version__: Final = __version__ # used for command line version
-warnings.filterwarnings(
- 'ignore',
- 'The frontend.Option class .*',
- DeprecationWarning,
- module='docutils.frontend',
-)
-
#: Version info for better programmatic use.
#:
#: A tuple of five elements; for Sphinx version 1.2.1 beta 3 this would be
@@ -38,6 +25,7 @@
version_info: Final = (8, 3, 0, 'beta', 0)
package_dir: Final = _StrPath(__file__).resolve().parent
+del _StrPath
_in_development = True
if _in_development:
diff --git a/sphinx/_cli/__init__.py b/sphinx/_cli/__init__.py
index 87128b0a5a0..8c305442de3 100644
--- a/sphinx/_cli/__init__.py
+++ b/sphinx/_cli/__init__.py
@@ -64,7 +64,7 @@ def _load_subcommand_descriptions() -> Iterator[tuple[str, str]]:
# log an error here, but don't fail the full enumeration
print(f'Failed to load the description for {command}', file=sys.stderr)
else:
- yield command, description.split('\n\n', 1)[0]
+ yield command, description.partition('\n\n')[0]
class _RootArgumentParser(argparse.ArgumentParser):
diff --git a/sphinx/addnodes.py b/sphinx/addnodes.py
index 3cf63b6b053..4ce85dabcf2 100644
--- a/sphinx/addnodes.py
+++ b/sphinx/addnodes.py
@@ -402,8 +402,8 @@ class desc_sig_literal_char(desc_sig_element, _sig_element=True):
class versionmodified(nodes.Admonition, nodes.TextElement):
"""Node for version change entries.
- Currently used for "versionadded", "versionchanged", "deprecated"
- and "versionremoved" directives.
+ Currently used for "version-added", "version-changed", "version-deprecated"
+ and "version-removed" directives, along with their aliases.
"""
diff --git a/sphinx/application.py b/sphinx/application.py
index fe0e8bdf195..f1ca7d13541 100644
--- a/sphinx/application.py
+++ b/sphinx/application.py
@@ -52,7 +52,12 @@
from sphinx.config import ENUM, _ConfigRebuild
from sphinx.domains import Domain, Index
from sphinx.environment.collectors import EnvironmentCollector
- from sphinx.ext.autodoc import Documenter, _AutodocProcessDocstringListener
+ from sphinx.ext.autodoc._documenters import Documenter
+ from sphinx.ext.autodoc._event_listeners import (
+ _AutodocProcessDocstringListener,
+ _AutodocProcessSignatureListener,
+ _AutodocSkipMemberListener,
+ )
from sphinx.ext.todo import todo_node
from sphinx.extension import Extension
from sphinx.registry import (
@@ -196,7 +201,6 @@ def __init__(
:param pdb: If true, enable the Python debugger on an exception.
:param exception_on_warning: If true, raise an exception on warnings.
"""
- self.phase = BuildPhase.INITIALIZATION
self.verbosity = verbosity
self._fresh_env_used: bool | None = None
self.extensions: dict[str, Extension] = {}
@@ -240,7 +244,7 @@ def __init__(
self._fail_on_warnings = bool(warningiserror)
self.pdb = pdb
self._exception_on_warning = exception_on_warning
- logging.setup(self, self._status, self._warning)
+ logging.setup(self, self._status, self._warning, verbosity=verbosity)
self.events = EventManager(self)
@@ -255,15 +259,17 @@ def __init__(
self.statuscode = 0
# read config
+ overrides = confoverrides or {}
self.tags = Tags(tags)
if confdir is None:
# set confdir to srcdir if -C given (!= no confdir); a few pieces
# of code expect a confdir to be set
self.confdir = self.srcdir
- self.config = Config({}, confoverrides or {})
+ self.config = Config({}, overrides)
else:
self.confdir = _StrPath(confdir).resolve()
- self.config = Config.read(self.confdir, confoverrides or {}, self.tags)
+ self.config = Config.read(self.confdir, overrides=overrides, tags=self.tags)
+ self.config._verbosity = -1 if self.quiet else self.verbosity
# set up translation infrastructure
self._init_i18n()
@@ -338,6 +344,12 @@ def fresh_env_used(self) -> bool | None:
"""
return self._fresh_env_used
+ @property
+ def phase(self) -> BuildPhase:
+ if not hasattr(self, 'builder'):
+ return BuildPhase.INITIALIZATION
+ return self.builder.phase
+
def _init_i18n(self) -> None:
"""Load translated strings from the configured localedirs if enabled in
the configuration.
@@ -399,6 +411,8 @@ def _post_init_env(self) -> None:
if self._fresh_env_used:
self.env.find_files(self.config, self.builder)
+ self.env._builder_cls = self.builder.__class__
+
def preload_builder(self, name: str) -> None:
self.registry.preload_builder(self, name)
@@ -416,7 +430,7 @@ def _init_builder(self) -> None:
# ---- main "build" method -------------------------------------------------
def build(self, force_all: bool = False, filenames: Sequence[Path] = ()) -> None:
- self.phase = BuildPhase.READING
+ self.builder.phase = BuildPhase.READING
try:
if force_all:
self.builder.build_all()
@@ -715,20 +729,7 @@ def connect(
def connect(
self,
event: Literal['autodoc-process-signature'],
- callback: Callable[
- [
- Sphinx,
- Literal[
- 'module', 'class', 'exception', 'function', 'method', 'attribute'
- ],
- str,
- Any,
- dict[str, bool],
- str | None,
- str | None,
- ],
- tuple[str | None, str | None] | None,
- ],
+ callback: _AutodocProcessSignatureListener,
priority: int = 500,
) -> int: ...
@@ -744,19 +745,7 @@ def connect(
def connect(
self,
event: Literal['autodoc-skip-member'],
- callback: Callable[
- [
- Sphinx,
- Literal[
- 'module', 'class', 'exception', 'function', 'method', 'attribute'
- ],
- str,
- Any,
- bool,
- dict[str, bool],
- ],
- bool,
- ],
+ callback: _AutodocSkipMemberListener,
priority: int = 500,
) -> int: ...
@@ -933,6 +922,9 @@ def add_config_value(
``'env'``) to a string. However, booleans are still accepted and
converted internally.
+ .. versionadded:: 1.4
+ The *types* parameter.
+
.. versionadded:: 7.4
The *description* parameter.
"""
@@ -1108,7 +1100,7 @@ def setup(app):
.. versionchanged:: 0.6
Docutils 0.5-style directive classes are now supported.
- .. deprecated:: 1.8
+ .. versionchanged:: 1.8
Docutils 0.4-style (function based) directives support is deprecated.
.. versionchanged:: 1.8
Add *override* keyword.
@@ -1637,8 +1629,9 @@ def add_autodocumenter(self, cls: type[Documenter], override: bool = False) -> N
logger.debug('[app] adding autodocumenter: %r', cls)
from sphinx.ext.autodoc.directive import AutodocDirective
- self.registry.add_documenter(cls.objtype, cls)
- self.add_directive('auto' + cls.objtype, AutodocDirective, override=override)
+ objtype = cls.objtype # type: ignore[attr-defined]
+ self.registry.add_documenter(objtype, cls)
+ self.add_directive('auto' + objtype, AutodocDirective, override=override)
def add_autodoc_attrgetter(
self, typ: type, getter: Callable[[Any, str, Any], Any]
diff --git a/sphinx/builders/__init__.py b/sphinx/builders/__init__.py
index 21a1eb8b5c4..2dd972ecfe0 100644
--- a/sphinx/builders/__init__.py
+++ b/sphinx/builders/__init__.py
@@ -11,9 +11,9 @@
from typing import TYPE_CHECKING, final
from docutils import nodes
-from docutils.utils import DependencyList
from sphinx._cli.util.colour import bold
+from sphinx.deprecation import _deprecation_warning
from sphinx.environment import (
CONFIG_CHANGED_REASON,
CONFIG_OK,
@@ -22,16 +22,12 @@
from sphinx.environment.adapters.asset import ImageAdapter
from sphinx.errors import SphinxError
from sphinx.locale import __
-from sphinx.util import (
- get_filetype,
- logging,
- rst,
-)
+from sphinx.util import get_filetype, logging
from sphinx.util._importer import import_object
from sphinx.util._pathlib import _StrPathProperty
from sphinx.util.build_phase import BuildPhase
from sphinx.util.display import progress_message, status_iterator
-from sphinx.util.docutils import sphinx_domains
+from sphinx.util.docutils import _parse_str_to_doctree
from sphinx.util.i18n import CatalogRepository, docname_to_domain
from sphinx.util.osutil import ensuredir, relative_uri, relpath
from sphinx.util.parallel import (
@@ -48,7 +44,7 @@
if TYPE_CHECKING:
from collections.abc import Iterable, Sequence, Set
from gettext import NullTranslations
- from typing import Any, Literal
+ from typing import Any, ClassVar, Literal
from docutils.nodes import Node
@@ -70,37 +66,39 @@ class Builder:
#: The builder's name.
#: This is the value used to select builders on the command line.
- name: str = ''
+ name: ClassVar[str] = ''
#: The builder's output format, or '' if no document output is produced.
#: This is commonly the file extension, e.g. "html",
#: though any string value is accepted.
#: The builder's format string can be used by various components
#: such as :class:`.SphinxPostTransform` or extensions to determine
#: their compatibility with the builder.
- format: str = ''
+ format: ClassVar[str] = ''
#: The message emitted upon successful build completion.
#: This can be a printf-style template string
#: with the following keys: ``outdir``, ``project``
- epilog: str = ''
+ epilog: ClassVar[str] = ''
#: default translator class for the builder. This can be overridden by
#: :py:meth:`~sphinx.application.Sphinx.set_translator`.
- default_translator_class: type[nodes.NodeVisitor]
+ default_translator_class: ClassVar[type[nodes.NodeVisitor]]
# doctree versioning method
- versioning_method = 'none'
- versioning_compare = False
+ versioning_method: ClassVar[str] = 'none'
+ versioning_compare: ClassVar[bool] = False
#: Whether it is safe to make parallel :meth:`~.Builder.write_doc` calls.
- allow_parallel: bool = False
+ allow_parallel: ClassVar[bool] = False
# support translation
- use_message_catalog = True
+ use_message_catalog: ClassVar[bool] = True
#: The list of MIME types of image formats supported by the builder.
#: Image files are searched in the order in which they appear here.
- supported_image_types: list[str] = []
+ supported_image_types: ClassVar[list[str]] = []
#: The builder can produce output documents that may fetch external images when opened.
- supported_remote_images: bool = False
+ supported_remote_images: ClassVar[bool] = False
#: The file format produced by the builder allows images to be embedded using data-URIs.
- supported_data_uri_images: bool = False
+ supported_data_uri_images: ClassVar[bool] = False
+
+ phase: BuildPhase = BuildPhase.INITIALIZATION
srcdir = _StrPathProperty()
confdir = _StrPathProperty()
@@ -114,7 +112,7 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
self.doctreedir = app.doctreedir
ensuredir(self.doctreedir)
- self.app: Sphinx = app
+ self._app: Sphinx = app
self.env: BuildEnvironment = env
self.env.set_versioning_method(self.versioning_method, self.versioning_compare)
self.events: EventManager = app.events
@@ -124,6 +122,7 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
self.tags.add(self.name)
self.tags.add(f'format_{self.format}')
self.tags.add(f'builder_{self.name}')
+ self._registry = app.registry
# images that need to be copied over (source -> dest)
self.images: dict[str, str] = {}
@@ -136,13 +135,20 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
self.parallel_ok = False
self.finish_tasks: Any = None
+ @property
+ def app(self) -> Sphinx:
+ cls_module = self.__class__.__module__
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(cls_module, f'{cls_name}.app', remove=(10, 0))
+ return self._app
+
@property
def _translator(self) -> NullTranslations | None:
- return self.app.translator
+ return self._app.translator
def get_translator_class(self, *args: Any) -> type[nodes.NodeVisitor]:
"""Return a class of translator."""
- return self.env._registry.get_translator_class(self)
+ return self._registry.get_translator_class(self)
def create_translator(self, *args: Any) -> nodes.NodeVisitor:
"""Return an instance of translator.
@@ -150,7 +156,7 @@ def create_translator(self, *args: Any) -> nodes.NodeVisitor:
This method returns an instance of ``default_translator_class`` by default.
Users can replace the translator class with ``app.set_translator()`` API.
"""
- return self.env._registry.create_translator(self, *args)
+ return self._registry.create_translator(self, *args)
# helper methods
def init(self) -> None:
@@ -258,7 +264,7 @@ def cat2relpath(cat: CatalogInfo, srcdir: Path = self.srcdir) -> str:
__('writing output... '),
'darkgreen',
len(catalogs),
- self.app.verbosity,
+ self.config.verbosity,
stringify_func=cat2relpath,
):
catalog.write_mo(
@@ -397,14 +403,14 @@ def build(
# while reading, collect all warnings from docutils
with (
nullcontext()
- if self.app._exception_on_warning
+ if self._app._exception_on_warning
else logging.pending_warnings()
):
updated_docnames = set(self.read())
doccount = len(updated_docnames)
logger.info(bold(__('looking for now-outdated files... ')), nonl=True)
- updated_docnames.update(self.env.check_dependents(self.app, updated_docnames))
+ updated_docnames.update(self.env.check_dependents(self._app, updated_docnames))
outdated = len(updated_docnames) - doccount
if outdated:
logger.info(__('%d found'), outdated)
@@ -422,14 +428,14 @@ def build(
pickle.dump(self.env, f, pickle.HIGHEST_PROTOCOL)
# global actions
- self.app.phase = BuildPhase.CONSISTENCY_CHECK
+ self.phase = BuildPhase.CONSISTENCY_CHECK
with progress_message(__('checking consistency')):
self.env.check_consistency()
else:
if method == 'update' and not docnames:
logger.info(bold(__('no targets are out of date.')))
- self.app.phase = BuildPhase.RESOLVING
+ self.phase = BuildPhase.RESOLVING
# filter "docnames" (list of outdated files) by the updated
# found_docs of the environment; this will remove docs that
@@ -438,14 +444,14 @@ def build(
docnames = set(docnames) & self.env.found_docs
# determine if we can write in parallel
- if parallel_available and self.app.parallel > 1 and self.allow_parallel:
- self.parallel_ok = self.app.is_parallel_allowed('write')
+ if parallel_available and self._app.parallel > 1 and self.allow_parallel:
+ self.parallel_ok = self._app.is_parallel_allowed('write')
else:
self.parallel_ok = False
# create a task executor to use for misc. "finish-up" tasks
# if self.parallel_ok:
- # self.finish_tasks = ParallelTasks(self.app.parallel)
+ # self.finish_tasks = ParallelTasks(self._app.parallel)
# else:
# for now, just execute them serially
self.finish_tasks = SerialTasks()
@@ -508,13 +514,13 @@ def read(self) -> list[str]:
self.events.emit('env-before-read-docs', self.env, docnames)
# check if we should do parallel or serial read
- if parallel_available and self.app.parallel > 1:
- par_ok = self.app.is_parallel_allowed('read')
+ if parallel_available and self._app.parallel > 1:
+ par_ok = self._app.is_parallel_allowed('read')
else:
par_ok = False
if par_ok:
- self._read_parallel(docnames, nproc=self.app.parallel)
+ self._read_parallel(docnames, nproc=self._app.parallel)
else:
self._read_serial(docnames)
@@ -576,7 +582,7 @@ def _read_serial(self, docnames: list[str]) -> None:
__('reading sources... '),
'purple',
len(docnames),
- self.app.verbosity,
+ self.config.verbosity,
):
# remove all inventory entries for that file
self.events.emit('env-purge-doc', self.env, docname)
@@ -589,7 +595,11 @@ def _read_parallel(self, docnames: list[str], nproc: int) -> None:
# create a status_iterator to step progressbar after reading a document
# (see: ``merge()`` function)
progress = status_iterator(
- chunks, __('reading sources... '), 'purple', len(chunks), self.app.verbosity
+ chunks,
+ __('reading sources... '),
+ 'purple',
+ len(chunks),
+ self.config.verbosity,
)
# clear all outdated docs at once
@@ -598,7 +608,7 @@ def _read_parallel(self, docnames: list[str], nproc: int) -> None:
self.env.clear_doc(docname)
def read_process(docs: list[str]) -> bytes:
- self.env.app = self.app
+ self.env._app = self._app
for docname in docs:
self.read_doc(docname, _cache=False)
# allow pickling self to send it back
@@ -606,7 +616,7 @@ def read_process(docs: list[str]) -> bytes:
def merge(docs: list[str], otherenv: bytes) -> None:
env = pickle.loads(otherenv)
- self.env.merge_info_from(docs, env, self.app)
+ self.env.merge_info_from(docs, env, self._app)
next(progress)
@@ -629,24 +639,34 @@ def read_doc(self, docname: str, *, _cache: bool = True) -> None:
if docutils_conf.is_file():
env.note_dependency(docutils_conf)
- filename = str(env.doc2path(docname))
- filetype = get_filetype(self.app.config.source_suffix, filename)
- publisher = self.env._registry.get_publisher(self.app, filetype)
- self.env.current_document._parser = publisher.parser
- # record_dependencies is mutable even though it is in settings,
- # explicitly re-initialise for each document
- publisher.settings.record_dependencies = DependencyList()
- with (
- sphinx_domains(env),
- rst.default_role(docname, self.config.default_role),
- ):
- # set up error_handler for the target document
- error_handler = _UnicodeDecodeErrorHandler(docname)
- codecs.register_error('sphinx', error_handler) # type: ignore[arg-type]
+ filename = env.doc2path(docname)
- publisher.set_source(source_path=filename)
- publisher.publish()
- doctree = publisher.document
+ # set up error_handler for the target document
+ # xref RemovedInSphinx90Warning
+ error_handler = _UnicodeDecodeErrorHandler(docname)
+ codecs.register_error('sphinx', error_handler) # type: ignore[arg-type]
+
+ # read the source file
+ content = filename.read_text(
+ encoding=env.settings['input_encoding'], errors='sphinx'
+ )
+
+ # TODO: move the "source-read" event to here.
+
+ filetype = get_filetype(self.config.source_suffix, filename)
+ parser = self._registry.create_source_parser(
+ filetype, config=self.config, env=env
+ )
+ doctree = _parse_str_to_doctree(
+ content,
+ filename=filename,
+ default_role=self.config.default_role,
+ default_settings=env.settings,
+ env=env,
+ events=self.events,
+ parser=parser,
+ transforms=self._registry.get_transforms(),
+ )
# store time of reading, for outdated files detection
env.all_docs[docname] = time.time_ns() // 1_000
@@ -744,14 +764,14 @@ def write_documents(self, docnames: Set[str]) -> None:
if self.parallel_ok:
# number of subprocesses is parallel-1 because the main process
# is busy loading doctrees and doing write_doc_serialized()
- self._write_parallel(sorted_docnames, nproc=self.app.parallel - 1)
+ self._write_parallel(sorted_docnames, nproc=self._app.parallel - 1)
else:
self._write_serial(sorted_docnames)
def _write_serial(self, docnames: Sequence[str]) -> None:
with (
nullcontext()
- if self.app._exception_on_warning
+ if self._app._exception_on_warning
else logging.pending_warnings()
):
for docname in status_iterator(
@@ -759,27 +779,19 @@ def _write_serial(self, docnames: Sequence[str]) -> None:
__('writing output... '),
'darkgreen',
len(docnames),
- self.app.verbosity,
+ self.config.verbosity,
):
- self.app.phase = BuildPhase.RESOLVING
- doctree = self.env.get_and_resolve_doctree(docname, self)
- self.app.phase = BuildPhase.WRITING
- self.write_doc_serialized(docname, doctree)
- self.write_doc(docname, doctree)
+ _write_docname(docname, env=self.env, builder=self, tags=self.tags)
def _write_parallel(self, docnames: Sequence[str], nproc: int) -> None:
def write_process(docs: list[tuple[str, nodes.document]]) -> None:
- self.app.phase = BuildPhase.WRITING
+ self.phase = BuildPhase.WRITING
for docname, doctree in docs:
self.write_doc(docname, doctree)
# warm up caches/compile templates using the first document
firstname, docnames = docnames[0], docnames[1:]
- self.app.phase = BuildPhase.RESOLVING
- doctree = self.env.get_and_resolve_doctree(firstname, self)
- self.app.phase = BuildPhase.WRITING
- self.write_doc_serialized(firstname, doctree)
- self.write_doc(firstname, doctree)
+ _write_docname(firstname, env=self.env, builder=self, tags=self.tags)
tasks = ParallelTasks(nproc)
chunks = make_chunks(docnames, nproc)
@@ -791,17 +803,19 @@ def write_process(docs: list[tuple[str, nodes.document]]) -> None:
__('writing output... '),
'darkgreen',
len(chunks),
- self.app.verbosity,
+ self.config.verbosity,
)
def on_chunk_done(args: list[tuple[str, nodes.document]], result: None) -> None:
next(progress)
- self.app.phase = BuildPhase.RESOLVING
+ self.phase = BuildPhase.RESOLVING
for chunk in chunks:
arg = []
for docname in chunk:
- doctree = self.env.get_and_resolve_doctree(docname, self)
+ doctree = self.env.get_and_resolve_doctree(
+ docname, self, tags=self.tags
+ )
self.write_doc_serialized(docname, doctree)
arg.append((docname, doctree))
tasks.add_task(write_process, arg, on_chunk_done)
@@ -867,6 +881,22 @@ def get_builder_config(self, option: str, default: str) -> Any:
return getattr(self.config, optname)
+def _write_docname(
+ docname: str,
+ /,
+ *,
+ env: BuildEnvironment,
+ builder: Builder,
+ tags: Tags,
+) -> None:
+ """Write a single document."""
+ builder.phase = BuildPhase.RESOLVING
+ doctree = env.get_and_resolve_doctree(docname, builder=builder, tags=tags)
+ builder.phase = BuildPhase.WRITING
+ builder.write_doc_serialized(docname, doctree)
+ builder.write_doc(docname, doctree)
+
+
class _UnicodeDecodeErrorHandler:
"""Custom error handler for open() that warns and replaces."""
@@ -874,20 +904,21 @@ def __init__(self, docname: str, /) -> None:
self.docname = docname
def __call__(self, error: UnicodeDecodeError) -> tuple[str, int]:
- line_start = error.object.rfind(b'\n', 0, error.start)
- line_end = error.object.find(b'\n', error.start)
+ obj = error.object
+ line_start = obj.rfind(b'\n', 0, error.start)
+ line_end = obj.find(b'\n', error.start)
if line_end == -1:
- line_end = len(error.object)
- line_num = error.object.count(b'\n', 0, error.start) + 1
+ line_end = len(obj)
+ line_num = obj.count(b'\n', 0, error.start) + 1
logger.warning(
- __('undecodable source characters, replacing with "?": %r'),
- (
- error.object[line_start + 1 : error.start]
- + b'>>>'
- + error.object[error.start : error.end]
- + b'<<<'
- + error.object[error.end : line_end]
+ __(
+ "undecodable source characters, replacing with '?': '%s>>>%s<<<%s'. "
+ 'This will become an error in Sphinx 9.0.'
+ # xref RemovedInSphinx90Warning
),
+ obj[line_start + 1 : error.start].decode(errors='backslashreplace'),
+ obj[error.start : error.end].decode(errors='backslashreplace'),
+ obj[error.end : line_end].decode(errors='backslashreplace'),
location=(self.docname, line_num),
)
return '?', error.end
diff --git a/sphinx/builders/_epub_base.py b/sphinx/builders/_epub_base.py
index a9527c3c0e3..3c7c93dfd1f 100644
--- a/sphinx/builders/_epub_base.py
+++ b/sphinx/builders/_epub_base.py
@@ -114,8 +114,8 @@ class NavPoint(NamedTuple):
def sphinx_smarty_pants(t: str, language: str = 'en') -> str:
t = t.replace('"', '"')
- t = smartquotes.educateDashesOldSchool(t) # type: ignore[no-untyped-call]
- t = smartquotes.educateQuotes(t, language) # type: ignore[no-untyped-call]
+ t = smartquotes.educateDashesOldSchool(t)
+ t = smartquotes.educateQuotes(t, language)
t = t.replace('"', '"')
return t
@@ -233,7 +233,11 @@ def get_toc(self) -> None:
and pre and post files not managed by Sphinx.
"""
doctree = self.env.get_and_resolve_doctree(
- self.config.master_doc, self, prune_toctrees=False, includehidden=True
+ self.config.master_doc,
+ self,
+ tags=self.tags,
+ prune_toctrees=False,
+ includehidden=True,
)
self.refnodes = self.get_refnodes(doctree, [])
master_dir = Path(self.config.master_doc).parent
@@ -279,16 +283,6 @@ def fix_ids(self, tree: nodes.document) -> None:
Some readers crash because they interpret the part as a
transport protocol specification.
"""
-
- def update_node_id(node: Element) -> None:
- """Update IDs of given *node*."""
- new_ids: list[str] = []
- for node_id in node['ids']:
- new_id = self.fix_fragment('', node_id)
- if new_id not in new_ids:
- new_ids.append(new_id)
- node['ids'] = new_ids
-
for reference in tree.findall(nodes.reference):
if 'refuri' in reference:
m = self.refuri_re.match(reference['refuri'])
@@ -298,66 +292,75 @@ def update_node_id(node: Element) -> None:
reference['refid'] = self.fix_fragment('', reference['refid'])
for target in tree.findall(nodes.target):
- update_node_id(target)
+ self._update_node_id(target)
next_node: Node = target.next_node(ascend=True)
if isinstance(next_node, nodes.Element):
- update_node_id(next_node)
+ self._update_node_id(next_node)
for desc_signature in tree.findall(addnodes.desc_signature):
- update_node_id(desc_signature)
+ self._update_node_id(desc_signature)
+
+ def _update_node_id(self, node: Element, /) -> None:
+ """Update IDs of given *node*."""
+ new_ids: list[str] = []
+ for node_id in node['ids']:
+ new_id = self.fix_fragment('', node_id)
+ if new_id not in new_ids:
+ new_ids.append(new_id)
+ node['ids'] = new_ids
+
+ @staticmethod
+ def _make_footnote_ref(doc: nodes.document, label: str) -> nodes.footnote_reference:
+ """Create a footnote_reference node with children"""
+ footnote_ref = nodes.footnote_reference('[#]_')
+ footnote_ref.append(nodes.Text(label))
+ doc.note_autofootnote_ref(footnote_ref)
+ return footnote_ref
+
+ @staticmethod
+ def _make_footnote(doc: nodes.document, label: str, uri: str) -> nodes.footnote:
+ """Create a footnote node with children"""
+ footnote = nodes.footnote(uri)
+ para = nodes.paragraph()
+ para.append(nodes.Text(uri))
+ footnote.append(para)
+ footnote.insert(0, nodes.label('', label))
+ doc.note_autofootnote(footnote)
+ return footnote
+
+ @staticmethod
+ def _footnote_spot(tree: nodes.document) -> tuple[Element, int]:
+ """Find or create a spot to place footnotes.
+
+ The function returns the tuple (parent, index).
+ """
+ # The code uses the following heuristic:
+ # a) place them after the last existing footnote
+ # b) place them after an (empty) Footnotes rubric
+ # c) create an empty Footnotes rubric at the end of the document
+ fns = list(tree.findall(nodes.footnote))
+ if fns:
+ fn = fns[-1]
+ return fn.parent, fn.parent.index(fn) + 1
+ for node in tree.findall(nodes.rubric):
+ if len(node) == 1 and node.astext() == FOOTNOTES_RUBRIC_NAME:
+ return node.parent, node.parent.index(node) + 1
+ doc = next(tree.findall(nodes.document))
+ rub = nodes.rubric()
+ rub.append(nodes.Text(FOOTNOTES_RUBRIC_NAME))
+ doc.append(rub)
+ return doc, doc.index(rub) + 1
def add_visible_links(
self, tree: nodes.document, show_urls: str = 'inline'
) -> None:
"""Add visible link targets for external links"""
-
- def make_footnote_ref(
- doc: nodes.document, label: str
- ) -> nodes.footnote_reference:
- """Create a footnote_reference node with children"""
- footnote_ref = nodes.footnote_reference('[#]_')
- footnote_ref.append(nodes.Text(label))
- doc.note_autofootnote_ref(footnote_ref)
- return footnote_ref
-
- def make_footnote(doc: nodes.document, label: str, uri: str) -> nodes.footnote:
- """Create a footnote node with children"""
- footnote = nodes.footnote(uri)
- para = nodes.paragraph()
- para.append(nodes.Text(uri))
- footnote.append(para)
- footnote.insert(0, nodes.label('', label))
- doc.note_autofootnote(footnote)
- return footnote
-
- def footnote_spot(tree: nodes.document) -> tuple[Element, int]:
- """Find or create a spot to place footnotes.
-
- The function returns the tuple (parent, index).
- """
- # The code uses the following heuristic:
- # a) place them after the last existing footnote
- # b) place them after an (empty) Footnotes rubric
- # c) create an empty Footnotes rubric at the end of the document
- fns = list(tree.findall(nodes.footnote))
- if fns:
- fn = fns[-1]
- return fn.parent, fn.parent.index(fn) + 1
- for node in tree.findall(nodes.rubric):
- if len(node) == 1 and node.astext() == FOOTNOTES_RUBRIC_NAME:
- return node.parent, node.parent.index(node) + 1
- doc = next(tree.findall(nodes.document))
- rub = nodes.rubric()
- rub.append(nodes.Text(FOOTNOTES_RUBRIC_NAME))
- doc.append(rub)
- return doc, doc.index(rub) + 1
-
if show_urls == 'no':
return
if show_urls == 'footnote':
doc = next(tree.findall(nodes.document))
- fn_spot, fn_idx = footnote_spot(tree)
+ fn_spot, fn_idx = self._footnote_spot(tree)
nr = 1
for node in list(tree.findall(nodes.reference)):
uri = node.get('refuri', '')
@@ -371,9 +374,9 @@ def footnote_spot(tree: nodes.document) -> tuple[Element, int]:
elif show_urls == 'footnote':
label = FOOTNOTE_LABEL_TEMPLATE % nr
nr += 1
- footnote_ref = make_footnote_ref(doc, label)
+ footnote_ref = self._make_footnote_ref(doc, label)
node.parent.insert(idx, footnote_ref)
- footnote = make_footnote(doc, label, uri)
+ footnote = self._make_footnote(doc, label, uri)
fn_spot.insert(fn_idx, footnote)
footnote_ref['refid'] = footnote['ids'][0]
footnote.add_backref(footnote_ref['ids'][0])
@@ -422,7 +425,7 @@ def copy_image_files_pil(self) -> None:
__('copying images... '),
'brown',
len(self.images),
- self.app.verbosity,
+ self.config.verbosity,
):
dest = self.images[src]
try:
@@ -766,7 +769,11 @@ def build_toc(self) -> None:
if self.config.epub_tocscope == 'default':
doctree = self.env.get_and_resolve_doctree(
- self.config.root_doc, self, prune_toctrees=False, includehidden=False
+ self.config.root_doc,
+ self,
+ tags=self.tags,
+ prune_toctrees=False,
+ includehidden=False,
)
refnodes = self.get_refnodes(doctree, [])
self.toc_add_files(refnodes)
diff --git a/sphinx/builders/changes.py b/sphinx/builders/changes.py
index aa926e0809c..6d38a6acbbc 100644
--- a/sphinx/builders/changes.py
+++ b/sphinx/builders/changes.py
@@ -23,14 +23,19 @@
class ChangesBuilder(Builder):
- """Write a summary with all versionadded/changed/deprecated/removed directives."""
+ """Write a summary with all version-related directives."""
name = 'changes'
epilog = __('The overview file is in %(outdir)s.')
def init(self) -> None:
self.create_template_bridge()
- theme_factory = HTMLThemeFactory(self.app)
+ theme_factory = HTMLThemeFactory(
+ confdir=self.confdir,
+ app=self._app,
+ config=self.config,
+ registry=self._registry,
+ )
self.theme = theme_factory.create('default')
self.templates.init(self, self.theme)
@@ -38,9 +43,13 @@ def get_outdated_docs(self) -> str:
return str(self.outdir)
typemap = {
+ 'version-added': 'added',
'versionadded': 'added',
+ 'version-changed': 'changed',
'versionchanged': 'changed',
+ 'version-deprecated': 'deprecated',
'deprecated': 'deprecated',
+ 'version-removed': 'removed',
'versionremoved': 'removed',
}
@@ -107,9 +116,13 @@ def write_documents(self, _docnames: Set[str]) -> None:
f.write(self.templates.render('changes/versionchanges.html', ctx))
hltext = [
+ f'.. version-added:: {version}',
f'.. versionadded:: {version}',
+ f'.. version-changed:: {version}',
f'.. versionchanged:: {version}',
+ f'.. version-deprecated:: {version}',
f'.. deprecated:: {version}',
+ f'.. version-removed:: {version}',
f'.. versionremoved:: {version}',
]
diff --git a/sphinx/builders/epub3.py b/sphinx/builders/epub3.py
index 2ea66c34b8b..e09b884046a 100644
--- a/sphinx/builders/epub3.py
+++ b/sphinx/builders/epub3.py
@@ -7,7 +7,6 @@
import html
import os
-import os.path
import re
import time
from typing import TYPE_CHECKING, NamedTuple
@@ -190,7 +189,11 @@ def build_navigation_doc(self) -> None:
if self.config.epub_tocscope == 'default':
doctree = self.env.get_and_resolve_doctree(
- self.config.root_doc, self, prune_toctrees=False, includehidden=False
+ self.config.root_doc,
+ self,
+ tags=self.tags,
+ prune_toctrees=False,
+ includehidden=False,
)
refnodes = self.get_refnodes(doctree, [])
self.toc_add_files(refnodes)
diff --git a/sphinx/builders/gettext.py b/sphinx/builders/gettext.py
index f5f26ffcc88..fc659d744d5 100644
--- a/sphinx/builders/gettext.py
+++ b/sphinx/builders/gettext.py
@@ -2,7 +2,6 @@
from __future__ import annotations
-import codecs
import operator
import os
import os.path
@@ -165,7 +164,7 @@ class I18nBuilder(Builder):
def init(self) -> None:
super().init()
self.env.set_versioning_method(self.versioning_method, self.config.gettext_uuid)
- self.tags = self.app.tags = I18nTags()
+ self.tags = self._app.tags = I18nTags()
self.catalogs: defaultdict[str, Catalog] = defaultdict(Catalog)
def get_target_uri(self, docname: str, typ: str | None = None) -> str:
@@ -212,7 +211,7 @@ def should_write(filepath: Path, new_content: str) -> bool:
if not filepath.exists():
return True
try:
- with codecs.open(str(filepath), encoding='utf-8') as oldpot:
+ with open(filepath, encoding='utf-8') as oldpot:
old_content = oldpot.read()
old_header_index = old_content.index('"POT-Creation-Date:')
new_header_index = new_content.index('"POT-Creation-Date:')
@@ -251,7 +250,7 @@ def init(self) -> None:
def _collect_templates(self) -> set[str]:
template_files = set()
for template_path in self.config.templates_path:
- tmpl_abs_path = self.app.srcdir / template_path
+ tmpl_abs_path = self.srcdir / template_path
for dirpath, _dirs, files in walk(tmpl_abs_path):
for fn in files:
if fn.endswith('.html'):
@@ -268,10 +267,14 @@ def _extract_from_template(self) -> None:
extract_translations = self.templates.environment.extract_translations
for template in status_iterator(
- files, __('reading templates... '), 'purple', len(files), self.app.verbosity
+ files,
+ __('reading templates... '),
+ 'purple',
+ len(files),
+ self.config.verbosity,
):
try:
- with codecs.open(template, encoding='utf-8') as f:
+ with open(template, encoding='utf-8') as f:
context = f.read()
for line, _meth, msg in extract_translations(context):
origin = MsgOrigin(source=template, line=line)
@@ -307,7 +310,7 @@ def finish(self) -> None:
__('writing message catalogs... '),
'darkgreen',
len(self.catalogs),
- self.app.verbosity,
+ self.config.verbosity,
operator.itemgetter(0),
):
# noop if config.gettext_compact is set
@@ -315,14 +318,14 @@ def finish(self) -> None:
context['messages'] = list(catalog)
template_path = [
- self.app.srcdir / rel_path for rel_path in self.config.templates_path
+ self.srcdir / rel_path for rel_path in self.config.templates_path
]
renderer = GettextRenderer(template_path, outdir=self.outdir)
content = renderer.render('message.pot.jinja', context)
pofn = self.outdir / f'{textdomain}.pot'
if should_write(pofn, content):
- with codecs.open(str(pofn), 'w', encoding='utf-8') as pofile:
+ with open(pofn, 'w', encoding='utf-8') as pofile:
pofile.write(content)
diff --git a/sphinx/builders/html/__init__.py b/sphinx/builders/html/__init__.py
index 5e6acdeaf9d..6146201fa9b 100644
--- a/sphinx/builders/html/__init__.py
+++ b/sphinx/builders/html/__init__.py
@@ -10,17 +10,16 @@
import re
import shutil
import sys
-import warnings
from pathlib import Path
from types import NoneType
from typing import TYPE_CHECKING
from urllib.parse import quote
+import docutils.parsers.rst
import docutils.readers.doctree
+import docutils.utils
+import jinja2.exceptions
from docutils import nodes
-from docutils.core import Publisher
-from docutils.frontend import OptionParser
-from docutils.io import DocTreeInput, StringOutput
from sphinx import __display_version__, package_dir
from sphinx import version_info as sphinx_version
@@ -48,7 +47,7 @@
from sphinx.util._timestamps import _format_rfc3339_microseconds
from sphinx.util._uri import is_url
from sphinx.util.display import progress_message, status_iterator
-from sphinx.util.docutils import new_document
+from sphinx.util.docutils import _get_settings, new_document
from sphinx.util.fileutil import copy_asset
from sphinx.util.i18n import format_date
from sphinx.util.inventory import InventoryFile
@@ -69,7 +68,6 @@
from typing import Any, TypeAlias
from docutils.nodes import Node
- from docutils.readers import Reader
from sphinx.application import Sphinx
from sphinx.config import Config
@@ -93,6 +91,10 @@
bool,
]
+_READER_TRANSFORMS = docutils.readers.doctree.Reader().get_transforms()
+_PARSER_TRANSFORMS = docutils.parsers.rst.Parser().get_transforms()
+_WRITER_TRANSFORMS = HTMLWriter(None).get_transforms() # type: ignore[arg-type]
+
def convert_locale_to_language_tag(locale: str | None) -> str | None:
"""Convert a locale string to a language tag (ex. en_US -> en-US).
@@ -150,19 +152,13 @@ def __init__(self, app: Sphinx, env: BuildEnvironment) -> None:
# JS files
self._js_files: list[_JavaScript] = []
- # Cached Publisher for writing doctrees to HTML
- reader: Reader[DocTreeInput] = docutils.readers.doctree.Reader(
- parser_name='restructuredtext'
- )
- pub = Publisher(
- reader=reader,
- parser=reader.parser,
- writer=HTMLWriter(self),
- source_class=DocTreeInput,
- destination=StringOutput(encoding='unicode'),
+ # Cached settings for render_partial()
+ self._settings = _get_settings(
+ docutils.readers.doctree.Reader,
+ docutils.parsers.rst.Parser,
+ HTMLWriter,
+ defaults={'output_encoding': 'unicode', 'traceback': True},
)
- pub.get_settings(output_encoding='unicode', traceback=True)
- self._publisher = pub
def init(self) -> None:
self.build_info = self.create_build_info()
@@ -227,7 +223,12 @@ def get_theme_config(self) -> tuple[str, dict[str, str | int | bool]]:
return self.config.html_theme, self.config.html_theme_options
def init_templates(self) -> None:
- theme_factory = HTMLThemeFactory(self.app)
+ theme_factory = HTMLThemeFactory(
+ confdir=self.confdir,
+ app=self._app,
+ config=self.config,
+ registry=self._registry,
+ )
theme_name, theme_options = self.get_theme_config()
self.theme = theme_factory.create(theme_name)
self.theme_options = theme_options
@@ -254,11 +255,6 @@ def init_highlighter(self) -> None:
self.dark_highlighter: PygmentsBridge | None
if dark_style is not None:
self.dark_highlighter = PygmentsBridge('html', dark_style)
- self.app.add_css_file(
- 'pygments_dark.css',
- media='(prefers-color-scheme: dark)',
- id='pygments_dark_css',
- )
else:
self.dark_highlighter = None
@@ -272,11 +268,18 @@ def css_files(self) -> list[_CascadingStyleSheet]:
def init_css_files(self) -> None:
self._css_files = []
self.add_css_file('pygments.css', priority=200)
+ if self.dark_highlighter is not None:
+ self.add_css_file(
+ 'pygments_dark.css',
+ priority=200,
+ media='(prefers-color-scheme: dark)',
+ id='pygments_dark_css',
+ )
for filename in self._get_style_filenames():
self.add_css_file(filename, priority=200)
- for filename, attrs in self.env._registry.css_files:
+ for filename, attrs in self._registry.css_files:
self.add_css_file(filename, **attrs)
for filename, attrs in self.get_builder_config('css_files', 'html'):
@@ -303,7 +306,7 @@ def init_js_files(self) -> None:
self.add_js_file('doctools.js', priority=200)
self.add_js_file('sphinx_highlight.js', priority=200)
- for filename, attrs in self.env._registry.js_files:
+ for filename, attrs in self._registry.js_files:
self.add_js_file(filename or '', **attrs)
for filename, attrs in self.get_builder_config('js_files', 'html'):
@@ -328,7 +331,7 @@ def math_renderer_name(self) -> str | None:
return name
else:
# not given: choose a math_renderer from registered ones as possible
- renderers = list(self.env._registry.html_inline_math_renderers)
+ renderers = list(self._registry.html_inline_math_renderers)
if len(renderers) == 1:
# only default math_renderer (mathjax) is registered
return renderers[0]
@@ -421,12 +424,19 @@ def render_partial(self, node: Node | None) -> dict[str, str]:
"""Utility: Render a lone doctree node."""
if node is None:
return {'fragment': ''}
-
- doc = new_document('')
+ doc = docutils.utils.new_document('', self._settings)
doc.append(node)
- self._publisher.set_source(doc)
- self._publisher.publish()
- return self._publisher.writer.parts
+ doc.transformer.add_transforms(_READER_TRANSFORMS)
+ doc.transformer.add_transforms(_PARSER_TRANSFORMS)
+ doc.transformer.add_transforms(_WRITER_TRANSFORMS)
+ doc.transformer.apply_transforms()
+ visitor: HTML5Translator = self.create_translator(doc, self) # type: ignore[assignment]
+ doc.walkabout(visitor)
+ parts = {
+ 'fragment': ''.join(visitor.fragment),
+ 'title': ''.join(visitor.title),
+ }
+ return parts
def prepare_writing(self, docnames: Set[str]) -> None:
# create the search indexer
@@ -443,16 +453,9 @@ def prepare_writing(self, docnames: Set[str]) -> None:
)
self.load_indexer(docnames)
- self.docwriter = HTMLWriter(self)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- self.docsettings: Any = OptionParser(
- defaults=self.env.settings,
- components=(self.docwriter,),
- read_config_files=True,
- ).get_default_values()
+ self.docsettings = _get_settings(
+ HTMLWriter, defaults=self.env.settings, read_config_files=True
+ )
self.docsettings.compact_lists = bool(self.config.html_compact_lists)
# determine the additional indices to include
@@ -516,9 +519,9 @@ def prepare_writing(self, docnames: Set[str]) -> None:
))
# add assets registered after ``Builder.init()``.
- for css_filename, attrs in self.env._registry.css_files:
+ for css_filename, attrs in self._registry.css_files:
self.add_css_file(css_filename, **attrs)
- for js_filename, attrs in self.env._registry.js_files:
+ for js_filename, attrs in self._registry.js_files:
self.add_js_file(js_filename or '', **attrs)
# back up _css_files and _js_files to allow adding CSS/JS files to a specific page.
@@ -659,7 +662,6 @@ def copy_assets(self) -> None:
self.finish_tasks.join()
def write_doc(self, docname: str, doctree: nodes.document) -> None:
- destination = StringOutput(encoding='utf-8')
doctree.settings = self.docsettings
self.secnumbers = self.env.toc_secnumbers.get(docname, {})
@@ -667,13 +669,13 @@ def write_doc(self, docname: str, doctree: nodes.document) -> None:
self.imgpath = relative_uri(self.get_target_uri(docname), '_images')
self.dlpath = relative_uri(self.get_target_uri(docname), '_downloads')
self.current_docname = docname
- self.docwriter.write(doctree, destination)
- self.docwriter.assemble_parts()
- body = self.docwriter.parts['fragment']
- metatags = self.docwriter.clean_meta
+ visitor: HTML5Translator = self.create_translator(doctree, self) # type: ignore[assignment]
+ doctree.walkabout(visitor)
+ body = ''.join(visitor.fragment)
+ clean_meta = ''.join(visitor.meta[2:])
- ctx = self.get_doc_context(docname, body, metatags)
- ctx['has_maths_elements'] = self.docwriter._has_maths_elements
+ ctx = self.get_doc_context(docname, body, clean_meta)
+ ctx['has_maths_elements'] = getattr(visitor, '_has_maths_elements', False)
self.handle_page(docname, ctx, event_arg=doctree)
def write_doc_serialized(self, docname: str, doctree: nodes.document) -> None:
@@ -779,7 +781,7 @@ def copy_image_files(self) -> None:
__('copying images... '),
'brown',
len(self.images),
- self.app.verbosity,
+ self.config.verbosity,
stringify_func=stringify_func,
):
dest = self.images[src]
@@ -806,7 +808,7 @@ def to_relpath(f: str) -> str:
__('copying downloadable files... '),
'brown',
len(self.env.dlfiles),
- self.app.verbosity,
+ self.config.verbosity,
stringify_func=to_relpath,
):
try:
@@ -1028,7 +1030,7 @@ def _get_local_toctree(
if kwargs.get('maxdepth') == '': # NoQA: PLC1901
kwargs.pop('maxdepth')
toctree = global_toctree_for_doc(
- self.env, docname, self, collapse=collapse, **kwargs
+ self.env, docname, self, tags=self.tags, collapse=collapse, **kwargs
)
return self.render_partial(toctree)['fragment']
@@ -1038,31 +1040,30 @@ def get_output_path(self, page_name: str, /) -> Path:
def get_outfilename(self, pagename: str) -> _StrPath:
return _StrPath(self.get_output_path(pagename))
- def add_sidebars(self, pagename: str, ctx: dict[str, Any]) -> None:
- def has_wildcard(pattern: str) -> bool:
- return any(char in pattern for char in '*?[')
-
+ def _get_sidebars(self, pagename: str, /) -> tuple[str, ...]:
matched = None
# default sidebars settings for selected theme
- sidebars = list(self.theme.sidebar_templates)
+ sidebars = self.theme.sidebar_templates
# user sidebar settings
html_sidebars = self.get_builder_config('sidebars', 'html')
msg = __('page %s matches two patterns in html_sidebars: %r and %r')
for pattern, pat_sidebars in html_sidebars.items():
if patmatch(pagename, pattern):
- if matched and has_wildcard(pattern):
+ if matched and _has_wildcard(pattern):
# warn if both patterns contain wildcards
- if has_wildcard(matched):
+ if _has_wildcard(matched):
logger.warning(msg, pagename, matched, pattern)
# else the already matched pattern is more specific
# than the present one, because it contains no wildcard
continue
matched = pattern
- sidebars = pat_sidebars
+ sidebars = tuple(pat_sidebars)
+ return sidebars
- ctx['sidebars'] = list(sidebars)
+ def add_sidebars(self, pagename: str, ctx: dict[str, Any]) -> None:
+ ctx['sidebars'] = list(self._get_sidebars(pagename))
# --------- these are overwritten by the serialization builder
@@ -1121,13 +1122,13 @@ def hasdoc(name: str) -> bool:
ctx['hasdoc'] = hasdoc
ctx['toctree'] = lambda **kwargs: self._get_local_toctree(pagename, **kwargs)
- self.add_sidebars(pagename, ctx)
+ ctx['sidebars'] = list(self._get_sidebars(pagename))
ctx.update(addctx)
# 'blah.html' should have content_root = './' not ''.
ctx['content_root'] = (f'..{SEP}' * default_baseuri.count(SEP)) or f'.{SEP}'
- outdir = self.app.outdir
+ outdir = self.outdir
def css_tag(css: _CascadingStyleSheet) -> str:
attrs = [
@@ -1221,6 +1222,19 @@ def js_tag(js: _JavaScript | str) -> str:
)
return
except Exception as exc:
+ if (
+ isinstance(exc, jinja2.exceptions.UndefinedError)
+ and exc.message == "'style' is undefined"
+ ):
+ msg = __(
+ "The '%s' theme does not support this version of Sphinx, "
+ "because it uses the 'style' field in HTML templates, "
+ 'which was was deprecated in Sphinx 5.1 and removed in Sphinx 7.0. '
+ "The theme must be updated to use the 'styles' field instead. "
+ 'See https://www.sphinx-doc.org/en/master/development/html_themes/templating.html#styles'
+ )
+ raise ThemeError(msg % self.config.html_theme) from None
+
msg = __('An error happened in rendering the page %s.\nReason: %r') % (
pagename,
exc,
@@ -1277,6 +1291,10 @@ def dump_search_index(self) -> None:
Path(search_index_tmp).replace(search_index_path)
+def _has_wildcard(pattern: str, /) -> bool:
+ return any(char in pattern for char in '*?[')
+
+
def convert_html_css_files(app: Sphinx, config: Config) -> None:
"""Convert string styled html_css_files to tuple styled one."""
html_css_files: list[tuple[str, dict[str, str]]] = []
diff --git a/sphinx/builders/latex/__init__.py b/sphinx/builders/latex/__init__.py
index 5aeafca8bfd..69c11d515b8 100644
--- a/sphinx/builders/latex/__init__.py
+++ b/sphinx/builders/latex/__init__.py
@@ -4,12 +4,9 @@
import os
import os.path
-import warnings
from pathlib import Path
from typing import TYPE_CHECKING
-from docutils.frontend import OptionParser
-
import sphinx.builders.latex.nodes # NoQA: F401 # Workaround: import this before writer to avoid ImportError
from sphinx import addnodes, highlighting, package_dir
from sphinx._cli.util.colour import darkgreen
@@ -27,7 +24,7 @@
from sphinx.locale import _, __
from sphinx.util import logging, texescape
from sphinx.util.display import progress_message, status_iterator
-from sphinx.util.docutils import SphinxFileOutput, new_document
+from sphinx.util.docutils import _get_settings, new_document
from sphinx.util.fileutil import copy_asset_file
from sphinx.util.i18n import format_date
from sphinx.util.nodes import inline_all_toctrees
@@ -132,7 +129,7 @@ def init(self) -> None:
self.context: dict[str, Any] = {}
self.docnames: Iterable[str] = {}
self.document_data: list[tuple[str, str, str, str, str, bool]] = []
- self.themes = ThemeFactory(self.app)
+ self.themes = ThemeFactory(srcdir=self.srcdir, config=self.config)
texescape.init()
self.init_context()
@@ -211,7 +208,7 @@ def init_context(self) -> None:
def update_context(self) -> None:
"""Update template variables for .tex file just before writing."""
# Apply extension settings to context
- registry = self.env._registry
+ registry = self._registry
self.context['packages'] = registry.latex_packages
self.context['packages_after_hyperref'] = registry.latex_packages_after_hyperref
@@ -300,16 +297,9 @@ def copy_assets(self) -> None:
self.copy_latex_additional_files()
def write_documents(self, _docnames: Set[str]) -> None:
- docwriter = LaTeXWriter(self)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- docsettings: Any = OptionParser(
- defaults=self.env.settings,
- components=(docwriter,),
- read_config_files=True,
- ).get_default_values()
+ docsettings = _get_settings(
+ LaTeXWriter, defaults=self.env.settings, read_config_files=True
+ )
for entry in self.document_data:
docname, targetname, title, author, themename = entry[:5]
@@ -317,11 +307,6 @@ def write_documents(self, _docnames: Set[str]) -> None:
toctree_only = False
if len(entry) > 5:
toctree_only = entry[5]
- destination = SphinxFileOutput(
- destination_path=self.outdir / targetname,
- encoding='utf-8',
- overwrite_if_changed=True,
- )
with progress_message(__('processing %s') % targetname, nonl=False):
doctree = self.env.get_doctree(docname)
toctree = next(doctree.findall(addnodes.toctree), None)
@@ -352,8 +337,16 @@ def write_documents(self, _docnames: Set[str]) -> None:
docsettings._docclass = theme.name
doctree.settings = docsettings
- docwriter.theme = theme
- docwriter.write(doctree, destination)
+ visitor: LaTeXTranslator = self.create_translator(doctree, self, theme) # type: ignore[assignment]
+ doctree.walkabout(visitor)
+ output = visitor.astext()
+ destination_path = self.outdir / targetname
+ # https://github.com/sphinx-doc/sphinx/issues/4362
+ if (
+ not destination_path.is_file()
+ or destination_path.read_bytes() != output.encode()
+ ):
+ destination_path.write_text(output, encoding='utf-8')
def get_contentsname(self, indexfile: str) -> str:
tree = self.env.get_doctree(indexfile)
@@ -481,7 +474,7 @@ def copy_image_files(self) -> None:
__('copying images... '),
'brown',
len(self.images),
- self.app.verbosity,
+ self.config.verbosity,
stringify_func=stringify_func,
):
dest = self.images[src]
@@ -513,9 +506,9 @@ def write_message_catalog(self) -> None:
formats = self.config.numfig_format
context = {
'addtocaptions': r'\@iden',
- 'figurename': formats.get('figure', '').split('%s', 1),
- 'tablename': formats.get('table', '').split('%s', 1),
- 'literalblockname': formats.get('code-block', '').split('%s', 1),
+ 'figurename': formats.get('figure', '').split('%s', maxsplit=1),
+ 'tablename': formats.get('table', '').split('%s', maxsplit=1),
+ 'literalblockname': formats.get('code-block', '').split('%s', maxsplit=1),
}
if self.context['babel'] or self.context['polyglossia']:
diff --git a/sphinx/builders/latex/theming.py b/sphinx/builders/latex/theming.py
index f55c077c9ca..df8eb48ec4f 100644
--- a/sphinx/builders/latex/theming.py
+++ b/sphinx/builders/latex/theming.py
@@ -12,7 +12,6 @@
if TYPE_CHECKING:
from pathlib import Path
- from sphinx.application import Sphinx
from sphinx.config import Config
logger = logging.getLogger(__name__)
@@ -102,11 +101,11 @@ def __init__(self, name: str, filename: Path) -> None:
class ThemeFactory:
"""A factory class for LaTeX Themes."""
- def __init__(self, app: Sphinx) -> None:
+ def __init__(self, *, srcdir: Path, config: Config) -> None:
self.themes: dict[str, Theme] = {}
- self.theme_paths = [app.srcdir / p for p in app.config.latex_theme_path]
- self.config = app.config
- self.load_builtin_themes(app.config)
+ self.theme_paths = [srcdir / p for p in config.latex_theme_path]
+ self.config = config
+ self.load_builtin_themes(config)
def load_builtin_themes(self, config: Config) -> None:
"""Load built-in themes."""
diff --git a/sphinx/builders/latex/transforms.py b/sphinx/builders/latex/transforms.py
index 9fa180a7dd9..2d7cbb80809 100644
--- a/sphinx/builders/latex/transforms.py
+++ b/sphinx/builders/latex/transforms.py
@@ -40,7 +40,7 @@ class FootnoteDocnameUpdater(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
matcher = NodeMatcher(*self.TARGET_NODES)
for node in matcher.findall(self.document):
- node['docname'] = self.env.docname
+ node['docname'] = self.env.current_document.docname
class SubstitutionDefinitionsRemover(SphinxPostTransform):
@@ -420,7 +420,7 @@ def depart_caption(self, node: nodes.caption) -> None:
self.unrestrict(node)
def visit_title(self, node: nodes.title) -> None:
- if isinstance(node.parent, nodes.section | nodes.table):
+ if isinstance(node.parent, (nodes.section, nodes.table)):
self.restrict(node)
def depart_title(self, node: nodes.title) -> None:
diff --git a/sphinx/builders/linkcheck.py b/sphinx/builders/linkcheck.py
index 93ab2e78b00..d3ce638fea4 100644
--- a/sphinx/builders/linkcheck.py
+++ b/sphinx/builders/linkcheck.py
@@ -25,6 +25,7 @@
from sphinx._cli.util.colour import darkgray, darkgreen, purple, red, turquoise
from sphinx.builders.dummy import DummyBuilder
+from sphinx.errors import ConfigError
from sphinx.locale import __
from sphinx.transforms.post_transforms import SphinxPostTransform
from sphinx.util import logging, requests
@@ -70,6 +71,15 @@ class _Status(StrEnum):
DEFAULT_DELAY = 60.0
+@object.__new__
+class _SENTINEL_LAR:
+ def __repr__(self) -> str:
+ return '_SENTINEL_LAR'
+
+ def __reduce__(self) -> str:
+ return self.__class__.__name__
+
+
class CheckExternalLinksBuilder(DummyBuilder):
"""Checks for broken external links."""
@@ -97,7 +107,7 @@ def finish(self) -> None:
self.process_result(result)
if self.broken_hyperlinks or self.timed_out_hyperlinks:
- self.app.statuscode = 1
+ self._app.statuscode = 1
def process_result(self, result: CheckResult) -> None:
filename = self.env.doc2path(result.docname, False)
@@ -129,7 +139,7 @@ def process_result(self, result: CheckResult) -> None:
case _Status.WORKING:
logger.info(darkgreen('ok ') + f'{res_uri}{result.message}') # NoQA: G003
case _Status.TIMEOUT:
- if self.app.quiet:
+ if self.config.verbosity < 0:
msg = 'timeout ' + f'{res_uri}{result.message}'
logger.warning(msg, location=(result.docname, result.lineno))
else:
@@ -144,7 +154,7 @@ def process_result(self, result: CheckResult) -> None:
)
self.timed_out_hyperlinks += 1
case _Status.BROKEN:
- if self.app.quiet:
+ if self.config.verbosity < 0:
logger.warning(
__('broken link: %s (%s)'),
res_uri,
@@ -178,7 +188,7 @@ def process_result(self, result: CheckResult) -> None:
text = 'with unknown code'
linkstat['text'] = text
redirection = f'{text} to {result.message}'
- if self.config.linkcheck_allowed_redirects:
+ if self.config.linkcheck_allowed_redirects is not _SENTINEL_LAR:
msg = f'redirect {res_uri} - {redirection}'
logger.warning(msg, location=(result.docname, result.lineno))
else:
@@ -258,11 +268,11 @@ def _add_uri(self, uri: str, node: nodes.Element) -> None:
:param uri: URI to add
:param node: A node class where the URI was found
"""
- builder = cast('CheckExternalLinksBuilder', self.app.builder)
+ builder = cast('CheckExternalLinksBuilder', self.env._app.builder)
hyperlinks = builder.hyperlinks
- docname = self.env.docname
+ docname = self.env.current_document.docname
- if newuri := self.app.events.emit_firstresult('linkcheck-process-uri', uri):
+ if newuri := self.env.events.emit_firstresult('linkcheck-process-uri', uri):
uri = newuri
try:
@@ -721,6 +731,8 @@ def handle_starttag(self, tag: Any, attrs: Any) -> None:
def _allowed_redirect(
url: str, new_url: str, allowed_redirects: dict[re.Pattern[str], re.Pattern[str]]
) -> bool:
+ if allowed_redirects is _SENTINEL_LAR:
+ return False
return any(
from_url.match(url) and to_url.match(new_url)
for from_url, to_url in allowed_redirects.items()
@@ -748,20 +760,26 @@ def rewrite_github_anchor(app: Sphinx, uri: str) -> str | None:
def compile_linkcheck_allowed_redirects(app: Sphinx, config: Config) -> None:
- """Compile patterns in linkcheck_allowed_redirects to the regexp objects."""
- linkcheck_allowed_redirects = app.config.linkcheck_allowed_redirects
- for url, pattern in list(linkcheck_allowed_redirects.items()):
+ """Compile patterns to the regexp objects."""
+ if config.linkcheck_allowed_redirects is _SENTINEL_LAR:
+ return
+ if not isinstance(config.linkcheck_allowed_redirects, dict):
+ msg = __(
+ f'Invalid value `{config.linkcheck_allowed_redirects!r}` in '
+ 'linkcheck_allowed_redirects. Expected a dictionary.'
+ )
+ raise ConfigError(msg)
+ allowed_redirects = {}
+ for url, pattern in config.linkcheck_allowed_redirects.items():
try:
- linkcheck_allowed_redirects[re.compile(url)] = re.compile(pattern)
+ allowed_redirects[re.compile(url)] = re.compile(pattern)
except re.error as exc:
logger.warning(
__('Failed to compile regex in linkcheck_allowed_redirects: %r %s'),
exc.pattern,
exc.msg,
)
- finally:
- # Remove the original regexp-string
- linkcheck_allowed_redirects.pop(url)
+ config.linkcheck_allowed_redirects = allowed_redirects
def setup(app: Sphinx) -> ExtensionMetadata:
@@ -772,7 +790,9 @@ def setup(app: Sphinx) -> ExtensionMetadata:
app.add_config_value(
'linkcheck_exclude_documents', [], '', types=frozenset({list, tuple})
)
- app.add_config_value('linkcheck_allowed_redirects', {}, '', types=frozenset({dict}))
+ app.add_config_value(
+ 'linkcheck_allowed_redirects', _SENTINEL_LAR, '', types=frozenset({dict})
+ )
app.add_config_value('linkcheck_auth', [], '', types=frozenset({list, tuple}))
app.add_config_value('linkcheck_request_headers', {}, '', types=frozenset({dict}))
app.add_config_value('linkcheck_retries', 1, '', types=frozenset({int}))
@@ -799,7 +819,8 @@ def setup(app: Sphinx) -> ExtensionMetadata:
app.add_event('linkcheck-process-uri')
- app.connect('config-inited', compile_linkcheck_allowed_redirects, priority=800)
+ # priority 900 to happen after ``check_confval_types()``
+ app.connect('config-inited', compile_linkcheck_allowed_redirects, priority=900)
# FIXME: Disable URL rewrite handler for github.com temporarily.
# See: https://github.com/sphinx-doc/sphinx/issues/9435
diff --git a/sphinx/builders/manpage.py b/sphinx/builders/manpage.py
index 7b62b7dca5a..d30e697d292 100644
--- a/sphinx/builders/manpage.py
+++ b/sphinx/builders/manpage.py
@@ -2,25 +2,25 @@
from __future__ import annotations
-import warnings
from typing import TYPE_CHECKING
-from docutils.frontend import OptionParser
-from docutils.io import FileOutput
-
from sphinx import addnodes
from sphinx._cli.util.colour import darkgreen
from sphinx.builders import Builder
from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.display import progress_message
+from sphinx.util.docutils import _get_settings
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.osutil import ensuredir, make_filename_from_project
-from sphinx.writers.manpage import ManualPageTranslator, ManualPageWriter
+from sphinx.writers.manpage import (
+ ManualPageTranslator,
+ ManualPageWriter,
+ NestedInlineTransform,
+)
if TYPE_CHECKING:
from collections.abc import Set
- from typing import Any
from sphinx.application import Sphinx
from sphinx.config import Config
@@ -37,7 +37,7 @@ class ManualPageBuilder(Builder):
epilog = __('The manual pages are in %(outdir)s.')
default_translator_class = ManualPageTranslator
- supported_image_types: list[str] = []
+ supported_image_types = []
def init(self) -> None:
if not self.config.man_pages:
@@ -53,16 +53,9 @@ def get_target_uri(self, docname: str, typ: str | None = None) -> str:
@progress_message(__('writing'))
def write_documents(self, _docnames: Set[str]) -> None:
- docwriter = ManualPageWriter(self)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- docsettings: Any = OptionParser(
- defaults=self.env.settings,
- components=(docwriter,),
- read_config_files=True,
- ).get_default_values()
+ docsettings = _get_settings(
+ ManualPageWriter, defaults=self.env.settings, read_config_files=True
+ )
for info in self.config.man_pages:
docname, name, description, authors, section = info
@@ -91,10 +84,6 @@ def write_documents(self, _docnames: Set[str]) -> None:
targetname = f'{name}.{section}'
logger.info('%s { ', darkgreen(targetname))
- destination = FileOutput(
- destination_path=self.outdir / targetname,
- encoding='utf-8',
- )
tree = self.env.get_doctree(docname)
docnames: set[str] = set()
@@ -108,7 +97,11 @@ def write_documents(self, _docnames: Set[str]) -> None:
for pendingnode in largetree.findall(addnodes.pending_xref):
pendingnode.replace_self(pendingnode.children)
- docwriter.write(largetree, destination)
+ transform = NestedInlineTransform(largetree)
+ transform.apply()
+ visitor: ManualPageTranslator = self.create_translator(largetree, self) # type: ignore[assignment]
+ largetree.walkabout(visitor)
+ (self.outdir / targetname).write_text(visitor.astext(), encoding='utf-8')
def finish(self) -> None:
pass
diff --git a/sphinx/builders/singlehtml.py b/sphinx/builders/singlehtml.py
index c95603927ce..1888f6679d1 100644
--- a/sphinx/builders/singlehtml.py
+++ b/sphinx/builders/singlehtml.py
@@ -84,7 +84,7 @@ def _get_local_toctree(
if kwargs.get('maxdepth') == '': # NoQA: PLC1901
kwargs.pop('maxdepth')
toctree = global_toctree_for_doc(
- self.env, docname, self, collapse=collapse, **kwargs
+ self.env, docname, self, tags=self.tags, collapse=collapse, **kwargs
)
return self.render_partial(toctree)['fragment']
@@ -141,7 +141,7 @@ def assemble_toc_fignumbers(
def get_doc_context(self, docname: str, body: str, metatags: str) -> dict[str, Any]:
# no relation links...
toctree = global_toctree_for_doc(
- self.env, self.config.root_doc, self, collapse=False
+ self.env, self.config.root_doc, self, tags=self.tags, collapse=False
)
# if there is no toctree, toc is None
if toctree:
diff --git a/sphinx/builders/texinfo.py b/sphinx/builders/texinfo.py
index 79afafab84d..ba3cd0c0d10 100644
--- a/sphinx/builders/texinfo.py
+++ b/sphinx/builders/texinfo.py
@@ -3,12 +3,9 @@
from __future__ import annotations
import os.path
-import warnings
from typing import TYPE_CHECKING
from docutils import nodes
-from docutils.frontend import OptionParser
-from docutils.io import FileOutput
from sphinx import addnodes, package_dir
from sphinx._cli.util.colour import darkgreen
@@ -18,14 +15,13 @@
from sphinx.locale import _, __
from sphinx.util import logging
from sphinx.util.display import progress_message, status_iterator
-from sphinx.util.docutils import new_document
+from sphinx.util.docutils import _get_settings, new_document
from sphinx.util.nodes import inline_all_toctrees
from sphinx.util.osutil import SEP, copyfile, ensuredir, make_filename_from_project
from sphinx.writers.texinfo import TexinfoTranslator, TexinfoWriter
if TYPE_CHECKING:
from collections.abc import Iterable, Set
- from typing import Any
from docutils.nodes import Node
@@ -106,10 +102,6 @@ def write_documents(self, _docnames: Set[str]) -> None:
toctree_only = False
if len(entry) > 7:
toctree_only = entry[7]
- destination = FileOutput(
- destination_path=self.outdir / targetname,
- encoding='utf-8',
- )
with progress_message(__('processing %s') % targetname, nonl=False):
appendices = self.config.texinfo_appendices or []
doctree = self.assemble_doctree(
@@ -118,16 +110,9 @@ def write_documents(self, _docnames: Set[str]) -> None:
with progress_message(__('writing')):
self.post_process_images(doctree)
- docwriter = TexinfoWriter(self)
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- settings: Any = OptionParser(
- defaults=self.env.settings,
- components=(docwriter,),
- read_config_files=True,
- ).get_default_values()
+ settings = _get_settings(
+ TexinfoWriter, defaults=self.env.settings, read_config_files=True
+ )
settings.author = author
settings.title = title
settings.texinfo_filename = targetname[:-5] + '.info'
@@ -137,7 +122,10 @@ def write_documents(self, _docnames: Set[str]) -> None:
settings.texinfo_dir_description = description or ''
settings.docname = docname
doctree.settings = settings
- docwriter.write(doctree, destination)
+ visitor: TexinfoTranslator = self.create_translator(doctree, self) # type: ignore[assignment]
+ doctree.walkabout(visitor)
+ visitor.finish()
+ (self.outdir / targetname).write_text(visitor.output, encoding='utf-8')
self.copy_image_files(targetname[:-5])
def assemble_doctree(
@@ -198,7 +186,7 @@ def copy_image_files(self, targetname: str) -> None:
__('copying images... '),
'brown',
len(self.images),
- self.app.verbosity,
+ self.config.verbosity,
stringify_func=stringify_func,
):
dest = self.images[src]
diff --git a/sphinx/builders/text.py b/sphinx/builders/text.py
index bd7731fdb49..186e71e79da 100644
--- a/sphinx/builders/text.py
+++ b/sphinx/builders/text.py
@@ -4,16 +4,14 @@
from typing import TYPE_CHECKING
-from docutils.io import StringOutput
-
from sphinx.builders import Builder
from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.osutil import _last_modified_time
-from sphinx.writers.text import TextTranslator, TextWriter
+from sphinx.writers.text import TextTranslator
if TYPE_CHECKING:
- from collections.abc import Iterator, Set
+ from collections.abc import Iterator
from docutils import nodes
@@ -59,19 +57,16 @@ def get_outdated_docs(self) -> Iterator[str]:
def get_target_uri(self, docname: str, typ: str | None = None) -> str:
return ''
- def prepare_writing(self, docnames: Set[str]) -> None:
- self.writer = TextWriter(self)
-
def write_doc(self, docname: str, doctree: nodes.document) -> None:
self.current_docname = docname
self.secnumbers = self.env.toc_secnumbers.get(docname, {})
- destination = StringOutput(encoding='utf-8')
- self.writer.write(doctree, destination)
+ visitor: TextTranslator = self.create_translator(doctree, self) # type: ignore[assignment]
+ doctree.walkabout(visitor)
+ output = visitor.body
out_file_name = self.outdir / (docname + self.out_suffix)
out_file_name.parent.mkdir(parents=True, exist_ok=True)
try:
- with open(out_file_name, 'w', encoding='utf-8') as f:
- f.write(self.writer.output)
+ out_file_name.write_text(output, encoding='utf-8')
except OSError as err:
logger.warning(__('error writing file %s: %s'), out_file_name, err)
diff --git a/sphinx/builders/xml.py b/sphinx/builders/xml.py
index fab0f7cb5c4..cf86ea5afef 100644
--- a/sphinx/builders/xml.py
+++ b/sphinx/builders/xml.py
@@ -5,17 +5,15 @@
from typing import TYPE_CHECKING
from docutils import nodes
-from docutils.io import StringOutput
from docutils.writers.docutils_xml import XMLTranslator
from sphinx.builders import Builder
from sphinx.locale import __
from sphinx.util import logging
from sphinx.util.osutil import _last_modified_time
-from sphinx.writers.xml import PseudoXMLWriter, XMLWriter
if TYPE_CHECKING:
- from collections.abc import Iterator, Set
+ from collections.abc import Iterator
from sphinx.application import Sphinx
from sphinx.util.typing import ExtensionMetadata
@@ -33,8 +31,6 @@ class XMLBuilder(Builder):
out_suffix = '.xml'
allow_parallel = True
- _writer_class: type[XMLWriter | PseudoXMLWriter] = XMLWriter
- writer: XMLWriter | PseudoXMLWriter
default_translator_class = XMLTranslator
def init(self) -> None:
@@ -61,9 +57,6 @@ def get_outdated_docs(self) -> Iterator[str]:
def get_target_uri(self, docname: str, typ: str | None = None) -> str:
return docname
- def prepare_writing(self, docnames: Set[str]) -> None:
- self.writer = self._writer_class(self)
-
def write_doc(self, docname: str, doctree: nodes.document) -> None:
# work around multiple string % tuple issues in docutils;
# replace tuples in attribute values with lists
@@ -79,16 +72,25 @@ def write_doc(self, docname: str, doctree: nodes.document) -> None:
for i, val in enumerate(value):
if isinstance(val, tuple):
value[i] = list(val)
- destination = StringOutput(encoding='utf-8')
- self.writer.write(doctree, destination)
+ output = self._translate(doctree)
out_file_name = self.outdir / (docname + self.out_suffix)
out_file_name.parent.mkdir(parents=True, exist_ok=True)
try:
- with open(out_file_name, 'w', encoding='utf-8') as f:
- f.write(self.writer.output)
+ out_file_name.write_text(output, encoding='utf-8')
except OSError as err:
logger.warning(__('error writing file %s: %s'), out_file_name, err)
+ def _translate(self, doctree: nodes.document) -> str:
+ doctree.settings.newlines = doctree.settings.indents = self.config.xml_pretty
+ doctree.settings.xml_declaration = True
+ doctree.settings.doctype_declaration = True
+
+ # copied from docutils.writers.docutils_xml.Writer.translate()
+ # so that we can override the translator class
+ visitor: XMLTranslator = self.create_translator(doctree)
+ doctree.walkabout(visitor)
+ return ''.join(visitor.output)
+
def finish(self) -> None:
pass
@@ -102,7 +104,8 @@ class PseudoXMLBuilder(XMLBuilder):
out_suffix = '.pseudoxml'
- _writer_class = PseudoXMLWriter
+ def _translate(self, doctree: nodes.document) -> str:
+ return doctree.pformat()
def setup(app: Sphinx) -> ExtensionMetadata:
diff --git a/sphinx/cmd/build.py b/sphinx/cmd/build.py
index 11a70df0c6c..58f3ad26746 100644
--- a/sphinx/cmd/build.py
+++ b/sphinx/cmd/build.py
@@ -371,14 +371,14 @@ def _parse_confoverrides(
val: Any
for val in define:
try:
- key, val = val.split('=', 1)
+ key, _, val = val.partition('=')
except ValueError:
parser.error(__('-D option argument must be in the form name=value'))
confoverrides[key] = val
for val in htmldefine:
try:
- key, val = val.split('=')
+ key, _, val = val.partition('=')
except ValueError:
parser.error(__('-A option argument must be in the form name=value'))
with contextlib.suppress(ValueError):
diff --git a/sphinx/cmd/quickstart.py b/sphinx/cmd/quickstart.py
index 0275343e847..a11856e497a 100644
--- a/sphinx/cmd/quickstart.py
+++ b/sphinx/cmd/quickstart.py
@@ -89,7 +89,8 @@
# function to get input from terminal -- overridden by the test suite
-def term_input(prompt: str) -> str:
+# Arguments are positional-only to match ``input``.
+def term_input(prompt: str, /) -> str:
if sys.platform == 'win32':
# Important: On windows, readline is not enabled by default. In these
# environment, escape sequences have been broken. To avoid the
@@ -801,7 +802,7 @@ def main(argv: Sequence[str] = (), /) -> int:
print('[Interrupted.]')
return 130 # 128 + SIGINT
- for variable in d.get('variables', []):
+ for variable in d.get('variables', []): # type: ignore[union-attr]
try:
name, value = variable.split('=')
d[name] = value
diff --git a/sphinx/config.py b/sphinx/config.py
index bedc69f2337..f82e2b761ee 100644
--- a/sphinx/config.py
+++ b/sphinx/config.py
@@ -65,7 +65,7 @@ def is_serializable(obj: object, *, _seen: frozenset[int] = frozenset()) -> bool
is_serializable(key, _seen=seen) and is_serializable(value, _seen=seen)
for key, value in obj.items()
)
- elif isinstance(obj, list | tuple | set | frozenset):
+ elif isinstance(obj, (list, tuple, set, frozenset)):
seen = _seen | {id(obj)}
return all(is_serializable(item, _seen=seen) for item in obj)
@@ -89,7 +89,7 @@ def __repr__(self) -> str:
return f'ENUM({", ".join(sorted(map(repr, self._candidates)))})'
def match(self, value: str | bool | None | Sequence[str | bool | None]) -> bool: # NoQA: RUF036
- if isinstance(value, str | bool | None):
+ if isinstance(value, (str, bool, types.NoneType)):
return value in self._candidates
return all(item in self._candidates for item in value)
@@ -320,7 +320,7 @@ def __init__(
for name in list(self._overrides.keys()):
if '.' in name:
- real_name, key = name.split('.', 1)
+ real_name, _, key = name.partition('.')
raw_config.setdefault(real_name, {})[key] = self._overrides.pop(name)
self.setup: _ExtensionSetupFunc | None = raw_config.get('setup')
@@ -333,6 +333,8 @@ def __init__(
raw_config['extensions'] = extensions
self.extensions: list[str] = raw_config.get('extensions', [])
+ self._verbosity: int = 0 # updated in Sphinx.__init__()
+
@property
def values(self) -> dict[str, _Opt]:
return self._options
@@ -341,12 +343,17 @@ def values(self) -> dict[str, _Opt]:
def overrides(self) -> dict[str, Any]:
return self._overrides
+ @property
+ def verbosity(self) -> int:
+ return self._verbosity
+
@classmethod
def read(
cls: type[Config],
confdir: str | os.PathLike[str],
- overrides: dict[str, Any] | None = None,
- tags: Tags | None = None,
+ *,
+ overrides: dict[str, Any],
+ tags: Tags,
) -> Config:
"""Create a Config object from configuration file."""
filename = Path(confdir, CONFIG_FILENAME)
@@ -354,23 +361,7 @@ def read(
raise ConfigError(
__("config directory doesn't contain a conf.py file (%s)") % confdir
)
- namespace = eval_config_file(filename, tags)
-
- # Note: Old sphinx projects have been configured as "language = None" because
- # sphinx-quickstart previously generated this by default.
- # To keep compatibility, they should be fallback to 'en' for a while
- # (This conversion should not be removed before 2025-01-01).
- if namespace.get('language', ...) is None:
- logger.warning(
- __(
- "Invalid configuration value found: 'language = None'. "
- 'Update your configuration to a valid language code. '
- "Falling back to 'en' (English)."
- )
- )
- namespace['language'] = 'en'
-
- return cls(namespace, overrides)
+ return _read_conf_py(filename, overrides=overrides, tags=tags)
def convert_overrides(self, name: str, value: str) -> Any:
opt = self._options[name]
@@ -583,12 +574,28 @@ def __setstate__(self, state: dict[str, Any]) -> None:
self.__dict__.update(state)
-def eval_config_file(
- filename: str | os.PathLike[str], tags: Tags | None
-) -> dict[str, Any]:
- """Evaluate a config file."""
- filename = Path(filename)
+def _read_conf_py(conf_path: Path, *, overrides: dict[str, Any], tags: Tags) -> Config:
+ """Create a Config object from a conf.py file."""
+ namespace = eval_config_file(conf_path, tags)
+ # Note: Old sphinx projects have been configured as "language = None" because
+ # sphinx-quickstart previously generated this by default.
+ # To keep compatibility, they should be fallback to 'en' for a while
+ # (This conversion should not be removed before 2025-01-01).
+ if namespace.get('language', ...) is None:
+ logger.warning(
+ __(
+ "Invalid configuration value found: 'language = None'. "
+ 'Update your configuration to a valid language code. '
+ "Falling back to 'en' (English)."
+ )
+ )
+ namespace['language'] = 'en'
+ return Config(namespace, overrides)
+
+
+def eval_config_file(filename: Path, tags: Tags) -> dict[str, Any]:
+ """Evaluate a config file."""
namespace: dict[str, Any] = {
'__file__': str(filename),
'tags': tags,
@@ -623,12 +630,12 @@ def _validate_valid_types(
) -> frozenset[type] | ENUM:
if not valid_types:
return frozenset()
- if isinstance(valid_types, frozenset | ENUM):
+ if isinstance(valid_types, (frozenset, ENUM)):
return valid_types
if isinstance(valid_types, type):
return frozenset((valid_types,))
if valid_types is Any:
- return frozenset({Any}) # type: ignore[arg-type]
+ return frozenset({Any})
if isinstance(valid_types, set):
return frozenset(valid_types)
try:
@@ -656,7 +663,7 @@ def convert_source_suffix(app: Sphinx, config: Config) -> None:
source_suffix,
config.source_suffix,
)
- elif isinstance(source_suffix, list | tuple):
+ elif isinstance(source_suffix, (list, tuple)):
# if list, considers as all of them are default filetype
config.source_suffix = dict.fromkeys(source_suffix, 'restructuredtext')
logger.info(
@@ -888,7 +895,21 @@ def check_master_doc(
return changed
+def deprecate_source_encoding(_app: Sphinx, config: Config) -> None:
+ """Warn on non-UTF 8 source_encoding."""
+ # RemovedInSphinx10Warning
+ if config.source_encoding.lower() not in {'utf-8', 'utf-8-sig', 'utf8'}:
+ msg = _(
+ 'Support for source encodings other than UTF-8 '
+ 'is deprecated and will be removed in Sphinx 10. '
+ 'Please comment at https://github.com/sphinx-doc/sphinx/issues/13665 '
+ 'if this causes a problem.'
+ )
+ logger.warning(msg)
+
+
def setup(app: Sphinx) -> ExtensionMetadata:
+ app.connect('config-inited', deprecate_source_encoding, priority=790)
app.connect('config-inited', convert_source_suffix, priority=800)
app.connect('config-inited', convert_highlight_options, priority=800)
app.connect('config-inited', init_numfig_format, priority=800)
diff --git a/sphinx/directives/__init__.py b/sphinx/directives/__init__.py
index b4fb7f76006..c442ea8e6c8 100644
--- a/sphinx/directives/__init__.py
+++ b/sphinx/directives/__init__.py
@@ -201,7 +201,7 @@ def run(self) -> list[Node]:
* parse the content and handle doc fields in it
"""
if ':' in self.name:
- self.domain, self.objtype = self.name.split(':', 1)
+ self.domain, _, self.objtype = self.name.partition(':')
else:
self.domain, self.objtype = '', self.name
self.indexnode = addnodes.index(entries=[])
diff --git a/sphinx/directives/other.py b/sphinx/directives/other.py
index d9c2b98fd84..090e58a4cf0 100644
--- a/sphinx/directives/other.py
+++ b/sphinx/directives/other.py
@@ -63,7 +63,7 @@ class TocTree(SphinxDirective):
def run(self) -> list[Node]:
subnode = addnodes.toctree()
- subnode['parent'] = self.env.docname
+ subnode['parent'] = self.env.current_document.docname
# (title, ref) pairs, where ref may be a document, or an external link,
# and title may be None if the document's title is to be used
@@ -90,7 +90,7 @@ def parse_content(self, toctree: addnodes.toctree) -> None:
"""Populate ``toctree['entries']`` and ``toctree['includefiles']`` from content."""
generated_docnames = frozenset(StandardDomain._virtual_doc_names)
suffixes = self.config.source_suffix
- current_docname = self.env.docname
+ current_docname = self.env.current_document.docname
glob = toctree['glob']
# glob target documents
@@ -267,7 +267,7 @@ def run(self) -> list[Node]:
if len(children) != 1 or not isinstance(children[0], nodes.bullet_list):
logger.warning(
__('.. acks content is not a list'),
- location=(self.env.docname, self.lineno),
+ location=(self.env.current_document.docname, self.lineno),
)
return []
return [addnodes.acks('', *children)]
@@ -290,7 +290,7 @@ def run(self) -> list[Node]:
if len(children) != 1 or not isinstance(children[0], nodes.bullet_list):
logger.warning(
__('.. hlist content is not a list'),
- location=(self.env.docname, self.lineno),
+ location=(self.env.current_document.docname, self.lineno),
)
return []
fulllist = children[0]
@@ -388,7 +388,7 @@ def _insert_input(include_lines: list[str], source: str) -> None:
text = '\n'.join(include_lines[:-2])
path = Path(relpath(Path(source).resolve(), start=self.env.srcdir))
- docname = self.env.docname
+ docname = self.env.current_document.docname
# Emit the "include-read" event
arg = [text]
@@ -411,7 +411,7 @@ def _insert_input(include_lines: list[str], source: str) -> None:
if self.arguments[0].startswith('<') and self.arguments[0].endswith('>'):
# docutils "standard" includes, do not do path processing
return super().run()
- rel_filename, filename = self.env.relfn2path(self.arguments[0])
+ _rel_filename, filename = self.env.relfn2path(self.arguments[0])
self.arguments[0] = str(filename)
self.env.note_included(filename)
return super().run()
diff --git a/sphinx/directives/patches.py b/sphinx/directives/patches.py
index 94184de502c..5b346d6a737 100644
--- a/sphinx/directives/patches.py
+++ b/sphinx/directives/patches.py
@@ -9,12 +9,11 @@
from docutils.parsers.rst import directives
from docutils.parsers.rst.directives import images, tables
from docutils.parsers.rst.directives.misc import Meta
-from docutils.parsers.rst.roles import set_classes
from sphinx.directives import optional_int
from sphinx.locale import __
from sphinx.util import logging
-from sphinx.util.docutils import SphinxDirective
+from sphinx.util.docutils import SphinxDirective, _normalize_options
from sphinx.util.nodes import set_source_info
from sphinx.util.osutil import SEP, relpath
@@ -72,11 +71,11 @@ def run(self) -> list[Node]:
'an absolute path as a relative path from source directory. '
'Please update your document.'
),
- location=(env.docname, self.lineno),
+ location=(env.current_document.docname, self.lineno),
)
else:
abspath = env.srcdir / self.options['file'][1:]
- doc_dir = env.doc2path(env.docname).parent
+ doc_dir = env.doc2path(env.current_document.docname).parent
self.options['file'] = relpath(abspath, doc_dir)
return super().run()
@@ -100,7 +99,7 @@ class Code(SphinxDirective):
def run(self) -> list[Node]:
self.assert_has_content()
- set_classes(self.options)
+ self.options = _normalize_options(self.options)
code = '\n'.join(self.content)
node = nodes.literal_block(
code,
@@ -162,7 +161,7 @@ def run(self) -> list[Node]:
latex,
latex,
classes=self.options.get('class', []),
- docname=self.env.docname,
+ docname=self.env.current_document.docname,
number=None,
label=label,
)
@@ -180,7 +179,7 @@ def add_target(self, ret: list[Node]) -> None:
# assign label automatically if math_number_all enabled
if node['label'] == '' or (self.config.math_number_all and not node['label']): # NoQA: PLC1901
seq = self.env.new_serialno('sphinx.ext.math#equations')
- node['label'] = f'{self.env.docname}:{seq}'
+ node['label'] = f'{self.env.current_document.docname}:{seq}'
# no targets and numbers are needed
if not node['label']:
@@ -188,7 +187,9 @@ def add_target(self, ret: list[Node]) -> None:
# register label to domain
domain = self.env.domains.math_domain
- domain.note_equation(self.env.docname, node['label'], location=node)
+ domain.note_equation(
+ self.env.current_document.docname, node['label'], location=node
+ )
node['number'] = domain.get_equation_number_for(node['label'])
# add target node
@@ -213,7 +214,7 @@ class Rubric(SphinxDirective):
}
def run(self) -> list[nodes.rubric | nodes.system_message]:
- set_classes(self.options)
+ self.options = _normalize_options(self.options)
rubric_text = self.arguments[0]
textnodes, messages = self.parse_inline(rubric_text, lineno=self.lineno)
if 'heading-level' in self.options:
diff --git a/sphinx/domains/__init__.py b/sphinx/domains/__init__.py
index 61be6049579..17aa7bdc453 100644
--- a/sphinx/domains/__init__.py
+++ b/sphinx/domains/__init__.py
@@ -14,7 +14,7 @@
if TYPE_CHECKING:
from collections.abc import Iterable, Sequence, Set
- from typing import Any
+ from typing import Any, ClassVar
from docutils import nodes
from docutils.nodes import Element, Node
@@ -82,27 +82,27 @@ class Domain:
"""
#: domain name: should be short, but unique
- name = ''
+ name: ClassVar[str] = ''
#: domain label: longer, more descriptive (used in messages)
- label = ''
+ label: ClassVar[str] = ''
#: type (usually directive) name -> ObjType instance
- object_types: dict[str, ObjType] = {}
+ object_types: ClassVar[dict[str, ObjType]] = {}
#: directive name -> directive class
- directives: dict[str, type[Directive]] = {}
+ directives: ClassVar[dict[str, type[Directive]]] = {}
#: role name -> role callable
- roles: dict[str, RoleFunction | XRefRole] = {}
+ roles: ClassVar[dict[str, RoleFunction | XRefRole]] = {}
#: a list of Index subclasses
- indices: list[type[Index]] = []
+ indices: ClassVar[list[type[Index]]] = []
#: role name -> a warning message if reference is missing
- dangling_warnings: dict[str, str] = {}
+ dangling_warnings: ClassVar[dict[str, str]] = {}
#: node_class -> (enum_node_type, title_getter)
- enumerable_nodes: dict[type[Node], tuple[str, TitleGetter | None]] = {}
+ enumerable_nodes: ClassVar[dict[type[Node], tuple[str, TitleGetter | None]]] = {}
#: data value for a fresh environment
- initial_data: dict[str, Any] = {}
+ initial_data: ClassVar[dict[str, Any]] = {}
#: data value
data: dict[str, Any]
#: data version, bump this when the format of `self.data` changes
- data_version = 0
+ data_version: ClassVar[int] = 0
def __init__(self, env: BuildEnvironment) -> None:
domain_data: dict[str, dict[str, Any]] = env.domaindata
@@ -113,10 +113,10 @@ def __init__(self, env: BuildEnvironment) -> None:
self._type2role: dict[str, str] = {}
# convert class variables to instance one (to enhance through API)
- self.object_types = dict(self.object_types)
- self.directives = dict(self.directives)
- self.roles = dict(self.roles)
- self.indices = list(self.indices)
+ self.object_types = dict(self.object_types) # type: ignore[misc]
+ self.directives = dict(self.directives) # type: ignore[misc]
+ self.roles = dict(self.roles) # type: ignore[misc]
+ self.indices = list(self.indices) # type: ignore[misc]
if self.name not in domain_data:
assert isinstance(self.initial_data, dict)
diff --git a/sphinx/domains/_index.py b/sphinx/domains/_index.py
index afb5be4007b..3845a97ba7b 100644
--- a/sphinx/domains/_index.py
+++ b/sphinx/domains/_index.py
@@ -9,6 +9,7 @@
if TYPE_CHECKING:
from collections.abc import Iterable
+ from typing import ClassVar
from sphinx.domains import Domain
@@ -73,9 +74,9 @@ class Index(ABC):
:rst:role:`ref` role.
"""
- name: str
- localname: str
- shortname: str | None = None
+ name: ClassVar[str]
+ localname: ClassVar[str]
+ shortname: ClassVar[str | None] = None
def __init__(self, domain: Domain) -> None:
if not self.name or self.localname is None:
diff --git a/sphinx/domains/c/__init__.py b/sphinx/domains/c/__init__.py
index 6dbbf70ac92..194916122cd 100644
--- a/sphinx/domains/c/__init__.py
+++ b/sphinx/domains/c/__init__.py
@@ -39,7 +39,7 @@
from docutils.nodes import Element, Node, TextElement, system_message
- from sphinx.addnodes import pending_xref
+ from sphinx.addnodes import desc_signature, pending_xref
from sphinx.application import Sphinx
from sphinx.builders import Builder
from sphinx.domains.c._symbol import LookupKey
@@ -156,7 +156,7 @@ def _add_enumerator_to_parent(self, ast: ASTDeclaration) -> None:
parent=target_symbol,
ident=symbol.ident,
declaration=decl_clone,
- docname=self.env.docname,
+ docname=self.env.current_document.docname,
line=self.get_source_info()[1],
)
@@ -259,7 +259,9 @@ def handle_signature(self, sig: str, signode: TextElement) -> ASTDeclaration:
try:
symbol = parent_symbol.add_declaration(
- ast, docname=self.env.docname, line=self.get_source_info()[1]
+ ast,
+ docname=self.env.current_document.docname,
+ line=self.get_source_info()[1],
)
# append the new declaration to the sibling list
assert symbol.siblingAbove is None
@@ -309,6 +311,32 @@ def after_content(self) -> None:
self.env.current_document.c_parent_symbol = self.oldParentSymbol
self.env.ref_context['c:parent_key'] = self.oldParentKey
+ def _object_hierarchy_parts(self, sig_node: desc_signature) -> tuple[str, ...]:
+ last_symbol: Symbol = self.env.current_document.c_last_symbol
+ return tuple(map(str, last_symbol.get_full_nested_name().names))
+
+ def _toc_entry_name(self, sig_node: desc_signature) -> str:
+ if not sig_node.get('_toc_parts'):
+ return ''
+
+ config = self.config
+ objtype = sig_node.parent.get('objtype')
+ if config.add_function_parentheses and (
+ objtype in {'function', 'method'}
+ or (objtype == 'macro' and '(' in sig_node.rawsource)
+ ):
+ parens = '()'
+ else:
+ parens = ''
+ *parents, name = sig_node['_toc_parts']
+ if config.toc_object_entries_show_parents == 'domain':
+ return '::'.join((name + parens,))
+ if config.toc_object_entries_show_parents == 'hide':
+ return name + parens
+ if config.toc_object_entries_show_parents == 'all':
+ return '::'.join([*parents, name + parens])
+ return ''
+
class CMemberObject(CObject):
object_type = 'member'
@@ -642,7 +670,7 @@ def run(self) -> list[Node]:
The code is therefore based on the ObjectDescription version.
"""
if ':' in self.name:
- self.domain, self.objtype = self.name.split(':', 1)
+ self.domain, _, self.objtype = self.name.partition(':')
else:
self.domain, self.objtype = '', self.name
@@ -792,7 +820,7 @@ class CDomain(Domain):
'expr': CExprRole(asCode=True),
'texpr': CExprRole(asCode=False),
}
- initial_data: dict[str, Symbol | dict[str, tuple[str, str, str]]] = {
+ initial_data: ClassVar[dict[str, Symbol | dict[str, tuple[str, str, str]]]] = {
'root_symbol': Symbol(None, None, None, None, None),
'objects': {}, # fullname -> docname, node_id, objtype
}
diff --git a/sphinx/domains/c/_parser.py b/sphinx/domains/c/_parser.py
index 7eb09f6f7b8..c59352b6ee2 100644
--- a/sphinx/domains/c/_parser.py
+++ b/sphinx/domains/c/_parser.py
@@ -230,7 +230,7 @@ def _parse_paren_expression_list(self) -> ASTParenExprList | None:
#
# expression-list
# -> initializer-list
- exprs, trailing_comma = self._parse_initializer_list(
+ exprs, _trailing_comma = self._parse_initializer_list(
'parenthesized expression-list', '(', ')'
)
if exprs is None:
@@ -369,10 +369,7 @@ def _parse_logical_or_expression(self) -> ASTExpression:
# pm = cast .*, ->*
def _parse_bin_op_expr(self: DefinitionParser, op_id: int) -> ASTExpression:
if op_id + 1 == len(_expression_bin_ops):
-
- def parser() -> ASTExpression:
- return self._parse_cast_expression()
-
+ parser = self._parse_cast_expression
else:
def parser() -> ASTExpression:
@@ -760,10 +757,7 @@ def _parse_declarator_name_suffix(
if self.skip_string(']'):
size = None
else:
-
- def parser() -> ASTExpression:
- return self._parse_expression()
-
+ parser = self._parse_expression
size = self._parse_expression_fallback([']'], parser)
self.skip_ws()
if not self.skip_string(']'):
@@ -1025,10 +1019,7 @@ def _parse_enumerator(self) -> ASTEnumerator:
init = None
if self.skip_string('='):
self.skip_ws()
-
- def parser() -> ASTExpression:
- return self._parse_constant_expression()
-
+ parser = self._parse_constant_expression
init_val = self._parse_expression_fallback([], parser)
init = ASTInitializer(init_val)
return ASTEnumerator(name, init, attrs)
diff --git a/sphinx/domains/c/_symbol.py b/sphinx/domains/c/_symbol.py
index cb43910e7ab..7ac555415ac 100644
--- a/sphinx/domains/c/_symbol.py
+++ b/sphinx/domains/c/_symbol.py
@@ -445,43 +445,19 @@ def on_missing_qualified_symbol(
# First check if one of those with a declaration matches.
# If it's a function, we need to compare IDs,
# otherwise there should be only one symbol with a declaration.
- def make_cand_symbol() -> Symbol:
- if Symbol.debug_lookup:
- Symbol.debug_print('begin: creating candidate symbol')
- symbol = Symbol(
- parent=lookup_result.parent_symbol,
- ident=lookup_result.ident,
- declaration=declaration,
- docname=docname,
- line=line,
- )
- if Symbol.debug_lookup:
- Symbol.debug_print('end: creating candidate symbol')
- return symbol
if len(with_decl) == 0:
cand_symbol = None
else:
- cand_symbol = make_cand_symbol()
-
- def handle_duplicate_declaration(
- symbol: Symbol, cand_symbol: Symbol
- ) -> None:
- if Symbol.debug_lookup:
- Symbol.debug_indent += 1
- Symbol.debug_print('redeclaration')
- Symbol.debug_indent -= 1
- Symbol.debug_indent -= 2
- # Redeclaration of the same symbol.
- # Let the new one be there, but raise an error to the client
- # so it can use the real symbol as subscope.
- # This will probably result in a duplicate id warning.
- cand_symbol.isRedeclaration = True
- raise _DuplicateSymbolError(symbol, declaration)
+ cand_symbol = self._make_cand_symbol(
+ lookup_result, declaration, docname, line
+ )
if declaration.objectType != 'function':
assert len(with_decl) <= 1
- handle_duplicate_declaration(with_decl[0], cand_symbol)
+ self._handle_duplicate_declaration(
+ with_decl[0], cand_symbol, declaration
+ )
# (not reachable)
# a function, so compare IDs
@@ -493,7 +469,7 @@ def handle_duplicate_declaration(
if Symbol.debug_lookup:
Symbol.debug_print('old_id: ', old_id)
if cand_id == old_id:
- handle_duplicate_declaration(symbol, cand_symbol)
+ self._handle_duplicate_declaration(symbol, cand_symbol, declaration)
# (not reachable)
# no candidate symbol found with matching ID
# if there is an empty symbol, fill that one
@@ -507,7 +483,7 @@ def handle_duplicate_declaration(
if cand_symbol is not None:
return cand_symbol
else:
- return make_cand_symbol()
+ return self._make_cand_symbol(lookup_result, declaration, docname, line)
else:
if Symbol.debug_lookup:
Symbol.debug_print(
@@ -529,6 +505,42 @@ def handle_duplicate_declaration(
symbol._fill_empty(declaration, docname, line)
return symbol
+ @staticmethod
+ def _make_cand_symbol(
+ lookup_result: SymbolLookupResult,
+ declaration: ASTDeclaration | None,
+ docname: str | None,
+ line: int | None,
+ ) -> Symbol:
+ if Symbol.debug_lookup:
+ Symbol.debug_print('begin: creating candidate symbol')
+ symbol = Symbol(
+ parent=lookup_result.parent_symbol,
+ ident=lookup_result.ident,
+ declaration=declaration,
+ docname=docname,
+ line=line,
+ )
+ if Symbol.debug_lookup:
+ Symbol.debug_print('end: creating candidate symbol')
+ return symbol
+
+ @staticmethod
+ def _handle_duplicate_declaration(
+ symbol: Symbol, cand_symbol: Symbol, declaration: ASTDeclaration
+ ) -> None:
+ if Symbol.debug_lookup:
+ Symbol.debug_indent += 1
+ Symbol.debug_print('redeclaration')
+ Symbol.debug_indent -= 1
+ Symbol.debug_indent -= 2
+ # Redeclaration of the same symbol.
+ # Let the new one be there, but raise an error to the client
+ # so it can use the real symbol as subscope.
+ # This will probably result in a duplicate id warning.
+ cand_symbol.isRedeclaration = True
+ raise _DuplicateSymbolError(symbol, declaration)
+
def merge_with(
self, other: Symbol, docnames: list[str], env: BuildEnvironment
) -> None:
diff --git a/sphinx/domains/changeset.py b/sphinx/domains/changeset.py
index 2d520e6ff64..4349595f9df 100644
--- a/sphinx/domains/changeset.py
+++ b/sphinx/domains/changeset.py
@@ -21,6 +21,12 @@
from sphinx.environment import BuildEnvironment
from sphinx.util.typing import ExtensionMetadata, OptionSpec
+name_aliases = {
+ 'version-added': 'versionadded',
+ 'version-changed': 'versionchanged',
+ 'version-deprecated': 'deprecated',
+ 'version-removed': 'versionremoved',
+}
versionlabels = {
'versionadded': _('Added in version %s'),
@@ -56,12 +62,13 @@ class VersionChange(SphinxDirective):
option_spec: ClassVar[OptionSpec] = {}
def run(self) -> list[Node]:
+ name = name_aliases.get(self.name, self.name)
node = addnodes.versionmodified()
node.document = self.state.document
self.set_source_info(node)
- node['type'] = self.name
+ node['type'] = name
node['version'] = self.arguments[0]
- text = versionlabels[self.name] % self.arguments[0]
+ text = versionlabels[name] % self.arguments[0]
if len(self.arguments) == 2:
inodes, messages = self.parse_inline(
self.arguments[1], lineno=self.lineno + 1
@@ -73,7 +80,7 @@ def run(self) -> list[Node]:
messages = []
if self.content:
node += self.parse_content_to_nodes()
- classes = ['versionmodified', versionlabel_classes[self.name]]
+ classes = ['versionmodified', versionlabel_classes[name]]
if len(node) > 0 and isinstance(node[0], nodes.paragraph):
# the contents start with a paragraph
if node[0].rawsource:
@@ -121,7 +128,7 @@ class ChangeSetDomain(Domain):
name = 'changeset'
label = 'changeset'
- initial_data: dict[str, dict[str, list[ChangeSet]]] = {
+ initial_data: ClassVar[dict[str, dict[str, list[ChangeSet]]]] = {
'changes': {}, # version -> list of ChangeSet
}
@@ -135,7 +142,7 @@ def note_changeset(self, node: addnodes.versionmodified) -> None:
objname = self.env.current_document.obj_desc_name
changeset = ChangeSet(
node['type'],
- self.env.docname,
+ self.env.current_document.docname,
node.line, # type: ignore[arg-type]
module,
objname,
@@ -168,9 +175,13 @@ def get_changesets_for(self, version: str) -> list[ChangeSet]:
def setup(app: Sphinx) -> ExtensionMetadata:
app.add_domain(ChangeSetDomain)
+ app.add_directive('version-deprecated', VersionChange)
app.add_directive('deprecated', VersionChange)
+ app.add_directive('version-added', VersionChange)
app.add_directive('versionadded', VersionChange)
+ app.add_directive('version-changed', VersionChange)
app.add_directive('versionchanged', VersionChange)
+ app.add_directive('version-removed', VersionChange)
app.add_directive('versionremoved', VersionChange)
return {
diff --git a/sphinx/domains/citation.py b/sphinx/domains/citation.py
index 49b74cca269..da7fc6a3fdd 100644
--- a/sphinx/domains/citation.py
+++ b/sphinx/domains/citation.py
@@ -83,7 +83,7 @@ def note_citation(self, node: nodes.citation) -> None:
def note_citation_reference(self, node: pending_xref) -> None:
docnames = self.citation_refs.setdefault(node['reftarget'], set())
- docnames.add(self.env.docname)
+ docnames.add(self.env.current_document.docname)
def check_consistency(self) -> None:
for name, (docname, _labelid, lineno) in self.citations.items():
@@ -106,7 +106,7 @@ def resolve_xref(
node: pending_xref,
contnode: Element,
) -> nodes.reference | None:
- docname, labelid, lineno = self.citations.get(target, ('', '', 0))
+ docname, labelid, _lineno = self.citations.get(target, ('', '', 0))
if not docname:
return None
@@ -139,7 +139,7 @@ def apply(self, **kwargs: Any) -> None:
domain = self.env.domains.citation_domain
for node in self.document.findall(nodes.citation):
# register citation node to domain
- node['docname'] = self.env.docname
+ node['docname'] = self.env.current_document.docname
domain.note_citation(node)
# mark citation labels as not smartquoted
diff --git a/sphinx/domains/cpp/__init__.py b/sphinx/domains/cpp/__init__.py
index 75d7732a405..0ccdc106c44 100644
--- a/sphinx/domains/cpp/__init__.py
+++ b/sphinx/domains/cpp/__init__.py
@@ -219,7 +219,7 @@ def _add_enumerator_to_parent(self, ast: ASTDeclaration) -> None:
templateParams=None,
templateArgs=None,
declaration=decl_clone,
- docname=self.env.docname,
+ docname=self.env.current_document.docname,
line=self.get_source_info()[1],
)
@@ -374,7 +374,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> ASTDeclaration:
try:
symbol = parent_symbol.add_declaration(
- ast, docname=self.env.docname, line=self.get_source_info()[1]
+ ast,
+ docname=self.env.current_document.docname,
+ line=self.get_source_info()[1],
)
# append the new declaration to the sibling list
assert symbol.siblingAbove is None
@@ -744,7 +746,7 @@ def apply(self, **kwargs: Any) -> None:
template_decls = ns.templatePrefix.templates
else:
template_decls = []
- symbols, fail_reason = parent_symbol.find_name(
+ symbols, _fail_reason = parent_symbol.find_name(
nestedName=name,
templateDecls=template_decls,
typ='any',
@@ -812,7 +814,7 @@ def run(self) -> list[Node]:
The code is therefore based on the ObjectDescription version.
"""
if ':' in self.name:
- self.domain, self.objtype = self.name.split(':', 1)
+ self.domain, _, self.objtype = self.name.partition(':')
else:
self.domain, self.objtype = '', self.name
@@ -1056,6 +1058,15 @@ def merge_domaindata(self, docnames: Set[str], otherdata: dict[str, Any]) -> Non
logger.debug('\tresult end')
logger.debug('merge_domaindata end')
+ def _check_type(self, typ: str, decl_typ: str) -> bool:
+ if typ == 'any':
+ return True
+ objtypes = self.objtypes_for_role(typ)
+ if objtypes:
+ return decl_typ in objtypes
+ logger.debug(f'Type is {typ}, declaration type is {decl_typ}') # NoQA: G004
+ raise AssertionError
+
def _resolve_xref_inner(
self,
env: BuildEnvironment,
@@ -1150,16 +1161,7 @@ def _resolve_xref_inner(
typ = typ.removeprefix('cpp:')
decl_typ = s.declaration.objectType
- def check_type() -> bool:
- if typ == 'any':
- return True
- objtypes = self.objtypes_for_role(typ)
- if objtypes:
- return decl_typ in objtypes
- logger.debug(f'Type is {typ}, declaration type is {decl_typ}') # NoQA: G004
- raise AssertionError
-
- if not check_type():
+ if not self._check_type(typ, decl_typ):
logger.warning(
'cpp:%s targets a %s (%s).',
typ,
@@ -1299,6 +1301,12 @@ def get_full_qualified_name(self, node: Element) -> str | None:
return f'{parent_name}::{target}'
+def _init_stuff(app: Sphinx) -> None:
+ Symbol.debug_lookup = app.config.cpp_debug_lookup
+ Symbol.debug_show_tree = app.config.cpp_debug_show_tree
+ app.config.cpp_index_common_prefix.sort(reverse=True)
+
+
def setup(app: Sphinx) -> ExtensionMetadata:
app.add_domain(CPPDomain)
app.add_config_value('cpp_index_common_prefix', [], 'env', types=frozenset({list}))
@@ -1318,12 +1326,7 @@ def setup(app: Sphinx) -> ExtensionMetadata:
app.add_config_value('cpp_debug_lookup', False, '', types=frozenset({bool}))
app.add_config_value('cpp_debug_show_tree', False, '', types=frozenset({bool}))
- def init_stuff(app: Sphinx) -> None:
- Symbol.debug_lookup = app.config.cpp_debug_lookup
- Symbol.debug_show_tree = app.config.cpp_debug_show_tree
- app.config.cpp_index_common_prefix.sort(reverse=True)
-
- app.connect('builder-inited', init_stuff)
+ app.connect('builder-inited', _init_stuff)
return {
'version': 'builtin',
diff --git a/sphinx/domains/cpp/_parser.py b/sphinx/domains/cpp/_parser.py
index d28c474795d..2055a942c68 100644
--- a/sphinx/domains/cpp/_parser.py
+++ b/sphinx/domains/cpp/_parser.py
@@ -365,7 +365,7 @@ def _parse_paren_expression_list(self) -> ASTParenExprList:
#
# expression-list
# -> initializer-list
- exprs, trailing_comma = self._parse_initializer_list(
+ exprs, _trailing_comma = self._parse_initializer_list(
'parenthesized expression-list', '(', ')'
)
if exprs is None:
@@ -438,9 +438,7 @@ def _parse_postfix_expression(self) -> ASTPostfixExpr:
if not self.skip_string('('):
self.fail("Expected '(' in '%s'." % cast)
- def parser() -> ASTExpression:
- return self._parse_expression()
-
+ parser = self._parse_expression
expr = self._parse_expression_fallback([')'], parser)
self.skip_ws()
if not self.skip_string(')'):
@@ -459,10 +457,7 @@ def parser() -> ASTExpression:
except DefinitionError as e_type:
self.pos = pos
try:
-
- def parser() -> ASTExpression:
- return self._parse_expression()
-
+ parser = self._parse_expression
expr = self._parse_expression_fallback([')'], parser)
prefix = ASTTypeId(expr, isType=False)
if not self.skip_string(')'):
@@ -1423,9 +1418,7 @@ def _parse_declarator_name_suffix(
array_ops.append(ASTArray(None))
continue
- def parser() -> ASTExpression:
- return self._parse_expression()
-
+ parser = self._parse_expression
value = self._parse_expression_fallback([']'], parser)
if not self.skip_string(']'):
self.fail("Expected ']' in end of array operator.")
diff --git a/sphinx/domains/cpp/_symbol.py b/sphinx/domains/cpp/_symbol.py
index 36b965e52ae..7449e616a03 100644
--- a/sphinx/domains/cpp/_symbol.py
+++ b/sphinx/domains/cpp/_symbol.py
@@ -38,6 +38,10 @@ def __str__(self) -> str:
return 'Internal C++ duplicate symbol error:\n%s' % self.symbol.dump(0)
+class _QualifiedSymbolIsTemplateParam(Exception):
+ pass
+
+
class SymbolLookupResult:
__slots__ = (
'symbols',
@@ -419,53 +423,19 @@ def _find_named_symbols(
if not _is_specialization(template_params, template_args):
template_args = None
- def matches(s: Symbol) -> bool:
- if s.identOrOp != ident_or_op:
- return False
- if (s.templateParams is None) != (template_params is None):
- if template_params is not None:
- # we query with params, they must match params
- return False
- if not template_shorthand:
- # we don't query with params, and we do care about them
- return False
- if template_params:
- # TODO: do better comparison
- if str(s.templateParams) != str(template_params):
- return False
- if (s.templateArgs is None) != (template_args is None):
- return False
- if s.templateArgs:
- # TODO: do better comparison
- if str(s.templateArgs) != str(template_args):
- return False
- return True
-
- def candidates() -> Iterator[Symbol]:
- s = self
- if Symbol.debug_lookup:
- Symbol.debug_print('searching in self:')
- logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
- while True:
- if match_self:
- yield s
- if recurse_in_anon:
- yield from s.children_recurse_anon
- else:
- yield from s._children
-
- if s.siblingAbove is None:
- break
- s = s.siblingAbove
- if Symbol.debug_lookup:
- Symbol.debug_print('searching in sibling:')
- logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
-
- for s in candidates():
+ for s in self._candidates(
+ match_self=match_self, recurse_in_anon=recurse_in_anon
+ ):
if Symbol.debug_lookup:
Symbol.debug_print('candidate:')
logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
- if matches(s):
+ if self._matches(
+ s,
+ ident_or_op=ident_or_op,
+ template_params=template_params,
+ template_args=template_args,
+ template_shorthand=template_shorthand,
+ ):
if Symbol.debug_lookup:
Symbol.debug_indent += 1
Symbol.debug_print('matches')
@@ -476,6 +446,59 @@ def candidates() -> Iterator[Symbol]:
if Symbol.debug_lookup:
Symbol.debug_indent -= 2
+ @staticmethod
+ def _matches(
+ s: Symbol,
+ /,
+ *,
+ ident_or_op: ASTIdentifier | ASTOperator,
+ template_params: ASTTemplateParams | ASTTemplateIntroduction,
+ template_args: ASTTemplateArgs,
+ template_shorthand: bool,
+ ) -> bool:
+ if s.identOrOp != ident_or_op:
+ return False
+ if (s.templateParams is None) != (template_params is None):
+ if template_params is not None:
+ # we query with params, they must match params
+ return False
+ if not template_shorthand:
+ # we don't query with params, and we do care about them
+ return False
+ if template_params:
+ # TODO: do better comparison
+ if str(s.templateParams) != str(template_params):
+ return False
+ if (s.templateArgs is None) != (template_args is None):
+ return False
+ if s.templateArgs:
+ # TODO: do better comparison
+ if str(s.templateArgs) != str(template_args):
+ return False
+ return True
+
+ def _candidates(
+ self, *, match_self: bool, recurse_in_anon: bool
+ ) -> Iterator[Symbol]:
+ s = self
+ if Symbol.debug_lookup:
+ Symbol.debug_print('searching in self:')
+ logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
+ while True:
+ if match_self:
+ yield s
+ if recurse_in_anon:
+ yield from s.children_recurse_anon
+ else:
+ yield from s._children
+
+ if s.siblingAbove is None:
+ break
+ s = s.siblingAbove
+ if Symbol.debug_lookup:
+ Symbol.debug_print('searching in sibling:')
+ logger.debug(s.to_string(Symbol.debug_indent + 1), end='')
+
def _symbol_lookup(
self,
nested_name: ASTNestedName,
@@ -661,34 +684,10 @@ def _add_symbols(
Symbol.debug_print('decl: ', declaration)
Symbol.debug_print(f'location: {docname}:{line}')
- def on_missing_qualified_symbol(
- parent_symbol: Symbol,
- ident_or_op: ASTIdentifier | ASTOperator,
- template_params: Any,
- template_args: ASTTemplateArgs,
- ) -> Symbol | None:
- if Symbol.debug_lookup:
- Symbol.debug_indent += 1
- Symbol.debug_print('_add_symbols, on_missing_qualified_symbol:')
- Symbol.debug_indent += 1
- Symbol.debug_print('template_params:', template_params)
- Symbol.debug_print('ident_or_op: ', ident_or_op)
- Symbol.debug_print('template_args: ', template_args)
- Symbol.debug_indent -= 2
- return Symbol(
- parent=parent_symbol,
- identOrOp=ident_or_op,
- templateParams=template_params,
- templateArgs=template_args,
- declaration=None,
- docname=None,
- line=None,
- )
-
lookup_result = self._symbol_lookup(
nested_name,
template_decls,
- on_missing_qualified_symbol,
+ _on_missing_qualified_symbol_fresh,
strict_template_param_arg_lists=True,
ancestor_lookup_type=None,
template_shorthand=False,
@@ -759,45 +758,18 @@ def on_missing_qualified_symbol(
# First check if one of those with a declaration matches.
# If it's a function, we need to compare IDs,
# otherwise there should be only one symbol with a declaration.
- def make_cand_symbol() -> Symbol:
- if Symbol.debug_lookup:
- Symbol.debug_print('begin: creating candidate symbol')
- symbol = Symbol(
- parent=lookup_result.parent_symbol,
- identOrOp=lookup_result.ident_or_op,
- templateParams=lookup_result.template_params,
- templateArgs=lookup_result.template_args,
- declaration=declaration,
- docname=docname,
- line=line,
- )
- if Symbol.debug_lookup:
- Symbol.debug_print('end: creating candidate symbol')
- return symbol
-
if len(with_decl) == 0:
cand_symbol = None
else:
- cand_symbol = make_cand_symbol()
-
- def handle_duplicate_declaration(
- symbol: Symbol, cand_symbol: Symbol
- ) -> None:
- if Symbol.debug_lookup:
- Symbol.debug_indent += 1
- Symbol.debug_print('redeclaration')
- Symbol.debug_indent -= 1
- Symbol.debug_indent -= 2
- # Redeclaration of the same symbol.
- # Let the new one be there, but raise an error to the client
- # so it can use the real symbol as subscope.
- # This will probably result in a duplicate id warning.
- cand_symbol.isRedeclaration = True
- raise _DuplicateSymbolError(symbol, declaration)
+ cand_symbol = self._make_cand_symbol(
+ lookup_result, declaration, docname, line
+ )
if declaration.objectType != 'function':
assert len(with_decl) <= 1
- handle_duplicate_declaration(with_decl[0], cand_symbol)
+ self._handle_duplicate_declaration(
+ with_decl[0], cand_symbol, declaration
+ )
# (not reachable)
# a function, so compare IDs
@@ -808,13 +780,13 @@ def handle_duplicate_declaration(
# but all existing must be functions as well,
# otherwise we declare it to be a duplicate
if symbol.declaration.objectType != 'function':
- handle_duplicate_declaration(symbol, cand_symbol)
+ self._handle_duplicate_declaration(symbol, cand_symbol, declaration)
# (not reachable)
old_id = symbol.declaration.get_newest_id()
if Symbol.debug_lookup:
Symbol.debug_print('old_id: ', old_id)
if cand_id == old_id:
- handle_duplicate_declaration(symbol, cand_symbol)
+ self._handle_duplicate_declaration(symbol, cand_symbol, declaration)
# (not reachable)
# no candidate symbol found with matching ID
# if there is an empty symbol, fill that one
@@ -824,12 +796,12 @@ def handle_duplicate_declaration(
if cand_symbol is not None:
Symbol.debug_print('result is already created cand_symbol')
else:
- Symbol.debug_print('result is make_cand_symbol()')
+ Symbol.debug_print('result is self._make_cand_symbol()')
Symbol.debug_indent -= 2
if cand_symbol is not None:
return cand_symbol
else:
- return make_cand_symbol()
+ return self._make_cand_symbol(lookup_result, declaration, docname, line)
else:
if Symbol.debug_lookup:
Symbol.debug_print(
@@ -851,6 +823,44 @@ def handle_duplicate_declaration(
symbol._fill_empty(declaration, docname, line)
return symbol
+ @staticmethod
+ def _make_cand_symbol(
+ lookup_result: SymbolLookupResult,
+ declaration: ASTDeclaration | None,
+ docname: str | None,
+ line: int | None,
+ ) -> Symbol:
+ if Symbol.debug_lookup:
+ Symbol.debug_print('begin: creating candidate symbol')
+ symbol = Symbol(
+ parent=lookup_result.parent_symbol,
+ identOrOp=lookup_result.ident_or_op,
+ templateParams=lookup_result.template_params,
+ templateArgs=lookup_result.template_args,
+ declaration=declaration,
+ docname=docname,
+ line=line,
+ )
+ if Symbol.debug_lookup:
+ Symbol.debug_print('end: creating candidate symbol')
+ return symbol
+
+ @staticmethod
+ def _handle_duplicate_declaration(
+ symbol: Symbol, cand_symbol: Symbol, declaration: ASTDeclaration
+ ) -> None:
+ if Symbol.debug_lookup:
+ Symbol.debug_indent += 1
+ Symbol.debug_print('redeclaration')
+ Symbol.debug_indent -= 1
+ Symbol.debug_indent -= 2
+ # Redeclaration of the same symbol.
+ # Let the new one be there, but raise an error to the client
+ # so it can use the real symbol as subscope.
+ # This will probably result in a duplicate id warning.
+ cand_symbol.isRedeclaration = True
+ raise _DuplicateSymbolError(symbol, declaration)
+
def merge_with(
self, other: Symbol, docnames: list[str], env: BuildEnvironment
) -> None:
@@ -859,12 +869,6 @@ def merge_with(
Symbol.debug_print('merge_with:')
assert other is not None
- def unconditional_add(self: Symbol, other_child: Symbol) -> None:
- # TODO: hmm, should we prune by docnames?
- self._children.append(other_child)
- other_child.parent = self
- other_child._assert_invariants()
-
if Symbol.debug_lookup:
Symbol.debug_indent += 1
for other_child in other._children:
@@ -874,7 +878,7 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None:
)
Symbol.debug_indent += 1
if other_child.isRedeclaration:
- unconditional_add(self, other_child)
+ self._unconditional_add(other_child)
if Symbol.debug_lookup:
Symbol.debug_print('is_redeclaration')
Symbol.debug_indent -= 1
@@ -898,7 +902,7 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None:
Symbol.debug_print('non-duplicate candidate symbols:', len(symbols))
if len(symbols) == 0:
- unconditional_add(self, other_child)
+ self._unconditional_add(other_child)
if Symbol.debug_lookup:
Symbol.debug_indent -= 1
continue
@@ -929,7 +933,7 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None:
if Symbol.debug_lookup:
Symbol.debug_indent -= 1
if our_child is None:
- unconditional_add(self, other_child)
+ self._unconditional_add(other_child)
continue
if other_child.declaration and other_child.docname in docnames:
if not our_child.declaration:
@@ -978,6 +982,12 @@ def unconditional_add(self: Symbol, other_child: Symbol) -> None:
if Symbol.debug_lookup:
Symbol.debug_indent -= 2
+ def _unconditional_add(self, other_child: Symbol) -> None:
+ # TODO: hmm, should we prune by docnames?
+ self._children.append(other_child)
+ other_child.parent = self
+ other_child._assert_invariants()
+
def add_name(
self,
nestedName: ASTNestedName,
@@ -1125,29 +1135,11 @@ def find_name(
Symbol.debug_print('recurseInAnon: ', recurseInAnon)
Symbol.debug_print('searchInSiblings: ', searchInSiblings)
- class QualifiedSymbolIsTemplateParam(Exception):
- pass
-
- def on_missing_qualified_symbol(
- parent_symbol: Symbol,
- ident_or_op: ASTIdentifier | ASTOperator,
- template_params: Any,
- template_args: ASTTemplateArgs,
- ) -> Symbol | None:
- # TODO: Maybe search without template args?
- # Though, the correct_primary_template_args does
- # that for primary templates.
- # Is there another case where it would be good?
- if parent_symbol.declaration is not None:
- if parent_symbol.declaration.objectType == 'templateParam':
- raise QualifiedSymbolIsTemplateParam
- return None
-
try:
lookup_result = self._symbol_lookup(
nestedName,
templateDecls,
- on_missing_qualified_symbol,
+ _on_missing_qualified_symbol_raise,
strict_template_param_arg_lists=False,
ancestor_lookup_type=typ,
template_shorthand=templateShorthand,
@@ -1156,7 +1148,7 @@ def on_missing_qualified_symbol(
correct_primary_template_args=False,
search_in_siblings=searchInSiblings,
)
- except QualifiedSymbolIsTemplateParam:
+ except _QualifiedSymbolIsTemplateParam:
return None, 'templateParamInQualified'
if lookup_result is None:
@@ -1210,18 +1202,10 @@ def find_declaration(
else:
template_decls = []
- def on_missing_qualified_symbol(
- parent_symbol: Symbol,
- ident_or_op: ASTIdentifier | ASTOperator,
- template_params: Any,
- template_args: ASTTemplateArgs,
- ) -> Symbol | None:
- return None
-
lookup_result = self._symbol_lookup(
nested_name,
template_decls,
- on_missing_qualified_symbol,
+ _on_missing_qualified_symbol_none,
strict_template_param_arg_lists=False,
ancestor_lookup_type=typ,
template_shorthand=templateShorthand,
@@ -1296,3 +1280,53 @@ def dump(self, indent: int) -> str:
self.to_string(indent),
*(c.dump(indent + 1) for c in self._children),
])
+
+
+def _on_missing_qualified_symbol_fresh(
+ parent_symbol: Symbol,
+ ident_or_op: ASTIdentifier | ASTOperator,
+ template_params: Any,
+ template_args: ASTTemplateArgs,
+) -> Symbol | None:
+ if Symbol.debug_lookup:
+ Symbol.debug_indent += 1
+ Symbol.debug_print('_add_symbols, on_missing_qualified_symbol:')
+ Symbol.debug_indent += 1
+ Symbol.debug_print('template_params:', template_params)
+ Symbol.debug_print('ident_or_op: ', ident_or_op)
+ Symbol.debug_print('template_args: ', template_args)
+ Symbol.debug_indent -= 2
+ return Symbol(
+ parent=parent_symbol,
+ identOrOp=ident_or_op,
+ templateParams=template_params,
+ templateArgs=template_args,
+ declaration=None,
+ docname=None,
+ line=None,
+ )
+
+
+def _on_missing_qualified_symbol_raise(
+ parent_symbol: Symbol,
+ ident_or_op: ASTIdentifier | ASTOperator,
+ template_params: Any,
+ template_args: ASTTemplateArgs,
+) -> Symbol | None:
+ # TODO: Maybe search without template args?
+ # Though, the correct_primary_template_args does
+ # that for primary templates.
+ # Is there another case where it would be good?
+ if parent_symbol.declaration is not None:
+ if parent_symbol.declaration.objectType == 'templateParam':
+ raise _QualifiedSymbolIsTemplateParam
+ return None
+
+
+def _on_missing_qualified_symbol_none(
+ parent_symbol: Symbol,
+ ident_or_op: ASTIdentifier | ASTOperator,
+ template_params: Any,
+ template_args: ASTTemplateArgs,
+) -> Symbol | None:
+ return None
diff --git a/sphinx/domains/index.py b/sphinx/domains/index.py
index 09a18d0180e..cefa64a8d5f 100644
--- a/sphinx/domains/index.py
+++ b/sphinx/domains/index.py
@@ -47,7 +47,7 @@ def merge_domaindata(self, docnames: Set[str], otherdata: dict[str, Any]) -> Non
def process_doc(self, env: BuildEnvironment, docname: str, document: Node) -> None:
"""Process a document after it is read by the environment."""
- entries = self.entries.setdefault(env.docname, [])
+ entries = self.entries.setdefault(env.current_document.docname, [])
for node in list(document.findall(addnodes.index)):
node_entries = node['entries']
try:
diff --git a/sphinx/domains/javascript.py b/sphinx/domains/javascript.py
index 968f73aa3d3..6ebd1dec3fd 100644
--- a/sphinx/domains/javascript.py
+++ b/sphinx/domains/javascript.py
@@ -70,7 +70,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
"""
sig = sig.strip()
if '(' in sig and sig[-1:] == ')':
- member, arglist = sig.split('(', 1)
+ member, _, arglist = sig.partition('(')
member = member.strip()
arglist = arglist[:-1].strip()
else:
@@ -137,10 +137,11 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
_pseudo_parse_arglist(
signode,
arglist,
- multi_line_parameter_list,
- trailing_comma,
+ multi_line_parameter_list=multi_line_parameter_list,
+ trailing_comma=trailing_comma,
+ env=self.env,
)
- return fullname, prefix
+ return fullname, prefix # type: ignore[return-value]
def _object_hierarchy_parts(self, sig_node: desc_signature) -> tuple[str, ...]:
if 'fullname' not in sig_node:
@@ -362,7 +363,10 @@ def run(self) -> list[Node]:
# Make a duplicate entry in 'objects' to facilitate searching for
# the module in JavaScriptDomain.find_obj()
domain.note_object(
- mod_name, 'module', node_id, location=(self.env.docname, self.lineno)
+ mod_name,
+ 'module',
+ node_id,
+ location=(self.env.current_document.docname, self.lineno),
)
# The node order is: index node first, then target node
@@ -435,7 +439,7 @@ class JavaScriptDomain(Domain):
'attr': JSXRefRole(),
'mod': JSXRefRole(),
}
- initial_data: dict[str, dict[str, tuple[str, str]]] = {
+ initial_data: ClassVar[dict[str, dict[str, tuple[str, str]]]] = {
'objects': {}, # fullname -> docname, node_id, objtype
'modules': {}, # modname -> docname, node_id
}
@@ -458,14 +462,14 @@ def note_object(
docname,
location=location,
)
- self.objects[fullname] = (self.env.docname, node_id, objtype)
+ self.objects[fullname] = (self.env.current_document.docname, node_id, objtype)
@property
def modules(self) -> dict[str, tuple[str, str]]:
return self.data.setdefault('modules', {}) # modname -> docname, node_id
def note_module(self, modname: str, node_id: str) -> None:
- self.modules[modname] = (self.env.docname, node_id)
+ self.modules[modname] = (self.env.current_document.docname, node_id)
def clear_doc(self, docname: str) -> None:
for fullname, (pkg_docname, _node_id, _l) in list(self.objects.items()):
diff --git a/sphinx/domains/math.py b/sphinx/domains/math.py
index 56e543917ad..433e35b7a2f 100644
--- a/sphinx/domains/math.py
+++ b/sphinx/domains/math.py
@@ -15,7 +15,7 @@
if TYPE_CHECKING:
from collections.abc import Iterable, Set
- from typing import Any
+ from typing import Any, ClassVar
from docutils.nodes import Element, Node, system_message
@@ -47,7 +47,7 @@ class MathDomain(Domain):
name = 'math'
label = 'mathematics'
- initial_data: dict[str, Any] = {
+ initial_data: ClassVar[dict[str, Any]] = {
'objects': {}, # labelid -> (docname, eqno)
# backwards compatibility
'has_equations': {}, # https://github.com/sphinx-doc/sphinx/issues/13346
@@ -74,6 +74,8 @@ def note_equation(self, docname: str, labelid: str, location: Any = None) -> Non
labelid,
other,
location=location,
+ type='ref',
+ subtype='equation',
)
self.equations[labelid] = (docname, self.env.new_serialno('eqno') + 1)
diff --git a/sphinx/domains/python/__init__.py b/sphinx/domains/python/__init__.py
index 97519ee028e..3cca270abf6 100644
--- a/sphinx/domains/python/__init__.py
+++ b/sphinx/domains/python/__init__.py
@@ -29,7 +29,7 @@
from collections.abc import Iterable, Iterator, Sequence, Set
from typing import Any, ClassVar
- from docutils.nodes import Element, Node, TextElement
+ from docutils.nodes import Element, Node
from sphinx.addnodes import desc_signature, pending_xref
from sphinx.application import Sphinx
@@ -52,6 +52,8 @@
py_sig_re,
)
+_TYPING_ALL = frozenset(typing.__all__)
+
logger = logging.getLogger(__name__)
pairindextypes = {
@@ -108,7 +110,7 @@ def add_target_and_index(
modname = self.options.get('module', self.env.ref_context.get('py:module'))
node_id = signode['ids'][0]
- name, cls = name_cls
+ name, _cls = name_cls
if modname:
text = _('%s() (in module %s)') % (name, modname)
self.indexnode['entries'].append(('single', text, node_id, '', None))
@@ -175,7 +177,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
return fullname, prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
if modname:
return _('%s (in module %s)') % (name, modname)
else:
@@ -268,7 +270,7 @@ def get_signature_prefix(self, sig: str) -> Sequence[nodes.Node]:
return prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
try:
clsname, methname = name.rsplit('.', 1)
if modname and self.config.add_module_names:
@@ -364,7 +366,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
return fullname, prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
try:
clsname, attrname = name.rsplit('.', 1)
if modname and self.config.add_module_names:
@@ -424,7 +426,7 @@ def get_signature_prefix(self, sig: str) -> Sequence[nodes.Node]:
return prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
try:
clsname, attrname = name.rsplit('.', 1)
if modname and self.config.add_module_names:
@@ -464,7 +466,7 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
return fullname, prefix
def get_index_text(self, modname: str, name_cls: tuple[str, str]) -> str:
- name, cls = name_cls
+ name, _cls = name_cls
try:
clsname, attrname = name.rsplit('.', 1)
if modname and self.config.add_module_names:
@@ -594,23 +596,17 @@ def process_link(
class _PyDecoXRefRole(PyXRefRole):
- def __init__(
+ def process_link(
self,
- fix_parens: bool = False,
- lowercase: bool = False,
- nodeclass: type[Element] | None = None,
- innernodeclass: type[TextElement] | None = None,
- warn_dangling: bool = False,
- ) -> None:
- super().__init__(
- fix_parens=True,
- lowercase=lowercase,
- nodeclass=nodeclass,
- innernodeclass=innernodeclass,
- warn_dangling=warn_dangling,
+ env: BuildEnvironment,
+ refnode: Element,
+ has_explicit_title: bool,
+ title: str,
+ target: str,
+ ) -> tuple[str, str]:
+ title, target = super().process_link(
+ env, refnode, has_explicit_title, title, target
)
-
- def update_title_and_target(self, title: str, target: str) -> tuple[str, str]:
return f'@{title}', target
@@ -675,7 +671,7 @@ def generate(
entries = content.setdefault(modname[0].lower(), [])
- package = modname.split('.', maxsplit=1)[0]
+ package = modname.partition('.')[0]
if package != modname:
# it's a submodule
if prev_modname == package:
@@ -736,7 +732,7 @@ class PythonDomain(Domain):
name = 'py'
label = 'Python'
- object_types: dict[str, ObjType] = {
+ object_types = {
'function': ObjType(_('function'), 'func', 'obj'),
'data': ObjType(_('data'), 'data', 'obj'),
'class': ObjType(_('class'), 'class', 'exc', 'obj'),
@@ -746,7 +742,7 @@ class PythonDomain(Domain):
'staticmethod': ObjType(_('static method'), 'meth', 'obj'),
'attribute': ObjType(_('attribute'), 'attr', 'obj'),
'property': ObjType(_('property'), 'attr', '_prop', 'obj'),
- 'type': ObjType(_('type alias'), 'type', 'obj'),
+ 'type': ObjType(_('type alias'), 'type', 'class', 'obj'),
'module': ObjType(_('module'), 'mod', 'obj'),
}
@@ -779,7 +775,7 @@ class PythonDomain(Domain):
'mod': PyXRefRole(),
'obj': PyXRefRole(),
}
- initial_data: dict[str, dict[str, tuple[Any]]] = {
+ initial_data: ClassVar[dict[str, dict[str, tuple[Any]]]] = {
'objects': {}, # fullname -> docname, objtype
'modules': {}, # modname -> docname, synopsis, platform, deprecated
}
@@ -822,7 +818,9 @@ def note_object(
other.docname,
location=location,
)
- self.objects[name] = ObjectEntry(self.env.docname, node_id, objtype, aliased)
+ self.objects[name] = ObjectEntry(
+ self.env.current_document.docname, node_id, objtype, aliased
+ )
@property
def modules(self) -> dict[str, ModuleEntry]:
@@ -836,7 +834,7 @@ def note_module(
.. versionadded:: 2.1
"""
self.modules[name] = ModuleEntry(
- docname=self.env.docname,
+ docname=self.env.current_document.docname,
node_id=node_id,
synopsis=synopsis,
platform=platform,
@@ -954,6 +952,14 @@ def resolve_xref(
searchmode = 1 if node.hasattr('refspecific') else 0
matches = self.find_obj(env, modname, clsname, target, type, searchmode)
+ if not matches and type == 'class':
+ # fallback to data/attr (for type aliases)
+ # type aliases are documented as data/attr but referenced as class
+ matches = self.find_obj(env, modname, clsname, target, 'data', searchmode)
+ if not matches:
+ matches = self.find_obj(
+ env, modname, clsname, target, 'attr', searchmode
+ )
if not matches and type == 'attr':
# fallback to meth (for property; Sphinx 2.4.x)
# this ensures that `:attr:` role continues to refer to the old property entry
@@ -1082,13 +1088,6 @@ def builtin_resolver(
app: Sphinx, env: BuildEnvironment, node: pending_xref, contnode: Element
) -> Element | None:
"""Do not emit nitpicky warnings for built-in types."""
-
- def istyping(s: str) -> bool:
- if s.startswith('typing.'):
- s = s.split('.', 1)[1]
-
- return s in typing.__all__
-
if node.get('refdomain') != 'py':
return None
elif node.get('reftype') in {'class', 'obj'} and node.get('reftarget') == 'None':
@@ -1098,13 +1097,17 @@ def istyping(s: str) -> bool:
if inspect.isclass(getattr(builtins, reftarget, None)):
# built-in class
return contnode
- if istyping(reftarget):
+ if _is_typing(reftarget):
# typing class
return contnode
return None
+def _is_typing(s: str, /) -> bool:
+ return s.removeprefix('typing.') in _TYPING_ALL
+
+
def setup(app: Sphinx) -> ExtensionMetadata:
app.setup_extension('sphinx.directives')
diff --git a/sphinx/domains/python/_annotations.py b/sphinx/domains/python/_annotations.py
index 823aac01316..f476ff22fd4 100644
--- a/sphinx/domains/python/_annotations.py
+++ b/sphinx/domains/python/_annotations.py
@@ -6,6 +6,7 @@
import token
from collections import deque
from inspect import Parameter
+from itertools import chain, islice
from typing import TYPE_CHECKING
from docutils import nodes
@@ -124,6 +125,10 @@ def unparse(node: ast.AST) -> list[Node]:
return [nodes.Text(repr(node.value))]
if isinstance(node, ast.Expr):
return unparse(node.value)
+ if isinstance(node, ast.Starred):
+ result = [addnodes.desc_sig_operator('', '*')]
+ result.extend(unparse(node.value))
+ return result
if isinstance(node, ast.Invert):
return [addnodes.desc_sig_punctuation('', '~')]
if isinstance(node, ast.USub):
@@ -312,18 +317,6 @@ def parse(self) -> None:
self.type_params.append(type_param)
def _build_identifier(self, tokens: list[Token]) -> str:
- from itertools import chain, islice
-
- def triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]:
- # sliding_window('ABCDEFG', 4) --> ABCD BCDE CDEF DEFG
- it = iter(iterable)
- window = deque(islice(it, 3), maxlen=3)
- if len(window) == 3:
- yield tuple(window)
- for x in it:
- window.append(x)
- yield tuple(window)
-
idents: list[str] = []
tokens: Iterable[Token] = iter(tokens) # type: ignore[no-redef]
# do not format opening brackets
@@ -338,7 +331,7 @@ def triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]:
# check the remaining tokens
stop = Token(token.ENDMARKER, '', (-1, -1), (-1, -1), '')
is_unpack_operator = False
- for tok, op, after in triplewise(chain(tokens, [stop, stop])):
+ for tok, op, after in _triplewise(chain(tokens, [stop, stop])):
ident = self._pformat_token(tok, native=is_unpack_operator)
idents.append(ident)
# determine if the next token is an unpack operator depending
@@ -548,8 +541,10 @@ def _keyword_only_separator() -> addnodes.desc_parameter:
def _pseudo_parse_arglist(
signode: desc_signature,
arglist: str,
+ *,
multi_line_parameter_list: bool = False,
trailing_comma: bool = True,
+ env: BuildEnvironment,
) -> None:
"""'Parse' a list of arguments separated by commas.
@@ -557,6 +552,7 @@ def _pseudo_parse_arglist(
brackets. Currently, this will split at any comma, even if it's inside a
string literal (e.g. default argument value).
"""
+ # TODO: decompose 'env' parameter into only the required bits
paramlist = addnodes.desc_parameterlist()
paramlist['multi_line_parameter_list'] = multi_line_parameter_list
paramlist['multi_line_trailing_comma'] = trailing_comma
@@ -579,9 +575,30 @@ def _pseudo_parse_arglist(
ends_open += 1
argument = argument[:-1].strip()
if argument:
- stack[-1] += addnodes.desc_parameter(
- '', '', addnodes.desc_sig_name(argument, argument)
- )
+ param_with_annotation, _, default_value = argument.partition('=')
+ param_name, _, annotation = param_with_annotation.partition(':')
+ del param_with_annotation
+
+ node = addnodes.desc_parameter()
+ node += addnodes.desc_sig_name('', param_name.strip())
+ if annotation:
+ children = _parse_annotation(annotation.strip(), env=env)
+ node += addnodes.desc_sig_punctuation('', ':')
+ node += addnodes.desc_sig_space()
+ node += addnodes.desc_sig_name('', '', *children) # type: ignore[arg-type]
+ if default_value:
+ if annotation:
+ node += addnodes.desc_sig_space()
+ node += addnodes.desc_sig_operator('', '=')
+ if annotation:
+ node += addnodes.desc_sig_space()
+ node += nodes.inline(
+ '',
+ default_value.strip(),
+ classes=['default_value'],
+ support_smartquotes=False,
+ )
+ stack[-1] += node
while ends_open:
stack.append(addnodes.desc_optional())
stack[-2] += stack[-1]
@@ -600,3 +617,14 @@ def _pseudo_parse_arglist(
signode += paramlist
else:
signode += paramlist
+
+
+def _triplewise(iterable: Iterable[Token]) -> Iterator[tuple[Token, ...]]:
+ # sliding_window('ABCDEFG', 4) --> ABCD BCDE CDEF DEFG
+ it = iter(iterable)
+ window = deque(islice(it, 3), maxlen=3)
+ if len(window) == 3:
+ yield tuple(window)
+ for x in it:
+ window.append(x)
+ yield tuple(window)
diff --git a/sphinx/domains/python/_object.py b/sphinx/domains/python/_object.py
index a858afe8a3e..6a0f0ff7334 100644
--- a/sphinx/domains/python/_object.py
+++ b/sphinx/domains/python/_object.py
@@ -93,7 +93,7 @@ def make_xref(
children = result.children
result.clear()
- shortname = target.split('.')[-1]
+ shortname = target.rpartition('.')[-1]
textnode = innernode('', shortname) # type: ignore[call-arg]
contnodes = [
pending_xref_condition('', '', textnode, condition='resolved'),
@@ -363,8 +363,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
_pseudo_parse_arglist(
signode,
arglist,
- multi_line_parameter_list,
- trailing_comma,
+ multi_line_parameter_list=multi_line_parameter_list,
+ trailing_comma=trailing_comma,
+ env=self.env,
)
except (NotImplementedError, ValueError) as exc:
# duplicated parameter names raise ValueError and not a SyntaxError
@@ -374,8 +375,9 @@ def handle_signature(self, sig: str, signode: desc_signature) -> tuple[str, str]
_pseudo_parse_arglist(
signode,
arglist,
- multi_line_parameter_list,
- trailing_comma,
+ multi_line_parameter_list=multi_line_parameter_list,
+ trailing_comma=trailing_comma,
+ env=self.env,
)
else:
if self.needs_arglist():
@@ -422,14 +424,20 @@ def add_target_and_index(
domain = self.env.domains.python_domain
domain.note_object(fullname, self.objtype, node_id, location=signode)
- canonical_name = self.options.get('canonical')
- if canonical_name:
- domain.note_object(
- canonical_name, self.objtype, node_id, aliased=True, location=signode
- )
+ if self.objtype != 'type':
+ # py:type directive uses `canonical` option for a different meaning
+ canonical_name = self.options.get('canonical')
+ if canonical_name:
+ domain.note_object(
+ canonical_name,
+ self.objtype,
+ node_id,
+ aliased=True,
+ location=signode,
+ )
if 'no-index-entry' not in self.options:
- if index_text := self.get_index_text(mod_name, name_cls):
+ if index_text := self.get_index_text(mod_name, name_cls): # type: ignore[arg-type]
self.indexnode['entries'].append((
'single',
index_text,
diff --git a/sphinx/domains/rst.py b/sphinx/domains/rst.py
index cd5d8312d4a..64aff25a015 100644
--- a/sphinx/domains/rst.py
+++ b/sphinx/domains/rst.py
@@ -83,7 +83,7 @@ def _toc_entry_name(self, sig_node: desc_signature) -> str:
return ''
objtype = sig_node.parent.get('objtype')
- *parents, name = sig_node['_toc_parts']
+ *_parents, name = sig_node['_toc_parts']
if objtype == 'directive:option':
return f':{name}:'
if self.config.toc_object_entries_show_parents in {'domain', 'all'}:
@@ -244,7 +244,7 @@ class ReSTDomain(Domain):
'dir': XRefRole(),
'role': XRefRole(),
}
- initial_data: dict[str, dict[tuple[str, str], str]] = {
+ initial_data: ClassVar[dict[str, dict[tuple[str, str], str]]] = {
'objects': {}, # fullname -> docname, objtype
}
@@ -266,7 +266,7 @@ def note_object(
location=location,
)
- self.objects[objtype, name] = (self.env.docname, node_id)
+ self.objects[objtype, name] = (self.env.current_document.docname, node_id)
def clear_doc(self, docname: str) -> None:
for (typ, name), (doc, _node_id) in list(self.objects.items()):
diff --git a/sphinx/domains/std/__init__.py b/sphinx/domains/std/__init__.py
index e123ce85786..52ccef67c24 100644
--- a/sphinx/domains/std/__init__.py
+++ b/sphinx/domains/std/__init__.py
@@ -27,7 +27,6 @@
from typing import Any, ClassVar, Final
from docutils.nodes import Element, Node, system_message
- from docutils.parsers.rst import Directive
from sphinx.addnodes import desc_signature
from sphinx.application import Sphinx
@@ -36,8 +35,6 @@
from sphinx.util.typing import (
ExtensionMetadata,
OptionSpec,
- RoleFunction,
- TitleGetter,
)
logger = logging.getLogger(__name__)
@@ -218,7 +215,7 @@ def run(self) -> list[Node]:
ret.insert(0, inode)
name = self.name
if ':' in self.name:
- _, name = self.name.split(':', 1)
+ name = self.name.partition(':')[-1]
std = self.env.domains.standard_domain
std.note_object(name, fullname, node_id, location=node)
@@ -311,7 +308,10 @@ def add_target_and_index(
domain = self.env.domains.standard_domain
for optname in signode.get('allnames', ()):
domain.add_program_option(
- currprogram, optname, self.env.docname, signode['ids'][0]
+ currprogram,
+ optname,
+ self.env.current_document.docname,
+ signode['ids'][0],
)
# create an index entry
@@ -725,7 +725,7 @@ class StandardDomain(Domain):
name = 'std'
label = 'Default'
- object_types: dict[str, ObjType] = {
+ object_types = {
'term': ObjType(_('glossary term'), 'term', searchprio=-1),
'token': ObjType(_('grammar token'), 'token', searchprio=-1),
'label': ObjType(_('reference label'), 'ref', 'keyword', searchprio=-1),
@@ -735,7 +735,7 @@ class StandardDomain(Domain):
'doc': ObjType(_('document'), 'doc', searchprio=-1),
}
- directives: dict[str, type[Directive]] = {
+ directives = {
'program': Program,
'cmdoption': Cmdoption, # old name for backwards compatibility
'option': Cmdoption,
@@ -744,7 +744,7 @@ class StandardDomain(Domain):
'glossary': Glossary,
'productionlist': ProductionList,
}
- roles: dict[str, RoleFunction | XRefRole] = {
+ roles = {
'option': OptionXRefRole(warn_dangling=True),
'confval': XRefRole(warn_dangling=True),
'envvar': EnvVarXRefRole(),
@@ -780,7 +780,7 @@ class StandardDomain(Domain):
}
# labelname -> docname, sectionname
- _virtual_doc_names: dict[str, tuple[str, str]] = {
+ _virtual_doc_names: Final = {
'genindex': ('genindex', _('Index')),
'modindex': ('py-modindex', _('Module Index')),
'search': ('search', _('Search Page')),
@@ -795,7 +795,7 @@ class StandardDomain(Domain):
}
# node_class -> (figtype, title_getter)
- enumerable_nodes: dict[type[Node], tuple[str, TitleGetter | None]] = {
+ enumerable_nodes = {
nodes.figure: ('figure', None),
nodes.table: ('table', None),
nodes.container: ('code-block', None),
@@ -805,9 +805,9 @@ def __init__(self, env: BuildEnvironment) -> None:
super().__init__(env)
# set up enumerable nodes
- self.enumerable_nodes = copy(
- self.enumerable_nodes
- ) # create a copy for this instance
+
+ # create a copy for this instance
+ self.enumerable_nodes = copy(self.enumerable_nodes) # type: ignore[misc]
for node, settings in env._registry.enumerable_nodes.items():
self.enumerable_nodes[node] = settings
@@ -860,7 +860,7 @@ def note_object(
docname,
location=location,
)
- self.objects[objtype, name] = (self.env.docname, labelid)
+ self.objects[objtype, name] = (self.env.current_document.docname, labelid)
@property
def _terms(self) -> dict[str, tuple[str, str]]:
@@ -874,7 +874,7 @@ def _note_term(self, term: str, labelid: str, location: Any = None) -> None:
"""
self.note_object('term', term, labelid, location)
- self._terms[term.lower()] = (self.env.docname, labelid)
+ self._terms[term.lower()] = (self.env.current_document.docname, labelid)
@property
def progoptions(self) -> dict[tuple[str | None, str], tuple[str, str]]:
@@ -974,13 +974,13 @@ def process_doc(
continue
else:
if (
- isinstance(node, nodes.definition_list | nodes.field_list)
+ isinstance(node, (nodes.definition_list, nodes.field_list))
and node.children
):
node = cast('nodes.Element', node.children[0])
- if isinstance(node, nodes.field | nodes.definition_list_item):
+ if isinstance(node, (nodes.field, nodes.definition_list_item)):
node = cast('nodes.Element', node.children[0])
- if isinstance(node, nodes.term | nodes.field_name):
+ if isinstance(node, (nodes.term, nodes.field_name)):
sectname = clean_astext(node)
else:
toctree = next(node.findall(addnodes.toctree), None)
@@ -1235,7 +1235,7 @@ def _resolve_option_xref(
if not docname:
commands = []
while ws_re.search(target):
- subcommand, target = ws_re.split(target, 1)
+ subcommand, target = ws_re.split(target, maxsplit=1)
commands.append(subcommand)
progname = '-'.join(commands)
@@ -1371,23 +1371,19 @@ def get_numfig_title(self, node: Node) -> str | None:
return title_getter(elem)
else:
for subnode in elem:
- if isinstance(subnode, nodes.caption | nodes.title):
+ if isinstance(subnode, (nodes.caption, nodes.title)):
return clean_astext(subnode)
return None
def get_enumerable_node_type(self, node: Node) -> str | None:
"""Get type of enumerable nodes."""
-
- def has_child(node: Element, cls: type) -> bool:
- return any(isinstance(child, cls) for child in node)
-
if isinstance(node, nodes.section):
return 'section'
elif (
isinstance(node, nodes.container)
and 'literal_block' in node
- and has_child(node, nodes.literal_block)
+ and _has_child(node, nodes.literal_block)
):
# given node is a code-block having caption
return 'code-block'
@@ -1440,6 +1436,10 @@ def get_full_qualified_name(self, node: Element) -> str | None:
return None
+def _has_child(node: Element, cls: type) -> bool:
+ return any(isinstance(child, cls) for child in node)
+
+
def warn_missing_reference(
app: Sphinx,
domain: Domain,
diff --git a/sphinx/environment/__init__.py b/sphinx/environment/__init__.py
index 79fa6278549..fa7d17d7800 100644
--- a/sphinx/environment/__init__.py
+++ b/sphinx/environment/__init__.py
@@ -11,6 +11,7 @@
from typing import TYPE_CHECKING
from sphinx import addnodes
+from sphinx.deprecation import _deprecation_warning
from sphinx.domains._domains_container import _DomainsContainer
from sphinx.environment.adapters import toctree as toctree_adapters
from sphinx.errors import (
@@ -23,7 +24,7 @@
from sphinx.transforms import SphinxTransformer
from sphinx.util import logging
from sphinx.util._files import DownloadFiles, FilenameUniqDict
-from sphinx.util._pathlib import _StrPath, _StrPathProperty
+from sphinx.util._pathlib import _StrPathProperty
from sphinx.util._serialise import stable_str
from sphinx.util._timestamps import _format_rfc3339_microseconds
from sphinx.util.docutils import LoggingReporter
@@ -32,7 +33,7 @@
from sphinx.util.osutil import _last_modified_time, _relative_path
if TYPE_CHECKING:
- from collections.abc import Callable, Iterable, Iterator, Mapping
+ from collections.abc import Callable, Iterable, Iterator, Mapping, Set
from typing import Any, Final, Literal
from docutils import nodes
@@ -49,6 +50,7 @@
from sphinx.extension import Extension
from sphinx.project import Project
from sphinx.registry import SphinxComponentRegistry
+ from sphinx.util._pathlib import _StrPath
from sphinx.util.tags import Tags
logger = logging.getLogger(__name__)
@@ -73,7 +75,7 @@
# This is increased every time an environment attribute is added
# or changed to properly invalidate pickle files.
-ENV_VERSION = 65
+ENV_VERSION = 66
# config status
CONFIG_UNSET = -1
@@ -106,8 +108,11 @@ class BuildEnvironment:
srcdir = _StrPathProperty()
doctreedir = _StrPathProperty()
+ # builder is created after the environment.
+ _builder_cls: type[Builder]
+
def __init__(self, app: Sphinx) -> None:
- self.app: Sphinx = app
+ self._app: Sphinx = app
self.doctreedir = app.doctreedir
self.srcdir = app.srcdir
self.config: Config = None # type: ignore[assignment]
@@ -237,7 +242,7 @@ def __getstate__(self) -> dict[str, Any]:
"""Obtains serializable data for pickling."""
__dict__ = self.__dict__.copy()
# clear unpickleable attributes
- __dict__.update(app=None, domains=None, events=None)
+ __dict__.update(_app=None, domains=None, events=None)
# clear in-memory doctree caches, to reduce memory consumption and
# ensure that, upon restoring the state, the most recent pickled files
# on the disk are used instead of those from a possibly outdated state
@@ -257,7 +262,7 @@ def setup(self, app: Sphinx) -> None:
if self.project:
app.project.restore(self.project)
- self.app = app
+ self._app = app
self.doctreedir = app.doctreedir
self.events = app.events
self.srcdir = app.srcdir
@@ -277,20 +282,37 @@ def setup(self, app: Sphinx) -> None:
# The old config is self.config, restored from the pickled environment.
# The new config is app.config, always recreated from ``conf.py``
self.config_status, self.config_status_extra = self._config_status(
- old_config=self.config, new_config=app.config, verbosity=app.verbosity
+ old_config=self.config,
+ new_config=app.config,
+ verbosity=app.config.verbosity,
)
self.config = app.config
# initialize settings
self._update_settings(app.config)
+ @property
+ def app(self) -> Sphinx:
+ _deprecation_warning(__name__, 'BuildEnvironment.app', remove=(10, 0))
+ return self._app
+
+ @app.setter
+ def app(self, app: Sphinx) -> None:
+ _deprecation_warning(__name__, 'BuildEnvironment.app', remove=(10, 0))
+ self._app = app
+
+ @app.deleter
+ def app(self) -> None:
+ _deprecation_warning(__name__, 'BuildEnvironment.app', remove=(10, 0))
+ del self._app
+
@property
def _registry(self) -> SphinxComponentRegistry:
- return self.app.registry
+ return self._app.registry
@property
def _tags(self) -> Tags:
- return self.app.tags
+ return self._app.tags
@staticmethod
def _config_status(
@@ -498,7 +520,7 @@ def get_outdated_files(
) -> tuple[set[str], set[str], set[str]]:
"""Return (added, changed, removed) sets."""
# clear all files no longer present
- removed = set(self.all_docs) - self.found_docs
+ removed = self.all_docs.keys() - self.found_docs
added: set[str] = set()
changed: set[str] = set()
@@ -506,65 +528,25 @@ def get_outdated_files(
if config_changed:
# config values affect e.g. substitutions
added = self.found_docs
- else:
- for docname in self.found_docs:
- if docname not in self.all_docs:
- logger.debug('[build target] added %r', docname)
- added.add(docname)
- continue
- # if the doctree file is not there, rebuild
- filename = self.doctreedir / f'{docname}.doctree'
- if not filename.is_file():
- logger.debug('[build target] changed %r', docname)
- changed.add(docname)
- continue
- # check the "reread always" list
- if docname in self.reread_always:
- logger.debug('[build target] changed %r', docname)
- changed.add(docname)
- continue
- # check the mtime of the document
- mtime = self.all_docs[docname]
- newmtime = _last_modified_time(self.doc2path(docname))
- if newmtime > mtime:
- logger.debug(
- '[build target] outdated %r: %s -> %s',
- docname,
- _format_rfc3339_microseconds(mtime),
- _format_rfc3339_microseconds(newmtime),
- )
- changed.add(docname)
- continue
- # finally, check the mtime of dependencies
- if docname not in self.dependencies:
- continue
- for dep in self.dependencies[docname]:
- try:
- # this will do the right thing when dep is absolute too
- dep_path = self.srcdir / dep
- if not dep_path.is_file():
- logger.debug(
- '[build target] changed %r missing dependency %r',
- docname,
- dep_path,
- )
- changed.add(docname)
- break
- depmtime = _last_modified_time(dep_path)
- if depmtime > mtime:
- logger.debug(
- '[build target] outdated %r from dependency %r: %s -> %s',
- docname,
- dep_path,
- _format_rfc3339_microseconds(mtime),
- _format_rfc3339_microseconds(depmtime),
- )
- changed.add(docname)
- break
- except OSError:
- # give it another chance
- changed.add(docname)
- break
+ return added, changed, removed
+
+ for docname in self.found_docs:
+ if docname not in self.all_docs:
+ logger.debug('[build target] added %r', docname)
+ added.add(docname)
+ continue
+
+ # if the document has changed, rebuild
+ if _has_doc_changed(
+ docname,
+ filename=self.doc2path(docname),
+ reread_always=self.reread_always,
+ doctreedir=self.doctreedir,
+ all_docs=self.all_docs,
+ dependencies=self.dependencies,
+ ):
+ changed.add(docname)
+ continue
return added, changed, removed
@@ -628,7 +610,9 @@ def note_dependency(
"""
if docname is None:
docname = self.docname
- self.dependencies.setdefault(docname, set()).add(_StrPath(filename))
+ # this will do the right thing when *filename* is absolute too
+ filename = self.srcdir / filename
+ self.dependencies.setdefault(docname, set()).add(filename)
def note_included(self, filename: str | os.PathLike[str]) -> None:
"""Add *filename* as a included from other document.
@@ -682,6 +666,8 @@ def get_and_resolve_doctree(
self,
docname: str,
builder: Builder,
+ *,
+ tags: Tags,
doctree: nodes.document | None = None,
prune_toctrees: bool = True,
includehidden: bool = False,
@@ -701,6 +687,7 @@ def get_and_resolve_doctree(
self.apply_post_transforms(doctree, docname)
# now, resolve all toctree nodes
+ tags = builder.tags
for toctreenode in doctree.findall(addnodes.toctree):
result = toctree_adapters._resolve_toctree(
self,
@@ -709,7 +696,7 @@ def get_and_resolve_doctree(
toctreenode,
prune=prune_toctrees,
includehidden=includehidden,
- tags=builder.tags,
+ tags=tags,
)
if result is None:
toctreenode.parent.replace(toctreenode, [])
@@ -750,7 +737,7 @@ def resolve_toctree(
titles_only=titles_only,
collapse=collapse,
includehidden=includehidden,
- tags=builder.tags,
+ tags=self._tags,
)
def resolve_references(
@@ -764,7 +751,7 @@ def apply_post_transforms(self, doctree: nodes.document, docname: str) -> None:
new = deepcopy(backup)
new.docname = docname
try:
- # set env.docname during applying post-transforms
+ # set env.current_document.docname during applying post-transforms
self.current_document = new
transformer = SphinxTransformer(doctree)
@@ -848,6 +835,71 @@ def _differing_config_keys(old: Config, new: Config) -> frozenset[str]:
return frozenset(not_in_both | different_values)
+def _has_doc_changed(
+ docname: str,
+ *,
+ filename: Path,
+ reread_always: Set[str],
+ doctreedir: Path,
+ all_docs: Mapping[str, int],
+ dependencies: Mapping[str, Set[Path]],
+) -> bool:
+ # check the "reread always" list
+ if docname in reread_always:
+ logger.debug('[build target] changed %r: re-read forced', docname)
+ return True
+
+ # if the doctree file is not there, rebuild
+ doctree_path = doctreedir / f'{docname}.doctree'
+ if not doctree_path.is_file():
+ logger.debug('[build target] changed %r: doctree file does not exist', docname)
+ return True
+
+ # check the mtime of the document
+ mtime = all_docs[docname]
+ new_mtime = _last_modified_time(filename)
+ if new_mtime > mtime:
+ logger.debug(
+ '[build target] changed: %r is outdated (%s -> %s)',
+ docname,
+ _format_rfc3339_microseconds(mtime),
+ _format_rfc3339_microseconds(new_mtime),
+ )
+ return True
+
+ # finally, check the mtime of dependencies
+ if docname not in dependencies:
+ return False
+ for dep_path in dependencies[docname]:
+ try:
+ dep_path_is_file = dep_path.is_file()
+ except OSError:
+ return True # give it another chance
+ if not dep_path_is_file:
+ logger.debug(
+ '[build target] changed: %r is missing dependency %r',
+ docname,
+ dep_path,
+ )
+ return True
+
+ try:
+ dep_mtime = _last_modified_time(dep_path)
+ except OSError:
+ return True # give it another chance
+ if dep_mtime > mtime:
+ logger.debug(
+ '[build target] changed: %r is outdated due to dependency %r (%s -> %s)',
+ docname,
+ dep_path,
+ _format_rfc3339_microseconds(mtime),
+ _format_rfc3339_microseconds(dep_mtime),
+ )
+ return True
+
+ return False
+
+
def _traverse_toctree(
traversed: set[str],
parent: str | None,
diff --git a/sphinx/environment/adapters/indexentries.py b/sphinx/environment/adapters/indexentries.py
index c19628515b6..0428e488308 100644
--- a/sphinx/environment/adapters/indexentries.py
+++ b/sphinx/environment/adapters/indexentries.py
@@ -50,7 +50,6 @@
class IndexEntries:
def __init__(self, env: BuildEnvironment) -> None:
self.env = env
- self.builder: Builder
def create_index(
self,
@@ -253,7 +252,7 @@ def _key_func_2(entry: tuple[str, _IndexEntryTargets]) -> str:
def _group_by_func(entry: tuple[str, _IndexEntry]) -> str:
"""Group the entries by letter or category key."""
- key, (targets, sub_items, category_key) = entry
+ key, (_targets, _sub_items, category_key) = entry
if category_key is not None:
return category_key
diff --git a/sphinx/environment/adapters/toctree.py b/sphinx/environment/adapters/toctree.py
index bedeca2f299..1435c069492 100644
--- a/sphinx/environment/adapters/toctree.py
+++ b/sphinx/environment/adapters/toctree.py
@@ -2,12 +2,14 @@
from __future__ import annotations
+import warnings
from typing import TYPE_CHECKING, TypeVar
from docutils import nodes
from docutils.nodes import Element
from sphinx import addnodes
+from sphinx.deprecation import RemovedInSphinx10Warning
from sphinx.locale import __
from sphinx.util import logging, url_re
from sphinx.util.matching import Matcher
@@ -69,6 +71,8 @@ def global_toctree_for_doc(
env: BuildEnvironment,
docname: str,
builder: Builder,
+ *,
+ tags: Tags = ..., # type: ignore[assignment]
collapse: bool = False,
includehidden: bool = True,
maxdepth: int = 0,
@@ -78,6 +82,15 @@ def global_toctree_for_doc(
This gives the global ToC, with all ancestors and their siblings.
"""
+ if tags is ...:
+ warnings.warn(
+ "'tags' will become a required keyword argument "
+ 'for global_toctree_for_doc() in Sphinx 10.0.',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
+ tags = builder.tags
+
resolved = (
_resolve_toctree(
env,
@@ -89,7 +102,7 @@ def global_toctree_for_doc(
titles_only=titles_only,
collapse=collapse,
includehidden=includehidden,
- tags=builder.tags,
+ tags=tags,
)
for toctree_node in env.master_doctree.findall(addnodes.toctree)
)
@@ -191,9 +204,7 @@ def _resolve_toctree(
# prune the tree to maxdepth, also set toc depth and current classes
_toctree_add_classes(newnode, 1, docname)
- newnode = _toctree_copy(
- newnode, 1, maxdepth if prune else 0, collapse, builder.tags
- )
+ newnode = _toctree_copy(newnode, 1, maxdepth if prune else 0, collapse, tags)
if (
isinstance(newnode[-1], nodes.Element) and len(newnode[-1]) == 0
@@ -444,7 +455,7 @@ def _toctree_standard_entry(
def _toctree_add_classes(node: Element, depth: int, docname: str) -> None:
"""Add 'toctree-l%d' and 'current' classes to the toctree."""
for subnode in node.children:
- if isinstance(subnode, addnodes.compact_paragraph | nodes.list_item):
+ if isinstance(subnode, (addnodes.compact_paragraph, nodes.list_item)):
# for and
, indicate the depth level and recurse
subnode['classes'].append(f'toctree-l{depth - 1}')
_toctree_add_classes(subnode, depth, docname)
@@ -471,56 +482,84 @@ def _toctree_add_classes(node: Element, depth: int, docname: str) -> None:
subnode = subnode.parent
-ET = TypeVar('ET', bound=Element)
+_ET = TypeVar('_ET', bound=Element)
def _toctree_copy(
- node: ET, depth: int, maxdepth: int, collapse: bool, tags: Tags
-) -> ET:
+ node: _ET, depth: int, maxdepth: int, collapse: bool, tags: Tags
+) -> _ET:
"""Utility: Cut and deep-copy a TOC at a specified depth."""
- keep_bullet_list_sub_nodes = depth <= 1 or (
- (depth <= maxdepth or maxdepth <= 0) and (not collapse or 'iscurrent' in node)
- )
+ assert not isinstance(node, addnodes.only)
+ depth = max(depth - 1, 1)
+ copied = _toctree_copy_seq(node, depth, maxdepth, collapse, tags, initial_call=True)
+ assert len(copied) == 1
+ return copied[0] # type: ignore[return-value]
- copy = node.copy()
- for subnode in node.children:
- if isinstance(subnode, addnodes.compact_paragraph | nodes.list_item):
- # for and
, just recurse
- copy.append(_toctree_copy(subnode, depth, maxdepth, collapse, tags))
- elif isinstance(subnode, nodes.bullet_list):
- # for , copy if the entry is top-level
- # or, copy if the depth is within bounds and;
- # collapsing is disabled or the sub-entry's parent is 'current'.
- # The boolean is constant so is calculated outwith the loop.
- if keep_bullet_list_sub_nodes:
- copy.append(_toctree_copy(subnode, depth + 1, maxdepth, collapse, tags))
- elif isinstance(subnode, addnodes.toctree):
- # copy sub toctree nodes for later processing
- copy.append(subnode.copy())
- elif isinstance(subnode, addnodes.only):
- # only keep children if the only node matches the tags
- if _only_node_keep_children(subnode, tags):
- for child in subnode.children:
- copy.append(
- _toctree_copy(
- child,
- depth,
- maxdepth,
- collapse,
- tags, # type: ignore[type-var]
- )
- )
- elif isinstance(subnode, nodes.reference | nodes.title):
- # deep copy references and captions
- sub_node_copy = subnode.copy()
- sub_node_copy.children = [child.deepcopy() for child in subnode.children]
- for child in sub_node_copy.children:
- child.parent = sub_node_copy
- copy.append(sub_node_copy)
- else:
- msg = f'Unexpected node type {subnode.__class__.__name__!r}!'
- raise ValueError(msg) # NoQA: TRY004
- return copy
+
+def _toctree_copy_seq(
+ node: Node,
+ depth: int,
+ maxdepth: int,
+ collapse: bool,
+ tags: Tags,
+ *,
+ initial_call: bool = False,
+ is_current: bool = False,
+) -> list[Element]:
+ copy: Element
+ if isinstance(node, (addnodes.compact_paragraph, nodes.list_item)):
+ # for and
- , just recurse
+ copy = node.copy()
+ for subnode in node.children:
+ copy += _toctree_copy_seq( # type: ignore[assignment,operator]
+ subnode, depth, maxdepth, collapse, tags, is_current='iscurrent' in node
+ )
+ return [copy]
+
+ if isinstance(node, nodes.bullet_list):
+ # for
, copy if the entry is top-level
+ # or, copy if the depth is within bounds and;
+ # collapsing is disabled or the sub-entry's parent is 'current'.
+ # The boolean is constant so is calculated outwith the loop.
+ keep_bullet_list_sub_nodes = depth <= 1 or (
+ (depth <= maxdepth or maxdepth <= 0)
+ and (not collapse or is_current or 'iscurrent' in node)
+ )
+ if not keep_bullet_list_sub_nodes and not initial_call:
+ return []
+ depth += 1
+ copy = node.copy()
+ for subnode in node.children:
+ copy += _toctree_copy_seq(
+ subnode, depth, maxdepth, collapse, tags, is_current='iscurrent' in node
+ )
+ return [copy]
+
+ if isinstance(node, addnodes.toctree):
+ # copy sub toctree nodes for later processing
+ return [node.copy()]
+
+ if isinstance(node, addnodes.only):
+ # only keep children if the only node matches the tags
+ if not _only_node_keep_children(node, tags):
+ return []
+ copied: list[Element] = []
+ for subnode in node.children:
+ copied += _toctree_copy_seq(
+ subnode, depth, maxdepth, collapse, tags, is_current='iscurrent' in node
+ )
+ return copied
+
+ if isinstance(node, (nodes.reference, nodes.title)):
+ # deep copy references and captions
+ sub_node_copy = node.copy()
+ sub_node_copy.children = [child.deepcopy() for child in node.children]
+ for child in sub_node_copy.children:
+ child.parent = sub_node_copy
+ return [sub_node_copy]
+
+ msg = f'Unexpected node type {node.__class__.__name__!r}!'
+ raise ValueError(msg)
def _get_toctree_ancestors(
@@ -584,5 +623,5 @@ def get_toctree_for(
**kwargs: Any,
) -> Element | None:
return global_toctree_for_doc(
- self.env, docname, builder, collapse=collapse, **kwargs
+ self.env, docname, builder, tags=builder.tags, collapse=collapse, **kwargs
)
diff --git a/sphinx/environment/collectors/asset.py b/sphinx/environment/collectors/asset.py
index 13105587673..a1af7c33474 100644
--- a/sphinx/environment/collectors/asset.py
+++ b/sphinx/environment/collectors/asset.py
@@ -47,7 +47,7 @@ def merge_other(
def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
"""Process and rewrite image URIs."""
- docname = app.env.docname
+ docname = app.env.current_document.docname
for node in doctree.findall(nodes.image):
# Map the mimetype to the corresponding image. The writer may
@@ -117,7 +117,7 @@ def collect_candidates(
try:
mimetype = guess_mimetype(filename)
if mimetype is None:
- basename, suffix = os.path.splitext(filename)
+ _basename, suffix = os.path.splitext(filename)
mimetype = 'image/x-' + suffix[1:]
if mimetype not in candidates:
globbed.setdefault(mimetype, []).append(new_imgpath.as_posix())
@@ -156,7 +156,9 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
if '://' in targetname:
node['refuri'] = targetname
else:
- rel_filename, filename = app.env.relfn2path(targetname, app.env.docname)
+ rel_filename, filename = app.env.relfn2path(
+ targetname, app.env.current_document.docname
+ )
app.env.note_dependency(rel_filename)
if not os.access(filename, os.R_OK):
logger.warning(
@@ -168,7 +170,7 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
)
continue
node['filename'] = app.env.dlfiles.add_file(
- app.env.docname, rel_filename
+ app.env.current_document.docname, rel_filename
).as_posix()
diff --git a/sphinx/environment/collectors/metadata.py b/sphinx/environment/collectors/metadata.py
index 2cda65beec2..8936341a919 100644
--- a/sphinx/environment/collectors/metadata.py
+++ b/sphinx/environment/collectors/metadata.py
@@ -41,7 +41,7 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
if index is None:
return
elif isinstance(doctree[index], nodes.docinfo):
- md = app.env.metadata[app.env.docname]
+ md = app.env.metadata[app.env.current_document.docname]
for node in doctree[index]: # type: ignore[attr-defined]
# nodes are multiply inherited...
if isinstance(node, nodes.authors):
diff --git a/sphinx/environment/collectors/title.py b/sphinx/environment/collectors/title.py
index 4bd3ed7e146..50dfa2bdc54 100644
--- a/sphinx/environment/collectors/title.py
+++ b/sphinx/environment/collectors/title.py
@@ -55,8 +55,8 @@ def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
else:
# document has no title
titlenode += nodes.Text(doctree.get('title', ''))
- app.env.titles[app.env.docname] = titlenode
- app.env.longtitles[app.env.docname] = longtitlenode
+ app.env.titles[app.env.current_document.docname] = titlenode
+ app.env.longtitles[app.env.current_document.docname] = longtitlenode
def setup(app: Sphinx) -> ExtensionMetadata:
diff --git a/sphinx/environment/collectors/toctree.py b/sphinx/environment/collectors/toctree.py
index fddd269e1b7..5c3d5c97f8c 100644
--- a/sphinx/environment/collectors/toctree.py
+++ b/sphinx/environment/collectors/toctree.py
@@ -65,7 +65,7 @@ def merge_other(
def process_doc(self, app: Sphinx, doctree: nodes.document) -> None:
"""Build a TOC from the doctree and store it in the inventory."""
- docname = app.env.docname
+ docname = app.env.current_document.docname
numentries = [0] # nonlocal again...
def build_toc(
diff --git a/sphinx/events.py b/sphinx/events.py
index 571ad143269..e5d0f715233 100644
--- a/sphinx/events.py
+++ b/sphinx/events.py
@@ -9,6 +9,7 @@
from operator import attrgetter
from typing import TYPE_CHECKING, NamedTuple, overload
+from sphinx.deprecation import _deprecation_warning
from sphinx.errors import ExtensionError, SphinxError
from sphinx.locale import __
from sphinx.util import logging
@@ -27,7 +28,11 @@
from sphinx.config import Config
from sphinx.domains import Domain
from sphinx.environment import BuildEnvironment
- from sphinx.ext.autodoc import _AutodocProcessDocstringListener
+ from sphinx.ext.autodoc._event_listeners import (
+ _AutodocProcessDocstringListener,
+ _AutodocProcessSignatureListener,
+ _AutodocSkipMemberListener,
+ )
from sphinx.ext.todo import todo_node
@@ -66,17 +71,25 @@ class EventManager:
"""Event manager for Sphinx."""
def __init__(self, app: Sphinx) -> None:
- self.app = app
+ self._app = app
self.events = core_events.copy()
self.listeners: dict[str, list[EventListener]] = defaultdict(list)
self.next_listener_id = 0
+ # pass through errors for debugging.
+ self._reraise_errors: bool = app.pdb
+
def add(self, name: str) -> None:
"""Register a custom Sphinx event."""
if name in self.events:
raise ExtensionError(__('Event %r already present') % name)
self.events[name] = ''
+ @property
+ def app(self) -> Sphinx:
+ _deprecation_warning(__name__, 'EventManager.app', remove=(10, 0))
+ return self._app
+
# ---- Core events -------------------------------------------------------
@overload
@@ -282,20 +295,7 @@ def connect(
def connect(
self,
name: Literal['autodoc-process-signature'],
- callback: Callable[
- [
- Sphinx,
- Literal[
- 'module', 'class', 'exception', 'function', 'method', 'attribute'
- ],
- str,
- Any,
- dict[str, bool],
- str | None,
- str | None,
- ],
- tuple[str | None, str | None] | None,
- ],
+ callback: _AutodocProcessSignatureListener,
priority: int,
) -> int: ...
@@ -311,19 +311,7 @@ def connect(
def connect(
self,
name: Literal['autodoc-skip-member'],
- callback: Callable[
- [
- Sphinx,
- Literal[
- 'module', 'class', 'exception', 'function', 'method', 'attribute'
- ],
- str,
- Any,
- bool,
- dict[str, bool],
- ],
- bool,
- ],
+ callback: _AutodocSkipMemberListener,
priority: int,
) -> int: ...
@@ -401,15 +389,14 @@ def emit(
listeners = sorted(self.listeners[name], key=attrgetter('priority'))
for listener in listeners:
try:
- results.append(listener.handler(self.app, *args))
+ results.append(listener.handler(self._app, *args))
except allowed_exceptions:
# pass through the errors specified as *allowed_exceptions*
raise
except SphinxError:
raise
except Exception as exc:
- if self.app.pdb:
- # Just pass through the error, so that it can be debugged.
+ if self._reraise_errors:
raise
modname = safe_getattr(listener.handler, '__module__', None)
raise ExtensionError(
diff --git a/sphinx/ext/apidoc/_extension.py b/sphinx/ext/apidoc/_extension.py
index c5a67528ec0..7199fbba6dd 100644
--- a/sphinx/ext/apidoc/_extension.py
+++ b/sphinx/ext/apidoc/_extension.py
@@ -243,7 +243,7 @@ def _check_collection_of_strings(
"""
if key not in options:
return default
- if not isinstance(options[key], list | tuple | set | frozenset):
+ if not isinstance(options[key], (list, tuple, set, frozenset)):
LOGGER.warning(
__("apidoc_modules item %i '%s' must be a sequence"),
index,
diff --git a/sphinx/ext/autodoc/__init__.py b/sphinx/ext/autodoc/__init__.py
index 8cdb039df3e..31d6f8e6647 100644
--- a/sphinx/ext/autodoc/__init__.py
+++ b/sphinx/ext/autodoc/__init__.py
@@ -7,3158 +7,91 @@
from __future__ import annotations
-import functools
-import operator
-import re
-import sys
-from inspect import Parameter, Signature
-from typing import TYPE_CHECKING, Any, NewType, TypeVar
-
-from docutils.statemachine import StringList
+from typing import TYPE_CHECKING
import sphinx
from sphinx.config import ENUM
-from sphinx.errors import PycodeError
-from sphinx.ext.autodoc.importer import get_class_members, import_module, import_object
-from sphinx.ext.autodoc.mock import ismock, mock, undecorate
-from sphinx.locale import _, __
-from sphinx.pycode import ModuleAnalyzer
-from sphinx.util import inspect, logging
-from sphinx.util.docstrings import prepare_docstring, separate_metadata
-from sphinx.util.inspect import (
- evaluate_signature,
- getdoc,
- object_description,
- safe_getattr,
- stringify_signature,
+from sphinx.ext.autodoc._directive_options import (
+ Options,
+ annotation_option,
+ bool_option,
+ class_doc_from_option,
+ exclude_members_option,
+ identity,
+ inherited_members_option,
+ member_order_option,
+ members_option,
+ merge_members_option,
+)
+from sphinx.ext.autodoc._event_listeners import between, cut_lines
+from sphinx.ext.autodoc._member_finder import ObjectMember, special_member_re
+from sphinx.ext.autodoc._names import py_ext_sig_re
+from sphinx.ext.autodoc._sentinels import ALL, EMPTY, SUPPRESS, UNINITIALIZED_ATTR
+from sphinx.ext.autodoc._sentinels import (
+ INSTANCE_ATTR as INSTANCEATTR,
)
-from sphinx.util.typing import get_type_hints, restify, stringify_annotation
+from sphinx.ext.autodoc._sentinels import (
+ SLOTS_ATTR as SLOTSATTR,
+)
+from sphinx.ext.autodoc.directive import AutodocDirective
+from sphinx.ext.autodoc.typehints import _merge_typehints
if TYPE_CHECKING:
- from collections.abc import Callable, Iterator, Sequence
- from types import ModuleType
- from typing import ClassVar, Literal, TypeAlias
-
from sphinx.application import Sphinx
- from sphinx.config import Config
- from sphinx.environment import BuildEnvironment, _CurrentDocument
- from sphinx.events import EventManager
- from sphinx.ext.autodoc.directive import DocumenterBridge
- from sphinx.registry import SphinxComponentRegistry
- from sphinx.util.typing import ExtensionMetadata, OptionSpec, _RestifyMode
-
- _AutodocObjType = Literal[
- 'module', 'class', 'exception', 'function', 'method', 'attribute'
- ]
- _AutodocProcessDocstringListener: TypeAlias = Callable[
- [Sphinx, _AutodocObjType, str, Any, dict[str, bool], list[str]], None
- ]
-
-logger = logging.getLogger(__name__)
-
-
-# This type isn't exposed directly in any modules, but can be found
-# here in most Python versions
-MethodDescriptorType = type(type.__subclasses__)
-
-
-#: extended signature RE: with explicit module name separated by ::
-py_ext_sig_re = re.compile(
- r"""^ ([\w.]+::)? # explicit module name
- ([\w.]+\.)? # module and/or class name(s)
- (\w+) \s* # thing name
- (?: \[\s*(.*?)\s*])? # optional: type parameters list
- (?: \((.*)\) # optional: arguments
- (?:\s* -> \s* (.*))? # return annotation
- )? $ # and nothing more
- """,
- re.VERBOSE,
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+ from sphinx.util.typing import ExtensionMetadata
+
+__all__ = (
+ # Useful event listener factories for autodoc-process-docstring
+ 'cut_lines',
+ 'between',
+ # This class is only used in ``sphinx.ext.autodoc.directive``,
+ # but we export it here for compatibility.
+ # See: https://github.com/sphinx-doc/sphinx/issues/4538
+ 'Options',
+ # Option spec functions.
+ # Exported for compatibility.
+ 'annotation_option',
+ 'bool_option',
+ 'class_doc_from_option',
+ 'exclude_members_option',
+ 'identity',
+ 'inherited_members_option',
+ 'member_order_option',
+ 'members_option',
+ 'merge_members_option',
+ # Sentinels.
+ # Exported for compatibility.
+ 'ALL',
+ 'EMPTY',
+ 'INSTANCEATTR',
+ 'SLOTSATTR',
+ 'SUPPRESS',
+ 'UNINITIALIZED_ATTR',
+ # Miscellaneous other names.
+ # Exported for compatibility.
+ 'ObjectMember',
+ 'py_ext_sig_re',
+ 'special_member_re',
)
-special_member_re = re.compile(r'^__\S+__$')
-
-
-def _get_render_mode(
- typehints_format: Literal['fully-qualified', 'short'],
-) -> _RestifyMode:
- if typehints_format == 'short':
- return 'smart'
- return 'fully-qualified-except-typing'
-
-
-def identity(x: Any) -> Any:
- return x
-
-
-class _All:
- """A special value for :*-members: that matches to any member."""
-
- def __contains__(self, item: Any) -> bool:
- return True
-
- def append(self, item: Any) -> None:
- pass # nothing
-
-
-class _Empty:
- """A special value for :exclude-members: that never matches to any member."""
-
- def __contains__(self, item: Any) -> bool:
- return False
-
-
-ALL = _All()
-EMPTY = _Empty()
-UNINITIALIZED_ATTR = object()
-INSTANCEATTR = object()
-SLOTSATTR = object()
-
-
-def members_option(arg: Any) -> object | list[str]:
- """Used to convert the :members: option to auto directives."""
- if arg in {None, True}:
- return ALL
- elif arg is False:
- return None
- else:
- return [x.strip() for x in arg.split(',') if x.strip()]
-
-
-def exclude_members_option(arg: Any) -> object | set[str]:
- """Used to convert the :exclude-members: option."""
- if arg in {None, True}:
- return EMPTY
- return {x.strip() for x in arg.split(',') if x.strip()}
-
-
-def inherited_members_option(arg: Any) -> set[str]:
- """Used to convert the :inherited-members: option to auto directives."""
- if arg in {None, True}:
- return {'object'}
- elif arg:
- return {x.strip() for x in arg.split(',')}
- else:
- return set()
-
-
-def member_order_option(arg: Any) -> str | None:
- """Used to convert the :member-order: option to auto directives."""
- if arg in {None, True}:
- return None
- elif arg in {'alphabetical', 'bysource', 'groupwise'}:
- return arg
- else:
- raise ValueError(__('invalid value for member-order option: %s') % arg)
-
-
-def class_doc_from_option(arg: Any) -> str | None:
- """Used to convert the :class-doc-from: option to autoclass directives."""
- if arg in {'both', 'class', 'init'}:
- return arg
- else:
- raise ValueError(__('invalid value for class-doc-from option: %s') % arg)
-
-
-SUPPRESS = object()
-
-
-def annotation_option(arg: Any) -> Any:
- if arg in {None, True}:
- # suppress showing the representation of the object
- return SUPPRESS
- else:
- return arg
-
-
-def bool_option(arg: Any) -> bool:
- """Used to convert flag options to auto directives. (Instead of
- directives.flag(), which returns None).
- """
- return True
-
-
-def merge_members_option(options: dict[str, Any]) -> None:
- """Merge :private-members: and :special-members: options to the
- :members: option.
- """
- if options.get('members') is ALL:
- # merging is not needed when members: ALL
- return
-
- members = options.setdefault('members', [])
- for key in ('private-members', 'special-members'):
- other_members = options.get(key)
- if other_members is not None and other_members is not ALL:
- for member in other_members:
- if member not in members:
- members.append(member)
-
-
-# Some useful event listener factories for autodoc-process-docstring.
-
-
-def cut_lines(
- pre: int, post: int = 0, what: Sequence[str] | None = None
-) -> _AutodocProcessDocstringListener:
- """Return a listener that removes the first *pre* and last *post*
- lines of every docstring. If *what* is a sequence of strings,
- only docstrings of a type in *what* will be processed.
-
- Use like this (e.g. in the ``setup()`` function of :file:`conf.py`)::
-
- from sphinx.ext.autodoc import cut_lines
-
- app.connect('autodoc-process-docstring', cut_lines(4, what={'module'}))
-
- This can (and should) be used in place of :confval:`automodule_skip_lines`.
- """
- if not what:
- what_unique: frozenset[str] = frozenset()
- elif isinstance(what, str): # strongly discouraged
- what_unique = frozenset({what})
- else:
- what_unique = frozenset(what)
-
- def process(
- app: Sphinx,
- what_: _AutodocObjType,
- name: str,
- obj: Any,
- options: dict[str, bool],
- lines: list[str],
- ) -> None:
- if what_unique and what_ not in what_unique:
- return
- del lines[:pre]
- if post:
- # remove one trailing blank line.
- if lines and not lines[-1]:
- lines.pop(-1)
- del lines[-post:]
- # make sure there is a blank line at the end
- if lines and lines[-1]:
- lines.append('')
-
- return process
-
-
-def between(
- marker: str,
- what: Sequence[str] | None = None,
- keepempty: bool = False,
- exclude: bool = False,
-) -> _AutodocProcessDocstringListener:
- """Return a listener that either keeps, or if *exclude* is True excludes,
- lines between lines that match the *marker* regular expression. If no line
- matches, the resulting docstring would be empty, so no change will be made
- unless *keepempty* is true.
-
- If *what* is a sequence of strings, only docstrings of a type in *what* will
- be processed.
- """
- marker_re = re.compile(marker)
-
- def process(
- app: Sphinx,
- what_: _AutodocObjType,
- name: str,
- obj: Any,
- options: dict[str, bool],
- lines: list[str],
- ) -> None:
- if what and what_ not in what:
- return
- deleted = 0
- delete = not exclude
- orig_lines = lines.copy()
- for i, line in enumerate(orig_lines):
- if delete:
- lines.pop(i - deleted)
- deleted += 1
- if marker_re.match(line):
- delete = not delete
- if delete:
- lines.pop(i - deleted)
- deleted += 1
- if not lines and not keepempty:
- lines[:] = orig_lines
- # make sure there is a blank line at the end
- if lines and lines[-1]:
- lines.append('')
-
- return process
-
-
-# This class is used only in ``sphinx.ext.autodoc.directive``,
-# But we define this class here to keep compatibility
-# See: https://github.com/sphinx-doc/sphinx/issues/4538
-class Options(dict[str, Any]): # NoQA: FURB189
- """A dict/attribute hybrid that returns None on nonexisting keys."""
-
- def copy(self) -> Options:
- return Options(super().copy())
-
- def __getattr__(self, name: str) -> Any:
- try:
- return self[name.replace('_', '-')]
- except KeyError:
- return None
-
-
-class ObjectMember:
- """A member of object.
-
- This is used for the result of `Documenter.get_module_members()` to
- represent each member of the object.
- """
-
- __slots__ = '__name__', 'object', 'docstring', 'class_', 'skipped'
-
- __name__: str
- object: Any
- docstring: str | None
- class_: Any
- skipped: bool
-
- def __init__(
- self,
- name: str,
- obj: Any,
- *,
- docstring: str | None = None,
- class_: Any = None,
- skipped: bool = False,
- ) -> None:
- self.__name__ = name
- self.object = obj
- self.docstring = docstring
- self.class_ = class_
- self.skipped = skipped
-
- def __repr__(self) -> str:
- return (
- f'ObjectMember('
- f'name={self.__name__!r}, '
- f'obj={self.object!r}, '
- f'docstring={self.docstring!r}, '
- f'class_={self.class_!r}, '
- f'skipped={self.skipped!r}'
- f')'
- )
-
-
-class Documenter:
- """A Documenter knows how to autodocument a single object type. When
- registered with the AutoDirective, it will be used to document objects
- of that type when needed by autodoc.
-
- Its *objtype* attribute selects what auto directive it is assigned to
- (the directive name is 'auto' + objtype), and what directive it generates
- by default, though that can be overridden by an attribute called
- *directivetype*.
-
- A Documenter has an *option_spec* that works like a docutils directive's;
- in fact, it will be used to parse an auto directive's options that matches
- the Documenter.
- """
-
- #: name by which the directive is called (auto...) and the default
- #: generated directive name
- objtype = 'object'
- #: indentation by which to indent the directive content
- content_indent = ' '
- #: priority if multiple documenters return True from can_document_member
- priority = 0
- #: order if autodoc_member_order is set to 'groupwise'
- member_order = 0
- #: true if the generated content may contain titles
- titles_allowed = True
-
- option_spec: ClassVar[OptionSpec] = {
- 'no-index': bool_option,
- 'no-index-entry': bool_option,
- 'noindex': bool_option,
- }
-
- def get_attr(self, obj: Any, name: str, *defargs: Any) -> Any:
- """getattr() override for types such as Zope interfaces."""
- return autodoc_attrgetter(obj, name, *defargs, registry=self.env._registry)
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- """Called to see if a member can be documented by this Documenter."""
- msg = 'must be implemented in subclasses'
- raise NotImplementedError(msg)
-
- def __init__(
- self, directive: DocumenterBridge, name: str, indent: str = ''
- ) -> None:
- self.directive = directive
- self.config: Config = directive.env.config
- self.env: BuildEnvironment = directive.env
- self._current_document: _CurrentDocument = directive.env.current_document
- self._events: EventManager = directive.env.events
- self.options = directive.genopt
- self.name = name
- self.indent = indent
- # the module and object path within the module, and the fully
- # qualified name (all set after resolve_name succeeds)
- self.modname: str = ''
- self.module: ModuleType | None = None
- self.objpath: list[str] = []
- self.fullname = ''
- # extra signature items (arguments and return annotation,
- # also set after resolve_name succeeds)
- self.args: str | None = None
- self.retann: str = ''
- # the object to document (set after import_object succeeds)
- self.object: Any = None
- self.object_name = ''
- # the parent/owner of the object to document
- self.parent: Any = None
- # the module analyzer to get at attribute docs, or None
- self.analyzer: ModuleAnalyzer | None = None
-
- @property
- def documenters(self) -> dict[str, type[Documenter]]:
- """Returns registered Documenter classes"""
- return self.env._registry.documenters
-
- def add_line(self, line: str, source: str, *lineno: int) -> None:
- """Append one line of generated reST to the output."""
- if line.strip(): # not a blank line
- self.directive.result.append(self.indent + line, source, *lineno)
- else:
- self.directive.result.append('', source, *lineno)
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- """Resolve the module and name of the object to document given by the
- arguments and the current module/class.
-
- Must return a pair of the module name and a chain of attributes; for
- example, it would return ``('zipfile', ['ZipFile', 'open'])`` for the
- ``zipfile.ZipFile.open`` method.
- """
- msg = 'must be implemented in subclasses'
- raise NotImplementedError(msg)
-
- def parse_name(self) -> bool:
- """Determine what module to import and what attribute to document.
-
- Returns True and sets *self.modname*, *self.objpath*, *self.fullname*,
- *self.args* and *self.retann* if parsing and resolving was successful.
- """
- # first, parse the definition -- auto directives for classes and
- # functions can contain a signature which is then used instead of
- # an autogenerated one
- matched = py_ext_sig_re.match(self.name)
- if matched is None:
- logger.warning(
- __('invalid signature for auto%s (%r)'),
- self.objtype,
- self.name,
- type='autodoc',
- )
- return False
- explicit_modname, path, base, tp_list, args, retann = matched.groups()
-
- # support explicit module and class name separation via ::
- if explicit_modname is not None:
- modname = explicit_modname[:-2]
- parents = path.rstrip('.').split('.') if path else []
- else:
- modname = None
- parents = []
-
- with mock(self.config.autodoc_mock_imports):
- modname, self.objpath = self.resolve_name(modname, parents, path, base)
-
- if not modname:
- return False
-
- self.modname = modname
- self.args = args
- self.retann = retann
- self.fullname = '.'.join((self.modname or '', *self.objpath))
- return True
-
- def import_object(self, raiseerror: bool = False) -> bool:
- """Import the object given by *self.modname* and *self.objpath* and set
- it as *self.object*.
-
- Returns True if successful, False if an error occurred.
- """
- with mock(self.config.autodoc_mock_imports):
- try:
- ret = import_object(
- self.modname, self.objpath, self.objtype, attrgetter=self.get_attr
- )
- self.module, self.parent, self.object_name, self.object = ret
- if ismock(self.object):
- self.object = undecorate(self.object)
- return True
- except ImportError as exc:
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def get_real_modname(self) -> str:
- """Get the real module name of an object to document.
-
- It can differ from the name of the module through which the object was
- imported.
- """
- return self.get_attr(self.object, '__module__', None) or self.modname
-
- def check_module(self) -> bool:
- """Check if *self.object* is really defined in the module given by
- *self.modname*.
- """
- if self.options.imported_members:
- return True
-
- subject = inspect.unpartial(self.object)
- modname = self.get_attr(subject, '__module__', None)
- return not modname or modname == self.modname
-
- def format_args(self, **kwargs: Any) -> str:
- """Format the argument signature of *self.object*.
-
- Should return None if the object does not have a signature.
- """
- return ''
-
- def format_name(self) -> str:
- """Format the name of *self.object*.
-
- This normally should be something that can be parsed by the generated
- directive, but doesn't need to be (Sphinx will display it unparsed
- then).
- """
- # normally the name doesn't contain the module (except for module
- # directives of course)
- return '.'.join(self.objpath) or self.modname
-
- def _call_format_args(self, **kwargs: Any) -> str:
- if kwargs:
- try:
- return self.format_args(**kwargs)
- except TypeError:
- # avoid chaining exceptions, by putting nothing here
- pass
-
- # retry without arguments for old documenters
- return self.format_args()
-
- def format_signature(self, **kwargs: Any) -> str:
- """Format the signature (arguments and return annotation) of the object.
-
- Let the user process it via the ``autodoc-process-signature`` event.
- """
- if self.args is not None:
- # signature given explicitly
- args = f'({self.args})'
- retann = self.retann
- else:
- # try to introspect the signature
- try:
- retann = None
- args = self._call_format_args(**kwargs)
- if args:
- matched = re.match(r'^(\(.*\))\s+->\s+(.*)$', args)
- if matched:
- args = matched.group(1)
- retann = matched.group(2)
- except Exception as exc:
- logger.warning(
- __('error while formatting arguments for %s: %s'),
- self.fullname,
- exc,
- type='autodoc',
- )
- args = None
-
- result = self._events.emit_firstresult(
- 'autodoc-process-signature',
- self.objtype,
- self.fullname,
- self.object,
- self.options,
- args,
- retann,
- )
- if result:
- args, retann = result
-
- if args is not None:
- return args + ((' -> %s' % retann) if retann else '')
- else:
- return ''
-
- def add_directive_header(self, sig: str) -> None:
- """Add the directive header and options to the generated content."""
- domain = getattr(self, 'domain', 'py')
- directive = getattr(self, 'directivetype', self.objtype)
- name = self.format_name()
- sourcename = self.get_sourcename()
-
- # one signature per line, indented by column
- prefix = f'.. {domain}:{directive}:: '
- for i, sig_line in enumerate(sig.split('\n')):
- self.add_line(f'{prefix}{name}{sig_line}', sourcename)
- if i == 0:
- prefix = ' ' * len(prefix)
-
- if self.options.no_index or self.options.noindex:
- self.add_line(' :no-index:', sourcename)
- if self.options.no_index_entry:
- self.add_line(' :no-index-entry:', sourcename)
- if self.objpath:
- # Be explicit about the module, this is necessary since .. class::
- # etc. don't support a prepended module name
- self.add_line(' :module: %s' % self.modname, sourcename)
-
- def get_doc(self) -> list[list[str]] | None:
- """Decode and return lines of the docstring(s) for the object.
-
- When it returns None, autodoc-process-docstring will not be called for this
- object.
- """
- docstring = getdoc(
- self.object,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.parent,
- self.object_name,
- )
- if docstring:
- tab_width = self.directive.state.document.settings.tab_width
- return [prepare_docstring(docstring, tab_width)]
- return []
-
- def process_doc(self, docstrings: list[list[str]]) -> Iterator[str]:
- """Let the user process the docstrings before adding them."""
- for docstringlines in docstrings:
- if self._events is not None:
- # let extensions preprocess docstrings
- self._events.emit(
- 'autodoc-process-docstring',
- self.objtype,
- self.fullname,
- self.object,
- self.options,
- docstringlines,
- )
-
- if docstringlines and docstringlines[-1]:
- # append a blank line to the end of the docstring
- docstringlines.append('')
-
- yield from docstringlines
-
- def get_sourcename(self) -> str:
- obj_module = inspect.safe_getattr(self.object, '__module__', None)
- obj_qualname = inspect.safe_getattr(self.object, '__qualname__', None)
- if obj_module and obj_qualname:
- # Get the correct location of docstring from self.object
- # to support inherited methods
- fullname = f'{self.object.__module__}.{self.object.__qualname__}'
- else:
- fullname = self.fullname
-
- if self.analyzer:
- return f'{self.analyzer.srcname}:docstring of {fullname}'
- else:
- return 'docstring of %s' % fullname
-
- def add_content(self, more_content: StringList | None) -> None:
- """Add content from docstrings, attribute documentation and user."""
- docstring = True
-
- # set sourcename and add content from attribute documentation
- sourcename = self.get_sourcename()
- if self.analyzer:
- attr_docs = self.analyzer.find_attr_docs()
- if self.objpath:
- key = ('.'.join(self.objpath[:-1]), self.objpath[-1])
- if key in attr_docs:
- docstring = False
- # make a copy of docstring for attributes to avoid cache
- # the change of autodoc-process-docstring event.
- attribute_docstrings = [list(attr_docs[key])]
-
- for i, line in enumerate(self.process_doc(attribute_docstrings)):
- self.add_line(line, sourcename, i)
-
- # add content from docstrings
- if docstring:
- docstrings = self.get_doc()
- if docstrings is None:
- # Do not call autodoc-process-docstring on get_doc() returns None.
- pass
- else:
- if not docstrings:
- # append at least a dummy docstring, so that the event
- # autodoc-process-docstring is fired and can add some
- # content if desired
- docstrings.append([])
- for i, line in enumerate(self.process_doc(docstrings)):
- self.add_line(line, sourcename, i)
-
- # add additional content (e.g. from document), if present
- if more_content:
- for line, src in zip(more_content.data, more_content.items, strict=True):
- self.add_line(line, src[0], src[1])
-
- def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
- """Return `(members_check_module, members)` where `members` is a
- list of `(membername, member)` pairs of the members of *self.object*.
-
- If *want_all* is True, return all members. Else, only return those
- members given by *self.options.members* (which may also be None).
- """
- msg = 'must be implemented in subclasses'
- raise NotImplementedError(msg)
-
- def filter_members(
- self, members: list[ObjectMember], want_all: bool
- ) -> list[tuple[str, Any, bool]]:
- """Filter the given member list.
-
- Members are skipped if
-
- - they are private (except if given explicitly or the private-members
- option is set)
- - they are special methods (except if given explicitly or the
- special-members option is set)
- - they are undocumented (except if the undoc-members option is set)
-
- The user can override the skipping decision by connecting to the
- ``autodoc-skip-member`` event.
- """
-
- def is_filtered_inherited_member(name: str, obj: Any) -> bool:
- inherited_members = self.options.inherited_members or set()
- seen = set()
-
- if inspect.isclass(self.object):
- for cls in self.object.__mro__:
- if name in cls.__dict__:
- seen.add(cls)
- if (
- cls.__name__ in inherited_members
- and cls != self.object
- and any(
- issubclass(potential_child, cls) for potential_child in seen
- )
- ):
- # given member is a member of specified *super class*
- return True
- if name in cls.__dict__:
- return False
- if name in self.get_attr(cls, '__annotations__', {}):
- return False
- if isinstance(obj, ObjectMember) and obj.class_ is cls:
- return False
-
- return False
-
- ret = []
-
- # search for members in source code too
- namespace = '.'.join(self.objpath) # will be empty for modules
-
- if self.analyzer:
- attr_docs = self.analyzer.find_attr_docs()
- else:
- attr_docs = {}
-
- # process members and determine which to skip
- for obj in members:
- membername = obj.__name__
- member = obj.object
-
- # if isattr is True, the member is documented as an attribute
- isattr = member is INSTANCEATTR or (namespace, membername) in attr_docs
-
- try:
- doc = getdoc(
- member,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.object,
- membername,
- )
- if not isinstance(doc, str):
- # Ignore non-string __doc__
- doc = None
-
- # if the member __doc__ is the same as self's __doc__, it's just
- # inherited and therefore not the member's doc
- cls = self.get_attr(member, '__class__', None)
- if cls:
- cls_doc = self.get_attr(cls, '__doc__', None)
- if cls_doc == doc:
- doc = None
-
- if isinstance(obj, ObjectMember) and obj.docstring:
- # hack for ClassDocumenter to inject docstring via ObjectMember
- doc = obj.docstring
-
- doc, metadata = separate_metadata(doc)
- has_doc = bool(doc)
-
- if 'private' in metadata:
- # consider a member private if docstring has "private" metadata
- isprivate = True
- elif 'public' in metadata:
- # consider a member public if docstring has "public" metadata
- isprivate = False
- else:
- isprivate = membername.startswith('_')
-
- keep = False
- if ismock(member) and (namespace, membername) not in attr_docs:
- # mocked module or object
- pass
- elif (
- self.options.exclude_members
- and membername in self.options.exclude_members
- ):
- # remove members given by exclude-members
- keep = False
- elif want_all and special_member_re.match(membername):
- # special __methods__
- if (
- self.options.special_members
- and membername in self.options.special_members
- ):
- if membername == '__doc__': # NoQA: SIM114
- keep = False
- elif is_filtered_inherited_member(membername, obj):
- keep = False
- else:
- keep = has_doc or self.options.undoc_members
- else:
- keep = False
- elif (namespace, membername) in attr_docs:
- if want_all and isprivate:
- if self.options.private_members is None:
- keep = False
- else:
- keep = membername in self.options.private_members
- else:
- # keep documented attributes
- keep = True
- elif want_all and isprivate:
- if has_doc or self.options.undoc_members:
- if self.options.private_members is None: # NoQA: SIM114
- keep = False
- elif is_filtered_inherited_member(membername, obj):
- keep = False
- else:
- keep = membername in self.options.private_members
- else:
- keep = False
- else:
- if self.options.members is ALL and is_filtered_inherited_member(
- membername, obj
- ):
- keep = False
- else:
- # ignore undocumented members if :undoc-members: is not given
- keep = has_doc or self.options.undoc_members
-
- if isinstance(obj, ObjectMember) and obj.skipped:
- # forcedly skipped member (ex. a module attribute not defined in __all__)
- keep = False
-
- # give the user a chance to decide whether this member
- # should be skipped
- if self._events is not None:
- # let extensions preprocess docstrings
- skip_user = self._events.emit_firstresult(
- 'autodoc-skip-member',
- self.objtype,
- membername,
- member,
- not keep,
- self.options,
- )
- if skip_user is not None:
- keep = not skip_user
- except Exception as exc:
- logger.warning(
- __(
- 'autodoc: failed to determine %s.%s (%r) to be documented, '
- 'the following exception was raised:\n%s'
- ),
- self.name,
- membername,
- member,
- exc,
- type='autodoc',
- )
- keep = False
-
- if keep:
- ret.append((membername, member, isattr))
-
- return ret
-
- def document_members(self, all_members: bool = False) -> None:
- """Generate reST for member documentation.
-
- If *all_members* is True, document all members, else those given by
- *self.options.members*.
- """
- # set current namespace for finding members
- self._current_document.autodoc_module = self.modname
- if self.objpath:
- self._current_document.autodoc_class = self.objpath[0]
-
- want_all = (
- all_members or self.options.inherited_members or self.options.members is ALL
- )
- # find out which members are documentable
- members_check_module, members = self.get_object_members(want_all)
-
- # document non-skipped members
- member_documenters: list[tuple[Documenter, bool]] = []
- for mname, member, isattr in self.filter_members(members, want_all):
- classes = [
- cls
- for cls in self.documenters.values()
- if cls.can_document_member(member, mname, isattr, self)
- ]
- if not classes:
- # don't know how to document this member
- continue
- # prefer the documenter with the highest priority
- classes.sort(key=lambda cls: cls.priority)
- # give explicitly separated module name, so that members
- # of inner classes can be documented
- full_mname = f'{self.modname}::' + '.'.join((*self.objpath, mname))
- documenter = classes[-1](self.directive, full_mname, self.indent)
- member_documenters.append((documenter, isattr))
-
- member_order = self.options.member_order or self.config.autodoc_member_order
- # We now try to import all objects before ordering them. This is to
- # avoid possible circular imports if we were to import objects after
- # their associated documenters have been sorted.
- member_documenters = [
- (documenter, isattr)
- for documenter, isattr in member_documenters
- if documenter.parse_name() and documenter.import_object()
- ]
- member_documenters = self.sort_members(member_documenters, member_order)
-
- for documenter, isattr in member_documenters:
- assert documenter.modname
- # We can directly call ._generate() since the documenters
- # already called parse_name() and import_object() before.
- #
- # Note that those two methods above do not emit events, so
- # whatever objects we deduced should not have changed.
- documenter._generate(
- all_members=True,
- real_modname=self.real_modname,
- check_module=members_check_module and not isattr,
- )
-
- # reset current objects
- self._current_document.autodoc_module = ''
- self._current_document.autodoc_class = ''
-
- def sort_members(
- self, documenters: list[tuple[Documenter, bool]], order: str
- ) -> list[tuple[Documenter, bool]]:
- """Sort the given member list."""
- if order == 'groupwise':
- # sort by group; alphabetically within groups
- documenters.sort(key=lambda e: (e[0].member_order, e[0].name))
- elif order == 'bysource':
- # By default, member discovery order matches source order,
- # as dicts are insertion-ordered from Python 3.7.
- if self.analyzer:
- # sort by source order, by virtue of the module analyzer
- tagorder = self.analyzer.tagorder
-
- def keyfunc(entry: tuple[Documenter, bool]) -> int:
- fullname = entry[0].name.split('::')[1]
- return tagorder.get(fullname, len(tagorder))
-
- documenters.sort(key=keyfunc)
- else: # alphabetical
- documenters.sort(key=lambda e: e[0].name)
-
- return documenters
-
- def generate(
- self,
- more_content: StringList | None = None,
- real_modname: str | None = None,
- check_module: bool = False,
- all_members: bool = False,
- ) -> None:
- """Generate reST for the object given by *self.name*, and possibly for
- its members.
-
- If *more_content* is given, include that content. If *real_modname* is
- given, use that module name to find attribute docs. If *check_module* is
- True, only generate if the object is defined in the module name it is
- imported from. If *all_members* is True, document all members.
- """
- if not self.parse_name():
- # need a module to import
- logger.warning(
- __(
- "don't know which module to import for autodocumenting "
- '%r (try placing a "module" or "currentmodule" directive '
- 'in the document, or giving an explicit module name)'
- ),
- self.name,
- type='autodoc',
- )
- return
-
- # now, import the module and get object to document
- if not self.import_object():
- return
-
- self._generate(more_content, real_modname, check_module, all_members)
-
- def _generate(
- self,
- more_content: StringList | None = None,
- real_modname: str | None = None,
- check_module: bool = False,
- all_members: bool = False,
- ) -> None:
- # If there is no real module defined, figure out which to use.
- # The real module is used in the module analyzer to look up the module
- # where the attribute documentation would actually be found in.
- # This is used for situations where you have a module that collects the
- # functions and classes of internal submodules.
- guess_modname = self.get_real_modname()
- self.real_modname: str = real_modname or guess_modname
-
- # try to also get a source code analyzer for attribute docs
- try:
- self.analyzer = ModuleAnalyzer.for_module(self.real_modname)
- # parse right now, to get PycodeErrors on parsing (results will
- # be cached anyway)
- self.analyzer.find_attr_docs()
- except PycodeError as exc:
- logger.debug('[autodoc] module analyzer failed: %s', exc)
- # no source file -- e.g. for builtin and C modules
- self.analyzer = None
- # at least add the module.__file__ as a dependency
- if module___file__ := getattr(self.module, '__file__', ''):
- self.directive.record_dependencies.add(module___file__)
- else:
- self.directive.record_dependencies.add(self.analyzer.srcname)
-
- if self.real_modname != guess_modname:
- # Add module to dependency list if target object is defined in other module.
- try:
- analyzer = ModuleAnalyzer.for_module(guess_modname)
- self.directive.record_dependencies.add(analyzer.srcname)
- except PycodeError:
- pass
-
- docstrings: list[str] = functools.reduce(
- operator.iadd, self.get_doc() or [], []
- )
- if ismock(self.object) and not docstrings:
- logger.warning(
- __('A mocked object is detected: %r'),
- self.name,
- type='autodoc',
- subtype='mocked_object',
- )
-
- # check __module__ of object (for members not given explicitly)
- if check_module:
- if not self.check_module():
- return
-
- sourcename = self.get_sourcename()
-
- # make sure that the result starts with an empty line. This is
- # necessary for some situations where another directive preprocesses
- # reST and no starting newline is present
- self.add_line('', sourcename)
-
- # format the object's signature, if any
- try:
- sig = self.format_signature()
- except Exception as exc:
- logger.warning(
- __('error while formatting signature for %s: %s'),
- self.fullname,
- exc,
- type='autodoc',
- )
- return
-
- # generate the directive header and options, if applicable
- self.add_directive_header(sig)
- self.add_line('', sourcename)
-
- # e.g. the module directive doesn't have content
- self.indent += self.content_indent
-
- # add all content (from docstrings, attribute docs etc.)
- self.add_content(more_content)
-
- # document members, if possible
- self.document_members(all_members)
-
-
-class ModuleDocumenter(Documenter):
- """Specialized Documenter subclass for modules."""
-
- objtype = 'module'
- content_indent = ''
- _extra_indent = ' '
-
- option_spec: ClassVar[OptionSpec] = {
- 'members': members_option,
- 'undoc-members': bool_option,
- 'no-index': bool_option,
- 'no-index-entry': bool_option,
- 'inherited-members': inherited_members_option,
- 'show-inheritance': bool_option,
- 'synopsis': identity,
- 'platform': identity,
- 'deprecated': bool_option,
- 'member-order': member_order_option,
- 'exclude-members': exclude_members_option,
- 'private-members': members_option,
- 'special-members': members_option,
- 'imported-members': bool_option,
- 'ignore-module-all': bool_option,
- 'no-value': bool_option,
- 'noindex': bool_option,
- }
-
- def __init__(self, *args: Any) -> None:
- super().__init__(*args)
- merge_members_option(self.options)
- self.__all__: Sequence[str] | None = None
-
- def add_content(self, more_content: StringList | None) -> None:
- old_indent = self.indent
- self.indent += self._extra_indent
- super().add_content(None)
- self.indent = old_indent
- if more_content:
- for line, src in zip(more_content.data, more_content.items, strict=True):
- self.add_line(line, src[0], src[1])
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- # don't document submodules automatically
- return False
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- if modname is not None:
- logger.warning(
- __('"::" in automodule name doesn\'t make sense'), type='autodoc'
- )
- return (path or '') + base, []
-
- def parse_name(self) -> bool:
- ret = super().parse_name()
- if self.args or self.retann:
- logger.warning(
- __('signature arguments or return annotation given for automodule %s'),
- self.fullname,
- type='autodoc',
- )
- return ret
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
-
- try:
- if not self.options.ignore_module_all:
- self.__all__ = inspect.getall(self.object)
- except ValueError as exc:
- # invalid __all__ found.
- logger.warning(
- __(
- '__all__ should be a list of strings, not %r '
- '(in module %s) -- ignoring __all__'
- ),
- exc.args[0],
- self.fullname,
- type='autodoc',
- )
-
- return ret
-
- def add_directive_header(self, sig: str) -> None:
- Documenter.add_directive_header(self, sig)
-
- sourcename = self.get_sourcename()
-
- # add some module-specific options
- if self.options.synopsis:
- self.add_line(' :synopsis: ' + self.options.synopsis, sourcename)
- if self.options.platform:
- self.add_line(' :platform: ' + self.options.platform, sourcename)
- if self.options.deprecated:
- self.add_line(' :deprecated:', sourcename)
- if self.options.no_index_entry:
- self.add_line(' :no-index-entry:', sourcename)
-
- def get_module_members(self) -> dict[str, ObjectMember]:
- """Get members of target module."""
- if self.analyzer:
- attr_docs = self.analyzer.attr_docs
- else:
- attr_docs = {}
-
- members: dict[str, ObjectMember] = {}
- for name in dir(self.object):
- try:
- value = safe_getattr(self.object, name, None)
- if ismock(value):
- value = undecorate(value)
- docstring = attr_docs.get(('', name), [])
- members[name] = ObjectMember(
- name, value, docstring='\n'.join(docstring)
- )
- except AttributeError:
- continue
-
- # annotation only member (ex. attr: int)
- for name in inspect.getannotations(self.object):
- if name not in members:
- docstring = attr_docs.get(('', name), [])
- members[name] = ObjectMember(
- name, INSTANCEATTR, docstring='\n'.join(docstring)
- )
-
- return members
-
- def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
- members = self.get_module_members()
- if want_all:
- if self.__all__ is None:
- # for implicit module members, check __module__ to avoid
- # documenting imported objects
- return True, list(members.values())
- else:
- for member in members.values():
- if member.__name__ not in self.__all__:
- member.skipped = True
-
- return False, list(members.values())
- else:
- memberlist = self.options.members or []
- ret = []
- for name in memberlist:
- if name in members:
- ret.append(members[name])
- else:
- logger.warning(
- __(
- 'missing attribute mentioned in :members: option: '
- 'module %s, attribute %s'
- ),
- safe_getattr(self.object, '__name__', '???'),
- name,
- type='autodoc',
- )
- return False, ret
-
- def sort_members(
- self, documenters: list[tuple[Documenter, bool]], order: str
- ) -> list[tuple[Documenter, bool]]:
- if order == 'bysource' and self.__all__:
- assert self.__all__ is not None
- module_all = self.__all__
- module_all_set = set(module_all)
- module_all_len = len(module_all)
-
- # Sort alphabetically first (for members not listed on the __all__)
- documenters.sort(key=lambda e: e[0].name)
-
- # Sort by __all__
- def keyfunc(entry: tuple[Documenter, bool]) -> int:
- name = entry[0].name.split('::')[1]
- if name in module_all_set:
- return module_all.index(name)
- else:
- return module_all_len
-
- documenters.sort(key=keyfunc)
-
- return documenters
- else:
- return super().sort_members(documenters, order)
-
-
-class ModuleLevelDocumenter(Documenter):
- """Specialized Documenter subclass for objects on module level (functions,
- classes, data/constants).
- """
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- if modname is not None:
- return modname, [*parents, base]
- if path:
- modname = path.rstrip('.')
- return modname, [*parents, base]
-
- # if documenting a toplevel object without explicit module,
- # it can be contained in another auto directive ...
- modname = self._current_document.autodoc_module
- # ... or in the scope of a module directive
- if not modname:
- modname = self.env.ref_context.get('py:module')
- # ... else, it stays None, which means invalid
- return modname, [*parents, base]
-
-
-class ClassLevelDocumenter(Documenter):
- """Specialized Documenter subclass for objects on class level (methods,
- attributes).
- """
-
- def resolve_name(
- self, modname: str | None, parents: Any, path: str, base: str
- ) -> tuple[str | None, list[str]]:
- if modname is not None:
- return modname, [*parents, base]
-
- if path:
- mod_cls = path.rstrip('.')
- else:
- # if documenting a class-level object without path,
- # there must be a current class, either from a parent
- # auto directive ...
- mod_cls = self._current_document.autodoc_class
- # ... or from a class directive
- if not mod_cls:
- mod_cls = self.env.ref_context.get('py:class', '')
- # ... if still falsy, there's no way to know
- if not mod_cls:
- return None, []
- modname, sep, cls = mod_cls.rpartition('.')
- parents = [cls]
- # if the module name is still missing, get it like above
- if not modname:
- modname = self._current_document.autodoc_module
- if not modname:
- modname = self.env.ref_context.get('py:module')
- # ... else, it stays None, which means invalid
- return modname, [*parents, base]
-
-
-class DocstringSignatureMixin:
- """Mixin for FunctionDocumenter and MethodDocumenter to provide the
- feature of reading the signature from the docstring.
- """
-
- _new_docstrings: list[list[str]] | None = None
- _signatures: list[str] = []
-
- def _find_signature(self) -> tuple[str | None, str | None] | None:
- # candidates of the object name
- valid_names = [self.objpath[-1]] # type: ignore[attr-defined]
- if isinstance(self, ClassDocumenter):
- valid_names.append('__init__')
- if hasattr(self.object, '__mro__'):
- valid_names.extend(cls.__name__ for cls in self.object.__mro__)
-
- docstrings = self.get_doc()
- if docstrings is None:
- return None, None
- self._new_docstrings = docstrings[:]
- self._signatures = []
- result = None
- for i, doclines in enumerate(docstrings):
- for j, line in enumerate(doclines):
- if not line:
- # no lines in docstring, no match
- break
-
- if line.endswith('\\'):
- line = line.rstrip('\\').rstrip()
-
- # match first line of docstring against signature RE
- match = py_ext_sig_re.match(line)
- if not match:
- break
- exmod, path, base, tp_list, args, retann = match.groups()
-
- # the base name must match ours
- if base not in valid_names:
- break
-
- # re-prepare docstring to ignore more leading indentation
- directive = self.directive # type: ignore[attr-defined]
- tab_width = directive.state.document.settings.tab_width
- self._new_docstrings[i] = prepare_docstring(
- '\n'.join(doclines[j + 1 :]), tab_width
- )
-
- if result is None:
- # first signature
- result = args, retann
- else:
- # subsequent signatures
- self._signatures.append(f'({args}) -> {retann}')
-
- if result is not None:
- # finish the loop when signature found
- break
-
- return result
-
- def get_doc(self) -> list[list[str]] | None:
- if self._new_docstrings is not None:
- return self._new_docstrings
- return super().get_doc() # type: ignore[misc]
-
- def format_signature(self, **kwargs: Any) -> str:
- self.args: str | None
- if self.args is None and self.config.autodoc_docstring_signature: # type: ignore[attr-defined]
- # only act if a signature is not explicitly given already, and if
- # the feature is enabled
- result = self._find_signature()
- if result is not None:
- self.args, self.retann = result
- sig = super().format_signature(**kwargs) # type: ignore[misc]
- if self._signatures:
- return '\n'.join((sig, *self._signatures))
- else:
- return sig
-
-
-class DocstringStripSignatureMixin(DocstringSignatureMixin):
- """Mixin for AttributeDocumenter to provide the
- feature of stripping any function signature from the docstring.
- """
-
- def format_signature(self, **kwargs: Any) -> str:
- if self.args is None and self.config.autodoc_docstring_signature: # type: ignore[attr-defined]
- # only act if a signature is not explicitly given already, and if
- # the feature is enabled
- result = self._find_signature()
- if result is not None:
- # Discarding _args is a only difference with
- # DocstringSignatureMixin.format_signature.
- # Documenter.format_signature use self.args value to format.
- _args, self.retann = result
- return super().format_signature(**kwargs)
-
-
-class FunctionDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
- """Specialized Documenter subclass for functions."""
-
- objtype = 'function'
- member_order = 30
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- # supports functions, builtins and bound methods exported at the module level
- return (
- inspect.isfunction(member)
- or inspect.isbuiltin(member)
- or (inspect.isroutine(member) and isinstance(parent, ModuleDocumenter))
- )
-
- def format_args(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints in {'none', 'description'}:
- kwargs.setdefault('show_annotation', False)
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- try:
- self._events.emit('autodoc-before-process-signature', self.object, False)
- sig = inspect.signature(
- self.object, type_aliases=self.config.autodoc_type_aliases
- )
- args = stringify_signature(sig, **kwargs)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a function signature for %s: %s'), self.fullname, exc
- )
- return ''
- except ValueError:
- args = ''
-
- if self.config.strip_signature_backslash:
- # escape backslashes for reST
- args = args.replace('\\', '\\\\')
- return args
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def add_directive_header(self, sig: str) -> None:
- sourcename = self.get_sourcename()
- super().add_directive_header(sig)
-
- is_coro = inspect.iscoroutinefunction(self.object)
- is_acoro = inspect.isasyncgenfunction(self.object)
- if is_coro or is_acoro:
- self.add_line(' :async:', sourcename)
-
- def format_signature(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- sigs = []
- if (
- self.analyzer
- and '.'.join(self.objpath) in self.analyzer.overloads
- and self.config.autodoc_typehints != 'none'
- ):
- # Use signatures for overloaded functions instead of the implementation function.
- overloaded = True
- else:
- overloaded = False
- sig = super().format_signature(**kwargs)
- sigs.append(sig)
-
- if inspect.is_singledispatch_function(self.object):
- # append signature of singledispatch'ed functions
- for typ, func in self.object.registry.items():
- if typ is object:
- pass # default implementation. skipped.
- else:
- dispatchfunc = self.annotate_to_first_argument(func, typ)
- if dispatchfunc:
- documenter = FunctionDocumenter(self.directive, '')
- documenter.object = dispatchfunc
- documenter.objpath = ['']
- sigs.append(documenter.format_signature())
- if overloaded and self.analyzer is not None:
- actual = inspect.signature(
- self.object, type_aliases=self.config.autodoc_type_aliases
- )
- __globals__ = safe_getattr(self.object, '__globals__', {})
- for overload in self.analyzer.overloads['.'.join(self.objpath)]:
- overload = self.merge_default_value(actual, overload)
- overload = evaluate_signature(
- overload, __globals__, self.config.autodoc_type_aliases
- )
-
- sig = stringify_signature(overload, **kwargs)
- sigs.append(sig)
-
- return '\n'.join(sigs)
-
- def merge_default_value(self, actual: Signature, overload: Signature) -> Signature:
- """Merge default values of actual implementation to the overload variants."""
- parameters = list(overload.parameters.values())
- for i, param in enumerate(parameters):
- actual_param = actual.parameters.get(param.name)
- if actual_param and param.default == '...':
- parameters[i] = param.replace(default=actual_param.default)
-
- return overload.replace(parameters=parameters)
-
- def annotate_to_first_argument(
- self, func: Callable[..., Any], typ: type
- ) -> Callable[..., Any] | None:
- """Annotate type hint to the first argument of function if needed."""
- try:
- sig = inspect.signature(func, type_aliases=self.config.autodoc_type_aliases)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a function signature for %s: %s'), self.fullname, exc
- )
- return None
- except ValueError:
- return None
-
- if len(sig.parameters) == 0:
- return None
-
- def dummy(): # type: ignore[no-untyped-def] # NoQA: ANN202
- pass
-
- params = list(sig.parameters.values())
- if params[0].annotation is Parameter.empty:
- params[0] = params[0].replace(annotation=typ)
- try:
- dummy.__signature__ = sig.replace(parameters=params) # type: ignore[attr-defined]
- return dummy
- except (AttributeError, TypeError):
- # failed to update signature (ex. built-in or extension types)
- return None
-
- return func
-
-
-class DecoratorDocumenter(FunctionDocumenter):
- """Specialized Documenter subclass for decorator functions."""
-
- objtype = 'decorator'
-
- # must be lower than FunctionDocumenter
- priority = -1
-
- def format_args(self, **kwargs: Any) -> str:
- args = super().format_args(**kwargs)
- if ',' in args:
- return args
- else:
- return ''
-
-
-# Types which have confusing metaclass signatures it would be best not to show.
-# These are listed by name, rather than storing the objects themselves, to avoid
-# needing to import the modules.
-_METACLASS_CALL_BLACKLIST = frozenset({
- 'enum.EnumType.__call__',
-})
-
-
-# Types whose __new__ signature is a pass-through.
-_CLASS_NEW_BLACKLIST = frozenset({
- 'typing.Generic.__new__',
-})
-
-
-class ClassDocumenter(DocstringSignatureMixin, ModuleLevelDocumenter): # type: ignore[misc]
- """Specialized Documenter subclass for classes."""
-
- objtype = 'class'
- member_order = 20
- option_spec: ClassVar[OptionSpec] = {
- 'members': members_option,
- 'undoc-members': bool_option,
- 'no-index': bool_option,
- 'no-index-entry': bool_option,
- 'inherited-members': inherited_members_option,
- 'show-inheritance': bool_option,
- 'member-order': member_order_option,
- 'exclude-members': exclude_members_option,
- 'private-members': members_option,
- 'special-members': members_option,
- 'class-doc-from': class_doc_from_option,
- 'noindex': bool_option,
- }
-
- # Must be higher than FunctionDocumenter, ClassDocumenter, and
- # AttributeDocumenter as NewType can be an attribute and is a class
- # after Python 3.10.
- priority = 15
-
- _signature_class: Any = None
- _signature_method_name: str = ''
-
- def __init__(self, *args: Any) -> None:
- super().__init__(*args)
-
- if self.config.autodoc_class_signature == 'separated':
- self.options = self.options.copy()
-
- # show __init__() method
- if self.options.special_members is None:
- self.options['special-members'] = ['__new__', '__init__']
- else:
- self.options.special_members.append('__new__')
- self.options.special_members.append('__init__')
-
- merge_members_option(self.options)
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- return isinstance(member, type) or (
- isattr and isinstance(member, NewType | TypeVar)
- )
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- # if the class is documented under another name, document it
- # as data/attribute
- if ret:
- if hasattr(self.object, '__name__'):
- self.doc_as_attr = self.objpath[-1] != self.object.__name__
- else:
- self.doc_as_attr = True
- if isinstance(self.object, NewType | TypeVar):
- modname = getattr(self.object, '__module__', self.modname)
- if modname != self.modname and self.modname.startswith(modname):
- bases = self.modname[len(modname) :].strip('.').split('.')
- self.objpath = bases + self.objpath
- self.modname = modname
- return ret
-
- def _get_signature(self) -> tuple[Any | None, str | None, Signature | None]:
- if isinstance(self.object, NewType | TypeVar):
- # Suppress signature
- return None, None, None
-
- def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
- """Get the `attr` function or method from `obj`, if it is user-defined."""
- if inspect.is_builtin_class_method(obj, attr):
- return None
- attr = self.get_attr(obj, attr, None)
- if not (inspect.ismethod(attr) or inspect.isfunction(attr)):
- return None
- return attr
-
- # This sequence is copied from inspect._signature_from_callable.
- # ValueError means that no signature could be found, so we keep going.
-
- # First, we check if obj has a __signature__ attribute
- if hasattr(self.object, '__signature__'):
- object_sig = self.object.__signature__
- if isinstance(object_sig, Signature):
- return None, None, object_sig
- if sys.version_info[:2] in {(3, 12), (3, 13)} and callable(object_sig):
- # Support for enum.Enum.__signature__ in Python 3.12
- if isinstance(object_sig_str := object_sig(), str):
- return None, None, inspect.signature_from_str(object_sig_str)
-
- # Next, let's see if it has an overloaded __call__ defined
- # in its metaclass
- call = get_user_defined_function_or_method(type(self.object), '__call__')
-
- if call is not None:
- if f'{call.__module__}.{call.__qualname__}' in _METACLASS_CALL_BLACKLIST:
- call = None
-
- if call is not None:
- self._events.emit('autodoc-before-process-signature', call, True)
- try:
- sig = inspect.signature(
- call,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
- return type(self.object), '__call__', sig
- except ValueError:
- pass
-
- # Now we check if the 'obj' class has a '__new__' method
- new = get_user_defined_function_or_method(self.object, '__new__')
-
- if new is not None:
- if f'{new.__module__}.{new.__qualname__}' in _CLASS_NEW_BLACKLIST:
- new = None
-
- if new is not None:
- self._events.emit('autodoc-before-process-signature', new, True)
- try:
- sig = inspect.signature(
- new,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
- return self.object, '__new__', sig
- except ValueError:
- pass
-
- # Finally, we should have at least __init__ implemented
- init = get_user_defined_function_or_method(self.object, '__init__')
- if init is not None:
- self._events.emit('autodoc-before-process-signature', init, True)
- try:
- sig = inspect.signature(
- init,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
- return self.object, '__init__', sig
- except ValueError:
- pass
-
- # None of the attributes are user-defined, so fall back to let inspect
- # handle it.
- # We don't know the exact method that inspect.signature will read
- # the signature from, so just pass the object itself to our hook.
- self._events.emit('autodoc-before-process-signature', self.object, False)
- try:
- sig = inspect.signature(
- self.object,
- bound_method=False,
- type_aliases=self.config.autodoc_type_aliases,
- )
- return None, None, sig
- except ValueError:
- pass
-
- # Still no signature: happens e.g. for old-style classes
- # with __init__ in C and no `__text_signature__`.
- return None, None, None
-
- def format_args(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints in {'none', 'description'}:
- kwargs.setdefault('show_annotation', False)
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- try:
- self._signature_class, _signature_method_name, sig = self._get_signature()
- except TypeError as exc:
- # __signature__ attribute contained junk
- logger.warning(
- __('Failed to get a constructor signature for %s: %s'),
- self.fullname,
- exc,
- )
- return ''
- self._signature_method_name = _signature_method_name or ''
-
- if sig is None:
- return ''
-
- return stringify_signature(sig, show_return_annotation=False, **kwargs)
-
- def _find_signature(self) -> tuple[str | None, str | None] | None:
- result = super()._find_signature()
- if result is not None:
- # Strip a return value from signature of constructor in docstring (first entry)
- result = (result[0], None)
-
- for i, sig in enumerate(self._signatures):
- if sig.endswith(' -> None'):
- # Strip a return value from signatures of constructor in docstring (subsequent
- # entries)
- self._signatures[i] = sig[:-8]
-
- return result
-
- def format_signature(self, **kwargs: Any) -> str:
- if self.doc_as_attr:
- return ''
- if self.config.autodoc_class_signature == 'separated':
- # do not show signatures
- return ''
-
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- sig = super().format_signature()
- sigs = []
-
- overloads = self.get_overloaded_signatures()
- if overloads and self.config.autodoc_typehints != 'none':
- # Use signatures for overloaded methods instead of the implementation method.
- method = safe_getattr(
- self._signature_class, self._signature_method_name, None
- )
- __globals__ = safe_getattr(method, '__globals__', {})
- for overload in overloads:
- overload = evaluate_signature(
- overload, __globals__, self.config.autodoc_type_aliases
- )
-
- parameters = list(overload.parameters.values())
- overload = overload.replace(
- parameters=parameters[1:], return_annotation=Parameter.empty
- )
- sig = stringify_signature(overload, **kwargs)
- sigs.append(sig)
- else:
- sigs.append(sig)
-
- return '\n'.join(sigs)
-
- def get_overloaded_signatures(self) -> list[Signature]:
- if self._signature_class and self._signature_method_name:
- for cls in self._signature_class.__mro__:
- try:
- analyzer = ModuleAnalyzer.for_module(cls.__module__)
- analyzer.analyze()
- qualname = f'{cls.__qualname__}.{self._signature_method_name}'
- if qualname in analyzer.overloads:
- return analyzer.overloads.get(qualname, [])
- elif qualname in analyzer.tagorder:
- # the constructor is defined in the class, but not overridden.
- return []
- except PycodeError:
- pass
-
- return []
-
- def get_canonical_fullname(self) -> str | None:
- __modname__ = safe_getattr(self.object, '__module__', self.modname)
- __qualname__ = safe_getattr(self.object, '__qualname__', None)
- if __qualname__ is None:
- __qualname__ = safe_getattr(self.object, '__name__', None)
- if __qualname__ and '' in __qualname__:
- # No valid qualname found if the object is defined as locals
- __qualname__ = None
-
- if __modname__ and __qualname__:
- return f'{__modname__}.{__qualname__}'
- else:
- return None
-
- def add_directive_header(self, sig: str) -> None:
- sourcename = self.get_sourcename()
-
- if self.doc_as_attr:
- self.directivetype = 'attribute'
- super().add_directive_header(sig)
-
- if isinstance(self.object, NewType | TypeVar):
- return
-
- if self.analyzer and '.'.join(self.objpath) in self.analyzer.finals:
- self.add_line(' :final:', sourcename)
-
- canonical_fullname = self.get_canonical_fullname()
- if (
- not self.doc_as_attr
- and not isinstance(self.object, NewType)
- and canonical_fullname
- and self.fullname != canonical_fullname
- ):
- self.add_line(' :canonical: %s' % canonical_fullname, sourcename)
-
- # add inheritance info, if wanted
- if not self.doc_as_attr and self.options.show_inheritance:
- if inspect.getorigbases(self.object):
- # A subclass of generic types
- # refs: PEP-560
- bases = list(self.object.__orig_bases__)
- elif hasattr(self.object, '__bases__') and len(self.object.__bases__):
- # A normal class
- bases = list(self.object.__bases__)
- else:
- bases = []
-
- self._events.emit(
- 'autodoc-process-bases', self.fullname, self.object, self.options, bases
- )
-
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- base_classes = [restify(cls, mode=mode) for cls in bases]
-
- sourcename = self.get_sourcename()
- self.add_line('', sourcename)
- self.add_line(' ' + _('Bases: %s') % ', '.join(base_classes), sourcename)
-
- def get_object_members(self, want_all: bool) -> tuple[bool, list[ObjectMember]]:
- members = get_class_members(
- self.object,
- self.objpath,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- )
- if not want_all:
- if not self.options.members:
- return False, []
- # specific members given
- selected = []
- for name in self.options.members:
- if name in members:
- selected.append(members[name])
- else:
- logger.warning(
- __('missing attribute %s in object %s'),
- name,
- self.fullname,
- type='autodoc',
- )
- return False, selected
- elif self.options.inherited_members:
- return False, list(members.values())
- else:
- return False, [m for m in members.values() if m.class_ == self.object]
-
- def get_doc(self) -> list[list[str]] | None:
- if isinstance(self.object, TypeVar):
- if self.object.__doc__ == TypeVar.__doc__:
- return []
- if self.doc_as_attr:
- # Don't show the docstring of the class when it is an alias.
- if self.get_variable_comment():
- return []
- else:
- return None
-
- lines = getattr(self, '_new_docstrings', None)
- if lines is not None:
- return lines
-
- classdoc_from = self.options.get(
- 'class-doc-from', self.config.autoclass_content
- )
-
- docstrings = []
- attrdocstring = getdoc(self.object, self.get_attr)
- if attrdocstring:
- docstrings.append(attrdocstring)
-
- # for classes, what the "docstring" is can be controlled via a
- # config value; the default is only the class docstring
- if classdoc_from in {'both', 'init'}:
- __init__ = self.get_attr(self.object, '__init__', None)
- initdocstring = getdoc(
- __init__,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.object,
- '__init__',
- )
- # for new-style classes, no __init__ means default __init__
- if initdocstring is not None and (
- initdocstring == object.__init__.__doc__ # for pypy
- or initdocstring.strip() == object.__init__.__doc__ # for !pypy
- ):
- initdocstring = None
- if not initdocstring:
- # try __new__
- __new__ = self.get_attr(self.object, '__new__', None)
- initdocstring = getdoc(
- __new__,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.object,
- '__new__',
- )
- # for new-style classes, no __new__ means default __new__
- if initdocstring is not None and (
- initdocstring == object.__new__.__doc__ # for pypy
- or initdocstring.strip() == object.__new__.__doc__ # for !pypy
- ):
- initdocstring = None
- if initdocstring:
- if classdoc_from == 'init':
- docstrings = [initdocstring]
- else:
- docstrings.append(initdocstring)
-
- tab_width = self.directive.state.document.settings.tab_width
- return [prepare_docstring(docstring, tab_width) for docstring in docstrings]
-
- def get_variable_comment(self) -> list[str] | None:
- try:
- key = ('', '.'.join(self.objpath))
- if self.doc_as_attr:
- analyzer = ModuleAnalyzer.for_module(self.modname)
- else:
- analyzer = ModuleAnalyzer.for_module(self.get_real_modname())
- analyzer.analyze()
- return list(analyzer.attr_docs.get(key, []))
- except PycodeError:
- return None
-
- def add_content(self, more_content: StringList | None) -> None:
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- short_literals = self.config.python_display_short_literal_types
-
- if isinstance(self.object, NewType):
- supertype = restify(self.object.__supertype__, mode=mode)
-
- more_content = StringList([_('alias of %s') % supertype, ''], source='')
- if isinstance(self.object, TypeVar):
- attrs = [repr(self.object.__name__)]
- attrs.extend(
- stringify_annotation(constraint, mode, short_literals=short_literals)
- for constraint in self.object.__constraints__
- )
- if self.object.__bound__:
- bound = restify(self.object.__bound__, mode=mode)
- attrs.append(r'bound=\ ' + bound)
- if self.object.__covariant__:
- attrs.append('covariant=True')
- if self.object.__contravariant__:
- attrs.append('contravariant=True')
-
- more_content = StringList(
- [_('alias of TypeVar(%s)') % ', '.join(attrs), ''], source=''
- )
- if self.doc_as_attr and self.modname != self.get_real_modname():
- try:
- # override analyzer to obtain doccomment around its definition.
- self.analyzer = ModuleAnalyzer.for_module(self.modname)
- self.analyzer.analyze()
- except PycodeError:
- pass
-
- if self.doc_as_attr and not self.get_variable_comment():
- try:
- alias = restify(self.object, mode=mode)
- more_content = StringList([_('alias of %s') % alias], source='')
- except AttributeError:
- pass # Invalid class object is passed.
-
- super().add_content(more_content)
-
- def document_members(self, all_members: bool = False) -> None:
- if self.doc_as_attr:
- return
- super().document_members(all_members)
-
- def generate(
- self,
- more_content: StringList | None = None,
- real_modname: str | None = None,
- check_module: bool = False,
- all_members: bool = False,
- ) -> None:
- # Do not pass real_modname and use the name from the __module__
- # attribute of the class.
- # If a class gets imported into the module real_modname
- # the analyzer won't find the source of the class, if
- # it looks in real_modname.
- return super().generate(
- more_content=more_content,
- check_module=check_module,
- all_members=all_members,
- )
-
-
-class ExceptionDocumenter(ClassDocumenter):
- """Specialized ClassDocumenter subclass for exceptions."""
-
- objtype = 'exception'
- member_order = 10
-
- # needs a higher priority than ClassDocumenter
- priority = ClassDocumenter.priority + 5
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- try:
- return isinstance(member, type) and issubclass(member, BaseException)
- except TypeError as exc:
- # It's possible for a member to be considered a type, but fail
- # issubclass checks due to not being a class. For example:
- # https://github.com/sphinx-doc/sphinx/issues/11654#issuecomment-1696790436
- msg = (
- f'{cls.__name__} failed to discern if member {member} with'
- f' membername {membername} is a BaseException subclass.'
- )
- raise ValueError(msg) from exc
-
-
-class DataDocumenterMixinBase:
- # define types of instance variables
- config: Config
- env: BuildEnvironment
- modname: str
- parent: Any
- object: Any
- objpath: list[str]
-
- def should_suppress_directive_header(self) -> bool:
- """Check directive header should be suppressed."""
- return False
-
- def should_suppress_value_header(self) -> bool:
- """Check :value: header should be suppressed."""
- return False
-
- def update_content(self, more_content: StringList) -> None:
- """Update docstring, for example with TypeVar variance."""
- pass
-
-
-class GenericAliasMixin(DataDocumenterMixinBase):
- """Mixin for DataDocumenter and AttributeDocumenter to provide the feature for
- supporting GenericAliases.
- """
-
- def should_suppress_directive_header(self) -> bool:
- return (
- inspect.isgenericalias(self.object)
- or super().should_suppress_directive_header()
- )
-
- def update_content(self, more_content: StringList) -> None:
- if inspect.isgenericalias(self.object):
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- alias = restify(self.object, mode=mode)
-
- more_content.append(_('alias of %s') % alias, '')
- more_content.append('', '')
-
- super().update_content(more_content)
-
-
-class UninitializedGlobalVariableMixin(DataDocumenterMixinBase):
- """Mixin for DataDocumenter to provide the feature for supporting uninitialized
- (type annotation only) global variables.
- """
-
- def import_object(self, raiseerror: bool = False) -> bool:
- try:
- return super().import_object(raiseerror=True) # type: ignore[misc]
- except ImportError as exc:
- # annotation only instance variable (PEP-526)
- try:
- with mock(self.config.autodoc_mock_imports):
- parent = import_module(self.modname)
- annotations = get_type_hints(
- parent,
- None,
- self.config.autodoc_type_aliases,
- include_extras=True,
- )
- if self.objpath[-1] in annotations:
- self.object = UNINITIALIZED_ATTR
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def should_suppress_value_header(self) -> bool:
- return (
- self.object is UNINITIALIZED_ATTR or super().should_suppress_value_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if self.object is UNINITIALIZED_ATTR:
- return []
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class DataDocumenter(
- GenericAliasMixin, UninitializedGlobalVariableMixin, ModuleLevelDocumenter
-):
- """Specialized Documenter subclass for data items."""
-
- objtype = 'data'
- member_order = 40
- priority = -10
- option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
- option_spec['annotation'] = annotation_option
- option_spec['no-value'] = bool_option
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- return isinstance(parent, ModuleDocumenter) and isattr
-
- def update_annotations(self, parent: Any) -> None:
- """Update __annotations__ to support type_comment and so on."""
- annotations = dict(inspect.getannotations(parent))
- parent.__annotations__ = annotations
-
- try:
- analyzer = ModuleAnalyzer.for_module(self.modname)
- analyzer.analyze()
- for (classname, attrname), annotation in analyzer.annotations.items():
- if not classname and attrname not in annotations:
- annotations[attrname] = annotation
- except PycodeError:
- pass
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- if self.parent:
- self.update_annotations(self.parent)
-
- return ret
-
- def should_suppress_value_header(self) -> bool:
- if super().should_suppress_value_header():
- return True
- else:
- doc = self.get_doc() or []
- docstring, metadata = separate_metadata(
- '\n'.join(functools.reduce(operator.iadd, doc, []))
- )
- if 'hide-value' in metadata:
- return True
-
- return False
-
- def add_directive_header(self, sig: str) -> None:
- super().add_directive_header(sig)
- sourcename = self.get_sourcename()
- if (
- self.options.annotation is SUPPRESS
- or self.should_suppress_directive_header()
- ):
- pass
- elif self.options.annotation:
- self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
- else:
- if self.config.autodoc_typehints != 'none':
- # obtain annotation for this data
- annotations = get_type_hints(
- self.parent,
- None,
- self.config.autodoc_type_aliases,
- include_extras=True,
- )
- if self.objpath[-1] in annotations:
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- short_literals = self.config.python_display_short_literal_types
- objrepr = stringify_annotation(
- annotations.get(self.objpath[-1]),
- mode,
- short_literals=short_literals,
- )
- self.add_line(' :type: ' + objrepr, sourcename)
-
- try:
- if (
- self.options.no_value
- or self.should_suppress_value_header()
- or ismock(self.object)
- ):
- pass
- else:
- objrepr = object_description(self.object)
- self.add_line(' :value: ' + objrepr, sourcename)
- except ValueError:
- pass
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def get_real_modname(self) -> str:
- real_modname = self.get_attr(self.parent or self.object, '__module__', None)
- return real_modname or self.modname
-
- def get_module_comment(self, attrname: str) -> list[str] | None:
- try:
- analyzer = ModuleAnalyzer.for_module(self.modname)
- analyzer.analyze()
- key = ('', attrname)
- if key in analyzer.attr_docs:
- return list(analyzer.attr_docs[key])
- except PycodeError:
- pass
-
- return None
-
- def get_doc(self) -> list[list[str]] | None:
- # Check the variable has a docstring-comment
- comment = self.get_module_comment(self.objpath[-1])
- if comment:
- return [comment]
- else:
- return super().get_doc()
-
- def add_content(self, more_content: StringList | None) -> None:
- # Disable analyzing variable comment on Documenter.add_content() to control it on
- # DataDocumenter.add_content()
- self.analyzer = None
-
- if not more_content:
- more_content = StringList()
-
- self.update_content(more_content)
- super().add_content(more_content)
-
-
-class MethodDocumenter(DocstringSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
- """Specialized Documenter subclass for methods (normal, static and class)."""
-
- objtype = 'method'
- directivetype = 'method'
- member_order = 50
- priority = 1 # must be more than FunctionDocumenter
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- return inspect.isroutine(member) and not isinstance(parent, ModuleDocumenter)
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- if not ret:
- return ret
-
- # to distinguish classmethod/staticmethod
- obj = self.parent.__dict__.get(self.object_name, self.object)
- if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
- # document static members before regular methods
- self.member_order -= 1
- elif inspect.isclassmethod(obj):
- # document class methods before static methods as
- # they usually behave as alternative constructors
- self.member_order -= 2
- return ret
-
- def format_args(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints in {'none', 'description'}:
- kwargs.setdefault('show_annotation', False)
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- try:
- if self.object == object.__init__ and self.parent != object: # NoQA: E721
- # Classes not having own __init__() method are shown as no arguments.
- #
- # Note: The signature of object.__init__() is (self, /, *args, **kwargs).
- # But it makes users confused.
- args = '()'
- else:
- if inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.object_name
- ):
- self._events.emit(
- 'autodoc-before-process-signature', self.object, False
- )
- sig = inspect.signature(
- self.object,
- bound_method=False,
- type_aliases=self.config.autodoc_type_aliases,
- )
- else:
- self._events.emit(
- 'autodoc-before-process-signature', self.object, True
- )
- sig = inspect.signature(
- self.object,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
- args = stringify_signature(sig, **kwargs)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a method signature for %s: %s'), self.fullname, exc
- )
- return ''
- except ValueError:
- args = ''
-
- if self.config.strip_signature_backslash:
- # escape backslashes for reST
- args = args.replace('\\', '\\\\')
- return args
-
- def add_directive_header(self, sig: str) -> None:
- super().add_directive_header(sig)
-
- sourcename = self.get_sourcename()
- obj = self.parent.__dict__.get(self.object_name, self.object)
- if inspect.isabstractmethod(obj):
- self.add_line(' :abstractmethod:', sourcename)
- if inspect.iscoroutinefunction(obj) or inspect.isasyncgenfunction(obj):
- self.add_line(' :async:', sourcename)
- if (
- inspect.is_classmethod_like(obj)
- or inspect.is_singledispatch_method(obj)
- and inspect.is_classmethod_like(obj.func)
- ):
- self.add_line(' :classmethod:', sourcename)
- if inspect.isstaticmethod(obj, cls=self.parent, name=self.object_name):
- self.add_line(' :staticmethod:', sourcename)
- if self.analyzer and '.'.join(self.objpath) in self.analyzer.finals:
- self.add_line(' :final:', sourcename)
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def format_signature(self, **kwargs: Any) -> str:
- if self.config.autodoc_typehints_format == 'short':
- kwargs.setdefault('unqualified_typehints', True)
- if self.config.python_display_short_literal_types:
- kwargs.setdefault('short_literals', True)
-
- sigs = []
- if (
- self.analyzer
- and '.'.join(self.objpath) in self.analyzer.overloads
- and self.config.autodoc_typehints != 'none'
- ):
- # Use signatures for overloaded methods instead of the implementation method.
- overloaded = True
- else:
- overloaded = False
- sig = super().format_signature(**kwargs)
- sigs.append(sig)
-
- meth = self.parent.__dict__.get(self.objpath[-1])
- if inspect.is_singledispatch_method(meth):
- # append signature of singledispatch'ed functions
- for typ, func in meth.dispatcher.registry.items():
- if typ is object:
- pass # default implementation. skipped.
- else:
- if inspect.isclassmethod(func):
- func = func.__func__
- dispatchmeth = self.annotate_to_first_argument(func, typ)
- if dispatchmeth:
- documenter = MethodDocumenter(self.directive, '')
- documenter.parent = self.parent
- documenter.object = dispatchmeth
- documenter.objpath = ['']
- sigs.append(documenter.format_signature())
- if overloaded and self.analyzer is not None:
- if inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.object_name
- ):
- actual = inspect.signature(
- self.object,
- bound_method=False,
- type_aliases=self.config.autodoc_type_aliases,
- )
- else:
- actual = inspect.signature(
- self.object,
- bound_method=True,
- type_aliases=self.config.autodoc_type_aliases,
- )
-
- __globals__ = safe_getattr(self.object, '__globals__', {})
- for overload in self.analyzer.overloads['.'.join(self.objpath)]:
- overload = self.merge_default_value(actual, overload)
- overload = evaluate_signature(
- overload, __globals__, self.config.autodoc_type_aliases
- )
-
- if not inspect.isstaticmethod(
- self.object, cls=self.parent, name=self.object_name
- ):
- parameters = list(overload.parameters.values())
- overload = overload.replace(parameters=parameters[1:])
- sig = stringify_signature(overload, **kwargs)
- sigs.append(sig)
-
- return '\n'.join(sigs)
-
- def merge_default_value(self, actual: Signature, overload: Signature) -> Signature:
- """Merge default values of actual implementation to the overload variants."""
- parameters = list(overload.parameters.values())
- for i, param in enumerate(parameters):
- actual_param = actual.parameters.get(param.name)
- if actual_param and param.default == '...':
- parameters[i] = param.replace(default=actual_param.default)
-
- return overload.replace(parameters=parameters)
-
- def annotate_to_first_argument(
- self, func: Callable[..., Any], typ: type
- ) -> Callable[..., Any] | None:
- """Annotate type hint to the first argument of function if needed."""
- try:
- sig = inspect.signature(func, type_aliases=self.config.autodoc_type_aliases)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a method signature for %s: %s'), self.fullname, exc
- )
- return None
- except ValueError:
- return None
-
- if len(sig.parameters) == 1:
- return None
-
- def dummy(): # type: ignore[no-untyped-def] # NoQA: ANN202
- pass
-
- params = list(sig.parameters.values())
- if params[1].annotation is Parameter.empty:
- params[1] = params[1].replace(annotation=typ)
- try:
- dummy.__signature__ = sig.replace( # type: ignore[attr-defined]
- parameters=params
- )
- return dummy
- except (AttributeError, TypeError):
- # failed to update signature (ex. built-in or extension types)
- return None
-
- return func
-
- def get_doc(self) -> list[list[str]] | None:
- if self._new_docstrings is not None:
- # docstring already returned previously, then modified by
- # `DocstringSignatureMixin`. Just return the previously-computed
- # result, so that we don't lose the processing done by
- # `DocstringSignatureMixin`.
- return self._new_docstrings
- if self.objpath[-1] == '__init__':
- docstring = getdoc(
- self.object,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.parent,
- self.object_name,
- )
- if docstring is not None and (
- docstring == object.__init__.__doc__ # for pypy
- or docstring.strip() == object.__init__.__doc__ # for !pypy
- ):
- docstring = None
- if docstring:
- tab_width = self.directive.state.document.settings.tab_width
- return [prepare_docstring(docstring, tabsize=tab_width)]
- else:
- return []
- elif self.objpath[-1] == '__new__':
- docstring = getdoc(
- self.object,
- self.get_attr,
- self.config.autodoc_inherit_docstrings,
- self.parent,
- self.object_name,
- )
- if docstring is not None and (
- docstring == object.__new__.__doc__ # for pypy
- or docstring.strip() == object.__new__.__doc__ # for !pypy
- ):
- docstring = None
- if docstring:
- tab_width = self.directive.state.document.settings.tab_width
- return [prepare_docstring(docstring, tabsize=tab_width)]
- else:
- return []
- else:
- return super().get_doc()
-
-
-class NonDataDescriptorMixin(DataDocumenterMixinBase):
- """Mixin for AttributeDocumenter to provide the feature for supporting non
- data-descriptors.
-
- .. note:: This mix-in must be inherited after other mix-ins. Otherwise, docstring
- and :value: header will be suppressed unexpectedly.
- """
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror) # type: ignore[misc]
- if ret and not inspect.isattributedescriptor(self.object):
- self.non_data_descriptor = True
- else:
- self.non_data_descriptor = False
-
- return ret
-
- def should_suppress_value_header(self) -> bool:
- return (
- not getattr(self, 'non_data_descriptor', False)
- or super().should_suppress_directive_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if getattr(self, 'non_data_descriptor', False):
- # the docstring of non datadescriptor is very probably the wrong thing
- # to display
- return None
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class SlotsMixin(DataDocumenterMixinBase):
- """Mixin for AttributeDocumenter to provide the feature for supporting __slots__."""
-
- def isslotsattribute(self) -> bool:
- """Check the subject is an attribute in __slots__."""
- try:
- if parent___slots__ := inspect.getslots(self.parent):
- return self.objpath[-1] in parent___slots__
- else:
- return False
- except (ValueError, TypeError):
- return False
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror) # type: ignore[misc]
- if self.isslotsattribute():
- self.object = SLOTSATTR
-
- return ret
-
- def should_suppress_value_header(self) -> bool:
- if self.object is SLOTSATTR:
- return True
- else:
- return super().should_suppress_value_header()
-
- def get_doc(self) -> list[list[str]] | None:
- if self.object is SLOTSATTR:
- try:
- parent___slots__ = inspect.getslots(self.parent)
- if parent___slots__ and (
- docstring := parent___slots__.get(self.objpath[-1])
- ):
- docstring = prepare_docstring(docstring)
- return [docstring]
- else:
- return []
- except ValueError as exc:
- logger.warning(
- __('Invalid __slots__ found on %s. Ignored.'),
- (self.parent.__qualname__, exc),
- type='autodoc',
- )
- return []
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class RuntimeInstanceAttributeMixin(DataDocumenterMixinBase):
- """Mixin for AttributeDocumenter to provide the feature for supporting runtime
- instance attributes (that are defined in __init__() methods with doc-comments).
-
- Example::
-
- class Foo:
- def __init__(self):
- self.attr = None #: This is a target of this mix-in.
- """
-
- RUNTIME_INSTANCE_ATTRIBUTE = object()
-
- def is_runtime_instance_attribute(self, parent: Any) -> bool:
- """Check the subject is an attribute defined in __init__()."""
- # An instance variable defined in __init__().
- if self.get_attribute_comment(parent, self.objpath[-1]): # type: ignore[attr-defined]
- return True
- return self.is_runtime_instance_attribute_not_commented(parent)
-
- def is_runtime_instance_attribute_not_commented(self, parent: Any) -> bool:
- """Check the subject is an attribute defined in __init__() without comment."""
- for cls in inspect.getmro(parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- if qualname and self.objpath:
- key = f'{qualname}.{self.objpath[-1]}'
- if key in analyzer.tagorder:
- return True
- except (AttributeError, PycodeError):
- pass
-
- return False
-
- def import_object(self, raiseerror: bool = False) -> bool:
- """Check the existence of runtime instance attribute after failing to import the
- attribute.
- """
- try:
- return super().import_object(raiseerror=True) # type: ignore[misc]
- except ImportError as exc:
- try:
- with mock(self.config.autodoc_mock_imports):
- ret = import_object(
- self.modname,
- self.objpath[:-1],
- 'class',
- attrgetter=self.get_attr, # type: ignore[attr-defined]
- )
- parent = ret[3]
- if self.is_runtime_instance_attribute(parent):
- self.object = self.RUNTIME_INSTANCE_ATTRIBUTE
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def should_suppress_value_header(self) -> bool:
- return (
- self.object is self.RUNTIME_INSTANCE_ATTRIBUTE
- or super().should_suppress_value_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if (
- self.object is self.RUNTIME_INSTANCE_ATTRIBUTE
- and self.is_runtime_instance_attribute_not_commented(self.parent)
- ):
- return None
- else:
- return super().get_doc() # type: ignore[misc]
-
-
-class UninitializedInstanceAttributeMixin(DataDocumenterMixinBase):
- """Mixin for AttributeDocumenter to provide the feature for supporting uninitialized
- instance attributes (PEP-526 styled, annotation only attributes).
-
- Example::
-
- class Foo:
- attr: int #: This is a target of this mix-in.
- """
-
- def is_uninitialized_instance_attribute(self, parent: Any) -> bool:
- """Check the subject is an annotation only attribute."""
- annotations = get_type_hints(
- parent, None, self.config.autodoc_type_aliases, include_extras=True
- )
- return self.objpath[-1] in annotations
-
- def import_object(self, raiseerror: bool = False) -> bool:
- """Check the existence of uninitialized instance attribute when failed to import
- the attribute.
- """
- try:
- return super().import_object(raiseerror=True) # type: ignore[misc]
- except ImportError as exc:
- try:
- ret = import_object(
- self.modname,
- self.objpath[:-1],
- 'class',
- attrgetter=self.get_attr, # type: ignore[attr-defined]
- )
- parent = ret[3]
- if self.is_uninitialized_instance_attribute(parent):
- self.object = UNINITIALIZED_ATTR
- self.parent = parent
- return True
- except ImportError:
- pass
-
- if raiseerror:
- raise
- logger.warning(exc.args[0], type='autodoc', subtype='import_object')
- self.env.note_reread()
- return False
-
- def should_suppress_value_header(self) -> bool:
- return (
- self.object is UNINITIALIZED_ATTR or super().should_suppress_value_header()
- )
-
- def get_doc(self) -> list[list[str]] | None:
- if self.object is UNINITIALIZED_ATTR:
- return None
- return super().get_doc() # type: ignore[misc]
-
-
-class AttributeDocumenter( # type: ignore[misc]
- GenericAliasMixin,
- SlotsMixin,
- RuntimeInstanceAttributeMixin,
- UninitializedInstanceAttributeMixin,
- NonDataDescriptorMixin,
- DocstringStripSignatureMixin,
- ClassLevelDocumenter,
-):
- """Specialized Documenter subclass for attributes."""
-
- objtype = 'attribute'
- member_order = 60
- option_spec: ClassVar[OptionSpec] = dict(ModuleLevelDocumenter.option_spec)
- option_spec['annotation'] = annotation_option
- option_spec['no-value'] = bool_option
-
- # must be higher than the MethodDocumenter, else it will recognize
- # some non-data descriptors as methods
- priority = 10
-
- @staticmethod
- def is_function_or_method(obj: Any) -> bool:
- return (
- inspect.isfunction(obj) or inspect.isbuiltin(obj) or inspect.ismethod(obj)
- )
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- if isinstance(parent, ModuleDocumenter):
- return False
- if inspect.isattributedescriptor(member):
- return True
- return not inspect.isroutine(member) and not isinstance(member, type)
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def update_annotations(self, parent: Any) -> None:
- """Update __annotations__ to support type_comment and so on."""
- try:
- annotations = dict(inspect.getannotations(parent))
- parent.__annotations__ = annotations
-
- for cls in inspect.getmro(parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- anns = analyzer.annotations
- for (classname, attrname), annotation in anns.items():
- if classname == qualname and attrname not in annotations:
- annotations[attrname] = annotation
- except (AttributeError, PycodeError):
- pass
- except (AttributeError, TypeError):
- # Failed to set __annotations__ (built-in, extensions, etc.)
- pass
-
- def import_object(self, raiseerror: bool = False) -> bool:
- ret = super().import_object(raiseerror)
- if inspect.isenumattribute(self.object):
- self.object = self.object.value
- if self.parent:
- self.update_annotations(self.parent)
-
- return ret
-
- def get_real_modname(self) -> str:
- real_modname = self.get_attr(self.parent or self.object, '__module__', None)
- return real_modname or self.modname
-
- def should_suppress_value_header(self) -> bool:
- if super().should_suppress_value_header():
- return True
- else:
- doc = self.get_doc()
- if doc:
- docstring, metadata = separate_metadata(
- '\n'.join(functools.reduce(operator.iadd, doc, []))
- )
- if 'hide-value' in metadata:
- return True
-
- return False
-
- def add_directive_header(self, sig: str) -> None:
- super().add_directive_header(sig)
- sourcename = self.get_sourcename()
- if (
- self.options.annotation is SUPPRESS
- or self.should_suppress_directive_header()
- ):
- pass
- elif self.options.annotation:
- self.add_line(' :annotation: %s' % self.options.annotation, sourcename)
- else:
- if self.config.autodoc_typehints != 'none':
- # obtain type annotation for this attribute
- annotations = get_type_hints(
- self.parent,
- None,
- self.config.autodoc_type_aliases,
- include_extras=True,
- )
- if self.objpath[-1] in annotations:
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- short_literals = self.config.python_display_short_literal_types
- objrepr = stringify_annotation(
- annotations.get(self.objpath[-1]),
- mode,
- short_literals=short_literals,
- )
- self.add_line(' :type: ' + objrepr, sourcename)
-
- try:
- if (
- self.options.no_value
- or self.should_suppress_value_header()
- or ismock(self.object)
- ):
- pass
- else:
- objrepr = object_description(self.object)
- self.add_line(' :value: ' + objrepr, sourcename)
- except ValueError:
- pass
-
- def get_attribute_comment(self, parent: Any, attrname: str) -> list[str] | None:
- for cls in inspect.getmro(parent):
- try:
- module = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
-
- analyzer = ModuleAnalyzer.for_module(module)
- analyzer.analyze()
- if qualname and self.objpath:
- key = (qualname, attrname)
- if key in analyzer.attr_docs:
- return list(analyzer.attr_docs[key])
- except (AttributeError, PycodeError):
- pass
-
- return None
-
- def get_doc(self) -> list[list[str]] | None:
- # Check the attribute has a docstring-comment
- comment = self.get_attribute_comment(self.parent, self.objpath[-1])
- if comment:
- return [comment]
-
- try:
- # Disable `autodoc_inherit_docstring` temporarily to avoid to obtain
- # a docstring from the value which descriptor returns unexpectedly.
- # See: https://github.com/sphinx-doc/sphinx/issues/7805
- orig = self.config.autodoc_inherit_docstrings
- self.config.autodoc_inherit_docstrings = False
- return super().get_doc()
- finally:
- self.config.autodoc_inherit_docstrings = orig
-
- def add_content(self, more_content: StringList | None) -> None:
- # Disable analyzing attribute comment on Documenter.add_content() to control it on
- # AttributeDocumenter.add_content()
- self.analyzer = None
-
- if more_content is None:
- more_content = StringList()
- self.update_content(more_content)
- super().add_content(more_content)
-
-
-class PropertyDocumenter(DocstringStripSignatureMixin, ClassLevelDocumenter): # type: ignore[misc]
- """Specialized Documenter subclass for properties."""
-
- objtype = 'property'
- member_order = 60
-
- # before AttributeDocumenter
- priority = AttributeDocumenter.priority + 1
-
- @classmethod
- def can_document_member(
- cls: type[Documenter], member: Any, membername: str, isattr: bool, parent: Any
- ) -> bool:
- if isinstance(parent, ClassDocumenter):
- if inspect.isproperty(member):
- return True
- else:
- __dict__ = safe_getattr(parent.object, '__dict__', {})
- obj = __dict__.get(membername)
- return isinstance(obj, classmethod) and inspect.isproperty(obj.__func__)
- else:
- return False
-
- def import_object(self, raiseerror: bool = False) -> bool:
- """Check the existence of uninitialized instance attribute when failed to import
- the attribute.
- """
- ret = super().import_object(raiseerror)
- if ret and not inspect.isproperty(self.object):
- __dict__ = safe_getattr(self.parent, '__dict__', {})
- obj = __dict__.get(self.objpath[-1])
- if isinstance(obj, classmethod) and inspect.isproperty(obj.__func__):
- self.object = obj.__func__
- self.isclassmethod: bool = True
- return True
- else:
- return False
-
- self.isclassmethod = False
- return ret
-
- def format_args(self, **kwargs: Any) -> str:
- func = self._get_property_getter()
- if func is None:
- return ''
-
- # update the annotations of the property getter
- self._events.emit('autodoc-before-process-signature', func, False)
- # correctly format the arguments for a property
- return super().format_args(**kwargs)
-
- def document_members(self, all_members: bool = False) -> None:
- pass
-
- def get_real_modname(self) -> str:
- real_modname = self.get_attr(self.parent or self.object, '__module__', None)
- return real_modname or self.modname
-
- def add_directive_header(self, sig: str) -> None:
- super().add_directive_header(sig)
- sourcename = self.get_sourcename()
- if inspect.isabstractmethod(self.object):
- self.add_line(' :abstractmethod:', sourcename)
- if self.isclassmethod:
- self.add_line(' :classmethod:', sourcename)
-
- func = self._get_property_getter()
- if func is None or self.config.autodoc_typehints == 'none':
- return
-
- try:
- signature = inspect.signature(
- func, type_aliases=self.config.autodoc_type_aliases
- )
- if signature.return_annotation is not Parameter.empty:
- mode = _get_render_mode(self.config.autodoc_typehints_format)
- short_literals = self.config.python_display_short_literal_types
- objrepr = stringify_annotation(
- signature.return_annotation, mode, short_literals=short_literals
- )
- self.add_line(' :type: ' + objrepr, sourcename)
- except TypeError as exc:
- logger.warning(
- __('Failed to get a function signature for %s: %s'), self.fullname, exc
- )
- pass
- except ValueError:
- pass
-
- def _get_property_getter(self) -> Callable[..., Any] | None:
- if safe_getattr(self.object, 'fget', None): # property
- return self.object.fget
- if safe_getattr(self.object, 'func', None): # cached_property
- return self.object.func
- return None
-
-
-def autodoc_attrgetter(
- obj: Any, name: str, *defargs: Any, registry: SphinxComponentRegistry
-) -> Any:
- """Alternative getattr() for types"""
- for typ, func in registry.autodoc_attrgetters.items():
- if isinstance(obj, typ):
- return func(obj, name, *defargs)
-
- return safe_getattr(obj, name, *defargs)
def setup(app: Sphinx) -> ExtensionMetadata:
- app.add_autodocumenter(ModuleDocumenter)
- app.add_autodocumenter(ClassDocumenter)
- app.add_autodocumenter(ExceptionDocumenter)
- app.add_autodocumenter(DataDocumenter)
- app.add_autodocumenter(FunctionDocumenter)
- app.add_autodocumenter(DecoratorDocumenter)
- app.add_autodocumenter(MethodDocumenter)
- app.add_autodocumenter(AttributeDocumenter)
- app.add_autodocumenter(PropertyDocumenter)
+ obj_type: _AutodocObjType
+ for obj_type in (
+ 'module',
+ 'class',
+ 'exception',
+ 'function',
+ 'decorator',
+ 'method',
+ 'property',
+ 'attribute',
+ 'data',
+ 'type',
+ ):
+ # register the automodule, autoclass, etc. directives
+ app.add_directive(f'auto{obj_type}', AutodocDirective)
app.add_config_value(
'autoclass_content',
@@ -3208,15 +141,20 @@ def setup(app: Sphinx) -> ExtensionMetadata:
app.add_config_value(
'autodoc_inherit_docstrings', True, 'env', types=frozenset({bool})
)
+ app.add_config_value(
+ 'autodoc_preserve_defaults', False, 'env', types=frozenset({bool})
+ )
+ app.add_config_value(
+ 'autodoc_use_type_comments', True, 'env', types=frozenset({bool})
+ )
+
app.add_event('autodoc-before-process-signature')
app.add_event('autodoc-process-docstring')
app.add_event('autodoc-process-signature')
app.add_event('autodoc-skip-member')
app.add_event('autodoc-process-bases')
- app.setup_extension('sphinx.ext.autodoc.preserve_defaults')
- app.setup_extension('sphinx.ext.autodoc.type_comment')
- app.setup_extension('sphinx.ext.autodoc.typehints')
+ app.connect('object-description-transform', _merge_typehints)
return {
'version': sphinx.__display_version__,
diff --git a/sphinx/ext/autodoc/_directive_options.py b/sphinx/ext/autodoc/_directive_options.py
new file mode 100644
index 00000000000..12618b62ae5
--- /dev/null
+++ b/sphinx/ext/autodoc/_directive_options.py
@@ -0,0 +1,269 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from docutils.utils import assemble_option_dict
+
+from sphinx.ext.autodoc._sentinels import ALL, EMPTY, SUPPRESS
+from sphinx.locale import __
+
+if TYPE_CHECKING:
+ from collections.abc import Mapping, Set
+ from typing import Any, Final, Literal, Self
+
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+ from sphinx.ext.autodoc._sentinels import ALL_T, EMPTY_T, SUPPRESS_T
+ from sphinx.util.typing import OptionSpec
+
+
+# common option names for autodoc directives
+AUTODOC_DEFAULT_OPTIONS = (
+ 'members',
+ 'undoc-members',
+ 'no-index',
+ 'no-index-entry',
+ 'inherited-members',
+ 'show-inheritance',
+ 'private-members',
+ 'special-members',
+ 'ignore-module-all',
+ 'exclude-members',
+ 'member-order',
+ 'imported-members',
+ 'class-doc-from',
+ 'no-value',
+)
+
+AUTODOC_EXTENDABLE_OPTIONS = frozenset({
+ 'members',
+ 'private-members',
+ 'special-members',
+ 'exclude-members',
+})
+
+
+class _AutoDocumenterOptions:
+ # TODO: make immutable.
+
+ no_index: Literal[True] | None = None
+ no_index_entry: Literal[True] | None = None
+ _tab_width: int = 8
+
+ # module-like options
+ members: ALL_T | list[str] | None = None
+ undoc_members: Literal[True] | None = None
+ inherited_members: Set[str] | None = None
+ show_inheritance: Literal[True] | None = None
+ synopsis: str | None = None
+ platform: str | None = None
+ deprecated: Literal[True] | None = None
+ member_order: Literal['alphabetical', 'bysource', 'groupwise'] | None = None
+ exclude_members: EMPTY_T | set[str] | None = None
+ private_members: ALL_T | list[str] | None = None
+ special_members: ALL_T | list[str] | None = None
+ imported_members: Literal[True] | None = None
+ ignore_module_all: Literal[True] | None = None
+ no_value: Literal[True] | None = None
+
+ # class-like options (class, exception)
+ class_doc_from: Literal['both', 'class', 'init'] | None = None
+
+ # assignment-like (data, attribute)
+ annotation: SUPPRESS_T | str | None = None
+
+ noindex: Literal[True] | None = None
+
+ def __init__(self, **kwargs: Any) -> None:
+ vars(self).update(kwargs)
+
+ def __repr__(self) -> str:
+ args = ', '.join(f'{k}={v!r}' for k, v in vars(self).items())
+ return f'_AutoDocumenterOptions({args})'
+
+ def __getattr__(self, name: str) -> object:
+ return None # return None for missing attributes
+
+ def copy(self) -> Self:
+ return self.__class__(**vars(self))
+
+ @classmethod
+ def from_directive_options(cls, opts: Mapping[str, Any], /) -> Self:
+ return cls(**{k.replace('-', '_'): v for k, v in opts.items() if v is not None})
+
+
+def identity(x: Any) -> Any:
+ return x
+
+
+def members_option(arg: str | None) -> ALL_T | list[str] | None:
+ """Used to convert the :members: option to auto directives."""
+ if arg is None or arg is True:
+ return ALL
+ if arg is False:
+ return None
+ return [stripped for x in arg.split(',') if (stripped := x.strip())]
+
+
+def exclude_members_option(arg: str | None) -> EMPTY_T | set[str]:
+ """Used to convert the :exclude-members: option."""
+ if arg is None or arg is True:
+ return EMPTY
+ return {stripped for x in arg.split(',') if (stripped := x.strip())}
+
+
+def inherited_members_option(arg: str | None) -> set[str]:
+ """Used to convert the :inherited-members: option to auto directives."""
+ if arg is None or arg is True:
+ return {'object'}
+ if arg:
+ return {x.strip() for x in arg.split(',')}
+ return set()
+
+
+def member_order_option(
+ arg: str | None,
+) -> Literal['alphabetical', 'bysource', 'groupwise'] | None:
+ """Used to convert the :member-order: option to auto directives."""
+ if arg is None or arg is True:
+ return None
+ if arg in {'alphabetical', 'bysource', 'groupwise'}:
+ return arg # type: ignore[return-value]
+ raise ValueError(__('invalid value for member-order option: %s') % arg)
+
+
+def class_doc_from_option(arg: str | None) -> Literal['both', 'class', 'init']:
+ """Used to convert the :class-doc-from: option to autoclass directives."""
+ if arg in {'both', 'class', 'init'}:
+ return arg # type: ignore[return-value]
+ raise ValueError(__('invalid value for class-doc-from option: %s') % arg)
+
+
+def annotation_option(arg: str | None) -> SUPPRESS_T | str | Literal[False]:
+ if arg is None or arg is True:
+ # suppress showing the representation of the object
+ return SUPPRESS
+ return arg
+
+
+def bool_option(arg: str | None) -> bool:
+ """Used to convert flag options to auto directives. (Instead of
+ directives.flag(), which returns None).
+ """
+ return True
+
+
+def merge_members_option(options: dict[str, Any]) -> None:
+ """Merge :private-members: and :special-members: options to the
+ :members: option.
+ """
+ if options.get('members') is ALL:
+ # merging is not needed when members: ALL
+ return
+
+ members = options.setdefault('members', [])
+ for key in ('private-members', 'special-members'):
+ other_members = options.get(key)
+ if other_members is not None and other_members is not ALL:
+ for member in other_members:
+ if member not in members:
+ members.append(member)
+
+
+class Options(dict[str, object]): # NoQA: FURB189
+ """A dict/attribute hybrid that returns None on nonexisting keys."""
+
+ def __repr__(self) -> str:
+ return f'Options({super().__repr__()})'
+
+ def copy(self) -> Options:
+ return Options(super().copy())
+
+ def __getattr__(self, name: str) -> Any:
+ try:
+ return self[name.replace('_', '-')]
+ except KeyError:
+ return None
+
+
+_OPTION_SPEC_COMMON: Final[OptionSpec] = {
+ 'no-index': bool_option,
+ 'no-index-entry': bool_option,
+}
+_OPTION_SPEC_HAS_MEMBERS: Final[OptionSpec] = _OPTION_SPEC_COMMON | {
+ 'members': members_option,
+ 'exclude-members': exclude_members_option,
+ 'undoc-members': bool_option,
+ 'private-members': members_option,
+ 'special-members': members_option,
+ 'member-order': member_order_option,
+ 'show-inheritance': bool_option,
+}
+_OPTION_SPEC_MODULE_SPECIFIC: Final[OptionSpec] = {
+ 'ignore-module-all': bool_option,
+ 'imported-members': bool_option,
+ 'deprecated': bool_option,
+ 'platform': identity,
+ 'synopsis': identity,
+}
+_OPTION_SPEC_CLASS_SPECIFIC: Final[OptionSpec] = {
+ 'class-doc-from': class_doc_from_option,
+ 'inherited-members': inherited_members_option,
+}
+_OPTION_SPEC_ASSIGNMENT: Final[OptionSpec] = _OPTION_SPEC_COMMON | {
+ 'annotation': annotation_option,
+ 'no-value': bool_option,
+}
+_OPTION_SPEC_DEPRECATED: Final[OptionSpec] = {
+ 'noindex': bool_option,
+}
+_OPTION_SPEC_FUNCTION_DEF: Final = _OPTION_SPEC_COMMON | _OPTION_SPEC_DEPRECATED
+_OPTION_SPECS: Final[Mapping[_AutodocObjType, OptionSpec]] = {
+ 'module': _OPTION_SPEC_HAS_MEMBERS
+ | _OPTION_SPEC_MODULE_SPECIFIC
+ | {'inherited-members': inherited_members_option} # special case
+ | _OPTION_SPEC_DEPRECATED,
+ 'class': _OPTION_SPEC_HAS_MEMBERS
+ | _OPTION_SPEC_CLASS_SPECIFIC
+ | _OPTION_SPEC_DEPRECATED,
+ 'exception': _OPTION_SPEC_HAS_MEMBERS
+ | _OPTION_SPEC_CLASS_SPECIFIC
+ | _OPTION_SPEC_DEPRECATED,
+ 'function': _OPTION_SPEC_FUNCTION_DEF,
+ 'decorator': _OPTION_SPEC_FUNCTION_DEF,
+ 'method': _OPTION_SPEC_FUNCTION_DEF,
+ 'property': _OPTION_SPEC_FUNCTION_DEF,
+ 'attribute': _OPTION_SPEC_ASSIGNMENT | _OPTION_SPEC_DEPRECATED,
+ 'data': _OPTION_SPEC_ASSIGNMENT | _OPTION_SPEC_DEPRECATED,
+ 'type': _OPTION_SPEC_ASSIGNMENT,
+}
+
+
+def _process_documenter_options(
+ *,
+ obj_type: _AutodocObjType,
+ default_options: Mapping[str, str | bool],
+ options: dict[str, str | None],
+) -> _AutoDocumenterOptions:
+ """Recognize options of object type from user input."""
+ option_spec = _OPTION_SPECS[obj_type]
+ for name in AUTODOC_DEFAULT_OPTIONS:
+ if name not in option_spec:
+ continue
+
+ negated = options.pop(f'no-{name}', True) is None
+ if name in default_options and not negated:
+ if name in options and isinstance(default_options[name], str):
+ # take value from options if present or extend it
+ # with autodoc_default_options if necessary
+ if name in AUTODOC_EXTENDABLE_OPTIONS:
+ opt_value = options[name]
+ if opt_value is not None and opt_value.startswith('+'):
+ options[name] = f'{default_options[name]},{opt_value[1:]}'
+ else:
+ options[name] = default_options[name] # type: ignore[assignment]
+ elif (opt_value := options.get(name)) is not None:
+ # remove '+' from option argument if there's nothing to merge it with
+ options[name] = opt_value.removeprefix('+')
+
+ opts = assemble_option_dict(options.items(), option_spec) # type: ignore[arg-type]
+ return _AutoDocumenterOptions.from_directive_options(opts)
diff --git a/sphinx/ext/autodoc/_docstrings.py b/sphinx/ext/autodoc/_docstrings.py
new file mode 100644
index 00000000000..a1b1c84fd64
--- /dev/null
+++ b/sphinx/ext/autodoc/_docstrings.py
@@ -0,0 +1,319 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, TypeVar
+
+from sphinx.errors import PycodeError
+from sphinx.ext.autodoc._importer import (
+ _get_attribute_comment,
+ _is_runtime_instance_attribute_not_commented,
+)
+from sphinx.ext.autodoc._property_types import _ClassDefProperties
+from sphinx.ext.autodoc._sentinels import (
+ RUNTIME_INSTANCE_ATTRIBUTE,
+ SLOTS_ATTR,
+ UNINITIALIZED_ATTR,
+)
+from sphinx.locale import __
+from sphinx.pycode import ModuleAnalyzer
+from sphinx.util import inspect, logging
+from sphinx.util.docstrings import prepare_docstring
+from sphinx.util.inspect import getdoc
+
+if TYPE_CHECKING:
+ from collections.abc import Iterator, Mapping
+ from typing import Any, Literal
+
+ from sphinx.events import EventManager
+ from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+ from sphinx.ext.autodoc._property_types import _ItemProperties
+ from sphinx.ext.autodoc._shared import _AttrGetter
+
+logger = logging.getLogger('sphinx.ext.autodoc')
+
+
+def _docstring_lines_for_props(
+ docstrings: list[list[str]] | None,
+ /,
+ *,
+ props: _ItemProperties,
+ parent_modname: str | None,
+ events: EventManager,
+ options: _AutoDocumenterOptions,
+) -> tuple[str, ...]:
+ attr_docs = _attr_docs_for_props(props, parent_modname=parent_modname)
+ prepared_docstrings = _prepare_docstrings(
+ docstrings, props=props, attr_docs=attr_docs
+ )
+ docstring_lines = _process_docstrings(
+ prepared_docstrings,
+ events=events,
+ props=props,
+ options=options,
+ )
+ return tuple(docstring_lines)
+
+
+def _attr_docs_for_props(
+ props: _ItemProperties, *, parent_modname: str | None
+) -> Mapping[tuple[str, str], list[str]]:
+ if props.obj_type in {'class', 'exception'}:
+ # If a class gets imported into the module ``parent_modname``
+ # the analyzer won't find the source of the class,
+ # if it looks in ``parent_modname``.
+ real_modname = props.module_name
+ elif parent_modname is None:
+ real_modname = props.canonical_module_name
+ else:
+ real_modname = parent_modname
+
+ try:
+ analyzer = ModuleAnalyzer.for_module(real_modname)
+ # parse right now, to get PycodeErrors on parsing (results will
+ # be cached anyway)
+ analyzer.analyze()
+ except PycodeError as exc:
+ logger.debug('[autodoc] module analyzer failed: %s', exc)
+ # no source file -- e.g. for builtin and C modules
+ attr_docs = {}
+ else:
+ attr_docs = analyzer.attr_docs
+ return attr_docs
+
+
+def _prepare_docstrings(
+ docstrings: list[list[str]] | None,
+ *,
+ props: _ItemProperties,
+ attr_docs: Mapping[tuple[str, str], list[str]],
+) -> list[list[str]] | None:
+ """Add content from docstrings, attribute documentation and user."""
+ # add content from attribute documentation
+ if props.obj_type not in {'data', 'attribute'} and props.parts:
+ key = ('.'.join(props.parent_names), props.name)
+ try:
+ # make a copy of docstring for attributes to avoid cache
+ # the change of autodoc-process-docstring event.
+ return [list(attr_docs[key])]
+ except KeyError:
+ pass
+
+ if docstrings is None:
+ return None
+ if not docstrings:
+ # append at least a dummy docstring, so that the event
+ # autodoc-process-docstring is fired and can add some
+ # content if desired
+ docstrings.append([])
+ return docstrings
+
+
+def _process_docstrings(
+ docstrings: list[list[str]] | None,
+ *,
+ events: EventManager,
+ props: _ItemProperties,
+ options: _AutoDocumenterOptions,
+) -> Iterator[str]:
+ """Let the user process the docstrings before adding them."""
+ if docstrings is None:
+ return
+ for docstring_lines in docstrings:
+ # let extensions pre-process docstrings
+ events.emit(
+ 'autodoc-process-docstring',
+ props.obj_type,
+ props.full_name,
+ props._obj,
+ options,
+ docstring_lines,
+ )
+
+ yield from docstring_lines
+ if docstring_lines and docstring_lines[-1]:
+ # ensure the docstring ends with a blank line
+ yield ''
+
+
+def _get_docstring_lines(
+ props: _ItemProperties,
+ *,
+ class_doc_from: Literal['both', 'class', 'init'],
+ get_attr: _AttrGetter,
+ inherit_docstrings: bool,
+ parent: Any,
+ tab_width: int,
+) -> list[list[str]] | None:
+ """Decode and return lines of the docstring(s) for the object.
+
+ When it returns None, autodoc-process-docstring will not be called for this
+ object.
+ """
+ obj = props._obj
+
+ if props.obj_type in {'class', 'exception'}:
+ assert isinstance(props, _ClassDefProperties)
+
+ if isinstance(obj, TypeVar):
+ if obj.__doc__ == TypeVar.__doc__:
+ return []
+ if props.doc_as_attr:
+ # Don't show the docstring of the class when it is an alias.
+ if _class_variable_comment(props):
+ return []
+ return None
+
+ docstrings = []
+ if attr_docstring := getdoc(obj):
+ docstrings.append(attr_docstring)
+
+ # for classes, what the "docstring" is can be controlled via a
+ # config value; the default is only the class docstring
+ if class_doc_from in {'both', 'init'}:
+ __init__ = get_attr(obj, '__init__', None)
+ init_docstring = getdoc(
+ __init__,
+ allow_inherited=inherit_docstrings,
+ cls=obj, # TODO: object or obj?
+ name='__init__',
+ )
+ # no __init__ means default __init__
+ if init_docstring == object.__init__.__doc__:
+ init_docstring = None
+ if not init_docstring:
+ # try __new__
+ __new__ = get_attr(obj, '__new__', None)
+ init_docstring = getdoc(
+ __new__,
+ allow_inherited=inherit_docstrings,
+ cls=object, # TODO: object or obj?
+ name='__new__',
+ )
+ # no __new__ means default __new__
+ if init_docstring == object.__new__.__doc__:
+ init_docstring = None
+ if init_docstring:
+ if class_doc_from == 'init':
+ docstrings = [init_docstring]
+ else:
+ docstrings.append(init_docstring)
+
+ return [prepare_docstring(docstring, tab_width) for docstring in docstrings]
+
+ if props.obj_type == 'method':
+ docstring = getdoc(
+ obj,
+ allow_inherited=inherit_docstrings,
+ cls=parent,
+ name=props.object_name,
+ )
+ if (
+ not docstring
+ or (props.name == '__init__' and docstring == object.__init__.__doc__)
+ or (props.name == '__new__' and docstring == object.__new__.__doc__)
+ ):
+ return []
+ return [prepare_docstring(docstring, tab_width)]
+
+ if props.obj_type == 'data':
+ # Check the variable has a docstring-comment
+
+ # get_module_comment()
+ comment = None
+ try:
+ analyzer = ModuleAnalyzer.for_module(props.module_name)
+ analyzer.analyze()
+ key = ('', props.name)
+ if key in analyzer.attr_docs:
+ comment = list(analyzer.attr_docs[key])
+ except PycodeError:
+ pass
+
+ if comment:
+ return [comment]
+
+ if obj is UNINITIALIZED_ATTR:
+ return []
+
+ docstring = getdoc(
+ obj,
+ allow_inherited=inherit_docstrings,
+ cls=parent,
+ name=props.object_name,
+ )
+ if not docstring:
+ return []
+ return [prepare_docstring(docstring, tab_width)]
+
+ if props.obj_type == 'attribute':
+ # Check the attribute has a docstring-comment
+ comment = _get_attribute_comment(
+ parent=parent, obj_path=props.parts, attrname=props.parts[-1]
+ )
+ if comment:
+ return [comment]
+
+ # Disable `autodoc_inherit_docstring` to avoid to obtain
+ # a docstring from the value which descriptor returns unexpectedly.
+ # See: https://github.com/sphinx-doc/sphinx/issues/7805
+ inherit_docstrings = False
+
+ if obj is SLOTS_ATTR:
+ # support for __slots__
+ try:
+ parent___slots__ = inspect.getslots(parent)
+ if parent___slots__ and (docstring := parent___slots__.get(props.name)):
+ return [prepare_docstring(docstring)]
+ return []
+ except ValueError as exc:
+ logger.warning(
+ __('Invalid __slots__ found on %s. Ignored.'),
+ (parent.__qualname__, exc),
+ type='autodoc',
+ )
+ return []
+
+ if (
+ obj is RUNTIME_INSTANCE_ATTRIBUTE
+ and _is_runtime_instance_attribute_not_commented(
+ parent=parent, obj_path=props.parts
+ )
+ ):
+ return None
+
+ if obj is UNINITIALIZED_ATTR:
+ return None
+
+ if not inspect.isattributedescriptor(obj):
+ # the docstring of non-data descriptor is very probably
+ # the wrong thing to display
+ return None
+
+ docstring = getdoc(
+ obj,
+ allow_inherited=inherit_docstrings,
+ cls=parent,
+ name=props.object_name,
+ )
+ if not docstring:
+ return []
+ return [prepare_docstring(docstring, tab_width)]
+
+ docstring = getdoc(
+ obj,
+ allow_inherited=inherit_docstrings,
+ cls=parent,
+ name=props.object_name,
+ )
+ if not docstring:
+ return []
+ return [prepare_docstring(docstring, tab_width)]
+
+
+def _class_variable_comment(props: _ItemProperties) -> bool:
+ try:
+ analyzer = ModuleAnalyzer.for_module(props.module_name)
+ analyzer.analyze()
+ key = ('', props.dotted_parts)
+ return bool(analyzer.attr_docs.get(key, False))
+ except PycodeError:
+ return False
diff --git a/sphinx/ext/autodoc/_documenters.py b/sphinx/ext/autodoc/_documenters.py
new file mode 100644
index 00000000000..d60224ce075
--- /dev/null
+++ b/sphinx/ext/autodoc/_documenters.py
@@ -0,0 +1,17 @@
+from __future__ import annotations
+
+
+class Documenter:
+ """A Documenter knows how to autodocument a single object type. When
+ registered with the AutoDirective, it will be used to document objects
+ of that type when needed by autodoc.
+
+ Its *objtype* attribute selects what auto directive it is assigned to
+ (the directive name is 'auto' + objtype), and what directive it generates
+ by default, though that can be overridden by an attribute called
+ *directivetype*.
+
+ A Documenter has an *option_spec* that works like a docutils directive's;
+ in fact, it will be used to parse an auto directive's options that matches
+ the Documenter.
+ """
diff --git a/sphinx/ext/autodoc/_event_listeners.py b/sphinx/ext/autodoc/_event_listeners.py
new file mode 100644
index 00000000000..119abae2b64
--- /dev/null
+++ b/sphinx/ext/autodoc/_event_listeners.py
@@ -0,0 +1,116 @@
+"""Some useful event listener factories for autodoc-process-docstring."""
+
+from __future__ import annotations
+
+import re
+from typing import TYPE_CHECKING
+
+if TYPE_CHECKING:
+ from collections.abc import Callable, Sequence
+ from typing import Any, TypeAlias
+
+ from sphinx.application import Sphinx
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+
+ _AutodocProcessDocstringListener: TypeAlias = Callable[
+ [Sphinx, _AutodocObjType, str, Any, dict[str, bool], list[str]], None
+ ]
+ _AutodocProcessSignatureListener: TypeAlias = Callable[ # NoQA: PYI047
+ [Sphinx, _AutodocObjType, str, Any, dict[str, bool], str | None, str | None],
+ tuple[str | None, str | None] | None,
+ ]
+ _AutodocSkipMemberListener: TypeAlias = Callable[ # NoQA: PYI047
+ [Sphinx, _AutodocObjType, str, Any, bool, dict[str, bool]], bool
+ ]
+
+
+def cut_lines(
+ pre: int, post: int = 0, what: Sequence[str] | None = None
+) -> _AutodocProcessDocstringListener:
+ """Return a listener that removes the first *pre* and last *post*
+ lines of every docstring. If *what* is a sequence of strings,
+ only docstrings of a type in *what* will be processed.
+
+ Use like this (e.g. in the ``setup()`` function of :file:`conf.py`)::
+
+ from sphinx.ext.autodoc import cut_lines
+
+ app.connect('autodoc-process-docstring', cut_lines(4, what={'module'}))
+
+ This can (and should) be used in place of :confval:`automodule_skip_lines`.
+ """
+ if not what:
+ what_unique: frozenset[str] = frozenset()
+ elif isinstance(what, str): # strongly discouraged
+ what_unique = frozenset({what})
+ else:
+ what_unique = frozenset(what)
+
+ def process(
+ app: Sphinx,
+ what_: _AutodocObjType,
+ name: str,
+ obj: Any,
+ options: dict[str, bool],
+ lines: list[str],
+ ) -> None:
+ if what_unique and what_ not in what_unique:
+ return
+ del lines[:pre]
+ if post:
+ # remove one trailing blank line.
+ if lines and not lines[-1]:
+ lines.pop(-1)
+ del lines[-post:]
+ # make sure there is a blank line at the end
+ if lines and lines[-1]:
+ lines.append('')
+
+ return process
+
+
+def between(
+ marker: str,
+ what: Sequence[str] | None = None,
+ keepempty: bool = False,
+ exclude: bool = False,
+) -> _AutodocProcessDocstringListener:
+ """Return a listener that either keeps, or if *exclude* is True excludes,
+ lines between lines that match the *marker* regular expression. If no line
+ matches, the resulting docstring would be empty, so no change will be made
+ unless *keepempty* is true.
+
+ If *what* is a sequence of strings, only docstrings of a type in *what* will
+ be processed.
+ """
+ marker_re = re.compile(marker)
+
+ def process(
+ app: Sphinx,
+ what_: _AutodocObjType,
+ name: str,
+ obj: Any,
+ options: dict[str, bool],
+ lines: list[str],
+ ) -> None:
+ if what and what_ not in what:
+ return
+ deleted = 0
+ delete = not exclude
+ orig_lines = lines.copy()
+ for i, line in enumerate(orig_lines):
+ if delete:
+ lines.pop(i - deleted)
+ deleted += 1
+ if marker_re.match(line):
+ delete = not delete
+ if delete:
+ lines.pop(i - deleted)
+ deleted += 1
+ if not lines and not keepempty:
+ lines[:] = orig_lines
+ # make sure there is a blank line at the end
+ if lines and lines[-1]:
+ lines.append('')
+
+ return process
diff --git a/sphinx/ext/autodoc/_generate.py b/sphinx/ext/autodoc/_generate.py
new file mode 100644
index 00000000000..6c47e811b9b
--- /dev/null
+++ b/sphinx/ext/autodoc/_generate.py
@@ -0,0 +1,361 @@
+from __future__ import annotations
+
+import sys
+from typing import TYPE_CHECKING
+
+from docutils.statemachine import StringList
+
+from sphinx.errors import PycodeError
+from sphinx.ext.autodoc._member_finder import _gather_members
+from sphinx.ext.autodoc._renderer import _add_content, _directive_header_lines
+from sphinx.ext.autodoc._sentinels import ALL
+from sphinx.ext.autodoc._shared import _get_render_mode
+from sphinx.ext.autodoc.mock import ismock
+from sphinx.locale import _, __
+from sphinx.pycode import ModuleAnalyzer
+from sphinx.util import inspect, logging
+from sphinx.util.typing import restify, stringify_annotation
+
+if TYPE_CHECKING:
+ from collections.abc import Iterator, Mapping, MutableSet
+
+ from sphinx.environment import _CurrentDocument
+ from sphinx.events import EventManager
+ from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+ from sphinx.ext.autodoc._property_types import _ItemProperties
+ from sphinx.ext.autodoc._shared import _AttrGetter, _AutodocConfig
+ from sphinx.util.typing import _RestifyMode
+
+logger = logging.getLogger('sphinx.ext.autodoc')
+
+
+def _generate_directives(
+ more_content: StringList | None = None,
+ parent_modname: str | None = None,
+ check_module: bool = False,
+ all_members: bool = False,
+ *,
+ config: _AutodocConfig,
+ current_document: _CurrentDocument,
+ events: EventManager,
+ get_attr: _AttrGetter,
+ indent: str,
+ options: _AutoDocumenterOptions,
+ props: _ItemProperties,
+ record_dependencies: set[str],
+ ref_context: Mapping[str, str | None],
+ reread_always: MutableSet[str],
+ result: StringList,
+) -> None:
+ """Generate reST for the object given by *props*, and possibly for its members.
+
+ If *more_content* is given, include that content. If *parent_modname* is
+ given, use that module name to find attribute docs. If *check_module* is
+ True, only generate if the object is defined in the module name it is
+ imported from. If *all_members* is True, document all members.
+ """
+ # If there is no parent module specified, figure out which to use.
+ # The real module is used in the module analyzer to look up the module
+ # where the attribute documentation would actually be found in.
+ # This is used for situations where you have a module that collects the
+ # functions and classes of internal submodules.
+ if parent_modname is None or props.obj_type in {'class', 'exception'}:
+ # If a class gets imported into the module ``parent_modname``
+ # the analyzer won't find the source of the class,
+ # if it looks in ``parent_modname``.
+ real_modname = props.canonical_module_name
+ else:
+ real_modname = parent_modname
+
+ # try to also get a source code analyzer for attribute docs
+ try:
+ analyzer = ModuleAnalyzer.for_module(real_modname)
+ # parse right now, to get PycodeErrors on parsing (results will
+ # be cached anyway)
+ analyzer.analyze()
+ record_dependencies.add(analyzer.srcname)
+ except PycodeError as exc:
+ logger.debug('[autodoc] module analyzer failed: %s', exc)
+ # no source file -- e.g. for builtin and C modules
+ analyzer = None
+ # at least add the module source file as a dependency
+ if props.module_name:
+ try:
+ module_spec = sys.modules[props.module_name].__spec__
+ except (AttributeError, KeyError):
+ pass
+ else:
+ if (
+ module_spec is not None
+ and module_spec.has_location
+ and module_spec.origin
+ ):
+ record_dependencies.add(module_spec.origin)
+
+ if real_modname != props.canonical_module_name:
+ # Add module to dependency list if target object is defined in other module.
+ try:
+ srcname, _ = ModuleAnalyzer.get_module_source(props.canonical_module_name)
+ record_dependencies.add(str(srcname))
+ except PycodeError:
+ pass
+
+ has_docstring = bool(props.docstring_lines)
+ if ismock(props._obj) and not has_docstring:
+ logger.warning(
+ __('A mocked object is detected: %r'),
+ props.full_name,
+ type='autodoc',
+ subtype='mocked_object',
+ )
+
+ # check __module__ of object (for members not given explicitly)
+ if check_module and not options.imported_members:
+ subject = inspect.unpartial(props._obj)
+ modname = get_attr(subject, '__module__', None)
+ if modname and modname != props.module_name:
+ return
+
+ # add all content (from docstrings, attribute docs etc.)
+ analyzer_source = '' if analyzer is None else analyzer.srcname
+ _add_directive_lines(
+ more_content=more_content,
+ is_final=analyzer is not None and props.dotted_parts in analyzer.finals,
+ config=config,
+ indent=indent,
+ options=options,
+ props=props,
+ result=result,
+ source_name=_docstring_source_name(props=props, source=analyzer_source),
+ )
+
+ # document members, if possible
+ _document_members(
+ all_members=all_members,
+ analyzer_order=analyzer.tagorder if analyzer is not None else {},
+ attr_docs=analyzer.attr_docs if analyzer is not None else {},
+ config=config,
+ current_document=current_document,
+ events=events,
+ get_attr=get_attr,
+ indent=indent,
+ options=options,
+ props=props,
+ real_modname=real_modname,
+ record_dependencies=record_dependencies,
+ ref_context=ref_context,
+ reread_always=reread_always,
+ result=result,
+ )
+
+
+def _add_directive_lines(
+ *,
+ more_content: StringList | None,
+ is_final: bool,
+ config: _AutodocConfig,
+ indent: str,
+ options: _AutoDocumenterOptions,
+ props: _ItemProperties,
+ result: StringList,
+ source_name: str,
+) -> None:
+ # generate the directive header and options, if applicable
+ lines = _directive_header_lines(
+ autodoc_typehints=config.autodoc_typehints,
+ directive_name=(
+ 'py:attribute'
+ if props.obj_type in {'class', 'exception'} and props.doc_as_attr # type: ignore[attr-defined]
+ else f'py:{props.obj_type}'
+ ),
+ is_final=is_final,
+ options=options,
+ props=props,
+ )
+ header_lines = StringList(list(lines), source='')
+
+ # add content from docstrings or attribute documentation
+ docstring_lines = StringList(props.docstring_lines, source=source_name)
+
+ # add alias information, if applicable
+ lines = _body_alias_lines(
+ render_mode=_get_render_mode(config.autodoc_typehints_format),
+ short_literals=config.python_display_short_literal_types,
+ props=props,
+ )
+ alias_lines = StringList(list(lines), source='')
+
+ # make sure that the result starts with an empty line. This is
+ # necessary for some situations where another directive preprocesses
+ # reST and no starting newline is present
+ result.append('', '')
+ _add_content(header_lines, result=result, indent=indent)
+ result.append('', '')
+ _add_content(docstring_lines, result=result, indent=indent + ' ')
+ if more_content is not None:
+ # add additional content from the directive, if present
+ _add_content(more_content, result=result, indent=indent + ' ')
+ _add_content(alias_lines, result=result, indent=indent + ' ')
+
+
+def _document_members(
+ *,
+ all_members: bool,
+ analyzer_order: dict[str, int],
+ attr_docs: dict[tuple[str, str], list[str]],
+ config: _AutodocConfig,
+ current_document: _CurrentDocument,
+ events: EventManager,
+ get_attr: _AttrGetter,
+ indent: str,
+ options: _AutoDocumenterOptions,
+ props: _ItemProperties,
+ real_modname: str,
+ record_dependencies: set[str],
+ ref_context: Mapping[str, str | None],
+ reread_always: MutableSet[str],
+ result: StringList,
+) -> None:
+ """Generate reST for member documentation.
+
+ If *all_members* is True, document all members, else those given by
+ *self.options.members*.
+ """
+ has_members = props.obj_type == 'module' or (
+ props.obj_type in {'class', 'exception'} and not props.doc_as_attr # type: ignore[attr-defined]
+ )
+ if not has_members:
+ return
+
+ want_all = bool(all_members or options.inherited_members or options.members is ALL)
+ member_documenters = _gather_members(
+ want_all=want_all,
+ indent=indent,
+ analyzer_order=analyzer_order,
+ attr_docs=attr_docs,
+ config=config,
+ current_document=current_document,
+ events=events,
+ get_attr=get_attr,
+ options=options,
+ parent_modname=real_modname,
+ props=props,
+ ref_context=ref_context,
+ reread_always=reread_always,
+ )
+
+ # for implicit module members, check __module__ to avoid
+ # documenting imported objects
+ members_check_module = bool(
+ props.obj_type == 'module'
+ and want_all
+ and (options.ignore_module_all or props.all is None) # type: ignore[attr-defined]
+ )
+ for member_props, is_attr, member_indent in member_documenters:
+ assert member_props.module_name
+ # Note that those two methods above do not emit events, so
+ # whatever objects we deduced should not have changed.
+ _generate_directives(
+ more_content=None,
+ parent_modname=real_modname,
+ check_module=members_check_module and not is_attr,
+ all_members=True,
+ config=config,
+ current_document=current_document,
+ events=events,
+ get_attr=get_attr,
+ indent=member_indent,
+ options=options,
+ props=member_props,
+ record_dependencies=record_dependencies,
+ ref_context=ref_context,
+ reread_always=reread_always,
+ result=result,
+ )
+
+
+def _body_alias_lines(
+ *, props: _ItemProperties, render_mode: _RestifyMode, short_literals: bool
+) -> Iterator[str]:
+ """Add content from docstrings, attribute documentation and user."""
+ if props.obj_type in {'data', 'attribute'}:
+ from sphinx.ext.autodoc._property_types import _AssignStatementProperties
+
+ assert isinstance(props, _AssignStatementProperties)
+
+ # Support for documenting GenericAliases
+ if props._obj_is_generic_alias:
+ alias = restify(props._obj, mode=render_mode)
+ yield _('alias of %s') % alias
+ yield ''
+ return
+ return
+
+ if props.obj_type in {'class', 'exception'}:
+ from sphinx.ext.autodoc._property_types import _ClassDefProperties
+
+ assert isinstance(props, _ClassDefProperties)
+
+ obj = props._obj
+
+ if props._obj_is_new_type:
+ supertype = restify(obj.__supertype__, mode=render_mode)
+ yield _('alias of %s') % supertype
+ yield ''
+ return
+
+ if props._obj_is_typevar:
+ attrs = [
+ repr(obj.__name__),
+ *(
+ stringify_annotation(
+ constraint, render_mode, short_literals=short_literals
+ )
+ for constraint in obj.__constraints__
+ ),
+ ]
+ if obj.__bound__:
+ attrs.append(rf'bound=\ {restify(obj.__bound__, mode=render_mode)}')
+ if obj.__covariant__:
+ attrs.append('covariant=True')
+ if obj.__contravariant__:
+ attrs.append('contravariant=True')
+
+ alias = f'TypeVar({", ".join(attrs)})'
+ yield _('alias of %s') % alias
+ yield ''
+ return
+
+ if props.doc_as_attr:
+ try:
+ analyzer = ModuleAnalyzer.for_module(props.module_name)
+ analyzer.analyze()
+ key = ('', props.dotted_parts)
+ class_var_doc_comment = key in analyzer.attr_docs
+ except PycodeError:
+ class_var_doc_comment = False
+
+ if class_var_doc_comment:
+ return
+ alias = restify(obj, mode=render_mode)
+ yield _('alias of %s') % alias
+ return
+
+ return
+
+ return
+
+
+def _docstring_source_name(*, props: _ItemProperties, source: str) -> str:
+ obj_module = inspect.safe_getattr(props._obj, '__module__', None)
+ obj_qualname = inspect.safe_getattr(props._obj, '__qualname__', None)
+ if obj_module and obj_qualname:
+ # Get the correct location of docstring from props._obj
+ # to support inherited methods
+ fullname = f'{obj_module}.{obj_qualname}'
+ else:
+ fullname = props.full_name
+
+ if source:
+ return f'{source}:docstring of {fullname}'
+ return f'docstring of {fullname}'
diff --git a/sphinx/ext/autodoc/_importer.py b/sphinx/ext/autodoc/_importer.py
new file mode 100644
index 00000000000..8b7d49c76cb
--- /dev/null
+++ b/sphinx/ext/autodoc/_importer.py
@@ -0,0 +1,431 @@
+"""Importer utilities for autodoc"""
+
+from __future__ import annotations
+
+import contextlib
+import importlib
+import os
+import sys
+import traceback
+import typing
+from importlib.abc import FileLoader
+from importlib.machinery import EXTENSION_SUFFIXES
+from importlib.util import decode_source, find_spec, module_from_spec, spec_from_loader
+from pathlib import Path
+from typing import TYPE_CHECKING
+
+from sphinx.errors import PycodeError
+from sphinx.ext.autodoc._sentinels import RUNTIME_INSTANCE_ATTRIBUTE, UNINITIALIZED_ATTR
+from sphinx.ext.autodoc.mock import ismock, mock, undecorate
+from sphinx.pycode import ModuleAnalyzer
+from sphinx.util import inspect, logging
+from sphinx.util.inspect import isclass, safe_getattr
+from sphinx.util.typing import get_type_hints
+
+if TYPE_CHECKING:
+ from collections.abc import Mapping, Sequence
+ from importlib.machinery import ModuleSpec
+ from types import ModuleType
+ from typing import Any, Protocol
+
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+
+ class _AttrGetter(Protocol):
+ def __call__(self, obj: Any, name: str, default: Any = ..., /) -> Any: ...
+
+
+_NATIVE_SUFFIXES: frozenset[str] = frozenset({'.pyx', *EXTENSION_SUFFIXES})
+logger = logging.getLogger(__name__)
+
+
+class _ImportedObject:
+ #: module containing the object to document
+ module: ModuleType | None
+
+ #: parent/owner of the object to document
+ parent: Any
+
+ #: name of the object to document
+ object_name: str
+
+ #: object to document
+ obj: Any
+
+ def __init__(
+ self,
+ *,
+ module: ModuleType | None = None,
+ parent: Any,
+ object_name: str = '',
+ obj: Any,
+ ) -> None:
+ self.module = module
+ self.parent = parent
+ self.object_name = object_name
+ self.obj = obj
+
+ def __repr__(self) -> str:
+ return f'<{self.__class__.__name__} {self.__dict__}>'
+
+
+def _import_object(
+ *,
+ get_attr: _AttrGetter = safe_getattr,
+ mock_imports: Sequence[str],
+ module_name: str,
+ obj_path: Sequence[str],
+ obj_type: _AutodocObjType,
+ type_aliases: Mapping[str, str] | None,
+) -> _ImportedObject | None:
+ """Import the module and get the object to document."""
+ try:
+ with mock(mock_imports):
+ im = _import_from_module_and_path(
+ module_name=module_name, obj_path=obj_path, get_attr=get_attr
+ )
+ except ImportError as exc:
+ if obj_type == 'data':
+ im_ = _import_data_declaration(
+ module_name=module_name,
+ obj_path=obj_path,
+ mock_imports=mock_imports,
+ type_aliases=type_aliases,
+ )
+ if im_ is not None:
+ return im_
+ elif obj_type == 'attribute':
+ im_ = _import_attribute_declaration(
+ module_name=module_name,
+ obj_path=obj_path,
+ mock_imports=mock_imports,
+ type_aliases=type_aliases,
+ get_attr=get_attr,
+ )
+ if im_ is not None:
+ return im_
+ logger.warning(exc.args[0], type='autodoc', subtype='import_object')
+ return None
+
+ if ismock(im.obj):
+ im.obj = undecorate(im.obj)
+ return im
+
+
+def _import_from_module_and_path(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ get_attr: _AttrGetter = safe_getattr,
+) -> _ImportedObject:
+ obj_path = list(obj_path)
+ if obj_path:
+ logger.debug('[autodoc] from %s import %s', module_name, '.'.join(obj_path))
+ else:
+ logger.debug('[autodoc] import %s', module_name)
+
+ module = None
+ exc_on_importing = None
+ try:
+ while module is None:
+ try:
+ module = _import_module(module_name, try_reload=True)
+ logger.debug('[autodoc] import %s => %r', module_name, module)
+ except ImportError as exc:
+ logger.debug('[autodoc] import %s => failed', module_name)
+ exc_on_importing = exc
+ if '.' not in module_name:
+ raise
+
+ # retry with parent module
+ module_name, _, name = module_name.rpartition('.')
+ obj_path.insert(0, name)
+
+ obj = module
+ parent = None
+ object_name = ''
+ for attr_name in obj_path:
+ parent = obj
+ logger.debug('[autodoc] getattr(_, %r)', attr_name)
+ mangled_name = _mangle_name(obj, attr_name)
+ obj = get_attr(obj, mangled_name)
+
+ try:
+ logger.debug('[autodoc] => %r', obj)
+ except TypeError:
+ # fallback of failure on logging for broken object
+ # See: https://github.com/sphinx-doc/sphinx/issues/9095
+ logger.debug('[autodoc] => %r', (obj,))
+
+ object_name = attr_name
+ return _ImportedObject(
+ module=module,
+ parent=parent,
+ object_name=object_name,
+ obj=obj,
+ )
+ except (AttributeError, ImportError) as exc:
+ if isinstance(exc, AttributeError) and exc_on_importing:
+ # restore ImportError
+ exc = exc_on_importing
+
+ if obj_path:
+ dotted_objpath = '.'.join(obj_path)
+ err_parts = [
+ f'autodoc: failed to import {dotted_objpath!r} '
+ f'from module {module_name!r}'
+ ]
+ else:
+ err_parts = [f'autodoc: failed to import {module_name!r}']
+
+ if isinstance(exc, ImportError):
+ # _import_module() raises ImportError having real exception obj and
+ # traceback
+ real_exc = exc.args[0]
+ traceback_msg = traceback.format_exception(exc)
+ if isinstance(real_exc, SystemExit):
+ err_parts.append(
+ 'the module executes module level statement '
+ 'and it might call sys.exit().'
+ )
+ elif isinstance(real_exc, ImportError) and real_exc.args:
+ err_parts.append(
+ f'the following exception was raised:\n{real_exc.args[0]}'
+ )
+ else:
+ err_parts.append(
+ f'the following exception was raised:\n{traceback_msg}'
+ )
+ else:
+ err_parts.append(
+ f'the following exception was raised:\n{traceback.format_exc()}'
+ )
+
+ errmsg = '; '.join(err_parts)
+ logger.debug(errmsg)
+ raise ImportError(errmsg) from exc
+
+
+def _import_module(modname: str, try_reload: bool = False) -> Any:
+ if modname in sys.modules:
+ return sys.modules[modname]
+
+ skip_pyi = bool(os.getenv('SPHINX_AUTODOC_IGNORE_NATIVE_MODULE_TYPE_STUBS', ''))
+ original_module_names = frozenset(sys.modules)
+ try:
+ spec = find_spec(modname)
+ if spec is None:
+ msg = f'No module named {modname!r}'
+ raise ModuleNotFoundError(msg, name=modname) # NoQA: TRY301
+ spec, pyi_path = _find_type_stub_spec(spec, modname)
+ if skip_pyi or pyi_path is None:
+ module = importlib.import_module(modname)
+ else:
+ if spec.loader is None:
+ msg = 'missing loader'
+ raise ImportError(msg, name=spec.name) # NoQA: TRY301
+ sys.modules[modname] = module = module_from_spec(spec)
+ spec.loader.exec_module(module)
+ except ImportError:
+ raise
+ except BaseException as exc:
+ # Importing modules may cause any side effects, including
+ # SystemExit, so we need to catch all errors.
+ raise ImportError(exc, traceback.format_exc()) from exc
+ if try_reload and os.environ.get('SPHINX_AUTODOC_RELOAD_MODULES'):
+ new_modules = [m for m in sys.modules if m not in original_module_names]
+ # Try reloading modules with ``typing.TYPE_CHECKING == True``.
+ try:
+ typing.TYPE_CHECKING = True # type: ignore[misc]
+ # Ignore failures; we've already successfully loaded these modules
+ with contextlib.suppress(ImportError, KeyError):
+ for m in new_modules:
+ mod_path = getattr(sys.modules[m], '__file__', '')
+ if mod_path and mod_path.endswith('.pyi'):
+ continue
+ _reload_module(sys.modules[m])
+ finally:
+ typing.TYPE_CHECKING = False # type: ignore[misc]
+ module = sys.modules[modname]
+ return module
+
+
+def _find_type_stub_spec(
+ spec: ModuleSpec, modname: str
+) -> tuple[ModuleSpec, Path | None]:
+ """Try finding a spec for a PEP 561 '.pyi' stub file for native modules."""
+ if spec.origin is None:
+ return spec, None
+
+ for suffix in _NATIVE_SUFFIXES:
+ if not spec.origin.endswith(suffix):
+ continue
+ pyi_path = Path(spec.origin.removesuffix(suffix) + '.pyi')
+ if not pyi_path.is_file():
+ continue
+ pyi_loader = _StubFileLoader(modname, path=str(pyi_path))
+ pyi_spec = spec_from_loader(modname, loader=pyi_loader)
+ if pyi_spec is not None:
+ return pyi_spec, pyi_path
+ return spec, None
+
+
+class _StubFileLoader(FileLoader):
+ """Load modules from ``.pyi`` stub files."""
+
+ def get_source(self, fullname: str) -> str:
+ path = self.get_filename(fullname)
+ for suffix in _NATIVE_SUFFIXES:
+ if not path.endswith(suffix):
+ continue
+ path = path.removesuffix(suffix) + '.pyi'
+ try:
+ source_bytes = self.get_data(path)
+ except OSError as exc:
+ raise ImportError from exc
+ return decode_source(source_bytes)
+
+
+def _reload_module(module: ModuleType) -> Any:
+ """Call importlib.reload(module), convert exceptions to ImportError"""
+ try:
+ return importlib.reload(module)
+ except BaseException as exc:
+ # Importing modules may cause any side effects, including
+ # SystemExit, so we need to catch all errors.
+ raise ImportError(exc, traceback.format_exc()) from exc
+
+
+def _mangle_name(subject: Any, name: str) -> str:
+ """Mangle the given name."""
+ try:
+ if isclass(subject) and name.startswith('__') and not name.endswith('__'):
+ return f'_{subject.__name__}{name}'
+ except AttributeError:
+ pass
+
+ return name
+
+
+def _import_data_declaration(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ mock_imports: Sequence[str],
+ type_aliases: Mapping[str, str] | None,
+) -> _ImportedObject | None:
+ # annotation only instance variable (PEP-526)
+ try:
+ with mock(mock_imports):
+ parent = _import_module(module_name)
+ annotations = get_type_hints(parent, None, type_aliases, include_extras=True)
+ if obj_path[-1] in annotations:
+ im = _ImportedObject(
+ parent=parent,
+ obj=UNINITIALIZED_ATTR,
+ )
+ return im
+ except ImportError:
+ pass
+ return None
+
+
+def _import_attribute_declaration(
+ *,
+ module_name: str,
+ obj_path: Sequence[str],
+ mock_imports: Sequence[str],
+ type_aliases: Mapping[str, str] | None,
+ get_attr: _AttrGetter = safe_getattr,
+) -> _ImportedObject | None:
+ # Support runtime & uninitialized instance attributes.
+ #
+ # The former are defined in __init__() methods with doc-comments.
+ # The latter are PEP-526 style annotation only annotations.
+ #
+ # class Foo:
+ # attr: int #: uninitialized attribute
+ #
+ # def __init__(self):
+ # self.attr = None #: runtime attribute
+ try:
+ with mock(mock_imports):
+ ret = _import_from_module_and_path(
+ module_name=module_name, obj_path=obj_path[:-1], get_attr=get_attr
+ )
+ parent = ret.obj
+ if _is_runtime_instance_attribute(parent=parent, obj_path=obj_path):
+ im = _ImportedObject(
+ parent=parent,
+ obj=RUNTIME_INSTANCE_ATTRIBUTE,
+ )
+ return im
+ elif _is_uninitialized_instance_attribute(
+ parent=parent, obj_path=obj_path, type_aliases=type_aliases
+ ):
+ im = _ImportedObject(
+ parent=parent,
+ obj=UNINITIALIZED_ATTR,
+ )
+ return im
+ except ImportError:
+ pass
+ return None
+
+
+def _is_runtime_instance_attribute(*, parent: Any, obj_path: Sequence[str]) -> bool:
+ """Check the subject is an attribute defined in __init__()."""
+ # An instance variable defined in __init__().
+ if _get_attribute_comment(parent=parent, obj_path=obj_path, attrname=obj_path[-1]):
+ return True
+ return _is_runtime_instance_attribute_not_commented(
+ parent=parent, obj_path=obj_path
+ )
+
+
+def _is_runtime_instance_attribute_not_commented(
+ *, parent: Any, obj_path: Sequence[str]
+) -> bool:
+ """Check the subject is an attribute defined in __init__() without comment."""
+ for cls in inspect.getmro(parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ if qualname and obj_path:
+ key = f'{qualname}.{obj_path[-1]}'
+ if key in analyzer.tagorder:
+ return True
+ except (AttributeError, PycodeError):
+ pass
+
+ return False
+
+
+def _get_attribute_comment(
+ parent: Any, obj_path: Sequence[str], attrname: str
+) -> list[str] | None:
+ for cls in inspect.getmro(parent):
+ try:
+ module = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+
+ analyzer = ModuleAnalyzer.for_module(module)
+ analyzer.analyze()
+ if qualname and obj_path:
+ key = (qualname, attrname)
+ if key in analyzer.attr_docs:
+ return list(analyzer.attr_docs[key])
+ except (AttributeError, PycodeError):
+ pass
+
+ return None
+
+
+def _is_uninitialized_instance_attribute(
+ *, parent: Any, obj_path: Sequence[str], type_aliases: Mapping[str, str] | None
+) -> bool:
+ """Check the subject is an annotation only attribute."""
+ annotations = get_type_hints(parent, None, type_aliases, include_extras=True)
+ return obj_path[-1] in annotations
diff --git a/sphinx/ext/autodoc/_loader.py b/sphinx/ext/autodoc/_loader.py
new file mode 100644
index 00000000000..246293855fb
--- /dev/null
+++ b/sphinx/ext/autodoc/_loader.py
@@ -0,0 +1,502 @@
+"""Object loader for autodoc"""
+
+from __future__ import annotations
+
+import re
+from inspect import Parameter
+from pathlib import Path
+from types import SimpleNamespace
+from typing import TYPE_CHECKING, NewType, TypeVar
+
+from sphinx.ext.autodoc._docstrings import (
+ _docstring_lines_for_props,
+ _get_docstring_lines,
+)
+from sphinx.ext.autodoc._importer import _import_object
+from sphinx.ext.autodoc._names import _parse_name
+from sphinx.ext.autodoc._property_types import (
+ _AssignStatementProperties,
+ _ClassDefProperties,
+ _FunctionDefProperties,
+ _ItemProperties,
+ _ModuleProperties,
+ _TypeStatementProperties,
+)
+from sphinx.ext.autodoc._sentinels import (
+ RUNTIME_INSTANCE_ATTRIBUTE,
+ SLOTS_ATTR,
+ UNINITIALIZED_ATTR,
+)
+from sphinx.ext.autodoc._shared import _get_render_mode
+from sphinx.ext.autodoc._signatures import _format_signatures
+from sphinx.ext.autodoc._type_comments import (
+ _ensure_annotations_from_type_comments,
+ _update_annotations_using_type_comments,
+)
+from sphinx.ext.autodoc.mock import ismock
+from sphinx.locale import __
+from sphinx.util import inspect, logging
+from sphinx.util.inspect import safe_getattr
+from sphinx.util.typing import get_type_hints, restify, stringify_annotation
+
+if TYPE_CHECKING:
+ from collections.abc import Mapping, MutableSet, Sequence
+ from typing import Any
+
+ from sphinx.environment import _CurrentDocument
+ from sphinx.events import EventManager
+ from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+ from sphinx.ext.autodoc._importer import _ImportedObject
+ from sphinx.ext.autodoc._property_types import _AutodocFuncProperty, _AutodocObjType
+ from sphinx.ext.autodoc._shared import _AttrGetter, _AutodocConfig
+
+logger = logging.getLogger(__name__)
+
+_hide_value_re = re.compile(r'^:meta \s*hide-value:( +|$)')
+
+
+def _load_object_by_name(
+ *,
+ name: str,
+ objtype: _AutodocObjType,
+ current_document: _CurrentDocument,
+ config: _AutodocConfig,
+ events: EventManager,
+ get_attr: _AttrGetter,
+ options: _AutoDocumenterOptions,
+ parent_modname: str | None = None,
+ ref_context: Mapping[str, str | None],
+ reread_always: MutableSet[str],
+) -> _ItemProperties | None:
+ """Import and load the object given by *name*."""
+ parsed = _parse_name(
+ name=name,
+ objtype=objtype,
+ current_document=current_document,
+ ref_context=ref_context,
+ )
+ if parsed is None:
+ return None
+ module_name, parts, args, retann = parsed
+
+ # Import the module and get the object to document
+ im = _import_object(
+ module_name=module_name,
+ obj_path=parts,
+ mock_imports=config.autodoc_mock_imports,
+ get_attr=get_attr,
+ obj_type=objtype,
+ type_aliases=config.autodoc_type_aliases,
+ )
+ if im is None:
+ # See BuildEnvironment.note_reread()
+ reread_always.add(current_document.docname)
+ return None
+
+ # Assemble object properties from the imported object.
+ parent = im.parent
+ props = _make_props_from_imported_object(
+ im,
+ config=config,
+ events=events,
+ get_attr=get_attr,
+ module_name=module_name,
+ objtype=objtype,
+ parts=parts,
+ )
+ if props is None:
+ return None
+
+ if options.class_doc_from is not None:
+ class_doc_from = options.class_doc_from
+ else:
+ class_doc_from = config.autoclass_content
+
+ docstrings = _get_docstring_lines(
+ props,
+ class_doc_from=class_doc_from,
+ get_attr=get_attr,
+ inherit_docstrings=config.autodoc_inherit_docstrings,
+ parent=parent,
+ tab_width=options._tab_width,
+ )
+ if docstrings:
+ for docstring_lines in docstrings:
+ for line in docstring_lines:
+ if _hide_value_re.match(line):
+ props._docstrings_has_hide_value = True
+ break
+
+ # format the object's signature, if any
+ try:
+ signatures = _format_signatures(
+ args=args,
+ retann=retann,
+ autodoc_annotations=current_document.autodoc_annotations,
+ config=config,
+ docstrings=docstrings,
+ events=events,
+ get_attr=get_attr,
+ parent=parent,
+ options=options,
+ props=props,
+ )
+ except Exception as exc:
+ msg = __('error while formatting signature for %s: %s')
+ logger.warning(msg, props.full_name, exc, type='autodoc')
+ return None
+ props.signatures = tuple(
+ f'{args} -> {retann}' if retann else str(args) for args, retann in signatures
+ )
+
+ props.docstring_lines = _docstring_lines_for_props(
+ docstrings,
+ props=props,
+ parent_modname=parent_modname,
+ events=events,
+ options=options,
+ )
+
+ return props
+
+
+def _make_props_from_imported_object(
+ im: _ImportedObject,
+ *,
+ config: _AutodocConfig,
+ events: EventManager,
+ get_attr: _AttrGetter,
+ module_name: str,
+ objtype: _AutodocObjType,
+ parts: tuple[str, ...],
+) -> _ItemProperties | None:
+ parent = im.parent
+ object_name = im.object_name
+ obj = im.obj
+ obj_properties: set[_AutodocFuncProperty] = set()
+ render_mode = _get_render_mode(config.autodoc_typehints_format)
+
+ if objtype == 'module':
+ try:
+ mod_origin = im.module.__spec__.origin # type: ignore[union-attr]
+ except AttributeError:
+ file_path = None
+ else:
+ file_path = Path(mod_origin) if mod_origin is not None else None
+
+ mod_all = safe_getattr(obj, '__all__', None)
+ if isinstance(mod_all, (list, tuple)) and all(
+ isinstance(e, str) for e in mod_all
+ ):
+ mod_all = tuple(mod_all)
+ elif mod_all is not None:
+ # Invalid __all__ found.
+ msg = __('Ignoring invalid __all__ in module %s: %r')
+ logger.warning(msg, module_name, mod_all, type='autodoc')
+ mod_all = None
+
+ return _ModuleProperties(
+ obj_type=objtype,
+ module_name=module_name,
+ docstring_lines=(),
+ file_path=file_path,
+ all=mod_all,
+ _obj=obj,
+ _obj___module__=obj.__name__,
+ )
+
+ if objtype in {'class', 'exception'}:
+ if isinstance(obj, (NewType, TypeVar)):
+ obj_module_name = getattr(obj, '__module__', module_name)
+ if obj_module_name != module_name and module_name.startswith(
+ obj_module_name
+ ):
+ bases = module_name[len(obj_module_name) :].strip('.').split('.')
+ parts = tuple(bases) + parts
+ module_name = obj_module_name
+
+ if orig_bases := inspect.getorigbases(obj):
+ # A subclass of generic types
+ # refs: PEP-560
+ obj_bases = list(orig_bases)
+ elif hasattr(obj, '__bases__') and obj.__bases__:
+ # A normal class
+ obj_bases = list(obj.__bases__)
+ else:
+ obj_bases = []
+ full_name = '.'.join((module_name, *parts))
+ events.emit(
+ 'autodoc-process-bases',
+ full_name,
+ obj,
+ SimpleNamespace(),
+ obj_bases,
+ )
+ base_classes = tuple(restify(cls, mode=render_mode) for cls in obj_bases)
+
+ return _ClassDefProperties(
+ obj_type=objtype, # type: ignore[arg-type]
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ bases=getattr(obj, '__bases__', None),
+ _obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
+ _obj___name__=getattr(obj, '__name__', None),
+ _obj___qualname__=getattr(obj, '__qualname__', None),
+ _obj_bases=base_classes,
+ _obj_is_new_type=isinstance(obj, NewType),
+ _obj_is_typevar=isinstance(obj, TypeVar),
+ )
+
+ if objtype in {'function', 'decorator'}:
+ if inspect.isstaticmethod(obj, cls=parent, name=object_name):
+ obj_properties.add('staticmethod')
+ if inspect.isclassmethod(obj):
+ obj_properties.add('classmethod')
+ if inspect.iscoroutinefunction(obj) or inspect.isasyncgenfunction(obj):
+ obj_properties.add('async')
+
+ return _FunctionDefProperties(
+ obj_type=objtype, # type: ignore[arg-type]
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ properties=frozenset(obj_properties),
+ _obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
+ _obj___name__=getattr(obj, '__name__', None),
+ _obj___qualname__=getattr(obj, '__qualname__', None),
+ )
+
+ if objtype == 'method':
+ # to distinguish classmethod/staticmethod
+ obj_ = parent.__dict__.get(object_name, obj)
+ if inspect.isstaticmethod(obj_, cls=parent, name=object_name):
+ obj_properties.add('staticmethod')
+ elif (
+ inspect.is_classmethod_like(obj_)
+ or inspect.is_singledispatch_method(obj_)
+ and inspect.is_classmethod_like(obj_.func)
+ ):
+ obj_properties.add('classmethod')
+ if inspect.isabstractmethod(obj_):
+ obj_properties.add('abstractmethod')
+ if inspect.iscoroutinefunction(obj_) or inspect.isasyncgenfunction(obj_):
+ obj_properties.add('async')
+
+ return _FunctionDefProperties(
+ obj_type=objtype,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ properties=frozenset(obj_properties),
+ _obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
+ _obj___name__=getattr(obj, '__name__', None),
+ _obj___qualname__=getattr(obj, '__qualname__', None),
+ )
+
+ if objtype == 'property':
+ if not inspect.isproperty(obj):
+ # Support for class properties. Note: these only work on Python 3.9.
+ __dict__ = safe_getattr(parent, '__dict__', {})
+ obj = __dict__.get(parts[-1])
+ if isinstance(obj, classmethod) and inspect.isproperty(obj.__func__):
+ obj = obj.__func__
+ obj_properties.add('classmethod')
+ else:
+ return None
+ if inspect.isabstractmethod(obj):
+ obj_properties.add('abstractmethod')
+
+ # get property return type annotation
+ obj_property_type_annotation = None
+ if safe_getattr(obj, 'fget', None): # property
+ func = obj.fget # type: ignore[union-attr]
+ elif safe_getattr(obj, 'func', None): # cached_property
+ func = obj.func # type: ignore[union-attr]
+ else:
+ func = None
+ if func is not None:
+ # update the annotations of the property getter
+ if config.autodoc_use_type_comments:
+ _update_annotations_using_type_comments(func, False)
+
+ try:
+ signature = inspect.signature(
+ func, type_aliases=config.autodoc_type_aliases
+ )
+ except TypeError as exc:
+ full_name = '.'.join((module_name, *parts))
+ logger.warning(
+ __('Failed to get a function signature for %s: %s'),
+ full_name,
+ exc,
+ )
+ pass
+ except ValueError:
+ pass
+ else:
+ if signature.return_annotation is not Parameter.empty:
+ short_literals = config.python_display_short_literal_types
+ obj_property_type_annotation = stringify_annotation(
+ signature.return_annotation,
+ render_mode,
+ short_literals=short_literals,
+ )
+
+ return _FunctionDefProperties(
+ obj_type=objtype,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ properties=frozenset(obj_properties),
+ _obj=obj,
+ _obj___module__=get_attr(parent or obj, '__module__', None) or module_name,
+ _obj___name__=getattr(parent or obj, '__name__', None),
+ _obj___qualname__=getattr(parent or obj, '__qualname__', None),
+ _obj_property_type_annotation=obj_property_type_annotation,
+ )
+
+ if objtype == 'data':
+ # Update __annotations__ to support type_comment and so on
+ _ensure_annotations_from_type_comments(parent)
+
+ # obtain annotation
+ annotations = get_type_hints(
+ parent,
+ None,
+ config.autodoc_type_aliases,
+ include_extras=True,
+ )
+ if parts[-1] in annotations:
+ short_literals = config.python_display_short_literal_types
+ type_annotation = stringify_annotation(
+ annotations[parts[-1]], render_mode, short_literals=short_literals
+ )
+ else:
+ type_annotation = None
+
+ if (
+ obj is RUNTIME_INSTANCE_ATTRIBUTE
+ or obj is SLOTS_ATTR
+ or obj is UNINITIALIZED_ATTR
+ ):
+ obj_sentinel = obj
+ else:
+ obj_sentinel = None
+
+ return _AssignStatementProperties(
+ obj_type=objtype,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ value=...,
+ annotation='',
+ class_var=False,
+ instance_var=False,
+ _obj=obj,
+ _obj___module__=get_attr(parent or obj, '__module__', None) or module_name,
+ _obj_is_generic_alias=inspect.isgenericalias(obj),
+ _obj_is_attribute_descriptor=inspect.isattributedescriptor(obj),
+ _obj_is_mock=ismock(obj),
+ _obj_is_sentinel=obj_sentinel,
+ _obj_repr_rst=inspect.object_description(obj),
+ _obj_type_annotation=type_annotation,
+ )
+
+ if objtype == 'attribute':
+ if _is_slots_attribute(parent=parent, obj_path=parts):
+ obj = SLOTS_ATTR
+ elif inspect.isenumattribute(obj):
+ obj = obj.value
+ if parent:
+ # Update __annotations__ to support type_comment and so on
+ _ensure_annotations_from_type_comments(parent)
+
+ # obtain annotation
+ annotations = get_type_hints(
+ parent,
+ None,
+ config.autodoc_type_aliases,
+ include_extras=True,
+ )
+ if parts[-1] in annotations:
+ short_literals = config.python_display_short_literal_types
+ type_annotation = stringify_annotation(
+ annotations[parts[-1]], render_mode, short_literals=short_literals
+ )
+ else:
+ type_annotation = None
+
+ if (
+ obj is RUNTIME_INSTANCE_ATTRIBUTE
+ or obj is SLOTS_ATTR
+ or obj is UNINITIALIZED_ATTR
+ ):
+ obj_sentinel = obj
+ else:
+ obj_sentinel = None
+
+ return _AssignStatementProperties(
+ obj_type=objtype,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ value=...,
+ annotation='',
+ class_var=False,
+ instance_var=False,
+ _obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
+ _obj_is_generic_alias=inspect.isgenericalias(obj),
+ _obj_is_attribute_descriptor=inspect.isattributedescriptor(obj),
+ _obj_is_mock=ismock(obj),
+ _obj_is_sentinel=obj_sentinel,
+ _obj_repr_rst=inspect.object_description(obj),
+ _obj_type_annotation=type_annotation,
+ )
+
+ if objtype == 'type':
+ obj_module_name = getattr(obj, '__module__', module_name)
+ if obj_module_name != module_name and module_name.startswith(obj_module_name):
+ bases = module_name[len(obj_module_name) :].strip('.').split('.')
+ parts = tuple(bases) + parts
+ module_name = obj_module_name
+
+ short_literals = config.python_display_short_literal_types
+ ann = stringify_annotation(
+ obj.__value__, render_mode, short_literals=short_literals
+ )
+ return _TypeStatementProperties(
+ obj_type=objtype,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ _obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
+ _obj___name__=getattr(obj, '__name__', None),
+ _obj___qualname__=getattr(obj, '__qualname__', None),
+ _obj___value__=ann,
+ )
+
+ return _ItemProperties(
+ obj_type=objtype,
+ module_name=module_name,
+ parts=parts,
+ docstring_lines=(),
+ _obj=obj,
+ _obj___module__=get_attr(obj, '__module__', None),
+ )
+
+
+def _is_slots_attribute(*, parent: Any, obj_path: Sequence[str]) -> bool:
+ """Check the subject is an attribute in __slots__."""
+ try:
+ if parent___slots__ := inspect.getslots(parent):
+ return obj_path[-1] in parent___slots__
+ else:
+ return False
+ except (ValueError, TypeError):
+ return False
diff --git a/sphinx/ext/autodoc/_member_finder.py b/sphinx/ext/autodoc/_member_finder.py
new file mode 100644
index 00000000000..8130ee5b7c0
--- /dev/null
+++ b/sphinx/ext/autodoc/_member_finder.py
@@ -0,0 +1,871 @@
+from __future__ import annotations
+
+import operator
+import re
+from enum import Enum
+from typing import TYPE_CHECKING, Literal, NewType, TypeVar
+
+from sphinx.errors import PycodeError
+from sphinx.events import EventManager
+from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+from sphinx.ext.autodoc._loader import _load_object_by_name
+from sphinx.ext.autodoc._property_types import _ClassDefProperties, _ModuleProperties
+from sphinx.ext.autodoc._sentinels import ALL, INSTANCE_ATTR, SLOTS_ATTR
+from sphinx.ext.autodoc.mock import ismock, undecorate
+from sphinx.locale import __
+from sphinx.pycode import ModuleAnalyzer
+from sphinx.util import inspect, logging
+from sphinx.util.docstrings import separate_metadata
+from sphinx.util.inspect import (
+ getannotations,
+ getdoc,
+ getmro,
+ getslots,
+ isclass,
+ isenumclass,
+ safe_getattr,
+ unwrap_all,
+)
+from sphinx.util.typing import AnyTypeAliasType
+
+if TYPE_CHECKING:
+ from collections.abc import Iterable, Iterator, Mapping, MutableSet, Sequence, Set
+ from typing import Any, Literal
+
+ from sphinx.environment import _CurrentDocument
+ from sphinx.events import EventManager
+ from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+ from sphinx.ext.autodoc._property_types import _AutodocObjType, _ItemProperties
+ from sphinx.ext.autodoc._sentinels import (
+ ALL_T,
+ EMPTY_T,
+ INSTANCE_ATTR_T,
+ SLOTS_ATTR_T,
+ )
+ from sphinx.ext.autodoc._shared import _AttrGetter, _AutodocConfig
+
+logger = logging.getLogger('sphinx.ext.autodoc')
+special_member_re = re.compile(r'^__\S+__$')
+
+
+class ObjectMember:
+ """A member of object.
+
+ This is used for the result of `_get_members_to_document()` to
+ represent each member of the object.
+ """
+
+ __slots__ = '__name__', 'object', 'docstring', 'class_'
+
+ __name__: str
+ object: Any
+ docstring: Sequence[str] | None
+ class_: Any
+ skipped: bool
+
+ def __init__(
+ self,
+ name: str,
+ obj: INSTANCE_ATTR_T | SLOTS_ATTR_T | Any,
+ *,
+ docstring: Sequence[str] | None = None,
+ class_: Any = None,
+ ) -> None:
+ self.__name__ = name
+ self.object = obj
+ self.docstring = docstring
+ self.class_ = class_
+
+ def __repr__(self) -> str:
+ return (
+ f'ObjectMember('
+ f'name={self.__name__!r}, '
+ f'obj={self.object!r}, '
+ f'docstring={self.docstring!r}, '
+ f'class_={self.class_!r}'
+ f')'
+ )
+
+
+def _gather_members(
+ *,
+ want_all: bool,
+ indent: str,
+ analyzer_order: dict[str, int],
+ attr_docs: dict[tuple[str, str], list[str]],
+ config: _AutodocConfig,
+ current_document: _CurrentDocument,
+ events: EventManager,
+ get_attr: _AttrGetter,
+ options: _AutoDocumenterOptions,
+ parent_modname: str,
+ props: _ItemProperties,
+ ref_context: Mapping[str, str | None],
+ reread_always: MutableSet[str],
+) -> list[tuple[_ItemProperties, bool, str]]:
+ """Generate reST for member documentation.
+
+ If *want_all* is True, document all members, else those given by
+ *self.options.members*.
+ """
+ if props.obj_type not in {'module', 'class', 'exception'}:
+ msg = 'must be implemented in subclasses'
+ raise NotImplementedError(msg)
+ assert isinstance(props, (_ModuleProperties, _ClassDefProperties))
+
+ indent += ' ' * (props.obj_type != 'module')
+
+ # set current namespace for finding members
+ current_document.autodoc_module = props.module_name
+ if props.parts:
+ current_document.autodoc_class = props.parts[0]
+
+ inherited_members = frozenset(options.inherited_members or ())
+ found_members = _get_members_to_document(
+ want_all=want_all,
+ get_attr=get_attr,
+ class_signature=config.autodoc_class_signature,
+ inherit_docstrings=config.autodoc_inherit_docstrings,
+ props=props,
+ opt_members=options.members or (),
+ inherited_members=inherited_members,
+ opt_private_members=options.private_members,
+ opt_special_members=options.special_members,
+ ignore_module_all=bool(options.ignore_module_all),
+ attr_docs=attr_docs,
+ )
+ filtered_members = _filter_members(
+ found_members,
+ want_all=want_all,
+ events=events,
+ get_attr=get_attr,
+ class_signature=config.autodoc_class_signature,
+ inherit_docstrings=config.autodoc_inherit_docstrings,
+ options=options,
+ props=props,
+ inherited_members=inherited_members,
+ exclude_members=options.exclude_members,
+ special_members=options.special_members,
+ private_members=options.private_members,
+ undoc_members=options.undoc_members,
+ attr_docs=attr_docs,
+ )
+ # document non-skipped members
+ member_documenters: list[tuple[_ItemProperties, bool, str]] = []
+ for member_name, member, is_attr in filtered_members:
+ # prefer the object type with the highest priority
+ obj_type = _best_object_type_for_member(
+ member=member,
+ member_name=member_name,
+ is_attr=is_attr,
+ parent_obj_type=props.obj_type,
+ parent_props=props,
+ )
+ if not obj_type:
+ # don't know how to document this member
+ continue
+ # give explicitly separated module name, so that members
+ # of inner classes can be documented
+ dotted_parts = '.'.join((*props.parts, member_name))
+ full_name = f'{props.module_name}::{dotted_parts}'
+
+ # We now try to import all objects before ordering them. This is to
+ # avoid possible circular imports if we were to import objects after
+ # their associated documenters have been sorted.
+ member_props = _load_object_by_name(
+ name=full_name,
+ objtype=obj_type,
+ current_document=current_document,
+ config=config,
+ events=events,
+ get_attr=get_attr,
+ options=options,
+ parent_modname=parent_modname,
+ ref_context=ref_context,
+ reread_always=reread_always,
+ )
+ if member_props is None:
+ continue
+ member_documenters.append((member_props, is_attr, indent))
+
+ member_order = options.member_order or config.autodoc_member_order
+ member_documenters = _sort_members(
+ member_documenters,
+ member_order,
+ ignore_module_all=bool(options.ignore_module_all),
+ analyzer_order=analyzer_order,
+ props=props,
+ )
+
+ # reset current objects
+ current_document.autodoc_module = ''
+ current_document.autodoc_class = ''
+
+ return member_documenters
+
+
+def _get_members_to_document(
+ *,
+ want_all: bool,
+ get_attr: _AttrGetter,
+ class_signature: Literal['mixed', 'separated'],
+ inherit_docstrings: bool,
+ props: _ModuleProperties | _ClassDefProperties,
+ opt_members: ALL_T | Sequence[str],
+ inherited_members: Set[str],
+ opt_private_members: ALL_T | Sequence[str] | None,
+ opt_special_members: ALL_T | Sequence[str] | None,
+ ignore_module_all: bool,
+ attr_docs: dict[tuple[str, str], list[str]],
+) -> list[ObjectMember]:
+ """Find out which members are documentable
+
+ If *want_all* is True, return all members. Else, only return those
+ members given by *self.options.members* (which may also be None).
+
+ Filter the given member list.
+
+ Members are skipped if
+
+ - they are private (except if given explicitly or the private-members
+ option is set)
+ - they are special methods (except if given explicitly or the
+ special-members option is set)
+ - they are undocumented (except if the undoc-members option is set)
+
+ The user can override the skipping decision by connecting to the
+ ``autodoc-skip-member`` event.
+ """
+ wanted_members: ALL_T | Set[str]
+ if want_all:
+ if (
+ props.obj_type == 'module'
+ and not ignore_module_all
+ and props.all is not None
+ ):
+ wanted_members = frozenset(props.all)
+ else:
+ wanted_members = ALL
+ else:
+ # specific members given
+ assert opt_members is not ALL
+
+ # Merge :private-members: and :special-members: into :members:
+ combined_members = set(opt_members)
+ if opt_private_members is not None and opt_private_members is not ALL:
+ combined_members.update(opt_private_members)
+ if opt_special_members is not None and opt_special_members is not ALL:
+ combined_members.update(opt_special_members)
+ if class_signature == 'separated' and props.obj_type in {'class', 'exception'}:
+ combined_members |= {'__new__', '__init__'} # show __init__() method
+ wanted_members = frozenset(combined_members)
+
+ object_members_map: dict[str, ObjectMember] = {}
+ if props.obj_type == 'module':
+ for name in dir(props._obj):
+ try:
+ value = safe_getattr(props._obj, name, None)
+ if ismock(value):
+ value = undecorate(value)
+ if name in wanted_members:
+ object_members_map[name] = ObjectMember(
+ name, value, docstring=attr_docs.get(('', name), [])
+ )
+ except AttributeError:
+ continue
+
+ # annotation only member (e.g. attr: int)
+ for name in inspect.getannotations(props._obj):
+ if name not in object_members_map and name in wanted_members:
+ object_members_map[name] = ObjectMember(
+ name, INSTANCE_ATTR, docstring=attr_docs.get(('', name), [])
+ )
+
+ obj_members_seq = list(object_members_map.values())
+ elif props.obj_type in {'class', 'exception'}:
+ # the members directly defined in the class
+ obj_dict = get_attr(props._obj, '__dict__', {})
+
+ # enum members
+ if isenumclass(props._obj):
+ for name, defining_class, value in _filter_enum_dict(
+ props._obj, get_attr, obj_dict
+ ):
+ # the order of occurrence of *name* matches obj's MRO,
+ # allowing inherited attributes to be shadowed correctly
+ if unmangled := unmangle(defining_class, name):
+ if unmangled in wanted_members:
+ object_members_map[unmangled] = ObjectMember(
+ unmangled, value, class_=defining_class
+ )
+
+ # members in __slots__
+ try:
+ subject___slots__ = getslots(props._obj)
+ if subject___slots__:
+ for name, subject_docstring in subject___slots__.items():
+ if name not in wanted_members:
+ continue
+ if isinstance(subject_docstring, str):
+ subject_doclines = subject_docstring.splitlines()
+ else:
+ subject_doclines = None
+ object_members_map[name] = ObjectMember(
+ name,
+ SLOTS_ATTR,
+ class_=props._obj,
+ docstring=subject_doclines,
+ )
+ except (TypeError, ValueError):
+ pass
+
+ # other members
+ for name in dir(props._obj):
+ try:
+ value = get_attr(props._obj, name)
+ if ismock(value):
+ value = undecorate(value)
+
+ unmangled = unmangle(props._obj, name)
+ if (
+ unmangled
+ and unmangled not in object_members_map
+ and unmangled in wanted_members
+ ):
+ if name in obj_dict:
+ object_members_map[unmangled] = ObjectMember(
+ unmangled, value, class_=props._obj
+ )
+ else:
+ object_members_map[unmangled] = ObjectMember(unmangled, value)
+ except AttributeError:
+ continue
+
+ try:
+ for cls in getmro(props._obj):
+ try:
+ modname = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+ except AttributeError:
+ qualname = None
+ analyzer = None
+ else:
+ try:
+ analyzer = ModuleAnalyzer.for_module(modname)
+ analyzer.analyze()
+ except PycodeError:
+ analyzer = None
+
+ # annotation only member (ex. attr: int)
+ for name in getannotations(cls):
+ unmangled = unmangle(cls, name)
+ if (
+ unmangled
+ and unmangled not in object_members_map
+ and unmangled in wanted_members
+ ):
+ if analyzer and (qualname, unmangled) in analyzer.attr_docs:
+ attr_docstring = analyzer.attr_docs[qualname, unmangled]
+ else:
+ attr_docstring = None
+ object_members_map[unmangled] = ObjectMember(
+ unmangled,
+ INSTANCE_ATTR,
+ class_=cls,
+ docstring=attr_docstring,
+ )
+
+ # append or complete instance attributes (cf. self.attr1) if analyzer knows
+ if analyzer:
+ for (ns, name), attr_docstring in analyzer.attr_docs.items():
+ if ns == qualname and name not in object_members_map:
+ # otherwise unknown instance attribute
+ if name in wanted_members:
+ object_members_map[name] = ObjectMember(
+ name,
+ INSTANCE_ATTR,
+ class_=cls,
+ docstring=attr_docstring,
+ )
+ elif (
+ ns == qualname
+ and attr_docstring
+ and not object_members_map[name].docstring
+ ):
+ if cls != props._obj and not inherit_docstrings:
+ # If we are in the MRO of the class and not the class itself,
+ # and we do not want to inherit docstrings, then skip setting
+ # the docstring below
+ continue
+ # attribute is already known, because dir(props._obj)
+ # enumerates it. But it has no docstring yet
+ object_members_map[name].docstring = attr_docstring
+ except AttributeError:
+ pass
+
+ if want_all and not inherited_members:
+ obj_members_seq = [
+ m for m in object_members_map.values() if m.class_ == props._obj
+ ]
+ else:
+ obj_members_seq = list(object_members_map.values())
+ else:
+ raise ValueError
+
+ if not want_all and opt_members is not ALL:
+ for name in opt_members:
+ if name in object_members_map:
+ continue
+ msg = __(
+ 'attribute %s is listed in :members: but is missing '
+ 'as it was not found in object %r'
+ )
+ logger.warning(msg, name, props._obj, type='autodoc')
+ return obj_members_seq
+
+
+def _filter_members(
+ obj_members_seq: Iterable[ObjectMember],
+ *,
+ want_all: bool,
+ events: EventManager,
+ get_attr: _AttrGetter,
+ options: _AutoDocumenterOptions,
+ props: _ModuleProperties | _ClassDefProperties,
+ class_signature: Literal['mixed', 'separated'],
+ inherit_docstrings: bool,
+ inherited_members: Set[str],
+ exclude_members: EMPTY_T | Set[str] | None,
+ special_members: ALL_T | Sequence[str] | None,
+ private_members: ALL_T | Sequence[str] | None,
+ undoc_members: Literal[True] | None,
+ attr_docs: dict[tuple[str, str], list[str]],
+) -> Iterator[tuple[str, Any, bool]]:
+ # search for members in source code too
+ namespace = props.dotted_parts # will be empty for modules
+
+ # process members and determine which to skip
+ for obj in obj_members_seq:
+ member_name = obj.__name__
+ member_obj = obj.object
+ has_attr_doc = (namespace, member_name) in attr_docs
+ try:
+ keep = _should_keep_member(
+ member_name=member_name,
+ member_obj=member_obj,
+ member_docstring=obj.docstring,
+ member_cls=obj.class_,
+ get_attr=get_attr,
+ has_attr_doc=has_attr_doc,
+ class_signature=class_signature,
+ inherit_docstrings=inherit_docstrings,
+ inherited_members=inherited_members,
+ parent=props._obj,
+ want_all=want_all,
+ exclude_members=exclude_members,
+ special_members=special_members,
+ private_members=private_members,
+ undoc_members=undoc_members,
+ )
+ except Exception as exc:
+ logger.warning(
+ __(
+ 'autodoc: failed to determine %s.%s (%r) to be documented, '
+ 'the following exception was raised:\n%s'
+ ),
+ props.full_name,
+ member_name,
+ member_obj,
+ exc,
+ type='autodoc',
+ )
+ keep = False
+
+ # give the user a chance to decide whether this member
+ # should be skipped
+ if events is not None:
+ # let extensions preprocess docstrings
+ skip_user = events.emit_firstresult(
+ 'autodoc-skip-member',
+ props.obj_type,
+ member_name,
+ member_obj,
+ not keep,
+ options,
+ )
+ if skip_user is not None:
+ keep = not skip_user
+
+ if keep:
+ # if is_attr is True, the member is documented as an attribute
+ is_attr = member_obj is INSTANCE_ATTR or has_attr_doc
+ yield member_name, member_obj, is_attr
+
+
+def _best_object_type_for_member(
+ member: Any,
+ member_name: str,
+ is_attr: bool,
+ *,
+ parent_obj_type: str,
+ parent_props: _ItemProperties | None,
+) -> _AutodocObjType | None:
+ """Return the best object type that supports documenting *member*."""
+ filtered = []
+
+ # Don't document submodules automatically: 'module' is never returned.
+
+ try:
+ if isinstance(member, type) and issubclass(member, BaseException):
+ # priority must be higher than 'class'
+ filtered.append((20, 'exception'))
+ except TypeError as exc:
+ # It's possible for a member to be considered a type, but fail
+ # issubclass checks due to not being a class. For example:
+ # https://github.com/sphinx-doc/sphinx/issues/11654#issuecomment-1696790436
+ msg = f'Failed to discern if member {member} is a BaseException subclass.'
+ raise ValueError(msg) from exc
+
+ if isinstance(member, type) or (is_attr and isinstance(member, (NewType, TypeVar))):
+ # priority must be higher than 'function', 'class', and 'attribute'
+ # as NewType can be an attribute and is a class after Python 3.10.
+ filtered.append((15, 'class'))
+
+ if parent_obj_type in {'class', 'exception'}:
+ if inspect.isproperty(member):
+ # priority must be higher than 'attribute'
+ filtered.append((11, 'property'))
+
+ # See _get_documenter() in autosummary, parent_props might be None.
+ elif parent_props is not None:
+ # Support for class properties. Note: these only work on Python 3.9.
+ __dict__ = safe_getattr(parent_props._obj, '__dict__', {})
+ obj = __dict__.get(member_name)
+ if isinstance(obj, classmethod) and inspect.isproperty(obj.__func__):
+ # priority must be higher than 'attribute'
+ filtered.append((11, 'property'))
+
+ if parent_obj_type != 'module':
+ if inspect.isattributedescriptor(member) or not (
+ inspect.isroutine(member) or isinstance(member, type)
+ ):
+ # priority must be higher than 'method', else it will recognise
+ # some non-data descriptors as methods
+ filtered.append((10, 'attribute'))
+
+ if inspect.isroutine(member) and parent_obj_type != 'module':
+ # priority must be higher than 'function'
+ filtered.append((1, 'method'))
+
+ if (
+ inspect.isfunction(member)
+ or inspect.isbuiltin(member)
+ or (inspect.isroutine(member) and parent_obj_type == 'module')
+ ):
+ # supports functions, builtins and bound methods exported
+ # at the module level
+ filtered.extend(((0, 'function'), (-1, 'decorator')))
+
+ if isinstance(member, AnyTypeAliasType):
+ filtered.append((0, 'type'))
+
+ if parent_obj_type == 'module' and is_attr:
+ filtered.append((-10, 'data'))
+
+ if filtered:
+ # return the highest priority object type
+ return max(filtered, key=operator.itemgetter(0))[1] # type: ignore[return-value]
+ return None
+
+
+def _sort_members(
+ documenters: list[tuple[_ItemProperties, bool, str]],
+ order: Literal['alphabetical', 'bysource', 'groupwise'],
+ *,
+ ignore_module_all: bool,
+ analyzer_order: dict[str, int],
+ props: _ItemProperties,
+) -> list[tuple[_ItemProperties, bool, str]]:
+ """Sort the given member list."""
+ if order == 'groupwise':
+ # sort by group; alphabetically within groups
+ def group_order(entry: tuple[_ItemProperties, bool, str]) -> tuple[int, str]:
+ return entry[0]._groupwise_order_key, entry[0].full_name
+
+ documenters.sort(key=group_order)
+ elif order == 'bysource':
+ if (
+ isinstance(props, _ModuleProperties)
+ and not ignore_module_all
+ and (module_all := props.all)
+ ):
+ # Sort by __all__
+ module_all_idx = {name: idx for idx, name in enumerate(module_all)}
+ module_all_len = len(module_all)
+
+ def source_order(entry: tuple[_ItemProperties, bool, str]) -> int:
+ fullname = entry[0].dotted_parts
+ return module_all_idx.get(fullname, module_all_len)
+
+ documenters.sort(key=source_order)
+
+ # By default, member discovery order matches source order,
+ # as dicts are insertion-ordered from Python 3.7.
+ elif analyzer_order:
+ # sort by source order, by virtue of the module analyzer
+ order_len = len(analyzer_order)
+
+ def source_order(entry: tuple[_ItemProperties, bool, str]) -> int:
+ fullname = entry[0].dotted_parts
+ return analyzer_order.get(fullname, order_len)
+
+ documenters.sort(key=source_order)
+ else: # alphabetical
+ documenters.sort(key=lambda entry: entry[0].full_name)
+
+ return documenters
+
+
+def unmangle(subject: Any, name: str) -> str | None:
+ """Unmangle the given name."""
+ try:
+ if isclass(subject) and not name.endswith('__'):
+ prefix = f'_{subject.__name__}__'
+ if name.startswith(prefix):
+ return name.replace(prefix, '__', 1)
+ else:
+ for cls in subject.__mro__:
+ prefix = f'_{cls.__name__}__'
+ if name.startswith(prefix):
+ # mangled attribute defined in parent class
+ return None
+ except AttributeError:
+ pass
+
+ return name
+
+
+def _filter_enum_dict(
+ enum_class: type[Enum],
+ attrgetter: _AttrGetter,
+ enum_class_dict: Mapping[str, object],
+) -> Iterator[tuple[str, type, Any]]:
+ """Find the attributes to document of an enumeration class.
+
+ The output consists of triplets ``(attribute name, defining class, value)``
+ where the attribute name can appear more than once during the iteration
+ but with different defining class. The order of occurrence is guided by
+ the MRO of *enum_class*.
+ """
+ # attributes that were found on a mixin type or the data type
+ candidate_in_mro: set[str] = set()
+ # sunder names that were picked up (and thereby allowed to be redefined)
+ # see: https://docs.python.org/3/howto/enum.html#supported-dunder-names
+ sunder_names = {
+ '_name_',
+ '_value_',
+ '_missing_',
+ '_order_',
+ '_generate_next_value_',
+ }
+ # attributes that can be picked up on a mixin type or the enum's data type
+ public_names = {'name', 'value', *object.__dict__, *sunder_names}
+ # names that are ignored by default
+ ignore_names = Enum.__dict__.keys() - public_names
+
+ def should_ignore(name: str, value: Any) -> bool:
+ if name in sunder_names:
+ return _is_native_enum_api(value, name)
+ return name in ignore_names
+
+ sentinel = object()
+
+ def query(name: str, defining_class: type) -> tuple[str, type, Any] | None:
+ value = attrgetter(enum_class, name, sentinel)
+ if value is not sentinel:
+ return name, defining_class, value
+ return None
+
+ # attributes defined on a parent type, possibly shadowed later by
+ # the attributes defined directly inside the enumeration class
+ for parent in enum_class.__mro__:
+ if parent in {enum_class, Enum, object}:
+ continue
+
+ parent_dict = attrgetter(parent, '__dict__', {})
+ for name, value in parent_dict.items():
+ if should_ignore(name, value):
+ continue
+
+ candidate_in_mro.add(name)
+ if (item := query(name, parent)) is not None:
+ yield item
+
+ # exclude members coming from the native Enum unless
+ # they were redefined on a mixin type or the data type
+ excluded_members = Enum.__dict__.keys() - candidate_in_mro
+ yield from filter(
+ None,
+ (
+ query(name, enum_class)
+ for name in enum_class_dict
+ if name not in excluded_members
+ ),
+ )
+
+ # check if allowed members from ``Enum`` were redefined at the enum level
+ special_names = sunder_names | public_names
+ special_names &= enum_class_dict.keys()
+ special_names &= Enum.__dict__.keys()
+ for name in special_names:
+ if (
+ not _is_native_enum_api(enum_class_dict[name], name)
+ and (item := query(name, enum_class)) is not None
+ ):
+ yield item
+
+
+def _is_native_enum_api(obj: object, name: str) -> bool:
+ """Check whether *obj* is the same as ``Enum.__dict__[name]``."""
+ return unwrap_all(obj) is unwrap_all(Enum.__dict__[name])
+
+
+def _should_keep_member(
+ *,
+ member_name: str,
+ member_obj: Any,
+ member_docstring: Sequence[str] | None,
+ member_cls: Any,
+ get_attr: _AttrGetter,
+ has_attr_doc: bool,
+ class_signature: Literal['mixed', 'separated'],
+ inherit_docstrings: bool,
+ inherited_members: Set[str],
+ parent: Any,
+ want_all: bool,
+ exclude_members: EMPTY_T | Set[str] | None,
+ special_members: ALL_T | Sequence[str] | None,
+ private_members: ALL_T | Sequence[str] | None,
+ undoc_members: Literal[True] | None,
+) -> bool:
+ if member_docstring:
+ # hack for ClassDocumenter to inject docstring
+ doclines: Sequence[str] | None = member_docstring
+ else:
+ doc = getdoc(
+ member_obj,
+ get_attr,
+ inherit_docstrings,
+ parent,
+ member_name,
+ )
+ # Ignore non-string __doc__
+ doclines = doc.splitlines() if isinstance(doc, str) else None
+
+ # if the member __doc__ is the same as self's __doc__, it's just
+ # inherited and therefore not the member's doc
+ cls = get_attr(member_obj, '__class__', None)
+ if cls:
+ cls_doc = get_attr(cls, '__doc__', None)
+ if cls_doc == doc:
+ doclines = None
+
+ if doclines is not None:
+ doc, metadata = separate_metadata('\n'.join(doclines))
+ else:
+ doc = ''
+ metadata = {}
+ has_doc = bool(doc or undoc_members)
+
+ if 'private' in metadata:
+ # consider a member private if docstring has "private" metadata
+ is_private = True
+ elif 'public' in metadata:
+ # consider a member public if docstring has "public" metadata
+ is_private = False
+ else:
+ is_private = member_name.startswith('_')
+
+ if ismock(member_obj) and not has_attr_doc:
+ # mocked module or object
+ return False
+
+ if exclude_members and member_name in exclude_members:
+ # remove members given by exclude-members
+ return False
+
+ if not want_all:
+ # keep documented attributes
+ return has_doc or has_attr_doc
+
+ is_filtered_inherited_member = _is_filtered_inherited_member(
+ member_name,
+ member_cls=member_cls,
+ parent=parent,
+ inherited_members=inherited_members,
+ get_attr=get_attr,
+ )
+
+ if special_member_re.match(member_name):
+ # special __methods__
+ if member_name == '__doc__' or is_filtered_inherited_member:
+ return False
+ if special_members and member_name in special_members:
+ return has_doc
+ if (
+ class_signature == 'separated'
+ and member_name in {'__new__', '__init__'}
+ and inspect.isclass(parent)
+ ):
+ return has_doc # show __init__() method
+ return False
+
+ if is_private:
+ if has_attr_doc or has_doc:
+ if private_members is None: # NoQA: SIM114
+ return False
+ elif has_doc and is_filtered_inherited_member:
+ return False
+ return member_name in private_members
+ return False
+
+ if has_attr_doc:
+ # keep documented attributes
+ return True
+
+ if is_filtered_inherited_member:
+ return False
+
+ # ignore undocumented members if :undoc-members: is not given
+ return has_doc
+
+
+def _is_filtered_inherited_member(
+ member_name: str,
+ *,
+ member_cls: Any,
+ parent: Any,
+ inherited_members: Set[str],
+ get_attr: _AttrGetter,
+) -> bool:
+ if not inspect.isclass(parent):
+ return False
+
+ seen = set()
+ for cls in parent.__mro__:
+ if member_name in cls.__dict__:
+ seen.add(cls)
+ if (
+ cls.__name__ in inherited_members
+ and cls != parent
+ and any(issubclass(potential_child, cls) for potential_child in seen)
+ ):
+ # given member is a member of specified *super class*
+ return True
+ if member_cls is cls:
+ return False
+ if member_name in cls.__dict__:
+ return False
+ if member_name in get_attr(cls, '__annotations__', {}):
+ return False
+ return False
diff --git a/sphinx/ext/autodoc/_names.py b/sphinx/ext/autodoc/_names.py
new file mode 100644
index 00000000000..d11eb26bc37
--- /dev/null
+++ b/sphinx/ext/autodoc/_names.py
@@ -0,0 +1,183 @@
+"""Importer utilities for autodoc"""
+
+from __future__ import annotations
+
+import re
+from typing import TYPE_CHECKING
+
+from sphinx.locale import __
+from sphinx.util import logging
+
+if TYPE_CHECKING:
+ from collections.abc import Mapping, Sequence
+
+ from sphinx.environment import _CurrentDocument
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+
+logger = logging.getLogger(__name__)
+
+#: extended signature RE: with explicit module name separated by ::
+py_ext_sig_re = re.compile(
+ r"""^ ([\w.]+::)? # explicit module name
+ ([\w.]+\.)? # module and/or class name(s)
+ (\w+) \s* # thing name
+ (?: \[\s*(.*?)\s*])? # optional: type parameters list
+ (?: \((.*)\) # optional: arguments
+ (?:\s* -> \s* (.*))? # return annotation
+ )? $ # and nothing more
+ """,
+ re.VERBOSE,
+)
+
+
+def _parse_name(
+ *,
+ name: str,
+ objtype: _AutodocObjType,
+ current_document: _CurrentDocument,
+ ref_context: Mapping[str, str | None],
+) -> tuple[str, tuple[str, ...], str | None, str | None] | None:
+ """Parse *name* into module name, path, arguments, and return annotation."""
+ # Parse the definition in *name*.
+ # autodoc directives for classes and functions can contain a signature,
+ # which overrides the autogenerated one.
+ matched = py_ext_sig_re.match(name)
+ if matched is None:
+ logger.warning(
+ __('invalid signature for auto%s (%r)'),
+ objtype,
+ name,
+ type='autodoc',
+ )
+ # need a module to import
+ logger.warning(
+ __(
+ "don't know which module to import for autodocumenting "
+ '%r (try placing a "module" or "currentmodule" directive '
+ 'in the document, or giving an explicit module name)'
+ ),
+ name,
+ type='autodoc',
+ )
+ return None
+
+ explicit_modname, path, base, _tp_list, args, retann = matched.groups()
+ if args is not None:
+ args = f'({args})'
+
+ # Support explicit module and class name separation via ``::``
+ if explicit_modname is not None:
+ module_name = explicit_modname.removesuffix('::')
+ parents = path.rstrip('.').split('.') if path else ()
+ else:
+ module_name = None
+ parents = ()
+
+ resolved = _resolve_name(
+ objtype=objtype,
+ module_name=module_name,
+ path=path,
+ base=base,
+ parents=parents,
+ current_document=current_document,
+ ref_context_py_module=ref_context.get('py:module'),
+ ref_context_py_class=ref_context.get('py:class', ''), # type: ignore[arg-type]
+ )
+ if resolved is None:
+ return None
+ module_name, parts = resolved
+
+ if objtype == 'module' and args:
+ msg = __("signature arguments given for automodule: '%s'")
+ logger.warning(msg, name, type='autodoc')
+ return None
+ if objtype == 'module' and retann:
+ msg = __("return annotation given for automodule: '%s'")
+ logger.warning(msg, name, type='autodoc')
+ return None
+
+ if not module_name:
+ # Could not resolve a module to import
+ logger.warning(
+ __(
+ "don't know which module to import for autodocumenting "
+ '%r (try placing a "module" or "currentmodule" directive '
+ 'in the document, or giving an explicit module name)'
+ ),
+ name,
+ type='autodoc',
+ )
+ return None
+
+ return module_name, parts, args, retann
+
+
+def _resolve_name(
+ *,
+ objtype: _AutodocObjType,
+ module_name: str | None,
+ path: str | None,
+ base: str,
+ parents: Sequence[str],
+ current_document: _CurrentDocument,
+ ref_context_py_module: str | None,
+ ref_context_py_class: str,
+) -> tuple[str | None, tuple[str, ...]] | None:
+ """Resolve the module and name of the object to document given by the
+ arguments and the current module/class.
+
+ Must return a pair of the module name and a chain of attributes; for
+ example, it would return ``('zipfile', ('ZipFile', 'open'))`` for the
+ ``zipfile.ZipFile.open`` method.
+ """
+ if objtype == 'module':
+ if module_name is not None:
+ logger.warning(
+ __('"::" in automodule name doesn\'t make sense'), type='autodoc'
+ )
+ return (path or '') + base, ()
+
+ if objtype in {'class', 'exception', 'function', 'decorator', 'data', 'type'}:
+ if module_name is not None:
+ return module_name, (*parents, base)
+ if path:
+ module_name = path.rstrip('.')
+ return module_name, (*parents, base)
+
+ # if documenting a toplevel object without explicit module,
+ # it can be contained in another auto directive ...
+ module_name = current_document.autodoc_module
+ # ... or in the scope of a module directive
+ if not module_name:
+ module_name = ref_context_py_module
+ # ... else, it stays None, which means invalid
+ return module_name, (*parents, base)
+
+ if objtype in {'method', 'property', 'attribute'}:
+ if module_name is not None:
+ return module_name, (*parents, base)
+
+ if path:
+ mod_cls = path.rstrip('.')
+ else:
+ # if documenting a class-level object without path,
+ # there must be a current class, either from a parent
+ # auto directive ...
+ mod_cls = current_document.autodoc_class
+ # ... or from a class directive
+ if not mod_cls:
+ mod_cls = ref_context_py_class
+ # ... if still falsy, there's no way to know
+ if not mod_cls:
+ return None, ()
+ module_name, _sep, cls = mod_cls.rpartition('.')
+ parents = [cls]
+ # if the module name is still missing, get it like above
+ if not module_name:
+ module_name = current_document.autodoc_module
+ if not module_name:
+ module_name = ref_context_py_module
+ # ... else, it stays None, which means invalid
+ return module_name, (*parents, base)
+
+ return None
diff --git a/sphinx/ext/autodoc/_property_types.py b/sphinx/ext/autodoc/_property_types.py
new file mode 100644
index 00000000000..1747aabcff5
--- /dev/null
+++ b/sphinx/ext/autodoc/_property_types.py
@@ -0,0 +1,240 @@
+from __future__ import annotations
+
+import dataclasses
+
+from sphinx.ext.autodoc._sentinels import RUNTIME_INSTANCE_ATTRIBUTE, UNINITIALIZED_ATTR
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from collections.abc import Sequence
+ from pathlib import Path
+ from typing import Any, Literal, TypeAlias
+
+ from sphinx.ext.autodoc._sentinels import (
+ RUNTIME_INSTANCE_ATTRIBUTE_T,
+ SLOTS_ATTR_T,
+ UNINITIALIZED_ATTR_T,
+ )
+
+ _AutodocObjType: TypeAlias = Literal[
+ 'module',
+ 'class',
+ 'exception',
+ 'function',
+ 'decorator',
+ 'method',
+ 'property',
+ 'attribute',
+ 'data',
+ 'type',
+ ]
+ _AutodocFuncProperty: TypeAlias = Literal[
+ 'abstractmethod',
+ 'async',
+ 'classmethod',
+ 'final',
+ 'singledispatch',
+ 'staticmethod',
+ ]
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _ItemProperties:
+ #: The kind of object being documented
+ obj_type: _AutodocObjType
+ #: The dotted module name
+ module_name: str
+ #: The fully-qualified name within the module
+ parts: tuple[str, ...]
+ #: This item's docstring, as a sequence of lines
+ docstring_lines: tuple[str, ...]
+ #: The item's signature lines, for use in the directive
+ signatures: tuple[str, ...] = ()
+
+ _docstrings_has_hide_value: bool = False
+ _obj: Any
+ _obj___module__: str | None
+
+ @property
+ def name(self) -> str:
+ """The name of the item"""
+ return self.parts[-1]
+
+ @property
+ def object_name(self) -> str:
+ if self._obj is RUNTIME_INSTANCE_ATTRIBUTE or self._obj is UNINITIALIZED_ATTR:
+ return ''
+ return self.name
+
+ @property
+ def full_name(self) -> str:
+ return '.'.join((self.module_name, *self.parts))
+
+ @property
+ def parent_names(self) -> tuple[str, ...]:
+ return self.parts[:-1]
+
+ @property
+ def dotted_parts(self) -> str:
+ return '.'.join(self.parts)
+
+ @property
+ def _groupwise_order_key(self) -> int:
+ return 0
+
+ @property
+ def canonical_module_name(self) -> str:
+ if self._obj___module__ is not None:
+ return self._obj___module__
+ return self.module_name
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _ModuleProperties(_ItemProperties):
+ obj_type: Literal['module'] = 'module'
+ parts: tuple[()] = () # modules have no parts
+
+ file_path: Path | None
+ all: Sequence[str] | None
+
+ @property
+ def name(self) -> str:
+ return self.module_name.rpartition('.')[2]
+
+ @property
+ def object_name(self) -> str:
+ return ''
+
+ @property
+ def full_name(self) -> str:
+ return self.module_name
+
+ @property
+ def parent_names(self) -> tuple[str, ...]:
+ return tuple(self.module_name.split('.')[:-1])
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _ClassDefProperties(_ItemProperties):
+ obj_type: Literal['class', 'exception']
+
+ bases: Sequence[tuple[str, ...]] | None
+
+ _obj___name__: str | None
+ _obj___qualname__: str | None
+ _obj_bases: tuple[str, ...]
+ _obj_is_new_type: bool
+ _obj_is_typevar: bool
+ _signature_method_name: str = ''
+
+ @property
+ def doc_as_attr(self) -> bool:
+ # if the class is documented under another name, document it
+ # as data/attribute
+ if self._obj___name__ is None:
+ return True
+ return self.parts[-1] != self._obj___name__
+
+ @property
+ def canonical_full_name(self) -> str | None:
+ modname = self._obj___module__
+ if modname is None:
+ modname = self.module_name
+ qualname = self._obj___qualname__
+ if qualname is None:
+ qualname = self._obj___name__
+ if not modname or not qualname or '' in qualname:
+ # No valid qualname found if the object is defined as locals
+ return None
+ return f'{modname}.{qualname}'
+
+ @property
+ def _groupwise_order_key(self) -> int:
+ return 10 if self.obj_type == 'exception' else 20
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _FunctionDefProperties(_ItemProperties):
+ obj_type: Literal['function', 'method', 'property', 'decorator']
+
+ properties: frozenset[_AutodocFuncProperty]
+
+ _obj___name__: str | None
+ _obj___qualname__: str | None
+ _obj_property_type_annotation: str | None = None
+
+ @property
+ def is_abstractmethod(self) -> bool:
+ return 'abstractmethod' in self.properties
+
+ @property
+ def is_async(self) -> bool:
+ return 'async' in self.properties
+
+ @property
+ def is_classmethod(self) -> bool:
+ return 'classmethod' in self.properties
+
+ @property
+ def is_final(self) -> bool:
+ return 'final' in self.properties
+
+ # @property
+ # def is_singledispatch(self) -> bool:
+ # return 'singledispatch' in self.properties
+
+ @property
+ def is_staticmethod(self) -> bool:
+ return 'staticmethod' in self.properties
+
+ @property
+ def _groupwise_order_key(self) -> int:
+ if self.obj_type == 'method':
+ if self.is_classmethod:
+ # document class methods before static methods as
+ # they usually behave as alternative constructors
+ return 48
+ if self.is_staticmethod:
+ # document static members before regular methods
+ return 49
+ return 50
+ if self.obj_type == 'property':
+ return 60
+ return 30
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _AssignStatementProperties(_ItemProperties):
+ obj_type: Literal['attribute', 'data']
+
+ value: object
+ annotation: str
+
+ class_var: bool
+ instance_var: bool
+
+ _obj_is_generic_alias: bool
+ _obj_is_attribute_descriptor: bool
+ _obj_is_mock: bool
+ _obj_is_sentinel: (
+ RUNTIME_INSTANCE_ATTRIBUTE_T | SLOTS_ATTR_T | UNINITIALIZED_ATTR_T | None
+ )
+ _obj_repr_rst: str
+ _obj_type_annotation: str | None
+
+ @property
+ def _groupwise_order_key(self) -> int:
+ return 40 if self.obj_type == 'data' else 60
+
+
+@dataclasses.dataclass(frozen=False, kw_only=True, slots=True)
+class _TypeStatementProperties(_ItemProperties):
+ obj_type: Literal['type']
+
+ _obj___name__: str | None
+ _obj___qualname__: str | None
+ _obj___value__: str # The aliased annotation
+
+ @property
+ def _groupwise_order_key(self) -> int:
+ return 70
diff --git a/sphinx/ext/autodoc/_renderer.py b/sphinx/ext/autodoc/_renderer.py
new file mode 100644
index 00000000000..6b8743a1432
--- /dev/null
+++ b/sphinx/ext/autodoc/_renderer.py
@@ -0,0 +1,181 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from sphinx.ext.autodoc._property_types import (
+ _AssignStatementProperties,
+ _ClassDefProperties,
+ _FunctionDefProperties,
+ _TypeStatementProperties,
+)
+from sphinx.ext.autodoc._sentinels import SUPPRESS
+from sphinx.locale import _
+from sphinx.util import logging
+
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+ from typing import Literal
+
+ from docutils.statemachine import StringList
+
+ from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+ from sphinx.ext.autodoc._property_types import _ItemProperties
+
+logger = logging.getLogger('sphinx.ext.autodoc')
+
+
+def _directive_header_lines(
+ *,
+ autodoc_typehints: Literal['signature', 'description', 'none', 'both'],
+ directive_name: str,
+ is_final: bool,
+ options: _AutoDocumenterOptions,
+ props: _ItemProperties,
+) -> Iterator[str]:
+ """Emit the directive header and option lines."""
+ # normally the name doesn't contain the module
+ # (except for module directives of course)
+ name = props.dotted_parts or props.module_name
+
+ # emit one signature per line
+ # the first line contains the directive prefix
+ sig_line, *sig_lines = props.signatures or ('',)
+ prefix = f'.. {directive_name}:: '
+ yield f'{prefix}{name}{sig_line}'
+ # emit remaining lines, indented to the same column
+ prefix = ' ' * len(prefix)
+ for sig_line in sig_lines:
+ yield f'{prefix}{name}{sig_line}'
+
+ if options.no_index or options.noindex:
+ yield ' :no-index:'
+ if options.no_index_entry:
+ yield ' :no-index-entry:'
+ if props.parts:
+ # Be explicit about the module, this is necessary since .. class::
+ # etc. don't support a prepended module name
+ yield f' :module: {props.module_name}'
+
+ if props.obj_type == 'module':
+ # add some module-specific options
+ if options.synopsis:
+ yield f' :synopsis: {options.synopsis}'
+ if options.platform:
+ yield f' :platform: {options.platform}'
+ if options.deprecated:
+ yield ' :deprecated:'
+ if options.no_index_entry:
+ yield ' :no-index-entry:'
+
+ if props.obj_type in {'class', 'exception'}:
+ assert isinstance(props, _ClassDefProperties)
+
+ if props._obj_is_new_type or props._obj_is_typevar:
+ return
+
+ if is_final:
+ yield ' :final:'
+
+ canonical_fullname = props.canonical_full_name
+ if (
+ not props.doc_as_attr
+ and not props._obj_is_new_type
+ and canonical_fullname
+ and props.full_name != canonical_fullname
+ ):
+ yield f' :canonical: {canonical_fullname}'
+
+ # add inheritance info, if wanted
+ if not props.doc_as_attr and options.show_inheritance:
+ yield ''
+ yield ' ' + _('Bases: %s') % ', '.join(props._obj_bases)
+
+ if props.obj_type in {'function', 'decorator'}:
+ assert isinstance(props, _FunctionDefProperties)
+
+ if props.is_async:
+ yield ' :async:'
+
+ if props.obj_type == 'method':
+ assert isinstance(props, _FunctionDefProperties)
+
+ if props.is_abstractmethod:
+ yield ' :abstractmethod:'
+ if props.is_async:
+ yield ' :async:'
+ if props.is_classmethod:
+ yield ' :classmethod:'
+ if props.is_staticmethod:
+ yield ' :staticmethod:'
+ if props.is_final or is_final:
+ yield ' :final:'
+
+ if props.obj_type == 'property':
+ assert isinstance(props, _FunctionDefProperties)
+
+ if props.is_abstractmethod:
+ yield ' :abstractmethod:'
+ if props.is_classmethod:
+ yield ' :classmethod:'
+
+ objrepr = props._obj_property_type_annotation
+ if autodoc_typehints != 'none' and objrepr is not None:
+ yield f' :type: {objrepr}'
+
+ if props.obj_type == 'data':
+ assert isinstance(props, _AssignStatementProperties)
+
+ if options.annotation is SUPPRESS or props._obj_is_generic_alias:
+ pass
+ elif options.annotation:
+ yield f' :annotation: {options.annotation}'
+ else:
+ type_annotation = props._obj_type_annotation
+ if autodoc_typehints != 'none' and type_annotation is not None:
+ yield f' :type: {type_annotation}'
+
+ if (
+ not options.no_value
+ and props._obj_is_sentinel is None # not any sentinel
+ and not props._docstrings_has_hide_value
+ and not props._obj_is_mock
+ ):
+ yield f' :value: {props._obj_repr_rst}'
+
+ if props.obj_type == 'attribute':
+ assert isinstance(props, _AssignStatementProperties)
+
+ if (
+ options.annotation
+ and options.annotation is not SUPPRESS
+ and not props._obj_is_generic_alias
+ ):
+ yield f' :annotation: {options.annotation}'
+ else:
+ type_annotation = props._obj_type_annotation
+ if autodoc_typehints != 'none' and type_annotation is not None:
+ yield f' :type: {type_annotation}'
+
+ if (
+ not options.no_value
+ and props._obj_is_sentinel is None # not any sentinel
+ and not props._obj_is_attribute_descriptor
+ and not props._obj_is_generic_alias
+ and not props._docstrings_has_hide_value
+ and not props._obj_is_mock
+ ):
+ yield f' :value: {props._obj_repr_rst}'
+
+ if props.obj_type == 'type':
+ assert isinstance(props, _TypeStatementProperties)
+
+ if not options.no_value and not props._docstrings_has_hide_value:
+ yield f' :canonical: {props._obj___value__}'
+
+
+def _add_content(content: StringList, *, result: StringList, indent: str) -> None:
+ for line, src in zip(content.data, content.items, strict=True):
+ if line.strip(): # not a blank line
+ result.append(indent + line, src[0], src[1])
+ else:
+ result.append('', src[0], src[1])
diff --git a/sphinx/ext/autodoc/_sentinels.py b/sphinx/ext/autodoc/_sentinels.py
new file mode 100644
index 00000000000..52ba6ae1e48
--- /dev/null
+++ b/sphinx/ext/autodoc/_sentinels.py
@@ -0,0 +1,106 @@
+from __future__ import annotations
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from typing import Final, Literal, NoReturn, Self, TypeAlias, _SpecialForm
+
+
+class _Sentinel:
+ """Create a unique sentinel object."""
+
+ __slots__ = ('_name',)
+
+ _name: str
+
+ def __new__(cls, name: str, /) -> Self:
+ sentinel = super().__new__(cls)
+ object.__setattr__(sentinel, '_name', str(name))
+ return sentinel
+
+ def __repr__(self) -> str:
+ return self._name
+
+ def __setattr__(self, key: str, value: object) -> NoReturn:
+ msg = f'{self._name} is immutable'
+ raise AttributeError(msg)
+
+ def __or__(self, other: object) -> _SpecialForm:
+ from typing import Union
+
+ return Union[self, other] # NoQA: UP007
+
+ def __ror__(self, other: object) -> _SpecialForm:
+ from typing import Union
+
+ return Union[other, self] # NoQA: UP007
+
+ def __getstate__(self) -> NoReturn:
+ msg = f'Cannot pickle {self._name}'
+ raise TypeError(msg)
+
+
+class _All(_Sentinel):
+ """A special value for :*-members: that matches to any member."""
+
+ def __contains__(self, item: object) -> Literal[True]:
+ return True
+
+ def append(self, item: object) -> None:
+ pass # nothing
+
+
+class _Empty(_Sentinel):
+ """A special value for :exclude-members: that never matches to any member."""
+
+ def __contains__(self, item: object) -> Literal[False]:
+ return False
+
+
+if TYPE_CHECKING:
+ # For the sole purpose of satisfying the type checker.
+ # fmt: off
+ import enum
+ class _AllTC(enum.Enum):
+ ALL = enum.auto()
+
+ def __contains__(self, item: object) -> Literal[True]: return True
+ def __add__(self, other: object) -> Self: pass
+ ALL_T: TypeAlias = Literal[_AllTC.ALL]
+ ALL: Final[ALL_T] = _AllTC.ALL
+
+ class _EmptyTC(enum.Enum):
+ EMPTY = enum.auto()
+
+ def __contains__(self, item: object) -> Literal[False]: return False
+ EMPTY_T: TypeAlias = Literal[_EmptyTC.EMPTY]
+ EMPTY: Final[EMPTY_T] = _EmptyTC.EMPTY
+
+ class _SentinelTC(enum.Enum):
+ INSTANCE_ATTR = enum.auto()
+ RUNTIME_INSTANCE_ATTRIBUTE = enum.auto()
+ SLOTS_ATTR = enum.auto()
+ SUPPRESS = enum.auto()
+ UNINITIALIZED_ATTR = enum.auto()
+ INSTANCE_ATTR_T: TypeAlias = Literal[_SentinelTC.INSTANCE_ATTR]
+ RUNTIME_INSTANCE_ATTRIBUTE_T: TypeAlias = Literal[
+ _SentinelTC.RUNTIME_INSTANCE_ATTRIBUTE
+ ]
+ SLOTS_ATTR_T: TypeAlias = Literal[_SentinelTC.SLOTS_ATTR]
+ SUPPRESS_T: TypeAlias = Literal[_SentinelTC.SUPPRESS]
+ UNINITIALIZED_ATTR_T: TypeAlias = Literal[_SentinelTC.UNINITIALIZED_ATTR]
+ INSTANCE_ATTR: Final[INSTANCE_ATTR_T] = _SentinelTC.INSTANCE_ATTR
+ RUNTIME_INSTANCE_ATTRIBUTE: Final[RUNTIME_INSTANCE_ATTRIBUTE_T] = (
+ _SentinelTC.RUNTIME_INSTANCE_ATTRIBUTE
+ )
+ SLOTS_ATTR: Final[SLOTS_ATTR_T] = _SentinelTC.SLOTS_ATTR
+ SUPPRESS: Final[SUPPRESS_T] = _SentinelTC.SUPPRESS
+ UNINITIALIZED_ATTR: Final[UNINITIALIZED_ATTR_T] = _SentinelTC.UNINITIALIZED_ATTR
+ # fmt: on
+else:
+ ALL = _All('ALL')
+ EMPTY = _Empty('EMPTY')
+ INSTANCE_ATTR = _Sentinel('INSTANCE_ATTR')
+ RUNTIME_INSTANCE_ATTRIBUTE = _Sentinel('RUNTIME_INSTANCE_ATTRIBUTE')
+ SLOTS_ATTR = _Sentinel('SLOTS_ATTR')
+ SUPPRESS = _Sentinel('SUPPRESS')
+ UNINITIALIZED_ATTR = _Sentinel('UNINITIALIZED_ATTR')
diff --git a/sphinx/ext/autodoc/_shared.py b/sphinx/ext/autodoc/_shared.py
new file mode 100644
index 00000000000..18b0062a3ff
--- /dev/null
+++ b/sphinx/ext/autodoc/_shared.py
@@ -0,0 +1,158 @@
+"""Shared utilities for autodoc that don't have a better home."""
+
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+from sphinx.util.inspect import safe_getattr
+
+if TYPE_CHECKING:
+ from collections.abc import Callable, Mapping, Sequence
+ from typing import Any, Literal, NoReturn, Protocol
+
+ from sphinx.config import Config
+ from sphinx.util.typing import _RestifyMode
+
+ class _AttrGetter(Protocol): # NoQA: PYI046
+ def __call__(self, obj: Any, name: str, default: Any = ..., /) -> Any: ...
+
+
+class _AutodocConfig:
+ __slots__ = (
+ 'autoclass_content',
+ 'autodoc_class_signature',
+ 'autodoc_default_options',
+ 'autodoc_docstring_signature',
+ 'autodoc_inherit_docstrings',
+ 'autodoc_member_order',
+ 'autodoc_mock_imports',
+ 'autodoc_preserve_defaults',
+ 'autodoc_type_aliases',
+ 'autodoc_typehints',
+ 'autodoc_typehints_description_target',
+ 'autodoc_typehints_format',
+ 'autodoc_use_type_comments',
+ # non-autodoc config
+ 'python_display_short_literal_types',
+ 'strip_signature_backslash',
+ )
+
+ autoclass_content: Literal['both', 'class', 'init']
+ autodoc_class_signature: Literal['mixed', 'separated']
+ autodoc_default_options: Mapping[str, str | bool]
+ autodoc_docstring_signature: bool
+ autodoc_inherit_docstrings: bool
+ autodoc_member_order: Literal['alphabetical', 'bysource', 'groupwise']
+ autodoc_mock_imports: Sequence[str]
+ autodoc_preserve_defaults: bool
+ autodoc_type_aliases: Mapping[str, str]
+ autodoc_typehints: Literal['signature', 'description', 'none', 'both']
+ autodoc_typehints_description_target: Literal[
+ 'all', 'documented', 'documented_params'
+ ]
+ autodoc_typehints_format: Literal['fully-qualified', 'short']
+ autodoc_use_type_comments: bool
+ # non-autodoc config
+ python_display_short_literal_types: bool
+ strip_signature_backslash: bool
+
+ @classmethod
+ def from_config(cls, config: Config) -> _AutodocConfig:
+ return cls(
+ autoclass_content=config.autoclass_content,
+ autodoc_class_signature=config.autodoc_class_signature,
+ autodoc_default_options=config.autodoc_default_options,
+ autodoc_docstring_signature=config.autodoc_docstring_signature,
+ autodoc_inherit_docstrings=config.autodoc_inherit_docstrings,
+ autodoc_member_order=config.autodoc_member_order,
+ autodoc_mock_imports=config.autodoc_mock_imports,
+ autodoc_preserve_defaults=config.autodoc_preserve_defaults,
+ autodoc_type_aliases=config.autodoc_type_aliases,
+ autodoc_typehints=config.autodoc_typehints,
+ autodoc_typehints_description_target=config.autodoc_typehints_description_target,
+ autodoc_typehints_format=config.autodoc_typehints_format,
+ autodoc_use_type_comments=config.autodoc_use_type_comments,
+ python_display_short_literal_types=config.python_display_short_literal_types,
+ strip_signature_backslash=config.strip_signature_backslash,
+ )
+
+ def __init__(
+ self,
+ *,
+ autoclass_content: Literal['both', 'class', 'init'] = 'class',
+ autodoc_class_signature: Literal['mixed', 'separated'] = 'mixed',
+ autodoc_default_options: Mapping[str, str | bool] = {}.keys().mapping,
+ autodoc_docstring_signature: bool = True,
+ autodoc_inherit_docstrings: bool = True,
+ autodoc_member_order: Literal['alphabetical', 'bysource', 'groupwise'] = (
+ 'alphabetical'
+ ),
+ autodoc_mock_imports: Sequence[str] = (),
+ autodoc_preserve_defaults: bool = False,
+ autodoc_type_aliases: Mapping[str, str] = {}.keys().mapping,
+ autodoc_typehints: Literal[
+ 'signature', 'description', 'none', 'both'
+ ] = 'signature',
+ autodoc_typehints_description_target: Literal[
+ 'all', 'documented', 'documented_params'
+ ] = 'all',
+ autodoc_typehints_format: Literal['fully-qualified', 'short'] = 'short',
+ autodoc_use_type_comments: bool = True,
+ python_display_short_literal_types: bool = False,
+ strip_signature_backslash: bool = False,
+ ) -> None:
+ for name in self.__slots__:
+ super().__setattr__(name, locals()[name])
+
+ def __repr__(self) -> str:
+ items = ((name, getattr(self, name)) for name in self.__slots__)
+ args = ', '.join(f'{name}={value!r}' for name, value in items)
+ return f'_AutodocConfig({args})'
+
+ def __setattr__(self, key: str, value: Any) -> NoReturn:
+ msg = f'{self.__class__.__name__} is immutable'
+ raise AttributeError(msg)
+
+ def __delattr__(self, key: str) -> NoReturn:
+ msg = f'{self.__class__.__name__} is immutable'
+ raise AttributeError(msg)
+
+
+class _AutodocAttrGetter:
+ """getattr() override for types such as Zope interfaces."""
+
+ _attr_getters: Sequence[tuple[type, Callable[[Any, str, Any], Any]]]
+
+ __slots__ = ('_attr_getters',)
+
+ def __init__(
+ self, attr_getters: dict[type, Callable[[Any, str, Any], Any]], /
+ ) -> None:
+ super().__setattr__('_attr_getters', tuple(attr_getters.items()))
+
+ def __call__(self, obj: Any, name: str, *defargs: Any) -> Any:
+ for typ, func in self._attr_getters:
+ if isinstance(obj, typ):
+ return func(obj, name, *defargs)
+
+ return safe_getattr(obj, name, *defargs)
+
+ def __repr__(self) -> str:
+ return f'_AutodocAttrGetter({dict(self._attr_getters)!r})'
+
+ def __setattr__(self, key: str, value: Any) -> NoReturn:
+ msg = f'{self.__class__.__name__} is immutable'
+ raise AttributeError(msg)
+
+ def __delattr__(self, key: str) -> NoReturn:
+ msg = f'{self.__class__.__name__} is immutable'
+ raise AttributeError(msg)
+
+
+def _get_render_mode(
+ typehints_format: Literal['fully-qualified', 'short'],
+ /,
+) -> _RestifyMode:
+ if typehints_format == 'short':
+ return 'smart'
+ return 'fully-qualified-except-typing'
diff --git a/sphinx/ext/autodoc/_signatures.py b/sphinx/ext/autodoc/_signatures.py
new file mode 100644
index 00000000000..0dcf669b09c
--- /dev/null
+++ b/sphinx/ext/autodoc/_signatures.py
@@ -0,0 +1,645 @@
+"""Signature utilities for autodoc"""
+
+from __future__ import annotations
+
+import sys
+from inspect import Parameter, Signature
+from typing import TYPE_CHECKING, NewType, TypeVar
+
+from sphinx.errors import PycodeError
+from sphinx.ext.autodoc._names import py_ext_sig_re
+from sphinx.ext.autodoc._property_types import _AssignStatementProperties
+from sphinx.ext.autodoc._type_comments import _update_annotations_using_type_comments
+from sphinx.ext.autodoc.preserve_defaults import update_default_value
+from sphinx.ext.autodoc.typehints import _record_typehints
+from sphinx.locale import __
+from sphinx.pycode import ModuleAnalyzer
+from sphinx.util import inspect, logging
+from sphinx.util.docstrings import prepare_docstring
+from sphinx.util.inspect import (
+ _stringify_signature_to_parts,
+ evaluate_signature,
+ safe_getattr,
+)
+
+if TYPE_CHECKING:
+ from collections.abc import Callable, Mapping
+ from typing import Any, TypeAlias
+
+ from sphinx.events import EventManager
+ from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+ from sphinx.ext.autodoc._property_types import _ItemProperties
+ from sphinx.ext.autodoc._shared import _AttrGetter, _AutodocConfig
+
+ _FormattedSignature: TypeAlias = tuple[str, str]
+
+logger = logging.getLogger(__name__)
+
+
+def _format_signatures(
+ *,
+ autodoc_annotations: dict[str, dict[str, str]],
+ config: _AutodocConfig,
+ docstrings: list[list[str]] | None,
+ events: EventManager,
+ get_attr: _AttrGetter,
+ parent: Any,
+ options: _AutoDocumenterOptions,
+ props: _ItemProperties,
+ args: str | None = None,
+ retann: str | None = '',
+ **kwargs: Any,
+) -> list[_FormattedSignature]:
+ """Format the signature (arguments and return annotation) of the object.
+
+ Let the user process it via the ``autodoc-process-signature`` event.
+ """
+ if props.obj_type in {'class', 'exception'}:
+ from sphinx.ext.autodoc._property_types import _ClassDefProperties
+
+ assert isinstance(props, _ClassDefProperties)
+ if props.doc_as_attr:
+ return []
+ if config.autodoc_class_signature == 'separated':
+ # do not show signatures
+ return []
+
+ if config.autodoc_typehints_format == 'short':
+ kwargs.setdefault('unqualified_typehints', True)
+ if config.python_display_short_literal_types:
+ kwargs.setdefault('short_literals', True)
+
+ if args is None:
+ signatures: list[_FormattedSignature] = []
+ else:
+ signatures = [(args, retann or '')]
+
+ if (
+ not signatures
+ and config.autodoc_docstring_signature
+ and props.obj_type not in {'module', 'data', 'type'}
+ and docstrings is not None
+ ):
+ # only act if a signature is not explicitly given already,
+ # and if the feature is enabled
+ signatures[:] = _extract_signatures_from_docstrings(
+ docstrings, props=props, tab_width=options._tab_width
+ )
+
+ if not signatures:
+ # try to introspect the signature
+ try:
+ signatures[:] = _extract_signature_from_object(
+ config=config,
+ events=events,
+ get_attr=get_attr,
+ parent=parent,
+ props=props,
+ **kwargs,
+ )
+ except Exception as exc:
+ msg = __('error while formatting arguments for %s: %s')
+ logger.warning(msg, props.full_name, exc, type='autodoc')
+
+ if props.obj_type in {'attribute', 'property'}:
+ # Only keep the return annotation
+ signatures = [('', retann) for _args, retann in signatures]
+
+ _record_typehints(
+ autodoc_annotations=autodoc_annotations,
+ name=props.full_name,
+ obj=props._obj,
+ short_literals=kwargs.get('short_literals', False),
+ type_aliases=config.autodoc_type_aliases,
+ unqualified_typehints=kwargs.get('unqualified_typehints', False),
+ )
+ if result := events.emit_firstresult(
+ 'autodoc-process-signature',
+ props.obj_type,
+ props.full_name,
+ props._obj,
+ options,
+ signatures[0][0] if signatures else None, # args
+ signatures[0][1] if signatures else '', # retann
+ ):
+ if len(result) == 2 and isinstance(result[0], str):
+ args, retann = result
+ signatures[0] = (args, retann if isinstance(retann, str) else '')
+
+ if props.obj_type in {'module', 'data', 'type'}:
+ signatures[1:] = () # discard all signatures save the first
+
+ analyzer_overloads: dict[str, list[Signature]] = {}
+ try:
+ analyzer = ModuleAnalyzer.for_module(props.canonical_module_name)
+ # parse right now, to get PycodeErrors on parsing (results will
+ # be cached anyway)
+ analyzer.analyze()
+ except PycodeError as exc:
+ logger.debug('[autodoc] module analyzer failed: %s', exc)
+ # no source file -- e.g. for builtin and C modules
+ else:
+ analyzer_overloads = analyzer.overloads
+
+ if props.obj_type in {'function', 'decorator'}:
+ overloaded = (
+ props.dotted_parts in analyzer_overloads
+ and config.autodoc_typehints != 'none'
+ )
+ is_singledispatch = inspect.is_singledispatch_function(props._obj)
+
+ if overloaded:
+ # Use signatures for overloaded functions and methods instead of
+ # their implementations.
+ signatures.clear()
+ elif not is_singledispatch:
+ return signatures
+
+ if is_singledispatch:
+ from sphinx.ext.autodoc._property_types import _FunctionDefProperties
+
+ # append signature of singledispatch'ed functions
+ for typ, func in props._obj.registry.items():
+ if typ is object:
+ continue # default implementation. skipped.
+ dispatch_func = _annotate_to_first_argument(
+ func, typ, config=config, props=props
+ )
+ if not dispatch_func:
+ continue
+ dispatch_props = _FunctionDefProperties(
+ obj_type='function',
+ module_name='',
+ parts=('',),
+ docstring_lines=(),
+ signatures=(),
+ _obj=dispatch_func,
+ _obj___module__=None,
+ _obj___qualname__=None,
+ _obj___name__=None,
+ properties=frozenset(),
+ )
+ signatures += _format_signatures(
+ autodoc_annotations=autodoc_annotations,
+ config=config,
+ docstrings=None,
+ events=events,
+ get_attr=get_attr,
+ parent=None,
+ options=options,
+ props=dispatch_props,
+ )
+ if overloaded:
+ actual = inspect.signature(
+ props._obj, type_aliases=config.autodoc_type_aliases
+ )
+ obj_globals = safe_getattr(props._obj, '__globals__', {})
+ overloads = analyzer_overloads[props.dotted_parts]
+ for overload in overloads:
+ overload = _merge_default_value(actual, overload)
+ overload = evaluate_signature(
+ overload, obj_globals, config.autodoc_type_aliases
+ )
+ signatures.append(_stringify_signature_to_parts(overload, **kwargs))
+
+ return signatures
+
+ if props.obj_type in {'class', 'exception'}:
+ from sphinx.ext.autodoc._property_types import _ClassDefProperties
+
+ assert isinstance(props, _ClassDefProperties)
+ method_name = props._signature_method_name
+ if method_name == '__call__':
+ signature_cls = type(props._obj)
+ else:
+ signature_cls = props._obj
+ overloads = []
+ overloaded = False
+ if method_name:
+ for cls in signature_cls.__mro__:
+ try:
+ analyzer = ModuleAnalyzer.for_module(cls.__module__)
+ analyzer.analyze()
+ except PycodeError:
+ pass
+ else:
+ qualname = f'{cls.__qualname__}.{method_name}'
+ if qualname in analyzer.overloads:
+ overloads = analyzer.overloads[qualname]
+ overloaded = True
+ break
+ if qualname in analyzer.tagorder:
+ # the constructor is defined in the class, but not overridden.
+ break
+ if overloaded and config.autodoc_typehints != 'none':
+ # Use signatures for overloaded methods instead of the implementation method.
+ signatures.clear()
+ method = safe_getattr(signature_cls, method_name, None)
+ method_globals = safe_getattr(method, '__globals__', {})
+ for overload in overloads:
+ overload = evaluate_signature(
+ overload, method_globals, config.autodoc_type_aliases
+ )
+
+ parameters = list(overload.parameters.values())
+ overload = overload.replace(
+ parameters=parameters[1:], return_annotation=Parameter.empty
+ )
+ signatures.append(_stringify_signature_to_parts(overload, **kwargs))
+ return signatures
+
+ return signatures
+
+ if props.obj_type == 'method':
+ overloaded = (
+ props.dotted_parts in analyzer_overloads
+ and config.autodoc_typehints != 'none'
+ )
+ meth = parent.__dict__.get(props.name)
+ is_singledispatch = inspect.is_singledispatch_method(meth)
+
+ if overloaded:
+ # Use signatures for overloaded functions and methods instead of
+ # their implementations.
+ signatures.clear()
+ elif not is_singledispatch:
+ return signatures
+
+ if is_singledispatch:
+ from sphinx.ext.autodoc._property_types import _FunctionDefProperties
+
+ # append signature of singledispatch'ed methods
+ for typ, func in meth.dispatcher.registry.items():
+ if typ is object:
+ continue # default implementation. skipped.
+ if inspect.isclassmethod(func):
+ func = func.__func__
+ dispatch_meth = _annotate_to_first_argument(
+ func, typ, config=config, props=props
+ )
+ if not dispatch_meth:
+ continue
+ dispatch_props = _FunctionDefProperties(
+ obj_type='method',
+ module_name='',
+ parts=('',),
+ docstring_lines=(),
+ signatures=(),
+ _obj=dispatch_meth,
+ _obj___module__=None,
+ _obj___qualname__=None,
+ _obj___name__=None,
+ properties=frozenset(),
+ )
+ signatures += _format_signatures(
+ autodoc_annotations=autodoc_annotations,
+ config=config,
+ docstrings=None,
+ events=events,
+ get_attr=get_attr,
+ parent=parent,
+ options=options,
+ props=dispatch_props,
+ )
+ if overloaded:
+ from sphinx.ext.autodoc._property_types import _FunctionDefProperties
+
+ assert isinstance(props, _FunctionDefProperties)
+ actual = inspect.signature(
+ props._obj,
+ bound_method=not props.is_staticmethod,
+ type_aliases=config.autodoc_type_aliases,
+ )
+
+ obj_globals = safe_getattr(props._obj, '__globals__', {})
+ overloads = analyzer_overloads[props.dotted_parts]
+ for overload in overloads:
+ overload = _merge_default_value(actual, overload)
+ overload = evaluate_signature(
+ overload, obj_globals, config.autodoc_type_aliases
+ )
+
+ if not props.is_staticmethod:
+ # hide the first argument (e.g. 'self')
+ parameters = list(overload.parameters.values())
+ overload = overload.replace(parameters=parameters[1:])
+ signatures.append(_stringify_signature_to_parts(overload, **kwargs))
+
+ return signatures
+
+ return signatures
+
+
+def _extract_signatures_from_docstrings(
+ docstrings: list[list[str]],
+ /,
+ props: _ItemProperties,
+ tab_width: int,
+) -> list[_FormattedSignature]:
+ signatures: list[_FormattedSignature] = []
+
+ # candidates of the object name
+ valid_names = {props.name}
+ if props.obj_type in {'class', 'exception'}:
+ valid_names.add('__init__')
+ if hasattr(props._obj, '__mro__'):
+ valid_names |= {cls.__name__ for cls in props._obj.__mro__}
+
+ stripped_docstrings = [list(l) for l in (docstrings or ())]
+ for i, doclines in enumerate(docstrings):
+ j = 0
+ for j, line in enumerate(doclines): # NoQA: B007
+ if not line:
+ # no lines in docstring, no match
+ break
+ line = line.rstrip('\\').rstrip()
+
+ # match first line of docstring against signature RE
+ match = py_ext_sig_re.match(line)
+ if not match:
+ break
+ _exmod, _path, base, _tp_list, args, retann = match.groups()
+ if args is not None:
+ args = f'({args})'
+ else:
+ args = '' # i.e. property or attribute
+
+ # the base name must match ours
+ if base not in valid_names:
+ break
+
+ if props.obj_type in {'class', 'exception'} and retann == 'None':
+ # Strip a return value from signatures of constructor in docstring
+ signatures.append((args, ''))
+ else:
+ signatures.append((args, retann or ''))
+
+ if signatures:
+ # re-prepare docstring to ignore more leading indentation
+ stripped_docstrings[i] = prepare_docstring(
+ '\n'.join(doclines[j:]), tab_width
+ )
+
+ # finish the loop after finding at least one signature
+ break
+
+ if not signatures:
+ return []
+
+ # Update docstrings from stripped_docstrings if needed
+ if props.obj_type in {
+ 'class',
+ 'exception',
+ 'function',
+ 'method',
+ 'property',
+ 'decorator',
+ } or (
+ props.obj_type == 'attribute'
+ and isinstance(props, _AssignStatementProperties)
+ and props._obj_is_attribute_descriptor
+ ):
+ docstrings[:] = stripped_docstrings
+
+ return signatures
+
+
+def _extract_signature_from_object(
+ config: _AutodocConfig,
+ events: EventManager,
+ get_attr: _AttrGetter,
+ parent: Any,
+ props: _ItemProperties,
+ **kwargs: Any,
+) -> list[_FormattedSignature]:
+ """Format the signature using runtime introspection."""
+ sig = _get_signature_object(
+ events=events,
+ get_attr=get_attr,
+ parent=parent,
+ preserve_defaults=config.autodoc_preserve_defaults,
+ props=props,
+ type_aliases=config.autodoc_type_aliases,
+ use_type_comments=config.autodoc_use_type_comments,
+ )
+ if sig is None:
+ return []
+
+ if props.obj_type == 'decorator' and len(sig.parameters) == 1:
+ # Special case for single-argument decorators
+ return [('', '')]
+
+ if config.autodoc_typehints in {'none', 'description'}:
+ kwargs.setdefault('show_annotation', False)
+ if config.autodoc_typehints_format == 'short':
+ kwargs.setdefault('unqualified_typehints', True)
+ if config.python_display_short_literal_types:
+ kwargs.setdefault('short_literals', True)
+ if props.obj_type in {'class', 'exception'}:
+ kwargs['show_return_annotation'] = False
+
+ args, retann = _stringify_signature_to_parts(sig, **kwargs)
+ if config.strip_signature_backslash:
+ # escape backslashes for reST
+ args = args.replace('\\', '\\\\')
+ retann = retann.replace('\\', '\\\\')
+
+ return [(args, retann)]
+
+
+# Types which have confusing metaclass signatures it would be best not to show.
+# These are listed by name, rather than storing the objects themselves, to avoid
+# needing to import the modules.
+_METACLASS_CALL_BLACKLIST = frozenset({
+ 'enum.EnumType.__call__',
+})
+
+
+# Types whose __new__ signature is a pass-through.
+_CLASS_NEW_BLACKLIST = frozenset({
+ 'typing.Generic.__new__',
+})
+
+
+def _get_signature_object(
+ events: EventManager,
+ get_attr: _AttrGetter,
+ parent: Any,
+ preserve_defaults: bool,
+ props: _ItemProperties,
+ type_aliases: Mapping[str, str] | None,
+ use_type_comments: bool,
+) -> Signature | None:
+ """Return a Signature for *obj*, or None on failure."""
+ obj, is_bound_method = _get_object_for_signature(
+ props=props, get_attr=get_attr, parent=parent, type_aliases=type_aliases
+ )
+ if obj is None or isinstance(obj, Signature):
+ return obj
+
+ if preserve_defaults:
+ update_default_value(obj, bound_method=is_bound_method)
+ if use_type_comments:
+ _update_annotations_using_type_comments(obj, bound_method=is_bound_method)
+ events.emit('autodoc-before-process-signature', obj, is_bound_method)
+
+ if props.obj_type in {'class', 'exception', 'function', 'method', 'decorator'}:
+ try:
+ return inspect.signature(
+ obj, bound_method=is_bound_method, type_aliases=type_aliases
+ )
+ except TypeError as exc:
+ if props.obj_type in {'class', 'exception'}:
+ msg = __('Failed to get a constructor signature for %s: %s')
+ elif props.obj_type in {'function', 'decorator'}:
+ msg = __('Failed to get a function signature for %s: %s')
+ elif props.obj_type == 'method':
+ msg = __('Failed to get a method signature for %s: %s')
+ else:
+ msg = __('Failed to get a signature for %s: %s')
+ logger.warning(msg, props.full_name, exc)
+ return None
+ except ValueError:
+ # Still no signature: happens e.g. for old-style classes
+ # with __init__ in C and no `__text_signature__`.
+ return None
+
+ return None
+
+
+def _get_object_for_signature(
+ props: _ItemProperties,
+ get_attr: _AttrGetter,
+ parent: Any,
+ type_aliases: Mapping[str, str] | None,
+) -> tuple[Any, bool]:
+ """Return the object from which we will obtain the signature."""
+ obj = props._obj
+ if props.obj_type in {'function', 'decorator'}:
+ return obj, False
+
+ if props.obj_type in {'class', 'exception'}:
+ if isinstance(obj, (NewType, TypeVar)):
+ # Suppress signature
+ return None, False
+
+ try:
+ object_sig = obj.__signature__
+ except AttributeError:
+ pass
+ else:
+ if isinstance(object_sig, Signature):
+ return object_sig, False
+ if sys.version_info[:2] in {(3, 12), (3, 13)} and callable(object_sig):
+ # Support for enum.Enum.__signature__ in Python 3.12
+ if isinstance(object_sig_str := object_sig(), str):
+ return inspect.signature_from_str(object_sig_str), False
+
+ def get_user_defined_function_or_method(obj: Any, attr: str) -> Any:
+ """Get the `attr` function or method from `obj`, if it is user-defined."""
+ if inspect.is_builtin_class_method(obj, attr):
+ return None
+ attr = get_attr(obj, attr, None)
+ if not (inspect.ismethod(attr) or inspect.isfunction(attr)):
+ return None
+ return attr
+
+ # This sequence is copied from inspect._signature_from_callable.
+ # ValueError means that no signature could be found, so we keep going.
+
+ # Let's see if it has an overloaded __call__ defined in its metaclass,
+ # or if the 'obj' class has a '__new__' or '__init__' method
+ for obj_, meth_name, blacklist in (
+ (type(obj), '__call__', _METACLASS_CALL_BLACKLIST),
+ (obj, '__new__', _CLASS_NEW_BLACKLIST),
+ (obj, '__init__', frozenset()),
+ ):
+ meth = get_user_defined_function_or_method(obj_, meth_name)
+ if meth is None:
+ continue
+ if blacklist:
+ if f'{meth.__module__}.{meth.__qualname__}' in blacklist:
+ continue
+
+ try:
+ inspect.signature(meth, bound_method=True, type_aliases=type_aliases)
+ except TypeError:
+ return meth, True # _get_signature_object() needs to log the failure
+ except ValueError:
+ continue
+ else:
+ from sphinx.ext.autodoc._property_types import _ClassDefProperties
+
+ assert isinstance(props, _ClassDefProperties)
+ props._signature_method_name = meth_name
+ return meth, True
+
+ # None of the attributes are user-defined, so fall back to let inspect
+ # handle it.
+ # We don't know the exact method that inspect.signature will read
+ # the signature from, so just return the object itself to be passed
+ # to the ``autodoc-before-process-signature`` hook.
+ return obj, False
+
+ if props.obj_type == 'method':
+ if obj == object.__init__ and parent != object: # NoQA: E721
+ # Classes not having own __init__() method are shown as no arguments.
+ #
+ # Note: The signature of object.__init__() is (self, /, *args, **kwargs).
+ # But it makes users confused.
+ return Signature(), False
+
+ is_bound_method = not inspect.isstaticmethod(
+ obj, cls=parent, name=props.object_name
+ )
+ return obj, is_bound_method
+
+ return None, False
+
+
+def _annotate_to_first_argument(
+ func: Callable[..., Any],
+ typ: type,
+ *,
+ config: _AutodocConfig,
+ props: _ItemProperties,
+) -> Callable[..., Any] | None:
+ """Annotate type hint to the first argument of function if needed."""
+ try:
+ sig = inspect.signature(func, type_aliases=config.autodoc_type_aliases)
+ except TypeError as exc:
+ msg = __('Failed to get a function signature for %s: %s')
+ logger.warning(msg, props.full_name, exc)
+ return None
+ except ValueError:
+ return None
+
+ first_arg_idx = 1 * (props.obj_type == 'method')
+ if len(sig.parameters) == first_arg_idx:
+ return None
+
+ def dummy(): # type: ignore[no-untyped-def] # NoQA: ANN202
+ pass
+
+ params = list(sig.parameters.values())
+ if params[first_arg_idx].annotation is Parameter.empty:
+ params[first_arg_idx] = params[first_arg_idx].replace(annotation=typ)
+ try:
+ dummy.__signature__ = sig.replace(parameters=params) # type: ignore[attr-defined]
+ return dummy
+ except (AttributeError, TypeError):
+ # failed to update signature (ex. built-in or extension types)
+ return None
+
+ return func
+
+
+def _merge_default_value(actual: Signature, overload: Signature) -> Signature:
+ """Merge default values of actual implementation to the overload variants."""
+ parameters = list(overload.parameters.values())
+ for i, param in enumerate(parameters):
+ actual_param = actual.parameters.get(param.name)
+ if actual_param and param.default == '...':
+ parameters[i] = param.replace(default=actual_param.default)
+
+ return overload.replace(parameters=parameters)
diff --git a/sphinx/ext/autodoc/type_comment.py b/sphinx/ext/autodoc/_type_comments.py
similarity index 60%
rename from sphinx/ext/autodoc/type_comment.py
rename to sphinx/ext/autodoc/_type_comments.py
index 69923609f69..b7d2b25a03e 100644
--- a/sphinx/ext/autodoc/type_comment.py
+++ b/sphinx/ext/autodoc/_type_comments.py
@@ -3,34 +3,155 @@
from __future__ import annotations
import ast
+import contextlib
+import sys
from inspect import Parameter, Signature, getsource
+from types import ModuleType
from typing import TYPE_CHECKING, cast
+from weakref import WeakSet
-import sphinx
+from sphinx.errors import PycodeError
from sphinx.locale import __
+from sphinx.pycode import ModuleAnalyzer
from sphinx.pycode.ast import unparse as ast_unparse
from sphinx.util import inspect, logging
+from sphinx.util.inspect import safe_getattr
if TYPE_CHECKING:
from collections.abc import Sequence
from typing import Any
- from sphinx.application import Sphinx
- from sphinx.util.typing import ExtensionMetadata
logger = logging.getLogger(__name__)
-def not_suppressed(argtypes: Sequence[ast.expr] = ()) -> bool:
- """Check given *argtypes* is suppressed type_comment or not."""
- if len(argtypes) == 0: # no argtypees
- return False
- if len(argtypes) == 1:
- arg = argtypes[0]
- if isinstance(arg, ast.Constant) and arg.value is ...: # suppressed
- return False
- # not suppressed
- return True
+_objects_with_type_comment_annotations: WeakSet[Any] = WeakSet()
+"""Cache of objects with annotations updated from type comments."""
+
+
+def _ensure_annotations_from_type_comments(obj: Any) -> None:
+ """Ensures `obj.__annotations__` includes type comment information.
+
+ Failures to assign to `__annotations__` are silently ignored.
+
+ If `obj` is a class type, this also ensures that type comment
+ information is incorporated into the `__annotations__` member of
+ all parent classes, if possible.
+
+ This mutates the `__annotations__` of existing imported objects,
+ in order to allow the existing `typing.get_type_hints` method to
+ take the modified annotations into account.
+
+ Modifying existing imported objects is unfortunate but avoids the
+ need to reimplement `typing.get_type_hints` in order to take into
+ account type comment information.
+
+ Note that this does not directly include type comment information
+ from parent classes, but `typing.get_type_hints` takes that into
+ account.
+ """
+ if obj in _objects_with_type_comment_annotations:
+ return
+ _objects_with_type_comment_annotations.add(obj)
+
+ if isinstance(obj, type):
+ for cls in inspect.getmro(obj):
+ modname = safe_getattr(cls, '__module__')
+ mod = sys.modules.get(modname)
+ if mod is not None:
+ _ensure_annotations_from_type_comments(mod)
+
+ elif isinstance(obj, ModuleType):
+ _update_module_annotations_from_type_comments(obj)
+
+
+def _update_module_annotations_from_type_comments(mod: ModuleType) -> None:
+ """Adds type comment annotations for a single module.
+
+ Both module-level and class-level annotations are added.
+ """
+ mod_annotations = dict(inspect.getannotations(mod))
+ mod.__annotations__ = mod_annotations
+
+ class_annotations: dict[str, dict[str, Any]] = {}
+
+ try:
+ analyzer = ModuleAnalyzer.for_module(mod.__name__)
+ analyzer.analyze()
+ anns = analyzer.annotations
+ for (classname, attrname), annotation in anns.items():
+ if not classname:
+ annotations = mod_annotations
+ else:
+ cls_annotations = class_annotations.get(classname)
+ if cls_annotations is None:
+ try:
+ cls = mod
+ for part in classname.split('.'):
+ cls = safe_getattr(cls, part)
+ annotations = dict(inspect.getannotations(cls))
+ # Ignore errors setting __annotations__
+ with contextlib.suppress(TypeError, AttributeError):
+ cls.__annotations__ = annotations
+ except AttributeError:
+ annotations = {}
+ class_annotations[classname] = annotations
+ else:
+ annotations = cls_annotations
+ annotations.setdefault(attrname, annotation)
+ except PycodeError:
+ pass
+
+
+def _update_annotations_using_type_comments(obj: Any, bound_method: bool) -> None:
+ """Update annotations info of *obj* using type_comments."""
+ try:
+ type_sig = get_type_comment(obj, bound_method)
+ if type_sig:
+ sig = inspect.signature(obj, bound_method)
+ for param in sig.parameters.values():
+ if param.name not in obj.__annotations__:
+ annotation = type_sig.parameters[param.name].annotation
+ if annotation is not Parameter.empty:
+ obj.__annotations__[param.name] = ast_unparse(annotation)
+
+ if 'return' not in obj.__annotations__:
+ obj.__annotations__['return'] = type_sig.return_annotation
+ except KeyError as exc:
+ logger.warning(
+ __('Failed to update signature for %r: parameter not found: %s'), obj, exc
+ )
+ except NotImplementedError as exc: # failed to ast.unparse()
+ logger.warning(__('Failed to parse type_comment for %r: %s'), obj, exc)
+
+
+def get_type_comment(obj: Any, bound_method: bool = False) -> Signature | None:
+ """Get type_comment'ed FunctionDef object from living object.
+
+ This tries to parse original code for living object and returns
+ Signature for given *obj*.
+ """
+ try:
+ source = getsource(obj)
+ if source.startswith((' ', r'\t')):
+ # subject is placed inside class or block. To read its docstring,
+ # this adds if-block before the declaration.
+ module = ast.parse('if True:\n' + source, type_comments=True)
+ subject = cast('ast.FunctionDef', module.body[0].body[0]) # type: ignore[attr-defined]
+ else:
+ module = ast.parse(source, type_comments=True)
+ subject = cast('ast.FunctionDef', module.body[0])
+
+ type_comment = getattr(subject, 'type_comment', None)
+ if type_comment:
+ function = ast.parse(type_comment, mode='func_type', type_comments=True)
+ return signature_from_ast(subject, bound_method, function) # type: ignore[arg-type]
+ else:
+ return None
+ except (OSError, TypeError): # failed to load source code
+ return None
+ except SyntaxError: # failed to parse type_comments
+ return None
def signature_from_ast(
@@ -98,71 +219,13 @@ def signature_from_ast(
return Signature(params)
-def get_type_comment(obj: Any, bound_method: bool = False) -> Signature | None:
- """Get type_comment'ed FunctionDef object from living object.
-
- This tries to parse original code for living object and returns
- Signature for given *obj*.
- """
- try:
- source = getsource(obj)
- if source.startswith((' ', r'\t')):
- # subject is placed inside class or block. To read its docstring,
- # this adds if-block before the declaration.
- module = ast.parse('if True:\n' + source, type_comments=True)
- subject = cast('ast.FunctionDef', module.body[0].body[0]) # type: ignore[attr-defined]
- else:
- module = ast.parse(source, type_comments=True)
- subject = cast('ast.FunctionDef', module.body[0])
-
- type_comment = getattr(subject, 'type_comment', None)
- if type_comment:
- function = ast.parse(type_comment, mode='func_type', type_comments=True)
- return signature_from_ast(subject, bound_method, function) # type: ignore[arg-type]
- else:
- return None
- except (OSError, TypeError): # failed to load source code
- return None
- except SyntaxError: # failed to parse type_comments
- return None
-
-
-def update_annotations_using_type_comments(
- app: Sphinx, obj: Any, bound_method: bool
-) -> None:
- """Update annotations info of *obj* using type_comments."""
- if not app.config.autodoc_use_type_comments:
- return
-
- try:
- type_sig = get_type_comment(obj, bound_method)
- if type_sig:
- sig = inspect.signature(obj, bound_method)
- for param in sig.parameters.values():
- if param.name not in obj.__annotations__:
- annotation = type_sig.parameters[param.name].annotation
- if annotation is not Parameter.empty:
- obj.__annotations__[param.name] = ast_unparse(annotation)
-
- if 'return' not in obj.__annotations__:
- obj.__annotations__['return'] = type_sig.return_annotation
- except KeyError as exc:
- logger.warning(
- __('Failed to update signature for %r: parameter not found: %s'), obj, exc
- )
- except NotImplementedError as exc: # failed to ast.unparse()
- logger.warning(__('Failed to parse type_comment for %r: %s'), obj, exc)
-
-
-def setup(app: Sphinx) -> ExtensionMetadata:
- app.add_config_value(
- 'autodoc_use_type_comments', True, 'env', types=frozenset({bool})
- )
- app.connect(
- 'autodoc-before-process-signature', update_annotations_using_type_comments
- )
-
- return {
- 'version': sphinx.__display_version__,
- 'parallel_read_safe': True,
- }
+def not_suppressed(argtypes: Sequence[ast.expr] = ()) -> bool:
+ """Check given *argtypes* is suppressed type_comment or not."""
+ if len(argtypes) == 0: # no argtypees
+ return False
+ if len(argtypes) == 1:
+ arg = argtypes[0]
+ if isinstance(arg, ast.Constant) and arg.value is ...: # suppressed
+ return False
+ # not suppressed
+ return True
diff --git a/sphinx/ext/autodoc/directive.py b/sphinx/ext/autodoc/directive.py
index 03d6383e0e1..d1d80f74b2b 100644
--- a/sphinx/ext/autodoc/directive.py
+++ b/sphinx/ext/autodoc/directive.py
@@ -1,58 +1,31 @@
from __future__ import annotations
-from collections.abc import Callable
from typing import TYPE_CHECKING
from docutils import nodes
from docutils.statemachine import StringList
-from docutils.utils import assemble_option_dict
-from sphinx.ext.autodoc import Options
+from sphinx.ext.autodoc._directive_options import (
+ _process_documenter_options,
+)
+from sphinx.ext.autodoc._generate import _generate_directives
+from sphinx.ext.autodoc._loader import _load_object_by_name
+from sphinx.ext.autodoc._shared import _AutodocAttrGetter, _AutodocConfig
from sphinx.util import logging
from sphinx.util.docutils import SphinxDirective, switch_source_input
from sphinx.util.parsing import nested_parse_to_nodes
if TYPE_CHECKING:
- from typing import Any
+ from collections.abc import Callable
from docutils.nodes import Node
from docutils.parsers.rst.states import RSTState
- from docutils.utils import Reporter
- from sphinx.config import Config
- from sphinx.environment import BuildEnvironment
- from sphinx.ext.autodoc import Documenter
logger = logging.getLogger(__name__)
-# common option names for autodoc directives
-AUTODOC_DEFAULT_OPTIONS = [
- 'members',
- 'undoc-members',
- 'no-index',
- 'no-index-entry',
- 'inherited-members',
- 'show-inheritance',
- 'private-members',
- 'special-members',
- 'ignore-module-all',
- 'exclude-members',
- 'member-order',
- 'imported-members',
- 'class-doc-from',
- 'no-value',
-]
-
-AUTODOC_EXTENDABLE_OPTIONS = frozenset({
- 'members',
- 'private-members',
- 'special-members',
- 'exclude-members',
-})
-
-
-class DummyOptionSpec(dict[str, Callable[[str], str]]): # NoQA: FURB189
+class DummyOptionSpec(dict[str, 'Callable[[str], str]']): # NoQA: FURB189
"""An option_spec allows any options."""
def __bool__(self) -> bool:
@@ -63,58 +36,12 @@ def __getitem__(self, _key: str) -> Callable[[str], str]:
return lambda x: x
-class DocumenterBridge:
- """A parameters container for Documenters."""
-
- def __init__(
- self,
- env: BuildEnvironment,
- reporter: Reporter | None,
- options: Options,
- lineno: int,
- state: Any,
- ) -> None:
- self.env = env
- self._reporter = reporter
- self.genopt = options
- self.lineno = lineno
- self.record_dependencies: set[str] = set()
- self.result = StringList()
- self.state = state
-
-
-def process_documenter_options(
- documenter: type[Documenter], config: Config, options: dict[str, str]
-) -> Options:
- """Recognize options of Documenter from user input."""
- default_options = config.autodoc_default_options
- for name in AUTODOC_DEFAULT_OPTIONS:
- if name not in documenter.option_spec:
- continue
- negated = options.pop('no-' + name, True) is None
- if name in default_options and not negated:
- if name in options and isinstance(default_options[name], str):
- # take value from options if present or extend it
- # with autodoc_default_options if necessary
- if name in AUTODOC_EXTENDABLE_OPTIONS:
- if options[name] is not None and options[name].startswith('+'):
- options[name] = f'{default_options[name]},{options[name][1:]}'
- else:
- options[name] = default_options[name]
-
- elif options.get(name) is not None:
- # remove '+' from option argument if there's nothing to merge it with
- options[name] = options[name].lstrip('+')
-
- return Options(assemble_option_dict(options.items(), documenter.option_spec))
-
-
def parse_generated_content(
- state: RSTState, content: StringList, documenter: Documenter
+ state: RSTState, content: StringList, titles_allowed: bool
) -> list[Node]:
- """Parse an item of content generated by Documenter."""
+ """Parse an item of content generated by _generate_directives()."""
with switch_source_input(state, content):
- if documenter.titles_allowed:
+ if titles_allowed:
return nested_parse_to_nodes(state, content)
node = nodes.paragraph()
@@ -125,10 +52,10 @@ def parse_generated_content(
class AutodocDirective(SphinxDirective):
- """A directive class for all autodoc directives. It works as a dispatcher of Documenters.
+ """A directive class for all autodoc directives.
- It invokes a Documenter upon running. After the processing, it parses and returns
- the content generated by Documenter.
+ It generates the directive lines for the given object,
+ then parses and returns the generated content.
"""
option_spec = DummyOptionSpec()
@@ -148,14 +75,20 @@ def run(self) -> list[Node]:
source, lineno = (None, None)
logger.debug('[autodoc] %s:%s: input:\n%s', source, lineno, self.block_text)
- # look up target Documenter
+ registry = self.env._registry
+
+ # look up target object type
objtype = self.name[4:] # strip prefix (auto-).
- doccls = self.env._registry.documenters[objtype]
- # process the options with the selected documenter's option_spec
+ #: true if the generated content may contain titles
+ titles_allowed = True
+
+ # process the options with the selected object types's option_spec
try:
- documenter_options = process_documenter_options(
- doccls, self.config, self.options
+ documenter_options = _process_documenter_options(
+ obj_type=objtype, # type: ignore[arg-type]
+ default_options=self.config.autodoc_default_options,
+ options=self.options,
)
except (KeyError, ValueError, TypeError) as exc:
# an option is either unknown or has a wrong type
@@ -163,25 +96,59 @@ def run(self) -> list[Node]:
'An option to %s is either unknown or has an invalid value: %s',
self.name,
exc,
- location=(self.env.docname, lineno),
+ location=(self.env.current_document.docname, lineno),
)
return []
+ documenter_options._tab_width = self.state.document.settings.tab_width
# generate the output
- params = DocumenterBridge(
- self.env, reporter, documenter_options, lineno, self.state
+ get_attr = _AutodocAttrGetter(registry.autodoc_attrgetters)
+ name = self.arguments[0]
+ env = self.env
+ config = _AutodocConfig.from_config(env.config)
+ current_document = env.current_document
+ events = env.events
+ ref_context = env.ref_context
+ reread_always = env.reread_always
+
+ props = _load_object_by_name(
+ name=name,
+ objtype=objtype, # type: ignore[arg-type]
+ current_document=current_document,
+ config=config,
+ events=events,
+ get_attr=get_attr,
+ options=documenter_options,
+ ref_context=ref_context,
+ reread_always=reread_always,
+ )
+ if props is None:
+ return []
+
+ record_dependencies: set[str] = set()
+ result = StringList()
+ _generate_directives(
+ more_content=self.content,
+ config=config,
+ current_document=current_document,
+ events=events,
+ get_attr=get_attr,
+ indent='',
+ options=documenter_options,
+ props=props,
+ record_dependencies=record_dependencies,
+ ref_context=ref_context,
+ reread_always=reread_always,
+ result=result,
)
- documenter = doccls(params, self.arguments[0])
- documenter.generate(more_content=self.content)
- if not params.result:
+ if not result:
return []
- logger.debug('[autodoc] output:\n%s', '\n'.join(params.result))
+ logger.debug('[autodoc] output:\n%s', '\n'.join(result))
# record all filenames as dependencies -- this will at least
# partially make automatic invalidation possible
- for fn in params.record_dependencies:
+ for fn in record_dependencies:
self.state.document.settings.record_dependencies.add(fn)
- result = parse_generated_content(self.state, params.result, documenter)
- return result
+ return parse_generated_content(self.state, result, titles_allowed)
diff --git a/sphinx/ext/autodoc/importer.py b/sphinx/ext/autodoc/importer.py
deleted file mode 100644
index ea5b47e41e6..00000000000
--- a/sphinx/ext/autodoc/importer.py
+++ /dev/null
@@ -1,523 +0,0 @@
-"""Importer utilities for autodoc"""
-
-from __future__ import annotations
-
-import contextlib
-import importlib
-import os
-import sys
-import traceback
-import typing
-from enum import Enum
-from importlib.abc import FileLoader
-from importlib.machinery import EXTENSION_SUFFIXES
-from importlib.util import decode_source, find_spec, module_from_spec, spec_from_loader
-from pathlib import Path
-from typing import TYPE_CHECKING, NamedTuple
-
-from sphinx.errors import PycodeError
-from sphinx.ext.autodoc.mock import ismock, undecorate
-from sphinx.pycode import ModuleAnalyzer
-from sphinx.util import logging
-from sphinx.util.inspect import (
- getannotations,
- getmro,
- getslots,
- isclass,
- isenumclass,
- safe_getattr,
- unwrap_all,
-)
-
-if TYPE_CHECKING:
- from collections.abc import Iterator, Mapping
- from importlib.machinery import ModuleSpec
- from types import ModuleType
- from typing import Any, Protocol
-
- from sphinx.ext.autodoc import ObjectMember
-
- class _AttrGetter(Protocol):
- def __call__(self, obj: Any, name: str, default: Any = ..., /) -> Any: ...
-
-
-_NATIVE_SUFFIXES: frozenset[str] = frozenset({'.pyx', *EXTENSION_SUFFIXES})
-logger = logging.getLogger(__name__)
-
-
-def _filter_enum_dict(
- enum_class: type[Enum],
- attrgetter: _AttrGetter,
- enum_class_dict: Mapping[str, object],
-) -> Iterator[tuple[str, type, Any]]:
- """Find the attributes to document of an enumeration class.
-
- The output consists of triplets ``(attribute name, defining class, value)``
- where the attribute name can appear more than once during the iteration
- but with different defining class. The order of occurrence is guided by
- the MRO of *enum_class*.
- """
- # attributes that were found on a mixin type or the data type
- candidate_in_mro: set[str] = set()
- # sunder names that were picked up (and thereby allowed to be redefined)
- # see: https://docs.python.org/3/howto/enum.html#supported-dunder-names
- sunder_names = {
- '_name_',
- '_value_',
- '_missing_',
- '_order_',
- '_generate_next_value_',
- }
- # attributes that can be picked up on a mixin type or the enum's data type
- public_names = {'name', 'value', *object.__dict__, *sunder_names}
- # names that are ignored by default
- ignore_names = Enum.__dict__.keys() - public_names
-
- def should_ignore(name: str, value: Any) -> bool:
- if name in sunder_names:
- return _is_native_enum_api(value, name)
- return name in ignore_names
-
- sentinel = object()
-
- def query(name: str, defining_class: type) -> tuple[str, type, Any] | None:
- value = attrgetter(enum_class, name, sentinel)
- if value is not sentinel:
- return name, defining_class, value
- return None
-
- # attributes defined on a parent type, possibly shadowed later by
- # the attributes defined directly inside the enumeration class
- for parent in enum_class.__mro__:
- if parent in {enum_class, Enum, object}:
- continue
-
- parent_dict = attrgetter(parent, '__dict__', {})
- for name, value in parent_dict.items():
- if should_ignore(name, value):
- continue
-
- candidate_in_mro.add(name)
- if (item := query(name, parent)) is not None:
- yield item
-
- # exclude members coming from the native Enum unless
- # they were redefined on a mixin type or the data type
- excluded_members = Enum.__dict__.keys() - candidate_in_mro
- yield from filter(
- None,
- (
- query(name, enum_class)
- for name in enum_class_dict
- if name not in excluded_members
- ),
- )
-
- # check if allowed members from ``Enum`` were redefined at the enum level
- special_names = sunder_names | public_names
- special_names &= enum_class_dict.keys()
- special_names &= Enum.__dict__.keys()
- for name in special_names:
- if (
- not _is_native_enum_api(enum_class_dict[name], name)
- and (item := query(name, enum_class)) is not None
- ):
- yield item
-
-
-def _is_native_enum_api(obj: object, name: str) -> bool:
- """Check whether *obj* is the same as ``Enum.__dict__[name]``."""
- return unwrap_all(obj) is unwrap_all(Enum.__dict__[name])
-
-
-def mangle(subject: Any, name: str) -> str:
- """Mangle the given name."""
- try:
- if isclass(subject) and name.startswith('__') and not name.endswith('__'):
- return f'_{subject.__name__}{name}'
- except AttributeError:
- pass
-
- return name
-
-
-def unmangle(subject: Any, name: str) -> str | None:
- """Unmangle the given name."""
- try:
- if isclass(subject) and not name.endswith('__'):
- prefix = f'_{subject.__name__}__'
- if name.startswith(prefix):
- return name.replace(prefix, '__', 1)
- else:
- for cls in subject.__mro__:
- prefix = f'_{cls.__name__}__'
- if name.startswith(prefix):
- # mangled attribute defined in parent class
- return None
- except AttributeError:
- pass
-
- return name
-
-
-def import_module(modname: str, try_reload: bool = False) -> Any:
- if modname in sys.modules:
- return sys.modules[modname]
-
- original_module_names = frozenset(sys.modules)
- try:
- spec = find_spec(modname)
- if spec is None:
- msg = f'No module named {modname!r}'
- raise ModuleNotFoundError(msg, name=modname) # NoQA: TRY301
- spec, pyi_path = _find_type_stub_spec(spec, modname)
- if pyi_path is None:
- module = importlib.import_module(modname)
- else:
- if spec.loader is None:
- msg = 'missing loader'
- raise ImportError(msg, name=spec.name) # NoQA: TRY301
- sys.modules[modname] = module = module_from_spec(spec)
- spec.loader.exec_module(module)
- except ImportError:
- raise
- except BaseException as exc:
- # Importing modules may cause any side effects, including
- # SystemExit, so we need to catch all errors.
- raise ImportError(exc, traceback.format_exc()) from exc
- if try_reload and os.environ.get('SPHINX_AUTODOC_RELOAD_MODULES'):
- new_modules = [m for m in sys.modules if m not in original_module_names]
- # Try reloading modules with ``typing.TYPE_CHECKING == True``.
- try:
- typing.TYPE_CHECKING = True # type: ignore[misc]
- # Ignore failures; we've already successfully loaded these modules
- with contextlib.suppress(ImportError, KeyError):
- for m in new_modules:
- mod_path = getattr(sys.modules[m], '__file__', '')
- if mod_path and mod_path.endswith('.pyi'):
- continue
- _reload_module(sys.modules[m])
- finally:
- typing.TYPE_CHECKING = False # type: ignore[misc]
- module = sys.modules[modname]
- return module
-
-
-def _find_type_stub_spec(
- spec: ModuleSpec, modname: str
-) -> tuple[ModuleSpec, Path | None]:
- """Try finding a spec for a PEP 561 '.pyi' stub file for native modules."""
- if spec.origin is None:
- return spec, None
-
- for suffix in _NATIVE_SUFFIXES:
- if not spec.origin.endswith(suffix):
- continue
- pyi_path = Path(spec.origin.removesuffix(suffix) + '.pyi')
- if not pyi_path.is_file():
- continue
- pyi_loader = _StubFileLoader(modname, path=str(pyi_path))
- pyi_spec = spec_from_loader(modname, loader=pyi_loader)
- if pyi_spec is not None:
- return pyi_spec, pyi_path
- return spec, None
-
-
-class _StubFileLoader(FileLoader):
- """Load modules from ``.pyi`` stub files."""
-
- def get_source(self, fullname: str) -> str:
- path = self.get_filename(fullname)
- for suffix in _NATIVE_SUFFIXES:
- if not path.endswith(suffix):
- continue
- path = path.removesuffix(suffix) + '.pyi'
- try:
- source_bytes = self.get_data(path)
- except OSError as exc:
- raise ImportError from exc
- return decode_source(source_bytes)
-
-
-def _reload_module(module: ModuleType) -> Any:
- """Call importlib.reload(module), convert exceptions to ImportError"""
- try:
- return importlib.reload(module)
- except BaseException as exc:
- # Importing modules may cause any side effects, including
- # SystemExit, so we need to catch all errors.
- raise ImportError(exc, traceback.format_exc()) from exc
-
-
-def import_object(
- modname: str,
- objpath: list[str],
- objtype: str = '',
- attrgetter: _AttrGetter = safe_getattr,
-) -> Any:
- if objpath:
- logger.debug('[autodoc] from %s import %s', modname, '.'.join(objpath))
- else:
- logger.debug('[autodoc] import %s', modname)
-
- try:
- module = None
- exc_on_importing = None
- objpath = objpath.copy()
- while module is None:
- try:
- module = import_module(modname, try_reload=True)
- logger.debug('[autodoc] import %s => %r', modname, module)
- except ImportError as exc:
- logger.debug('[autodoc] import %s => failed', modname)
- exc_on_importing = exc
- if '.' in modname:
- # retry with parent module
- modname, name = modname.rsplit('.', 1)
- objpath.insert(0, name)
- else:
- raise
-
- obj = module
- parent = None
- object_name = None
- for attrname in objpath:
- parent = obj
- logger.debug('[autodoc] getattr(_, %r)', attrname)
- mangled_name = mangle(obj, attrname)
- obj = attrgetter(obj, mangled_name)
-
- try:
- logger.debug('[autodoc] => %r', obj)
- except TypeError:
- # fallback of failure on logging for broken object
- # See: https://github.com/sphinx-doc/sphinx/issues/9095
- logger.debug('[autodoc] => %r', (obj,))
-
- object_name = attrname
- return [module, parent, object_name, obj]
- except (AttributeError, ImportError) as exc:
- if isinstance(exc, AttributeError) and exc_on_importing:
- # restore ImportError
- exc = exc_on_importing
-
- if objpath:
- errmsg = 'autodoc: failed to import %s %r from module %r' % (
- objtype,
- '.'.join(objpath),
- modname,
- )
- else:
- errmsg = f'autodoc: failed to import {objtype} {modname!r}'
-
- if isinstance(exc, ImportError):
- # import_module() raises ImportError having real exception obj and
- # traceback
- real_exc = exc.args[0]
- traceback_msg = traceback.format_exception(exc)
- if isinstance(real_exc, SystemExit):
- errmsg += (
- '; the module executes module level statement '
- 'and it might call sys.exit().'
- )
- elif isinstance(real_exc, ImportError) and real_exc.args:
- errmsg += '; the following exception was raised:\n%s' % real_exc.args[0]
- else:
- errmsg += '; the following exception was raised:\n%s' % traceback_msg
- else:
- errmsg += (
- '; the following exception was raised:\n%s' % traceback.format_exc()
- )
-
- logger.debug(errmsg)
- raise ImportError(errmsg) from exc
-
-
-class Attribute(NamedTuple):
- name: str
- directly_defined: bool
- value: Any
-
-
-def get_object_members(
- subject: Any,
- objpath: list[str],
- attrgetter: _AttrGetter,
- analyzer: ModuleAnalyzer | None = None,
-) -> dict[str, Attribute]:
- """Get members and attributes of target object."""
- from sphinx.ext.autodoc import INSTANCEATTR
-
- # the members directly defined in the class
- obj_dict = attrgetter(subject, '__dict__', {})
-
- members: dict[str, Attribute] = {}
-
- # enum members
- if isenumclass(subject):
- for name, defining_class, value in _filter_enum_dict(
- subject, attrgetter, obj_dict
- ):
- # the order of occurrence of *name* matches the subject's MRO,
- # allowing inherited attributes to be shadowed correctly
- if unmangled := unmangle(defining_class, name):
- members[unmangled] = Attribute(
- name=unmangled,
- directly_defined=defining_class is subject,
- value=value,
- )
-
- # members in __slots__
- try:
- subject___slots__ = getslots(subject)
- if subject___slots__:
- from sphinx.ext.autodoc import SLOTSATTR
-
- for name in subject___slots__:
- members[name] = Attribute(
- name=name, directly_defined=True, value=SLOTSATTR
- )
- except (TypeError, ValueError):
- pass
-
- # other members
- for name in dir(subject):
- try:
- value = attrgetter(subject, name)
- directly_defined = name in obj_dict
- unmangled = unmangle(subject, name)
- if unmangled and unmangled not in members:
- members[unmangled] = Attribute(
- name=unmangled, directly_defined=directly_defined, value=value
- )
- except AttributeError:
- continue
-
- # annotation only member (ex. attr: int)
- for cls in getmro(subject):
- for name in getannotations(cls):
- unmangled = unmangle(cls, name)
- if unmangled and unmangled not in members:
- members[unmangled] = Attribute(
- name=unmangled, directly_defined=cls is subject, value=INSTANCEATTR
- )
-
- if analyzer:
- # append instance attributes (cf. self.attr1) if analyzer knows
- namespace = '.'.join(objpath)
- for ns, name in analyzer.find_attr_docs():
- if namespace == ns and name not in members:
- members[name] = Attribute(
- name=name, directly_defined=True, value=INSTANCEATTR
- )
-
- return members
-
-
-def get_class_members(
- subject: Any, objpath: Any, attrgetter: _AttrGetter, inherit_docstrings: bool = True
-) -> dict[str, ObjectMember]:
- """Get members and attributes of target class."""
- from sphinx.ext.autodoc import INSTANCEATTR, ObjectMember
-
- # the members directly defined in the class
- obj_dict = attrgetter(subject, '__dict__', {})
-
- members: dict[str, ObjectMember] = {}
-
- # enum members
- if isenumclass(subject):
- for name, defining_class, value in _filter_enum_dict(
- subject, attrgetter, obj_dict
- ):
- # the order of occurrence of *name* matches the subject's MRO,
- # allowing inherited attributes to be shadowed correctly
- if unmangled := unmangle(defining_class, name):
- members[unmangled] = ObjectMember(
- unmangled, value, class_=defining_class
- )
-
- # members in __slots__
- try:
- subject___slots__ = getslots(subject)
- if subject___slots__:
- from sphinx.ext.autodoc import SLOTSATTR
-
- for name, docstring in subject___slots__.items():
- members[name] = ObjectMember(
- name, SLOTSATTR, class_=subject, docstring=docstring
- )
- except (TypeError, ValueError):
- pass
-
- # other members
- for name in dir(subject):
- try:
- value = attrgetter(subject, name)
- if ismock(value):
- value = undecorate(value)
-
- unmangled = unmangle(subject, name)
- if unmangled and unmangled not in members:
- if name in obj_dict:
- members[unmangled] = ObjectMember(unmangled, value, class_=subject)
- else:
- members[unmangled] = ObjectMember(unmangled, value)
- except AttributeError:
- continue
-
- try:
- for cls in getmro(subject):
- try:
- modname = safe_getattr(cls, '__module__')
- qualname = safe_getattr(cls, '__qualname__')
- analyzer = ModuleAnalyzer.for_module(modname)
- analyzer.analyze()
- except AttributeError:
- qualname = None
- analyzer = None
- except PycodeError:
- analyzer = None
-
- # annotation only member (ex. attr: int)
- for name in getannotations(cls):
- unmangled = unmangle(cls, name)
- if unmangled and unmangled not in members:
- if analyzer and (qualname, unmangled) in analyzer.attr_docs:
- docstring = '\n'.join(analyzer.attr_docs[qualname, unmangled])
- else:
- docstring = None
-
- members[unmangled] = ObjectMember(
- unmangled, INSTANCEATTR, class_=cls, docstring=docstring
- )
-
- # append or complete instance attributes (cf. self.attr1) if analyzer knows
- if analyzer:
- for (ns, name), docstring in analyzer.attr_docs.items():
- if ns == qualname and name not in members:
- # otherwise unknown instance attribute
- members[name] = ObjectMember(
- name,
- INSTANCEATTR,
- class_=cls,
- docstring='\n'.join(docstring),
- )
- elif (
- ns == qualname
- and docstring
- and isinstance(members[name], ObjectMember)
- and not members[name].docstring
- ):
- if cls != subject and not inherit_docstrings:
- # If we are in the MRO of the class and not the class itself,
- # and we do not want to inherit docstrings, then skip setting
- # the docstring below
- continue
- # attribute is already known, because dir(subject) enumerates it.
- # But it has no docstring yet
- members[name].docstring = '\n'.join(docstring)
- except AttributeError:
- pass
-
- return members
diff --git a/sphinx/ext/autodoc/mock.py b/sphinx/ext/autodoc/mock.py
index 236174a62f9..11c30bcb88b 100644
--- a/sphinx/ext/autodoc/mock.py
+++ b/sphinx/ext/autodoc/mock.py
@@ -129,7 +129,7 @@ def exec_module(self, module: ModuleType) -> None:
class MockFinder(MetaPathFinder):
"""A finder for mocking."""
- def __init__(self, modnames: list[str]) -> None:
+ def __init__(self, modnames: Sequence[str]) -> None:
super().__init__()
self.modnames = modnames
self.loader = MockLoader(self)
@@ -155,7 +155,7 @@ def invalidate_caches(self) -> None:
@contextlib.contextmanager
-def mock(modnames: list[str]) -> Iterator[None]:
+def mock(modnames: Sequence[str]) -> Iterator[None]:
"""Insert mock modules during context::
with mock(['target.module.name']):
diff --git a/sphinx/ext/autodoc/preserve_defaults.py b/sphinx/ext/autodoc/preserve_defaults.py
index 44fcf215c1c..7cf61f5db56 100644
--- a/sphinx/ext/autodoc/preserve_defaults.py
+++ b/sphinx/ext/autodoc/preserve_defaults.py
@@ -12,7 +12,6 @@
import warnings
from typing import TYPE_CHECKING
-import sphinx
from sphinx.deprecation import RemovedInSphinx90Warning
from sphinx.locale import __
from sphinx.pycode.ast import unparse as ast_unparse
@@ -21,8 +20,6 @@
if TYPE_CHECKING:
from typing import Any
- from sphinx.application import Sphinx
- from sphinx.util.typing import ExtensionMetadata
logger = logging.getLogger(__name__)
_LAMBDA_NAME = (lambda: None).__name__
@@ -106,9 +103,9 @@ def _is_lambda(x: Any, /) -> bool:
def _get_arguments_inner(x: Any, /) -> ast.arguments | None:
- if isinstance(x, ast.AsyncFunctionDef | ast.FunctionDef | ast.Lambda):
+ if isinstance(x, (ast.AsyncFunctionDef, ast.FunctionDef, ast.Lambda)):
return x.args
- if isinstance(x, ast.Assign | ast.AnnAssign):
+ if isinstance(x, (ast.Assign, ast.AnnAssign)):
return _get_arguments_inner(x.value)
return None
@@ -125,11 +122,8 @@ def get_default_value(lines: list[str], position: ast.expr) -> str | None:
return None
-def update_defvalue(app: Sphinx, obj: Any, bound_method: bool) -> None:
- """Update defvalue info of *obj* using type_comments."""
- if not app.config.autodoc_preserve_defaults:
- return
-
+def update_default_value(obj: Any, bound_method: bool) -> None:
+ """Update default value info of *obj* using type_comments."""
try:
lines = inspect.getsource(obj).splitlines()
if lines[0].startswith((' ', '\t')):
@@ -194,15 +188,3 @@ def update_defvalue(app: Sphinx, obj: Any, bound_method: bool) -> None:
logger.warning(
__('Failed to parse a default argument value for %r: %s'), obj, exc
)
-
-
-def setup(app: Sphinx) -> ExtensionMetadata:
- app.add_config_value(
- 'autodoc_preserve_defaults', False, 'env', types=frozenset({bool})
- )
- app.connect('autodoc-before-process-signature', update_defvalue)
-
- return {
- 'version': sphinx.__display_version__,
- 'parallel_read_safe': True,
- }
diff --git a/sphinx/ext/autodoc/typehints.py b/sphinx/ext/autodoc/typehints.py
index 63403772137..2b3c3e48e3c 100644
--- a/sphinx/ext/autodoc/typehints.py
+++ b/sphinx/ext/autodoc/typehints.py
@@ -7,45 +7,41 @@
from docutils import nodes
-import sphinx
from sphinx import addnodes
from sphinx.util import inspect
from sphinx.util.typing import stringify_annotation
if TYPE_CHECKING:
- from collections.abc import Iterable
+ from collections.abc import Iterable, Mapping
from typing import Any
from docutils.nodes import Element
from sphinx.application import Sphinx
- from sphinx.ext.autodoc import Options
- from sphinx.util.typing import ExtensionMetadata, _StringifyMode
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+ from sphinx.util.typing import _StringifyMode
-def record_typehints(
- app: Sphinx,
- objtype: str,
+def _record_typehints(
+ *,
+ autodoc_annotations: dict[str, dict[str, str]],
name: str,
obj: Any,
- options: Options,
- args: str,
- retann: str,
+ short_literals: bool,
+ type_aliases: Mapping[str, str] | None,
+ unqualified_typehints: bool,
) -> None:
"""Record type hints to env object."""
mode: _StringifyMode
- if app.config.autodoc_typehints_format == 'short':
+ if unqualified_typehints:
mode = 'smart'
else:
mode = 'fully-qualified'
- short_literals = app.config.python_display_short_literal_types
-
try:
if callable(obj):
- current_document = app.env.current_document
- annotation = current_document.autodoc_annotations.setdefault(name, {})
- sig = inspect.signature(obj, type_aliases=app.config.autodoc_type_aliases)
+ annotation = autodoc_annotations.setdefault(name, {})
+ sig = inspect.signature(obj, type_aliases=type_aliases)
for param in sig.parameters.values():
if param.annotation is not param.empty:
annotation[param.name] = stringify_annotation(
@@ -59,8 +55,8 @@ def record_typehints(
pass
-def merge_typehints(
- app: Sphinx, domain: str, objtype: str, contentnode: Element
+def _merge_typehints(
+ app: Sphinx, domain: str, obj_type: _AutodocObjType, contentnode: Element
) -> None:
if domain != 'py':
return
@@ -86,7 +82,7 @@ def merge_typehints(
for field_list in field_lists:
if app.config.autodoc_typehints_description_target == 'all':
- if objtype == 'class':
+ if obj_type == 'class':
modify_field_list(
field_list, annotations[fullname], suppress_rtype=True
)
@@ -230,14 +226,3 @@ def augment_descriptions_with_types(
field += nodes.field_name('', 'rtype')
field += nodes.field_body('', nodes.paragraph('', rtype))
node += field
-
-
-def setup(app: Sphinx) -> ExtensionMetadata:
- app.connect('autodoc-process-signature', record_typehints)
- app.connect('object-description-transform', merge_typehints)
-
- return {
- 'version': sphinx.__display_version__,
- 'parallel_read_safe': True,
- 'parallel_write_safe': True,
- }
diff --git a/sphinx/ext/autosectionlabel.py b/sphinx/ext/autosectionlabel.py
index b1eaa0ceac9..7c5304ad83d 100644
--- a/sphinx/ext/autosectionlabel.py
+++ b/sphinx/ext/autosectionlabel.py
@@ -39,7 +39,7 @@ def register_sections_as_label(app: Sphinx, document: Node) -> None:
):
continue
labelid = node['ids'][0]
- docname = app.env.docname
+ docname = app.env.current_document.docname
title = cast('nodes.title', node[0])
ref_name = getattr(title, 'rawsource', title.astext())
if app.config.autosectionlabel_prefix_document:
diff --git a/sphinx/ext/autosummary/__init__.py b/sphinx/ext/autosummary/__init__.py
index 97c64a37cd1..dedc203d919 100644
--- a/sphinx/ext/autosummary/__init__.py
+++ b/sphinx/ext/autosummary/__init__.py
@@ -55,28 +55,26 @@
import re
import sys
from inspect import Parameter
-from pathlib import Path
from types import ModuleType
from typing import TYPE_CHECKING, cast
from docutils import nodes
from docutils.parsers.rst import directives
-from docutils.parsers.rst.states import RSTStateMachine, Struct, state_classes
+from docutils.parsers.rst.states import RSTStateMachine, state_classes
from docutils.statemachine import StringList
import sphinx
from sphinx import addnodes
-from sphinx.config import Config
-from sphinx.environment import BuildEnvironment
from sphinx.errors import PycodeError
-from sphinx.ext.autodoc import INSTANCEATTR, Options
-from sphinx.ext.autodoc.directive import DocumenterBridge
-from sphinx.ext.autodoc.importer import import_module
+from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+from sphinx.ext.autodoc._importer import _import_module
+from sphinx.ext.autodoc._loader import _load_object_by_name
+from sphinx.ext.autodoc._member_finder import _best_object_type_for_member
+from sphinx.ext.autodoc._sentinels import INSTANCE_ATTR
+from sphinx.ext.autodoc._shared import _AutodocAttrGetter, _AutodocConfig
from sphinx.ext.autodoc.mock import mock
from sphinx.locale import __
-from sphinx.project import Project
from sphinx.pycode import ModuleAnalyzer
-from sphinx.registry import SphinxComponentRegistry
from sphinx.util import logging, rst
from sphinx.util.docutils import (
NullReporter,
@@ -96,8 +94,8 @@
from docutils.nodes import Node, system_message
from sphinx.application import Sphinx
- from sphinx.ext.autodoc import Documenter
- from sphinx.extension import Extension
+ from sphinx.environment import BuildEnvironment
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
from sphinx.util.typing import ExtensionMetadata, OptionSpec
from sphinx.writers.html5 import HTML5Translator
@@ -107,7 +105,7 @@
periods_re = re.compile(r'\.(?:\s+)')
literal_re = re.compile(r'::\s*$')
-WELL_KNOWN_ABBREVIATIONS = ('et al.', 'e.g.', 'i.e.')
+WELL_KNOWN_ABBREVIATIONS = ('et al.', 'e.g.', 'i.e.', 'vs.')
# -- autosummary_toc node ------------------------------------------------------
@@ -156,79 +154,36 @@ def autosummary_table_visit_html(
# -- autodoc integration -------------------------------------------------------
-class FakeApplication:
- verbosity = 0
+def _get_documenter(obj: Any, parent: Any) -> _AutodocObjType:
+ """Get the best object type suitable for documenting the given object.
- def __init__(self) -> None:
- self.doctreedir = Path()
- self.events = None
- self.extensions: dict[str, Extension] = {}
- self.srcdir = Path()
- self.config = Config()
- self.project = Project('', {})
- self.registry = SphinxComponentRegistry()
-
-
-class FakeDirective(DocumenterBridge):
- def __init__(self) -> None:
- settings = Struct(tab_width=8)
- document = Struct(settings=settings)
- app = FakeApplication()
- app.config.add('autodoc_class_signature', 'mixed', 'env', ())
- env = BuildEnvironment(app) # type: ignore[arg-type]
- state = Struct(document=document)
- super().__init__(env, None, Options(), 0, state)
-
-
-def get_documenter(app: Sphinx, obj: Any, parent: Any) -> type[Documenter]:
- """Get an autodoc.Documenter class suitable for documenting the given
- object.
-
- *obj* is the Python object to be documented, and *parent* is an
- another Python object (e.g. a module or a class) to which *obj*
- belongs to.
+ *obj* is the Python object to be documented, and *parent* is another
+ Python object (e.g. a module or a class) to which *obj* belongs.
"""
- return _get_documenter(obj, parent, registry=app.registry)
-
-
-def _get_documenter(
- obj: Any, parent: Any, *, registry: SphinxComponentRegistry
-) -> type[Documenter]:
- """Get an autodoc.Documenter class suitable for documenting the given
- object.
-
- *obj* is the Python object to be documented, and *parent* is an
- another Python object (e.g. a module or a class) to which *obj*
- belongs to.
- """
- from sphinx.ext.autodoc import DataDocumenter, ModuleDocumenter
-
if inspect.ismodule(obj):
- # ModuleDocumenter.can_document_member always returns False
- return ModuleDocumenter
+ return 'module'
- # Construct a fake documenter for *parent*
- if parent is not None:
- parent_doc_cls = _get_documenter(parent, None, registry=registry)
+ if parent is None or inspect.ismodule(parent):
+ parent_obj_type = 'module'
else:
- parent_doc_cls = ModuleDocumenter
+ parent_opt = _best_object_type_for_member(
+ member=parent,
+ member_name='',
+ is_attr=False,
+ parent_obj_type='module',
+ parent_props=None,
+ )
+ parent_obj_type = parent_opt if parent_opt is not None else 'data'
- if hasattr(parent, '__name__'):
- parent_doc = parent_doc_cls(FakeDirective(), parent.__name__)
- else:
- parent_doc = parent_doc_cls(FakeDirective(), '')
-
- # Get the correct documenter class for *obj*
- classes = [
- cls
- for cls in registry.documenters.values()
- if cls.can_document_member(obj, '', False, parent_doc)
- ]
- if classes:
- classes.sort(key=lambda cls: cls.priority)
- return classes[-1]
- else:
- return DataDocumenter
+ if obj_type := _best_object_type_for_member(
+ member=obj,
+ member_name='',
+ is_attr=False,
+ parent_obj_type=parent_obj_type,
+ parent_props=None,
+ ):
+ return obj_type
+ return 'data'
# -- .. autosummary:: ----------------------------------------------------------
@@ -255,10 +210,6 @@ class Autosummary(SphinxDirective):
}
def run(self) -> list[Node]:
- self.bridge = DocumenterBridge(
- self.env, self.state.document.reporter, Options(), self.lineno, self.state
- )
-
names = [
x.strip().split()[0]
for x in self.content
@@ -268,7 +219,7 @@ def run(self) -> list[Node]:
nodes = self.get_table(items)
if 'toctree' in self.options:
- dirname = posixpath.dirname(self.env.docname)
+ dirname = posixpath.dirname(self.env.current_document.docname)
tree_prefix = self.options['toctree'].strip()
docnames = []
@@ -330,22 +281,6 @@ def import_by_name(
raise ImportExceptionGroup(exc.args[0], errors) from None
- def create_documenter(
- self,
- obj: Any,
- parent: Any,
- full_name: str,
- *,
- registry: SphinxComponentRegistry,
- ) -> Documenter:
- """Get an autodoc.Documenter class suitable for documenting the given
- object.
-
- Wraps _get_documenter and is meant as a hook for extensions.
- """
- doccls = _get_documenter(obj, parent, registry=registry)
- return doccls(self.bridge, full_name)
-
def get_items(self, names: list[str]) -> list[tuple[str, str | None, str, str]]:
"""Try to import the given names, and return a list of
``[(name, signature, summary_string, real_name), ...]``.
@@ -366,6 +301,16 @@ def get_items(self, names: list[str]) -> list[tuple[str, str | None, str, str]]:
)
raise ValueError(msg)
+ document_settings = self.state.document.settings
+ env = self.env
+ config = _AutodocConfig.from_config(env.config)
+ current_document = env.current_document
+ events = env.events
+ get_attr = _AutodocAttrGetter(env._registry.autodoc_attrgetters)
+ opts = _AutoDocumenterOptions()
+ ref_context = env.ref_context
+ reread_always = env.reread_always
+
max_item_chars = 50
for name in names:
@@ -388,26 +333,27 @@ def get_items(self, names: list[str]) -> list[tuple[str, str | None, str, str]]:
)
continue
- self.bridge.result = StringList() # initialize for each documenter
- full_name = real_name
- if not isinstance(obj, ModuleType):
+ obj_type = _get_documenter(obj, parent)
+ if isinstance(obj, ModuleType):
+ full_name = real_name
+ else:
# give explicitly separated module name, so that members
# of inner classes can be documented
- full_name = modname + '::' + full_name[len(modname) + 1 :]
+ full_name = f'{modname}::{real_name[len(modname) + 1 :]}'
# NB. using full_name here is important, since Documenters
# handle module prefixes slightly differently
- documenter = self.create_documenter(
- obj, parent, full_name, registry=self.env._registry
+ props = _load_object_by_name(
+ name=full_name,
+ objtype=obj_type,
+ current_document=current_document,
+ config=config,
+ events=events,
+ get_attr=get_attr,
+ options=opts,
+ ref_context=ref_context,
+ reread_always=reread_always,
)
- if not documenter.parse_name():
- logger.warning(
- __('failed to parse name %s'),
- real_name,
- location=self.get_location(),
- )
- items.append((display_name, '', '', real_name))
- continue
- if not documenter.import_object():
+ if props is None:
logger.warning(
__('failed to import object %s'),
real_name,
@@ -416,45 +362,22 @@ def get_items(self, names: list[str]) -> list[tuple[str, str | None, str, str]]:
items.append((display_name, '', '', real_name))
continue
- # try to also get a source code analyzer for attribute docs
- try:
- documenter.analyzer = ModuleAnalyzer.for_module(
- documenter.get_real_modname()
- )
- # parse right now, to get PycodeErrors on parsing (results will
- # be cached anyway)
- documenter.analyzer.find_attr_docs()
- except PycodeError as err:
- logger.debug('[autodoc] module analyzer failed: %s', err)
- # no source file -- e.g. for builtin and C modules
- documenter.analyzer = None
-
# -- Grab the signature
if signatures_option == 'none':
sig = None
- else:
- try:
- sig = documenter.format_signature(show_annotation=False)
- except TypeError:
- # the documenter does not support ``show_annotation`` option
- sig = documenter.format_signature()
- if not sig:
- sig = ''
- elif signatures_option == 'short':
- if sig != '()':
- sig = '(…)'
- else: # signatures_option == 'long'
- max_chars = max(10, max_item_chars - len(display_name))
- sig = mangle_signature(sig, max_chars=max_chars)
+ elif not props.signatures:
+ sig = ''
+ elif signatures_option == 'short':
+ sig = '()' if props.signatures == ('()',) else '(…)'
+ else: # signatures_option == 'long'
+ max_chars = max(10, max_item_chars - len(display_name))
+ sig = mangle_signature('\n'.join(props.signatures), max_chars=max_chars)
# -- Grab the summary
- # bodge for ModuleDocumenter
- documenter._extra_indent = '' # type: ignore[attr-defined]
-
- documenter.add_content(None)
- summary = extract_summary(self.bridge.result.data[:], self.state.document)
+ # get content from docstrings or attribute documentation
+ summary = extract_summary(props.docstring_lines, document_settings)
items.append((display_name, sig, summary, real_name))
@@ -511,7 +434,7 @@ def append_row(*column_texts: str) -> None:
def strip_arg_typehint(s: str) -> str:
"""Strip a type hint from argument definition."""
- return s.split(':')[0].strip()
+ return s.partition(':')[0].strip()
def _cleanup_signature(s: str) -> str:
@@ -596,43 +519,39 @@ def mangle_signature(sig: str, max_chars: int = 30) -> str:
return '(%s)' % sig
-def extract_summary(doc: list[str], document: Any) -> str:
+def extract_summary(doc: Sequence[str], settings: Any) -> str:
"""Extract summary from docstring."""
+ # Find the first stanza (heading, sentence, paragraph, etc.).
+ # If there's a blank line, then we can assume that the stanza has ended,
+ # so anything after shouldn't be part of the summary.
+ first_stanza = []
+ content_started = False
+ for line in doc:
+ is_blank_line = not line or line.isspace()
+ if not content_started:
+ # Skip any blank lines at the start
+ if is_blank_line:
+ continue
+ content_started = True
+ if content_started:
+ if is_blank_line:
+ break
+ first_stanza.append(line)
- def parse(doc: list[str], settings: Any) -> nodes.document:
- state_machine = RSTStateMachine(state_classes, 'Body')
- node = new_document('', settings)
- node.reporter = NullReporter()
- state_machine.run(doc, node)
-
- return node
-
- # Skip a blank lines at the top
- while doc and not doc[0].strip():
- doc.pop(0)
-
- # If there's a blank line, then we can assume the first sentence /
- # paragraph has ended, so anything after shouldn't be part of the
- # summary
- for i, piece in enumerate(doc):
- if not piece.strip():
- doc = doc[:i]
- break
-
- if doc == []:
+ if not first_stanza:
return ''
# parse the docstring
- node = parse(doc, document.settings)
+ node = _parse_summary(first_stanza, settings)
if isinstance(node[0], nodes.section):
# document starts with a section heading, so use that.
summary = node[0].astext().strip()
elif not isinstance(node[0], nodes.paragraph):
# document starts with non-paragraph: pick up the first line
- summary = doc[0].strip()
+ summary = first_stanza[0].strip()
else:
# Try to find the "first sentence", which may span multiple lines
- sentences = periods_re.split(' '.join(doc))
+ sentences = periods_re.split(' '.join(first_stanza))
if len(sentences) == 1:
summary = sentences[0].strip()
else:
@@ -640,7 +559,7 @@ def parse(doc: list[str], settings: Any) -> nodes.document:
for i in range(len(sentences)):
summary = '. '.join(sentences[: i + 1]).rstrip('.') + '.'
node[:] = []
- node = parse(doc, document.settings)
+ node = _parse_summary(first_stanza, settings)
if summary.endswith(WELL_KNOWN_ABBREVIATIONS):
pass
elif not any(node.findall(nodes.system_message)):
@@ -653,6 +572,15 @@ def parse(doc: list[str], settings: Any) -> nodes.document:
return summary
+def _parse_summary(doc: Sequence[str], settings: Any) -> nodes.document:
+ state_machine = RSTStateMachine(state_classes, 'Body')
+ node = new_document('', settings)
+ node.reporter = NullReporter()
+ state_machine.run(doc, node)
+
+ return node
+
+
def limited_join(
sep: str, items: list[str], max_chars: int = 30, overflow_marker: str = '...'
) -> str:
@@ -770,7 +698,7 @@ def _import_by_name(name: str, grouped_exception: bool = True) -> tuple[Any, Any
modname = '.'.join(name_parts[:-1])
if modname:
try:
- mod = import_module(modname)
+ mod = _import_module(modname)
return getattr(mod, name_parts[-1]), mod, modname
except (ImportError, IndexError, AttributeError) as exc:
errors.append(exc.__cause__ or exc)
@@ -782,7 +710,7 @@ def _import_by_name(name: str, grouped_exception: bool = True) -> tuple[Any, Any
last_j = j
modname = '.'.join(name_parts[:j])
try:
- import_module(modname)
+ _import_module(modname)
except ImportError as exc:
errors.append(exc.__cause__ or exc)
@@ -814,7 +742,7 @@ def import_ivar_by_name(
"""
try:
name, attr = name.rsplit('.', 1)
- real_name, obj, parent, modname = import_by_name(name, prefixes)
+ real_name, obj, _parent, modname = import_by_name(name, prefixes)
# Get ancestors of the object (class.__mro__ includes the class itself as
# the first entry)
@@ -832,7 +760,7 @@ def import_ivar_by_name(
found_attrs |= {attr for (qualname, attr) in analyzer.attr_docs}
found_attrs |= {attr for (qualname, attr) in analyzer.annotations}
if attr in found_attrs:
- return f'{real_name}.{attr}', INSTANCEATTR, obj, modname
+ return f'{real_name}.{attr}', INSTANCE_ATTR, obj, modname
except (ImportError, ValueError, PycodeError) as exc:
raise ImportError from exc
except ImportExceptionGroup:
diff --git a/sphinx/ext/autosummary/generate.py b/sphinx/ext/autosummary/generate.py
index d865c0de2af..1e8055bf73e 100644
--- a/sphinx/ext/autosummary/generate.py
+++ b/sphinx/ext/autosummary/generate.py
@@ -33,7 +33,10 @@
from sphinx.builders import Builder
from sphinx.config import Config
from sphinx.errors import PycodeError
-from sphinx.ext.autodoc.importer import import_module
+from sphinx.ext.autodoc._importer import _import_module
+from sphinx.ext.autodoc._member_finder import _filter_enum_dict, unmangle
+from sphinx.ext.autodoc._sentinels import INSTANCE_ATTR, SLOTS_ATTR
+from sphinx.ext.autodoc.mock import ismock, undecorate
from sphinx.ext.autosummary import (
ImportExceptionGroup,
_get_documenter,
@@ -45,7 +48,14 @@
from sphinx.registry import SphinxComponentRegistry
from sphinx.util import logging, rst
from sphinx.util._pathlib import _StrPath
-from sphinx.util.inspect import getall, safe_getattr
+from sphinx.util.inspect import (
+ getall,
+ getannotations,
+ getmro,
+ getslots,
+ isenumclass,
+ safe_getattr,
+)
from sphinx.util.osutil import ensuredir
from sphinx.util.template import SphinxTemplateLoader
@@ -57,7 +67,7 @@
from sphinx.application import Sphinx
from sphinx.events import EventManager
- from sphinx.ext.autodoc import Documenter
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
logger = logging.getLogger(__name__)
@@ -96,34 +106,6 @@ class AutosummaryEntry(NamedTuple):
recursive: bool
-def setup_documenters(app: Sphinx) -> None:
- from sphinx.ext.autodoc import (
- AttributeDocumenter,
- ClassDocumenter,
- DataDocumenter,
- DecoratorDocumenter,
- ExceptionDocumenter,
- FunctionDocumenter,
- MethodDocumenter,
- ModuleDocumenter,
- PropertyDocumenter,
- )
-
- documenters: list[type[Documenter]] = [
- ModuleDocumenter,
- ClassDocumenter,
- ExceptionDocumenter,
- DataDocumenter,
- FunctionDocumenter,
- MethodDocumenter,
- AttributeDocumenter,
- DecoratorDocumenter,
- PropertyDocumenter,
- ]
- for documenter in documenters:
- app.registry.add_documenter(documenter.objtype, documenter)
-
-
def _underline(title: str, line: str = '=') -> str:
if '\n' in title:
msg = 'Can only underline single lines'
@@ -211,20 +193,18 @@ def __init__(
*,
config: Config,
events: EventManager,
- registry: SphinxComponentRegistry,
) -> None:
self.config = config
self.events = events
- self.registry = registry
self.object = obj
def get_object_type(self, name: str, value: Any) -> str:
- return _get_documenter(value, self.object, registry=self.registry).objtype
+ return _get_documenter(value, self.object)
- def is_skipped(self, name: str, value: Any, objtype: str) -> bool:
+ def is_skipped(self, name: str, value: Any, obj_type: _AutodocObjType) -> bool:
try:
return self.events.emit_firstresult(
- 'autodoc-skip-member', objtype, name, value, False, {}
+ 'autodoc-skip-member', obj_type, name, value, False, {}
)
except Exception as exc:
logger.warning(
@@ -252,7 +232,7 @@ def scan(self, imported_members: bool) -> list[str]:
except AttributeError:
value = None
- objtype = self.get_object_type(name, value)
+ objtype = _get_documenter(value, self.object)
if self.is_skipped(name, value, objtype):
continue
@@ -311,15 +291,14 @@ def generate_autosummary_content(
*,
config: Config,
events: EventManager,
- registry: SphinxComponentRegistry,
) -> str:
- doc = _get_documenter(obj, parent, registry=registry)
+ obj_type = _get_documenter(obj, parent)
ns: dict[str, Any] = {}
ns.update(context)
- if doc.objtype == 'module':
- scanner = ModuleScanner(obj, config=config, events=events, registry=registry)
+ if obj_type == 'module':
+ scanner = ModuleScanner(obj, config=config, events=events)
ns['members'] = scanner.scan(imported_members)
respect_module_all = not config.autosummary_ignore_module_all
@@ -328,30 +307,27 @@ def generate_autosummary_content(
)
ns['functions'], ns['all_functions'] = _get_members(
- doc,
+ obj_type,
obj,
{'function'},
config=config,
events=events,
- registry=registry,
imported=imported_members,
)
ns['classes'], ns['all_classes'] = _get_members(
- doc,
+ obj_type,
obj,
{'class'},
config=config,
events=events,
- registry=registry,
imported=imported_members,
)
ns['exceptions'], ns['all_exceptions'] = _get_members(
- doc,
+ obj_type,
obj,
{'exception'},
config=config,
events=events,
- registry=registry,
imported=imported_members,
)
ns['attributes'], ns['all_attributes'] = _get_module_attrs(name, ns['members'])
@@ -374,12 +350,11 @@ def generate_autosummary_content(
# Otherwise, use get_modules method normally
if respect_module_all and '__all__' in dir(obj):
imported_modules, all_imported_modules = _get_members(
- doc,
+ obj_type,
obj,
{'module'},
config=config,
events=events,
- registry=registry,
imported=True,
)
skip += all_imported_modules
@@ -393,34 +368,32 @@ def generate_autosummary_content(
)
ns['modules'] = imported_modules + modules
ns['all_modules'] = all_imported_modules + all_modules
- elif doc.objtype == 'class':
+ elif obj_type == 'class':
ns['members'] = dir(obj)
ns['inherited_members'] = set(dir(obj)) - set(obj.__dict__.keys())
ns['methods'], ns['all_methods'] = _get_members(
- doc,
+ obj_type,
obj,
{'method'},
config=config,
events=events,
- registry=registry,
include_public={'__init__'},
)
ns['attributes'], ns['all_attributes'] = _get_members(
- doc,
+ obj_type,
obj,
{'attribute', 'property'},
config=config,
events=events,
- registry=registry,
)
if modname is None or qualname is None:
modname, qualname = _split_full_qualified_name(name)
- if doc.objtype in {'method', 'attribute', 'property'}:
+ if obj_type in {'method', 'attribute', 'property'}:
ns['class'] = qualname.rsplit('.', 1)[0]
- if doc.objtype == 'class':
+ if obj_type == 'class':
shortname = qualname
else:
shortname = qualname.rsplit('.', 1)[-1]
@@ -430,19 +403,21 @@ def generate_autosummary_content(
ns['objname'] = qualname
ns['name'] = shortname
- ns['objtype'] = doc.objtype
+ ns['objtype'] = obj_type
ns['underline'] = len(name) * '='
if template_name:
return template.render(template_name, ns)
else:
- return template.render(doc.objtype, ns)
+ return template.render(obj_type, ns)
-def _skip_member(obj: Any, name: str, objtype: str, *, events: EventManager) -> bool:
+def _skip_member(
+ obj: Any, name: str, obj_type: _AutodocObjType, *, events: EventManager
+) -> bool:
try:
return events.emit_firstresult(
- 'autodoc-skip-member', objtype, name, obj, False, {}
+ 'autodoc-skip-member', obj_type, name, obj, False, {}
)
except Exception as exc:
logger.warning(
@@ -458,8 +433,76 @@ def _skip_member(obj: Any, name: str, objtype: str, *, events: EventManager) ->
def _get_class_members(obj: Any) -> dict[str, Any]:
- members = sphinx.ext.autodoc.importer.get_class_members(obj, None, safe_getattr)
- return {name: member.object for name, member in members.items()}
+ """Get members and attributes of target class."""
+ # TODO: Simplify
+ # the members directly defined in the class
+ obj_dict = safe_getattr(obj, '__dict__', {})
+
+ members_simpler: dict[str, Any] = {}
+
+ # enum members
+ if isenumclass(obj):
+ for name, defining_class, value in _filter_enum_dict(
+ obj, safe_getattr, obj_dict
+ ):
+ # the order of occurrence of *name* matches obj's MRO,
+ # allowing inherited attributes to be shadowed correctly
+ if unmangled := unmangle(defining_class, name):
+ members_simpler[unmangled] = value
+
+ # members in __slots__
+ try:
+ subject___slots__ = getslots(obj)
+ if subject___slots__:
+ for name in subject___slots__:
+ members_simpler[name] = SLOTS_ATTR
+ except (TypeError, ValueError):
+ pass
+
+ # other members
+ for name in dir(obj):
+ try:
+ value = safe_getattr(obj, name)
+ if ismock(value):
+ value = undecorate(value)
+
+ unmangled = unmangle(obj, name)
+ if unmangled and unmangled not in members_simpler:
+ members_simpler[unmangled] = value
+ except AttributeError:
+ continue
+
+ try:
+ for cls in getmro(obj):
+ try:
+ modname = safe_getattr(cls, '__module__')
+ qualname = safe_getattr(cls, '__qualname__')
+ except AttributeError:
+ qualname = None
+ analyzer = None
+ else:
+ try:
+ analyzer = ModuleAnalyzer.for_module(modname)
+ analyzer.analyze()
+ except PycodeError:
+ analyzer = None
+
+ # annotation only member (ex. attr: int)
+ for name in getannotations(cls):
+ unmangled = unmangle(cls, name)
+ if unmangled and unmangled not in members_simpler:
+ members_simpler[unmangled] = INSTANCE_ATTR
+
+ # append or complete instance attributes (cf. self.attr1) if analyzer knows
+ if analyzer:
+ for ns, name in analyzer.attr_docs:
+ if ns == qualname and name not in members_simpler:
+ # otherwise unknown instance attribute
+ members_simpler[name] = INSTANCE_ATTR
+ except AttributeError:
+ pass
+
+ return members_simpler
def _get_module_members(obj: Any, *, config: Config) -> dict[str, Any]:
@@ -473,36 +516,35 @@ def _get_module_members(obj: Any, *, config: Config) -> dict[str, Any]:
def _get_all_members(
- doc: type[Documenter], obj: Any, *, config: Config
+ obj_type: _AutodocObjType, obj: Any, *, config: Config
) -> dict[str, Any]:
- if doc.objtype == 'module':
+ if obj_type == 'module':
return _get_module_members(obj, config=config)
- elif doc.objtype == 'class':
+ elif obj_type == 'class':
return _get_class_members(obj)
return {}
def _get_members(
- doc: type[Documenter],
+ obj_type: _AutodocObjType,
obj: Any,
types: set[str],
*,
config: Config,
events: EventManager,
- registry: SphinxComponentRegistry,
include_public: Set[str] = frozenset(),
imported: bool = True,
) -> tuple[list[str], list[str]]:
items: list[str] = []
public: list[str] = []
- all_members = _get_all_members(doc, obj, config=config)
+ all_members = _get_all_members(obj_type, obj, config=config)
for name, value in all_members.items():
- documenter = _get_documenter(value, obj, registry=registry)
- if documenter.objtype in types:
+ obj_type = _get_documenter(value, obj)
+ if obj_type in types:
# skip imported members if expected
if imported or getattr(value, '__module__', None) == obj.__name__:
- skipped = _skip_member(value, name, documenter.objtype, events=events)
+ skipped = _skip_member(value, name, obj_type, events=events)
if skipped is True:
pass
elif skipped is False:
@@ -548,7 +590,7 @@ def _get_modules(
continue
fullname = f'{name}.{modname}'
try:
- module = import_module(fullname)
+ module = _import_module(fullname)
except ImportError:
pass
else:
@@ -583,7 +625,7 @@ def generate_autosummary_docs(
showed_sources = sorted(sources)
if len(showed_sources) > 20:
- showed_sources = showed_sources[:10] + ['...'] + showed_sources[-10:]
+ showed_sources = [*showed_sources[:10], '...', *showed_sources[-10:]]
logger.info(
__('[autosummary] generating autosummary for: %s'), ', '.join(showed_sources)
)
@@ -655,7 +697,6 @@ def generate_autosummary_docs(
qualname,
config=app.config,
events=app.events,
- registry=app.registry,
)
file_path = Path(path, filename_map.get(name, name) + suffix)
@@ -719,7 +760,7 @@ def find_autosummary_in_docstring(
See `find_autosummary_in_lines`.
"""
try:
- real_name, obj, parent, modname = import_by_name(name)
+ _real_name, obj, _parent, _modname = import_by_name(name)
lines = pydoc.getdoc(obj).splitlines()
return find_autosummary_in_lines(lines, module=name, filename=filename)
except AttributeError:
@@ -919,7 +960,6 @@ def main(argv: Sequence[str] = (), /) -> None:
app = DummyApplication(sphinx.locale.get_translator())
logging.setup(app, sys.stdout, sys.stderr) # type: ignore[arg-type]
- setup_documenters(app) # type: ignore[arg-type]
args = get_parser().parse_args(argv or sys.argv[1:])
if args.templates:
diff --git a/sphinx/ext/coverage.py b/sphinx/ext/coverage.py
index b2d08603f38..cd83bcbaec1 100644
--- a/sphinx/ext/coverage.py
+++ b/sphinx/ext/coverage.py
@@ -156,7 +156,7 @@ def _determine_py_coverage_modules(
logger.warning(
__(
'the following modules are specified in coverage_modules '
- 'but were not documented'
+ 'but were not documented: %s'
),
', '.join(missing_modules),
)
@@ -255,7 +255,7 @@ def write_c_coverage(self) -> None:
for typ, name in sorted(undoc):
op.write(f' * {name:<50} [{typ:>9}]\n')
if self.config.coverage_show_missing_items:
- if self.app.quiet:
+ if self.config.verbosity < 0:
logger.warning(
__('undocumented c api: %s [%s] in file %s'),
name,
@@ -446,7 +446,7 @@ def write_py_coverage(self) -> None:
op.write('Functions:\n')
op.writelines(f' * {x}\n' for x in undoc['funcs'])
if self.config.coverage_show_missing_items:
- if self.app.quiet:
+ if self.config.verbosity < 0:
for func in undoc['funcs']:
logger.warning(
__('undocumented python function: %s :: %s'),
@@ -468,7 +468,7 @@ def write_py_coverage(self) -> None:
if not methods:
op.write(f' * {class_name}\n')
if self.config.coverage_show_missing_items:
- if self.app.quiet:
+ if self.config.verbosity < 0:
logger.warning(
__('undocumented python class: %s :: %s'),
name,
@@ -485,7 +485,7 @@ def write_py_coverage(self) -> None:
op.write(f' * {class_name} -- missing methods:\n\n')
op.writelines(f' - {x}\n' for x in methods)
if self.config.coverage_show_missing_items:
- if self.app.quiet:
+ if self.config.verbosity < 0:
for meth in methods:
logger.warning(
__(
diff --git a/sphinx/ext/doctest.py b/sphinx/ext/doctest.py
index 105c50a6923..9036e6ef565 100644
--- a/sphinx/ext/doctest.py
+++ b/sphinx/ext/doctest.py
@@ -341,7 +341,7 @@ def _out(self, text: str) -> None:
self.outfile.write(text)
def _warn_out(self, text: str) -> None:
- if self.app.quiet:
+ if self.config.verbosity < 0:
logger.warning(text)
else:
logger.info(text, nonl=True)
@@ -358,10 +358,17 @@ def finish(self) -> None:
def s(v: int) -> str:
return 's' if v != 1 else ''
+ header = 'Doctest summary'
+ if self.total_failures or self.setup_failures or self.cleanup_failures:
+ self._app.statuscode = 1
+ if self.config.doctest_fail_fast:
+ header = f'{header} (exiting after first failed test)'
+ underline = '=' * len(header)
+
self._out(
f"""
-Doctest summary
-===============
+{header}
+{underline}
{self.total_tries:5} test{s(self.total_tries)}
{self.total_failures:5} failure{s(self.total_failures)} in tests
{self.setup_failures:5} failure{s(self.setup_failures)} in setup code
@@ -370,15 +377,14 @@ def s(v: int) -> str:
)
self.outfile.close()
- if self.total_failures or self.setup_failures or self.cleanup_failures:
- self.app.statuscode = 1
-
def write_documents(self, docnames: Set[str]) -> None:
logger.info(bold('running tests...'))
for docname in sorted(docnames):
# no need to resolve the doctree
doctree = self.env.get_doctree(docname)
- self.test_doc(docname, doctree)
+ success = self.test_doc(docname, doctree)
+ if not success and self.config.doctest_fail_fast:
+ break
def get_filename_for_node(self, node: Node, docname: str) -> str:
"""Try to get the file which actually contains the doctest, not the
@@ -386,7 +392,7 @@ def get_filename_for_node(self, node: Node, docname: str) -> str:
"""
try:
filename = relpath(node.source, self.env.srcdir) # type: ignore[arg-type]
- return filename.rsplit(':docstring of ', maxsplit=1)[0]
+ return filename.partition(':docstring of ')[0]
except Exception:
return str(self.env.doc2path(docname, False))
@@ -419,7 +425,7 @@ def skipped(self, node: Element) -> bool:
exec(self.config.doctest_global_cleanup, context) # NoQA: S102
return should_skip
- def test_doc(self, docname: str, doctree: Node) -> None:
+ def test_doc(self, docname: str, doctree: Node) -> bool:
groups: dict[str, TestGroup] = {}
add_to_all_groups = []
self.setup_runner = SphinxDocTestRunner(verbose=False, optionflags=self.opt)
@@ -430,21 +436,9 @@ def test_doc(self, docname: str, doctree: Node) -> None:
self.cleanup_runner._fakeout = self.setup_runner._fakeout # type: ignore[attr-defined]
if self.config.doctest_test_doctest_blocks:
-
- def condition(node: Node) -> bool:
- return (
- isinstance(node, nodes.literal_block | nodes.comment)
- and 'testnodetype' in node
- ) or isinstance(node, nodes.doctest_block)
-
+ condition = _condition_with_doctest
else:
-
- def condition(node: Node) -> bool:
- return (
- isinstance(node, nodes.literal_block | nodes.comment)
- and 'testnodetype' in node
- )
-
+ condition = _condition_default
for node in doctree.findall(condition):
if self.skipped(node): # type: ignore[arg-type]
continue
@@ -466,7 +460,7 @@ def condition(node: Node) -> bool:
lineno=line_number, # type: ignore[arg-type]
options=node.get('options'), # type: ignore[attr-defined]
)
- node_groups = node.get('groups', ['default']) # type: ignore[attr-defined]
+ node_groups = node.get('groups', [self.config.doctest_test_doctest_blocks]) # type: ignore[attr-defined]
if '*' in node_groups:
add_to_all_groups.append(code)
continue
@@ -496,13 +490,17 @@ def condition(node: Node) -> bool:
for group in groups.values():
group.add_code(code)
if not groups:
- return
+ return True
show_successes = self.config.doctest_show_successes
if show_successes:
self._out(f'\nDocument: {docname}\n----------{"-" * len(docname)}\n')
+ success = True
for group in groups.values():
- self.test_group(group)
+ if not self.test_group(group):
+ success = False
+ if self.config.doctest_fail_fast:
+ break
# Separately count results from setup code
res_f, res_t = self.setup_runner.summarize(self._out, verbose=False)
self.setup_failures += res_f
@@ -517,13 +515,14 @@ def condition(node: Node) -> bool:
)
self.cleanup_failures += res_f
self.cleanup_tries += res_t
+ return success
def compile(
self, code: str, name: str, type: str, flags: Any, dont_inherit: bool
) -> Any:
return compile(code, name, self.type, flags, dont_inherit)
- def test_group(self, group: TestGroup) -> None:
+ def test_group(self, group: TestGroup) -> bool:
ns: dict[str, Any] = {}
def run_setup_cleanup(
@@ -553,9 +552,10 @@ def run_setup_cleanup(
# run the setup code
if not run_setup_cleanup(self.setup_runner, group.setup, 'setup'):
# if setup failed, don't run the group
- return
+ return False
# run the tests
+ success = True
for code in group.tests:
if len(code) == 1:
# ordinary doctests (code/output interleaved)
@@ -608,11 +608,19 @@ def run_setup_cleanup(
self.type = 'exec' # multiple statements again
# DocTest.__init__ copies the globs namespace, which we don't want
test.globs = ns
+ old_f = self.test_runner.failures
# also don't clear the globs namespace after running the doctest
self.test_runner.run(test, out=self._warn_out, clear_globs=False)
+ if self.test_runner.failures > old_f:
+ success = False
+ if self.config.doctest_fail_fast:
+ break
# run the cleanup
- run_setup_cleanup(self.cleanup_runner, group.cleanup, 'cleanup')
+ if not run_setup_cleanup(self.cleanup_runner, group.cleanup, 'cleanup'):
+ return False
+
+ return success
def setup(app: Sphinx) -> ExtensionMetadata:
@@ -638,7 +646,19 @@ def setup(app: Sphinx) -> ExtensionMetadata:
'',
types=frozenset({int}),
)
+ app.add_config_value('doctest_fail_fast', False, '', types=frozenset({bool}))
return {
'version': sphinx.__display_version__,
'parallel_read_safe': True,
}
+
+
+def _condition_default(node: Node) -> bool:
+ return (
+ isinstance(node, (nodes.literal_block, nodes.comment))
+ and 'testnodetype' in node
+ )
+
+
+def _condition_with_doctest(node: Node) -> bool:
+ return _condition_default(node) or isinstance(node, nodes.doctest_block)
diff --git a/sphinx/ext/duration.py b/sphinx/ext/duration.py
index 1cf3f7b58d4..3f7f64c2875 100644
--- a/sphinx/ext/duration.py
+++ b/sphinx/ext/duration.py
@@ -37,7 +37,7 @@ def reading_durations(self) -> dict[str, float]:
return self.data.setdefault('reading_durations', {})
def note_reading_duration(self, duration: float) -> None:
- self.reading_durations[self.env.docname] = duration
+ self.reading_durations[self.env.current_document.docname] = duration
def clear(self) -> None:
self.reading_durations.clear()
diff --git a/sphinx/ext/extlinks.py b/sphinx/ext/extlinks.py
index a5e213ac9f9..82a323bd4b8 100644
--- a/sphinx/ext/extlinks.py
+++ b/sphinx/ext/extlinks.py
@@ -68,7 +68,7 @@ def check_uri(self, refnode: nodes.reference) -> None:
uri = refnode['refuri']
title = refnode.astext()
- for alias, (base_uri, _caption) in self.app.config.extlinks.items():
+ for alias, (base_uri, _caption) in self.config.extlinks.items():
uri_pattern = re.compile(re.escape(base_uri).replace('%s', '(?P.+)'))
match = uri_pattern.match(uri)
diff --git a/sphinx/ext/graphviz.py b/sphinx/ext/graphviz.py
index 9cd4d163e36..8ba99cc24ad 100644
--- a/sphinx/ext/graphviz.py
+++ b/sphinx/ext/graphviz.py
@@ -167,7 +167,7 @@ def run(self) -> list[Node]:
]
node = graphviz()
node['code'] = dotcode
- node['options'] = {'docname': self.env.docname}
+ node['options'] = {'docname': self.env.current_document.docname}
if 'graphviz_dot' in self.options:
node['options']['graphviz_dot'] = self.options['graphviz_dot']
@@ -212,7 +212,7 @@ def run(self) -> list[Node]:
node = graphviz()
dot_code = '\n'.join(self.content)
node['code'] = f'{self.name} {self.arguments[0]} {{\n{dot_code}\n}}\n'
- node['options'] = {'docname': self.env.docname}
+ node['options'] = {'docname': self.env.current_document.docname}
if 'graphviz_dot' in self.options:
node['options']['graphviz_dot'] = self.options['graphviz_dot']
if 'layout' in self.options:
@@ -431,7 +431,7 @@ def render_dot_latex(
filename: str | None = None,
) -> None:
try:
- fname, outfn = render_dot(self, code, options, 'pdf', prefix, filename)
+ fname, _outfn = render_dot(self, code, options, 'pdf', prefix, filename)
except GraphvizError as exc:
logger.warning(__('dot code %r: %s'), code, exc)
raise nodes.SkipNode from exc
@@ -475,7 +475,7 @@ def render_dot_texinfo(
prefix: str = 'graphviz',
) -> None:
try:
- fname, outfn = render_dot(self, code, options, 'png', prefix)
+ fname, _outfn = render_dot(self, code, options, 'png', prefix)
except GraphvizError as exc:
logger.warning(__('dot code %r: %s'), code, exc)
raise nodes.SkipNode from exc
diff --git a/sphinx/ext/imgmath.py b/sphinx/ext/imgmath.py
index b7bcf4a7a67..a8f88c62a1c 100644
--- a/sphinx/ext/imgmath.py
+++ b/sphinx/ext/imgmath.py
@@ -32,7 +32,6 @@
from docutils.nodes import Element
from sphinx.application import Sphinx
- from sphinx.builders import Builder
from sphinx.config import Config
from sphinx.util._pathlib import _StrPath
from sphinx.util.typing import ExtensionMetadata
@@ -116,36 +115,23 @@ def generate_latex_macro(
return LaTeXRenderer([templates_path]).render(template_name + '.jinja', variables)
-def ensure_tempdir(builder: Builder) -> Path:
- """Create temporary directory.
-
- use only one tempdir per build -- the use of a directory is cleaner
- than using temporary files, since we can clean up everything at once
- just removing the whole directory (see cleanup_tempdir)
- """
- if not hasattr(builder, '_imgmath_tempdir'):
- builder._imgmath_tempdir = Path(tempfile.mkdtemp()) # type: ignore[attr-defined]
-
- return builder._imgmath_tempdir # type: ignore[attr-defined]
-
-
-def compile_math(latex: str, builder: Builder) -> Path:
+def compile_math(latex: str, *, config: Config) -> Path:
"""Compile LaTeX macros for math to DVI."""
- tempdir = ensure_tempdir(builder)
+ tempdir = Path(tempfile.mkdtemp(suffix='-sphinx-imgmath'))
filename = tempdir / 'math.tex'
with open(filename, 'w', encoding='utf-8') as f:
f.write(latex)
- imgmath_latex_name = os.path.basename(builder.config.imgmath_latex)
+ imgmath_latex_name = os.path.basename(config.imgmath_latex)
# build latex command; old versions of latex don't have the
# --output-directory option, so we have to manually chdir to the
# temp dir to run it.
- command = [builder.config.imgmath_latex]
+ command = [config.imgmath_latex]
if imgmath_latex_name != 'tectonic':
command.append('--interaction=nonstopmode')
# add custom args from the config file
- command.extend(builder.config.imgmath_latex_args)
+ command.extend(config.imgmath_latex_args)
command.append('math.tex')
try:
@@ -162,7 +148,7 @@ def compile_math(latex: str, builder: Builder) -> Path:
'LaTeX command %r cannot be run (needed for math '
'display), check the imgmath_latex setting'
),
- builder.config.imgmath_latex,
+ config.imgmath_latex,
)
raise InvokeError from exc
except CalledProcessError as exc:
@@ -191,19 +177,19 @@ def convert_dvi_to_image(command: list[str], name: str) -> tuple[str, str]:
raise MathExtError(msg, exc.stderr, exc.stdout) from exc
-def convert_dvi_to_png(dvipath: Path, builder: Builder, out_path: Path) -> int | None:
+def convert_dvi_to_png(dvipath: Path, out_path: Path, *, config: Config) -> int | None:
"""Convert DVI file to PNG image."""
name = 'dvipng'
- command = [builder.config.imgmath_dvipng, '-o', out_path, '-T', 'tight', '-z9']
- command.extend(builder.config.imgmath_dvipng_args)
- if builder.config.imgmath_use_preview:
+ command = [config.imgmath_dvipng, '-o', out_path, '-T', 'tight', '-z9']
+ command.extend(config.imgmath_dvipng_args)
+ if config.imgmath_use_preview:
command.append('--depth')
command.append(dvipath)
- stdout, stderr = convert_dvi_to_image(command, name)
+ stdout, _stderr = convert_dvi_to_image(command, name)
depth = None
- if builder.config.imgmath_use_preview:
+ if config.imgmath_use_preview:
for line in stdout.splitlines():
matched = depth_re.match(line)
if matched:
@@ -214,17 +200,17 @@ def convert_dvi_to_png(dvipath: Path, builder: Builder, out_path: Path) -> int |
return depth
-def convert_dvi_to_svg(dvipath: Path, builder: Builder, out_path: Path) -> int | None:
+def convert_dvi_to_svg(dvipath: Path, out_path: Path, *, config: Config) -> int | None:
"""Convert DVI file to SVG image."""
name = 'dvisvgm'
- command = [builder.config.imgmath_dvisvgm, '-o', out_path]
- command.extend(builder.config.imgmath_dvisvgm_args)
+ command = [config.imgmath_dvisvgm, '-o', out_path]
+ command.extend(config.imgmath_dvisvgm_args)
command.append(dvipath)
- stdout, stderr = convert_dvi_to_image(command, name)
+ _stdout, stderr = convert_dvi_to_image(command, name)
depth = None
- if builder.config.imgmath_use_preview:
+ if config.imgmath_use_preview:
for line in stderr.splitlines(): # not stdout !
matched = depthsvg_re.match(line)
if matched:
@@ -236,8 +222,7 @@ def convert_dvi_to_svg(dvipath: Path, builder: Builder, out_path: Path) -> int |
def render_math(
- self: HTML5Translator,
- math: str,
+ self: HTML5Translator, math: str, *, config: Config
) -> tuple[_StrPath | None, int | None]:
"""Render the LaTeX math expression *math* using latex and dvipng or
dvisvgm.
@@ -252,14 +237,12 @@ def render_math(
docs successfully). If the programs are there, however, they may not fail
since that indicates a problem in the math source.
"""
- image_format = self.builder.config.imgmath_image_format.lower()
+ image_format = config.imgmath_image_format.lower()
if image_format not in SUPPORT_FORMAT:
unsupported_format_msg = 'imgmath_image_format must be either "png" or "svg"'
raise MathExtError(unsupported_format_msg)
- latex = generate_latex_macro(
- image_format, math, self.builder.config, self.builder.confdir
- )
+ latex = generate_latex_macro(image_format, math, config, self.builder.confdir)
filename = (
f'{sha1(latex.encode(), usedforsecurity=False).hexdigest()}.{image_format}'
@@ -281,7 +264,7 @@ def render_math(
# .tex -> .dvi
try:
- dvipath = compile_math(latex, self.builder)
+ dvipath = compile_math(latex, config=config)
except InvokeError:
self.builder._imgmath_warned_latex = True # type: ignore[attr-defined]
return None, None
@@ -289,9 +272,9 @@ def render_math(
# .dvi -> .png/.svg
try:
if image_format == 'png':
- depth = convert_dvi_to_png(dvipath, self.builder, generated_path)
+ depth = convert_dvi_to_png(dvipath, generated_path, config=config)
elif image_format == 'svg':
- depth = convert_dvi_to_svg(dvipath, self.builder, generated_path)
+ depth = convert_dvi_to_svg(dvipath, generated_path, config=config)
except InvokeError:
self.builder._imgmath_warned_image_translator = True # type: ignore[attr-defined]
return None, None
@@ -315,26 +298,25 @@ def clean_up_files(app: Sphinx, exc: Exception) -> None:
if exc:
return
- if hasattr(app.builder, '_imgmath_tempdir'):
- with contextlib.suppress(Exception):
- shutil.rmtree(app.builder._imgmath_tempdir)
-
- if app.builder.config.imgmath_embed:
+ if app.config.imgmath_embed:
# in embed mode, the images are still generated in the math output dir
# to be shared across workers, but are not useful to the final document
with contextlib.suppress(Exception):
shutil.rmtree(app.builder.outdir / app.builder.imagedir / 'math')
-def get_tooltip(self: HTML5Translator, node: Element) -> str:
- if self.builder.config.imgmath_add_tooltips:
+def get_tooltip(self: HTML5Translator, node: Element, *, config: Config) -> str:
+ if config.imgmath_add_tooltips:
return f' alt="{self.encode(node.astext()).strip()}"'
return ''
def html_visit_math(self: HTML5Translator, node: nodes.math) -> None:
+ config = self.builder.config
try:
- rendered_path, depth = render_math(self, '$' + node.astext() + '$')
+ rendered_path, depth = render_math(
+ self, '$' + node.astext() + '$', config=config
+ )
except MathExtError as exc:
msg = str(exc)
sm = nodes.system_message(
@@ -350,27 +332,27 @@ def html_visit_math(self: HTML5Translator, node: nodes.math) -> None:
f'{self.encode(node.astext()).strip()}'
)
else:
- if self.builder.config.imgmath_embed:
- image_format = self.builder.config.imgmath_image_format.lower()
+ if config.imgmath_embed:
+ image_format = config.imgmath_image_format.lower()
img_src = render_maths_to_base64(image_format, rendered_path)
else:
bname = os.path.basename(rendered_path)
relative_path = Path(self.builder.imgpath, 'math', bname)
img_src = relative_path.as_posix()
align = f' style="vertical-align: {-depth:d}px"' if depth is not None else ''
- self.body.append(
- f'
'
- )
+ tooltip = get_tooltip(self, node, config=config)
+ self.body.append(f'
')
raise nodes.SkipNode
def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> None:
+ config = self.builder.config
if node.get('no-wrap', node.get('nowrap', False)):
latex = node.astext()
else:
latex = wrap_displaymath(node.astext(), None, False)
try:
- rendered_path, depth = render_math(self, latex)
+ rendered_path, _depth = render_math(self, latex, config=config)
except MathExtError as exc:
msg = str(exc)
sm = nodes.system_message(
@@ -393,14 +375,15 @@ def html_visit_displaymath(self: HTML5Translator, node: nodes.math_block) -> Non
f'{self.encode(node.astext()).strip()}
\n'
)
else:
- if self.builder.config.imgmath_embed:
- image_format = self.builder.config.imgmath_image_format.lower()
+ if config.imgmath_embed:
+ image_format = config.imgmath_image_format.lower()
img_src = render_maths_to_base64(image_format, rendered_path)
else:
bname = os.path.basename(rendered_path)
relative_path = Path(self.builder.imgpath, 'math', bname)
img_src = relative_path.as_posix()
- self.body.append(f'
\n')
+ tooltip = get_tooltip(self, node, config=config)
+ self.body.append(f'
\n')
raise nodes.SkipNode
diff --git a/sphinx/ext/inheritance_diagram.py b/sphinx/ext/inheritance_diagram.py
index ce05626abe5..83a6d4b7b01 100644
--- a/sphinx/ext/inheritance_diagram.py
+++ b/sphinx/ext/inheritance_diagram.py
@@ -436,7 +436,7 @@ def run(self) -> list[Node]:
# references to real URLs later. These nodes will eventually be
# removed from the doctree after we're done with them.
for name in graph.get_all_class_names():
- refnodes, x = class_role( # type: ignore[misc]
+ refnodes, _x = class_role( # type: ignore[misc]
'class', f':class:`{name}`', name, 0, self.state.inliner
)
node.extend(refnodes)
diff --git a/sphinx/ext/intersphinx/_cli.py b/sphinx/ext/intersphinx/_cli.py
index 720f080ebde..bf3a333eb95 100644
--- a/sphinx/ext/intersphinx/_cli.py
+++ b/sphinx/ext/intersphinx/_cli.py
@@ -5,7 +5,11 @@
import sys
from pathlib import Path
-from sphinx.ext.intersphinx._load import _fetch_inventory, _InvConfig
+from sphinx.ext.intersphinx._load import (
+ _fetch_inventory_data,
+ _InvConfig,
+ _load_inventory,
+)
def inspect_main(argv: list[str], /) -> int:
@@ -28,12 +32,14 @@ def inspect_main(argv: list[str], /) -> int:
)
try:
- inv = _fetch_inventory(
+ raw_data, _ = _fetch_inventory_data(
target_uri='',
inv_location=filename,
config=config,
srcdir=Path(),
+ cache_path=None,
)
+ inv = _load_inventory(raw_data, target_uri='')
for key in sorted(inv.data):
print(key)
inv_entries = sorted(inv.data[key].items())
diff --git a/sphinx/ext/intersphinx/_load.py b/sphinx/ext/intersphinx/_load.py
index 6a07dbbc3f0..dec27c219f6 100644
--- a/sphinx/ext/intersphinx/_load.py
+++ b/sphinx/ext/intersphinx/_load.py
@@ -65,7 +65,7 @@ def validate_intersphinx_mapping(app: Sphinx, config: Config) -> None:
continue
# ensure values are properly formatted
- if not isinstance(value, (tuple | list)):
+ if not isinstance(value, (tuple, list)):
errors += 1
msg = __(
'Invalid value `%r` in intersphinx_mapping[%r]. '
@@ -107,7 +107,7 @@ def validate_intersphinx_mapping(app: Sphinx, config: Config) -> None:
continue
seen[uri] = name
- if not isinstance(inv, tuple | list):
+ if not isinstance(inv, (tuple, list)):
inv = (inv,)
# ensure inventory locations are None or non-empty
@@ -181,6 +181,9 @@ def load_mappings(app: Sphinx) -> None:
now=now,
config=inv_config,
srcdir=app.srcdir,
+ # the location of this cache directory must not be relied upon
+ # externally, it may change without notice or warning.
+ cache_dir=app.doctreedir / '__intersphinx_cache__',
)
for project in projects
]
@@ -230,6 +233,7 @@ def _fetch_inventory_group(
now: int,
config: _InvConfig,
srcdir: Path,
+ cache_dir: Path | None,
) -> bool:
if config.intersphinx_cache_limit >= 0:
# Positive value: cache is expired if its timestamp is below
@@ -250,6 +254,25 @@ def _fetch_inventory_group(
else:
inv_location = location
+ if cache_dir is not None:
+ cache_path = cache_dir / f'{project.name}_{INVENTORY_FILENAME}'
+ else:
+ cache_path = None
+
+ if (
+ cache_path is not None
+ and '://' in inv_location
+ and project.target_uri not in cache
+ and cache_path.is_file()
+ # the saved 'objects.inv' is not older than the cache expiry time
+ and cache_path.stat().st_mtime >= cache_time
+ ):
+ raw_data = cache_path.read_bytes()
+ inv = _load_inventory(raw_data, target_uri=project.target_uri)
+ cache_path_mtime = int(cache_path.stat().st_mtime)
+ cache[project.target_uri] = project.name, cache_path_mtime, inv.data
+ break
+
# decide whether the inventory must be read: always read local
# files; remote ones only if the cache time is expired
if (
@@ -264,17 +287,18 @@ def _fetch_inventory_group(
)
try:
- inv = _fetch_inventory(
+ raw_data, target_uri = _fetch_inventory_data(
target_uri=project.target_uri,
inv_location=inv_location,
config=config,
srcdir=srcdir,
+ cache_path=cache_path,
)
+ inv = _load_inventory(raw_data, target_uri=target_uri)
except Exception as err:
failures.append(err.args)
continue
-
- if inv:
+ else:
cache[project.target_uri] = project.name, now, inv.data
updated = True
break
@@ -302,18 +326,25 @@ def _fetch_inventory_group(
def fetch_inventory(app: Sphinx, uri: InventoryURI, inv: str) -> Inventory:
"""Fetch, parse and return an intersphinx inventory file."""
- return _fetch_inventory(
+ raw_data, uri = _fetch_inventory_data(
target_uri=uri,
inv_location=inv,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
- ).data
+ cache_path=None,
+ )
+ return _load_inventory(raw_data, target_uri=uri).data
-def _fetch_inventory(
- *, target_uri: InventoryURI, inv_location: str, config: _InvConfig, srcdir: Path
-) -> _Inventory:
- """Fetch, parse and return an intersphinx inventory file."""
+def _fetch_inventory_data(
+ *,
+ target_uri: InventoryURI,
+ inv_location: str,
+ config: _InvConfig,
+ srcdir: Path,
+ cache_path: Path | None,
+) -> tuple[bytes, str]:
+ """Fetch inventory data from a local or remote source."""
# both *target_uri* (base URI of the links to generate)
# and *inv_location* (actual location of the inventory file)
# can be local or remote URIs
@@ -324,9 +355,17 @@ def _fetch_inventory(
raw_data, target_uri = _fetch_inventory_url(
target_uri=target_uri, inv_location=inv_location, config=config
)
+ if cache_path is not None:
+ cache_path.parent.mkdir(parents=True, exist_ok=True)
+ cache_path.write_bytes(raw_data)
else:
raw_data = _fetch_inventory_file(inv_location=inv_location, srcdir=srcdir)
+ return raw_data, target_uri
+
+def _load_inventory(raw_data: bytes, /, *, target_uri: InventoryURI) -> _Inventory:
+ """Parse and return an intersphinx inventory file."""
+ # *target_uri* (base URI of the links to generate) can be a local or remote URI
try:
inv = InventoryFile.loads(raw_data, uri=target_uri)
except ValueError as exc:
diff --git a/sphinx/ext/intersphinx/_resolve.py b/sphinx/ext/intersphinx/_resolve.py
index 102c5d3ab07..52b672d783a 100644
--- a/sphinx/ext/intersphinx/_resolve.py
+++ b/sphinx/ext/intersphinx/_resolve.py
@@ -46,7 +46,12 @@ def _create_element_from_result(
# get correct path in case of subdirectories
uri = (_relative_path(Path(), Path(node['refdoc']).parent) / uri).as_posix()
if inv_item.project_version:
- reftitle = _('(in %s v%s)') % (inv_item.project_name, inv_item.project_version)
+ if not inv_item.project_version[0].isdigit():
+ # Do not append 'v' to non-numeric version
+ version = inv_item.project_version
+ else:
+ version = f'v{inv_item.project_version}'
+ reftitle = _('(in %s %s)') % (inv_item.project_name, version)
else:
reftitle = _('(in %s)') % (inv_item.project_name,)
@@ -493,7 +498,7 @@ def get_inventory_and_name_suffix(self, name: str) -> tuple[str | None, str]:
assert name.startswith('external'), name
suffix = name[9:]
if name[8] == '+':
- inv_name, suffix = suffix.split(':', 1)
+ inv_name, _, suffix = suffix.partition(':')
return inv_name, suffix
elif name[8] == ':':
return None, suffix
@@ -522,7 +527,7 @@ def _emit_warning(self, msg: str, /, *args: Any) -> None:
*args,
type='intersphinx',
subtype='external',
- location=(self.env.docname, self.lineno),
+ location=(self.env.current_document.docname, self.lineno),
)
def _concat_strings(self, strings: Iterable[str]) -> str:
diff --git a/sphinx/ext/napoleon/docstring.py b/sphinx/ext/napoleon/docstring.py
index d1317e9d841..26c7741ea60 100644
--- a/sphinx/ext/napoleon/docstring.py
+++ b/sphinx/ext/napoleon/docstring.py
@@ -476,7 +476,7 @@ def _consume_field(
) -> tuple[str, str, list[str]]:
line = self._lines.next()
- before, colon, after = self._partition_field_on_colon(line)
+ before, _colon, after = self._partition_field_on_colon(line)
_name, _type, _desc = before, '', after
if parse_type:
@@ -535,7 +535,7 @@ def _consume_returns_section(
if colon:
if after:
- _desc = [after] + lines[1:]
+ _desc = [after, *lines[1:]]
else:
_desc = lines[1:]
@@ -684,7 +684,7 @@ def _format_field(self, _name: str, _type: str, _desc: list[str]) -> list[str]:
if has_desc:
_desc = self._fix_field_desc(_desc)
if _desc[0]:
- return [field + _desc[0]] + _desc[1:]
+ return [field + _desc[0], *_desc[1:]]
else:
return [field, *_desc]
else:
@@ -1387,7 +1387,7 @@ def translate(
if m and line[m.end() :].strip().startswith(':'):
push_item(current_func, rest)
current_func, line = line[: m.end()], line[m.end() :]
- rest = [line.split(':', 1)[1].strip()]
+ rest = [line.partition(':')[-1].strip()]
if not rest[0]:
rest = []
elif not line.startswith(' '):
diff --git a/sphinx/ext/todo.py b/sphinx/ext/todo.py
index 53c4d57b4f1..4b2e32bc9c4 100644
--- a/sphinx/ext/todo.py
+++ b/sphinx/ext/todo.py
@@ -59,7 +59,7 @@ def run(self) -> list[Node]:
return [todo]
todo.insert(0, nodes.title(text=_('Todo')))
- todo['docname'] = self.env.docname
+ todo['docname'] = self.env.current_document.docname
self.add_name(todo)
self.set_source_info(todo)
self.state.document.note_explicit_target(todo)
diff --git a/sphinx/ext/viewcode.py b/sphinx/ext/viewcode.py
index 4b1c62ad0d1..af352eaaab6 100644
--- a/sphinx/ext/viewcode.py
+++ b/sphinx/ext/viewcode.py
@@ -103,11 +103,11 @@ def _get_full_modname(modname: str, attribute: str) -> str | None:
return None
-def is_supported_builder(builder: Builder) -> bool:
+def is_supported_builder(builder: type[Builder], viewcode_enable_epub: bool) -> bool:
return (
builder.format == 'html'
and builder.name != 'singlehtml'
- and (not builder.name.startswith('epub') or builder.config.viewcode_enable_epub)
+ and (not builder.name.startswith('epub') or viewcode_enable_epub)
)
@@ -166,7 +166,7 @@ def has_tag(modname: str, fullname: str, docname: str, refname: str) -> bool:
if not modname:
continue
fullname = signode.get('fullname')
- if not has_tag(modname, fullname, env.docname, refname):
+ if not has_tag(modname, fullname, env.current_document.docname, refname):
continue
if fullname in names:
# only one link per name, please
@@ -174,7 +174,7 @@ def has_tag(modname: str, fullname: str, docname: str, refname: str) -> bool:
names.add(fullname)
pagename = posixpath.join(OUTPUT_DIRNAME, modname.replace('.', '/'))
signode += viewcode_anchor(
- reftarget=pagename, refid=fullname, refdoc=env.docname
+ reftarget=pagename, refid=fullname, refdoc=env.current_document.docname
)
@@ -205,7 +205,7 @@ def env_purge_doc(app: Sphinx, env: BuildEnvironment, docname: str) -> None:
if entry is False:
continue
- code, tags, used, refname = entry
+ _code, _tags, used, _refname = entry
for fullname in list(used):
if used[fullname] == docname:
used.pop(fullname)
@@ -220,7 +220,9 @@ class ViewcodeAnchorTransform(SphinxPostTransform):
default_priority = 100
def run(self, **kwargs: Any) -> None:
- if is_supported_builder(self.app.builder):
+ if is_supported_builder(
+ self.env._builder_cls, self.config.viewcode_enable_epub
+ ):
self.convert_viewcode_anchors()
else:
self.remove_viewcode_anchors()
@@ -229,7 +231,7 @@ def convert_viewcode_anchors(self) -> None:
for node in self.document.findall(viewcode_anchor):
anchor = nodes.inline('', _('[source]'), classes=['viewcode-link'])
refnode = make_refnode(
- self.app.builder,
+ self.env._app.builder,
node['refdoc'],
node['reftarget'],
node['refid'],
@@ -250,7 +252,7 @@ def get_module_filename(app: Sphinx, modname: str) -> _StrPath | None:
return None
else:
try:
- filename, source = ModuleAnalyzer.get_module_source(modname)
+ filename, _source = ModuleAnalyzer.get_module_source(modname)
return filename
except Exception:
return None
@@ -281,7 +283,7 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]:
env = app.env
if not hasattr(env, '_viewcode_modules'):
return
- if not is_supported_builder(app.builder):
+ if not is_supported_builder(env._builder_cls, env.config.viewcode_enable_epub):
return
highlighter = app.builder.highlighter # type: ignore[attr-defined]
urito = app.builder.get_relative_uri
@@ -293,7 +295,7 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]:
__('highlighting module code... '),
'blue',
len(env._viewcode_modules),
- app.verbosity,
+ app.config.verbosity,
operator.itemgetter(0),
):
if not entry:
@@ -323,7 +325,7 @@ def collect_pages(app: Sphinx) -> Iterator[tuple[str, dict[str, Any], str]]:
max_index = len(lines) - 1
link_text = _('[docs]')
for name, docname in used.items():
- type, start, end = tags[name]
+ _type, start, end = tags[name]
backlink = urito(pagename, docname) + '#' + refname + '.' + name
lines[start] = (
f'\n'
diff --git a/sphinx/io.py b/sphinx/io.py
index 009cd38bf68..1df5ac454ce 100644
--- a/sphinx/io.py
+++ b/sphinx/io.py
@@ -2,24 +2,18 @@
from __future__ import annotations
+import warnings
from typing import TYPE_CHECKING
-from docutils.core import Publisher
-from docutils.io import FileInput, NullOutput
+from docutils.io import FileInput
from docutils.readers import standalone
from docutils.transforms.references import DanglingReferences
from docutils.writers import UnfilteredWriter
-from sphinx.transforms import AutoIndexUpgrader, DoctreeReadEvent, SphinxTransformer
-from sphinx.transforms.i18n import (
- Locale,
- PreserveTranslatableMessages,
- RemoveTranslatableInline,
-)
-from sphinx.transforms.references import SphinxDomains
+from sphinx.deprecation import RemovedInSphinx10Warning
+from sphinx.transforms import SphinxTransformer
from sphinx.util import logging
from sphinx.util.docutils import LoggingReporter
-from sphinx.versioning import UIDTransform
if TYPE_CHECKING:
from typing import Any
@@ -30,12 +24,13 @@
from docutils.parsers import Parser
from docutils.transforms import Transform
- from sphinx.application import Sphinx
from sphinx.environment import BuildEnvironment
logger = logging.getLogger(__name__)
+warnings.warn('sphinx.io is deprecated', RemovedInSphinx10Warning, stacklevel=2)
+
class SphinxBaseReader(standalone.Reader): # type: ignore[misc]
"""A base class of readers for Sphinx.
@@ -43,21 +38,15 @@ class SphinxBaseReader(standalone.Reader): # type: ignore[misc]
This replaces reporter by Sphinx's on generating document.
"""
- transforms: list[type[Transform]] = []
-
def __init__(self, *args: Any, **kwargs: Any) -> None:
- from sphinx.application import Sphinx
-
- if len(args) > 0 and isinstance(args[0], Sphinx):
- self._app = args[0]
- self._env = self._app.env
- args = args[1:]
-
super().__init__(*args, **kwargs)
+ warnings.warn(
+ 'sphinx.io.SphinxBaseReader is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
- def setup(self, app: Sphinx) -> None:
- self._app = app # hold application object only for compatibility
- self._env = app.env
+ transforms: list[type[Transform]] = []
def get_transforms(self) -> list[type[Transform]]:
transforms = super().get_transforms() + self.transforms
@@ -90,9 +79,16 @@ def new_document(self) -> nodes.document:
class SphinxStandaloneReader(SphinxBaseReader):
"""A basic document reader for Sphinx."""
- def setup(self, app: Sphinx) -> None:
- self.transforms = self.transforms + app.registry.get_transforms()
- super().setup(app)
+ def __init__(self, *args: Any, **kwargs: Any) -> None:
+ super().__init__(*args, **kwargs)
+ warnings.warn(
+ 'sphinx.io.SphinxStandaloneReader is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
+
+ def _setup_transforms(self, transforms: list[type[Transform]], /) -> None:
+ self.transforms = self.transforms + transforms
def read(self, source: Input, parser: Parser, settings: Values) -> nodes.document: # type: ignore[type-arg]
self.source = source
@@ -109,39 +105,21 @@ def read_source(self, env: BuildEnvironment) -> str:
# emit "source-read" event
arg = [content]
- env.events.emit('source-read', env.docname, arg)
+ env.events.emit('source-read', env.current_document.docname, arg)
return arg[0]
-class SphinxI18nReader(SphinxBaseReader):
- """A document reader for i18n.
-
- This returns the source line number of original text as current source line number
- to let users know where the error happened.
- Because the translated texts are partial and they don't have correct line numbers.
- """
-
- def setup(self, app: Sphinx) -> None:
- super().setup(app)
-
- self.transforms = self.transforms + app.registry.get_transforms()
- unused = [
- PreserveTranslatableMessages,
- Locale,
- RemoveTranslatableInline,
- AutoIndexUpgrader,
- SphinxDomains,
- DoctreeReadEvent,
- UIDTransform,
- ]
- for transform in unused:
- if transform in self.transforms:
- self.transforms.remove(transform)
-
-
class SphinxDummyWriter(UnfilteredWriter): # type: ignore[type-arg]
"""Dummy writer module used for generating doctree."""
+ def __init__(self) -> None:
+ super().__init__()
+ warnings.warn(
+ 'sphinx.io.SphinxDummyWriter is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
+
supported = ('html',) # needed to keep "meta" nodes
def translate(self) -> None:
@@ -150,6 +128,11 @@ def translate(self) -> None:
def SphinxDummySourceClass(source: Any, *args: Any, **kwargs: Any) -> Any:
"""Bypass source object as is to cheat Publisher."""
+ warnings.warn(
+ 'sphinx.io.SphinxDummySourceClass is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
return source
@@ -159,32 +142,8 @@ class SphinxFileInput(FileInput):
def __init__(self, *args: Any, **kwargs: Any) -> None:
kwargs['error_handler'] = 'sphinx'
super().__init__(*args, **kwargs)
-
-
-def create_publisher(app: Sphinx, filetype: str) -> Publisher:
- reader = SphinxStandaloneReader()
- reader.setup(app)
-
- parser = app.registry.create_source_parser(app, filetype)
- if parser.__class__.__name__ == 'CommonMarkParser' and parser.settings_spec == ():
- # a workaround for recommonmark
- # If recommonmark.AutoStrictify is enabled, the parser invokes reST parser
- # internally. But recommonmark-0.4.0 does not provide settings_spec for reST
- # parser. As a workaround, this copies settings_spec for RSTParser to the
- # CommonMarkParser.
- from docutils.parsers.rst import Parser as RSTParser
-
- parser.settings_spec = RSTParser.settings_spec # type: ignore[misc]
-
- pub = Publisher(
- reader=reader,
- parser=parser,
- writer=SphinxDummyWriter(),
- source_class=SphinxFileInput,
- destination=NullOutput(),
- )
- # Propagate exceptions by default when used programmatically:
- defaults = {'traceback': True, **app.env.settings}
- # Set default settings
- pub.get_settings(**defaults)
- return pub
+ warnings.warn(
+ 'sphinx.io.SphinxFileInput is deprecated',
+ RemovedInSphinx10Warning,
+ stacklevel=2,
+ )
diff --git a/sphinx/jinja2glue.py b/sphinx/jinja2glue.py
index 2f33070fa50..fd44a84bcbb 100644
--- a/sphinx/jinja2glue.py
+++ b/sphinx/jinja2glue.py
@@ -3,7 +3,6 @@
from __future__ import annotations
import os
-import os.path
from pathlib import Path
from pprint import pformat
from typing import TYPE_CHECKING
diff --git a/sphinx/parsers.py b/sphinx/parsers.py
index 70ff3eaae62..97333f86773 100644
--- a/sphinx/parsers.py
+++ b/sphinx/parsers.py
@@ -10,7 +10,8 @@
from docutils.statemachine import StringList
from docutils.transforms.universal import SmartQuotes
-from sphinx.util.rst import append_epilog, prepend_prolog
+from sphinx.deprecation import _deprecation_warning
+from sphinx.util.rst import _append_epilogue, _prepend_prologue
if TYPE_CHECKING:
from docutils import nodes
@@ -23,30 +24,42 @@
class Parser(docutils.parsers.Parser):
- """A base class of source parsers.
+ """A base class for source parsers.
- The additional parsers should inherit this class
- instead of ``docutils.parsers.Parser``.
- Compared with ``docutils.parsers.Parser``,
- this class improves accessibility to Sphinx APIs.
-
- The subclasses can access sphinx core runtime objects (app, config and env).
+ Additional parsers should inherit from this class instead of
+ ``docutils.parsers.Parser``.
+ This class provides access to core Sphinx objects; *config* and *env*.
"""
- #: The config object
- config: Config
-
- #: The environment object
- env: BuildEnvironment
+ _config: Config
+ _env: BuildEnvironment
+
+ @property
+ def config(self) -> Config:
+ """The config object."""
+ cls_module = self.__class__.__module__
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(cls_module, f'{cls_name}.config', remove=(9, 0))
+ return self._config
+
+ @property
+ def env(self) -> BuildEnvironment:
+ """The environment object."""
+ cls_module = self.__class__.__module__
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(cls_module, f'{cls_name}.env', remove=(9, 0))
+ return self._env
def set_application(self, app: Sphinx) -> None:
"""set_application will be called from Sphinx to set app and other instance variables
:param sphinx.application.Sphinx app: Sphinx application object
"""
- self._app = app
- self.config = app.config
- self.env = app.env
+ cls_module = self.__class__.__module__
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(cls_module, f'{cls_name}.set_application', remove=(9, 0))
+ self._config = app.config
+ self._env = app.env
class RSTParser(docutils.parsers.rst.Parser, Parser):
@@ -57,7 +70,7 @@ def get_transforms(self) -> list[type[Transform]]:
refs: sphinx.io.SphinxStandaloneReader
"""
- transforms = super().get_transforms()
+ transforms = super(RSTParser, RSTParser()).get_transforms()
transforms.remove(SmartQuotes)
return transforms
@@ -87,9 +100,9 @@ def parse(self, inputstring: str | StringList, document: nodes.document) -> None
self.finish_parse()
def decorate(self, content: StringList) -> None:
- """Preprocess reST content before parsing."""
- prepend_prolog(content, self.config.rst_prolog)
- append_epilog(content, self.config.rst_epilog)
+ """Preprocess reStructuredText content before parsing."""
+ _prepend_prologue(content, self._config.rst_prolog)
+ _append_epilogue(content, self._config.rst_epilog)
def setup(app: Sphinx) -> ExtensionMetadata:
diff --git a/sphinx/pycode/ast.py b/sphinx/pycode/ast.py
index b1521595b49..7f7850f3616 100644
--- a/sphinx/pycode/ast.py
+++ b/sphinx/pycode/ast.py
@@ -134,7 +134,7 @@ def visit_Call(self, node: ast.Call) -> str:
def visit_Constant(self, node: ast.Constant) -> str:
if node.value is Ellipsis:
return '...'
- elif isinstance(node.value, int | float | complex):
+ elif isinstance(node.value, (int, float, complex)):
if self.code:
return ast.get_source_segment(self.code, node) or repr(node.value)
else:
@@ -202,5 +202,8 @@ def visit_Tuple(self, node: ast.Tuple) -> str:
else:
return '(' + ', '.join(self.visit(e) for e in node.elts) + ')'
+ def visit_Starred(self, node: ast.Starred) -> str:
+ return f'*{self.visit(node.value)}'
+
def generic_visit(self, node: ast.AST) -> NoReturn:
raise NotImplementedError('Unable to parse %s object' % type(node).__name__)
diff --git a/sphinx/pycode/parser.py b/sphinx/pycode/parser.py
index 34d30200f75..4a96cedc990 100644
--- a/sphinx/pycode/parser.py
+++ b/sphinx/pycode/parser.py
@@ -9,6 +9,7 @@
import itertools
import operator
import re
+import sys
import tokenize
from token import DEDENT, INDENT, NAME, NEWLINE, NUMBER, OP, STRING
from tokenize import COMMENT, NL
@@ -20,6 +21,13 @@
from inspect import Signature
from typing import Any
+if sys.version_info[:2] >= (3, 12):
+ AssignmentLike = ast.Assign | ast.AnnAssign | ast.TypeAlias
+ AssignmentLikeType = (ast.Assign, ast.AnnAssign, ast.TypeAlias)
+else:
+ AssignmentLike = ast.Assign | ast.AnnAssign
+ AssignmentLikeType = (ast.Assign, ast.AnnAssign)
+
comment_re = re.compile('^\\s*#: ?(.*)\r?\n?$')
indent_re = re.compile('^\\s*$')
emptyline_re = re.compile('^\\s*(#.*)?$')
@@ -29,12 +37,14 @@ def filter_whitespace(code: str) -> str:
return code.replace('\f', ' ') # replace FF (form feed) with whitespace
-def get_assign_targets(node: ast.AST) -> list[ast.expr]:
- """Get list of targets from Assign and AnnAssign node."""
+def get_assign_targets(node: AssignmentLike) -> list[ast.expr]:
+ """Get list of targets from AssignmentLike node."""
if isinstance(node, ast.Assign):
return node.targets
+ elif isinstance(node, ast.AnnAssign):
+ return [node.target]
else:
- return [node.target] # type: ignore[attr-defined]
+ return [node.name] # ast.TypeAlias
def get_lvar_names(node: ast.AST, self: ast.arg | None = None) -> list[str]:
@@ -119,7 +129,7 @@ def __eq__(self, other: object) -> bool:
return self.kind == other
elif isinstance(other, str):
return self.value == other
- elif isinstance(other, list | tuple):
+ elif isinstance(other, (list, tuple)):
return [self.kind, self.value] == list(other)
elif other is None:
return False
@@ -247,9 +257,9 @@ def __init__(self, buffers: list[str], encoding: str) -> None:
self.deforders: dict[str, int] = {}
self.finals: list[str] = []
self.overloads: dict[str, list[Signature]] = {}
- self.typing: str | None = None
- self.typing_final: str | None = None
- self.typing_overload: str | None = None
+ self.typing_mods: set[str] = set()
+ self.typing_final_names: set[str] = set()
+ self.typing_overload_names: set[str] = set()
super().__init__()
def get_qualname_for(self, name: str) -> list[str] | None:
@@ -257,7 +267,7 @@ def get_qualname_for(self, name: str) -> list[str] | None:
if self.current_function:
if self.current_classes and self.context[-1] == '__init__':
# store variable comments inside __init__ method of classes
- return self.context[:-1] + [name]
+ return [*self.context[:-1], name]
else:
return None
else:
@@ -295,11 +305,8 @@ def add_variable_annotation(self, name: str, annotation: ast.AST) -> None:
self.annotations[basename, name] = ast_unparse(annotation)
def is_final(self, decorators: list[ast.expr]) -> bool:
- final = []
- if self.typing:
- final.append('%s.final' % self.typing)
- if self.typing_final:
- final.append(self.typing_final)
+ final = {f'{modname}.final' for modname in self.typing_mods}
+ final |= self.typing_final_names
for decorator in decorators:
try:
@@ -311,11 +318,8 @@ def is_final(self, decorators: list[ast.expr]) -> bool:
return False
def is_overload(self, decorators: list[ast.expr]) -> bool:
- overload = []
- if self.typing:
- overload.append('%s.overload' % self.typing)
- if self.typing_overload:
- overload.append(self.typing_overload)
+ overload = {f'{modname}.overload' for modname in self.typing_mods}
+ overload |= self.typing_overload_names
for decorator in decorators:
try:
@@ -338,34 +342,7 @@ def get_line(self, lineno: int) -> str:
"""Returns specified line."""
return self.buffers[lineno - 1]
- def visit(self, node: ast.AST) -> None:
- """Updates self.previous to the given node."""
- super().visit(node)
- self.previous = node
-
- def visit_Import(self, node: ast.Import) -> None:
- """Handles Import node and record the order of definitions."""
- for name in node.names:
- self.add_entry(name.asname or name.name)
-
- if name.name == 'typing':
- self.typing = name.asname or name.name
- elif name.name == 'typing.final':
- self.typing_final = name.asname or name.name
- elif name.name == 'typing.overload':
- self.typing_overload = name.asname or name.name
-
- def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
- """Handles Import node and record the order of definitions."""
- for name in node.names:
- self.add_entry(name.asname or name.name)
-
- if node.module == 'typing' and name.name == 'final':
- self.typing_final = name.asname or name.name
- elif node.module == 'typing' and name.name == 'overload':
- self.typing_overload = name.asname or name.name
-
- def visit_Assign(self, node: ast.Assign) -> None:
+ def _handle_assignment(self, node: ast.Assign | ast.AnnAssign) -> None:
"""Handles Assign node and pick up a variable comment."""
try:
targets = get_assign_targets(node)
@@ -385,11 +362,19 @@ def visit_Assign(self, node: ast.Assign) -> None:
elif hasattr(node, 'type_comment') and node.type_comment:
for varname in varnames:
self.add_variable_annotation(varname, node.type_comment) # type: ignore[arg-type]
+ self._collect_doc_comment(node, varnames, current_line)
+ def _collect_doc_comment(
+ self,
+ node: AssignmentLike,
+ varnames: list[str],
+ current_line: str,
+ ) -> None:
# check comments after assignment
- parser = AfterCommentParser(
- [current_line[node.col_offset :]] + self.buffers[node.lineno :]
- )
+ parser = AfterCommentParser([
+ current_line[node.col_offset :],
+ *self.buffers[node.lineno :],
+ ])
parser.parse()
if parser.comment and comment_re.match(parser.comment):
for varname in varnames:
@@ -420,14 +405,47 @@ def visit_Assign(self, node: ast.Assign) -> None:
for varname in varnames:
self.add_entry(varname)
+ def visit(self, node: ast.AST) -> None:
+ """Updates self.previous to the given node."""
+ super().visit(node)
+ self.previous = node
+
+ def visit_Import(self, node: ast.Import) -> None:
+ """Handles Import node and record the order of definitions."""
+ for name in node.names:
+ self.add_entry(name.asname or name.name)
+
+ if name.name in {'typing', 'typing_extensions'}:
+ self.typing_mods.add(name.asname or name.name)
+ elif name.name in {'typing.final', 'typing_extensions.final'}:
+ self.typing_final_names.add(name.asname or name.name)
+ elif name.name in {'typing.overload', 'typing_extensions.overload'}:
+ self.typing_overload_names.add(name.asname or name.name)
+
+ def visit_ImportFrom(self, node: ast.ImportFrom) -> None:
+ """Handles Import node and record the order of definitions."""
+ for name in node.names:
+ self.add_entry(name.asname or name.name)
+
+ if node.module not in {'typing', 'typing_extensions'}:
+ continue
+ if name.name == 'final':
+ self.typing_final_names.add(name.asname or name.name)
+ elif name.name == 'overload':
+ self.typing_overload_names.add(name.asname or name.name)
+
+ def visit_Assign(self, node: ast.Assign) -> None:
+ """Handles Assign node and pick up a variable comment."""
+ self._handle_assignment(node)
+
def visit_AnnAssign(self, node: ast.AnnAssign) -> None:
"""Handles AnnAssign node and pick up a variable comment."""
- self.visit_Assign(node) # type: ignore[arg-type]
+ self._handle_assignment(node)
def visit_Expr(self, node: ast.Expr) -> None:
"""Handles Expr node and pick up a comment if string."""
if (
- isinstance(self.previous, ast.Assign | ast.AnnAssign)
+ isinstance(self.previous, AssignmentLikeType)
and isinstance(node.value, ast.Constant)
and isinstance(node.value.value, str)
):
@@ -488,6 +506,16 @@ def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> None:
"""Handles AsyncFunctionDef node and set context."""
self.visit_FunctionDef(node) # type: ignore[arg-type]
+ def visit_TypeAlias(self, node: ast.TypeAlias) -> None: # type: ignore[name-defined]
+ """Handles TypeAlias node and picks up a variable comment.
+
+ .. note:: TypeAlias node refers to `type Foo = Bar` (PEP 695) assignment,
+ NOT `Foo: TypeAlias = Bar` (PEP 613).
+ """
+ # Python 3.12+
+ current_line = self.get_line(node.lineno)
+ self._collect_doc_comment(node, [node.name.id], current_line)
+
class DefinitionFinder(TokenProcessor):
"""Python source code parser to detect location of functions,
diff --git a/sphinx/registry.py b/sphinx/registry.py
index ce52a03b323..5ccc6c5ddb0 100644
--- a/sphinx/registry.py
+++ b/sphinx/registry.py
@@ -12,7 +12,6 @@
from sphinx.domains.std import GenericObject, Target
from sphinx.errors import ExtensionError, SphinxError, VersionRequirementError
from sphinx.extension import Extension
-from sphinx.io import create_publisher
from sphinx.locale import __
from sphinx.parsers import Parser as SphinxParser
from sphinx.roles import XRefRole
@@ -26,7 +25,6 @@
from typing import Any, TypeAlias
from docutils import nodes
- from docutils.core import Publisher
from docutils.nodes import Element, Node, TextElement
from docutils.parsers import Parser
from docutils.parsers.rst import Directive
@@ -38,7 +36,7 @@
from sphinx.config import Config
from sphinx.domains import Domain, Index
from sphinx.environment import BuildEnvironment
- from sphinx.ext.autodoc import Documenter
+ from sphinx.ext.autodoc._documenters import Documenter
from sphinx.util.docfields import Field
from sphinx.util.typing import (
ExtensionMetadata,
@@ -153,9 +151,6 @@ def __init__(self) -> None:
#: additional transforms; list of transforms
self.transforms: list[type[Transform]] = []
- # private cache of Docutils Publishers (file type -> publisher object)
- self.publishers: dict[str, Publisher] = {}
-
@property
def autodoc_attrgettrs(self) -> dict[type, Callable[[Any, str, Any], Any]]:
return self.autodoc_attrgetters
@@ -375,11 +370,14 @@ def get_source_parser(self, filetype: str) -> type[Parser]:
def get_source_parsers(self) -> dict[str, type[Parser]]:
return self.source_parsers
- def create_source_parser(self, app: Sphinx, filename: str) -> Parser:
+ def create_source_parser(
+ self, filename: str, *, config: Config, env: BuildEnvironment
+ ) -> Parser:
parser_class = self.get_source_parser(filename)
parser = parser_class()
if isinstance(parser, SphinxParser):
- parser.set_application(app)
+ parser._config = config
+ parser._env = env
return parser
def add_translator(
@@ -410,7 +408,9 @@ def add_translation_handlers(
% (builder_name, handlers),
) from exc
- def get_translator_class(self, builder: Builder) -> type[nodes.NodeVisitor]:
+ def get_translator_class(
+ self, builder: type[Builder] | Builder
+ ) -> type[nodes.NodeVisitor]:
try:
return self.translators[builder.name]
except KeyError:
@@ -420,7 +420,9 @@ def get_translator_class(self, builder: Builder) -> type[nodes.NodeVisitor]:
msg = f'translator not found for {builder.name}'
raise AttributeError(msg) from err
- def create_translator(self, builder: Builder, *args: Any) -> nodes.NodeVisitor:
+ def create_translator(
+ self, builder: type[Builder] | Builder, *args: Any
+ ) -> nodes.NodeVisitor:
translator_class = self.get_translator_class(builder)
translator = translator_class(*args)
@@ -589,15 +591,6 @@ def get_envversion(self, app: Sphinx) -> Mapping[str, int]:
return _get_env_version(app.extensions)
- def get_publisher(self, app: Sphinx, filetype: str) -> Publisher:
- try:
- return self.publishers[filetype]
- except KeyError:
- pass
- publisher = create_publisher(app, filetype)
- self.publishers[filetype] = publisher
- return publisher
-
def merge_source_suffix(app: Sphinx, config: Config) -> None:
"""Merge any user-specified source_suffix with any added by extensions."""
diff --git a/sphinx/roles.py b/sphinx/roles.py
index 98843de5a95..fdb997314fe 100644
--- a/sphinx/roles.py
+++ b/sphinx/roles.py
@@ -6,14 +6,13 @@
from typing import TYPE_CHECKING
import docutils.parsers.rst.directives
-import docutils.parsers.rst.roles
import docutils.parsers.rst.states
from docutils import nodes, utils
from sphinx import addnodes
from sphinx.locale import _, __
from sphinx.util import ws_re
-from sphinx.util.docutils import ReferenceRole, SphinxRole
+from sphinx.util.docutils import ReferenceRole, SphinxRole, _normalize_options
if TYPE_CHECKING:
from collections.abc import Sequence
@@ -103,7 +102,7 @@ def run(self) -> tuple[list[Node], list[system_message]]:
self.refdomain, self.reftype = '', self.name
self.classes = ['xref', self.reftype]
else:
- self.refdomain, self.reftype = self.name.split(':', 1)
+ self.refdomain, _, self.reftype = self.name.partition(':')
self.classes = ['xref', self.refdomain, f'{self.refdomain}-{self.reftype}']
if self.disabled:
@@ -115,7 +114,7 @@ def create_non_xref_node(self) -> tuple[list[Node], list[system_message]]:
text = utils.unescape(self.text[1:])
if self.fix_parens:
self.has_explicit_title = False # treat as implicit
- text, target = self.update_title_and_target(text, '')
+ text, _target = self.update_title_and_target(text, '')
node = self.innernodeclass(self.rawtext, text, classes=self.classes)
return self.result_nodes(self.inliner.document, self.env, node, is_ref=False)
@@ -130,7 +129,7 @@ def create_xref_node(self) -> tuple[list[Node], list[system_message]]:
# create the reference node
options = {
- 'refdoc': self.env.docname,
+ 'refdoc': self.env.current_document.docname,
'refdomain': self.refdomain,
'reftype': self.reftype,
'refexplicit': self.has_explicit_title,
@@ -234,9 +233,9 @@ def run(self) -> tuple[list[Node], list[system_message]]:
return [index, target, reference], []
def build_uri(self) -> str:
- ret = self.target.split('#', 1)
- if len(ret) == 2:
- return f'{CVE._BASE_URL}{ret[0]}#{ret[1]}'
+ ret = self.target.partition('#')
+ if ret[1]:
+ return f'{CVE._BASE_URL}{ret[0]}#{ret[2]}'
return f'{CVE._BASE_URL}{ret[0]}'
@@ -279,9 +278,9 @@ def run(self) -> tuple[list[Node], list[system_message]]:
return [index, target, reference], []
def build_uri(self) -> str:
- ret = self.target.split('#', 1)
- if len(ret) == 2:
- return f'{CWE._BASE_URL}{int(ret[0])}.html#{ret[1]}'
+ ret = self.target.partition('#')
+ if ret[1]:
+ return f'{CWE._BASE_URL}{int(ret[0])}.html#{ret[2]}'
return f'{CWE._BASE_URL}{int(ret[0])}.html'
@@ -323,9 +322,9 @@ def run(self) -> tuple[list[Node], list[system_message]]:
def build_uri(self) -> str:
base_url = self.inliner.document.settings.pep_base_url
- ret = self.target.split('#', 1)
- if len(ret) == 2:
- return base_url + 'pep-%04d/#%s' % (int(ret[0]), ret[1])
+ ret = self.target.partition('#')
+ if ret[1]:
+ return base_url + 'pep-%04d/#%s' % (int(ret[0]), ret[2])
else:
return base_url + 'pep-%04d/' % int(ret[0])
@@ -361,9 +360,9 @@ def run(self) -> tuple[list[Node], list[system_message]]:
def build_uri(self) -> str:
base_url = self.inliner.document.settings.rfc_base_url
- ret = self.target.split('#', 1)
- if len(ret) == 2:
- return base_url + self.inliner.rfc_url % int(ret[0]) + '#' + ret[1]
+ ret = self.target.partition('#')
+ if ret[1]:
+ return base_url + self.inliner.rfc_url % int(ret[0]) + '#' + ret[2]
else:
return base_url + self.inliner.rfc_url % int(ret[0])
@@ -584,10 +583,7 @@ def code_role(
options: dict[str, Any] | None = None,
content: Sequence[str] = (),
) -> tuple[list[Node], list[system_message]]:
- if options is None:
- options = {}
- options = options.copy()
- docutils.parsers.rst.roles.set_classes(options)
+ options = _normalize_options(options)
language = options.get('language', '')
classes = ['code']
if language:
diff --git a/sphinx/search/__init__.py b/sphinx/search/__init__.py
index cd0aa0bbd8f..b835b7b36db 100644
--- a/sphinx/search/__init__.py
+++ b/sphinx/search/__init__.py
@@ -20,7 +20,7 @@
from sphinx.util.index_entries import split_index_msg
if TYPE_CHECKING:
- from collections.abc import Callable, Iterable
+ from collections.abc import Callable, Iterable, Set
from typing import Any, Protocol, TypeVar
from docutils.nodes import Node
@@ -74,28 +74,25 @@ class SearchLanguage:
lang: str = ''
language_name: str = ''
- stopwords: set[str] = set()
+ stopwords: Set[str] = frozenset()
js_splitter_code: str = ''
js_stemmer_rawcode: str = ''
js_stemmer_code = """
/**
* Dummy stemmer for languages without stemming rules.
*/
-var Stemmer = function() {
- this.stemWord = function(w) {
+var Stemmer = function () {
+ this.stemWord = function (w) {
return w;
- }
-}
+ };
+};
"""
_word_re = re.compile(r'\w+')
def __init__(self, options: dict[str, str]) -> None:
- self.options = options
- self.init(options)
-
- def init(self, options: dict[str, str]) -> None:
"""Initialize the class with the options the user has given."""
+ self.options = options
def split(self, input: str) -> list[str]:
"""This method splits a sentence into words. Default splitter splits input
@@ -120,10 +117,7 @@ def word_filter(self, word: str) -> bool:
"""Return true if the target word should be registered in the search index.
This method is called after stemming.
"""
- return len(word) == 0 or not (
- ((len(word) < 3) and (12353 < ord(word[0]) < 12436))
- or (ord(word[0]) < 256 and (word in self.stopwords))
- )
+ return not word.isdigit() and word not in self.stopwords
# SearchEnglish imported after SearchLanguage is defined due to circular import
@@ -131,9 +125,11 @@ def word_filter(self, word: str) -> bool:
def parse_stop_word(source: str) -> set[str]:
- """Parse snowball style word list like this:
+ """Collect the stopwords from a snowball style word list:
- * https://snowball.tartarus.org/algorithms/finnish/stop.txt
+ .. code:: text
+
+ list of space separated stop words | optional comment
"""
result: set[str] = set()
for line in source.splitlines():
@@ -304,7 +300,7 @@ def __init__(
# fallback; try again with language-code
if lang_class is None and '_' in lang:
- lang_class = languages.get(lang.split('_')[0])
+ lang_class = languages.get(lang.partition('_')[0])
if lang_class is None:
self.lang: SearchLanguage = SearchEnglish(options)
@@ -584,17 +580,17 @@ def get_js_stemmer_rawcode(self) -> str | None:
def get_js_stemmer_code(self) -> str:
"""Returns JS code that will be inserted into language_data.js."""
- if self.lang.js_stemmer_rawcode:
- base_js_path = _NON_MINIFIED_JS_PATH / 'base-stemmer.js'
- language_js_path = _NON_MINIFIED_JS_PATH / self.lang.js_stemmer_rawcode
- base_js = base_js_path.read_text(encoding='utf-8')
- language_js = language_js_path.read_text(encoding='utf-8')
- return (
- f'{base_js}\n{language_js}\nStemmer = {self.lang.language_name}Stemmer;'
- )
- else:
+ if not self.lang.js_stemmer_rawcode:
return self.lang.js_stemmer_code
+ base_js_path = _MINIFIED_JS_PATH / 'base-stemmer.js'
+ language_js_path = _MINIFIED_JS_PATH / self.lang.js_stemmer_rawcode
+ return '\n'.join((
+ base_js_path.read_text(encoding='utf-8'),
+ language_js_path.read_text(encoding='utf-8'),
+ f'window.Stemmer = {self.lang.language_name}Stemmer;',
+ ))
+
def _feed_visit_nodes(
node: nodes.Node,
diff --git a/sphinx/search/_stopwords/__init__.py b/sphinx/search/_stopwords/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/sphinx/search/_stopwords/da.py b/sphinx/search/_stopwords/da.py
new file mode 100644
index 00000000000..4991deb861e
--- /dev/null
+++ b/sphinx/search/_stopwords/da.py
@@ -0,0 +1,101 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/danish/stop.txt
+
+from __future__ import annotations
+
+DANISH_STOPWORDS = frozenset({
+ 'ad',
+ 'af',
+ 'alle',
+ 'alt',
+ 'anden',
+ 'at',
+ 'blev',
+ 'blive',
+ 'bliver',
+ 'da',
+ 'de',
+ 'dem',
+ 'den',
+ 'denne',
+ 'der',
+ 'deres',
+ 'det',
+ 'dette',
+ 'dig',
+ 'din',
+ 'disse',
+ 'dog',
+ 'du',
+ 'efter',
+ 'eller',
+ 'en',
+ 'end',
+ 'er',
+ 'et',
+ 'for',
+ 'fra',
+ 'ham',
+ 'han',
+ 'hans',
+ 'har',
+ 'havde',
+ 'have',
+ 'hende',
+ 'hendes',
+ 'her',
+ 'hos',
+ 'hun',
+ 'hvad',
+ 'hvis',
+ 'hvor',
+ 'i',
+ 'ikke',
+ 'ind',
+ 'jeg',
+ 'jer',
+ 'jo',
+ 'kunne',
+ 'man',
+ 'mange',
+ 'med',
+ 'meget',
+ 'men',
+ 'mig',
+ 'min',
+ 'mine',
+ 'mit',
+ 'mod',
+ 'ned',
+ 'noget',
+ 'nogle',
+ 'nu',
+ 'når',
+ 'og',
+ 'også',
+ 'om',
+ 'op',
+ 'os',
+ 'over',
+ 'på',
+ 'selv',
+ 'sig',
+ 'sin',
+ 'sine',
+ 'sit',
+ 'skal',
+ 'skulle',
+ 'som',
+ 'sådan',
+ 'thi',
+ 'til',
+ 'ud',
+ 'under',
+ 'var',
+ 'vi',
+ 'vil',
+ 'ville',
+ 'vor',
+ 'være',
+ 'været',
+})
diff --git a/sphinx/search/_stopwords/da.txt b/sphinx/search/_stopwords/da.txt
new file mode 100644
index 00000000000..9761414a1c6
--- /dev/null
+++ b/sphinx/search/_stopwords/da.txt
@@ -0,0 +1,102 @@
+
+ | A Danish stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | This is a ranked list (commonest to rarest) of stopwords derived from
+ | a large text sample.
+
+
+og | and
+i | in
+jeg | I
+det | that (dem. pronoun)/it (pers. pronoun)
+at | that (in front of a sentence)/to (with infinitive)
+en | a/an
+den | it (pers. pronoun)/that (dem. pronoun)
+til | to/at/for/until/against/by/of/into, more
+er | present tense of "to be"
+som | who, as
+på | on/upon/in/on/at/to/after/of/with/for, on
+de | they
+med | with/by/in, along
+han | he
+af | of/by/from/off/for/in/with/on, off
+for | at/for/to/from/by/of/ago, in front/before, because
+ikke | not
+der | who/which, there/those
+var | past tense of "to be"
+mig | me/myself
+sig | oneself/himself/herself/itself/themselves
+men | but
+et | a/an/one, one (number), someone/somebody/one
+har | present tense of "to have"
+om | round/about/for/in/a, about/around/down, if
+vi | we
+min | my
+havde | past tense of "to have"
+ham | him
+hun | she
+nu | now
+over | over/above/across/by/beyond/past/on/about, over/past
+da | then, when/as/since
+fra | from/off/since, off, since
+du | you
+ud | out
+sin | his/her/its/one's
+dem | them
+os | us/ourselves
+op | up
+man | you/one
+hans | his
+hvor | where
+eller | or
+hvad | what
+skal | must/shall etc.
+selv | myself/youself/herself/ourselves etc., even
+her | here
+alle | all/everyone/everybody etc.
+vil | will (verb)
+blev | past tense of "to stay/to remain/to get/to become"
+kunne | could
+ind | in
+når | when
+være | present tense of "to be"
+dog | however/yet/after all
+noget | something
+ville | would
+jo | you know/you see (adv), yes
+deres | their/theirs
+efter | after/behind/according to/for/by/from, later/afterwards
+ned | down
+skulle | should
+denne | this
+end | than
+dette | this
+mit | my/mine
+også | also
+under | under/beneath/below/during, below/underneath
+have | have
+dig | you
+anden | other
+hende | her
+mine | my
+alt | everything
+meget | much/very, plenty of
+sit | his, her, its, one's
+sine | his, her, its, one's
+vor | our
+mod | against
+disse | these
+hvis | if
+din | your/yours
+nogle | some
+hos | by/at
+blive | be/become
+mange | many
+ad | by/through
+bliver | present tense of "to be/to become"
+hendes | her/hers
+været | be
+thi | for (conj)
+jer | you
+sådan | such, like this/like that
diff --git a/sphinx/search/_stopwords/de.py b/sphinx/search/_stopwords/de.py
new file mode 100644
index 00000000000..d728493d307
--- /dev/null
+++ b/sphinx/search/_stopwords/de.py
@@ -0,0 +1,238 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/german/stop.txt
+
+from __future__ import annotations
+
+GERMAN_STOPWORDS = frozenset({
+ 'aber',
+ 'alle',
+ 'allem',
+ 'allen',
+ 'aller',
+ 'alles',
+ 'als',
+ 'also',
+ 'am',
+ 'an',
+ 'ander',
+ 'andere',
+ 'anderem',
+ 'anderen',
+ 'anderer',
+ 'anderes',
+ 'anderm',
+ 'andern',
+ 'anderr',
+ 'anders',
+ 'auch',
+ 'auf',
+ 'aus',
+ 'bei',
+ 'bin',
+ 'bis',
+ 'bist',
+ 'da',
+ 'damit',
+ 'dann',
+ 'das',
+ 'daß',
+ 'dasselbe',
+ 'dazu',
+ 'dein',
+ 'deine',
+ 'deinem',
+ 'deinen',
+ 'deiner',
+ 'deines',
+ 'dem',
+ 'demselben',
+ 'den',
+ 'denn',
+ 'denselben',
+ 'der',
+ 'derer',
+ 'derselbe',
+ 'derselben',
+ 'des',
+ 'desselben',
+ 'dessen',
+ 'dich',
+ 'die',
+ 'dies',
+ 'diese',
+ 'dieselbe',
+ 'dieselben',
+ 'diesem',
+ 'diesen',
+ 'dieser',
+ 'dieses',
+ 'dir',
+ 'doch',
+ 'dort',
+ 'du',
+ 'durch',
+ 'ein',
+ 'eine',
+ 'einem',
+ 'einen',
+ 'einer',
+ 'eines',
+ 'einig',
+ 'einige',
+ 'einigem',
+ 'einigen',
+ 'einiger',
+ 'einiges',
+ 'einmal',
+ 'er',
+ 'es',
+ 'etwas',
+ 'euch',
+ 'euer',
+ 'eure',
+ 'eurem',
+ 'euren',
+ 'eurer',
+ 'eures',
+ 'für',
+ 'gegen',
+ 'gewesen',
+ 'hab',
+ 'habe',
+ 'haben',
+ 'hat',
+ 'hatte',
+ 'hatten',
+ 'hier',
+ 'hin',
+ 'hinter',
+ 'ich',
+ 'ihm',
+ 'ihn',
+ 'ihnen',
+ 'ihr',
+ 'ihre',
+ 'ihrem',
+ 'ihren',
+ 'ihrer',
+ 'ihres',
+ 'im',
+ 'in',
+ 'indem',
+ 'ins',
+ 'ist',
+ 'jede',
+ 'jedem',
+ 'jeden',
+ 'jeder',
+ 'jedes',
+ 'jene',
+ 'jenem',
+ 'jenen',
+ 'jener',
+ 'jenes',
+ 'jetzt',
+ 'kann',
+ 'kein',
+ 'keine',
+ 'keinem',
+ 'keinen',
+ 'keiner',
+ 'keines',
+ 'können',
+ 'könnte',
+ 'machen',
+ 'man',
+ 'manche',
+ 'manchem',
+ 'manchen',
+ 'mancher',
+ 'manches',
+ 'mein',
+ 'meine',
+ 'meinem',
+ 'meinen',
+ 'meiner',
+ 'meines',
+ 'mich',
+ 'mir',
+ 'mit',
+ 'muss',
+ 'musste',
+ 'nach',
+ 'nicht',
+ 'nichts',
+ 'noch',
+ 'nun',
+ 'nur',
+ 'ob',
+ 'oder',
+ 'ohne',
+ 'sehr',
+ 'sein',
+ 'seine',
+ 'seinem',
+ 'seinen',
+ 'seiner',
+ 'seines',
+ 'selbst',
+ 'sich',
+ 'sie',
+ 'sind',
+ 'so',
+ 'solche',
+ 'solchem',
+ 'solchen',
+ 'solcher',
+ 'solches',
+ 'soll',
+ 'sollte',
+ 'sondern',
+ 'sonst',
+ 'um',
+ 'und',
+ 'uns',
+ 'unse',
+ 'unsem',
+ 'unsen',
+ 'unser',
+ 'unses',
+ 'unter',
+ 'viel',
+ 'vom',
+ 'von',
+ 'vor',
+ 'war',
+ 'waren',
+ 'warst',
+ 'was',
+ 'weg',
+ 'weil',
+ 'weiter',
+ 'welche',
+ 'welchem',
+ 'welchen',
+ 'welcher',
+ 'welches',
+ 'wenn',
+ 'werde',
+ 'werden',
+ 'wie',
+ 'wieder',
+ 'will',
+ 'wir',
+ 'wird',
+ 'wirst',
+ 'wo',
+ 'wollen',
+ 'wollte',
+ 'während',
+ 'würde',
+ 'würden',
+ 'zu',
+ 'zum',
+ 'zur',
+ 'zwar',
+ 'zwischen',
+ 'über',
+})
diff --git a/sphinx/search/_stopwords/de.txt b/sphinx/search/_stopwords/de.txt
new file mode 100644
index 00000000000..c8935ae61c7
--- /dev/null
+++ b/sphinx/search/_stopwords/de.txt
@@ -0,0 +1,285 @@
+
+ | A German stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | The number of forms in this list is reduced significantly by passing it
+ | through the German stemmer.
+
+
+aber | but
+
+alle | all
+allem
+allen
+aller
+alles
+
+als | than, as
+also | so
+am | an + dem
+an | at
+
+ander | other
+andere
+anderem
+anderen
+anderer
+anderes
+anderm
+andern
+anderr
+anders
+
+auch | also
+auf | on
+aus | out of
+bei | by
+bin | am
+bis | until
+bist | art
+da | there
+damit | with it
+dann | then
+
+der | the
+den
+des
+dem
+die
+das
+
+daß | that
+
+derselbe | the same
+derselben
+denselben
+desselben
+demselben
+dieselbe
+dieselben
+dasselbe
+
+dazu | to that
+
+dein | thy
+deine
+deinem
+deinen
+deiner
+deines
+
+denn | because
+
+derer | of those
+dessen | of him
+
+dich | thee
+dir | to thee
+du | thou
+
+dies | this
+diese
+diesem
+diesen
+dieser
+dieses
+
+
+doch | (several meanings)
+dort | (over) there
+
+
+durch | through
+
+ein | a
+eine
+einem
+einen
+einer
+eines
+
+einig | some
+einige
+einigem
+einigen
+einiger
+einiges
+
+einmal | once
+
+er | he
+ihn | him
+ihm | to him
+
+es | it
+etwas | something
+
+euer | your
+eure
+eurem
+euren
+eurer
+eures
+
+für | for
+gegen | towards
+gewesen | p.p. of sein
+hab | have
+habe | have
+haben | have
+hat | has
+hatte | had
+hatten | had
+hier | here
+hin | there
+hinter | behind
+
+ich | I
+mich | me
+mir | to me
+
+
+ihr | you, to her
+ihre
+ihrem
+ihren
+ihrer
+ihres
+euch | to you
+
+im | in + dem
+in | in
+indem | while
+ins | in + das
+ist | is
+
+jede | each, every
+jedem
+jeden
+jeder
+jedes
+
+jene | that
+jenem
+jenen
+jener
+jenes
+
+jetzt | now
+kann | can
+
+kein | no
+keine
+keinem
+keinen
+keiner
+keines
+
+können | can
+könnte | could
+machen | do
+man | one
+
+manche | some, many a
+manchem
+manchen
+mancher
+manches
+
+mein | my
+meine
+meinem
+meinen
+meiner
+meines
+
+mit | with
+muss | must
+musste | had to
+nach | to(wards)
+nicht | not
+nichts | nothing
+noch | still, yet
+nun | now
+nur | only
+ob | whether
+oder | or
+ohne | without
+sehr | very
+
+sein | his
+seine
+seinem
+seinen
+seiner
+seines
+
+selbst | self
+sich | herself
+
+sie | they, she
+ihnen | to them
+
+sind | are
+so | so
+
+solche | such
+solchem
+solchen
+solcher
+solches
+
+soll | shall
+sollte | should
+sondern | but
+sonst | else
+über | over
+um | about, around
+und | and
+
+uns | us
+unse
+unsem
+unsen
+unser
+unses
+
+unter | under
+viel | much
+vom | von + dem
+von | from
+vor | before
+während | while
+war | was
+waren | were
+warst | wast
+was | what
+weg | away, off
+weil | because
+weiter | further
+
+welche | which
+welchem
+welchen
+welcher
+welches
+
+wenn | when
+werde | will
+werden | will
+wie | how
+wieder | again
+will | want
+wir | we
+wird | will
+wirst | willst
+wo | where
+wollen | want
+wollte | wanted
+würde | would
+würden | would
+zu | to
+zum | zu + dem
+zur | zu + der
+zwar | indeed
+zwischen | between
diff --git a/sphinx/search/_stopwords/en.py b/sphinx/search/_stopwords/en.py
new file mode 100644
index 00000000000..06fa94cb2c5
--- /dev/null
+++ b/sphinx/search/_stopwords/en.py
@@ -0,0 +1,181 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/english/stop.txt
+
+from __future__ import annotations
+
+ENGLISH_STOPWORDS = frozenset({
+ 'a',
+ 'about',
+ 'above',
+ 'after',
+ 'again',
+ 'against',
+ 'all',
+ 'am',
+ 'an',
+ 'and',
+ 'any',
+ 'are',
+ "aren't",
+ 'as',
+ 'at',
+ 'be',
+ 'because',
+ 'been',
+ 'before',
+ 'being',
+ 'below',
+ 'between',
+ 'both',
+ 'but',
+ 'by',
+ "can't",
+ 'cannot',
+ 'could',
+ "couldn't",
+ 'did',
+ "didn't",
+ 'do',
+ 'does',
+ "doesn't",
+ 'doing',
+ "don't",
+ 'down',
+ 'during',
+ 'each',
+ 'few',
+ 'for',
+ 'from',
+ 'further',
+ 'had',
+ "hadn't",
+ 'has',
+ "hasn't",
+ 'have',
+ "haven't",
+ 'having',
+ 'he',
+ "he'd",
+ "he'll",
+ "he's",
+ 'her',
+ 'here',
+ "here's",
+ 'hers',
+ 'herself',
+ 'him',
+ 'himself',
+ 'his',
+ 'how',
+ "how's",
+ 'i',
+ "i'd",
+ "i'll",
+ "i'm",
+ "i've",
+ 'if',
+ 'in',
+ 'into',
+ 'is',
+ "isn't",
+ 'it',
+ "it's",
+ 'its',
+ 'itself',
+ "let's",
+ 'me',
+ 'more',
+ 'most',
+ "mustn't",
+ 'my',
+ 'myself',
+ 'no',
+ 'nor',
+ 'not',
+ 'of',
+ 'off',
+ 'on',
+ 'once',
+ 'only',
+ 'or',
+ 'other',
+ 'ought',
+ 'our',
+ 'ours',
+ 'ourselves',
+ 'out',
+ 'over',
+ 'own',
+ 'same',
+ "shan't",
+ 'she',
+ "she'd",
+ "she'll",
+ "she's",
+ 'should',
+ "shouldn't",
+ 'so',
+ 'some',
+ 'such',
+ 'than',
+ 'that',
+ "that's",
+ 'the',
+ 'their',
+ 'theirs',
+ 'them',
+ 'themselves',
+ 'then',
+ 'there',
+ "there's",
+ 'these',
+ 'they',
+ "they'd",
+ "they'll",
+ "they're",
+ "they've",
+ 'this',
+ 'those',
+ 'through',
+ 'to',
+ 'too',
+ 'under',
+ 'until',
+ 'up',
+ 'very',
+ 'was',
+ "wasn't",
+ 'we',
+ "we'd",
+ "we'll",
+ "we're",
+ "we've",
+ 'were',
+ "weren't",
+ 'what',
+ "what's",
+ 'when',
+ "when's",
+ 'where',
+ "where's",
+ 'which',
+ 'while',
+ 'who',
+ "who's",
+ 'whom',
+ 'why',
+ "why's",
+ 'with',
+ "won't",
+ 'would',
+ "wouldn't",
+ 'you',
+ "you'd",
+ "you'll",
+ "you're",
+ "you've",
+ 'your',
+ 'yours',
+ 'yourself',
+ 'yourselves',
+})
diff --git a/sphinx/search/_stopwords/en.txt b/sphinx/search/_stopwords/en.txt
new file mode 100644
index 00000000000..5442976d354
--- /dev/null
+++ b/sphinx/search/_stopwords/en.txt
@@ -0,0 +1,311 @@
+
+ | An English stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | Many of the forms below are quite rare (e.g. "yourselves") but included for
+ | completeness.
+
+ | PRONOUNS FORMS
+ | 1st person sing
+
+i | subject, always in upper case of course
+
+me | object
+my | possessive adjective
+ | the possessive pronoun `mine' is best suppressed, because of the
+ | sense of coal-mine etc.
+myself | reflexive
+ | 1st person plural
+we | subject
+
+| us | object
+ | care is required here because US = United States. It is usually
+ | safe to remove it if it is in lower case.
+our | possessive adjective
+ours | possessive pronoun
+ourselves | reflexive
+ | second person (archaic `thou' forms not included)
+you | subject and object
+your | possessive adjective
+yours | possessive pronoun
+yourself | reflexive (singular)
+yourselves | reflexive (plural)
+ | third person singular
+he | subject
+him | object
+his | possessive adjective and pronoun
+himself | reflexive
+
+she | subject
+her | object and possessive adjective
+hers | possessive pronoun
+herself | reflexive
+
+it | subject and object
+its | possessive adjective
+itself | reflexive
+ | third person plural
+they | subject
+them | object
+their | possessive adjective
+theirs | possessive pronoun
+themselves | reflexive
+ | other forms (demonstratives, interrogatives)
+what
+which
+who
+whom
+this
+that
+these
+those
+
+ | VERB FORMS (using F.R. Palmer's nomenclature)
+ | BE
+am | 1st person, present
+is | -s form (3rd person, present)
+are | present
+was | 1st person, past
+were | past
+be | infinitive
+been | past participle
+being | -ing form
+ | HAVE
+have | simple
+has | -s form
+had | past
+having | -ing form
+ | DO
+do | simple
+does | -s form
+did | past
+doing | -ing form
+
+ | The forms below are, I believe, best omitted, because of the significant
+ | homonym forms:
+
+ | He made a WILL
+ | old tin CAN
+ | merry month of MAY
+ | a smell of MUST
+ | fight the good fight with all thy MIGHT
+
+ | would, could, should, ought might however be included
+
+ | | AUXILIARIES
+ | | WILL
+ |will
+
+would
+
+ | | SHALL
+ |shall
+
+should
+
+ | | CAN
+ |can
+
+could
+
+ | | MAY
+ |may
+ |might
+ | | MUST
+ |must
+ | | OUGHT
+
+ought
+
+ | COMPOUND FORMS, increasingly encountered nowadays in 'formal' writing
+ | pronoun + verb
+
+i'm
+you're
+he's
+she's
+it's
+we're
+they're
+i've
+you've
+we've
+they've
+i'd
+you'd
+he'd
+she'd
+we'd
+they'd
+i'll
+you'll
+he'll
+she'll
+we'll
+they'll
+
+ | verb + negation
+
+isn't
+aren't
+wasn't
+weren't
+hasn't
+haven't
+hadn't
+doesn't
+don't
+didn't
+
+ | auxiliary + negation
+
+won't
+wouldn't
+shan't
+shouldn't
+can't
+cannot
+couldn't
+mustn't
+
+ | miscellaneous forms
+
+let's
+that's
+who's
+what's
+here's
+there's
+when's
+where's
+why's
+how's
+
+ | rarer forms
+
+ | daren't needn't
+
+ | doubtful forms
+
+ | oughtn't mightn't
+
+ | ARTICLES
+a
+an
+the
+
+ | THE REST (Overlap among prepositions, conjunctions, adverbs etc is so
+ | high, that classification is pointless.)
+and
+but
+if
+or
+because
+as
+until
+while
+
+of
+at
+by
+for
+with
+about
+against
+between
+into
+through
+during
+before
+after
+above
+below
+to
+from
+up
+down
+in
+out
+on
+off
+over
+under
+
+again
+further
+then
+once
+
+here
+there
+when
+where
+why
+how
+
+all
+any
+both
+each
+few
+more
+most
+other
+some
+such
+
+no
+nor
+not
+only
+own
+same
+so
+than
+too
+very
+
+ | Just for the record, the following words are among the commonest in English
+
+ | one
+ | every
+ | least
+ | less
+ | many
+ | now
+ | ever
+ | never
+ | say
+ | says
+ | said
+ | also
+ | get
+ | go
+ | goes
+ | just
+ | made
+ | make
+ | put
+ | see
+ | seen
+ | whether
+ | like
+ | well
+ | back
+ | even
+ | still
+ | way
+ | take
+ | since
+ | another
+ | however
+ | two
+ | three
+ | four
+ | five
+ | first
+ | second
+ | new
+ | old
+ | high
+ | long
diff --git a/sphinx/search/_stopwords/es.py b/sphinx/search/_stopwords/es.py
new file mode 100644
index 00000000000..4ec63cf8998
--- /dev/null
+++ b/sphinx/search/_stopwords/es.py
@@ -0,0 +1,315 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/spanish/stop.txt
+
+from __future__ import annotations
+
+SPANISH_STOPWORDS = frozenset({
+ 'a',
+ 'al',
+ 'algo',
+ 'algunas',
+ 'algunos',
+ 'ante',
+ 'antes',
+ 'como',
+ 'con',
+ 'contra',
+ 'cual',
+ 'cuando',
+ 'de',
+ 'del',
+ 'desde',
+ 'donde',
+ 'durante',
+ 'e',
+ 'el',
+ 'ella',
+ 'ellas',
+ 'ellos',
+ 'en',
+ 'entre',
+ 'era',
+ 'erais',
+ 'eran',
+ 'eras',
+ 'eres',
+ 'es',
+ 'esa',
+ 'esas',
+ 'ese',
+ 'eso',
+ 'esos',
+ 'esta',
+ 'estaba',
+ 'estabais',
+ 'estaban',
+ 'estabas',
+ 'estad',
+ 'estada',
+ 'estadas',
+ 'estado',
+ 'estados',
+ 'estamos',
+ 'estando',
+ 'estar',
+ 'estaremos',
+ 'estará',
+ 'estarán',
+ 'estarás',
+ 'estaré',
+ 'estaréis',
+ 'estaría',
+ 'estaríais',
+ 'estaríamos',
+ 'estarían',
+ 'estarías',
+ 'estas',
+ 'este',
+ 'estemos',
+ 'esto',
+ 'estos',
+ 'estoy',
+ 'estuve',
+ 'estuviera',
+ 'estuvierais',
+ 'estuvieran',
+ 'estuvieras',
+ 'estuvieron',
+ 'estuviese',
+ 'estuvieseis',
+ 'estuviesen',
+ 'estuvieses',
+ 'estuvimos',
+ 'estuviste',
+ 'estuvisteis',
+ 'estuviéramos',
+ 'estuviésemos',
+ 'estuvo',
+ 'está',
+ 'estábamos',
+ 'estáis',
+ 'están',
+ 'estás',
+ 'esté',
+ 'estéis',
+ 'estén',
+ 'estés',
+ 'fue',
+ 'fuera',
+ 'fuerais',
+ 'fueran',
+ 'fueras',
+ 'fueron',
+ 'fuese',
+ 'fueseis',
+ 'fuesen',
+ 'fueses',
+ 'fui',
+ 'fuimos',
+ 'fuiste',
+ 'fuisteis',
+ 'fuéramos',
+ 'fuésemos',
+ 'ha',
+ 'habida',
+ 'habidas',
+ 'habido',
+ 'habidos',
+ 'habiendo',
+ 'habremos',
+ 'habrá',
+ 'habrán',
+ 'habrás',
+ 'habré',
+ 'habréis',
+ 'habría',
+ 'habríais',
+ 'habríamos',
+ 'habrían',
+ 'habrías',
+ 'habéis',
+ 'había',
+ 'habíais',
+ 'habíamos',
+ 'habían',
+ 'habías',
+ 'han',
+ 'has',
+ 'hasta',
+ 'hay',
+ 'haya',
+ 'hayamos',
+ 'hayan',
+ 'hayas',
+ 'hayáis',
+ 'he',
+ 'hemos',
+ 'hube',
+ 'hubiera',
+ 'hubierais',
+ 'hubieran',
+ 'hubieras',
+ 'hubieron',
+ 'hubiese',
+ 'hubieseis',
+ 'hubiesen',
+ 'hubieses',
+ 'hubimos',
+ 'hubiste',
+ 'hubisteis',
+ 'hubiéramos',
+ 'hubiésemos',
+ 'hubo',
+ 'la',
+ 'las',
+ 'le',
+ 'les',
+ 'lo',
+ 'los',
+ 'me',
+ 'mi',
+ 'mis',
+ 'mucho',
+ 'muchos',
+ 'muy',
+ 'más',
+ 'mí',
+ 'mía',
+ 'mías',
+ 'mío',
+ 'míos',
+ 'nada',
+ 'ni',
+ 'no',
+ 'nos',
+ 'nosotras',
+ 'nosotros',
+ 'nuestra',
+ 'nuestras',
+ 'nuestro',
+ 'nuestros',
+ 'o',
+ 'os',
+ 'otra',
+ 'otras',
+ 'otro',
+ 'otros',
+ 'para',
+ 'pero',
+ 'poco',
+ 'por',
+ 'porque',
+ 'que',
+ 'quien',
+ 'quienes',
+ 'qué',
+ 'se',
+ 'sea',
+ 'seamos',
+ 'sean',
+ 'seas',
+ 'seremos',
+ 'será',
+ 'serán',
+ 'serás',
+ 'seré',
+ 'seréis',
+ 'sería',
+ 'seríais',
+ 'seríamos',
+ 'serían',
+ 'serías',
+ 'seáis',
+ 'sido',
+ 'siendo',
+ 'sin',
+ 'sobre',
+ 'sois',
+ 'somos',
+ 'son',
+ 'soy',
+ 'su',
+ 'sus',
+ 'suya',
+ 'suyas',
+ 'suyo',
+ 'suyos',
+ 'sí',
+ 'también',
+ 'tanto',
+ 'te',
+ 'tendremos',
+ 'tendrá',
+ 'tendrán',
+ 'tendrás',
+ 'tendré',
+ 'tendréis',
+ 'tendría',
+ 'tendríais',
+ 'tendríamos',
+ 'tendrían',
+ 'tendrías',
+ 'tened',
+ 'tenemos',
+ 'tenga',
+ 'tengamos',
+ 'tengan',
+ 'tengas',
+ 'tengo',
+ 'tengáis',
+ 'tenida',
+ 'tenidas',
+ 'tenido',
+ 'tenidos',
+ 'teniendo',
+ 'tenéis',
+ 'tenía',
+ 'teníais',
+ 'teníamos',
+ 'tenían',
+ 'tenías',
+ 'ti',
+ 'tiene',
+ 'tienen',
+ 'tienes',
+ 'todo',
+ 'todos',
+ 'tu',
+ 'tus',
+ 'tuve',
+ 'tuviera',
+ 'tuvierais',
+ 'tuvieran',
+ 'tuvieras',
+ 'tuvieron',
+ 'tuviese',
+ 'tuvieseis',
+ 'tuviesen',
+ 'tuvieses',
+ 'tuvimos',
+ 'tuviste',
+ 'tuvisteis',
+ 'tuviéramos',
+ 'tuviésemos',
+ 'tuvo',
+ 'tuya',
+ 'tuyas',
+ 'tuyo',
+ 'tuyos',
+ 'tú',
+ 'un',
+ 'una',
+ 'uno',
+ 'unos',
+ 'vosotras',
+ 'vosotros',
+ 'vuestra',
+ 'vuestras',
+ 'vuestro',
+ 'vuestros',
+ 'y',
+ 'ya',
+ 'yo',
+ 'él',
+ 'éramos',
+})
diff --git a/sphinx/search/_stopwords/es.txt b/sphinx/search/_stopwords/es.txt
new file mode 100644
index 00000000000..416c84d225a
--- /dev/null
+++ b/sphinx/search/_stopwords/es.txt
@@ -0,0 +1,347 @@
+
+ | A Spanish stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+
+ | The following is a ranked list (commonest to rarest) of stopwords
+ | deriving from a large sample of text.
+
+ | Extra words have been added at the end.
+
+de | from, of
+la | the, her
+que | who, that
+el | the
+en | in
+y | and
+a | to
+los | the, them
+del | de + el
+se | himself, from him etc
+las | the, them
+por | for, by, etc
+un | a
+para | for
+con | with
+no | no
+una | a
+su | his, her
+al | a + el
+ | es from SER
+lo | him
+como | how
+más | more
+pero | pero
+sus | su plural
+le | to him, her
+ya | already
+o | or
+ | fue from SER
+este | this
+ | ha from HABER
+sí | himself etc
+porque | because
+esta | this
+ | son from SER
+entre | between
+ | está from ESTAR
+cuando | when
+muy | very
+sin | without
+sobre | on
+ | ser from SER
+ | tiene from TENER
+también | also
+me | me
+hasta | until
+hay | there is/are
+donde | where
+ | han from HABER
+quien | whom, that
+ | están from ESTAR
+ | estado from ESTAR
+desde | from
+todo | all
+nos | us
+durante | during
+ | estados from ESTAR
+todos | all
+uno | a
+les | to them
+ni | nor
+contra | against
+otros | other
+ | fueron from SER
+ese | that
+eso | that
+ | había from HABER
+ante | before
+ellos | they
+e | and (variant of y)
+esto | this
+mí | me
+antes | before
+algunos | some
+qué | what?
+unos | a
+yo | I
+otro | other
+otras | other
+otra | other
+él | he
+tanto | so much, many
+esa | that
+estos | these
+mucho | much, many
+quienes | who
+nada | nothing
+muchos | many
+cual | who
+ | sea from SER
+poco | few
+ella | she
+estar | to be
+ | haber from HABER
+estas | these
+ | estaba from ESTAR
+ | estamos from ESTAR
+algunas | some
+algo | something
+nosotros | we
+
+ | other forms
+
+mi | me
+mis | mi plural
+tú | thou
+te | thee
+ti | thee
+tu | thy
+tus | tu plural
+ellas | they
+nosotras | we
+vosotros | you
+vosotras | you
+os | you
+mío | mine
+mía |
+míos |
+mías |
+tuyo | thine
+tuya |
+tuyos |
+tuyas |
+suyo | his, hers, theirs
+suya |
+suyos |
+suyas |
+nuestro | ours
+nuestra |
+nuestros |
+nuestras |
+vuestro | yours
+vuestra |
+vuestros |
+vuestras |
+esos | those
+esas | those
+
+ | forms of estar, to be (not including the infinitive):
+estoy
+estás
+está
+estamos
+estáis
+están
+esté
+estés
+estemos
+estéis
+estén
+estaré
+estarás
+estará
+estaremos
+estaréis
+estarán
+estaría
+estarías
+estaríamos
+estaríais
+estarían
+estaba
+estabas
+estábamos
+estabais
+estaban
+estuve
+estuviste
+estuvo
+estuvimos
+estuvisteis
+estuvieron
+estuviera
+estuvieras
+estuviéramos
+estuvierais
+estuvieran
+estuviese
+estuvieses
+estuviésemos
+estuvieseis
+estuviesen
+estando
+estado
+estada
+estados
+estadas
+estad
+
+ | forms of haber, to have (not including the infinitive):
+he
+has
+ha
+hemos
+habéis
+han
+haya
+hayas
+hayamos
+hayáis
+hayan
+habré
+habrás
+habrá
+habremos
+habréis
+habrán
+habría
+habrías
+habríamos
+habríais
+habrían
+había
+habías
+habíamos
+habíais
+habían
+hube
+hubiste
+hubo
+hubimos
+hubisteis
+hubieron
+hubiera
+hubieras
+hubiéramos
+hubierais
+hubieran
+hubiese
+hubieses
+hubiésemos
+hubieseis
+hubiesen
+habiendo
+habido
+habida
+habidos
+habidas
+
+ | forms of ser, to be (not including the infinitive):
+soy
+eres
+es
+somos
+sois
+son
+sea
+seas
+seamos
+seáis
+sean
+seré
+serás
+será
+seremos
+seréis
+serán
+sería
+serías
+seríamos
+seríais
+serían
+era
+eras
+éramos
+erais
+eran
+fui
+fuiste
+fue
+fuimos
+fuisteis
+fueron
+fuera
+fueras
+fuéramos
+fuerais
+fueran
+fuese
+fueses
+fuésemos
+fueseis
+fuesen
+siendo
+sido
+ | sed also means 'thirst'
+
+ | forms of tener, to have (not including the infinitive):
+tengo
+tienes
+tiene
+tenemos
+tenéis
+tienen
+tenga
+tengas
+tengamos
+tengáis
+tengan
+tendré
+tendrás
+tendrá
+tendremos
+tendréis
+tendrán
+tendría
+tendrías
+tendríamos
+tendríais
+tendrían
+tenía
+tenías
+teníamos
+teníais
+tenían
+tuve
+tuviste
+tuvo
+tuvimos
+tuvisteis
+tuvieron
+tuviera
+tuvieras
+tuviéramos
+tuvierais
+tuvieran
+tuviese
+tuvieses
+tuviésemos
+tuvieseis
+tuviesen
+teniendo
+tenido
+tenida
+tenidos
+tenidas
+tened
diff --git a/sphinx/search/_stopwords/fi.py b/sphinx/search/_stopwords/fi.py
new file mode 100644
index 00000000000..af681337d4b
--- /dev/null
+++ b/sphinx/search/_stopwords/fi.py
@@ -0,0 +1,236 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/finnish/stop.txt
+
+from __future__ import annotations
+
+FINNISH_STOPWORDS = frozenset({
+ 'ei',
+ 'eivät',
+ 'emme',
+ 'en',
+ 'et',
+ 'ette',
+ 'että',
+ 'he',
+ 'heidän',
+ 'heidät',
+ 'heihin',
+ 'heille',
+ 'heillä',
+ 'heiltä',
+ 'heissä',
+ 'heistä',
+ 'heitä',
+ 'hän',
+ 'häneen',
+ 'hänelle',
+ 'hänellä',
+ 'häneltä',
+ 'hänen',
+ 'hänessä',
+ 'hänestä',
+ 'hänet',
+ 'häntä',
+ 'itse',
+ 'ja',
+ 'johon',
+ 'joiden',
+ 'joihin',
+ 'joiksi',
+ 'joilla',
+ 'joille',
+ 'joilta',
+ 'joina',
+ 'joissa',
+ 'joista',
+ 'joita',
+ 'joka',
+ 'joksi',
+ 'jolla',
+ 'jolle',
+ 'jolta',
+ 'jona',
+ 'jonka',
+ 'jos',
+ 'jossa',
+ 'josta',
+ 'jota',
+ 'jotka',
+ 'kanssa',
+ 'keiden',
+ 'keihin',
+ 'keiksi',
+ 'keille',
+ 'keillä',
+ 'keiltä',
+ 'keinä',
+ 'keissä',
+ 'keistä',
+ 'keitä',
+ 'keneen',
+ 'keneksi',
+ 'kenelle',
+ 'kenellä',
+ 'keneltä',
+ 'kenen',
+ 'kenenä',
+ 'kenessä',
+ 'kenestä',
+ 'kenet',
+ 'ketkä',
+ 'ketä',
+ 'koska',
+ 'kuin',
+ 'kuka',
+ 'kun',
+ 'me',
+ 'meidän',
+ 'meidät',
+ 'meihin',
+ 'meille',
+ 'meillä',
+ 'meiltä',
+ 'meissä',
+ 'meistä',
+ 'meitä',
+ 'mihin',
+ 'miksi',
+ 'mikä',
+ 'mille',
+ 'millä',
+ 'miltä',
+ 'minkä',
+ 'minua',
+ 'minulla',
+ 'minulle',
+ 'minulta',
+ 'minun',
+ 'minussa',
+ 'minusta',
+ 'minut',
+ 'minuun',
+ 'minä',
+ 'missä',
+ 'mistä',
+ 'mitkä',
+ 'mitä',
+ 'mukaan',
+ 'mutta',
+ 'ne',
+ 'niiden',
+ 'niihin',
+ 'niiksi',
+ 'niille',
+ 'niillä',
+ 'niiltä',
+ 'niin',
+ 'niinä',
+ 'niissä',
+ 'niistä',
+ 'niitä',
+ 'noiden',
+ 'noihin',
+ 'noiksi',
+ 'noilla',
+ 'noille',
+ 'noilta',
+ 'noin',
+ 'noina',
+ 'noissa',
+ 'noista',
+ 'noita',
+ 'nuo',
+ 'nyt',
+ 'näiden',
+ 'näihin',
+ 'näiksi',
+ 'näille',
+ 'näillä',
+ 'näiltä',
+ 'näinä',
+ 'näissä',
+ 'näistä',
+ 'näitä',
+ 'nämä',
+ 'ole',
+ 'olemme',
+ 'olen',
+ 'olet',
+ 'olette',
+ 'oli',
+ 'olimme',
+ 'olin',
+ 'olisi',
+ 'olisimme',
+ 'olisin',
+ 'olisit',
+ 'olisitte',
+ 'olisivat',
+ 'olit',
+ 'olitte',
+ 'olivat',
+ 'olla',
+ 'olleet',
+ 'ollut',
+ 'on',
+ 'ovat',
+ 'poikki',
+ 'se',
+ 'sekä',
+ 'sen',
+ 'siihen',
+ 'siinä',
+ 'siitä',
+ 'siksi',
+ 'sille',
+ 'sillä',
+ 'siltä',
+ 'sinua',
+ 'sinulla',
+ 'sinulle',
+ 'sinulta',
+ 'sinun',
+ 'sinussa',
+ 'sinusta',
+ 'sinut',
+ 'sinuun',
+ 'sinä',
+ 'sitä',
+ 'tai',
+ 'te',
+ 'teidän',
+ 'teidät',
+ 'teihin',
+ 'teille',
+ 'teillä',
+ 'teiltä',
+ 'teissä',
+ 'teistä',
+ 'teitä',
+ 'tuo',
+ 'tuohon',
+ 'tuoksi',
+ 'tuolla',
+ 'tuolle',
+ 'tuolta',
+ 'tuon',
+ 'tuona',
+ 'tuossa',
+ 'tuosta',
+ 'tuota',
+ 'tähän',
+ 'täksi',
+ 'tälle',
+ 'tällä',
+ 'tältä',
+ 'tämä',
+ 'tämän',
+ 'tänä',
+ 'tässä',
+ 'tästä',
+ 'tätä',
+ 'vaan',
+ 'vai',
+ 'vaikka',
+ 'yli',
+})
diff --git a/sphinx/search/_stopwords/fi.txt b/sphinx/search/_stopwords/fi.txt
new file mode 100644
index 00000000000..b4347ffd74e
--- /dev/null
+++ b/sphinx/search/_stopwords/fi.txt
@@ -0,0 +1,88 @@
+
+| forms of BE
+
+olla
+olen
+olet
+on
+olemme
+olette
+ovat
+ole | negative form
+
+oli
+olisi
+olisit
+olisin
+olisimme
+olisitte
+olisivat
+olit
+olin
+olimme
+olitte
+olivat
+ollut
+olleet
+
+en | negation
+et
+ei
+emme
+ette
+eivät
+
+|Nom Gen Acc Part Iness Elat Illat Adess Ablat Allat Ess Trans
+minä minun minut minua minussa minusta minuun minulla minulta minulle | I
+sinä sinun sinut sinua sinussa sinusta sinuun sinulla sinulta sinulle | you
+hän hänen hänet häntä hänessä hänestä häneen hänellä häneltä hänelle | he she
+me meidän meidät meitä meissä meistä meihin meillä meiltä meille | we
+te teidän teidät teitä teissä teistä teihin teillä teiltä teille | you
+he heidän heidät heitä heissä heistä heihin heillä heiltä heille | they
+
+tämä tämän tätä tässä tästä tähän tällä tältä tälle tänä täksi | this
+tuo tuon tuota tuossa tuosta tuohon tuolla tuolta tuolle tuona tuoksi | that
+se sen sitä siinä siitä siihen sillä siltä sille sinä siksi | it
+nämä näiden näitä näissä näistä näihin näillä näiltä näille näinä näiksi | these
+nuo noiden noita noissa noista noihin noilla noilta noille noina noiksi | those
+ne niiden niitä niissä niistä niihin niillä niiltä niille niinä niiksi | they
+
+kuka kenen kenet ketä kenessä kenestä keneen kenellä keneltä kenelle kenenä keneksi| who
+ketkä keiden ketkä keitä keissä keistä keihin keillä keiltä keille keinä keiksi | (pl)
+mikä minkä minkä mitä missä mistä mihin millä miltä mille minä miksi | which what
+mitkä | (pl)
+
+joka jonka jota jossa josta johon jolla jolta jolle jona joksi | who which
+jotka joiden joita joissa joista joihin joilla joilta joille joina joiksi | (pl)
+
+| conjunctions
+
+että | that
+ja | and
+jos | if
+koska | because
+kuin | than
+mutta | but
+niin | so
+sekä | and
+sillä | for
+tai | or
+vaan | but
+vai | or
+vaikka | although
+
+
+| prepositions
+
+kanssa | with
+mukaan | according to
+noin | about
+poikki | across
+yli | over, across
+
+| other
+
+kun | when
+niin | so
+nyt | now
+itse | self
diff --git a/sphinx/search/_stopwords/fr.py b/sphinx/search/_stopwords/fr.py
new file mode 100644
index 00000000000..1a1edc46916
--- /dev/null
+++ b/sphinx/search/_stopwords/fr.py
@@ -0,0 +1,171 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/french/stop.txt
+
+from __future__ import annotations
+
+FRENCH_STOPWORDS = frozenset({
+ 'ai',
+ 'aie',
+ 'aient',
+ 'aies',
+ 'ait',
+ 'as',
+ 'au',
+ 'aura',
+ 'aurai',
+ 'auraient',
+ 'aurais',
+ 'aurait',
+ 'auras',
+ 'aurez',
+ 'auriez',
+ 'aurions',
+ 'aurons',
+ 'auront',
+ 'aux',
+ 'avaient',
+ 'avais',
+ 'avait',
+ 'avec',
+ 'avez',
+ 'aviez',
+ 'avions',
+ 'avons',
+ 'ayant',
+ 'ayez',
+ 'ayons',
+ 'c',
+ 'ce',
+ 'ceci',
+ 'cela',
+ 'celà',
+ 'ces',
+ 'cet',
+ 'cette',
+ 'd',
+ 'dans',
+ 'de',
+ 'des',
+ 'du',
+ 'elle',
+ 'en',
+ 'es',
+ 'est',
+ 'et',
+ 'eu',
+ 'eue',
+ 'eues',
+ 'eurent',
+ 'eus',
+ 'eusse',
+ 'eussent',
+ 'eusses',
+ 'eussiez',
+ 'eussions',
+ 'eut',
+ 'eux',
+ 'eûmes',
+ 'eût',
+ 'eûtes',
+ 'furent',
+ 'fus',
+ 'fusse',
+ 'fussent',
+ 'fusses',
+ 'fussiez',
+ 'fussions',
+ 'fut',
+ 'fûmes',
+ 'fût',
+ 'fûtes',
+ 'ici',
+ 'il',
+ 'ils',
+ 'j',
+ 'je',
+ 'l',
+ 'la',
+ 'le',
+ 'les',
+ 'leur',
+ 'leurs',
+ 'lui',
+ 'm',
+ 'ma',
+ 'mais',
+ 'me',
+ 'mes',
+ 'moi',
+ 'mon',
+ 'même',
+ 'n',
+ 'ne',
+ 'nos',
+ 'notre',
+ 'nous',
+ 'on',
+ 'ont',
+ 'ou',
+ 'par',
+ 'pas',
+ 'pour',
+ 'qu',
+ 'que',
+ 'quel',
+ 'quelle',
+ 'quelles',
+ 'quels',
+ 'qui',
+ 's',
+ 'sa',
+ 'sans',
+ 'se',
+ 'sera',
+ 'serai',
+ 'seraient',
+ 'serais',
+ 'serait',
+ 'seras',
+ 'serez',
+ 'seriez',
+ 'serions',
+ 'serons',
+ 'seront',
+ 'ses',
+ 'soi',
+ 'soient',
+ 'sois',
+ 'soit',
+ 'sommes',
+ 'son',
+ 'sont',
+ 'soyez',
+ 'soyons',
+ 'suis',
+ 'sur',
+ 't',
+ 'ta',
+ 'te',
+ 'tes',
+ 'toi',
+ 'ton',
+ 'tu',
+ 'un',
+ 'une',
+ 'vos',
+ 'votre',
+ 'vous',
+ 'y',
+ 'à',
+ 'étaient',
+ 'étais',
+ 'était',
+ 'étant',
+ 'étiez',
+ 'étions',
+ 'été',
+ 'étée',
+ 'étées',
+ 'étés',
+ 'êtes',
+})
diff --git a/sphinx/search/_stopwords/fr.txt b/sphinx/search/_stopwords/fr.txt
new file mode 100644
index 00000000000..8591079bf6e
--- /dev/null
+++ b/sphinx/search/_stopwords/fr.txt
@@ -0,0 +1,177 @@
+
+ | A French stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+au | a + le
+aux | a + les
+avec | with
+ce | this
+ces | these
+dans | with
+de | of
+des | de + les
+du | de + le
+elle | she
+en | `of them' etc
+et | and
+eux | them
+il | he
+je | I
+la | the
+le | the
+leur | their
+lui | him
+ma | my (fem)
+mais | but
+me | me
+même | same; as in moi-même (myself) etc
+mes | me (pl)
+moi | me
+mon | my (masc)
+ne | not
+nos | our (pl)
+notre | our
+nous | we
+on | one
+ou | where
+par | by
+pas | not
+pour | for
+qu | que before vowel
+que | that
+qui | who
+sa | his, her (fem)
+se | oneself
+ses | his (pl)
+son | his, her (masc)
+sur | on
+ta | thy (fem)
+te | thee
+tes | thy (pl)
+toi | thee
+ton | thy (masc)
+tu | thou
+un | a
+une | a
+vos | your (pl)
+votre | your
+vous | you
+
+ | single letter forms
+
+c | c'
+d | d'
+j | j'
+l | l'
+à | to, at
+m | m'
+n | n'
+s | s'
+t | t'
+y | there
+
+ | forms of être (not including the infinitive):
+été
+étée
+étées
+étés
+étant
+suis
+es
+est
+sommes
+êtes
+sont
+serai
+seras
+sera
+serons
+serez
+seront
+serais
+serait
+serions
+seriez
+seraient
+étais
+était
+étions
+étiez
+étaient
+fus
+fut
+fûmes
+fûtes
+furent
+sois
+soit
+soyons
+soyez
+soient
+fusse
+fusses
+fût
+fussions
+fussiez
+fussent
+
+ | forms of avoir (not including the infinitive):
+ayant
+eu
+eue
+eues
+eus
+ai
+as
+avons
+avez
+ont
+aurai
+auras
+aura
+aurons
+aurez
+auront
+aurais
+aurait
+aurions
+auriez
+auraient
+avais
+avait
+avions
+aviez
+avaient
+eut
+eûmes
+eûtes
+eurent
+aie
+aies
+ait
+ayons
+ayez
+aient
+eusse
+eusses
+eût
+eussions
+eussiez
+eussent
+
+ | Later additions (from Jean-Christophe Deschamps)
+ceci | this
+cela | that (added 11 Apr 2012. Omission reported by Adrien Grand)
+celà | that (incorrect, though common)
+cet | this
+cette | this
+ici | here
+ils | they
+les | the (pl)
+leurs | their (pl)
+quel | which
+quels | which
+quelle | which
+quelles | which
+sans | without
+soi | oneself
diff --git a/sphinx/search/_stopwords/hu.py b/sphinx/search/_stopwords/hu.py
new file mode 100644
index 00000000000..122fd8b7b47
--- /dev/null
+++ b/sphinx/search/_stopwords/hu.py
@@ -0,0 +1,205 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/hungarian/stop.txt
+
+from __future__ import annotations
+
+HUNGARIAN_STOPWORDS = frozenset({
+ 'a',
+ 'abban',
+ 'ahhoz',
+ 'ahogy',
+ 'ahol',
+ 'aki',
+ 'akik',
+ 'akkor',
+ 'alatt',
+ 'amely',
+ 'amelyek',
+ 'amelyekben',
+ 'amelyeket',
+ 'amelyet',
+ 'amelynek',
+ 'ami',
+ 'amikor',
+ 'amit',
+ 'amolyan',
+ 'amíg',
+ 'annak',
+ 'arra',
+ 'arról',
+ 'az',
+ 'azok',
+ 'azon',
+ 'azonban',
+ 'azt',
+ 'aztán',
+ 'azután',
+ 'azzal',
+ 'azért',
+ 'be',
+ 'belül',
+ 'benne',
+ 'bár',
+ 'cikk',
+ 'cikkek',
+ 'cikkeket',
+ 'csak',
+ 'de',
+ 'e',
+ 'ebben',
+ 'eddig',
+ 'egy',
+ 'egyes',
+ 'egyetlen',
+ 'egyik',
+ 'egyre',
+ 'egyéb',
+ 'egész',
+ 'ehhez',
+ 'ekkor',
+ 'el',
+ 'ellen',
+ 'elsõ',
+ 'elég',
+ 'elõ',
+ 'elõször',
+ 'elõtt',
+ 'emilyen',
+ 'ennek',
+ 'erre',
+ 'ez',
+ 'ezek',
+ 'ezen',
+ 'ezt',
+ 'ezzel',
+ 'ezért',
+ 'fel',
+ 'felé',
+ 'hanem',
+ 'hiszen',
+ 'hogy',
+ 'hogyan',
+ 'igen',
+ 'ill',
+ 'ill.',
+ 'illetve',
+ 'ilyen',
+ 'ilyenkor',
+ 'ismét',
+ 'ison',
+ 'itt',
+ 'jobban',
+ 'jó',
+ 'jól',
+ 'kell',
+ 'kellett',
+ 'keressünk',
+ 'keresztül',
+ 'ki',
+ 'kívül',
+ 'között',
+ 'közül',
+ 'legalább',
+ 'legyen',
+ 'lehet',
+ 'lehetett',
+ 'lenne',
+ 'lenni',
+ 'lesz',
+ 'lett',
+ 'maga',
+ 'magát',
+ 'majd',
+ 'meg',
+ 'mellett',
+ 'mely',
+ 'melyek',
+ 'mert',
+ 'mi',
+ 'mikor',
+ 'milyen',
+ 'minden',
+ 'mindenki',
+ 'mindent',
+ 'mindig',
+ 'mint',
+ 'mintha',
+ 'mit',
+ 'mivel',
+ 'miért',
+ 'most',
+ 'már',
+ 'más',
+ 'másik',
+ 'még',
+ 'míg',
+ 'nagy',
+ 'nagyobb',
+ 'nagyon',
+ 'ne',
+ 'nekem',
+ 'neki',
+ 'nem',
+ 'nincs',
+ 'néha',
+ 'néhány',
+ 'nélkül',
+ 'olyan',
+ 'ott',
+ 'pedig',
+ 'persze',
+ 'rá',
+ 's',
+ 'saját',
+ 'sem',
+ 'semmi',
+ 'sok',
+ 'sokat',
+ 'sokkal',
+ 'szemben',
+ 'szerint',
+ 'szinte',
+ 'számára',
+ 'talán',
+ 'tehát',
+ 'teljes',
+ 'tovább',
+ 'továbbá',
+ 'több',
+ 'ugyanis',
+ 'utolsó',
+ 'után',
+ 'utána',
+ 'vagy',
+ 'vagyis',
+ 'vagyok',
+ 'valaki',
+ 'valami',
+ 'valamint',
+ 'való',
+ 'van',
+ 'vannak',
+ 'vele',
+ 'vissza',
+ 'viszont',
+ 'volna',
+ 'volt',
+ 'voltak',
+ 'voltam',
+ 'voltunk',
+ 'által',
+ 'általában',
+ 'át',
+ 'én',
+ 'éppen',
+ 'és',
+ 'így',
+ 'õ',
+ 'õk',
+ 'õket',
+ 'össze',
+ 'úgy',
+ 'új',
+ 'újabb',
+ 'újra',
+})
diff --git a/sphinx/search/_stopwords/hu.txt b/sphinx/search/_stopwords/hu.txt
new file mode 100644
index 00000000000..cdae504fd96
--- /dev/null
+++ b/sphinx/search/_stopwords/hu.txt
@@ -0,0 +1,203 @@
+
+| Hungarian stop word list
+| prepared by Anna Tordai
+
+a
+ahogy
+ahol
+aki
+akik
+akkor
+alatt
+által
+általában
+amely
+amelyek
+amelyekben
+amelyeket
+amelyet
+amelynek
+ami
+amit
+amolyan
+amíg
+amikor
+át
+abban
+ahhoz
+annak
+arra
+arról
+az
+azok
+azon
+azt
+azzal
+azért
+aztán
+azután
+azonban
+bár
+be
+belül
+benne
+cikk
+cikkek
+cikkeket
+csak
+de
+e
+eddig
+egész
+egy
+egyes
+egyetlen
+egyéb
+egyik
+egyre
+ekkor
+el
+elég
+ellen
+elõ
+elõször
+elõtt
+elsõ
+én
+éppen
+ebben
+ehhez
+emilyen
+ennek
+erre
+ez
+ezt
+ezek
+ezen
+ezzel
+ezért
+és
+fel
+felé
+hanem
+hiszen
+hogy
+hogyan
+igen
+így
+illetve
+ill.
+ill
+ilyen
+ilyenkor
+ison
+ismét
+itt
+jó
+jól
+jobban
+kell
+kellett
+keresztül
+keressünk
+ki
+kívül
+között
+közül
+legalább
+lehet
+lehetett
+legyen
+lenne
+lenni
+lesz
+lett
+maga
+magát
+majd
+majd
+már
+más
+másik
+meg
+még
+mellett
+mert
+mely
+melyek
+mi
+mit
+míg
+miért
+milyen
+mikor
+minden
+mindent
+mindenki
+mindig
+mint
+mintha
+mivel
+most
+nagy
+nagyobb
+nagyon
+ne
+néha
+nekem
+neki
+nem
+néhány
+nélkül
+nincs
+olyan
+ott
+össze
+õ
+õk
+õket
+pedig
+persze
+rá
+s
+saját
+sem
+semmi
+sok
+sokat
+sokkal
+számára
+szemben
+szerint
+szinte
+talán
+tehát
+teljes
+tovább
+továbbá
+több
+úgy
+ugyanis
+új
+újabb
+újra
+után
+utána
+utolsó
+vagy
+vagyis
+valaki
+valami
+valamint
+való
+vagyok
+van
+vannak
+volt
+voltam
+voltak
+voltunk
+vissza
+vele
+viszont
+volna
diff --git a/sphinx/search/_stopwords/it.py b/sphinx/search/_stopwords/it.py
new file mode 100644
index 00000000000..ac411f580b2
--- /dev/null
+++ b/sphinx/search/_stopwords/it.py
@@ -0,0 +1,286 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/italian/stop.txt
+
+from __future__ import annotations
+
+ITALIAN_STOPWORDS = frozenset({
+ 'a',
+ 'abbia',
+ 'abbiamo',
+ 'abbiano',
+ 'abbiate',
+ 'ad',
+ 'agl',
+ 'agli',
+ 'ai',
+ 'al',
+ 'all',
+ 'alla',
+ 'alle',
+ 'allo',
+ 'anche',
+ 'avemmo',
+ 'avendo',
+ 'avesse',
+ 'avessero',
+ 'avessi',
+ 'avessimo',
+ 'aveste',
+ 'avesti',
+ 'avete',
+ 'aveva',
+ 'avevamo',
+ 'avevano',
+ 'avevate',
+ 'avevi',
+ 'avevo',
+ 'avrai',
+ 'avranno',
+ 'avrebbe',
+ 'avrebbero',
+ 'avrei',
+ 'avremmo',
+ 'avremo',
+ 'avreste',
+ 'avresti',
+ 'avrete',
+ 'avrà',
+ 'avrò',
+ 'avuta',
+ 'avute',
+ 'avuti',
+ 'avuto',
+ 'c',
+ 'che',
+ 'chi',
+ 'ci',
+ 'coi',
+ 'col',
+ 'come',
+ 'con',
+ 'contro',
+ 'cui',
+ 'da',
+ 'dagl',
+ 'dagli',
+ 'dai',
+ 'dal',
+ 'dall',
+ 'dalla',
+ 'dalle',
+ 'dallo',
+ 'degl',
+ 'degli',
+ 'dei',
+ 'del',
+ 'dell',
+ 'della',
+ 'delle',
+ 'dello',
+ 'di',
+ 'dov',
+ 'dove',
+ 'e',
+ 'ebbe',
+ 'ebbero',
+ 'ebbi',
+ 'ed',
+ 'era',
+ 'erano',
+ 'eravamo',
+ 'eravate',
+ 'eri',
+ 'ero',
+ 'essendo',
+ 'faccia',
+ 'facciamo',
+ 'facciano',
+ 'facciate',
+ 'faccio',
+ 'facemmo',
+ 'facendo',
+ 'facesse',
+ 'facessero',
+ 'facessi',
+ 'facessimo',
+ 'faceste',
+ 'facesti',
+ 'faceva',
+ 'facevamo',
+ 'facevano',
+ 'facevate',
+ 'facevi',
+ 'facevo',
+ 'fai',
+ 'fanno',
+ 'farai',
+ 'faranno',
+ 'farebbe',
+ 'farebbero',
+ 'farei',
+ 'faremmo',
+ 'faremo',
+ 'fareste',
+ 'faresti',
+ 'farete',
+ 'farà',
+ 'farò',
+ 'fece',
+ 'fecero',
+ 'feci',
+ 'fosse',
+ 'fossero',
+ 'fossi',
+ 'fossimo',
+ 'foste',
+ 'fosti',
+ 'fu',
+ 'fui',
+ 'fummo',
+ 'furono',
+ 'gli',
+ 'ha',
+ 'hai',
+ 'hanno',
+ 'ho',
+ 'i',
+ 'il',
+ 'in',
+ 'io',
+ 'l',
+ 'la',
+ 'le',
+ 'lei',
+ 'li',
+ 'lo',
+ 'loro',
+ 'lui',
+ 'ma',
+ 'mi',
+ 'mia',
+ 'mie',
+ 'miei',
+ 'mio',
+ 'ne',
+ 'negl',
+ 'negli',
+ 'nei',
+ 'nel',
+ 'nell',
+ 'nella',
+ 'nelle',
+ 'nello',
+ 'noi',
+ 'non',
+ 'nostra',
+ 'nostre',
+ 'nostri',
+ 'nostro',
+ 'o',
+ 'per',
+ 'perché',
+ 'più',
+ 'quale',
+ 'quanta',
+ 'quante',
+ 'quanti',
+ 'quanto',
+ 'quella',
+ 'quelle',
+ 'quelli',
+ 'quello',
+ 'questa',
+ 'queste',
+ 'questi',
+ 'questo',
+ 'sarai',
+ 'saranno',
+ 'sarebbe',
+ 'sarebbero',
+ 'sarei',
+ 'saremmo',
+ 'saremo',
+ 'sareste',
+ 'saresti',
+ 'sarete',
+ 'sarà',
+ 'sarò',
+ 'se',
+ 'sei',
+ 'si',
+ 'sia',
+ 'siamo',
+ 'siano',
+ 'siate',
+ 'siete',
+ 'sono',
+ 'sta',
+ 'stai',
+ 'stando',
+ 'stanno',
+ 'starai',
+ 'staranno',
+ 'starebbe',
+ 'starebbero',
+ 'starei',
+ 'staremmo',
+ 'staremo',
+ 'stareste',
+ 'staresti',
+ 'starete',
+ 'starà',
+ 'starò',
+ 'stava',
+ 'stavamo',
+ 'stavano',
+ 'stavate',
+ 'stavi',
+ 'stavo',
+ 'stemmo',
+ 'stesse',
+ 'stessero',
+ 'stessi',
+ 'stessimo',
+ 'steste',
+ 'stesti',
+ 'stette',
+ 'stettero',
+ 'stetti',
+ 'stia',
+ 'stiamo',
+ 'stiano',
+ 'stiate',
+ 'sto',
+ 'su',
+ 'sua',
+ 'sue',
+ 'sugl',
+ 'sugli',
+ 'sui',
+ 'sul',
+ 'sull',
+ 'sulla',
+ 'sulle',
+ 'sullo',
+ 'suo',
+ 'suoi',
+ 'ti',
+ 'tra',
+ 'tu',
+ 'tua',
+ 'tue',
+ 'tuo',
+ 'tuoi',
+ 'tutti',
+ 'tutto',
+ 'un',
+ 'una',
+ 'uno',
+ 'vi',
+ 'voi',
+ 'vostra',
+ 'vostre',
+ 'vostri',
+ 'vostro',
+ 'è',
+})
diff --git a/sphinx/search/_stopwords/it.txt b/sphinx/search/_stopwords/it.txt
new file mode 100644
index 00000000000..a20bb9528a5
--- /dev/null
+++ b/sphinx/search/_stopwords/it.txt
@@ -0,0 +1,295 @@
+
+ | An Italian stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ad | a (to) before vowel
+al | a + il
+allo | a + lo
+ai | a + i
+agli | a + gli
+all | a + l'
+agl | a + gl'
+alla | a + la
+alle | a + le
+con | with
+col | con + il
+coi | con + i (forms collo, cogli etc are now very rare)
+da | from
+dal | da + il
+dallo | da + lo
+dai | da + i
+dagli | da + gli
+dall | da + l'
+dagl | da + gll'
+dalla | da + la
+dalle | da + le
+di | of
+del | di + il
+dello | di + lo
+dei | di + i
+degli | di + gli
+dell | di + l'
+degl | di + gl'
+della | di + la
+delle | di + le
+in | in
+nel | in + el
+nello | in + lo
+nei | in + i
+negli | in + gli
+nell | in + l'
+negl | in + gl'
+nella | in + la
+nelle | in + le
+su | on
+sul | su + il
+sullo | su + lo
+sui | su + i
+sugli | su + gli
+sull | su + l'
+sugl | su + gl'
+sulla | su + la
+sulle | su + le
+per | through, by
+tra | among
+contro | against
+io | I
+tu | thou
+lui | he
+lei | she
+noi | we
+voi | you
+loro | they
+mio | my
+mia |
+miei |
+mie |
+tuo |
+tua |
+tuoi | thy
+tue |
+suo |
+sua |
+suoi | his, her
+sue |
+nostro | our
+nostra |
+nostri |
+nostre |
+vostro | your
+vostra |
+vostri |
+vostre |
+mi | me
+ti | thee
+ci | us, there
+vi | you, there
+lo | him, the
+la | her, the
+li | them
+le | them, the
+gli | to him, the
+ne | from there etc
+il | the
+un | a
+uno | a
+una | a
+ma | but
+ed | and
+se | if
+perché | why, because
+anche | also
+come | how
+dov | where (as dov')
+dove | where
+che | who, that
+chi | who
+cui | whom
+non | not
+più | more
+quale | who, that
+quanto | how much
+quanti |
+quanta |
+quante |
+quello | that
+quelli |
+quella |
+quelle |
+questo | this
+questi |
+questa |
+queste |
+si | yes
+tutto | all
+tutti | all
+
+ | single letter forms:
+
+a | at
+c | as c' for ce or ci
+e | and
+i | the
+l | as l'
+o | or
+
+ | forms of avere, to have (not including the infinitive):
+
+ho
+hai
+ha
+abbiamo
+avete
+hanno
+abbia
+abbiate
+abbiano
+avrò
+avrai
+avrà
+avremo
+avrete
+avranno
+avrei
+avresti
+avrebbe
+avremmo
+avreste
+avrebbero
+avevo
+avevi
+aveva
+avevamo
+avevate
+avevano
+ebbi
+avesti
+ebbe
+avemmo
+aveste
+ebbero
+avessi
+avesse
+avessimo
+avessero
+avendo
+avuto
+avuta
+avuti
+avute
+
+ | forms of essere, to be (not including the infinitive):
+sono
+sei
+è
+siamo
+siete
+sia
+siate
+siano
+sarò
+sarai
+sarà
+saremo
+sarete
+saranno
+sarei
+saresti
+sarebbe
+saremmo
+sareste
+sarebbero
+ero
+eri
+era
+eravamo
+eravate
+erano
+fui
+fosti
+fu
+fummo
+foste
+furono
+fossi
+fosse
+fossimo
+fossero
+essendo
+
+ | forms of fare, to do (not including the infinitive, fa, fat-):
+faccio
+fai
+facciamo
+fanno
+faccia
+facciate
+facciano
+farò
+farai
+farà
+faremo
+farete
+faranno
+farei
+faresti
+farebbe
+faremmo
+fareste
+farebbero
+facevo
+facevi
+faceva
+facevamo
+facevate
+facevano
+feci
+facesti
+fece
+facemmo
+faceste
+fecero
+facessi
+facesse
+facessimo
+facessero
+facendo
+
+ | forms of stare, to be (not including the infinitive):
+sto
+stai
+sta
+stiamo
+stanno
+stia
+stiate
+stiano
+starò
+starai
+starà
+staremo
+starete
+staranno
+starei
+staresti
+starebbe
+staremmo
+stareste
+starebbero
+stavo
+stavi
+stava
+stavamo
+stavate
+stavano
+stetti
+stesti
+stette
+stemmo
+steste
+stettero
+stessi
+stesse
+stessimo
+stessero
+stando
diff --git a/sphinx/search/_stopwords/nl.py b/sphinx/search/_stopwords/nl.py
new file mode 100644
index 00000000000..c4e21dde6c1
--- /dev/null
+++ b/sphinx/search/_stopwords/nl.py
@@ -0,0 +1,108 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/dutch/stop.txt
+
+from __future__ import annotations
+
+DUTCH_STOPWORDS = frozenset({
+ 'aan',
+ 'al',
+ 'alles',
+ 'als',
+ 'altijd',
+ 'andere',
+ 'ben',
+ 'bij',
+ 'daar',
+ 'dan',
+ 'dat',
+ 'de',
+ 'der',
+ 'deze',
+ 'die',
+ 'dit',
+ 'doch',
+ 'doen',
+ 'door',
+ 'dus',
+ 'een',
+ 'eens',
+ 'en',
+ 'er',
+ 'ge',
+ 'geen',
+ 'geweest',
+ 'haar',
+ 'had',
+ 'heb',
+ 'hebben',
+ 'heeft',
+ 'hem',
+ 'het',
+ 'hier',
+ 'hij',
+ 'hoe',
+ 'hun',
+ 'iemand',
+ 'iets',
+ 'ik',
+ 'in',
+ 'is',
+ 'ja',
+ 'je',
+ 'kan',
+ 'kon',
+ 'kunnen',
+ 'maar',
+ 'me',
+ 'meer',
+ 'men',
+ 'met',
+ 'mij',
+ 'mijn',
+ 'moet',
+ 'na',
+ 'naar',
+ 'niet',
+ 'niets',
+ 'nog',
+ 'nu',
+ 'of',
+ 'om',
+ 'omdat',
+ 'onder',
+ 'ons',
+ 'ook',
+ 'op',
+ 'over',
+ 'reeds',
+ 'te',
+ 'tegen',
+ 'toch',
+ 'toen',
+ 'tot',
+ 'u',
+ 'uit',
+ 'uw',
+ 'van',
+ 'veel',
+ 'voor',
+ 'want',
+ 'waren',
+ 'was',
+ 'wat',
+ 'werd',
+ 'wezen',
+ 'wie',
+ 'wil',
+ 'worden',
+ 'wordt',
+ 'zal',
+ 'ze',
+ 'zelf',
+ 'zich',
+ 'zij',
+ 'zijn',
+ 'zo',
+ 'zonder',
+ 'zou',
+})
diff --git a/sphinx/search/_stopwords/nl.txt b/sphinx/search/_stopwords/nl.txt
new file mode 100644
index 00000000000..edf99730a2c
--- /dev/null
+++ b/sphinx/search/_stopwords/nl.txt
@@ -0,0 +1,112 @@
+
+
+ | A Dutch stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | This is a ranked list (commonest to rarest) of stopwords derived from
+ | a large sample of Dutch text.
+
+ | Dutch stop words frequently exhibit homonym clashes. These are indicated
+ | clearly below.
+
+de | the
+en | and
+van | of, from
+ik | I, the ego
+te | (1) chez, at etc, (2) to, (3) too
+dat | that, which
+die | that, those, who, which
+in | in, inside
+een | a, an, one
+hij | he
+het | the, it
+niet | not, nothing, naught
+zijn | (1) to be, being, (2) his, one's, its
+is | is
+was | (1) was, past tense of all persons sing. of 'zijn' (to be) (2) wax, (3) the washing, (4) rise of river
+op | on, upon, at, in, up, used up
+aan | on, upon, to (as dative)
+met | with, by
+als | like, such as, when
+voor | (1) before, in front of, (2) furrow
+had | had, past tense all persons sing. of 'hebben' (have)
+er | there
+maar | but, only
+om | round, about, for etc
+hem | him
+dan | then
+zou | should/would, past tense all persons sing. of 'zullen'
+of | or, whether, if
+wat | what, something, anything
+mijn | possessive and noun 'mine'
+men | people, 'one'
+dit | this
+zo | so, thus, in this way
+door | through by
+over | over, across
+ze | she, her, they, them
+zich | oneself
+bij | (1) a bee, (2) by, near, at
+ook | also, too
+tot | till, until
+je | you
+mij | me
+uit | out of, from
+der | Old Dutch form of 'van der' still found in surnames
+daar | (1) there, (2) because
+haar | (1) her, their, them, (2) hair
+naar | (1) unpleasant, unwell etc, (2) towards, (3) as
+heb | present first person sing. of 'to have'
+hoe | how, why
+heeft | present third person sing. of 'to have'
+hebben | 'to have' and various parts thereof
+deze | this
+u | you
+want | (1) for, (2) mitten, (3) rigging
+nog | yet, still
+zal | 'shall', first and third person sing. of verb 'zullen' (will)
+me | me
+zij | she, they
+nu | now
+ge | 'thou', still used in Belgium and south Netherlands
+geen | none
+omdat | because
+iets | something, somewhat
+worden | to become, grow, get
+toch | yet, still
+al | all, every, each
+waren | (1) 'were' (2) to wander, (3) wares, (3)
+veel | much, many
+meer | (1) more, (2) lake
+doen | to do, to make
+toen | then, when
+moet | noun 'spot/mote' and present form of 'to must'
+ben | (1) am, (2) 'are' in interrogative second person singular of 'to be'
+zonder | without
+kan | noun 'can' and present form of 'to be able'
+hun | their, them
+dus | so, consequently
+alles | all, everything, anything
+onder | under, beneath
+ja | yes, of course
+eens | once, one day
+hier | here
+wie | who
+werd | imperfect third person sing. of 'become'
+altijd | always
+doch | yet, but etc
+wordt | present third person sing. of 'become'
+wezen | (1) to be, (2) 'been' as in 'been fishing', (3) orphans
+kunnen | to be able
+ons | us/our
+zelf | self
+tegen | against, towards, at
+na | after, near
+reeds | already
+wil | (1) present tense of 'want', (2) 'will', noun, (3) fender
+kon | could; past tense of 'to be able'
+niets | nothing
+uw | your
+iemand | somebody
+geweest | been; past participle of 'be'
+andere | other
diff --git a/sphinx/search/_stopwords/no.py b/sphinx/search/_stopwords/no.py
new file mode 100644
index 00000000000..4d35e2dde89
--- /dev/null
+++ b/sphinx/search/_stopwords/no.py
@@ -0,0 +1,179 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/norwegian/stop.txt
+
+from __future__ import annotations
+
+NORWEGIAN_STOPWORDS = frozenset({
+ 'alle',
+ 'at',
+ 'av',
+ 'bare',
+ 'begge',
+ 'ble',
+ 'blei',
+ 'bli',
+ 'blir',
+ 'blitt',
+ 'både',
+ 'båe',
+ 'da',
+ 'de',
+ 'deg',
+ 'dei',
+ 'deim',
+ 'deira',
+ 'deires',
+ 'dem',
+ 'den',
+ 'denne',
+ 'der',
+ 'dere',
+ 'deres',
+ 'det',
+ 'dette',
+ 'di',
+ 'din',
+ 'disse',
+ 'ditt',
+ 'du',
+ 'dykk',
+ 'dykkar',
+ 'då',
+ 'eg',
+ 'ein',
+ 'eit',
+ 'eitt',
+ 'eller',
+ 'elles',
+ 'en',
+ 'enn',
+ 'er',
+ 'et',
+ 'ett',
+ 'etter',
+ 'for',
+ 'fordi',
+ 'fra',
+ 'før',
+ 'ha',
+ 'hadde',
+ 'han',
+ 'hans',
+ 'har',
+ 'hennar',
+ 'henne',
+ 'hennes',
+ 'her',
+ 'hjå',
+ 'ho',
+ 'hoe',
+ 'honom',
+ 'hoss',
+ 'hossen',
+ 'hun',
+ 'hva',
+ 'hvem',
+ 'hver',
+ 'hvilke',
+ 'hvilken',
+ 'hvis',
+ 'hvor',
+ 'hvordan',
+ 'hvorfor',
+ 'i',
+ 'ikke',
+ 'ikkje',
+ 'ingen',
+ 'ingi',
+ 'inkje',
+ 'inn',
+ 'inni',
+ 'ja',
+ 'jeg',
+ 'kan',
+ 'kom',
+ 'korleis',
+ 'korso',
+ 'kun',
+ 'kunne',
+ 'kva',
+ 'kvar',
+ 'kvarhelst',
+ 'kven',
+ 'kvi',
+ 'kvifor',
+ 'man',
+ 'mange',
+ 'me',
+ 'med',
+ 'medan',
+ 'meg',
+ 'meget',
+ 'mellom',
+ 'men',
+ 'mi',
+ 'min',
+ 'mine',
+ 'mitt',
+ 'mot',
+ 'mykje',
+ 'ned',
+ 'no',
+ 'noe',
+ 'noen',
+ 'noka',
+ 'noko',
+ 'nokon',
+ 'nokor',
+ 'nokre',
+ 'nå',
+ 'når',
+ 'og',
+ 'også',
+ 'om',
+ 'opp',
+ 'oss',
+ 'over',
+ 'på',
+ 'samme',
+ 'seg',
+ 'selv',
+ 'si',
+ 'sia',
+ 'sidan',
+ 'siden',
+ 'sin',
+ 'sine',
+ 'sitt',
+ 'sjøl',
+ 'skal',
+ 'skulle',
+ 'slik',
+ 'so',
+ 'som',
+ 'somme',
+ 'somt',
+ 'så',
+ 'sånn',
+ 'til',
+ 'um',
+ 'upp',
+ 'ut',
+ 'uten',
+ 'var',
+ 'vart',
+ 'varte',
+ 'ved',
+ 'vere',
+ 'verte',
+ 'vi',
+ 'vil',
+ 'ville',
+ 'vore',
+ 'vors',
+ 'vort',
+ 'vår',
+ 'være',
+ 'vært',
+ 'å',
+})
diff --git a/sphinx/search/_stopwords/no.txt b/sphinx/search/_stopwords/no.txt
new file mode 100644
index 00000000000..c1739309ac8
--- /dev/null
+++ b/sphinx/search/_stopwords/no.txt
@@ -0,0 +1,185 @@
+
+ | A Norwegian stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | This stop word list is for the dominant bokmål dialect. Words unique
+ | to nynorsk are marked *.
+
+ | Revised by Jan Bruusgaard
, Jan 2005
+
+og | and
+i | in
+jeg | I
+det | it/this/that
+at | to (w. inf.)
+en | a/an
+et | a/an
+den | it/this/that
+til | to
+er | is/am/are
+som | who/that
+på | on
+de | they / you(formal)
+med | with
+han | he
+av | of
+ikke | not
+ikkje | not *
+der | there
+så | so
+var | was/were
+meg | me
+seg | you
+men | but
+ett | one
+har | have
+om | about
+vi | we
+min | my
+mitt | my
+ha | have
+hadde | had
+hun | she
+nå | now
+over | over
+da | when/as
+ved | by/know
+fra | from
+du | you
+ut | out
+sin | your
+dem | them
+oss | us
+opp | up
+man | you/one
+kan | can
+hans | his
+hvor | where
+eller | or
+hva | what
+skal | shall/must
+selv | self (reflective)
+sjøl | self (reflective)
+her | here
+alle | all
+vil | will
+bli | become
+ble | became
+blei | became *
+blitt | have become
+kunne | could
+inn | in
+når | when
+være | be
+kom | come
+noen | some
+noe | some
+ville | would
+dere | you
+som | who/which/that
+deres | their/theirs
+kun | only/just
+ja | yes
+etter | after
+ned | down
+skulle | should
+denne | this
+for | for/because
+deg | you
+si | hers/his
+sine | hers/his
+sitt | hers/his
+mot | against
+å | to
+meget | much
+hvorfor | why
+dette | this
+disse | these/those
+uten | without
+hvordan | how
+ingen | none
+din | your
+ditt | your
+blir | become
+samme | same
+hvilken | which
+hvilke | which (plural)
+sånn | such a
+inni | inside/within
+mellom | between
+vår | our
+hver | each
+hvem | who
+vors | us/ours
+hvis | whose
+både | both
+bare | only/just
+enn | than
+fordi | as/because
+før | before
+mange | many
+også | also
+slik | just
+vært | been
+være | to be
+båe | both *
+begge | both
+siden | since
+dykk | your *
+dykkar | yours *
+dei | they *
+deira | them *
+deires | theirs *
+deim | them *
+di | your (fem.) *
+då | as/when *
+eg | I *
+ein | a/an *
+eit | a/an *
+eitt | a/an *
+elles | or *
+honom | he *
+hjå | at *
+ho | she *
+hoe | she *
+henne | her
+hennar | her/hers
+hennes | hers
+hoss | how *
+hossen | how *
+ikkje | not *
+ingi | noone *
+inkje | noone *
+korleis | how *
+korso | how *
+kva | what/which *
+kvar | where *
+kvarhelst | where *
+kven | who/whom *
+kvi | why *
+kvifor | why *
+me | we *
+medan | while *
+mi | my *
+mine | my *
+mykje | much *
+no | now *
+nokon | some (masc./neut.) *
+noka | some (fem.) *
+nokor | some *
+noko | some *
+nokre | some *
+si | his/hers *
+sia | since *
+sidan | since *
+so | so *
+somt | some *
+somme | some *
+um | about*
+upp | up *
+vere | be *
+vore | was *
+verte | become *
+vort | become *
+varte | became *
+vart | became *
diff --git a/sphinx/search/_stopwords/pt.py b/sphinx/search/_stopwords/pt.py
new file mode 100644
index 00000000000..9adef01661b
--- /dev/null
+++ b/sphinx/search/_stopwords/pt.py
@@ -0,0 +1,210 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/portuguese/stop.txt
+
+from __future__ import annotations
+
+PORTUGUESE_STOPWORDS = frozenset({
+ 'a',
+ 'ao',
+ 'aos',
+ 'aquela',
+ 'aquelas',
+ 'aquele',
+ 'aqueles',
+ 'aquilo',
+ 'as',
+ 'até',
+ 'com',
+ 'como',
+ 'da',
+ 'das',
+ 'de',
+ 'dela',
+ 'delas',
+ 'dele',
+ 'deles',
+ 'depois',
+ 'do',
+ 'dos',
+ 'e',
+ 'ela',
+ 'elas',
+ 'ele',
+ 'eles',
+ 'em',
+ 'entre',
+ 'era',
+ 'eram',
+ 'essa',
+ 'essas',
+ 'esse',
+ 'esses',
+ 'esta',
+ 'estamos',
+ 'estas',
+ 'estava',
+ 'estavam',
+ 'este',
+ 'esteja',
+ 'estejam',
+ 'estejamos',
+ 'estes',
+ 'esteve',
+ 'estive',
+ 'estivemos',
+ 'estiver',
+ 'estivera',
+ 'estiveram',
+ 'estiverem',
+ 'estivermos',
+ 'estivesse',
+ 'estivessem',
+ 'estivéramos',
+ 'estivéssemos',
+ 'estou',
+ 'está',
+ 'estávamos',
+ 'estão',
+ 'eu',
+ 'foi',
+ 'fomos',
+ 'for',
+ 'fora',
+ 'foram',
+ 'forem',
+ 'formos',
+ 'fosse',
+ 'fossem',
+ 'fui',
+ 'fôramos',
+ 'fôssemos',
+ 'haja',
+ 'hajam',
+ 'hajamos',
+ 'havemos',
+ 'hei',
+ 'houve',
+ 'houvemos',
+ 'houver',
+ 'houvera',
+ 'houveram',
+ 'houverei',
+ 'houverem',
+ 'houveremos',
+ 'houveria',
+ 'houveriam',
+ 'houvermos',
+ 'houverá',
+ 'houverão',
+ 'houveríamos',
+ 'houvesse',
+ 'houvessem',
+ 'houvéramos',
+ 'houvéssemos',
+ 'há',
+ 'hão',
+ 'isso',
+ 'isto',
+ 'já',
+ 'lhe',
+ 'lhes',
+ 'mais',
+ 'mas',
+ 'me',
+ 'mesmo',
+ 'meu',
+ 'meus',
+ 'minha',
+ 'minhas',
+ 'muito',
+ 'na',
+ 'nas',
+ 'nem',
+ 'no',
+ 'nos',
+ 'nossa',
+ 'nossas',
+ 'nosso',
+ 'nossos',
+ 'num',
+ 'numa',
+ 'não',
+ 'nós',
+ 'o',
+ 'os',
+ 'ou',
+ 'para',
+ 'pela',
+ 'pelas',
+ 'pelo',
+ 'pelos',
+ 'por',
+ 'qual',
+ 'quando',
+ 'que',
+ 'quem',
+ 'se',
+ 'seja',
+ 'sejam',
+ 'sejamos',
+ 'sem',
+ 'serei',
+ 'seremos',
+ 'seria',
+ 'seriam',
+ 'será',
+ 'serão',
+ 'seríamos',
+ 'seu',
+ 'seus',
+ 'somos',
+ 'sou',
+ 'sua',
+ 'suas',
+ 'são',
+ 'só',
+ 'também',
+ 'te',
+ 'tem',
+ 'temos',
+ 'tenha',
+ 'tenham',
+ 'tenhamos',
+ 'tenho',
+ 'terei',
+ 'teremos',
+ 'teria',
+ 'teriam',
+ 'terá',
+ 'terão',
+ 'teríamos',
+ 'teu',
+ 'teus',
+ 'teve',
+ 'tinha',
+ 'tinham',
+ 'tive',
+ 'tivemos',
+ 'tiver',
+ 'tivera',
+ 'tiveram',
+ 'tiverem',
+ 'tivermos',
+ 'tivesse',
+ 'tivessem',
+ 'tivéramos',
+ 'tivéssemos',
+ 'tu',
+ 'tua',
+ 'tuas',
+ 'tém',
+ 'tínhamos',
+ 'um',
+ 'uma',
+ 'você',
+ 'vocês',
+ 'vos',
+ 'à',
+ 'às',
+ 'éramos',
+})
diff --git a/sphinx/search/_stopwords/pt.txt b/sphinx/search/_stopwords/pt.txt
new file mode 100644
index 00000000000..9c3c9ac76d7
--- /dev/null
+++ b/sphinx/search/_stopwords/pt.txt
@@ -0,0 +1,245 @@
+
+ | A Portuguese stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+
+ | The following is a ranked list (commonest to rarest) of stopwords
+ | deriving from a large sample of text.
+
+ | Extra words have been added at the end.
+
+de | of, from
+a | the; to, at; her
+o | the; him
+que | who, that
+e | and
+do | de + o
+da | de + a
+em | in
+um | a
+para | for
+ | é from SER
+com | with
+não | not, no
+uma | a
+os | the; them
+no | em + o
+se | himself etc
+na | em + a
+por | for
+mais | more
+as | the; them
+dos | de + os
+como | as, like
+mas | but
+ | foi from SER
+ao | a + o
+ele | he
+das | de + as
+ | tem from TER
+à | a + a
+seu | his
+sua | her
+ou | or
+ | ser from SER
+quando | when
+muito | much
+ | há from HAV
+nos | em + os; us
+já | already, now
+ | está from EST
+eu | I
+também | also
+só | only, just
+pelo | per + o
+pela | per + a
+até | up to
+isso | that
+ela | he
+entre | between
+ | era from SER
+depois | after
+sem | without
+mesmo | same
+aos | a + os
+ | ter from TER
+seus | his
+quem | whom
+nas | em + as
+me | me
+esse | that
+eles | they
+ | estão from EST
+você | you
+ | tinha from TER
+ | foram from SER
+essa | that
+num | em + um
+nem | nor
+suas | her
+meu | my
+às | a + as
+minha | my
+ | têm from TER
+numa | em + uma
+pelos | per + os
+elas | they
+ | havia from HAV
+ | seja from SER
+qual | which
+ | será from SER
+nós | we
+ | tenho from TER
+lhe | to him, her
+deles | of them
+essas | those
+esses | those
+pelas | per + as
+este | this
+ | fosse from SER
+dele | of him
+
+ | other words. There are many contractions such as naquele = em+aquele,
+ | mo = me+o, but they are rare.
+ | Indefinite article plural forms are also rare.
+
+tu | thou
+te | thee
+vocês | you (plural)
+vos | you
+lhes | to them
+meus | my
+minhas
+teu | thy
+tua
+teus
+tuas
+nosso | our
+nossa
+nossos
+nossas
+
+dela | of her
+delas | of them
+
+esta | this
+estes | these
+estas | these
+aquele | that
+aquela | that
+aqueles | those
+aquelas | those
+isto | this
+aquilo | that
+
+ | forms of estar, to be (not including the infinitive):
+estou
+está
+estamos
+estão
+estive
+esteve
+estivemos
+estiveram
+estava
+estávamos
+estavam
+estivera
+estivéramos
+esteja
+estejamos
+estejam
+estivesse
+estivéssemos
+estivessem
+estiver
+estivermos
+estiverem
+
+ | forms of haver, to have (not including the infinitive):
+hei
+há
+havemos
+hão
+houve
+houvemos
+houveram
+houvera
+houvéramos
+haja
+hajamos
+hajam
+houvesse
+houvéssemos
+houvessem
+houver
+houvermos
+houverem
+houverei
+houverá
+houveremos
+houverão
+houveria
+houveríamos
+houveriam
+
+ | forms of ser, to be (not including the infinitive):
+sou
+somos
+são
+era
+éramos
+eram
+fui
+foi
+fomos
+foram
+fora
+fôramos
+seja
+sejamos
+sejam
+fosse
+fôssemos
+fossem
+for
+formos
+forem
+serei
+será
+seremos
+serão
+seria
+seríamos
+seriam
+
+ | forms of ter, to have (not including the infinitive):
+tenho
+tem
+temos
+tém
+tinha
+tínhamos
+tinham
+tive
+teve
+tivemos
+tiveram
+tivera
+tivéramos
+tenha
+tenhamos
+tenham
+tivesse
+tivéssemos
+tivessem
+tiver
+tivermos
+tiverem
+terei
+terá
+teremos
+terão
+teria
+teríamos
+teriam
diff --git a/sphinx/search/_stopwords/ru.py b/sphinx/search/_stopwords/ru.py
new file mode 100644
index 00000000000..2610b3b9b9a
--- /dev/null
+++ b/sphinx/search/_stopwords/ru.py
@@ -0,0 +1,166 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/russian/stop.txt
+
+from __future__ import annotations
+
+RUSSIAN_STOPWORDS = frozenset({
+ 'а',
+ 'без',
+ 'более',
+ 'больше',
+ 'будет',
+ 'будто',
+ 'бы',
+ 'был',
+ 'была',
+ 'были',
+ 'было',
+ 'быть',
+ 'в',
+ 'вам',
+ 'вас',
+ 'вдруг',
+ 'ведь',
+ 'во',
+ 'вот',
+ 'впрочем',
+ 'все',
+ 'всегда',
+ 'всего',
+ 'всех',
+ 'всю',
+ 'вы',
+ 'где',
+ 'говорил',
+ 'да',
+ 'даже',
+ 'два',
+ 'для',
+ 'до',
+ 'другой',
+ 'его',
+ 'ее',
+ 'ей',
+ 'ему',
+ 'если',
+ 'есть',
+ 'еще',
+ 'ж',
+ 'же',
+ 'жизнь',
+ 'за',
+ 'зачем',
+ 'здесь',
+ 'и',
+ 'из',
+ 'или',
+ 'им',
+ 'иногда',
+ 'их',
+ 'к',
+ 'кажется',
+ 'как',
+ 'какая',
+ 'какой',
+ 'когда',
+ 'конечно',
+ 'кто',
+ 'куда',
+ 'ли',
+ 'лучше',
+ 'между',
+ 'меня',
+ 'мне',
+ 'много',
+ 'может',
+ 'можно',
+ 'мой',
+ 'моя',
+ 'мы',
+ 'на',
+ 'над',
+ 'надо',
+ 'наконец',
+ 'нас',
+ 'не',
+ 'него',
+ 'нее',
+ 'ней',
+ 'нельзя',
+ 'нет',
+ 'ни',
+ 'нибудь',
+ 'никогда',
+ 'ним',
+ 'них',
+ 'ничего',
+ 'но',
+ 'ну',
+ 'о',
+ 'об',
+ 'один',
+ 'он',
+ 'она',
+ 'они',
+ 'опять',
+ 'от',
+ 'перед',
+ 'по',
+ 'под',
+ 'после',
+ 'потом',
+ 'потому',
+ 'почти',
+ 'при',
+ 'про',
+ 'раз',
+ 'разве',
+ 'с',
+ 'сам',
+ 'свою',
+ 'себе',
+ 'себя',
+ 'сегодня',
+ 'сейчас',
+ 'сказал',
+ 'сказала',
+ 'сказать',
+ 'со',
+ 'совсем',
+ 'так',
+ 'такой',
+ 'там',
+ 'тебя',
+ 'тем',
+ 'теперь',
+ 'то',
+ 'тогда',
+ 'того',
+ 'тоже',
+ 'только',
+ 'том',
+ 'тот',
+ 'три',
+ 'тут',
+ 'ты',
+ 'у',
+ 'уж',
+ 'уже',
+ 'хорошо',
+ 'хоть',
+ 'чего',
+ 'человек',
+ 'чем',
+ 'через',
+ 'что',
+ 'чтоб',
+ 'чтобы',
+ 'чуть',
+ 'эти',
+ 'этого',
+ 'этой',
+ 'этом',
+ 'этот',
+ 'эту',
+ 'я',
+})
diff --git a/sphinx/search/_stopwords/ru.txt b/sphinx/search/_stopwords/ru.txt
new file mode 100644
index 00000000000..96abb77073e
--- /dev/null
+++ b/sphinx/search/_stopwords/ru.txt
@@ -0,0 +1,235 @@
+
+
+ | a russian stop word list. comments begin with vertical bar. each stop
+ | word is at the start of a line.
+
+ | this is a ranked list (commonest to rarest) of stopwords derived from
+ | a large text sample.
+
+ | letter `ё' is translated to `е'.
+
+и | and
+в | in/into
+во | alternative form
+не | not
+что | what/that
+он | he
+на | on/onto
+я | i
+с | from
+со | alternative form
+как | how
+а | milder form of `no' (but)
+то | conjunction and form of `that'
+все | all
+она | she
+так | so, thus
+его | him
+но | but
+да | yes/and
+ты | thou
+к | towards, by
+у | around, chez
+же | intensifier particle
+вы | you
+за | beyond, behind
+бы | conditional/subj. particle
+по | up to, along
+только | only
+ее | her
+мне | to me
+было | it was
+вот | here is/are, particle
+от | away from
+меня | me
+еще | still, yet, more
+нет | no, there isnt/arent
+о | about
+из | out of
+ему | to him
+теперь | now
+когда | when
+даже | even
+ну | so, well
+вдруг | suddenly
+ли | interrogative particle
+если | if
+уже | already, but homonym of `narrower'
+или | or
+ни | neither
+быть | to be
+был | he was
+него | prepositional form of его
+до | up to
+вас | you accusative
+нибудь | indef. suffix preceded by hyphen
+опять | again
+уж | already, but homonym of `adder'
+вам | to you
+сказал | he said
+ведь | particle `after all'
+там | there
+потом | then
+себя | oneself
+ничего | nothing
+ей | to her
+может | usually with `быть' as `maybe'
+они | they
+тут | here
+где | where
+есть | there is/are
+надо | got to, must
+ней | prepositional form of ей
+для | for
+мы | we
+тебя | thee
+их | them, their
+чем | than
+была | she was
+сам | self
+чтоб | in order to
+без | without
+будто | as if
+человек | man, person, one
+чего | genitive form of `what'
+раз | once
+тоже | also
+себе | to oneself
+под | beneath
+жизнь | life
+будет | will be
+ж | short form of intensifer particle `же'
+тогда | then
+кто | who
+этот | this
+говорил | was saying
+того | genitive form of `that'
+потому | for that reason
+этого | genitive form of `this'
+какой | which
+совсем | altogether
+ним | prepositional form of `его', `они'
+здесь | here
+этом | prepositional form of `этот'
+один | one
+почти | almost
+мой | my
+тем | instrumental/dative plural of `тот', `то'
+чтобы | full form of `in order that'
+нее | her (acc.)
+кажется | it seems
+сейчас | now
+были | they were
+куда | where to
+зачем | why
+сказать | to say
+всех | all (acc., gen. preposn. plural)
+никогда | never
+сегодня | today
+можно | possible, one can
+при | by
+наконец | finally
+два | two
+об | alternative form of `о', about
+другой | another
+хоть | even
+после | after
+над | above
+больше | more
+тот | that one (masc.)
+через | across, in
+эти | these
+нас | us
+про | about
+всего | in all, only, of all
+них | prepositional form of `они' (they)
+какая | which, feminine
+много | lots
+разве | interrogative particle
+сказала | she said
+три | three
+эту | this, acc. fem. sing.
+моя | my, feminine
+впрочем | moreover, besides
+хорошо | good
+свою | ones own, acc. fem. sing.
+этой | oblique form of `эта', fem. `this'
+перед | in front of
+иногда | sometimes
+лучше | better
+чуть | a little
+том | preposn. form of `that one'
+нельзя | one must not
+такой | such a one
+им | to them
+более | more
+всегда | always
+конечно | of course
+всю | acc. fem. sing of `all'
+между | between
+
+
+ | b: some paradigms
+ |
+ | personal pronouns
+ |
+ | я меня мне мной [мною]
+ | ты тебя тебе тобой [тобою]
+ | он его ему им [него, нему, ним]
+ | она ее эи ею [нее, нэи, нею]
+ | оно его ему им [него, нему, ним]
+ |
+ | мы нас нам нами
+ | вы вас вам вами
+ | они их им ими [них, ним, ними]
+ |
+ | себя себе собой [собою]
+ |
+ | demonstrative pronouns: этот (this), тот (that)
+ |
+ | этот эта это эти
+ | этого эты это эти
+ | этого этой этого этих
+ | этому этой этому этим
+ | этим этой этим [этою] этими
+ | этом этой этом этих
+ |
+ | тот та то те
+ | того ту то те
+ | того той того тех
+ | тому той тому тем
+ | тем той тем [тою] теми
+ | том той том тех
+ |
+ | determinative pronouns
+ |
+ | (a) весь (all)
+ |
+ | весь вся все все
+ | всего всю все все
+ | всего всей всего всех
+ | всему всей всему всем
+ | всем всей всем [всею] всеми
+ | всем всей всем всех
+ |
+ | (b) сам (himself etc)
+ |
+ | сам сама само сами
+ | самого саму само самих
+ | самого самой самого самих
+ | самому самой самому самим
+ | самим самой самим [самою] самими
+ | самом самой самом самих
+ |
+ | stems of verbs `to be', `to have', `to do' and modal
+ |
+ | быть бы буд быв есть суть
+ | име
+ | дел
+ | мог мож мочь
+ | уме
+ | хоч хот
+ | долж
+ | можн
+ | нужн
+ | нельзя
diff --git a/sphinx/search/_stopwords/sv.py b/sphinx/search/_stopwords/sv.py
new file mode 100644
index 00000000000..d9be39c35ca
--- /dev/null
+++ b/sphinx/search/_stopwords/sv.py
@@ -0,0 +1,121 @@
+# automatically generated by utils/generate-snowball.py
+# from https://github.com/snowballstem/snowball-website/raw/efb4ae4d65769fb1652acbe608c0c817e746c730/algorithms/swedish/stop.txt
+
+from __future__ import annotations
+
+SWEDISH_STOPWORDS = frozenset({
+ 'alla',
+ 'allt',
+ 'att',
+ 'av',
+ 'blev',
+ 'bli',
+ 'blir',
+ 'blivit',
+ 'de',
+ 'dem',
+ 'den',
+ 'denna',
+ 'deras',
+ 'dess',
+ 'dessa',
+ 'det',
+ 'detta',
+ 'dig',
+ 'din',
+ 'dina',
+ 'ditt',
+ 'du',
+ 'där',
+ 'då',
+ 'efter',
+ 'ej',
+ 'eller',
+ 'en',
+ 'er',
+ 'era',
+ 'ert',
+ 'ett',
+ 'från',
+ 'för',
+ 'ha',
+ 'hade',
+ 'han',
+ 'hans',
+ 'har',
+ 'henne',
+ 'hennes',
+ 'hon',
+ 'honom',
+ 'hur',
+ 'här',
+ 'i',
+ 'icke',
+ 'ingen',
+ 'inom',
+ 'inte',
+ 'jag',
+ 'ju',
+ 'kan',
+ 'kunde',
+ 'man',
+ 'med',
+ 'mellan',
+ 'men',
+ 'mig',
+ 'min',
+ 'mina',
+ 'mitt',
+ 'mot',
+ 'mycket',
+ 'ni',
+ 'nu',
+ 'när',
+ 'någon',
+ 'något',
+ 'några',
+ 'och',
+ 'om',
+ 'oss',
+ 'på',
+ 'samma',
+ 'sedan',
+ 'sig',
+ 'sin',
+ 'sina',
+ 'sitta',
+ 'själv',
+ 'skulle',
+ 'som',
+ 'så',
+ 'sådan',
+ 'sådana',
+ 'sådant',
+ 'till',
+ 'under',
+ 'upp',
+ 'ut',
+ 'utan',
+ 'vad',
+ 'var',
+ 'vara',
+ 'varför',
+ 'varit',
+ 'varje',
+ 'vars',
+ 'vart',
+ 'vem',
+ 'vi',
+ 'vid',
+ 'vilka',
+ 'vilkas',
+ 'vilken',
+ 'vilket',
+ 'vår',
+ 'våra',
+ 'vårt',
+ 'än',
+ 'är',
+ 'åt',
+ 'över',
+})
diff --git a/sphinx/search/_stopwords/sv.txt b/sphinx/search/_stopwords/sv.txt
new file mode 100644
index 00000000000..2fb53892a42
--- /dev/null
+++ b/sphinx/search/_stopwords/sv.txt
@@ -0,0 +1,124 @@
+
+ | A Swedish stop word list. Comments begin with vertical bar. Each stop
+ | word is at the start of a line.
+
+ | This is a ranked list (commonest to rarest) of stopwords derived from
+ | a large text sample.
+
+ | Swedish stop words occasionally exhibit homonym clashes. For example
+ | så = so, but also seed. These are indicated clearly below.
+
+och | and
+det | it, this/that
+att | to (with infinitive)
+i | in, at
+en | a
+jag | I
+hon | she
+som | who, that
+han | he
+på | on
+den | it, this/that
+med | with
+var | where, each
+sig | him(self) etc
+för | for
+så | so (also: seed)
+till | to
+är | is
+men | but
+ett | a
+om | if; around, about
+hade | had
+de | they, these/those
+av | of
+icke | not, no
+mig | me
+du | you
+henne | her
+då | then, when
+sin | his
+nu | now
+har | have
+inte | inte någon = no one
+hans | his
+honom | him
+skulle | 'sake'
+hennes | her
+där | there
+min | my
+man | one (pronoun)
+ej | nor
+vid | at, by, on (also: vast)
+kunde | could
+något | some etc
+från | from, off
+ut | out
+när | when
+efter | after, behind
+upp | up
+vi | we
+dem | them
+vara | be
+vad | what
+över | over
+än | than
+dig | you
+kan | can
+sina | his
+här | here
+ha | have
+mot | towards
+alla | all
+under | under (also: wonder)
+någon | some etc
+eller | or (else)
+allt | all
+mycket | much
+sedan | since
+ju | why
+denna | this/that
+själv | myself, yourself etc
+detta | this/that
+åt | to
+utan | without
+varit | was
+hur | how
+ingen | no
+mitt | my
+ni | you
+bli | to be, become
+blev | from bli
+oss | us
+din | thy
+dessa | these/those
+några | some etc
+deras | their
+blir | from bli
+mina | my
+samma | (the) same
+vilken | who, that
+er | you, your
+sådan | such a
+vår | our
+blivit | from bli
+dess | its
+inom | within
+mellan | between
+sådant | such a
+varför | why
+varje | each
+vilka | who, that
+ditt | thy
+vem | who
+vilket | who, that
+sitta | his
+sådana | such a
+vart | each
+dina | thy
+vars | whose
+vårt | our
+våra | our
+ert | your
+era | your
+vilkas | whose
diff --git a/sphinx/search/da.py b/sphinx/search/da.py
index a56114bb6ba..e632a97fb78 100644
--- a/sphinx/search/da.py
+++ b/sphinx/search/da.py
@@ -1,117 +1,21 @@
-"""Danish search language: includes the JS Danish stemmer."""
+"""Danish search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-danish_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/danish/stop.txt
-og | and
-i | in
-jeg | I
-det | that (dem. pronoun)/it (pers. pronoun)
-at | that (in front of a sentence)/to (with infinitive)
-en | a/an
-den | it (pers. pronoun)/that (dem. pronoun)
-til | to/at/for/until/against/by/of/into, more
-er | present tense of "to be"
-som | who, as
-på | on/upon/in/on/at/to/after/of/with/for, on
-de | they
-med | with/by/in, along
-han | he
-af | of/by/from/off/for/in/with/on, off
-for | at/for/to/from/by/of/ago, in front/before, because
-ikke | not
-der | who/which, there/those
-var | past tense of "to be"
-mig | me/myself
-sig | oneself/himself/herself/itself/themselves
-men | but
-et | a/an/one, one (number), someone/somebody/one
-har | present tense of "to have"
-om | round/about/for/in/a, about/around/down, if
-vi | we
-min | my
-havde | past tense of "to have"
-ham | him
-hun | she
-nu | now
-over | over/above/across/by/beyond/past/on/about, over/past
-da | then, when/as/since
-fra | from/off/since, off, since
-du | you
-ud | out
-sin | his/her/its/one's
-dem | them
-os | us/ourselves
-op | up
-man | you/one
-hans | his
-hvor | where
-eller | or
-hvad | what
-skal | must/shall etc.
-selv | myself/yourself/herself/ourselves etc., even
-her | here
-alle | all/everyone/everybody etc.
-vil | will (verb)
-blev | past tense of "to stay/to remain/to get/to become"
-kunne | could
-ind | in
-når | when
-være | present tense of "to be"
-dog | however/yet/after all
-noget | something
-ville | would
-jo | you know/you see (adv), yes
-deres | their/theirs
-efter | after/behind/according to/for/by/from, later/afterwards
-ned | down
-skulle | should
-denne | this
-end | than
-dette | this
-mit | my/mine
-også | also
-under | under/beneath/below/during, below/underneath
-have | have
-dig | you
-anden | other
-hende | her
-mine | my
-alt | everything
-meget | much/very, plenty of
-sit | his, her, its, one's
-sine | his, her, its, one's
-vor | our
-mod | against
-disse | these
-hvis | if
-din | your/yours
-nogle | some
-hos | by/at
-blive | be/become
-mange | many
-ad | by/through
-bliver | present tense of "to be/to become"
-hendes | her/hers
-været | be
-thi | for (conj)
-jer | you
-sådan | such, like this/like that
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.da import DANISH_STOPWORDS
class SearchDanish(SearchLanguage):
lang = 'da'
language_name = 'Danish'
js_stemmer_rawcode = 'danish-stemmer.js'
- stopwords = danish_stopwords
+ stopwords = DANISH_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('danish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/de.py b/sphinx/search/de.py
index 37aa9ec8890..278d78fb487 100644
--- a/sphinx/search/de.py
+++ b/sphinx/search/de.py
@@ -1,300 +1,21 @@
-"""German search language: includes the JS German stemmer."""
+"""German search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-german_stopwords = parse_stop_word("""
-|source: https://snowball.tartarus.org/algorithms/german/stop.txt
-aber | but
-
-alle | all
-allem
-allen
-aller
-alles
-
-als | than, as
-also | so
-am | an + dem
-an | at
-
-ander | other
-andere
-anderem
-anderen
-anderer
-anderes
-anderm
-andern
-anderr
-anders
-
-auch | also
-auf | on
-aus | out of
-bei | by
-bin | am
-bis | until
-bist | art
-da | there
-damit | with it
-dann | then
-
-der | the
-den
-des
-dem
-die
-das
-
-daß | that
-
-derselbe | the same
-derselben
-denselben
-desselben
-demselben
-dieselbe
-dieselben
-dasselbe
-
-dazu | to that
-
-dein | thy
-deine
-deinem
-deinen
-deiner
-deines
-
-denn | because
-
-derer | of those
-dessen | of him
-
-dich | thee
-dir | to thee
-du | thou
-
-dies | this
-diese
-diesem
-diesen
-dieser
-dieses
-
-
-doch | (several meanings)
-dort | (over) there
-
-
-durch | through
-
-ein | a
-eine
-einem
-einen
-einer
-eines
-
-einig | some
-einige
-einigem
-einigen
-einiger
-einiges
-
-einmal | once
-
-er | he
-ihn | him
-ihm | to him
-
-es | it
-etwas | something
-
-euer | your
-eure
-eurem
-euren
-eurer
-eures
-
-für | for
-gegen | towards
-gewesen | p.p. of sein
-hab | have
-habe | have
-haben | have
-hat | has
-hatte | had
-hatten | had
-hier | here
-hin | there
-hinter | behind
-
-ich | I
-mich | me
-mir | to me
-
-
-ihr | you, to her
-ihre
-ihrem
-ihren
-ihrer
-ihres
-euch | to you
-
-im | in + dem
-in | in
-indem | while
-ins | in + das
-ist | is
-
-jede | each, every
-jedem
-jeden
-jeder
-jedes
-
-jene | that
-jenem
-jenen
-jener
-jenes
-
-jetzt | now
-kann | can
-
-kein | no
-keine
-keinem
-keinen
-keiner
-keines
-
-können | can
-könnte | could
-machen | do
-man | one
-
-manche | some, many a
-manchem
-manchen
-mancher
-manches
-
-mein | my
-meine
-meinem
-meinen
-meiner
-meines
-
-mit | with
-muss | must
-musste | had to
-nach | to(wards)
-nicht | not
-nichts | nothing
-noch | still, yet
-nun | now
-nur | only
-ob | whether
-oder | or
-ohne | without
-sehr | very
-
-sein | his
-seine
-seinem
-seinen
-seiner
-seines
-
-selbst | self
-sich | herself
-
-sie | they, she
-ihnen | to them
-
-sind | are
-so | so
-
-solche | such
-solchem
-solchen
-solcher
-solches
-
-soll | shall
-sollte | should
-sondern | but
-sonst | else
-über | over
-um | about, around
-und | and
-
-uns | us
-unse
-unsem
-unsen
-unser
-unses
-
-unter | under
-viel | much
-vom | von + dem
-von | from
-vor | before
-während | while
-war | was
-waren | were
-warst | wast
-was | what
-weg | away, off
-weil | because
-weiter | further
-
-welche | which
-welchem
-welchen
-welcher
-welches
-
-wenn | when
-werde | will
-werden | will
-wie | how
-wieder | again
-will | want
-wir | we
-wird | will
-wirst | willst
-wo | where
-wollen | want
-wollte | wanted
-würde | would
-würden | would
-zu | to
-zum | zu + dem
-zur | zu + der
-zwar | indeed
-zwischen | between
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.de import GERMAN_STOPWORDS
class SearchGerman(SearchLanguage):
lang = 'de'
language_name = 'German'
js_stemmer_rawcode = 'german-stemmer.js'
- stopwords = german_stopwords
+ stopwords = GERMAN_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('german')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/en.py b/sphinx/search/en.py
index 5173dc03fc0..273a25a0272 100644
--- a/sphinx/search/en.py
+++ b/sphinx/search/en.py
@@ -1,218 +1,22 @@
-"""English search language: includes the JS porter stemmer."""
+"""English search language."""
from __future__ import annotations
import snowballstemmer
from sphinx.search import SearchLanguage
-
-english_stopwords = {
- 'a', 'and', 'are', 'as', 'at',
- 'be', 'but', 'by',
- 'for',
- 'if', 'in', 'into', 'is', 'it',
- 'near', 'no', 'not',
- 'of', 'on', 'or',
- 'such',
- 'that', 'the', 'their', 'then', 'there', 'these', 'they', 'this', 'to',
- 'was', 'will', 'with',
-} # fmt: skip
-
-js_porter_stemmer = """
-/**
- * Porter Stemmer
- */
-var Stemmer = function() {
-
- var step2list = {
- ational: 'ate',
- tional: 'tion',
- enci: 'ence',
- anci: 'ance',
- izer: 'ize',
- bli: 'ble',
- alli: 'al',
- entli: 'ent',
- eli: 'e',
- ousli: 'ous',
- ization: 'ize',
- ation: 'ate',
- ator: 'ate',
- alism: 'al',
- iveness: 'ive',
- fulness: 'ful',
- ousness: 'ous',
- aliti: 'al',
- iviti: 'ive',
- biliti: 'ble',
- logi: 'log'
- };
-
- var step3list = {
- icate: 'ic',
- ative: '',
- alize: 'al',
- iciti: 'ic',
- ical: 'ic',
- ful: '',
- ness: ''
- };
-
- var c = "[^aeiou]"; // consonant
- var v = "[aeiouy]"; // vowel
- var C = c + "[^aeiouy]*"; // consonant sequence
- var V = v + "[aeiou]*"; // vowel sequence
-
- var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
- var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
- var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
- var s_v = "^(" + C + ")?" + v; // vowel in stem
-
- this.stemWord = function (w) {
- var stem;
- var suffix;
- var firstch;
- var origword = w;
-
- if (w.length < 3)
- return w;
-
- var re;
- var re2;
- var re3;
- var re4;
-
- firstch = w.substr(0,1);
- if (firstch == "y")
- w = firstch.toUpperCase() + w.substr(1);
-
- // Step 1a
- re = /^(.+?)(ss|i)es$/;
- re2 = /^(.+?)([^s])s$/;
-
- if (re.test(w))
- w = w.replace(re,"$1$2");
- else if (re2.test(w))
- w = w.replace(re2,"$1$2");
-
- // Step 1b
- re = /^(.+?)eed$/;
- re2 = /^(.+?)(ed|ing)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- re = new RegExp(mgr0);
- if (re.test(fp[1])) {
- re = /.$/;
- w = w.replace(re,"");
- }
- }
- else if (re2.test(w)) {
- var fp = re2.exec(w);
- stem = fp[1];
- re2 = new RegExp(s_v);
- if (re2.test(stem)) {
- w = stem;
- re2 = /(at|bl|iz)$/;
- re3 = new RegExp("([^aeiouylsz])\\\\1$");
- re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
- if (re2.test(w))
- w = w + "e";
- else if (re3.test(w)) {
- re = /.$/;
- w = w.replace(re,"");
- }
- else if (re4.test(w))
- w = w + "e";
- }
- }
-
- // Step 1c
- re = /^(.+?)y$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(s_v);
- if (re.test(stem))
- w = stem + "i";
- }
-
- // Step 2
- re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|\
-ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- suffix = fp[2];
- re = new RegExp(mgr0);
- if (re.test(stem))
- w = stem + step2list[suffix];
- }
-
- // Step 3
- re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- suffix = fp[2];
- re = new RegExp(mgr0);
- if (re.test(stem))
- w = stem + step3list[suffix];
- }
-
- // Step 4
- re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|\
-iti|ous|ive|ize)$/;
- re2 = /^(.+?)(s|t)(ion)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(mgr1);
- if (re.test(stem))
- w = stem;
- }
- else if (re2.test(w)) {
- var fp = re2.exec(w);
- stem = fp[1] + fp[2];
- re2 = new RegExp(mgr1);
- if (re2.test(stem))
- w = stem;
- }
-
- // Step 5
- re = /^(.+?)e$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(mgr1);
- re2 = new RegExp(meq1);
- re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
- if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
- w = stem;
- }
- re = /ll$/;
- re2 = new RegExp(mgr1);
- if (re.test(w) && re2.test(w)) {
- re = /.$/;
- w = w.replace(re,"");
- }
-
- // and turn initial Y back to y
- if (firstch == "y")
- w = firstch.toLowerCase() + w.substr(1);
- return w;
- }
-}
-"""
+from sphinx.search._stopwords.en import ENGLISH_STOPWORDS
class SearchEnglish(SearchLanguage):
lang = 'en'
language_name = 'English'
- js_stemmer_code = js_porter_stemmer
- stopwords = english_stopwords
+ js_stemmer_rawcode = 'english-stemmer.js'
+ stopwords = ENGLISH_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
- self.stemmer = snowballstemmer.stemmer('porter')
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
+ self.stemmer = snowballstemmer.stemmer('english')
def stem(self, word: str) -> str:
return self.stemmer.stemWord(word.lower())
diff --git a/sphinx/search/es.py b/sphinx/search/es.py
index 5739c88172a..c1b08ab1bad 100644
--- a/sphinx/search/es.py
+++ b/sphinx/search/es.py
@@ -1,360 +1,21 @@
-"""Spanish search language: includes the JS Spanish stemmer."""
+"""Spanish search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-spanish_stopwords = parse_stop_word("""
-|source: https://snowball.tartarus.org/algorithms/spanish/stop.txt
-de | from, of
-la | the, her
-que | who, that
-el | the
-en | in
-y | and
-a | to
-los | the, them
-del | de + el
-se | himself, from him etc
-las | the, them
-por | for, by, etc
-un | a
-para | for
-con | with
-no | no
-una | a
-su | his, her
-al | a + el
- | es from SER
-lo | him
-como | how
-más | more
-pero | pero
-sus | su plural
-le | to him, her
-ya | already
-o | or
- | fue from SER
-este | this
- | ha from HABER
-sí | himself etc
-porque | because
-esta | this
- | son from SER
-entre | between
- | está from ESTAR
-cuando | when
-muy | very
-sin | without
-sobre | on
- | ser from SER
- | tiene from TENER
-también | also
-me | me
-hasta | until
-hay | there is/are
-donde | where
- | han from HABER
-quien | whom, that
- | están from ESTAR
- | estado from ESTAR
-desde | from
-todo | all
-nos | us
-durante | during
- | estados from ESTAR
-todos | all
-uno | a
-les | to them
-ni | nor
-contra | against
-otros | other
- | fueron from SER
-ese | that
-eso | that
- | había from HABER
-ante | before
-ellos | they
-e | and (variant of y)
-esto | this
-mí | me
-antes | before
-algunos | some
-qué | what?
-unos | a
-yo | I
-otro | other
-otras | other
-otra | other
-él | he
-tanto | so much, many
-esa | that
-estos | these
-mucho | much, many
-quienes | who
-nada | nothing
-muchos | many
-cual | who
- | sea from SER
-poco | few
-ella | she
-estar | to be
- | haber from HABER
-estas | these
- | estaba from ESTAR
- | estamos from ESTAR
-algunas | some
-algo | something
-nosotros | we
-
- | other forms
-
-mi | me
-mis | mi plural
-tú | thou
-te | thee
-ti | thee
-tu | thy
-tus | tu plural
-ellas | they
-nosotras | we
-vosotros | you
-vosotras | you
-os | you
-mío | mine
-mía |
-míos |
-mías |
-tuyo | thine
-tuya |
-tuyos |
-tuyas |
-suyo | his, hers, theirs
-suya |
-suyos |
-suyas |
-nuestro | ours
-nuestra |
-nuestros |
-nuestras |
-vuestro | yours
-vuestra |
-vuestros |
-vuestras |
-esos | those
-esas | those
-
- | forms of estar, to be (not including the infinitive):
-estoy
-estás
-está
-estamos
-estáis
-están
-esté
-estés
-estemos
-estéis
-estén
-estaré
-estarás
-estará
-estaremos
-estaréis
-estarán
-estaría
-estarías
-estaríamos
-estaríais
-estarían
-estaba
-estabas
-estábamos
-estabais
-estaban
-estuve
-estuviste
-estuvo
-estuvimos
-estuvisteis
-estuvieron
-estuviera
-estuvieras
-estuviéramos
-estuvierais
-estuvieran
-estuviese
-estuvieses
-estuviésemos
-estuvieseis
-estuviesen
-estando
-estado
-estada
-estados
-estadas
-estad
-
- | forms of haber, to have (not including the infinitive):
-he
-has
-ha
-hemos
-habéis
-han
-haya
-hayas
-hayamos
-hayáis
-hayan
-habré
-habrás
-habrá
-habremos
-habréis
-habrán
-habría
-habrías
-habríamos
-habríais
-habrían
-había
-habías
-habíamos
-habíais
-habían
-hube
-hubiste
-hubo
-hubimos
-hubisteis
-hubieron
-hubiera
-hubieras
-hubiéramos
-hubierais
-hubieran
-hubiese
-hubieses
-hubiésemos
-hubieseis
-hubiesen
-habiendo
-habido
-habida
-habidos
-habidas
-
- | forms of ser, to be (not including the infinitive):
-soy
-eres
-es
-somos
-sois
-son
-sea
-seas
-seamos
-seáis
-sean
-seré
-serás
-será
-seremos
-seréis
-serán
-sería
-serías
-seríamos
-seríais
-serían
-era
-eras
-éramos
-erais
-eran
-fui
-fuiste
-fue
-fuimos
-fuisteis
-fueron
-fuera
-fueras
-fuéramos
-fuerais
-fueran
-fuese
-fueses
-fuésemos
-fueseis
-fuesen
-siendo
-sido
- | sed also means 'thirst'
-
- | forms of tener, to have (not including the infinitive):
-tengo
-tienes
-tiene
-tenemos
-tenéis
-tienen
-tenga
-tengas
-tengamos
-tengáis
-tengan
-tendré
-tendrás
-tendrá
-tendremos
-tendréis
-tendrán
-tendría
-tendrías
-tendríamos
-tendríais
-tendrían
-tenía
-tenías
-teníamos
-teníais
-tenían
-tuve
-tuviste
-tuvo
-tuvimos
-tuvisteis
-tuvieron
-tuviera
-tuvieras
-tuviéramos
-tuvierais
-tuvieran
-tuviese
-tuvieses
-tuviésemos
-tuvieseis
-tuviesen
-teniendo
-tenido
-tenida
-tenidos
-tenidas
-tened
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.es import SPANISH_STOPWORDS
class SearchSpanish(SearchLanguage):
lang = 'es'
language_name = 'Spanish'
js_stemmer_rawcode = 'spanish-stemmer.js'
- stopwords = spanish_stopwords
+ stopwords = SPANISH_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('spanish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/fi.py b/sphinx/search/fi.py
index 24ef7502300..01c7e0ba126 100644
--- a/sphinx/search/fi.py
+++ b/sphinx/search/fi.py
@@ -1,110 +1,21 @@
-"""Finnish search language: includes the JS Finnish stemmer."""
+"""Finnish search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-finnish_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/finnish/stop.txt
-| forms of BE
-
-olla
-olen
-olet
-on
-olemme
-olette
-ovat
-ole | negative form
-
-oli
-olisi
-olisit
-olisin
-olisimme
-olisitte
-olisivat
-olit
-olin
-olimme
-olitte
-olivat
-ollut
-olleet
-
-en | negation
-et
-ei
-emme
-ette
-eivät
-
-|Nom Gen Acc Part Iness Elat Illat Adess Ablat Allat Ess Trans
-minä minun minut minua minussa minusta minuun minulla minulta minulle | I
-sinä sinun sinut sinua sinussa sinusta sinuun sinulla sinulta sinulle | you
-hän hänen hänet häntä hänessä hänestä häneen hänellä häneltä hänelle | he she
-me meidän meidät meitä meissä meistä meihin meillä meiltä meille | we
-te teidän teidät teitä teissä teistä teihin teillä teiltä teille | you
-he heidän heidät heitä heissä heistä heihin heillä heiltä heille | they
-
-tämä tämän tätä tässä tästä tähän tällä tältä tälle tänä täksi | this
-tuo tuon tuota tuossa tuosta tuohon tuolla tuolta tuolle tuona tuoksi | that
-se sen sitä siinä siitä siihen sillä siltä sille sinä siksi | it
-nämä näiden näitä näissä näistä näihin näillä näiltä näille näinä näiksi | these
-nuo noiden noita noissa noista noihin noilla noilta noille noina noiksi | those
-ne niiden niitä niissä niistä niihin niillä niiltä niille niinä niiksi | they
-
-kuka kenen kenet ketä kenessä kenestä keneen kenellä keneltä kenelle kenenä keneksi| who
-ketkä keiden ketkä keitä keissä keistä keihin keillä keiltä keille keinä keiksi | (pl)
-mikä minkä minkä mitä missä mistä mihin millä miltä mille minä miksi | which what
-mitkä | (pl)
-
-joka jonka jota jossa josta johon jolla jolta jolle jona joksi | who which
-jotka joiden joita joissa joista joihin joilla joilta joille joina joiksi | (pl)
-
-| conjunctions
-
-että | that
-ja | and
-jos | if
-koska | because
-kuin | than
-mutta | but
-niin | so
-sekä | and
-sillä | for
-tai | or
-vaan | but
-vai | or
-vaikka | although
-
-
-| prepositions
-
-kanssa | with
-mukaan | according to
-noin | about
-poikki | across
-yli | over, across
-
-| other
-
-kun | when
-niin | so
-nyt | now
-itse | self
-""") # NoQA: E501
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.fi import FINNISH_STOPWORDS
class SearchFinnish(SearchLanguage):
lang = 'fi'
language_name = 'Finnish'
js_stemmer_rawcode = 'finnish-stemmer.js'
- stopwords = finnish_stopwords
+ stopwords = FINNISH_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('finnish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/fr.py b/sphinx/search/fr.py
index 7662737d6e3..e79976dfea1 100644
--- a/sphinx/search/fr.py
+++ b/sphinx/search/fr.py
@@ -1,196 +1,21 @@
-"""French search language: includes the JS French stemmer."""
+"""French search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-french_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/french/stop.txt
-au | a + le
-aux | a + les
-avec | with
-ce | this
-ces | these
-dans | with
-de | of
-des | de + les
-du | de + le
-elle | she
-en | `of them' etc
-et | and
-eux | them
-il | he
-je | I
-la | the
-le | the
-leur | their
-lui | him
-ma | my (fem)
-mais | but
-me | me
-même | same; as in moi-même (myself) etc
-mes | me (pl)
-moi | me
-mon | my (masc)
-ne | not
-nos | our (pl)
-notre | our
-nous | we
-on | one
-ou | where
-par | by
-pas | not
-pour | for
-qu | que before vowel
-que | that
-qui | who
-sa | his, her (fem)
-se | oneself
-ses | his (pl)
-son | his, her (masc)
-sur | on
-ta | thy (fem)
-te | thee
-tes | thy (pl)
-toi | thee
-ton | thy (masc)
-tu | thou
-un | a
-une | a
-vos | your (pl)
-votre | your
-vous | you
-
- | single letter forms
-
-c | c'
-d | d'
-j | j'
-l | l'
-à | to, at
-m | m'
-n | n'
-s | s'
-t | t'
-y | there
-
- | forms of être (not including the infinitive):
-été
-étée
-étées
-étés
-étant
-suis
-es
-est
-sommes
-êtes
-sont
-serai
-seras
-sera
-serons
-serez
-seront
-serais
-serait
-serions
-seriez
-seraient
-étais
-était
-étions
-étiez
-étaient
-fus
-fut
-fûmes
-fûtes
-furent
-sois
-soit
-soyons
-soyez
-soient
-fusse
-fusses
-fût
-fussions
-fussiez
-fussent
-
- | forms of avoir (not including the infinitive):
-ayant
-eu
-eue
-eues
-eus
-ai
-as
-avons
-avez
-ont
-aurai
-auras
-aura
-aurons
-aurez
-auront
-aurais
-aurait
-aurions
-auriez
-auraient
-avais
-avait
-avions
-aviez
-avaient
-eut
-eûmes
-eûtes
-eurent
-aie
-aies
-ait
-ayons
-ayez
-aient
-eusse
-eusses
-eût
-eussions
-eussiez
-eussent
-
- | Later additions (from Jean-Christophe Deschamps)
-ceci | this
-cela | that (added 11 Apr 2012. Omission reported by Adrien Grand)
-celà | that (incorrect, though common)
-cet | this
-cette | this
-ici | here
-ils | they
-les | the (pl)
-leurs | their (pl)
-quel | which
-quels | which
-quelle | which
-quelles | which
-sans | without
-soi | oneself
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.fr import FRENCH_STOPWORDS
class SearchFrench(SearchLanguage):
lang = 'fr'
language_name = 'French'
js_stemmer_rawcode = 'french-stemmer.js'
- stopwords = french_stopwords
+ stopwords = FRENCH_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('french')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/hu.py b/sphinx/search/hu.py
index 5c35b16fc65..254ad488d78 100644
--- a/sphinx/search/hu.py
+++ b/sphinx/search/hu.py
@@ -1,223 +1,21 @@
-"""Hungarian search language: includes the JS Hungarian stemmer."""
+"""Hungarian search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-hungarian_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/hungarian/stop.txt
-| prepared by Anna Tordai
-a
-ahogy
-ahol
-aki
-akik
-akkor
-alatt
-által
-általában
-amely
-amelyek
-amelyekben
-amelyeket
-amelyet
-amelynek
-ami
-amit
-amolyan
-amíg
-amikor
-át
-abban
-ahhoz
-annak
-arra
-arról
-az
-azok
-azon
-azt
-azzal
-azért
-aztán
-azután
-azonban
-bár
-be
-belül
-benne
-cikk
-cikkek
-cikkeket
-csak
-de
-e
-eddig
-egész
-egy
-egyes
-egyetlen
-egyéb
-egyik
-egyre
-ekkor
-el
-elég
-ellen
-elő
-először
-előtt
-első
-én
-éppen
-ebben
-ehhez
-emilyen
-ennek
-erre
-ez
-ezt
-ezek
-ezen
-ezzel
-ezért
-és
-fel
-felé
-hanem
-hiszen
-hogy
-hogyan
-igen
-így
-illetve
-ill.
-ill
-ilyen
-ilyenkor
-ison
-ismét
-itt
-jó
-jól
-jobban
-kell
-kellett
-keresztül
-keressünk
-ki
-kívül
-között
-közül
-legalább
-lehet
-lehetett
-legyen
-lenne
-lenni
-lesz
-lett
-maga
-magát
-majd
-majd
-már
-más
-másik
-meg
-még
-mellett
-mert
-mely
-melyek
-mi
-mit
-míg
-miért
-milyen
-mikor
-minden
-mindent
-mindenki
-mindig
-mint
-mintha
-mivel
-most
-nagy
-nagyobb
-nagyon
-ne
-néha
-nekem
-neki
-nem
-néhány
-nélkül
-nincs
-olyan
-ott
-össze
-ő
-ők
-őket
-pedig
-persze
-rá
-s
-saját
-sem
-semmi
-sok
-sokat
-sokkal
-számára
-szemben
-szerint
-szinte
-talán
-tehát
-teljes
-tovább
-továbbá
-több
-úgy
-ugyanis
-új
-újabb
-újra
-után
-utána
-utolsó
-vagy
-vagyis
-valaki
-valami
-valamint
-való
-vagyok
-van
-vannak
-volt
-voltam
-voltak
-voltunk
-vissza
-vele
-viszont
-volna
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.hu import HUNGARIAN_STOPWORDS
class SearchHungarian(SearchLanguage):
lang = 'hu'
language_name = 'Hungarian'
js_stemmer_rawcode = 'hungarian-stemmer.js'
- stopwords = hungarian_stopwords
+ stopwords = HUNGARIAN_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('hungarian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/it.py b/sphinx/search/it.py
index 60a5cf57720..d8a583f9d85 100644
--- a/sphinx/search/it.py
+++ b/sphinx/search/it.py
@@ -1,313 +1,21 @@
-"""Italian search language: includes the JS Italian stemmer."""
+"""Italian search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-italian_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/italian/stop.txt
-ad | a (to) before vowel
-al | a + il
-allo | a + lo
-ai | a + i
-agli | a + gli
-all | a + l'
-agl | a + gl'
-alla | a + la
-alle | a + le
-con | with
-col | con + il
-coi | con + i (forms collo, cogli etc are now very rare)
-da | from
-dal | da + il
-dallo | da + lo
-dai | da + i
-dagli | da + gli
-dall | da + l'
-dagl | da + gll'
-dalla | da + la
-dalle | da + le
-di | of
-del | di + il
-dello | di + lo
-dei | di + i
-degli | di + gli
-dell | di + l'
-degl | di + gl'
-della | di + la
-delle | di + le
-in | in
-nel | in + el
-nello | in + lo
-nei | in + i
-negli | in + gli
-nell | in + l'
-negl | in + gl'
-nella | in + la
-nelle | in + le
-su | on
-sul | su + il
-sullo | su + lo
-sui | su + i
-sugli | su + gli
-sull | su + l'
-sugl | su + gl'
-sulla | su + la
-sulle | su + le
-per | through, by
-tra | among
-contro | against
-io | I
-tu | thou
-lui | he
-lei | she
-noi | we
-voi | you
-loro | they
-mio | my
-mia |
-miei |
-mie |
-tuo |
-tua |
-tuoi | thy
-tue |
-suo |
-sua |
-suoi | his, her
-sue |
-nostro | our
-nostra |
-nostri |
-nostre |
-vostro | your
-vostra |
-vostri |
-vostre |
-mi | me
-ti | thee
-ci | us, there
-vi | you, there
-lo | him, the
-la | her, the
-li | them
-le | them, the
-gli | to him, the
-ne | from there etc
-il | the
-un | a
-uno | a
-una | a
-ma | but
-ed | and
-se | if
-perché | why, because
-anche | also
-come | how
-dov | where (as dov')
-dove | where
-che | who, that
-chi | who
-cui | whom
-non | not
-più | more
-quale | who, that
-quanto | how much
-quanti |
-quanta |
-quante |
-quello | that
-quelli |
-quella |
-quelle |
-questo | this
-questi |
-questa |
-queste |
-si | yes
-tutto | all
-tutti | all
-
- | single letter forms:
-
-a | at
-c | as c' for ce or ci
-e | and
-i | the
-l | as l'
-o | or
-
- | forms of avere, to have (not including the infinitive):
-
-ho
-hai
-ha
-abbiamo
-avete
-hanno
-abbia
-abbiate
-abbiano
-avrò
-avrai
-avrà
-avremo
-avrete
-avranno
-avrei
-avresti
-avrebbe
-avremmo
-avreste
-avrebbero
-avevo
-avevi
-aveva
-avevamo
-avevate
-avevano
-ebbi
-avesti
-ebbe
-avemmo
-aveste
-ebbero
-avessi
-avesse
-avessimo
-avessero
-avendo
-avuto
-avuta
-avuti
-avute
-
- | forms of essere, to be (not including the infinitive):
-sono
-sei
-è
-siamo
-siete
-sia
-siate
-siano
-sarò
-sarai
-sarà
-saremo
-sarete
-saranno
-sarei
-saresti
-sarebbe
-saremmo
-sareste
-sarebbero
-ero
-eri
-era
-eravamo
-eravate
-erano
-fui
-fosti
-fu
-fummo
-foste
-furono
-fossi
-fosse
-fossimo
-fossero
-essendo
-
- | forms of fare, to do (not including the infinitive, fa, fat-):
-faccio
-fai
-facciamo
-fanno
-faccia
-facciate
-facciano
-farò
-farai
-farà
-faremo
-farete
-faranno
-farei
-faresti
-farebbe
-faremmo
-fareste
-farebbero
-facevo
-facevi
-faceva
-facevamo
-facevate
-facevano
-feci
-facesti
-fece
-facemmo
-faceste
-fecero
-facessi
-facesse
-facessimo
-facessero
-facendo
-
- | forms of stare, to be (not including the infinitive):
-sto
-stai
-sta
-stiamo
-stanno
-stia
-stiate
-stiano
-starò
-starai
-starà
-staremo
-starete
-staranno
-starei
-staresti
-starebbe
-staremmo
-stareste
-starebbero
-stavo
-stavi
-stava
-stavamo
-stavate
-stavano
-stetti
-stesti
-stette
-stemmo
-steste
-stettero
-stessi
-stesse
-stessimo
-stessero
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.it import ITALIAN_STOPWORDS
class SearchItalian(SearchLanguage):
lang = 'it'
language_name = 'Italian'
js_stemmer_rawcode = 'italian-stemmer.js'
- stopwords = italian_stopwords
+ stopwords = ITALIAN_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('italian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/ja.py b/sphinx/search/ja.py
index f855fe4a67d..7045a314459 100644
--- a/sphinx/search/ja.py
+++ b/sphinx/search/ja.py
@@ -523,7 +523,8 @@ class SearchJapanese(SearchLanguage):
lang = 'ja'
language_name = 'Japanese'
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
dotted_path = options.get('type')
if dotted_path is None:
self.splitter = DefaultSplitter(options)
diff --git a/sphinx/search/minified-js/README.rst b/sphinx/search/minified-js/README.rst
new file mode 100644
index 00000000000..e14b36aef3b
--- /dev/null
+++ b/sphinx/search/minified-js/README.rst
@@ -0,0 +1,7 @@
+Regenerate minified files with::
+
+ npm install -g uglify-js
+ for f in $(ls sphinx/search/non-minified-js/); \
+ do echo $f && \
+ npx uglifyjs sphinx/search/non-minified-js/$f --compress --mangle --output sphinx/search/minified-js/$f; \
+ done
diff --git a/sphinx/search/minified-js/arabic-stemmer.js b/sphinx/search/minified-js/arabic-stemmer.js
new file mode 100644
index 00000000000..c8e178a75b4
--- /dev/null
+++ b/sphinx/search/minified-js/arabic-stemmer.js
@@ -0,0 +1 @@
+var ArabicStemmer=function(){var o=new BaseStemmer,l=[["ـ",-1,1],["ً",-1,1],["ٌ",-1,1],["ٍ",-1,1],["َ",-1,1],["ُ",-1,1],["ِ",-1,1],["ّ",-1,1],["ْ",-1,1],["٠",-1,2],["١",-1,3],["٢",-1,4],["٣",-1,5],["٤",-1,6],["٥",-1,7],["٦",-1,8],["٧",-1,9],["٨",-1,10],["٩",-1,11],["ﺀ",-1,12],["ﺁ",-1,16],["ﺂ",-1,16],["ﺃ",-1,13],["ﺄ",-1,13],["ﺅ",-1,17],["ﺆ",-1,17],["ﺇ",-1,14],["ﺈ",-1,14],["ﺉ",-1,15],["ﺊ",-1,15],["ﺋ",-1,15],["ﺌ",-1,15],["ﺍ",-1,18],["ﺎ",-1,18],["ﺏ",-1,19],["ﺐ",-1,19],["ﺑ",-1,19],["ﺒ",-1,19],["ﺓ",-1,20],["ﺔ",-1,20],["ﺕ",-1,21],["ﺖ",-1,21],["ﺗ",-1,21],["ﺘ",-1,21],["ﺙ",-1,22],["ﺚ",-1,22],["ﺛ",-1,22],["ﺜ",-1,22],["ﺝ",-1,23],["ﺞ",-1,23],["ﺟ",-1,23],["ﺠ",-1,23],["ﺡ",-1,24],["ﺢ",-1,24],["ﺣ",-1,24],["ﺤ",-1,24],["ﺥ",-1,25],["ﺦ",-1,25],["ﺧ",-1,25],["ﺨ",-1,25],["ﺩ",-1,26],["ﺪ",-1,26],["ﺫ",-1,27],["ﺬ",-1,27],["ﺭ",-1,28],["ﺮ",-1,28],["ﺯ",-1,29],["ﺰ",-1,29],["ﺱ",-1,30],["ﺲ",-1,30],["ﺳ",-1,30],["ﺴ",-1,30],["ﺵ",-1,31],["ﺶ",-1,31],["ﺷ",-1,31],["ﺸ",-1,31],["ﺹ",-1,32],["ﺺ",-1,32],["ﺻ",-1,32],["ﺼ",-1,32],["ﺽ",-1,33],["ﺾ",-1,33],["ﺿ",-1,33],["ﻀ",-1,33],["ﻁ",-1,34],["ﻂ",-1,34],["ﻃ",-1,34],["ﻄ",-1,34],["ﻅ",-1,35],["ﻆ",-1,35],["ﻇ",-1,35],["ﻈ",-1,35],["ﻉ",-1,36],["ﻊ",-1,36],["ﻋ",-1,36],["ﻌ",-1,36],["ﻍ",-1,37],["ﻎ",-1,37],["ﻏ",-1,37],["ﻐ",-1,37],["ﻑ",-1,38],["ﻒ",-1,38],["ﻓ",-1,38],["ﻔ",-1,38],["ﻕ",-1,39],["ﻖ",-1,39],["ﻗ",-1,39],["ﻘ",-1,39],["ﻙ",-1,40],["ﻚ",-1,40],["ﻛ",-1,40],["ﻜ",-1,40],["ﻝ",-1,41],["ﻞ",-1,41],["ﻟ",-1,41],["ﻠ",-1,41],["ﻡ",-1,42],["ﻢ",-1,42],["ﻣ",-1,42],["ﻤ",-1,42],["ﻥ",-1,43],["ﻦ",-1,43],["ﻧ",-1,43],["ﻨ",-1,43],["ﻩ",-1,44],["ﻪ",-1,44],["ﻫ",-1,44],["ﻬ",-1,44],["ﻭ",-1,45],["ﻮ",-1,45],["ﻯ",-1,46],["ﻰ",-1,46],["ﻱ",-1,47],["ﻲ",-1,47],["ﻳ",-1,47],["ﻴ",-1,47],["ﻵ",-1,51],["ﻶ",-1,51],["ﻷ",-1,49],["ﻸ",-1,49],["ﻹ",-1,50],["ﻺ",-1,50],["ﻻ",-1,48],["ﻼ",-1,48]],b=[["آ",-1,1],["أ",-1,1],["ؤ",-1,1],["إ",-1,1],["ئ",-1,1]],m=[["آ",-1,1],["أ",-1,1],["ؤ",-1,2],["إ",-1,1],["ئ",-1,3]],_=[["ال",-1,2],["بال",-1,1],["كال",-1,1],["لل",-1,2]],k=[["أآ",-1,2],["أأ",-1,1],["أؤ",-1,1],["أإ",-1,4],["أا",-1,3]],g=[["ف",-1,1],["و",-1,1]],d=[["ال",-1,2],["بال",-1,1],["كال",-1,1],["لل",-1,2]],h=[["ب",-1,1],["با",0,-1],["بب",0,2],["كك",-1,3]],v=[["سأ",-1,4],["ست",-1,2],["سن",-1,3],["سي",-1,1]],w=[["تست",-1,1],["نست",-1,1],["يست",-1,1]],C=[["كما",-1,3],["هما",-1,3],["نا",-1,2],["ها",-1,2],["ك",-1,1],["كم",-1,2],["هم",-1,2],["هن",-1,2],["ه",-1,1],["ي",-1,1]],S=[["ن",-1,1]],r=[["ا",-1,1],["و",-1,1],["ي",-1,1]],e=[["ات",-1,1]],i=[["ت",-1,1]],q=[["ة",-1,1]],A=[["ي",-1,1]],B=[["كما",-1,3],["هما",-1,3],["نا",-1,2],["ها",-1,2],["ك",-1,1],["كم",-1,2],["هم",-1,2],["كن",-1,2],["هن",-1,2],["ه",-1,1],["كمو",-1,3],["ني",-1,2]],c=[["ا",-1,1],["تا",0,2],["تما",0,4],["نا",0,2],["ت",-1,1],["ن",-1,1],["ان",5,3],["تن",5,2],["ون",5,3],["ين",5,3],["ي",-1,1]],W=[["وا",-1,1],["تم",-1,1]],j=[["و",-1,1],["تمو",0,2]],p=[["ى",-1,1]],x=!1,y=!1,z=!1;function D(){return o.ket=o.cursor,0!=o.find_among_b(r)&&(o.bra=o.cursor,!(o.current.length<=4||!o.slice_del()))}function E(){return o.ket=o.cursor,0!=o.find_among_b(e)&&(o.bra=o.cursor,!(o.current.length<5||!o.slice_del()))}function F(){return o.ket=o.cursor,0!=o.find_among_b(i)&&(o.bra=o.cursor,!(o.current.length<4||!o.slice_del()))}function G(){var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(c))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<5)return;if(o.slice_del())break;return;case 3:if(o.current.length<=5)return;if(o.slice_del())break;return;case 4:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}}this.stem=function(){x=!(y=z=!0);var r=o.cursor,r=((()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(_)))switch(o.ket=o.cursor,r){case 1:if(o.current.length<=4)return;x=!(y=!(z=!0));break;case 2:if(o.current.length<=3)return;x=!(y=!(z=!0))}})(),o.cursor=r,(()=>{for(var r,e=o.cursor;;){var i=o.cursor;r:{var c=o.cursor;if(o.bra=o.cursor,0!=(r=o.find_among(l)))switch(o.ket=o.cursor,r){case 1:if(o.slice_del())break;return;case 2:if(o.slice_from("0"))break;return;case 3:if(o.slice_from("1"))break;return;case 4:if(o.slice_from("2"))break;return;case 5:if(o.slice_from("3"))break;return;case 6:if(o.slice_from("4"))break;return;case 7:if(o.slice_from("5"))break;return;case 8:if(o.slice_from("6"))break;return;case 9:if(o.slice_from("7"))break;return;case 10:if(o.slice_from("8"))break;return;case 11:if(o.slice_from("9"))break;return;case 12:if(o.slice_from("ء"))break;return;case 13:if(o.slice_from("أ"))break;return;case 14:if(o.slice_from("إ"))break;return;case 15:if(o.slice_from("ئ"))break;return;case 16:if(o.slice_from("آ"))break;return;case 17:if(o.slice_from("ؤ"))break;return;case 18:if(o.slice_from("ا"))break;return;case 19:if(o.slice_from("ب"))break;return;case 20:if(o.slice_from("ة"))break;return;case 21:if(o.slice_from("ت"))break;return;case 22:if(o.slice_from("ث"))break;return;case 23:if(o.slice_from("ج"))break;return;case 24:if(o.slice_from("ح"))break;return;case 25:if(o.slice_from("خ"))break;return;case 26:if(o.slice_from("د"))break;return;case 27:if(o.slice_from("ذ"))break;return;case 28:if(o.slice_from("ر"))break;return;case 29:if(o.slice_from("ز"))break;return;case 30:if(o.slice_from("س"))break;return;case 31:if(o.slice_from("ش"))break;return;case 32:if(o.slice_from("ص"))break;return;case 33:if(o.slice_from("ض"))break;return;case 34:if(o.slice_from("ط"))break;return;case 35:if(o.slice_from("ظ"))break;return;case 36:if(o.slice_from("ع"))break;return;case 37:if(o.slice_from("غ"))break;return;case 38:if(o.slice_from("ف"))break;return;case 39:if(o.slice_from("ق"))break;return;case 40:if(o.slice_from("ك"))break;return;case 41:if(o.slice_from("ل"))break;return;case 42:if(o.slice_from("م"))break;return;case 43:if(o.slice_from("ن"))break;return;case 44:if(o.slice_from("ه"))break;return;case 45:if(o.slice_from("و"))break;return;case 46:if(o.slice_from("ى"))break;return;case 47:if(o.slice_from("ي"))break;return;case 48:if(o.slice_from("لا"))break;return;case 49:if(o.slice_from("لأ"))break;return;case 50:if(o.slice_from("لإ"))break;return;case 51:if(o.slice_from("لآ"))break;return}else{if(o.cursor=c,o.cursor>=o.limit)break r;o.cursor++}continue}o.cursor=i;break}o.cursor=e})(),o.limit_backward=o.cursor,o.cursor=o.limit,o.limit-o.cursor);r:e:{var e=o.limit-o.cursor;i:if(y){c:{var i=o.limit-o.cursor;s:{for(var c=1;;){var s=o.limit-o.cursor;if(!(()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(B))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<5)return;if(o.slice_del())break;return;case 3:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}})()){o.cursor=o.limit-s;break}c--}if(!(0{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(j))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}})())){if(o.cursor=o.limit-t,o.cursor<=o.limit_backward)break s;o.cursor--}break c}}if(o.cursor=o.limit-i,(o.ket=o.cursor,0==o.find_among_b(W)||(o.bra=o.cursor,o.current.length<5)||!o.slice_del())&&(o.cursor=o.limit-i,!G()))break i}break e}if(o.cursor=o.limit-e,z){var u=o.limit-o.cursor;i:c:{var a=o.limit-o.cursor;if(o.ket=o.cursor,0==o.find_among_b(q)||(o.bra=o.cursor,o.current.length<4)||!o.slice_del()){o.cursor=o.limit-a;s:if(!x&&(()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(C))){switch(o.bra=o.cursor,r){case 1:if(o.current.length<4)return;if(o.slice_del())break;return;case 2:if(o.current.length<5)return;if(o.slice_del())break;return;case 3:if(o.current.length<6)return;if(o.slice_del())break;return}return 1}})()){var n=o.limit-o.cursor;if(!D()&&(o.cursor=o.limit-n,!E())&&(o.cursor=o.limit-n,!F())){if(o.cursor=o.limit-n,o.cursor<=o.limit_backward)break s;o.cursor--}break c}if(o.cursor=o.limit-a,o.ket=o.cursor,!(0==o.find_among_b(S)||(o.bra=o.cursor,o.current.length<=5))&&o.slice_del()){n=o.limit-o.cursor;if(D()||(o.cursor=o.limit-n,E())||(o.cursor=o.limit-n,F()))break c}if(o.cursor=o.limit-a,(x||!D())&&(o.cursor=o.limit-a,!E())){o.cursor=o.limit-u;break i}}}if(o.ket=o.cursor,!(0==o.find_among_b(A)||(o.bra=o.cursor,o.current.length<3))&&o.slice_del())break e}if(o.cursor=o.limit-e,o.ket=o.cursor,0==o.find_among_b(p)||(o.bra=o.cursor,!o.slice_from("ي")))break r}o.cursor=o.limit-r,o.cursor=o.limit_backward;r=o.cursor;r:{var f=o.cursor,f=((()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(k))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=3)return;if(o.slice_from("أ"))break;return;case 2:if(o.current.length<=3)return;if(o.slice_from("آ"))break;return;case 3:if(o.current.length<=3)return;if(o.slice_from("ا"))break;return;case 4:if(o.current.length<=3)return;if(o.slice_from("إ"))break;return}return 1}})()||(o.cursor=f),o.cursor),f=((()=>{var r;return o.bra=o.cursor,0==o.find_among(g)||(o.ket=o.cursor,o.current.length<=3)||(r=o.cursor,o.eq_s("ا"))?void 0:(o.cursor=r,!!o.slice_del())})()||(o.cursor=f),o.cursor);if(!(()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(d))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=5)return;if(o.slice_del())break;return;case 2:if(o.current.length<=4)return;if(o.slice_del())break;return}return 1}})()&&(o.cursor=f,!z||!(()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(h))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=3)return;if(o.slice_del())break;return;case 2:if(o.current.length<=3)return;if(o.slice_from("ب"))break;return;case 3:if(o.current.length<=3)return;if(o.slice_from("ك"))break;return}return 1}})())){if(o.cursor=f,!y)break r;f=o.cursor;if((()=>{var r;if(o.bra=o.cursor,0!=(r=o.find_among(v))){switch(o.ket=o.cursor,r){case 1:if(o.current.length<=4)return;if(o.slice_from("ي"))break;return;case 2:if(o.current.length<=4)return;if(o.slice_from("ت"))break;return;case 3:if(o.current.length<=4)return;if(o.slice_from("ن"))break;return;case 4:if(o.current.length<=4)return;if(o.slice_from("أ"))break;return}return 1}})()||(o.cursor=f),o.bra=o.cursor,0==o.find_among(w)||(o.ket=o.cursor,o.current.length<=4)||(z=!(y=!0),!o.slice_from("است")))break r}}return o.cursor=r,(()=>{var r,e=o.cursor;if(o.limit_backward=o.cursor,o.cursor=o.limit,o.ket=o.cursor,0!=o.find_among_b(b)){if(o.bra=o.cursor,!o.slice_from("ء"))return;o.cursor=o.limit_backward}for(o.cursor=e,e=o.cursor;;){var i=o.cursor;r:{var c=o.cursor;if(o.bra=o.cursor,0!=(r=o.find_among(m)))switch(o.ket=o.cursor,r){case 1:if(o.slice_from("ا"))break;return;case 2:if(o.slice_from("و"))break;return;case 3:if(o.slice_from("ي"))break;return}else{if(o.cursor=c,o.cursor>=o.limit)break r;o.cursor++}continue}o.cursor=i;break}o.cursor=e})(),!0},this.stemWord=function(r){return o.setCurrent(r),this.stem(),o.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/armenian-stemmer.js b/sphinx/search/minified-js/armenian-stemmer.js
new file mode 100644
index 00000000000..6b5c33afba2
--- /dev/null
+++ b/sphinx/search/minified-js/armenian-stemmer.js
@@ -0,0 +1 @@
+var ArmenianStemmer=function(){var o=new BaseStemmer,u=[["րորդ",-1,1],["երորդ",0,1],["ալի",-1,1],["ակի",-1,1],["որակ",-1,1],["եղ",-1,1],["ական",-1,1],["արան",-1,1],["են",-1,1],["եկեն",8,1],["երեն",8,1],["որէն",-1,1],["ին",-1,1],["գին",12,1],["ովին",12,1],["լայն",-1,1],["վուն",-1,1],["պես",-1,1],["իվ",-1,1],["ատ",-1,1],["ավետ",-1,1],["կոտ",-1,1],["բար",-1,1]],c=[["ա",-1,1],["ացա",0,1],["եցա",0,1],["վե",-1,1],["ացրի",-1,1],["ացի",-1,1],["եցի",-1,1],["վեցի",6,1],["ալ",-1,1],["ըալ",8,1],["անալ",8,1],["ենալ",8,1],["ացնալ",8,1],["ել",-1,1],["ըել",13,1],["նել",13,1],["ցնել",15,1],["եցնել",16,1],["չել",13,1],["վել",13,1],["ացվել",19,1],["եցվել",19,1],["տել",13,1],["ատել",22,1],["ոտել",22,1],["կոտել",24,1],["ված",-1,1],["ում",-1,1],["վում",27,1],["ան",-1,1],["ցան",29,1],["ացան",30,1],["ացրին",-1,1],["ացին",-1,1],["եցին",-1,1],["վեցին",34,1],["ալիս",-1,1],["ելիս",-1,1],["ավ",-1,1],["ացավ",38,1],["եցավ",38,1],["ալով",-1,1],["ելով",-1,1],["ար",-1,1],["ացար",43,1],["եցար",43,1],["ացրիր",-1,1],["ացիր",-1,1],["եցիր",-1,1],["վեցիր",48,1],["աց",-1,1],["եց",-1,1],["ացրեց",51,1],["ալուց",-1,1],["ելուց",-1,1],["ալու",-1,1],["ելու",-1,1],["աք",-1,1],["ցաք",57,1],["ացաք",58,1],["ացրիք",-1,1],["ացիք",-1,1],["եցիք",-1,1],["վեցիք",62,1],["անք",-1,1],["ցանք",64,1],["ացանք",65,1],["ացրինք",-1,1],["ացինք",-1,1],["եցինք",-1,1],["վեցինք",69,1]],s=[["որդ",-1,1],["ույթ",-1,1],["ուհի",-1,1],["ցի",-1,1],["իլ",-1,1],["ակ",-1,1],["յակ",5,1],["անակ",5,1],["իկ",-1,1],["ուկ",-1,1],["ան",-1,1],["պան",10,1],["ստան",10,1],["արան",10,1],["եղէն",-1,1],["յուն",-1,1],["ություն",15,1],["ածո",-1,1],["իչ",-1,1],["ուս",-1,1],["ուստ",-1,1],["գար",-1,1],["վոր",-1,1],["ավոր",22,1],["ոց",-1,1],["անօց",-1,1],["ու",-1,1],["ք",-1,1],["չեք",27,1],["իք",27,1],["ալիք",29,1],["անիք",29,1],["վածք",27,1],["ույք",27,1],["ենք",27,1],["ոնք",27,1],["ունք",27,1],["մունք",36,1],["իչք",27,1],["արք",27,1]],r=[["սա",-1,1],["վա",-1,1],["ամբ",-1,1],["դ",-1,1],["անդ",3,1],["ությանդ",4,1],["վանդ",4,1],["ոջդ",3,1],["երդ",3,1],["ներդ",8,1],["ուդ",3,1],["ը",-1,1],["անը",11,1],["ությանը",12,1],["վանը",12,1],["ոջը",11,1],["երը",11,1],["ները",16,1],["ի",-1,1],["վի",18,1],["երի",18,1],["ների",20,1],["անում",-1,1],["երում",-1,1],["ներում",23,1],["ն",-1,1],["ան",25,1],["ության",26,1],["վան",26,1],["ին",25,1],["երին",29,1],["ներին",30,1],["ությանն",25,1],["երն",25,1],["ներն",33,1],["ուն",25,1],["ոջ",-1,1],["ությանս",-1,1],["վանս",-1,1],["ոջս",-1,1],["ով",-1,1],["անով",40,1],["վով",40,1],["երով",40,1],["ներով",43,1],["եր",-1,1],["ներ",45,1],["ց",-1,1],["ից",47,1],["վանից",48,1],["ոջից",48,1],["վից",48,1],["երից",48,1],["ներից",52,1],["ցից",48,1],["ոց",47,1],["ուց",47,1]],t=[209,4,128,0,18],n=0,e=0;function m(){o.ket=o.cursor,0!=o.find_among_b(r)&&(o.bra=o.cursor,n<=o.cursor)&&o.slice_del()}this.stem=function(){var r,i;return e=o.limit,n=o.limit,r=o.cursor,o.go_out_grouping(t,1377,1413)&&(o.cursor++,e=o.cursor,o.go_in_grouping(t,1377,1413))&&(o.cursor++,o.go_out_grouping(t,1377,1413))&&(o.cursor++,o.go_in_grouping(t,1377,1413))&&(o.cursor++,n=o.cursor),o.cursor=r,o.limit_backward=o.cursor,o.cursor=o.limit,!(o.cursor=this.limit)return false;var s=this.current.charCodeAt(this.cursor);if(s>i||s>>3]&1<<(s&7))==0)return false;this.cursor++;return true};this.in_grouping_b=function(r,t,i){if(this.cursor<=this.limit_backward)return false;var s=this.current.charCodeAt(this.cursor-1);if(s>i||s>>3]&1<<(s&7))==0)return false;this.cursor--;return true};this.out_grouping=function(r,t,i){if(this.cursor>=this.limit)return false;var s=this.current.charCodeAt(this.cursor);if(s>i||s>>3]&1<<(s&7))==0){this.cursor++;return true}return false};this.out_grouping_b=function(r,t,i){if(this.cursor<=this.limit_backward)return false;var s=this.current.charCodeAt(this.cursor-1);if(s>i||s>>3]&1<<(s&7))==0){this.cursor--;return true}return false};this.eq_s=function(r){if(this.limit-this.cursor>>1);var a=0;var f=h0)break;if(i==t)break;if(n)break;n=true}}do{var l=r[t];if(h>=l[0].length){this.cursor=s+l[0].length;if(l.length<4)return l[2];var v=l[3](this);this.cursor=s+l[0].length;if(v)return l[2]}t=l[1]}while(t>=0);return 0};this.find_among_b=function(r){var t=0;var i=r.length;var s=this.cursor;var e=this.limit_backward;var h=0;var u=0;var n=false;while(true){var c=t+(i-t>>1);var a=0;var f=h=0;o--){if(s-f==e){a=-1;break}a=this.current.charCodeAt(s-1-f)-l[0].charCodeAt(o);if(a!=0)break;f++}if(a<0){i=c;u=f}else{t=c;h=f}if(i-t<=1){if(t>0)break;if(i==t)break;if(n)break;n=true}}do{var l=r[t];if(h>=l[0].length){this.cursor=s-l[0].length;if(l.length<4)return l[2];var v=l[3](this);this.cursor=s-l[0].length;if(v)return l[2]}t=l[1]}while(t>=0);return 0};this.replace_s=function(r,t,i){var s=i.length-(t-r);this.current=this.current.slice(0,r)+i+this.current.slice(t);this.limit+=s;if(this.cursor>=t)this.cursor+=s;else if(this.cursor>r)this.cursor=r;return s};this.slice_check=function(){if(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length){return false}return true};this.slice_from=function(r){var t=false;if(this.slice_check()){this.replace_s(this.bra,this.ket,r);t=true}return t};this.slice_del=function(){return this.slice_from("")};this.insert=function(r,t,i){var s=this.replace_s(r,t,i);if(r<=this.bra)this.bra+=s;if(r<=this.ket)this.ket+=s};this.slice_to=function(){var r="";if(this.slice_check()){r=this.current.slice(this.bra,this.ket)}return r};this.assign_to=function(){return this.current.slice(0,this.limit)}};
\ No newline at end of file
+BaseStemmer=function(){this.current="",this.cursor=0,this.limit=0,this.limit_backward=0,this.bra=0,this.ket=0,this.setCurrent=function(t){this.current=t,this.cursor=0,this.limit=this.current.length,this.limit_backward=0,this.bra=this.cursor,this.ket=this.limit},this.getCurrent=function(){return this.current},this.copy_from=function(t){this.current=t.current,this.cursor=t.cursor,this.limit=t.limit,this.limit_backward=t.limit_backward,this.bra=t.bra,this.ket=t.ket},this.in_grouping=function(t,r,i){return!(this.cursor>=this.limit||i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i))||(this.cursor++,0))},this.go_in_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.in_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward||i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i))||(this.cursor--,0))},this.go_in_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(i>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.out_grouping=function(t,r,i){return!(this.cursor>=this.limit)&&(i<(i=this.current.charCodeAt(this.cursor))||i>>3]&1<<(7&i)))&&(this.cursor++,!0)},this.go_out_grouping=function(t,r,i){for(;this.cursor>>3]&1<<(7&s)))return!0;this.cursor++}return!1},this.out_grouping_b=function(t,r,i){return!(this.cursor<=this.limit_backward)&&(i<(i=this.current.charCodeAt(this.cursor-1))||i>>3]&1<<(7&i)))&&(this.cursor--,!0)},this.go_out_grouping_b=function(t,r,i){for(;this.cursor>this.limit_backward;){var s=this.current.charCodeAt(this.cursor-1);if(s<=i&&r<=s&&0!=(t[(s-=r)>>>3]&1<<(7&s)))return!0;this.cursor--}return!1},this.eq_s=function(t){return!(this.limit-this.cursor>>1),o=0,a=e=(l=t[r])[0].length){if(this.cursor=s+l[0].length,l.length<4)return l[2];var g=l[3](this);if(this.cursor=s+l[0].length,g)return l[2]}}while(0<=(r=l[1]));return 0},this.find_among_b=function(t){for(var r=0,i=t.length,s=this.cursor,h=this.limit_backward,e=0,n=0,c=!1;;){for(var u,o=r+(i-r>>1),a=0,l=e=(u=t[r])[0].length){if(this.cursor=s-u[0].length,u.length<4)return u[2];var g=u[3](this);if(this.cursor=s-u[0].length,g)return u[2]}}while(0<=(r=u[1]));return 0},this.replace_s=function(t,r,i){var s=i.length-(r-t);return this.current=this.current.slice(0,t)+i+this.current.slice(r),this.limit+=s,this.cursor>=r?this.cursor+=s:this.cursor>t&&(this.cursor=t),s},this.slice_check=function(){return!(this.bra<0||this.bra>this.ket||this.ket>this.limit||this.limit>this.current.length)},this.slice_from=function(t){var r=!1;return this.slice_check()&&(this.replace_s(this.bra,this.ket,t),r=!0),r},this.slice_del=function(){return this.slice_from("")},this.insert=function(t,r,i){r=this.replace_s(t,r,i);t<=this.bra&&(this.bra+=r),t<=this.ket&&(this.ket+=r)},this.slice_to=function(){var t="";return t=this.slice_check()?this.current.slice(this.bra,this.ket):t},this.assign_to=function(){return this.current.slice(0,this.limit)}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/basque-stemmer.js b/sphinx/search/minified-js/basque-stemmer.js
new file mode 100644
index 00000000000..3e1c4337a69
--- /dev/null
+++ b/sphinx/search/minified-js/basque-stemmer.js
@@ -0,0 +1 @@
+var BasqueStemmer=function(){var o=new BaseStemmer,u=[["idea",-1,1],["bidea",0,1],["kidea",0,1],["pidea",0,1],["kundea",-1,1],["galea",-1,1],["tailea",-1,1],["tzailea",-1,1],["gunea",-1,1],["kunea",-1,1],["tzaga",-1,1],["gaia",-1,1],["aldia",-1,1],["taldia",12,1],["karia",-1,1],["garria",-1,2],["karria",-1,1],["ka",-1,1],["tzaka",17,1],["la",-1,1],["mena",-1,1],["pena",-1,1],["kina",-1,1],["ezina",-1,1],["tezina",23,1],["kuna",-1,1],["tuna",-1,1],["kizuna",-1,1],["era",-1,1],["bera",28,1],["arabera",29,-1],["kera",28,1],["pera",28,1],["orra",-1,1],["korra",33,1],["dura",-1,1],["gura",-1,1],["kura",-1,1],["tura",-1,1],["eta",-1,1],["keta",39,1],["gailua",-1,1],["eza",-1,1],["erreza",42,1],["tza",-1,2],["gaitza",44,1],["kaitza",44,1],["kuntza",44,1],["ide",-1,1],["bide",48,1],["kide",48,1],["pide",48,1],["kunde",-1,1],["tzake",-1,1],["tzeke",-1,1],["le",-1,1],["gale",55,1],["taile",55,1],["tzaile",55,1],["gune",-1,1],["kune",-1,1],["tze",-1,1],["atze",61,1],["gai",-1,1],["aldi",-1,1],["taldi",64,1],["ki",-1,1],["ari",-1,1],["kari",67,1],["lari",67,1],["tari",67,1],["etari",70,1],["garri",-1,2],["karri",-1,1],["arazi",-1,1],["tarazi",74,1],["an",-1,1],["ean",76,1],["rean",77,1],["kan",76,1],["etan",76,1],["atseden",-1,-1],["men",-1,1],["pen",-1,1],["kin",-1,1],["rekin",84,1],["ezin",-1,1],["tezin",86,1],["tun",-1,1],["kizun",-1,1],["go",-1,1],["ago",90,1],["tio",-1,1],["dako",-1,1],["or",-1,1],["kor",94,1],["tzat",-1,1],["du",-1,1],["gailu",-1,1],["tu",-1,1],["atu",99,1],["aldatu",100,1],["tatu",100,1],["baditu",99,-1],["ez",-1,1],["errez",104,1],["tzez",104,1],["gaitz",-1,1],["kaitz",-1,1]],r=[["ada",-1,1],["kada",0,1],["anda",-1,1],["denda",-1,1],["gabea",-1,1],["kabea",-1,1],["aldea",-1,1],["kaldea",6,1],["taldea",6,1],["ordea",-1,1],["zalea",-1,1],["tzalea",10,1],["gilea",-1,1],["emea",-1,1],["kumea",-1,1],["nea",-1,1],["enea",15,1],["zionea",15,1],["unea",15,1],["gunea",18,1],["pea",-1,1],["aurrea",-1,1],["tea",-1,1],["kotea",22,1],["artea",22,1],["ostea",22,1],["etxea",-1,1],["ga",-1,1],["anga",27,1],["gaia",-1,1],["aldia",-1,1],["taldia",30,1],["handia",-1,1],["mendia",-1,1],["geia",-1,1],["egia",-1,1],["degia",35,1],["tegia",35,1],["nahia",-1,1],["ohia",-1,1],["kia",-1,1],["tokia",40,1],["oia",-1,1],["koia",42,1],["aria",-1,1],["karia",44,1],["laria",44,1],["taria",44,1],["eria",-1,1],["keria",48,1],["teria",48,1],["garria",-1,2],["larria",-1,1],["kirria",-1,1],["duria",-1,1],["asia",-1,1],["tia",-1,1],["ezia",-1,1],["bizia",-1,1],["ontzia",-1,1],["ka",-1,1],["joka",60,3],["aurka",60,-1],["ska",60,1],["xka",60,1],["zka",60,1],["gibela",-1,1],["gela",-1,1],["kaila",-1,1],["skila",-1,1],["tila",-1,1],["ola",-1,1],["na",-1,1],["kana",72,1],["ena",72,1],["garrena",74,1],["gerrena",74,1],["urrena",74,1],["zaina",72,1],["tzaina",78,1],["kina",72,1],["mina",72,1],["garna",72,1],["una",72,1],["duna",83,1],["asuna",83,1],["tasuna",85,1],["ondoa",-1,1],["kondoa",87,1],["ngoa",-1,1],["zioa",-1,1],["koa",-1,1],["takoa",91,1],["zkoa",91,1],["noa",-1,1],["zinoa",94,1],["aroa",-1,1],["taroa",96,1],["zaroa",96,1],["eroa",-1,1],["oroa",-1,1],["osoa",-1,1],["toa",-1,1],["ttoa",102,1],["ztoa",102,1],["txoa",-1,1],["tzoa",-1,1],["ñoa",-1,1],["ra",-1,1],["ara",108,1],["dara",109,1],["liara",109,1],["tiara",109,1],["tara",109,1],["etara",113,1],["tzara",109,1],["bera",108,1],["kera",108,1],["pera",108,1],["ora",108,2],["tzarra",108,1],["korra",108,1],["tra",108,1],["sa",-1,1],["osa",123,1],["ta",-1,1],["eta",125,1],["keta",126,1],["sta",125,1],["dua",-1,1],["mendua",129,1],["ordua",129,1],["lekua",-1,1],["burua",-1,1],["durua",-1,1],["tsua",-1,1],["tua",-1,1],["mentua",136,1],["estua",136,1],["txua",-1,1],["zua",-1,1],["tzua",140,1],["za",-1,1],["eza",142,1],["eroza",142,1],["tza",142,2],["koitza",145,1],["antza",145,1],["gintza",145,1],["kintza",145,1],["kuntza",145,1],["gabe",-1,1],["kabe",-1,1],["kide",-1,1],["alde",-1,1],["kalde",154,1],["talde",154,1],["orde",-1,1],["ge",-1,1],["zale",-1,1],["tzale",159,1],["gile",-1,1],["eme",-1,1],["kume",-1,1],["ne",-1,1],["zione",164,1],["une",164,1],["gune",166,1],["pe",-1,1],["aurre",-1,1],["te",-1,1],["kote",170,1],["arte",170,1],["oste",170,1],["etxe",-1,1],["gai",-1,1],["di",-1,1],["aldi",176,1],["taldi",177,1],["geldi",176,-1],["handi",176,1],["mendi",176,1],["gei",-1,1],["egi",-1,1],["degi",183,1],["tegi",183,1],["nahi",-1,1],["ohi",-1,1],["ki",-1,1],["toki",188,1],["oi",-1,1],["goi",190,1],["koi",190,1],["ari",-1,1],["kari",193,1],["lari",193,1],["tari",193,1],["garri",-1,2],["larri",-1,1],["kirri",-1,1],["duri",-1,1],["asi",-1,1],["ti",-1,1],["ontzi",-1,1],["ñi",-1,1],["ak",-1,1],["ek",-1,1],["tarik",-1,1],["gibel",-1,1],["ail",-1,1],["kail",209,1],["kan",-1,1],["tan",-1,1],["etan",212,1],["en",-1,4],["ren",214,2],["garren",215,1],["gerren",215,1],["urren",215,1],["ten",214,4],["tzen",214,4],["zain",-1,1],["tzain",221,1],["kin",-1,1],["min",-1,1],["dun",-1,1],["asun",-1,1],["tasun",226,1],["aizun",-1,1],["ondo",-1,1],["kondo",229,1],["go",-1,1],["ngo",231,1],["zio",-1,1],["ko",-1,1],["trako",234,5],["tako",234,1],["etako",236,1],["eko",234,1],["tariko",234,1],["sko",234,1],["tuko",234,1],["minutuko",241,6],["zko",234,1],["no",-1,1],["zino",244,1],["ro",-1,1],["aro",246,1],["igaro",247,-1],["taro",247,1],["zaro",247,1],["ero",246,1],["giro",246,1],["oro",246,1],["oso",-1,1],["to",-1,1],["tto",255,1],["zto",255,1],["txo",-1,1],["tzo",-1,1],["gintzo",259,1],["ño",-1,1],["zp",-1,1],["ar",-1,1],["dar",263,1],["behar",263,1],["zehar",263,-1],["liar",263,1],["tiar",263,1],["tar",263,1],["tzar",263,1],["or",-1,2],["kor",271,1],["os",-1,1],["ket",-1,1],["du",-1,1],["mendu",275,1],["ordu",275,1],["leku",-1,1],["buru",-1,2],["duru",-1,1],["tsu",-1,1],["tu",-1,1],["tatu",282,4],["mentu",282,1],["estu",282,1],["txu",-1,1],["zu",-1,1],["tzu",287,1],["gintzu",288,1],["z",-1,1],["ez",290,1],["eroz",290,1],["tz",290,1],["koitz",293,1]],n=[["zlea",-1,2],["keria",-1,1],["la",-1,1],["era",-1,1],["dade",-1,1],["tade",-1,1],["date",-1,1],["tate",-1,1],["gi",-1,1],["ki",-1,1],["ik",-1,1],["lanik",10,1],["rik",10,1],["larik",12,1],["ztik",10,1],["go",-1,1],["ro",-1,1],["ero",16,1],["to",-1,1]],k=[17,65,16],g=0,s=0,z=0;function l(){return z<=o.cursor}function d(){return g<=o.cursor}function c(){var a;if(o.ket=o.cursor,0!=(a=o.find_among_b(r))){switch(o.bra=o.cursor,a){case 1:if(!l())return;if(o.slice_del())break;return;case 2:if(!d())return;if(o.slice_del())break;return;case 3:if(o.slice_from("jok"))break;return;case 4:if(!(s<=o.cursor))return;if(o.slice_del())break;return;case 5:if(o.slice_from("tra"))break;return;case 6:if(o.slice_from("minutu"))break;return}return 1}}this.stem=function(){z=o.limit,s=o.limit,g=o.limit;var a=o.cursor;a:{r:{var r=o.cursor;i:if(o.in_grouping(k,97,117)){var i=o.cursor;if(!o.out_grouping(k,97,117)||!o.go_out_grouping(k,97,117)){if(o.cursor=i,!o.in_grouping(k,97,117))break i;if(!o.go_in_grouping(k,97,117))break i}o.cursor++;break r}if(o.cursor=r,!o.out_grouping(k,97,117))break a;i=o.cursor;if(o.out_grouping(k,97,117)&&o.go_out_grouping(k,97,117));else{if(o.cursor=i,!o.in_grouping(k,97,117))break a;if(o.cursor>=o.limit)break a}o.cursor++}z=o.cursor}for(o.cursor=a,a=o.cursor,o.go_out_grouping(k,97,117)&&(o.cursor++,o.go_in_grouping(k,97,117))&&(o.cursor++,s=o.cursor,o.go_out_grouping(k,97,117))&&(o.cursor++,o.go_in_grouping(k,97,117))&&(o.cursor++,g=o.cursor),o.cursor=a,o.limit_backward=o.cursor,o.cursor=o.limit;;){var e=o.limit-o.cursor;if(!(()=>{var a;if(o.ket=o.cursor,0!=(a=o.find_among_b(u))){switch(o.bra=o.cursor,a){case 1:if(!l())return;if(o.slice_del())break;return;case 2:if(!d())return;if(o.slice_del())break;return}return 1}})()){o.cursor=o.limit-e;break}}for(;;){var t=o.limit-o.cursor;if(!c()){o.cursor=o.limit-t;break}}a=o.limit-o.cursor;return(()=>{var a;if(o.ket=o.cursor,0!=(a=o.find_among_b(n)))switch(o.bra=o.cursor,a){case 1:if(!l())return;if(o.slice_del())break;return;case 2:if(o.slice_from("z"))break}})(),o.cursor=o.limit-a,o.cursor=o.limit_backward,!0},this.stemWord=function(a){return o.setCurrent(a),this.stem(),o.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/catalan-stemmer.js b/sphinx/search/minified-js/catalan-stemmer.js
new file mode 100644
index 00000000000..75788216aa1
--- /dev/null
+++ b/sphinx/search/minified-js/catalan-stemmer.js
@@ -0,0 +1 @@
+var CatalanStemmer=function(){var e=new BaseStemmer,r=[["",-1,7],["·",0,6],["à",0,1],["á",0,1],["è",0,2],["é",0,2],["ì",0,3],["í",0,3],["ï",0,3],["ò",0,4],["ó",0,4],["ú",0,5],["ü",0,5]],a=[["la",-1,1],["-la",0,1],["sela",0,1],["le",-1,1],["me",-1,1],["-me",4,1],["se",-1,1],["-te",-1,1],["hi",-1,1],["'hi",8,1],["li",-1,1],["-li",10,1],["'l",-1,1],["'m",-1,1],["-m",-1,1],["'n",-1,1],["-n",-1,1],["ho",-1,1],["'ho",17,1],["lo",-1,1],["selo",19,1],["'s",-1,1],["las",-1,1],["selas",22,1],["les",-1,1],["-les",24,1],["'ls",-1,1],["-ls",-1,1],["'ns",-1,1],["-ns",-1,1],["ens",-1,1],["los",-1,1],["selos",31,1],["nos",-1,1],["-nos",33,1],["vos",-1,1],["us",-1,1],["-us",36,1],["'t",-1,1]],t=[["ica",-1,4],["lógica",0,3],["enca",-1,1],["ada",-1,2],["ancia",-1,1],["encia",-1,1],["ència",-1,1],["ícia",-1,1],["logia",-1,3],["inia",-1,1],["íinia",9,1],["eria",-1,1],["ària",-1,1],["atòria",-1,1],["alla",-1,1],["ella",-1,1],["ívola",-1,1],["ima",-1,1],["íssima",17,1],["quíssima",18,5],["ana",-1,1],["ina",-1,1],["era",-1,1],["sfera",22,1],["ora",-1,1],["dora",24,1],["adora",25,1],["adura",-1,1],["esa",-1,1],["osa",-1,1],["assa",-1,1],["essa",-1,1],["issa",-1,1],["eta",-1,1],["ita",-1,1],["ota",-1,1],["ista",-1,1],["ialista",36,1],["ionista",36,1],["iva",-1,1],["ativa",39,1],["nça",-1,1],["logía",-1,3],["ic",-1,4],["ístic",43,1],["enc",-1,1],["esc",-1,1],["ud",-1,1],["atge",-1,1],["ble",-1,1],["able",49,1],["ible",49,1],["isme",-1,1],["ialisme",52,1],["ionisme",52,1],["ivisme",52,1],["aire",-1,1],["icte",-1,1],["iste",-1,1],["ici",-1,1],["íci",-1,1],["logi",-1,3],["ari",-1,1],["tori",-1,1],["al",-1,1],["il",-1,1],["all",-1,1],["ell",-1,1],["ívol",-1,1],["isam",-1,1],["issem",-1,1],["ìssem",-1,1],["íssem",-1,1],["íssim",-1,1],["quíssim",73,5],["amen",-1,1],["ìssin",-1,1],["ar",-1,1],["ificar",77,1],["egar",77,1],["ejar",77,1],["itar",77,1],["itzar",77,1],["fer",-1,1],["or",-1,1],["dor",84,1],["dur",-1,1],["doras",-1,1],["ics",-1,4],["lógics",88,3],["uds",-1,1],["nces",-1,1],["ades",-1,2],["ancies",-1,1],["encies",-1,1],["ències",-1,1],["ícies",-1,1],["logies",-1,3],["inies",-1,1],["ínies",-1,1],["eries",-1,1],["àries",-1,1],["atòries",-1,1],["bles",-1,1],["ables",103,1],["ibles",103,1],["imes",-1,1],["íssimes",106,1],["quíssimes",107,5],["formes",-1,1],["ismes",-1,1],["ialismes",110,1],["ines",-1,1],["eres",-1,1],["ores",-1,1],["dores",114,1],["idores",115,1],["dures",-1,1],["eses",-1,1],["oses",-1,1],["asses",-1,1],["ictes",-1,1],["ites",-1,1],["otes",-1,1],["istes",-1,1],["ialistes",124,1],["ionistes",124,1],["iques",-1,4],["lógiques",127,3],["ives",-1,1],["atives",129,1],["logíes",-1,3],["allengües",-1,1],["icis",-1,1],["ícis",-1,1],["logis",-1,3],["aris",-1,1],["toris",-1,1],["ls",-1,1],["als",138,1],["ells",138,1],["ims",-1,1],["íssims",141,1],["quíssims",142,5],["ions",-1,1],["cions",144,1],["acions",145,2],["esos",-1,1],["osos",-1,1],["assos",-1,1],["issos",-1,1],["ers",-1,1],["ors",-1,1],["dors",152,1],["adors",153,1],["idors",153,1],["ats",-1,1],["itats",156,1],["bilitats",157,1],["ivitats",157,1],["ativitats",159,1],["ïtats",156,1],["ets",-1,1],["ants",-1,1],["ents",-1,1],["ments",164,1],["aments",165,1],["ots",-1,1],["uts",-1,1],["ius",-1,1],["trius",169,1],["atius",169,1],["ès",-1,1],["és",-1,1],["ís",-1,1],["dís",174,1],["ós",-1,1],["itat",-1,1],["bilitat",177,1],["ivitat",177,1],["ativitat",179,1],["ïtat",-1,1],["et",-1,1],["ant",-1,1],["ent",-1,1],["ient",184,1],["ment",184,1],["ament",186,1],["isament",187,1],["ot",-1,1],["isseu",-1,1],["ìsseu",-1,1],["ísseu",-1,1],["triu",-1,1],["íssiu",-1,1],["atiu",-1,1],["ó",-1,1],["ió",196,1],["ció",197,1],["ació",198,1]],n=[["aba",-1,1],["esca",-1,1],["isca",-1,1],["ïsca",-1,1],["ada",-1,1],["ida",-1,1],["uda",-1,1],["ïda",-1,1],["ia",-1,1],["aria",8,1],["iria",8,1],["ara",-1,1],["iera",-1,1],["ira",-1,1],["adora",-1,1],["ïra",-1,1],["ava",-1,1],["ixa",-1,1],["itza",-1,1],["ía",-1,1],["aría",19,1],["ería",19,1],["iría",19,1],["ïa",-1,1],["isc",-1,1],["ïsc",-1,1],["ad",-1,1],["ed",-1,1],["id",-1,1],["ie",-1,1],["re",-1,1],["dre",30,1],["ase",-1,1],["iese",-1,1],["aste",-1,1],["iste",-1,1],["ii",-1,1],["ini",-1,1],["esqui",-1,1],["eixi",-1,1],["itzi",-1,1],["am",-1,1],["em",-1,1],["arem",42,1],["irem",42,1],["àrem",42,1],["írem",42,1],["àssem",42,1],["éssem",42,1],["iguem",42,1],["ïguem",42,1],["avem",42,1],["àvem",42,1],["ávem",42,1],["irìem",42,1],["íem",42,1],["aríem",55,1],["iríem",55,1],["assim",-1,1],["essim",-1,1],["issim",-1,1],["àssim",-1,1],["èssim",-1,1],["éssim",-1,1],["íssim",-1,1],["ïm",-1,1],["an",-1,1],["aban",66,1],["arian",66,1],["aran",66,1],["ieran",66,1],["iran",66,1],["ían",66,1],["arían",72,1],["erían",72,1],["irían",72,1],["en",-1,1],["ien",76,1],["arien",77,1],["irien",77,1],["aren",76,1],["eren",76,1],["iren",76,1],["àren",76,1],["ïren",76,1],["asen",76,1],["iesen",76,1],["assen",76,1],["essen",76,1],["issen",76,1],["éssen",76,1],["ïssen",76,1],["esquen",76,1],["isquen",76,1],["ïsquen",76,1],["aven",76,1],["ixen",76,1],["eixen",96,1],["ïxen",76,1],["ïen",76,1],["in",-1,1],["inin",100,1],["sin",100,1],["isin",102,1],["assin",102,1],["essin",102,1],["issin",102,1],["ïssin",102,1],["esquin",100,1],["eixin",100,1],["aron",-1,1],["ieron",-1,1],["arán",-1,1],["erán",-1,1],["irán",-1,1],["iïn",-1,1],["ado",-1,1],["ido",-1,1],["ando",-1,2],["iendo",-1,1],["io",-1,1],["ixo",-1,1],["eixo",121,1],["ïxo",-1,1],["itzo",-1,1],["ar",-1,1],["tzar",125,1],["er",-1,1],["eixer",127,1],["ir",-1,1],["ador",-1,1],["as",-1,1],["abas",131,1],["adas",131,1],["idas",131,1],["aras",131,1],["ieras",131,1],["ías",131,1],["arías",137,1],["erías",137,1],["irías",137,1],["ids",-1,1],["es",-1,1],["ades",142,1],["ides",142,1],["udes",142,1],["ïdes",142,1],["atges",142,1],["ies",142,1],["aries",148,1],["iries",148,1],["ares",142,1],["ires",142,1],["adores",142,1],["ïres",142,1],["ases",142,1],["ieses",142,1],["asses",142,1],["esses",142,1],["isses",142,1],["ïsses",142,1],["ques",142,1],["esques",161,1],["ïsques",161,1],["aves",142,1],["ixes",142,1],["eixes",165,1],["ïxes",142,1],["ïes",142,1],["abais",-1,1],["arais",-1,1],["ierais",-1,1],["íais",-1,1],["aríais",172,1],["eríais",172,1],["iríais",172,1],["aseis",-1,1],["ieseis",-1,1],["asteis",-1,1],["isteis",-1,1],["inis",-1,1],["sis",-1,1],["isis",181,1],["assis",181,1],["essis",181,1],["issis",181,1],["ïssis",181,1],["esquis",-1,1],["eixis",-1,1],["itzis",-1,1],["áis",-1,1],["aréis",-1,1],["eréis",-1,1],["iréis",-1,1],["ams",-1,1],["ados",-1,1],["idos",-1,1],["amos",-1,1],["ábamos",197,1],["áramos",197,1],["iéramos",197,1],["íamos",197,1],["aríamos",201,1],["eríamos",201,1],["iríamos",201,1],["aremos",-1,1],["eremos",-1,1],["iremos",-1,1],["ásemos",-1,1],["iésemos",-1,1],["imos",-1,1],["adors",-1,1],["ass",-1,1],["erass",212,1],["ess",-1,1],["ats",-1,1],["its",-1,1],["ents",-1,1],["às",-1,1],["aràs",218,1],["iràs",218,1],["arás",-1,1],["erás",-1,1],["irás",-1,1],["és",-1,1],["arés",224,1],["ís",-1,1],["iïs",-1,1],["at",-1,1],["it",-1,1],["ant",-1,1],["ent",-1,1],["int",-1,1],["ut",-1,1],["ït",-1,1],["au",-1,1],["erau",235,1],["ieu",-1,1],["ineu",-1,1],["areu",-1,1],["ireu",-1,1],["àreu",-1,1],["íreu",-1,1],["asseu",-1,1],["esseu",-1,1],["eresseu",244,1],["àsseu",-1,1],["ésseu",-1,1],["igueu",-1,1],["ïgueu",-1,1],["àveu",-1,1],["áveu",-1,1],["itzeu",-1,1],["ìeu",-1,1],["irìeu",253,1],["íeu",-1,1],["aríeu",255,1],["iríeu",255,1],["assiu",-1,1],["issiu",-1,1],["àssiu",-1,1],["èssiu",-1,1],["éssiu",-1,1],["íssiu",-1,1],["ïu",-1,1],["ix",-1,1],["eix",265,1],["ïx",-1,1],["itz",-1,1],["ià",-1,1],["arà",-1,1],["irà",-1,1],["itzà",-1,1],["ará",-1,1],["erá",-1,1],["irá",-1,1],["irè",-1,1],["aré",-1,1],["eré",-1,1],["iré",-1,1],["í",-1,1],["iï",-1,1],["ió",-1,1]],o=[["a",-1,1],["e",-1,1],["i",-1,1],["ïn",-1,1],["o",-1,1],["ir",-1,1],["s",-1,1],["is",6,1],["os",6,1],["ïs",6,1],["it",-1,1],["eu",-1,1],["iu",-1,1],["iqu",-1,2],["itz",-1,1],["à",-1,1],["á",-1,1],["é",-1,1],["ì",-1,1],["í",-1,1],["ï",-1,1],["ó",-1,1]],u=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,129,81,6,10],c=0,m=0;function l(){return m<=e.cursor}function d(){return c<=e.cursor}this.stem=function(){m=e.limit,c=e.limit,s=e.cursor,e.go_out_grouping(u,97,252)&&(e.cursor++,e.go_in_grouping(u,97,252))&&(e.cursor++,m=e.cursor,e.go_out_grouping(u,97,252))&&(e.cursor++,e.go_in_grouping(u,97,252))&&(e.cursor++,c=e.cursor),e.cursor=s,e.limit_backward=e.cursor,e.cursor=e.limit;var s=e.limit-e.cursor,s=(e.ket=e.cursor,0!=e.find_among_b(a)&&(e.bra=e.cursor,l())&&e.slice_del(),e.cursor=e.limit-s,e.limit-e.cursor),i=e.limit-e.cursor,i=((()=>{var s;if(e.ket=e.cursor,0!=(s=e.find_among_b(t))){switch(e.bra=e.cursor,s){case 1:if(!l())return;if(e.slice_del())break;return;case 2:if(!d())return;if(e.slice_del())break;return;case 3:if(!d())return;if(e.slice_from("log"))break;return;case 4:if(!d())return;if(e.slice_from("ic"))break;return;case 5:if(!l())return;if(e.slice_from("c"))break;return}return 1}})()||(e.cursor=e.limit-i,(()=>{var s;if(e.ket=e.cursor,0!=(s=e.find_among_b(n))){switch(e.bra=e.cursor,s){case 1:if(!l())return;if(e.slice_del())break;return;case 2:if(!d())return;if(e.slice_del())break;return}}})()),e.cursor=e.limit-s,e.limit-e.cursor),s=((()=>{var s;if(e.ket=e.cursor,0!=(s=e.find_among_b(o)))switch(e.bra=e.cursor,s){case 1:if(!l())return;if(e.slice_del())break;return;case 2:if(!l())return;if(e.slice_from("ic"))break}})(),e.cursor=e.limit-i,e.cursor=e.limit_backward,e.cursor);return(()=>{for(var s;;){var i=e.cursor;s:{switch(e.bra=e.cursor,s=e.find_among(r),e.ket=e.cursor,s){case 1:if(e.slice_from("a"))break;return;case 2:if(e.slice_from("e"))break;return;case 3:if(e.slice_from("i"))break;return;case 4:if(e.slice_from("o"))break;return;case 5:if(e.slice_from("u"))break;return;case 6:if(e.slice_from("."))break;return;case 7:if(e.cursor>=e.limit)break s;e.cursor++}continue}e.cursor=i;break}})(),e.cursor=s,!0},this.stemWord=function(s){return e.setCurrent(s),this.stem(),e.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/danish-stemmer.js b/sphinx/search/minified-js/danish-stemmer.js
index f3fc600033c..7a577f3eb26 100644
--- a/sphinx/search/minified-js/danish-stemmer.js
+++ b/sphinx/search/minified-js/danish-stemmer.js
@@ -1 +1 @@
-DanishStemmer=function(){var r=new BaseStemmer;var e=[["hed",-1,1],["ethed",0,1],["ered",-1,1],["e",-1,1],["erede",3,1],["ende",3,1],["erende",5,1],["ene",3,1],["erne",3,1],["ere",3,1],["en",-1,1],["heden",10,1],["eren",10,1],["er",-1,1],["heder",13,1],["erer",13,1],["s",-1,2],["heds",16,1],["es",16,1],["endes",18,1],["erendes",19,1],["enes",18,1],["ernes",18,1],["eres",18,1],["ens",16,1],["hedens",24,1],["erens",24,1],["ers",16,1],["ets",16,1],["erets",28,1],["et",-1,1],["eret",30,1]];var i=[["gd",-1,-1],["dt",-1,-1],["gt",-1,-1],["kt",-1,-1]];var s=[["ig",-1,1],["lig",0,1],["elig",1,1],["els",-1,1],["løst",-1,2]];var t=[119,223,119,1];var a=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128];var u=[239,254,42,3,0,0,0,0,0,0,0,0,0,0,0,0,16];var c=0;var l=0;var n="";function o(){l=r.limit;var e=r.cursor;{var i=r.cursor+3;if(i>r.limit){return false}r.cursor=i}c=r.cursor;r.cursor=e;r:while(true){var s=r.cursor;e:{if(!r.in_grouping(a,97,248)){break e}r.cursor=s;break r}r.cursor=s;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(a,97,248)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}l=r.cursor;r:{if(!(ls.limit||(s.cursor=i,l=s.cursor,s.cursor=e,s.go_out_grouping(o,97,248)&&(s.cursor++,s.go_in_grouping(o,97,248))&&(s.cursor++,m=s.cursor,l<=m||(m=l))),s.cursor=r,s.limit_backward=s.cursor,s.cursor=s.limit,s.limit-s.cursor),e=((()=>{var r;if(!(s.cursor{var r,i=s.limit-s.cursor;if(s.ket=s.cursor,(!s.eq_s_b("st")||(s.bra=s.cursor,!s.eq_s_b("ig"))||s.slice_del())&&(s.cursor=s.limit-i,!(s.cursor=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}r.cursor=u;var a=r.cursor;r:{r.bra=r.cursor;if(!r.eq_s("y")){r.cursor=a;break r}r.ket=r.cursor;if(!r.slice_from("Y")){return false}}while(true){var t=r.cursor;r:{e:while(true){var o=r.cursor;i:{if(!r.in_grouping(c,97,232)){break i}r.bra=r.cursor;u:{var f=r.cursor;s:{if(!r.eq_s("i")){break s}r.ket=r.cursor;if(!r.in_grouping(c,97,232)){break s}if(!r.slice_from("I")){return false}break u}r.cursor=f;if(!r.eq_s("y")){break i}r.ket=r.cursor;if(!r.slice_from("Y")){return false}}r.cursor=o;break e}r.cursor=o;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=t;break}return true}function _(){n=r.limit;l=r.limit;r:while(true){e:{if(!r.in_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}n=r.cursor;r:{if(!(n<3)){break r}n=3}r:while(true){e:{if(!r.in_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,232)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}l=r.cursor;return true}function m(){var e;while(true){var u=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("y")){return false}break;case 2:if(!r.slice_from("i")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=u;break}return true}function v(){if(!(n<=r.cursor)){return false}return true}function g(){if(!(l<=r.cursor)){return false}return true}function d(){var e=r.limit-r.cursor;if(r.find_among_b(u)==0){return false}r.cursor=r.limit-e;r.ket=r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;r.bra=r.cursor;if(!r.slice_del()){return false}return true}function h(){b=false;r.ket=r.cursor;if(!r.eq_s_b("e")){return false}r.bra=r.cursor;if(!v()){return false}var e=r.limit-r.cursor;if(!r.out_grouping_b(c,97,232)){return false}r.cursor=r.limit-e;if(!r.slice_del()){return false}b=true;if(!d()){return false}return true}function w(){if(!v()){return false}var e=r.limit-r.cursor;if(!r.out_grouping_b(c,97,232)){return false}r.cursor=r.limit-e;{var i=r.limit-r.cursor;r:{if(!r.eq_s_b("gem")){break r}return false}r.cursor=r.limit-i}if(!r.slice_del()){return false}if(!d()){return false}return true}function p(){var e;var i=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){break r}r.bra=r.cursor;switch(e){case 1:if(!v()){break r}if(!r.slice_from("heid")){return false}break;case 2:if(!w()){break r}break;case 3:if(!v()){break r}if(!r.out_grouping_b(f,97,232)){break r}if(!r.slice_del()){return false}break}}r.cursor=r.limit-i;var u=r.limit-r.cursor;h();r.cursor=r.limit-u;var l=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("heid")){break r}r.bra=r.cursor;if(!g()){break r}{var n=r.limit-r.cursor;e:{if(!r.eq_s_b("c")){break e}break r}r.cursor=r.limit-n}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("en")){break r}r.bra=r.cursor;if(!w()){break r}}r.cursor=r.limit-l;var k=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(a);if(e==0){break r}r.bra=r.cursor;switch(e){case 1:if(!g()){break r}if(!r.slice_del()){return false}e:{var _=r.limit-r.cursor;i:{r.ket=r.cursor;if(!r.eq_s_b("ig")){break i}r.bra=r.cursor;if(!g()){break i}{var m=r.limit-r.cursor;u:{if(!r.eq_s_b("e")){break u}break i}r.cursor=r.limit-m}if(!r.slice_del()){return false}break e}r.cursor=r.limit-_;if(!d()){break r}}break;case 2:if(!g()){break r}{var p=r.limit-r.cursor;e:{if(!r.eq_s_b("e")){break e}break r}r.cursor=r.limit-p}if(!r.slice_del()){return false}break;case 3:if(!g()){break r}if(!r.slice_del()){return false}if(!h()){break r}break;case 4:if(!g()){break r}if(!r.slice_del()){return false}break;case 5:if(!g()){break r}if(!b){break r}if(!r.slice_del()){return false}break}}r.cursor=r.limit-k;var q=r.limit-r.cursor;r:{if(!r.out_grouping_b(o,73,232)){break r}var y=r.limit-r.cursor;if(r.find_among_b(t)==0){break r}if(!r.out_grouping_b(c,97,232)){break r}r.cursor=r.limit-y;r.ket=r.cursor;if(r.cursor<=r.limit_backward){break r}r.cursor--;r.bra=r.cursor;if(!r.slice_del()){return false}}r.cursor=r.limit-q;return true}this.stem=function(){var e=r.cursor;k();r.cursor=e;var i=r.cursor;_();r.cursor=i;r.limit_backward=r.cursor;r.cursor=r.limit;p();r.cursor=r.limit_backward;var u=r.cursor;m();r.cursor=u;return true};this["stemWord"]=function(e){r.setCurrent(e);this.stem();return r.getCurrent()}};
\ No newline at end of file
+var DutchStemmer=function(){var o=new BaseStemmer,a=[["a",-1,1],["e",-1,2],["o",-1,1],["u",-1,1],["à",-1,1],["á",-1,1],["â",-1,1],["ä",-1,1],["è",-1,2],["é",-1,2],["ê",-1,2],["eë",-1,3],["ië",-1,4],["ò",-1,1],["ó",-1,1],["ô",-1,1],["ö",-1,1],["ù",-1,1],["ú",-1,1],["û",-1,1],["ü",-1,1]],t=[["nde",-1,8],["en",-1,7],["s",-1,2],["'s",2,1],["es",2,4],["ies",4,3],["aus",2,6],["és",2,5]],e=[["de",-1,5],["ge",-1,2],["ische",-1,4],["je",-1,1],["lijke",-1,3],["le",-1,9],["ene",-1,10],["re",-1,8],["se",-1,7],["te",-1,6],["ieve",-1,11]],s=[["heid",-1,3],["fie",-1,7],["gie",-1,8],["atie",-1,1],["isme",-1,5],["ing",-1,5],["arij",-1,6],["erij",-1,5],["sel",-1,3],["rder",-1,4],["ster",-1,3],["iteit",-1,2],["dst",-1,10],["tst",-1,9]],c=[["end",-1,9],["atief",-1,2],["erig",-1,9],["achtig",-1,3],["ioneel",-1,1],["baar",-1,3],["laar",-1,5],["naar",-1,4],["raar",-1,6],["eriger",-1,9],["achtiger",-1,3],["lijker",-1,8],["tant",-1,7],["erigst",-1,9],["achtigst",-1,3],["lijkst",-1,8]],u=[["ig",-1,1],["iger",-1,1],["igst",-1,1]],f=[["ft",-1,2],["kt",-1,1],["pt",-1,3]],n=[["bb",-1,1],["cc",-1,2],["dd",-1,3],["ff",-1,4],["gg",-1,5],["hh",-1,6],["jj",-1,7],["kk",-1,8],["ll",-1,9],["mm",-1,10],["nn",-1,11],["pp",-1,12],["qq",-1,13],["rr",-1,14],["ss",-1,15],["tt",-1,16],["v",-1,4],["vv",16,17],["ww",-1,18],["xx",-1,19],["z",-1,15],["zz",20,20]],l=[["d",-1,1],["t",-1,2]],_=[["",-1,-1],["eft",0,1],["vaa",0,1],["val",0,1],["vali",3,-1],["vare",0,1]],m=[["ë",-1,1],["ï",-1,2]],b=[["ë",-1,1],["ï",-1,2]],k=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,120],d=[1,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,11,120,46,15],g=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,139,127,46,15],v=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,139,127,46,15],q=[17,65,208,1,0,0,0,0,0,0,0,0,0,0,0,128,139,127,46,15],w=!1,h=!1,p=0,j=0,z="";function x(){return j<=o.cursor}function C(){return p<=o.cursor}function S(){var r=o.limit-o.cursor,i=o.limit-o.cursor;return(o.in_grouping_b(v,97,252)||(o.cursor=o.limit-i,o.eq_s_b("ij")))&&(o.cursor=o.limit-r,1)}function B(){var r=o.limit-o.cursor,i=o.limit-o.cursor;return!o.eq_s_b("ij")&&(o.cursor=o.limit-i,o.out_grouping_b(v,97,252))&&(o.cursor=o.limit-r,1)}function D(){var r,i=o.limit-o.cursor;r:if(o.out_grouping_b(q,97,252)&&(o.ket=o.cursor,0!=(r=o.find_among_b(a))))switch(o.bra=o.cursor,r){case 1:var e=o.limit-o.cursor,s=o.limit-o.cursor;if(o.out_grouping_b(g,97,252)||(o.cursor=o.limit-s,!(o.cursor>o.limit_backward))){if(o.cursor=o.limit-e,""==(z=o.slice_to()))return;s=o.cursor;o.insert(o.cursor,o.cursor,z),o.cursor=s}break;case 2:var c=o.limit-o.cursor,e=o.limit-o.cursor;if(o.out_grouping_b(g,97,252)||(o.cursor=o.limit-e,!(o.cursor>o.limit_backward))){var u=o.limit-o.cursor;i:{var t=o.limit-o.cursor;if(!o.in_grouping_b(d,97,252)){if(o.cursor=o.limit-t,!o.in_grouping_b(k,101,235))break i;if(o.cursor>o.limit_backward)break i}break r}o.cursor=o.limit-u;t=o.limit-o.cursor;if(o.cursor<=o.limit_backward||(o.cursor--,!o.in_grouping_b(d,97,252))||!o.out_grouping_b(g,97,252)){if(o.cursor=o.limit-t,o.cursor=o.limit-c,""==(z=o.slice_to()))return;t=o.cursor;o.insert(o.cursor,o.cursor,z),o.cursor=t}}break;case 3:if(o.slice_from("eëe"))break;return;case 4:if(o.slice_from("iee"))break;return}o.cursor=o.limit-i}function W(){var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(e))){switch(o.bra=o.cursor,r){case 1:r:{var i=o.limit-o.cursor;if(o.eq_s_b("'t")){if(o.bra=o.cursor,o.slice_del())break r;return}if(o.cursor=o.limit-i,o.eq_s_b("et")&&(o.bra=o.cursor,x())&&B()){if(o.slice_del())break r;return}if(o.cursor=o.limit-i,o.eq_s_b("rnt")){if(o.bra=o.cursor,o.slice_from("rn"))break r;return}if(o.cursor=o.limit-i,o.eq_s_b("t")&&(o.bra=o.cursor,x())&&(()=>{var r,i=o.limit-o.cursor;return!(o.cursor<=o.limit_backward)&&(o.cursor--,r=o.limit-o.cursor,o.in_grouping_b(v,97,252)||(o.cursor=o.limit-r,o.eq_s_b("ij")))?(o.cursor=o.limit-i,1):void 0})()){if(o.slice_del())break r;return}if(o.cursor=o.limit-i,o.eq_s_b("ink")){if(o.bra=o.cursor,o.slice_from("ing"))break r;return}if(o.cursor=o.limit-i,o.eq_s_b("mp")){if(o.bra=o.cursor,o.slice_from("m"))break r;return}if(o.cursor=o.limit-i,o.eq_s_b("'")&&(o.bra=o.cursor,x())){if(o.slice_del())break r;return}if(o.cursor=o.limit-i,o.bra=o.cursor,!x())return;if(!B())return;if(!o.slice_del())return}break;case 2:if(!x())return;if(o.slice_from("g"))break;return;case 3:if(!x())return;if(o.slice_from("lijk"))break;return;case 4:if(!x())return;if(o.slice_from("isch"))break;return;case 5:if(!x())return;if(!B())return;if(o.slice_del())break;return;case 6:if(!x())return;if(o.slice_from("t"))break;return;case 7:if(!x())return;if(o.slice_from("s"))break;return;case 8:if(!x())return;if(o.slice_from("r"))break;return;case 9:if(!x())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"l"),D();break;case 10:if(!x())return;if(!B())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"en"),D();break;case 11:if(!x())return;if(!B())return;if(o.slice_from("ief"))break;return}return 1}}function y(){var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(l))&&(o.bra=o.cursor,x())&&B()){switch(r){case 1:var i=o.limit-o.cursor;if(o.eq_s_b("n")&&x())return;o.cursor=o.limit-i;r:{var e=o.limit-o.cursor;if(o.eq_s_b("in")&&!(o.cursor>o.limit_backward)){if(o.slice_from("n"))break r;return}if(o.cursor=o.limit-e,!o.slice_del())return}break;case 2:i=o.limit-o.cursor;if(o.eq_s_b("h")&&x())return;o.cursor=o.limit-i;i=o.limit-o.cursor;if(o.eq_s_b("en")&&!(o.cursor>o.limit_backward))return;if(o.cursor=o.limit-i,o.slice_del())break;return}return 1}}function A(){j=o.limit,p=o.limit;for(var r=o.cursor;o.out_grouping(v,97,252););for(var i=1;;){var e=o.cursor,s=o.cursor;if(!o.eq_s("ij")&&(o.cursor=s,!o.in_grouping(v,97,252))){o.cursor=e;break}i--}if(!(0{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(t))){switch(o.bra=o.cursor,r){case 1:if(o.slice_del())break;return;case 2:if(!x())return;var i=o.limit-o.cursor;if(o.eq_s_b("t")&&x())return;if(o.cursor=o.limit-i,!B())return;if(o.slice_del())break;return;case 3:if(!x())return;if(o.slice_from("ie"))break;return;case 4:r:{var e=o.limit-o.cursor,s=o.limit-o.cursor;if(o.eq_s_b("ar")&&x()&&B()){if(o.cursor=o.limit-s,!o.slice_del())return;D()}else{o.cursor=o.limit-e;s=o.limit-o.cursor;if(o.eq_s_b("er")&&x()&&B()){if(o.cursor=o.limit-s,o.slice_del())break r;return}if(o.cursor=o.limit-e,!x())return;if(!B())return;if(!o.slice_from("e"))return}}break;case 5:if(!x())return;if(o.slice_from("é"))break;return;case 6:if(!x())return;if(!S())return;if(o.slice_from("au"))break;return;case 7:r:{var c=o.limit-o.cursor;if(o.eq_s_b("hed")&&x()){if(o.bra=o.cursor,o.slice_from("heid"))break r;return}if(o.cursor=o.limit-c,o.eq_s_b("nd")){if(o.slice_del())break r;return}if(o.cursor=o.limit-c,o.eq_s_b("d")&&x()&&B()){if(o.bra=o.cursor,o.slice_del())break r;return}o.cursor=o.limit-c;var u=o.limit-o.cursor;if((o.eq_s_b("i")||(o.cursor=o.limit-u,o.eq_s_b("j")))&&S()){if(o.slice_del())break r;return}if(o.cursor=o.limit-c,!x())return;if(!B())return;if(!o.slice_del())return;D()}break;case 8:if(o.slice_from("nd"))break;return}return 1}})()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor),r=(W()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor),r=((()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(s))){switch(o.bra=o.cursor,r){case 1:if(!x())return;if(o.slice_from("eer"))break;return;case 2:if(!x())return;if(!o.slice_del())return;D();break;case 3:if(!x())return;if(o.slice_del())break;return;case 4:if(o.slice_from("r"))break;return;case 5:r:{var i=o.limit-o.cursor;if(o.eq_s_b("ild")){if(o.slice_from("er"))break r;return}if(o.cursor=o.limit-i,!x())return;if(!o.slice_del())return;D()}break;case 6:if(!x())return;if(!B())return;if(o.slice_from("aar"))break;return;case 7:if(!C())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"f"),D();break;case 8:if(!C())return;if(!o.slice_del())return;o.insert(o.cursor,o.cursor,"g"),D();break;case 9:if(!x())return;if(!B())return;if(o.slice_from("t"))break;return;case 10:if(!x())return;if(!B())return;if(o.slice_from("d"))break;return}return 1}})()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor),r=((()=>{r:{var r=o.limit-o.cursor;i:if(o.ket=o.cursor,0!=(i=o.find_among_b(c))){switch(o.bra=o.cursor,i){case 1:if(!x())break i;if(o.slice_from("ie"))break;return;case 2:if(!x())break i;if(o.slice_from("eer"))break;return;case 3:if(!x())break i;if(o.slice_del())break;return;case 4:if(!x())break i;if(!S())break i;if(o.slice_from("n"))break;return;case 5:if(!x())break i;if(!S())break i;if(o.slice_from("l"))break;return;case 6:if(!x())break i;if(!S())break i;if(o.slice_from("r"))break;return;case 7:if(!x())break i;if(o.slice_from("teer"))break;return;case 8:if(!x())break i;if(o.slice_from("lijk"))break;return;case 9:if(!x())break i;if(!B())break i;if(!o.slice_del())return;D()}break r}if(o.cursor=o.limit-r,o.ket=o.cursor,0==o.find_among_b(u))return;if(o.bra=o.cursor,!x())return;var i=o.limit-o.cursor;if(o.eq_s_b("inn")&&!(o.cursor>o.limit_backward))return;if(o.cursor=o.limit-i,!B())return;if(!o.slice_del())return;D()}return 1})()&&(h=!0),o.cursor=o.limit-r,o.cursor=o.limit_backward,w=!1,o.cursor),i=o.cursor,i=((()=>{if(o.bra=o.cursor,o.eq_s("ge")){o.ket=o.cursor;var r=o.cursor,i=o.cursor+3;if(!(i>o.limit)){o.cursor=i,o.cursor=r;for(var i=o.cursor;;){var e=o.cursor,s=o.cursor;if(o.eq_s("ij")||(o.cursor=s,o.in_grouping(v,97,252)))break;if(o.cursor=e,o.cursor>=o.limit)return;o.cursor++}for(;;){var c=o.cursor,u=o.cursor;if(!o.eq_s("ij")&&(o.cursor=u,!o.in_grouping(v,97,252))){o.cursor=c;break}}if(o.cursor{if(!(o.cursor>=o.limit)){for(o.cursor++;;){if(o.bra=o.cursor,o.eq_s("ge")){o.ket=o.cursor;break}if(o.cursor>=o.limit)return;o.cursor++}var r=o.cursor,i=o.cursor+3;if(!(i>o.limit)){o.cursor=i,o.cursor=r;for(var i=o.cursor;;){var e=o.cursor,s=o.cursor;if(o.eq_s("ij")||(o.cursor=s,o.in_grouping(v,97,252)))break;if(o.cursor=e,o.cursor>=o.limit)return;o.cursor++}for(;;){var c=o.cursor,u=o.cursor;if(!o.eq_s("ij")&&(o.cursor=u,!o.in_grouping(v,97,252))){o.cursor=c;break}}if(o.cursor{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(f))){switch(o.bra=o.cursor,r){case 1:if(o.slice_from("k"))break;return;case 2:if(o.slice_from("f"))break;return;case 3:if(o.slice_from("p"))break;return}return 1}})()&&(h=!0),o.cursor=o.limit-r,o.limit-o.cursor);return h&&(()=>{var r;if(o.ket=o.cursor,0!=(r=o.find_among_b(n))){switch(o.bra=o.cursor,r){case 1:if(o.slice_from("b"))break;return;case 2:if(o.slice_from("c"))break;return;case 3:if(o.slice_from("d"))break;return;case 4:if(o.slice_from("f"))break;return;case 5:if(o.slice_from("g"))break;return;case 6:if(o.slice_from("h"))break;return;case 7:if(o.slice_from("j"))break;return;case 8:if(o.slice_from("k"))break;return;case 9:if(o.slice_from("l"))break;return;case 10:if(o.slice_from("m"))break;return;case 11:var i=o.limit-o.cursor;if(o.eq_s_b("i")&&!(o.cursor>o.limit_backward))return;if(o.cursor=o.limit-i,o.slice_from("n"))break;return;case 12:if(o.slice_from("p"))break;return;case 13:if(o.slice_from("q"))break;return;case 14:if(o.slice_from("r"))break;return;case 15:if(o.slice_from("s"))break;return;case 16:if(o.slice_from("t"))break;return;case 17:if(o.slice_from("v"))break;return;case 18:if(o.slice_from("w"))break;return;case 19:if(o.slice_from("x"))break;return;case 20:if(o.slice_from("z"))break;return}}})(),o.cursor=o.limit-i,o.cursor=o.limit_backward,!0},this.stemWord=function(r){return o.setCurrent(r),this.stem(),o.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/dutch_porter-stemmer.js b/sphinx/search/minified-js/dutch_porter-stemmer.js
new file mode 100644
index 00000000000..32f195914c2
--- /dev/null
+++ b/sphinx/search/minified-js/dutch_porter-stemmer.js
@@ -0,0 +1 @@
+var DutchPorterStemmer=function(){var t=new BaseStemmer,a=[["",-1,6],["á",0,1],["ä",0,1],["é",0,2],["ë",0,2],["í",0,3],["ï",0,3],["ó",0,4],["ö",0,4],["ú",0,5],["ü",0,5]],s=[["",-1,3],["I",0,2],["Y",0,1]],i=[["dd",-1,-1],["kk",-1,-1],["tt",-1,-1]],c=[["ene",-1,2],["se",-1,3],["en",-1,2],["heden",2,1],["s",-1,3]],n=[["end",-1,1],["ig",-1,2],["ing",-1,1],["lijk",-1,3],["baar",-1,4],["bar",-1,5]],_=[["aa",-1,-1],["ee",-1,-1],["oo",-1,-1],["uu",-1,-1]],l=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],f=[1,0,0,17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],b=[17,67,16,1,0,0,0,0,0,0,0,0,0,0,0,0,128],u=0,o=0,m=0,k=!1;function g(){return m<=t.cursor}function d(){return o<=t.cursor}function v(){var r=t.limit-t.cursor;return 0!=t.find_among_b(i)&&(t.cursor=t.limit-r,t.ket=t.cursor,!(t.cursor<=t.limit_backward||(t.cursor--,t.bra=t.cursor,!t.slice_del())))}function h(){var r;if((k=!1,t.ket=t.cursor,t.eq_s_b("e"))&&(t.bra=t.cursor,g()))return r=t.limit-t.cursor,t.out_grouping_b(l,97,232)?(t.cursor=t.limit-r,t.slice_del()&&(k=!0,!!v())):void 0}function p(){if(g()){var r=t.limit-t.cursor;if(t.out_grouping_b(l,97,232))return t.cursor=t.limit-r,r=t.limit-t.cursor,t.eq_s_b("gem")?void 0:(t.cursor=t.limit-r,t.slice_del()&&!!v())}}this.stem=function(){var r,i=t.cursor,i=((()=>{for(var r,i=t.cursor;;){var e=t.cursor;r:{switch(t.bra=t.cursor,r=t.find_among(a),t.ket=t.cursor,r){case 1:if(t.slice_from("a"))break;return;case 2:if(t.slice_from("e"))break;return;case 3:if(t.slice_from("i"))break;return;case 4:if(t.slice_from("o"))break;return;case 5:if(t.slice_from("u"))break;return;case 6:if(t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=e;break}if(t.cursor=i,i=t.cursor,t.bra=t.cursor,t.eq_s("y")){if(t.ket=t.cursor,!t.slice_from("Y"))return}else t.cursor=i;for(;;){var s=t.cursor;if(!t.go_out_grouping(l,97,232)){t.cursor=s;break}t.cursor++;var u=t.cursor;r:{t.bra=t.cursor;var o=t.cursor;if(t.eq_s("i")){t.ket=t.cursor;var c=t.cursor;if(t.in_grouping(l,97,232)&&!t.slice_from("I"))return;t.cursor=c}else{if(t.cursor=o,!t.eq_s("y")){t.cursor=u;break r}if(t.ket=t.cursor,!t.slice_from("Y"))return}}}})(),t.cursor=i,t.cursor),e=(m=t.limit,o=t.limit,r=t.cursor,(e=t.cursor+3)>t.limit||(t.cursor=e,u=t.cursor,t.cursor=r,t.go_out_grouping(l,97,232)&&(t.cursor++,t.go_in_grouping(l,97,232))&&(t.cursor++,m=t.cursor,u<=m||(m=u),t.go_out_grouping(l,97,232))&&(t.cursor++,t.go_in_grouping(l,97,232))&&(t.cursor++,o=t.cursor)),t.cursor=i,t.limit_backward=t.cursor,t.cursor=t.limit,(()=>{var r,i=t.limit-t.cursor;r:if(t.ket=t.cursor,0!=(r=t.find_among_b(c)))switch(t.bra=t.cursor,r){case 1:if(!g())break r;if(t.slice_from("heid"))break;return;case 2:p();break;case 3:if(!g())break r;if(!t.out_grouping_b(b,97,232))break r;if(t.slice_del())break;return}if(t.cursor=t.limit-i,i=t.limit-t.cursor,h(),t.cursor=t.limit-i,i=t.limit-t.cursor,t.ket=t.cursor,t.eq_s_b("heid")&&(t.bra=t.cursor,d())){var e=t.limit-t.cursor;if(!t.eq_s_b("c")){if(t.cursor=t.limit-e,!t.slice_del())return;t.ket=t.cursor,t.eq_s_b("en")&&(t.bra=t.cursor,p())}}t.cursor=t.limit-i,e=t.limit-t.cursor;r:if(t.ket=t.cursor,0!=(r=t.find_among_b(n)))switch(t.bra=t.cursor,r){case 1:if(d()){if(!t.slice_del())return;i:{var s=t.limit-t.cursor;if(t.ket=t.cursor,t.eq_s_b("ig")&&(t.bra=t.cursor,d())){var u=t.limit-t.cursor;if(!t.eq_s_b("e")){if(t.cursor=t.limit-u,t.slice_del())break i;return}}if(t.cursor=t.limit-s,!v())break r}}break;case 2:if(!d())break r;var o=t.limit-t.cursor;if(t.eq_s_b("e"))break r;if(t.cursor=t.limit-o,t.slice_del())break;return;case 3:if(d()){if(!t.slice_del())return;h()}break;case 4:if(!d())break r;if(t.slice_del())break;return;case 5:if(!d())break r;if(!k)break r;if(t.slice_del())break;return}if(t.cursor=t.limit-e,i=t.limit-t.cursor,t.out_grouping_b(f,73,232)){e=t.limit-t.cursor;if(0!=t.find_among_b(_)&&t.out_grouping_b(l,97,232)&&(t.cursor=t.limit-e,t.ket=t.cursor,!(t.cursor<=t.limit_backward||(t.cursor--,t.bra=t.cursor,t.slice_del()))))return}t.cursor=t.limit-i})(),t.cursor=t.limit_backward,t.cursor);return(()=>{for(var r;;){var i=t.cursor;r:{switch(t.bra=t.cursor,r=t.find_among(s),t.ket=t.cursor,r){case 1:if(t.slice_from("y"))break;return;case 2:if(t.slice_from("i"))break;return;case 3:if(t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=i;break}})(),t.cursor=e,!0},this.stemWord=function(r){return t.setCurrent(r),this.stem(),t.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/english-stemmer.js b/sphinx/search/minified-js/english-stemmer.js
new file mode 100644
index 00000000000..e005f991e6a
--- /dev/null
+++ b/sphinx/search/minified-js/english-stemmer.js
@@ -0,0 +1 @@
+var EnglishStemmer=function(){var a=new BaseStemmer,c=[["arsen",-1,-1],["commun",-1,-1],["emerg",-1,-1],["gener",-1,-1],["later",-1,-1],["organ",-1,-1],["past",-1,-1],["univers",-1,-1]],o=[["'",-1,1],["'s'",0,1],["'s",-1,1]],u=[["ied",-1,2],["s",-1,3],["ies",1,2],["sses",1,1],["ss",1,-1],["us",1,-1]],t=[["succ",-1,1],["proc",-1,1],["exc",-1,1]],l=[["even",-1,2],["cann",-1,2],["inn",-1,2],["earr",-1,2],["herr",-1,2],["out",-1,2],["y",-1,1]],n=[["",-1,-1],["ed",0,2],["eed",1,1],["ing",0,3],["edly",0,2],["eedly",4,1],["ingly",0,2]],f=[["",-1,3],["bb",0,2],["dd",0,2],["ff",0,2],["gg",0,2],["bl",0,1],["mm",0,2],["nn",0,2],["pp",0,2],["rr",0,2],["at",0,1],["tt",0,2],["iz",0,1]],_=[["anci",-1,3],["enci",-1,2],["ogi",-1,14],["li",-1,16],["bli",3,12],["abli",4,4],["alli",3,8],["fulli",3,9],["lessli",3,15],["ousli",3,10],["entli",3,5],["aliti",-1,8],["biliti",-1,12],["iviti",-1,11],["tional",-1,1],["ational",14,7],["alism",-1,8],["ation",-1,7],["ization",17,6],["izer",-1,6],["ator",-1,7],["iveness",-1,11],["fulness",-1,9],["ousness",-1,10],["ogist",-1,13]],m=[["icate",-1,4],["ative",-1,6],["alize",-1,3],["iciti",-1,4],["ical",-1,4],["tional",-1,1],["ational",5,2],["ful",-1,5],["ness",-1,5]],b=[["ic",-1,1],["ance",-1,1],["ence",-1,1],["able",-1,1],["ible",-1,1],["ate",-1,1],["ive",-1,1],["ize",-1,1],["iti",-1,1],["al",-1,1],["ism",-1,1],["ion",-1,2],["er",-1,1],["ous",-1,1],["ant",-1,1],["ent",-1,1],["ment",15,1],["ement",16,1]],k=[["e",-1,1],["l",-1,2]],g=[["andes",-1,-1],["atlas",-1,-1],["bias",-1,-1],["cosmos",-1,-1],["early",-1,5],["gently",-1,3],["howe",-1,-1],["idly",-1,2],["news",-1,-1],["only",-1,6],["singly",-1,7],["skies",-1,1],["sky",-1,-1],["ugly",-1,4]],d=[17,64],v=[17,65,16,1],i=[1,17,65,208,1],w=[55,141,2],p=!1,y=0,h=0;function q(){var r=a.limit-a.cursor;return!!(a.out_grouping_b(i,89,121)&&a.in_grouping_b(v,97,121)&&a.out_grouping_b(v,97,121)||(a.cursor=a.limit-r,a.out_grouping_b(v,97,121)&&a.in_grouping_b(v,97,121)&&!(a.cursor>a.limit_backward))||(a.cursor=a.limit-r,a.eq_s_b("past")))}function z(){return h<=a.cursor}function Y(){return y<=a.cursor}this.stem=function(){var r=a.cursor;if(!(()=>{var r;if(a.bra=a.cursor,0!=(r=a.find_among(g))&&(a.ket=a.cursor,!(a.cursora.limit)a.cursor=i;else{a.cursor=e,a.cursor=r,(()=>{p=!1;var r=a.cursor;if(a.bra=a.cursor,!a.eq_s("'")||(a.ket=a.cursor,a.slice_del())){a.cursor=r;r=a.cursor;if(a.bra=a.cursor,a.eq_s("y")){if(a.ket=a.cursor,!a.slice_from("Y"))return;p=!0}a.cursor=r;for(r=a.cursor;;){var i=a.cursor;r:{for(;;){var e=a.cursor;if(a.in_grouping(v,97,121)&&(a.bra=a.cursor,a.eq_s("y"))){a.ket=a.cursor,a.cursor=e;break}if(a.cursor=e,a.cursor>=a.limit)break r;a.cursor++}if(!a.slice_from("Y"))return;p=!0;continue}a.cursor=i;break}a.cursor=r}})(),h=a.limit,y=a.limit;i=a.cursor;r:{var s=a.cursor;if(0==a.find_among(c)){if(a.cursor=s,!a.go_out_grouping(v,97,121))break r;if(a.cursor++,!a.go_in_grouping(v,97,121))break r;a.cursor++}h=a.cursor,a.go_out_grouping(v,97,121)&&(a.cursor++,a.go_in_grouping(v,97,121))&&(a.cursor++,y=a.cursor)}a.cursor=i,a.limit_backward=a.cursor,a.cursor=a.limit;var e=a.limit-a.cursor,r=((()=>{var r=a.limit-a.cursor;if(a.ket=a.cursor,0==a.find_among_b(o))a.cursor=a.limit-r;else if(a.bra=a.cursor,!a.slice_del())return;if(a.ket=a.cursor,0!=(r=a.find_among_b(u)))switch(a.bra=a.cursor,r){case 1:if(a.slice_from("ss"))break;return;case 2:r:{var i=a.limit-a.cursor,e=a.cursor-2;if(!(e{a.ket=a.cursor,o=a.find_among_b(n),a.bra=a.cursor;r:{var r=a.limit-a.cursor;i:{switch(o){case 1:var i=a.limit-a.cursor;e:{var e=a.limit-a.cursor;if(0==a.find_among_b(t)||a.cursor>a.limit_backward){if(a.cursor=a.limit-e,!z())break e;if(!a.slice_from("ee"))return}}a.cursor=a.limit-i;break;case 2:break i;case 3:if(0==(o=a.find_among_b(l)))break i;switch(o){case 1:var s=a.limit-a.cursor;if(!a.out_grouping_b(v,97,121))break i;if(a.cursor>a.limit_backward)break i;if(a.cursor=a.limit-s,a.bra=a.cursor,a.slice_from("ie"))break;return;case 2:if(a.cursor>a.limit_backward)break i}}break r}a.cursor=a.limit-r;var c=a.limit-a.cursor;if(!a.go_out_grouping_b(v,97,121))return;if(a.cursor--,a.cursor=a.limit-c,!a.slice_del())return;a.ket=a.cursor,a.bra=a.cursor;var o,c=a.limit-a.cursor;switch(o=a.find_among_b(f)){case 1:return a.slice_from("e");case 2:var u=a.limit-a.cursor;if(a.in_grouping_b(d,97,111)&&!(a.cursor>a.limit_backward))return;a.cursor=a.limit-u;break;case 3:return a.cursor!=h||(u=a.limit-a.cursor,q()&&(a.cursor=a.limit-u,a.slice_from("e")))}if(a.cursor=a.limit-c,a.ket=a.cursor,a.cursor<=a.limit_backward)return;if(a.cursor--,a.bra=a.cursor,!a.slice_del())return}})(),a.cursor=a.limit-r,a.limit-a.cursor),r=(a.ket=a.cursor,e=a.limit-a.cursor,(a.eq_s_b("y")||(a.cursor=a.limit-e,a.eq_s_b("Y")))&&(a.bra=a.cursor,a.out_grouping_b(v,97,121))&&a.cursor>a.limit_backward&&a.slice_from("i"),a.cursor=a.limit-i,a.limit-a.cursor),e=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(_))&&(a.bra=a.cursor,z()))switch(r){case 1:if(a.slice_from("tion"))break;return;case 2:if(a.slice_from("ence"))break;return;case 3:if(a.slice_from("ance"))break;return;case 4:if(a.slice_from("able"))break;return;case 5:if(a.slice_from("ent"))break;return;case 6:if(a.slice_from("ize"))break;return;case 7:if(a.slice_from("ate"))break;return;case 8:if(a.slice_from("al"))break;return;case 9:if(a.slice_from("ful"))break;return;case 10:if(a.slice_from("ous"))break;return;case 11:if(a.slice_from("ive"))break;return;case 12:if(a.slice_from("ble"))break;return;case 13:if(a.slice_from("og"))break;return;case 14:if(!a.eq_s_b("l"))return;if(a.slice_from("og"))break;return;case 15:if(a.slice_from("less"))break;return;case 16:if(!a.in_grouping_b(w,99,116))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.limit-a.cursor),i=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(m))&&(a.bra=a.cursor,z()))switch(r){case 1:if(a.slice_from("tion"))break;return;case 2:if(a.slice_from("ate"))break;return;case 3:if(a.slice_from("al"))break;return;case 4:if(a.slice_from("ic"))break;return;case 5:if(a.slice_del())break;return;case 6:if(!Y())return;if(a.slice_del())break}})(),a.cursor=a.limit-e,a.limit-a.cursor),r=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(b))&&(a.bra=a.cursor,Y()))switch(r){case 1:if(a.slice_del())break;return;case 2:var i=a.limit-a.cursor;if(!a.eq_s_b("s")&&(a.cursor=a.limit-i,!a.eq_s_b("t")))return;if(a.slice_del())break}})(),a.cursor=a.limit-i,a.limit-a.cursor),e=((()=>{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(k)))switch(a.bra=a.cursor,r){case 1:if(!Y()){if(!z())return;var i=a.limit-a.cursor;if(q())return;a.cursor=a.limit-i}if(a.slice_del())break;return;case 2:if(!Y())return;if(!a.eq_s_b("l"))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.cursor=a.limit_backward,a.cursor);(()=>{if(p)for(;;){var r=a.cursor;r:{for(;;){var i=a.cursor;if(a.bra=a.cursor,a.eq_s("Y")){a.ket=a.cursor,a.cursor=i;break}if(a.cursor=i,a.cursor>=a.limit)break r;a.cursor++}if(a.slice_from("y"))continue;return}a.cursor=r;break}})(),a.cursor=e}}return!0},this.stemWord=function(r){return a.setCurrent(r),this.stem(),a.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/esperanto-stemmer.js b/sphinx/search/minified-js/esperanto-stemmer.js
new file mode 100644
index 00000000000..44353e6a150
--- /dev/null
+++ b/sphinx/search/minified-js/esperanto-stemmer.js
@@ -0,0 +1 @@
+var EsperantoStemmer=function(){var o=new BaseStemmer,s=[["",-1,14],["-",0,13],["cx",0,1],["gx",0,2],["hx",0,3],["jx",0,4],["q",0,12],["sx",0,5],["ux",0,6],["w",0,12],["x",0,12],["y",0,12],["á",0,7],["é",0,8],["í",0,9],["ó",0,10],["ú",0,11]],i=[["as",-1,-1],["i",-1,-1],["is",1,-1],["os",-1,-1],["u",-1,-1],["us",4,-1]],u=[["ci",-1,-1],["gi",-1,-1],["hi",-1,-1],["li",-1,-1],["ili",3,-1],["ŝli",3,-1],["mi",-1,-1],["ni",-1,-1],["oni",7,-1],["ri",-1,-1],["si",-1,-1],["vi",-1,-1],["ivi",11,-1],["ĝi",-1,-1],["ŝi",-1,-1],["iŝi",14,-1],["malŝi",14,-1]],e=[["amb",-1,-1],["bald",-1,-1],["malbald",1,-1],["morg",-1,-1],["postmorg",3,-1],["adi",-1,-1],["hodi",-1,-1],["ank",-1,-1],["ĉirk",-1,-1],["tutĉirk",8,-1],["presk",-1,-1],["almen",-1,-1],["apen",-1,-1],["hier",-1,-1],["antaŭhier",13,-1],["malgr",-1,-1],["ankor",-1,-1],["kontr",-1,-1],["anstat",-1,-1],["kvaz",-1,-1]],c=[["aliu",-1,-1],["unu",-1,-1]],a=[["aha",-1,-1],["haha",0,-1],["haleluja",-1,-1],["hola",-1,-1],["hosana",-1,-1],["maltra",-1,-1],["hura",-1,-1],["ĥaĥa",-1,-1],["ekde",-1,-1],["elde",-1,-1],["disde",-1,-1],["ehe",-1,-1],["maltre",-1,-1],["dirlididi",-1,-1],["malpli",-1,-1],["malĉi",-1,-1],["malkaj",-1,-1],["amen",-1,-1],["tamen",17,-1],["oho",-1,-1],["maltro",-1,-1],["minus",-1,-1],["uhu",-1,-1],["muu",-1,-1]],t=[["tri",-1,-1],["du",-1,-1],["unu",-1,-1]],m=[["dek",-1,-1],["cent",-1,-1]],l=[["k",-1,-1],["kelk",0,-1],["nen",-1,-1],["t",-1,-1],["mult",3,-1],["samt",3,-1],["ĉ",-1,-1]],n=[["a",-1,-1],["e",-1,-1],["i",-1,-1],["j",-1,-1,r],["aj",3,-1],["oj",3,-1],["n",-1,-1,r],["an",6,-1],["en",6,-1],["jn",6,-1,r],["ajn",9,-1],["ojn",9,-1],["on",6,-1],["o",-1,-1],["as",-1,-1],["is",-1,-1],["os",-1,-1],["us",-1,-1],["u",-1,-1]],_=[17,65,16],b=[1,64,16],f=[255,3],k=!1;function r(){var r=o.limit-o.cursor;return!(!o.eq_s_b("-")&&(o.cursor=o.limit-r,!o.in_grouping_b(f,48,57)))}this.stem=function(){var r=o.cursor;if(!(()=>{var r;for(k=!1;;){var i=o.cursor;r:{switch(o.bra=o.cursor,r=o.find_among(s),o.ket=o.cursor,r){case 1:if(o.slice_from("ĉ"))break;return;case 2:if(o.slice_from("ĝ"))break;return;case 3:if(o.slice_from("ĥ"))break;return;case 4:if(o.slice_from("ĵ"))break;return;case 5:if(o.slice_from("ŝ"))break;return;case 6:if(o.slice_from("ŭ"))break;return;case 7:if(!o.slice_from("a"))return;k=!0;break;case 8:if(!o.slice_from("e"))return;k=!0;break;case 9:if(!o.slice_from("i"))return;k=!0;break;case 10:if(!o.slice_from("o"))return;k=!0;break;case 11:if(!o.slice_from("u"))return;k=!0;break;case 12:k=!0;break;case 13:k=!1;break;case 14:if(o.cursor>=o.limit)break r;o.cursor++}continue}o.cursor=i;break}return!k})())return!1;o.cursor=r;r=o.cursor,o.bra=o.cursor,o.eq_s("'")&&(o.ket=o.cursor,!o.eq_s("st")||0==o.find_among(i)||o.cursor{o.ket=o.cursor;var r=o.limit-o.cursor;return o.eq_s_b("n")||(o.cursor=o.limit-r),o.bra=o.cursor,0==o.find_among_b(u)||(r=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-r,!o.eq_s_b("-")))?void 0:!!o.slice_del()})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor,(()=>{if(o.ket=o.cursor,o.eq_s_b("'")){o.bra=o.cursor;r:{var r=o.limit-o.cursor;if(o.eq_s_b("l")&&!(o.cursor>o.limit_backward)){if(o.slice_from("a"))break r;return}if(o.cursor=o.limit-r,o.eq_s_b("un")&&!(o.cursor>o.limit_backward)){if(o.slice_from("u"))break r;return}if(o.cursor=o.limit-r,0!=o.find_among_b(e)){var i=o.limit-o.cursor;if(!(o.cursor>o.limit_backward)||(o.cursor=o.limit-i,o.eq_s_b("-"))){if(o.slice_from("aŭ"))break r;return}}if(o.cursor=o.limit-r,!o.slice_from("o"))return}}})(),o.cursor=o.limit-r,r=o.limit-o.cursor;if((()=>{o.ket=o.cursor,o.bra=o.cursor;var r=o.limit-o.cursor,i=o.limit-o.cursor,s=o.limit-o.cursor;if(o.eq_s_b("n")||(o.cursor=o.limit-s),o.bra=o.cursor,!o.eq_s_b("e")){o.cursor=o.limit-i;var s=o.limit-o.cursor,i=(o.eq_s_b("n")||(o.cursor=o.limit-s),o.limit-o.cursor);if(o.eq_s_b("j")||(o.cursor=o.limit-i),o.bra=o.cursor,!o.in_grouping_b(b,97,117))return}return!o.eq_s_b("i")||(s=o.limit-o.cursor,0==o.find_among_b(l)&&(o.cursor=o.limit-s),i=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-i,!o.eq_s_b("-")))?void 0:(o.cursor=o.limit-r,!!o.slice_del())})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;if((()=>{var r;return 0==o.find_among_b(a)||(r=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-r,!o.eq_s_b("-")))?void 0:1})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;if(0!=o.find_among_b(t)&&0!=o.find_among_b(m))return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;if((()=>{o.ket=o.cursor;var r=o.limit-o.cursor,r=(o.eq_s_b("n")||(o.cursor=o.limit-r),o.limit-o.cursor);return o.eq_s_b("j")||(o.cursor=o.limit-r),o.bra=o.cursor,0==o.find_among_b(c)||(r=o.limit-o.cursor,o.cursor>o.limit_backward&&(o.cursor=o.limit-r,!o.eq_s_b("-")))?void 0:!!o.slice_del()})())return!1;o.cursor=o.limit-r;r=o.limit-o.cursor;return!!(()=>{r:{var r=o.limit-o.cursor;i:{for(var i=2;0{var r;return o.ket=o.cursor,0!=o.find_among_b(n)&&(r=o.limit-o.cursor,o.eq_s_b("-")||(o.cursor=o.limit-r),o.bra=o.cursor,o.slice_del())?1:void 0})())&&(o.cursor=o.limit_backward,!0)},this.stemWord=function(r){return o.setCurrent(r),this.stem(),o.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/estonian-stemmer.js b/sphinx/search/minified-js/estonian-stemmer.js
new file mode 100644
index 00000000000..d27e90fcd3b
--- /dev/null
+++ b/sphinx/search/minified-js/estonian-stemmer.js
@@ -0,0 +1 @@
+var EstonianStemmer=function(){var t=new BaseStemmer,a=[["gi",-1,1],["ki",-1,2]],r=[["da",-1,3],["mata",-1,1],["b",-1,3],["ksid",-1,1],["nuksid",3,1],["me",-1,3],["sime",5,1],["ksime",6,1],["nuksime",7,1],["akse",-1,2],["dakse",9,1],["takse",9,1],["site",-1,1],["ksite",12,1],["nuksite",13,1],["n",-1,3],["sin",15,1],["ksin",16,1],["nuksin",17,1],["daks",-1,1],["taks",-1,1]],i=[["aa",-1,-1],["ee",-1,-1],["ii",-1,-1],["oo",-1,-1],["uu",-1,-1],["ää",-1,-1],["õõ",-1,-1],["öö",-1,-1],["üü",-1,-1]],s=[["i",-1,1]],o=[["lane",-1,1],["line",-1,3],["mine",-1,2],["lasse",-1,1],["lisse",-1,3],["misse",-1,2],["lasi",-1,1],["lisi",-1,3],["misi",-1,2],["last",-1,1],["list",-1,3],["mist",-1,2]],k=[["ga",-1,1],["ta",-1,1],["le",-1,1],["sse",-1,1],["l",-1,1],["s",-1,1],["ks",5,1],["t",-1,2],["lt",7,1],["st",7,1]],m=[["",-1,2],["las",0,1],["lis",0,1],["mis",0,1],["t",0,-1]],l=[["d",-1,4],["sid",0,2],["de",-1,4],["ikkude",2,1],["ike",-1,1],["ikke",-1,1],["te",-1,3]],c=[["va",-1,-1],["du",-1,-1],["nu",-1,-1],["tu",-1,-1]],n=[["kk",-1,1],["pp",-1,2],["tt",-1,3]],u=[["ma",-1,2],["mai",-1,1],["m",-1,1]],d=[["joob",-1,1],["jood",-1,1],["joodakse",1,1],["jooma",-1,1],["joomata",3,1],["joome",-1,1],["joon",-1,1],["joote",-1,1],["joovad",-1,1],["juua",-1,1],["juuakse",9,1],["jäi",-1,12],["jäid",11,12],["jäime",11,12],["jäin",11,12],["jäite",11,12],["jääb",-1,12],["jääd",-1,12],["jääda",17,12],["jäädakse",18,12],["jäädi",17,12],["jääks",-1,12],["jääksid",21,12],["jääksime",21,12],["jääksin",21,12],["jääksite",21,12],["jääma",-1,12],["jäämata",26,12],["jääme",-1,12],["jään",-1,12],["jääte",-1,12],["jäävad",-1,12],["jõi",-1,1],["jõid",32,1],["jõime",32,1],["jõin",32,1],["jõite",32,1],["keeb",-1,4],["keed",-1,4],["keedakse",38,4],["keeks",-1,4],["keeksid",40,4],["keeksime",40,4],["keeksin",40,4],["keeksite",40,4],["keema",-1,4],["keemata",45,4],["keeme",-1,4],["keen",-1,4],["kees",-1,4],["keeta",-1,4],["keete",-1,4],["keevad",-1,4],["käia",-1,8],["käiakse",53,8],["käib",-1,8],["käid",-1,8],["käidi",56,8],["käiks",-1,8],["käiksid",58,8],["käiksime",58,8],["käiksin",58,8],["käiksite",58,8],["käima",-1,8],["käimata",63,8],["käime",-1,8],["käin",-1,8],["käis",-1,8],["käite",-1,8],["käivad",-1,8],["laob",-1,16],["laod",-1,16],["laoks",-1,16],["laoksid",72,16],["laoksime",72,16],["laoksin",72,16],["laoksite",72,16],["laome",-1,16],["laon",-1,16],["laote",-1,16],["laovad",-1,16],["loeb",-1,14],["loed",-1,14],["loeks",-1,14],["loeksid",83,14],["loeksime",83,14],["loeksin",83,14],["loeksite",83,14],["loeme",-1,14],["loen",-1,14],["loete",-1,14],["loevad",-1,14],["loob",-1,7],["lood",-1,7],["loodi",93,7],["looks",-1,7],["looksid",95,7],["looksime",95,7],["looksin",95,7],["looksite",95,7],["looma",-1,7],["loomata",100,7],["loome",-1,7],["loon",-1,7],["loote",-1,7],["loovad",-1,7],["luua",-1,7],["luuakse",106,7],["lõi",-1,6],["lõid",108,6],["lõime",108,6],["lõin",108,6],["lõite",108,6],["lööb",-1,5],["lööd",-1,5],["löödakse",114,5],["löödi",114,5],["lööks",-1,5],["lööksid",117,5],["lööksime",117,5],["lööksin",117,5],["lööksite",117,5],["lööma",-1,5],["löömata",122,5],["lööme",-1,5],["löön",-1,5],["lööte",-1,5],["löövad",-1,5],["lüüa",-1,5],["lüüakse",128,5],["müüa",-1,13],["müüakse",130,13],["müüb",-1,13],["müüd",-1,13],["müüdi",133,13],["müüks",-1,13],["müüksid",135,13],["müüksime",135,13],["müüksin",135,13],["müüksite",135,13],["müüma",-1,13],["müümata",140,13],["müüme",-1,13],["müün",-1,13],["müüs",-1,13],["müüte",-1,13],["müüvad",-1,13],["näeb",-1,18],["näed",-1,18],["näeks",-1,18],["näeksid",149,18],["näeksime",149,18],["näeksin",149,18],["näeksite",149,18],["näeme",-1,18],["näen",-1,18],["näete",-1,18],["näevad",-1,18],["nägema",-1,18],["nägemata",158,18],["näha",-1,18],["nähakse",160,18],["nähti",-1,18],["põeb",-1,15],["põed",-1,15],["põeks",-1,15],["põeksid",165,15],["põeksime",165,15],["põeksin",165,15],["põeksite",165,15],["põeme",-1,15],["põen",-1,15],["põete",-1,15],["põevad",-1,15],["saab",-1,2],["saad",-1,2],["saada",175,2],["saadakse",176,2],["saadi",175,2],["saaks",-1,2],["saaksid",179,2],["saaksime",179,2],["saaksin",179,2],["saaksite",179,2],["saama",-1,2],["saamata",184,2],["saame",-1,2],["saan",-1,2],["saate",-1,2],["saavad",-1,2],["sai",-1,2],["said",190,2],["saime",190,2],["sain",190,2],["saite",190,2],["sõi",-1,9],["sõid",195,9],["sõime",195,9],["sõin",195,9],["sõite",195,9],["sööb",-1,9],["sööd",-1,9],["söödakse",201,9],["söödi",201,9],["sööks",-1,9],["sööksid",204,9],["sööksime",204,9],["sööksin",204,9],["sööksite",204,9],["sööma",-1,9],["söömata",209,9],["sööme",-1,9],["söön",-1,9],["sööte",-1,9],["söövad",-1,9],["süüa",-1,9],["süüakse",215,9],["teeb",-1,17],["teed",-1,17],["teeks",-1,17],["teeksid",219,17],["teeksime",219,17],["teeksin",219,17],["teeksite",219,17],["teeme",-1,17],["teen",-1,17],["teete",-1,17],["teevad",-1,17],["tegema",-1,17],["tegemata",228,17],["teha",-1,17],["tehakse",230,17],["tehti",-1,17],["toob",-1,10],["tood",-1,10],["toodi",234,10],["tooks",-1,10],["tooksid",236,10],["tooksime",236,10],["tooksin",236,10],["tooksite",236,10],["tooma",-1,10],["toomata",241,10],["toome",-1,10],["toon",-1,10],["toote",-1,10],["toovad",-1,10],["tuua",-1,10],["tuuakse",247,10],["tõi",-1,10],["tõid",249,10],["tõime",249,10],["tõin",249,10],["tõite",249,10],["viia",-1,3],["viiakse",254,3],["viib",-1,3],["viid",-1,3],["viidi",257,3],["viiks",-1,3],["viiksid",259,3],["viiksime",259,3],["viiksin",259,3],["viiksite",259,3],["viima",-1,3],["viimata",264,3],["viime",-1,3],["viin",-1,3],["viisime",-1,3],["viisin",-1,3],["viisite",-1,3],["viite",-1,3],["viivad",-1,3],["võib",-1,11],["võid",-1,11],["võida",274,11],["võidakse",275,11],["võidi",274,11],["võiks",-1,11],["võiksid",278,11],["võiksime",278,11],["võiksin",278,11],["võiksite",278,11],["võima",-1,11],["võimata",283,11],["võime",-1,11],["võin",-1,11],["võis",-1,11],["võite",-1,11],["võivad",-1,11]],b=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,48,8],_=[17,65,16],f=[117,66,6,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,128,0,0,0,16],v=[21,123,243,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,48,8],w=0;function g(){return 0!=t.find_among_b(i)}function j(){var i=t.limit-t.cursor,i=((()=>{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(t.bra=t.cursor,0!=(i=t.find_among(d))&&(t.ket=t.cursor,!(t.cursor{var i;if(!(t.cursor{var i;if(!(t.cursor{var i;if(t.in_grouping_b(b,97,252)&&!(w>t.cursor)&&(t.ket=t.cursor,0!=(i=t.find_among_b(n))))switch(t.bra=t.cursor,i){case 1:if(t.slice_from("k"))break;return;case 2:if(t.slice_from("p"))break;return;case 3:if(t.slice_from("t"))break}})(),t.cursor=t.limit-e,t.cursor=t.limit_backward,!0},this.stemWord=function(i){return t.setCurrent(i),this.stem(),t.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/finnish-stemmer.js b/sphinx/search/minified-js/finnish-stemmer.js
index f6301fdfa56..160fb267a05 100644
--- a/sphinx/search/minified-js/finnish-stemmer.js
+++ b/sphinx/search/minified-js/finnish-stemmer.js
@@ -1 +1 @@
-FinnishStemmer=function(){var r=new BaseStemmer;var i=[["pa",-1,1],["sti",-1,2],["kaan",-1,1],["han",-1,1],["kin",-1,1],["hän",-1,1],["kään",-1,1],["ko",-1,1],["pä",-1,1],["kö",-1,1]];var e=[["lla",-1,-1],["na",-1,-1],["ssa",-1,-1],["ta",-1,-1],["lta",3,-1],["sta",3,-1]];var a=[["llä",-1,-1],["nä",-1,-1],["ssä",-1,-1],["tä",-1,-1],["ltä",3,-1],["stä",3,-1]];var s=[["lle",-1,-1],["ine",-1,-1]];var t=[["nsa",-1,3],["mme",-1,3],["nne",-1,3],["ni",-1,2],["si",-1,1],["an",-1,4],["en",-1,6],["än",-1,5],["nsä",-1,3]];var u=[["aa",-1,-1],["ee",-1,-1],["ii",-1,-1],["oo",-1,-1],["uu",-1,-1],["ää",-1,-1],["öö",-1,-1]];var l=[["a",-1,8],["lla",0,-1],["na",0,-1],["ssa",0,-1],["ta",0,-1],["lta",4,-1],["sta",4,-1],["tta",4,2],["lle",-1,-1],["ine",-1,-1],["ksi",-1,-1],["n",-1,7],["han",11,1],["den",11,-1,S],["seen",11,-1,C],["hen",11,2],["tten",11,-1,S],["hin",11,3],["siin",11,-1,S],["hon",11,4],["hän",11,5],["hön",11,6],["ä",-1,8],["llä",22,-1],["nä",22,-1],["ssä",22,-1],["tä",22,-1],["ltä",26,-1],["stä",26,-1],["ttä",26,2]];var c=[["eja",-1,-1],["mma",-1,1],["imma",1,-1],["mpa",-1,1],["impa",3,-1],["mmi",-1,1],["immi",5,-1],["mpi",-1,1],["impi",7,-1],["ejä",-1,-1],["mmä",-1,1],["immä",10,-1],["mpä",-1,1],["impä",12,-1]];var n=[["i",-1,-1],["j",-1,-1]];var f=[["mma",-1,1],["imma",0,-1]];var o=[17,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8];var b=[119,223,119,1];var _=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32];var m=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32];var k=[17,97,24,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32];var d=false;var v="";var w=0;var g=0;function p(){g=r.limit;w=r.limit;r:while(true){var i=r.cursor;i:{if(!r.in_grouping(_,97,246)){break i}r.cursor=i;break r}r.cursor=i;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){i:{if(!r.out_grouping(_,97,246)){break i}break r}if(r.cursor>=r.limit){return false}r.cursor++}g=r.cursor;r:while(true){var e=r.cursor;i:{if(!r.in_grouping(_,97,246)){break i}r.cursor=e;break r}r.cursor=e;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){i:{if(!r.out_grouping(_,97,246)){break i}break r}if(r.cursor>=r.limit){return false}r.cursor++}w=r.cursor;return true}function h(){if(!(w<=r.cursor)){return false}return true}function q(){var e;if(r.cursor{var r;if(!(c.cursor{var r;if(!(c.cursor
{var r;if(!(c.cursor{var r;if(!(c.cursor{if(!(c.cursor
=r.limit){break r}r.cursor++}continue}r.cursor=e;break}return true}function v(){m=r.limit;k=r.limit;b=r.limit;var i=r.cursor;r:{e:{var s=r.cursor;i:{if(!r.in_grouping(o,97,251)){break i}if(!r.in_grouping(o,97,251)){break i}if(r.cursor>=r.limit){break i}r.cursor++;break e}r.cursor=s;i:{if(r.find_among(e)==0){break i}break e}r.cursor=s;if(r.cursor>=r.limit){break r}r.cursor++;i:while(true){s:{if(!r.in_grouping(o,97,251)){break s}break i}if(r.cursor>=r.limit){break r}r.cursor++}}m=r.cursor}r.cursor=i;var a=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}k=r.cursor;e:while(true){i:{if(!r.in_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(o,97,251)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}b=r.cursor}r.cursor=a;return true}function d(){var e;while(true){var s=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("i")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(!r.slice_from("y")){return false}break;case 4:if(!r.slice_from("ë")){return false}break;case 5:if(!r.slice_from("ï")){return false}break;case 6:if(!r.slice_del()){return false}break;case 7:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function g(){if(!(m<=r.cursor)){return false}return true}function w(){if(!(k<=r.cursor)){return false}return true}function q(){if(!(b<=r.cursor)){return false}return true}function h(){var e;r.ket=r.cursor;e=r.find_among_b(u);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!q()){return false}if(!r.slice_del()){return false}break;case 2:if(!q()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-i;break r}r.bra=r.cursor;e:{var t=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-t;if(!r.slice_from("iqU")){return false}}}break;case 3:if(!q()){return false}if(!r.slice_from("log")){return false}break;case 4:if(!q()){return false}if(!r.slice_from("u")){return false}break;case 5:if(!q()){return false}if(!r.slice_from("ent")){return false}break;case 6:if(!g()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){r.cursor=r.limit-c;break r}r.bra=r.cursor;switch(e){case 1:if(!q()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!q()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}break;case 2:e:{var f=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-f;if(!w()){r.cursor=r.limit-c;break r}if(!r.slice_from("eux")){return false}}break;case 3:if(!q()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}break;case 4:if(!g()){r.cursor=r.limit-c;break r}if(!r.slice_from("i")){return false}break}}break;case 7:if(!q()){return false}if(!r.slice_del()){return false}var l=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(a);if(e==0){r.cursor=r.limit-l;break r}r.bra=r.cursor;switch(e){case 1:e:{var n=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-n;if(!r.slice_from("abl")){return false}}break;case 2:e:{var b=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-b;if(!r.slice_from("iqU")){return false}}break;case 3:if(!q()){r.cursor=r.limit-l;break r}if(!r.slice_del()){return false}break}}break;case 8:if(!q()){return false}if(!r.slice_del()){return false}var k=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-k;break r}r.bra=r.cursor;if(!q()){r.cursor=r.limit-k;break r}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-k;break r}r.bra=r.cursor;e:{var m=r.limit-r.cursor;i:{if(!q()){break i}if(!r.slice_del()){return false}break e}r.cursor=r.limit-m;if(!r.slice_from("iqU")){return false}}}break;case 9:if(!r.slice_from("eau")){return false}break;case 10:if(!w()){return false}if(!r.slice_from("al")){return false}break;case 11:r:{var _=r.limit-r.cursor;e:{if(!q()){break e}if(!r.slice_del()){return false}break r}r.cursor=r.limit-_;if(!w()){return false}if(!r.slice_from("eux")){return false}}break;case 12:if(!w()){return false}if(!r.out_grouping_b(o,97,251)){return false}if(!r.slice_del()){return false}break;case 13:if(!g()){return false}if(!r.slice_from("ant")){return false}return false;case 14:if(!g()){return false}if(!r.slice_from("ent")){return false}return false;case 15:var v=r.limit-r.cursor;if(!r.in_grouping_b(o,97,251)){return false}if(!g()){return false}r.cursor=r.limit-v;if(!r.slice_del()){return false}return false}return true}function p(){if(r.cursor0){return false}}r.ket=r.cursor;r:{var i=r.limit-r.cursor;e:{if(!r.eq_s_b("é")){break e}break r}r.cursor=r.limit-i;if(!r.eq_s_b("è")){return false}}r.bra=r.cursor;if(!r.slice_from("e")){return false}return true}this.stem=function(){var e=r.cursor;_();r.cursor=e;v();r.limit_backward=r.cursor;r.cursor=r.limit;var i=r.limit-r.cursor;r:{e:{var s=r.limit-r.cursor;i:{var a=r.limit-r.cursor;s:{var u=r.limit-r.cursor;a:{if(!h()){break a}break s}r.cursor=r.limit-u;a:{if(!p()){break a}break s}r.cursor=r.limit-u;if(!z()){break i}}r.cursor=r.limit-a;var t=r.limit-r.cursor;s:{r.ket=r.cursor;a:{var c=r.limit-r.cursor;u:{if(!r.eq_s_b("Y")){break u}r.bra=r.cursor;if(!r.slice_from("i")){return false}break a}r.cursor=r.limit-c;if(!r.eq_s_b("ç")){r.cursor=r.limit-t;break s}r.bra=r.cursor;if(!r.slice_from("c")){return false}}}break e}r.cursor=r.limit-s;if(!I()){break r}}}r.cursor=r.limit-i;var f=r.limit-r.cursor;U();r.cursor=r.limit-f;var l=r.limit-r.cursor;H();r.cursor=r.limit-l;r.cursor=r.limit_backward;var o=r.cursor;d();r.cursor=o;return true};this["stemWord"]=function(e){r.setCurrent(e);this.stem();return r.getCurrent()}};
\ No newline at end of file
+var FrenchStemmer=function(){var n=new BaseStemmer,f=[["col",-1,-1],["ni",-1,1],["par",-1,-1],["tap",-1,-1]],_=[["",-1,7],["H",0,6],["He",1,4],["Hi",1,5],["I",0,1],["U",0,2],["Y",0,3]],m=[["iqU",-1,3],["abl",-1,3],["Ièr",-1,4],["ièr",-1,4],["eus",-1,2],["iv",-1,1]],b=[["ic",-1,2],["abil",-1,1],["iv",-1,3]],k=[["iqUe",-1,1],["atrice",-1,2],["ance",-1,1],["ence",-1,5],["logie",-1,3],["able",-1,1],["isme",-1,1],["euse",-1,12],["iste",-1,1],["ive",-1,8],["if",-1,8],["usion",-1,4],["ation",-1,2],["ution",-1,4],["ateur",-1,2],["iqUes",-1,1],["atrices",-1,2],["ances",-1,1],["ences",-1,5],["logies",-1,3],["ables",-1,1],["ismes",-1,1],["euses",-1,12],["istes",-1,1],["ives",-1,8],["ifs",-1,8],["usions",-1,4],["ations",-1,2],["utions",-1,4],["ateurs",-1,2],["ments",-1,16],["ements",30,6],["issements",31,13],["ités",-1,7],["ment",-1,16],["ement",34,6],["issement",35,13],["amment",34,14],["emment",34,15],["aux",-1,10],["eaux",39,9],["eux",-1,1],["oux",-1,11],["ité",-1,7]],d=[["ira",-1,1],["ie",-1,1],["isse",-1,1],["issante",-1,1],["i",-1,1],["irai",4,1],["ir",-1,1],["iras",-1,1],["ies",-1,1],["îmes",-1,1],["isses",-1,1],["issantes",-1,1],["îtes",-1,1],["is",-1,1],["irais",13,1],["issais",13,1],["irions",-1,1],["issions",-1,1],["irons",-1,1],["issons",-1,1],["issants",-1,1],["it",-1,1],["irait",21,1],["issait",21,1],["issant",-1,1],["iraIent",-1,1],["issaIent",-1,1],["irent",-1,1],["issent",-1,1],["iront",-1,1],["ît",-1,1],["iriez",-1,1],["issiez",-1,1],["irez",-1,1],["issez",-1,1]],g=[["al",-1,1],["épl",-1,-1],["auv",-1,-1]],v=[["a",-1,3],["era",0,2],["aise",-1,4],["asse",-1,3],["ante",-1,3],["ée",-1,2],["ai",-1,3],["erai",6,2],["er",-1,2],["as",-1,3],["eras",9,2],["âmes",-1,3],["aises",-1,4],["asses",-1,3],["antes",-1,3],["âtes",-1,3],["ées",-1,2],["ais",-1,4],["eais",17,2],["erais",17,2],["ions",-1,1],["erions",20,2],["assions",20,3],["erons",-1,2],["ants",-1,3],["és",-1,2],["ait",-1,3],["erait",26,2],["ant",-1,3],["aIent",-1,3],["eraIent",29,2],["èrent",-1,2],["assent",-1,3],["eront",-1,2],["ât",-1,3],["ez",-1,2],["iez",35,2],["eriez",36,2],["assiez",36,3],["erez",35,2],["é",-1,2]],q=[["e",-1,3],["Ière",0,2],["ière",0,2],["ion",-1,1],["Ier",-1,2],["ier",-1,2]],w=[["ell",-1,-1],["eill",-1,-1],["enn",-1,-1],["onn",-1,-1],["ett",-1,-1]],p=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,128,130,103,8,5],h=[65,85],z=[131,14,3],I=[1,65,20,0,0,0,0,0,0,0,0,0,0,0,0,0,128],U=0,H=0,x=0;function Y(){return x<=n.cursor}function y(){return H<=n.cursor}function C(){return U<=n.cursor}this.stem=function(){var r=n.cursor,i=(n.bra=n.cursor,i=n.cursor,(n.in_grouping(z,99,116)||(n.cursor=i,n.eq_s("qu")))&&n.eq_s("'")&&(n.ket=n.cursor,n.cursor{for(;;){var r=n.cursor;r:{for(;;){var i=n.cursor;i:{e:{var e=n.cursor;s:if(n.in_grouping(p,97,251)){n.bra=n.cursor;c:{var s=n.cursor;if(n.eq_s("u")&&(n.ket=n.cursor,n.in_grouping(p,97,251))){if(n.slice_from("U"))break c;return}if(n.cursor=s,n.eq_s("i")&&(n.ket=n.cursor,n.in_grouping(p,97,251))){if(n.slice_from("I"))break c;return}if(n.cursor=s,!n.eq_s("y"))break s;if(n.ket=n.cursor,!n.slice_from("Y"))return}break e}if(n.cursor=e,n.bra=n.cursor,n.eq_s("ë")){if(n.ket=n.cursor,n.slice_from("He"))break e;return}if(n.cursor=e,n.bra=n.cursor,n.eq_s("ï")){if(n.ket=n.cursor,n.slice_from("Hi"))break e;return}if(n.cursor=e,n.bra=n.cursor,n.eq_s("y")&&(n.ket=n.cursor,n.in_grouping(p,97,251))){if(n.slice_from("Y"))break e;return}if(n.cursor=e,!n.eq_s("q"))break i;if(n.bra=n.cursor,!n.eq_s("u"))break i;if(n.ket=n.cursor,!n.slice_from("U"))return}n.cursor=i;break}if(n.cursor=i,n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=r;break}})(),n.cursor=i,x=n.limit,H=n.limit,U=n.limit;var e,r=n.cursor;r:{i:{var s=n.cursor;if(!n.in_grouping(p,97,251)||!n.in_grouping(p,97,251)||n.cursor>=n.limit){n.cursor=s;e:if(0!=(e=n.find_among(f))){switch(e){case 1:if(n.in_grouping(p,97,251))break;break e}break i}if(n.cursor=s,n.cursor>=n.limit)break r;if(n.cursor++,!n.go_out_grouping(p,97,251))break r}n.cursor++}x=n.cursor}n.cursor=r,r=n.cursor,n.go_out_grouping(p,97,251)&&(n.cursor++,n.go_in_grouping(p,97,251))&&(n.cursor++,H=n.cursor,n.go_out_grouping(p,97,251))&&(n.cursor++,n.go_in_grouping(p,97,251))&&(n.cursor++,U=n.cursor),n.cursor=r,n.limit_backward=n.cursor,n.cursor=n.limit;i=n.limit-n.cursor;r:{var c=n.limit-n.cursor,u=n.limit-n.cursor,t=n.limit-n.cursor;if((()=>{var r;if(n.ket=n.cursor,0!=(r=n.find_among_b(k))){switch(n.bra=n.cursor,r){case 1:if(!C())return;if(n.slice_del())break;return;case 2:if(!C())return;if(!n.slice_del())return;var i=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("ic")){n.bra=n.cursor;i:{var e=n.limit-n.cursor;if(C()){if(n.slice_del())break i;return}if(n.cursor=n.limit-e,!n.slice_from("iqU"))return}}else n.cursor=n.limit-i;break;case 3:if(!C())return;if(n.slice_from("log"))break;return;case 4:if(!C())return;if(n.slice_from("u"))break;return;case 5:if(!C())return;if(n.slice_from("ent"))break;return;case 6:if(!Y())return;if(!n.slice_del())return;var s=n.limit-n.cursor;i:if(n.ket=n.cursor,0==(r=n.find_among_b(m)))n.cursor=n.limit-s;else switch(n.bra=n.cursor,r){case 1:if(!C()){n.cursor=n.limit-s;break i}if(!n.slice_del())return;if(n.ket=n.cursor,!n.eq_s_b("at")){n.cursor=n.limit-s;break i}if(n.bra=n.cursor,!C()){n.cursor=n.limit-s;break i}if(n.slice_del())break;return;case 2:e:{var c=n.limit-n.cursor;if(C()){if(n.slice_del())break e;return}if(n.cursor=n.limit-c,!y()){n.cursor=n.limit-s;break i}if(!n.slice_from("eux"))return}break;case 3:if(!C()){n.cursor=n.limit-s;break i}if(n.slice_del())break;return;case 4:if(!Y()){n.cursor=n.limit-s;break i}if(n.slice_from("i"))break;return}break;case 7:if(!C())return;if(!n.slice_del())return;var u=n.limit-n.cursor;i:if(n.ket=n.cursor,0==(r=n.find_among_b(b)))n.cursor=n.limit-u;else switch(n.bra=n.cursor,r){case 1:e:{var t=n.limit-n.cursor;if(C()){if(n.slice_del())break e;return}if(n.cursor=n.limit-t,!n.slice_from("abl"))return}break;case 2:e:{var o=n.limit-n.cursor;if(C()){if(n.slice_del())break e;return}if(n.cursor=n.limit-o,!n.slice_from("iqU"))return}break;case 3:if(!C()){n.cursor=n.limit-u;break i}if(n.slice_del())break;return}break;case 8:if(!C())return;if(!n.slice_del())return;i=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("at"))if(n.bra=n.cursor,C()){if(!n.slice_del())return;if(n.ket=n.cursor,n.eq_s_b("ic")){n.bra=n.cursor;i:{var a=n.limit-n.cursor;if(C()){if(n.slice_del())break i;return}if(n.cursor=n.limit-a,!n.slice_from("iqU"))return}}else n.cursor=n.limit-i}else n.cursor=n.limit-i;else n.cursor=n.limit-i;break;case 9:if(n.slice_from("eau"))break;return;case 10:if(!y())return;if(n.slice_from("al"))break;return;case 11:if(!n.in_grouping_b(h,98,112))return;if(n.slice_from("ou"))break;return;case 12:i:{var l=n.limit-n.cursor;if(C()){if(n.slice_del())break i;return}if(n.cursor=n.limit-l,!y())return;if(!n.slice_from("eux"))return}break;case 13:if(!y())return;if(!n.out_grouping_b(p,97,251))return;if(n.slice_del())break;return;case 14:return Y()?void n.slice_from("ant"):void 0;case 15:return Y()?void n.slice_from("ent"):void 0;case 16:i=n.limit-n.cursor;return n.in_grouping_b(p,97,251)?Y()&&(n.cursor=n.limit-i,void n.slice_del()):void 0}return 1}})()||(n.cursor=n.limit-t,(()=>{if(!(n.cursor{var r;if(!(n.cursorn.limit_backward)break i}return}if(n.cursor=n.limit-e,n.slice_del())break;return}return 1}n.limit_backward=i}})())){n.cursor=n.limit-u;var o=n.limit-n.cursor;i:{n.ket=n.cursor;e:{var a=n.limit-n.cursor;if(n.eq_s_b("Y")){if(n.bra=n.cursor,n.slice_from("i"))break e;return!1}if(n.cursor=n.limit-a,!n.eq_s_b("ç")){n.cursor=n.limit-o;break i}if(n.bra=n.cursor,!n.slice_from("c"))return!1}}}else if(n.cursor=n.limit-c,!(()=>{var r=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("s")){n.bra=n.cursor;var i=n.limit-n.cursor,e=n.limit-n.cursor;if(n.eq_s_b("Hi")||(n.cursor=n.limit-e,n.out_grouping_b(I,97,232))){if(n.cursor=n.limit-i,!n.slice_del())return}else n.cursor=n.limit-r}else n.cursor=n.limit-r;if(!(n.cursor{for(var r;;){var i=n.cursor;r:{switch(n.bra=n.cursor,r=n.find_among(_),n.ket=n.cursor,r){case 1:if(n.slice_from("i"))break;return;case 2:if(n.slice_from("u"))break;return;case 3:if(n.slice_from("y"))break;return;case 4:if(n.slice_from("ë"))break;return;case 5:if(n.slice_from("ï"))break;return;case 6:if(n.slice_del())break;return;case 7:if(n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=i;break}})(),n.cursor=r,!0},this.stemWord=function(r){return n.setCurrent(r),this.stem(),n.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/german-stemmer.js b/sphinx/search/minified-js/german-stemmer.js
index e2a335d15e0..da15f9e8f71 100644
--- a/sphinx/search/minified-js/german-stemmer.js
+++ b/sphinx/search/minified-js/german-stemmer.js
@@ -1 +1 @@
-GermanStemmer=function(){var r=new BaseStemmer;var e=[["",-1,5],["U",0,2],["Y",0,1],["ä",0,3],["ö",0,4],["ü",0,2]];var i=[["e",-1,2],["em",-1,1],["en",-1,2],["ern",-1,1],["er",-1,1],["s",-1,3],["es",5,2]];var s=[["en",-1,1],["er",-1,1],["st",-1,2],["est",2,1]];var u=[["ig",-1,1],["lich",-1,1]];var a=[["end",-1,1],["ig",-1,2],["ung",-1,1],["lich",-1,3],["isch",-1,2],["ik",-1,2],["heit",-1,3],["keit",-1,4]];var c=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,8,0,32,8];var t=[117,30,5];var o=[117,30,4];var f=0;var l=0;var n=0;function b(){var e=r.cursor;while(true){var i=r.cursor;r:{e:{var s=r.cursor;i:{r.bra=r.cursor;if(!r.eq_s("ß")){break i}r.ket=r.cursor;if(!r.slice_from("ss")){return false}break e}r.cursor=s;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=i;break}r.cursor=e;while(true){var u=r.cursor;r:{e:while(true){var a=r.cursor;i:{if(!r.in_grouping(c,97,252)){break i}r.bra=r.cursor;s:{var t=r.cursor;u:{if(!r.eq_s("u")){break u}r.ket=r.cursor;if(!r.in_grouping(c,97,252)){break u}if(!r.slice_from("U")){return false}break s}r.cursor=t;if(!r.eq_s("y")){break i}r.ket=r.cursor;if(!r.in_grouping(c,97,252)){break i}if(!r.slice_from("Y")){return false}}r.cursor=a;break e}r.cursor=a;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=u;break}return true}function k(){n=r.limit;l=r.limit;var e=r.cursor;{var i=r.cursor+3;if(i>r.limit){return false}r.cursor=i}f=r.cursor;r.cursor=e;r:while(true){e:{if(!r.in_grouping(c,97,252)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,252)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}n=r.cursor;r:{if(!(n=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(c,97,252)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}l=r.cursor;return true}function m(){var i;while(true){var s=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("y")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(!r.slice_from("a")){return false}break;case 4:if(!r.slice_from("o")){return false}break;case 5:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function _(){if(!(n<=r.cursor)){return false}return true}function v(){if(!(l<=r.cursor)){return false}return true}function g(){var e;var c=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(i);if(e==0){break r}r.bra=r.cursor;if(!_()){break r}switch(e){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}var f=r.limit-r.cursor;e:{r.ket=r.cursor;if(!r.eq_s_b("s")){r.cursor=r.limit-f;break e}r.bra=r.cursor;if(!r.eq_s_b("nis")){r.cursor=r.limit-f;break e}if(!r.slice_del()){return false}}break;case 3:if(!r.in_grouping_b(t,98,116)){break r}if(!r.slice_del()){return false}break}}r.cursor=r.limit-c;var l=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){break r}r.bra=r.cursor;if(!_()){break r}switch(e){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.in_grouping_b(o,98,116)){break r}{var n=r.cursor-3;if(n{for(var r,i=n.cursor;;){var e=n.cursor;r:{for(;;){var s=n.cursor;i:if(n.in_grouping(t,97,252)){n.bra=n.cursor;e:{var c=n.cursor;if(n.eq_s("u")&&(n.ket=n.cursor,n.in_grouping(t,97,252))){if(n.slice_from("U"))break e;return}if(n.cursor=c,!n.eq_s("y"))break i;if(n.ket=n.cursor,!n.in_grouping(t,97,252))break i;if(!n.slice_from("Y"))return}n.cursor=s;break}if(n.cursor=s,n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=e;break}for(n.cursor=i;;){var u=n.cursor;r:{switch(n.bra=n.cursor,r=n.find_among(o),n.ket=n.cursor,r){case 1:if(n.slice_from("ss"))break;return;case 2:if(n.slice_from("ä"))break;return;case 3:if(n.slice_from("ö"))break;return;case 4:if(n.slice_from("ü"))break;return;case 5:if(n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=u;break}})(),n.cursor=i,n.cursor),e=(a=n.limit,u=n.limit,r=n.cursor,(e=n.cursor+3)>n.limit||(n.cursor=e,c=n.cursor,n.cursor=r,n.go_out_grouping(t,97,252)&&(n.cursor++,n.go_in_grouping(t,97,252))&&(n.cursor++,a=n.cursor,c<=a||(a=c),n.go_out_grouping(t,97,252))&&(n.cursor++,n.go_in_grouping(t,97,252))&&(n.cursor++,u=n.cursor)),n.cursor=i,n.limit_backward=n.cursor,n.cursor=n.limit,(()=>{var r,i=n.limit-n.cursor;r:if(n.ket=n.cursor,0!=(r=n.find_among_b(l))&&(n.bra=n.cursor,v()))switch(r){case 1:var e=n.limit-n.cursor;if(n.eq_s_b("syst"))break r;if(n.cursor=n.limit-e,n.slice_del())break;return;case 2:if(n.slice_del())break;return;case 3:if(!n.slice_del())return;e=n.limit-n.cursor;if(n.ket=n.cursor,n.eq_s_b("s"))if(n.bra=n.cursor,n.eq_s_b("nis")){if(!n.slice_del())return}else n.cursor=n.limit-e;else n.cursor=n.limit-e;break;case 4:if(!n.in_grouping_b(g,98,116))break r;if(n.slice_del())break;return;case 5:if(n.slice_from("l"))break;return}n.cursor=n.limit-i,i=n.limit-n.cursor;r:if(n.ket=n.cursor,0!=(r=n.find_among_b(_))&&(n.bra=n.cursor,v()))switch(r){case 1:if(n.slice_del())break;return;case 2:if(!n.in_grouping_b(d,98,116))break r;var s=n.cursor-3;if(s{for(var r;;){var i=n.cursor;r:{switch(n.bra=n.cursor,r=n.find_among(s),n.ket=n.cursor,r){case 1:if(n.slice_from("y"))break;return;case 2:if(n.slice_from("u"))break;return;case 3:if(n.slice_from("a"))break;return;case 4:if(n.slice_from("o"))break;return;case 5:if(n.cursor>=n.limit)break r;n.cursor++}continue}n.cursor=i;break}})(),n.cursor=e,!0},this.stemWord=function(r){return n.setCurrent(r),this.stem(),n.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/greek-stemmer.js b/sphinx/search/minified-js/greek-stemmer.js
new file mode 100644
index 00000000000..d02a30bf1fe
--- /dev/null
+++ b/sphinx/search/minified-js/greek-stemmer.js
@@ -0,0 +1 @@
+var GreekStemmer=function(){var s=new BaseStemmer,e=[["",-1,25],["Ά",0,1],["Έ",0,5],["Ή",0,7],["Ί",0,9],["Ό",0,15],["Ύ",0,20],["Ώ",0,24],["ΐ",0,7],["Α",0,1],["Β",0,2],["Γ",0,3],["Δ",0,4],["Ε",0,5],["Ζ",0,6],["Η",0,7],["Θ",0,8],["Ι",0,9],["Κ",0,10],["Λ",0,11],["Μ",0,12],["Ν",0,13],["Ξ",0,14],["Ο",0,15],["Π",0,16],["Ρ",0,17],["Σ",0,18],["Τ",0,19],["Υ",0,20],["Φ",0,21],["Χ",0,22],["Ψ",0,23],["Ω",0,24],["Ϊ",0,9],["Ϋ",0,20],["ά",0,1],["έ",0,5],["ή",0,7],["ί",0,9],["ΰ",0,20],["ς",0,18],["ϊ",0,7],["ϋ",0,20],["ό",0,15],["ύ",0,20],["ώ",0,24]],o=[["σκαγια",-1,2],["φαγια",-1,1],["ολογια",-1,3],["σογια",-1,4],["τατογια",-1,5],["κρεατα",-1,6],["περατα",-1,7],["τερατα",-1,8],["γεγονοτα",-1,11],["καθεστωτα",-1,10],["φωτα",-1,9],["περατη",-1,7],["σκαγιων",-1,2],["φαγιων",-1,1],["ολογιων",-1,3],["σογιων",-1,4],["τατογιων",-1,5],["κρεατων",-1,6],["περατων",-1,7],["τερατων",-1,8],["γεγονοτων",-1,11],["καθεστωτων",-1,10],["φωτων",-1,9],["κρεασ",-1,6],["περασ",-1,7],["τερασ",-1,8],["γεγονοσ",-1,11],["κρεατοσ",-1,6],["περατοσ",-1,7],["τερατοσ",-1,8],["γεγονοτοσ",-1,11],["καθεστωτοσ",-1,10],["φωτοσ",-1,9],["καθεστωσ",-1,10],["φωσ",-1,9],["σκαγιου",-1,2],["φαγιου",-1,1],["ολογιου",-1,3],["σογιου",-1,4],["τατογιου",-1,5]],u=[["πα",-1,1],["ξαναπα",0,1],["επα",0,1],["περιπα",0,1],["αναμπα",0,1],["εμπα",0,1],["β",-1,2],["δανε",-1,1],["βαθυρι",-1,2],["βαρκ",-1,2],["μαρκ",-1,2],["λ",-1,2],["μ",-1,2],["κορν",-1,2],["αθρο",-1,1],["συναθρο",14,1],["π",-1,2],["ιμπ",16,2],["ρ",-1,2],["μαρ",18,2],["αμπαρ",18,2],["γκρ",18,2],["βολβορ",18,2],["γλυκορ",18,2],["πιπερορ",18,2],["πρ",18,2],["μπρ",25,2],["αρρ",18,2],["γλυκυρ",18,2],["πολυρ",18,2],["λου",-1,2]],a=[["ιζα",-1,1],["ιζε",-1,1],["ιζαμε",-1,1],["ιζουμε",-1,1],["ιζανε",-1,1],["ιζουνε",-1,1],["ιζατε",-1,1],["ιζετε",-1,1],["ιζει",-1,1],["ιζαν",-1,1],["ιζουν",-1,1],["ιζεσ",-1,1],["ιζεισ",-1,1],["ιζω",-1,1]],t=[["βι",-1,1],["λι",-1,1],["αλ",-1,1],["εν",-1,1],["σ",-1,1],["χ",-1,1],["υψ",-1,1],["ζω",-1,1]],_=[["ωθηκα",-1,1],["ωθηκε",-1,1],["ωθηκαμε",-1,1],["ωθηκανε",-1,1],["ωθηκατε",-1,1],["ωθηκαν",-1,1],["ωθηκεσ",-1,1]],l=[["ξαναπα",-1,1],["επα",-1,1],["περιπα",-1,1],["αναμπα",-1,1],["εμπα",-1,1],["χαρτοπα",-1,1],["εξαρχα",-1,1],["γε",-1,2],["γκε",-1,2],["κλε",-1,1],["εκλε",9,1],["απεκλε",10,1],["αποκλε",9,1],["εσωκλε",9,1],["δανε",-1,1],["πε",-1,1],["επε",15,1],["μετεπε",16,1],["εσε",-1,1],["γκ",-1,2],["μ",-1,2],["πουκαμ",20,2],["κομ",20,2],["αν",-1,2],["ολο",-1,2],["αθρο",-1,1],["συναθρο",25,1],["π",-1,2],["λαρ",-1,2],["δημοκρατ",-1,2],["αφ",-1,2],["γιγαντοαφ",30,2]],m=[["ισα",-1,1],["ισαμε",-1,1],["ισανε",-1,1],["ισε",-1,1],["ισατε",-1,1],["ισαν",-1,1],["ισεσ",-1,1]],f=[["ξαναπα",-1,1],["επα",-1,1],["περιπα",-1,1],["αναμπα",-1,1],["εμπα",-1,1],["χαρτοπα",-1,1],["εξαρχα",-1,1],["κλε",-1,1],["εκλε",7,1],["απεκλε",8,1],["αποκλε",7,1],["εσωκλε",7,1],["δανε",-1,1],["πε",-1,1],["επε",13,1],["μετεπε",14,1],["εσε",-1,1],["αθρο",-1,1],["συναθρο",17,1]],b=[["ισουμε",-1,1],["ισουνε",-1,1],["ισετε",-1,1],["ισει",-1,1],["ισουν",-1,1],["ισεισ",-1,1],["ισω",-1,1]],n=[["ατα",-1,2],["φα",-1,2],["ηφα",1,2],["μεγ",-1,2],["λυγ",-1,2],["ηδ",-1,2],["κλε",-1,1],["εσωκλε",6,1],["πλε",-1,1],["δανε",-1,1],["σε",-1,1],["ασε",10,1],["καθ",-1,2],["εχθ",-1,2],["κακ",-1,2],["μακ",-1,2],["σκ",-1,2],["φιλ",-1,2],["κυλ",-1,2],["μ",-1,2],["γεμ",19,2],["αχν",-1,2],["συναθρο",-1,1],["π",-1,2],["απ",23,2],["εμπ",23,2],["ευπ",23,2],["αρ",-1,2],["αορ",-1,2],["γυρ",-1,2],["χρ",-1,2],["χωρ",-1,2],["κτ",-1,2],["ακτ",32,2],["χτ",-1,2],["αχτ",34,2],["ταχ",-1,2],["σχ",-1,2],["ασχ",37,2],["υψ",-1,2]],k=[["ιστα",-1,1],["ιστε",-1,1],["ιστη",-1,1],["ιστοι",-1,1],["ιστων",-1,1],["ιστο",-1,1],["ιστεσ",-1,1],["ιστησ",-1,1],["ιστοσ",-1,1],["ιστουσ",-1,1],["ιστου",-1,1]],d=[["εγκλε",-1,1],["αποκλε",-1,1],["δανε",-1,2],["αντιδανε",2,2],["σε",-1,1],["μετασε",4,1],["μικροσε",4,1]],g=[["ατομικ",-1,2],["εθνικ",-1,4],["τοπικ",-1,7],["εκλεκτικ",-1,5],["σκεπτικ",-1,6],["γνωστικ",-1,3],["αγνωστικ",5,1],["αλεξανδριν",-1,8],["θεατριν",-1,10],["βυζαντιν",-1,9]],w=[["ισμοι",-1,1],["ισμων",-1,1],["ισμο",-1,1],["ισμοσ",-1,1],["ισμουσ",-1,1],["ισμου",-1,1]],v=[["σ",-1,1],["χ",-1,1]],h=[["ουδακια",-1,1],["αρακια",-1,1],["ουδακι",-1,1],["αρακι",-1,1]],q=[["β",-1,2],["βαμβ",0,1],["σλοβ",0,1],["τσεχοσλοβ",2,1],["καρδ",-1,2],["ζ",-1,2],["τζ",5,1],["κ",-1,1],["καπακ",7,1],["σοκ",7,1],["σκ",7,1],["βαλ",-1,2],["μαλ",-1,1],["γλ",-1,2],["τριπολ",-1,2],["πλ",-1,1],["λουλ",-1,1],["φυλ",-1,1],["καιμ",-1,1],["κλιμ",-1,1],["φαρμ",-1,1],["γιαν",-1,2],["σπαν",-1,1],["ηγουμεν",-1,2],["κον",-1,1],["μακρυν",-1,2],["π",-1,2],["κατραπ",26,1],["ρ",-1,1],["βρ",28,1],["λαβρ",29,1],["αμβρ",29,1],["μερ",28,1],["πατερ",28,2],["ανθρ",28,1],["κορ",28,1],["σ",-1,1],["ναγκασ",36,1],["τοσ",36,2],["μουστ",-1,1],["ρυ",-1,1],["φ",-1,1],["σφ",41,1],["αλισφ",42,1],["νυφ",41,2],["χ",-1,1]],p=[["ακια",-1,1],["αρακια",0,1],["ιτσα",-1,1],["ακι",-1,1],["αρακι",3,1],["ιτσων",-1,1],["ιτσασ",-1,1],["ιτσεσ",-1,1]],C=[["ψαλ",-1,1],["αιφν",-1,1],["ολο",-1,1],["ιρ",-1,1]],S=[["ε",-1,1],["παιχν",-1,1]],B=[["ιδια",-1,1],["ιδιων",-1,1],["ιδιο",-1,1]],G=[["ιβ",-1,1],["δ",-1,1],["φραγκ",-1,1],["λυκ",-1,1],["οβελ",-1,1],["μην",-1,1],["ρ",-1,1]],W=[["ισκε",-1,1],["ισκο",-1,1],["ισκοσ",-1,1],["ισκου",-1,1]],j=[["αδων",-1,1],["αδεσ",-1,1]],x=[["γιαγι",-1,-1],["θει",-1,-1],["οκ",-1,-1],["μαμ",-1,-1],["μαν",-1,-1],["μπαμπ",-1,-1],["πεθερ",-1,-1],["πατερ",-1,-1],["κυρ",-1,-1],["νταντ",-1,-1]],y=[["εδων",-1,1],["εδεσ",-1,1]],z=[["μιλ",-1,1],["δαπ",-1,1],["γηπ",-1,1],["ιπ",-1,1],["εμπ",-1,1],["οπ",-1,1],["κρασπ",-1,1],["υπ",-1,1]],A=[["ουδων",-1,1],["ουδεσ",-1,1]],D=[["τραγ",-1,1],["φε",-1,1],["καλιακ",-1,1],["αρκ",-1,1],["σκ",-1,1],["πεταλ",-1,1],["βελ",-1,1],["λουλ",-1,1],["φλ",-1,1],["χν",-1,1],["πλεξ",-1,1],["σπ",-1,1],["φρ",-1,1],["σ",-1,1],["λιχ",-1,1]],E=[["εων",-1,1],["εωσ",-1,1]],F=[["δ",-1,1],["ιδ",0,1],["θ",-1,1],["γαλ",-1,1],["ελ",-1,1],["ν",-1,1],["π",-1,1],["παρ",-1,1]],K=[["ια",-1,1],["ιων",-1,1],["ιου",-1,1]],L=[["ικα",-1,1],["ικων",-1,1],["ικο",-1,1],["ικου",-1,1]],M=[["αδ",-1,1],["συναδ",0,1],["καταδ",0,1],["αντιδ",-1,1],["ενδ",-1,1],["φυλοδ",-1,1],["υποδ",-1,1],["πρωτοδ",-1,1],["εξωδ",-1,1],["ηθ",-1,1],["ανηθ",9,1],["ξικ",-1,1],["αλ",-1,1],["αμμοχαλ",12,1],["συνομηλ",-1,1],["μπολ",-1,1],["μουλ",-1,1],["τσαμ",-1,1],["βρωμ",-1,1],["αμαν",-1,1],["μπαν",-1,1],["καλλιν",-1,1],["ποστελν",-1,1],["φιλον",-1,1],["καλπ",-1,1],["γερ",-1,1],["χασ",-1,1],["μποσ",-1,1],["πλιατσ",-1,1],["πετσ",-1,1],["πιτσ",-1,1],["φυσ",-1,1],["μπαγιατ",-1,1],["νιτ",-1,1],["πικαντ",-1,1],["σερτ",-1,1]],N=[["αγαμε",-1,1],["ηκαμε",-1,1],["ηθηκαμε",1,1],["ησαμε",-1,1],["ουσαμε",-1,1]],O=[["βουβ",-1,1],["ξεθ",-1,1],["πεθ",-1,1],["αποθ",-1,1],["αποκ",-1,1],["ουλ",-1,1],["αναπ",-1,1],["πικρ",-1,1],["ποτ",-1,1],["αποστ",-1,1],["χ",-1,1],["σιχ",10,1]],P=[["τρ",-1,1],["τσ",-1,1]],Q=[["αγανε",-1,1],["ηκανε",-1,1],["ηθηκανε",1,1],["ησανε",-1,1],["ουσανε",-1,1],["οντανε",-1,1],["ιοντανε",5,1],["ουντανε",-1,1],["ιουντανε",7,1],["οτανε",-1,1],["ιοτανε",9,1]],R=[["ταβ",-1,1],["νταβ",0,1],["ψηλοταβ",0,1],["λιβ",-1,1],["κλιβ",3,1],["ξηροκλιβ",4,1],["γ",-1,1],["αγ",6,1],["τραγ",7,1],["τσαγ",7,1],["αθιγγ",6,1],["τσιγγ",6,1],["ατσιγγ",11,1],["στεγ",6,1],["απηγ",6,1],["σιγ",6,1],["ανοργ",6,1],["ενοργ",6,1],["καλπουζ",-1,1],["θ",-1,1],["μωαμεθ",19,1],["πιθ",19,1],["απιθ",21,1],["δεκ",-1,1],["πελεκ",-1,1],["ικ",-1,1],["ανικ",25,1],["βουλκ",-1,1],["βασκ",-1,1],["βραχυκ",-1,1],["γαλ",-1,1],["καταγαλ",30,1],["ολογαλ",30,1],["βαθυγαλ",30,1],["μελ",-1,1],["καστελ",-1,1],["πορτολ",-1,1],["πλ",-1,1],["διπλ",37,1],["λαοπλ",37,1],["ψυχοπλ",37,1],["ουλ",-1,1],["μ",-1,1],["ολιγοδαμ",42,1],["μουσουλμ",42,1],["δραδουμ",42,1],["βραχμ",42,1],["ν",-1,1],["αμερικαν",47,1],["π",-1,1],["αδαπ",49,1],["χαμηλοδαπ",49,1],["πολυδαπ",49,1],["κοπ",49,1],["υποκοπ",53,1],["τσοπ",49,1],["σπ",49,1],["ερ",-1,1],["γερ",57,1],["βετερ",57,1],["λουθηρ",-1,1],["κορμορ",-1,1],["περιτρ",-1,1],["ουρ",-1,1],["σ",-1,1],["βασ",64,1],["πολισ",64,1],["σαρακατσ",64,1],["θυσ",64,1],["διατ",-1,1],["πλατ",-1,1],["τσαρλατ",-1,1],["τετ",-1,1],["πουριτ",-1,1],["σουλτ",-1,1],["μαιντ",-1,1],["ζωντ",-1,1],["καστ",-1,1],["φ",-1,1],["διαφ",78,1],["στεφ",78,1],["φωτοστεφ",80,1],["περηφ",78,1],["υπερηφ",82,1],["κοιλαρφ",78,1],["πενταρφ",78,1],["ορφ",78,1],["χ",-1,1],["αμηχ",87,1],["βιομηχ",87,1],["μεγλοβιομηχ",89,1],["καπνοβιομηχ",89,1],["μικροβιομηχ",89,1],["πολυμηχ",87,1],["λιχ",87,1]],T=[["ησετε",-1,1]],U=[["ενδ",-1,1],["συνδ",-1,1],["οδ",-1,1],["διαθ",-1,1],["καθ",-1,1],["ραθ",-1,1],["ταθ",-1,1],["τιθ",-1,1],["εκθ",-1,1],["ενθ",-1,1],["συνθ",-1,1],["ροθ",-1,1],["υπερθ",-1,1],["σθ",-1,1],["ευθ",-1,1],["αρκ",-1,1],["ωφελ",-1,1],["βολ",-1,1],["αιν",-1,1],["πον",-1,1],["ρον",-1,1],["συν",-1,1],["βαρ",-1,1],["βρ",-1,1],["αιρ",-1,1],["φορ",-1,1],["ευρ",-1,1],["πυρ",-1,1],["χωρ",-1,1],["νετ",-1,1],["σχ",-1,1]],V=[["παγ",-1,1],["δ",-1,1],["αδ",1,1],["θ",-1,1],["αθ",3,1],["τοκ",-1,1],["σκ",-1,1],["παρακαλ",-1,1],["σκελ",-1,1],["απλ",-1,1],["εμ",-1,1],["αν",-1,1],["βεν",-1,1],["βαρον",-1,1],["κοπ",-1,1],["σερπ",-1,1],["αβαρ",-1,1],["εναρ",-1,1],["αβρ",-1,1],["μπορ",-1,1],["θαρρ",-1,1],["ντρ",-1,1],["υ",-1,1],["νιφ",-1,1],["συρφ",-1,1]],X=[["οντασ",-1,1],["ωντασ",-1,1]],Y=[["ομαστε",-1,1],["ιομαστε",0,1]],Z=[["π",-1,1],["απ",0,1],["ακαταπ",1,1],["συμπ",0,1],["ασυμπ",3,1],["αμεταμφ",-1,1]],$=[["ζ",-1,1],["αλ",-1,1],["παρακαλ",1,1],["εκτελ",-1,1],["μ",-1,1],["ξ",-1,1],["προ",-1,1],["αρ",-1,1],["νισ",-1,1]],r1=[["ηθηκα",-1,1],["ηθηκε",-1,1],["ηθηκεσ",-1,1]],i1=[["πιθ",-1,1],["οθ",-1,1],["ναρθ",-1,1],["σκουλ",-1,1],["σκωλ",-1,1],["σφ",-1,1]],c1=[["θ",-1,1],["διαθ",0,1],["παρακαταθ",0,1],["συνθ",0,1],["προσθ",0,1]],s1=[["ηκα",-1,1],["ηκε",-1,1],["ηκεσ",-1,1]],e1=[["φαγ",-1,1],["ληγ",-1,1],["φρυδ",-1,1],["μαντιλ",-1,1],["μαλλ",-1,1],["ομ",-1,1],["βλεπ",-1,1],["ποδαρ",-1,1],["κυματ",-1,1],["πρωτ",-1,1],["λαχ",-1,1],["πανταχ",-1,1]],o1=[["τσα",-1,1],["χαδ",-1,1],["μεδ",-1,1],["λαμπιδ",-1,1],["δε",-1,1],["πλε",-1,1],["μεσαζ",-1,1],["δεσποζ",-1,1],["αιθ",-1,1],["φαρμακ",-1,1],["αγκ",-1,1],["ανηκ",-1,1],["λ",-1,1],["μ",-1,1],["αμ",13,1],["βρομ",13,1],["υποτειν",-1,1],["εκλιπ",-1,1],["ρ",-1,1],["ενδιαφερ",18,1],["αναρρ",18,1],["πατ",-1,1],["καθαρευ",-1,1],["δευτερευ",-1,1],["λεχ",-1,1]],u1=[["ουσα",-1,1],["ουσε",-1,1],["ουσεσ",-1,1]],a1=[["πελ",-1,1],["λλ",-1,1],["σμην",-1,1],["ρπ",-1,1],["πρ",-1,1],["φρ",-1,1],["χορτ",-1,1],["οφ",-1,1],["ψοφ",7,-1],["σφ",-1,1],["λοχ",-1,1],["ναυλοχ",10,-1]],t1=[["αμαλλι",-1,1],["λ",-1,1],["αμαλ",1,1],["μ",-1,1],["ουλαμ",3,1],["εν",-1,1],["δερβεν",5,1],["π",-1,1],["αειπ",7,1],["αρτιπ",7,1],["συμπ",7,1],["νεοπ",7,1],["κροκαλοπ",7,1],["ολοπ",7,1],["προσωποπ",7,1],["σιδηροπ",7,1],["δροσοπ",7,1],["ασπ",7,1],["ανυπ",7,1],["ρ",-1,1],["ασπαρ",19,1],["χαρ",19,1],["αχαρ",21,1],["απερ",19,1],["τρ",19,1],["ουρ",19,1],["τ",-1,1],["διατ",26,1],["επιτ",26,1],["συντ",26,1],["ομοτ",26,1],["νομοτ",30,1],["αποτ",26,1],["υποτ",26,1],["αβαστ",26,1],["αιμοστ",26,1],["προστ",26,1],["ανυστ",26,1],["ναυ",-1,1],["αφ",-1,1],["ξεφ",-1,1],["αδηφ",-1,1],["παμφ",-1,1],["πολυφ",-1,1]],_1=[["αγα",-1,1],["αγε",-1,1],["αγεσ",-1,1]],l1=[["ησα",-1,1],["ησε",-1,1],["ησου",-1,1]],m1=[["ν",-1,1],["δωδεκαν",0,1],["επταν",0,1],["μεγαλον",0,1],["ερημον",0,1],["χερσον",0,1]],f1=[["ηστε",-1,1]],b1=[["σβ",-1,1],["ασβ",0,1],["απλ",-1,1],["αειμν",-1,1],["χρ",-1,1],["αχρ",4,1],["κοινοχρ",4,1],["δυσχρ",4,1],["ευχρ",4,1],["παλιμψ",-1,1]],n1=[["ουνε",-1,1],["ηθουνε",0,1],["ησουνε",0,1]],k1=[["σπι",-1,1],["ν",-1,1],["εξων",1,1],["ρ",-1,1],["στραβομουτσ",-1,1],["κακομουτσ",-1,1]],d1=[["ουμε",-1,1],["ηθουμε",0,1],["ησουμε",0,1]],g1=[["αζ",-1,1],["ωριοπλ",-1,1],["ασουσ",-1,1],["παρασουσ",2,1],["αλλοσουσ",-1,1],["φ",-1,1],["χ",-1,1]],w1=[["ματα",-1,1],["ματων",-1,1],["ματοσ",-1,1]],v1=[["α",-1,1],["ιουμα",0,1],["ομουνα",0,1],["ιομουνα",2,1],["οσουνα",0,1],["ιοσουνα",4,1],["ε",-1,1],["αγατε",6,1],["ηκατε",6,1],["ηθηκατε",8,1],["ησατε",6,1],["ουσατε",6,1],["ειτε",6,1],["ηθειτε",12,1],["ιεμαστε",6,1],["ουμαστε",6,1],["ιουμαστε",15,1],["ιεσαστε",6,1],["οσαστε",6,1],["ιοσαστε",18,1],["η",-1,1],["ι",-1,1],["αμαι",21,1],["ιεμαι",21,1],["ομαι",21,1],["ουμαι",21,1],["ασαι",21,1],["εσαι",21,1],["ιεσαι",27,1],["αται",21,1],["εται",21,1],["ιεται",30,1],["ονται",21,1],["ουνται",21,1],["ιουνται",33,1],["ει",21,1],["αει",35,1],["ηθει",35,1],["ησει",35,1],["οι",21,1],["αν",-1,1],["αγαν",40,1],["ηκαν",40,1],["ηθηκαν",42,1],["ησαν",40,1],["ουσαν",40,1],["οντουσαν",45,1],["ιοντουσαν",46,1],["ονταν",40,1],["ιονταν",48,1],["ουνταν",40,1],["ιουνταν",50,1],["οταν",40,1],["ιοταν",52,1],["ομασταν",40,1],["ιομασταν",54,1],["οσασταν",40,1],["ιοσασταν",56,1],["ουν",-1,1],["ηθουν",58,1],["ομουν",58,1],["ιομουν",60,1],["ησουν",58,1],["οσουν",58,1],["ιοσουν",63,1],["ων",-1,1],["ηδων",65,1],["ο",-1,1],["ασ",-1,1],["εσ",-1,1],["ηδεσ",69,1],["ησεσ",69,1],["ησ",-1,1],["εισ",-1,1],["ηθεισ",73,1],["οσ",-1,1],["υσ",-1,1],["ουσ",76,1],["υ",-1,1],["ου",78,1],["ω",-1,1],["αω",80,1],["ηθω",80,1],["ησω",80,1]],h1=[["οτερ",-1,1],["εστερ",-1,1],["υτερ",-1,1],["ωτερ",-1,1],["οτατ",-1,1],["εστατ",-1,1],["υτατ",-1,1],["ωτατ",-1,1]],H=[81,65,16,1],I=[81,65,0,1],J=!1;this.stem=function(){s.limit_backward=s.cursor,s.cursor=s.limit;var r=s.limit-s.cursor;if((()=>{for(var r;;){var i=s.limit-s.cursor;r:{switch(s.ket=s.cursor,r=s.find_among_b(e),s.bra=s.cursor,r){case 1:if(s.slice_from("α"))break;return;case 2:if(s.slice_from("β"))break;return;case 3:if(s.slice_from("γ"))break;return;case 4:if(s.slice_from("δ"))break;return;case 5:if(s.slice_from("ε"))break;return;case 6:if(s.slice_from("ζ"))break;return;case 7:if(s.slice_from("η"))break;return;case 8:if(s.slice_from("θ"))break;return;case 9:if(s.slice_from("ι"))break;return;case 10:if(s.slice_from("κ"))break;return;case 11:if(s.slice_from("λ"))break;return;case 12:if(s.slice_from("μ"))break;return;case 13:if(s.slice_from("ν"))break;return;case 14:if(s.slice_from("ξ"))break;return;case 15:if(s.slice_from("ο"))break;return;case 16:if(s.slice_from("π"))break;return;case 17:if(s.slice_from("ρ"))break;return;case 18:if(s.slice_from("σ"))break;return;case 19:if(s.slice_from("τ"))break;return;case 20:if(s.slice_from("υ"))break;return;case 21:if(s.slice_from("φ"))break;return;case 22:if(s.slice_from("χ"))break;return;case 23:if(s.slice_from("ψ"))break;return;case 24:if(s.slice_from("ω"))break;return;case 25:if(s.cursor<=s.limit_backward)break r;s.cursor--}continue}s.cursor=s.limit-i;break}})(),s.cursor=s.limit-r,!(3<=s.current.length))return!1;J=!0;var r=s.limit-s.cursor,r=((()=>{var r;if(s.ket=s.cursor,0!=(r=s.find_among_b(o))){switch(s.bra=s.cursor,r){case 1:if(s.slice_from("φα"))break;return;case 2:if(s.slice_from("σκα"))break;return;case 3:if(s.slice_from("ολο"))break;return;case 4:if(s.slice_from("σο"))break;return;case 5:if(s.slice_from("τατο"))break;return;case 6:if(s.slice_from("κρε"))break;return;case 7:if(s.slice_from("περ"))break;return;case 8:if(s.slice_from("τερ"))break;return;case 9:if(s.slice_from("φω"))break;return;case 10:if(s.slice_from("καθεστ"))break;return;case 11:if(s.slice_from("γεγον"))break;return}J=!1}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(a)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(u)))&&!(s.cursor>s.limit_backward))switch(r){case 1:if(s.slice_from("ι"))break;return;case 2:if(s.slice_from("ιζ"))break}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(_)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(t)||s.cursor>s.limit_backward||s.slice_from("ων")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.eq_s_b("ισα")&&(s.bra=s.cursor,!(s.cursor>s.limit_backward))){if(s.slice_from("ισ"))break r;return}s.cursor=s.limit-i,s.ket=s.cursor}if(0!=s.find_among_b(m)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(l)))&&!(s.cursor>s.limit_backward))switch(r){case 1:if(s.slice_from("ι"))break;return;case 2:if(s.slice_from("ισ"))break}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(b)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(f)||s.cursor>s.limit_backward||s.slice_from("ι")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(k)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(n)))&&!(s.cursor>s.limit_backward))switch(r){case 1:if(s.slice_from("ι"))break;return;case 2:if(s.slice_from("ιστ"))break}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(w)&&(s.bra=s.cursor,s.slice_del())){J=!1;var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0==(r=s.find_among_b(d))||s.cursor>s.limit_backward){if(s.cursor=s.limit-i,s.ket=s.cursor,0==(r=s.find_among_b(g)))return;switch(s.bra=s.cursor,r){case 1:if(s.slice_from("αγνωστ"))break;return;case 2:if(s.slice_from("ατομ"))break;return;case 3:if(s.slice_from("γνωστ"))break;return;case 4:if(s.slice_from("εθν"))break;return;case 5:if(s.slice_from("εκλεκτ"))break;return;case 6:if(s.slice_from("σκεπτ"))break;return;case 7:if(s.slice_from("τοπ"))break;return;case 8:if(s.slice_from("αλεξανδρ"))break;return;case 9:if(s.slice_from("βυζαντ"))break;return;case 10:if(s.slice_from("θεατρ"))break}}else switch(r){case 1:if(s.slice_from("ισμ"))break;return;case 2:if(s.slice_from("ι"))break}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(h)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(v)||s.cursor>s.limit_backward||s.slice_from("αρακ")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(p)&&(s.bra=s.cursor,s.slice_del())){J=!1;var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0==(r=s.find_among_b(q))||s.cursor>s.limit_backward){if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,!s.eq_s_b("κορ"))return;if(!s.slice_from("ιτσ"));}else switch(r){case 1:if(s.slice_from("ακ"))break;return;case 2:if(s.slice_from("ιτσ"))break}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(B)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(C)&&!(s.cursor>s.limit_backward)){if(s.slice_from("ιδ"))break r;return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(S))return;if(!s.slice_from("ιδ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(W)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(G)||s.cursor>s.limit_backward||s.slice_from("ισκ")),s.cursor=s.limit-r,s.limit-s.cursor),i=(s.ket=s.cursor,0!=s.find_among_b(j)&&(s.bra=s.cursor,s.slice_del())&&(i=s.limit-s.cursor,0==s.find_among_b(x))&&(s.cursor=s.limit-i,i=s.cursor,s.insert(s.cursor,s.cursor,"αδ"),s.cursor=i),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(y)&&(s.bra=s.cursor,s.slice_del())&&(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(z))&&s.slice_from("εδ"),s.cursor=s.limit-i,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(A)&&(s.bra=s.cursor,s.slice_del())&&(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(D))&&s.slice_from("ουδ"),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(E)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(F)||s.cursor>s.limit_backward||s.slice_from("ε")),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(K)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(H,945,969))&&s.slice_from("ι"),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(L)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(H,945,969)){if(s.slice_from("ικ"))break r;return}s.cursor=s.limit-r,s.ket=s.cursor}s.bra=s.cursor,0==s.find_among_b(M)||s.cursor>s.limit_backward||s.slice_from("ικ")}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,!s.eq_s_b("αγαμε")||(s.bra=s.cursor,s.cursor>s.limit_backward||s.slice_from("αγαμ"))){s.cursor=s.limit-r;r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(N)){if(s.bra=s.cursor,!s.slice_del())return;J=!1}s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("αμε")&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(O)||s.cursor>s.limit_backward||s.slice_from("αμ"))}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(Q)){if(s.bra=s.cursor,!s.slice_del())return;if(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(P)&&!(s.cursor>s.limit_backward||s.slice_from("αγαν")))return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("ανε")&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(I,945,969)){if(s.slice_from("αν"))break r;return}s.cursor=s.limit-i,s.ket=s.cursor}s.bra=s.cursor,0==s.find_among_b(R)||s.cursor>s.limit_backward||s.slice_from("αν")}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(T)){if(s.bra=s.cursor,!s.slice_del())return;J=!1}if(s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("ετε")&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.in_grouping_b(I,945,969)){if(s.slice_from("ετ"))break r;return}if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(U)){if(s.slice_from("ετ"))break r;return}s.cursor=s.limit-i,s.ket=s.cursor}s.bra=s.cursor,0==s.find_among_b(V)||s.cursor>s.limit_backward||s.slice_from("ετ")}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(X)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.eq_s_b("αρχ")&&!(s.cursor>s.limit_backward)){if(s.slice_from("οντ"))break r;return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.bra=s.cursor,!s.eq_s_b("κρε"))return;if(!s.slice_from("ωντ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(Y)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,!s.eq_s_b("ον")||s.cursor>s.limit_backward||s.slice_from("ομαστ")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.eq_s_b("ιεστε")){if(s.bra=s.cursor,!s.slice_del())return;if(J=!1,s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(Z)&&!(s.cursor>s.limit_backward||s.slice_from("ιεστ")))return}s.cursor=s.limit-r,s.ket=s.cursor,s.eq_s_b("εστε")&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b($)||s.cursor>s.limit_backward||s.slice_from("ιεστ"))})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r=s.limit-s.cursor;if(s.ket=s.cursor,0!=s.find_among_b(r1)){if(s.bra=s.cursor,!s.slice_del())return;J=!1}if(s.cursor=s.limit-r,s.ket=s.cursor,0!=s.find_among_b(s1)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(i1)){if(s.slice_from("ηκ"))break r;return}if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(c1))return;if(s.cursor>s.limit_backward)return;if(!s.slice_from("ηκ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{if(s.ket=s.cursor,0!=s.find_among_b(u1)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var r=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=s.find_among_b(e1)){if(s.slice_from("ουσ"))break r;return}if(s.cursor=s.limit-r,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(o1))return;if(s.cursor>s.limit_backward)return;if(!s.slice_from("ουσ"))return}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(l1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(m1)||s.cursor>s.limit_backward||s.slice_from("ησ")),s.cursor=s.limit-r,s.limit-s.cursor),r=((()=>{var r;if(s.ket=s.cursor,0!=s.find_among_b(_1)&&(s.bra=s.cursor,s.slice_del())){J=!1;r:{var i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,s.eq_s_b("κολλ")){if(s.slice_from("αγ"))break r;return}s.cursor=s.limit-i;i=s.limit-s.cursor;if(s.ket=s.cursor,s.bra=s.cursor,0!=(r=s.find_among_b(a1)))switch(r){case 1:if(s.slice_from("αγ"))break;return}else{if(s.cursor=s.limit-i,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(t1))return;if(s.cursor>s.limit_backward)return;if(!s.slice_from("αγ"))return}}}})(),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(f1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(b1)||s.cursor>s.limit_backward||s.slice_from("ηστ")),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(n1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(k1)||s.cursor>s.limit_backward||s.slice_from("ουν")),s.cursor=s.limit-r,s.limit-s.cursor),r=(s.ket=s.cursor,0!=s.find_among_b(d1)&&(s.bra=s.cursor,s.slice_del())&&(J=!1,s.ket=s.cursor,s.bra=s.cursor,0==s.find_among_b(g1)||s.cursor>s.limit_backward||s.slice_from("ουμ")),s.cursor=s.limit-r,s.limit-s.cursor),c=(c=s.limit-s.cursor,s.ket=s.cursor,0!=s.find_among_b(w1)&&(s.bra=s.cursor,!s.slice_from("μα"))||(s.cursor=s.limit-c,J&&(s.ket=s.cursor,0!=s.find_among_b(v1))&&(s.bra=s.cursor,s.slice_del())),s.cursor=s.limit-r,s.limit-s.cursor);return s.ket=s.cursor,0!=s.find_among_b(h1)&&(s.bra=s.cursor,s.slice_del()),s.cursor=s.limit-c,s.cursor=s.limit_backward,!0},this.stemWord=function(r){return s.setCurrent(r),this.stem(),s.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/hindi-stemmer.js b/sphinx/search/minified-js/hindi-stemmer.js
new file mode 100644
index 00000000000..850b0430cdd
--- /dev/null
+++ b/sphinx/search/minified-js/hindi-stemmer.js
@@ -0,0 +1 @@
+var HindiStemmer=function(){var t=new BaseStemmer,r=[["आँ",-1,-1],["ाँ",-1,-1],["इयाँ",1,-1],["आइयाँ",2,-1],["ाइयाँ",2,-1],["ियाँ",1,-1],["आं",-1,-1],["उआं",6,-1],["ुआं",6,-1],["ईं",-1,-1],["आईं",9,-1],["ाईं",9,-1],["एं",-1,-1],["आएं",12,-1],["उएं",12,-1],["ाएं",12,-1],["ताएं",15,-1,e],["अताएं",16,-1],["नाएं",15,-1,e],["अनाएं",18,-1],["ुएं",12,-1],["ओं",-1,-1],["आओं",21,-1],["उओं",21,-1],["ाओं",21,-1],["ताओं",24,-1,e],["अताओं",25,-1],["नाओं",24,-1,e],["अनाओं",27,-1],["ुओं",21,-1],["ां",-1,-1],["इयां",30,-1],["आइयां",31,-1],["ाइयां",31,-1],["ियां",30,-1],["ीं",-1,-1],["तीं",35,-1,e],["अतीं",36,-1],["आतीं",36,-1],["ातीं",36,-1],["ें",-1,-1],["ों",-1,-1],["इयों",41,-1],["आइयों",42,-1],["ाइयों",42,-1],["ियों",41,-1],["अ",-1,-1],["आ",-1,-1],["इ",-1,-1],["ई",-1,-1],["आई",49,-1],["ाई",49,-1],["उ",-1,-1],["ऊ",-1,-1],["ए",-1,-1],["आए",54,-1],["इए",54,-1],["आइए",56,-1],["ाइए",56,-1],["ाए",54,-1],["िए",54,-1],["ओ",-1,-1],["आओ",61,-1],["ाओ",61,-1],["कर",-1,-1,e],["अकर",64,-1],["आकर",64,-1],["ाकर",64,-1],["ा",-1,-1],["ऊंगा",68,-1],["आऊंगा",69,-1],["ाऊंगा",69,-1],["ूंगा",68,-1],["एगा",68,-1],["आएगा",73,-1],["ाएगा",73,-1],["ेगा",68,-1],["ता",68,-1,e],["अता",77,-1],["आता",77,-1],["ाता",77,-1],["ना",68,-1,e],["अना",81,-1],["आना",81,-1],["ाना",81,-1],["आया",68,-1],["ाया",68,-1],["ि",-1,-1],["ी",-1,-1],["ऊंगी",88,-1],["आऊंगी",89,-1],["ाऊंगी",89,-1],["एंगी",88,-1],["आएंगी",92,-1],["ाएंगी",92,-1],["ूंगी",88,-1],["ेंगी",88,-1],["एगी",88,-1],["आएगी",97,-1],["ाएगी",97,-1],["ओगी",88,-1],["आओगी",100,-1],["ाओगी",100,-1],["ेगी",88,-1],["ोगी",88,-1],["ती",88,-1,e],["अती",105,-1],["आती",105,-1],["ाती",105,-1],["नी",88,-1,e],["अनी",109,-1],["ु",-1,-1],["ू",-1,-1],["े",-1,-1],["एंगे",113,-1],["आएंगे",114,-1],["ाएंगे",114,-1],["ेंगे",113,-1],["ओगे",113,-1],["आओगे",118,-1],["ाओगे",118,-1],["ोगे",113,-1],["ते",113,-1,e],["अते",122,-1],["आते",122,-1],["ाते",122,-1],["ने",113,-1,e],["अने",126,-1],["आने",126,-1],["ाने",126,-1],["ो",-1,-1],["्",-1,-1]],i=[255,255,255,255,159,0,0,0,248,7];function e(){return!!t.in_grouping_b(i,2325,2399)}this.stem=function(){return!(t.cursor>=t.limit||(t.cursor++,t.limit_backward=t.cursor,t.cursor=t.limit,t.ket=t.cursor,0==t.find_among_b(r))||(t.bra=t.cursor,!t.slice_del())||(t.cursor=t.limit_backward,0))},this.stemWord=function(r){return t.setCurrent(r),this.stem(),t.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/hungarian-stemmer.js b/sphinx/search/minified-js/hungarian-stemmer.js
index e1fca971f79..a7f3926cacf 100644
--- a/sphinx/search/minified-js/hungarian-stemmer.js
+++ b/sphinx/search/minified-js/hungarian-stemmer.js
@@ -1 +1 @@
-HungarianStemmer=function(){var r=new BaseStemmer;var e=[["cs",-1,-1],["dzs",-1,-1],["gy",-1,-1],["ly",-1,-1],["ny",-1,-1],["sz",-1,-1],["ty",-1,-1],["zs",-1,-1]];var i=[["á",-1,1],["é",-1,2]];var a=[["bb",-1,-1],["cc",-1,-1],["dd",-1,-1],["ff",-1,-1],["gg",-1,-1],["jj",-1,-1],["kk",-1,-1],["ll",-1,-1],["mm",-1,-1],["nn",-1,-1],["pp",-1,-1],["rr",-1,-1],["ccs",-1,-1],["ss",-1,-1],["zzs",-1,-1],["tt",-1,-1],["vv",-1,-1],["ggy",-1,-1],["lly",-1,-1],["nny",-1,-1],["tty",-1,-1],["ssz",-1,-1],["zz",-1,-1]];var t=[["al",-1,1],["el",-1,1]];var s=[["ba",-1,-1],["ra",-1,-1],["be",-1,-1],["re",-1,-1],["ig",-1,-1],["nak",-1,-1],["nek",-1,-1],["val",-1,-1],["vel",-1,-1],["ul",-1,-1],["nál",-1,-1],["nél",-1,-1],["ból",-1,-1],["ról",-1,-1],["tól",-1,-1],["ül",-1,-1],["ből",-1,-1],["ről",-1,-1],["től",-1,-1],["n",-1,-1],["an",19,-1],["ban",20,-1],["en",19,-1],["ben",22,-1],["képpen",22,-1],["on",19,-1],["ön",19,-1],["képp",-1,-1],["kor",-1,-1],["t",-1,-1],["at",29,-1],["et",29,-1],["ként",29,-1],["anként",32,-1],["enként",32,-1],["onként",32,-1],["ot",29,-1],["ért",29,-1],["öt",29,-1],["hez",-1,-1],["hoz",-1,-1],["höz",-1,-1],["vá",-1,-1],["vé",-1,-1]];var u=[["án",-1,2],["én",-1,1],["ánként",-1,2]];var n=[["stul",-1,1],["astul",0,1],["ástul",0,2],["stül",-1,1],["estül",3,1],["éstül",3,3]];var f=[["á",-1,1],["é",-1,1]];var c=[["k",-1,3],["ak",0,3],["ek",0,3],["ok",0,3],["ák",0,1],["ék",0,2],["ök",0,3]];var l=[["éi",-1,1],["áéi",0,3],["ééi",0,2],["é",-1,1],["ké",3,1],["aké",4,1],["eké",4,1],["oké",4,1],["áké",4,3],["éké",4,2],["öké",4,1],["éé",3,2]];var o=[["a",-1,1],["ja",0,1],["d",-1,1],["ad",2,1],["ed",2,1],["od",2,1],["ád",2,2],["éd",2,3],["öd",2,1],["e",-1,1],["je",9,1],["nk",-1,1],["unk",11,1],["ánk",11,2],["énk",11,3],["ünk",11,1],["uk",-1,1],["juk",16,1],["ájuk",17,2],["ük",-1,1],["jük",19,1],["éjük",20,3],["m",-1,1],["am",22,1],["em",22,1],["om",22,1],["ám",22,2],["ém",22,3],["o",-1,1],["á",-1,2],["é",-1,3]];var k=[["id",-1,1],["aid",0,1],["jaid",1,1],["eid",0,1],["jeid",3,1],["áid",0,2],["éid",0,3],["i",-1,1],["ai",7,1],["jai",8,1],["ei",7,1],["jei",10,1],["ái",7,2],["éi",7,3],["itek",-1,1],["eitek",14,1],["jeitek",15,1],["éitek",14,3],["ik",-1,1],["aik",18,1],["jaik",19,1],["eik",18,1],["jeik",21,1],["áik",18,2],["éik",18,3],["ink",-1,1],["aink",25,1],["jaink",26,1],["eink",25,1],["jeink",28,1],["áink",25,2],["éink",25,3],["aitok",-1,1],["jaitok",32,1],["áitok",-1,2],["im",-1,1],["aim",35,1],["jaim",36,1],["eim",35,1],["jeim",38,1],["áim",35,2],["éim",35,3]];var m=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,36,10,0,0,0,0,0,0,0,0,0,0,1,0,0,0,1];var b=0;function _(){b=r.limit;r:{var i=r.cursor;e:{if(!r.in_grouping(m,97,369)){break e}i:while(true){var a=r.cursor;a:{if(!r.out_grouping(m,97,369)){break a}r.cursor=a;break i}r.cursor=a;if(r.cursor>=r.limit){break e}r.cursor++}i:{var t=r.cursor;a:{if(r.find_among(e)==0){break a}break i}r.cursor=t;if(r.cursor>=r.limit){break e}r.cursor++}b=r.cursor;break r}r.cursor=i;if(!r.out_grouping(m,97,369)){return false}e:while(true){i:{if(!r.in_grouping(m,97,369)){break i}break e}if(r.cursor>=r.limit){return false}r.cursor++}b=r.cursor}return true}function d(){if(!(b<=r.cursor)){return false}return true}function v(){var e;r.ket=r.cursor;e=r.find_among_b(i);if(e==0){return false}r.bra=r.cursor;if(!d()){return false}switch(e){case 1:if(!r.slice_from("a")){return false}break;case 2:if(!r.slice_from("e")){return false}break}return true}function g(){var e=r.limit-r.cursor;if(r.find_among_b(a)==0){return false}r.cursor=r.limit-e;return true}function j(){if(r.cursor<=r.limit_backward){return false}r.cursor--;r.ket=r.cursor;{var e=r.cursor-1;if(e{_=e.limit;var r=e.cursor;if(e.in_grouping(f,97,369)){var i=e.cursor;e.go_in_grouping(f,97,369)&&(e.cursor++,_=e.cursor),e.cursor=i}else{if(e.cursor=r,!e.go_out_grouping(f,97,369))return;e.cursor++,_=e.cursor}})(),e.cursor=r,e.limit_backward=e.cursor,e.cursor=e.limit,e.limit-e.cursor),r=(e.ket=e.cursor,0!=e.find_among_b(c)&&(e.bra=e.cursor,b())&&d()&&e.slice_del()&&g(),e.cursor=e.limit-r,e.limit-e.cursor),r=(j(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(t))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_from("e"))break;return;case 2:if(e.slice_from("a"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(o))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("a"))break;return;case 3:if(e.slice_from("e"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=(e.ket=e.cursor,0!=e.find_among_b(n)&&(e.bra=e.cursor,b())&&d()&&e.slice_del()&&g(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(k))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("e"))break;return;case 3:if(e.slice_from("a"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(l))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("a"))break;return;case 3:if(e.slice_from("e"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=((()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(m))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_del())break;return;case 2:if(e.slice_from("a"))break;return;case 3:if(e.slice_from("e"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor);return(()=>{var r;if(e.ket=e.cursor,0!=(r=e.find_among_b(u))&&(e.bra=e.cursor,b()))switch(r){case 1:if(e.slice_from("a"))break;return;case 2:if(e.slice_from("e"))break;return;case 3:if(e.slice_del())break}})(),e.cursor=e.limit-r,e.cursor=e.limit_backward,!0},this.stemWord=function(r){return e.setCurrent(r),this.stem(),e.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/indonesian-stemmer.js b/sphinx/search/minified-js/indonesian-stemmer.js
new file mode 100644
index 00000000000..89339d9783d
--- /dev/null
+++ b/sphinx/search/minified-js/indonesian-stemmer.js
@@ -0,0 +1 @@
+var IndonesianStemmer=function(){var s=new BaseStemmer,c=[["kah",-1,1],["lah",-1,1],["pun",-1,1]],o=[["nya",-1,1],["ku",-1,1],["mu",-1,1]],r=[["i",-1,1,function(){if(2{var r;if(s.bra=s.cursor,0!=(r=s.find_among(n))){switch(s.ket=s.cursor,r){case 1:if(!s.slice_del())return;a=1,--l;break;case 2:if(!s.slice_del())return;a=3,--l;break;case 3:if(a=1,!s.slice_from("s"))return;--l;break;case 4:if(a=3,!s.slice_from("s"))return;--l;break;case 5:a=1,--l;r:{var e=s.cursor,i=s.cursor;if(s.in_grouping(t,97,117)){if(s.cursor=i,s.slice_from("p"))break r;return}if(s.cursor=e,!s.slice_del())return}break;case 6:a=3,--l;r:{var u=s.cursor,c=s.cursor;if(s.in_grouping(t,97,117)){if(s.cursor=c,s.slice_from("p"))break r;return}if(s.cursor=u,!s.slice_del())return}}return 1}})()?(u=s.cursor,i=s.cursor,l<=2||(s.limit_backward=s.cursor,s.cursor=s.limit,f()&&(s.cursor=s.limit_backward,s.cursor=i,l<=2||m())),s.cursor=u,s.cursor=e):(s.cursor=r,i=s.cursor,m(),s.cursor=i,u=s.cursor,l<=2||(s.limit_backward=s.cursor,s.cursor=s.limit,f()&&(s.cursor=s.limit_backward)),s.cursor=u),0))},this.stemWord=function(r){return s.setCurrent(r),this.stem(),s.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/irish-stemmer.js b/sphinx/search/minified-js/irish-stemmer.js
new file mode 100644
index 00000000000..c90c92292cc
--- /dev/null
+++ b/sphinx/search/minified-js/irish-stemmer.js
@@ -0,0 +1 @@
+var IrishStemmer=function(){var i=new BaseStemmer,e=[["b'",-1,1],["bh",-1,4],["bhf",1,2],["bp",-1,8],["ch",-1,5],["d'",-1,1],["d'fh",5,2],["dh",-1,6],["dt",-1,9],["fh",-1,2],["gc",-1,5],["gh",-1,7],["h-",-1,1],["m'",-1,1],["mb",-1,4],["mh",-1,10],["n-",-1,1],["nd",-1,6],["ng",-1,7],["ph",-1,8],["sh",-1,3],["t-",-1,1],["th",-1,9],["ts",-1,3]],a=[["íochta",-1,1],["aíochta",0,1],["ire",-1,2],["aire",2,2],["abh",-1,1],["eabh",4,1],["ibh",-1,1],["aibh",6,1],["amh",-1,1],["eamh",8,1],["imh",-1,1],["aimh",10,1],["íocht",-1,1],["aíocht",12,1],["irí",-1,2],["airí",14,2]],c=[["óideacha",-1,6],["patacha",-1,5],["achta",-1,1],["arcachta",2,2],["eachta",2,1],["grafaíochta",-1,4],["paite",-1,5],["ach",-1,1],["each",7,1],["óideach",8,6],["gineach",8,3],["patach",7,5],["grafaíoch",-1,4],["pataigh",-1,5],["óidigh",-1,6],["achtúil",-1,1],["eachtúil",15,1],["gineas",-1,3],["ginis",-1,3],["acht",-1,1],["arcacht",19,2],["eacht",19,1],["grafaíocht",-1,4],["arcachtaí",-1,2],["grafaíochtaí",-1,4]],t=[["imid",-1,1],["aimid",0,1],["ímid",-1,1],["aímid",2,1],["adh",-1,2],["eadh",4,2],["faidh",-1,1],["fidh",-1,1],["áil",-1,2],["ain",-1,2],["tear",-1,2],["tar",-1,2]],s=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,4,2],o=0,u=0,f=0;function n(){return u<=i.cursor}function h(){return o<=i.cursor}function m(){var r;if(i.ket=i.cursor,0!=(r=i.find_among_b(t)))switch(i.bra=i.cursor,r){case 1:if(!(f<=i.cursor))return;if(i.slice_del())break;return;case 2:if(!n())return;if(i.slice_del())break;return}}this.stem=function(){var r=i.cursor,r=((()=>{var r;if(i.bra=i.cursor,0!=(r=i.find_among(e)))switch(i.ket=i.cursor,r){case 1:if(i.slice_del())break;return;case 2:if(i.slice_from("f"))break;return;case 3:if(i.slice_from("s"))break;return;case 4:if(i.slice_from("b"))break;return;case 5:if(i.slice_from("c"))break;return;case 6:if(i.slice_from("d"))break;return;case 7:if(i.slice_from("g"))break;return;case 8:if(i.slice_from("p"))break;return;case 9:if(i.slice_from("t"))break;return;case 10:if(i.slice_from("m"))break}})(),i.cursor=r,f=i.limit,u=i.limit,o=i.limit,r=i.cursor,i.go_out_grouping(s,97,250)&&(i.cursor++,f=i.cursor,i.go_in_grouping(s,97,250))&&(i.cursor++,u=i.cursor,i.go_out_grouping(s,97,250))&&(i.cursor++,i.go_in_grouping(s,97,250))&&(i.cursor++,o=i.cursor),i.cursor=r,i.limit_backward=i.cursor,i.cursor=i.limit,i.limit-i.cursor),r=((()=>{var r;if(i.ket=i.cursor,0!=(r=i.find_among_b(a)))switch(i.bra=i.cursor,r){case 1:if(!n())return;if(i.slice_del())break;return;case 2:if(!h())return;if(i.slice_del())break}})(),i.cursor=i.limit-r,i.limit-i.cursor),r=((()=>{var r;if(i.ket=i.cursor,0!=(r=i.find_among_b(c)))switch(i.bra=i.cursor,r){case 1:if(!h())return;if(i.slice_del())break;return;case 2:if(i.slice_from("arc"))break;return;case 3:if(i.slice_from("gin"))break;return;case 4:if(i.slice_from("graf"))break;return;case 5:if(i.slice_from("paite"))break;return;case 6:if(i.slice_from("óid"))break}})(),i.cursor=i.limit-r,i.limit-i.cursor);return m(),i.cursor=i.limit-r,i.cursor=i.limit_backward,!0},this.stemWord=function(r){return i.setCurrent(r),this.stem(),i.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/italian-stemmer.js b/sphinx/search/minified-js/italian-stemmer.js
index a3a5c4265e4..ac46b1d415e 100644
--- a/sphinx/search/minified-js/italian-stemmer.js
+++ b/sphinx/search/minified-js/italian-stemmer.js
@@ -1 +1 @@
-ItalianStemmer=function(){var r=new BaseStemmer;var e=[["",-1,7],["qu",0,6],["á",0,1],["é",0,2],["í",0,3],["ó",0,4],["ú",0,5]];var i=[["",-1,3],["I",0,1],["U",0,2]];var a=[["la",-1,-1],["cela",0,-1],["gliela",0,-1],["mela",0,-1],["tela",0,-1],["vela",0,-1],["le",-1,-1],["cele",6,-1],["gliele",6,-1],["mele",6,-1],["tele",6,-1],["vele",6,-1],["ne",-1,-1],["cene",12,-1],["gliene",12,-1],["mene",12,-1],["sene",12,-1],["tene",12,-1],["vene",12,-1],["ci",-1,-1],["li",-1,-1],["celi",20,-1],["glieli",20,-1],["meli",20,-1],["teli",20,-1],["veli",20,-1],["gli",20,-1],["mi",-1,-1],["si",-1,-1],["ti",-1,-1],["vi",-1,-1],["lo",-1,-1],["celo",31,-1],["glielo",31,-1],["melo",31,-1],["telo",31,-1],["velo",31,-1]];var s=[["ando",-1,1],["endo",-1,1],["ar",-1,2],["er",-1,2],["ir",-1,2]];var o=[["ic",-1,-1],["abil",-1,-1],["os",-1,-1],["iv",-1,1]];var u=[["ic",-1,1],["abil",-1,1],["iv",-1,1]];var t=[["ica",-1,1],["logia",-1,3],["osa",-1,1],["ista",-1,1],["iva",-1,9],["anza",-1,1],["enza",-1,5],["ice",-1,1],["atrice",7,1],["iche",-1,1],["logie",-1,3],["abile",-1,1],["ibile",-1,1],["usione",-1,4],["azione",-1,2],["uzione",-1,4],["atore",-1,2],["ose",-1,1],["ante",-1,1],["mente",-1,1],["amente",19,7],["iste",-1,1],["ive",-1,9],["anze",-1,1],["enze",-1,5],["ici",-1,1],["atrici",25,1],["ichi",-1,1],["abili",-1,1],["ibili",-1,1],["ismi",-1,1],["usioni",-1,4],["azioni",-1,2],["uzioni",-1,4],["atori",-1,2],["osi",-1,1],["anti",-1,1],["amenti",-1,6],["imenti",-1,6],["isti",-1,1],["ivi",-1,9],["ico",-1,1],["ismo",-1,1],["oso",-1,1],["amento",-1,6],["imento",-1,6],["ivo",-1,9],["ità",-1,8],["istà",-1,1],["istè",-1,1],["istì",-1,1]];var c=[["isca",-1,1],["enda",-1,1],["ata",-1,1],["ita",-1,1],["uta",-1,1],["ava",-1,1],["eva",-1,1],["iva",-1,1],["erebbe",-1,1],["irebbe",-1,1],["isce",-1,1],["ende",-1,1],["are",-1,1],["ere",-1,1],["ire",-1,1],["asse",-1,1],["ate",-1,1],["avate",16,1],["evate",16,1],["ivate",16,1],["ete",-1,1],["erete",20,1],["irete",20,1],["ite",-1,1],["ereste",-1,1],["ireste",-1,1],["ute",-1,1],["erai",-1,1],["irai",-1,1],["isci",-1,1],["endi",-1,1],["erei",-1,1],["irei",-1,1],["assi",-1,1],["ati",-1,1],["iti",-1,1],["eresti",-1,1],["iresti",-1,1],["uti",-1,1],["avi",-1,1],["evi",-1,1],["ivi",-1,1],["isco",-1,1],["ando",-1,1],["endo",-1,1],["Yamo",-1,1],["iamo",-1,1],["avamo",-1,1],["evamo",-1,1],["ivamo",-1,1],["eremo",-1,1],["iremo",-1,1],["assimo",-1,1],["ammo",-1,1],["emmo",-1,1],["eremmo",54,1],["iremmo",54,1],["immo",-1,1],["ano",-1,1],["iscano",58,1],["avano",58,1],["evano",58,1],["ivano",58,1],["eranno",-1,1],["iranno",-1,1],["ono",-1,1],["iscono",65,1],["arono",65,1],["erono",65,1],["irono",65,1],["erebbero",-1,1],["irebbero",-1,1],["assero",-1,1],["essero",-1,1],["issero",-1,1],["ato",-1,1],["ito",-1,1],["uto",-1,1],["avo",-1,1],["evo",-1,1],["ivo",-1,1],["ar",-1,1],["ir",-1,1],["erà",-1,1],["irà",-1,1],["erò",-1,1],["irò",-1,1]];var l=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2,1];var n=[17,65,0,0,0,0,0,0,0,0,0,0,0,0,0,128,128,8,2];var f=[17];var b=0;var m=0;var k=0;function _(){var i;var a=r.cursor;while(true){var s=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("à")){return false}break;case 2:if(!r.slice_from("è")){return false}break;case 3:if(!r.slice_from("ì")){return false}break;case 4:if(!r.slice_from("ò")){return false}break;case 5:if(!r.slice_from("ù")){return false}break;case 6:if(!r.slice_from("qU")){return false}break;case 7:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}r.cursor=a;while(true){var o=r.cursor;r:{e:while(true){var u=r.cursor;i:{if(!r.in_grouping(l,97,249)){break i}r.bra=r.cursor;a:{var t=r.cursor;s:{if(!r.eq_s("u")){break s}r.ket=r.cursor;if(!r.in_grouping(l,97,249)){break s}if(!r.slice_from("U")){return false}break a}r.cursor=t;if(!r.eq_s("i")){break i}r.ket=r.cursor;if(!r.in_grouping(l,97,249)){break i}if(!r.slice_from("I")){return false}}r.cursor=u;break e}r.cursor=u;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=o;break}return true}function v(){k=r.limit;m=r.limit;b=r.limit;var e=r.cursor;r:{e:{var i=r.cursor;i:{if(!r.in_grouping(l,97,249)){break i}a:{var a=r.cursor;s:{if(!r.out_grouping(l,97,249)){break s}o:while(true){u:{if(!r.in_grouping(l,97,249)){break u}break o}if(r.cursor>=r.limit){break s}r.cursor++}break a}r.cursor=a;if(!r.in_grouping(l,97,249)){break i}s:while(true){o:{if(!r.out_grouping(l,97,249)){break o}break s}if(r.cursor>=r.limit){break i}r.cursor++}}break e}r.cursor=i;if(!r.out_grouping(l,97,249)){break r}i:{var s=r.cursor;a:{if(!r.out_grouping(l,97,249)){break a}s:while(true){o:{if(!r.in_grouping(l,97,249)){break o}break s}if(r.cursor>=r.limit){break a}r.cursor++}break i}r.cursor=s;if(!r.in_grouping(l,97,249)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}k=r.cursor}r.cursor=e;var o=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}m=r.cursor;e:while(true){i:{if(!r.in_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,249)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}b=r.cursor}r.cursor=o;return true}function g(){var e;while(true){var a=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("i")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=a;break}return true}function d(){if(!(k<=r.cursor)){return false}return true}function w(){if(!(m<=r.cursor)){return false}return true}function h(){if(!(b<=r.cursor)){return false}return true}function p(){var e;r.ket=r.cursor;if(r.find_among_b(a)==0){return false}r.bra=r.cursor;e=r.find_among_b(s);if(e==0){return false}if(!d()){return false}switch(e){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.slice_from("e")){return false}break}return true}function q(){var e;r.ket=r.cursor;e=r.find_among_b(t);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!h()){return false}if(!r.slice_del()){return false}break;case 2:if(!h()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}}break;case 3:if(!h()){return false}if(!r.slice_from("log")){return false}break;case 4:if(!h()){return false}if(!r.slice_from("u")){return false}break;case 5:if(!h()){return false}if(!r.slice_from("ente")){return false}break;case 6:if(!d()){return false}if(!r.slice_del()){return false}break;case 7:if(!w()){return false}if(!r.slice_del()){return false}var a=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(o);if(e==0){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}switch(e){case 1:r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}break}}break;case 8:if(!h()){return false}if(!r.slice_del()){return false}var s=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(u)==0){r.cursor=r.limit-s;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-s;break r}if(!r.slice_del()){return false}}break;case 9:if(!h()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!h()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}}break}return true}function z(){if(r.cursor{for(var r,i=u.cursor;;){var e=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(a),u.ket=u.cursor,r){case 1:if(u.slice_from("à"))break;return;case 2:if(u.slice_from("è"))break;return;case 3:if(u.slice_from("ì"))break;return;case 4:if(u.slice_from("ò"))break;return;case 5:if(u.slice_from("ù"))break;return;case 6:if(u.slice_from("qU"))break;return;case 7:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=e;break}for(u.cursor=i;;){var o=u.cursor;r:{for(;;){var s=u.cursor;i:if(u.in_grouping(m,97,249)){u.bra=u.cursor;e:{var c=u.cursor;if(u.eq_s("u")&&(u.ket=u.cursor,u.in_grouping(m,97,249))){if(u.slice_from("U"))break e;return}if(u.cursor=c,!u.eq_s("i"))break i;if(u.ket=u.cursor,!u.in_grouping(m,97,249))break i;if(!u.slice_from("I"))return}u.cursor=s;break}if(u.cursor=s,u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=o;break}})(),u.cursor=r,v=u.limit,k=u.limit,g=u.limit,u.cursor);r:{i:{var i=u.cursor;e:if(u.in_grouping(m,97,249)){var e=u.cursor;if(!u.out_grouping(m,97,249)||!u.go_out_grouping(m,97,249)){if(u.cursor=e,!u.in_grouping(m,97,249))break e;if(!u.go_in_grouping(m,97,249))break e}u.cursor++;break i}if(u.cursor=i,!u.eq_s("divan")){if(u.cursor=i,!u.out_grouping(m,97,249))break r;e=u.cursor;if(!u.out_grouping(m,97,249)||!u.go_out_grouping(m,97,249)){if(u.cursor=e,!u.in_grouping(m,97,249))break r;if(u.cursor>=u.limit)break r}u.cursor++}}v=u.cursor}u.cursor=r,r=u.cursor,u.go_out_grouping(m,97,249)&&(u.cursor++,u.go_in_grouping(m,97,249))&&(u.cursor++,k=u.cursor,u.go_out_grouping(m,97,249))&&(u.cursor++,u.go_in_grouping(m,97,249))&&(u.cursor++,g=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit;var r=u.limit-u.cursor,r=((()=>{var r;if(u.ket=u.cursor,0!=u.find_among_b(c)&&(u.bra=u.cursor,0!=(r=u.find_among_b(t)))&&d())switch(r){case 1:if(u.slice_del())break;return;case 2:if(u.slice_from("e"))break}})(),u.cursor=u.limit-r,u.limit-u.cursor),o=u.limit-u.cursor,o=(w()||(u.cursor=u.limit-o,(()=>{if(!(u.cursor{var r=u.limit-u.cursor;if(u.ket=u.cursor,u.in_grouping_b(_,97,242))if(u.bra=u.cursor,d()){if(!u.slice_del())return;if(u.ket=u.cursor,u.eq_s_b("i"))if(u.bra=u.cursor,d()){if(!u.slice_del())return}else u.cursor=u.limit-r;else u.cursor=u.limit-r}else u.cursor=u.limit-r;else u.cursor=u.limit-r;if(r=u.limit-u.cursor,u.ket=u.cursor,u.eq_s_b("h"))if(u.bra=u.cursor,u.in_grouping_b(b,99,103))if(d()){if(!u.slice_del());}else u.cursor=u.limit-r;else u.cursor=u.limit-r;else u.cursor=u.limit-r})(),u.cursor=u.limit-o,u.cursor=u.limit_backward,u.cursor);return(()=>{for(var r;;){var i=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(s),u.ket=u.cursor,r){case 1:if(u.slice_from("i"))break;return;case 2:if(u.slice_from("u"))break;return;case 3:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=i;break}})(),u.cursor=r,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/lithuanian-stemmer.js b/sphinx/search/minified-js/lithuanian-stemmer.js
new file mode 100644
index 00000000000..6d48ddac94c
--- /dev/null
+++ b/sphinx/search/minified-js/lithuanian-stemmer.js
@@ -0,0 +1 @@
+var LithuanianStemmer=function(){var e=new BaseStemmer,t=[["a",-1,-1],["ia",0,-1],["eria",1,-1],["osna",0,-1],["iosna",3,-1],["uosna",3,-1],["iuosna",5,-1],["ysna",0,-1],["ėsna",0,-1],["e",-1,-1],["ie",9,-1],["enie",10,-1],["erie",10,-1],["oje",9,-1],["ioje",13,-1],["uje",9,-1],["iuje",15,-1],["yje",9,-1],["enyje",17,-1],["eryje",17,-1],["ėje",9,-1],["ame",9,-1],["iame",21,-1],["sime",9,-1],["ome",9,-1],["ėme",9,-1],["tumėme",25,-1],["ose",9,-1],["iose",27,-1],["uose",27,-1],["iuose",29,-1],["yse",9,-1],["enyse",31,-1],["eryse",31,-1],["ėse",9,-1],["ate",9,-1],["iate",35,-1],["ite",9,-1],["kite",37,-1],["site",37,-1],["ote",9,-1],["tute",9,-1],["ėte",9,-1],["tumėte",42,-1],["i",-1,-1],["ai",44,-1],["iai",45,-1],["eriai",46,-1],["ei",44,-1],["tumei",48,-1],["ki",44,-1],["imi",44,-1],["erimi",51,-1],["umi",44,-1],["iumi",53,-1],["si",44,-1],["asi",55,-1],["iasi",56,-1],["esi",55,-1],["iesi",58,-1],["siesi",59,-1],["isi",55,-1],["aisi",61,-1],["eisi",61,-1],["tumeisi",63,-1],["uisi",61,-1],["osi",55,-1],["ėjosi",66,-1],["uosi",66,-1],["iuosi",68,-1],["siuosi",69,-1],["usi",55,-1],["ausi",71,-1],["čiausi",72,-1],["ąsi",55,-1],["ėsi",55,-1],["ųsi",55,-1],["tųsi",76,-1],["ti",44,-1],["enti",78,-1],["inti",78,-1],["oti",78,-1],["ioti",81,-1],["uoti",81,-1],["iuoti",83,-1],["auti",78,-1],["iauti",85,-1],["yti",78,-1],["ėti",78,-1],["telėti",88,-1],["inėti",88,-1],["terėti",88,-1],["ui",44,-1],["iui",92,-1],["eniui",93,-1],["oj",-1,-1],["ėj",-1,-1],["k",-1,-1],["am",-1,-1],["iam",98,-1],["iem",-1,-1],["im",-1,-1],["sim",101,-1],["om",-1,-1],["tum",-1,-1],["ėm",-1,-1],["tumėm",105,-1],["an",-1,-1],["on",-1,-1],["ion",108,-1],["un",-1,-1],["iun",110,-1],["ėn",-1,-1],["o",-1,-1],["io",113,-1],["enio",114,-1],["ėjo",113,-1],["uo",113,-1],["s",-1,-1],["as",118,-1],["ias",119,-1],["es",118,-1],["ies",121,-1],["is",118,-1],["ais",123,-1],["iais",124,-1],["tumeis",123,-1],["imis",123,-1],["enimis",127,-1],["omis",123,-1],["iomis",129,-1],["umis",123,-1],["ėmis",123,-1],["enis",123,-1],["asis",123,-1],["ysis",123,-1],["ams",118,-1],["iams",136,-1],["iems",118,-1],["ims",118,-1],["enims",139,-1],["erims",139,-1],["oms",118,-1],["ioms",142,-1],["ums",118,-1],["ėms",118,-1],["ens",118,-1],["os",118,-1],["ios",147,-1],["uos",147,-1],["iuos",149,-1],["ers",118,-1],["us",118,-1],["aus",152,-1],["iaus",153,-1],["ius",152,-1],["ys",118,-1],["enys",156,-1],["erys",156,-1],["ąs",118,-1],["iąs",159,-1],["ės",118,-1],["amės",161,-1],["iamės",162,-1],["imės",161,-1],["kimės",164,-1],["simės",164,-1],["omės",161,-1],["ėmės",161,-1],["tumėmės",168,-1],["atės",161,-1],["iatės",170,-1],["sitės",161,-1],["otės",161,-1],["ėtės",161,-1],["tumėtės",174,-1],["įs",118,-1],["ūs",118,-1],["tųs",118,-1],["at",-1,-1],["iat",179,-1],["it",-1,-1],["sit",181,-1],["ot",-1,-1],["ėt",-1,-1],["tumėt",184,-1],["u",-1,-1],["au",186,-1],["iau",187,-1],["čiau",188,-1],["iu",186,-1],["eniu",190,-1],["siu",190,-1],["y",-1,-1],["ą",-1,-1],["ią",194,-1],["ė",-1,-1],["ę",-1,-1],["į",-1,-1],["enį",198,-1],["erį",198,-1],["ų",-1,-1],["ių",201,-1],["erų",201,-1]],a=[["ing",-1,-1],["aj",-1,-1],["iaj",1,-1],["iej",-1,-1],["oj",-1,-1],["ioj",4,-1],["uoj",4,-1],["iuoj",6,-1],["auj",-1,-1],["ąj",-1,-1],["iąj",9,-1],["ėj",-1,-1],["ųj",-1,-1],["iųj",12,-1],["ok",-1,-1],["iok",14,-1],["iuk",-1,-1],["uliuk",16,-1],["učiuk",16,-1],["išk",-1,-1],["iul",-1,-1],["yl",-1,-1],["ėl",-1,-1],["am",-1,-1],["dam",23,-1],["jam",23,-1],["zgan",-1,-1],["ain",-1,-1],["esn",-1,-1],["op",-1,-1],["iop",29,-1],["ias",-1,-1],["ies",-1,-1],["ais",-1,-1],["iais",33,-1],["os",-1,-1],["ios",35,-1],["uos",35,-1],["iuos",37,-1],["aus",-1,-1],["iaus",39,-1],["ąs",-1,-1],["iąs",41,-1],["ęs",-1,-1],["utėait",-1,-1],["ant",-1,-1],["iant",45,-1],["siant",46,-1],["int",-1,-1],["ot",-1,-1],["uot",49,-1],["iuot",50,-1],["yt",-1,-1],["ėt",-1,-1],["ykšt",-1,-1],["iau",-1,-1],["dav",-1,-1],["sv",-1,-1],["šv",-1,-1],["ykšč",-1,-1],["ę",-1,-1],["ėję",60,-1]],u=[["ojime",-1,7],["ėjime",-1,3],["avime",-1,6],["okate",-1,8],["aite",-1,1],["uote",-1,2],["asius",-1,5],["okatės",-1,8],["aitės",-1,1],["uotės",-1,2],["esiu",-1,4]],s=[["č",-1,1],["dž",-1,2]],o=[["gd",-1,1]],m=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,16,0,64,1,0,64,0,0,0,0,0,0,0,4,4],c=0;function n(){var i;if(e.ket=e.cursor,0!=(i=e.find_among_b(s)))switch(e.bra=e.cursor,i){case 1:if(e.slice_from("t"))break;return;case 2:if(e.slice_from("d"))break;return}}this.stem=function(){c=e.limit;var i=e.cursor,s=e.cursor,r=e.cursor,r=(e.eq_s("a")?(e.cursor=r,e.current.length<=6||e.cursor>=e.limit?e.cursor=s:e.cursor++):e.cursor=s,e.go_out_grouping(m,97,371)&&(e.cursor++,e.go_in_grouping(m,97,371))&&(e.cursor++,c=e.cursor),e.cursor=i,e.limit_backward=e.cursor,e.cursor=e.limit,e.limit-e.cursor),s=((()=>{var i;if(e.ket=e.cursor,0!=(i=e.find_among_b(u)))switch(e.bra=e.cursor,i){case 1:if(e.slice_from("aitė"))break;return;case 2:if(e.slice_from("uotė"))break;return;case 3:if(e.slice_from("ėjimas"))break;return;case 4:if(e.slice_from("esys"))break;return;case 5:if(e.slice_from("asys"))break;return;case 6:if(e.slice_from("avimas"))break;return;case 7:if(e.slice_from("ojimas"))break;return;case 8:if(e.slice_from("okatė"))break}})(),e.cursor=e.limit-r,e.limit-e.cursor),r=(e.cursor{for(;;){var i=e.limit-e.cursor;if(!(e.cursor{var r;if(s.ket=s.cursor,0!=(r=s.find_among_b(t)))switch(s.bra=s.cursor,r){case 1:if(s.slice_del())break;return;case 2:var i=s.limit-s.cursor;if(s.eq_s_b("ए")||(s.cursor=s.limit-i,s.eq_s_b("े"))||(s.cursor=s.limit-i,s.slice_del()))break}})(),s.cursor=s.limit-r;;){var i=s.limit-s.cursor,e=s.limit-s.cursor;if((()=>{var r;if(s.ket=s.cursor,0!=(r=s.find_among_b(c)))switch(s.bra=s.cursor,r){case 1:var i=s.limit-s.cursor;if(!s.eq_s_b("यौ")&&(s.cursor=s.limit-i,!s.eq_s_b("छौ")&&(s.cursor=s.limit-i,!s.eq_s_b("नौ"))&&(s.cursor=s.limit-i,!s.eq_s_b("थे"))))return;if(s.slice_del())break;return;case 2:if(!s.eq_s_b("त्र"))return;if(s.slice_del())break}})(),s.cursor=s.limit-e,s.ket=s.cursor,0==s.find_among_b(u)||(s.bra=s.cursor,!s.slice_del())){s.cursor=s.limit-i;break}}return s.cursor=s.limit_backward,!0},this.stemWord=function(r){return s.setCurrent(r),this.stem(),s.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/norwegian-stemmer.js b/sphinx/search/minified-js/norwegian-stemmer.js
index c8ec76cc1ca..5cf580e3ed8 100644
--- a/sphinx/search/minified-js/norwegian-stemmer.js
+++ b/sphinx/search/minified-js/norwegian-stemmer.js
@@ -1 +1 @@
-NorwegianStemmer=function(){var r=new BaseStemmer;var e=[["a",-1,1],["e",-1,1],["ede",1,1],["ande",1,1],["ende",1,1],["ane",1,1],["ene",1,1],["hetene",6,1],["erte",1,3],["en",-1,1],["heten",9,1],["ar",-1,1],["er",-1,1],["heter",12,1],["s",-1,2],["as",14,1],["es",14,1],["edes",16,1],["endes",16,1],["enes",16,1],["hetenes",19,1],["ens",14,1],["hetens",21,1],["ers",14,1],["ets",14,1],["et",-1,1],["het",25,1],["ert",-1,3],["ast",-1,1]];var i=[["dt",-1,-1],["vt",-1,-1]];var t=[["leg",-1,1],["eleg",0,1],["ig",-1,1],["eig",2,1],["lig",2,1],["elig",4,1],["els",-1,1],["lov",-1,1],["elov",7,1],["slov",7,1],["hetslov",9,1]];var a=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,48,0,128];var s=[119,125,149,1];var u=0;var c=0;function l(){c=r.limit;var e=r.cursor;{var i=r.cursor+3;if(i>r.limit){return false}r.cursor=i}u=r.cursor;r.cursor=e;r:while(true){var t=r.cursor;e:{if(!r.in_grouping(a,97,248)){break e}r.cursor=t;break r}r.cursor=t;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(a,97,248)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}c=r.cursor;r:{if(!(ct.limit||(t.cursor=i,m=t.cursor,t.cursor=e,t.go_out_grouping(u,97,248)&&(t.cursor++,t.go_in_grouping(u,97,248))&&(t.cursor++,n=t.cursor,m<=n||(n=m))),t.cursor=r,t.limit_backward=t.cursor,t.cursor=t.limit,t.limit-t.cursor),e=((()=>{var r;if(!(t.cursor=r.limit){break e}r.cursor++}if(!r.slice_from("Y")){return false}l=true;continue}r.cursor=s;break}}r.cursor=i;n=r.limit;o=r.limit;var u=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}n=r.cursor;e:while(true){i:{if(!r.in_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(c,97,121)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}o=r.cursor}r.cursor=u;r.limit_backward=r.cursor;r.cursor=r.limit;var t=r.limit-r.cursor;k();r.cursor=r.limit-t;var f=r.limit-r.cursor;v();r.cursor=r.limit-f;var b=r.limit-r.cursor;g();r.cursor=r.limit-b;var m=r.limit-r.cursor;d();r.cursor=r.limit-m;var _=r.limit-r.cursor;w();r.cursor=r.limit-_;var z=r.limit-r.cursor;h();r.cursor=r.limit-z;var y=r.limit-r.cursor;p();r.cursor=r.limit-y;var Y=r.limit-r.cursor;q();r.cursor=r.limit-Y;r.cursor=r.limit_backward;var C=r.cursor;r:{if(!l){break r}while(true){var S=r.cursor;e:{i:while(true){var B=r.cursor;s:{r.bra=r.cursor;if(!r.eq_s("Y")){break s}r.ket=r.cursor;r.cursor=B;break i}r.cursor=B;if(r.cursor>=r.limit){break e}r.cursor++}if(!r.slice_from("y")){return false}continue}r.cursor=S;break}}r.cursor=C;return true};this["stemWord"]=function(e){r.setCurrent(e);this.stem();return r.getCurrent()}};
\ No newline at end of file
+var PorterStemmer=function(){var u=new BaseStemmer,t=[["s",-1,3],["ies",0,2],["sses",0,1],["ss",0,-1]],a=[["",-1,3],["bb",0,2],["dd",0,2],["ff",0,2],["gg",0,2],["bl",0,1],["mm",0,2],["nn",0,2],["pp",0,2],["rr",0,2],["at",0,1],["tt",0,2],["iz",0,1]],n=[["ed",-1,2],["eed",0,1],["ing",-1,2]],l=[["anci",-1,3],["enci",-1,2],["abli",-1,4],["eli",-1,6],["alli",-1,9],["ousli",-1,11],["entli",-1,5],["aliti",-1,9],["biliti",-1,13],["iviti",-1,12],["tional",-1,1],["ational",10,8],["alism",-1,9],["ation",-1,8],["ization",13,7],["izer",-1,7],["ator",-1,8],["iveness",-1,12],["fulness",-1,10],["ousness",-1,11]],f=[["icate",-1,2],["ative",-1,3],["alize",-1,1],["iciti",-1,2],["ical",-1,2],["ful",-1,3],["ness",-1,3]],_=[["ic",-1,1],["ance",-1,1],["ence",-1,1],["able",-1,1],["ible",-1,1],["ate",-1,1],["ive",-1,1],["ize",-1,1],["iti",-1,1],["al",-1,1],["ism",-1,1],["ion",-1,2],["er",-1,1],["ous",-1,1],["ant",-1,1],["ent",-1,1],["ment",15,1],["ement",16,1],["ou",-1,1]],m=[17,65,16,1],r=[1,17,65,208,1],b=!1,k=0,g=0;function d(){return u.out_grouping_b(r,89,121)&&u.in_grouping_b(m,97,121)&&!!u.out_grouping_b(m,97,121)}function v(){return g<=u.cursor}function p(){return k<=u.cursor}this.stem=function(){b=!1;var r=u.cursor;if(u.bra=u.cursor,u.eq_s("y")){if(u.ket=u.cursor,!u.slice_from("Y"))return!1;b=!0}u.cursor=r;for(r=u.cursor;;){var i=u.cursor;r:{for(;;){var e=u.cursor;if(u.in_grouping(m,97,121)&&(u.bra=u.cursor,u.eq_s("y"))){u.ket=u.cursor,u.cursor=e;break}if(u.cursor=e,u.cursor>=u.limit)break r;u.cursor++}if(!u.slice_from("Y"))return!1;b=!0;continue}u.cursor=i;break}u.cursor=r,g=u.limit,k=u.limit;var r=u.cursor,r=(u.go_out_grouping(m,97,121)&&(u.cursor++,u.go_in_grouping(m,97,121))&&(u.cursor++,g=u.cursor,u.go_out_grouping(m,97,121))&&(u.cursor++,u.go_in_grouping(m,97,121))&&(u.cursor++,k=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit,u.limit-u.cursor),r=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(t)))switch(u.bra=u.cursor,r){case 1:if(u.slice_from("ss"))break;return;case 2:if(u.slice_from("i"))break;return;case 3:if(u.slice_del())break}})(),u.cursor=u.limit-r,u.limit-u.cursor),r=((()=>{if(u.ket=u.cursor,0!=(i=u.find_among_b(n)))switch(u.bra=u.cursor,i){case 1:if(!v())return;if(u.slice_from("ee"))break;return;case 2:var r=u.limit-u.cursor;if(!u.go_out_grouping_b(m,97,121))return;if(u.cursor--,u.cursor=u.limit-r,!u.slice_del())return;var r=u.limit-u.cursor,i=u.find_among_b(a);switch(u.cursor=u.limit-r,i){case 1:var e=u.cursor;u.insert(u.cursor,u.cursor,"e"),u.cursor=e;break;case 2:if(u.ket=u.cursor,u.cursor<=u.limit_backward)return;if(u.cursor--,u.bra=u.cursor,u.slice_del())break;return;case 3:if(u.cursor!=g)return;e=u.limit-u.cursor;if(!d())return;u.cursor=u.limit-e;e=u.cursor;u.insert(u.cursor,u.cursor,"e"),u.cursor=e}}})(),u.cursor=u.limit-r,u.limit-u.cursor),s=(u.ket=u.cursor,s=u.limit-u.cursor,(u.eq_s_b("y")||(u.cursor=u.limit-s,u.eq_s_b("Y")))&&(u.bra=u.cursor,u.go_out_grouping_b(m,97,121))&&(u.cursor--,u.slice_from("i")),u.cursor=u.limit-r,u.limit-u.cursor),r=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(l))&&(u.bra=u.cursor,v()))switch(r){case 1:if(u.slice_from("tion"))break;return;case 2:if(u.slice_from("ence"))break;return;case 3:if(u.slice_from("ance"))break;return;case 4:if(u.slice_from("able"))break;return;case 5:if(u.slice_from("ent"))break;return;case 6:if(u.slice_from("e"))break;return;case 7:if(u.slice_from("ize"))break;return;case 8:if(u.slice_from("ate"))break;return;case 9:if(u.slice_from("al"))break;return;case 10:if(u.slice_from("ful"))break;return;case 11:if(u.slice_from("ous"))break;return;case 12:if(u.slice_from("ive"))break;return;case 13:if(u.slice_from("ble"))break}})(),u.cursor=u.limit-s,u.limit-u.cursor),s=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(f))&&(u.bra=u.cursor,v()))switch(r){case 1:if(u.slice_from("al"))break;return;case 2:if(u.slice_from("ic"))break;return;case 3:if(u.slice_del())break}})(),u.cursor=u.limit-r,u.limit-u.cursor),r=((()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(_))&&(u.bra=u.cursor,p()))switch(r){case 1:if(u.slice_del())break;return;case 2:var i=u.limit-u.cursor;if(!u.eq_s_b("s")&&(u.cursor=u.limit-i,!u.eq_s_b("t")))return;if(u.slice_del())break}})(),u.cursor=u.limit-s,u.limit-u.cursor),s=((()=>{if(u.ket=u.cursor,u.eq_s_b("e")){if(u.bra=u.cursor,!p()){if(!v())return;var r=u.limit-u.cursor;if(d())return;u.cursor=u.limit-r}u.slice_del()}})(),u.cursor=u.limit-r,u.limit-u.cursor),r=(u.ket=u.cursor,u.eq_s_b("l")&&(u.bra=u.cursor,p())&&u.eq_s_b("l")&&u.slice_del(),u.cursor=u.limit-s,u.cursor=u.limit_backward,u.cursor);if(b)for(;;){var c=u.cursor;r:{for(;;){var o=u.cursor;if(u.bra=u.cursor,u.eq_s("Y")){u.ket=u.cursor,u.cursor=o;break}if(u.cursor=o,u.cursor>=u.limit)break r;u.cursor++}if(u.slice_from("y"))continue;return!1}u.cursor=c;break}return u.cursor=r,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/portuguese-stemmer.js b/sphinx/search/minified-js/portuguese-stemmer.js
index 022d860e6b3..9cc42155d49 100644
--- a/sphinx/search/minified-js/portuguese-stemmer.js
+++ b/sphinx/search/minified-js/portuguese-stemmer.js
@@ -1 +1 @@
-PortugueseStemmer=function(){var r=new BaseStemmer;var e=[["",-1,3],["ã",0,1],["õ",0,2]];var i=[["",-1,3],["a~",0,1],["o~",0,2]];var s=[["ic",-1,-1],["ad",-1,-1],["os",-1,-1],["iv",-1,1]];var a=[["ante",-1,1],["avel",-1,1],["ível",-1,1]];var u=[["ic",-1,1],["abil",-1,1],["iv",-1,1]];var o=[["ica",-1,1],["ância",-1,1],["ência",-1,4],["logia",-1,2],["ira",-1,9],["adora",-1,1],["osa",-1,1],["ista",-1,1],["iva",-1,8],["eza",-1,1],["idade",-1,7],["ante",-1,1],["mente",-1,6],["amente",12,5],["ável",-1,1],["ível",-1,1],["ico",-1,1],["ismo",-1,1],["oso",-1,1],["amento",-1,1],["imento",-1,1],["ivo",-1,8],["aça~o",-1,1],["uça~o",-1,3],["ador",-1,1],["icas",-1,1],["ências",-1,4],["logias",-1,2],["iras",-1,9],["adoras",-1,1],["osas",-1,1],["istas",-1,1],["ivas",-1,8],["ezas",-1,1],["idades",-1,7],["adores",-1,1],["antes",-1,1],["aço~es",-1,1],["uço~es",-1,3],["icos",-1,1],["ismos",-1,1],["osos",-1,1],["amentos",-1,1],["imentos",-1,1],["ivos",-1,8]];var t=[["ada",-1,1],["ida",-1,1],["ia",-1,1],["aria",2,1],["eria",2,1],["iria",2,1],["ara",-1,1],["era",-1,1],["ira",-1,1],["ava",-1,1],["asse",-1,1],["esse",-1,1],["isse",-1,1],["aste",-1,1],["este",-1,1],["iste",-1,1],["ei",-1,1],["arei",16,1],["erei",16,1],["irei",16,1],["am",-1,1],["iam",20,1],["ariam",21,1],["eriam",21,1],["iriam",21,1],["aram",20,1],["eram",20,1],["iram",20,1],["avam",20,1],["em",-1,1],["arem",29,1],["erem",29,1],["irem",29,1],["assem",29,1],["essem",29,1],["issem",29,1],["ado",-1,1],["ido",-1,1],["ando",-1,1],["endo",-1,1],["indo",-1,1],["ara~o",-1,1],["era~o",-1,1],["ira~o",-1,1],["ar",-1,1],["er",-1,1],["ir",-1,1],["as",-1,1],["adas",47,1],["idas",47,1],["ias",47,1],["arias",50,1],["erias",50,1],["irias",50,1],["aras",47,1],["eras",47,1],["iras",47,1],["avas",47,1],["es",-1,1],["ardes",58,1],["erdes",58,1],["irdes",58,1],["ares",58,1],["eres",58,1],["ires",58,1],["asses",58,1],["esses",58,1],["isses",58,1],["astes",58,1],["estes",58,1],["istes",58,1],["is",-1,1],["ais",71,1],["eis",71,1],["areis",73,1],["ereis",73,1],["ireis",73,1],["áreis",73,1],["éreis",73,1],["íreis",73,1],["ásseis",73,1],["ésseis",73,1],["ísseis",73,1],["áveis",73,1],["íeis",73,1],["aríeis",84,1],["eríeis",84,1],["iríeis",84,1],["ados",-1,1],["idos",-1,1],["amos",-1,1],["áramos",90,1],["éramos",90,1],["íramos",90,1],["ávamos",90,1],["íamos",90,1],["aríamos",95,1],["eríamos",95,1],["iríamos",95,1],["emos",-1,1],["aremos",99,1],["eremos",99,1],["iremos",99,1],["ássemos",99,1],["êssemos",99,1],["íssemos",99,1],["imos",-1,1],["armos",-1,1],["ermos",-1,1],["irmos",-1,1],["ámos",-1,1],["arás",-1,1],["erás",-1,1],["irás",-1,1],["eu",-1,1],["iu",-1,1],["ou",-1,1],["ará",-1,1],["erá",-1,1],["irá",-1,1]];var c=[["a",-1,1],["i",-1,1],["o",-1,1],["os",-1,1],["á",-1,1],["í",-1,1],["ó",-1,1]];var f=[["e",-1,1],["ç",-1,2],["é",-1,1],["ê",-1,1]];var l=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,3,19,12,2];var n=0;var m=0;var b=0;function k(){var i;while(true){var s=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("a~")){return false}break;case 2:if(!r.slice_from("o~")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function _(){b=r.limit;m=r.limit;n=r.limit;var e=r.cursor;r:{e:{var i=r.cursor;i:{if(!r.in_grouping(l,97,250)){break i}s:{var s=r.cursor;a:{if(!r.out_grouping(l,97,250)){break a}u:while(true){o:{if(!r.in_grouping(l,97,250)){break o}break u}if(r.cursor>=r.limit){break a}r.cursor++}break s}r.cursor=s;if(!r.in_grouping(l,97,250)){break i}a:while(true){u:{if(!r.out_grouping(l,97,250)){break u}break a}if(r.cursor>=r.limit){break i}r.cursor++}}break e}r.cursor=i;if(!r.out_grouping(l,97,250)){break r}i:{var a=r.cursor;s:{if(!r.out_grouping(l,97,250)){break s}a:while(true){u:{if(!r.in_grouping(l,97,250)){break u}break a}if(r.cursor>=r.limit){break s}r.cursor++}break i}r.cursor=a;if(!r.in_grouping(l,97,250)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}b=r.cursor}r.cursor=e;var u=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}m=r.cursor;e:while(true){i:{if(!r.in_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,97,250)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}n=r.cursor}r.cursor=u;return true}function v(){var e;while(true){var s=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("ã")){return false}break;case 2:if(!r.slice_from("õ")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=s;break}return true}function d(){if(!(b<=r.cursor)){return false}return true}function g(){if(!(m<=r.cursor)){return false}return true}function w(){if(!(n<=r.cursor)){return false}return true}function h(){var e;r.ket=r.cursor;e=r.find_among_b(o);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!w()){return false}if(!r.slice_del()){return false}break;case 2:if(!w()){return false}if(!r.slice_from("log")){return false}break;case 3:if(!w()){return false}if(!r.slice_from("u")){return false}break;case 4:if(!w()){return false}if(!r.slice_from("ente")){return false}break;case 5:if(!g()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}switch(e){case 1:r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}break}}break;case 6:if(!w()){return false}if(!r.slice_del()){return false}var t=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(a)==0){r.cursor=r.limit-t;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-t;break r}if(!r.slice_del()){return false}}break;case 7:if(!w()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(u)==0){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}}break;case 8:if(!w()){return false}if(!r.slice_del()){return false}var f=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-f;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-f;break r}if(!r.slice_del()){return false}}break;case 9:if(!d()){return false}if(!r.eq_s_b("e")){return false}if(!r.slice_from("ir")){return false}break}return true}function p(){if(r.cursor{for(var r;;){var i=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(c),u.ket=u.cursor,r){case 1:if(u.slice_from("a~"))break;return;case 2:if(u.slice_from("o~"))break;return;case 3:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=i;break}})(),u.cursor=r,k=u.limit,d=u.limit,b=u.limit,u.cursor);r:{i:{var i=u.cursor;s:if(u.in_grouping(_,97,250)){var s=u.cursor;if(!u.out_grouping(_,97,250)||!u.go_out_grouping(_,97,250)){if(u.cursor=s,!u.in_grouping(_,97,250))break s;if(!u.go_in_grouping(_,97,250))break s}u.cursor++;break i}if(u.cursor=i,!u.out_grouping(_,97,250))break r;s=u.cursor;if(u.out_grouping(_,97,250)&&u.go_out_grouping(_,97,250));else{if(u.cursor=s,!u.in_grouping(_,97,250))break r;if(u.cursor>=u.limit)break r}u.cursor++}k=u.cursor}u.cursor=r,r=u.cursor,u.go_out_grouping(_,97,250)&&(u.cursor++,u.go_in_grouping(_,97,250))&&(u.cursor++,d=u.cursor,u.go_out_grouping(_,97,250))&&(u.cursor++,u.go_in_grouping(_,97,250))&&(u.cursor++,b=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit;r=u.limit-u.cursor;r:{var e=u.limit-u.cursor,o=u.limit-u.cursor,a=u.limit-u.cursor;if(p()||(u.cursor=u.limit-a,(()=>{if(!(u.cursor{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(f)))switch(u.bra=u.cursor,r){case 1:if(!g())return;if(!u.slice_del())return;u.ket=u.cursor;r:{var i=u.limit-u.cursor;if(u.eq_s_b("u")){u.bra=u.cursor;var s=u.limit-u.cursor;if(u.eq_s_b("g")){u.cursor=u.limit-s;break r}}if(u.cursor=u.limit-i,!u.eq_s_b("i"))return;u.bra=u.cursor;s=u.limit-u.cursor;if(!u.eq_s_b("c"))return;u.cursor=u.limit-s}if(!g())return;if(u.slice_del())break;return;case 2:if(u.slice_from("c"))break}})(),u.cursor=u.limit-r,u.cursor=u.limit_backward,r=u.cursor;return(()=>{for(var r;;){var i=u.cursor;r:{switch(u.bra=u.cursor,r=u.find_among(t),u.ket=u.cursor,r){case 1:if(u.slice_from("ã"))break;return;case 2:if(u.slice_from("õ"))break;return;case 3:if(u.cursor>=u.limit)break r;u.cursor++}continue}u.cursor=i;break}})(),u.cursor=r,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/romanian-stemmer.js b/sphinx/search/minified-js/romanian-stemmer.js
index 01c54d0185c..aefb071550d 100644
--- a/sphinx/search/minified-js/romanian-stemmer.js
+++ b/sphinx/search/minified-js/romanian-stemmer.js
@@ -1 +1 @@
-RomanianStemmer=function(){var r=new BaseStemmer;var i=[["",-1,3],["I",0,1],["U",0,2]];var e=[["ea",-1,3],["aţia",-1,7],["aua",-1,2],["iua",-1,4],["aţie",-1,7],["ele",-1,3],["ile",-1,5],["iile",6,4],["iei",-1,4],["atei",-1,6],["ii",-1,4],["ului",-1,1],["ul",-1,1],["elor",-1,3],["ilor",-1,4],["iilor",14,4]];var a=[["icala",-1,4],["iciva",-1,4],["ativa",-1,5],["itiva",-1,6],["icale",-1,4],["aţiune",-1,5],["iţiune",-1,6],["atoare",-1,5],["itoare",-1,6],["ătoare",-1,5],["icitate",-1,4],["abilitate",-1,1],["ibilitate",-1,2],["ivitate",-1,3],["icive",-1,4],["ative",-1,5],["itive",-1,6],["icali",-1,4],["atori",-1,5],["icatori",18,4],["itori",-1,6],["ători",-1,5],["icitati",-1,4],["abilitati",-1,1],["ivitati",-1,3],["icivi",-1,4],["ativi",-1,5],["itivi",-1,6],["icităi",-1,4],["abilităi",-1,1],["ivităi",-1,3],["icităţi",-1,4],["abilităţi",-1,1],["ivităţi",-1,3],["ical",-1,4],["ator",-1,5],["icator",35,4],["itor",-1,6],["ător",-1,5],["iciv",-1,4],["ativ",-1,5],["itiv",-1,6],["icală",-1,4],["icivă",-1,4],["ativă",-1,5],["itivă",-1,6]];var t=[["ica",-1,1],["abila",-1,1],["ibila",-1,1],["oasa",-1,1],["ata",-1,1],["ita",-1,1],["anta",-1,1],["ista",-1,3],["uta",-1,1],["iva",-1,1],["ic",-1,1],["ice",-1,1],["abile",-1,1],["ibile",-1,1],["isme",-1,3],["iune",-1,2],["oase",-1,1],["ate",-1,1],["itate",17,1],["ite",-1,1],["ante",-1,1],["iste",-1,3],["ute",-1,1],["ive",-1,1],["ici",-1,1],["abili",-1,1],["ibili",-1,1],["iuni",-1,2],["atori",-1,1],["osi",-1,1],["ati",-1,1],["itati",30,1],["iti",-1,1],["anti",-1,1],["isti",-1,3],["uti",-1,1],["işti",-1,3],["ivi",-1,1],["ităi",-1,1],["oşi",-1,1],["ităţi",-1,1],["abil",-1,1],["ibil",-1,1],["ism",-1,3],["ator",-1,1],["os",-1,1],["at",-1,1],["it",-1,1],["ant",-1,1],["ist",-1,3],["ut",-1,1],["iv",-1,1],["ică",-1,1],["abilă",-1,1],["ibilă",-1,1],["oasă",-1,1],["ată",-1,1],["ită",-1,1],["antă",-1,1],["istă",-1,3],["ută",-1,1],["ivă",-1,1]];var s=[["ea",-1,1],["ia",-1,1],["esc",-1,1],["ăsc",-1,1],["ind",-1,1],["ând",-1,1],["are",-1,1],["ere",-1,1],["ire",-1,1],["âre",-1,1],["se",-1,2],["ase",10,1],["sese",10,2],["ise",10,1],["use",10,1],["âse",10,1],["eşte",-1,1],["ăşte",-1,1],["eze",-1,1],["ai",-1,1],["eai",19,1],["iai",19,1],["sei",-1,2],["eşti",-1,1],["ăşti",-1,1],["ui",-1,1],["ezi",-1,1],["âi",-1,1],["aşi",-1,1],["seşi",-1,2],["aseşi",29,1],["seseşi",29,2],["iseşi",29,1],["useşi",29,1],["âseşi",29,1],["işi",-1,1],["uşi",-1,1],["âşi",-1,1],["aţi",-1,2],["eaţi",38,1],["iaţi",38,1],["eţi",-1,2],["iţi",-1,2],["âţi",-1,2],["arăţi",-1,1],["serăţi",-1,2],["aserăţi",45,1],["seserăţi",45,2],["iserăţi",45,1],["userăţi",45,1],["âserăţi",45,1],["irăţi",-1,1],["urăţi",-1,1],["ârăţi",-1,1],["am",-1,1],["eam",54,1],["iam",54,1],["em",-1,2],["asem",57,1],["sesem",57,2],["isem",57,1],["usem",57,1],["âsem",57,1],["im",-1,2],["âm",-1,2],["ăm",-1,2],["arăm",65,1],["serăm",65,2],["aserăm",67,1],["seserăm",67,2],["iserăm",67,1],["userăm",67,1],["âserăm",67,1],["irăm",65,1],["urăm",65,1],["ârăm",65,1],["au",-1,1],["eau",76,1],["iau",76,1],["indu",-1,1],["ându",-1,1],["ez",-1,1],["ească",-1,1],["ară",-1,1],["seră",-1,2],["aseră",84,1],["seseră",84,2],["iseră",84,1],["useră",84,1],["âseră",84,1],["iră",-1,1],["ură",-1,1],["âră",-1,1],["ează",-1,1]];var u=[["a",-1,1],["e",-1,1],["ie",1,1],["i",-1,1],["ă",-1,1]];var c=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,2,32,0,0,4];var o=false;var f=0;var l=0;var n=0;function b(){while(true){var i=r.cursor;r:{i:while(true){var e=r.cursor;e:{if(!r.in_grouping(c,97,259)){break e}r.bra=r.cursor;a:{var a=r.cursor;t:{if(!r.eq_s("u")){break t}r.ket=r.cursor;if(!r.in_grouping(c,97,259)){break t}if(!r.slice_from("U")){return false}break a}r.cursor=a;if(!r.eq_s("i")){break e}r.ket=r.cursor;if(!r.in_grouping(c,97,259)){break e}if(!r.slice_from("I")){return false}}r.cursor=e;break i}r.cursor=e;if(r.cursor>=r.limit){break r}r.cursor++}continue}r.cursor=i;break}return true}function m(){n=r.limit;l=r.limit;f=r.limit;var i=r.cursor;r:{i:{var e=r.cursor;e:{if(!r.in_grouping(c,97,259)){break e}a:{var a=r.cursor;t:{if(!r.out_grouping(c,97,259)){break t}s:while(true){u:{if(!r.in_grouping(c,97,259)){break u}break s}if(r.cursor>=r.limit){break t}r.cursor++}break a}r.cursor=a;if(!r.in_grouping(c,97,259)){break e}t:while(true){s:{if(!r.out_grouping(c,97,259)){break s}break t}if(r.cursor>=r.limit){break e}r.cursor++}}break i}r.cursor=e;if(!r.out_grouping(c,97,259)){break r}e:{var t=r.cursor;a:{if(!r.out_grouping(c,97,259)){break a}t:while(true){s:{if(!r.in_grouping(c,97,259)){break s}break t}if(r.cursor>=r.limit){break a}r.cursor++}break e}r.cursor=t;if(!r.in_grouping(c,97,259)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}n=r.cursor}r.cursor=i;var s=r.cursor;r:{i:while(true){e:{if(!r.in_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}i:while(true){e:{if(!r.out_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}l=r.cursor;i:while(true){e:{if(!r.in_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}i:while(true){e:{if(!r.out_grouping(c,97,259)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}f=r.cursor}r.cursor=s;return true}function k(){var e;while(true){var a=r.cursor;r:{r.bra=r.cursor;e=r.find_among(i);if(e==0){break r}r.ket=r.cursor;switch(e){case 1:if(!r.slice_from("i")){return false}break;case 2:if(!r.slice_from("u")){return false}break;case 3:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=a;break}return true}function _(){if(!(n<=r.cursor)){return false}return true}function v(){if(!(l<=r.cursor)){return false}return true}function g(){if(!(f<=r.cursor)){return false}return true}function w(){var i;r.ket=r.cursor;i=r.find_among_b(e);if(i==0){return false}r.bra=r.cursor;if(!v()){return false}switch(i){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.slice_from("a")){return false}break;case 3:if(!r.slice_from("e")){return false}break;case 4:if(!r.slice_from("i")){return false}break;case 5:{var a=r.limit-r.cursor;r:{if(!r.eq_s_b("ab")){break r}return false}r.cursor=r.limit-a}if(!r.slice_from("i")){return false}break;case 6:if(!r.slice_from("at")){return false}break;case 7:if(!r.slice_from("aţi")){return false}break}return true}function d(){var i;var e=r.limit-r.cursor;r.ket=r.cursor;i=r.find_among_b(a);if(i==0){return false}r.bra=r.cursor;if(!v()){return false}switch(i){case 1:if(!r.slice_from("abil")){return false}break;case 2:if(!r.slice_from("ibil")){return false}break;case 3:if(!r.slice_from("iv")){return false}break;case 4:if(!r.slice_from("ic")){return false}break;case 5:if(!r.slice_from("at")){return false}break;case 6:if(!r.slice_from("it")){return false}break}o=true;r.cursor=r.limit-e;return true}function h(){var i;o=false;while(true){var e=r.limit-r.cursor;r:{if(!d()){break r}continue}r.cursor=r.limit-e;break}r.ket=r.cursor;i=r.find_among_b(t);if(i==0){return false}r.bra=r.cursor;if(!g()){return false}switch(i){case 1:if(!r.slice_del()){return false}break;case 2:if(!r.eq_s_b("ţ")){return false}r.bra=r.cursor;if(!r.slice_from("t")){return false}break;case 3:if(!r.slice_from("ist")){return false}break}o=true;return true}function p(){var i;if(r.cursor{var i,r=s.limit-s.cursor;if(s.ket=s.cursor,0!=(i=s.find_among_b(e))&&(s.bra=s.cursor,_())){switch(i){case 1:if(s.slice_from("abil"))break;return;case 2:if(s.slice_from("ibil"))break;return;case 3:if(s.slice_from("iv"))break;return;case 4:if(s.slice_from("ic"))break;return;case 5:if(s.slice_from("at"))break;return;case 6:if(s.slice_from("it"))break;return}return l=!0,s.cursor=s.limit-r,1}})()){s.cursor=s.limit-r;break}}if(s.ket=s.cursor,0!=(i=s.find_among_b(a))&&(s.bra=s.cursor,m<=s.cursor)){switch(i){case 1:if(s.slice_del())break;return;case 2:if(!s.eq_s_b("ț"))return;if(s.bra=s.cursor,s.slice_from("t"))break;return;case 3:if(s.slice_from("ist"))break;return}l=!0}}function g(){s.ket=s.cursor,0!=s.find_among_b(i)&&(s.bra=s.cursor,b<=s.cursor)&&s.slice_del()}this.stem=function(){(()=>{for(var i,r=s.cursor;;){var e=s.cursor;i:{for(;;){var a=s.cursor;if(s.bra=s.cursor,0!=(i=s.find_among(t))){switch(s.ket=s.cursor,i){case 1:if(s.slice_from("ș"))break;return;case 2:if(s.slice_from("ț"))break;return}s.cursor=a;break}if(s.cursor=a,s.cursor>=s.limit)break i;s.cursor++}continue}s.cursor=e;break}s.cursor=r})();var i=s.cursor,i=((()=>{for(;;){var i=s.cursor;i:{for(;;){var r=s.cursor;r:if(s.in_grouping(n,97,259)){s.bra=s.cursor;e:{var e=s.cursor;if(s.eq_s("u")&&(s.ket=s.cursor,s.in_grouping(n,97,259))){if(s.slice_from("U"))break e;return}if(s.cursor=e,!s.eq_s("i"))break r;if(s.ket=s.cursor,!s.in_grouping(n,97,259))break r;if(!s.slice_from("I"))return}s.cursor=r;break}if(s.cursor=r,s.cursor>=s.limit)break i;s.cursor++}continue}s.cursor=i;break}})(),s.cursor=i,b=s.limit,f=s.limit,m=s.limit,s.cursor);i:{r:{var r=s.cursor;e:if(s.in_grouping(n,97,259)){var e=s.cursor;if(!s.out_grouping(n,97,259)||!s.go_out_grouping(n,97,259)){if(s.cursor=e,!s.in_grouping(n,97,259))break e;if(!s.go_in_grouping(n,97,259))break e}s.cursor++;break r}if(s.cursor=r,!s.out_grouping(n,97,259))break i;e=s.cursor;if(s.out_grouping(n,97,259)&&s.go_out_grouping(n,97,259));else{if(s.cursor=e,!s.in_grouping(n,97,259))break i;if(s.cursor>=s.limit)break i}s.cursor++}b=s.cursor}s.cursor=i,i=s.cursor,s.go_out_grouping(n,97,259)&&(s.cursor++,s.go_in_grouping(n,97,259))&&(s.cursor++,f=s.cursor,s.go_out_grouping(n,97,259))&&(s.cursor++,s.go_in_grouping(n,97,259))&&(s.cursor++,m=s.cursor),s.cursor=i,s.limit_backward=s.cursor,s.cursor=s.limit;var i=s.limit-s.cursor,i=((()=>{var i;if(s.ket=s.cursor,0!=(i=s.find_among_b(o))&&(s.bra=s.cursor,_()))switch(i){case 1:if(s.slice_del())break;return;case 2:if(s.slice_from("a"))break;return;case 3:if(s.slice_from("e"))break;return;case 4:if(s.slice_from("i"))break;return;case 5:var r=s.limit-s.cursor;if(s.eq_s_b("ab"))return;if(s.cursor=s.limit-r,s.slice_from("i"))break;return;case 6:if(s.slice_from("at"))break;return;case 7:if(s.slice_from("ați"))break}})(),s.cursor=s.limit-i,s.limit-s.cursor),i=(k(),s.cursor=s.limit-i,s.limit-s.cursor),a=s.limit-s.cursor,a=(l||(s.cursor=s.limit-a,(()=>{var i;if(!(s.cursor{for(var i;;){var r=s.cursor;i:{switch(s.bra=s.cursor,i=s.find_among(u),s.ket=s.cursor,i){case 1:if(s.slice_from("i"))break;return;case 2:if(s.slice_from("u"))break;return;case 3:if(s.cursor>=s.limit)break i;s.cursor++}continue}s.cursor=r;break}})(),s.cursor=i,!0},this.stemWord=function(i){return s.setCurrent(i),this.stem(),s.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/russian-stemmer.js b/sphinx/search/minified-js/russian-stemmer.js
index 698d92bcdb8..7b5410c5e82 100644
--- a/sphinx/search/minified-js/russian-stemmer.js
+++ b/sphinx/search/minified-js/russian-stemmer.js
@@ -1 +1 @@
-RussianStemmer=function(){var r=new BaseStemmer;var e=[["в",-1,1],["ив",0,2],["ыв",0,2],["вши",-1,1],["ивши",3,2],["ывши",3,2],["вшись",-1,1],["ившись",6,2],["ывшись",6,2]];var i=[["ее",-1,1],["ие",-1,1],["ое",-1,1],["ые",-1,1],["ими",-1,1],["ыми",-1,1],["ей",-1,1],["ий",-1,1],["ой",-1,1],["ый",-1,1],["ем",-1,1],["им",-1,1],["ом",-1,1],["ым",-1,1],["его",-1,1],["ого",-1,1],["ему",-1,1],["ому",-1,1],["их",-1,1],["ых",-1,1],["ею",-1,1],["ою",-1,1],["ую",-1,1],["юю",-1,1],["ая",-1,1],["яя",-1,1]];var u=[["ем",-1,1],["нн",-1,1],["вш",-1,1],["ивш",2,2],["ывш",2,2],["щ",-1,1],["ющ",5,1],["ующ",6,2]];var s=[["сь",-1,1],["ся",-1,1]];var a=[["ла",-1,1],["ила",0,2],["ыла",0,2],["на",-1,1],["ена",3,2],["ете",-1,1],["ите",-1,2],["йте",-1,1],["ейте",7,2],["уйте",7,2],["ли",-1,1],["или",10,2],["ыли",10,2],["й",-1,1],["ей",13,2],["уй",13,2],["л",-1,1],["ил",16,2],["ыл",16,2],["ем",-1,1],["им",-1,2],["ым",-1,2],["н",-1,1],["ен",22,2],["ло",-1,1],["ило",24,2],["ыло",24,2],["но",-1,1],["ено",27,2],["нно",27,1],["ет",-1,1],["ует",30,2],["ит",-1,2],["ыт",-1,2],["ют",-1,1],["уют",34,2],["ят",-1,2],["ны",-1,1],["ены",37,2],["ть",-1,1],["ить",39,2],["ыть",39,2],["ешь",-1,1],["ишь",-1,2],["ю",-1,2],["ую",44,2]];var t=[["а",-1,1],["ев",-1,1],["ов",-1,1],["е",-1,1],["ие",3,1],["ье",3,1],["и",-1,1],["еи",6,1],["ии",6,1],["ами",6,1],["ями",6,1],["иями",10,1],["й",-1,1],["ей",12,1],["ией",13,1],["ий",12,1],["ой",12,1],["ам",-1,1],["ем",-1,1],["ием",18,1],["ом",-1,1],["ям",-1,1],["иям",21,1],["о",-1,1],["у",-1,1],["ах",-1,1],["ях",-1,1],["иях",26,1],["ы",-1,1],["ь",-1,1],["ю",-1,1],["ию",30,1],["ью",30,1],["я",-1,1],["ия",33,1],["ья",33,1]];var c=[["ост",-1,1],["ость",-1,1]];var f=[["ейше",-1,1],["н",-1,2],["ейш",-1,1],["ь",-1,3]];var l=[33,65,8,232];var o=0;var n=0;function b(){n=r.limit;o=r.limit;var e=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}n=r.cursor;e:while(true){i:{if(!r.out_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.in_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(l,1072,1103)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}o=r.cursor}r.cursor=e;return true}function _(){if(!(o<=r.cursor)){return false}return true}function k(){var i;r.ket=r.cursor;i=r.find_among_b(e);if(i==0){return false}r.bra=r.cursor;switch(i){case 1:r:{var u=r.limit-r.cursor;e:{if(!r.eq_s_b("а")){break e}break r}r.cursor=r.limit-u;if(!r.eq_s_b("я")){return false}}if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}break}return true}function m(){r.ket=r.cursor;if(r.find_among_b(i)==0){return false}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function v(){var e;if(!m()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(u);if(e==0){r.cursor=r.limit-i;break r}r.bra=r.cursor;switch(e){case 1:e:{var s=r.limit-r.cursor;i:{if(!r.eq_s_b("а")){break i}break e}r.cursor=r.limit-s;if(!r.eq_s_b("я")){r.cursor=r.limit-i;break r}}if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}break}}return true}function d(){r.ket=r.cursor;if(r.find_among_b(s)==0){return false}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function g(){var e;r.ket=r.cursor;e=r.find_among_b(a);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:r:{var i=r.limit-r.cursor;e:{if(!r.eq_s_b("а")){break e}break r}r.cursor=r.limit-i;if(!r.eq_s_b("я")){return false}}if(!r.slice_del()){return false}break;case 2:if(!r.slice_del()){return false}break}return true}function w(){r.ket=r.cursor;if(r.find_among_b(t)==0){return false}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function h(){r.ket=r.cursor;if(r.find_among_b(c)==0){return false}r.bra=r.cursor;if(!_()){return false}if(!r.slice_del()){return false}return true}function q(){var e;r.ket=r.cursor;e=r.find_among_b(f);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!r.slice_del()){return false}r.ket=r.cursor;if(!r.eq_s_b("н")){return false}r.bra=r.cursor;if(!r.eq_s_b("н")){return false}if(!r.slice_del()){return false}break;case 2:if(!r.eq_s_b("н")){return false}if(!r.slice_del()){return false}break;case 3:if(!r.slice_del()){return false}break}return true}this.stem=function(){var e=r.cursor;r:{while(true){var i=r.cursor;e:{i:while(true){var u=r.cursor;u:{r.bra=r.cursor;if(!r.eq_s("ё")){break u}r.ket=r.cursor;r.cursor=u;break i}r.cursor=u;if(r.cursor>=r.limit){break e}r.cursor++}if(!r.slice_from("е")){return false}continue}r.cursor=i;break}}r.cursor=e;b();r.limit_backward=r.cursor;r.cursor=r.limit;if(r.cursor=u.limit)break r;u.cursor++}if(u.slice_from("е"))continue;return!1}u.cursor=i;break}if(u.cursor=r,b=u.limit,m=u.limit,r=u.cursor,u.go_out_grouping(n,1072,1103)&&(u.cursor++,b=u.cursor,u.go_in_grouping(n,1072,1103))&&(u.cursor++,u.go_out_grouping(n,1072,1103))&&(u.cursor++,u.go_in_grouping(n,1072,1103))&&(u.cursor++,m=u.cursor),u.cursor=r,u.limit_backward=u.cursor,u.cursor=u.limit,u.cursor{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(o))){switch(u.bra=u.cursor,r){case 1:var i=u.limit-u.cursor;if(!u.eq_s_b("а")&&(u.cursor=u.limit-i,!u.eq_s_b("я")))return;if(u.slice_del())break;return;case 2:if(u.slice_del())break;return}return 1}})()){u.cursor=u.limit-c;c=u.limit-u.cursor,c=(u.ket=u.cursor,0!=u.find_among_b(t)&&(u.bra=u.cursor,u.slice_del())||(u.cursor=u.limit-c),u.limit-u.cursor);if(!f()&&(u.cursor=u.limit-c,!(()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(a))){switch(u.bra=u.cursor,r){case 1:var i=u.limit-u.cursor;if(!u.eq_s_b("а")&&(u.cursor=u.limit-i,!u.eq_s_b("я")))return;if(u.slice_del())break;return;case 2:if(u.slice_del())break;return}return 1}})())&&(u.cursor=u.limit-c,u.ket=u.cursor,0==u.find_among_b(l)||(u.bra=u.cursor,!u.slice_del())))break r}}u.cursor=u.limit-e;e=u.limit-u.cursor;if(u.ket=u.cursor,u.eq_s_b("и")){if(u.bra=u.cursor,!u.slice_del())return!1}else u.cursor=u.limit-e;e=u.limit-u.cursor,k(),u.cursor=u.limit-e,e=u.limit-u.cursor;return(()=>{var r;if(u.ket=u.cursor,0!=(r=u.find_among_b(_)))switch(u.bra=u.cursor,r){case 1:if(!u.slice_del())return;if(u.ket=u.cursor,!u.eq_s_b("н"))return;if(u.bra=u.cursor,!u.eq_s_b("н"))return;if(u.slice_del())break;return;case 2:if(!u.eq_s_b("н"))return;if(u.slice_del())break;return;case 3:if(u.slice_del())break}})(),u.cursor=u.limit-e,u.limit_backward=r,u.cursor=u.limit_backward,!0},this.stemWord=function(r){return u.setCurrent(r),this.stem(),u.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/serbian-stemmer.js b/sphinx/search/minified-js/serbian-stemmer.js
new file mode 100644
index 00000000000..0ac2621eb22
--- /dev/null
+++ b/sphinx/search/minified-js/serbian-stemmer.js
@@ -0,0 +1 @@
+var SerbianStemmer=function(){var m=new BaseStemmer,s=[["а",-1,1],["б",-1,2],["в",-1,3],["г",-1,4],["д",-1,5],["е",-1,7],["ж",-1,8],["з",-1,9],["и",-1,10],["к",-1,12],["л",-1,13],["м",-1,15],["н",-1,16],["о",-1,18],["п",-1,19],["р",-1,20],["с",-1,21],["т",-1,22],["у",-1,24],["ф",-1,25],["х",-1,26],["ц",-1,27],["ч",-1,28],["ш",-1,30],["ђ",-1,6],["ј",-1,11],["љ",-1,14],["њ",-1,17],["ћ",-1,23],["џ",-1,29]],r=[["daba",-1,73],["ajaca",-1,12],["ejaca",-1,14],["ljaca",-1,13],["njaca",-1,85],["ojaca",-1,15],["alaca",-1,82],["elaca",-1,83],["olaca",-1,84],["maca",-1,75],["naca",-1,76],["raca",-1,81],["saca",-1,80],["vaca",-1,79],["šaca",-1,18],["aoca",-1,82],["acaka",-1,55],["ajaka",-1,16],["ojaka",-1,17],["anaka",-1,78],["ataka",-1,58],["etaka",-1,59],["itaka",-1,60],["otaka",-1,61],["utaka",-1,62],["ačaka",-1,54],["esama",-1,67],["izama",-1,87],["jacima",-1,5],["nicima",-1,23],["ticima",-1,24],["teticima",30,21],["zicima",-1,25],["atcima",-1,58],["utcima",-1,62],["čcima",-1,74],["pesima",-1,2],["inzima",-1,19],["lozima",-1,1],["metara",-1,68],["centara",-1,69],["istara",-1,70],["ekata",-1,86],["anata",-1,53],["nstava",-1,22],["kustava",-1,29],["ajac",-1,12],["ejac",-1,14],["ljac",-1,13],["njac",-1,85],["anjac",49,11],["ojac",-1,15],["alac",-1,82],["elac",-1,83],["olac",-1,84],["mac",-1,75],["nac",-1,76],["rac",-1,81],["sac",-1,80],["vac",-1,79],["šac",-1,18],["jebe",-1,88],["olce",-1,84],["kuse",-1,27],["rave",-1,42],["save",-1,52],["šave",-1,51],["baci",-1,89],["jaci",-1,5],["tvenici",-1,20],["snici",-1,26],["tetici",-1,21],["bojci",-1,4],["vojci",-1,3],["ojsci",-1,66],["atci",-1,58],["itci",-1,60],["utci",-1,62],["čci",-1,74],["pesi",-1,2],["inzi",-1,19],["lozi",-1,1],["acak",-1,55],["usak",-1,57],["atak",-1,58],["etak",-1,59],["itak",-1,60],["otak",-1,61],["utak",-1,62],["ačak",-1,54],["ušak",-1,56],["izam",-1,87],["tican",-1,65],["cajan",-1,7],["čajan",-1,6],["voljan",-1,77],["eskan",-1,63],["alan",-1,40],["bilan",-1,33],["gilan",-1,37],["nilan",-1,39],["rilan",-1,38],["silan",-1,36],["tilan",-1,34],["avilan",-1,35],["laran",-1,9],["eran",-1,8],["asan",-1,91],["esan",-1,10],["dusan",-1,31],["kusan",-1,28],["atan",-1,47],["pletan",-1,50],["tetan",-1,49],["antan",-1,32],["pravan",-1,44],["stavan",-1,43],["sivan",-1,46],["tivan",-1,45],["ozan",-1,41],["tičan",-1,64],["ašan",-1,90],["dušan",-1,30],["metar",-1,68],["centar",-1,69],["istar",-1,70],["ekat",-1,86],["enat",-1,48],["oscu",-1,72],["ošću",-1,71]],o=[["aca",-1,124],["eca",-1,125],["uca",-1,126],["ga",-1,20],["acega",3,124],["ecega",3,125],["ucega",3,126],["anjijega",3,84],["enjijega",3,85],["snjijega",3,122],["šnjijega",3,86],["kijega",3,95],["skijega",11,1],["škijega",11,2],["elijega",3,83],["nijega",3,13],["osijega",3,123],["atijega",3,120],["evitijega",3,92],["ovitijega",3,93],["astijega",3,94],["avijega",3,77],["evijega",3,78],["ivijega",3,79],["ovijega",3,80],["ošijega",3,91],["anjega",3,84],["enjega",3,85],["snjega",3,122],["šnjega",3,86],["kega",3,95],["skega",30,1],["škega",30,2],["elega",3,83],["nega",3,13],["anega",34,10],["enega",34,87],["snega",34,159],["šnega",34,88],["osega",3,123],["atega",3,120],["evitega",3,92],["ovitega",3,93],["astega",3,94],["avega",3,77],["evega",3,78],["ivega",3,79],["ovega",3,80],["aćega",3,14],["ećega",3,15],["ućega",3,16],["ošega",3,91],["acoga",3,124],["ecoga",3,125],["ucoga",3,126],["anjoga",3,84],["enjoga",3,85],["snjoga",3,122],["šnjoga",3,86],["koga",3,95],["skoga",59,1],["škoga",59,2],["loga",3,19],["eloga",62,83],["noga",3,13],["cinoga",64,137],["činoga",64,89],["osoga",3,123],["atoga",3,120],["evitoga",3,92],["ovitoga",3,93],["astoga",3,94],["avoga",3,77],["evoga",3,78],["ivoga",3,79],["ovoga",3,80],["aćoga",3,14],["ećoga",3,15],["ućoga",3,16],["ošoga",3,91],["uga",3,18],["aja",-1,109],["caja",81,26],["laja",81,30],["raja",81,31],["ćaja",81,28],["čaja",81,27],["đaja",81,29],["bija",-1,32],["cija",-1,33],["dija",-1,34],["fija",-1,40],["gija",-1,39],["anjija",-1,84],["enjija",-1,85],["snjija",-1,122],["šnjija",-1,86],["kija",-1,95],["skija",97,1],["škija",97,2],["lija",-1,24],["elija",100,83],["mija",-1,37],["nija",-1,13],["ganija",103,9],["manija",103,6],["panija",103,7],["ranija",103,8],["tanija",103,5],["pija",-1,41],["rija",-1,42],["rarija",110,21],["sija",-1,23],["osija",112,123],["tija",-1,44],["atija",114,120],["evitija",114,92],["ovitija",114,93],["otija",114,22],["astija",114,94],["avija",-1,77],["evija",-1,78],["ivija",-1,79],["ovija",-1,80],["zija",-1,45],["ošija",-1,91],["žija",-1,38],["anja",-1,84],["enja",-1,85],["snja",-1,122],["šnja",-1,86],["ka",-1,95],["ska",131,1],["ška",131,2],["ala",-1,104],["acala",134,128],["astajala",134,106],["istajala",134,107],["ostajala",134,108],["ijala",134,47],["injala",134,114],["nala",134,46],["irala",134,100],["urala",134,105],["tala",134,113],["astala",144,110],["istala",144,111],["ostala",144,112],["avala",134,97],["evala",134,96],["ivala",134,98],["ovala",134,76],["uvala",134,99],["ačala",134,102],["ela",-1,83],["ila",-1,116],["acila",155,124],["lucila",155,121],["nila",155,103],["astanila",158,110],["istanila",158,111],["ostanila",158,112],["rosila",155,127],["jetila",155,118],["ozila",155,48],["ačila",155,101],["lučila",155,117],["rošila",155,90],["ola",-1,50],["asla",-1,115],["nula",-1,13],["gama",-1,20],["logama",171,19],["ugama",171,18],["ajama",-1,109],["cajama",174,26],["lajama",174,30],["rajama",174,31],["ćajama",174,28],["čajama",174,27],["đajama",174,29],["bijama",-1,32],["cijama",-1,33],["dijama",-1,34],["fijama",-1,40],["gijama",-1,39],["lijama",-1,35],["mijama",-1,37],["nijama",-1,36],["ganijama",188,9],["manijama",188,6],["panijama",188,7],["ranijama",188,8],["tanijama",188,5],["pijama",-1,41],["rijama",-1,42],["sijama",-1,43],["tijama",-1,44],["zijama",-1,45],["žijama",-1,38],["alama",-1,104],["ijalama",200,47],["nalama",200,46],["elama",-1,119],["ilama",-1,116],["ramama",-1,52],["lemama",-1,51],["inama",-1,11],["cinama",207,137],["činama",207,89],["rama",-1,52],["arama",210,53],["drama",210,54],["erama",210,55],["orama",210,56],["basama",-1,135],["gasama",-1,131],["jasama",-1,129],["kasama",-1,133],["nasama",-1,132],["tasama",-1,130],["vasama",-1,134],["esama",-1,152],["isama",-1,154],["etama",-1,70],["estama",-1,71],["istama",-1,72],["kstama",-1,73],["ostama",-1,74],["avama",-1,77],["evama",-1,78],["ivama",-1,79],["bašama",-1,63],["gašama",-1,64],["jašama",-1,61],["kašama",-1,62],["našama",-1,60],["tašama",-1,59],["vašama",-1,65],["ešama",-1,66],["išama",-1,67],["lema",-1,51],["acima",-1,124],["ecima",-1,125],["ucima",-1,126],["ajima",-1,109],["cajima",245,26],["lajima",245,30],["rajima",245,31],["ćajima",245,28],["čajima",245,27],["đajima",245,29],["bijima",-1,32],["cijima",-1,33],["dijima",-1,34],["fijima",-1,40],["gijima",-1,39],["anjijima",-1,84],["enjijima",-1,85],["snjijima",-1,122],["šnjijima",-1,86],["kijima",-1,95],["skijima",261,1],["škijima",261,2],["lijima",-1,35],["elijima",264,83],["mijima",-1,37],["nijima",-1,13],["ganijima",267,9],["manijima",267,6],["panijima",267,7],["ranijima",267,8],["tanijima",267,5],["pijima",-1,41],["rijima",-1,42],["sijima",-1,43],["osijima",275,123],["tijima",-1,44],["atijima",277,120],["evitijima",277,92],["ovitijima",277,93],["astijima",277,94],["avijima",-1,77],["evijima",-1,78],["ivijima",-1,79],["ovijima",-1,80],["zijima",-1,45],["ošijima",-1,91],["žijima",-1,38],["anjima",-1,84],["enjima",-1,85],["snjima",-1,122],["šnjima",-1,86],["kima",-1,95],["skima",293,1],["škima",293,2],["alima",-1,104],["ijalima",296,47],["nalima",296,46],["elima",-1,83],["ilima",-1,116],["ozilima",300,48],["olima",-1,50],["lemima",-1,51],["nima",-1,13],["anima",304,10],["inima",304,11],["cinima",306,137],["činima",306,89],["onima",304,12],["arima",-1,53],["drima",-1,54],["erima",-1,55],["orima",-1,56],["basima",-1,135],["gasima",-1,131],["jasima",-1,129],["kasima",-1,133],["nasima",-1,132],["tasima",-1,130],["vasima",-1,134],["esima",-1,57],["isima",-1,58],["osima",-1,123],["atima",-1,120],["ikatima",324,68],["latima",324,69],["etima",-1,70],["evitima",-1,92],["ovitima",-1,93],["astima",-1,94],["estima",-1,71],["istima",-1,72],["kstima",-1,73],["ostima",-1,74],["ištima",-1,75],["avima",-1,77],["evima",-1,78],["ajevima",337,109],["cajevima",338,26],["lajevima",338,30],["rajevima",338,31],["ćajevima",338,28],["čajevima",338,27],["đajevima",338,29],["ivima",-1,79],["ovima",-1,80],["govima",346,20],["ugovima",347,17],["lovima",346,82],["olovima",349,49],["movima",346,81],["onovima",346,12],["stvima",-1,3],["štvima",-1,4],["aćima",-1,14],["ećima",-1,15],["ućima",-1,16],["bašima",-1,63],["gašima",-1,64],["jašima",-1,61],["kašima",-1,62],["našima",-1,60],["tašima",-1,59],["vašima",-1,65],["ešima",-1,66],["išima",-1,67],["ošima",-1,91],["na",-1,13],["ana",368,10],["acana",369,128],["urana",369,105],["tana",369,113],["avana",369,97],["evana",369,96],["ivana",369,98],["uvana",369,99],["ačana",369,102],["acena",368,124],["lucena",368,121],["ačena",368,101],["lučena",368,117],["ina",368,11],["cina",382,137],["anina",382,10],["čina",382,89],["ona",368,12],["ara",-1,53],["dra",-1,54],["era",-1,55],["ora",-1,56],["basa",-1,135],["gasa",-1,131],["jasa",-1,129],["kasa",-1,133],["nasa",-1,132],["tasa",-1,130],["vasa",-1,134],["esa",-1,57],["isa",-1,58],["osa",-1,123],["ata",-1,120],["ikata",401,68],["lata",401,69],["eta",-1,70],["evita",-1,92],["ovita",-1,93],["asta",-1,94],["esta",-1,71],["ista",-1,72],["ksta",-1,73],["osta",-1,74],["nuta",-1,13],["išta",-1,75],["ava",-1,77],["eva",-1,78],["ajeva",415,109],["cajeva",416,26],["lajeva",416,30],["rajeva",416,31],["ćajeva",416,28],["čajeva",416,27],["đajeva",416,29],["iva",-1,79],["ova",-1,80],["gova",424,20],["ugova",425,17],["lova",424,82],["olova",427,49],["mova",424,81],["onova",424,12],["stva",-1,3],["štva",-1,4],["aća",-1,14],["eća",-1,15],["uća",-1,16],["baša",-1,63],["gaša",-1,64],["jaša",-1,61],["kaša",-1,62],["naša",-1,60],["taša",-1,59],["vaša",-1,65],["eša",-1,66],["iša",-1,67],["oša",-1,91],["ace",-1,124],["ece",-1,125],["uce",-1,126],["luce",448,121],["astade",-1,110],["istade",-1,111],["ostade",-1,112],["ge",-1,20],["loge",453,19],["uge",453,18],["aje",-1,104],["caje",456,26],["laje",456,30],["raje",456,31],["astaje",456,106],["istaje",456,107],["ostaje",456,108],["ćaje",456,28],["čaje",456,27],["đaje",456,29],["ije",-1,116],["bije",466,32],["cije",466,33],["dije",466,34],["fije",466,40],["gije",466,39],["anjije",466,84],["enjije",466,85],["snjije",466,122],["šnjije",466,86],["kije",466,95],["skije",476,1],["škije",476,2],["lije",466,35],["elije",479,83],["mije",466,37],["nije",466,13],["ganije",482,9],["manije",482,6],["panije",482,7],["ranije",482,8],["tanije",482,5],["pije",466,41],["rije",466,42],["sije",466,43],["osije",490,123],["tije",466,44],["atije",492,120],["evitije",492,92],["ovitije",492,93],["astije",492,94],["avije",466,77],["evije",466,78],["ivije",466,79],["ovije",466,80],["zije",466,45],["ošije",466,91],["žije",466,38],["anje",-1,84],["enje",-1,85],["snje",-1,122],["šnje",-1,86],["uje",-1,25],["lucuje",508,121],["iruje",508,100],["lučuje",508,117],["ke",-1,95],["ske",512,1],["ške",512,2],["ale",-1,104],["acale",515,128],["astajale",515,106],["istajale",515,107],["ostajale",515,108],["ijale",515,47],["injale",515,114],["nale",515,46],["irale",515,100],["urale",515,105],["tale",515,113],["astale",525,110],["istale",525,111],["ostale",525,112],["avale",515,97],["evale",515,96],["ivale",515,98],["ovale",515,76],["uvale",515,99],["ačale",515,102],["ele",-1,83],["ile",-1,116],["acile",536,124],["lucile",536,121],["nile",536,103],["rosile",536,127],["jetile",536,118],["ozile",536,48],["ačile",536,101],["lučile",536,117],["rošile",536,90],["ole",-1,50],["asle",-1,115],["nule",-1,13],["rame",-1,52],["leme",-1,51],["acome",-1,124],["ecome",-1,125],["ucome",-1,126],["anjome",-1,84],["enjome",-1,85],["snjome",-1,122],["šnjome",-1,86],["kome",-1,95],["skome",558,1],["škome",558,2],["elome",-1,83],["nome",-1,13],["cinome",562,137],["činome",562,89],["osome",-1,123],["atome",-1,120],["evitome",-1,92],["ovitome",-1,93],["astome",-1,94],["avome",-1,77],["evome",-1,78],["ivome",-1,79],["ovome",-1,80],["aćome",-1,14],["ećome",-1,15],["ućome",-1,16],["ošome",-1,91],["ne",-1,13],["ane",578,10],["acane",579,128],["urane",579,105],["tane",579,113],["astane",582,110],["istane",582,111],["ostane",582,112],["avane",579,97],["evane",579,96],["ivane",579,98],["uvane",579,99],["ačane",579,102],["acene",578,124],["lucene",578,121],["ačene",578,101],["lučene",578,117],["ine",578,11],["cine",595,137],["anine",595,10],["čine",595,89],["one",578,12],["are",-1,53],["dre",-1,54],["ere",-1,55],["ore",-1,56],["ase",-1,161],["base",604,135],["acase",604,128],["gase",604,131],["jase",604,129],["astajase",608,138],["istajase",608,139],["ostajase",608,140],["injase",608,150],["kase",604,133],["nase",604,132],["irase",604,155],["urase",604,156],["tase",604,130],["vase",604,134],["avase",618,144],["evase",618,145],["ivase",618,146],["ovase",618,148],["uvase",618,147],["ese",-1,57],["ise",-1,58],["acise",625,124],["lucise",625,121],["rosise",625,127],["jetise",625,149],["ose",-1,123],["astadose",630,141],["istadose",630,142],["ostadose",630,143],["ate",-1,104],["acate",634,128],["ikate",634,68],["late",634,69],["irate",634,100],["urate",634,105],["tate",634,113],["avate",634,97],["evate",634,96],["ivate",634,98],["uvate",634,99],["ačate",634,102],["ete",-1,70],["astadete",646,110],["istadete",646,111],["ostadete",646,112],["astajete",646,106],["istajete",646,107],["ostajete",646,108],["ijete",646,116],["injete",646,114],["ujete",646,25],["lucujete",655,121],["irujete",655,100],["lučujete",655,117],["nete",646,13],["astanete",659,110],["istanete",659,111],["ostanete",659,112],["astete",646,115],["ite",-1,116],["acite",664,124],["lucite",664,121],["nite",664,13],["astanite",667,110],["istanite",667,111],["ostanite",667,112],["rosite",664,127],["jetite",664,118],["astite",664,115],["evite",664,92],["ovite",664,93],["ačite",664,101],["lučite",664,117],["rošite",664,90],["ajte",-1,104],["urajte",679,105],["tajte",679,113],["astajte",681,106],["istajte",681,107],["ostajte",681,108],["avajte",679,97],["evajte",679,96],["ivajte",679,98],["uvajte",679,99],["ijte",-1,116],["lucujte",-1,121],["irujte",-1,100],["lučujte",-1,117],["aste",-1,94],["acaste",693,128],["astajaste",693,106],["istajaste",693,107],["ostajaste",693,108],["injaste",693,114],["iraste",693,100],["uraste",693,105],["taste",693,113],["avaste",693,97],["evaste",693,96],["ivaste",693,98],["ovaste",693,76],["uvaste",693,99],["ačaste",693,102],["este",-1,71],["iste",-1,72],["aciste",709,124],["luciste",709,121],["niste",709,103],["rosiste",709,127],["jetiste",709,118],["ačiste",709,101],["lučiste",709,117],["rošiste",709,90],["kste",-1,73],["oste",-1,74],["astadoste",719,110],["istadoste",719,111],["ostadoste",719,112],["nuste",-1,13],["ište",-1,75],["ave",-1,77],["eve",-1,78],["ajeve",726,109],["cajeve",727,26],["lajeve",727,30],["rajeve",727,31],["ćajeve",727,28],["čajeve",727,27],["đajeve",727,29],["ive",-1,79],["ove",-1,80],["gove",735,20],["ugove",736,17],["love",735,82],["olove",738,49],["move",735,81],["onove",735,12],["aće",-1,14],["eće",-1,15],["uće",-1,16],["ače",-1,101],["luče",-1,117],["aše",-1,104],["baše",747,63],["gaše",747,64],["jaše",747,61],["astajaše",750,106],["istajaše",750,107],["ostajaše",750,108],["injaše",750,114],["kaše",747,62],["naše",747,60],["iraše",747,100],["uraše",747,105],["taše",747,59],["vaše",747,65],["avaše",760,97],["evaše",760,96],["ivaše",760,98],["ovaše",760,76],["uvaše",760,99],["ačaše",747,102],["eše",-1,66],["iše",-1,67],["jetiše",768,118],["ačiše",768,101],["lučiše",768,117],["rošiše",768,90],["oše",-1,91],["astadoše",773,110],["istadoše",773,111],["ostadoše",773,112],["aceg",-1,124],["eceg",-1,125],["uceg",-1,126],["anjijeg",-1,84],["enjijeg",-1,85],["snjijeg",-1,122],["šnjijeg",-1,86],["kijeg",-1,95],["skijeg",784,1],["škijeg",784,2],["elijeg",-1,83],["nijeg",-1,13],["osijeg",-1,123],["atijeg",-1,120],["evitijeg",-1,92],["ovitijeg",-1,93],["astijeg",-1,94],["avijeg",-1,77],["evijeg",-1,78],["ivijeg",-1,79],["ovijeg",-1,80],["ošijeg",-1,91],["anjeg",-1,84],["enjeg",-1,85],["snjeg",-1,122],["šnjeg",-1,86],["keg",-1,95],["eleg",-1,83],["neg",-1,13],["aneg",805,10],["eneg",805,87],["sneg",805,159],["šneg",805,88],["oseg",-1,123],["ateg",-1,120],["aveg",-1,77],["eveg",-1,78],["iveg",-1,79],["oveg",-1,80],["aćeg",-1,14],["ećeg",-1,15],["ućeg",-1,16],["ošeg",-1,91],["acog",-1,124],["ecog",-1,125],["ucog",-1,126],["anjog",-1,84],["enjog",-1,85],["snjog",-1,122],["šnjog",-1,86],["kog",-1,95],["skog",827,1],["škog",827,2],["elog",-1,83],["nog",-1,13],["cinog",831,137],["činog",831,89],["osog",-1,123],["atog",-1,120],["evitog",-1,92],["ovitog",-1,93],["astog",-1,94],["avog",-1,77],["evog",-1,78],["ivog",-1,79],["ovog",-1,80],["aćog",-1,14],["ećog",-1,15],["ućog",-1,16],["ošog",-1,91],["ah",-1,104],["acah",847,128],["astajah",847,106],["istajah",847,107],["ostajah",847,108],["injah",847,114],["irah",847,100],["urah",847,105],["tah",847,113],["avah",847,97],["evah",847,96],["ivah",847,98],["ovah",847,76],["uvah",847,99],["ačah",847,102],["ih",-1,116],["acih",862,124],["ecih",862,125],["ucih",862,126],["lucih",865,121],["anjijih",862,84],["enjijih",862,85],["snjijih",862,122],["šnjijih",862,86],["kijih",862,95],["skijih",871,1],["škijih",871,2],["elijih",862,83],["nijih",862,13],["osijih",862,123],["atijih",862,120],["evitijih",862,92],["ovitijih",862,93],["astijih",862,94],["avijih",862,77],["evijih",862,78],["ivijih",862,79],["ovijih",862,80],["ošijih",862,91],["anjih",862,84],["enjih",862,85],["snjih",862,122],["šnjih",862,86],["kih",862,95],["skih",890,1],["ških",890,2],["elih",862,83],["nih",862,13],["cinih",894,137],["činih",894,89],["osih",862,123],["rosih",897,127],["atih",862,120],["jetih",862,118],["evitih",862,92],["ovitih",862,93],["astih",862,94],["avih",862,77],["evih",862,78],["ivih",862,79],["ovih",862,80],["aćih",862,14],["ećih",862,15],["ućih",862,16],["ačih",862,101],["lučih",862,117],["oših",862,91],["roših",913,90],["astadoh",-1,110],["istadoh",-1,111],["ostadoh",-1,112],["acuh",-1,124],["ecuh",-1,125],["ucuh",-1,126],["aćuh",-1,14],["ećuh",-1,15],["ućuh",-1,16],["aci",-1,124],["aceci",-1,124],["ieci",-1,162],["ajuci",-1,161],["irajuci",927,155],["urajuci",927,156],["astajuci",927,138],["istajuci",927,139],["ostajuci",927,140],["avajuci",927,144],["evajuci",927,145],["ivajuci",927,146],["uvajuci",927,147],["ujuci",-1,157],["lucujuci",937,121],["irujuci",937,155],["luci",-1,121],["nuci",-1,164],["etuci",-1,153],["astuci",-1,136],["gi",-1,20],["ugi",944,18],["aji",-1,109],["caji",946,26],["laji",946,30],["raji",946,31],["ćaji",946,28],["čaji",946,27],["đaji",946,29],["biji",-1,32],["ciji",-1,33],["diji",-1,34],["fiji",-1,40],["giji",-1,39],["anjiji",-1,84],["enjiji",-1,85],["snjiji",-1,122],["šnjiji",-1,86],["kiji",-1,95],["skiji",962,1],["škiji",962,2],["liji",-1,35],["eliji",965,83],["miji",-1,37],["niji",-1,13],["ganiji",968,9],["maniji",968,6],["paniji",968,7],["raniji",968,8],["taniji",968,5],["piji",-1,41],["riji",-1,42],["siji",-1,43],["osiji",976,123],["tiji",-1,44],["atiji",978,120],["evitiji",978,92],["ovitiji",978,93],["astiji",978,94],["aviji",-1,77],["eviji",-1,78],["iviji",-1,79],["oviji",-1,80],["ziji",-1,45],["ošiji",-1,91],["žiji",-1,38],["anji",-1,84],["enji",-1,85],["snji",-1,122],["šnji",-1,86],["ki",-1,95],["ski",994,1],["ški",994,2],["ali",-1,104],["acali",997,128],["astajali",997,106],["istajali",997,107],["ostajali",997,108],["ijali",997,47],["injali",997,114],["nali",997,46],["irali",997,100],["urali",997,105],["tali",997,113],["astali",1007,110],["istali",1007,111],["ostali",1007,112],["avali",997,97],["evali",997,96],["ivali",997,98],["ovali",997,76],["uvali",997,99],["ačali",997,102],["eli",-1,83],["ili",-1,116],["acili",1018,124],["lucili",1018,121],["nili",1018,103],["rosili",1018,127],["jetili",1018,118],["ozili",1018,48],["ačili",1018,101],["lučili",1018,117],["rošili",1018,90],["oli",-1,50],["asli",-1,115],["nuli",-1,13],["rami",-1,52],["lemi",-1,51],["ni",-1,13],["ani",1033,10],["acani",1034,128],["urani",1034,105],["tani",1034,113],["avani",1034,97],["evani",1034,96],["ivani",1034,98],["uvani",1034,99],["ačani",1034,102],["aceni",1033,124],["luceni",1033,121],["ačeni",1033,101],["lučeni",1033,117],["ini",1033,11],["cini",1047,137],["čini",1047,89],["oni",1033,12],["ari",-1,53],["dri",-1,54],["eri",-1,55],["ori",-1,56],["basi",-1,135],["gasi",-1,131],["jasi",-1,129],["kasi",-1,133],["nasi",-1,132],["tasi",-1,130],["vasi",-1,134],["esi",-1,152],["isi",-1,154],["osi",-1,123],["avsi",-1,161],["acavsi",1065,128],["iravsi",1065,155],["tavsi",1065,160],["etavsi",1068,153],["astavsi",1068,141],["istavsi",1068,142],["ostavsi",1068,143],["ivsi",-1,162],["nivsi",1073,158],["rosivsi",1073,127],["nuvsi",-1,164],["ati",-1,104],["acati",1077,128],["astajati",1077,106],["istajati",1077,107],["ostajati",1077,108],["injati",1077,114],["ikati",1077,68],["lati",1077,69],["irati",1077,100],["urati",1077,105],["tati",1077,113],["astati",1087,110],["istati",1087,111],["ostati",1087,112],["avati",1077,97],["evati",1077,96],["ivati",1077,98],["ovati",1077,76],["uvati",1077,99],["ačati",1077,102],["eti",-1,70],["iti",-1,116],["aciti",1098,124],["luciti",1098,121],["niti",1098,103],["rositi",1098,127],["jetiti",1098,118],["eviti",1098,92],["oviti",1098,93],["ačiti",1098,101],["lučiti",1098,117],["rošiti",1098,90],["asti",-1,94],["esti",-1,71],["isti",-1,72],["ksti",-1,73],["osti",-1,74],["nuti",-1,13],["avi",-1,77],["evi",-1,78],["ajevi",1116,109],["cajevi",1117,26],["lajevi",1117,30],["rajevi",1117,31],["ćajevi",1117,28],["čajevi",1117,27],["đajevi",1117,29],["ivi",-1,79],["ovi",-1,80],["govi",1125,20],["ugovi",1126,17],["lovi",1125,82],["olovi",1128,49],["movi",1125,81],["onovi",1125,12],["ieći",-1,116],["ačeći",-1,101],["ajući",-1,104],["irajući",1134,100],["urajući",1134,105],["astajući",1134,106],["istajući",1134,107],["ostajući",1134,108],["avajući",1134,97],["evajući",1134,96],["ivajući",1134,98],["uvajući",1134,99],["ujući",-1,25],["irujući",1144,100],["lučujući",1144,117],["nući",-1,13],["etući",-1,70],["astući",-1,115],["ači",-1,101],["luči",-1,117],["baši",-1,63],["gaši",-1,64],["jaši",-1,61],["kaši",-1,62],["naši",-1,60],["taši",-1,59],["vaši",-1,65],["eši",-1,66],["iši",-1,67],["oši",-1,91],["avši",-1,104],["iravši",1162,100],["tavši",1162,113],["etavši",1164,70],["astavši",1164,110],["istavši",1164,111],["ostavši",1164,112],["ačavši",1162,102],["ivši",-1,116],["nivši",1170,103],["rošivši",1170,90],["nuvši",-1,13],["aj",-1,104],["uraj",1174,105],["taj",1174,113],["avaj",1174,97],["evaj",1174,96],["ivaj",1174,98],["uvaj",1174,99],["ij",-1,116],["acoj",-1,124],["ecoj",-1,125],["ucoj",-1,126],["anjijoj",-1,84],["enjijoj",-1,85],["snjijoj",-1,122],["šnjijoj",-1,86],["kijoj",-1,95],["skijoj",1189,1],["škijoj",1189,2],["elijoj",-1,83],["nijoj",-1,13],["osijoj",-1,123],["evitijoj",-1,92],["ovitijoj",-1,93],["astijoj",-1,94],["avijoj",-1,77],["evijoj",-1,78],["ivijoj",-1,79],["ovijoj",-1,80],["ošijoj",-1,91],["anjoj",-1,84],["enjoj",-1,85],["snjoj",-1,122],["šnjoj",-1,86],["koj",-1,95],["skoj",1207,1],["škoj",1207,2],["aloj",-1,104],["eloj",-1,83],["noj",-1,13],["cinoj",1212,137],["činoj",1212,89],["osoj",-1,123],["atoj",-1,120],["evitoj",-1,92],["ovitoj",-1,93],["astoj",-1,94],["avoj",-1,77],["evoj",-1,78],["ivoj",-1,79],["ovoj",-1,80],["aćoj",-1,14],["ećoj",-1,15],["ućoj",-1,16],["ošoj",-1,91],["lucuj",-1,121],["iruj",-1,100],["lučuj",-1,117],["al",-1,104],["iral",1231,100],["ural",1231,105],["el",-1,119],["il",-1,116],["am",-1,104],["acam",1236,128],["iram",1236,100],["uram",1236,105],["tam",1236,113],["avam",1236,97],["evam",1236,96],["ivam",1236,98],["uvam",1236,99],["ačam",1236,102],["em",-1,119],["acem",1246,124],["ecem",1246,125],["ucem",1246,126],["astadem",1246,110],["istadem",1246,111],["ostadem",1246,112],["ajem",1246,104],["cajem",1253,26],["lajem",1253,30],["rajem",1253,31],["astajem",1253,106],["istajem",1253,107],["ostajem",1253,108],["ćajem",1253,28],["čajem",1253,27],["đajem",1253,29],["ijem",1246,116],["anjijem",1263,84],["enjijem",1263,85],["snjijem",1263,123],["šnjijem",1263,86],["kijem",1263,95],["skijem",1268,1],["škijem",1268,2],["lijem",1263,24],["elijem",1271,83],["nijem",1263,13],["rarijem",1263,21],["sijem",1263,23],["osijem",1275,123],["atijem",1263,120],["evitijem",1263,92],["ovitijem",1263,93],["otijem",1263,22],["astijem",1263,94],["avijem",1263,77],["evijem",1263,78],["ivijem",1263,79],["ovijem",1263,80],["ošijem",1263,91],["anjem",1246,84],["enjem",1246,85],["injem",1246,114],["snjem",1246,122],["šnjem",1246,86],["ujem",1246,25],["lucujem",1292,121],["irujem",1292,100],["lučujem",1292,117],["kem",1246,95],["skem",1296,1],["škem",1296,2],["elem",1246,83],["nem",1246,13],["anem",1300,10],["astanem",1301,110],["istanem",1301,111],["ostanem",1301,112],["enem",1300,87],["snem",1300,159],["šnem",1300,88],["basem",1246,135],["gasem",1246,131],["jasem",1246,129],["kasem",1246,133],["nasem",1246,132],["tasem",1246,130],["vasem",1246,134],["esem",1246,152],["isem",1246,154],["osem",1246,123],["atem",1246,120],["etem",1246,70],["evitem",1246,92],["ovitem",1246,93],["astem",1246,94],["istem",1246,151],["ištem",1246,75],["avem",1246,77],["evem",1246,78],["ivem",1246,79],["aćem",1246,14],["ećem",1246,15],["ućem",1246,16],["bašem",1246,63],["gašem",1246,64],["jašem",1246,61],["kašem",1246,62],["našem",1246,60],["tašem",1246,59],["vašem",1246,65],["ešem",1246,66],["išem",1246,67],["ošem",1246,91],["im",-1,116],["acim",1341,124],["ecim",1341,125],["ucim",1341,126],["lucim",1344,121],["anjijim",1341,84],["enjijim",1341,85],["snjijim",1341,122],["šnjijim",1341,86],["kijim",1341,95],["skijim",1350,1],["škijim",1350,2],["elijim",1341,83],["nijim",1341,13],["osijim",1341,123],["atijim",1341,120],["evitijim",1341,92],["ovitijim",1341,93],["astijim",1341,94],["avijim",1341,77],["evijim",1341,78],["ivijim",1341,79],["ovijim",1341,80],["ošijim",1341,91],["anjim",1341,84],["enjim",1341,85],["snjim",1341,122],["šnjim",1341,86],["kim",1341,95],["skim",1369,1],["škim",1369,2],["elim",1341,83],["nim",1341,13],["cinim",1373,137],["činim",1373,89],["osim",1341,123],["rosim",1376,127],["atim",1341,120],["jetim",1341,118],["evitim",1341,92],["ovitim",1341,93],["astim",1341,94],["avim",1341,77],["evim",1341,78],["ivim",1341,79],["ovim",1341,80],["aćim",1341,14],["ećim",1341,15],["ućim",1341,16],["ačim",1341,101],["lučim",1341,117],["ošim",1341,91],["rošim",1392,90],["acom",-1,124],["ecom",-1,125],["ucom",-1,126],["gom",-1,20],["logom",1397,19],["ugom",1397,18],["bijom",-1,32],["cijom",-1,33],["dijom",-1,34],["fijom",-1,40],["gijom",-1,39],["lijom",-1,35],["mijom",-1,37],["nijom",-1,36],["ganijom",1407,9],["manijom",1407,6],["panijom",1407,7],["ranijom",1407,8],["tanijom",1407,5],["pijom",-1,41],["rijom",-1,42],["sijom",-1,43],["tijom",-1,44],["zijom",-1,45],["žijom",-1,38],["anjom",-1,84],["enjom",-1,85],["snjom",-1,122],["šnjom",-1,86],["kom",-1,95],["skom",1423,1],["škom",1423,2],["alom",-1,104],["ijalom",1426,47],["nalom",1426,46],["elom",-1,83],["ilom",-1,116],["ozilom",1430,48],["olom",-1,50],["ramom",-1,52],["lemom",-1,51],["nom",-1,13],["anom",1435,10],["inom",1435,11],["cinom",1437,137],["aninom",1437,10],["činom",1437,89],["onom",1435,12],["arom",-1,53],["drom",-1,54],["erom",-1,55],["orom",-1,56],["basom",-1,135],["gasom",-1,131],["jasom",-1,129],["kasom",-1,133],["nasom",-1,132],["tasom",-1,130],["vasom",-1,134],["esom",-1,57],["isom",-1,58],["osom",-1,123],["atom",-1,120],["ikatom",1456,68],["latom",1456,69],["etom",-1,70],["evitom",-1,92],["ovitom",-1,93],["astom",-1,94],["estom",-1,71],["istom",-1,72],["kstom",-1,73],["ostom",-1,74],["avom",-1,77],["evom",-1,78],["ivom",-1,79],["ovom",-1,80],["lovom",1470,82],["movom",1470,81],["stvom",-1,3],["štvom",-1,4],["aćom",-1,14],["ećom",-1,15],["ućom",-1,16],["bašom",-1,63],["gašom",-1,64],["jašom",-1,61],["kašom",-1,62],["našom",-1,60],["tašom",-1,59],["vašom",-1,65],["ešom",-1,66],["išom",-1,67],["ošom",-1,91],["an",-1,104],["acan",1488,128],["iran",1488,100],["uran",1488,105],["tan",1488,113],["avan",1488,97],["evan",1488,96],["ivan",1488,98],["uvan",1488,99],["ačan",1488,102],["acen",-1,124],["lucen",-1,121],["ačen",-1,101],["lučen",-1,117],["anin",-1,10],["ao",-1,104],["acao",1503,128],["astajao",1503,106],["istajao",1503,107],["ostajao",1503,108],["injao",1503,114],["irao",1503,100],["urao",1503,105],["tao",1503,113],["astao",1511,110],["istao",1511,111],["ostao",1511,112],["avao",1503,97],["evao",1503,96],["ivao",1503,98],["ovao",1503,76],["uvao",1503,99],["ačao",1503,102],["go",-1,20],["ugo",1521,18],["io",-1,116],["acio",1523,124],["lucio",1523,121],["lio",1523,24],["nio",1523,103],["rario",1523,21],["sio",1523,23],["rosio",1529,127],["jetio",1523,118],["otio",1523,22],["ačio",1523,101],["lučio",1523,117],["rošio",1523,90],["bijo",-1,32],["cijo",-1,33],["dijo",-1,34],["fijo",-1,40],["gijo",-1,39],["lijo",-1,35],["mijo",-1,37],["nijo",-1,36],["pijo",-1,41],["rijo",-1,42],["sijo",-1,43],["tijo",-1,44],["zijo",-1,45],["žijo",-1,38],["anjo",-1,84],["enjo",-1,85],["snjo",-1,122],["šnjo",-1,86],["ko",-1,95],["sko",1554,1],["ško",1554,2],["alo",-1,104],["acalo",1557,128],["astajalo",1557,106],["istajalo",1557,107],["ostajalo",1557,108],["ijalo",1557,47],["injalo",1557,114],["nalo",1557,46],["iralo",1557,100],["uralo",1557,105],["talo",1557,113],["astalo",1567,110],["istalo",1567,111],["ostalo",1567,112],["avalo",1557,97],["evalo",1557,96],["ivalo",1557,98],["ovalo",1557,76],["uvalo",1557,99],["ačalo",1557,102],["elo",-1,83],["ilo",-1,116],["acilo",1578,124],["lucilo",1578,121],["nilo",1578,103],["rosilo",1578,127],["jetilo",1578,118],["ačilo",1578,101],["lučilo",1578,117],["rošilo",1578,90],["aslo",-1,115],["nulo",-1,13],["amo",-1,104],["acamo",1589,128],["ramo",1589,52],["iramo",1591,100],["uramo",1591,105],["tamo",1589,113],["avamo",1589,97],["evamo",1589,96],["ivamo",1589,98],["uvamo",1589,99],["ačamo",1589,102],["emo",-1,119],["astademo",1600,110],["istademo",1600,111],["ostademo",1600,112],["astajemo",1600,106],["istajemo",1600,107],["ostajemo",1600,108],["ijemo",1600,116],["injemo",1600,114],["ujemo",1600,25],["lucujemo",1609,121],["irujemo",1609,100],["lučujemo",1609,117],["lemo",1600,51],["nemo",1600,13],["astanemo",1614,110],["istanemo",1614,111],["ostanemo",1614,112],["etemo",1600,70],["astemo",1600,115],["imo",-1,116],["acimo",1620,124],["lucimo",1620,121],["nimo",1620,13],["astanimo",1623,110],["istanimo",1623,111],["ostanimo",1623,112],["rosimo",1620,127],["etimo",1620,70],["jetimo",1628,118],["astimo",1620,115],["ačimo",1620,101],["lučimo",1620,117],["rošimo",1620,90],["ajmo",-1,104],["urajmo",1634,105],["tajmo",1634,113],["astajmo",1636,106],["istajmo",1636,107],["ostajmo",1636,108],["avajmo",1634,97],["evajmo",1634,96],["ivajmo",1634,98],["uvajmo",1634,99],["ijmo",-1,116],["ujmo",-1,25],["lucujmo",1645,121],["irujmo",1645,100],["lučujmo",1645,117],["asmo",-1,104],["acasmo",1649,128],["astajasmo",1649,106],["istajasmo",1649,107],["ostajasmo",1649,108],["injasmo",1649,114],["irasmo",1649,100],["urasmo",1649,105],["tasmo",1649,113],["avasmo",1649,97],["evasmo",1649,96],["ivasmo",1649,98],["ovasmo",1649,76],["uvasmo",1649,99],["ačasmo",1649,102],["ismo",-1,116],["acismo",1664,124],["lucismo",1664,121],["nismo",1664,103],["rosismo",1664,127],["jetismo",1664,118],["ačismo",1664,101],["lučismo",1664,117],["rošismo",1664,90],["astadosmo",-1,110],["istadosmo",-1,111],["ostadosmo",-1,112],["nusmo",-1,13],["no",-1,13],["ano",1677,104],["acano",1678,128],["urano",1678,105],["tano",1678,113],["avano",1678,97],["evano",1678,96],["ivano",1678,98],["uvano",1678,99],["ačano",1678,102],["aceno",1677,124],["luceno",1677,121],["ačeno",1677,101],["lučeno",1677,117],["ino",1677,11],["cino",1691,137],["čino",1691,89],["ato",-1,120],["ikato",1694,68],["lato",1694,69],["eto",-1,70],["evito",-1,92],["ovito",-1,93],["asto",-1,94],["esto",-1,71],["isto",-1,72],["ksto",-1,73],["osto",-1,74],["nuto",-1,13],["nuo",-1,13],["avo",-1,77],["evo",-1,78],["ivo",-1,79],["ovo",-1,80],["stvo",-1,3],["štvo",-1,4],["as",-1,161],["acas",1713,128],["iras",1713,155],["uras",1713,156],["tas",1713,160],["avas",1713,144],["evas",1713,145],["ivas",1713,146],["uvas",1713,147],["es",-1,163],["astades",1722,141],["istades",1722,142],["ostades",1722,143],["astajes",1722,138],["istajes",1722,139],["ostajes",1722,140],["ijes",1722,162],["injes",1722,150],["ujes",1722,157],["lucujes",1731,121],["irujes",1731,155],["nes",1722,164],["astanes",1734,141],["istanes",1734,142],["ostanes",1734,143],["etes",1722,153],["astes",1722,136],["is",-1,162],["acis",1740,124],["lucis",1740,121],["nis",1740,158],["rosis",1740,127],["jetis",1740,149],["at",-1,104],["acat",1746,128],["astajat",1746,106],["istajat",1746,107],["ostajat",1746,108],["injat",1746,114],["irat",1746,100],["urat",1746,105],["tat",1746,113],["astat",1754,110],["istat",1754,111],["ostat",1754,112],["avat",1746,97],["evat",1746,96],["ivat",1746,98],["irivat",1760,100],["ovat",1746,76],["uvat",1746,99],["ačat",1746,102],["it",-1,116],["acit",1765,124],["lucit",1765,121],["rosit",1765,127],["jetit",1765,118],["ačit",1765,101],["lučit",1765,117],["rošit",1765,90],["nut",-1,13],["astadu",-1,110],["istadu",-1,111],["ostadu",-1,112],["gu",-1,20],["logu",1777,19],["ugu",1777,18],["ahu",-1,104],["acahu",1780,128],["astajahu",1780,106],["istajahu",1780,107],["ostajahu",1780,108],["injahu",1780,114],["irahu",1780,100],["urahu",1780,105],["avahu",1780,97],["evahu",1780,96],["ivahu",1780,98],["ovahu",1780,76],["uvahu",1780,99],["ačahu",1780,102],["aju",-1,104],["caju",1794,26],["acaju",1795,128],["laju",1794,30],["raju",1794,31],["iraju",1798,100],["uraju",1798,105],["taju",1794,113],["astaju",1801,106],["istaju",1801,107],["ostaju",1801,108],["avaju",1794,97],["evaju",1794,96],["ivaju",1794,98],["uvaju",1794,99],["ćaju",1794,28],["čaju",1794,27],["ačaju",1810,102],["đaju",1794,29],["iju",-1,116],["biju",1813,32],["ciju",1813,33],["diju",1813,34],["fiju",1813,40],["giju",1813,39],["anjiju",1813,84],["enjiju",1813,85],["snjiju",1813,122],["šnjiju",1813,86],["kiju",1813,95],["liju",1813,24],["eliju",1824,83],["miju",1813,37],["niju",1813,13],["ganiju",1827,9],["maniju",1827,6],["paniju",1827,7],["raniju",1827,8],["taniju",1827,5],["piju",1813,41],["riju",1813,42],["rariju",1834,21],["siju",1813,23],["osiju",1836,123],["tiju",1813,44],["atiju",1838,120],["otiju",1838,22],["aviju",1813,77],["eviju",1813,78],["iviju",1813,79],["oviju",1813,80],["ziju",1813,45],["ošiju",1813,91],["žiju",1813,38],["anju",-1,84],["enju",-1,85],["snju",-1,122],["šnju",-1,86],["uju",-1,25],["lucuju",1852,121],["iruju",1852,100],["lučuju",1852,117],["ku",-1,95],["sku",1856,1],["šku",1856,2],["alu",-1,104],["ijalu",1859,47],["nalu",1859,46],["elu",-1,83],["ilu",-1,116],["ozilu",1863,48],["olu",-1,50],["ramu",-1,52],["acemu",-1,124],["ecemu",-1,125],["ucemu",-1,126],["anjijemu",-1,84],["enjijemu",-1,85],["snjijemu",-1,122],["šnjijemu",-1,86],["kijemu",-1,95],["skijemu",1874,1],["škijemu",1874,2],["elijemu",-1,83],["nijemu",-1,13],["osijemu",-1,123],["atijemu",-1,120],["evitijemu",-1,92],["ovitijemu",-1,93],["astijemu",-1,94],["avijemu",-1,77],["evijemu",-1,78],["ivijemu",-1,79],["ovijemu",-1,80],["ošijemu",-1,91],["anjemu",-1,84],["enjemu",-1,85],["snjemu",-1,122],["šnjemu",-1,86],["kemu",-1,95],["skemu",1893,1],["škemu",1893,2],["lemu",-1,51],["elemu",1896,83],["nemu",-1,13],["anemu",1898,10],["enemu",1898,87],["snemu",1898,159],["šnemu",1898,88],["osemu",-1,123],["atemu",-1,120],["evitemu",-1,92],["ovitemu",-1,93],["astemu",-1,94],["avemu",-1,77],["evemu",-1,78],["ivemu",-1,79],["ovemu",-1,80],["aćemu",-1,14],["ećemu",-1,15],["ućemu",-1,16],["ošemu",-1,91],["acomu",-1,124],["ecomu",-1,125],["ucomu",-1,126],["anjomu",-1,84],["enjomu",-1,85],["snjomu",-1,122],["šnjomu",-1,86],["komu",-1,95],["skomu",1923,1],["škomu",1923,2],["elomu",-1,83],["nomu",-1,13],["cinomu",1927,137],["činomu",1927,89],["osomu",-1,123],["atomu",-1,120],["evitomu",-1,92],["ovitomu",-1,93],["astomu",-1,94],["avomu",-1,77],["evomu",-1,78],["ivomu",-1,79],["ovomu",-1,80],["aćomu",-1,14],["ećomu",-1,15],["ućomu",-1,16],["ošomu",-1,91],["nu",-1,13],["anu",1943,10],["astanu",1944,110],["istanu",1944,111],["ostanu",1944,112],["inu",1943,11],["cinu",1948,137],["aninu",1948,10],["činu",1948,89],["onu",1943,12],["aru",-1,53],["dru",-1,54],["eru",-1,55],["oru",-1,56],["basu",-1,135],["gasu",-1,131],["jasu",-1,129],["kasu",-1,133],["nasu",-1,132],["tasu",-1,130],["vasu",-1,134],["esu",-1,57],["isu",-1,58],["osu",-1,123],["atu",-1,120],["ikatu",1967,68],["latu",1967,69],["etu",-1,70],["evitu",-1,92],["ovitu",-1,93],["astu",-1,94],["estu",-1,71],["istu",-1,72],["kstu",-1,73],["ostu",-1,74],["ištu",-1,75],["avu",-1,77],["evu",-1,78],["ivu",-1,79],["ovu",-1,80],["lovu",1982,82],["movu",1982,81],["stvu",-1,3],["štvu",-1,4],["bašu",-1,63],["gašu",-1,64],["jašu",-1,61],["kašu",-1,62],["našu",-1,60],["tašu",-1,59],["vašu",-1,65],["ešu",-1,66],["išu",-1,67],["ošu",-1,91],["avav",-1,97],["evav",-1,96],["ivav",-1,98],["uvav",-1,99],["kov",-1,95],["aš",-1,104],["iraš",2002,100],["uraš",2002,105],["taš",2002,113],["avaš",2002,97],["evaš",2002,96],["ivaš",2002,98],["uvaš",2002,99],["ačaš",2002,102],["eš",-1,119],["astadeš",2011,110],["istadeš",2011,111],["ostadeš",2011,112],["astaješ",2011,106],["istaješ",2011,107],["ostaješ",2011,108],["iješ",2011,116],["inješ",2011,114],["uješ",2011,25],["iruješ",2020,100],["lučuješ",2020,117],["neš",2011,13],["astaneš",2023,110],["istaneš",2023,111],["ostaneš",2023,112],["eteš",2011,70],["asteš",2011,115],["iš",-1,116],["niš",2029,103],["jetiš",2029,118],["ačiš",2029,101],["lučiš",2029,117],["rošiš",2029,90]],t=[["a",-1,1],["oga",0,1],["ama",0,1],["ima",0,1],["ena",0,1],["e",-1,1],["og",-1,1],["anog",6,1],["enog",6,1],["anih",-1,1],["enih",-1,1],["i",-1,1],["ani",11,1],["eni",11,1],["anoj",-1,1],["enoj",-1,1],["anim",-1,1],["enim",-1,1],["om",-1,1],["enom",18,1],["o",-1,1],["ano",20,1],["eno",20,1],["ost",-1,1],["u",-1,1],["enu",24,1]],u=[17,65,16],n=[65,4,0,0,0,0,0,0,0,0,0,4,0,0,128],j=[119,95,23,1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,136,0,0,0,0,0,0,0,0,0,128,0,0,0,16],c=[1],f=0,l=!1;function v(){return f<=m.cursor}this.stem=function(){(()=>{for(var a,e=m.cursor;;){var i=m.cursor;a:{for(;;){var r=m.cursor;if(m.bra=m.cursor,0!=(a=m.find_among(s))){switch(m.ket=m.cursor,a){case 1:if(m.slice_from("a"))break;return;case 2:if(m.slice_from("b"))break;return;case 3:if(m.slice_from("v"))break;return;case 4:if(m.slice_from("g"))break;return;case 5:if(m.slice_from("d"))break;return;case 6:if(m.slice_from("đ"))break;return;case 7:if(m.slice_from("e"))break;return;case 8:if(m.slice_from("ž"))break;return;case 9:if(m.slice_from("z"))break;return;case 10:if(m.slice_from("i"))break;return;case 11:if(m.slice_from("j"))break;return;case 12:if(m.slice_from("k"))break;return;case 13:if(m.slice_from("l"))break;return;case 14:if(m.slice_from("lj"))break;return;case 15:if(m.slice_from("m"))break;return;case 16:if(m.slice_from("n"))break;return;case 17:if(m.slice_from("nj"))break;return;case 18:if(m.slice_from("o"))break;return;case 19:if(m.slice_from("p"))break;return;case 20:if(m.slice_from("r"))break;return;case 21:if(m.slice_from("s"))break;return;case 22:if(m.slice_from("t"))break;return;case 23:if(m.slice_from("ć"))break;return;case 24:if(m.slice_from("u"))break;return;case 25:if(m.slice_from("f"))break;return;case 26:if(m.slice_from("h"))break;return;case 27:if(m.slice_from("c"))break;return;case 28:if(m.slice_from("č"))break;return;case 29:if(m.slice_from("dž"))break;return;case 30:if(m.slice_from("š"))break;return}m.cursor=r;break}if(m.cursor=r,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=i;break}m.cursor=e})(),(()=>{for(var a=m.cursor;;){var e=m.cursor;a:{for(;;){var i=m.cursor;if(m.in_grouping(j,98,382)&&(m.bra=m.cursor,m.eq_s("ije"))&&(m.ket=m.cursor,m.in_grouping(j,98,382))){if(!m.slice_from("e"))return;m.cursor=i;break}if(m.cursor=i,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=e;break}for(m.cursor=a,a=m.cursor;;){var r=m.cursor;a:{for(;;){var s=m.cursor;if(m.in_grouping(j,98,382)&&(m.bra=m.cursor,m.eq_s("je"))&&(m.ket=m.cursor,m.in_grouping(j,98,382))){if(!m.slice_from("e"))return;m.cursor=s;break}if(m.cursor=s,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=r;break}for(m.cursor=a,a=m.cursor;;){var o=m.cursor;a:{for(;;){var t=m.cursor;if(m.bra=m.cursor,m.eq_s("dj")){if(m.ket=m.cursor,!m.slice_from("đ"))return;m.cursor=t;break}if(m.cursor=t,m.cursor>=m.limit)break a;m.cursor++}continue}m.cursor=o;break}m.cursor=a})(),l=!0;var a=m.cursor,a=(m.go_out_grouping(n,263,382)&&(m.cursor++,l=!1),m.cursor=a,f=m.limit,m.cursor),a=(m.go_out_grouping(u,97,117)&&(m.cursor++,2<=(f=m.cursor)||m.go_in_grouping(u,97,117)&&(m.cursor++,f=m.cursor)),m.cursor=a,m.cursor);a:{for(;;){if(m.eq_s("r"))break;if(m.cursor>=m.limit)break a;m.cursor++}var e=m.cursor;if(m.cursor<2){if(m.cursor=e,!m.go_in_grouping(c,114,114))break a;m.cursor++}f-m.cursor<=1||(f=m.cursor)}m.cursor=a,m.limit_backward=m.cursor,m.cursor=m.limit;var a=m.limit-m.cursor,a=((()=>{var a;if(m.ket=m.cursor,0!=(a=m.find_among_b(r)))switch(m.bra=m.cursor,a){case 1:if(m.slice_from("loga"))break;return;case 2:if(m.slice_from("peh"))break;return;case 3:if(m.slice_from("vojka"))break;return;case 4:if(m.slice_from("bojka"))break;return;case 5:if(m.slice_from("jak"))break;return;case 6:if(m.slice_from("čajni"))break;return;case 7:if(!l)return;if(m.slice_from("cajni"))break;return;case 8:if(m.slice_from("erni"))break;return;case 9:if(m.slice_from("larni"))break;return;case 10:if(m.slice_from("esni"))break;return;case 11:if(m.slice_from("anjca"))break;return;case 12:if(m.slice_from("ajca"))break;return;case 13:if(m.slice_from("ljca"))break;return;case 14:if(m.slice_from("ejca"))break;return;case 15:if(m.slice_from("ojca"))break;return;case 16:if(m.slice_from("ajka"))break;return;case 17:if(m.slice_from("ojka"))break;return;case 18:if(m.slice_from("šca"))break;return;case 19:if(m.slice_from("ing"))break;return;case 20:if(m.slice_from("tvenik"))break;return;case 21:if(m.slice_from("tetika"))break;return;case 22:if(m.slice_from("nstva"))break;return;case 23:if(m.slice_from("nik"))break;return;case 24:if(m.slice_from("tik"))break;return;case 25:if(m.slice_from("zik"))break;return;case 26:if(m.slice_from("snik"))break;return;case 27:if(m.slice_from("kusi"))break;return;case 28:if(m.slice_from("kusni"))break;return;case 29:if(m.slice_from("kustva"))break;return;case 30:if(m.slice_from("dušni"))break;return;case 31:if(!l)return;if(m.slice_from("dusni"))break;return;case 32:if(m.slice_from("antni"))break;return;case 33:if(m.slice_from("bilni"))break;return;case 34:if(m.slice_from("tilni"))break;return;case 35:if(m.slice_from("avilni"))break;return;case 36:if(m.slice_from("silni"))break;return;case 37:if(m.slice_from("gilni"))break;return;case 38:if(m.slice_from("rilni"))break;return;case 39:if(m.slice_from("nilni"))break;return;case 40:if(m.slice_from("alni"))break;return;case 41:if(m.slice_from("ozni"))break;return;case 42:if(m.slice_from("ravi"))break;return;case 43:if(m.slice_from("stavni"))break;return;case 44:if(m.slice_from("pravni"))break;return;case 45:if(m.slice_from("tivni"))break;return;case 46:if(m.slice_from("sivni"))break;return;case 47:if(m.slice_from("atni"))break;return;case 48:if(m.slice_from("enta"))break;return;case 49:if(m.slice_from("tetni"))break;return;case 50:if(m.slice_from("pletni"))break;return;case 51:if(m.slice_from("šavi"))break;return;case 52:if(!l)return;if(m.slice_from("savi"))break;return;case 53:if(m.slice_from("anta"))break;return;case 54:if(m.slice_from("ačka"))break;return;case 55:if(!l)return;if(m.slice_from("acka"))break;return;case 56:if(m.slice_from("uška"))break;return;case 57:if(!l)return;if(m.slice_from("uska"))break;return;case 58:if(m.slice_from("atka"))break;return;case 59:if(m.slice_from("etka"))break;return;case 60:if(m.slice_from("itka"))break;return;case 61:if(m.slice_from("otka"))break;return;case 62:if(m.slice_from("utka"))break;return;case 63:if(m.slice_from("eskna"))break;return;case 64:if(m.slice_from("tični"))break;return;case 65:if(!l)return;if(m.slice_from("ticni"))break;return;case 66:if(m.slice_from("ojska"))break;return;case 67:if(m.slice_from("esma"))break;return;case 68:if(m.slice_from("metra"))break;return;case 69:if(m.slice_from("centra"))break;return;case 70:if(m.slice_from("istra"))break;return;case 71:if(m.slice_from("osti"))break;return;case 72:if(!l)return;if(m.slice_from("osti"))break;return;case 73:if(m.slice_from("dba"))break;return;case 74:if(m.slice_from("čka"))break;return;case 75:if(m.slice_from("mca"))break;return;case 76:if(m.slice_from("nca"))break;return;case 77:if(m.slice_from("voljni"))break;return;case 78:if(m.slice_from("anki"))break;return;case 79:if(m.slice_from("vca"))break;return;case 80:if(m.slice_from("sca"))break;return;case 81:if(m.slice_from("rca"))break;return;case 82:if(m.slice_from("alca"))break;return;case 83:if(m.slice_from("elca"))break;return;case 84:if(m.slice_from("olca"))break;return;case 85:if(m.slice_from("njca"))break;return;case 86:if(m.slice_from("ekta"))break;return;case 87:if(m.slice_from("izma"))break;return;case 88:if(m.slice_from("jebi"))break;return;case 89:if(m.slice_from("baci"))break;return;case 90:if(m.slice_from("ašni"))break;return;case 91:if(!l)return;if(m.slice_from("asni"))break}})(),m.cursor=m.limit-a,m.limit-m.cursor),i=m.limit-m.cursor;return(()=>{var a;if(m.ket=m.cursor,0!=(a=m.find_among_b(o))&&(m.bra=m.cursor,v())){switch(a){case 1:if(m.slice_from("sk"))break;return;case 2:if(m.slice_from("šk"))break;return;case 3:if(m.slice_from("stv"))break;return;case 4:if(m.slice_from("štv"))break;return;case 5:if(m.slice_from("tanij"))break;return;case 6:if(m.slice_from("manij"))break;return;case 7:if(m.slice_from("panij"))break;return;case 8:if(m.slice_from("ranij"))break;return;case 9:if(m.slice_from("ganij"))break;return;case 10:if(m.slice_from("an"))break;return;case 11:if(m.slice_from("in"))break;return;case 12:if(m.slice_from("on"))break;return;case 13:if(m.slice_from("n"))break;return;case 14:if(m.slice_from("ać"))break;return;case 15:if(m.slice_from("eć"))break;return;case 16:if(m.slice_from("uć"))break;return;case 17:if(m.slice_from("ugov"))break;return;case 18:if(m.slice_from("ug"))break;return;case 19:if(m.slice_from("log"))break;return;case 20:if(m.slice_from("g"))break;return;case 21:if(m.slice_from("rari"))break;return;case 22:if(m.slice_from("oti"))break;return;case 23:if(m.slice_from("si"))break;return;case 24:if(m.slice_from("li"))break;return;case 25:if(m.slice_from("uj"))break;return;case 26:if(m.slice_from("caj"))break;return;case 27:if(m.slice_from("čaj"))break;return;case 28:if(m.slice_from("ćaj"))break;return;case 29:if(m.slice_from("đaj"))break;return;case 30:if(m.slice_from("laj"))break;return;case 31:if(m.slice_from("raj"))break;return;case 32:if(m.slice_from("bij"))break;return;case 33:if(m.slice_from("cij"))break;return;case 34:if(m.slice_from("dij"))break;return;case 35:if(m.slice_from("lij"))break;return;case 36:if(m.slice_from("nij"))break;return;case 37:if(m.slice_from("mij"))break;return;case 38:if(m.slice_from("žij"))break;return;case 39:if(m.slice_from("gij"))break;return;case 40:if(m.slice_from("fij"))break;return;case 41:if(m.slice_from("pij"))break;return;case 42:if(m.slice_from("rij"))break;return;case 43:if(m.slice_from("sij"))break;return;case 44:if(m.slice_from("tij"))break;return;case 45:if(m.slice_from("zij"))break;return;case 46:if(m.slice_from("nal"))break;return;case 47:if(m.slice_from("ijal"))break;return;case 48:if(m.slice_from("ozil"))break;return;case 49:if(m.slice_from("olov"))break;return;case 50:if(m.slice_from("ol"))break;return;case 51:if(m.slice_from("lem"))break;return;case 52:if(m.slice_from("ram"))break;return;case 53:if(m.slice_from("ar"))break;return;case 54:if(m.slice_from("dr"))break;return;case 55:if(m.slice_from("er"))break;return;case 56:if(m.slice_from("or"))break;return;case 57:if(m.slice_from("es"))break;return;case 58:if(m.slice_from("is"))break;return;case 59:if(m.slice_from("taš"))break;return;case 60:if(m.slice_from("naš"))break;return;case 61:if(m.slice_from("jaš"))break;return;case 62:if(m.slice_from("kaš"))break;return;case 63:if(m.slice_from("baš"))break;return;case 64:if(m.slice_from("gaš"))break;return;case 65:if(m.slice_from("vaš"))break;return;case 66:if(m.slice_from("eš"))break;return;case 67:if(m.slice_from("iš"))break;return;case 68:if(m.slice_from("ikat"))break;return;case 69:if(m.slice_from("lat"))break;return;case 70:if(m.slice_from("et"))break;return;case 71:if(m.slice_from("est"))break;return;case 72:if(m.slice_from("ist"))break;return;case 73:if(m.slice_from("kst"))break;return;case 74:if(m.slice_from("ost"))break;return;case 75:if(m.slice_from("išt"))break;return;case 76:if(m.slice_from("ova"))break;return;case 77:if(m.slice_from("av"))break;return;case 78:if(m.slice_from("ev"))break;return;case 79:if(m.slice_from("iv"))break;return;case 80:if(m.slice_from("ov"))break;return;case 81:if(m.slice_from("mov"))break;return;case 82:if(m.slice_from("lov"))break;return;case 83:if(m.slice_from("el"))break;return;case 84:if(m.slice_from("anj"))break;return;case 85:if(m.slice_from("enj"))break;return;case 86:if(m.slice_from("šnj"))break;return;case 87:if(m.slice_from("en"))break;return;case 88:if(m.slice_from("šn"))break;return;case 89:if(m.slice_from("čin"))break;return;case 90:if(m.slice_from("roši"))break;return;case 91:if(m.slice_from("oš"))break;return;case 92:if(m.slice_from("evit"))break;return;case 93:if(m.slice_from("ovit"))break;return;case 94:if(m.slice_from("ast"))break;return;case 95:if(m.slice_from("k"))break;return;case 96:if(m.slice_from("eva"))break;return;case 97:if(m.slice_from("ava"))break;return;case 98:if(m.slice_from("iva"))break;return;case 99:if(m.slice_from("uva"))break;return;case 100:if(m.slice_from("ir"))break;return;case 101:if(m.slice_from("ač"))break;return;case 102:if(m.slice_from("ača"))break;return;case 103:if(m.slice_from("ni"))break;return;case 104:if(m.slice_from("a"))break;return;case 105:if(m.slice_from("ur"))break;return;case 106:if(m.slice_from("astaj"))break;return;case 107:if(m.slice_from("istaj"))break;return;case 108:if(m.slice_from("ostaj"))break;return;case 109:if(m.slice_from("aj"))break;return;case 110:if(m.slice_from("asta"))break;return;case 111:if(m.slice_from("ista"))break;return;case 112:if(m.slice_from("osta"))break;return;case 113:if(m.slice_from("ta"))break;return;case 114:if(m.slice_from("inj"))break;return;case 115:if(m.slice_from("as"))break;return;case 116:if(m.slice_from("i"))break;return;case 117:if(m.slice_from("luč"))break;return;case 118:if(m.slice_from("jeti"))break;return;case 119:if(m.slice_from("e"))break;return;case 120:if(m.slice_from("at"))break;return;case 121:if(!l)return;if(m.slice_from("luc"))break;return;case 122:if(!l)return;if(m.slice_from("snj"))break;return;case 123:if(!l)return;if(m.slice_from("os"))break;return;case 124:if(!l)return;if(m.slice_from("ac"))break;return;case 125:if(!l)return;if(m.slice_from("ec"))break;return;case 126:if(!l)return;if(m.slice_from("uc"))break;return;case 127:if(!l)return;if(m.slice_from("rosi"))break;return;case 128:if(!l)return;if(m.slice_from("aca"))break;return;case 129:if(!l)return;if(m.slice_from("jas"))break;return;case 130:if(!l)return;if(m.slice_from("tas"))break;return;case 131:if(!l)return;if(m.slice_from("gas"))break;return;case 132:if(!l)return;if(m.slice_from("nas"))break;return;case 133:if(!l)return;if(m.slice_from("kas"))break;return;case 134:if(!l)return;if(m.slice_from("vas"))break;return;case 135:if(!l)return;if(m.slice_from("bas"))break;return;case 136:if(!l)return;if(m.slice_from("as"))break;return;case 137:if(!l)return;if(m.slice_from("cin"))break;return;case 138:if(!l)return;if(m.slice_from("astaj"))break;return;case 139:if(!l)return;if(m.slice_from("istaj"))break;return;case 140:if(!l)return;if(m.slice_from("ostaj"))break;return;case 141:if(!l)return;if(m.slice_from("asta"))break;return;case 142:if(!l)return;if(m.slice_from("ista"))break;return;case 143:if(!l)return;if(m.slice_from("osta"))break;return;case 144:if(!l)return;if(m.slice_from("ava"))break;return;case 145:if(!l)return;if(m.slice_from("eva"))break;return;case 146:if(!l)return;if(m.slice_from("iva"))break;return;case 147:if(!l)return;if(m.slice_from("uva"))break;return;case 148:if(!l)return;if(m.slice_from("ova"))break;return;case 149:if(!l)return;if(m.slice_from("jeti"))break;return;case 150:if(!l)return;if(m.slice_from("inj"))break;return;case 151:if(!l)return;if(m.slice_from("ist"))break;return;case 152:if(!l)return;if(m.slice_from("es"))break;return;case 153:if(!l)return;if(m.slice_from("et"))break;return;case 154:if(!l)return;if(m.slice_from("is"))break;return;case 155:if(!l)return;if(m.slice_from("ir"))break;return;case 156:if(!l)return;if(m.slice_from("ur"))break;return;case 157:if(!l)return;if(m.slice_from("uj"))break;return;case 158:if(!l)return;if(m.slice_from("ni"))break;return;case 159:if(!l)return;if(m.slice_from("sn"))break;return;case 160:if(!l)return;if(m.slice_from("ta"))break;return;case 161:if(!l)return;if(m.slice_from("a"))break;return;case 162:if(!l)return;if(m.slice_from("i"))break;return;case 163:if(!l)return;if(m.slice_from("e"))break;return;case 164:if(!l)return;if(m.slice_from("n"))break;return}return 1}})()||(m.cursor=m.limit-i,m.ket=m.cursor,0!=m.find_among_b(t)&&(m.bra=m.cursor,v())&&m.slice_from("")),m.cursor=m.limit-a,m.cursor=m.limit_backward,!0},this.stemWord=function(a){return m.setCurrent(a),this.stem(),m.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/spanish-stemmer.js b/sphinx/search/minified-js/spanish-stemmer.js
index ef634a8b75c..c47ed2e414a 100644
--- a/sphinx/search/minified-js/spanish-stemmer.js
+++ b/sphinx/search/minified-js/spanish-stemmer.js
@@ -1 +1 @@
-SpanishStemmer=function(){var r=new BaseStemmer;var e=[["",-1,6],["á",0,1],["é",0,2],["í",0,3],["ó",0,4],["ú",0,5]];var i=[["la",-1,-1],["sela",0,-1],["le",-1,-1],["me",-1,-1],["se",-1,-1],["lo",-1,-1],["selo",5,-1],["las",-1,-1],["selas",7,-1],["les",-1,-1],["los",-1,-1],["selos",10,-1],["nos",-1,-1]];var a=[["ando",-1,6],["iendo",-1,6],["yendo",-1,7],["ándo",-1,2],["iéndo",-1,1],["ar",-1,6],["er",-1,6],["ir",-1,6],["ár",-1,3],["ér",-1,4],["ír",-1,5]];var s=[["ic",-1,-1],["ad",-1,-1],["os",-1,-1],["iv",-1,1]];var u=[["able",-1,1],["ible",-1,1],["ante",-1,1]];var o=[["ic",-1,1],["abil",-1,1],["iv",-1,1]];var t=[["ica",-1,1],["ancia",-1,2],["encia",-1,5],["adora",-1,2],["osa",-1,1],["ista",-1,1],["iva",-1,9],["anza",-1,1],["logía",-1,3],["idad",-1,8],["able",-1,1],["ible",-1,1],["ante",-1,2],["mente",-1,7],["amente",13,6],["ación",-1,2],["ución",-1,4],["ico",-1,1],["ismo",-1,1],["oso",-1,1],["amiento",-1,1],["imiento",-1,1],["ivo",-1,9],["ador",-1,2],["icas",-1,1],["ancias",-1,2],["encias",-1,5],["adoras",-1,2],["osas",-1,1],["istas",-1,1],["ivas",-1,9],["anzas",-1,1],["logías",-1,3],["idades",-1,8],["ables",-1,1],["ibles",-1,1],["aciones",-1,2],["uciones",-1,4],["adores",-1,2],["antes",-1,2],["icos",-1,1],["ismos",-1,1],["osos",-1,1],["amientos",-1,1],["imientos",-1,1],["ivos",-1,9]];var c=[["ya",-1,1],["ye",-1,1],["yan",-1,1],["yen",-1,1],["yeron",-1,1],["yendo",-1,1],["yo",-1,1],["yas",-1,1],["yes",-1,1],["yais",-1,1],["yamos",-1,1],["yó",-1,1]];var l=[["aba",-1,2],["ada",-1,2],["ida",-1,2],["ara",-1,2],["iera",-1,2],["ía",-1,2],["aría",5,2],["ería",5,2],["iría",5,2],["ad",-1,2],["ed",-1,2],["id",-1,2],["ase",-1,2],["iese",-1,2],["aste",-1,2],["iste",-1,2],["an",-1,2],["aban",16,2],["aran",16,2],["ieran",16,2],["ían",16,2],["arían",20,2],["erían",20,2],["irían",20,2],["en",-1,1],["asen",24,2],["iesen",24,2],["aron",-1,2],["ieron",-1,2],["arán",-1,2],["erán",-1,2],["irán",-1,2],["ado",-1,2],["ido",-1,2],["ando",-1,2],["iendo",-1,2],["ar",-1,2],["er",-1,2],["ir",-1,2],["as",-1,2],["abas",39,2],["adas",39,2],["idas",39,2],["aras",39,2],["ieras",39,2],["ías",39,2],["arías",45,2],["erías",45,2],["irías",45,2],["es",-1,1],["ases",49,2],["ieses",49,2],["abais",-1,2],["arais",-1,2],["ierais",-1,2],["íais",-1,2],["aríais",55,2],["eríais",55,2],["iríais",55,2],["aseis",-1,2],["ieseis",-1,2],["asteis",-1,2],["isteis",-1,2],["áis",-1,2],["éis",-1,1],["aréis",64,2],["eréis",64,2],["iréis",64,2],["ados",-1,2],["idos",-1,2],["amos",-1,2],["ábamos",70,2],["áramos",70,2],["iéramos",70,2],["íamos",70,2],["aríamos",74,2],["eríamos",74,2],["iríamos",74,2],["emos",-1,1],["aremos",78,2],["eremos",78,2],["iremos",78,2],["ásemos",78,2],["iésemos",78,2],["imos",-1,2],["arás",-1,2],["erás",-1,2],["irás",-1,2],["ís",-1,2],["ará",-1,2],["erá",-1,2],["irá",-1,2],["aré",-1,2],["eré",-1,2],["iré",-1,2],["ió",-1,2]];var f=[["a",-1,1],["e",-1,2],["o",-1,1],["os",-1,1],["á",-1,1],["é",-1,2],["í",-1,1],["ó",-1,1]];var n=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,1,17,4,10];var b=0;var m=0;var k=0;function _(){k=r.limit;m=r.limit;b=r.limit;var e=r.cursor;r:{e:{var i=r.cursor;i:{if(!r.in_grouping(n,97,252)){break i}a:{var a=r.cursor;s:{if(!r.out_grouping(n,97,252)){break s}u:while(true){o:{if(!r.in_grouping(n,97,252)){break o}break u}if(r.cursor>=r.limit){break s}r.cursor++}break a}r.cursor=a;if(!r.in_grouping(n,97,252)){break i}s:while(true){u:{if(!r.out_grouping(n,97,252)){break u}break s}if(r.cursor>=r.limit){break i}r.cursor++}}break e}r.cursor=i;if(!r.out_grouping(n,97,252)){break r}i:{var s=r.cursor;a:{if(!r.out_grouping(n,97,252)){break a}s:while(true){u:{if(!r.in_grouping(n,97,252)){break u}break s}if(r.cursor>=r.limit){break a}r.cursor++}break i}r.cursor=s;if(!r.in_grouping(n,97,252)){break r}if(r.cursor>=r.limit){break r}r.cursor++}}k=r.cursor}r.cursor=e;var u=r.cursor;r:{e:while(true){i:{if(!r.in_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}m=r.cursor;e:while(true){i:{if(!r.in_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}e:while(true){i:{if(!r.out_grouping(n,97,252)){break i}break e}if(r.cursor>=r.limit){break r}r.cursor++}b=r.cursor}r.cursor=u;return true}function d(){var i;while(true){var a=r.cursor;r:{r.bra=r.cursor;i=r.find_among(e);if(i==0){break r}r.ket=r.cursor;switch(i){case 1:if(!r.slice_from("a")){return false}break;case 2:if(!r.slice_from("e")){return false}break;case 3:if(!r.slice_from("i")){return false}break;case 4:if(!r.slice_from("o")){return false}break;case 5:if(!r.slice_from("u")){return false}break;case 6:if(r.cursor>=r.limit){break r}r.cursor++;break}continue}r.cursor=a;break}return true}function v(){if(!(k<=r.cursor)){return false}return true}function g(){if(!(m<=r.cursor)){return false}return true}function w(){if(!(b<=r.cursor)){return false}return true}function h(){var e;r.ket=r.cursor;if(r.find_among_b(i)==0){return false}r.bra=r.cursor;e=r.find_among_b(a);if(e==0){return false}if(!v()){return false}switch(e){case 1:r.bra=r.cursor;if(!r.slice_from("iendo")){return false}break;case 2:r.bra=r.cursor;if(!r.slice_from("ando")){return false}break;case 3:r.bra=r.cursor;if(!r.slice_from("ar")){return false}break;case 4:r.bra=r.cursor;if(!r.slice_from("er")){return false}break;case 5:r.bra=r.cursor;if(!r.slice_from("ir")){return false}break;case 6:if(!r.slice_del()){return false}break;case 7:if(!r.eq_s_b("u")){return false}if(!r.slice_del()){return false}break}return true}function p(){var e;r.ket=r.cursor;e=r.find_among_b(t);if(e==0){return false}r.bra=r.cursor;switch(e){case 1:if(!w()){return false}if(!r.slice_del()){return false}break;case 2:if(!w()){return false}if(!r.slice_del()){return false}var i=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("ic")){r.cursor=r.limit-i;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-i;break r}if(!r.slice_del()){return false}}break;case 3:if(!w()){return false}if(!r.slice_from("log")){return false}break;case 4:if(!w()){return false}if(!r.slice_from("u")){return false}break;case 5:if(!w()){return false}if(!r.slice_from("ente")){return false}break;case 6:if(!g()){return false}if(!r.slice_del()){return false}var a=r.limit-r.cursor;r:{r.ket=r.cursor;e=r.find_among_b(s);if(e==0){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}switch(e){case 1:r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-a;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-a;break r}if(!r.slice_del()){return false}break}}break;case 7:if(!w()){return false}if(!r.slice_del()){return false}var c=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(u)==0){r.cursor=r.limit-c;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-c;break r}if(!r.slice_del()){return false}}break;case 8:if(!w()){return false}if(!r.slice_del()){return false}var l=r.limit-r.cursor;r:{r.ket=r.cursor;if(r.find_among_b(o)==0){r.cursor=r.limit-l;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-l;break r}if(!r.slice_del()){return false}}break;case 9:if(!w()){return false}if(!r.slice_del()){return false}var f=r.limit-r.cursor;r:{r.ket=r.cursor;if(!r.eq_s_b("at")){r.cursor=r.limit-f;break r}r.bra=r.cursor;if(!w()){r.cursor=r.limit-f;break r}if(!r.slice_del()){return false}}break}return true}function y(){if(r.cursor=a.limit)break r}a.cursor++}g=a.cursor}a.cursor=r,r=a.cursor,a.go_out_grouping(b,97,252)&&(a.cursor++,a.go_in_grouping(b,97,252))&&(a.cursor++,k=a.cursor,a.go_out_grouping(b,97,252))&&(a.cursor++,a.go_in_grouping(b,97,252))&&(a.cursor++,d=a.cursor),a.cursor=r,a.limit_backward=a.cursor,a.cursor=a.limit;var r=a.limit-a.cursor,r=((()=>{var r;if(a.ket=a.cursor,0!=a.find_among_b(c)&&(a.bra=a.cursor,0!=(r=a.find_among_b(u)))&&v())switch(r){case 1:if(a.bra=a.cursor,a.slice_from("iendo"))break;return;case 2:if(a.bra=a.cursor,a.slice_from("ando"))break;return;case 3:if(a.bra=a.cursor,a.slice_from("ar"))break;return;case 4:if(a.bra=a.cursor,a.slice_from("er"))break;return;case 5:if(a.bra=a.cursor,a.slice_from("ir"))break;return;case 6:if(a.slice_del())break;return;case 7:if(!a.eq_s_b("u"))return;if(a.slice_del())break}})(),a.cursor=a.limit-r,a.limit-a.cursor),s=a.limit-a.cursor,s=(p()||(a.cursor=a.limit-s,(()=>{if(!(a.cursor{var r;if(!(a.cursor{var r;if(a.ket=a.cursor,0!=(r=a.find_among_b(_)))switch(a.bra=a.cursor,r){case 1:if(!v())return;if(a.slice_del())break;return;case 2:if(!v())return;if(!a.slice_del())return;var i=a.limit-a.cursor;if(a.ket=a.cursor,a.eq_s_b("u")){a.bra=a.cursor;var e=a.limit-a.cursor;if(a.eq_s_b("g"))if(a.cursor=a.limit-e,v()){if(!a.slice_del());}else a.cursor=a.limit-i;else a.cursor=a.limit-i}else a.cursor=a.limit-i}})(),a.cursor=a.limit-s,a.cursor=a.limit_backward,a.cursor);return(()=>{for(var r;;){var i=a.cursor;r:{switch(a.bra=a.cursor,r=a.find_among(o),a.ket=a.cursor,r){case 1:if(a.slice_from("a"))break;return;case 2:if(a.slice_from("e"))break;return;case 3:if(a.slice_from("i"))break;return;case 4:if(a.slice_from("o"))break;return;case 5:if(a.slice_from("u"))break;return;case 6:if(a.cursor>=a.limit)break r;a.cursor++}continue}a.cursor=i;break}})(),a.cursor=r,!0},this.stemWord=function(r){return a.setCurrent(r),this.stem(),a.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/swedish-stemmer.js b/sphinx/search/minified-js/swedish-stemmer.js
index b975f54284d..d66010809c6 100644
--- a/sphinx/search/minified-js/swedish-stemmer.js
+++ b/sphinx/search/minified-js/swedish-stemmer.js
@@ -1 +1 @@
-SwedishStemmer=function(){var r=new BaseStemmer;var e=[["a",-1,1],["arna",0,1],["erna",0,1],["heterna",2,1],["orna",0,1],["ad",-1,1],["e",-1,1],["ade",6,1],["ande",6,1],["arne",6,1],["are",6,1],["aste",6,1],["en",-1,1],["anden",12,1],["aren",12,1],["heten",12,1],["ern",-1,1],["ar",-1,1],["er",-1,1],["heter",18,1],["or",-1,1],["s",-1,2],["as",21,1],["arnas",22,1],["ernas",22,1],["ornas",22,1],["es",21,1],["ades",26,1],["andes",26,1],["ens",21,1],["arens",29,1],["hetens",29,1],["erns",21,1],["at",-1,1],["andet",-1,1],["het",-1,1],["ast",-1,1]];var a=[["dd",-1,-1],["gd",-1,-1],["nn",-1,-1],["dt",-1,-1],["gt",-1,-1],["kt",-1,-1],["tt",-1,-1]];var i=[["ig",-1,1],["lig",0,1],["els",-1,1],["fullt",-1,3],["löst",-1,2]];var t=[17,65,16,1,0,0,0,0,0,0,0,0,0,0,0,0,24,0,32];var s=[119,127,149];var u=0;var n=0;function c(){n=r.limit;var e=r.cursor;{var a=r.cursor+3;if(a>r.limit){return false}r.cursor=a}u=r.cursor;r.cursor=e;r:while(true){var i=r.cursor;e:{if(!r.in_grouping(t,97,246)){break e}r.cursor=i;break r}r.cursor=i;if(r.cursor>=r.limit){return false}r.cursor++}r:while(true){e:{if(!r.out_grouping(t,97,246)){break e}break r}if(r.cursor>=r.limit){return false}r.cursor++}n=r.cursor;r:{if(!(na.limit_backward))return a.cursor=a.limit-r,r=a.limit-a.cursor,0==a.find_among_b(i)?(a.cursor=a.limit-r,1):void 0}this.stem=function(){var r=a.cursor,i=(m=a.limit,t=a.cursor,(i=a.cursor+3)>a.limit||(a.cursor=i,l=a.cursor,a.cursor=t,a.go_out_grouping(u,97,246)&&(a.cursor++,a.go_in_grouping(u,97,246))&&(a.cursor++,m=a.cursor,l<=m||(m=l))),a.cursor=r,a.limit_backward=a.cursor,a.cursor=a.limit,a.limit-a.cursor),t=((()=>{var r;if(!(a.cursor{var r;if(!(a.cursor{var r;if(z=!1,A()){l.limit_backward=l.cursor,l.cursor=l.limit;var i=l.limit-l.cursor;r:{var c=l.limit-l.cursor;if(l.ket=l.cursor,0!=(r=l.find_among_b(p))){switch(l.bra=l.cursor,r){case 1:if(l.slice_del())break;return;case 2:var e=l.limit-l.cursor;if(0!=l.find_among_b(W))break r;if(l.cursor=l.limit-e,l.slice_del())break;return;case 3:var s=l.limit-l.cursor;if(0!=l.find_among_b(j))break r;if(l.cursor=l.limit-s,l.slice_del())break;return;case 4:var o=l.limit-l.cursor;if(l.eq_s_b("ச"))break r;if(l.cursor=l.limit-o,l.slice_from("்"))break;return;case 5:if(l.slice_from("்"))break;return;case 6:var u=l.limit-l.cursor;if(!l.eq_s_b("்"))break r;if(l.cursor=l.limit-u,l.slice_del())break;return}z=!0,l.cursor=l.limit-c}}if(l.cursor=l.limit-i,i=l.limit-l.cursor,l.ket=l.cursor,0!=l.find_among_b(x)){if(l.bra=l.cursor,!l.slice_del())return;z=!0}return l.cursor=l.limit-i,l.cursor=l.limit_backward,E(),!!z}})()){l.cursor=r;break}}}this.stem=function(){y=!1;var r,i,c=l.cursor;return F(),l.cursor=c,!!A()&&(c=l.cursor,l.bra=l.cursor,l.eq_s("எ")&&0!=l.find_among(e)&&l.eq_s("்")&&(l.ket=l.cursor,l.slice_del())&&(r=l.cursor,D(),l.cursor=r),l.cursor=c,r=l.cursor,l.bra=l.cursor,0!=l.find_among(s)&&0!=l.find_among(o)&&l.eq_s("்")&&(l.ket=l.cursor,l.slice_del())&&(c=l.cursor,D(),l.cursor=c),l.cursor=r,c=l.cursor,A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,i=l.limit-l.cursor,l.ket=l.cursor,0!=l.find_among_b(v)&&(l.bra=l.cursor,!l.slice_from("்"))||(l.cursor=l.limit-i,l.cursor=l.limit_backward,E())),l.cursor=c,i=l.cursor,A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,l.eq_s_b("ும்"))&&(l.bra=l.cursor,l.slice_from("்"))&&(l.cursor=l.limit_backward,c=l.cursor,F(),l.cursor=c),l.cursor=i,c=l.cursor,(()=>{var r;if(A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,0!=(r=l.find_among_b(q)))){switch(l.bra=l.cursor,r){case 1:if(l.slice_from("்"))break;return;case 2:var i=l.limit-l.cursor;if(0!=l.find_among_b(w))return;if(l.cursor=l.limit-i,l.slice_from("்"))break;return;case 3:if(l.slice_del())break;return}l.cursor=l.limit_backward,E()}})(),l.cursor=c,c=l.cursor,(()=>{var r;if(y=!1,A()){l.limit_backward=l.cursor,l.cursor=l.limit;r:{var i=l.limit-l.cursor;i:{var c=l.limit-l.cursor;if(l.ket=l.cursor,0!=(r=l.find_among_b(S))){switch(l.bra=l.cursor,r){case 1:if(l.slice_del())break;return;case 2:if(l.slice_from("்"))break;return;case 3:var e=l.limit-l.cursor;if(l.eq_s_b("ம"))break i;if(l.cursor=l.limit-e,l.slice_from("்"))break;return;case 4:if(l.current.length<7)break i;if(l.slice_from("்"))break;return;case 5:var s=l.limit-l.cursor;if(0!=l.find_among_b(h))break i;if(l.cursor=l.limit-s,l.slice_from("்"))break;return;case 6:var o=l.limit-l.cursor;if(0!=l.find_among_b(C))break i;if(l.cursor=l.limit-o,l.slice_del())break;return;case 7:if(l.slice_from("ி"))break;return}l.cursor=l.limit-c;break r}}l.cursor=l.limit-i;i=l.limit-l.cursor;if(l.ket=l.cursor,!l.eq_s_b("ை"))return;var u=l.limit-l.cursor,a=l.limit-l.cursor;if(0==l.find_among_b(B))l.cursor=l.limit-a;else{l.cursor=l.limit-u;a=l.limit-l.cursor;if(0==l.find_among_b(T))return;if(!l.eq_s_b("்"))return;l.cursor=l.limit-a}if(l.bra=l.cursor,!l.slice_from("்"))return;l.cursor=l.limit-i}y=!0;var t=l.limit-l.cursor;l.ket=l.cursor,l.eq_s_b("ின்")&&(l.bra=l.cursor,!l.slice_from("்"))||(l.cursor=l.limit-t,l.cursor=l.limit_backward,E())}})(),l.cursor=c,c=l.cursor,(()=>{var r;if(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,0!=(r=l.find_among_b(d))){switch(l.bra=l.cursor,r){case 1:r:{var i=l.limit-l.cursor;if(0!=l.find_among_b(u)){if(l.slice_from("ுங்"))break r;return}if(l.cursor=l.limit-i,!l.slice_from("்"))return}break;case 2:if(l.slice_from("ல்"))break;return;case 3:if(l.slice_from("ள்"))break;return;case 4:if(l.slice_del())break;return}l.cursor=l.limit_backward}})(),l.cursor=c,c=l.cursor,A()&&(l.limit_backward=l.cursor,l.cursor=l.limit,l.ket=l.cursor,0!=l.find_among_b(g))&&(l.bra=l.cursor,l.slice_del())&&(l.cursor=l.limit_backward),l.cursor=c,c=l.cursor,G(),l.cursor=c,!0)},this.stemWord=function(r){return l.setCurrent(r),this.stem(),l.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/minified-js/turkish-stemmer.js b/sphinx/search/minified-js/turkish-stemmer.js
index 4c0a699bfbe..476e4abc4ad 100644
--- a/sphinx/search/minified-js/turkish-stemmer.js
+++ b/sphinx/search/minified-js/turkish-stemmer.js
@@ -1 +1 @@
-TurkishStemmer=function(){var r=new BaseStemmer;var i=[["m",-1,-1],["n",-1,-1],["miz",-1,-1],["niz",-1,-1],["muz",-1,-1],["nuz",-1,-1],["müz",-1,-1],["nüz",-1,-1],["mız",-1,-1],["nız",-1,-1]];var e=[["leri",-1,-1],["ları",-1,-1]];var u=[["ni",-1,-1],["nu",-1,-1],["nü",-1,-1],["nı",-1,-1]];var a=[["in",-1,-1],["un",-1,-1],["ün",-1,-1],["ın",-1,-1]];var s=[["a",-1,-1],["e",-1,-1]];var t=[["na",-1,-1],["ne",-1,-1]];var l=[["da",-1,-1],["ta",-1,-1],["de",-1,-1],["te",-1,-1]];var c=[["nda",-1,-1],["nde",-1,-1]];var o=[["dan",-1,-1],["tan",-1,-1],["den",-1,-1],["ten",-1,-1]];var f=[["ndan",-1,-1],["nden",-1,-1]];var n=[["la",-1,-1],["le",-1,-1]];var b=[["ca",-1,-1],["ce",-1,-1]];var m=[["im",-1,-1],["um",-1,-1],["üm",-1,-1],["ım",-1,-1]];var k=[["sin",-1,-1],["sun",-1,-1],["sün",-1,-1],["sın",-1,-1]];var _=[["iz",-1,-1],["uz",-1,-1],["üz",-1,-1],["ız",-1,-1]];var v=[["siniz",-1,-1],["sunuz",-1,-1],["sünüz",-1,-1],["sınız",-1,-1]];var d=[["lar",-1,-1],["ler",-1,-1]];var g=[["niz",-1,-1],["nuz",-1,-1],["nüz",-1,-1],["nız",-1,-1]];var w=[["dir",-1,-1],["tir",-1,-1],["dur",-1,-1],["tur",-1,-1],["dür",-1,-1],["tür",-1,-1],["dır",-1,-1],["tır",-1,-1]];var q=[["casına",-1,-1],["cesine",-1,-1]];var p=[["di",-1,-1],["ti",-1,-1],["dik",-1,-1],["tik",-1,-1],["duk",-1,-1],["tuk",-1,-1],["dük",-1,-1],["tük",-1,-1],["dık",-1,-1],["tık",-1,-1],["dim",-1,-1],["tim",-1,-1],["dum",-1,-1],["tum",-1,-1],["düm",-1,-1],["tüm",-1,-1],["dım",-1,-1],["tım",-1,-1],["din",-1,-1],["tin",-1,-1],["dun",-1,-1],["tun",-1,-1],["dün",-1,-1],["tün",-1,-1],["dın",-1,-1],["tın",-1,-1],["du",-1,-1],["tu",-1,-1],["dü",-1,-1],["tü",-1,-1],["dı",-1,-1],["tı",-1,-1]];var h=[["sa",-1,-1],["se",-1,-1],["sak",-1,-1],["sek",-1,-1],["sam",-1,-1],["sem",-1,-1],["san",-1,-1],["sen",-1,-1]];var z=[["miş",-1,-1],["muş",-1,-1],["müş",-1,-1],["mış",-1,-1]];var y=[["b",-1,1],["c",-1,2],["d",-1,3],["ğ",-1,4]];var C=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1];var S=[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1];var B=[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1];var T=[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130];var W=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1];var j=[17];var x=[65];var A=[65];var D=false;function E(){var i=r.limit-r.cursor;r:while(true){var e=r.limit-r.cursor;i:{if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-e;if(r.cursor<=r.limit_backward){return false}r.cursor--}r:{var u=r.limit-r.cursor;i:{if(!r.eq_s_b("a")){break i}e:while(true){var a=r.limit-r.cursor;u:{if(!r.in_grouping_b(B,97,305)){break u}r.cursor=r.limit-a;break e}r.cursor=r.limit-a;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("e")){break i}e:while(true){var s=r.limit-r.cursor;u:{if(!r.in_grouping_b(T,101,252)){break u}r.cursor=r.limit-s;break e}r.cursor=r.limit-s;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("ı")){break i}e:while(true){var t=r.limit-r.cursor;u:{if(!r.in_grouping_b(W,97,305)){break u}r.cursor=r.limit-t;break e}r.cursor=r.limit-t;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("i")){break i}e:while(true){var l=r.limit-r.cursor;u:{if(!r.in_grouping_b(j,101,105)){break u}r.cursor=r.limit-l;break e}r.cursor=r.limit-l;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("o")){break i}e:while(true){var c=r.limit-r.cursor;u:{if(!r.in_grouping_b(x,111,117)){break u}r.cursor=r.limit-c;break e}r.cursor=r.limit-c;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("ö")){break i}e:while(true){var o=r.limit-r.cursor;u:{if(!r.in_grouping_b(A,246,252)){break u}r.cursor=r.limit-o;break e}r.cursor=r.limit-o;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;i:{if(!r.eq_s_b("u")){break i}e:while(true){var f=r.limit-r.cursor;u:{if(!r.in_grouping_b(x,111,117)){break u}r.cursor=r.limit-f;break e}r.cursor=r.limit-f;if(r.cursor<=r.limit_backward){break i}r.cursor--}break r}r.cursor=r.limit-u;if(!r.eq_s_b("ü")){return false}i:while(true){var n=r.limit-r.cursor;e:{if(!r.in_grouping_b(A,246,252)){break e}r.cursor=r.limit-n;break i}r.cursor=r.limit-n;if(r.cursor<=r.limit_backward){return false}r.cursor--}}r.cursor=r.limit-i;return true}function F(){r:{var i=r.limit-r.cursor;i:{if(!r.eq_s_b("n")){break i}var e=r.limit-r.cursor;if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.eq_s_b("n")){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.in_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function G(){r:{var i=r.limit-r.cursor;i:{if(!r.eq_s_b("s")){break i}var e=r.limit-r.cursor;if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.eq_s_b("s")){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.in_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function H(){r:{var i=r.limit-r.cursor;i:{if(!r.eq_s_b("y")){break i}var e=r.limit-r.cursor;if(!r.in_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.eq_s_b("y")){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.in_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function I(){r:{var i=r.limit-r.cursor;i:{if(!r.in_grouping_b(S,105,305)){break i}var e=r.limit-r.cursor;if(!r.out_grouping_b(C,97,305)){break i}r.cursor=r.limit-e;break r}r.cursor=r.limit-i;{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;if(!r.in_grouping_b(S,105,305)){break i}r.cursor=r.limit-a;return false}r.cursor=r.limit-u}var s=r.limit-r.cursor;if(r.cursor<=r.limit_backward){return false}r.cursor--;if(!r.out_grouping_b(C,97,305)){return false}r.cursor=r.limit-s}return true}function J(){if(r.find_among_b(i)==0){return false}if(!I()){return false}return true}function K(){if(!E()){return false}if(!r.in_grouping_b(S,105,305)){return false}if(!G()){return false}return true}function L(){if(r.find_among_b(e)==0){return false}return true}function M(){if(!E()){return false}if(!r.in_grouping_b(S,105,305)){return false}if(!H()){return false}return true}function N(){if(!E()){return false}if(r.find_among_b(u)==0){return false}return true}function O(){if(!E()){return false}if(r.find_among_b(a)==0){return false}if(!F()){return false}return true}function P(){if(!E()){return false}if(r.find_among_b(s)==0){return false}if(!H()){return false}return true}function Q(){if(!E()){return false}if(r.find_among_b(t)==0){return false}return true}function R(){if(!E()){return false}if(r.find_among_b(l)==0){return false}return true}function U(){if(!E()){return false}if(r.find_among_b(c)==0){return false}return true}function V(){if(!E()){return false}if(r.find_among_b(o)==0){return false}return true}function X(){if(!E()){return false}if(r.find_among_b(f)==0){return false}return true}function Y(){if(!E()){return false}if(r.find_among_b(n)==0){return false}if(!H()){return false}return true}function Z(){if(!r.eq_s_b("ki")){return false}return true}function $(){if(!E()){return false}if(r.find_among_b(b)==0){return false}if(!F()){return false}return true}function rr(){if(!E()){return false}if(r.find_among_b(m)==0){return false}if(!H()){return false}return true}function ir(){if(!E()){return false}if(r.find_among_b(k)==0){return false}return true}function er(){if(!E()){return false}if(r.find_among_b(_)==0){return false}if(!H()){return false}return true}function ur(){if(r.find_among_b(v)==0){return false}return true}function ar(){if(!E()){return false}if(r.find_among_b(d)==0){return false}return true}function sr(){if(!E()){return false}if(r.find_among_b(g)==0){return false}return true}function tr(){if(!E()){return false}if(r.find_among_b(w)==0){return false}return true}function lr(){if(r.find_among_b(q)==0){return false}return true}function cr(){if(!E()){return false}if(r.find_among_b(p)==0){return false}if(!H()){return false}return true}function or(){if(r.find_among_b(h)==0){return false}if(!H()){return false}return true}function fr(){if(!E()){return false}if(r.find_among_b(z)==0){return false}if(!H()){return false}return true}function nr(){if(!r.eq_s_b("ken")){return false}if(!H()){return false}return true}function br(){r.ket=r.cursor;D=true;r:{var i=r.limit-r.cursor;i:{e:{var e=r.limit-r.cursor;u:{if(!fr()){break u}break e}r.cursor=r.limit-e;u:{if(!cr()){break u}break e}r.cursor=r.limit-e;u:{if(!or()){break u}break e}r.cursor=r.limit-e;if(!nr()){break i}}break r}r.cursor=r.limit-i;i:{if(!lr()){break i}e:{var u=r.limit-r.cursor;u:{if(!ur()){break u}break e}r.cursor=r.limit-u;u:{if(!ar()){break u}break e}r.cursor=r.limit-u;u:{if(!rr()){break u}break e}r.cursor=r.limit-u;u:{if(!ir()){break u}break e}r.cursor=r.limit-u;u:{if(!er()){break u}break e}r.cursor=r.limit-u}if(!fr()){break i}break r}r.cursor=r.limit-i;i:{if(!ar()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var a=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var s=r.limit-r.cursor;a:{if(!tr()){break a}break u}r.cursor=r.limit-s;a:{if(!cr()){break a}break u}r.cursor=r.limit-s;a:{if(!or()){break a}break u}r.cursor=r.limit-s;if(!fr()){r.cursor=r.limit-a;break e}}}D=false;break r}r.cursor=r.limit-i;i:{if(!sr()){break i}e:{var t=r.limit-r.cursor;u:{if(!cr()){break u}break e}r.cursor=r.limit-t;if(!or()){break i}}break r}r.cursor=r.limit-i;i:{e:{var l=r.limit-r.cursor;u:{if(!ur()){break u}break e}r.cursor=r.limit-l;u:{if(!er()){break u}break e}r.cursor=r.limit-l;u:{if(!ir()){break u}break e}r.cursor=r.limit-l;if(!rr()){break i}}r.bra=r.cursor;if(!r.slice_del()){return false}var c=r.limit-r.cursor;e:{r.ket=r.cursor;if(!fr()){r.cursor=r.limit-c;break e}}break r}r.cursor=r.limit-i;if(!tr()){return false}r.bra=r.cursor;if(!r.slice_del()){return false}var o=r.limit-r.cursor;i:{r.ket=r.cursor;e:{var f=r.limit-r.cursor;u:{if(!ur()){break u}break e}r.cursor=r.limit-f;u:{if(!ar()){break u}break e}r.cursor=r.limit-f;u:{if(!rr()){break u}break e}r.cursor=r.limit-f;u:{if(!ir()){break u}break e}r.cursor=r.limit-f;u:{if(!er()){break u}break e}r.cursor=r.limit-f}if(!fr()){r.cursor=r.limit-o;break i}}}r.bra=r.cursor;if(!r.slice_del()){return false}return true}function mr(){r.ket=r.cursor;if(!Z()){return false}r:{var i=r.limit-r.cursor;i:{if(!R()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var e=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var u=r.limit-r.cursor;a:{if(!ar()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var a=r.limit-r.cursor;s:{if(!mr()){r.cursor=r.limit-a;break s}}break u}r.cursor=r.limit-u;if(!J()){r.cursor=r.limit-e;break e}r.bra=r.cursor;if(!r.slice_del()){return false}var s=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-s;break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-s;break a}}}}break r}r.cursor=r.limit-i;i:{if(!O()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var t=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var l=r.limit-r.cursor;a:{if(!L()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}break u}r.cursor=r.limit-l;a:{r.ket=r.cursor;s:{var c=r.limit-r.cursor;t:{if(!J()){break t}break s}r.cursor=r.limit-c;if(!K()){break a}}r.bra=r.cursor;if(!r.slice_del()){return false}var o=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-o;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-o;break s}}break u}r.cursor=r.limit-l;if(!mr()){r.cursor=r.limit-t;break e}}}break r}r.cursor=r.limit-i;if(!U()){return false}i:{var f=r.limit-r.cursor;e:{if(!L()){break e}r.bra=r.cursor;if(!r.slice_del()){return false}break i}r.cursor=r.limit-f;e:{if(!K()){break e}r.bra=r.cursor;if(!r.slice_del()){return false}var n=r.limit-r.cursor;u:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-n;break u}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-n;break u}}break i}r.cursor=r.limit-f;if(!mr()){return false}}}return true}function kr(){r:{var i=r.limit-r.cursor;i:{r.ket=r.cursor;if(!ar()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var e=r.limit-r.cursor;e:{if(!mr()){r.cursor=r.limit-e;break e}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;if(!$()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var u=r.limit-r.cursor;e:{u:{var a=r.limit-r.cursor;a:{r.ket=r.cursor;if(!L()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}break u}r.cursor=r.limit-a;a:{r.ket=r.cursor;s:{var s=r.limit-r.cursor;t:{if(!J()){break t}break s}r.cursor=r.limit-s;if(!K()){break a}}r.bra=r.cursor;if(!r.slice_del()){return false}var t=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-t;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-t;break s}}break u}r.cursor=r.limit-a;r.ket=r.cursor;if(!ar()){r.cursor=r.limit-u;break e}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-u;break e}}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var l=r.limit-r.cursor;u:{if(!U()){break u}break e}r.cursor=r.limit-l;if(!Q()){break i}}e:{var c=r.limit-r.cursor;u:{if(!L()){break u}r.bra=r.cursor;if(!r.slice_del()){return false}break e}r.cursor=r.limit-c;u:{if(!K()){break u}r.bra=r.cursor;if(!r.slice_del()){return false}var o=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-o;break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-o;break a}}break e}r.cursor=r.limit-c;if(!mr()){break i}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var f=r.limit-r.cursor;u:{if(!X()){break u}break e}r.cursor=r.limit-f;if(!N()){break i}}e:{var n=r.limit-r.cursor;u:{if(!K()){break u}r.bra=r.cursor;if(!r.slice_del()){return false}var b=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-b;break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-b;break a}}break e}r.cursor=r.limit-n;if(!L()){break i}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;if(!V()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}var m=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var k=r.limit-r.cursor;a:{if(!J()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var _=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-_;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-_;break s}}break u}r.cursor=r.limit-k;a:{if(!ar()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var v=r.limit-r.cursor;s:{if(!mr()){r.cursor=r.limit-v;break s}}break u}r.cursor=r.limit-k;if(!mr()){r.cursor=r.limit-m;break e}}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var d=r.limit-r.cursor;u:{if(!O()){break u}break e}r.cursor=r.limit-d;if(!Y()){break i}}r.bra=r.cursor;if(!r.slice_del()){return false}var g=r.limit-r.cursor;e:{u:{var w=r.limit-r.cursor;a:{r.ket=r.cursor;if(!ar()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){break a}break u}r.cursor=r.limit-w;a:{r.ket=r.cursor;s:{var q=r.limit-r.cursor;t:{if(!J()){break t}break s}r.cursor=r.limit-q;if(!K()){break a}}r.bra=r.cursor;if(!r.slice_del()){return false}var p=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-p;break s}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-p;break s}}break u}r.cursor=r.limit-w;if(!mr()){r.cursor=r.limit-g;break e}}}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;if(!L()){break i}r.bra=r.cursor;if(!r.slice_del()){return false}break r}r.cursor=r.limit-i;i:{if(!mr()){break i}break r}r.cursor=r.limit-i;i:{r.ket=r.cursor;e:{var h=r.limit-r.cursor;u:{if(!R()){break u}break e}r.cursor=r.limit-h;u:{if(!M()){break u}break e}r.cursor=r.limit-h;if(!P()){break i}}r.bra=r.cursor;if(!r.slice_del()){return false}var z=r.limit-r.cursor;e:{r.ket=r.cursor;u:{var y=r.limit-r.cursor;a:{if(!J()){break a}r.bra=r.cursor;if(!r.slice_del()){return false}var C=r.limit-r.cursor;s:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-C;break s}}break u}r.cursor=r.limit-y;if(!ar()){r.cursor=r.limit-z;break e}}r.bra=r.cursor;if(!r.slice_del()){return false}r.ket=r.cursor;if(!mr()){r.cursor=r.limit-z;break e}}break r}r.cursor=r.limit-i;r.ket=r.cursor;i:{var S=r.limit-r.cursor;e:{if(!J()){break e}break i}r.cursor=r.limit-S;if(!K()){return false}}r.bra=r.cursor;if(!r.slice_del()){return false}var B=r.limit-r.cursor;i:{r.ket=r.cursor;if(!ar()){r.cursor=r.limit-B;break i}r.bra=r.cursor;if(!r.slice_del()){return false}if(!mr()){r.cursor=r.limit-B;break i}}}return true}function _r(){var i;r.ket=r.cursor;i=r.find_among_b(y);if(i==0){return false}r.bra=r.cursor;switch(i){case 1:if(!r.slice_from("p")){return false}break;case 2:if(!r.slice_from("ç")){return false}break;case 3:if(!r.slice_from("t")){return false}break;case 4:if(!r.slice_from("k")){return false}break}return true}function vr(){var i=r.limit-r.cursor;r:{var e=r.limit-r.cursor;i:{if(!r.eq_s_b("d")){break i}break r}r.cursor=r.limit-e;if(!r.eq_s_b("g")){return false}}r.cursor=r.limit-i;r:{var u=r.limit-r.cursor;i:{var a=r.limit-r.cursor;e:while(true){var s=r.limit-r.cursor;u:{if(!r.in_grouping_b(C,97,305)){break u}r.cursor=r.limit-s;break e}r.cursor=r.limit-s;if(r.cursor<=r.limit_backward){break i}r.cursor--}e:{var t=r.limit-r.cursor;u:{if(!r.eq_s_b("a")){break u}break e}r.cursor=r.limit-t;if(!r.eq_s_b("ı")){break i}}r.cursor=r.limit-a;{var l=r.cursor;r.insert(r.cursor,r.cursor,"ı");r.cursor=l}break r}r.cursor=r.limit-u;i:{var c=r.limit-r.cursor;e:while(true){var o=r.limit-r.cursor;u:{if(!r.in_grouping_b(C,97,305)){break u}r.cursor=r.limit-o;break e}r.cursor=r.limit-o;if(r.cursor<=r.limit_backward){break i}r.cursor--}e:{var f=r.limit-r.cursor;u:{if(!r.eq_s_b("e")){break u}break e}r.cursor=r.limit-f;if(!r.eq_s_b("i")){break i}}r.cursor=r.limit-c;{var n=r.cursor;r.insert(r.cursor,r.cursor,"i");r.cursor=n}break r}r.cursor=r.limit-u;i:{var b=r.limit-r.cursor;e:while(true){var m=r.limit-r.cursor;u:{if(!r.in_grouping_b(C,97,305)){break u}r.cursor=r.limit-m;break e}r.cursor=r.limit-m;if(r.cursor<=r.limit_backward){break i}r.cursor--}e:{var k=r.limit-r.cursor;u:{if(!r.eq_s_b("o")){break u}break e}r.cursor=r.limit-k;if(!r.eq_s_b("u")){break i}}r.cursor=r.limit-b;{var _=r.cursor;r.insert(r.cursor,r.cursor,"u");r.cursor=_}break r}r.cursor=r.limit-u;var v=r.limit-r.cursor;i:while(true){var d=r.limit-r.cursor;e:{if(!r.in_grouping_b(C,97,305)){break e}r.cursor=r.limit-d;break i}r.cursor=r.limit-d;if(r.cursor<=r.limit_backward){return false}r.cursor--}i:{var g=r.limit-r.cursor;e:{if(!r.eq_s_b("ö")){break e}break i}r.cursor=r.limit-g;if(!r.eq_s_b("ü")){return false}}r.cursor=r.limit-v;{var w=r.cursor;r.insert(r.cursor,r.cursor,"ü");r.cursor=w}}return true}function dr(){if(!r.eq_s_b("ad")){return false}var i=r.limit-r.cursor;r:{if(!r.eq_s_b("soy")){r.cursor=r.limit-i;break r}}if(r.cursor>r.limit_backward){return false}return true}function gr(){var i=r.cursor;{var e=2;while(true){var u=r.cursor;r:{i:while(true){e:{if(!r.in_grouping(C,97,305)){break e}break i}if(r.cursor>=r.limit){break r}r.cursor++}e--;continue}r.cursor=u;break}if(e>0){return false}}r.cursor=i;return true}function wr(){r.limit_backward=r.cursor;r.cursor=r.limit;{var i=r.limit-r.cursor;r:{if(!dr()){break r}return false}r.cursor=r.limit-i}var e=r.limit-r.cursor;vr();r.cursor=r.limit-e;var u=r.limit-r.cursor;_r();r.cursor=r.limit-u;r.cursor=r.limit_backward;return true}this.stem=function(){if(!gr()){return false}r.limit_backward=r.cursor;r.cursor=r.limit;var i=r.limit-r.cursor;br();r.cursor=r.limit-i;if(!D){return false}var e=r.limit-r.cursor;kr();r.cursor=r.limit-e;r.cursor=r.limit_backward;if(!wr()){return false}return true};this["stemWord"]=function(i){r.setCurrent(i);this.stem();return r.getCurrent()}};
\ No newline at end of file
+var TurkishStemmer=function(){var q=new BaseStemmer,u=[["m",-1,-1],["n",-1,-1],["miz",-1,-1],["niz",-1,-1],["muz",-1,-1],["nuz",-1,-1],["müz",-1,-1],["nüz",-1,-1],["mız",-1,-1],["nız",-1,-1]],r=[["leri",-1,-1],["ları",-1,-1]],p=[["ni",-1,-1],["nu",-1,-1],["nü",-1,-1],["nı",-1,-1]],i=[["in",-1,-1],["un",-1,-1],["ün",-1,-1],["ın",-1,-1]],z=[["a",-1,-1],["e",-1,-1]],w=[["na",-1,-1],["ne",-1,-1]],s=[["da",-1,-1],["ta",-1,-1],["de",-1,-1],["te",-1,-1]],o=[["nda",-1,-1],["nde",-1,-1]],h=[["dan",-1,-1],["tan",-1,-1],["den",-1,-1],["ten",-1,-1]],y=[["ndan",-1,-1],["nden",-1,-1]],C=[["la",-1,-1],["le",-1,-1]],I=[["ca",-1,-1],["ce",-1,-1]],g=[["im",-1,-1],["um",-1,-1],["üm",-1,-1],["ım",-1,-1]],v=[["sin",-1,-1],["sun",-1,-1],["sün",-1,-1],["sın",-1,-1]],J=[["iz",-1,-1],["uz",-1,-1],["üz",-1,-1],["ız",-1,-1]],K=[["siniz",-1,-1],["sunuz",-1,-1],["sünüz",-1,-1],["sınız",-1,-1]],L=[["lar",-1,-1],["ler",-1,-1]],M=[["niz",-1,-1],["nuz",-1,-1],["nüz",-1,-1],["nız",-1,-1]],N=[["dir",-1,-1],["tir",-1,-1],["dur",-1,-1],["tur",-1,-1],["dür",-1,-1],["tür",-1,-1],["dır",-1,-1],["tır",-1,-1]],O=[["casına",-1,-1],["cesine",-1,-1]],P=[["di",-1,-1],["ti",-1,-1],["dik",-1,-1],["tik",-1,-1],["duk",-1,-1],["tuk",-1,-1],["dük",-1,-1],["tük",-1,-1],["dık",-1,-1],["tık",-1,-1],["dim",-1,-1],["tim",-1,-1],["dum",-1,-1],["tum",-1,-1],["düm",-1,-1],["tüm",-1,-1],["dım",-1,-1],["tım",-1,-1],["din",-1,-1],["tin",-1,-1],["dun",-1,-1],["tun",-1,-1],["dün",-1,-1],["tün",-1,-1],["dın",-1,-1],["tın",-1,-1],["du",-1,-1],["tu",-1,-1],["dü",-1,-1],["tü",-1,-1],["dı",-1,-1],["tı",-1,-1]],Q=[["sa",-1,-1],["se",-1,-1],["sak",-1,-1],["sek",-1,-1],["sam",-1,-1],["sem",-1,-1],["san",-1,-1],["sen",-1,-1]],R=[["miş",-1,-1],["muş",-1,-1],["müş",-1,-1],["mış",-1,-1]],U=[["b",-1,1],["c",-1,2],["d",-1,3],["ğ",-1,4]],t=[17,65,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,32,8,0,0,0,0,0,0,1],S=[1,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,8,0,0,0,0,0,0,1],V=[1,64,16,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],X=[17,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,130],Y=[1,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,0,1],Z=[17],c=[65],e=[65],l=!1;function B(){var r=q.limit-q.cursor;if(q.go_out_grouping_b(t,97,305)){var i=q.limit-q.cursor;if(!(q.eq_s_b("a")&&q.go_out_grouping_b(V,97,305)||(q.cursor=q.limit-i,q.eq_s_b("e")&&q.go_out_grouping_b(X,101,252))||(q.cursor=q.limit-i,q.eq_s_b("ı")&&q.go_out_grouping_b(Y,97,305))||(q.cursor=q.limit-i,q.eq_s_b("i")&&q.go_out_grouping_b(Z,101,105))||(q.cursor=q.limit-i,q.eq_s_b("o")&&q.go_out_grouping_b(c,111,117))||(q.cursor=q.limit-i,q.eq_s_b("ö")&&q.go_out_grouping_b(e,246,252))||(q.cursor=q.limit-i,q.eq_s_b("u")&&q.go_out_grouping_b(c,111,117)))){if(q.cursor=q.limit-i,!q.eq_s_b("ü"))return;if(!q.go_out_grouping_b(e,246,252))return}return q.cursor=q.limit-r,1}}function T(){r:{var r=q.limit-q.cursor;if(q.eq_s_b("n")){var i=q.limit-q.cursor;if(q.in_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.eq_s_b("n"))return void(q.cursor=q.limit-r);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return;if(q.cursor--,!q.in_grouping_b(t,97,305))return;q.cursor=q.limit-r}return 1}function W(){r:{var r=q.limit-q.cursor;if(q.eq_s_b("y")){var i=q.limit-q.cursor;if(q.in_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.eq_s_b("y"))return void(q.cursor=q.limit-r);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return;if(q.cursor--,!q.in_grouping_b(t,97,305))return;q.cursor=q.limit-r}return 1}function j(){if(0!=q.find_among_b(u)){r:{var r=q.limit-q.cursor;if(q.in_grouping_b(S,105,305)){var i=q.limit-q.cursor;if(q.out_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.in_grouping_b(S,105,305))return!!(q.cursor=q.limit-r,0);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return!!void 0;if(q.cursor--,!q.out_grouping_b(t,97,305))return!!void 0;q.cursor=q.limit-r}return!!1}}function x(){if(B()&&q.in_grouping_b(S,105,305)){r:{var r=q.limit-q.cursor;if(q.eq_s_b("s")){var i=q.limit-q.cursor;if(q.in_grouping_b(t,97,305)){q.cursor=q.limit-i;break r}}q.cursor=q.limit-r;i=q.limit-q.cursor,r=q.limit-q.cursor;if(q.eq_s_b("s"))return!!(q.cursor=q.limit-r,0);q.cursor=q.limit-i;r=q.limit-q.cursor;if(q.cursor<=q.limit_backward)return!!void 0;if(q.cursor--,!q.in_grouping_b(t,97,305))return!!void 0;q.cursor=q.limit-r}return!!1}}function A(){return 0!=q.find_among_b(r)}function D(){return B()&&0!=q.find_among_b(i)&&!!T()}function E(){return B()&&0!=q.find_among_b(s)}function F(){return B()&&0!=q.find_among_b(o)}function m(){return B()&&0!=q.find_among_b(g)&&!!W()}function n(){return B()&&0!=q.find_among_b(v)}function _(){return B()&&0!=q.find_among_b(J)&&!!W()}function f(){return 0!=q.find_among_b(K)}function G(){return B()&&0!=q.find_among_b(L)}function a(){return B()&&0!=q.find_among_b(N)}function b(){return B()&&0!=q.find_among_b(P)&&!!W()}function d(){return 0!=q.find_among_b(Q)&&!!W()}function k(){return B()&&0!=q.find_among_b(R)&&!!W()}function $(){q.ket=q.cursor,l=!0;r:{var r=q.limit-q.cursor,i=q.limit-q.cursor;if(!(k()||(q.cursor=q.limit-i,b())||(q.cursor=q.limit-i,d())||(q.cursor=q.limit-i,q.eq_s_b("ken")&&W()))){if(q.cursor=q.limit-r,0!=q.find_among_b(O)){i=q.limit-q.cursor;if(f()||(q.cursor=q.limit-i,G())||(q.cursor=q.limit-i,m())||(q.cursor=q.limit-i,n())||(q.cursor=q.limit-i,_())||(q.cursor=q.limit-i),k())break r}if(q.cursor=q.limit-r,G()){if(q.bra=q.cursor,!q.slice_del())return;var i=q.limit-q.cursor,u=(q.ket=q.cursor,q.limit-q.cursor);a()||(q.cursor=q.limit-u,b())||(q.cursor=q.limit-u,d())||(q.cursor=q.limit-u,k())||(q.cursor=q.limit-i),l=!1}else{if(q.cursor=q.limit-r,B()&&0!=q.find_among_b(M)){u=q.limit-q.cursor;if(b()||(q.cursor=q.limit-u,d()))break r}q.cursor=q.limit-r;i=q.limit-q.cursor;if(f()||(q.cursor=q.limit-i,_())||(q.cursor=q.limit-i,n())||(q.cursor=q.limit-i,m())){if(q.bra=q.cursor,!q.slice_del())return;u=q.limit-q.cursor;q.ket=q.cursor,k()||(q.cursor=q.limit-u)}else{if(q.cursor=q.limit-r,!a())return;if(q.bra=q.cursor,!q.slice_del())return;i=q.limit-q.cursor,u=(q.ket=q.cursor,q.limit-q.cursor);f()||(q.cursor=q.limit-u,G())||(q.cursor=q.limit-u,m())||(q.cursor=q.limit-u,n())||(q.cursor=q.limit-u,_())||(q.cursor=q.limit-u),k()||(q.cursor=q.limit-i)}}}}q.bra=q.cursor,q.slice_del()}function H(){if(q.ket=q.cursor,q.eq_s_b("ki")){var r=q.limit-q.cursor;if(E()){if(q.bra=q.cursor,!q.slice_del())return;var i=q.limit-q.cursor;r:{q.ket=q.cursor;var u=q.limit-q.cursor;if(G()){if(q.bra=q.cursor,!q.slice_del())return;var s=q.limit-q.cursor;H()||(q.cursor=q.limit-s)}else{if(q.cursor=q.limit-u,!j()){q.cursor=q.limit-i;break r}if(q.bra=q.cursor,!q.slice_del())return;s=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-s)}else q.cursor=q.limit-s}}}else if(q.cursor=q.limit-r,D()){if(q.bra=q.cursor,!q.slice_del())return;var o=q.limit-q.cursor;r:{q.ket=q.cursor;i:{var t=q.limit-q.cursor;if(A()){if(q.bra=q.cursor,q.slice_del())break i;return}q.cursor=q.limit-t,q.ket=q.cursor;var c=q.limit-q.cursor;if(j()||(q.cursor=q.limit-c,x())){if(q.bra=q.cursor,!q.slice_del())return;c=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-c)}else q.cursor=q.limit-c}else if(q.cursor=q.limit-t,!H()){q.cursor=q.limit-o;break r}}}}else{if(q.cursor=q.limit-r,!F())return;r:{var e=q.limit-q.cursor;if(A()){if(q.bra=q.cursor,q.slice_del())break r;return}if(q.cursor=q.limit-e,x()){if(q.bra=q.cursor,!q.slice_del())return;var l=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-l)}else q.cursor=q.limit-l}else if(q.cursor=q.limit-e,!H())return}}return 1}}function rr(){r:{var r=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;var i=q.limit-q.cursor;H()||(q.cursor=q.limit-i)}else if(q.cursor=q.limit-r,q.ket=q.cursor,B()&&0!=q.find_among_b(I)&&T()){if(q.bra=q.cursor,!q.slice_del())return;var u=q.limit-q.cursor;i:u:{var s=q.limit-q.cursor;if(q.ket=q.cursor,A()){if(q.bra=q.cursor,q.slice_del())break u;return}q.cursor=q.limit-s,q.ket=q.cursor;var o=q.limit-q.cursor;if(j()||(q.cursor=q.limit-o,x())){if(q.bra=q.cursor,!q.slice_del())return;o=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-o)}else q.cursor=q.limit-o}else{if(q.cursor=q.limit-s,q.ket=q.cursor,!G()){q.cursor=q.limit-u;break i}if(q.bra=q.cursor,!q.slice_del())return;if(!H()){q.cursor=q.limit-u;break i}}}}else{q.cursor=q.limit-r;i:{q.ket=q.cursor;var t=q.limit-q.cursor;if(F()||(q.cursor=q.limit-t,B()&&0!=q.find_among_b(w))){u:{var c=q.limit-q.cursor;if(A()){if(q.bra=q.cursor,q.slice_del())break u;return}if(q.cursor=q.limit-c,x()){if(q.bra=q.cursor,!q.slice_del())return;var e=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-e)}else q.cursor=q.limit-e}else if(q.cursor=q.limit-c,!H())break i}break r}}q.cursor=q.limit-r;i:{q.ket=q.cursor;var l=q.limit-q.cursor;if(B()&&0!=q.find_among_b(y)||(q.cursor=q.limit-l,B()&&0!=q.find_among_b(p))){l=q.limit-q.cursor;if(x()){if(q.bra=q.cursor,!q.slice_del())return;var m=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-m)}else q.cursor=q.limit-m}else if(q.cursor=q.limit-l,!A())break i;break r}}if(q.cursor=q.limit-r,q.ket=q.cursor,B()&&0!=q.find_among_b(h)){if(q.bra=q.cursor,!q.slice_del())return;var n=q.limit-q.cursor;i:{q.ket=q.cursor;var _=q.limit-q.cursor;if(j()){if(q.bra=q.cursor,!q.slice_del())return;var f=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-f)}else q.cursor=q.limit-f}else if(q.cursor=q.limit-_,G()){if(q.bra=q.cursor,!q.slice_del())return;f=q.limit-q.cursor;H()||(q.cursor=q.limit-f)}else if(q.cursor=q.limit-_,!H()){q.cursor=q.limit-n;break i}}}else{q.cursor=q.limit-r,q.ket=q.cursor;m=q.limit-q.cursor;if(D()||(q.cursor=q.limit-m,B()&&0!=q.find_among_b(C)&&W())){if(q.bra=q.cursor,!q.slice_del())return;var a=q.limit-q.cursor;i:u:{var b=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;if(H())break u}q.cursor=q.limit-b,q.ket=q.cursor;var d=q.limit-q.cursor;if(j()||(q.cursor=q.limit-d,x())){if(q.bra=q.cursor,!q.slice_del())return;d=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-d)}else q.cursor=q.limit-d}else if(q.cursor=q.limit-b,!H()){q.cursor=q.limit-a;break i}}}else{if(q.cursor=q.limit-r,q.ket=q.cursor,A()){if(q.bra=q.cursor,q.slice_del())break r;return}if(q.cursor=q.limit-r,!H()){q.cursor=q.limit-r,q.ket=q.cursor;l=q.limit-q.cursor;if(E()||(q.cursor=q.limit-l,B()&&q.in_grouping_b(S,105,305)&&W())||(q.cursor=q.limit-l,B()&&0!=q.find_among_b(z)&&W())){if(q.bra=q.cursor,!q.slice_del())return;var k=q.limit-q.cursor;i:{q.ket=q.cursor;var g=q.limit-q.cursor;if(j()){if(q.bra=q.cursor,!q.slice_del())return;var v=q.limit-q.cursor;q.ket=q.cursor,G()||(q.cursor=q.limit-v)}else if(q.cursor=q.limit-g,!G()){q.cursor=q.limit-k;break i}if(q.bra=q.cursor,!q.slice_del())return;q.ket=q.cursor,H()||(q.cursor=q.limit-k)}}else{q.cursor=q.limit-r,q.ket=q.cursor;m=q.limit-q.cursor;if(!j()&&(q.cursor=q.limit-m,!x()))return;if(q.bra=q.cursor,!q.slice_del())return;l=q.limit-q.cursor;if(q.ket=q.cursor,G()){if(q.bra=q.cursor,!q.slice_del())return;H()||(q.cursor=q.limit-l)}else q.cursor=q.limit-l}}}}}}}function ir(){q.limit_backward=q.cursor,q.cursor=q.limit;var r=q.limit-q.cursor;if(!(()=>{var r;return!q.eq_s_b("ad")||(r=q.limit-q.cursor,q.eq_s_b("soy")||(q.cursor=q.limit-r),q.cursor>q.limit_backward)?void 0:1})())return q.cursor=q.limit-r,r=q.limit-q.cursor,(()=>{q.ket=q.cursor,q.bra=q.cursor;var r=q.limit-q.cursor;if((q.eq_s_b("d")||(q.cursor=q.limit-r,q.eq_s_b("g")))&&q.go_out_grouping_b(t,97,305))r:{var i=q.limit-q.cursor,u=q.limit-q.cursor;if(q.eq_s_b("a")||(q.cursor=q.limit-u,q.eq_s_b("ı"))){if(q.slice_from("ı"))break r;return}q.cursor=q.limit-i;u=q.limit-q.cursor;if(q.eq_s_b("e")||(q.cursor=q.limit-u,q.eq_s_b("i"))){if(q.slice_from("i"))break r;return}q.cursor=q.limit-i;u=q.limit-q.cursor;if(q.eq_s_b("o")||(q.cursor=q.limit-u,q.eq_s_b("u"))){if(q.slice_from("u"))break r;return}q.cursor=q.limit-i;u=q.limit-q.cursor;if(!q.eq_s_b("ö")&&(q.cursor=q.limit-u,!q.eq_s_b("ü")))return;if(!q.slice_from("ü"))return}})(),q.cursor=q.limit-r,r=q.limit-q.cursor,(()=>{var r;if(q.ket=q.cursor,0!=(r=q.find_among_b(U)))switch(q.bra=q.cursor,r){case 1:if(q.slice_from("p"))break;return;case 2:if(q.slice_from("ç"))break;return;case 3:if(q.slice_from("t"))break;return;case 4:if(q.slice_from("k"))break}})(),q.cursor=q.limit-r,q.cursor=q.limit_backward,1}this.stem=function(){if((()=>{var r=q.cursor;r:{for(q.bra=q.cursor;;){var i=q.cursor,u=q.cursor;if(!q.eq_s("'")){q.cursor=u,q.cursor=i;break}if(q.cursor=i,q.cursor>=q.limit)break r;q.cursor++}if(q.ket=q.cursor,!q.slice_del())return}q.cursor=r,r=q.cursor;r:{var s=q.cursor+2;if(!(s>q.limit)){for(q.cursor=s;;){var o=q.cursor;if(q.eq_s("'")){q.cursor=o;break}if(q.cursor=o,q.cursor>=q.limit)break r;q.cursor++}if(q.bra=q.cursor,q.cursor=q.limit,q.ket=q.cursor,!q.slice_del())return}}q.cursor=r})(),!(()=>{for(var r=q.cursor,i=2;0{for(var r,e=t.cursor;;){var s=t.cursor;r:{for(;;){var i=t.cursor;e:if(t.bra=t.cursor,0!=(r=t.find_among(l))){switch(t.ket=t.cursor,r){case 1:var c=t.cursor;if(t.eq_s("ּ"))break e;if(t.cursor=c,t.slice_from("װ"))break;return;case 2:var o=t.cursor;if(t.eq_s("ִ"))break e;if(t.cursor=o,t.slice_from("ױ"))break;return;case 3:var u=t.cursor;if(t.eq_s("ִ"))break e;if(t.cursor=u,t.slice_from("ײ"))break;return;case 4:if(t.slice_from("כ"))break;return;case 5:if(t.slice_from("מ"))break;return;case 6:if(t.slice_from("נ"))break;return;case 7:if(t.slice_from("פ"))break;return;case 8:if(t.slice_from("צ"))break;return}t.cursor=i;break}if(t.cursor=i,t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=s;break}for(t.cursor=e,e=t.cursor;;){var a=t.cursor;r:{for(;;){var f=t.cursor;if(t.bra=t.cursor,t.in_grouping(k,1456,1474)){if(t.ket=t.cursor,!t.slice_del())return;t.cursor=f;break}if(t.cursor=f,t.cursor>=t.limit)break r;t.cursor++}continue}t.cursor=a;break}t.cursor=e})();var r=t.cursor;return(()=>{q=t.limit;var r=t.cursor;if(t.bra=t.cursor,t.eq_s("גע")){t.ket=t.cursor;var e=t.cursor,s=t.cursor;if(!t.eq_s("לט")&&(t.cursor=s,!t.eq_s("בנ"))&&(t.cursor=s,t.cursort.limit||(t.cursor=e,v=t.cursor,t.cursor=s,r=t.cursor,0==t.find_among(f)&&(t.cursor=r),e=t.cursor,t.in_grouping(d,1489,1520)&&t.in_grouping(d,1489,1520)&&t.in_grouping(d,1489,1520)?q=t.cursor:(t.cursor=e,t.go_out_grouping(g,1488,1522)&&(t.cursor++,t.go_in_grouping(g,1488,1522))&&(q=t.cursor,v<=q||(q=v))))})(),t.cursor=r,t.limit_backward=t.cursor,t.cursor=t.limit,e(),t.cursor=t.limit_backward,!0},this.stemWord=function(r){return t.setCurrent(r),this.stem(),t.getCurrent()}};
\ No newline at end of file
diff --git a/sphinx/search/nl.py b/sphinx/search/nl.py
index 2d2f2b8a8b6..de1a7d1f17d 100644
--- a/sphinx/search/nl.py
+++ b/sphinx/search/nl.py
@@ -1,124 +1,21 @@
-"""Dutch search language: includes the JS porter stemmer."""
+"""Dutch search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-dutch_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/dutch/stop.txt
-de | the
-en | and
-van | of, from
-ik | I, the ego
-te | (1) chez, at etc, (2) to, (3) too
-dat | that, which
-die | that, those, who, which
-in | in, inside
-een | a, an, one
-hij | he
-het | the, it
-niet | not, nothing, naught
-zijn | (1) to be, being, (2) his, one's, its
-is | is
-was | (1) was, past tense of all persons sing. of 'zijn' (to be) (2) wax, (3) the washing, (4) rise of river
-op | on, upon, at, in, up, used up
-aan | on, upon, to (as dative)
-met | with, by
-als | like, such as, when
-voor | (1) before, in front of, (2) furrow
-had | had, past tense all persons sing. of 'hebben' (have)
-er | there
-maar | but, only
-om | round, about, for etc
-hem | him
-dan | then
-zou | should/would, past tense all persons sing. of 'zullen'
-of | or, whether, if
-wat | what, something, anything
-mijn | possessive and noun 'mine'
-men | people, 'one'
-dit | this
-zo | so, thus, in this way
-door | through by
-over | over, across
-ze | she, her, they, them
-zich | oneself
-bij | (1) a bee, (2) by, near, at
-ook | also, too
-tot | till, until
-je | you
-mij | me
-uit | out of, from
-der | Old Dutch form of 'van der' still found in surnames
-daar | (1) there, (2) because
-haar | (1) her, their, them, (2) hair
-naar | (1) unpleasant, unwell etc, (2) towards, (3) as
-heb | present first person sing. of 'to have'
-hoe | how, why
-heeft | present third person sing. of 'to have'
-hebben | 'to have' and various parts thereof
-deze | this
-u | you
-want | (1) for, (2) mitten, (3) rigging
-nog | yet, still
-zal | 'shall', first and third person sing. of verb 'zullen' (will)
-me | me
-zij | she, they
-nu | now
-ge | 'thou', still used in Belgium and south Netherlands
-geen | none
-omdat | because
-iets | something, somewhat
-worden | to become, grow, get
-toch | yet, still
-al | all, every, each
-waren | (1) 'were' (2) to wander, (3) wares, (3)
-veel | much, many
-meer | (1) more, (2) lake
-doen | to do, to make
-toen | then, when
-moet | noun 'spot/mote' and present form of 'to must'
-ben | (1) am, (2) 'are' in interrogative second person singular of 'to be'
-zonder | without
-kan | noun 'can' and present form of 'to be able'
-hun | their, them
-dus | so, consequently
-alles | all, everything, anything
-onder | under, beneath
-ja | yes, of course
-eens | once, one day
-hier | here
-wie | who
-werd | imperfect third person sing. of 'become'
-altijd | always
-doch | yet, but etc
-wordt | present third person sing. of 'become'
-wezen | (1) to be, (2) 'been' as in 'been fishing', (3) orphans
-kunnen | to be able
-ons | us/our
-zelf | self
-tegen | against, towards, at
-na | after, near
-reeds | already
-wil | (1) present tense of 'want', (2) 'will', noun, (3) fender
-kon | could; past tense of 'to be able'
-niets | nothing
-uw | your
-iemand | somebody
-geweest | been; past participle of 'be'
-andere | other
-""") # NoQA: E501
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.nl import DUTCH_STOPWORDS
class SearchDutch(SearchLanguage):
lang = 'nl'
language_name = 'Dutch'
js_stemmer_rawcode = 'dutch-stemmer.js'
- stopwords = dutch_stopwords
+ stopwords = DUTCH_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('dutch')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/no.py b/sphinx/search/no.py
index dfc7786d46a..45b202f0926 100644
--- a/sphinx/search/no.py
+++ b/sphinx/search/no.py
@@ -1,199 +1,21 @@
-"""Norwegian search language: includes the JS Norwegian stemmer."""
+"""Norwegian search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-norwegian_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/norwegian/stop.txt
-og | and
-i | in
-jeg | I
-det | it/this/that
-at | to (w. inf.)
-en | a/an
-et | a/an
-den | it/this/that
-til | to
-er | is/am/are
-som | who/that
-på | on
-de | they / you(formal)
-med | with
-han | he
-av | of
-ikke | not
-ikkje | not *
-der | there
-så | so
-var | was/were
-meg | me
-seg | you
-men | but
-ett | one
-har | have
-om | about
-vi | we
-min | my
-mitt | my
-ha | have
-hadde | had
-hun | she
-nå | now
-over | over
-da | when/as
-ved | by/know
-fra | from
-du | you
-ut | out
-sin | your
-dem | them
-oss | us
-opp | up
-man | you/one
-kan | can
-hans | his
-hvor | where
-eller | or
-hva | what
-skal | shall/must
-selv | self (reflective)
-sjøl | self (reflective)
-her | here
-alle | all
-vil | will
-bli | become
-ble | became
-blei | became *
-blitt | have become
-kunne | could
-inn | in
-når | when
-være | be
-kom | come
-noen | some
-noe | some
-ville | would
-dere | you
-som | who/which/that
-deres | their/theirs
-kun | only/just
-ja | yes
-etter | after
-ned | down
-skulle | should
-denne | this
-for | for/because
-deg | you
-si | hers/his
-sine | hers/his
-sitt | hers/his
-mot | against
-å | to
-meget | much
-hvorfor | why
-dette | this
-disse | these/those
-uten | without
-hvordan | how
-ingen | none
-din | your
-ditt | your
-blir | become
-samme | same
-hvilken | which
-hvilke | which (plural)
-sånn | such a
-inni | inside/within
-mellom | between
-vår | our
-hver | each
-hvem | who
-vors | us/ours
-hvis | whose
-både | both
-bare | only/just
-enn | than
-fordi | as/because
-før | before
-mange | many
-også | also
-slik | just
-vært | been
-være | to be
-båe | both *
-begge | both
-siden | since
-dykk | your *
-dykkar | yours *
-dei | they *
-deira | them *
-deires | theirs *
-deim | them *
-di | your (fem.) *
-då | as/when *
-eg | I *
-ein | a/an *
-eit | a/an *
-eitt | a/an *
-elles | or *
-honom | he *
-hjå | at *
-ho | she *
-hoe | she *
-henne | her
-hennar | her/hers
-hennes | hers
-hoss | how *
-hossen | how *
-ikkje | not *
-ingi | noone *
-inkje | noone *
-korleis | how *
-korso | how *
-kva | what/which *
-kvar | where *
-kvarhelst | where *
-kven | who/whom *
-kvi | why *
-kvifor | why *
-me | we *
-medan | while *
-mi | my *
-mine | my *
-mykje | much *
-no | now *
-nokon | some (masc./neut.) *
-noka | some (fem.) *
-nokor | some *
-noko | some *
-nokre | some *
-si | his/hers *
-sia | since *
-sidan | since *
-so | so *
-somt | some *
-somme | some *
-um | about*
-upp | up *
-vere | be *
-vore | was *
-verte | become *
-vort | become *
-varte | became *
-vart | became *
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.no import NORWEGIAN_STOPWORDS
class SearchNorwegian(SearchLanguage):
lang = 'no'
language_name = 'Norwegian'
js_stemmer_rawcode = 'norwegian-stemmer.js'
- stopwords = norwegian_stopwords
+ stopwords = NORWEGIAN_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('norwegian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/non-minified-js/arabic-stemmer.js b/sphinx/search/non-minified-js/arabic-stemmer.js
new file mode 100644
index 00000000000..dbab12d81e1
--- /dev/null
+++ b/sphinx/search/non-minified-js/arabic-stemmer.js
@@ -0,0 +1,1612 @@
+// Generated from arabic.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var ArabicStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0640", -1, 1],
+ ["\u064B", -1, 1],
+ ["\u064C", -1, 1],
+ ["\u064D", -1, 1],
+ ["\u064E", -1, 1],
+ ["\u064F", -1, 1],
+ ["\u0650", -1, 1],
+ ["\u0651", -1, 1],
+ ["\u0652", -1, 1],
+ ["\u0660", -1, 2],
+ ["\u0661", -1, 3],
+ ["\u0662", -1, 4],
+ ["\u0663", -1, 5],
+ ["\u0664", -1, 6],
+ ["\u0665", -1, 7],
+ ["\u0666", -1, 8],
+ ["\u0667", -1, 9],
+ ["\u0668", -1, 10],
+ ["\u0669", -1, 11],
+ ["\uFE80", -1, 12],
+ ["\uFE81", -1, 16],
+ ["\uFE82", -1, 16],
+ ["\uFE83", -1, 13],
+ ["\uFE84", -1, 13],
+ ["\uFE85", -1, 17],
+ ["\uFE86", -1, 17],
+ ["\uFE87", -1, 14],
+ ["\uFE88", -1, 14],
+ ["\uFE89", -1, 15],
+ ["\uFE8A", -1, 15],
+ ["\uFE8B", -1, 15],
+ ["\uFE8C", -1, 15],
+ ["\uFE8D", -1, 18],
+ ["\uFE8E", -1, 18],
+ ["\uFE8F", -1, 19],
+ ["\uFE90", -1, 19],
+ ["\uFE91", -1, 19],
+ ["\uFE92", -1, 19],
+ ["\uFE93", -1, 20],
+ ["\uFE94", -1, 20],
+ ["\uFE95", -1, 21],
+ ["\uFE96", -1, 21],
+ ["\uFE97", -1, 21],
+ ["\uFE98", -1, 21],
+ ["\uFE99", -1, 22],
+ ["\uFE9A", -1, 22],
+ ["\uFE9B", -1, 22],
+ ["\uFE9C", -1, 22],
+ ["\uFE9D", -1, 23],
+ ["\uFE9E", -1, 23],
+ ["\uFE9F", -1, 23],
+ ["\uFEA0", -1, 23],
+ ["\uFEA1", -1, 24],
+ ["\uFEA2", -1, 24],
+ ["\uFEA3", -1, 24],
+ ["\uFEA4", -1, 24],
+ ["\uFEA5", -1, 25],
+ ["\uFEA6", -1, 25],
+ ["\uFEA7", -1, 25],
+ ["\uFEA8", -1, 25],
+ ["\uFEA9", -1, 26],
+ ["\uFEAA", -1, 26],
+ ["\uFEAB", -1, 27],
+ ["\uFEAC", -1, 27],
+ ["\uFEAD", -1, 28],
+ ["\uFEAE", -1, 28],
+ ["\uFEAF", -1, 29],
+ ["\uFEB0", -1, 29],
+ ["\uFEB1", -1, 30],
+ ["\uFEB2", -1, 30],
+ ["\uFEB3", -1, 30],
+ ["\uFEB4", -1, 30],
+ ["\uFEB5", -1, 31],
+ ["\uFEB6", -1, 31],
+ ["\uFEB7", -1, 31],
+ ["\uFEB8", -1, 31],
+ ["\uFEB9", -1, 32],
+ ["\uFEBA", -1, 32],
+ ["\uFEBB", -1, 32],
+ ["\uFEBC", -1, 32],
+ ["\uFEBD", -1, 33],
+ ["\uFEBE", -1, 33],
+ ["\uFEBF", -1, 33],
+ ["\uFEC0", -1, 33],
+ ["\uFEC1", -1, 34],
+ ["\uFEC2", -1, 34],
+ ["\uFEC3", -1, 34],
+ ["\uFEC4", -1, 34],
+ ["\uFEC5", -1, 35],
+ ["\uFEC6", -1, 35],
+ ["\uFEC7", -1, 35],
+ ["\uFEC8", -1, 35],
+ ["\uFEC9", -1, 36],
+ ["\uFECA", -1, 36],
+ ["\uFECB", -1, 36],
+ ["\uFECC", -1, 36],
+ ["\uFECD", -1, 37],
+ ["\uFECE", -1, 37],
+ ["\uFECF", -1, 37],
+ ["\uFED0", -1, 37],
+ ["\uFED1", -1, 38],
+ ["\uFED2", -1, 38],
+ ["\uFED3", -1, 38],
+ ["\uFED4", -1, 38],
+ ["\uFED5", -1, 39],
+ ["\uFED6", -1, 39],
+ ["\uFED7", -1, 39],
+ ["\uFED8", -1, 39],
+ ["\uFED9", -1, 40],
+ ["\uFEDA", -1, 40],
+ ["\uFEDB", -1, 40],
+ ["\uFEDC", -1, 40],
+ ["\uFEDD", -1, 41],
+ ["\uFEDE", -1, 41],
+ ["\uFEDF", -1, 41],
+ ["\uFEE0", -1, 41],
+ ["\uFEE1", -1, 42],
+ ["\uFEE2", -1, 42],
+ ["\uFEE3", -1, 42],
+ ["\uFEE4", -1, 42],
+ ["\uFEE5", -1, 43],
+ ["\uFEE6", -1, 43],
+ ["\uFEE7", -1, 43],
+ ["\uFEE8", -1, 43],
+ ["\uFEE9", -1, 44],
+ ["\uFEEA", -1, 44],
+ ["\uFEEB", -1, 44],
+ ["\uFEEC", -1, 44],
+ ["\uFEED", -1, 45],
+ ["\uFEEE", -1, 45],
+ ["\uFEEF", -1, 46],
+ ["\uFEF0", -1, 46],
+ ["\uFEF1", -1, 47],
+ ["\uFEF2", -1, 47],
+ ["\uFEF3", -1, 47],
+ ["\uFEF4", -1, 47],
+ ["\uFEF5", -1, 51],
+ ["\uFEF6", -1, 51],
+ ["\uFEF7", -1, 49],
+ ["\uFEF8", -1, 49],
+ ["\uFEF9", -1, 50],
+ ["\uFEFA", -1, 50],
+ ["\uFEFB", -1, 48],
+ ["\uFEFC", -1, 48]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u0622", -1, 1],
+ ["\u0623", -1, 1],
+ ["\u0624", -1, 1],
+ ["\u0625", -1, 1],
+ ["\u0626", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u0622", -1, 1],
+ ["\u0623", -1, 1],
+ ["\u0624", -1, 2],
+ ["\u0625", -1, 1],
+ ["\u0626", -1, 3]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u0627\u0644", -1, 2],
+ ["\u0628\u0627\u0644", -1, 1],
+ ["\u0643\u0627\u0644", -1, 1],
+ ["\u0644\u0644", -1, 2]
+ ];
+
+ /** @const */ var a_4 = [
+ ["\u0623\u0622", -1, 2],
+ ["\u0623\u0623", -1, 1],
+ ["\u0623\u0624", -1, 1],
+ ["\u0623\u0625", -1, 4],
+ ["\u0623\u0627", -1, 3]
+ ];
+
+ /** @const */ var a_5 = [
+ ["\u0641", -1, 1],
+ ["\u0648", -1, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["\u0627\u0644", -1, 2],
+ ["\u0628\u0627\u0644", -1, 1],
+ ["\u0643\u0627\u0644", -1, 1],
+ ["\u0644\u0644", -1, 2]
+ ];
+
+ /** @const */ var a_7 = [
+ ["\u0628", -1, 1],
+ ["\u0628\u0627", 0, -1],
+ ["\u0628\u0628", 0, 2],
+ ["\u0643\u0643", -1, 3]
+ ];
+
+ /** @const */ var a_8 = [
+ ["\u0633\u0623", -1, 4],
+ ["\u0633\u062A", -1, 2],
+ ["\u0633\u0646", -1, 3],
+ ["\u0633\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["\u062A\u0633\u062A", -1, 1],
+ ["\u0646\u0633\u062A", -1, 1],
+ ["\u064A\u0633\u062A", -1, 1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["\u0643\u0645\u0627", -1, 3],
+ ["\u0647\u0645\u0627", -1, 3],
+ ["\u0646\u0627", -1, 2],
+ ["\u0647\u0627", -1, 2],
+ ["\u0643", -1, 1],
+ ["\u0643\u0645", -1, 2],
+ ["\u0647\u0645", -1, 2],
+ ["\u0647\u0646", -1, 2],
+ ["\u0647", -1, 1],
+ ["\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_11 = [
+ ["\u0646", -1, 1]
+ ];
+
+ /** @const */ var a_12 = [
+ ["\u0627", -1, 1],
+ ["\u0648", -1, 1],
+ ["\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_13 = [
+ ["\u0627\u062A", -1, 1]
+ ];
+
+ /** @const */ var a_14 = [
+ ["\u062A", -1, 1]
+ ];
+
+ /** @const */ var a_15 = [
+ ["\u0629", -1, 1]
+ ];
+
+ /** @const */ var a_16 = [
+ ["\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_17 = [
+ ["\u0643\u0645\u0627", -1, 3],
+ ["\u0647\u0645\u0627", -1, 3],
+ ["\u0646\u0627", -1, 2],
+ ["\u0647\u0627", -1, 2],
+ ["\u0643", -1, 1],
+ ["\u0643\u0645", -1, 2],
+ ["\u0647\u0645", -1, 2],
+ ["\u0643\u0646", -1, 2],
+ ["\u0647\u0646", -1, 2],
+ ["\u0647", -1, 1],
+ ["\u0643\u0645\u0648", -1, 3],
+ ["\u0646\u064A", -1, 2]
+ ];
+
+ /** @const */ var a_18 = [
+ ["\u0627", -1, 1],
+ ["\u062A\u0627", 0, 2],
+ ["\u062A\u0645\u0627", 0, 4],
+ ["\u0646\u0627", 0, 2],
+ ["\u062A", -1, 1],
+ ["\u0646", -1, 1],
+ ["\u0627\u0646", 5, 3],
+ ["\u062A\u0646", 5, 2],
+ ["\u0648\u0646", 5, 3],
+ ["\u064A\u0646", 5, 3],
+ ["\u064A", -1, 1]
+ ];
+
+ /** @const */ var a_19 = [
+ ["\u0648\u0627", -1, 1],
+ ["\u062A\u0645", -1, 1]
+ ];
+
+ /** @const */ var a_20 = [
+ ["\u0648", -1, 1],
+ ["\u062A\u0645\u0648", 0, 2]
+ ];
+
+ /** @const */ var a_21 = [
+ ["\u0649", -1, 1]
+ ];
+
+ var /** boolean */ B_is_defined = false;
+ var /** boolean */ B_is_verb = false;
+ var /** boolean */ B_is_noun = false;
+
+
+ /** @return {boolean} */
+ function r_Normalize_pre() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("0"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("1"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("2"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("3"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("4"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("5"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("6"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("7"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("8"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("9"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("\u0621"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("\u0623"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("\u0625"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("\u0626"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("\u0622"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("\u0624"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u0627"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("\u0628"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("\u0629"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("\u062A"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("\u062B"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u062C"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("\u062D"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("\u062E"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("\u062F"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("\u0630"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("\u0631"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("\u0632"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("\u0633"))
+ {
+ return false;
+ }
+ break;
+ case 31:
+ if (!base.slice_from("\u0634"))
+ {
+ return false;
+ }
+ break;
+ case 32:
+ if (!base.slice_from("\u0635"))
+ {
+ return false;
+ }
+ break;
+ case 33:
+ if (!base.slice_from("\u0636"))
+ {
+ return false;
+ }
+ break;
+ case 34:
+ if (!base.slice_from("\u0637"))
+ {
+ return false;
+ }
+ break;
+ case 35:
+ if (!base.slice_from("\u0638"))
+ {
+ return false;
+ }
+ break;
+ case 36:
+ if (!base.slice_from("\u0639"))
+ {
+ return false;
+ }
+ break;
+ case 37:
+ if (!base.slice_from("\u063A"))
+ {
+ return false;
+ }
+ break;
+ case 38:
+ if (!base.slice_from("\u0641"))
+ {
+ return false;
+ }
+ break;
+ case 39:
+ if (!base.slice_from("\u0642"))
+ {
+ return false;
+ }
+ break;
+ case 40:
+ if (!base.slice_from("\u0643"))
+ {
+ return false;
+ }
+ break;
+ case 41:
+ if (!base.slice_from("\u0644"))
+ {
+ return false;
+ }
+ break;
+ case 42:
+ if (!base.slice_from("\u0645"))
+ {
+ return false;
+ }
+ break;
+ case 43:
+ if (!base.slice_from("\u0646"))
+ {
+ return false;
+ }
+ break;
+ case 44:
+ if (!base.slice_from("\u0647"))
+ {
+ return false;
+ }
+ break;
+ case 45:
+ if (!base.slice_from("\u0648"))
+ {
+ return false;
+ }
+ break;
+ case 46:
+ if (!base.slice_from("\u0649"))
+ {
+ return false;
+ }
+ break;
+ case 47:
+ if (!base.slice_from("\u064A"))
+ {
+ return false;
+ }
+ break;
+ case 48:
+ if (!base.slice_from("\u0644\u0627"))
+ {
+ return false;
+ }
+ break;
+ case 49:
+ if (!base.slice_from("\u0644\u0623"))
+ {
+ return false;
+ }
+ break;
+ case 50:
+ if (!base.slice_from("\u0644\u0625"))
+ {
+ return false;
+ }
+ break;
+ case 51:
+ if (!base.slice_from("\u0644\u0622"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Normalize_post() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0621"))
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_2);
+ if (among_var == 0)
+ {
+ break lab4;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0627"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0648"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u064A"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab3;
+ }
+ base.cursor = v_4;
+ if (base.cursor >= base.limit)
+ {
+ break lab2;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_3;
+ break;
+ }
+ }
+ base.cursor = v_2;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Checks1() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ B_is_noun = true;
+ B_is_verb = false;
+ B_is_defined = true;
+ break;
+ case 2:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ B_is_noun = true;
+ B_is_verb = false;
+ B_is_defined = true;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step1() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_4);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0623"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0622"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0627"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0625"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step2() {
+ base.bra = base.cursor;
+ if (base.find_among(a_5) == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!(base.eq_s("\u0627")))
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = v_1;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step3a_Noun() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_6);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step3b_Noun() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_7);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0628"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0643"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step3_Verb() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_8);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u064A"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u062A"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0646"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u0623"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Prefix_Step4_Verb() {
+ base.bra = base.cursor;
+ if (base.find_among(a_9) == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ B_is_verb = true;
+ B_is_noun = false;
+ if (!base.slice_from("\u0627\u0633\u062A"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step1a() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_10);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length < 6)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step1b() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_11) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length <= 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step2a() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_12) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length <= 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step2b() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_13) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step2c1() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_14) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step2c2() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_15) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Noun_Step3() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_16) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 3)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Verb_Step1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_17);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length < 6)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Verb_Step2a() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_18);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.current.length <= 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (base.current.length < 6)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Verb_Step2b() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_19) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (base.current.length < 5)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_Verb_Step2c() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_20);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (base.current.length < 4)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.current.length < 6)
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Suffix_All_alef_maqsura() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_21) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u064A"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ B_is_noun = true;
+ B_is_verb = true;
+ B_is_defined = false;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_Checks1();
+ base.cursor = v_1;
+ r_Normalize_pre();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!B_is_verb)
+ {
+ break lab2;
+ }
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab4: {
+ {
+ var v_5 = 1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab5: {
+ if (!r_Suffix_Verb_Step1())
+ {
+ break lab5;
+ }
+ v_5--;
+ continue;
+ }
+ base.cursor = base.limit - v_6;
+ break;
+ }
+ if (v_5 > 0)
+ {
+ break lab4;
+ }
+ }
+ lab6: {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab7: {
+ if (!r_Suffix_Verb_Step2a())
+ {
+ break lab7;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_7;
+ lab8: {
+ if (!r_Suffix_Verb_Step2c())
+ {
+ break lab8;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_7;
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab4;
+ }
+ base.cursor--;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_4;
+ lab9: {
+ if (!r_Suffix_Verb_Step2b())
+ {
+ break lab9;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_4;
+ if (!r_Suffix_Verb_Step2a())
+ {
+ break lab2;
+ }
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ lab10: {
+ if (!B_is_noun)
+ {
+ break lab10;
+ }
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab11: {
+ lab12: {
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ lab13: {
+ if (!r_Suffix_Noun_Step2c2())
+ {
+ break lab13;
+ }
+ break lab12;
+ }
+ base.cursor = base.limit - v_9;
+ lab14: {
+ lab15: {
+ if (!B_is_defined)
+ {
+ break lab15;
+ }
+ break lab14;
+ }
+ if (!r_Suffix_Noun_Step1a())
+ {
+ break lab14;
+ }
+ lab16: {
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab17: {
+ if (!r_Suffix_Noun_Step2a())
+ {
+ break lab17;
+ }
+ break lab16;
+ }
+ base.cursor = base.limit - v_10;
+ lab18: {
+ if (!r_Suffix_Noun_Step2b())
+ {
+ break lab18;
+ }
+ break lab16;
+ }
+ base.cursor = base.limit - v_10;
+ lab19: {
+ if (!r_Suffix_Noun_Step2c1())
+ {
+ break lab19;
+ }
+ break lab16;
+ }
+ base.cursor = base.limit - v_10;
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab14;
+ }
+ base.cursor--;
+ }
+ break lab12;
+ }
+ base.cursor = base.limit - v_9;
+ lab20: {
+ if (!r_Suffix_Noun_Step1b())
+ {
+ break lab20;
+ }
+ lab21: {
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ lab22: {
+ if (!r_Suffix_Noun_Step2a())
+ {
+ break lab22;
+ }
+ break lab21;
+ }
+ base.cursor = base.limit - v_11;
+ lab23: {
+ if (!r_Suffix_Noun_Step2b())
+ {
+ break lab23;
+ }
+ break lab21;
+ }
+ base.cursor = base.limit - v_11;
+ if (!r_Suffix_Noun_Step2c1())
+ {
+ break lab20;
+ }
+ }
+ break lab12;
+ }
+ base.cursor = base.limit - v_9;
+ lab24: {
+ lab25: {
+ if (!B_is_defined)
+ {
+ break lab25;
+ }
+ break lab24;
+ }
+ if (!r_Suffix_Noun_Step2a())
+ {
+ break lab24;
+ }
+ break lab12;
+ }
+ base.cursor = base.limit - v_9;
+ if (!r_Suffix_Noun_Step2b())
+ {
+ base.cursor = base.limit - v_8;
+ break lab11;
+ }
+ }
+ }
+ if (!r_Suffix_Noun_Step3())
+ {
+ break lab10;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (!r_Suffix_All_alef_maqsura())
+ {
+ break lab0;
+ }
+ }
+ }
+ base.cursor = base.limit - v_2;
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_12 = base.cursor;
+ lab26: {
+ /** @const */ var /** number */ v_13 = base.cursor;
+ lab27: {
+ if (!r_Prefix_Step1())
+ {
+ base.cursor = v_13;
+ break lab27;
+ }
+ }
+ /** @const */ var /** number */ v_14 = base.cursor;
+ lab28: {
+ if (!r_Prefix_Step2())
+ {
+ base.cursor = v_14;
+ break lab28;
+ }
+ }
+ lab29: {
+ /** @const */ var /** number */ v_15 = base.cursor;
+ lab30: {
+ if (!r_Prefix_Step3a_Noun())
+ {
+ break lab30;
+ }
+ break lab29;
+ }
+ base.cursor = v_15;
+ lab31: {
+ if (!B_is_noun)
+ {
+ break lab31;
+ }
+ if (!r_Prefix_Step3b_Noun())
+ {
+ break lab31;
+ }
+ break lab29;
+ }
+ base.cursor = v_15;
+ if (!B_is_verb)
+ {
+ break lab26;
+ }
+ /** @const */ var /** number */ v_16 = base.cursor;
+ lab32: {
+ if (!r_Prefix_Step3_Verb())
+ {
+ base.cursor = v_16;
+ break lab32;
+ }
+ }
+ if (!r_Prefix_Step4_Verb())
+ {
+ break lab26;
+ }
+ }
+ }
+ base.cursor = v_12;
+ r_Normalize_post();
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/armenian-stemmer.js b/sphinx/search/non-minified-js/armenian-stemmer.js
new file mode 100644
index 00000000000..915146dbd0a
--- /dev/null
+++ b/sphinx/search/non-minified-js/armenian-stemmer.js
@@ -0,0 +1,350 @@
+// Generated from armenian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var ArmenianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0580\u0578\u0580\u0564", -1, 1],
+ ["\u0565\u0580\u0578\u0580\u0564", 0, 1],
+ ["\u0561\u056C\u056B", -1, 1],
+ ["\u0561\u056F\u056B", -1, 1],
+ ["\u0578\u0580\u0561\u056F", -1, 1],
+ ["\u0565\u0572", -1, 1],
+ ["\u0561\u056F\u0561\u0576", -1, 1],
+ ["\u0561\u0580\u0561\u0576", -1, 1],
+ ["\u0565\u0576", -1, 1],
+ ["\u0565\u056F\u0565\u0576", 8, 1],
+ ["\u0565\u0580\u0565\u0576", 8, 1],
+ ["\u0578\u0580\u0567\u0576", -1, 1],
+ ["\u056B\u0576", -1, 1],
+ ["\u0563\u056B\u0576", 12, 1],
+ ["\u0578\u057E\u056B\u0576", 12, 1],
+ ["\u056C\u0561\u0575\u0576", -1, 1],
+ ["\u057E\u0578\u0582\u0576", -1, 1],
+ ["\u057A\u0565\u057D", -1, 1],
+ ["\u056B\u057E", -1, 1],
+ ["\u0561\u057F", -1, 1],
+ ["\u0561\u057E\u0565\u057F", -1, 1],
+ ["\u056F\u0578\u057F", -1, 1],
+ ["\u0562\u0561\u0580", -1, 1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u0561", -1, 1],
+ ["\u0561\u0581\u0561", 0, 1],
+ ["\u0565\u0581\u0561", 0, 1],
+ ["\u057E\u0565", -1, 1],
+ ["\u0561\u0581\u0580\u056B", -1, 1],
+ ["\u0561\u0581\u056B", -1, 1],
+ ["\u0565\u0581\u056B", -1, 1],
+ ["\u057E\u0565\u0581\u056B", 6, 1],
+ ["\u0561\u056C", -1, 1],
+ ["\u0568\u0561\u056C", 8, 1],
+ ["\u0561\u0576\u0561\u056C", 8, 1],
+ ["\u0565\u0576\u0561\u056C", 8, 1],
+ ["\u0561\u0581\u0576\u0561\u056C", 8, 1],
+ ["\u0565\u056C", -1, 1],
+ ["\u0568\u0565\u056C", 13, 1],
+ ["\u0576\u0565\u056C", 13, 1],
+ ["\u0581\u0576\u0565\u056C", 15, 1],
+ ["\u0565\u0581\u0576\u0565\u056C", 16, 1],
+ ["\u0579\u0565\u056C", 13, 1],
+ ["\u057E\u0565\u056C", 13, 1],
+ ["\u0561\u0581\u057E\u0565\u056C", 19, 1],
+ ["\u0565\u0581\u057E\u0565\u056C", 19, 1],
+ ["\u057F\u0565\u056C", 13, 1],
+ ["\u0561\u057F\u0565\u056C", 22, 1],
+ ["\u0578\u057F\u0565\u056C", 22, 1],
+ ["\u056F\u0578\u057F\u0565\u056C", 24, 1],
+ ["\u057E\u0561\u056E", -1, 1],
+ ["\u0578\u0582\u0574", -1, 1],
+ ["\u057E\u0578\u0582\u0574", 27, 1],
+ ["\u0561\u0576", -1, 1],
+ ["\u0581\u0561\u0576", 29, 1],
+ ["\u0561\u0581\u0561\u0576", 30, 1],
+ ["\u0561\u0581\u0580\u056B\u0576", -1, 1],
+ ["\u0561\u0581\u056B\u0576", -1, 1],
+ ["\u0565\u0581\u056B\u0576", -1, 1],
+ ["\u057E\u0565\u0581\u056B\u0576", 34, 1],
+ ["\u0561\u056C\u056B\u057D", -1, 1],
+ ["\u0565\u056C\u056B\u057D", -1, 1],
+ ["\u0561\u057E", -1, 1],
+ ["\u0561\u0581\u0561\u057E", 38, 1],
+ ["\u0565\u0581\u0561\u057E", 38, 1],
+ ["\u0561\u056C\u0578\u057E", -1, 1],
+ ["\u0565\u056C\u0578\u057E", -1, 1],
+ ["\u0561\u0580", -1, 1],
+ ["\u0561\u0581\u0561\u0580", 43, 1],
+ ["\u0565\u0581\u0561\u0580", 43, 1],
+ ["\u0561\u0581\u0580\u056B\u0580", -1, 1],
+ ["\u0561\u0581\u056B\u0580", -1, 1],
+ ["\u0565\u0581\u056B\u0580", -1, 1],
+ ["\u057E\u0565\u0581\u056B\u0580", 48, 1],
+ ["\u0561\u0581", -1, 1],
+ ["\u0565\u0581", -1, 1],
+ ["\u0561\u0581\u0580\u0565\u0581", 51, 1],
+ ["\u0561\u056C\u0578\u0582\u0581", -1, 1],
+ ["\u0565\u056C\u0578\u0582\u0581", -1, 1],
+ ["\u0561\u056C\u0578\u0582", -1, 1],
+ ["\u0565\u056C\u0578\u0582", -1, 1],
+ ["\u0561\u0584", -1, 1],
+ ["\u0581\u0561\u0584", 57, 1],
+ ["\u0561\u0581\u0561\u0584", 58, 1],
+ ["\u0561\u0581\u0580\u056B\u0584", -1, 1],
+ ["\u0561\u0581\u056B\u0584", -1, 1],
+ ["\u0565\u0581\u056B\u0584", -1, 1],
+ ["\u057E\u0565\u0581\u056B\u0584", 62, 1],
+ ["\u0561\u0576\u0584", -1, 1],
+ ["\u0581\u0561\u0576\u0584", 64, 1],
+ ["\u0561\u0581\u0561\u0576\u0584", 65, 1],
+ ["\u0561\u0581\u0580\u056B\u0576\u0584", -1, 1],
+ ["\u0561\u0581\u056B\u0576\u0584", -1, 1],
+ ["\u0565\u0581\u056B\u0576\u0584", -1, 1],
+ ["\u057E\u0565\u0581\u056B\u0576\u0584", 69, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u0578\u0580\u0564", -1, 1],
+ ["\u0578\u0582\u0575\u0569", -1, 1],
+ ["\u0578\u0582\u0570\u056B", -1, 1],
+ ["\u0581\u056B", -1, 1],
+ ["\u056B\u056C", -1, 1],
+ ["\u0561\u056F", -1, 1],
+ ["\u0575\u0561\u056F", 5, 1],
+ ["\u0561\u0576\u0561\u056F", 5, 1],
+ ["\u056B\u056F", -1, 1],
+ ["\u0578\u0582\u056F", -1, 1],
+ ["\u0561\u0576", -1, 1],
+ ["\u057A\u0561\u0576", 10, 1],
+ ["\u057D\u057F\u0561\u0576", 10, 1],
+ ["\u0561\u0580\u0561\u0576", 10, 1],
+ ["\u0565\u0572\u0567\u0576", -1, 1],
+ ["\u0575\u0578\u0582\u0576", -1, 1],
+ ["\u0578\u0582\u0569\u0575\u0578\u0582\u0576", 15, 1],
+ ["\u0561\u056E\u0578", -1, 1],
+ ["\u056B\u0579", -1, 1],
+ ["\u0578\u0582\u057D", -1, 1],
+ ["\u0578\u0582\u057D\u057F", -1, 1],
+ ["\u0563\u0561\u0580", -1, 1],
+ ["\u057E\u0578\u0580", -1, 1],
+ ["\u0561\u057E\u0578\u0580", 22, 1],
+ ["\u0578\u0581", -1, 1],
+ ["\u0561\u0576\u0585\u0581", -1, 1],
+ ["\u0578\u0582", -1, 1],
+ ["\u0584", -1, 1],
+ ["\u0579\u0565\u0584", 27, 1],
+ ["\u056B\u0584", 27, 1],
+ ["\u0561\u056C\u056B\u0584", 29, 1],
+ ["\u0561\u0576\u056B\u0584", 29, 1],
+ ["\u057E\u0561\u056E\u0584", 27, 1],
+ ["\u0578\u0582\u0575\u0584", 27, 1],
+ ["\u0565\u0576\u0584", 27, 1],
+ ["\u0578\u0576\u0584", 27, 1],
+ ["\u0578\u0582\u0576\u0584", 27, 1],
+ ["\u0574\u0578\u0582\u0576\u0584", 36, 1],
+ ["\u056B\u0579\u0584", 27, 1],
+ ["\u0561\u0580\u0584", 27, 1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u057D\u0561", -1, 1],
+ ["\u057E\u0561", -1, 1],
+ ["\u0561\u0574\u0562", -1, 1],
+ ["\u0564", -1, 1],
+ ["\u0561\u0576\u0564", 3, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576\u0564", 4, 1],
+ ["\u057E\u0561\u0576\u0564", 4, 1],
+ ["\u0578\u057B\u0564", 3, 1],
+ ["\u0565\u0580\u0564", 3, 1],
+ ["\u0576\u0565\u0580\u0564", 8, 1],
+ ["\u0578\u0582\u0564", 3, 1],
+ ["\u0568", -1, 1],
+ ["\u0561\u0576\u0568", 11, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576\u0568", 12, 1],
+ ["\u057E\u0561\u0576\u0568", 12, 1],
+ ["\u0578\u057B\u0568", 11, 1],
+ ["\u0565\u0580\u0568", 11, 1],
+ ["\u0576\u0565\u0580\u0568", 16, 1],
+ ["\u056B", -1, 1],
+ ["\u057E\u056B", 18, 1],
+ ["\u0565\u0580\u056B", 18, 1],
+ ["\u0576\u0565\u0580\u056B", 20, 1],
+ ["\u0561\u0576\u0578\u0582\u0574", -1, 1],
+ ["\u0565\u0580\u0578\u0582\u0574", -1, 1],
+ ["\u0576\u0565\u0580\u0578\u0582\u0574", 23, 1],
+ ["\u0576", -1, 1],
+ ["\u0561\u0576", 25, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576", 26, 1],
+ ["\u057E\u0561\u0576", 26, 1],
+ ["\u056B\u0576", 25, 1],
+ ["\u0565\u0580\u056B\u0576", 29, 1],
+ ["\u0576\u0565\u0580\u056B\u0576", 30, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576\u0576", 25, 1],
+ ["\u0565\u0580\u0576", 25, 1],
+ ["\u0576\u0565\u0580\u0576", 33, 1],
+ ["\u0578\u0582\u0576", 25, 1],
+ ["\u0578\u057B", -1, 1],
+ ["\u0578\u0582\u0569\u0575\u0561\u0576\u057D", -1, 1],
+ ["\u057E\u0561\u0576\u057D", -1, 1],
+ ["\u0578\u057B\u057D", -1, 1],
+ ["\u0578\u057E", -1, 1],
+ ["\u0561\u0576\u0578\u057E", 40, 1],
+ ["\u057E\u0578\u057E", 40, 1],
+ ["\u0565\u0580\u0578\u057E", 40, 1],
+ ["\u0576\u0565\u0580\u0578\u057E", 43, 1],
+ ["\u0565\u0580", -1, 1],
+ ["\u0576\u0565\u0580", 45, 1],
+ ["\u0581", -1, 1],
+ ["\u056B\u0581", 47, 1],
+ ["\u057E\u0561\u0576\u056B\u0581", 48, 1],
+ ["\u0578\u057B\u056B\u0581", 48, 1],
+ ["\u057E\u056B\u0581", 48, 1],
+ ["\u0565\u0580\u056B\u0581", 48, 1],
+ ["\u0576\u0565\u0580\u056B\u0581", 52, 1],
+ ["\u0581\u056B\u0581", 48, 1],
+ ["\u0578\u0581", 47, 1],
+ ["\u0578\u0582\u0581", 47, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [209, 4, 128, 0, 18];
+
+ var /** number */ I_p2 = 0;
+ var /** number */ I_pV = 0;
+
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_pV = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!base.go_out_grouping(g_v, 1377, 1413))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_pV = base.cursor;
+ if (!base.go_in_grouping(g_v, 1377, 1413))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_out_grouping(g_v, 1377, 1413))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 1377, 1413))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_adjective() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_0) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_noun() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_ending() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_3) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ if (base.cursor < I_pV)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_pV;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_ending();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_verb();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_adjective();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_noun();
+ base.cursor = base.limit - v_5;
+ base.limit_backward = v_1;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/base-stemmer.js b/sphinx/search/non-minified-js/base-stemmer.js
index ca6cca156a3..e6fa0c49260 100644
--- a/sphinx/search/non-minified-js/base-stemmer.js
+++ b/sphinx/search/non-minified-js/base-stemmer.js
@@ -1,5 +1,18 @@
+// @ts-check
+
/**@constructor*/
BaseStemmer = function() {
+ /** @protected */
+ this.current = '';
+ this.cursor = 0;
+ this.limit = 0;
+ this.limit_backward = 0;
+ this.bra = 0;
+ this.ket = 0;
+
+ /**
+ * @param {string} value
+ */
this.setCurrent = function(value) {
this.current = value;
this.cursor = 0;
@@ -9,11 +22,18 @@ BaseStemmer = function() {
this.ket = this.limit;
};
+ /**
+ * @return {string}
+ */
this.getCurrent = function() {
return this.current;
};
+ /**
+ * @param {BaseStemmer} other
+ */
this.copy_from = function(other) {
+ /** @protected */
this.current = other.current;
this.cursor = other.cursor;
this.limit = other.limit;
@@ -22,7 +42,14 @@ BaseStemmer = function() {
this.ket = other.ket;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
this.in_grouping = function(s, min, max) {
+ /** @protected */
if (this.cursor >= this.limit) return false;
var ch = this.current.charCodeAt(this.cursor);
if (ch > max || ch < min) return false;
@@ -32,7 +59,34 @@ BaseStemmer = function() {
return true;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
+ this.go_in_grouping = function(s, min, max) {
+ /** @protected */
+ while (this.cursor < this.limit) {
+ var ch = this.current.charCodeAt(this.cursor);
+ if (ch > max || ch < min)
+ return true;
+ ch -= min;
+ if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) == 0)
+ return true;
+ this.cursor++;
+ }
+ return false;
+ };
+
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
this.in_grouping_b = function(s, min, max) {
+ /** @protected */
if (this.cursor <= this.limit_backward) return false;
var ch = this.current.charCodeAt(this.cursor - 1);
if (ch > max || ch < min) return false;
@@ -42,7 +96,32 @@ BaseStemmer = function() {
return true;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
+ this.go_in_grouping_b = function(s, min, max) {
+ /** @protected */
+ while (this.cursor > this.limit_backward) {
+ var ch = this.current.charCodeAt(this.cursor - 1);
+ if (ch > max || ch < min) return true;
+ ch -= min;
+ if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) == 0) return true;
+ this.cursor--;
+ }
+ return false;
+ };
+
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
this.out_grouping = function(s, min, max) {
+ /** @protected */
if (this.cursor >= this.limit) return false;
var ch = this.current.charCodeAt(this.cursor);
if (ch > max || ch < min) {
@@ -57,7 +136,35 @@ BaseStemmer = function() {
return false;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
+ this.go_out_grouping = function(s, min, max) {
+ /** @protected */
+ while (this.cursor < this.limit) {
+ var ch = this.current.charCodeAt(this.cursor);
+ if (ch <= max && ch >= min) {
+ ch -= min;
+ if ((s[ch >>> 3] & (0X1 << (ch & 0x7))) != 0) {
+ return true;
+ }
+ }
+ this.cursor++;
+ }
+ return false;
+ };
+
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
this.out_grouping_b = function(s, min, max) {
+ /** @protected */
if (this.cursor <= this.limit_backward) return false;
var ch = this.current.charCodeAt(this.cursor - 1);
if (ch > max || ch < min) {
@@ -72,8 +179,34 @@ BaseStemmer = function() {
return false;
};
+ /**
+ * @param {number[]} s
+ * @param {number} min
+ * @param {number} max
+ * @return {boolean}
+ */
+ this.go_out_grouping_b = function(s, min, max) {
+ /** @protected */
+ while (this.cursor > this.limit_backward) {
+ var ch = this.current.charCodeAt(this.cursor - 1);
+ if (ch <= max && ch >= min) {
+ ch -= min;
+ if ((s[ch >>> 3] & (0x1 << (ch & 0x7))) != 0) {
+ return true;
+ }
+ }
+ this.cursor--;
+ }
+ return false;
+ };
+
+ /**
+ * @param {string} s
+ * @return {boolean}
+ */
this.eq_s = function(s)
{
+ /** @protected */
if (this.limit - this.cursor < s.length) return false;
if (this.current.slice(this.cursor, this.cursor + s.length) != s)
{
@@ -83,8 +216,13 @@ BaseStemmer = function() {
return true;
};
+ /**
+ * @param {string} s
+ * @return {boolean}
+ */
this.eq_s_b = function(s)
{
+ /** @protected */
if (this.cursor - this.limit_backward < s.length) return false;
if (this.current.slice(this.cursor - s.length, this.cursor) != s)
{
@@ -94,8 +232,13 @@ BaseStemmer = function() {
return true;
};
- /** @return {number} */ this.find_among = function(v)
+ /**
+ * @param {Among[]} v
+ * @return {number}
+ */
+ this.find_among = function(v)
{
+ /** @protected */
var i = 0;
var j = v.length;
@@ -165,8 +308,13 @@ BaseStemmer = function() {
};
// find_among_b is for backwards processing. Same comments apply
+ /**
+ * @param {Among[]} v
+ * @return {number}
+ */
this.find_among_b = function(v)
{
+ /** @protected */
var i = 0;
var j = v.length
@@ -232,8 +380,15 @@ BaseStemmer = function() {
/* to replace chars between c_bra and c_ket in this.current by the
* chars in s.
*/
+ /**
+ * @param {number} c_bra
+ * @param {number} c_ket
+ * @param {string} s
+ * @return {number}
+ */
this.replace_s = function(c_bra, c_ket, s)
{
+ /** @protected */
var adjustment = s.length - (c_ket - c_bra);
this.current = this.current.slice(0, c_bra) + s + this.current.slice(c_ket);
this.limit += adjustment;
@@ -242,8 +397,12 @@ BaseStemmer = function() {
return adjustment;
};
+ /**
+ * @return {boolean}
+ */
this.slice_check = function()
{
+ /** @protected */
if (this.bra < 0 ||
this.bra > this.ket ||
this.ket > this.limit ||
@@ -254,8 +413,13 @@ BaseStemmer = function() {
return true;
};
+ /**
+ * @param {number} c_bra
+ * @return {boolean}
+ */
this.slice_from = function(s)
{
+ /** @protected */
var result = false;
if (this.slice_check())
{
@@ -265,20 +429,34 @@ BaseStemmer = function() {
return result;
};
+ /**
+ * @return {boolean}
+ */
this.slice_del = function()
{
+ /** @protected */
return this.slice_from("");
};
+ /**
+ * @param {number} c_bra
+ * @param {number} c_ket
+ * @param {string} s
+ */
this.insert = function(c_bra, c_ket, s)
{
+ /** @protected */
var adjustment = this.replace_s(c_bra, c_ket, s);
if (c_bra <= this.bra) this.bra += adjustment;
if (c_bra <= this.ket) this.ket += adjustment;
};
+ /**
+ * @return {string}
+ */
this.slice_to = function()
{
+ /** @protected */
var result = '';
if (this.slice_check())
{
@@ -287,8 +465,12 @@ BaseStemmer = function() {
return result;
};
+ /**
+ * @return {string}
+ */
this.assign_to = function()
{
+ /** @protected */
return this.current.slice(0, this.limit);
};
};
diff --git a/sphinx/search/non-minified-js/basque-stemmer.js b/sphinx/search/non-minified-js/basque-stemmer.js
new file mode 100644
index 00000000000..5ed3a26af26
--- /dev/null
+++ b/sphinx/search/non-minified-js/basque-stemmer.js
@@ -0,0 +1,736 @@
+// Generated from basque.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var BasqueStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["idea", -1, 1],
+ ["bidea", 0, 1],
+ ["kidea", 0, 1],
+ ["pidea", 0, 1],
+ ["kundea", -1, 1],
+ ["galea", -1, 1],
+ ["tailea", -1, 1],
+ ["tzailea", -1, 1],
+ ["gunea", -1, 1],
+ ["kunea", -1, 1],
+ ["tzaga", -1, 1],
+ ["gaia", -1, 1],
+ ["aldia", -1, 1],
+ ["taldia", 12, 1],
+ ["karia", -1, 1],
+ ["garria", -1, 2],
+ ["karria", -1, 1],
+ ["ka", -1, 1],
+ ["tzaka", 17, 1],
+ ["la", -1, 1],
+ ["mena", -1, 1],
+ ["pena", -1, 1],
+ ["kina", -1, 1],
+ ["ezina", -1, 1],
+ ["tezina", 23, 1],
+ ["kuna", -1, 1],
+ ["tuna", -1, 1],
+ ["kizuna", -1, 1],
+ ["era", -1, 1],
+ ["bera", 28, 1],
+ ["arabera", 29, -1],
+ ["kera", 28, 1],
+ ["pera", 28, 1],
+ ["orra", -1, 1],
+ ["korra", 33, 1],
+ ["dura", -1, 1],
+ ["gura", -1, 1],
+ ["kura", -1, 1],
+ ["tura", -1, 1],
+ ["eta", -1, 1],
+ ["keta", 39, 1],
+ ["gailua", -1, 1],
+ ["eza", -1, 1],
+ ["erreza", 42, 1],
+ ["tza", -1, 2],
+ ["gaitza", 44, 1],
+ ["kaitza", 44, 1],
+ ["kuntza", 44, 1],
+ ["ide", -1, 1],
+ ["bide", 48, 1],
+ ["kide", 48, 1],
+ ["pide", 48, 1],
+ ["kunde", -1, 1],
+ ["tzake", -1, 1],
+ ["tzeke", -1, 1],
+ ["le", -1, 1],
+ ["gale", 55, 1],
+ ["taile", 55, 1],
+ ["tzaile", 55, 1],
+ ["gune", -1, 1],
+ ["kune", -1, 1],
+ ["tze", -1, 1],
+ ["atze", 61, 1],
+ ["gai", -1, 1],
+ ["aldi", -1, 1],
+ ["taldi", 64, 1],
+ ["ki", -1, 1],
+ ["ari", -1, 1],
+ ["kari", 67, 1],
+ ["lari", 67, 1],
+ ["tari", 67, 1],
+ ["etari", 70, 1],
+ ["garri", -1, 2],
+ ["karri", -1, 1],
+ ["arazi", -1, 1],
+ ["tarazi", 74, 1],
+ ["an", -1, 1],
+ ["ean", 76, 1],
+ ["rean", 77, 1],
+ ["kan", 76, 1],
+ ["etan", 76, 1],
+ ["atseden", -1, -1],
+ ["men", -1, 1],
+ ["pen", -1, 1],
+ ["kin", -1, 1],
+ ["rekin", 84, 1],
+ ["ezin", -1, 1],
+ ["tezin", 86, 1],
+ ["tun", -1, 1],
+ ["kizun", -1, 1],
+ ["go", -1, 1],
+ ["ago", 90, 1],
+ ["tio", -1, 1],
+ ["dako", -1, 1],
+ ["or", -1, 1],
+ ["kor", 94, 1],
+ ["tzat", -1, 1],
+ ["du", -1, 1],
+ ["gailu", -1, 1],
+ ["tu", -1, 1],
+ ["atu", 99, 1],
+ ["aldatu", 100, 1],
+ ["tatu", 100, 1],
+ ["baditu", 99, -1],
+ ["ez", -1, 1],
+ ["errez", 104, 1],
+ ["tzez", 104, 1],
+ ["gaitz", -1, 1],
+ ["kaitz", -1, 1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["ada", -1, 1],
+ ["kada", 0, 1],
+ ["anda", -1, 1],
+ ["denda", -1, 1],
+ ["gabea", -1, 1],
+ ["kabea", -1, 1],
+ ["aldea", -1, 1],
+ ["kaldea", 6, 1],
+ ["taldea", 6, 1],
+ ["ordea", -1, 1],
+ ["zalea", -1, 1],
+ ["tzalea", 10, 1],
+ ["gilea", -1, 1],
+ ["emea", -1, 1],
+ ["kumea", -1, 1],
+ ["nea", -1, 1],
+ ["enea", 15, 1],
+ ["zionea", 15, 1],
+ ["unea", 15, 1],
+ ["gunea", 18, 1],
+ ["pea", -1, 1],
+ ["aurrea", -1, 1],
+ ["tea", -1, 1],
+ ["kotea", 22, 1],
+ ["artea", 22, 1],
+ ["ostea", 22, 1],
+ ["etxea", -1, 1],
+ ["ga", -1, 1],
+ ["anga", 27, 1],
+ ["gaia", -1, 1],
+ ["aldia", -1, 1],
+ ["taldia", 30, 1],
+ ["handia", -1, 1],
+ ["mendia", -1, 1],
+ ["geia", -1, 1],
+ ["egia", -1, 1],
+ ["degia", 35, 1],
+ ["tegia", 35, 1],
+ ["nahia", -1, 1],
+ ["ohia", -1, 1],
+ ["kia", -1, 1],
+ ["tokia", 40, 1],
+ ["oia", -1, 1],
+ ["koia", 42, 1],
+ ["aria", -1, 1],
+ ["karia", 44, 1],
+ ["laria", 44, 1],
+ ["taria", 44, 1],
+ ["eria", -1, 1],
+ ["keria", 48, 1],
+ ["teria", 48, 1],
+ ["garria", -1, 2],
+ ["larria", -1, 1],
+ ["kirria", -1, 1],
+ ["duria", -1, 1],
+ ["asia", -1, 1],
+ ["tia", -1, 1],
+ ["ezia", -1, 1],
+ ["bizia", -1, 1],
+ ["ontzia", -1, 1],
+ ["ka", -1, 1],
+ ["joka", 60, 3],
+ ["aurka", 60, -1],
+ ["ska", 60, 1],
+ ["xka", 60, 1],
+ ["zka", 60, 1],
+ ["gibela", -1, 1],
+ ["gela", -1, 1],
+ ["kaila", -1, 1],
+ ["skila", -1, 1],
+ ["tila", -1, 1],
+ ["ola", -1, 1],
+ ["na", -1, 1],
+ ["kana", 72, 1],
+ ["ena", 72, 1],
+ ["garrena", 74, 1],
+ ["gerrena", 74, 1],
+ ["urrena", 74, 1],
+ ["zaina", 72, 1],
+ ["tzaina", 78, 1],
+ ["kina", 72, 1],
+ ["mina", 72, 1],
+ ["garna", 72, 1],
+ ["una", 72, 1],
+ ["duna", 83, 1],
+ ["asuna", 83, 1],
+ ["tasuna", 85, 1],
+ ["ondoa", -1, 1],
+ ["kondoa", 87, 1],
+ ["ngoa", -1, 1],
+ ["zioa", -1, 1],
+ ["koa", -1, 1],
+ ["takoa", 91, 1],
+ ["zkoa", 91, 1],
+ ["noa", -1, 1],
+ ["zinoa", 94, 1],
+ ["aroa", -1, 1],
+ ["taroa", 96, 1],
+ ["zaroa", 96, 1],
+ ["eroa", -1, 1],
+ ["oroa", -1, 1],
+ ["osoa", -1, 1],
+ ["toa", -1, 1],
+ ["ttoa", 102, 1],
+ ["ztoa", 102, 1],
+ ["txoa", -1, 1],
+ ["tzoa", -1, 1],
+ ["\u00F1oa", -1, 1],
+ ["ra", -1, 1],
+ ["ara", 108, 1],
+ ["dara", 109, 1],
+ ["liara", 109, 1],
+ ["tiara", 109, 1],
+ ["tara", 109, 1],
+ ["etara", 113, 1],
+ ["tzara", 109, 1],
+ ["bera", 108, 1],
+ ["kera", 108, 1],
+ ["pera", 108, 1],
+ ["ora", 108, 2],
+ ["tzarra", 108, 1],
+ ["korra", 108, 1],
+ ["tra", 108, 1],
+ ["sa", -1, 1],
+ ["osa", 123, 1],
+ ["ta", -1, 1],
+ ["eta", 125, 1],
+ ["keta", 126, 1],
+ ["sta", 125, 1],
+ ["dua", -1, 1],
+ ["mendua", 129, 1],
+ ["ordua", 129, 1],
+ ["lekua", -1, 1],
+ ["burua", -1, 1],
+ ["durua", -1, 1],
+ ["tsua", -1, 1],
+ ["tua", -1, 1],
+ ["mentua", 136, 1],
+ ["estua", 136, 1],
+ ["txua", -1, 1],
+ ["zua", -1, 1],
+ ["tzua", 140, 1],
+ ["za", -1, 1],
+ ["eza", 142, 1],
+ ["eroza", 142, 1],
+ ["tza", 142, 2],
+ ["koitza", 145, 1],
+ ["antza", 145, 1],
+ ["gintza", 145, 1],
+ ["kintza", 145, 1],
+ ["kuntza", 145, 1],
+ ["gabe", -1, 1],
+ ["kabe", -1, 1],
+ ["kide", -1, 1],
+ ["alde", -1, 1],
+ ["kalde", 154, 1],
+ ["talde", 154, 1],
+ ["orde", -1, 1],
+ ["ge", -1, 1],
+ ["zale", -1, 1],
+ ["tzale", 159, 1],
+ ["gile", -1, 1],
+ ["eme", -1, 1],
+ ["kume", -1, 1],
+ ["ne", -1, 1],
+ ["zione", 164, 1],
+ ["une", 164, 1],
+ ["gune", 166, 1],
+ ["pe", -1, 1],
+ ["aurre", -1, 1],
+ ["te", -1, 1],
+ ["kote", 170, 1],
+ ["arte", 170, 1],
+ ["oste", 170, 1],
+ ["etxe", -1, 1],
+ ["gai", -1, 1],
+ ["di", -1, 1],
+ ["aldi", 176, 1],
+ ["taldi", 177, 1],
+ ["geldi", 176, -1],
+ ["handi", 176, 1],
+ ["mendi", 176, 1],
+ ["gei", -1, 1],
+ ["egi", -1, 1],
+ ["degi", 183, 1],
+ ["tegi", 183, 1],
+ ["nahi", -1, 1],
+ ["ohi", -1, 1],
+ ["ki", -1, 1],
+ ["toki", 188, 1],
+ ["oi", -1, 1],
+ ["goi", 190, 1],
+ ["koi", 190, 1],
+ ["ari", -1, 1],
+ ["kari", 193, 1],
+ ["lari", 193, 1],
+ ["tari", 193, 1],
+ ["garri", -1, 2],
+ ["larri", -1, 1],
+ ["kirri", -1, 1],
+ ["duri", -1, 1],
+ ["asi", -1, 1],
+ ["ti", -1, 1],
+ ["ontzi", -1, 1],
+ ["\u00F1i", -1, 1],
+ ["ak", -1, 1],
+ ["ek", -1, 1],
+ ["tarik", -1, 1],
+ ["gibel", -1, 1],
+ ["ail", -1, 1],
+ ["kail", 209, 1],
+ ["kan", -1, 1],
+ ["tan", -1, 1],
+ ["etan", 212, 1],
+ ["en", -1, 4],
+ ["ren", 214, 2],
+ ["garren", 215, 1],
+ ["gerren", 215, 1],
+ ["urren", 215, 1],
+ ["ten", 214, 4],
+ ["tzen", 214, 4],
+ ["zain", -1, 1],
+ ["tzain", 221, 1],
+ ["kin", -1, 1],
+ ["min", -1, 1],
+ ["dun", -1, 1],
+ ["asun", -1, 1],
+ ["tasun", 226, 1],
+ ["aizun", -1, 1],
+ ["ondo", -1, 1],
+ ["kondo", 229, 1],
+ ["go", -1, 1],
+ ["ngo", 231, 1],
+ ["zio", -1, 1],
+ ["ko", -1, 1],
+ ["trako", 234, 5],
+ ["tako", 234, 1],
+ ["etako", 236, 1],
+ ["eko", 234, 1],
+ ["tariko", 234, 1],
+ ["sko", 234, 1],
+ ["tuko", 234, 1],
+ ["minutuko", 241, 6],
+ ["zko", 234, 1],
+ ["no", -1, 1],
+ ["zino", 244, 1],
+ ["ro", -1, 1],
+ ["aro", 246, 1],
+ ["igaro", 247, -1],
+ ["taro", 247, 1],
+ ["zaro", 247, 1],
+ ["ero", 246, 1],
+ ["giro", 246, 1],
+ ["oro", 246, 1],
+ ["oso", -1, 1],
+ ["to", -1, 1],
+ ["tto", 255, 1],
+ ["zto", 255, 1],
+ ["txo", -1, 1],
+ ["tzo", -1, 1],
+ ["gintzo", 259, 1],
+ ["\u00F1o", -1, 1],
+ ["zp", -1, 1],
+ ["ar", -1, 1],
+ ["dar", 263, 1],
+ ["behar", 263, 1],
+ ["zehar", 263, -1],
+ ["liar", 263, 1],
+ ["tiar", 263, 1],
+ ["tar", 263, 1],
+ ["tzar", 263, 1],
+ ["or", -1, 2],
+ ["kor", 271, 1],
+ ["os", -1, 1],
+ ["ket", -1, 1],
+ ["du", -1, 1],
+ ["mendu", 275, 1],
+ ["ordu", 275, 1],
+ ["leku", -1, 1],
+ ["buru", -1, 2],
+ ["duru", -1, 1],
+ ["tsu", -1, 1],
+ ["tu", -1, 1],
+ ["tatu", 282, 4],
+ ["mentu", 282, 1],
+ ["estu", 282, 1],
+ ["txu", -1, 1],
+ ["zu", -1, 1],
+ ["tzu", 287, 1],
+ ["gintzu", 288, 1],
+ ["z", -1, 1],
+ ["ez", 290, 1],
+ ["eroz", 290, 1],
+ ["tz", 290, 1],
+ ["koitz", 293, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["zlea", -1, 2],
+ ["keria", -1, 1],
+ ["la", -1, 1],
+ ["era", -1, 1],
+ ["dade", -1, 1],
+ ["tade", -1, 1],
+ ["date", -1, 1],
+ ["tate", -1, 1],
+ ["gi", -1, 1],
+ ["ki", -1, 1],
+ ["ik", -1, 1],
+ ["lanik", 10, 1],
+ ["rik", 10, 1],
+ ["larik", 12, 1],
+ ["ztik", 10, 1],
+ ["go", -1, 1],
+ ["ro", -1, 1],
+ ["ero", 16, 1],
+ ["to", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16];
+
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+ var /** number */ I_pV = 0;
+
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_pV = base.limit;
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
+ if (!(base.in_grouping(g_v, 97, 117)))
+ {
+ break lab2;
+ }
+ lab3: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab4: {
+ if (!(base.out_grouping(g_v, 97, 117)))
+ {
+ break lab4;
+ }
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab4;
+ }
+ base.cursor++;
+ break lab3;
+ }
+ base.cursor = v_3;
+ if (!(base.in_grouping(g_v, 97, 117)))
+ {
+ break lab2;
+ }
+ if (!base.go_in_grouping(g_v, 97, 117))
+ {
+ break lab2;
+ }
+ base.cursor++;
+ }
+ break lab1;
+ }
+ base.cursor = v_2;
+ if (!(base.out_grouping(g_v, 97, 117)))
+ {
+ break lab0;
+ }
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
+ if (!(base.out_grouping(g_v, 97, 117)))
+ {
+ break lab6;
+ }
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab6;
+ }
+ base.cursor++;
+ break lab5;
+ }
+ base.cursor = v_4;
+ if (!(base.in_grouping(g_v, 97, 117)))
+ {
+ break lab0;
+ }
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ }
+ }
+ I_pV = base.cursor;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab7;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 117))
+ {
+ break lab7;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab7;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 117))
+ {
+ break lab7;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_5;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_RV() {
+ return I_pV <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_aditzak() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_0);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_RV())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_izenak() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_RV())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("jok"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("tra"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("minutu"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_adjetiboak() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_RV())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("z"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!r_aditzak())
+ {
+ break lab0;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_1;
+ break;
+ }
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!r_izenak())
+ {
+ break lab1;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_2;
+ break;
+ }
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_adjetiboak();
+ base.cursor = base.limit - v_3;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/catalan-stemmer.js b/sphinx/search/non-minified-js/catalan-stemmer.js
new file mode 100644
index 00000000000..441e655adda
--- /dev/null
+++ b/sphinx/search/non-minified-js/catalan-stemmer.js
@@ -0,0 +1,886 @@
+// Generated from catalan.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var CatalanStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["", -1, 7],
+ ["\u00B7", 0, 6],
+ ["\u00E0", 0, 1],
+ ["\u00E1", 0, 1],
+ ["\u00E8", 0, 2],
+ ["\u00E9", 0, 2],
+ ["\u00EC", 0, 3],
+ ["\u00ED", 0, 3],
+ ["\u00EF", 0, 3],
+ ["\u00F2", 0, 4],
+ ["\u00F3", 0, 4],
+ ["\u00FA", 0, 5],
+ ["\u00FC", 0, 5]
+ ];
+
+ /** @const */ var a_1 = [
+ ["la", -1, 1],
+ ["-la", 0, 1],
+ ["sela", 0, 1],
+ ["le", -1, 1],
+ ["me", -1, 1],
+ ["-me", 4, 1],
+ ["se", -1, 1],
+ ["-te", -1, 1],
+ ["hi", -1, 1],
+ ["'hi", 8, 1],
+ ["li", -1, 1],
+ ["-li", 10, 1],
+ ["'l", -1, 1],
+ ["'m", -1, 1],
+ ["-m", -1, 1],
+ ["'n", -1, 1],
+ ["-n", -1, 1],
+ ["ho", -1, 1],
+ ["'ho", 17, 1],
+ ["lo", -1, 1],
+ ["selo", 19, 1],
+ ["'s", -1, 1],
+ ["las", -1, 1],
+ ["selas", 22, 1],
+ ["les", -1, 1],
+ ["-les", 24, 1],
+ ["'ls", -1, 1],
+ ["-ls", -1, 1],
+ ["'ns", -1, 1],
+ ["-ns", -1, 1],
+ ["ens", -1, 1],
+ ["los", -1, 1],
+ ["selos", 31, 1],
+ ["nos", -1, 1],
+ ["-nos", 33, 1],
+ ["vos", -1, 1],
+ ["us", -1, 1],
+ ["-us", 36, 1],
+ ["'t", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["ica", -1, 4],
+ ["l\u00F3gica", 0, 3],
+ ["enca", -1, 1],
+ ["ada", -1, 2],
+ ["ancia", -1, 1],
+ ["encia", -1, 1],
+ ["\u00E8ncia", -1, 1],
+ ["\u00EDcia", -1, 1],
+ ["logia", -1, 3],
+ ["inia", -1, 1],
+ ["\u00EDinia", 9, 1],
+ ["eria", -1, 1],
+ ["\u00E0ria", -1, 1],
+ ["at\u00F2ria", -1, 1],
+ ["alla", -1, 1],
+ ["ella", -1, 1],
+ ["\u00EDvola", -1, 1],
+ ["ima", -1, 1],
+ ["\u00EDssima", 17, 1],
+ ["qu\u00EDssima", 18, 5],
+ ["ana", -1, 1],
+ ["ina", -1, 1],
+ ["era", -1, 1],
+ ["sfera", 22, 1],
+ ["ora", -1, 1],
+ ["dora", 24, 1],
+ ["adora", 25, 1],
+ ["adura", -1, 1],
+ ["esa", -1, 1],
+ ["osa", -1, 1],
+ ["assa", -1, 1],
+ ["essa", -1, 1],
+ ["issa", -1, 1],
+ ["eta", -1, 1],
+ ["ita", -1, 1],
+ ["ota", -1, 1],
+ ["ista", -1, 1],
+ ["ialista", 36, 1],
+ ["ionista", 36, 1],
+ ["iva", -1, 1],
+ ["ativa", 39, 1],
+ ["n\u00E7a", -1, 1],
+ ["log\u00EDa", -1, 3],
+ ["ic", -1, 4],
+ ["\u00EDstic", 43, 1],
+ ["enc", -1, 1],
+ ["esc", -1, 1],
+ ["ud", -1, 1],
+ ["atge", -1, 1],
+ ["ble", -1, 1],
+ ["able", 49, 1],
+ ["ible", 49, 1],
+ ["isme", -1, 1],
+ ["ialisme", 52, 1],
+ ["ionisme", 52, 1],
+ ["ivisme", 52, 1],
+ ["aire", -1, 1],
+ ["icte", -1, 1],
+ ["iste", -1, 1],
+ ["ici", -1, 1],
+ ["\u00EDci", -1, 1],
+ ["logi", -1, 3],
+ ["ari", -1, 1],
+ ["tori", -1, 1],
+ ["al", -1, 1],
+ ["il", -1, 1],
+ ["all", -1, 1],
+ ["ell", -1, 1],
+ ["\u00EDvol", -1, 1],
+ ["isam", -1, 1],
+ ["issem", -1, 1],
+ ["\u00ECssem", -1, 1],
+ ["\u00EDssem", -1, 1],
+ ["\u00EDssim", -1, 1],
+ ["qu\u00EDssim", 73, 5],
+ ["amen", -1, 1],
+ ["\u00ECssin", -1, 1],
+ ["ar", -1, 1],
+ ["ificar", 77, 1],
+ ["egar", 77, 1],
+ ["ejar", 77, 1],
+ ["itar", 77, 1],
+ ["itzar", 77, 1],
+ ["fer", -1, 1],
+ ["or", -1, 1],
+ ["dor", 84, 1],
+ ["dur", -1, 1],
+ ["doras", -1, 1],
+ ["ics", -1, 4],
+ ["l\u00F3gics", 88, 3],
+ ["uds", -1, 1],
+ ["nces", -1, 1],
+ ["ades", -1, 2],
+ ["ancies", -1, 1],
+ ["encies", -1, 1],
+ ["\u00E8ncies", -1, 1],
+ ["\u00EDcies", -1, 1],
+ ["logies", -1, 3],
+ ["inies", -1, 1],
+ ["\u00EDnies", -1, 1],
+ ["eries", -1, 1],
+ ["\u00E0ries", -1, 1],
+ ["at\u00F2ries", -1, 1],
+ ["bles", -1, 1],
+ ["ables", 103, 1],
+ ["ibles", 103, 1],
+ ["imes", -1, 1],
+ ["\u00EDssimes", 106, 1],
+ ["qu\u00EDssimes", 107, 5],
+ ["formes", -1, 1],
+ ["ismes", -1, 1],
+ ["ialismes", 110, 1],
+ ["ines", -1, 1],
+ ["eres", -1, 1],
+ ["ores", -1, 1],
+ ["dores", 114, 1],
+ ["idores", 115, 1],
+ ["dures", -1, 1],
+ ["eses", -1, 1],
+ ["oses", -1, 1],
+ ["asses", -1, 1],
+ ["ictes", -1, 1],
+ ["ites", -1, 1],
+ ["otes", -1, 1],
+ ["istes", -1, 1],
+ ["ialistes", 124, 1],
+ ["ionistes", 124, 1],
+ ["iques", -1, 4],
+ ["l\u00F3giques", 127, 3],
+ ["ives", -1, 1],
+ ["atives", 129, 1],
+ ["log\u00EDes", -1, 3],
+ ["alleng\u00FCes", -1, 1],
+ ["icis", -1, 1],
+ ["\u00EDcis", -1, 1],
+ ["logis", -1, 3],
+ ["aris", -1, 1],
+ ["toris", -1, 1],
+ ["ls", -1, 1],
+ ["als", 138, 1],
+ ["ells", 138, 1],
+ ["ims", -1, 1],
+ ["\u00EDssims", 141, 1],
+ ["qu\u00EDssims", 142, 5],
+ ["ions", -1, 1],
+ ["cions", 144, 1],
+ ["acions", 145, 2],
+ ["esos", -1, 1],
+ ["osos", -1, 1],
+ ["assos", -1, 1],
+ ["issos", -1, 1],
+ ["ers", -1, 1],
+ ["ors", -1, 1],
+ ["dors", 152, 1],
+ ["adors", 153, 1],
+ ["idors", 153, 1],
+ ["ats", -1, 1],
+ ["itats", 156, 1],
+ ["bilitats", 157, 1],
+ ["ivitats", 157, 1],
+ ["ativitats", 159, 1],
+ ["\u00EFtats", 156, 1],
+ ["ets", -1, 1],
+ ["ants", -1, 1],
+ ["ents", -1, 1],
+ ["ments", 164, 1],
+ ["aments", 165, 1],
+ ["ots", -1, 1],
+ ["uts", -1, 1],
+ ["ius", -1, 1],
+ ["trius", 169, 1],
+ ["atius", 169, 1],
+ ["\u00E8s", -1, 1],
+ ["\u00E9s", -1, 1],
+ ["\u00EDs", -1, 1],
+ ["d\u00EDs", 174, 1],
+ ["\u00F3s", -1, 1],
+ ["itat", -1, 1],
+ ["bilitat", 177, 1],
+ ["ivitat", 177, 1],
+ ["ativitat", 179, 1],
+ ["\u00EFtat", -1, 1],
+ ["et", -1, 1],
+ ["ant", -1, 1],
+ ["ent", -1, 1],
+ ["ient", 184, 1],
+ ["ment", 184, 1],
+ ["ament", 186, 1],
+ ["isament", 187, 1],
+ ["ot", -1, 1],
+ ["isseu", -1, 1],
+ ["\u00ECsseu", -1, 1],
+ ["\u00EDsseu", -1, 1],
+ ["triu", -1, 1],
+ ["\u00EDssiu", -1, 1],
+ ["atiu", -1, 1],
+ ["\u00F3", -1, 1],
+ ["i\u00F3", 196, 1],
+ ["ci\u00F3", 197, 1],
+ ["aci\u00F3", 198, 1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["aba", -1, 1],
+ ["esca", -1, 1],
+ ["isca", -1, 1],
+ ["\u00EFsca", -1, 1],
+ ["ada", -1, 1],
+ ["ida", -1, 1],
+ ["uda", -1, 1],
+ ["\u00EFda", -1, 1],
+ ["ia", -1, 1],
+ ["aria", 8, 1],
+ ["iria", 8, 1],
+ ["ara", -1, 1],
+ ["iera", -1, 1],
+ ["ira", -1, 1],
+ ["adora", -1, 1],
+ ["\u00EFra", -1, 1],
+ ["ava", -1, 1],
+ ["ixa", -1, 1],
+ ["itza", -1, 1],
+ ["\u00EDa", -1, 1],
+ ["ar\u00EDa", 19, 1],
+ ["er\u00EDa", 19, 1],
+ ["ir\u00EDa", 19, 1],
+ ["\u00EFa", -1, 1],
+ ["isc", -1, 1],
+ ["\u00EFsc", -1, 1],
+ ["ad", -1, 1],
+ ["ed", -1, 1],
+ ["id", -1, 1],
+ ["ie", -1, 1],
+ ["re", -1, 1],
+ ["dre", 30, 1],
+ ["ase", -1, 1],
+ ["iese", -1, 1],
+ ["aste", -1, 1],
+ ["iste", -1, 1],
+ ["ii", -1, 1],
+ ["ini", -1, 1],
+ ["esqui", -1, 1],
+ ["eixi", -1, 1],
+ ["itzi", -1, 1],
+ ["am", -1, 1],
+ ["em", -1, 1],
+ ["arem", 42, 1],
+ ["irem", 42, 1],
+ ["\u00E0rem", 42, 1],
+ ["\u00EDrem", 42, 1],
+ ["\u00E0ssem", 42, 1],
+ ["\u00E9ssem", 42, 1],
+ ["iguem", 42, 1],
+ ["\u00EFguem", 42, 1],
+ ["avem", 42, 1],
+ ["\u00E0vem", 42, 1],
+ ["\u00E1vem", 42, 1],
+ ["ir\u00ECem", 42, 1],
+ ["\u00EDem", 42, 1],
+ ["ar\u00EDem", 55, 1],
+ ["ir\u00EDem", 55, 1],
+ ["assim", -1, 1],
+ ["essim", -1, 1],
+ ["issim", -1, 1],
+ ["\u00E0ssim", -1, 1],
+ ["\u00E8ssim", -1, 1],
+ ["\u00E9ssim", -1, 1],
+ ["\u00EDssim", -1, 1],
+ ["\u00EFm", -1, 1],
+ ["an", -1, 1],
+ ["aban", 66, 1],
+ ["arian", 66, 1],
+ ["aran", 66, 1],
+ ["ieran", 66, 1],
+ ["iran", 66, 1],
+ ["\u00EDan", 66, 1],
+ ["ar\u00EDan", 72, 1],
+ ["er\u00EDan", 72, 1],
+ ["ir\u00EDan", 72, 1],
+ ["en", -1, 1],
+ ["ien", 76, 1],
+ ["arien", 77, 1],
+ ["irien", 77, 1],
+ ["aren", 76, 1],
+ ["eren", 76, 1],
+ ["iren", 76, 1],
+ ["\u00E0ren", 76, 1],
+ ["\u00EFren", 76, 1],
+ ["asen", 76, 1],
+ ["iesen", 76, 1],
+ ["assen", 76, 1],
+ ["essen", 76, 1],
+ ["issen", 76, 1],
+ ["\u00E9ssen", 76, 1],
+ ["\u00EFssen", 76, 1],
+ ["esquen", 76, 1],
+ ["isquen", 76, 1],
+ ["\u00EFsquen", 76, 1],
+ ["aven", 76, 1],
+ ["ixen", 76, 1],
+ ["eixen", 96, 1],
+ ["\u00EFxen", 76, 1],
+ ["\u00EFen", 76, 1],
+ ["in", -1, 1],
+ ["inin", 100, 1],
+ ["sin", 100, 1],
+ ["isin", 102, 1],
+ ["assin", 102, 1],
+ ["essin", 102, 1],
+ ["issin", 102, 1],
+ ["\u00EFssin", 102, 1],
+ ["esquin", 100, 1],
+ ["eixin", 100, 1],
+ ["aron", -1, 1],
+ ["ieron", -1, 1],
+ ["ar\u00E1n", -1, 1],
+ ["er\u00E1n", -1, 1],
+ ["ir\u00E1n", -1, 1],
+ ["i\u00EFn", -1, 1],
+ ["ado", -1, 1],
+ ["ido", -1, 1],
+ ["ando", -1, 2],
+ ["iendo", -1, 1],
+ ["io", -1, 1],
+ ["ixo", -1, 1],
+ ["eixo", 121, 1],
+ ["\u00EFxo", -1, 1],
+ ["itzo", -1, 1],
+ ["ar", -1, 1],
+ ["tzar", 125, 1],
+ ["er", -1, 1],
+ ["eixer", 127, 1],
+ ["ir", -1, 1],
+ ["ador", -1, 1],
+ ["as", -1, 1],
+ ["abas", 131, 1],
+ ["adas", 131, 1],
+ ["idas", 131, 1],
+ ["aras", 131, 1],
+ ["ieras", 131, 1],
+ ["\u00EDas", 131, 1],
+ ["ar\u00EDas", 137, 1],
+ ["er\u00EDas", 137, 1],
+ ["ir\u00EDas", 137, 1],
+ ["ids", -1, 1],
+ ["es", -1, 1],
+ ["ades", 142, 1],
+ ["ides", 142, 1],
+ ["udes", 142, 1],
+ ["\u00EFdes", 142, 1],
+ ["atges", 142, 1],
+ ["ies", 142, 1],
+ ["aries", 148, 1],
+ ["iries", 148, 1],
+ ["ares", 142, 1],
+ ["ires", 142, 1],
+ ["adores", 142, 1],
+ ["\u00EFres", 142, 1],
+ ["ases", 142, 1],
+ ["ieses", 142, 1],
+ ["asses", 142, 1],
+ ["esses", 142, 1],
+ ["isses", 142, 1],
+ ["\u00EFsses", 142, 1],
+ ["ques", 142, 1],
+ ["esques", 161, 1],
+ ["\u00EFsques", 161, 1],
+ ["aves", 142, 1],
+ ["ixes", 142, 1],
+ ["eixes", 165, 1],
+ ["\u00EFxes", 142, 1],
+ ["\u00EFes", 142, 1],
+ ["abais", -1, 1],
+ ["arais", -1, 1],
+ ["ierais", -1, 1],
+ ["\u00EDais", -1, 1],
+ ["ar\u00EDais", 172, 1],
+ ["er\u00EDais", 172, 1],
+ ["ir\u00EDais", 172, 1],
+ ["aseis", -1, 1],
+ ["ieseis", -1, 1],
+ ["asteis", -1, 1],
+ ["isteis", -1, 1],
+ ["inis", -1, 1],
+ ["sis", -1, 1],
+ ["isis", 181, 1],
+ ["assis", 181, 1],
+ ["essis", 181, 1],
+ ["issis", 181, 1],
+ ["\u00EFssis", 181, 1],
+ ["esquis", -1, 1],
+ ["eixis", -1, 1],
+ ["itzis", -1, 1],
+ ["\u00E1is", -1, 1],
+ ["ar\u00E9is", -1, 1],
+ ["er\u00E9is", -1, 1],
+ ["ir\u00E9is", -1, 1],
+ ["ams", -1, 1],
+ ["ados", -1, 1],
+ ["idos", -1, 1],
+ ["amos", -1, 1],
+ ["\u00E1bamos", 197, 1],
+ ["\u00E1ramos", 197, 1],
+ ["i\u00E9ramos", 197, 1],
+ ["\u00EDamos", 197, 1],
+ ["ar\u00EDamos", 201, 1],
+ ["er\u00EDamos", 201, 1],
+ ["ir\u00EDamos", 201, 1],
+ ["aremos", -1, 1],
+ ["eremos", -1, 1],
+ ["iremos", -1, 1],
+ ["\u00E1semos", -1, 1],
+ ["i\u00E9semos", -1, 1],
+ ["imos", -1, 1],
+ ["adors", -1, 1],
+ ["ass", -1, 1],
+ ["erass", 212, 1],
+ ["ess", -1, 1],
+ ["ats", -1, 1],
+ ["its", -1, 1],
+ ["ents", -1, 1],
+ ["\u00E0s", -1, 1],
+ ["ar\u00E0s", 218, 1],
+ ["ir\u00E0s", 218, 1],
+ ["ar\u00E1s", -1, 1],
+ ["er\u00E1s", -1, 1],
+ ["ir\u00E1s", -1, 1],
+ ["\u00E9s", -1, 1],
+ ["ar\u00E9s", 224, 1],
+ ["\u00EDs", -1, 1],
+ ["i\u00EFs", -1, 1],
+ ["at", -1, 1],
+ ["it", -1, 1],
+ ["ant", -1, 1],
+ ["ent", -1, 1],
+ ["int", -1, 1],
+ ["ut", -1, 1],
+ ["\u00EFt", -1, 1],
+ ["au", -1, 1],
+ ["erau", 235, 1],
+ ["ieu", -1, 1],
+ ["ineu", -1, 1],
+ ["areu", -1, 1],
+ ["ireu", -1, 1],
+ ["\u00E0reu", -1, 1],
+ ["\u00EDreu", -1, 1],
+ ["asseu", -1, 1],
+ ["esseu", -1, 1],
+ ["eresseu", 244, 1],
+ ["\u00E0sseu", -1, 1],
+ ["\u00E9sseu", -1, 1],
+ ["igueu", -1, 1],
+ ["\u00EFgueu", -1, 1],
+ ["\u00E0veu", -1, 1],
+ ["\u00E1veu", -1, 1],
+ ["itzeu", -1, 1],
+ ["\u00ECeu", -1, 1],
+ ["ir\u00ECeu", 253, 1],
+ ["\u00EDeu", -1, 1],
+ ["ar\u00EDeu", 255, 1],
+ ["ir\u00EDeu", 255, 1],
+ ["assiu", -1, 1],
+ ["issiu", -1, 1],
+ ["\u00E0ssiu", -1, 1],
+ ["\u00E8ssiu", -1, 1],
+ ["\u00E9ssiu", -1, 1],
+ ["\u00EDssiu", -1, 1],
+ ["\u00EFu", -1, 1],
+ ["ix", -1, 1],
+ ["eix", 265, 1],
+ ["\u00EFx", -1, 1],
+ ["itz", -1, 1],
+ ["i\u00E0", -1, 1],
+ ["ar\u00E0", -1, 1],
+ ["ir\u00E0", -1, 1],
+ ["itz\u00E0", -1, 1],
+ ["ar\u00E1", -1, 1],
+ ["er\u00E1", -1, 1],
+ ["ir\u00E1", -1, 1],
+ ["ir\u00E8", -1, 1],
+ ["ar\u00E9", -1, 1],
+ ["er\u00E9", -1, 1],
+ ["ir\u00E9", -1, 1],
+ ["\u00ED", -1, 1],
+ ["i\u00EF", -1, 1],
+ ["i\u00F3", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["a", -1, 1],
+ ["e", -1, 1],
+ ["i", -1, 1],
+ ["\u00EFn", -1, 1],
+ ["o", -1, 1],
+ ["ir", -1, 1],
+ ["s", -1, 1],
+ ["is", 6, 1],
+ ["os", 6, 1],
+ ["\u00EFs", 6, 1],
+ ["it", -1, 1],
+ ["eu", -1, 1],
+ ["iu", -1, 1],
+ ["iqu", -1, 2],
+ ["itz", -1, 1],
+ ["\u00E0", -1, 1],
+ ["\u00E1", -1, 1],
+ ["\u00E9", -1, 1],
+ ["\u00EC", -1, 1],
+ ["\u00ED", -1, 1],
+ ["\u00EF", -1, 1],
+ ["\u00F3", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 129, 81, 6, 10];
+
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!base.go_out_grouping(g_v, 97, 252))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ if (!base.go_out_grouping(g_v, 97, 252))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_cleaning() {
+ var /** number */ among_var;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("."))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_attached_pronoun() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_standard_suffix() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_from("log"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_from("ic"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("c"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb_suffix() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_residual_suffix() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("ic"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_attached_pronoun();
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!r_standard_suffix())
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (!r_verb_suffix())
+ {
+ break lab0;
+ }
+ }
+ }
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_residual_suffix();
+ base.cursor = base.limit - v_4;
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ r_cleaning();
+ base.cursor = v_5;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/danish-stemmer.js b/sphinx/search/non-minified-js/danish-stemmer.js
index 46b5d55b0e1..b0867495bbf 100644
--- a/sphinx/search/non-minified-js/danish-stemmer.js
+++ b/sphinx/search/non-minified-js/danish-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from danish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-DanishStemmer = function() {
+var DanishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["hed", -1, 1],
["ethed", 0, 1],
@@ -67,9 +68,9 @@ DanishStemmer = function() {
/** @return {boolean} */
function r_mark_regions() {
I_p1 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- var /** number */ c1 = base.cursor + 3;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
if (c1 > base.limit)
{
return false;
@@ -78,44 +79,21 @@ DanishStemmer = function() {
}
I_x = base.cursor;
base.cursor = v_1;
- golab0: while(true)
+ if (!base.go_out_grouping(g_v, 97, 248))
{
- var /** number */ v_2 = base.cursor;
- lab1: {
- if (!(base.in_grouping(g_v, 97, 248)))
- {
- break lab1;
- }
- base.cursor = v_2;
- break golab0;
- }
- base.cursor = v_2;
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 248))
{
- lab3: {
- if (!(base.out_grouping(g_v, 97, 248)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < I_x))
+ lab0: {
+ if (I_p1 >= I_x)
{
- break lab4;
+ break lab0;
}
I_p1 = I_x;
}
@@ -129,17 +107,17 @@ DanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_0);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!base.slice_del())
@@ -163,21 +141,21 @@ DanishStemmer = function() {
/** @return {boolean} */
function r_consonant_pair() {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
if (base.cursor < I_p1)
{
return false;
}
- var /** number */ v_3 = base.limit_backward;
+ /** @const */ var /** number */ v_2 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
if (base.find_among_b(a_1) == 0)
{
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
base.cursor = base.limit - v_1;
if (base.cursor <= base.limit_backward)
{
@@ -195,7 +173,7 @@ DanishStemmer = function() {
/** @return {boolean} */
function r_other_suffix() {
var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("st")))
@@ -217,26 +195,26 @@ DanishStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit_backward;
+ /** @const */ var /** number */ v_2 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_2);
if (among_var == 0)
{
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
switch (among_var) {
case 1:
if (!base.slice_del())
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_consonant_pair();
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
break;
case 2:
if (!base.slice_from("l\u00F8s"))
@@ -254,12 +232,12 @@ DanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
if (!(base.in_grouping_b(g_c, 98, 122)))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
@@ -268,7 +246,7 @@ DanishStemmer = function() {
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (!(base.eq_s_b(S_ch)))
{
return false;
@@ -281,20 +259,20 @@ DanishStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_main_suffix();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_consonant_pair();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_other_suffix();
base.cursor = base.limit - v_4;
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
r_undouble();
base.cursor = base.limit - v_5;
base.cursor = base.limit_backward;
diff --git a/sphinx/search/non-minified-js/dutch-stemmer.js b/sphinx/search/non-minified-js/dutch-stemmer.js
index 0ad11e212cc..50e53e7b510 100644
--- a/sphinx/search/non-minified-js/dutch-stemmer.js
+++ b/sphinx/search/non-minified-js/dutch-stemmer.js
@@ -1,678 +1,1960 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from dutch.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-DutchStemmer = function() {
+var DutchStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
- ["", -1, 6],
- ["\u00E1", 0, 1],
- ["\u00E4", 0, 1],
- ["\u00E9", 0, 2],
- ["\u00EB", 0, 2],
- ["\u00ED", 0, 3],
- ["\u00EF", 0, 3],
- ["\u00F3", 0, 4],
- ["\u00F6", 0, 4],
- ["\u00FA", 0, 5],
- ["\u00FC", 0, 5]
+ ["a", -1, 1],
+ ["e", -1, 2],
+ ["o", -1, 1],
+ ["u", -1, 1],
+ ["\u00E0", -1, 1],
+ ["\u00E1", -1, 1],
+ ["\u00E2", -1, 1],
+ ["\u00E4", -1, 1],
+ ["\u00E8", -1, 2],
+ ["\u00E9", -1, 2],
+ ["\u00EA", -1, 2],
+ ["e\u00EB", -1, 3],
+ ["i\u00EB", -1, 4],
+ ["\u00F2", -1, 1],
+ ["\u00F3", -1, 1],
+ ["\u00F4", -1, 1],
+ ["\u00F6", -1, 1],
+ ["\u00F9", -1, 1],
+ ["\u00FA", -1, 1],
+ ["\u00FB", -1, 1],
+ ["\u00FC", -1, 1]
];
/** @const */ var a_1 = [
- ["", -1, 3],
- ["I", 0, 2],
- ["Y", 0, 1]
+ ["nde", -1, 8],
+ ["en", -1, 7],
+ ["s", -1, 2],
+ ["'s", 2, 1],
+ ["es", 2, 4],
+ ["ies", 4, 3],
+ ["aus", 2, 6],
+ ["\u00E9s", 2, 5]
];
/** @const */ var a_2 = [
- ["dd", -1, -1],
- ["kk", -1, -1],
- ["tt", -1, -1]
+ ["de", -1, 5],
+ ["ge", -1, 2],
+ ["ische", -1, 4],
+ ["je", -1, 1],
+ ["lijke", -1, 3],
+ ["le", -1, 9],
+ ["ene", -1, 10],
+ ["re", -1, 8],
+ ["se", -1, 7],
+ ["te", -1, 6],
+ ["ieve", -1, 11]
];
/** @const */ var a_3 = [
- ["ene", -1, 2],
- ["se", -1, 3],
- ["en", -1, 2],
- ["heden", 2, 1],
- ["s", -1, 3]
+ ["heid", -1, 3],
+ ["fie", -1, 7],
+ ["gie", -1, 8],
+ ["atie", -1, 1],
+ ["isme", -1, 5],
+ ["ing", -1, 5],
+ ["arij", -1, 6],
+ ["erij", -1, 5],
+ ["sel", -1, 3],
+ ["rder", -1, 4],
+ ["ster", -1, 3],
+ ["iteit", -1, 2],
+ ["dst", -1, 10],
+ ["tst", -1, 9]
];
/** @const */ var a_4 = [
- ["end", -1, 1],
- ["ig", -1, 2],
- ["ing", -1, 1],
- ["lijk", -1, 3],
- ["baar", -1, 4],
- ["bar", -1, 5]
+ ["end", -1, 9],
+ ["atief", -1, 2],
+ ["erig", -1, 9],
+ ["achtig", -1, 3],
+ ["ioneel", -1, 1],
+ ["baar", -1, 3],
+ ["laar", -1, 5],
+ ["naar", -1, 4],
+ ["raar", -1, 6],
+ ["eriger", -1, 9],
+ ["achtiger", -1, 3],
+ ["lijker", -1, 8],
+ ["tant", -1, 7],
+ ["erigst", -1, 9],
+ ["achtigst", -1, 3],
+ ["lijkst", -1, 8]
];
/** @const */ var a_5 = [
- ["aa", -1, -1],
- ["ee", -1, -1],
- ["oo", -1, -1],
- ["uu", -1, -1]
+ ["ig", -1, 1],
+ ["iger", -1, 1],
+ ["igst", -1, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["ft", -1, 2],
+ ["kt", -1, 1],
+ ["pt", -1, 3]
+ ];
+
+ /** @const */ var a_7 = [
+ ["bb", -1, 1],
+ ["cc", -1, 2],
+ ["dd", -1, 3],
+ ["ff", -1, 4],
+ ["gg", -1, 5],
+ ["hh", -1, 6],
+ ["jj", -1, 7],
+ ["kk", -1, 8],
+ ["ll", -1, 9],
+ ["mm", -1, 10],
+ ["nn", -1, 11],
+ ["pp", -1, 12],
+ ["qq", -1, 13],
+ ["rr", -1, 14],
+ ["ss", -1, 15],
+ ["tt", -1, 16],
+ ["v", -1, 4],
+ ["vv", 16, 17],
+ ["ww", -1, 18],
+ ["xx", -1, 19],
+ ["z", -1, 15],
+ ["zz", 20, 20]
+ ];
+
+ /** @const */ var a_8 = [
+ ["d", -1, 1],
+ ["t", -1, 2]
+ ];
+
+ /** @const */ var a_9 = [
+ ["", -1, -1],
+ ["eft", 0, 1],
+ ["vaa", 0, 1],
+ ["val", 0, 1],
+ ["vali", 3, -1],
+ ["vare", 0, 1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["\u00EB", -1, 1],
+ ["\u00EF", -1, 2]
+ ];
+
+ /** @const */ var a_11 = [
+ ["\u00EB", -1, 1],
+ ["\u00EF", -1, 2]
];
- /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+ /** @const */ var /** Array */ g_E = [1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 120];
+
+ /** @const */ var /** Array */ g_AIOU = [1, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 11, 120, 46, 15];
- /** @const */ var /** Array */ g_v_I = [1, 0, 0, 17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+ /** @const */ var /** Array */ g_AEIOU = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 139, 127, 46, 15];
- /** @const */ var /** Array */ g_v_j = [17, 67, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 139, 127, 46, 15];
+ /** @const */ var /** Array */ g_v_WX = [17, 65, 208, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 139, 127, 46, 15];
+
+ var /** boolean */ B_GE_removed = false;
+ var /** boolean */ B_stemmed = false;
var /** number */ I_p2 = 0;
var /** number */ I_p1 = 0;
- var /** boolean */ B_e_found = false;
+ var /** string */ S_ch = '';
/** @return {boolean} */
- function r_prelude() {
- var /** number */ among_var;
- var /** number */ v_1 = base.cursor;
- while(true)
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_V() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.in_grouping_b(g_v, 97, 252)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.eq_s_b("ij")))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_VX() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (base.cursor <= base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor--;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.in_grouping_b(g_v, 97, 252)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.eq_s_b("ij")))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_C() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
- base.bra = base.cursor;
- among_var = base.find_among(a_0);
- if (among_var == 0)
+ if (!(base.eq_s_b("ij")))
{
break lab0;
}
- base.ket = base.cursor;
- switch (among_var) {
- case 1:
- if (!base.slice_from("a"))
- {
- return false;
- }
- break;
- case 2:
- if (!base.slice_from("e"))
- {
- return false;
- }
- break;
- case 3:
- if (!base.slice_from("i"))
- {
- return false;
- }
- break;
- case 4:
- if (!base.slice_from("o"))
- {
- return false;
- }
- break;
- case 5:
- if (!base.slice_from("u"))
- {
- return false;
- }
- break;
- case 6:
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
- break;
- }
- continue;
+ return false;
}
- base.cursor = v_2;
- break;
+ base.cursor = base.limit - v_2;
}
- base.cursor = v_1;
- var /** number */ v_3 = base.cursor;
- lab1: {
- base.bra = base.cursor;
- if (!(base.eq_s("y")))
+ if (!(base.out_grouping_b(g_v, 97, 252)))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_lengthen_V() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.out_grouping_b(g_v_WX, 97, 252)))
{
- base.cursor = v_3;
- break lab1;
+ break lab0;
}
base.ket = base.cursor;
- if (!base.slice_from("Y"))
+ among_var = base.find_among_b(a_0);
+ if (among_var == 0)
{
- return false;
+ break lab0;
}
- }
- while(true)
- {
- var /** number */ v_4 = base.cursor;
- lab2: {
- golab3: while(true)
- {
- var /** number */ v_5 = base.cursor;
- lab4: {
- if (!(base.in_grouping(g_v, 97, 232)))
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.out_grouping_b(g_AEIOU, 97, 252)))
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (base.cursor > base.limit_backward)
{
- break lab4;
+ break lab0;
}
- base.bra = base.cursor;
+ }
+ base.cursor = base.limit - v_2;
+ S_ch = base.slice_to();
+ if (S_ch == '')
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ c1 = base.cursor;
+ base.insert(base.cursor, base.cursor, S_ch);
+ base.cursor = c1;
+ }
+ break;
+ case 2:
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ if (!(base.out_grouping_b(g_AEIOU, 97, 252)))
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_5;
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ }
+ {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab5: {
- var /** number */ v_6 = base.cursor;
lab6: {
- if (!(base.eq_s("i")))
- {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab7: {
+ if (!(base.in_grouping_b(g_AIOU, 97, 252)))
+ {
+ break lab7;
+ }
break lab6;
}
- base.ket = base.cursor;
- if (!(base.in_grouping(g_v, 97, 232)))
+ base.cursor = base.limit - v_7;
+ if (!(base.in_grouping_b(g_E, 101, 235)))
{
- break lab6;
+ break lab5;
}
- if (!base.slice_from("I"))
+ if (base.cursor > base.limit_backward)
{
- return false;
+ break lab5;
}
- break lab5;
}
- base.cursor = v_6;
- if (!(base.eq_s("y")))
+ break lab0;
+ }
+ base.cursor = base.limit - v_6;
+ }
+ {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab8: {
+ if (base.cursor <= base.limit_backward)
{
- break lab4;
+ break lab8;
+ }
+ base.cursor--;
+ if (!(base.in_grouping_b(g_AIOU, 97, 252)))
+ {
+ break lab8;
}
- base.ket = base.cursor;
- if (!base.slice_from("Y"))
+ if (!(base.out_grouping_b(g_AEIOU, 97, 252)))
{
- return false;
+ break lab8;
}
+ break lab0;
}
- base.cursor = v_5;
- break golab3;
+ base.cursor = base.limit - v_8;
}
- base.cursor = v_5;
- if (base.cursor >= base.limit)
+ base.cursor = base.limit - v_4;
+ S_ch = base.slice_to();
+ if (S_ch == '')
{
- break lab2;
+ return false;
}
- base.cursor++;
- }
- continue;
+ {
+ /** @const */ var /** number */ c2 = base.cursor;
+ base.insert(base.cursor, base.cursor, S_ch);
+ base.cursor = c2;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("e\u00EBe"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("iee"))
+ {
+ return false;
+ }
+ break;
}
- base.cursor = v_4;
- break;
}
+ base.cursor = base.limit - v_1;
return true;
};
/** @return {boolean} */
- function r_mark_regions() {
- I_p1 = base.limit;
- I_p2 = base.limit;
- golab0: while(true)
+ function r_Step_1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
{
- lab1: {
- if (!(base.in_grouping(g_v, 97, 232)))
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
{
- break lab1;
+ return false;
}
- break golab0;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
- golab2: while(true)
- {
- lab3: {
- if (!(base.out_grouping(g_v, 97, 232)))
+ break;
+ case 2:
+ if (!r_R1())
{
- break lab3;
+ return false;
}
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
- I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < 3))
- {
- break lab4;
- }
- I_p1 = 3;
- }
- golab5: while(true)
- {
- lab6: {
- if (!(base.in_grouping(g_v, 97, 232)))
{
- break lab6;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("t")))
+ {
+ break lab0;
+ }
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
}
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
- golab7: while(true)
- {
- lab8: {
- if (!(base.out_grouping(g_v, 97, 232)))
+ if (!r_C())
{
- break lab8;
+ return false;
}
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
- I_p2 = base.cursor;
- return true;
- };
-
- /** @return {boolean} */
- function r_postlude() {
- var /** number */ among_var;
- while(true)
- {
- var /** number */ v_1 = base.cursor;
- lab0: {
- base.bra = base.cursor;
- among_var = base.find_among(a_1);
- if (among_var == 0)
+ if (!base.slice_del())
{
- break lab0;
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("ie"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ if (!(base.eq_s_b("ar")))
+ {
+ break lab2;
+ }
+ if (!r_R1())
+ {
+ break lab2;
+ }
+ if (!r_C())
+ {
+ break lab2;
+ }
+ base.cursor = base.limit - v_3;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ if (!(base.eq_s_b("er")))
+ {
+ break lab3;
+ }
+ if (!r_R1())
+ {
+ break lab3;
+ }
+ if (!r_C())
+ {
+ break lab3;
+ }
+ base.cursor = base.limit - v_4;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ }
+ break;
+ case 5:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("\u00E9"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_V())
+ {
+ return false;
+ }
+ if (!base.slice_from("au"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ lab4: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab5: {
+ if (!(base.eq_s_b("hed")))
+ {
+ break lab5;
+ }
+ if (!r_R1())
+ {
+ break lab5;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("heid"))
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ lab6: {
+ if (!(base.eq_s_b("nd")))
+ {
+ break lab6;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ lab7: {
+ if (!(base.eq_s_b("d")))
+ {
+ break lab7;
+ }
+ if (!r_R1())
+ {
+ break lab7;
+ }
+ if (!r_C())
+ {
+ break lab7;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ lab8: {
+ lab9: {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab10: {
+ if (!(base.eq_s_b("i")))
+ {
+ break lab10;
+ }
+ break lab9;
+ }
+ base.cursor = base.limit - v_6;
+ if (!(base.eq_s_b("j")))
+ {
+ break lab8;
+ }
+ }
+ if (!r_V())
+ {
+ break lab8;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
+ }
+ break;
+ case 8:
+ if (!base.slice_from("nd"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_2() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("'t")))
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.eq_s_b("et")))
+ {
+ break lab2;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ break lab2;
+ }
+ if (!r_C())
+ {
+ break lab2;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab3: {
+ if (!(base.eq_s_b("rnt")))
+ {
+ break lab3;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("rn"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab4: {
+ if (!(base.eq_s_b("t")))
+ {
+ break lab4;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ break lab4;
+ }
+ if (!r_VX())
+ {
+ break lab4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab5: {
+ if (!(base.eq_s_b("ink")))
+ {
+ break lab5;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("ing"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab6: {
+ if (!(base.eq_s_b("mp")))
+ {
+ break lab6;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("m"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab7: {
+ if (!(base.eq_s_b("'")))
+ {
+ break lab7;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ break lab7;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("lijk"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("isch"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.insert(base.cursor, base.cursor, "l");
+ r_lengthen_V();
+ break;
+ case 10:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.insert(base.cursor, base.cursor, "en");
+ r_lengthen_V();
+ break;
+ case 11:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("ief"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_3() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from("eer"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("ild")))
+ {
+ break lab1;
+ }
+ if (!base.slice_from("er"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
+ }
+ break;
+ case 6:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("aar"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.insert(base.cursor, base.cursor, "f");
+ r_lengthen_V();
+ break;
+ case 8:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.insert(base.cursor, base.cursor, "g");
+ r_lengthen_V();
+ break;
+ case 9:
+ if (!r_R1())
+ {
+ return false;
}
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_4() {
+ var /** number */ among_var;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
switch (among_var) {
case 1:
- if (!base.slice_from("y"))
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("ie"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("eer"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!r_V())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!r_V())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("l"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!r_V())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("teer"))
{
return false;
}
break;
- case 2:
- if (!base.slice_from("i"))
+ case 8:
+ if (!r_R1())
+ {
+ break lab1;
+ }
+ if (!base.slice_from("lijk"))
{
return false;
}
break;
- case 3:
- if (base.cursor >= base.limit)
+ case 9:
+ if (!r_R1())
{
- break lab0;
+ break lab1;
+ }
+ if (!r_C())
+ {
+ break lab1;
+ }
+ if (!base.slice_del())
+ {
+ return false;
}
- base.cursor++;
+ r_lengthen_V();
break;
}
- continue;
+ break lab0;
}
- base.cursor = v_1;
- break;
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_5) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("inn")))
+ {
+ break lab2;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_2;
+ }
+ if (!r_C())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ r_lengthen_V();
}
return true;
};
/** @return {boolean} */
- function r_R1() {
- if (!(I_p1 <= base.cursor))
+ function r_Step_7() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_6);
+ if (among_var == 0)
{
return false;
}
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("f"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ }
return true;
};
/** @return {boolean} */
- function r_R2() {
- if (!(I_p2 <= base.cursor))
+ function r_Step_6() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_7);
+ if (among_var == 0)
{
return false;
}
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("b"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("c"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("f"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("h"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("j"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("l"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("m"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("i")))
+ {
+ break lab0;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("q"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("v"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("w"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("x"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("z"))
+ {
+ return false;
+ }
+ break;
+ }
return true;
};
/** @return {boolean} */
- function r_undouble() {
- var /** number */ v_1 = base.limit - base.cursor;
- if (base.find_among_b(a_2) == 0)
+ function r_Step_1c() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_8);
+ if (among_var == 0)
{
return false;
}
- base.cursor = base.limit - v_1;
- base.ket = base.cursor;
- if (base.cursor <= base.limit_backward)
+ base.bra = base.cursor;
+ if (!r_R1())
{
return false;
}
- base.cursor--;
- base.bra = base.cursor;
- if (!base.slice_del())
+ if (!r_C())
{
return false;
}
+ switch (among_var) {
+ case 1:
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("n")))
+ {
+ break lab0;
+ }
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("in")))
+ {
+ break lab2;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("h")))
+ {
+ break lab3;
+ }
+ if (!r_R1())
+ {
+ break lab3;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab4: {
+ if (!(base.eq_s_b("en")))
+ {
+ break lab4;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab4;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
return true;
};
/** @return {boolean} */
- function r_e_ending() {
- B_e_found = false;
- base.ket = base.cursor;
- if (!(base.eq_s_b("e")))
+ function r_Lose_prefix() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ if (!(base.eq_s("ge")))
{
return false;
}
- base.bra = base.cursor;
- if (!r_R1())
+ base.ket = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- return false;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ return false;
+ }
+ base.cursor = c1;
}
- var /** number */ v_1 = base.limit - base.cursor;
- if (!(base.out_grouping_b(g_v, 97, 232)))
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ golab0: while(true)
{
- return false;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab1: {
+ lab2: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab3: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = v_4;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab1;
+ }
+ }
+ break golab0;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ return false;
+ }
+ base.cursor++;
}
- base.cursor = base.limit - v_1;
- if (!base.slice_del())
+ while(true)
{
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab4: {
+ lab5: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab6: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab6;
+ }
+ break lab5;
+ }
+ base.cursor = v_6;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab4;
+ }
+ }
+ continue;
+ }
+ base.cursor = v_5;
+ break;
+ }
+ lab7: {
+ if (base.cursor < base.limit)
+ {
+ break lab7;
+ }
return false;
}
- B_e_found = true;
- if (!r_undouble())
+ base.cursor = v_2;
+ among_var = base.find_among(a_9);
+ switch (among_var) {
+ case 1:
+ return false;
+ }
+ B_GE_removed = true;
+ if (!base.slice_del())
{
return false;
}
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab8: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_10);
+ if (among_var == 0)
+ {
+ break lab8;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = v_7;
return true;
};
/** @return {boolean} */
- function r_en_ending() {
- if (!r_R1())
+ function r_Lose_infix() {
+ var /** number */ among_var;
+ if (base.cursor >= base.limit)
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
- if (!(base.out_grouping_b(g_v, 97, 232)))
+ base.cursor++;
+ golab0: while(true)
{
- return false;
+ lab1: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("ge")))
+ {
+ break lab1;
+ }
+ base.ket = base.cursor;
+ break golab0;
+ }
+ if (base.cursor >= base.limit)
+ {
+ return false;
+ }
+ base.cursor++;
}
- base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ golab2: while(true)
{
- var /** number */ v_2 = base.limit - base.cursor;
- lab0: {
- if (!(base.eq_s_b("gem")))
- {
- break lab0;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ lab4: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab5: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab5;
+ }
+ break lab4;
+ }
+ base.cursor = v_4;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab3;
+ }
}
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
return false;
}
- base.cursor = base.limit - v_2;
+ base.cursor++;
}
- if (!base.slice_del())
+ while(true)
{
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab6: {
+ lab7: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab8: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab8;
+ }
+ break lab7;
+ }
+ base.cursor = v_6;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab6;
+ }
+ }
+ continue;
+ }
+ base.cursor = v_5;
+ break;
+ }
+ lab9: {
+ if (base.cursor < base.limit)
+ {
+ break lab9;
+ }
return false;
}
- if (!r_undouble())
+ base.cursor = v_2;
+ B_GE_removed = true;
+ if (!base.slice_del())
{
return false;
}
- return true;
- };
-
- /** @return {boolean} */
- function r_standard_suffix() {
- var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
- lab0: {
- base.ket = base.cursor;
- among_var = base.find_among_b(a_3);
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab10: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_11);
if (among_var == 0)
{
- break lab0;
+ break lab10;
}
- base.bra = base.cursor;
+ base.ket = base.cursor;
switch (among_var) {
case 1:
- if (!r_R1())
- {
- break lab0;
- }
- if (!base.slice_from("heid"))
+ if (!base.slice_from("e"))
{
return false;
}
break;
case 2:
- if (!r_en_ending())
+ if (!base.slice_from("i"))
{
- break lab0;
+ return false;
}
break;
- case 3:
- if (!r_R1())
- {
- break lab0;
- }
- if (!(base.out_grouping_b(g_v_j, 97, 232)))
+ }
+ }
+ base.cursor = v_7;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_measure() {
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ lab1: {
+ if (!(base.out_grouping(g_v, 97, 252)))
{
- break lab0;
+ break lab1;
}
- if (!base.slice_del())
- {
- return false;
+ continue;
+ }
+ break;
+ }
+ {
+ var v_2 = 1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = v_4;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab2;
+ }
+ }
+ v_2--;
+ continue;
}
+ base.cursor = v_3;
break;
+ }
+ if (v_2 > 0)
+ {
+ break lab0;
+ }
}
- }
- base.cursor = base.limit - v_1;
- var /** number */ v_2 = base.limit - base.cursor;
- r_e_ending();
- base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
- lab1: {
- base.ket = base.cursor;
- if (!(base.eq_s_b("heid")))
+ if (!(base.out_grouping(g_v, 97, 252)))
{
- break lab1;
+ break lab0;
}
- base.bra = base.cursor;
- if (!r_R2())
+ I_p1 = base.cursor;
+ while(true)
{
- break lab1;
+ lab5: {
+ if (!(base.out_grouping(g_v, 97, 252)))
+ {
+ break lab5;
+ }
+ continue;
+ }
+ break;
}
{
- var /** number */ v_4 = base.limit - base.cursor;
- lab2: {
- if (!(base.eq_s_b("c")))
- {
- break lab2;
+ var v_5 = 1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab6: {
+ lab7: {
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab8: {
+ if (!(base.eq_s("ij")))
+ {
+ break lab8;
+ }
+ break lab7;
+ }
+ base.cursor = v_7;
+ if (!(base.in_grouping(g_v, 97, 252)))
+ {
+ break lab6;
+ }
+ }
+ v_5--;
+ continue;
}
- break lab1;
+ base.cursor = v_6;
+ break;
+ }
+ if (v_5 > 0)
+ {
+ break lab0;
}
- base.cursor = base.limit - v_4;
}
- if (!base.slice_del())
+ if (!(base.out_grouping(g_v, 97, 252)))
{
- return false;
+ break lab0;
}
- base.ket = base.cursor;
- if (!(base.eq_s_b("en")))
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ B_stemmed = false;
+ r_measure();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!r_Step_1())
{
- break lab1;
+ break lab0;
}
- base.bra = base.cursor;
- if (!r_en_ending())
+ B_stemmed = true;
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!r_Step_2())
{
break lab1;
}
+ B_stemmed = true;
+ }
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!r_Step_3())
+ {
+ break lab2;
+ }
+ B_stemmed = true;
}
base.cursor = base.limit - v_3;
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
- base.ket = base.cursor;
- among_var = base.find_among_b(a_4);
- if (among_var == 0)
+ if (!r_Step_4())
{
break lab3;
}
- base.bra = base.cursor;
- switch (among_var) {
- case 1:
- if (!r_R2())
- {
- break lab3;
- }
- if (!base.slice_del())
- {
- return false;
- }
- lab4: {
- var /** number */ v_6 = base.limit - base.cursor;
- lab5: {
- base.ket = base.cursor;
- if (!(base.eq_s_b("ig")))
- {
- break lab5;
- }
- base.bra = base.cursor;
- if (!r_R2())
- {
- break lab5;
- }
- {
- var /** number */ v_7 = base.limit - base.cursor;
- lab6: {
- if (!(base.eq_s_b("e")))
- {
- break lab6;
- }
- break lab5;
- }
- base.cursor = base.limit - v_7;
- }
- if (!base.slice_del())
- {
- return false;
- }
- break lab4;
- }
- base.cursor = base.limit - v_6;
- if (!r_undouble())
- {
- break lab3;
- }
- }
- break;
- case 2:
- if (!r_R2())
- {
- break lab3;
- }
- {
- var /** number */ v_8 = base.limit - base.cursor;
- lab7: {
- if (!(base.eq_s_b("e")))
- {
- break lab7;
- }
- break lab3;
- }
- base.cursor = base.limit - v_8;
- }
- if (!base.slice_del())
- {
- return false;
- }
- break;
- case 3:
- if (!r_R2())
- {
- break lab3;
- }
- if (!base.slice_del())
- {
- return false;
- }
- if (!r_e_ending())
- {
- break lab3;
- }
- break;
- case 4:
- if (!r_R2())
- {
- break lab3;
- }
- if (!base.slice_del())
- {
- return false;
- }
- break;
- case 5:
- if (!r_R2())
- {
- break lab3;
- }
- if (!B_e_found)
- {
- break lab3;
- }
- if (!base.slice_del())
- {
- return false;
- }
- break;
+ B_stemmed = true;
+ }
+ base.cursor = base.limit - v_4;
+ base.cursor = base.limit_backward;
+ B_GE_removed = false;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab4: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ if (!r_Lose_prefix())
+ {
+ break lab4;
}
+ base.cursor = v_6;
+ r_measure();
}
- base.cursor = base.limit - v_5;
- var /** number */ v_9 = base.limit - base.cursor;
- lab8: {
- if (!(base.out_grouping_b(g_v_I, 73, 232)))
+ base.cursor = v_5;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab5: {
+ if (!B_GE_removed)
{
- break lab8;
+ break lab5;
}
- var /** number */ v_10 = base.limit - base.cursor;
- if (base.find_among_b(a_5) == 0)
+ B_stemmed = true;
+ if (!r_Step_1c())
{
- break lab8;
+ break lab5;
}
- if (!(base.out_grouping_b(g_v, 97, 232)))
+ }
+ base.cursor = base.limit - v_7;
+ base.cursor = base.limit_backward;
+ B_GE_removed = false;
+ /** @const */ var /** number */ v_8 = base.cursor;
+ lab6: {
+ /** @const */ var /** number */ v_9 = base.cursor;
+ if (!r_Lose_infix())
{
- break lab8;
+ break lab6;
}
- base.cursor = base.limit - v_10;
- base.ket = base.cursor;
- if (base.cursor <= base.limit_backward)
+ base.cursor = v_9;
+ r_measure();
+ }
+ base.cursor = v_8;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab7: {
+ if (!B_GE_removed)
{
- break lab8;
+ break lab7;
}
- base.cursor--;
- base.bra = base.cursor;
- if (!base.slice_del())
+ B_stemmed = true;
+ if (!r_Step_1c())
{
- return false;
+ break lab7;
}
}
- base.cursor = base.limit - v_9;
- return true;
- };
-
- this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
- r_prelude();
- base.cursor = v_1;
- var /** number */ v_2 = base.cursor;
- r_mark_regions();
- base.cursor = v_2;
+ base.cursor = base.limit - v_10;
+ base.cursor = base.limit_backward;
base.limit_backward = base.cursor; base.cursor = base.limit;
- r_standard_suffix();
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ lab8: {
+ if (!r_Step_7())
+ {
+ break lab8;
+ }
+ B_stemmed = true;
+ }
+ base.cursor = base.limit - v_11;
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
+ lab9: {
+ if (!B_stemmed)
+ {
+ break lab9;
+ }
+ if (!r_Step_6())
+ {
+ break lab9;
+ }
+ }
+ base.cursor = base.limit - v_12;
base.cursor = base.limit_backward;
- var /** number */ v_4 = base.cursor;
- r_postlude();
- base.cursor = v_4;
return true;
};
diff --git a/sphinx/search/non-minified-js/dutch_porter-stemmer.js b/sphinx/search/non-minified-js/dutch_porter-stemmer.js
new file mode 100644
index 00000000000..6bbf2bf2e8e
--- /dev/null
+++ b/sphinx/search/non-minified-js/dutch_porter-stemmer.js
@@ -0,0 +1,637 @@
+// Generated from dutch_porter.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var DutchPorterStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["", -1, 6],
+ ["\u00E1", 0, 1],
+ ["\u00E4", 0, 1],
+ ["\u00E9", 0, 2],
+ ["\u00EB", 0, 2],
+ ["\u00ED", 0, 3],
+ ["\u00EF", 0, 3],
+ ["\u00F3", 0, 4],
+ ["\u00F6", 0, 4],
+ ["\u00FA", 0, 5],
+ ["\u00FC", 0, 5]
+ ];
+
+ /** @const */ var a_1 = [
+ ["", -1, 3],
+ ["I", 0, 2],
+ ["Y", 0, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["dd", -1, -1],
+ ["kk", -1, -1],
+ ["tt", -1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["ene", -1, 2],
+ ["se", -1, 3],
+ ["en", -1, 2],
+ ["heden", 2, 1],
+ ["s", -1, 3]
+ ];
+
+ /** @const */ var a_4 = [
+ ["end", -1, 1],
+ ["ig", -1, 2],
+ ["ing", -1, 1],
+ ["lijk", -1, 3],
+ ["baar", -1, 4],
+ ["bar", -1, 5]
+ ];
+
+ /** @const */ var a_5 = [
+ ["aa", -1, -1],
+ ["ee", -1, -1],
+ ["oo", -1, -1],
+ ["uu", -1, -1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+
+ /** @const */ var /** Array */ g_v_I = [1, 0, 0, 17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+
+ /** @const */ var /** Array */ g_v_j = [17, 67, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+
+ var /** number */ I_x = 0;
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+ var /** boolean */ B_e_found = false;
+
+
+ /** @return {boolean} */
+ function r_prelude() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab1: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("y")))
+ {
+ base.cursor = v_3;
+ break lab1;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("Y"))
+ {
+ return false;
+ }
+ }
+ while(true)
+ {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab2: {
+ if (!base.go_out_grouping(g_v, 97, 232))
+ {
+ break lab2;
+ }
+ base.cursor++;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ lab4: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab5: {
+ if (!(base.eq_s("i")))
+ {
+ break lab5;
+ }
+ base.ket = base.cursor;
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab6: {
+ if (!(base.in_grouping(g_v, 97, 232)))
+ {
+ break lab6;
+ }
+ if (!base.slice_from("I"))
+ {
+ return false;
+ }
+ }
+ base.cursor = v_7;
+ break lab4;
+ }
+ base.cursor = v_6;
+ if (!(base.eq_s("y")))
+ {
+ base.cursor = v_5;
+ break lab3;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("Y"))
+ {
+ return false;
+ }
+ }
+ }
+ continue;
+ }
+ base.cursor = v_4;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ I_x = base.cursor;
+ base.cursor = v_1;
+ if (!base.go_out_grouping(g_v, 97, 232))
+ {
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 232))
+ {
+ return false;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ lab0: {
+ if (I_p1 >= I_x)
+ {
+ break lab0;
+ }
+ I_p1 = I_x;
+ }
+ if (!base.go_out_grouping(g_v, 97, 232))
+ {
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 232))
+ {
+ return false;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_postlude() {
+ var /** number */ among_var;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_1);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("y"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_undouble() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (base.cursor <= base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor--;
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_e_ending() {
+ B_e_found = false;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("e")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 232)))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_e_found = true;
+ if (!r_undouble())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_en_ending() {
+ if (!r_R1())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 232)))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("gem")))
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_2;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ if (!r_undouble())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_standard_suffix() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_from("heid"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_en_ending())
+ {
+ break lab0;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!(base.out_grouping_b(g_v_j, 97, 232)))
+ {
+ break lab0;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_e_ending();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("heid")))
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (!r_R2())
+ {
+ break lab1;
+ }
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("c")))
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("en")))
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (!r_en_ending())
+ {
+ break lab1;
+ }
+ }
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab3: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ lab4: {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab5: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("ig")))
+ {
+ break lab5;
+ }
+ base.bra = base.cursor;
+ if (!r_R2())
+ {
+ break lab5;
+ }
+ {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab6: {
+ if (!(base.eq_s_b("e")))
+ {
+ break lab6;
+ }
+ break lab5;
+ }
+ base.cursor = base.limit - v_7;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_6;
+ if (!r_undouble())
+ {
+ break lab3;
+ }
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab7: {
+ if (!(base.eq_s_b("e")))
+ {
+ break lab7;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_8;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ if (!r_e_ending())
+ {
+ break lab3;
+ }
+ break;
+ case 4:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!r_R2())
+ {
+ break lab3;
+ }
+ if (!B_e_found)
+ {
+ break lab3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ lab8: {
+ if (!(base.out_grouping_b(g_v_I, 73, 232)))
+ {
+ break lab8;
+ }
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ if (base.find_among_b(a_5) == 0)
+ {
+ break lab8;
+ }
+ if (!(base.out_grouping_b(g_v, 97, 232)))
+ {
+ break lab8;
+ }
+ base.cursor = base.limit - v_10;
+ base.ket = base.cursor;
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab8;
+ }
+ base.cursor--;
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_9;
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_prelude();
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_mark_regions();
+ base.cursor = v_2;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ r_standard_suffix();
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ r_postlude();
+ base.cursor = v_3;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/english-stemmer.js b/sphinx/search/non-minified-js/english-stemmer.js
new file mode 100644
index 00000000000..056760ee8aa
--- /dev/null
+++ b/sphinx/search/non-minified-js/english-stemmer.js
@@ -0,0 +1,1066 @@
+// Generated from english.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var EnglishStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["arsen", -1, -1],
+ ["commun", -1, -1],
+ ["emerg", -1, -1],
+ ["gener", -1, -1],
+ ["later", -1, -1],
+ ["organ", -1, -1],
+ ["past", -1, -1],
+ ["univers", -1, -1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["'", -1, 1],
+ ["'s'", 0, 1],
+ ["'s", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["ied", -1, 2],
+ ["s", -1, 3],
+ ["ies", 1, 2],
+ ["sses", 1, 1],
+ ["ss", 1, -1],
+ ["us", 1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["succ", -1, 1],
+ ["proc", -1, 1],
+ ["exc", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["even", -1, 2],
+ ["cann", -1, 2],
+ ["inn", -1, 2],
+ ["earr", -1, 2],
+ ["herr", -1, 2],
+ ["out", -1, 2],
+ ["y", -1, 1]
+ ];
+
+ /** @const */ var a_5 = [
+ ["", -1, -1],
+ ["ed", 0, 2],
+ ["eed", 1, 1],
+ ["ing", 0, 3],
+ ["edly", 0, 2],
+ ["eedly", 4, 1],
+ ["ingly", 0, 2]
+ ];
+
+ /** @const */ var a_6 = [
+ ["", -1, 3],
+ ["bb", 0, 2],
+ ["dd", 0, 2],
+ ["ff", 0, 2],
+ ["gg", 0, 2],
+ ["bl", 0, 1],
+ ["mm", 0, 2],
+ ["nn", 0, 2],
+ ["pp", 0, 2],
+ ["rr", 0, 2],
+ ["at", 0, 1],
+ ["tt", 0, 2],
+ ["iz", 0, 1]
+ ];
+
+ /** @const */ var a_7 = [
+ ["anci", -1, 3],
+ ["enci", -1, 2],
+ ["ogi", -1, 14],
+ ["li", -1, 16],
+ ["bli", 3, 12],
+ ["abli", 4, 4],
+ ["alli", 3, 8],
+ ["fulli", 3, 9],
+ ["lessli", 3, 15],
+ ["ousli", 3, 10],
+ ["entli", 3, 5],
+ ["aliti", -1, 8],
+ ["biliti", -1, 12],
+ ["iviti", -1, 11],
+ ["tional", -1, 1],
+ ["ational", 14, 7],
+ ["alism", -1, 8],
+ ["ation", -1, 7],
+ ["ization", 17, 6],
+ ["izer", -1, 6],
+ ["ator", -1, 7],
+ ["iveness", -1, 11],
+ ["fulness", -1, 9],
+ ["ousness", -1, 10],
+ ["ogist", -1, 13]
+ ];
+
+ /** @const */ var a_8 = [
+ ["icate", -1, 4],
+ ["ative", -1, 6],
+ ["alize", -1, 3],
+ ["iciti", -1, 4],
+ ["ical", -1, 4],
+ ["tional", -1, 1],
+ ["ational", 5, 2],
+ ["ful", -1, 5],
+ ["ness", -1, 5]
+ ];
+
+ /** @const */ var a_9 = [
+ ["ic", -1, 1],
+ ["ance", -1, 1],
+ ["ence", -1, 1],
+ ["able", -1, 1],
+ ["ible", -1, 1],
+ ["ate", -1, 1],
+ ["ive", -1, 1],
+ ["ize", -1, 1],
+ ["iti", -1, 1],
+ ["al", -1, 1],
+ ["ism", -1, 1],
+ ["ion", -1, 2],
+ ["er", -1, 1],
+ ["ous", -1, 1],
+ ["ant", -1, 1],
+ ["ent", -1, 1],
+ ["ment", 15, 1],
+ ["ement", 16, 1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["e", -1, 1],
+ ["l", -1, 2]
+ ];
+
+ /** @const */ var a_11 = [
+ ["andes", -1, -1],
+ ["atlas", -1, -1],
+ ["bias", -1, -1],
+ ["cosmos", -1, -1],
+ ["early", -1, 5],
+ ["gently", -1, 3],
+ ["howe", -1, -1],
+ ["idly", -1, 2],
+ ["news", -1, -1],
+ ["only", -1, 6],
+ ["singly", -1, 7],
+ ["skies", -1, 1],
+ ["sky", -1, -1],
+ ["ugly", -1, 4]
+ ];
+
+ /** @const */ var /** Array */ g_aeo = [17, 64];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1];
+
+ /** @const */ var /** Array */ g_v_WXY = [1, 17, 65, 208, 1];
+
+ /** @const */ var /** Array */ g_valid_LI = [55, 141, 2];
+
+ var /** boolean */ B_Y_found = false;
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_prelude() {
+ B_Y_found = false;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("'")))
+ {
+ break lab0;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("y")))
+ {
+ break lab1;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("Y"))
+ {
+ return false;
+ }
+ B_Y_found = true;
+ }
+ base.cursor = v_2;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab3: {
+ golab4: while(true)
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab5: {
+ if (!(base.in_grouping(g_v, 97, 121)))
+ {
+ break lab5;
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s("y")))
+ {
+ break lab5;
+ }
+ base.ket = base.cursor;
+ base.cursor = v_5;
+ break golab4;
+ }
+ base.cursor = v_5;
+ if (base.cursor >= base.limit)
+ {
+ break lab3;
+ }
+ base.cursor++;
+ }
+ if (!base.slice_from("Y"))
+ {
+ return false;
+ }
+ B_Y_found = true;
+ continue;
+ }
+ base.cursor = v_4;
+ break;
+ }
+ }
+ base.cursor = v_3;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
+ if (base.find_among(a_0) == 0)
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = v_2;
+ if (!base.go_out_grouping(g_v, 97, 121))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 121))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ }
+ I_p1 = base.cursor;
+ if (!base.go_out_grouping(g_v, 97, 121))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 121))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_shortv() {
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.out_grouping_b(g_v_WXY, 89, 121)))
+ {
+ break lab1;
+ }
+ if (!(base.in_grouping_b(g_v, 97, 121)))
+ {
+ break lab1;
+ }
+ if (!(base.out_grouping_b(g_v, 97, 121)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.out_grouping_b(g_v, 97, 121)))
+ {
+ break lab2;
+ }
+ if (!(base.in_grouping_b(g_v, 97, 121)))
+ {
+ break lab2;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("past")))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_Step_1a() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ base.cursor = base.limit - v_1;
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("ss"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ {
+ /** @const */ var /** number */ c1 = base.cursor - 2;
+ if (c1 < base.limit_backward)
+ {
+ break lab2;
+ }
+ base.cursor = c1;
+ }
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!base.slice_from("ie"))
+ {
+ return false;
+ }
+ }
+ break;
+ case 3:
+ if (base.cursor <= base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor--;
+ if (!base.go_out_grouping_b(g_v, 97, 121))
+ {
+ return false;
+ }
+ base.cursor--;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_1b() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_5);
+ base.bra = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ switch (among_var) {
+ case 1:
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ lab3: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab4: {
+ if (base.find_among_b(a_3) == 0)
+ {
+ break lab4;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_3;
+ if (!r_R1())
+ {
+ break lab2;
+ }
+ if (!base.slice_from("ee"))
+ {
+ return false;
+ }
+ }
+ }
+ base.cursor = base.limit - v_2;
+ break;
+ case 2:
+ break lab1;
+ case 3:
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ switch (among_var) {
+ case 1:
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 121)))
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ base.cursor = base.limit - v_4;
+ base.bra = base.cursor;
+ if (!base.slice_from("ie"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ break;
+ }
+ break;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ if (!base.go_out_grouping_b(g_v, 97, 121))
+ {
+ return false;
+ }
+ base.cursor--;
+ base.cursor = base.limit - v_5;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ among_var = base.find_among_b(a_6);
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ return false;
+ case 2:
+ {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab5: {
+ if (!(base.in_grouping_b(g_aeo, 97, 111)))
+ {
+ break lab5;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab5;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_7;
+ }
+ break;
+ case 3:
+ if (base.cursor != I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ if (!r_shortv())
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_8;
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_6;
+ base.ket = base.cursor;
+ if (base.cursor <= base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor--;
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_1c() {
+ base.ket = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("y")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("Y")))
+ {
+ return false;
+ }
+ }
+ base.bra = base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 121)))
+ {
+ return false;
+ }
+ lab2: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ return false;
+ }
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_2() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_7);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("tion"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("ence"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("ance"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("able"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("ent"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("ize"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("ate"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("al"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("ful"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("ous"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("ive"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("ble"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("og"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!(base.eq_s_b("l")))
+ {
+ return false;
+ }
+ if (!base.slice_from("og"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("less"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!(base.in_grouping_b(g_valid_LI, 99, 116)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_3() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_8);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("tion"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("ate"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("al"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("ic"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_4() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_9);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R2())
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("s")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("t")))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_5() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_10);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ lab1: {
+ if (!r_R2())
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ if (!r_R1())
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab2: {
+ if (!r_shortv())
+ {
+ break lab2;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!(base.eq_s_b("l")))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_exception1() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_11);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.cursor < base.limit)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("sky"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("idl"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("gentl"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("ugli"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("earli"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("onli"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("singl"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_postlude() {
+ if (!B_Y_found)
+ {
+ return false;
+ }
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ golab1: while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("Y")))
+ {
+ break lab2;
+ }
+ base.ket = base.cursor;
+ base.cursor = v_2;
+ break golab1;
+ }
+ base.cursor = v_2;
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ }
+ if (!base.slice_from("y"))
+ {
+ return false;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab1: {
+ if (!r_exception1())
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = v_1;
+ lab2: {
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab3: {
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ break lab3;
+ }
+ base.cursor = c1;
+ }
+ break lab2;
+ }
+ base.cursor = v_2;
+ }
+ break lab0;
+ }
+ base.cursor = v_1;
+ r_prelude();
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_Step_1a();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_Step_1b();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_Step_1c();
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_Step_2();
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ r_Step_3();
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ r_Step_4();
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ r_Step_5();
+ base.cursor = base.limit - v_9;
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_10 = base.cursor;
+ r_postlude();
+ base.cursor = v_10;
+ }
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/esperanto-stemmer.js b/sphinx/search/non-minified-js/esperanto-stemmer.js
new file mode 100644
index 00000000000..8fc6af00f1a
--- /dev/null
+++ b/sphinx/search/non-minified-js/esperanto-stemmer.js
@@ -0,0 +1,762 @@
+// Generated from esperanto.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var EsperantoStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["", -1, 14],
+ ["-", 0, 13],
+ ["cx", 0, 1],
+ ["gx", 0, 2],
+ ["hx", 0, 3],
+ ["jx", 0, 4],
+ ["q", 0, 12],
+ ["sx", 0, 5],
+ ["ux", 0, 6],
+ ["w", 0, 12],
+ ["x", 0, 12],
+ ["y", 0, 12],
+ ["\u00E1", 0, 7],
+ ["\u00E9", 0, 8],
+ ["\u00ED", 0, 9],
+ ["\u00F3", 0, 10],
+ ["\u00FA", 0, 11]
+ ];
+
+ /** @const */ var a_1 = [
+ ["as", -1, -1],
+ ["i", -1, -1],
+ ["is", 1, -1],
+ ["os", -1, -1],
+ ["u", -1, -1],
+ ["us", 4, -1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["ci", -1, -1],
+ ["gi", -1, -1],
+ ["hi", -1, -1],
+ ["li", -1, -1],
+ ["ili", 3, -1],
+ ["\u015Dli", 3, -1],
+ ["mi", -1, -1],
+ ["ni", -1, -1],
+ ["oni", 7, -1],
+ ["ri", -1, -1],
+ ["si", -1, -1],
+ ["vi", -1, -1],
+ ["ivi", 11, -1],
+ ["\u011Di", -1, -1],
+ ["\u015Di", -1, -1],
+ ["i\u015Di", 14, -1],
+ ["mal\u015Di", 14, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["amb", -1, -1],
+ ["bald", -1, -1],
+ ["malbald", 1, -1],
+ ["morg", -1, -1],
+ ["postmorg", 3, -1],
+ ["adi", -1, -1],
+ ["hodi", -1, -1],
+ ["ank", -1, -1],
+ ["\u0109irk", -1, -1],
+ ["tut\u0109irk", 8, -1],
+ ["presk", -1, -1],
+ ["almen", -1, -1],
+ ["apen", -1, -1],
+ ["hier", -1, -1],
+ ["anta\u016Dhier", 13, -1],
+ ["malgr", -1, -1],
+ ["ankor", -1, -1],
+ ["kontr", -1, -1],
+ ["anstat", -1, -1],
+ ["kvaz", -1, -1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["aliu", -1, -1],
+ ["unu", -1, -1]
+ ];
+
+ /** @const */ var a_5 = [
+ ["aha", -1, -1],
+ ["haha", 0, -1],
+ ["haleluja", -1, -1],
+ ["hola", -1, -1],
+ ["hosana", -1, -1],
+ ["maltra", -1, -1],
+ ["hura", -1, -1],
+ ["\u0125a\u0125a", -1, -1],
+ ["ekde", -1, -1],
+ ["elde", -1, -1],
+ ["disde", -1, -1],
+ ["ehe", -1, -1],
+ ["maltre", -1, -1],
+ ["dirlididi", -1, -1],
+ ["malpli", -1, -1],
+ ["mal\u0109i", -1, -1],
+ ["malkaj", -1, -1],
+ ["amen", -1, -1],
+ ["tamen", 17, -1],
+ ["oho", -1, -1],
+ ["maltro", -1, -1],
+ ["minus", -1, -1],
+ ["uhu", -1, -1],
+ ["muu", -1, -1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["tri", -1, -1],
+ ["du", -1, -1],
+ ["unu", -1, -1]
+ ];
+
+ /** @const */ var a_7 = [
+ ["dek", -1, -1],
+ ["cent", -1, -1]
+ ];
+
+ /** @const */ var a_8 = [
+ ["k", -1, -1],
+ ["kelk", 0, -1],
+ ["nen", -1, -1],
+ ["t", -1, -1],
+ ["mult", 3, -1],
+ ["samt", 3, -1],
+ ["\u0109", -1, -1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["a", -1, -1],
+ ["e", -1, -1],
+ ["i", -1, -1],
+ ["j", -1, -1, r_not_after_letter],
+ ["aj", 3, -1],
+ ["oj", 3, -1],
+ ["n", -1, -1, r_not_after_letter],
+ ["an", 6, -1],
+ ["en", 6, -1],
+ ["jn", 6, -1, r_not_after_letter],
+ ["ajn", 9, -1],
+ ["ojn", 9, -1],
+ ["on", 6, -1],
+ ["o", -1, -1],
+ ["as", -1, -1],
+ ["is", -1, -1],
+ ["os", -1, -1],
+ ["us", -1, -1],
+ ["u", -1, -1]
+ ];
+
+ /** @const */ var /** Array */ g_vowel = [17, 65, 16];
+
+ /** @const */ var /** Array */ g_aou = [1, 64, 16];
+
+ /** @const */ var /** Array */ g_digit = [255, 3];
+
+ var /** boolean */ B_foreign = false;
+
+
+ /** @return {boolean} */
+ function r_canonical_form() {
+ var /** number */ among_var;
+ B_foreign = false;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0109"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u011D"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0125"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u0135"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u015D"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u016D"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 8:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 9:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 10:
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 11:
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ B_foreign = true;
+ break;
+ case 12:
+ B_foreign = true;
+ break;
+ case 13:
+ B_foreign = false;
+ break;
+ case 14:
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ lab1: {
+ if (!B_foreign)
+ {
+ break lab1;
+ }
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_initial_apostrophe() {
+ base.bra = base.cursor;
+ if (!(base.eq_s("'")))
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (!(base.eq_s("st")))
+ {
+ return false;
+ }
+ if (base.find_among(a_1) == 0)
+ {
+ return false;
+ }
+ if (base.cursor < base.limit)
+ {
+ return false;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_pronoun() {
+ base.ket = base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("n")))
+ {
+ base.cursor = base.limit - v_1;
+ break lab0;
+ }
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.eq_s_b("-")))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_final_apostrophe() {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("'")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("l")))
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.eq_s_b("un")))
+ {
+ break lab2;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab2;
+ }
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab3: {
+ if (base.find_among_b(a_3) == 0)
+ {
+ break lab3;
+ }
+ lab4: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab5: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab5;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.eq_s_b("-")))
+ {
+ break lab3;
+ }
+ }
+ if (!base.slice_from("a\u016D"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_ujn_suffix() {
+ base.ket = base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("n")))
+ {
+ base.cursor = base.limit - v_1;
+ break lab0;
+ }
+ }
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("j")))
+ {
+ base.cursor = base.limit - v_2;
+ break lab1;
+ }
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_4) == 0)
+ {
+ return false;
+ }
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab3: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = base.limit - v_3;
+ if (!(base.eq_s_b("-")))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_uninflected() {
+ if (base.find_among_b(a_5) == 0)
+ {
+ return false;
+ }
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("-")))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_merged_numeral() {
+ if (base.find_among_b(a_6) == 0)
+ {
+ return false;
+ }
+ if (base.find_among_b(a_7) == 0)
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_correlative() {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("n")))
+ {
+ base.cursor = base.limit - v_3;
+ break lab2;
+ }
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("e")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("n")))
+ {
+ base.cursor = base.limit - v_4;
+ break lab3;
+ }
+ }
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ if (!(base.eq_s_b("j")))
+ {
+ base.cursor = base.limit - v_5;
+ break lab4;
+ }
+ }
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_aou, 97, 117)))
+ {
+ return false;
+ }
+ }
+ if (!(base.eq_s_b("i")))
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab5: {
+ if (base.find_among_b(a_8) == 0)
+ {
+ base.cursor = base.limit - v_6;
+ break lab5;
+ }
+ }
+ lab6: {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab7: {
+ if (base.cursor > base.limit_backward)
+ {
+ break lab7;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_7;
+ if (!(base.eq_s_b("-")))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_long_word() {
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ for (var /** number */ v_2 = 2; v_2 > 0; v_2--)
+ {
+ if (!base.go_out_grouping_b(g_vowel, 97, 117))
+ {
+ break lab1;
+ }
+ base.cursor--;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ golab3: while(true)
+ {
+ lab4: {
+ if (!(base.eq_s_b("-")))
+ {
+ break lab4;
+ }
+ break golab3;
+ }
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab2;
+ }
+ base.cursor--;
+ }
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab2;
+ }
+ base.cursor--;
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.go_out_grouping_b(g_digit, 48, 57))
+ {
+ return false;
+ }
+ base.cursor--;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_not_after_letter() {
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("-")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.in_grouping_b(g_digit, 48, 57)))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_standard_suffix() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_9) == 0)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("-")))
+ {
+ base.cursor = base.limit - v_1;
+ break lab0;
+ }
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ if (!r_canonical_form())
+ {
+ return false;
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_initial_apostrophe();
+ base.cursor = v_2;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab0: {
+ if (!r_pronoun())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_final_apostrophe();
+ base.cursor = base.limit - v_4;
+ {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab1: {
+ if (!r_correlative())
+ {
+ break lab1;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_5;
+ }
+ {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab2: {
+ if (!r_uninflected())
+ {
+ break lab2;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_6;
+ }
+ {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab3: {
+ if (!r_merged_numeral())
+ {
+ break lab3;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_7;
+ }
+ {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab4: {
+ if (!r_ujn_suffix())
+ {
+ break lab4;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_8;
+ }
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ if (!r_long_word())
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_9;
+ if (!r_standard_suffix())
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/estonian-stemmer.js b/sphinx/search/non-minified-js/estonian-stemmer.js
new file mode 100644
index 00000000000..2700c0b3379
--- /dev/null
+++ b/sphinx/search/non-minified-js/estonian-stemmer.js
@@ -0,0 +1,1088 @@
+// Generated from estonian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var EstonianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["gi", -1, 1],
+ ["ki", -1, 2]
+ ];
+
+ /** @const */ var a_1 = [
+ ["da", -1, 3],
+ ["mata", -1, 1],
+ ["b", -1, 3],
+ ["ksid", -1, 1],
+ ["nuksid", 3, 1],
+ ["me", -1, 3],
+ ["sime", 5, 1],
+ ["ksime", 6, 1],
+ ["nuksime", 7, 1],
+ ["akse", -1, 2],
+ ["dakse", 9, 1],
+ ["takse", 9, 1],
+ ["site", -1, 1],
+ ["ksite", 12, 1],
+ ["nuksite", 13, 1],
+ ["n", -1, 3],
+ ["sin", 15, 1],
+ ["ksin", 16, 1],
+ ["nuksin", 17, 1],
+ ["daks", -1, 1],
+ ["taks", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["aa", -1, -1],
+ ["ee", -1, -1],
+ ["ii", -1, -1],
+ ["oo", -1, -1],
+ ["uu", -1, -1],
+ ["\u00E4\u00E4", -1, -1],
+ ["\u00F5\u00F5", -1, -1],
+ ["\u00F6\u00F6", -1, -1],
+ ["\u00FC\u00FC", -1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["i", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["lane", -1, 1],
+ ["line", -1, 3],
+ ["mine", -1, 2],
+ ["lasse", -1, 1],
+ ["lisse", -1, 3],
+ ["misse", -1, 2],
+ ["lasi", -1, 1],
+ ["lisi", -1, 3],
+ ["misi", -1, 2],
+ ["last", -1, 1],
+ ["list", -1, 3],
+ ["mist", -1, 2]
+ ];
+
+ /** @const */ var a_5 = [
+ ["ga", -1, 1],
+ ["ta", -1, 1],
+ ["le", -1, 1],
+ ["sse", -1, 1],
+ ["l", -1, 1],
+ ["s", -1, 1],
+ ["ks", 5, 1],
+ ["t", -1, 2],
+ ["lt", 7, 1],
+ ["st", 7, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["", -1, 2],
+ ["las", 0, 1],
+ ["lis", 0, 1],
+ ["mis", 0, 1],
+ ["t", 0, -1]
+ ];
+
+ /** @const */ var a_7 = [
+ ["d", -1, 4],
+ ["sid", 0, 2],
+ ["de", -1, 4],
+ ["ikkude", 2, 1],
+ ["ike", -1, 1],
+ ["ikke", -1, 1],
+ ["te", -1, 3]
+ ];
+
+ /** @const */ var a_8 = [
+ ["va", -1, -1],
+ ["du", -1, -1],
+ ["nu", -1, -1],
+ ["tu", -1, -1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["kk", -1, 1],
+ ["pp", -1, 2],
+ ["tt", -1, 3]
+ ];
+
+ /** @const */ var a_10 = [
+ ["ma", -1, 2],
+ ["mai", -1, 1],
+ ["m", -1, 1]
+ ];
+
+ /** @const */ var a_11 = [
+ ["joob", -1, 1],
+ ["jood", -1, 1],
+ ["joodakse", 1, 1],
+ ["jooma", -1, 1],
+ ["joomata", 3, 1],
+ ["joome", -1, 1],
+ ["joon", -1, 1],
+ ["joote", -1, 1],
+ ["joovad", -1, 1],
+ ["juua", -1, 1],
+ ["juuakse", 9, 1],
+ ["j\u00E4i", -1, 12],
+ ["j\u00E4id", 11, 12],
+ ["j\u00E4ime", 11, 12],
+ ["j\u00E4in", 11, 12],
+ ["j\u00E4ite", 11, 12],
+ ["j\u00E4\u00E4b", -1, 12],
+ ["j\u00E4\u00E4d", -1, 12],
+ ["j\u00E4\u00E4da", 17, 12],
+ ["j\u00E4\u00E4dakse", 18, 12],
+ ["j\u00E4\u00E4di", 17, 12],
+ ["j\u00E4\u00E4ks", -1, 12],
+ ["j\u00E4\u00E4ksid", 21, 12],
+ ["j\u00E4\u00E4ksime", 21, 12],
+ ["j\u00E4\u00E4ksin", 21, 12],
+ ["j\u00E4\u00E4ksite", 21, 12],
+ ["j\u00E4\u00E4ma", -1, 12],
+ ["j\u00E4\u00E4mata", 26, 12],
+ ["j\u00E4\u00E4me", -1, 12],
+ ["j\u00E4\u00E4n", -1, 12],
+ ["j\u00E4\u00E4te", -1, 12],
+ ["j\u00E4\u00E4vad", -1, 12],
+ ["j\u00F5i", -1, 1],
+ ["j\u00F5id", 32, 1],
+ ["j\u00F5ime", 32, 1],
+ ["j\u00F5in", 32, 1],
+ ["j\u00F5ite", 32, 1],
+ ["keeb", -1, 4],
+ ["keed", -1, 4],
+ ["keedakse", 38, 4],
+ ["keeks", -1, 4],
+ ["keeksid", 40, 4],
+ ["keeksime", 40, 4],
+ ["keeksin", 40, 4],
+ ["keeksite", 40, 4],
+ ["keema", -1, 4],
+ ["keemata", 45, 4],
+ ["keeme", -1, 4],
+ ["keen", -1, 4],
+ ["kees", -1, 4],
+ ["keeta", -1, 4],
+ ["keete", -1, 4],
+ ["keevad", -1, 4],
+ ["k\u00E4ia", -1, 8],
+ ["k\u00E4iakse", 53, 8],
+ ["k\u00E4ib", -1, 8],
+ ["k\u00E4id", -1, 8],
+ ["k\u00E4idi", 56, 8],
+ ["k\u00E4iks", -1, 8],
+ ["k\u00E4iksid", 58, 8],
+ ["k\u00E4iksime", 58, 8],
+ ["k\u00E4iksin", 58, 8],
+ ["k\u00E4iksite", 58, 8],
+ ["k\u00E4ima", -1, 8],
+ ["k\u00E4imata", 63, 8],
+ ["k\u00E4ime", -1, 8],
+ ["k\u00E4in", -1, 8],
+ ["k\u00E4is", -1, 8],
+ ["k\u00E4ite", -1, 8],
+ ["k\u00E4ivad", -1, 8],
+ ["laob", -1, 16],
+ ["laod", -1, 16],
+ ["laoks", -1, 16],
+ ["laoksid", 72, 16],
+ ["laoksime", 72, 16],
+ ["laoksin", 72, 16],
+ ["laoksite", 72, 16],
+ ["laome", -1, 16],
+ ["laon", -1, 16],
+ ["laote", -1, 16],
+ ["laovad", -1, 16],
+ ["loeb", -1, 14],
+ ["loed", -1, 14],
+ ["loeks", -1, 14],
+ ["loeksid", 83, 14],
+ ["loeksime", 83, 14],
+ ["loeksin", 83, 14],
+ ["loeksite", 83, 14],
+ ["loeme", -1, 14],
+ ["loen", -1, 14],
+ ["loete", -1, 14],
+ ["loevad", -1, 14],
+ ["loob", -1, 7],
+ ["lood", -1, 7],
+ ["loodi", 93, 7],
+ ["looks", -1, 7],
+ ["looksid", 95, 7],
+ ["looksime", 95, 7],
+ ["looksin", 95, 7],
+ ["looksite", 95, 7],
+ ["looma", -1, 7],
+ ["loomata", 100, 7],
+ ["loome", -1, 7],
+ ["loon", -1, 7],
+ ["loote", -1, 7],
+ ["loovad", -1, 7],
+ ["luua", -1, 7],
+ ["luuakse", 106, 7],
+ ["l\u00F5i", -1, 6],
+ ["l\u00F5id", 108, 6],
+ ["l\u00F5ime", 108, 6],
+ ["l\u00F5in", 108, 6],
+ ["l\u00F5ite", 108, 6],
+ ["l\u00F6\u00F6b", -1, 5],
+ ["l\u00F6\u00F6d", -1, 5],
+ ["l\u00F6\u00F6dakse", 114, 5],
+ ["l\u00F6\u00F6di", 114, 5],
+ ["l\u00F6\u00F6ks", -1, 5],
+ ["l\u00F6\u00F6ksid", 117, 5],
+ ["l\u00F6\u00F6ksime", 117, 5],
+ ["l\u00F6\u00F6ksin", 117, 5],
+ ["l\u00F6\u00F6ksite", 117, 5],
+ ["l\u00F6\u00F6ma", -1, 5],
+ ["l\u00F6\u00F6mata", 122, 5],
+ ["l\u00F6\u00F6me", -1, 5],
+ ["l\u00F6\u00F6n", -1, 5],
+ ["l\u00F6\u00F6te", -1, 5],
+ ["l\u00F6\u00F6vad", -1, 5],
+ ["l\u00FC\u00FCa", -1, 5],
+ ["l\u00FC\u00FCakse", 128, 5],
+ ["m\u00FC\u00FCa", -1, 13],
+ ["m\u00FC\u00FCakse", 130, 13],
+ ["m\u00FC\u00FCb", -1, 13],
+ ["m\u00FC\u00FCd", -1, 13],
+ ["m\u00FC\u00FCdi", 133, 13],
+ ["m\u00FC\u00FCks", -1, 13],
+ ["m\u00FC\u00FCksid", 135, 13],
+ ["m\u00FC\u00FCksime", 135, 13],
+ ["m\u00FC\u00FCksin", 135, 13],
+ ["m\u00FC\u00FCksite", 135, 13],
+ ["m\u00FC\u00FCma", -1, 13],
+ ["m\u00FC\u00FCmata", 140, 13],
+ ["m\u00FC\u00FCme", -1, 13],
+ ["m\u00FC\u00FCn", -1, 13],
+ ["m\u00FC\u00FCs", -1, 13],
+ ["m\u00FC\u00FCte", -1, 13],
+ ["m\u00FC\u00FCvad", -1, 13],
+ ["n\u00E4eb", -1, 18],
+ ["n\u00E4ed", -1, 18],
+ ["n\u00E4eks", -1, 18],
+ ["n\u00E4eksid", 149, 18],
+ ["n\u00E4eksime", 149, 18],
+ ["n\u00E4eksin", 149, 18],
+ ["n\u00E4eksite", 149, 18],
+ ["n\u00E4eme", -1, 18],
+ ["n\u00E4en", -1, 18],
+ ["n\u00E4ete", -1, 18],
+ ["n\u00E4evad", -1, 18],
+ ["n\u00E4gema", -1, 18],
+ ["n\u00E4gemata", 158, 18],
+ ["n\u00E4ha", -1, 18],
+ ["n\u00E4hakse", 160, 18],
+ ["n\u00E4hti", -1, 18],
+ ["p\u00F5eb", -1, 15],
+ ["p\u00F5ed", -1, 15],
+ ["p\u00F5eks", -1, 15],
+ ["p\u00F5eksid", 165, 15],
+ ["p\u00F5eksime", 165, 15],
+ ["p\u00F5eksin", 165, 15],
+ ["p\u00F5eksite", 165, 15],
+ ["p\u00F5eme", -1, 15],
+ ["p\u00F5en", -1, 15],
+ ["p\u00F5ete", -1, 15],
+ ["p\u00F5evad", -1, 15],
+ ["saab", -1, 2],
+ ["saad", -1, 2],
+ ["saada", 175, 2],
+ ["saadakse", 176, 2],
+ ["saadi", 175, 2],
+ ["saaks", -1, 2],
+ ["saaksid", 179, 2],
+ ["saaksime", 179, 2],
+ ["saaksin", 179, 2],
+ ["saaksite", 179, 2],
+ ["saama", -1, 2],
+ ["saamata", 184, 2],
+ ["saame", -1, 2],
+ ["saan", -1, 2],
+ ["saate", -1, 2],
+ ["saavad", -1, 2],
+ ["sai", -1, 2],
+ ["said", 190, 2],
+ ["saime", 190, 2],
+ ["sain", 190, 2],
+ ["saite", 190, 2],
+ ["s\u00F5i", -1, 9],
+ ["s\u00F5id", 195, 9],
+ ["s\u00F5ime", 195, 9],
+ ["s\u00F5in", 195, 9],
+ ["s\u00F5ite", 195, 9],
+ ["s\u00F6\u00F6b", -1, 9],
+ ["s\u00F6\u00F6d", -1, 9],
+ ["s\u00F6\u00F6dakse", 201, 9],
+ ["s\u00F6\u00F6di", 201, 9],
+ ["s\u00F6\u00F6ks", -1, 9],
+ ["s\u00F6\u00F6ksid", 204, 9],
+ ["s\u00F6\u00F6ksime", 204, 9],
+ ["s\u00F6\u00F6ksin", 204, 9],
+ ["s\u00F6\u00F6ksite", 204, 9],
+ ["s\u00F6\u00F6ma", -1, 9],
+ ["s\u00F6\u00F6mata", 209, 9],
+ ["s\u00F6\u00F6me", -1, 9],
+ ["s\u00F6\u00F6n", -1, 9],
+ ["s\u00F6\u00F6te", -1, 9],
+ ["s\u00F6\u00F6vad", -1, 9],
+ ["s\u00FC\u00FCa", -1, 9],
+ ["s\u00FC\u00FCakse", 215, 9],
+ ["teeb", -1, 17],
+ ["teed", -1, 17],
+ ["teeks", -1, 17],
+ ["teeksid", 219, 17],
+ ["teeksime", 219, 17],
+ ["teeksin", 219, 17],
+ ["teeksite", 219, 17],
+ ["teeme", -1, 17],
+ ["teen", -1, 17],
+ ["teete", -1, 17],
+ ["teevad", -1, 17],
+ ["tegema", -1, 17],
+ ["tegemata", 228, 17],
+ ["teha", -1, 17],
+ ["tehakse", 230, 17],
+ ["tehti", -1, 17],
+ ["toob", -1, 10],
+ ["tood", -1, 10],
+ ["toodi", 234, 10],
+ ["tooks", -1, 10],
+ ["tooksid", 236, 10],
+ ["tooksime", 236, 10],
+ ["tooksin", 236, 10],
+ ["tooksite", 236, 10],
+ ["tooma", -1, 10],
+ ["toomata", 241, 10],
+ ["toome", -1, 10],
+ ["toon", -1, 10],
+ ["toote", -1, 10],
+ ["toovad", -1, 10],
+ ["tuua", -1, 10],
+ ["tuuakse", 247, 10],
+ ["t\u00F5i", -1, 10],
+ ["t\u00F5id", 249, 10],
+ ["t\u00F5ime", 249, 10],
+ ["t\u00F5in", 249, 10],
+ ["t\u00F5ite", 249, 10],
+ ["viia", -1, 3],
+ ["viiakse", 254, 3],
+ ["viib", -1, 3],
+ ["viid", -1, 3],
+ ["viidi", 257, 3],
+ ["viiks", -1, 3],
+ ["viiksid", 259, 3],
+ ["viiksime", 259, 3],
+ ["viiksin", 259, 3],
+ ["viiksite", 259, 3],
+ ["viima", -1, 3],
+ ["viimata", 264, 3],
+ ["viime", -1, 3],
+ ["viin", -1, 3],
+ ["viisime", -1, 3],
+ ["viisin", -1, 3],
+ ["viisite", -1, 3],
+ ["viite", -1, 3],
+ ["viivad", -1, 3],
+ ["v\u00F5ib", -1, 11],
+ ["v\u00F5id", -1, 11],
+ ["v\u00F5ida", 274, 11],
+ ["v\u00F5idakse", 275, 11],
+ ["v\u00F5idi", 274, 11],
+ ["v\u00F5iks", -1, 11],
+ ["v\u00F5iksid", 278, 11],
+ ["v\u00F5iksime", 278, 11],
+ ["v\u00F5iksin", 278, 11],
+ ["v\u00F5iksite", 278, 11],
+ ["v\u00F5ima", -1, 11],
+ ["v\u00F5imata", 283, 11],
+ ["v\u00F5ime", -1, 11],
+ ["v\u00F5in", -1, 11],
+ ["v\u00F5is", -1, 11],
+ ["v\u00F5ite", -1, 11],
+ ["v\u00F5ivad", -1, 11]
+ ];
+
+ /** @const */ var /** Array */ g_V1 = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 48, 8];
+
+ /** @const */ var /** Array */ g_RV = [17, 65, 16];
+
+ /** @const */ var /** Array */ g_KI = [117, 66, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 16];
+
+ /** @const */ var /** Array */ g_GI = [21, 123, 243, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 48, 8];
+
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ if (!base.go_out_grouping(g_V1, 97, 252))
+ {
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_V1, 97, 252))
+ {
+ return false;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_emphasis() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_0);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor - 4;
+ if (c1 < base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ base.cursor = base.limit - v_2;
+ switch (among_var) {
+ case 1:
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ if (!(base.in_grouping_b(g_GI, 97, 252)))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_3;
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab0: {
+ if (!r_LONGV())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!(base.in_grouping_b(g_KI, 98, 382)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!(base.in_grouping_b(g_V1, 97, 252)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_LONGV() {
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_i_plural() {
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_3) == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ if (!(base.in_grouping_b(g_RV, 97, 117)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_special_noun_endings() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("lase"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("mise"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("lise"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_case_ending() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_5);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.in_grouping_b(g_RV, 97, 117)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ if (!r_LONGV())
+ {
+ return false;
+ }
+ }
+ break;
+ case 2:
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor - 4;
+ if (c1 < base.limit_backward)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ base.cursor = base.limit - v_3;
+ break;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_plural_three_first_cases() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_7);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("iku"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ if (!r_LONGV())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_2;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor - 4;
+ if (c1 < base.limit_backward)
+ {
+ break lab2;
+ }
+ base.cursor = c1;
+ }
+ base.cursor = base.limit - v_4;
+ among_var = base.find_among_b(a_6);
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ }
+ break;
+ case 4:
+ lab3: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ if (!(base.in_grouping_b(g_RV, 97, 117)))
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_5;
+ if (!r_LONGV())
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_nu() {
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_8) == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_undouble_kpt() {
+ var /** number */ among_var;
+ if (!(base.in_grouping_b(g_V1, 97, 252)))
+ {
+ return false;
+ }
+ if (I_p1 > base.cursor)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_9);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_degrees() {
+ var /** number */ among_var;
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_10);
+ if (among_var == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ switch (among_var) {
+ case 1:
+ if (!(base.in_grouping_b(g_RV, 97, 117)))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_substantive() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_special_noun_endings();
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_case_ending();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_plural_three_first_cases();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_degrees();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_i_plural();
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_nu();
+ base.cursor = base.limit - v_6;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb_exceptions() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_11);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.cursor < base.limit)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("joo"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("saa"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("viima"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("keesi"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("l\u00F6\u00F6"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("l\u00F5i"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("loo"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("k\u00E4isi"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("s\u00F6\u00F6"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("too"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("v\u00F5isi"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("j\u00E4\u00E4ma"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("m\u00FC\u00FCsi"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("luge"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("p\u00F5de"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("ladu"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("tegi"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("n\u00E4gi"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!r_verb_exceptions())
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = v_1;
+ }
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_mark_regions();
+ base.cursor = v_2;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_emphasis();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab1: {
+ lab2: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab3: {
+ if (!r_verb())
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = base.limit - v_5;
+ r_substantive();
+ }
+ }
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_undouble_kpt();
+ base.cursor = base.limit - v_6;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/finnish-stemmer.js b/sphinx/search/non-minified-js/finnish-stemmer.js
index c907c4a70b4..07fd78516f6 100644
--- a/sphinx/search/non-minified-js/finnish-stemmer.js
+++ b/sphinx/search/non-minified-js/finnish-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from finnish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-FinnishStemmer = function() {
+var FinnishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["pa", -1, 1],
["sti", -1, 2],
@@ -141,84 +142,34 @@ FinnishStemmer = function() {
function r_mark_regions() {
I_p1 = base.limit;
I_p2 = base.limit;
- golab0: while(true)
+ if (!base.go_out_grouping(g_V1, 97, 246))
{
- var /** number */ v_1 = base.cursor;
- lab1: {
- if (!(base.in_grouping(g_V1, 97, 246)))
- {
- break lab1;
- }
- base.cursor = v_1;
- break golab0;
- }
- base.cursor = v_1;
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_V1, 97, 246))
{
- lab3: {
- if (!(base.out_grouping(g_V1, 97, 246)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab4: while(true)
+ if (!base.go_out_grouping(g_V1, 97, 246))
{
- var /** number */ v_3 = base.cursor;
- lab5: {
- if (!(base.in_grouping(g_V1, 97, 246)))
- {
- break lab5;
- }
- base.cursor = v_3;
- break golab4;
- }
- base.cursor = v_3;
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab6: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_V1, 97, 246))
{
- lab7: {
- if (!(base.out_grouping(g_V1, 97, 246)))
- {
- break lab7;
- }
- break golab6;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p2 = base.cursor;
return true;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -228,17 +179,17 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_0);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!(base.in_grouping_b(g_particle_end, 97, 246)))
@@ -267,21 +218,21 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_4);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("k")))
{
@@ -289,7 +240,7 @@ FinnishStemmer = function() {
}
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
}
if (!base.slice_del())
{
@@ -381,17 +332,17 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
among_var = base.find_among_b(a_6);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!(base.eq_s_b("a")))
@@ -430,11 +381,11 @@ FinnishStemmer = function() {
}
break;
case 7:
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab1: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab2: {
if (!r_LONG())
{
@@ -442,17 +393,17 @@ FinnishStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_5;
+ base.cursor = base.limit - v_4;
if (!(base.eq_s_b("ie")))
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
if (base.cursor <= base.limit_backward)
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
base.cursor--;
@@ -485,21 +436,21 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p2;
base.ket = base.cursor;
among_var = base.find_among_b(a_7);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("po")))
{
@@ -507,7 +458,7 @@ FinnishStemmer = function() {
}
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
}
break;
}
@@ -524,16 +475,16 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
if (base.find_among_b(a_8) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (!base.slice_del())
{
return false;
@@ -548,46 +499,46 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
if (!(base.eq_s_b("t")))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_V1, 97, 246)))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
if (!base.slice_del())
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (base.cursor < I_p2)
{
return false;
}
- var /** number */ v_5 = base.limit_backward;
+ /** @const */ var /** number */ v_3 = base.limit_backward;
base.limit_backward = I_p2;
base.ket = base.cursor;
among_var = base.find_among_b(a_9);
if (among_var == 0)
{
- base.limit_backward = v_5;
+ base.limit_backward = v_3;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_5;
+ base.limit_backward = v_3;
switch (among_var) {
case 1:
{
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("po")))
{
@@ -595,7 +546,7 @@ FinnishStemmer = function() {
}
return false;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_4;
}
break;
}
@@ -612,16 +563,16 @@ FinnishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
if (!r_LONG())
{
break lab0;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
base.ket = base.cursor;
if (base.cursor <= base.limit_backward)
{
@@ -634,8 +585,8 @@ FinnishStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_3;
- var /** number */ v_5 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
if (!(base.in_grouping_b(g_AEI, 97, 228)))
@@ -652,8 +603,8 @@ FinnishStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_5;
- var /** number */ v_6 = base.limit - base.cursor;
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
if (!(base.eq_s_b("j")))
@@ -662,7 +613,7 @@ FinnishStemmer = function() {
}
base.bra = base.cursor;
lab3: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab4: {
if (!(base.eq_s_b("o")))
{
@@ -670,7 +621,7 @@ FinnishStemmer = function() {
}
break lab3;
}
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_6;
if (!(base.eq_s_b("u")))
{
break lab2;
@@ -681,8 +632,8 @@ FinnishStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_6;
- var /** number */ v_8 = base.limit - base.cursor;
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab5: {
base.ket = base.cursor;
if (!(base.eq_s_b("o")))
@@ -699,25 +650,11 @@ FinnishStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_8;
- base.limit_backward = v_2;
- golab6: while(true)
+ base.cursor = base.limit - v_7;
+ base.limit_backward = v_1;
+ if (!base.go_in_grouping_b(g_V1, 97, 246))
{
- var /** number */ v_9 = base.limit - base.cursor;
- lab7: {
- if (!(base.out_grouping_b(g_V1, 97, 246)))
- {
- break lab7;
- }
- base.cursor = base.limit - v_9;
- break golab6;
- }
- base.cursor = base.limit - v_9;
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
base.ket = base.cursor;
if (!(base.in_grouping_b(g_C, 98, 122)))
@@ -742,21 +679,21 @@ FinnishStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
B_ending_removed = false;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_particle_etc();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_possessive();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_case_ending();
base.cursor = base.limit - v_4;
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
r_other_endings();
base.cursor = base.limit - v_5;
lab0: {
@@ -765,18 +702,18 @@ FinnishStemmer = function() {
{
break lab1;
}
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
r_i_plural();
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_6;
break lab0;
}
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
r_t_plural();
- base.cursor = base.limit - v_8;
+ base.cursor = base.limit - v_7;
}
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
r_tidy();
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_8;
base.cursor = base.limit_backward;
return true;
};
diff --git a/sphinx/search/non-minified-js/french-stemmer.js b/sphinx/search/non-minified-js/french-stemmer.js
index c9708312a8f..0e7b0655494 100644
--- a/sphinx/search/non-minified-js/french-stemmer.js
+++ b/sphinx/search/non-minified-js/french-stemmer.js
@@ -1,10 +1,12 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from french.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-FrenchStemmer = function() {
+var FrenchStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["col", -1, -1],
+ ["ni", -1, 1],
["par", -1, -1],
["tap", -1, -1]
];
@@ -42,7 +44,7 @@ FrenchStemmer = function() {
["logie", -1, 3],
["able", -1, 1],
["isme", -1, 1],
- ["euse", -1, 11],
+ ["euse", -1, 12],
["iste", -1, 1],
["ive", -1, 8],
["if", -1, 8],
@@ -57,7 +59,7 @@ FrenchStemmer = function() {
["logies", -1, 3],
["ables", -1, 1],
["ismes", -1, 1],
- ["euses", -1, 11],
+ ["euses", -1, 12],
["istes", -1, 1],
["ives", -1, 8],
["ifs", -1, 8],
@@ -65,18 +67,19 @@ FrenchStemmer = function() {
["ations", -1, 2],
["utions", -1, 4],
["ateurs", -1, 2],
- ["ments", -1, 15],
+ ["ments", -1, 16],
["ements", 30, 6],
- ["issements", 31, 12],
+ ["issements", 31, 13],
["it\u00E9s", -1, 7],
- ["ment", -1, 15],
+ ["ment", -1, 16],
["ement", 34, 6],
- ["issement", 35, 12],
- ["amment", 34, 13],
- ["emment", 34, 14],
+ ["issement", 35, 13],
+ ["amment", 34, 14],
+ ["emment", 34, 15],
["aux", -1, 10],
["eaux", 39, 9],
["eux", -1, 1],
+ ["oux", -1, 11],
["it\u00E9", -1, 7]
];
@@ -119,47 +122,56 @@ FrenchStemmer = function() {
];
/** @const */ var a_6 = [
+ ["al", -1, 1],
+ ["\u00E9pl", -1, -1],
+ ["auv", -1, -1]
+ ];
+
+ /** @const */ var a_7 = [
["a", -1, 3],
["era", 0, 2],
+ ["aise", -1, 4],
["asse", -1, 3],
["ante", -1, 3],
["\u00E9e", -1, 2],
["ai", -1, 3],
- ["erai", 5, 2],
+ ["erai", 6, 2],
["er", -1, 2],
["as", -1, 3],
- ["eras", 8, 2],
+ ["eras", 9, 2],
["\u00E2mes", -1, 3],
+ ["aises", -1, 4],
["asses", -1, 3],
["antes", -1, 3],
["\u00E2tes", -1, 3],
["\u00E9es", -1, 2],
- ["ais", -1, 3],
- ["erais", 15, 2],
+ ["ais", -1, 4],
+ ["eais", 17, 2],
+ ["erais", 17, 2],
["ions", -1, 1],
- ["erions", 17, 2],
- ["assions", 17, 3],
+ ["erions", 20, 2],
+ ["assions", 20, 3],
["erons", -1, 2],
["ants", -1, 3],
["\u00E9s", -1, 2],
["ait", -1, 3],
- ["erait", 23, 2],
+ ["erait", 26, 2],
["ant", -1, 3],
["aIent", -1, 3],
- ["eraIent", 26, 2],
+ ["eraIent", 29, 2],
["\u00E8rent", -1, 2],
["assent", -1, 3],
["eront", -1, 2],
["\u00E2t", -1, 3],
["ez", -1, 2],
- ["iez", 32, 2],
- ["eriez", 33, 2],
- ["assiez", 33, 3],
- ["erez", 32, 2],
+ ["iez", 35, 2],
+ ["eriez", 36, 2],
+ ["assiez", 36, 3],
+ ["erez", 35, 2],
["\u00E9", -1, 2]
];
- /** @const */ var a_7 = [
+ /** @const */ var a_8 = [
["e", -1, 3],
["I\u00E8re", 0, 2],
["i\u00E8re", 0, 2],
@@ -168,7 +180,7 @@ FrenchStemmer = function() {
["ier", -1, 2]
];
- /** @const */ var a_8 = [
+ /** @const */ var a_9 = [
["ell", -1, -1],
["eill", -1, -1],
["enn", -1, -1],
@@ -178,6 +190,10 @@ FrenchStemmer = function() {
/** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 130, 103, 8, 5];
+ /** @const */ var /** Array */ g_oux_ending = [65, 85];
+
+ /** @const */ var /** Array */ g_elision_char = [131, 14, 3];
+
/** @const */ var /** Array */ g_keep_with_s = [1, 65, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
var /** number */ I_p2 = 0;
@@ -185,18 +201,55 @@ FrenchStemmer = function() {
var /** number */ I_pV = 0;
+ /** @return {boolean} */
+ function r_elisions() {
+ base.bra = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab1: {
+ if (!(base.in_grouping(g_elision_char, 99, 116)))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = v_1;
+ if (!(base.eq_s("qu")))
+ {
+ return false;
+ }
+ }
+ if (!(base.eq_s("'")))
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ lab2: {
+ if (base.cursor < base.limit)
+ {
+ break lab2;
+ }
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
/** @return {boolean} */
function r_prelude() {
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
golab1: while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.in_grouping(g_v, 97, 251)))
{
@@ -204,7 +257,7 @@ FrenchStemmer = function() {
}
base.bra = base.cursor;
lab5: {
- var /** number */ v_4 = base.cursor;
+ /** @const */ var /** number */ v_4 = base.cursor;
lab6: {
if (!(base.eq_s("u")))
{
@@ -333,13 +386,14 @@ FrenchStemmer = function() {
/** @return {boolean} */
function r_mark_regions() {
+ var /** number */ among_var;
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 251)))
{
@@ -358,10 +412,19 @@ FrenchStemmer = function() {
}
base.cursor = v_2;
lab3: {
- if (base.find_among(a_0) == 0)
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
{
break lab3;
}
+ switch (among_var) {
+ case 1:
+ if (!(base.in_grouping(g_v, 97, 251)))
+ {
+ break lab3;
+ }
+ break;
+ }
break lab1;
}
base.cursor = v_2;
@@ -370,91 +433,41 @@ FrenchStemmer = function() {
break lab0;
}
base.cursor++;
- golab4: while(true)
+ if (!base.go_out_grouping(g_v, 97, 251))
{
- lab5: {
- if (!(base.in_grouping(g_v, 97, 251)))
- {
- break lab5;
- }
- break golab4;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
+ base.cursor++;
}
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_4 = base.cursor;
- lab6: {
- golab7: while(true)
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab4: {
+ if (!base.go_out_grouping(g_v, 97, 251))
{
- lab8: {
- if (!(base.in_grouping(g_v, 97, 251)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab6;
- }
- base.cursor++;
+ break lab4;
}
- golab9: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 251))
{
- lab10: {
- if (!(base.out_grouping(g_v, 97, 251)))
- {
- break lab10;
- }
- break golab9;
- }
- if (base.cursor >= base.limit)
- {
- break lab6;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 251))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 251)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab6;
- }
- base.cursor++;
+ break lab4;
}
- golab13: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 251))
{
- lab14: {
- if (!(base.out_grouping(g_v, 97, 251)))
- {
- break lab14;
- }
- break golab13;
- }
- if (base.cursor >= base.limit)
- {
- break lab6;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_4;
+ base.cursor = v_3;
return true;
};
@@ -463,14 +476,10 @@ FrenchStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_1);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -527,29 +536,17 @@ FrenchStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -582,7 +579,7 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("ic")))
@@ -592,7 +589,7 @@ FrenchStemmer = function() {
}
base.bra = base.cursor;
lab1: {
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
if (!r_R2())
{
@@ -651,7 +648,7 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab3: {
base.ket = base.cursor;
among_var = base.find_among_b(a_2);
@@ -691,7 +688,7 @@ FrenchStemmer = function() {
break;
case 2:
lab4: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab5: {
if (!r_R2())
{
@@ -749,7 +746,7 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab6: {
base.ket = base.cursor;
among_var = base.find_among_b(a_3);
@@ -762,7 +759,7 @@ FrenchStemmer = function() {
switch (among_var) {
case 1:
lab7: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab8: {
if (!r_R2())
{
@@ -783,7 +780,7 @@ FrenchStemmer = function() {
break;
case 2:
lab9: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab10: {
if (!r_R2())
{
@@ -825,7 +822,7 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab11: {
base.ket = base.cursor;
if (!(base.eq_s_b("at")))
@@ -851,7 +848,7 @@ FrenchStemmer = function() {
}
base.bra = base.cursor;
lab12: {
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab13: {
if (!r_R2())
{
@@ -888,8 +885,18 @@ FrenchStemmer = function() {
}
break;
case 11:
+ if (!(base.in_grouping_b(g_oux_ending, 98, 112)))
+ {
+ return false;
+ }
+ if (!base.slice_from("ou"))
+ {
+ return false;
+ }
+ break;
+ case 12:
lab14: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
lab15: {
if (!r_R2())
{
@@ -912,7 +919,7 @@ FrenchStemmer = function() {
}
}
break;
- case 12:
+ case 13:
if (!r_R1())
{
return false;
@@ -926,7 +933,7 @@ FrenchStemmer = function() {
return false;
}
break;
- case 13:
+ case 14:
if (!r_RV())
{
return false;
@@ -936,7 +943,7 @@ FrenchStemmer = function() {
return false;
}
return false;
- case 14:
+ case 15:
if (!r_RV())
{
return false;
@@ -946,8 +953,8 @@ FrenchStemmer = function() {
return false;
}
return false;
- case 15:
- var /** number */ v_11 = base.limit - base.cursor;
+ case 16:
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_v, 97, 251)))
{
return false;
@@ -972,37 +979,37 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
if (base.find_among_b(a_5) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("H")))
{
break lab0;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
}
if (!(base.out_grouping_b(g_v, 97, 251)))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
if (!base.slice_del())
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
@@ -1013,21 +1020,21 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
- among_var = base.find_among_b(a_6);
+ among_var = base.find_among_b(a_7);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!r_R2())
{
- base.limit_backward = v_2;
return false;
}
if (!base.slice_del())
@@ -1042,34 +1049,64 @@ FrenchStemmer = function() {
}
break;
case 3:
- if (!base.slice_del())
- {
- return false;
- }
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
- base.ket = base.cursor;
if (!(base.eq_s_b("e")))
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
- base.bra = base.cursor;
- if (!base.slice_del())
+ if (!r_RV())
{
+ base.cursor = base.limit - v_2;
+ break lab0;
+ }
+ base.bra = base.cursor;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab1: {
+ among_var = base.find_among_b(a_6);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ switch (among_var) {
+ case 1:
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab1;
+ }
+ base.cursor--;
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ break;
+ }
return false;
}
+ base.cursor = base.limit - v_3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
}
break;
}
- base.limit_backward = v_2;
return true;
};
/** @return {boolean} */
function r_residual_suffix() {
var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("s")))
@@ -1078,9 +1115,9 @@ FrenchStemmer = function() {
break lab0;
}
base.bra = base.cursor;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
if (!(base.eq_s_b("Hi")))
{
@@ -1105,13 +1142,13 @@ FrenchStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit_backward;
+ /** @const */ var /** number */ v_4 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
- among_var = base.find_among_b(a_7);
+ among_var = base.find_among_b(a_8);
if (among_var == 0)
{
- base.limit_backward = v_5;
+ base.limit_backward = v_4;
return false;
}
base.bra = base.cursor;
@@ -1119,11 +1156,11 @@ FrenchStemmer = function() {
case 1:
if (!r_R2())
{
- base.limit_backward = v_5;
+ base.limit_backward = v_4;
return false;
}
lab3: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab4: {
if (!(base.eq_s_b("s")))
{
@@ -1131,10 +1168,10 @@ FrenchStemmer = function() {
}
break lab3;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_5;
if (!(base.eq_s_b("t")))
{
- base.limit_backward = v_5;
+ base.limit_backward = v_4;
return false;
}
}
@@ -1156,14 +1193,14 @@ FrenchStemmer = function() {
}
break;
}
- base.limit_backward = v_5;
+ base.limit_backward = v_4;
return true;
};
/** @return {boolean} */
function r_un_double() {
- var /** number */ v_1 = base.limit - base.cursor;
- if (base.find_among_b(a_8) == 0)
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (base.find_among_b(a_9) == 0)
{
return false;
}
@@ -1205,7 +1242,7 @@ FrenchStemmer = function() {
}
base.ket = base.cursor;
lab1: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
if (!(base.eq_s_b("\u00E9")))
{
@@ -1213,7 +1250,7 @@ FrenchStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
if (!(base.eq_s_b("\u00E8")))
{
return false;
@@ -1228,19 +1265,22 @@ FrenchStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
- r_prelude();
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_elisions();
base.cursor = v_1;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_prelude();
+ base.cursor = v_2;
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab2: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab3: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab4: {
if (!r_standard_suffix())
{
@@ -1263,11 +1303,11 @@ FrenchStemmer = function() {
}
}
base.cursor = base.limit - v_5;
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab6: {
base.ket = base.cursor;
lab7: {
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab8: {
if (!(base.eq_s_b("Y")))
{
@@ -1303,14 +1343,14 @@ FrenchStemmer = function() {
}
}
base.cursor = base.limit - v_3;
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
r_un_double();
base.cursor = base.limit - v_9;
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
r_un_accent();
base.cursor = base.limit - v_10;
base.cursor = base.limit_backward;
- var /** number */ v_11 = base.cursor;
+ /** @const */ var /** number */ v_11 = base.cursor;
r_postlude();
base.cursor = v_11;
return true;
diff --git a/sphinx/search/non-minified-js/german-stemmer.js b/sphinx/search/non-minified-js/german-stemmer.js
index f5ff81bc9d7..007a8668575 100644
--- a/sphinx/search/non-minified-js/german-stemmer.js
+++ b/sphinx/search/non-minified-js/german-stemmer.js
@@ -1,9 +1,19 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from german.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-GermanStemmer = function() {
+var GermanStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
+ ["", -1, 5],
+ ["ae", 0, 2],
+ ["oe", 0, 3],
+ ["qu", 0, -1],
+ ["ue", 0, 4],
+ ["\u00DF", 0, 1]
+ ];
+
+ /** @const */ var a_1 = [
["", -1, 5],
["U", 0, 2],
["Y", 0, 1],
@@ -12,29 +22,42 @@ GermanStemmer = function() {
["\u00FC", 0, 2]
];
- /** @const */ var a_1 = [
- ["e", -1, 2],
+ /** @const */ var a_2 = [
+ ["e", -1, 3],
["em", -1, 1],
- ["en", -1, 2],
- ["ern", -1, 1],
- ["er", -1, 1],
- ["s", -1, 3],
- ["es", 5, 2]
+ ["en", -1, 3],
+ ["erinnen", 2, 2],
+ ["erin", -1, 2],
+ ["ln", -1, 5],
+ ["ern", -1, 2],
+ ["er", -1, 2],
+ ["s", -1, 4],
+ ["es", 8, 3],
+ ["lns", 8, 5]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_3 = [
+ ["tick", -1, -1],
+ ["plan", -1, -1],
+ ["geordn", -1, -1],
+ ["intern", -1, -1],
+ ["tr", -1, -1]
+ ];
+
+ /** @const */ var a_4 = [
["en", -1, 1],
["er", -1, 1],
+ ["et", -1, 3],
["st", -1, 2],
- ["est", 2, 1]
+ ["est", 3, 1]
];
- /** @const */ var a_3 = [
+ /** @const */ var a_5 = [
["ig", -1, 1],
["lich", -1, 1]
];
- /** @const */ var a_4 = [
+ /** @const */ var a_6 = [
["end", -1, 1],
["ig", -1, 2],
["ung", -1, 1],
@@ -47,6 +70,8 @@ GermanStemmer = function() {
/** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 0, 32, 8];
+ /** @const */ var /** Array */ g_et_ending = [1, 128, 198, 227, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128];
+
/** @const */ var /** Array */ g_s_ending = [117, 30, 5];
/** @const */ var /** Array */ g_st_ending = [117, 30, 4];
@@ -58,98 +83,113 @@ GermanStemmer = function() {
/** @return {boolean} */
function r_prelude() {
- var /** number */ v_1 = base.cursor;
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab0: {
- lab1: {
- var /** number */ v_3 = base.cursor;
- lab2: {
- base.bra = base.cursor;
- if (!(base.eq_s("\u00DF")))
- {
- break lab2;
- }
- base.ket = base.cursor;
- if (!base.slice_from("ss"))
- {
- return false;
- }
- break lab1;
- }
- base.cursor = v_3;
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
- }
- continue;
- }
- base.cursor = v_2;
- break;
- }
- base.cursor = v_1;
- while(true)
- {
- var /** number */ v_4 = base.cursor;
- lab3: {
- golab4: while(true)
+ golab1: while(true)
{
- var /** number */ v_5 = base.cursor;
- lab5: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
if (!(base.in_grouping(g_v, 97, 252)))
{
- break lab5;
+ break lab2;
}
base.bra = base.cursor;
- lab6: {
- var /** number */ v_6 = base.cursor;
- lab7: {
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
if (!(base.eq_s("u")))
{
- break lab7;
+ break lab4;
}
base.ket = base.cursor;
if (!(base.in_grouping(g_v, 97, 252)))
{
- break lab7;
+ break lab4;
}
if (!base.slice_from("U"))
{
return false;
}
- break lab6;
+ break lab3;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.eq_s("y")))
{
- break lab5;
+ break lab2;
}
base.ket = base.cursor;
if (!(base.in_grouping(g_v, 97, 252)))
{
- break lab5;
+ break lab2;
}
if (!base.slice_from("Y"))
{
return false;
}
}
- base.cursor = v_5;
- break golab4;
+ base.cursor = v_3;
+ break golab1;
}
- base.cursor = v_5;
+ base.cursor = v_3;
if (base.cursor >= base.limit)
{
- break lab3;
+ break lab0;
}
base.cursor++;
}
continue;
}
- base.cursor = v_4;
+ base.cursor = v_2;
+ break;
+ }
+ base.cursor = v_1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab5: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("ss"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u00E4"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u00F6"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u00FC"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (base.cursor >= base.limit)
+ {
+ break lab5;
+ }
+ base.cursor++;
+ break;
+ }
+ continue;
+ }
+ base.cursor = v_5;
break;
}
return true;
@@ -159,9 +199,9 @@ GermanStemmer = function() {
function r_mark_regions() {
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- var /** number */ c1 = base.cursor + 3;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
if (c1 > base.limit)
{
return false;
@@ -170,74 +210,34 @@ GermanStemmer = function() {
}
I_x = base.cursor;
base.cursor = v_1;
- golab0: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab1: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab1;
- }
- break golab0;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab3: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < I_x))
+ lab0: {
+ if (I_p1 >= I_x)
{
- break lab4;
+ break lab0;
}
I_p1 = I_x;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab7: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p2 = base.cursor;
return true;
};
@@ -247,14 +247,10 @@ GermanStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
- among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
+ among_var = base.find_among(a_1);
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -299,29 +295,21 @@ GermanStemmer = function() {
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
function r_standard_suffix() {
var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
- among_var = base.find_among_b(a_1);
+ among_var = base.find_among_b(a_2);
if (among_var == 0)
{
break lab0;
@@ -333,6 +321,17 @@ GermanStemmer = function() {
}
switch (among_var) {
case 1:
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("syst")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ }
if (!base.slice_del())
{
return false;
@@ -343,19 +342,25 @@ GermanStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
- lab1: {
+ break;
+ case 3:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
base.ket = base.cursor;
if (!(base.eq_s_b("s")))
{
- base.cursor = base.limit - v_2;
- break lab1;
+ base.cursor = base.limit - v_3;
+ break lab2;
}
base.bra = base.cursor;
if (!(base.eq_s_b("nis")))
{
- base.cursor = base.limit - v_2;
- break lab1;
+ base.cursor = base.limit - v_3;
+ break lab2;
}
if (!base.slice_del())
{
@@ -363,7 +368,7 @@ GermanStemmer = function() {
}
}
break;
- case 3:
+ case 4:
if (!(base.in_grouping_b(g_s_ending, 98, 116)))
{
break lab0;
@@ -373,21 +378,27 @@ GermanStemmer = function() {
return false;
}
break;
+ case 5:
+ if (!base.slice_from("l"))
+ {
+ return false;
+ }
+ break;
}
}
base.cursor = base.limit - v_1;
- var /** number */ v_3 = base.limit - base.cursor;
- lab2: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
base.ket = base.cursor;
- among_var = base.find_among_b(a_2);
+ among_var = base.find_among_b(a_4);
if (among_var == 0)
{
- break lab2;
+ break lab3;
}
base.bra = base.cursor;
if (!r_R1())
{
- break lab2;
+ break lab3;
}
switch (among_var) {
case 1:
@@ -399,13 +410,13 @@ GermanStemmer = function() {
case 2:
if (!(base.in_grouping_b(g_st_ending, 98, 116)))
{
- break lab2;
+ break lab3;
}
{
- var /** number */ c1 = base.cursor - 3;
+ /** @const */ var /** number */ c1 = base.cursor - 3;
if (c1 < base.limit_backward)
{
- break lab2;
+ break lab3;
}
base.cursor = c1;
}
@@ -414,21 +425,44 @@ GermanStemmer = function() {
return false;
}
break;
+ case 3:
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ if (!(base.in_grouping_b(g_et_ending, 85, 228)))
+ {
+ break lab3;
+ }
+ base.cursor = base.limit - v_5;
+ {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab4: {
+ if (base.find_among_b(a_3) == 0)
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = base.limit - v_6;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
}
}
- base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
- lab3: {
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab5: {
base.ket = base.cursor;
- among_var = base.find_among_b(a_4);
+ among_var = base.find_among_b(a_6);
if (among_var == 0)
{
- break lab3;
+ break lab5;
}
base.bra = base.cursor;
if (!r_R2())
{
- break lab3;
+ break lab5;
}
switch (among_var) {
case 1:
@@ -436,31 +470,31 @@ GermanStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit - base.cursor;
- lab4: {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab6: {
base.ket = base.cursor;
if (!(base.eq_s_b("ig")))
{
- base.cursor = base.limit - v_5;
- break lab4;
+ base.cursor = base.limit - v_8;
+ break lab6;
}
base.bra = base.cursor;
{
- var /** number */ v_6 = base.limit - base.cursor;
- lab5: {
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ lab7: {
if (!(base.eq_s_b("e")))
{
- break lab5;
+ break lab7;
}
- base.cursor = base.limit - v_5;
- break lab4;
+ base.cursor = base.limit - v_8;
+ break lab6;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_9;
}
if (!r_R2())
{
- base.cursor = base.limit - v_5;
- break lab4;
+ base.cursor = base.limit - v_8;
+ break lab6;
}
if (!base.slice_del())
{
@@ -470,15 +504,15 @@ GermanStemmer = function() {
break;
case 2:
{
- var /** number */ v_7 = base.limit - base.cursor;
- lab6: {
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab8: {
if (!(base.eq_s_b("e")))
{
- break lab6;
+ break lab8;
}
- break lab3;
+ break lab5;
}
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_10;
}
if (!base.slice_del())
{
@@ -490,30 +524,30 @@ GermanStemmer = function() {
{
return false;
}
- var /** number */ v_8 = base.limit - base.cursor;
- lab7: {
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ lab9: {
base.ket = base.cursor;
- lab8: {
- var /** number */ v_9 = base.limit - base.cursor;
- lab9: {
+ lab10: {
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
+ lab11: {
if (!(base.eq_s_b("er")))
{
- break lab9;
+ break lab11;
}
- break lab8;
+ break lab10;
}
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_12;
if (!(base.eq_s_b("en")))
{
- base.cursor = base.limit - v_8;
- break lab7;
+ base.cursor = base.limit - v_11;
+ break lab9;
}
}
base.bra = base.cursor;
if (!r_R1())
{
- base.cursor = base.limit - v_8;
- break lab7;
+ base.cursor = base.limit - v_11;
+ break lab9;
}
if (!base.slice_del())
{
@@ -526,19 +560,19 @@ GermanStemmer = function() {
{
return false;
}
- var /** number */ v_10 = base.limit - base.cursor;
- lab10: {
+ /** @const */ var /** number */ v_13 = base.limit - base.cursor;
+ lab12: {
base.ket = base.cursor;
- if (base.find_among_b(a_3) == 0)
+ if (base.find_among_b(a_5) == 0)
{
- base.cursor = base.limit - v_10;
- break lab10;
+ base.cursor = base.limit - v_13;
+ break lab12;
}
base.bra = base.cursor;
if (!r_R2())
{
- base.cursor = base.limit - v_10;
- break lab10;
+ base.cursor = base.limit - v_13;
+ break lab12;
}
if (!base.slice_del())
{
@@ -548,23 +582,23 @@ GermanStemmer = function() {
break;
}
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_7;
return true;
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_prelude();
base.cursor = v_1;
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
r_mark_regions();
base.cursor = v_2;
base.limit_backward = base.cursor; base.cursor = base.limit;
r_standard_suffix();
base.cursor = base.limit_backward;
- var /** number */ v_4 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
r_postlude();
- base.cursor = v_4;
+ base.cursor = v_3;
return true;
};
diff --git a/sphinx/search/non-minified-js/greek-stemmer.js b/sphinx/search/non-minified-js/greek-stemmer.js
new file mode 100644
index 00000000000..06ad1692fb1
--- /dev/null
+++ b/sphinx/search/non-minified-js/greek-stemmer.js
@@ -0,0 +1,2873 @@
+// Generated from greek.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var GreekStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["", -1, 25],
+ ["\u0386", 0, 1],
+ ["\u0388", 0, 5],
+ ["\u0389", 0, 7],
+ ["\u038A", 0, 9],
+ ["\u038C", 0, 15],
+ ["\u038E", 0, 20],
+ ["\u038F", 0, 24],
+ ["\u0390", 0, 7],
+ ["\u0391", 0, 1],
+ ["\u0392", 0, 2],
+ ["\u0393", 0, 3],
+ ["\u0394", 0, 4],
+ ["\u0395", 0, 5],
+ ["\u0396", 0, 6],
+ ["\u0397", 0, 7],
+ ["\u0398", 0, 8],
+ ["\u0399", 0, 9],
+ ["\u039A", 0, 10],
+ ["\u039B", 0, 11],
+ ["\u039C", 0, 12],
+ ["\u039D", 0, 13],
+ ["\u039E", 0, 14],
+ ["\u039F", 0, 15],
+ ["\u03A0", 0, 16],
+ ["\u03A1", 0, 17],
+ ["\u03A3", 0, 18],
+ ["\u03A4", 0, 19],
+ ["\u03A5", 0, 20],
+ ["\u03A6", 0, 21],
+ ["\u03A7", 0, 22],
+ ["\u03A8", 0, 23],
+ ["\u03A9", 0, 24],
+ ["\u03AA", 0, 9],
+ ["\u03AB", 0, 20],
+ ["\u03AC", 0, 1],
+ ["\u03AD", 0, 5],
+ ["\u03AE", 0, 7],
+ ["\u03AF", 0, 9],
+ ["\u03B0", 0, 20],
+ ["\u03C2", 0, 18],
+ ["\u03CA", 0, 7],
+ ["\u03CB", 0, 20],
+ ["\u03CC", 0, 15],
+ ["\u03CD", 0, 20],
+ ["\u03CE", 0, 24]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u03C3\u03BA\u03B1\u03B3\u03B9\u03B1", -1, 2],
+ ["\u03C6\u03B1\u03B3\u03B9\u03B1", -1, 1],
+ ["\u03BF\u03BB\u03BF\u03B3\u03B9\u03B1", -1, 3],
+ ["\u03C3\u03BF\u03B3\u03B9\u03B1", -1, 4],
+ ["\u03C4\u03B1\u03C4\u03BF\u03B3\u03B9\u03B1", -1, 5],
+ ["\u03BA\u03C1\u03B5\u03B1\u03C4\u03B1", -1, 6],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03B1", -1, 7],
+ ["\u03C4\u03B5\u03C1\u03B1\u03C4\u03B1", -1, 8],
+ ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C4\u03B1", -1, 11],
+ ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C4\u03B1", -1, 10],
+ ["\u03C6\u03C9\u03C4\u03B1", -1, 9],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03B7", -1, 7],
+ ["\u03C3\u03BA\u03B1\u03B3\u03B9\u03C9\u03BD", -1, 2],
+ ["\u03C6\u03B1\u03B3\u03B9\u03C9\u03BD", -1, 1],
+ ["\u03BF\u03BB\u03BF\u03B3\u03B9\u03C9\u03BD", -1, 3],
+ ["\u03C3\u03BF\u03B3\u03B9\u03C9\u03BD", -1, 4],
+ ["\u03C4\u03B1\u03C4\u03BF\u03B3\u03B9\u03C9\u03BD", -1, 5],
+ ["\u03BA\u03C1\u03B5\u03B1\u03C4\u03C9\u03BD", -1, 6],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03C9\u03BD", -1, 7],
+ ["\u03C4\u03B5\u03C1\u03B1\u03C4\u03C9\u03BD", -1, 8],
+ ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C4\u03C9\u03BD", -1, 11],
+ ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C4\u03C9\u03BD", -1, 10],
+ ["\u03C6\u03C9\u03C4\u03C9\u03BD", -1, 9],
+ ["\u03BA\u03C1\u03B5\u03B1\u03C3", -1, 6],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C3", -1, 7],
+ ["\u03C4\u03B5\u03C1\u03B1\u03C3", -1, 8],
+ ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C3", -1, 11],
+ ["\u03BA\u03C1\u03B5\u03B1\u03C4\u03BF\u03C3", -1, 6],
+ ["\u03C0\u03B5\u03C1\u03B1\u03C4\u03BF\u03C3", -1, 7],
+ ["\u03C4\u03B5\u03C1\u03B1\u03C4\u03BF\u03C3", -1, 8],
+ ["\u03B3\u03B5\u03B3\u03BF\u03BD\u03BF\u03C4\u03BF\u03C3", -1, 11],
+ ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C4\u03BF\u03C3", -1, 10],
+ ["\u03C6\u03C9\u03C4\u03BF\u03C3", -1, 9],
+ ["\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4\u03C9\u03C3", -1, 10],
+ ["\u03C6\u03C9\u03C3", -1, 9],
+ ["\u03C3\u03BA\u03B1\u03B3\u03B9\u03BF\u03C5", -1, 2],
+ ["\u03C6\u03B1\u03B3\u03B9\u03BF\u03C5", -1, 1],
+ ["\u03BF\u03BB\u03BF\u03B3\u03B9\u03BF\u03C5", -1, 3],
+ ["\u03C3\u03BF\u03B3\u03B9\u03BF\u03C5", -1, 4],
+ ["\u03C4\u03B1\u03C4\u03BF\u03B3\u03B9\u03BF\u03C5", -1, 5]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u03C0\u03B1", -1, 1],
+ ["\u03BE\u03B1\u03BD\u03B1\u03C0\u03B1", 0, 1],
+ ["\u03B5\u03C0\u03B1", 0, 1],
+ ["\u03C0\u03B5\u03C1\u03B9\u03C0\u03B1", 0, 1],
+ ["\u03B1\u03BD\u03B1\u03BC\u03C0\u03B1", 0, 1],
+ ["\u03B5\u03BC\u03C0\u03B1", 0, 1],
+ ["\u03B2", -1, 2],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B2\u03B1\u03B8\u03C5\u03C1\u03B9", -1, 2],
+ ["\u03B2\u03B1\u03C1\u03BA", -1, 2],
+ ["\u03BC\u03B1\u03C1\u03BA", -1, 2],
+ ["\u03BB", -1, 2],
+ ["\u03BC", -1, 2],
+ ["\u03BA\u03BF\u03C1\u03BD", -1, 2],
+ ["\u03B1\u03B8\u03C1\u03BF", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", 14, 1],
+ ["\u03C0", -1, 2],
+ ["\u03B9\u03BC\u03C0", 16, 2],
+ ["\u03C1", -1, 2],
+ ["\u03BC\u03B1\u03C1", 18, 2],
+ ["\u03B1\u03BC\u03C0\u03B1\u03C1", 18, 2],
+ ["\u03B3\u03BA\u03C1", 18, 2],
+ ["\u03B2\u03BF\u03BB\u03B2\u03BF\u03C1", 18, 2],
+ ["\u03B3\u03BB\u03C5\u03BA\u03BF\u03C1", 18, 2],
+ ["\u03C0\u03B9\u03C0\u03B5\u03C1\u03BF\u03C1", 18, 2],
+ ["\u03C0\u03C1", 18, 2],
+ ["\u03BC\u03C0\u03C1", 25, 2],
+ ["\u03B1\u03C1\u03C1", 18, 2],
+ ["\u03B3\u03BB\u03C5\u03BA\u03C5\u03C1", 18, 2],
+ ["\u03C0\u03BF\u03BB\u03C5\u03C1", 18, 2],
+ ["\u03BB\u03BF\u03C5", -1, 2]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u03B9\u03B6\u03B1", -1, 1],
+ ["\u03B9\u03B6\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03BF\u03C5\u03BC\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03BF\u03C5\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B1\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B5\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03B6\u03B5\u03B9", -1, 1],
+ ["\u03B9\u03B6\u03B1\u03BD", -1, 1],
+ ["\u03B9\u03B6\u03BF\u03C5\u03BD", -1, 1],
+ ["\u03B9\u03B6\u03B5\u03C3", -1, 1],
+ ["\u03B9\u03B6\u03B5\u03B9\u03C3", -1, 1],
+ ["\u03B9\u03B6\u03C9", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["\u03B2\u03B9", -1, 1],
+ ["\u03BB\u03B9", -1, 1],
+ ["\u03B1\u03BB", -1, 1],
+ ["\u03B5\u03BD", -1, 1],
+ ["\u03C3", -1, 1],
+ ["\u03C7", -1, 1],
+ ["\u03C5\u03C8", -1, 1],
+ ["\u03B6\u03C9", -1, 1]
+ ];
+
+ /** @const */ var a_5 = [
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B5", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03C4\u03B5", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B1\u03BD", -1, 1],
+ ["\u03C9\u03B8\u03B7\u03BA\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["\u03BE\u03B1\u03BD\u03B1\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03C0\u03B1", -1, 1],
+ ["\u03C0\u03B5\u03C1\u03B9\u03C0\u03B1", -1, 1],
+ ["\u03B1\u03BD\u03B1\u03BC\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03BC\u03C0\u03B1", -1, 1],
+ ["\u03C7\u03B1\u03C1\u03C4\u03BF\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03BE\u03B1\u03C1\u03C7\u03B1", -1, 1],
+ ["\u03B3\u03B5", -1, 2],
+ ["\u03B3\u03BA\u03B5", -1, 2],
+ ["\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B5\u03BA\u03BB\u03B5", 9, 1],
+ ["\u03B1\u03C0\u03B5\u03BA\u03BB\u03B5", 10, 1],
+ ["\u03B1\u03C0\u03BF\u03BA\u03BB\u03B5", 9, 1],
+ ["\u03B5\u03C3\u03C9\u03BA\u03BB\u03B5", 9, 1],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03C0\u03B5", -1, 1],
+ ["\u03B5\u03C0\u03B5", 15, 1],
+ ["\u03BC\u03B5\u03C4\u03B5\u03C0\u03B5", 16, 1],
+ ["\u03B5\u03C3\u03B5", -1, 1],
+ ["\u03B3\u03BA", -1, 2],
+ ["\u03BC", -1, 2],
+ ["\u03C0\u03BF\u03C5\u03BA\u03B1\u03BC", 20, 2],
+ ["\u03BA\u03BF\u03BC", 20, 2],
+ ["\u03B1\u03BD", -1, 2],
+ ["\u03BF\u03BB\u03BF", -1, 2],
+ ["\u03B1\u03B8\u03C1\u03BF", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", 25, 1],
+ ["\u03C0", -1, 2],
+ ["\u03BB\u03B1\u03C1", -1, 2],
+ ["\u03B4\u03B7\u03BC\u03BF\u03BA\u03C1\u03B1\u03C4", -1, 2],
+ ["\u03B1\u03C6", -1, 2],
+ ["\u03B3\u03B9\u03B3\u03B1\u03BD\u03C4\u03BF\u03B1\u03C6", 30, 2]
+ ];
+
+ /** @const */ var a_7 = [
+ ["\u03B9\u03C3\u03B1", -1, 1],
+ ["\u03B9\u03C3\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B1\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B1\u03BD", -1, 1],
+ ["\u03B9\u03C3\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_8 = [
+ ["\u03BE\u03B1\u03BD\u03B1\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03C0\u03B1", -1, 1],
+ ["\u03C0\u03B5\u03C1\u03B9\u03C0\u03B1", -1, 1],
+ ["\u03B1\u03BD\u03B1\u03BC\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03BC\u03C0\u03B1", -1, 1],
+ ["\u03C7\u03B1\u03C1\u03C4\u03BF\u03C0\u03B1", -1, 1],
+ ["\u03B5\u03BE\u03B1\u03C1\u03C7\u03B1", -1, 1],
+ ["\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B5\u03BA\u03BB\u03B5", 7, 1],
+ ["\u03B1\u03C0\u03B5\u03BA\u03BB\u03B5", 8, 1],
+ ["\u03B1\u03C0\u03BF\u03BA\u03BB\u03B5", 7, 1],
+ ["\u03B5\u03C3\u03C9\u03BA\u03BB\u03B5", 7, 1],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03C0\u03B5", -1, 1],
+ ["\u03B5\u03C0\u03B5", 13, 1],
+ ["\u03BC\u03B5\u03C4\u03B5\u03C0\u03B5", 14, 1],
+ ["\u03B5\u03C3\u03B5", -1, 1],
+ ["\u03B1\u03B8\u03C1\u03BF", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", 17, 1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["\u03B9\u03C3\u03BF\u03C5\u03BC\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03BF\u03C5\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B5\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03B5\u03B9", -1, 1],
+ ["\u03B9\u03C3\u03BF\u03C5\u03BD", -1, 1],
+ ["\u03B9\u03C3\u03B5\u03B9\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C9", -1, 1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["\u03B1\u03C4\u03B1", -1, 2],
+ ["\u03C6\u03B1", -1, 2],
+ ["\u03B7\u03C6\u03B1", 1, 2],
+ ["\u03BC\u03B5\u03B3", -1, 2],
+ ["\u03BB\u03C5\u03B3", -1, 2],
+ ["\u03B7\u03B4", -1, 2],
+ ["\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B5\u03C3\u03C9\u03BA\u03BB\u03B5", 6, 1],
+ ["\u03C0\u03BB\u03B5", -1, 1],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03C3\u03B5", -1, 1],
+ ["\u03B1\u03C3\u03B5", 10, 1],
+ ["\u03BA\u03B1\u03B8", -1, 2],
+ ["\u03B5\u03C7\u03B8", -1, 2],
+ ["\u03BA\u03B1\u03BA", -1, 2],
+ ["\u03BC\u03B1\u03BA", -1, 2],
+ ["\u03C3\u03BA", -1, 2],
+ ["\u03C6\u03B9\u03BB", -1, 2],
+ ["\u03BA\u03C5\u03BB", -1, 2],
+ ["\u03BC", -1, 2],
+ ["\u03B3\u03B5\u03BC", 19, 2],
+ ["\u03B1\u03C7\u03BD", -1, 2],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B8\u03C1\u03BF", -1, 1],
+ ["\u03C0", -1, 2],
+ ["\u03B1\u03C0", 23, 2],
+ ["\u03B5\u03BC\u03C0", 23, 2],
+ ["\u03B5\u03C5\u03C0", 23, 2],
+ ["\u03B1\u03C1", -1, 2],
+ ["\u03B1\u03BF\u03C1", -1, 2],
+ ["\u03B3\u03C5\u03C1", -1, 2],
+ ["\u03C7\u03C1", -1, 2],
+ ["\u03C7\u03C9\u03C1", -1, 2],
+ ["\u03BA\u03C4", -1, 2],
+ ["\u03B1\u03BA\u03C4", 32, 2],
+ ["\u03C7\u03C4", -1, 2],
+ ["\u03B1\u03C7\u03C4", 34, 2],
+ ["\u03C4\u03B1\u03C7", -1, 2],
+ ["\u03C3\u03C7", -1, 2],
+ ["\u03B1\u03C3\u03C7", 37, 2],
+ ["\u03C5\u03C8", -1, 2]
+ ];
+
+ /** @const */ var a_11 = [
+ ["\u03B9\u03C3\u03C4\u03B1", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03B7", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF\u03B9", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03B5\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03B7\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF\u03C5\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03C4\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_12 = [
+ ["\u03B5\u03B3\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B1\u03C0\u03BF\u03BA\u03BB\u03B5", -1, 1],
+ ["\u03B4\u03B1\u03BD\u03B5", -1, 2],
+ ["\u03B1\u03BD\u03C4\u03B9\u03B4\u03B1\u03BD\u03B5", 2, 2],
+ ["\u03C3\u03B5", -1, 1],
+ ["\u03BC\u03B5\u03C4\u03B1\u03C3\u03B5", 4, 1],
+ ["\u03BC\u03B9\u03BA\u03C1\u03BF\u03C3\u03B5", 4, 1]
+ ];
+
+ /** @const */ var a_13 = [
+ ["\u03B1\u03C4\u03BF\u03BC\u03B9\u03BA", -1, 2],
+ ["\u03B5\u03B8\u03BD\u03B9\u03BA", -1, 4],
+ ["\u03C4\u03BF\u03C0\u03B9\u03BA", -1, 7],
+ ["\u03B5\u03BA\u03BB\u03B5\u03BA\u03C4\u03B9\u03BA", -1, 5],
+ ["\u03C3\u03BA\u03B5\u03C0\u03C4\u03B9\u03BA", -1, 6],
+ ["\u03B3\u03BD\u03C9\u03C3\u03C4\u03B9\u03BA", -1, 3],
+ ["\u03B1\u03B3\u03BD\u03C9\u03C3\u03C4\u03B9\u03BA", 5, 1],
+ ["\u03B1\u03BB\u03B5\u03BE\u03B1\u03BD\u03B4\u03C1\u03B9\u03BD", -1, 8],
+ ["\u03B8\u03B5\u03B1\u03C4\u03C1\u03B9\u03BD", -1, 10],
+ ["\u03B2\u03C5\u03B6\u03B1\u03BD\u03C4\u03B9\u03BD", -1, 9]
+ ];
+
+ /** @const */ var a_14 = [
+ ["\u03B9\u03C3\u03BC\u03BF\u03B9", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03BF", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03BF\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03BF\u03C5\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03BC\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_15 = [
+ ["\u03C3", -1, 1],
+ ["\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_16 = [
+ ["\u03BF\u03C5\u03B4\u03B1\u03BA\u03B9\u03B1", -1, 1],
+ ["\u03B1\u03C1\u03B1\u03BA\u03B9\u03B1", -1, 1],
+ ["\u03BF\u03C5\u03B4\u03B1\u03BA\u03B9", -1, 1],
+ ["\u03B1\u03C1\u03B1\u03BA\u03B9", -1, 1]
+ ];
+
+ /** @const */ var a_17 = [
+ ["\u03B2", -1, 2],
+ ["\u03B2\u03B1\u03BC\u03B2", 0, 1],
+ ["\u03C3\u03BB\u03BF\u03B2", 0, 1],
+ ["\u03C4\u03C3\u03B5\u03C7\u03BF\u03C3\u03BB\u03BF\u03B2", 2, 1],
+ ["\u03BA\u03B1\u03C1\u03B4", -1, 2],
+ ["\u03B6", -1, 2],
+ ["\u03C4\u03B6", 5, 1],
+ ["\u03BA", -1, 1],
+ ["\u03BA\u03B1\u03C0\u03B1\u03BA", 7, 1],
+ ["\u03C3\u03BF\u03BA", 7, 1],
+ ["\u03C3\u03BA", 7, 1],
+ ["\u03B2\u03B1\u03BB", -1, 2],
+ ["\u03BC\u03B1\u03BB", -1, 1],
+ ["\u03B3\u03BB", -1, 2],
+ ["\u03C4\u03C1\u03B9\u03C0\u03BF\u03BB", -1, 2],
+ ["\u03C0\u03BB", -1, 1],
+ ["\u03BB\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03C6\u03C5\u03BB", -1, 1],
+ ["\u03BA\u03B1\u03B9\u03BC", -1, 1],
+ ["\u03BA\u03BB\u03B9\u03BC", -1, 1],
+ ["\u03C6\u03B1\u03C1\u03BC", -1, 1],
+ ["\u03B3\u03B9\u03B1\u03BD", -1, 2],
+ ["\u03C3\u03C0\u03B1\u03BD", -1, 1],
+ ["\u03B7\u03B3\u03BF\u03C5\u03BC\u03B5\u03BD", -1, 2],
+ ["\u03BA\u03BF\u03BD", -1, 1],
+ ["\u03BC\u03B1\u03BA\u03C1\u03C5\u03BD", -1, 2],
+ ["\u03C0", -1, 2],
+ ["\u03BA\u03B1\u03C4\u03C1\u03B1\u03C0", 26, 1],
+ ["\u03C1", -1, 1],
+ ["\u03B2\u03C1", 28, 1],
+ ["\u03BB\u03B1\u03B2\u03C1", 29, 1],
+ ["\u03B1\u03BC\u03B2\u03C1", 29, 1],
+ ["\u03BC\u03B5\u03C1", 28, 1],
+ ["\u03C0\u03B1\u03C4\u03B5\u03C1", 28, 2],
+ ["\u03B1\u03BD\u03B8\u03C1", 28, 1],
+ ["\u03BA\u03BF\u03C1", 28, 1],
+ ["\u03C3", -1, 1],
+ ["\u03BD\u03B1\u03B3\u03BA\u03B1\u03C3", 36, 1],
+ ["\u03C4\u03BF\u03C3", 36, 2],
+ ["\u03BC\u03BF\u03C5\u03C3\u03C4", -1, 1],
+ ["\u03C1\u03C5", -1, 1],
+ ["\u03C6", -1, 1],
+ ["\u03C3\u03C6", 41, 1],
+ ["\u03B1\u03BB\u03B9\u03C3\u03C6", 42, 1],
+ ["\u03BD\u03C5\u03C6", 41, 2],
+ ["\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_18 = [
+ ["\u03B1\u03BA\u03B9\u03B1", -1, 1],
+ ["\u03B1\u03C1\u03B1\u03BA\u03B9\u03B1", 0, 1],
+ ["\u03B9\u03C4\u03C3\u03B1", -1, 1],
+ ["\u03B1\u03BA\u03B9", -1, 1],
+ ["\u03B1\u03C1\u03B1\u03BA\u03B9", 3, 1],
+ ["\u03B9\u03C4\u03C3\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03C4\u03C3\u03B1\u03C3", -1, 1],
+ ["\u03B9\u03C4\u03C3\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_19 = [
+ ["\u03C8\u03B1\u03BB", -1, 1],
+ ["\u03B1\u03B9\u03C6\u03BD", -1, 1],
+ ["\u03BF\u03BB\u03BF", -1, 1],
+ ["\u03B9\u03C1", -1, 1]
+ ];
+
+ /** @const */ var a_20 = [
+ ["\u03B5", -1, 1],
+ ["\u03C0\u03B1\u03B9\u03C7\u03BD", -1, 1]
+ ];
+
+ /** @const */ var a_21 = [
+ ["\u03B9\u03B4\u03B9\u03B1", -1, 1],
+ ["\u03B9\u03B4\u03B9\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03B4\u03B9\u03BF", -1, 1]
+ ];
+
+ /** @const */ var a_22 = [
+ ["\u03B9\u03B2", -1, 1],
+ ["\u03B4", -1, 1],
+ ["\u03C6\u03C1\u03B1\u03B3\u03BA", -1, 1],
+ ["\u03BB\u03C5\u03BA", -1, 1],
+ ["\u03BF\u03B2\u03B5\u03BB", -1, 1],
+ ["\u03BC\u03B7\u03BD", -1, 1],
+ ["\u03C1", -1, 1]
+ ];
+
+ /** @const */ var a_23 = [
+ ["\u03B9\u03C3\u03BA\u03B5", -1, 1],
+ ["\u03B9\u03C3\u03BA\u03BF", -1, 1],
+ ["\u03B9\u03C3\u03BA\u03BF\u03C3", -1, 1],
+ ["\u03B9\u03C3\u03BA\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_24 = [
+ ["\u03B1\u03B4\u03C9\u03BD", -1, 1],
+ ["\u03B1\u03B4\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_25 = [
+ ["\u03B3\u03B9\u03B1\u03B3\u03B9", -1, -1],
+ ["\u03B8\u03B5\u03B9", -1, -1],
+ ["\u03BF\u03BA", -1, -1],
+ ["\u03BC\u03B1\u03BC", -1, -1],
+ ["\u03BC\u03B1\u03BD", -1, -1],
+ ["\u03BC\u03C0\u03B1\u03BC\u03C0", -1, -1],
+ ["\u03C0\u03B5\u03B8\u03B5\u03C1", -1, -1],
+ ["\u03C0\u03B1\u03C4\u03B5\u03C1", -1, -1],
+ ["\u03BA\u03C5\u03C1", -1, -1],
+ ["\u03BD\u03C4\u03B1\u03BD\u03C4", -1, -1]
+ ];
+
+ /** @const */ var a_26 = [
+ ["\u03B5\u03B4\u03C9\u03BD", -1, 1],
+ ["\u03B5\u03B4\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_27 = [
+ ["\u03BC\u03B9\u03BB", -1, 1],
+ ["\u03B4\u03B1\u03C0", -1, 1],
+ ["\u03B3\u03B7\u03C0", -1, 1],
+ ["\u03B9\u03C0", -1, 1],
+ ["\u03B5\u03BC\u03C0", -1, 1],
+ ["\u03BF\u03C0", -1, 1],
+ ["\u03BA\u03C1\u03B1\u03C3\u03C0", -1, 1],
+ ["\u03C5\u03C0", -1, 1]
+ ];
+
+ /** @const */ var a_28 = [
+ ["\u03BF\u03C5\u03B4\u03C9\u03BD", -1, 1],
+ ["\u03BF\u03C5\u03B4\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_29 = [
+ ["\u03C4\u03C1\u03B1\u03B3", -1, 1],
+ ["\u03C6\u03B5", -1, 1],
+ ["\u03BA\u03B1\u03BB\u03B9\u03B1\u03BA", -1, 1],
+ ["\u03B1\u03C1\u03BA", -1, 1],
+ ["\u03C3\u03BA", -1, 1],
+ ["\u03C0\u03B5\u03C4\u03B1\u03BB", -1, 1],
+ ["\u03B2\u03B5\u03BB", -1, 1],
+ ["\u03BB\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03C6\u03BB", -1, 1],
+ ["\u03C7\u03BD", -1, 1],
+ ["\u03C0\u03BB\u03B5\u03BE", -1, 1],
+ ["\u03C3\u03C0", -1, 1],
+ ["\u03C6\u03C1", -1, 1],
+ ["\u03C3", -1, 1],
+ ["\u03BB\u03B9\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_30 = [
+ ["\u03B5\u03C9\u03BD", -1, 1],
+ ["\u03B5\u03C9\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_31 = [
+ ["\u03B4", -1, 1],
+ ["\u03B9\u03B4", 0, 1],
+ ["\u03B8", -1, 1],
+ ["\u03B3\u03B1\u03BB", -1, 1],
+ ["\u03B5\u03BB", -1, 1],
+ ["\u03BD", -1, 1],
+ ["\u03C0", -1, 1],
+ ["\u03C0\u03B1\u03C1", -1, 1]
+ ];
+
+ /** @const */ var a_32 = [
+ ["\u03B9\u03B1", -1, 1],
+ ["\u03B9\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_33 = [
+ ["\u03B9\u03BA\u03B1", -1, 1],
+ ["\u03B9\u03BA\u03C9\u03BD", -1, 1],
+ ["\u03B9\u03BA\u03BF", -1, 1],
+ ["\u03B9\u03BA\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_34 = [
+ ["\u03B1\u03B4", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B1\u03B4", 0, 1],
+ ["\u03BA\u03B1\u03C4\u03B1\u03B4", 0, 1],
+ ["\u03B1\u03BD\u03C4\u03B9\u03B4", -1, 1],
+ ["\u03B5\u03BD\u03B4", -1, 1],
+ ["\u03C6\u03C5\u03BB\u03BF\u03B4", -1, 1],
+ ["\u03C5\u03C0\u03BF\u03B4", -1, 1],
+ ["\u03C0\u03C1\u03C9\u03C4\u03BF\u03B4", -1, 1],
+ ["\u03B5\u03BE\u03C9\u03B4", -1, 1],
+ ["\u03B7\u03B8", -1, 1],
+ ["\u03B1\u03BD\u03B7\u03B8", 9, 1],
+ ["\u03BE\u03B9\u03BA", -1, 1],
+ ["\u03B1\u03BB", -1, 1],
+ ["\u03B1\u03BC\u03BC\u03BF\u03C7\u03B1\u03BB", 12, 1],
+ ["\u03C3\u03C5\u03BD\u03BF\u03BC\u03B7\u03BB", -1, 1],
+ ["\u03BC\u03C0\u03BF\u03BB", -1, 1],
+ ["\u03BC\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03C4\u03C3\u03B1\u03BC", -1, 1],
+ ["\u03B2\u03C1\u03C9\u03BC", -1, 1],
+ ["\u03B1\u03BC\u03B1\u03BD", -1, 1],
+ ["\u03BC\u03C0\u03B1\u03BD", -1, 1],
+ ["\u03BA\u03B1\u03BB\u03BB\u03B9\u03BD", -1, 1],
+ ["\u03C0\u03BF\u03C3\u03C4\u03B5\u03BB\u03BD", -1, 1],
+ ["\u03C6\u03B9\u03BB\u03BF\u03BD", -1, 1],
+ ["\u03BA\u03B1\u03BB\u03C0", -1, 1],
+ ["\u03B3\u03B5\u03C1", -1, 1],
+ ["\u03C7\u03B1\u03C3", -1, 1],
+ ["\u03BC\u03C0\u03BF\u03C3", -1, 1],
+ ["\u03C0\u03BB\u03B9\u03B1\u03C4\u03C3", -1, 1],
+ ["\u03C0\u03B5\u03C4\u03C3", -1, 1],
+ ["\u03C0\u03B9\u03C4\u03C3", -1, 1],
+ ["\u03C6\u03C5\u03C3", -1, 1],
+ ["\u03BC\u03C0\u03B1\u03B3\u03B9\u03B1\u03C4", -1, 1],
+ ["\u03BD\u03B9\u03C4", -1, 1],
+ ["\u03C0\u03B9\u03BA\u03B1\u03BD\u03C4", -1, 1],
+ ["\u03C3\u03B5\u03C1\u03C4", -1, 1]
+ ];
+
+ /** @const */ var a_35 = [
+ ["\u03B1\u03B3\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03B7\u03BA\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03BC\u03B5", 1, 1],
+ ["\u03B7\u03C3\u03B1\u03BC\u03B5", -1, 1],
+ ["\u03BF\u03C5\u03C3\u03B1\u03BC\u03B5", -1, 1]
+ ];
+
+ /** @const */ var a_36 = [
+ ["\u03B2\u03BF\u03C5\u03B2", -1, 1],
+ ["\u03BE\u03B5\u03B8", -1, 1],
+ ["\u03C0\u03B5\u03B8", -1, 1],
+ ["\u03B1\u03C0\u03BF\u03B8", -1, 1],
+ ["\u03B1\u03C0\u03BF\u03BA", -1, 1],
+ ["\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03B1\u03BD\u03B1\u03C0", -1, 1],
+ ["\u03C0\u03B9\u03BA\u03C1", -1, 1],
+ ["\u03C0\u03BF\u03C4", -1, 1],
+ ["\u03B1\u03C0\u03BF\u03C3\u03C4", -1, 1],
+ ["\u03C7", -1, 1],
+ ["\u03C3\u03B9\u03C7", 10, 1]
+ ];
+
+ /** @const */ var a_37 = [
+ ["\u03C4\u03C1", -1, 1],
+ ["\u03C4\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_38 = [
+ ["\u03B1\u03B3\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B7\u03BA\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03BD\u03B5", 1, 1],
+ ["\u03B7\u03C3\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03BF\u03C5\u03C3\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03BF\u03BD\u03C4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03BF\u03BD\u03C4\u03B1\u03BD\u03B5", 5, 1],
+ ["\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD\u03B5", 7, 1],
+ ["\u03BF\u03C4\u03B1\u03BD\u03B5", -1, 1],
+ ["\u03B9\u03BF\u03C4\u03B1\u03BD\u03B5", 9, 1]
+ ];
+
+ /** @const */ var a_39 = [
+ ["\u03C4\u03B1\u03B2", -1, 1],
+ ["\u03BD\u03C4\u03B1\u03B2", 0, 1],
+ ["\u03C8\u03B7\u03BB\u03BF\u03C4\u03B1\u03B2", 0, 1],
+ ["\u03BB\u03B9\u03B2", -1, 1],
+ ["\u03BA\u03BB\u03B9\u03B2", 3, 1],
+ ["\u03BE\u03B7\u03C1\u03BF\u03BA\u03BB\u03B9\u03B2", 4, 1],
+ ["\u03B3", -1, 1],
+ ["\u03B1\u03B3", 6, 1],
+ ["\u03C4\u03C1\u03B1\u03B3", 7, 1],
+ ["\u03C4\u03C3\u03B1\u03B3", 7, 1],
+ ["\u03B1\u03B8\u03B9\u03B3\u03B3", 6, 1],
+ ["\u03C4\u03C3\u03B9\u03B3\u03B3", 6, 1],
+ ["\u03B1\u03C4\u03C3\u03B9\u03B3\u03B3", 11, 1],
+ ["\u03C3\u03C4\u03B5\u03B3", 6, 1],
+ ["\u03B1\u03C0\u03B7\u03B3", 6, 1],
+ ["\u03C3\u03B9\u03B3", 6, 1],
+ ["\u03B1\u03BD\u03BF\u03C1\u03B3", 6, 1],
+ ["\u03B5\u03BD\u03BF\u03C1\u03B3", 6, 1],
+ ["\u03BA\u03B1\u03BB\u03C0\u03BF\u03C5\u03B6", -1, 1],
+ ["\u03B8", -1, 1],
+ ["\u03BC\u03C9\u03B1\u03BC\u03B5\u03B8", 19, 1],
+ ["\u03C0\u03B9\u03B8", 19, 1],
+ ["\u03B1\u03C0\u03B9\u03B8", 21, 1],
+ ["\u03B4\u03B5\u03BA", -1, 1],
+ ["\u03C0\u03B5\u03BB\u03B5\u03BA", -1, 1],
+ ["\u03B9\u03BA", -1, 1],
+ ["\u03B1\u03BD\u03B9\u03BA", 25, 1],
+ ["\u03B2\u03BF\u03C5\u03BB\u03BA", -1, 1],
+ ["\u03B2\u03B1\u03C3\u03BA", -1, 1],
+ ["\u03B2\u03C1\u03B1\u03C7\u03C5\u03BA", -1, 1],
+ ["\u03B3\u03B1\u03BB", -1, 1],
+ ["\u03BA\u03B1\u03C4\u03B1\u03B3\u03B1\u03BB", 30, 1],
+ ["\u03BF\u03BB\u03BF\u03B3\u03B1\u03BB", 30, 1],
+ ["\u03B2\u03B1\u03B8\u03C5\u03B3\u03B1\u03BB", 30, 1],
+ ["\u03BC\u03B5\u03BB", -1, 1],
+ ["\u03BA\u03B1\u03C3\u03C4\u03B5\u03BB", -1, 1],
+ ["\u03C0\u03BF\u03C1\u03C4\u03BF\u03BB", -1, 1],
+ ["\u03C0\u03BB", -1, 1],
+ ["\u03B4\u03B9\u03C0\u03BB", 37, 1],
+ ["\u03BB\u03B1\u03BF\u03C0\u03BB", 37, 1],
+ ["\u03C8\u03C5\u03C7\u03BF\u03C0\u03BB", 37, 1],
+ ["\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03BC", -1, 1],
+ ["\u03BF\u03BB\u03B9\u03B3\u03BF\u03B4\u03B1\u03BC", 42, 1],
+ ["\u03BC\u03BF\u03C5\u03C3\u03BF\u03C5\u03BB\u03BC", 42, 1],
+ ["\u03B4\u03C1\u03B1\u03B4\u03BF\u03C5\u03BC", 42, 1],
+ ["\u03B2\u03C1\u03B1\u03C7\u03BC", 42, 1],
+ ["\u03BD", -1, 1],
+ ["\u03B1\u03BC\u03B5\u03C1\u03B9\u03BA\u03B1\u03BD", 47, 1],
+ ["\u03C0", -1, 1],
+ ["\u03B1\u03B4\u03B1\u03C0", 49, 1],
+ ["\u03C7\u03B1\u03BC\u03B7\u03BB\u03BF\u03B4\u03B1\u03C0", 49, 1],
+ ["\u03C0\u03BF\u03BB\u03C5\u03B4\u03B1\u03C0", 49, 1],
+ ["\u03BA\u03BF\u03C0", 49, 1],
+ ["\u03C5\u03C0\u03BF\u03BA\u03BF\u03C0", 53, 1],
+ ["\u03C4\u03C3\u03BF\u03C0", 49, 1],
+ ["\u03C3\u03C0", 49, 1],
+ ["\u03B5\u03C1", -1, 1],
+ ["\u03B3\u03B5\u03C1", 57, 1],
+ ["\u03B2\u03B5\u03C4\u03B5\u03C1", 57, 1],
+ ["\u03BB\u03BF\u03C5\u03B8\u03B7\u03C1", -1, 1],
+ ["\u03BA\u03BF\u03C1\u03BC\u03BF\u03C1", -1, 1],
+ ["\u03C0\u03B5\u03C1\u03B9\u03C4\u03C1", -1, 1],
+ ["\u03BF\u03C5\u03C1", -1, 1],
+ ["\u03C3", -1, 1],
+ ["\u03B2\u03B1\u03C3", 64, 1],
+ ["\u03C0\u03BF\u03BB\u03B9\u03C3", 64, 1],
+ ["\u03C3\u03B1\u03C1\u03B1\u03BA\u03B1\u03C4\u03C3", 64, 1],
+ ["\u03B8\u03C5\u03C3", 64, 1],
+ ["\u03B4\u03B9\u03B1\u03C4", -1, 1],
+ ["\u03C0\u03BB\u03B1\u03C4", -1, 1],
+ ["\u03C4\u03C3\u03B1\u03C1\u03BB\u03B1\u03C4", -1, 1],
+ ["\u03C4\u03B5\u03C4", -1, 1],
+ ["\u03C0\u03BF\u03C5\u03C1\u03B9\u03C4", -1, 1],
+ ["\u03C3\u03BF\u03C5\u03BB\u03C4", -1, 1],
+ ["\u03BC\u03B1\u03B9\u03BD\u03C4", -1, 1],
+ ["\u03B6\u03C9\u03BD\u03C4", -1, 1],
+ ["\u03BA\u03B1\u03C3\u03C4", -1, 1],
+ ["\u03C6", -1, 1],
+ ["\u03B4\u03B9\u03B1\u03C6", 78, 1],
+ ["\u03C3\u03C4\u03B5\u03C6", 78, 1],
+ ["\u03C6\u03C9\u03C4\u03BF\u03C3\u03C4\u03B5\u03C6", 80, 1],
+ ["\u03C0\u03B5\u03C1\u03B7\u03C6", 78, 1],
+ ["\u03C5\u03C0\u03B5\u03C1\u03B7\u03C6", 82, 1],
+ ["\u03BA\u03BF\u03B9\u03BB\u03B1\u03C1\u03C6", 78, 1],
+ ["\u03C0\u03B5\u03BD\u03C4\u03B1\u03C1\u03C6", 78, 1],
+ ["\u03BF\u03C1\u03C6", 78, 1],
+ ["\u03C7", -1, 1],
+ ["\u03B1\u03BC\u03B7\u03C7", 87, 1],
+ ["\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 87, 1],
+ ["\u03BC\u03B5\u03B3\u03BB\u03BF\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 89, 1],
+ ["\u03BA\u03B1\u03C0\u03BD\u03BF\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 89, 1],
+ ["\u03BC\u03B9\u03BA\u03C1\u03BF\u03B2\u03B9\u03BF\u03BC\u03B7\u03C7", 89, 1],
+ ["\u03C0\u03BF\u03BB\u03C5\u03BC\u03B7\u03C7", 87, 1],
+ ["\u03BB\u03B9\u03C7", 87, 1]
+ ];
+
+ /** @const */ var a_40 = [
+ ["\u03B7\u03C3\u03B5\u03C4\u03B5", -1, 1]
+ ];
+
+ /** @const */ var a_41 = [
+ ["\u03B5\u03BD\u03B4", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B4", -1, 1],
+ ["\u03BF\u03B4", -1, 1],
+ ["\u03B4\u03B9\u03B1\u03B8", -1, 1],
+ ["\u03BA\u03B1\u03B8", -1, 1],
+ ["\u03C1\u03B1\u03B8", -1, 1],
+ ["\u03C4\u03B1\u03B8", -1, 1],
+ ["\u03C4\u03B9\u03B8", -1, 1],
+ ["\u03B5\u03BA\u03B8", -1, 1],
+ ["\u03B5\u03BD\u03B8", -1, 1],
+ ["\u03C3\u03C5\u03BD\u03B8", -1, 1],
+ ["\u03C1\u03BF\u03B8", -1, 1],
+ ["\u03C5\u03C0\u03B5\u03C1\u03B8", -1, 1],
+ ["\u03C3\u03B8", -1, 1],
+ ["\u03B5\u03C5\u03B8", -1, 1],
+ ["\u03B1\u03C1\u03BA", -1, 1],
+ ["\u03C9\u03C6\u03B5\u03BB", -1, 1],
+ ["\u03B2\u03BF\u03BB", -1, 1],
+ ["\u03B1\u03B9\u03BD", -1, 1],
+ ["\u03C0\u03BF\u03BD", -1, 1],
+ ["\u03C1\u03BF\u03BD", -1, 1],
+ ["\u03C3\u03C5\u03BD", -1, 1],
+ ["\u03B2\u03B1\u03C1", -1, 1],
+ ["\u03B2\u03C1", -1, 1],
+ ["\u03B1\u03B9\u03C1", -1, 1],
+ ["\u03C6\u03BF\u03C1", -1, 1],
+ ["\u03B5\u03C5\u03C1", -1, 1],
+ ["\u03C0\u03C5\u03C1", -1, 1],
+ ["\u03C7\u03C9\u03C1", -1, 1],
+ ["\u03BD\u03B5\u03C4", -1, 1],
+ ["\u03C3\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_42 = [
+ ["\u03C0\u03B1\u03B3", -1, 1],
+ ["\u03B4", -1, 1],
+ ["\u03B1\u03B4", 1, 1],
+ ["\u03B8", -1, 1],
+ ["\u03B1\u03B8", 3, 1],
+ ["\u03C4\u03BF\u03BA", -1, 1],
+ ["\u03C3\u03BA", -1, 1],
+ ["\u03C0\u03B1\u03C1\u03B1\u03BA\u03B1\u03BB", -1, 1],
+ ["\u03C3\u03BA\u03B5\u03BB", -1, 1],
+ ["\u03B1\u03C0\u03BB", -1, 1],
+ ["\u03B5\u03BC", -1, 1],
+ ["\u03B1\u03BD", -1, 1],
+ ["\u03B2\u03B5\u03BD", -1, 1],
+ ["\u03B2\u03B1\u03C1\u03BF\u03BD", -1, 1],
+ ["\u03BA\u03BF\u03C0", -1, 1],
+ ["\u03C3\u03B5\u03C1\u03C0", -1, 1],
+ ["\u03B1\u03B2\u03B1\u03C1", -1, 1],
+ ["\u03B5\u03BD\u03B1\u03C1", -1, 1],
+ ["\u03B1\u03B2\u03C1", -1, 1],
+ ["\u03BC\u03C0\u03BF\u03C1", -1, 1],
+ ["\u03B8\u03B1\u03C1\u03C1", -1, 1],
+ ["\u03BD\u03C4\u03C1", -1, 1],
+ ["\u03C5", -1, 1],
+ ["\u03BD\u03B9\u03C6", -1, 1],
+ ["\u03C3\u03C5\u03C1\u03C6", -1, 1]
+ ];
+
+ /** @const */ var a_43 = [
+ ["\u03BF\u03BD\u03C4\u03B1\u03C3", -1, 1],
+ ["\u03C9\u03BD\u03C4\u03B1\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_44 = [
+ ["\u03BF\u03BC\u03B1\u03C3\u03C4\u03B5", -1, 1],
+ ["\u03B9\u03BF\u03BC\u03B1\u03C3\u03C4\u03B5", 0, 1]
+ ];
+
+ /** @const */ var a_45 = [
+ ["\u03C0", -1, 1],
+ ["\u03B1\u03C0", 0, 1],
+ ["\u03B1\u03BA\u03B1\u03C4\u03B1\u03C0", 1, 1],
+ ["\u03C3\u03C5\u03BC\u03C0", 0, 1],
+ ["\u03B1\u03C3\u03C5\u03BC\u03C0", 3, 1],
+ ["\u03B1\u03BC\u03B5\u03C4\u03B1\u03BC\u03C6", -1, 1]
+ ];
+
+ /** @const */ var a_46 = [
+ ["\u03B6", -1, 1],
+ ["\u03B1\u03BB", -1, 1],
+ ["\u03C0\u03B1\u03C1\u03B1\u03BA\u03B1\u03BB", 1, 1],
+ ["\u03B5\u03BA\u03C4\u03B5\u03BB", -1, 1],
+ ["\u03BC", -1, 1],
+ ["\u03BE", -1, 1],
+ ["\u03C0\u03C1\u03BF", -1, 1],
+ ["\u03B1\u03C1", -1, 1],
+ ["\u03BD\u03B9\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_47 = [
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1", -1, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_48 = [
+ ["\u03C0\u03B9\u03B8", -1, 1],
+ ["\u03BF\u03B8", -1, 1],
+ ["\u03BD\u03B1\u03C1\u03B8", -1, 1],
+ ["\u03C3\u03BA\u03BF\u03C5\u03BB", -1, 1],
+ ["\u03C3\u03BA\u03C9\u03BB", -1, 1],
+ ["\u03C3\u03C6", -1, 1]
+ ];
+
+ /** @const */ var a_49 = [
+ ["\u03B8", -1, 1],
+ ["\u03B4\u03B9\u03B1\u03B8", 0, 1],
+ ["\u03C0\u03B1\u03C1\u03B1\u03BA\u03B1\u03C4\u03B1\u03B8", 0, 1],
+ ["\u03C3\u03C5\u03BD\u03B8", 0, 1],
+ ["\u03C0\u03C1\u03BF\u03C3\u03B8", 0, 1]
+ ];
+
+ /** @const */ var a_50 = [
+ ["\u03B7\u03BA\u03B1", -1, 1],
+ ["\u03B7\u03BA\u03B5", -1, 1],
+ ["\u03B7\u03BA\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_51 = [
+ ["\u03C6\u03B1\u03B3", -1, 1],
+ ["\u03BB\u03B7\u03B3", -1, 1],
+ ["\u03C6\u03C1\u03C5\u03B4", -1, 1],
+ ["\u03BC\u03B1\u03BD\u03C4\u03B9\u03BB", -1, 1],
+ ["\u03BC\u03B1\u03BB\u03BB", -1, 1],
+ ["\u03BF\u03BC", -1, 1],
+ ["\u03B2\u03BB\u03B5\u03C0", -1, 1],
+ ["\u03C0\u03BF\u03B4\u03B1\u03C1", -1, 1],
+ ["\u03BA\u03C5\u03BC\u03B1\u03C4", -1, 1],
+ ["\u03C0\u03C1\u03C9\u03C4", -1, 1],
+ ["\u03BB\u03B1\u03C7", -1, 1],
+ ["\u03C0\u03B1\u03BD\u03C4\u03B1\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_52 = [
+ ["\u03C4\u03C3\u03B1", -1, 1],
+ ["\u03C7\u03B1\u03B4", -1, 1],
+ ["\u03BC\u03B5\u03B4", -1, 1],
+ ["\u03BB\u03B1\u03BC\u03C0\u03B9\u03B4", -1, 1],
+ ["\u03B4\u03B5", -1, 1],
+ ["\u03C0\u03BB\u03B5", -1, 1],
+ ["\u03BC\u03B5\u03C3\u03B1\u03B6", -1, 1],
+ ["\u03B4\u03B5\u03C3\u03C0\u03BF\u03B6", -1, 1],
+ ["\u03B1\u03B9\u03B8", -1, 1],
+ ["\u03C6\u03B1\u03C1\u03BC\u03B1\u03BA", -1, 1],
+ ["\u03B1\u03B3\u03BA", -1, 1],
+ ["\u03B1\u03BD\u03B7\u03BA", -1, 1],
+ ["\u03BB", -1, 1],
+ ["\u03BC", -1, 1],
+ ["\u03B1\u03BC", 13, 1],
+ ["\u03B2\u03C1\u03BF\u03BC", 13, 1],
+ ["\u03C5\u03C0\u03BF\u03C4\u03B5\u03B9\u03BD", -1, 1],
+ ["\u03B5\u03BA\u03BB\u03B9\u03C0", -1, 1],
+ ["\u03C1", -1, 1],
+ ["\u03B5\u03BD\u03B4\u03B9\u03B1\u03C6\u03B5\u03C1", 18, 1],
+ ["\u03B1\u03BD\u03B1\u03C1\u03C1", 18, 1],
+ ["\u03C0\u03B1\u03C4", -1, 1],
+ ["\u03BA\u03B1\u03B8\u03B1\u03C1\u03B5\u03C5", -1, 1],
+ ["\u03B4\u03B5\u03C5\u03C4\u03B5\u03C1\u03B5\u03C5", -1, 1],
+ ["\u03BB\u03B5\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_53 = [
+ ["\u03BF\u03C5\u03C3\u03B1", -1, 1],
+ ["\u03BF\u03C5\u03C3\u03B5", -1, 1],
+ ["\u03BF\u03C5\u03C3\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_54 = [
+ ["\u03C0\u03B5\u03BB", -1, 1],
+ ["\u03BB\u03BB", -1, 1],
+ ["\u03C3\u03BC\u03B7\u03BD", -1, 1],
+ ["\u03C1\u03C0", -1, 1],
+ ["\u03C0\u03C1", -1, 1],
+ ["\u03C6\u03C1", -1, 1],
+ ["\u03C7\u03BF\u03C1\u03C4", -1, 1],
+ ["\u03BF\u03C6", -1, 1],
+ ["\u03C8\u03BF\u03C6", 7, -1],
+ ["\u03C3\u03C6", -1, 1],
+ ["\u03BB\u03BF\u03C7", -1, 1],
+ ["\u03BD\u03B1\u03C5\u03BB\u03BF\u03C7", 10, -1]
+ ];
+
+ /** @const */ var a_55 = [
+ ["\u03B1\u03BC\u03B1\u03BB\u03BB\u03B9", -1, 1],
+ ["\u03BB", -1, 1],
+ ["\u03B1\u03BC\u03B1\u03BB", 1, 1],
+ ["\u03BC", -1, 1],
+ ["\u03BF\u03C5\u03BB\u03B1\u03BC", 3, 1],
+ ["\u03B5\u03BD", -1, 1],
+ ["\u03B4\u03B5\u03C1\u03B2\u03B5\u03BD", 5, 1],
+ ["\u03C0", -1, 1],
+ ["\u03B1\u03B5\u03B9\u03C0", 7, 1],
+ ["\u03B1\u03C1\u03C4\u03B9\u03C0", 7, 1],
+ ["\u03C3\u03C5\u03BC\u03C0", 7, 1],
+ ["\u03BD\u03B5\u03BF\u03C0", 7, 1],
+ ["\u03BA\u03C1\u03BF\u03BA\u03B1\u03BB\u03BF\u03C0", 7, 1],
+ ["\u03BF\u03BB\u03BF\u03C0", 7, 1],
+ ["\u03C0\u03C1\u03BF\u03C3\u03C9\u03C0\u03BF\u03C0", 7, 1],
+ ["\u03C3\u03B9\u03B4\u03B7\u03C1\u03BF\u03C0", 7, 1],
+ ["\u03B4\u03C1\u03BF\u03C3\u03BF\u03C0", 7, 1],
+ ["\u03B1\u03C3\u03C0", 7, 1],
+ ["\u03B1\u03BD\u03C5\u03C0", 7, 1],
+ ["\u03C1", -1, 1],
+ ["\u03B1\u03C3\u03C0\u03B1\u03C1", 19, 1],
+ ["\u03C7\u03B1\u03C1", 19, 1],
+ ["\u03B1\u03C7\u03B1\u03C1", 21, 1],
+ ["\u03B1\u03C0\u03B5\u03C1", 19, 1],
+ ["\u03C4\u03C1", 19, 1],
+ ["\u03BF\u03C5\u03C1", 19, 1],
+ ["\u03C4", -1, 1],
+ ["\u03B4\u03B9\u03B1\u03C4", 26, 1],
+ ["\u03B5\u03C0\u03B9\u03C4", 26, 1],
+ ["\u03C3\u03C5\u03BD\u03C4", 26, 1],
+ ["\u03BF\u03BC\u03BF\u03C4", 26, 1],
+ ["\u03BD\u03BF\u03BC\u03BF\u03C4", 30, 1],
+ ["\u03B1\u03C0\u03BF\u03C4", 26, 1],
+ ["\u03C5\u03C0\u03BF\u03C4", 26, 1],
+ ["\u03B1\u03B2\u03B1\u03C3\u03C4", 26, 1],
+ ["\u03B1\u03B9\u03BC\u03BF\u03C3\u03C4", 26, 1],
+ ["\u03C0\u03C1\u03BF\u03C3\u03C4", 26, 1],
+ ["\u03B1\u03BD\u03C5\u03C3\u03C4", 26, 1],
+ ["\u03BD\u03B1\u03C5", -1, 1],
+ ["\u03B1\u03C6", -1, 1],
+ ["\u03BE\u03B5\u03C6", -1, 1],
+ ["\u03B1\u03B4\u03B7\u03C6", -1, 1],
+ ["\u03C0\u03B1\u03BC\u03C6", -1, 1],
+ ["\u03C0\u03BF\u03BB\u03C5\u03C6", -1, 1]
+ ];
+
+ /** @const */ var a_56 = [
+ ["\u03B1\u03B3\u03B1", -1, 1],
+ ["\u03B1\u03B3\u03B5", -1, 1],
+ ["\u03B1\u03B3\u03B5\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_57 = [
+ ["\u03B7\u03C3\u03B1", -1, 1],
+ ["\u03B7\u03C3\u03B5", -1, 1],
+ ["\u03B7\u03C3\u03BF\u03C5", -1, 1]
+ ];
+
+ /** @const */ var a_58 = [
+ ["\u03BD", -1, 1],
+ ["\u03B4\u03C9\u03B4\u03B5\u03BA\u03B1\u03BD", 0, 1],
+ ["\u03B5\u03C0\u03C4\u03B1\u03BD", 0, 1],
+ ["\u03BC\u03B5\u03B3\u03B1\u03BB\u03BF\u03BD", 0, 1],
+ ["\u03B5\u03C1\u03B7\u03BC\u03BF\u03BD", 0, 1],
+ ["\u03C7\u03B5\u03C1\u03C3\u03BF\u03BD", 0, 1]
+ ];
+
+ /** @const */ var a_59 = [
+ ["\u03B7\u03C3\u03C4\u03B5", -1, 1]
+ ];
+
+ /** @const */ var a_60 = [
+ ["\u03C3\u03B2", -1, 1],
+ ["\u03B1\u03C3\u03B2", 0, 1],
+ ["\u03B1\u03C0\u03BB", -1, 1],
+ ["\u03B1\u03B5\u03B9\u03BC\u03BD", -1, 1],
+ ["\u03C7\u03C1", -1, 1],
+ ["\u03B1\u03C7\u03C1", 4, 1],
+ ["\u03BA\u03BF\u03B9\u03BD\u03BF\u03C7\u03C1", 4, 1],
+ ["\u03B4\u03C5\u03C3\u03C7\u03C1", 4, 1],
+ ["\u03B5\u03C5\u03C7\u03C1", 4, 1],
+ ["\u03C0\u03B1\u03BB\u03B9\u03BC\u03C8", -1, 1]
+ ];
+
+ /** @const */ var a_61 = [
+ ["\u03BF\u03C5\u03BD\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03BF\u03C5\u03BD\u03B5", 0, 1],
+ ["\u03B7\u03C3\u03BF\u03C5\u03BD\u03B5", 0, 1]
+ ];
+
+ /** @const */ var a_62 = [
+ ["\u03C3\u03C0\u03B9", -1, 1],
+ ["\u03BD", -1, 1],
+ ["\u03B5\u03BE\u03C9\u03BD", 1, 1],
+ ["\u03C1", -1, 1],
+ ["\u03C3\u03C4\u03C1\u03B1\u03B2\u03BF\u03BC\u03BF\u03C5\u03C4\u03C3", -1, 1],
+ ["\u03BA\u03B1\u03BA\u03BF\u03BC\u03BF\u03C5\u03C4\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_63 = [
+ ["\u03BF\u03C5\u03BC\u03B5", -1, 1],
+ ["\u03B7\u03B8\u03BF\u03C5\u03BC\u03B5", 0, 1],
+ ["\u03B7\u03C3\u03BF\u03C5\u03BC\u03B5", 0, 1]
+ ];
+
+ /** @const */ var a_64 = [
+ ["\u03B1\u03B6", -1, 1],
+ ["\u03C9\u03C1\u03B9\u03BF\u03C0\u03BB", -1, 1],
+ ["\u03B1\u03C3\u03BF\u03C5\u03C3", -1, 1],
+ ["\u03C0\u03B1\u03C1\u03B1\u03C3\u03BF\u03C5\u03C3", 2, 1],
+ ["\u03B1\u03BB\u03BB\u03BF\u03C3\u03BF\u03C5\u03C3", -1, 1],
+ ["\u03C6", -1, 1],
+ ["\u03C7", -1, 1]
+ ];
+
+ /** @const */ var a_65 = [
+ ["\u03BC\u03B1\u03C4\u03B1", -1, 1],
+ ["\u03BC\u03B1\u03C4\u03C9\u03BD", -1, 1],
+ ["\u03BC\u03B1\u03C4\u03BF\u03C3", -1, 1]
+ ];
+
+ /** @const */ var a_66 = [
+ ["\u03B1", -1, 1],
+ ["\u03B9\u03BF\u03C5\u03BC\u03B1", 0, 1],
+ ["\u03BF\u03BC\u03BF\u03C5\u03BD\u03B1", 0, 1],
+ ["\u03B9\u03BF\u03BC\u03BF\u03C5\u03BD\u03B1", 2, 1],
+ ["\u03BF\u03C3\u03BF\u03C5\u03BD\u03B1", 0, 1],
+ ["\u03B9\u03BF\u03C3\u03BF\u03C5\u03BD\u03B1", 4, 1],
+ ["\u03B5", -1, 1],
+ ["\u03B1\u03B3\u03B1\u03C4\u03B5", 6, 1],
+ ["\u03B7\u03BA\u03B1\u03C4\u03B5", 6, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03C4\u03B5", 8, 1],
+ ["\u03B7\u03C3\u03B1\u03C4\u03B5", 6, 1],
+ ["\u03BF\u03C5\u03C3\u03B1\u03C4\u03B5", 6, 1],
+ ["\u03B5\u03B9\u03C4\u03B5", 6, 1],
+ ["\u03B7\u03B8\u03B5\u03B9\u03C4\u03B5", 12, 1],
+ ["\u03B9\u03B5\u03BC\u03B1\u03C3\u03C4\u03B5", 6, 1],
+ ["\u03BF\u03C5\u03BC\u03B1\u03C3\u03C4\u03B5", 6, 1],
+ ["\u03B9\u03BF\u03C5\u03BC\u03B1\u03C3\u03C4\u03B5", 15, 1],
+ ["\u03B9\u03B5\u03C3\u03B1\u03C3\u03C4\u03B5", 6, 1],
+ ["\u03BF\u03C3\u03B1\u03C3\u03C4\u03B5", 6, 1],
+ ["\u03B9\u03BF\u03C3\u03B1\u03C3\u03C4\u03B5", 18, 1],
+ ["\u03B7", -1, 1],
+ ["\u03B9", -1, 1],
+ ["\u03B1\u03BC\u03B1\u03B9", 21, 1],
+ ["\u03B9\u03B5\u03BC\u03B1\u03B9", 21, 1],
+ ["\u03BF\u03BC\u03B1\u03B9", 21, 1],
+ ["\u03BF\u03C5\u03BC\u03B1\u03B9", 21, 1],
+ ["\u03B1\u03C3\u03B1\u03B9", 21, 1],
+ ["\u03B5\u03C3\u03B1\u03B9", 21, 1],
+ ["\u03B9\u03B5\u03C3\u03B1\u03B9", 27, 1],
+ ["\u03B1\u03C4\u03B1\u03B9", 21, 1],
+ ["\u03B5\u03C4\u03B1\u03B9", 21, 1],
+ ["\u03B9\u03B5\u03C4\u03B1\u03B9", 30, 1],
+ ["\u03BF\u03BD\u03C4\u03B1\u03B9", 21, 1],
+ ["\u03BF\u03C5\u03BD\u03C4\u03B1\u03B9", 21, 1],
+ ["\u03B9\u03BF\u03C5\u03BD\u03C4\u03B1\u03B9", 33, 1],
+ ["\u03B5\u03B9", 21, 1],
+ ["\u03B1\u03B5\u03B9", 35, 1],
+ ["\u03B7\u03B8\u03B5\u03B9", 35, 1],
+ ["\u03B7\u03C3\u03B5\u03B9", 35, 1],
+ ["\u03BF\u03B9", 21, 1],
+ ["\u03B1\u03BD", -1, 1],
+ ["\u03B1\u03B3\u03B1\u03BD", 40, 1],
+ ["\u03B7\u03BA\u03B1\u03BD", 40, 1],
+ ["\u03B7\u03B8\u03B7\u03BA\u03B1\u03BD", 42, 1],
+ ["\u03B7\u03C3\u03B1\u03BD", 40, 1],
+ ["\u03BF\u03C5\u03C3\u03B1\u03BD", 40, 1],
+ ["\u03BF\u03BD\u03C4\u03BF\u03C5\u03C3\u03B1\u03BD", 45, 1],
+ ["\u03B9\u03BF\u03BD\u03C4\u03BF\u03C5\u03C3\u03B1\u03BD", 46, 1],
+ ["\u03BF\u03BD\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03BD\u03C4\u03B1\u03BD", 48, 1],
+ ["\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03C5\u03BD\u03C4\u03B1\u03BD", 50, 1],
+ ["\u03BF\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03C4\u03B1\u03BD", 52, 1],
+ ["\u03BF\u03BC\u03B1\u03C3\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03BC\u03B1\u03C3\u03C4\u03B1\u03BD", 54, 1],
+ ["\u03BF\u03C3\u03B1\u03C3\u03C4\u03B1\u03BD", 40, 1],
+ ["\u03B9\u03BF\u03C3\u03B1\u03C3\u03C4\u03B1\u03BD", 56, 1],
+ ["\u03BF\u03C5\u03BD", -1, 1],
+ ["\u03B7\u03B8\u03BF\u03C5\u03BD", 58, 1],
+ ["\u03BF\u03BC\u03BF\u03C5\u03BD", 58, 1],
+ ["\u03B9\u03BF\u03BC\u03BF\u03C5\u03BD", 60, 1],
+ ["\u03B7\u03C3\u03BF\u03C5\u03BD", 58, 1],
+ ["\u03BF\u03C3\u03BF\u03C5\u03BD", 58, 1],
+ ["\u03B9\u03BF\u03C3\u03BF\u03C5\u03BD", 63, 1],
+ ["\u03C9\u03BD", -1, 1],
+ ["\u03B7\u03B4\u03C9\u03BD", 65, 1],
+ ["\u03BF", -1, 1],
+ ["\u03B1\u03C3", -1, 1],
+ ["\u03B5\u03C3", -1, 1],
+ ["\u03B7\u03B4\u03B5\u03C3", 69, 1],
+ ["\u03B7\u03C3\u03B5\u03C3", 69, 1],
+ ["\u03B7\u03C3", -1, 1],
+ ["\u03B5\u03B9\u03C3", -1, 1],
+ ["\u03B7\u03B8\u03B5\u03B9\u03C3", 73, 1],
+ ["\u03BF\u03C3", -1, 1],
+ ["\u03C5\u03C3", -1, 1],
+ ["\u03BF\u03C5\u03C3", 76, 1],
+ ["\u03C5", -1, 1],
+ ["\u03BF\u03C5", 78, 1],
+ ["\u03C9", -1, 1],
+ ["\u03B1\u03C9", 80, 1],
+ ["\u03B7\u03B8\u03C9", 80, 1],
+ ["\u03B7\u03C3\u03C9", 80, 1]
+ ];
+
+ /** @const */ var a_67 = [
+ ["\u03BF\u03C4\u03B5\u03C1", -1, 1],
+ ["\u03B5\u03C3\u03C4\u03B5\u03C1", -1, 1],
+ ["\u03C5\u03C4\u03B5\u03C1", -1, 1],
+ ["\u03C9\u03C4\u03B5\u03C1", -1, 1],
+ ["\u03BF\u03C4\u03B1\u03C4", -1, 1],
+ ["\u03B5\u03C3\u03C4\u03B1\u03C4", -1, 1],
+ ["\u03C5\u03C4\u03B1\u03C4", -1, 1],
+ ["\u03C9\u03C4\u03B1\u03C4", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [81, 65, 16, 1];
+
+ /** @const */ var /** Array */ g_v2 = [81, 65, 0, 1];
+
+ var /** boolean */ B_test1 = false;
+
+
+ /** @return {boolean} */
+ function r_has_min_length() {
+ return base.current.length >= 3;
+ };
+
+ /** @return {boolean} */
+ function r_tolower() {
+ var /** number */ among_var;
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_0);
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B1"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B2"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u03B3"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u03B4"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u03B5"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u03B6"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u03B7"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u03B8"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u03BA"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("\u03BB"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("\u03BC"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("\u03BD"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("\u03BE"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("\u03BF"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("\u03C0"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("\u03C1"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u03C3"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("\u03C5"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("\u03C6"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("\u03C7"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u03C8"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("\u03C9"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab0;
+ }
+ base.cursor--;
+ break;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03C6\u03B1"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03C3\u03BA\u03B1"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u03BF\u03BB\u03BF"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u03C3\u03BF"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u03C4\u03B1\u03C4\u03BF"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u03BA\u03C1\u03B5"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u03C0\u03B5\u03C1"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u03C4\u03B5\u03C1"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u03C6\u03C9"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u03BA\u03B1\u03B8\u03B5\u03C3\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("\u03B3\u03B5\u03B3\u03BF\u03BD"))
+ {
+ return false;
+ }
+ break;
+ }
+ B_test1 = false;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_3) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9\u03B6"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s2() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_5) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_4) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03C9\u03BD"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s3() {
+ var /** number */ among_var;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B9\u03C3\u03B1")))
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03B9\u03C3"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ }
+ if (base.find_among_b(a_7) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_6);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9\u03C3"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s4() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_9) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_8) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s5() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_11) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_10);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9\u03C3\u03C4"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s6() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_14) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_12);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B9\u03C3\u03BC"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_13);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B1\u03B3\u03BD\u03C9\u03C3\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B1\u03C4\u03BF\u03BC"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u03B3\u03BD\u03C9\u03C3\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u03B5\u03B8\u03BD"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u03B5\u03BA\u03BB\u03B5\u03BA\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u03C3\u03BA\u03B5\u03C0\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u03C4\u03BF\u03C0"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u03B1\u03BB\u03B5\u03BE\u03B1\u03BD\u03B4\u03C1"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u03B2\u03C5\u03B6\u03B1\u03BD\u03C4"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u03B8\u03B5\u03B1\u03C4\u03C1"))
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s7() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_16) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_15) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B1\u03C1\u03B1\u03BA"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s8() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_18) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_17);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B1\u03BA"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u03B9\u03C4\u03C3"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03BA\u03BF\u03C1")))
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03C4\u03C3"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s9() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_21) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_19) == 0)
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03B9\u03B4"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_20) == 0)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03B4"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_s10() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_23) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_22) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03C3\u03BA"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_2a() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_24) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (base.find_among_b(a_25) == 0)
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ {
+ /** @const */ var /** number */ c1 = base.cursor;
+ base.insert(base.cursor, base.cursor, "\u03B1\u03B4");
+ base.cursor = c1;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_2b() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_26) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_27) == 0)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B5\u03B4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_2c() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_28) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_29) == 0)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03B4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_2d() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_30) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_31) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B5"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_3() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_32) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_v, 945, 969)))
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_4() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_33) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_v, 945, 969)))
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03B9\u03BA"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_34) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03BA"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5a() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B1\u03B3\u03B1\u03BC\u03B5")))
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u03B1\u03B3\u03B1\u03BC"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_35) == 0)
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B1\u03BC\u03B5")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_36) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B1\u03BC"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5b() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_38) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_37) == 0)
+ {
+ break lab0;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u03B1\u03B3\u03B1\u03BD"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B1\u03BD\u03B5")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_v2, 945, 969)))
+ {
+ break lab2;
+ }
+ if (!base.slice_from("\u03B1\u03BD"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_39) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B1\u03BD"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5c() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_40) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B5\u03C4\u03B5")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.in_grouping_b(g_v2, 945, 969)))
+ {
+ break lab2;
+ }
+ if (!base.slice_from("\u03B5\u03C4"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ lab3: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_41) == 0)
+ {
+ break lab3;
+ }
+ if (!base.slice_from("\u03B5\u03C4"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ }
+ base.bra = base.cursor;
+ if (base.find_among_b(a_42) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B5\u03C4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5d() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_43) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03B1\u03C1\u03C7")))
+ {
+ break lab1;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03BF\u03BD\u03C4"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03BA\u03C1\u03B5")))
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03C9\u03BD\u03C4"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5e() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_44) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03BF\u03BD")))
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03BC\u03B1\u03C3\u03C4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5f() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B9\u03B5\u03C3\u03C4\u03B5")))
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_45) == 0)
+ {
+ break lab0;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u03B9\u03B5\u03C3\u03C4"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u03B5\u03C3\u03C4\u03B5")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_46) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B9\u03B5\u03C3\u03C4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5g() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_47) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_50) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_48) == 0)
+ {
+ break lab2;
+ }
+ if (!base.slice_from("\u03B7\u03BA"))
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_49) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B7\u03BA"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5h() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_53) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_51) == 0)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03C3"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_52) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03C3"))
+ {
+ return false;
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5i() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_56) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u03BA\u03BF\u03BB\u03BB")))
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u03B1\u03B3"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab3: {
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ among_var = base.find_among_b(a_54);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u03B1\u03B3"))
+ {
+ return false;
+ }
+ break;
+ }
+ break lab2;
+ }
+ base.cursor = base.limit - v_2;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_55) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B1\u03B3"))
+ {
+ return false;
+ }
+ }
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5j() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_57) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_58) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B7\u03C3"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5k() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_59) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_60) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03B7\u03C3\u03C4"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5l() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_61) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_62) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03BD"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_5m() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_63) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_test1 = false;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
+ if (base.find_among_b(a_64) == 0)
+ {
+ return false;
+ }
+ if (base.cursor > base.limit_backward)
+ {
+ return false;
+ }
+ if (!base.slice_from("\u03BF\u03C5\u03BC"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_6() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_65) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u03BC\u03B1"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ if (!B_test1)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (base.find_among_b(a_66) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step_7() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_67) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_tolower();
+ base.cursor = base.limit - v_1;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ B_test1 = true;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_step_1();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_step_s1();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_step_s2();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_step_s3();
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_step_s4();
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ r_step_s5();
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ r_step_s6();
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ r_step_s7();
+ base.cursor = base.limit - v_9;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ r_step_s8();
+ base.cursor = base.limit - v_10;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ r_step_s9();
+ base.cursor = base.limit - v_11;
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
+ r_step_s10();
+ base.cursor = base.limit - v_12;
+ /** @const */ var /** number */ v_13 = base.limit - base.cursor;
+ r_step_2a();
+ base.cursor = base.limit - v_13;
+ /** @const */ var /** number */ v_14 = base.limit - base.cursor;
+ r_step_2b();
+ base.cursor = base.limit - v_14;
+ /** @const */ var /** number */ v_15 = base.limit - base.cursor;
+ r_step_2c();
+ base.cursor = base.limit - v_15;
+ /** @const */ var /** number */ v_16 = base.limit - base.cursor;
+ r_step_2d();
+ base.cursor = base.limit - v_16;
+ /** @const */ var /** number */ v_17 = base.limit - base.cursor;
+ r_step_3();
+ base.cursor = base.limit - v_17;
+ /** @const */ var /** number */ v_18 = base.limit - base.cursor;
+ r_step_4();
+ base.cursor = base.limit - v_18;
+ /** @const */ var /** number */ v_19 = base.limit - base.cursor;
+ r_step_5a();
+ base.cursor = base.limit - v_19;
+ /** @const */ var /** number */ v_20 = base.limit - base.cursor;
+ r_step_5b();
+ base.cursor = base.limit - v_20;
+ /** @const */ var /** number */ v_21 = base.limit - base.cursor;
+ r_step_5c();
+ base.cursor = base.limit - v_21;
+ /** @const */ var /** number */ v_22 = base.limit - base.cursor;
+ r_step_5d();
+ base.cursor = base.limit - v_22;
+ /** @const */ var /** number */ v_23 = base.limit - base.cursor;
+ r_step_5e();
+ base.cursor = base.limit - v_23;
+ /** @const */ var /** number */ v_24 = base.limit - base.cursor;
+ r_step_5f();
+ base.cursor = base.limit - v_24;
+ /** @const */ var /** number */ v_25 = base.limit - base.cursor;
+ r_step_5g();
+ base.cursor = base.limit - v_25;
+ /** @const */ var /** number */ v_26 = base.limit - base.cursor;
+ r_step_5h();
+ base.cursor = base.limit - v_26;
+ /** @const */ var /** number */ v_27 = base.limit - base.cursor;
+ r_step_5j();
+ base.cursor = base.limit - v_27;
+ /** @const */ var /** number */ v_28 = base.limit - base.cursor;
+ r_step_5i();
+ base.cursor = base.limit - v_28;
+ /** @const */ var /** number */ v_29 = base.limit - base.cursor;
+ r_step_5k();
+ base.cursor = base.limit - v_29;
+ /** @const */ var /** number */ v_30 = base.limit - base.cursor;
+ r_step_5l();
+ base.cursor = base.limit - v_30;
+ /** @const */ var /** number */ v_31 = base.limit - base.cursor;
+ r_step_5m();
+ base.cursor = base.limit - v_31;
+ /** @const */ var /** number */ v_32 = base.limit - base.cursor;
+ r_step_6();
+ base.cursor = base.limit - v_32;
+ /** @const */ var /** number */ v_33 = base.limit - base.cursor;
+ r_step_7();
+ base.cursor = base.limit - v_33;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/hindi-stemmer.js b/sphinx/search/non-minified-js/hindi-stemmer.js
new file mode 100644
index 00000000000..26a715e7e77
--- /dev/null
+++ b/sphinx/search/non-minified-js/hindi-stemmer.js
@@ -0,0 +1,181 @@
+// Generated from hindi.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var HindiStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0906\u0901", -1, -1],
+ ["\u093E\u0901", -1, -1],
+ ["\u0907\u092F\u093E\u0901", 1, -1],
+ ["\u0906\u0907\u092F\u093E\u0901", 2, -1],
+ ["\u093E\u0907\u092F\u093E\u0901", 2, -1],
+ ["\u093F\u092F\u093E\u0901", 1, -1],
+ ["\u0906\u0902", -1, -1],
+ ["\u0909\u0906\u0902", 6, -1],
+ ["\u0941\u0906\u0902", 6, -1],
+ ["\u0908\u0902", -1, -1],
+ ["\u0906\u0908\u0902", 9, -1],
+ ["\u093E\u0908\u0902", 9, -1],
+ ["\u090F\u0902", -1, -1],
+ ["\u0906\u090F\u0902", 12, -1],
+ ["\u0909\u090F\u0902", 12, -1],
+ ["\u093E\u090F\u0902", 12, -1],
+ ["\u0924\u093E\u090F\u0902", 15, -1, r_CONSONANT],
+ ["\u0905\u0924\u093E\u090F\u0902", 16, -1],
+ ["\u0928\u093E\u090F\u0902", 15, -1, r_CONSONANT],
+ ["\u0905\u0928\u093E\u090F\u0902", 18, -1],
+ ["\u0941\u090F\u0902", 12, -1],
+ ["\u0913\u0902", -1, -1],
+ ["\u0906\u0913\u0902", 21, -1],
+ ["\u0909\u0913\u0902", 21, -1],
+ ["\u093E\u0913\u0902", 21, -1],
+ ["\u0924\u093E\u0913\u0902", 24, -1, r_CONSONANT],
+ ["\u0905\u0924\u093E\u0913\u0902", 25, -1],
+ ["\u0928\u093E\u0913\u0902", 24, -1, r_CONSONANT],
+ ["\u0905\u0928\u093E\u0913\u0902", 27, -1],
+ ["\u0941\u0913\u0902", 21, -1],
+ ["\u093E\u0902", -1, -1],
+ ["\u0907\u092F\u093E\u0902", 30, -1],
+ ["\u0906\u0907\u092F\u093E\u0902", 31, -1],
+ ["\u093E\u0907\u092F\u093E\u0902", 31, -1],
+ ["\u093F\u092F\u093E\u0902", 30, -1],
+ ["\u0940\u0902", -1, -1],
+ ["\u0924\u0940\u0902", 35, -1, r_CONSONANT],
+ ["\u0905\u0924\u0940\u0902", 36, -1],
+ ["\u0906\u0924\u0940\u0902", 36, -1],
+ ["\u093E\u0924\u0940\u0902", 36, -1],
+ ["\u0947\u0902", -1, -1],
+ ["\u094B\u0902", -1, -1],
+ ["\u0907\u092F\u094B\u0902", 41, -1],
+ ["\u0906\u0907\u092F\u094B\u0902", 42, -1],
+ ["\u093E\u0907\u092F\u094B\u0902", 42, -1],
+ ["\u093F\u092F\u094B\u0902", 41, -1],
+ ["\u0905", -1, -1],
+ ["\u0906", -1, -1],
+ ["\u0907", -1, -1],
+ ["\u0908", -1, -1],
+ ["\u0906\u0908", 49, -1],
+ ["\u093E\u0908", 49, -1],
+ ["\u0909", -1, -1],
+ ["\u090A", -1, -1],
+ ["\u090F", -1, -1],
+ ["\u0906\u090F", 54, -1],
+ ["\u0907\u090F", 54, -1],
+ ["\u0906\u0907\u090F", 56, -1],
+ ["\u093E\u0907\u090F", 56, -1],
+ ["\u093E\u090F", 54, -1],
+ ["\u093F\u090F", 54, -1],
+ ["\u0913", -1, -1],
+ ["\u0906\u0913", 61, -1],
+ ["\u093E\u0913", 61, -1],
+ ["\u0915\u0930", -1, -1, r_CONSONANT],
+ ["\u0905\u0915\u0930", 64, -1],
+ ["\u0906\u0915\u0930", 64, -1],
+ ["\u093E\u0915\u0930", 64, -1],
+ ["\u093E", -1, -1],
+ ["\u090A\u0902\u0917\u093E", 68, -1],
+ ["\u0906\u090A\u0902\u0917\u093E", 69, -1],
+ ["\u093E\u090A\u0902\u0917\u093E", 69, -1],
+ ["\u0942\u0902\u0917\u093E", 68, -1],
+ ["\u090F\u0917\u093E", 68, -1],
+ ["\u0906\u090F\u0917\u093E", 73, -1],
+ ["\u093E\u090F\u0917\u093E", 73, -1],
+ ["\u0947\u0917\u093E", 68, -1],
+ ["\u0924\u093E", 68, -1, r_CONSONANT],
+ ["\u0905\u0924\u093E", 77, -1],
+ ["\u0906\u0924\u093E", 77, -1],
+ ["\u093E\u0924\u093E", 77, -1],
+ ["\u0928\u093E", 68, -1, r_CONSONANT],
+ ["\u0905\u0928\u093E", 81, -1],
+ ["\u0906\u0928\u093E", 81, -1],
+ ["\u093E\u0928\u093E", 81, -1],
+ ["\u0906\u092F\u093E", 68, -1],
+ ["\u093E\u092F\u093E", 68, -1],
+ ["\u093F", -1, -1],
+ ["\u0940", -1, -1],
+ ["\u090A\u0902\u0917\u0940", 88, -1],
+ ["\u0906\u090A\u0902\u0917\u0940", 89, -1],
+ ["\u093E\u090A\u0902\u0917\u0940", 89, -1],
+ ["\u090F\u0902\u0917\u0940", 88, -1],
+ ["\u0906\u090F\u0902\u0917\u0940", 92, -1],
+ ["\u093E\u090F\u0902\u0917\u0940", 92, -1],
+ ["\u0942\u0902\u0917\u0940", 88, -1],
+ ["\u0947\u0902\u0917\u0940", 88, -1],
+ ["\u090F\u0917\u0940", 88, -1],
+ ["\u0906\u090F\u0917\u0940", 97, -1],
+ ["\u093E\u090F\u0917\u0940", 97, -1],
+ ["\u0913\u0917\u0940", 88, -1],
+ ["\u0906\u0913\u0917\u0940", 100, -1],
+ ["\u093E\u0913\u0917\u0940", 100, -1],
+ ["\u0947\u0917\u0940", 88, -1],
+ ["\u094B\u0917\u0940", 88, -1],
+ ["\u0924\u0940", 88, -1, r_CONSONANT],
+ ["\u0905\u0924\u0940", 105, -1],
+ ["\u0906\u0924\u0940", 105, -1],
+ ["\u093E\u0924\u0940", 105, -1],
+ ["\u0928\u0940", 88, -1, r_CONSONANT],
+ ["\u0905\u0928\u0940", 109, -1],
+ ["\u0941", -1, -1],
+ ["\u0942", -1, -1],
+ ["\u0947", -1, -1],
+ ["\u090F\u0902\u0917\u0947", 113, -1],
+ ["\u0906\u090F\u0902\u0917\u0947", 114, -1],
+ ["\u093E\u090F\u0902\u0917\u0947", 114, -1],
+ ["\u0947\u0902\u0917\u0947", 113, -1],
+ ["\u0913\u0917\u0947", 113, -1],
+ ["\u0906\u0913\u0917\u0947", 118, -1],
+ ["\u093E\u0913\u0917\u0947", 118, -1],
+ ["\u094B\u0917\u0947", 113, -1],
+ ["\u0924\u0947", 113, -1, r_CONSONANT],
+ ["\u0905\u0924\u0947", 122, -1],
+ ["\u0906\u0924\u0947", 122, -1],
+ ["\u093E\u0924\u0947", 122, -1],
+ ["\u0928\u0947", 113, -1, r_CONSONANT],
+ ["\u0905\u0928\u0947", 126, -1],
+ ["\u0906\u0928\u0947", 126, -1],
+ ["\u093E\u0928\u0947", 126, -1],
+ ["\u094B", -1, -1],
+ ["\u094D", -1, -1]
+ ];
+
+ /** @const */ var /** Array */ g_consonant = [255, 255, 255, 255, 159, 0, 0, 0, 248, 7];
+
+
+ /** @return {boolean} */
+ function r_CONSONANT() {
+ if (!(base.in_grouping_b(g_consonant, 2325, 2399)))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ if (base.cursor >= base.limit)
+ {
+ return false;
+ }
+ base.cursor++;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_0) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/hungarian-stemmer.js b/sphinx/search/non-minified-js/hungarian-stemmer.js
index 2c550ac0d0e..886e1cf39b2 100644
--- a/sphinx/search/non-minified-js/hungarian-stemmer.js
+++ b/sphinx/search/non-minified-js/hungarian-stemmer.js
@@ -1,25 +1,15 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from hungarian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-HungarianStemmer = function() {
+var HungarianStemmer = function() {
var base = new BaseStemmer();
- /** @const */ var a_0 = [
- ["cs", -1, -1],
- ["dzs", -1, -1],
- ["gy", -1, -1],
- ["ly", -1, -1],
- ["ny", -1, -1],
- ["sz", -1, -1],
- ["ty", -1, -1],
- ["zs", -1, -1]
- ];
- /** @const */ var a_1 = [
+ /** @const */ var a_0 = [
["\u00E1", -1, 1],
["\u00E9", -1, 2]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_1 = [
["bb", -1, -1],
["cc", -1, -1],
["dd", -1, -1],
@@ -45,12 +35,12 @@ HungarianStemmer = function() {
["zz", -1, -1]
];
- /** @const */ var a_3 = [
+ /** @const */ var a_2 = [
["al", -1, 1],
["el", -1, 1]
];
- /** @const */ var a_4 = [
+ /** @const */ var a_3 = [
["ba", -1, -1],
["ra", -1, -1],
["be", -1, -1],
@@ -97,13 +87,13 @@ HungarianStemmer = function() {
["v\u00E9", -1, -1]
];
- /** @const */ var a_5 = [
+ /** @const */ var a_4 = [
["\u00E1n", -1, 2],
["\u00E9n", -1, 1],
["\u00E1nk\u00E9nt", -1, 2]
];
- /** @const */ var a_6 = [
+ /** @const */ var a_5 = [
["stul", -1, 1],
["astul", 0, 1],
["\u00E1stul", 0, 2],
@@ -112,12 +102,12 @@ HungarianStemmer = function() {
["\u00E9st\u00FCl", 3, 3]
];
- /** @const */ var a_7 = [
+ /** @const */ var a_6 = [
["\u00E1", -1, 1],
["\u00E9", -1, 1]
];
- /** @const */ var a_8 = [
+ /** @const */ var a_7 = [
["k", -1, 3],
["ak", 0, 3],
["ek", 0, 3],
@@ -127,7 +117,7 @@ HungarianStemmer = function() {
["\u00F6k", 0, 3]
];
- /** @const */ var a_9 = [
+ /** @const */ var a_8 = [
["\u00E9i", -1, 1],
["\u00E1\u00E9i", 0, 3],
["\u00E9\u00E9i", 0, 2],
@@ -142,7 +132,7 @@ HungarianStemmer = function() {
["\u00E9\u00E9", 3, 2]
];
- /** @const */ var a_10 = [
+ /** @const */ var a_9 = [
["a", -1, 1],
["ja", 0, 1],
["d", -1, 1],
@@ -176,7 +166,7 @@ HungarianStemmer = function() {
["\u00E9", -1, 3]
];
- /** @const */ var a_11 = [
+ /** @const */ var a_10 = [
["id", -1, 1],
["aid", 0, 1],
["jaid", 1, 1],
@@ -230,69 +220,30 @@ HungarianStemmer = function() {
function r_mark_regions() {
I_p1 = base.limit;
lab0: {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab1: {
if (!(base.in_grouping(g_v, 97, 369)))
{
break lab1;
}
- golab2: while(true)
- {
- var /** number */ v_2 = base.cursor;
- lab3: {
- if (!(base.out_grouping(g_v, 97, 369)))
- {
- break lab3;
- }
- base.cursor = v_2;
- break golab2;
- }
- base.cursor = v_2;
- if (base.cursor >= base.limit)
- {
- break lab1;
- }
- base.cursor++;
- }
- lab4: {
- var /** number */ v_3 = base.cursor;
- lab5: {
- if (base.find_among(a_0) == 0)
- {
- break lab5;
- }
- break lab4;
- }
- base.cursor = v_3;
- if (base.cursor >= base.limit)
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
+ if (!base.go_in_grouping(g_v, 97, 369))
{
- break lab1;
+ break lab2;
}
base.cursor++;
+ I_p1 = base.cursor;
}
- I_p1 = base.cursor;
+ base.cursor = v_2;
break lab0;
}
base.cursor = v_1;
- if (!(base.out_grouping(g_v, 97, 369)))
+ if (!base.go_out_grouping(g_v, 97, 369))
{
return false;
}
- golab6: while(true)
- {
- lab7: {
- if (!(base.in_grouping(g_v, 97, 369)))
- {
- break lab7;
- }
- break golab6;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
- }
+ base.cursor++;
I_p1 = base.cursor;
}
return true;
@@ -300,18 +251,14 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_v_ending() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_1);
+ among_var = base.find_among_b(a_0);
if (among_var == 0)
{
return false;
@@ -340,8 +287,8 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_double() {
- var /** number */ v_1 = base.limit - base.cursor;
- if (base.find_among_b(a_2) == 0)
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (base.find_among_b(a_1) == 0)
{
return false;
}
@@ -357,14 +304,11 @@ HungarianStemmer = function() {
}
base.cursor--;
base.ket = base.cursor;
+ if (base.cursor <= base.limit_backward)
{
- var /** number */ c1 = base.cursor - 1;
- if (c1 < base.limit_backward)
- {
- return false;
- }
- base.cursor = c1;
+ return false;
}
+ base.cursor--;
base.bra = base.cursor;
if (!base.slice_del())
{
@@ -376,7 +320,7 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_instrum() {
base.ket = base.cursor;
- if (base.find_among_b(a_3) == 0)
+ if (base.find_among_b(a_2) == 0)
{
return false;
}
@@ -403,7 +347,7 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_case() {
base.ket = base.cursor;
- if (base.find_among_b(a_4) == 0)
+ if (base.find_among_b(a_3) == 0)
{
return false;
}
@@ -427,7 +371,7 @@ HungarianStemmer = function() {
function r_case_special() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_5);
+ among_var = base.find_among_b(a_4);
if (among_var == 0)
{
return false;
@@ -458,7 +402,7 @@ HungarianStemmer = function() {
function r_case_other() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_6);
+ among_var = base.find_among_b(a_5);
if (among_var == 0)
{
return false;
@@ -494,7 +438,7 @@ HungarianStemmer = function() {
/** @return {boolean} */
function r_factive() {
base.ket = base.cursor;
- if (base.find_among_b(a_7) == 0)
+ if (base.find_among_b(a_6) == 0)
{
return false;
}
@@ -522,7 +466,7 @@ HungarianStemmer = function() {
function r_plural() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_8);
+ among_var = base.find_among_b(a_7);
if (among_var == 0)
{
return false;
@@ -559,7 +503,7 @@ HungarianStemmer = function() {
function r_owned() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_9);
+ among_var = base.find_among_b(a_8);
if (among_var == 0)
{
return false;
@@ -596,7 +540,7 @@ HungarianStemmer = function() {
function r_sing_owner() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_10);
+ among_var = base.find_among_b(a_9);
if (among_var == 0)
{
return false;
@@ -633,7 +577,7 @@ HungarianStemmer = function() {
function r_plur_owner() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_11);
+ among_var = base.find_among_b(a_10);
if (among_var == 0)
{
return false;
@@ -667,35 +611,35 @@ HungarianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_instrum();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_case();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_case_special();
base.cursor = base.limit - v_4;
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
r_case_other();
base.cursor = base.limit - v_5;
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
r_factive();
base.cursor = base.limit - v_6;
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
r_owned();
base.cursor = base.limit - v_7;
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
r_sing_owner();
base.cursor = base.limit - v_8;
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
r_plur_owner();
base.cursor = base.limit - v_9;
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
r_plural();
base.cursor = base.limit - v_10;
base.cursor = base.limit_backward;
diff --git a/sphinx/search/non-minified-js/indonesian-stemmer.js b/sphinx/search/non-minified-js/indonesian-stemmer.js
new file mode 100644
index 00000000000..714c410e738
--- /dev/null
+++ b/sphinx/search/non-minified-js/indonesian-stemmer.js
@@ -0,0 +1,409 @@
+// Generated from indonesian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var IndonesianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["kah", -1, 1],
+ ["lah", -1, 1],
+ ["pun", -1, 1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["nya", -1, 1],
+ ["ku", -1, 1],
+ ["mu", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["i", -1, 1, r_SUFFIX_I_OK],
+ ["an", -1, 1, r_SUFFIX_AN_OK],
+ ["kan", 1, 1, r_SUFFIX_KAN_OK]
+ ];
+
+ /** @const */ var a_3 = [
+ ["di", -1, 1],
+ ["ke", -1, 2],
+ ["me", -1, 1],
+ ["mem", 2, 5],
+ ["men", 2, 1],
+ ["meng", 4, 1],
+ ["meny", 4, 3, r_VOWEL],
+ ["pem", -1, 6],
+ ["pen", -1, 2],
+ ["peng", 8, 2],
+ ["peny", 8, 4, r_VOWEL],
+ ["ter", -1, 1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["be", -1, 3, r_KER],
+ ["belajar", 0, 4],
+ ["ber", 0, 3],
+ ["pe", -1, 1],
+ ["pelajar", 3, 2],
+ ["per", 3, 1]
+ ];
+
+ /** @const */ var /** Array */ g_vowel = [17, 65, 16];
+
+ var /** number */ I_prefix = 0;
+ var /** number */ I_measure = 0;
+
+
+ /** @return {boolean} */
+ function r_remove_particle() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_0) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_measure -= 1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_possessive_pronoun() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_measure -= 1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_SUFFIX_KAN_OK() {
+ if (I_prefix == 3)
+ {
+ return false;
+ }
+ if (I_prefix == 2)
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_SUFFIX_AN_OK() {
+ return I_prefix != 1;
+ };
+
+ /** @return {boolean} */
+ function r_SUFFIX_I_OK() {
+ if (I_prefix > 2)
+ {
+ return false;
+ }
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (!(base.eq_s_b("s")))
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_suffix() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_measure -= 1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_VOWEL() {
+ if (!(base.in_grouping(g_vowel, 97, 117)))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_KER() {
+ if (!(base.out_grouping(g_vowel, 97, 117)))
+ {
+ return false;
+ }
+ if (!(base.eq_s("er")))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_first_order_prefix() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_prefix = 1;
+ I_measure -= 1;
+ break;
+ case 2:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_prefix = 3;
+ I_measure -= 1;
+ break;
+ case 3:
+ I_prefix = 1;
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ I_measure -= 1;
+ break;
+ case 4:
+ I_prefix = 3;
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ I_measure -= 1;
+ break;
+ case 5:
+ I_prefix = 1;
+ I_measure -= 1;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ if (!(base.in_grouping(g_vowel, 97, 117)))
+ {
+ break lab1;
+ }
+ base.cursor = v_2;
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ case 6:
+ I_prefix = 3;
+ I_measure -= 1;
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ if (!(base.in_grouping(g_vowel, 97, 117)))
+ {
+ break lab3;
+ }
+ base.cursor = v_4;
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break lab2;
+ }
+ base.cursor = v_3;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_second_order_prefix() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_4);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_prefix = 2;
+ I_measure -= 1;
+ break;
+ case 2:
+ if (!base.slice_from("ajar"))
+ {
+ return false;
+ }
+ I_measure -= 1;
+ break;
+ case 3:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ I_prefix = 4;
+ I_measure -= 1;
+ break;
+ case 4:
+ if (!base.slice_from("ajar"))
+ {
+ return false;
+ }
+ I_prefix = 4;
+ I_measure -= 1;
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ I_measure = 0;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ lab1: {
+ if (!base.go_out_grouping(g_vowel, 97, 117))
+ {
+ break lab1;
+ }
+ base.cursor++;
+ I_measure += 1;
+ continue;
+ }
+ break;
+ }
+ }
+ base.cursor = v_1;
+ if (I_measure <= 2)
+ {
+ return false;
+ }
+ I_prefix = 0;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_remove_particle();
+ base.cursor = base.limit - v_2;
+ if (I_measure <= 2)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_remove_possessive_pronoun();
+ base.cursor = base.limit - v_3;
+ base.cursor = base.limit_backward;
+ if (I_measure <= 2)
+ {
+ return false;
+ }
+ lab2: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab3: {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ if (!r_remove_first_order_prefix())
+ {
+ break lab3;
+ }
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab4: {
+ /** @const */ var /** number */ v_7 = base.cursor;
+ if (I_measure <= 2)
+ {
+ break lab4;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ if (!r_remove_suffix())
+ {
+ break lab4;
+ }
+ base.cursor = base.limit_backward;
+ base.cursor = v_7;
+ if (I_measure <= 2)
+ {
+ break lab4;
+ }
+ if (!r_remove_second_order_prefix())
+ {
+ break lab4;
+ }
+ }
+ base.cursor = v_6;
+ base.cursor = v_5;
+ break lab2;
+ }
+ base.cursor = v_4;
+ /** @const */ var /** number */ v_8 = base.cursor;
+ r_remove_second_order_prefix();
+ base.cursor = v_8;
+ /** @const */ var /** number */ v_9 = base.cursor;
+ lab5: {
+ if (I_measure <= 2)
+ {
+ break lab5;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ if (!r_remove_suffix())
+ {
+ break lab5;
+ }
+ base.cursor = base.limit_backward;
+ }
+ base.cursor = v_9;
+ }
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/irish-stemmer.js b/sphinx/search/non-minified-js/irish-stemmer.js
new file mode 100644
index 00000000000..ec6601efb04
--- /dev/null
+++ b/sphinx/search/non-minified-js/irish-stemmer.js
@@ -0,0 +1,378 @@
+// Generated from irish.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var IrishStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["b'", -1, 1],
+ ["bh", -1, 4],
+ ["bhf", 1, 2],
+ ["bp", -1, 8],
+ ["ch", -1, 5],
+ ["d'", -1, 1],
+ ["d'fh", 5, 2],
+ ["dh", -1, 6],
+ ["dt", -1, 9],
+ ["fh", -1, 2],
+ ["gc", -1, 5],
+ ["gh", -1, 7],
+ ["h-", -1, 1],
+ ["m'", -1, 1],
+ ["mb", -1, 4],
+ ["mh", -1, 10],
+ ["n-", -1, 1],
+ ["nd", -1, 6],
+ ["ng", -1, 7],
+ ["ph", -1, 8],
+ ["sh", -1, 3],
+ ["t-", -1, 1],
+ ["th", -1, 9],
+ ["ts", -1, 3]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u00EDochta", -1, 1],
+ ["a\u00EDochta", 0, 1],
+ ["ire", -1, 2],
+ ["aire", 2, 2],
+ ["abh", -1, 1],
+ ["eabh", 4, 1],
+ ["ibh", -1, 1],
+ ["aibh", 6, 1],
+ ["amh", -1, 1],
+ ["eamh", 8, 1],
+ ["imh", -1, 1],
+ ["aimh", 10, 1],
+ ["\u00EDocht", -1, 1],
+ ["a\u00EDocht", 12, 1],
+ ["ir\u00ED", -1, 2],
+ ["air\u00ED", 14, 2]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u00F3ideacha", -1, 6],
+ ["patacha", -1, 5],
+ ["achta", -1, 1],
+ ["arcachta", 2, 2],
+ ["eachta", 2, 1],
+ ["grafa\u00EDochta", -1, 4],
+ ["paite", -1, 5],
+ ["ach", -1, 1],
+ ["each", 7, 1],
+ ["\u00F3ideach", 8, 6],
+ ["gineach", 8, 3],
+ ["patach", 7, 5],
+ ["grafa\u00EDoch", -1, 4],
+ ["pataigh", -1, 5],
+ ["\u00F3idigh", -1, 6],
+ ["acht\u00FAil", -1, 1],
+ ["eacht\u00FAil", 15, 1],
+ ["gineas", -1, 3],
+ ["ginis", -1, 3],
+ ["acht", -1, 1],
+ ["arcacht", 19, 2],
+ ["eacht", 19, 1],
+ ["grafa\u00EDocht", -1, 4],
+ ["arcachta\u00ED", -1, 2],
+ ["grafa\u00EDochta\u00ED", -1, 4]
+ ];
+
+ /** @const */ var a_3 = [
+ ["imid", -1, 1],
+ ["aimid", 0, 1],
+ ["\u00EDmid", -1, 1],
+ ["a\u00EDmid", 2, 1],
+ ["adh", -1, 2],
+ ["eadh", 4, 2],
+ ["faidh", -1, 1],
+ ["fidh", -1, 1],
+ ["\u00E1il", -1, 2],
+ ["ain", -1, 2],
+ ["tear", -1, 2],
+ ["tar", -1, 2]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 17, 4, 2];
+
+ var /** number */ I_p2 = 0;
+ var /** number */ I_p1 = 0;
+ var /** number */ I_pV = 0;
+
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_pV = base.limit;
+ I_p1 = base.limit;
+ I_p2 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!base.go_out_grouping(g_v, 97, 250))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_pV = base.cursor;
+ if (!base.go_in_grouping(g_v, 97, 250))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ if (!base.go_out_grouping(g_v, 97, 250))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 250))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p2 = base.cursor;
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_initial_morph() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("f"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("b"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("c"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("m"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_RV() {
+ return I_pV <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R2() {
+ return I_p2 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_noun_sfx() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_deriv() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R2())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("arc"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("gin"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("graf"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("paite"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u00F3id"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_verb_sfx() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_RV())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_initial_morph();
+ base.cursor = v_1;
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ r_noun_sfx();
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_deriv();
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_verb_sfx();
+ base.cursor = base.limit - v_4;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/italian-stemmer.js b/sphinx/search/non-minified-js/italian-stemmer.js
index df6ddfd332a..f5ab55ecc8d 100644
--- a/sphinx/search/non-minified-js/italian-stemmer.js
+++ b/sphinx/search/non-minified-js/italian-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from italian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-ItalianStemmer = function() {
+var ItalianStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["", -1, 7],
["qu", 0, 6],
@@ -238,17 +239,13 @@ ItalianStemmer = function() {
/** @return {boolean} */
function r_prelude() {
var /** number */ among_var;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -303,11 +300,11 @@ ItalianStemmer = function() {
base.cursor = v_1;
while(true)
{
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab1: {
golab2: while(true)
{
- var /** number */ v_4 = base.cursor;
+ /** @const */ var /** number */ v_4 = base.cursor;
lab3: {
if (!(base.in_grouping(g_v, 97, 249)))
{
@@ -315,7 +312,7 @@ ItalianStemmer = function() {
}
base.bra = base.cursor;
lab4: {
- var /** number */ v_5 = base.cursor;
+ /** @const */ var /** number */ v_5 = base.cursor;
lab5: {
if (!(base.eq_s("u")))
{
@@ -370,37 +367,27 @@ ItalianStemmer = function() {
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 249)))
{
break lab2;
}
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.out_grouping(g_v, 97, 249)))
{
break lab4;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 249))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 249)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab4;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
break lab3;
}
base.cursor = v_3;
@@ -408,21 +395,19 @@ ItalianStemmer = function() {
{
break lab2;
}
- golab7: while(true)
+ if (!base.go_in_grouping(g_v, 97, 249))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 249)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab2;
- }
- base.cursor++;
+ break lab2;
}
+ base.cursor++;
+ }
+ break lab1;
+ }
+ base.cursor = v_2;
+ lab5: {
+ if (!(base.eq_s("divan")))
+ {
+ break lab5;
}
break lab1;
}
@@ -431,31 +416,21 @@ ItalianStemmer = function() {
{
break lab0;
}
- lab9: {
- var /** number */ v_6 = base.cursor;
- lab10: {
+ lab6: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab7: {
if (!(base.out_grouping(g_v, 97, 249)))
{
- break lab10;
+ break lab7;
}
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 249))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 249)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab10;
- }
- base.cursor++;
+ break lab7;
}
- break lab9;
+ base.cursor++;
+ break lab6;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.in_grouping(g_v, 97, 249)))
{
break lab0;
@@ -470,72 +445,32 @@ ItalianStemmer = function() {
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_8 = base.cursor;
- lab13: {
- golab14: while(true)
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab8: {
+ if (!base.go_out_grouping(g_v, 97, 249))
{
- lab15: {
- if (!(base.in_grouping(g_v, 97, 249)))
- {
- break lab15;
- }
- break golab14;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab8;
}
- golab16: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 249))
{
- lab17: {
- if (!(base.out_grouping(g_v, 97, 249)))
- {
- break lab17;
- }
- break golab16;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab8;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab18: while(true)
+ if (!base.go_out_grouping(g_v, 97, 249))
{
- lab19: {
- if (!(base.in_grouping(g_v, 97, 249)))
- {
- break lab19;
- }
- break golab18;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab8;
}
- golab20: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 249))
{
- lab21: {
- if (!(base.out_grouping(g_v, 97, 249)))
- {
- break lab21;
- }
- break golab20;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab8;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_8;
+ base.cursor = v_5;
return true;
};
@@ -544,14 +479,10 @@ ItalianStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_1);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -584,29 +515,17 @@ ItalianStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -674,7 +593,7 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("ic")))
@@ -743,7 +662,7 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
among_var = base.find_among_b(a_4);
@@ -793,7 +712,7 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
if (base.find_among_b(a_5) == 0)
@@ -822,7 +741,7 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
base.ket = base.cursor;
if (!(base.eq_s_b("at")))
@@ -868,12 +787,12 @@ ItalianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
if (base.find_among_b(a_7) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
@@ -881,13 +800,13 @@ ItalianStemmer = function() {
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
/** @return {boolean} */
function r_vowel_suffix() {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.in_grouping_b(g_AEIO, 97, 242)))
@@ -922,7 +841,7 @@ ItalianStemmer = function() {
return false;
}
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
if (!(base.eq_s_b("h")))
@@ -950,18 +869,18 @@ ItalianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_prelude();
base.cursor = v_1;
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_attached_pronoun();
- base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab2: {
if (!r_standard_suffix())
{
@@ -969,21 +888,21 @@ ItalianStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_5;
+ base.cursor = base.limit - v_4;
if (!r_verb_suffix())
{
break lab0;
}
}
}
- base.cursor = base.limit - v_4;
- var /** number */ v_6 = base.limit - base.cursor;
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
r_vowel_suffix();
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_5;
base.cursor = base.limit_backward;
- var /** number */ v_7 = base.cursor;
+ /** @const */ var /** number */ v_6 = base.cursor;
r_postlude();
- base.cursor = v_7;
+ base.cursor = v_6;
return true;
};
diff --git a/sphinx/search/non-minified-js/lithuanian-stemmer.js b/sphinx/search/non-minified-js/lithuanian-stemmer.js
new file mode 100644
index 00000000000..213ff5979b2
--- /dev/null
+++ b/sphinx/search/non-minified-js/lithuanian-stemmer.js
@@ -0,0 +1,534 @@
+// Generated from lithuanian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var LithuanianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["a", -1, -1],
+ ["ia", 0, -1],
+ ["eria", 1, -1],
+ ["osna", 0, -1],
+ ["iosna", 3, -1],
+ ["uosna", 3, -1],
+ ["iuosna", 5, -1],
+ ["ysna", 0, -1],
+ ["\u0117sna", 0, -1],
+ ["e", -1, -1],
+ ["ie", 9, -1],
+ ["enie", 10, -1],
+ ["erie", 10, -1],
+ ["oje", 9, -1],
+ ["ioje", 13, -1],
+ ["uje", 9, -1],
+ ["iuje", 15, -1],
+ ["yje", 9, -1],
+ ["enyje", 17, -1],
+ ["eryje", 17, -1],
+ ["\u0117je", 9, -1],
+ ["ame", 9, -1],
+ ["iame", 21, -1],
+ ["sime", 9, -1],
+ ["ome", 9, -1],
+ ["\u0117me", 9, -1],
+ ["tum\u0117me", 25, -1],
+ ["ose", 9, -1],
+ ["iose", 27, -1],
+ ["uose", 27, -1],
+ ["iuose", 29, -1],
+ ["yse", 9, -1],
+ ["enyse", 31, -1],
+ ["eryse", 31, -1],
+ ["\u0117se", 9, -1],
+ ["ate", 9, -1],
+ ["iate", 35, -1],
+ ["ite", 9, -1],
+ ["kite", 37, -1],
+ ["site", 37, -1],
+ ["ote", 9, -1],
+ ["tute", 9, -1],
+ ["\u0117te", 9, -1],
+ ["tum\u0117te", 42, -1],
+ ["i", -1, -1],
+ ["ai", 44, -1],
+ ["iai", 45, -1],
+ ["eriai", 46, -1],
+ ["ei", 44, -1],
+ ["tumei", 48, -1],
+ ["ki", 44, -1],
+ ["imi", 44, -1],
+ ["erimi", 51, -1],
+ ["umi", 44, -1],
+ ["iumi", 53, -1],
+ ["si", 44, -1],
+ ["asi", 55, -1],
+ ["iasi", 56, -1],
+ ["esi", 55, -1],
+ ["iesi", 58, -1],
+ ["siesi", 59, -1],
+ ["isi", 55, -1],
+ ["aisi", 61, -1],
+ ["eisi", 61, -1],
+ ["tumeisi", 63, -1],
+ ["uisi", 61, -1],
+ ["osi", 55, -1],
+ ["\u0117josi", 66, -1],
+ ["uosi", 66, -1],
+ ["iuosi", 68, -1],
+ ["siuosi", 69, -1],
+ ["usi", 55, -1],
+ ["ausi", 71, -1],
+ ["\u010Diausi", 72, -1],
+ ["\u0105si", 55, -1],
+ ["\u0117si", 55, -1],
+ ["\u0173si", 55, -1],
+ ["t\u0173si", 76, -1],
+ ["ti", 44, -1],
+ ["enti", 78, -1],
+ ["inti", 78, -1],
+ ["oti", 78, -1],
+ ["ioti", 81, -1],
+ ["uoti", 81, -1],
+ ["iuoti", 83, -1],
+ ["auti", 78, -1],
+ ["iauti", 85, -1],
+ ["yti", 78, -1],
+ ["\u0117ti", 78, -1],
+ ["tel\u0117ti", 88, -1],
+ ["in\u0117ti", 88, -1],
+ ["ter\u0117ti", 88, -1],
+ ["ui", 44, -1],
+ ["iui", 92, -1],
+ ["eniui", 93, -1],
+ ["oj", -1, -1],
+ ["\u0117j", -1, -1],
+ ["k", -1, -1],
+ ["am", -1, -1],
+ ["iam", 98, -1],
+ ["iem", -1, -1],
+ ["im", -1, -1],
+ ["sim", 101, -1],
+ ["om", -1, -1],
+ ["tum", -1, -1],
+ ["\u0117m", -1, -1],
+ ["tum\u0117m", 105, -1],
+ ["an", -1, -1],
+ ["on", -1, -1],
+ ["ion", 108, -1],
+ ["un", -1, -1],
+ ["iun", 110, -1],
+ ["\u0117n", -1, -1],
+ ["o", -1, -1],
+ ["io", 113, -1],
+ ["enio", 114, -1],
+ ["\u0117jo", 113, -1],
+ ["uo", 113, -1],
+ ["s", -1, -1],
+ ["as", 118, -1],
+ ["ias", 119, -1],
+ ["es", 118, -1],
+ ["ies", 121, -1],
+ ["is", 118, -1],
+ ["ais", 123, -1],
+ ["iais", 124, -1],
+ ["tumeis", 123, -1],
+ ["imis", 123, -1],
+ ["enimis", 127, -1],
+ ["omis", 123, -1],
+ ["iomis", 129, -1],
+ ["umis", 123, -1],
+ ["\u0117mis", 123, -1],
+ ["enis", 123, -1],
+ ["asis", 123, -1],
+ ["ysis", 123, -1],
+ ["ams", 118, -1],
+ ["iams", 136, -1],
+ ["iems", 118, -1],
+ ["ims", 118, -1],
+ ["enims", 139, -1],
+ ["erims", 139, -1],
+ ["oms", 118, -1],
+ ["ioms", 142, -1],
+ ["ums", 118, -1],
+ ["\u0117ms", 118, -1],
+ ["ens", 118, -1],
+ ["os", 118, -1],
+ ["ios", 147, -1],
+ ["uos", 147, -1],
+ ["iuos", 149, -1],
+ ["ers", 118, -1],
+ ["us", 118, -1],
+ ["aus", 152, -1],
+ ["iaus", 153, -1],
+ ["ius", 152, -1],
+ ["ys", 118, -1],
+ ["enys", 156, -1],
+ ["erys", 156, -1],
+ ["\u0105s", 118, -1],
+ ["i\u0105s", 159, -1],
+ ["\u0117s", 118, -1],
+ ["am\u0117s", 161, -1],
+ ["iam\u0117s", 162, -1],
+ ["im\u0117s", 161, -1],
+ ["kim\u0117s", 164, -1],
+ ["sim\u0117s", 164, -1],
+ ["om\u0117s", 161, -1],
+ ["\u0117m\u0117s", 161, -1],
+ ["tum\u0117m\u0117s", 168, -1],
+ ["at\u0117s", 161, -1],
+ ["iat\u0117s", 170, -1],
+ ["sit\u0117s", 161, -1],
+ ["ot\u0117s", 161, -1],
+ ["\u0117t\u0117s", 161, -1],
+ ["tum\u0117t\u0117s", 174, -1],
+ ["\u012Fs", 118, -1],
+ ["\u016Bs", 118, -1],
+ ["t\u0173s", 118, -1],
+ ["at", -1, -1],
+ ["iat", 179, -1],
+ ["it", -1, -1],
+ ["sit", 181, -1],
+ ["ot", -1, -1],
+ ["\u0117t", -1, -1],
+ ["tum\u0117t", 184, -1],
+ ["u", -1, -1],
+ ["au", 186, -1],
+ ["iau", 187, -1],
+ ["\u010Diau", 188, -1],
+ ["iu", 186, -1],
+ ["eniu", 190, -1],
+ ["siu", 190, -1],
+ ["y", -1, -1],
+ ["\u0105", -1, -1],
+ ["i\u0105", 194, -1],
+ ["\u0117", -1, -1],
+ ["\u0119", -1, -1],
+ ["\u012F", -1, -1],
+ ["en\u012F", 198, -1],
+ ["er\u012F", 198, -1],
+ ["\u0173", -1, -1],
+ ["i\u0173", 201, -1],
+ ["er\u0173", 201, -1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["ing", -1, -1],
+ ["aj", -1, -1],
+ ["iaj", 1, -1],
+ ["iej", -1, -1],
+ ["oj", -1, -1],
+ ["ioj", 4, -1],
+ ["uoj", 4, -1],
+ ["iuoj", 6, -1],
+ ["auj", -1, -1],
+ ["\u0105j", -1, -1],
+ ["i\u0105j", 9, -1],
+ ["\u0117j", -1, -1],
+ ["\u0173j", -1, -1],
+ ["i\u0173j", 12, -1],
+ ["ok", -1, -1],
+ ["iok", 14, -1],
+ ["iuk", -1, -1],
+ ["uliuk", 16, -1],
+ ["u\u010Diuk", 16, -1],
+ ["i\u0161k", -1, -1],
+ ["iul", -1, -1],
+ ["yl", -1, -1],
+ ["\u0117l", -1, -1],
+ ["am", -1, -1],
+ ["dam", 23, -1],
+ ["jam", 23, -1],
+ ["zgan", -1, -1],
+ ["ain", -1, -1],
+ ["esn", -1, -1],
+ ["op", -1, -1],
+ ["iop", 29, -1],
+ ["ias", -1, -1],
+ ["ies", -1, -1],
+ ["ais", -1, -1],
+ ["iais", 33, -1],
+ ["os", -1, -1],
+ ["ios", 35, -1],
+ ["uos", 35, -1],
+ ["iuos", 37, -1],
+ ["aus", -1, -1],
+ ["iaus", 39, -1],
+ ["\u0105s", -1, -1],
+ ["i\u0105s", 41, -1],
+ ["\u0119s", -1, -1],
+ ["ut\u0117ait", -1, -1],
+ ["ant", -1, -1],
+ ["iant", 45, -1],
+ ["siant", 46, -1],
+ ["int", -1, -1],
+ ["ot", -1, -1],
+ ["uot", 49, -1],
+ ["iuot", 50, -1],
+ ["yt", -1, -1],
+ ["\u0117t", -1, -1],
+ ["yk\u0161t", -1, -1],
+ ["iau", -1, -1],
+ ["dav", -1, -1],
+ ["sv", -1, -1],
+ ["\u0161v", -1, -1],
+ ["yk\u0161\u010D", -1, -1],
+ ["\u0119", -1, -1],
+ ["\u0117j\u0119", 60, -1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["ojime", -1, 7],
+ ["\u0117jime", -1, 3],
+ ["avime", -1, 6],
+ ["okate", -1, 8],
+ ["aite", -1, 1],
+ ["uote", -1, 2],
+ ["asius", -1, 5],
+ ["okat\u0117s", -1, 8],
+ ["ait\u0117s", -1, 1],
+ ["uot\u0117s", -1, 2],
+ ["esiu", -1, 4]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u010D", -1, 1],
+ ["d\u017E", -1, 2]
+ ];
+
+ /** @const */ var a_4 = [
+ ["gd", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 0, 64, 1, 0, 64, 0, 0, 0, 0, 0, 0, 0, 4, 4];
+
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_step1() {
+ if (base.cursor < I_p1)
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_0) == 0)
+ {
+ base.limit_backward = v_1;
+ return false;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_step2() {
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (base.cursor < I_p1)
+ {
+ break lab0;
+ }
+ /** @const */ var /** number */ v_2 = base.limit_backward;
+ base.limit_backward = I_p1;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_1) == 0)
+ {
+ base.limit_backward = v_2;
+ break lab0;
+ }
+ base.bra = base.cursor;
+ base.limit_backward = v_2;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_conflicts() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("ait\u0117"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("uot\u0117"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0117jimas"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("esys"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("asys"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("avimas"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("ojimas"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("okat\u0117"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_chdz() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_gd() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_4) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ I_p1 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ if (!(base.eq_s("a")))
+ {
+ base.cursor = v_2;
+ break lab1;
+ }
+ base.cursor = v_3;
+ if (base.current.length <= 6)
+ {
+ base.cursor = v_2;
+ break lab1;
+ }
+ if (base.cursor >= base.limit)
+ {
+ base.cursor = v_2;
+ break lab1;
+ }
+ base.cursor++;
+ }
+ if (!base.go_out_grouping(g_v, 97, 371))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 371))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ }
+ base.cursor = v_1;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ r_fix_conflicts();
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ r_step1();
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ r_fix_chdz();
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ r_step2();
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ r_fix_chdz();
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ r_fix_gd();
+ base.cursor = base.limit - v_9;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/nepali-stemmer.js b/sphinx/search/non-minified-js/nepali-stemmer.js
new file mode 100644
index 00000000000..d6352d00a4d
--- /dev/null
+++ b/sphinx/search/non-minified-js/nepali-stemmer.js
@@ -0,0 +1,282 @@
+// Generated from nepali.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var NepaliStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0932\u093E\u0907", -1, 1],
+ ["\u0932\u093E\u0908", -1, 1],
+ ["\u0938\u0901\u0917", -1, 1],
+ ["\u0938\u0902\u0917", -1, 1],
+ ["\u092E\u093E\u0930\u094D\u092B\u0924", -1, 1],
+ ["\u0930\u0924", -1, 1],
+ ["\u0915\u093E", -1, 2],
+ ["\u092E\u093E", -1, 1],
+ ["\u0926\u094D\u0935\u093E\u0930\u093E", -1, 1],
+ ["\u0915\u093F", -1, 2],
+ ["\u092A\u091B\u093F", -1, 1],
+ ["\u0915\u0940", -1, 2],
+ ["\u0932\u0947", -1, 1],
+ ["\u0915\u0948", -1, 2],
+ ["\u0938\u0901\u0917\u0948", -1, 1],
+ ["\u092E\u0948", -1, 1],
+ ["\u0915\u094B", -1, 2]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u0901", -1, 1],
+ ["\u0902", -1, 1],
+ ["\u0948", -1, 2]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u0925\u093F\u090F", -1, 1],
+ ["\u091B", -1, 1],
+ ["\u0907\u091B", 1, 1],
+ ["\u090F\u091B", 1, 1],
+ ["\u093F\u091B", 1, 1],
+ ["\u0947\u091B", 1, 1],
+ ["\u0928\u0947\u091B", 5, 1],
+ ["\u0939\u0941\u0928\u0947\u091B", 6, 1],
+ ["\u0907\u0928\u094D\u091B", 1, 1],
+ ["\u093F\u0928\u094D\u091B", 1, 1],
+ ["\u0939\u0941\u0928\u094D\u091B", 1, 1],
+ ["\u090F\u0915\u093E", -1, 1],
+ ["\u0907\u090F\u0915\u093E", 11, 1],
+ ["\u093F\u090F\u0915\u093E", 11, 1],
+ ["\u0947\u0915\u093E", -1, 1],
+ ["\u0928\u0947\u0915\u093E", 14, 1],
+ ["\u0926\u093E", -1, 1],
+ ["\u0907\u0926\u093E", 16, 1],
+ ["\u093F\u0926\u093E", 16, 1],
+ ["\u0926\u0947\u0916\u093F", -1, 1],
+ ["\u092E\u093E\u0925\u093F", -1, 1],
+ ["\u090F\u0915\u0940", -1, 1],
+ ["\u0907\u090F\u0915\u0940", 21, 1],
+ ["\u093F\u090F\u0915\u0940", 21, 1],
+ ["\u0947\u0915\u0940", -1, 1],
+ ["\u0926\u0947\u0916\u0940", -1, 1],
+ ["\u0925\u0940", -1, 1],
+ ["\u0926\u0940", -1, 1],
+ ["\u091B\u0941", -1, 1],
+ ["\u090F\u091B\u0941", 28, 1],
+ ["\u0947\u091B\u0941", 28, 1],
+ ["\u0928\u0947\u091B\u0941", 30, 1],
+ ["\u0928\u0941", -1, 1],
+ ["\u0939\u0930\u0941", -1, 1],
+ ["\u0939\u0930\u0942", -1, 1],
+ ["\u091B\u0947", -1, 1],
+ ["\u0925\u0947", -1, 1],
+ ["\u0928\u0947", -1, 1],
+ ["\u090F\u0915\u0948", -1, 1],
+ ["\u0947\u0915\u0948", -1, 1],
+ ["\u0928\u0947\u0915\u0948", 39, 1],
+ ["\u0926\u0948", -1, 1],
+ ["\u0907\u0926\u0948", 41, 1],
+ ["\u093F\u0926\u0948", 41, 1],
+ ["\u090F\u0915\u094B", -1, 1],
+ ["\u0907\u090F\u0915\u094B", 44, 1],
+ ["\u093F\u090F\u0915\u094B", 44, 1],
+ ["\u0947\u0915\u094B", -1, 1],
+ ["\u0928\u0947\u0915\u094B", 47, 1],
+ ["\u0926\u094B", -1, 1],
+ ["\u0907\u0926\u094B", 49, 1],
+ ["\u093F\u0926\u094B", 49, 1],
+ ["\u092F\u094B", -1, 1],
+ ["\u0907\u092F\u094B", 52, 1],
+ ["\u092D\u092F\u094B", 52, 1],
+ ["\u093F\u092F\u094B", 52, 1],
+ ["\u0925\u093F\u092F\u094B", 55, 1],
+ ["\u0926\u093F\u092F\u094B", 55, 1],
+ ["\u0925\u094D\u092F\u094B", 52, 1],
+ ["\u091B\u094C", -1, 1],
+ ["\u0907\u091B\u094C", 59, 1],
+ ["\u090F\u091B\u094C", 59, 1],
+ ["\u093F\u091B\u094C", 59, 1],
+ ["\u0947\u091B\u094C", 59, 1],
+ ["\u0928\u0947\u091B\u094C", 63, 1],
+ ["\u092F\u094C", -1, 1],
+ ["\u0925\u093F\u092F\u094C", 65, 1],
+ ["\u091B\u094D\u092F\u094C", 65, 1],
+ ["\u0925\u094D\u092F\u094C", 65, 1],
+ ["\u091B\u0928\u094D", -1, 1],
+ ["\u0907\u091B\u0928\u094D", 69, 1],
+ ["\u090F\u091B\u0928\u094D", 69, 1],
+ ["\u093F\u091B\u0928\u094D", 69, 1],
+ ["\u0947\u091B\u0928\u094D", 69, 1],
+ ["\u0928\u0947\u091B\u0928\u094D", 73, 1],
+ ["\u0932\u093E\u0928\u094D", -1, 1],
+ ["\u091B\u093F\u0928\u094D", -1, 1],
+ ["\u0925\u093F\u0928\u094D", -1, 1],
+ ["\u092A\u0930\u094D", -1, 1],
+ ["\u0907\u0938\u094D", -1, 1],
+ ["\u0925\u093F\u0907\u0938\u094D", 79, 1],
+ ["\u091B\u0938\u094D", -1, 1],
+ ["\u0907\u091B\u0938\u094D", 81, 1],
+ ["\u090F\u091B\u0938\u094D", 81, 1],
+ ["\u093F\u091B\u0938\u094D", 81, 1],
+ ["\u0947\u091B\u0938\u094D", 81, 1],
+ ["\u0928\u0947\u091B\u0938\u094D", 85, 1],
+ ["\u093F\u0938\u094D", -1, 1],
+ ["\u0925\u093F\u0938\u094D", 87, 1],
+ ["\u091B\u0947\u0938\u094D", -1, 1],
+ ["\u0939\u094B\u0938\u094D", -1, 1]
+ ];
+
+
+ /** @return {boolean} */
+ function r_remove_category_1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_0);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("\u090F")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.eq_s_b("\u0947")))
+ {
+ break lab2;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_category_2() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("\u092F\u094C")))
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab2: {
+ if (!(base.eq_s_b("\u091B\u094C")))
+ {
+ break lab2;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ lab3: {
+ if (!(base.eq_s_b("\u0928\u094C")))
+ {
+ break lab3;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!(base.eq_s_b("\u0925\u0947")))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!(base.eq_s_b("\u0924\u094D\u0930")))
+ {
+ return false;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_category_3() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_remove_category_1();
+ base.cursor = base.limit - v_1;
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ r_remove_category_2();
+ base.cursor = base.limit - v_3;
+ if (!r_remove_category_3())
+ {
+ break lab0;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_2;
+ break;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/norwegian-stemmer.js b/sphinx/search/non-minified-js/norwegian-stemmer.js
index e1760631ab9..149e63c1a32 100644
--- a/sphinx/search/non-minified-js/norwegian-stemmer.js
+++ b/sphinx/search/non-minified-js/norwegian-stemmer.js
@@ -1,9 +1,28 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from norwegian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-NorwegianStemmer = function() {
+var NorwegianStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
+ ["", -1, 1],
+ ["ind", 0, -1],
+ ["kk", 0, -1],
+ ["nk", 0, -1],
+ ["amm", 0, -1],
+ ["omm", 0, -1],
+ ["kap", 0, -1],
+ ["skap", 6, 1],
+ ["pp", 0, -1],
+ ["lt", 0, -1],
+ ["ast", 0, -1],
+ ["\u00F8st", 0, -1],
+ ["v", 0, -1],
+ ["hav", 12, 1],
+ ["giv", 12, 1]
+ ];
+
+ /** @const */ var a_1 = [
["a", -1, 1],
["e", -1, 1],
["ede", 1, 1],
@@ -12,13 +31,13 @@ NorwegianStemmer = function() {
["ane", 1, 1],
["ene", 1, 1],
["hetene", 6, 1],
- ["erte", 1, 3],
+ ["erte", 1, 4],
["en", -1, 1],
["heten", 9, 1],
["ar", -1, 1],
["er", -1, 1],
["heter", 12, 1],
- ["s", -1, 2],
+ ["s", -1, 3],
["as", 14, 1],
["es", 14, 1],
["edes", 16, 1],
@@ -27,20 +46,20 @@ NorwegianStemmer = function() {
["hetenes", 19, 1],
["ens", 14, 1],
["hetens", 21, 1],
- ["ers", 14, 1],
+ ["ers", 14, 2],
["ets", 14, 1],
["et", -1, 1],
["het", 25, 1],
- ["ert", -1, 3],
+ ["ert", -1, 4],
["ast", -1, 1]
];
- /** @const */ var a_1 = [
+ /** @const */ var a_2 = [
["dt", -1, -1],
["vt", -1, -1]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_3 = [
["leg", -1, 1],
["eleg", 0, 1],
["ig", -1, 1],
@@ -54,9 +73,9 @@ NorwegianStemmer = function() {
["hetslov", 9, 1]
];
- /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 128];
+ /** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48, 2, 142];
- /** @const */ var /** Array */ g_s_ending = [119, 125, 149, 1];
+ /** @const */ var /** Array */ g_s_ending = [119, 125, 148, 1];
var /** number */ I_x = 0;
var /** number */ I_p1 = 0;
@@ -65,9 +84,9 @@ NorwegianStemmer = function() {
/** @return {boolean} */
function r_mark_regions() {
I_p1 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- var /** number */ c1 = base.cursor + 3;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
if (c1 > base.limit)
{
return false;
@@ -76,44 +95,21 @@ NorwegianStemmer = function() {
}
I_x = base.cursor;
base.cursor = v_1;
- golab0: while(true)
+ if (!base.go_out_grouping(g_v, 97, 248))
{
- var /** number */ v_2 = base.cursor;
- lab1: {
- if (!(base.in_grouping(g_v, 97, 248)))
- {
- break lab1;
- }
- base.cursor = v_2;
- break golab0;
- }
- base.cursor = v_2;
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 248))
{
- lab3: {
- if (!(base.out_grouping(g_v, 97, 248)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
- return false;
- }
- base.cursor++;
+ return false;
}
+ base.cursor++;
I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < I_x))
+ lab0: {
+ if (I_p1 >= I_x)
{
- break lab4;
+ break lab0;
}
I_p1 = I_x;
}
@@ -127,17 +123,17 @@ NorwegianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- among_var = base.find_among_b(a_0);
+ among_var = base.find_among_b(a_1);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!base.slice_del())
@@ -146,8 +142,19 @@ NorwegianStemmer = function() {
}
break;
case 2:
+ among_var = base.find_among_b(a_0);
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ break;
+ case 3:
lab0: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
if (!(base.in_grouping_b(g_s_ending, 98, 122)))
{
@@ -155,7 +162,26 @@ NorwegianStemmer = function() {
}
break lab0;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
+ lab2: {
+ if (!(base.eq_s_b("r")))
+ {
+ break lab2;
+ }
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("e")))
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
if (!(base.eq_s_b("k")))
{
return false;
@@ -170,7 +196,7 @@ NorwegianStemmer = function() {
return false;
}
break;
- case 3:
+ case 4:
if (!base.slice_from("er"))
{
return false;
@@ -182,21 +208,21 @@ NorwegianStemmer = function() {
/** @return {boolean} */
function r_consonant_pair() {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
if (base.cursor < I_p1)
{
return false;
}
- var /** number */ v_3 = base.limit_backward;
+ /** @const */ var /** number */ v_2 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- if (base.find_among_b(a_1) == 0)
+ if (base.find_among_b(a_2) == 0)
{
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_3;
+ base.limit_backward = v_2;
base.cursor = base.limit - v_1;
if (base.cursor <= base.limit_backward)
{
@@ -217,16 +243,16 @@ NorwegianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- if (base.find_among_b(a_2) == 0)
+ if (base.find_among_b(a_3) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (!base.slice_del())
{
return false;
@@ -235,17 +261,17 @@ NorwegianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_main_suffix();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_consonant_pair();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_other_suffix();
base.cursor = base.limit - v_4;
base.cursor = base.limit_backward;
diff --git a/sphinx/search/non-minified-js/porter-stemmer.js b/sphinx/search/non-minified-js/porter-stemmer.js
index 0747d2cf3ea..182b9d5edfb 100644
--- a/sphinx/search/non-minified-js/porter-stemmer.js
+++ b/sphinx/search/non-minified-js/porter-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from porter.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-PorterStemmer = function() {
+var PorterStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["s", -1, 3],
["ies", 0, 2],
@@ -115,20 +116,12 @@ PorterStemmer = function() {
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -186,38 +179,24 @@ PorterStemmer = function() {
}
break;
case 2:
- var /** number */ v_1 = base.limit - base.cursor;
- golab0: while(true)
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!base.go_out_grouping_b(g_v, 97, 121))
{
- lab1: {
- if (!(base.in_grouping_b(g_v, 97, 121)))
- {
- break lab1;
- }
- break golab0;
- }
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
+ base.cursor--;
base.cursor = base.limit - v_1;
if (!base.slice_del())
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
among_var = base.find_among_b(a_1);
- if (among_var == 0)
- {
- return false;
- }
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
switch (among_var) {
case 1:
{
- var /** number */ c1 = base.cursor;
+ /** @const */ var /** number */ c1 = base.cursor;
base.insert(base.cursor, base.cursor, "e");
base.cursor = c1;
}
@@ -240,14 +219,14 @@ PorterStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
if (!r_shortv())
{
return false;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
{
- var /** number */ c2 = base.cursor;
+ /** @const */ var /** number */ c2 = base.cursor;
base.insert(base.cursor, base.cursor, "e");
base.cursor = c2;
}
@@ -262,7 +241,7 @@ PorterStemmer = function() {
function r_Step_1c() {
base.ket = base.cursor;
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("y")))
{
@@ -277,21 +256,11 @@ PorterStemmer = function() {
}
}
base.bra = base.cursor;
- golab2: while(true)
+ if (!base.go_out_grouping_b(g_v, 97, 121))
{
- lab3: {
- if (!(base.in_grouping_b(g_v, 97, 121)))
- {
- break lab3;
- }
- break golab2;
- }
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
+ base.cursor--;
if (!base.slice_from("i"))
{
return false;
@@ -456,7 +425,7 @@ PorterStemmer = function() {
break;
case 2:
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("s")))
{
@@ -488,7 +457,6 @@ PorterStemmer = function() {
}
base.bra = base.cursor;
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!r_R2())
{
@@ -496,13 +464,12 @@ PorterStemmer = function() {
}
break lab0;
}
- base.cursor = base.limit - v_1;
if (!r_R1())
{
return false;
}
{
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab2: {
if (!r_shortv())
{
@@ -510,7 +477,7 @@ PorterStemmer = function() {
}
return false;
}
- base.cursor = base.limit - v_2;
+ base.cursor = base.limit - v_1;
}
}
if (!base.slice_del())
@@ -545,7 +512,7 @@ PorterStemmer = function() {
this.stem = /** @return {boolean} */ function() {
B_Y_found = false;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
if (!(base.eq_s("y")))
@@ -560,15 +527,15 @@ PorterStemmer = function() {
B_Y_found = true;
}
base.cursor = v_1;
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab1: {
while(true)
{
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab2: {
golab3: while(true)
{
- var /** number */ v_4 = base.cursor;
+ /** @const */ var /** number */ v_4 = base.cursor;
lab4: {
if (!(base.in_grouping(g_v, 97, 121)))
{
@@ -604,125 +571,85 @@ PorterStemmer = function() {
base.cursor = v_2;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_5 = base.cursor;
+ /** @const */ var /** number */ v_5 = base.cursor;
lab5: {
- golab6: while(true)
+ if (!base.go_out_grouping(g_v, 97, 121))
{
- lab7: {
- if (!(base.in_grouping(g_v, 97, 121)))
- {
- break lab7;
- }
- break golab6;
- }
- if (base.cursor >= base.limit)
- {
- break lab5;
- }
- base.cursor++;
+ break lab5;
}
- golab8: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 121))
{
- lab9: {
- if (!(base.out_grouping(g_v, 97, 121)))
- {
- break lab9;
- }
- break golab8;
- }
- if (base.cursor >= base.limit)
- {
- break lab5;
- }
- base.cursor++;
+ break lab5;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab10: while(true)
+ if (!base.go_out_grouping(g_v, 97, 121))
{
- lab11: {
- if (!(base.in_grouping(g_v, 97, 121)))
- {
- break lab11;
- }
- break golab10;
- }
- if (base.cursor >= base.limit)
- {
- break lab5;
- }
- base.cursor++;
+ break lab5;
}
- golab12: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 121))
{
- lab13: {
- if (!(base.out_grouping(g_v, 97, 121)))
- {
- break lab13;
- }
- break golab12;
- }
- if (base.cursor >= base.limit)
- {
- break lab5;
- }
- base.cursor++;
+ break lab5;
}
+ base.cursor++;
I_p2 = base.cursor;
}
base.cursor = v_5;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
r_Step_1a();
- base.cursor = base.limit - v_10;
- var /** number */ v_11 = base.limit - base.cursor;
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
r_Step_1b();
- base.cursor = base.limit - v_11;
- var /** number */ v_12 = base.limit - base.cursor;
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
r_Step_1c();
- base.cursor = base.limit - v_12;
- var /** number */ v_13 = base.limit - base.cursor;
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
r_Step_2();
- base.cursor = base.limit - v_13;
- var /** number */ v_14 = base.limit - base.cursor;
+ base.cursor = base.limit - v_9;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
r_Step_3();
- base.cursor = base.limit - v_14;
- var /** number */ v_15 = base.limit - base.cursor;
+ base.cursor = base.limit - v_10;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
r_Step_4();
- base.cursor = base.limit - v_15;
- var /** number */ v_16 = base.limit - base.cursor;
+ base.cursor = base.limit - v_11;
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
r_Step_5a();
- base.cursor = base.limit - v_16;
- var /** number */ v_17 = base.limit - base.cursor;
+ base.cursor = base.limit - v_12;
+ /** @const */ var /** number */ v_13 = base.limit - base.cursor;
r_Step_5b();
- base.cursor = base.limit - v_17;
+ base.cursor = base.limit - v_13;
base.cursor = base.limit_backward;
- var /** number */ v_18 = base.cursor;
- lab14: {
+ /** @const */ var /** number */ v_14 = base.cursor;
+ lab6: {
if (!B_Y_found)
{
- break lab14;
+ break lab6;
}
while(true)
{
- var /** number */ v_19 = base.cursor;
- lab15: {
- golab16: while(true)
+ /** @const */ var /** number */ v_15 = base.cursor;
+ lab7: {
+ golab8: while(true)
{
- var /** number */ v_20 = base.cursor;
- lab17: {
+ /** @const */ var /** number */ v_16 = base.cursor;
+ lab9: {
base.bra = base.cursor;
if (!(base.eq_s("Y")))
{
- break lab17;
+ break lab9;
}
base.ket = base.cursor;
- base.cursor = v_20;
- break golab16;
+ base.cursor = v_16;
+ break golab8;
}
- base.cursor = v_20;
+ base.cursor = v_16;
if (base.cursor >= base.limit)
{
- break lab15;
+ break lab7;
}
base.cursor++;
}
@@ -732,11 +659,11 @@ PorterStemmer = function() {
}
continue;
}
- base.cursor = v_19;
+ base.cursor = v_15;
break;
}
}
- base.cursor = v_18;
+ base.cursor = v_14;
return true;
};
diff --git a/sphinx/search/non-minified-js/portuguese-stemmer.js b/sphinx/search/non-minified-js/portuguese-stemmer.js
index 662b976565a..2b4a63fafe6 100644
--- a/sphinx/search/non-minified-js/portuguese-stemmer.js
+++ b/sphinx/search/non-minified-js/portuguese-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from portuguese.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-PortugueseStemmer = function() {
+var PortugueseStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["", -1, 3],
["\u00E3", 0, 1],
@@ -234,14 +235,10 @@ PortugueseStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -277,37 +274,27 @@ PortugueseStemmer = function() {
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 250)))
{
break lab2;
}
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.out_grouping(g_v, 97, 250)))
{
break lab4;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 250))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 250)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab4;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
break lab3;
}
base.cursor = v_3;
@@ -315,21 +302,11 @@ PortugueseStemmer = function() {
{
break lab2;
}
- golab7: while(true)
+ if (!base.go_in_grouping(g_v, 97, 250))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 250)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab2;
- }
- base.cursor++;
+ break lab2;
}
+ base.cursor++;
}
break lab1;
}
@@ -338,31 +315,21 @@ PortugueseStemmer = function() {
{
break lab0;
}
- lab9: {
- var /** number */ v_6 = base.cursor;
- lab10: {
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
if (!(base.out_grouping(g_v, 97, 250)))
{
- break lab10;
+ break lab6;
}
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 250))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 250)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab10;
- }
- base.cursor++;
+ break lab6;
}
- break lab9;
+ base.cursor++;
+ break lab5;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.in_grouping(g_v, 97, 250)))
{
break lab0;
@@ -377,72 +344,32 @@ PortugueseStemmer = function() {
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_8 = base.cursor;
- lab13: {
- golab14: while(true)
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ if (!base.go_out_grouping(g_v, 97, 250))
{
- lab15: {
- if (!(base.in_grouping(g_v, 97, 250)))
- {
- break lab15;
- }
- break golab14;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab16: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 250))
{
- lab17: {
- if (!(base.out_grouping(g_v, 97, 250)))
- {
- break lab17;
- }
- break golab16;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab18: while(true)
+ if (!base.go_out_grouping(g_v, 97, 250))
{
- lab19: {
- if (!(base.in_grouping(g_v, 97, 250)))
- {
- break lab19;
- }
- break golab18;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab20: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 250))
{
- lab21: {
- if (!(base.out_grouping(g_v, 97, 250)))
- {
- break lab21;
- }
- break golab20;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_8;
+ base.cursor = v_5;
return true;
};
@@ -451,14 +378,10 @@ PortugueseStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_1);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -491,29 +414,17 @@ PortugueseStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -576,7 +487,7 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
among_var = base.find_among_b(a_2);
@@ -626,7 +537,7 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
if (base.find_among_b(a_3) == 0)
@@ -655,7 +566,7 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
if (base.find_among_b(a_4) == 0)
@@ -684,7 +595,7 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
base.ket = base.cursor;
if (!(base.eq_s_b("at")))
@@ -728,12 +639,12 @@ PortugueseStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
if (base.find_among_b(a_6) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
@@ -741,7 +652,7 @@ PortugueseStemmer = function() {
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
@@ -786,14 +697,14 @@ PortugueseStemmer = function() {
}
base.ket = base.cursor;
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("u")))
{
break lab1;
}
base.bra = base.cursor;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.eq_s_b("g")))
{
break lab1;
@@ -807,7 +718,7 @@ PortugueseStemmer = function() {
return false;
}
base.bra = base.cursor;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
if (!(base.eq_s_b("c")))
{
return false;
@@ -834,19 +745,19 @@ PortugueseStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_prelude();
base.cursor = v_1;
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab4: {
if (!r_standard_suffix())
{
@@ -854,14 +765,14 @@ PortugueseStemmer = function() {
}
break lab3;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_5;
if (!r_verb_suffix())
{
break lab2;
}
}
- base.cursor = base.limit - v_5;
- var /** number */ v_7 = base.limit - base.cursor;
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab5: {
base.ket = base.cursor;
if (!(base.eq_s_b("i")))
@@ -869,12 +780,12 @@ PortugueseStemmer = function() {
break lab5;
}
base.bra = base.cursor;
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
if (!(base.eq_s_b("c")))
{
break lab5;
}
- base.cursor = base.limit - v_8;
+ base.cursor = base.limit - v_7;
if (!r_RV())
{
break lab5;
@@ -884,24 +795,24 @@ PortugueseStemmer = function() {
return false;
}
}
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_6;
break lab1;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
if (!r_residual_suffix())
{
break lab0;
}
}
}
- base.cursor = base.limit - v_3;
- var /** number */ v_9 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
r_residual_form();
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_8;
base.cursor = base.limit_backward;
- var /** number */ v_10 = base.cursor;
+ /** @const */ var /** number */ v_9 = base.cursor;
r_postlude();
- base.cursor = v_10;
+ base.cursor = v_9;
return true;
};
diff --git a/sphinx/search/non-minified-js/romanian-stemmer.js b/sphinx/search/non-minified-js/romanian-stemmer.js
index 67538f1c008..4c58e819ec2 100644
--- a/sphinx/search/non-minified-js/romanian-stemmer.js
+++ b/sphinx/search/non-minified-js/romanian-stemmer.js
@@ -1,20 +1,26 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from romanian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-RomanianStemmer = function() {
+var RomanianStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
+ ["\u015F", -1, 1],
+ ["\u0163", -1, 2]
+ ];
+
+ /** @const */ var a_1 = [
["", -1, 3],
["I", 0, 1],
["U", 0, 2]
];
- /** @const */ var a_1 = [
+ /** @const */ var a_2 = [
["ea", -1, 3],
- ["a\u0163ia", -1, 7],
+ ["a\u021Bia", -1, 7],
["aua", -1, 2],
["iua", -1, 4],
- ["a\u0163ie", -1, 7],
+ ["a\u021Bie", -1, 7],
["ele", -1, 3],
["ile", -1, 5],
["iile", 6, 4],
@@ -28,14 +34,14 @@ RomanianStemmer = function() {
["iilor", 14, 4]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_3 = [
["icala", -1, 4],
["iciva", -1, 4],
["ativa", -1, 5],
["itiva", -1, 6],
["icale", -1, 4],
- ["a\u0163iune", -1, 5],
- ["i\u0163iune", -1, 6],
+ ["a\u021Biune", -1, 5],
+ ["i\u021Biune", -1, 6],
["atoare", -1, 5],
["itoare", -1, 6],
["\u0103toare", -1, 5],
@@ -60,9 +66,9 @@ RomanianStemmer = function() {
["icit\u0103i", -1, 4],
["abilit\u0103i", -1, 1],
["ivit\u0103i", -1, 3],
- ["icit\u0103\u0163i", -1, 4],
- ["abilit\u0103\u0163i", -1, 1],
- ["ivit\u0103\u0163i", -1, 3],
+ ["icit\u0103\u021Bi", -1, 4],
+ ["abilit\u0103\u021Bi", -1, 1],
+ ["ivit\u0103\u021Bi", -1, 3],
["ical", -1, 4],
["ator", -1, 5],
["icator", 35, 4],
@@ -77,7 +83,7 @@ RomanianStemmer = function() {
["itiv\u0103", -1, 6]
];
- /** @const */ var a_3 = [
+ /** @const */ var a_4 = [
["ica", -1, 1],
["abila", -1, 1],
["ibila", -1, 1],
@@ -114,11 +120,11 @@ RomanianStemmer = function() {
["anti", -1, 1],
["isti", -1, 3],
["uti", -1, 1],
- ["i\u015Fti", -1, 3],
+ ["i\u0219ti", -1, 3],
["ivi", -1, 1],
["it\u0103i", -1, 1],
- ["o\u015Fi", -1, 1],
- ["it\u0103\u0163i", -1, 1],
+ ["o\u0219i", -1, 1],
+ ["it\u0103\u021Bi", -1, 1],
["abil", -1, 1],
["ibil", -1, 1],
["ism", -1, 3],
@@ -142,7 +148,7 @@ RomanianStemmer = function() {
["iv\u0103", -1, 1]
];
- /** @const */ var a_4 = [
+ /** @const */ var a_5 = [
["ea", -1, 1],
["ia", -1, 1],
["esc", -1, 1],
@@ -159,44 +165,44 @@ RomanianStemmer = function() {
["ise", 10, 1],
["use", 10, 1],
["\u00E2se", 10, 1],
- ["e\u015Fte", -1, 1],
- ["\u0103\u015Fte", -1, 1],
+ ["e\u0219te", -1, 1],
+ ["\u0103\u0219te", -1, 1],
["eze", -1, 1],
["ai", -1, 1],
["eai", 19, 1],
["iai", 19, 1],
["sei", -1, 2],
- ["e\u015Fti", -1, 1],
- ["\u0103\u015Fti", -1, 1],
+ ["e\u0219ti", -1, 1],
+ ["\u0103\u0219ti", -1, 1],
["ui", -1, 1],
["ezi", -1, 1],
["\u00E2i", -1, 1],
- ["a\u015Fi", -1, 1],
- ["se\u015Fi", -1, 2],
- ["ase\u015Fi", 29, 1],
- ["sese\u015Fi", 29, 2],
- ["ise\u015Fi", 29, 1],
- ["use\u015Fi", 29, 1],
- ["\u00E2se\u015Fi", 29, 1],
- ["i\u015Fi", -1, 1],
- ["u\u015Fi", -1, 1],
- ["\u00E2\u015Fi", -1, 1],
- ["a\u0163i", -1, 2],
- ["ea\u0163i", 38, 1],
- ["ia\u0163i", 38, 1],
- ["e\u0163i", -1, 2],
- ["i\u0163i", -1, 2],
- ["\u00E2\u0163i", -1, 2],
- ["ar\u0103\u0163i", -1, 1],
- ["ser\u0103\u0163i", -1, 2],
- ["aser\u0103\u0163i", 45, 1],
- ["seser\u0103\u0163i", 45, 2],
- ["iser\u0103\u0163i", 45, 1],
- ["user\u0103\u0163i", 45, 1],
- ["\u00E2ser\u0103\u0163i", 45, 1],
- ["ir\u0103\u0163i", -1, 1],
- ["ur\u0103\u0163i", -1, 1],
- ["\u00E2r\u0103\u0163i", -1, 1],
+ ["a\u0219i", -1, 1],
+ ["se\u0219i", -1, 2],
+ ["ase\u0219i", 29, 1],
+ ["sese\u0219i", 29, 2],
+ ["ise\u0219i", 29, 1],
+ ["use\u0219i", 29, 1],
+ ["\u00E2se\u0219i", 29, 1],
+ ["i\u0219i", -1, 1],
+ ["u\u0219i", -1, 1],
+ ["\u00E2\u0219i", -1, 1],
+ ["a\u021Bi", -1, 2],
+ ["ea\u021Bi", 38, 1],
+ ["ia\u021Bi", 38, 1],
+ ["e\u021Bi", -1, 2],
+ ["i\u021Bi", -1, 2],
+ ["\u00E2\u021Bi", -1, 2],
+ ["ar\u0103\u021Bi", -1, 1],
+ ["ser\u0103\u021Bi", -1, 2],
+ ["aser\u0103\u021Bi", 45, 1],
+ ["seser\u0103\u021Bi", 45, 2],
+ ["iser\u0103\u021Bi", 45, 1],
+ ["user\u0103\u021Bi", 45, 1],
+ ["\u00E2ser\u0103\u021Bi", 45, 1],
+ ["ir\u0103\u021Bi", -1, 1],
+ ["ur\u0103\u021Bi", -1, 1],
+ ["\u00E2r\u0103\u021Bi", -1, 1],
["am", -1, 1],
["eam", 54, 1],
["iam", 54, 1],
@@ -239,7 +245,7 @@ RomanianStemmer = function() {
["eaz\u0103", -1, 1]
];
- /** @const */ var a_5 = [
+ /** @const */ var a_6 = [
["a", -1, 1],
["e", -1, 1],
["ie", 1, 1],
@@ -255,15 +261,69 @@ RomanianStemmer = function() {
var /** number */ I_pV = 0;
+ /** @return {boolean} */
+ function r_norm() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ golab2: while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0219"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u021B"))
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = v_3;
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
/** @return {boolean} */
function r_prelude() {
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
golab1: while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 259)))
{
@@ -271,7 +331,7 @@ RomanianStemmer = function() {
}
base.bra = base.cursor;
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.eq_s("u")))
{
@@ -326,37 +386,27 @@ RomanianStemmer = function() {
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 259)))
{
break lab2;
}
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.out_grouping(g_v, 97, 259)))
{
break lab4;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 259))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 259)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab4;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
break lab3;
}
base.cursor = v_3;
@@ -364,21 +414,11 @@ RomanianStemmer = function() {
{
break lab2;
}
- golab7: while(true)
+ if (!base.go_in_grouping(g_v, 97, 259))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 259)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab2;
- }
- base.cursor++;
+ break lab2;
}
+ base.cursor++;
}
break lab1;
}
@@ -387,31 +427,21 @@ RomanianStemmer = function() {
{
break lab0;
}
- lab9: {
- var /** number */ v_6 = base.cursor;
- lab10: {
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
if (!(base.out_grouping(g_v, 97, 259)))
{
- break lab10;
+ break lab6;
}
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 259))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 259)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab10;
- }
- base.cursor++;
+ break lab6;
}
- break lab9;
+ base.cursor++;
+ break lab5;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.in_grouping(g_v, 97, 259)))
{
break lab0;
@@ -426,72 +456,32 @@ RomanianStemmer = function() {
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_8 = base.cursor;
- lab13: {
- golab14: while(true)
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ if (!base.go_out_grouping(g_v, 97, 259))
{
- lab15: {
- if (!(base.in_grouping(g_v, 97, 259)))
- {
- break lab15;
- }
- break golab14;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab16: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 259))
{
- lab17: {
- if (!(base.out_grouping(g_v, 97, 259)))
- {
- break lab17;
- }
- break golab16;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab18: while(true)
+ if (!base.go_out_grouping(g_v, 97, 259))
{
- lab19: {
- if (!(base.in_grouping(g_v, 97, 259)))
- {
- break lab19;
- }
- break golab18;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab20: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 259))
{
- lab21: {
- if (!(base.out_grouping(g_v, 97, 259)))
- {
- break lab21;
- }
- break golab20;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_8;
+ base.cursor = v_5;
return true;
};
@@ -500,14 +490,10 @@ RomanianStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
- among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
+ among_var = base.find_among(a_1);
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -540,36 +526,24 @@ RomanianStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
function r_step_0() {
var /** number */ among_var;
base.ket = base.cursor;
- among_var = base.find_among_b(a_1);
+ among_var = base.find_among_b(a_2);
if (among_var == 0)
{
return false;
@@ -606,7 +580,7 @@ RomanianStemmer = function() {
break;
case 5:
{
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("ab")))
{
@@ -628,7 +602,7 @@ RomanianStemmer = function() {
}
break;
case 7:
- if (!base.slice_from("a\u0163i"))
+ if (!base.slice_from("a\u021Bi"))
{
return false;
}
@@ -640,9 +614,9 @@ RomanianStemmer = function() {
/** @return {boolean} */
function r_combo_suffix() {
var /** number */ among_var;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
base.ket = base.cursor;
- among_var = base.find_among_b(a_2);
+ among_var = base.find_among_b(a_3);
if (among_var == 0)
{
return false;
@@ -701,7 +675,7 @@ RomanianStemmer = function() {
B_standard_suffix_removed = false;
while(true)
{
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
if (!r_combo_suffix())
{
@@ -713,7 +687,7 @@ RomanianStemmer = function() {
break;
}
base.ket = base.cursor;
- among_var = base.find_among_b(a_3);
+ among_var = base.find_among_b(a_4);
if (among_var == 0)
{
return false;
@@ -731,7 +705,7 @@ RomanianStemmer = function() {
}
break;
case 2:
- if (!(base.eq_s_b("\u0163")))
+ if (!(base.eq_s_b("\u021B")))
{
return false;
}
@@ -759,20 +733,20 @@ RomanianStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
- among_var = base.find_among_b(a_4);
+ among_var = base.find_among_b(a_5);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
switch (among_var) {
case 1:
lab0: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
if (!(base.out_grouping_b(g_v, 97, 259)))
{
@@ -780,10 +754,10 @@ RomanianStemmer = function() {
}
break lab0;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
if (!(base.eq_s_b("u")))
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
}
@@ -799,14 +773,14 @@ RomanianStemmer = function() {
}
break;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
/** @return {boolean} */
function r_vowel_suffix() {
base.ket = base.cursor;
- if (base.find_among_b(a_5) == 0)
+ if (base.find_among_b(a_6) == 0)
{
return false;
}
@@ -823,21 +797,22 @@ RomanianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ r_norm();
+ /** @const */ var /** number */ v_1 = base.cursor;
r_prelude();
base.cursor = v_1;
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_step_0();
- base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_standard_suffix();
- base.cursor = base.limit - v_4;
- var /** number */ v_5 = base.limit - base.cursor;
+ base.cursor = base.limit - v_3;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab2: {
if (!B_standard_suffix_removed)
{
@@ -845,21 +820,21 @@ RomanianStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_5;
if (!r_verb_suffix())
{
break lab0;
}
}
}
- base.cursor = base.limit - v_5;
- var /** number */ v_7 = base.limit - base.cursor;
+ base.cursor = base.limit - v_4;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
r_vowel_suffix();
- base.cursor = base.limit - v_7;
+ base.cursor = base.limit - v_6;
base.cursor = base.limit_backward;
- var /** number */ v_8 = base.cursor;
+ /** @const */ var /** number */ v_7 = base.cursor;
r_postlude();
- base.cursor = v_8;
+ base.cursor = v_7;
return true;
};
diff --git a/sphinx/search/non-minified-js/russian-stemmer.js b/sphinx/search/non-minified-js/russian-stemmer.js
index 28ded5fc816..36c655d6bd7 100644
--- a/sphinx/search/non-minified-js/russian-stemmer.js
+++ b/sphinx/search/non-minified-js/russian-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from russian.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-RussianStemmer = function() {
+var RussianStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["\u0432", -1, 1],
["\u0438\u0432", 0, 2],
@@ -170,69 +171,29 @@ RussianStemmer = function() {
function r_mark_regions() {
I_pV = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
- golab1: while(true)
+ if (!base.go_out_grouping(g_v, 1072, 1103))
{
- lab2: {
- if (!(base.in_grouping(g_v, 1072, 1103)))
- {
- break lab2;
- }
- break golab1;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
+ base.cursor++;
I_pV = base.cursor;
- golab3: while(true)
+ if (!base.go_in_grouping(g_v, 1072, 1103))
{
- lab4: {
- if (!(base.out_grouping(g_v, 1072, 1103)))
- {
- break lab4;
- }
- break golab3;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
- golab5: while(true)
+ base.cursor++;
+ if (!base.go_out_grouping(g_v, 1072, 1103))
{
- lab6: {
- if (!(base.in_grouping(g_v, 1072, 1103)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
- golab7: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 1072, 1103))
{
- lab8: {
- if (!(base.out_grouping(g_v, 1072, 1103)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
- }
- base.cursor++;
+ break lab0;
}
+ base.cursor++;
I_p2 = base.cursor;
}
base.cursor = v_1;
@@ -241,11 +202,7 @@ RussianStemmer = function() {
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -261,7 +218,7 @@ RussianStemmer = function() {
switch (among_var) {
case 1:
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("\u0430")))
{
@@ -312,7 +269,7 @@ RussianStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
among_var = base.find_among_b(a_2);
@@ -325,7 +282,7 @@ RussianStemmer = function() {
switch (among_var) {
case 1:
lab1: {
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
if (!(base.eq_s_b("\u0430")))
{
@@ -384,7 +341,7 @@ RussianStemmer = function() {
switch (among_var) {
case 1:
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("\u0430")))
{
@@ -499,15 +456,15 @@ RussianStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
while(true)
{
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab1: {
golab2: while(true)
{
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab3: {
base.bra = base.cursor;
if (!(base.eq_s("\u0451")))
@@ -542,12 +499,12 @@ RussianStemmer = function() {
{
return false;
}
- var /** number */ v_6 = base.limit_backward;
+ /** @const */ var /** number */ v_4 = base.limit_backward;
base.limit_backward = I_pV;
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab4: {
lab5: {
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab6: {
if (!r_perfective_gerund())
{
@@ -555,17 +512,17 @@ RussianStemmer = function() {
}
break lab5;
}
- base.cursor = base.limit - v_8;
- var /** number */ v_9 = base.limit - base.cursor;
+ base.cursor = base.limit - v_6;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab7: {
if (!r_reflexive())
{
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_7;
break lab7;
}
}
lab8: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab9: {
if (!r_adjectival())
{
@@ -573,7 +530,7 @@ RussianStemmer = function() {
}
break lab8;
}
- base.cursor = base.limit - v_10;
+ base.cursor = base.limit - v_8;
lab10: {
if (!r_verb())
{
@@ -581,7 +538,7 @@ RussianStemmer = function() {
}
break lab8;
}
- base.cursor = base.limit - v_10;
+ base.cursor = base.limit - v_8;
if (!r_noun())
{
break lab4;
@@ -589,13 +546,13 @@ RussianStemmer = function() {
}
}
}
- base.cursor = base.limit - v_7;
- var /** number */ v_11 = base.limit - base.cursor;
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab11: {
base.ket = base.cursor;
if (!(base.eq_s_b("\u0438")))
{
- base.cursor = base.limit - v_11;
+ base.cursor = base.limit - v_9;
break lab11;
}
base.bra = base.cursor;
@@ -604,13 +561,13 @@ RussianStemmer = function() {
return false;
}
}
- var /** number */ v_12 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
r_derivational();
- base.cursor = base.limit - v_12;
- var /** number */ v_13 = base.limit - base.cursor;
+ base.cursor = base.limit - v_10;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
r_tidy_up();
- base.cursor = base.limit - v_13;
- base.limit_backward = v_6;
+ base.cursor = base.limit - v_11;
+ base.limit_backward = v_4;
base.cursor = base.limit_backward;
return true;
};
diff --git a/sphinx/search/non-minified-js/serbian-stemmer.js b/sphinx/search/non-minified-js/serbian-stemmer.js
new file mode 100644
index 00000000000..7d6d0ce32e9
--- /dev/null
+++ b/sphinx/search/non-minified-js/serbian-stemmer.js
@@ -0,0 +1,4516 @@
+// Generated from serbian.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var SerbianStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0430", -1, 1],
+ ["\u0431", -1, 2],
+ ["\u0432", -1, 3],
+ ["\u0433", -1, 4],
+ ["\u0434", -1, 5],
+ ["\u0435", -1, 7],
+ ["\u0436", -1, 8],
+ ["\u0437", -1, 9],
+ ["\u0438", -1, 10],
+ ["\u043A", -1, 12],
+ ["\u043B", -1, 13],
+ ["\u043C", -1, 15],
+ ["\u043D", -1, 16],
+ ["\u043E", -1, 18],
+ ["\u043F", -1, 19],
+ ["\u0440", -1, 20],
+ ["\u0441", -1, 21],
+ ["\u0442", -1, 22],
+ ["\u0443", -1, 24],
+ ["\u0444", -1, 25],
+ ["\u0445", -1, 26],
+ ["\u0446", -1, 27],
+ ["\u0447", -1, 28],
+ ["\u0448", -1, 30],
+ ["\u0452", -1, 6],
+ ["\u0458", -1, 11],
+ ["\u0459", -1, 14],
+ ["\u045A", -1, 17],
+ ["\u045B", -1, 23],
+ ["\u045F", -1, 29]
+ ];
+
+ /** @const */ var a_1 = [
+ ["daba", -1, 73],
+ ["ajaca", -1, 12],
+ ["ejaca", -1, 14],
+ ["ljaca", -1, 13],
+ ["njaca", -1, 85],
+ ["ojaca", -1, 15],
+ ["alaca", -1, 82],
+ ["elaca", -1, 83],
+ ["olaca", -1, 84],
+ ["maca", -1, 75],
+ ["naca", -1, 76],
+ ["raca", -1, 81],
+ ["saca", -1, 80],
+ ["vaca", -1, 79],
+ ["\u0161aca", -1, 18],
+ ["aoca", -1, 82],
+ ["acaka", -1, 55],
+ ["ajaka", -1, 16],
+ ["ojaka", -1, 17],
+ ["anaka", -1, 78],
+ ["ataka", -1, 58],
+ ["etaka", -1, 59],
+ ["itaka", -1, 60],
+ ["otaka", -1, 61],
+ ["utaka", -1, 62],
+ ["a\u010Daka", -1, 54],
+ ["esama", -1, 67],
+ ["izama", -1, 87],
+ ["jacima", -1, 5],
+ ["nicima", -1, 23],
+ ["ticima", -1, 24],
+ ["teticima", 30, 21],
+ ["zicima", -1, 25],
+ ["atcima", -1, 58],
+ ["utcima", -1, 62],
+ ["\u010Dcima", -1, 74],
+ ["pesima", -1, 2],
+ ["inzima", -1, 19],
+ ["lozima", -1, 1],
+ ["metara", -1, 68],
+ ["centara", -1, 69],
+ ["istara", -1, 70],
+ ["ekata", -1, 86],
+ ["anata", -1, 53],
+ ["nstava", -1, 22],
+ ["kustava", -1, 29],
+ ["ajac", -1, 12],
+ ["ejac", -1, 14],
+ ["ljac", -1, 13],
+ ["njac", -1, 85],
+ ["anjac", 49, 11],
+ ["ojac", -1, 15],
+ ["alac", -1, 82],
+ ["elac", -1, 83],
+ ["olac", -1, 84],
+ ["mac", -1, 75],
+ ["nac", -1, 76],
+ ["rac", -1, 81],
+ ["sac", -1, 80],
+ ["vac", -1, 79],
+ ["\u0161ac", -1, 18],
+ ["jebe", -1, 88],
+ ["olce", -1, 84],
+ ["kuse", -1, 27],
+ ["rave", -1, 42],
+ ["save", -1, 52],
+ ["\u0161ave", -1, 51],
+ ["baci", -1, 89],
+ ["jaci", -1, 5],
+ ["tvenici", -1, 20],
+ ["snici", -1, 26],
+ ["tetici", -1, 21],
+ ["bojci", -1, 4],
+ ["vojci", -1, 3],
+ ["ojsci", -1, 66],
+ ["atci", -1, 58],
+ ["itci", -1, 60],
+ ["utci", -1, 62],
+ ["\u010Dci", -1, 74],
+ ["pesi", -1, 2],
+ ["inzi", -1, 19],
+ ["lozi", -1, 1],
+ ["acak", -1, 55],
+ ["usak", -1, 57],
+ ["atak", -1, 58],
+ ["etak", -1, 59],
+ ["itak", -1, 60],
+ ["otak", -1, 61],
+ ["utak", -1, 62],
+ ["a\u010Dak", -1, 54],
+ ["u\u0161ak", -1, 56],
+ ["izam", -1, 87],
+ ["tican", -1, 65],
+ ["cajan", -1, 7],
+ ["\u010Dajan", -1, 6],
+ ["voljan", -1, 77],
+ ["eskan", -1, 63],
+ ["alan", -1, 40],
+ ["bilan", -1, 33],
+ ["gilan", -1, 37],
+ ["nilan", -1, 39],
+ ["rilan", -1, 38],
+ ["silan", -1, 36],
+ ["tilan", -1, 34],
+ ["avilan", -1, 35],
+ ["laran", -1, 9],
+ ["eran", -1, 8],
+ ["asan", -1, 91],
+ ["esan", -1, 10],
+ ["dusan", -1, 31],
+ ["kusan", -1, 28],
+ ["atan", -1, 47],
+ ["pletan", -1, 50],
+ ["tetan", -1, 49],
+ ["antan", -1, 32],
+ ["pravan", -1, 44],
+ ["stavan", -1, 43],
+ ["sivan", -1, 46],
+ ["tivan", -1, 45],
+ ["ozan", -1, 41],
+ ["ti\u010Dan", -1, 64],
+ ["a\u0161an", -1, 90],
+ ["du\u0161an", -1, 30],
+ ["metar", -1, 68],
+ ["centar", -1, 69],
+ ["istar", -1, 70],
+ ["ekat", -1, 86],
+ ["enat", -1, 48],
+ ["oscu", -1, 72],
+ ["o\u0161\u0107u", -1, 71]
+ ];
+
+ /** @const */ var a_2 = [
+ ["aca", -1, 124],
+ ["eca", -1, 125],
+ ["uca", -1, 126],
+ ["ga", -1, 20],
+ ["acega", 3, 124],
+ ["ecega", 3, 125],
+ ["ucega", 3, 126],
+ ["anjijega", 3, 84],
+ ["enjijega", 3, 85],
+ ["snjijega", 3, 122],
+ ["\u0161njijega", 3, 86],
+ ["kijega", 3, 95],
+ ["skijega", 11, 1],
+ ["\u0161kijega", 11, 2],
+ ["elijega", 3, 83],
+ ["nijega", 3, 13],
+ ["osijega", 3, 123],
+ ["atijega", 3, 120],
+ ["evitijega", 3, 92],
+ ["ovitijega", 3, 93],
+ ["astijega", 3, 94],
+ ["avijega", 3, 77],
+ ["evijega", 3, 78],
+ ["ivijega", 3, 79],
+ ["ovijega", 3, 80],
+ ["o\u0161ijega", 3, 91],
+ ["anjega", 3, 84],
+ ["enjega", 3, 85],
+ ["snjega", 3, 122],
+ ["\u0161njega", 3, 86],
+ ["kega", 3, 95],
+ ["skega", 30, 1],
+ ["\u0161kega", 30, 2],
+ ["elega", 3, 83],
+ ["nega", 3, 13],
+ ["anega", 34, 10],
+ ["enega", 34, 87],
+ ["snega", 34, 159],
+ ["\u0161nega", 34, 88],
+ ["osega", 3, 123],
+ ["atega", 3, 120],
+ ["evitega", 3, 92],
+ ["ovitega", 3, 93],
+ ["astega", 3, 94],
+ ["avega", 3, 77],
+ ["evega", 3, 78],
+ ["ivega", 3, 79],
+ ["ovega", 3, 80],
+ ["a\u0107ega", 3, 14],
+ ["e\u0107ega", 3, 15],
+ ["u\u0107ega", 3, 16],
+ ["o\u0161ega", 3, 91],
+ ["acoga", 3, 124],
+ ["ecoga", 3, 125],
+ ["ucoga", 3, 126],
+ ["anjoga", 3, 84],
+ ["enjoga", 3, 85],
+ ["snjoga", 3, 122],
+ ["\u0161njoga", 3, 86],
+ ["koga", 3, 95],
+ ["skoga", 59, 1],
+ ["\u0161koga", 59, 2],
+ ["loga", 3, 19],
+ ["eloga", 62, 83],
+ ["noga", 3, 13],
+ ["cinoga", 64, 137],
+ ["\u010Dinoga", 64, 89],
+ ["osoga", 3, 123],
+ ["atoga", 3, 120],
+ ["evitoga", 3, 92],
+ ["ovitoga", 3, 93],
+ ["astoga", 3, 94],
+ ["avoga", 3, 77],
+ ["evoga", 3, 78],
+ ["ivoga", 3, 79],
+ ["ovoga", 3, 80],
+ ["a\u0107oga", 3, 14],
+ ["e\u0107oga", 3, 15],
+ ["u\u0107oga", 3, 16],
+ ["o\u0161oga", 3, 91],
+ ["uga", 3, 18],
+ ["aja", -1, 109],
+ ["caja", 81, 26],
+ ["laja", 81, 30],
+ ["raja", 81, 31],
+ ["\u0107aja", 81, 28],
+ ["\u010Daja", 81, 27],
+ ["\u0111aja", 81, 29],
+ ["bija", -1, 32],
+ ["cija", -1, 33],
+ ["dija", -1, 34],
+ ["fija", -1, 40],
+ ["gija", -1, 39],
+ ["anjija", -1, 84],
+ ["enjija", -1, 85],
+ ["snjija", -1, 122],
+ ["\u0161njija", -1, 86],
+ ["kija", -1, 95],
+ ["skija", 97, 1],
+ ["\u0161kija", 97, 2],
+ ["lija", -1, 24],
+ ["elija", 100, 83],
+ ["mija", -1, 37],
+ ["nija", -1, 13],
+ ["ganija", 103, 9],
+ ["manija", 103, 6],
+ ["panija", 103, 7],
+ ["ranija", 103, 8],
+ ["tanija", 103, 5],
+ ["pija", -1, 41],
+ ["rija", -1, 42],
+ ["rarija", 110, 21],
+ ["sija", -1, 23],
+ ["osija", 112, 123],
+ ["tija", -1, 44],
+ ["atija", 114, 120],
+ ["evitija", 114, 92],
+ ["ovitija", 114, 93],
+ ["otija", 114, 22],
+ ["astija", 114, 94],
+ ["avija", -1, 77],
+ ["evija", -1, 78],
+ ["ivija", -1, 79],
+ ["ovija", -1, 80],
+ ["zija", -1, 45],
+ ["o\u0161ija", -1, 91],
+ ["\u017Eija", -1, 38],
+ ["anja", -1, 84],
+ ["enja", -1, 85],
+ ["snja", -1, 122],
+ ["\u0161nja", -1, 86],
+ ["ka", -1, 95],
+ ["ska", 131, 1],
+ ["\u0161ka", 131, 2],
+ ["ala", -1, 104],
+ ["acala", 134, 128],
+ ["astajala", 134, 106],
+ ["istajala", 134, 107],
+ ["ostajala", 134, 108],
+ ["ijala", 134, 47],
+ ["injala", 134, 114],
+ ["nala", 134, 46],
+ ["irala", 134, 100],
+ ["urala", 134, 105],
+ ["tala", 134, 113],
+ ["astala", 144, 110],
+ ["istala", 144, 111],
+ ["ostala", 144, 112],
+ ["avala", 134, 97],
+ ["evala", 134, 96],
+ ["ivala", 134, 98],
+ ["ovala", 134, 76],
+ ["uvala", 134, 99],
+ ["a\u010Dala", 134, 102],
+ ["ela", -1, 83],
+ ["ila", -1, 116],
+ ["acila", 155, 124],
+ ["lucila", 155, 121],
+ ["nila", 155, 103],
+ ["astanila", 158, 110],
+ ["istanila", 158, 111],
+ ["ostanila", 158, 112],
+ ["rosila", 155, 127],
+ ["jetila", 155, 118],
+ ["ozila", 155, 48],
+ ["a\u010Dila", 155, 101],
+ ["lu\u010Dila", 155, 117],
+ ["ro\u0161ila", 155, 90],
+ ["ola", -1, 50],
+ ["asla", -1, 115],
+ ["nula", -1, 13],
+ ["gama", -1, 20],
+ ["logama", 171, 19],
+ ["ugama", 171, 18],
+ ["ajama", -1, 109],
+ ["cajama", 174, 26],
+ ["lajama", 174, 30],
+ ["rajama", 174, 31],
+ ["\u0107ajama", 174, 28],
+ ["\u010Dajama", 174, 27],
+ ["\u0111ajama", 174, 29],
+ ["bijama", -1, 32],
+ ["cijama", -1, 33],
+ ["dijama", -1, 34],
+ ["fijama", -1, 40],
+ ["gijama", -1, 39],
+ ["lijama", -1, 35],
+ ["mijama", -1, 37],
+ ["nijama", -1, 36],
+ ["ganijama", 188, 9],
+ ["manijama", 188, 6],
+ ["panijama", 188, 7],
+ ["ranijama", 188, 8],
+ ["tanijama", 188, 5],
+ ["pijama", -1, 41],
+ ["rijama", -1, 42],
+ ["sijama", -1, 43],
+ ["tijama", -1, 44],
+ ["zijama", -1, 45],
+ ["\u017Eijama", -1, 38],
+ ["alama", -1, 104],
+ ["ijalama", 200, 47],
+ ["nalama", 200, 46],
+ ["elama", -1, 119],
+ ["ilama", -1, 116],
+ ["ramama", -1, 52],
+ ["lemama", -1, 51],
+ ["inama", -1, 11],
+ ["cinama", 207, 137],
+ ["\u010Dinama", 207, 89],
+ ["rama", -1, 52],
+ ["arama", 210, 53],
+ ["drama", 210, 54],
+ ["erama", 210, 55],
+ ["orama", 210, 56],
+ ["basama", -1, 135],
+ ["gasama", -1, 131],
+ ["jasama", -1, 129],
+ ["kasama", -1, 133],
+ ["nasama", -1, 132],
+ ["tasama", -1, 130],
+ ["vasama", -1, 134],
+ ["esama", -1, 152],
+ ["isama", -1, 154],
+ ["etama", -1, 70],
+ ["estama", -1, 71],
+ ["istama", -1, 72],
+ ["kstama", -1, 73],
+ ["ostama", -1, 74],
+ ["avama", -1, 77],
+ ["evama", -1, 78],
+ ["ivama", -1, 79],
+ ["ba\u0161ama", -1, 63],
+ ["ga\u0161ama", -1, 64],
+ ["ja\u0161ama", -1, 61],
+ ["ka\u0161ama", -1, 62],
+ ["na\u0161ama", -1, 60],
+ ["ta\u0161ama", -1, 59],
+ ["va\u0161ama", -1, 65],
+ ["e\u0161ama", -1, 66],
+ ["i\u0161ama", -1, 67],
+ ["lema", -1, 51],
+ ["acima", -1, 124],
+ ["ecima", -1, 125],
+ ["ucima", -1, 126],
+ ["ajima", -1, 109],
+ ["cajima", 245, 26],
+ ["lajima", 245, 30],
+ ["rajima", 245, 31],
+ ["\u0107ajima", 245, 28],
+ ["\u010Dajima", 245, 27],
+ ["\u0111ajima", 245, 29],
+ ["bijima", -1, 32],
+ ["cijima", -1, 33],
+ ["dijima", -1, 34],
+ ["fijima", -1, 40],
+ ["gijima", -1, 39],
+ ["anjijima", -1, 84],
+ ["enjijima", -1, 85],
+ ["snjijima", -1, 122],
+ ["\u0161njijima", -1, 86],
+ ["kijima", -1, 95],
+ ["skijima", 261, 1],
+ ["\u0161kijima", 261, 2],
+ ["lijima", -1, 35],
+ ["elijima", 264, 83],
+ ["mijima", -1, 37],
+ ["nijima", -1, 13],
+ ["ganijima", 267, 9],
+ ["manijima", 267, 6],
+ ["panijima", 267, 7],
+ ["ranijima", 267, 8],
+ ["tanijima", 267, 5],
+ ["pijima", -1, 41],
+ ["rijima", -1, 42],
+ ["sijima", -1, 43],
+ ["osijima", 275, 123],
+ ["tijima", -1, 44],
+ ["atijima", 277, 120],
+ ["evitijima", 277, 92],
+ ["ovitijima", 277, 93],
+ ["astijima", 277, 94],
+ ["avijima", -1, 77],
+ ["evijima", -1, 78],
+ ["ivijima", -1, 79],
+ ["ovijima", -1, 80],
+ ["zijima", -1, 45],
+ ["o\u0161ijima", -1, 91],
+ ["\u017Eijima", -1, 38],
+ ["anjima", -1, 84],
+ ["enjima", -1, 85],
+ ["snjima", -1, 122],
+ ["\u0161njima", -1, 86],
+ ["kima", -1, 95],
+ ["skima", 293, 1],
+ ["\u0161kima", 293, 2],
+ ["alima", -1, 104],
+ ["ijalima", 296, 47],
+ ["nalima", 296, 46],
+ ["elima", -1, 83],
+ ["ilima", -1, 116],
+ ["ozilima", 300, 48],
+ ["olima", -1, 50],
+ ["lemima", -1, 51],
+ ["nima", -1, 13],
+ ["anima", 304, 10],
+ ["inima", 304, 11],
+ ["cinima", 306, 137],
+ ["\u010Dinima", 306, 89],
+ ["onima", 304, 12],
+ ["arima", -1, 53],
+ ["drima", -1, 54],
+ ["erima", -1, 55],
+ ["orima", -1, 56],
+ ["basima", -1, 135],
+ ["gasima", -1, 131],
+ ["jasima", -1, 129],
+ ["kasima", -1, 133],
+ ["nasima", -1, 132],
+ ["tasima", -1, 130],
+ ["vasima", -1, 134],
+ ["esima", -1, 57],
+ ["isima", -1, 58],
+ ["osima", -1, 123],
+ ["atima", -1, 120],
+ ["ikatima", 324, 68],
+ ["latima", 324, 69],
+ ["etima", -1, 70],
+ ["evitima", -1, 92],
+ ["ovitima", -1, 93],
+ ["astima", -1, 94],
+ ["estima", -1, 71],
+ ["istima", -1, 72],
+ ["kstima", -1, 73],
+ ["ostima", -1, 74],
+ ["i\u0161tima", -1, 75],
+ ["avima", -1, 77],
+ ["evima", -1, 78],
+ ["ajevima", 337, 109],
+ ["cajevima", 338, 26],
+ ["lajevima", 338, 30],
+ ["rajevima", 338, 31],
+ ["\u0107ajevima", 338, 28],
+ ["\u010Dajevima", 338, 27],
+ ["\u0111ajevima", 338, 29],
+ ["ivima", -1, 79],
+ ["ovima", -1, 80],
+ ["govima", 346, 20],
+ ["ugovima", 347, 17],
+ ["lovima", 346, 82],
+ ["olovima", 349, 49],
+ ["movima", 346, 81],
+ ["onovima", 346, 12],
+ ["stvima", -1, 3],
+ ["\u0161tvima", -1, 4],
+ ["a\u0107ima", -1, 14],
+ ["e\u0107ima", -1, 15],
+ ["u\u0107ima", -1, 16],
+ ["ba\u0161ima", -1, 63],
+ ["ga\u0161ima", -1, 64],
+ ["ja\u0161ima", -1, 61],
+ ["ka\u0161ima", -1, 62],
+ ["na\u0161ima", -1, 60],
+ ["ta\u0161ima", -1, 59],
+ ["va\u0161ima", -1, 65],
+ ["e\u0161ima", -1, 66],
+ ["i\u0161ima", -1, 67],
+ ["o\u0161ima", -1, 91],
+ ["na", -1, 13],
+ ["ana", 368, 10],
+ ["acana", 369, 128],
+ ["urana", 369, 105],
+ ["tana", 369, 113],
+ ["avana", 369, 97],
+ ["evana", 369, 96],
+ ["ivana", 369, 98],
+ ["uvana", 369, 99],
+ ["a\u010Dana", 369, 102],
+ ["acena", 368, 124],
+ ["lucena", 368, 121],
+ ["a\u010Dena", 368, 101],
+ ["lu\u010Dena", 368, 117],
+ ["ina", 368, 11],
+ ["cina", 382, 137],
+ ["anina", 382, 10],
+ ["\u010Dina", 382, 89],
+ ["ona", 368, 12],
+ ["ara", -1, 53],
+ ["dra", -1, 54],
+ ["era", -1, 55],
+ ["ora", -1, 56],
+ ["basa", -1, 135],
+ ["gasa", -1, 131],
+ ["jasa", -1, 129],
+ ["kasa", -1, 133],
+ ["nasa", -1, 132],
+ ["tasa", -1, 130],
+ ["vasa", -1, 134],
+ ["esa", -1, 57],
+ ["isa", -1, 58],
+ ["osa", -1, 123],
+ ["ata", -1, 120],
+ ["ikata", 401, 68],
+ ["lata", 401, 69],
+ ["eta", -1, 70],
+ ["evita", -1, 92],
+ ["ovita", -1, 93],
+ ["asta", -1, 94],
+ ["esta", -1, 71],
+ ["ista", -1, 72],
+ ["ksta", -1, 73],
+ ["osta", -1, 74],
+ ["nuta", -1, 13],
+ ["i\u0161ta", -1, 75],
+ ["ava", -1, 77],
+ ["eva", -1, 78],
+ ["ajeva", 415, 109],
+ ["cajeva", 416, 26],
+ ["lajeva", 416, 30],
+ ["rajeva", 416, 31],
+ ["\u0107ajeva", 416, 28],
+ ["\u010Dajeva", 416, 27],
+ ["\u0111ajeva", 416, 29],
+ ["iva", -1, 79],
+ ["ova", -1, 80],
+ ["gova", 424, 20],
+ ["ugova", 425, 17],
+ ["lova", 424, 82],
+ ["olova", 427, 49],
+ ["mova", 424, 81],
+ ["onova", 424, 12],
+ ["stva", -1, 3],
+ ["\u0161tva", -1, 4],
+ ["a\u0107a", -1, 14],
+ ["e\u0107a", -1, 15],
+ ["u\u0107a", -1, 16],
+ ["ba\u0161a", -1, 63],
+ ["ga\u0161a", -1, 64],
+ ["ja\u0161a", -1, 61],
+ ["ka\u0161a", -1, 62],
+ ["na\u0161a", -1, 60],
+ ["ta\u0161a", -1, 59],
+ ["va\u0161a", -1, 65],
+ ["e\u0161a", -1, 66],
+ ["i\u0161a", -1, 67],
+ ["o\u0161a", -1, 91],
+ ["ace", -1, 124],
+ ["ece", -1, 125],
+ ["uce", -1, 126],
+ ["luce", 448, 121],
+ ["astade", -1, 110],
+ ["istade", -1, 111],
+ ["ostade", -1, 112],
+ ["ge", -1, 20],
+ ["loge", 453, 19],
+ ["uge", 453, 18],
+ ["aje", -1, 104],
+ ["caje", 456, 26],
+ ["laje", 456, 30],
+ ["raje", 456, 31],
+ ["astaje", 456, 106],
+ ["istaje", 456, 107],
+ ["ostaje", 456, 108],
+ ["\u0107aje", 456, 28],
+ ["\u010Daje", 456, 27],
+ ["\u0111aje", 456, 29],
+ ["ije", -1, 116],
+ ["bije", 466, 32],
+ ["cije", 466, 33],
+ ["dije", 466, 34],
+ ["fije", 466, 40],
+ ["gije", 466, 39],
+ ["anjije", 466, 84],
+ ["enjije", 466, 85],
+ ["snjije", 466, 122],
+ ["\u0161njije", 466, 86],
+ ["kije", 466, 95],
+ ["skije", 476, 1],
+ ["\u0161kije", 476, 2],
+ ["lije", 466, 35],
+ ["elije", 479, 83],
+ ["mije", 466, 37],
+ ["nije", 466, 13],
+ ["ganije", 482, 9],
+ ["manije", 482, 6],
+ ["panije", 482, 7],
+ ["ranije", 482, 8],
+ ["tanije", 482, 5],
+ ["pije", 466, 41],
+ ["rije", 466, 42],
+ ["sije", 466, 43],
+ ["osije", 490, 123],
+ ["tije", 466, 44],
+ ["atije", 492, 120],
+ ["evitije", 492, 92],
+ ["ovitije", 492, 93],
+ ["astije", 492, 94],
+ ["avije", 466, 77],
+ ["evije", 466, 78],
+ ["ivije", 466, 79],
+ ["ovije", 466, 80],
+ ["zije", 466, 45],
+ ["o\u0161ije", 466, 91],
+ ["\u017Eije", 466, 38],
+ ["anje", -1, 84],
+ ["enje", -1, 85],
+ ["snje", -1, 122],
+ ["\u0161nje", -1, 86],
+ ["uje", -1, 25],
+ ["lucuje", 508, 121],
+ ["iruje", 508, 100],
+ ["lu\u010Duje", 508, 117],
+ ["ke", -1, 95],
+ ["ske", 512, 1],
+ ["\u0161ke", 512, 2],
+ ["ale", -1, 104],
+ ["acale", 515, 128],
+ ["astajale", 515, 106],
+ ["istajale", 515, 107],
+ ["ostajale", 515, 108],
+ ["ijale", 515, 47],
+ ["injale", 515, 114],
+ ["nale", 515, 46],
+ ["irale", 515, 100],
+ ["urale", 515, 105],
+ ["tale", 515, 113],
+ ["astale", 525, 110],
+ ["istale", 525, 111],
+ ["ostale", 525, 112],
+ ["avale", 515, 97],
+ ["evale", 515, 96],
+ ["ivale", 515, 98],
+ ["ovale", 515, 76],
+ ["uvale", 515, 99],
+ ["a\u010Dale", 515, 102],
+ ["ele", -1, 83],
+ ["ile", -1, 116],
+ ["acile", 536, 124],
+ ["lucile", 536, 121],
+ ["nile", 536, 103],
+ ["rosile", 536, 127],
+ ["jetile", 536, 118],
+ ["ozile", 536, 48],
+ ["a\u010Dile", 536, 101],
+ ["lu\u010Dile", 536, 117],
+ ["ro\u0161ile", 536, 90],
+ ["ole", -1, 50],
+ ["asle", -1, 115],
+ ["nule", -1, 13],
+ ["rame", -1, 52],
+ ["leme", -1, 51],
+ ["acome", -1, 124],
+ ["ecome", -1, 125],
+ ["ucome", -1, 126],
+ ["anjome", -1, 84],
+ ["enjome", -1, 85],
+ ["snjome", -1, 122],
+ ["\u0161njome", -1, 86],
+ ["kome", -1, 95],
+ ["skome", 558, 1],
+ ["\u0161kome", 558, 2],
+ ["elome", -1, 83],
+ ["nome", -1, 13],
+ ["cinome", 562, 137],
+ ["\u010Dinome", 562, 89],
+ ["osome", -1, 123],
+ ["atome", -1, 120],
+ ["evitome", -1, 92],
+ ["ovitome", -1, 93],
+ ["astome", -1, 94],
+ ["avome", -1, 77],
+ ["evome", -1, 78],
+ ["ivome", -1, 79],
+ ["ovome", -1, 80],
+ ["a\u0107ome", -1, 14],
+ ["e\u0107ome", -1, 15],
+ ["u\u0107ome", -1, 16],
+ ["o\u0161ome", -1, 91],
+ ["ne", -1, 13],
+ ["ane", 578, 10],
+ ["acane", 579, 128],
+ ["urane", 579, 105],
+ ["tane", 579, 113],
+ ["astane", 582, 110],
+ ["istane", 582, 111],
+ ["ostane", 582, 112],
+ ["avane", 579, 97],
+ ["evane", 579, 96],
+ ["ivane", 579, 98],
+ ["uvane", 579, 99],
+ ["a\u010Dane", 579, 102],
+ ["acene", 578, 124],
+ ["lucene", 578, 121],
+ ["a\u010Dene", 578, 101],
+ ["lu\u010Dene", 578, 117],
+ ["ine", 578, 11],
+ ["cine", 595, 137],
+ ["anine", 595, 10],
+ ["\u010Dine", 595, 89],
+ ["one", 578, 12],
+ ["are", -1, 53],
+ ["dre", -1, 54],
+ ["ere", -1, 55],
+ ["ore", -1, 56],
+ ["ase", -1, 161],
+ ["base", 604, 135],
+ ["acase", 604, 128],
+ ["gase", 604, 131],
+ ["jase", 604, 129],
+ ["astajase", 608, 138],
+ ["istajase", 608, 139],
+ ["ostajase", 608, 140],
+ ["injase", 608, 150],
+ ["kase", 604, 133],
+ ["nase", 604, 132],
+ ["irase", 604, 155],
+ ["urase", 604, 156],
+ ["tase", 604, 130],
+ ["vase", 604, 134],
+ ["avase", 618, 144],
+ ["evase", 618, 145],
+ ["ivase", 618, 146],
+ ["ovase", 618, 148],
+ ["uvase", 618, 147],
+ ["ese", -1, 57],
+ ["ise", -1, 58],
+ ["acise", 625, 124],
+ ["lucise", 625, 121],
+ ["rosise", 625, 127],
+ ["jetise", 625, 149],
+ ["ose", -1, 123],
+ ["astadose", 630, 141],
+ ["istadose", 630, 142],
+ ["ostadose", 630, 143],
+ ["ate", -1, 104],
+ ["acate", 634, 128],
+ ["ikate", 634, 68],
+ ["late", 634, 69],
+ ["irate", 634, 100],
+ ["urate", 634, 105],
+ ["tate", 634, 113],
+ ["avate", 634, 97],
+ ["evate", 634, 96],
+ ["ivate", 634, 98],
+ ["uvate", 634, 99],
+ ["a\u010Date", 634, 102],
+ ["ete", -1, 70],
+ ["astadete", 646, 110],
+ ["istadete", 646, 111],
+ ["ostadete", 646, 112],
+ ["astajete", 646, 106],
+ ["istajete", 646, 107],
+ ["ostajete", 646, 108],
+ ["ijete", 646, 116],
+ ["injete", 646, 114],
+ ["ujete", 646, 25],
+ ["lucujete", 655, 121],
+ ["irujete", 655, 100],
+ ["lu\u010Dujete", 655, 117],
+ ["nete", 646, 13],
+ ["astanete", 659, 110],
+ ["istanete", 659, 111],
+ ["ostanete", 659, 112],
+ ["astete", 646, 115],
+ ["ite", -1, 116],
+ ["acite", 664, 124],
+ ["lucite", 664, 121],
+ ["nite", 664, 13],
+ ["astanite", 667, 110],
+ ["istanite", 667, 111],
+ ["ostanite", 667, 112],
+ ["rosite", 664, 127],
+ ["jetite", 664, 118],
+ ["astite", 664, 115],
+ ["evite", 664, 92],
+ ["ovite", 664, 93],
+ ["a\u010Dite", 664, 101],
+ ["lu\u010Dite", 664, 117],
+ ["ro\u0161ite", 664, 90],
+ ["ajte", -1, 104],
+ ["urajte", 679, 105],
+ ["tajte", 679, 113],
+ ["astajte", 681, 106],
+ ["istajte", 681, 107],
+ ["ostajte", 681, 108],
+ ["avajte", 679, 97],
+ ["evajte", 679, 96],
+ ["ivajte", 679, 98],
+ ["uvajte", 679, 99],
+ ["ijte", -1, 116],
+ ["lucujte", -1, 121],
+ ["irujte", -1, 100],
+ ["lu\u010Dujte", -1, 117],
+ ["aste", -1, 94],
+ ["acaste", 693, 128],
+ ["astajaste", 693, 106],
+ ["istajaste", 693, 107],
+ ["ostajaste", 693, 108],
+ ["injaste", 693, 114],
+ ["iraste", 693, 100],
+ ["uraste", 693, 105],
+ ["taste", 693, 113],
+ ["avaste", 693, 97],
+ ["evaste", 693, 96],
+ ["ivaste", 693, 98],
+ ["ovaste", 693, 76],
+ ["uvaste", 693, 99],
+ ["a\u010Daste", 693, 102],
+ ["este", -1, 71],
+ ["iste", -1, 72],
+ ["aciste", 709, 124],
+ ["luciste", 709, 121],
+ ["niste", 709, 103],
+ ["rosiste", 709, 127],
+ ["jetiste", 709, 118],
+ ["a\u010Diste", 709, 101],
+ ["lu\u010Diste", 709, 117],
+ ["ro\u0161iste", 709, 90],
+ ["kste", -1, 73],
+ ["oste", -1, 74],
+ ["astadoste", 719, 110],
+ ["istadoste", 719, 111],
+ ["ostadoste", 719, 112],
+ ["nuste", -1, 13],
+ ["i\u0161te", -1, 75],
+ ["ave", -1, 77],
+ ["eve", -1, 78],
+ ["ajeve", 726, 109],
+ ["cajeve", 727, 26],
+ ["lajeve", 727, 30],
+ ["rajeve", 727, 31],
+ ["\u0107ajeve", 727, 28],
+ ["\u010Dajeve", 727, 27],
+ ["\u0111ajeve", 727, 29],
+ ["ive", -1, 79],
+ ["ove", -1, 80],
+ ["gove", 735, 20],
+ ["ugove", 736, 17],
+ ["love", 735, 82],
+ ["olove", 738, 49],
+ ["move", 735, 81],
+ ["onove", 735, 12],
+ ["a\u0107e", -1, 14],
+ ["e\u0107e", -1, 15],
+ ["u\u0107e", -1, 16],
+ ["a\u010De", -1, 101],
+ ["lu\u010De", -1, 117],
+ ["a\u0161e", -1, 104],
+ ["ba\u0161e", 747, 63],
+ ["ga\u0161e", 747, 64],
+ ["ja\u0161e", 747, 61],
+ ["astaja\u0161e", 750, 106],
+ ["istaja\u0161e", 750, 107],
+ ["ostaja\u0161e", 750, 108],
+ ["inja\u0161e", 750, 114],
+ ["ka\u0161e", 747, 62],
+ ["na\u0161e", 747, 60],
+ ["ira\u0161e", 747, 100],
+ ["ura\u0161e", 747, 105],
+ ["ta\u0161e", 747, 59],
+ ["va\u0161e", 747, 65],
+ ["ava\u0161e", 760, 97],
+ ["eva\u0161e", 760, 96],
+ ["iva\u0161e", 760, 98],
+ ["ova\u0161e", 760, 76],
+ ["uva\u0161e", 760, 99],
+ ["a\u010Da\u0161e", 747, 102],
+ ["e\u0161e", -1, 66],
+ ["i\u0161e", -1, 67],
+ ["jeti\u0161e", 768, 118],
+ ["a\u010Di\u0161e", 768, 101],
+ ["lu\u010Di\u0161e", 768, 117],
+ ["ro\u0161i\u0161e", 768, 90],
+ ["o\u0161e", -1, 91],
+ ["astado\u0161e", 773, 110],
+ ["istado\u0161e", 773, 111],
+ ["ostado\u0161e", 773, 112],
+ ["aceg", -1, 124],
+ ["eceg", -1, 125],
+ ["uceg", -1, 126],
+ ["anjijeg", -1, 84],
+ ["enjijeg", -1, 85],
+ ["snjijeg", -1, 122],
+ ["\u0161njijeg", -1, 86],
+ ["kijeg", -1, 95],
+ ["skijeg", 784, 1],
+ ["\u0161kijeg", 784, 2],
+ ["elijeg", -1, 83],
+ ["nijeg", -1, 13],
+ ["osijeg", -1, 123],
+ ["atijeg", -1, 120],
+ ["evitijeg", -1, 92],
+ ["ovitijeg", -1, 93],
+ ["astijeg", -1, 94],
+ ["avijeg", -1, 77],
+ ["evijeg", -1, 78],
+ ["ivijeg", -1, 79],
+ ["ovijeg", -1, 80],
+ ["o\u0161ijeg", -1, 91],
+ ["anjeg", -1, 84],
+ ["enjeg", -1, 85],
+ ["snjeg", -1, 122],
+ ["\u0161njeg", -1, 86],
+ ["keg", -1, 95],
+ ["eleg", -1, 83],
+ ["neg", -1, 13],
+ ["aneg", 805, 10],
+ ["eneg", 805, 87],
+ ["sneg", 805, 159],
+ ["\u0161neg", 805, 88],
+ ["oseg", -1, 123],
+ ["ateg", -1, 120],
+ ["aveg", -1, 77],
+ ["eveg", -1, 78],
+ ["iveg", -1, 79],
+ ["oveg", -1, 80],
+ ["a\u0107eg", -1, 14],
+ ["e\u0107eg", -1, 15],
+ ["u\u0107eg", -1, 16],
+ ["o\u0161eg", -1, 91],
+ ["acog", -1, 124],
+ ["ecog", -1, 125],
+ ["ucog", -1, 126],
+ ["anjog", -1, 84],
+ ["enjog", -1, 85],
+ ["snjog", -1, 122],
+ ["\u0161njog", -1, 86],
+ ["kog", -1, 95],
+ ["skog", 827, 1],
+ ["\u0161kog", 827, 2],
+ ["elog", -1, 83],
+ ["nog", -1, 13],
+ ["cinog", 831, 137],
+ ["\u010Dinog", 831, 89],
+ ["osog", -1, 123],
+ ["atog", -1, 120],
+ ["evitog", -1, 92],
+ ["ovitog", -1, 93],
+ ["astog", -1, 94],
+ ["avog", -1, 77],
+ ["evog", -1, 78],
+ ["ivog", -1, 79],
+ ["ovog", -1, 80],
+ ["a\u0107og", -1, 14],
+ ["e\u0107og", -1, 15],
+ ["u\u0107og", -1, 16],
+ ["o\u0161og", -1, 91],
+ ["ah", -1, 104],
+ ["acah", 847, 128],
+ ["astajah", 847, 106],
+ ["istajah", 847, 107],
+ ["ostajah", 847, 108],
+ ["injah", 847, 114],
+ ["irah", 847, 100],
+ ["urah", 847, 105],
+ ["tah", 847, 113],
+ ["avah", 847, 97],
+ ["evah", 847, 96],
+ ["ivah", 847, 98],
+ ["ovah", 847, 76],
+ ["uvah", 847, 99],
+ ["a\u010Dah", 847, 102],
+ ["ih", -1, 116],
+ ["acih", 862, 124],
+ ["ecih", 862, 125],
+ ["ucih", 862, 126],
+ ["lucih", 865, 121],
+ ["anjijih", 862, 84],
+ ["enjijih", 862, 85],
+ ["snjijih", 862, 122],
+ ["\u0161njijih", 862, 86],
+ ["kijih", 862, 95],
+ ["skijih", 871, 1],
+ ["\u0161kijih", 871, 2],
+ ["elijih", 862, 83],
+ ["nijih", 862, 13],
+ ["osijih", 862, 123],
+ ["atijih", 862, 120],
+ ["evitijih", 862, 92],
+ ["ovitijih", 862, 93],
+ ["astijih", 862, 94],
+ ["avijih", 862, 77],
+ ["evijih", 862, 78],
+ ["ivijih", 862, 79],
+ ["ovijih", 862, 80],
+ ["o\u0161ijih", 862, 91],
+ ["anjih", 862, 84],
+ ["enjih", 862, 85],
+ ["snjih", 862, 122],
+ ["\u0161njih", 862, 86],
+ ["kih", 862, 95],
+ ["skih", 890, 1],
+ ["\u0161kih", 890, 2],
+ ["elih", 862, 83],
+ ["nih", 862, 13],
+ ["cinih", 894, 137],
+ ["\u010Dinih", 894, 89],
+ ["osih", 862, 123],
+ ["rosih", 897, 127],
+ ["atih", 862, 120],
+ ["jetih", 862, 118],
+ ["evitih", 862, 92],
+ ["ovitih", 862, 93],
+ ["astih", 862, 94],
+ ["avih", 862, 77],
+ ["evih", 862, 78],
+ ["ivih", 862, 79],
+ ["ovih", 862, 80],
+ ["a\u0107ih", 862, 14],
+ ["e\u0107ih", 862, 15],
+ ["u\u0107ih", 862, 16],
+ ["a\u010Dih", 862, 101],
+ ["lu\u010Dih", 862, 117],
+ ["o\u0161ih", 862, 91],
+ ["ro\u0161ih", 913, 90],
+ ["astadoh", -1, 110],
+ ["istadoh", -1, 111],
+ ["ostadoh", -1, 112],
+ ["acuh", -1, 124],
+ ["ecuh", -1, 125],
+ ["ucuh", -1, 126],
+ ["a\u0107uh", -1, 14],
+ ["e\u0107uh", -1, 15],
+ ["u\u0107uh", -1, 16],
+ ["aci", -1, 124],
+ ["aceci", -1, 124],
+ ["ieci", -1, 162],
+ ["ajuci", -1, 161],
+ ["irajuci", 927, 155],
+ ["urajuci", 927, 156],
+ ["astajuci", 927, 138],
+ ["istajuci", 927, 139],
+ ["ostajuci", 927, 140],
+ ["avajuci", 927, 144],
+ ["evajuci", 927, 145],
+ ["ivajuci", 927, 146],
+ ["uvajuci", 927, 147],
+ ["ujuci", -1, 157],
+ ["lucujuci", 937, 121],
+ ["irujuci", 937, 155],
+ ["luci", -1, 121],
+ ["nuci", -1, 164],
+ ["etuci", -1, 153],
+ ["astuci", -1, 136],
+ ["gi", -1, 20],
+ ["ugi", 944, 18],
+ ["aji", -1, 109],
+ ["caji", 946, 26],
+ ["laji", 946, 30],
+ ["raji", 946, 31],
+ ["\u0107aji", 946, 28],
+ ["\u010Daji", 946, 27],
+ ["\u0111aji", 946, 29],
+ ["biji", -1, 32],
+ ["ciji", -1, 33],
+ ["diji", -1, 34],
+ ["fiji", -1, 40],
+ ["giji", -1, 39],
+ ["anjiji", -1, 84],
+ ["enjiji", -1, 85],
+ ["snjiji", -1, 122],
+ ["\u0161njiji", -1, 86],
+ ["kiji", -1, 95],
+ ["skiji", 962, 1],
+ ["\u0161kiji", 962, 2],
+ ["liji", -1, 35],
+ ["eliji", 965, 83],
+ ["miji", -1, 37],
+ ["niji", -1, 13],
+ ["ganiji", 968, 9],
+ ["maniji", 968, 6],
+ ["paniji", 968, 7],
+ ["raniji", 968, 8],
+ ["taniji", 968, 5],
+ ["piji", -1, 41],
+ ["riji", -1, 42],
+ ["siji", -1, 43],
+ ["osiji", 976, 123],
+ ["tiji", -1, 44],
+ ["atiji", 978, 120],
+ ["evitiji", 978, 92],
+ ["ovitiji", 978, 93],
+ ["astiji", 978, 94],
+ ["aviji", -1, 77],
+ ["eviji", -1, 78],
+ ["iviji", -1, 79],
+ ["oviji", -1, 80],
+ ["ziji", -1, 45],
+ ["o\u0161iji", -1, 91],
+ ["\u017Eiji", -1, 38],
+ ["anji", -1, 84],
+ ["enji", -1, 85],
+ ["snji", -1, 122],
+ ["\u0161nji", -1, 86],
+ ["ki", -1, 95],
+ ["ski", 994, 1],
+ ["\u0161ki", 994, 2],
+ ["ali", -1, 104],
+ ["acali", 997, 128],
+ ["astajali", 997, 106],
+ ["istajali", 997, 107],
+ ["ostajali", 997, 108],
+ ["ijali", 997, 47],
+ ["injali", 997, 114],
+ ["nali", 997, 46],
+ ["irali", 997, 100],
+ ["urali", 997, 105],
+ ["tali", 997, 113],
+ ["astali", 1007, 110],
+ ["istali", 1007, 111],
+ ["ostali", 1007, 112],
+ ["avali", 997, 97],
+ ["evali", 997, 96],
+ ["ivali", 997, 98],
+ ["ovali", 997, 76],
+ ["uvali", 997, 99],
+ ["a\u010Dali", 997, 102],
+ ["eli", -1, 83],
+ ["ili", -1, 116],
+ ["acili", 1018, 124],
+ ["lucili", 1018, 121],
+ ["nili", 1018, 103],
+ ["rosili", 1018, 127],
+ ["jetili", 1018, 118],
+ ["ozili", 1018, 48],
+ ["a\u010Dili", 1018, 101],
+ ["lu\u010Dili", 1018, 117],
+ ["ro\u0161ili", 1018, 90],
+ ["oli", -1, 50],
+ ["asli", -1, 115],
+ ["nuli", -1, 13],
+ ["rami", -1, 52],
+ ["lemi", -1, 51],
+ ["ni", -1, 13],
+ ["ani", 1033, 10],
+ ["acani", 1034, 128],
+ ["urani", 1034, 105],
+ ["tani", 1034, 113],
+ ["avani", 1034, 97],
+ ["evani", 1034, 96],
+ ["ivani", 1034, 98],
+ ["uvani", 1034, 99],
+ ["a\u010Dani", 1034, 102],
+ ["aceni", 1033, 124],
+ ["luceni", 1033, 121],
+ ["a\u010Deni", 1033, 101],
+ ["lu\u010Deni", 1033, 117],
+ ["ini", 1033, 11],
+ ["cini", 1047, 137],
+ ["\u010Dini", 1047, 89],
+ ["oni", 1033, 12],
+ ["ari", -1, 53],
+ ["dri", -1, 54],
+ ["eri", -1, 55],
+ ["ori", -1, 56],
+ ["basi", -1, 135],
+ ["gasi", -1, 131],
+ ["jasi", -1, 129],
+ ["kasi", -1, 133],
+ ["nasi", -1, 132],
+ ["tasi", -1, 130],
+ ["vasi", -1, 134],
+ ["esi", -1, 152],
+ ["isi", -1, 154],
+ ["osi", -1, 123],
+ ["avsi", -1, 161],
+ ["acavsi", 1065, 128],
+ ["iravsi", 1065, 155],
+ ["tavsi", 1065, 160],
+ ["etavsi", 1068, 153],
+ ["astavsi", 1068, 141],
+ ["istavsi", 1068, 142],
+ ["ostavsi", 1068, 143],
+ ["ivsi", -1, 162],
+ ["nivsi", 1073, 158],
+ ["rosivsi", 1073, 127],
+ ["nuvsi", -1, 164],
+ ["ati", -1, 104],
+ ["acati", 1077, 128],
+ ["astajati", 1077, 106],
+ ["istajati", 1077, 107],
+ ["ostajati", 1077, 108],
+ ["injati", 1077, 114],
+ ["ikati", 1077, 68],
+ ["lati", 1077, 69],
+ ["irati", 1077, 100],
+ ["urati", 1077, 105],
+ ["tati", 1077, 113],
+ ["astati", 1087, 110],
+ ["istati", 1087, 111],
+ ["ostati", 1087, 112],
+ ["avati", 1077, 97],
+ ["evati", 1077, 96],
+ ["ivati", 1077, 98],
+ ["ovati", 1077, 76],
+ ["uvati", 1077, 99],
+ ["a\u010Dati", 1077, 102],
+ ["eti", -1, 70],
+ ["iti", -1, 116],
+ ["aciti", 1098, 124],
+ ["luciti", 1098, 121],
+ ["niti", 1098, 103],
+ ["rositi", 1098, 127],
+ ["jetiti", 1098, 118],
+ ["eviti", 1098, 92],
+ ["oviti", 1098, 93],
+ ["a\u010Diti", 1098, 101],
+ ["lu\u010Diti", 1098, 117],
+ ["ro\u0161iti", 1098, 90],
+ ["asti", -1, 94],
+ ["esti", -1, 71],
+ ["isti", -1, 72],
+ ["ksti", -1, 73],
+ ["osti", -1, 74],
+ ["nuti", -1, 13],
+ ["avi", -1, 77],
+ ["evi", -1, 78],
+ ["ajevi", 1116, 109],
+ ["cajevi", 1117, 26],
+ ["lajevi", 1117, 30],
+ ["rajevi", 1117, 31],
+ ["\u0107ajevi", 1117, 28],
+ ["\u010Dajevi", 1117, 27],
+ ["\u0111ajevi", 1117, 29],
+ ["ivi", -1, 79],
+ ["ovi", -1, 80],
+ ["govi", 1125, 20],
+ ["ugovi", 1126, 17],
+ ["lovi", 1125, 82],
+ ["olovi", 1128, 49],
+ ["movi", 1125, 81],
+ ["onovi", 1125, 12],
+ ["ie\u0107i", -1, 116],
+ ["a\u010De\u0107i", -1, 101],
+ ["aju\u0107i", -1, 104],
+ ["iraju\u0107i", 1134, 100],
+ ["uraju\u0107i", 1134, 105],
+ ["astaju\u0107i", 1134, 106],
+ ["istaju\u0107i", 1134, 107],
+ ["ostaju\u0107i", 1134, 108],
+ ["avaju\u0107i", 1134, 97],
+ ["evaju\u0107i", 1134, 96],
+ ["ivaju\u0107i", 1134, 98],
+ ["uvaju\u0107i", 1134, 99],
+ ["uju\u0107i", -1, 25],
+ ["iruju\u0107i", 1144, 100],
+ ["lu\u010Duju\u0107i", 1144, 117],
+ ["nu\u0107i", -1, 13],
+ ["etu\u0107i", -1, 70],
+ ["astu\u0107i", -1, 115],
+ ["a\u010Di", -1, 101],
+ ["lu\u010Di", -1, 117],
+ ["ba\u0161i", -1, 63],
+ ["ga\u0161i", -1, 64],
+ ["ja\u0161i", -1, 61],
+ ["ka\u0161i", -1, 62],
+ ["na\u0161i", -1, 60],
+ ["ta\u0161i", -1, 59],
+ ["va\u0161i", -1, 65],
+ ["e\u0161i", -1, 66],
+ ["i\u0161i", -1, 67],
+ ["o\u0161i", -1, 91],
+ ["av\u0161i", -1, 104],
+ ["irav\u0161i", 1162, 100],
+ ["tav\u0161i", 1162, 113],
+ ["etav\u0161i", 1164, 70],
+ ["astav\u0161i", 1164, 110],
+ ["istav\u0161i", 1164, 111],
+ ["ostav\u0161i", 1164, 112],
+ ["a\u010Dav\u0161i", 1162, 102],
+ ["iv\u0161i", -1, 116],
+ ["niv\u0161i", 1170, 103],
+ ["ro\u0161iv\u0161i", 1170, 90],
+ ["nuv\u0161i", -1, 13],
+ ["aj", -1, 104],
+ ["uraj", 1174, 105],
+ ["taj", 1174, 113],
+ ["avaj", 1174, 97],
+ ["evaj", 1174, 96],
+ ["ivaj", 1174, 98],
+ ["uvaj", 1174, 99],
+ ["ij", -1, 116],
+ ["acoj", -1, 124],
+ ["ecoj", -1, 125],
+ ["ucoj", -1, 126],
+ ["anjijoj", -1, 84],
+ ["enjijoj", -1, 85],
+ ["snjijoj", -1, 122],
+ ["\u0161njijoj", -1, 86],
+ ["kijoj", -1, 95],
+ ["skijoj", 1189, 1],
+ ["\u0161kijoj", 1189, 2],
+ ["elijoj", -1, 83],
+ ["nijoj", -1, 13],
+ ["osijoj", -1, 123],
+ ["evitijoj", -1, 92],
+ ["ovitijoj", -1, 93],
+ ["astijoj", -1, 94],
+ ["avijoj", -1, 77],
+ ["evijoj", -1, 78],
+ ["ivijoj", -1, 79],
+ ["ovijoj", -1, 80],
+ ["o\u0161ijoj", -1, 91],
+ ["anjoj", -1, 84],
+ ["enjoj", -1, 85],
+ ["snjoj", -1, 122],
+ ["\u0161njoj", -1, 86],
+ ["koj", -1, 95],
+ ["skoj", 1207, 1],
+ ["\u0161koj", 1207, 2],
+ ["aloj", -1, 104],
+ ["eloj", -1, 83],
+ ["noj", -1, 13],
+ ["cinoj", 1212, 137],
+ ["\u010Dinoj", 1212, 89],
+ ["osoj", -1, 123],
+ ["atoj", -1, 120],
+ ["evitoj", -1, 92],
+ ["ovitoj", -1, 93],
+ ["astoj", -1, 94],
+ ["avoj", -1, 77],
+ ["evoj", -1, 78],
+ ["ivoj", -1, 79],
+ ["ovoj", -1, 80],
+ ["a\u0107oj", -1, 14],
+ ["e\u0107oj", -1, 15],
+ ["u\u0107oj", -1, 16],
+ ["o\u0161oj", -1, 91],
+ ["lucuj", -1, 121],
+ ["iruj", -1, 100],
+ ["lu\u010Duj", -1, 117],
+ ["al", -1, 104],
+ ["iral", 1231, 100],
+ ["ural", 1231, 105],
+ ["el", -1, 119],
+ ["il", -1, 116],
+ ["am", -1, 104],
+ ["acam", 1236, 128],
+ ["iram", 1236, 100],
+ ["uram", 1236, 105],
+ ["tam", 1236, 113],
+ ["avam", 1236, 97],
+ ["evam", 1236, 96],
+ ["ivam", 1236, 98],
+ ["uvam", 1236, 99],
+ ["a\u010Dam", 1236, 102],
+ ["em", -1, 119],
+ ["acem", 1246, 124],
+ ["ecem", 1246, 125],
+ ["ucem", 1246, 126],
+ ["astadem", 1246, 110],
+ ["istadem", 1246, 111],
+ ["ostadem", 1246, 112],
+ ["ajem", 1246, 104],
+ ["cajem", 1253, 26],
+ ["lajem", 1253, 30],
+ ["rajem", 1253, 31],
+ ["astajem", 1253, 106],
+ ["istajem", 1253, 107],
+ ["ostajem", 1253, 108],
+ ["\u0107ajem", 1253, 28],
+ ["\u010Dajem", 1253, 27],
+ ["\u0111ajem", 1253, 29],
+ ["ijem", 1246, 116],
+ ["anjijem", 1263, 84],
+ ["enjijem", 1263, 85],
+ ["snjijem", 1263, 123],
+ ["\u0161njijem", 1263, 86],
+ ["kijem", 1263, 95],
+ ["skijem", 1268, 1],
+ ["\u0161kijem", 1268, 2],
+ ["lijem", 1263, 24],
+ ["elijem", 1271, 83],
+ ["nijem", 1263, 13],
+ ["rarijem", 1263, 21],
+ ["sijem", 1263, 23],
+ ["osijem", 1275, 123],
+ ["atijem", 1263, 120],
+ ["evitijem", 1263, 92],
+ ["ovitijem", 1263, 93],
+ ["otijem", 1263, 22],
+ ["astijem", 1263, 94],
+ ["avijem", 1263, 77],
+ ["evijem", 1263, 78],
+ ["ivijem", 1263, 79],
+ ["ovijem", 1263, 80],
+ ["o\u0161ijem", 1263, 91],
+ ["anjem", 1246, 84],
+ ["enjem", 1246, 85],
+ ["injem", 1246, 114],
+ ["snjem", 1246, 122],
+ ["\u0161njem", 1246, 86],
+ ["ujem", 1246, 25],
+ ["lucujem", 1292, 121],
+ ["irujem", 1292, 100],
+ ["lu\u010Dujem", 1292, 117],
+ ["kem", 1246, 95],
+ ["skem", 1296, 1],
+ ["\u0161kem", 1296, 2],
+ ["elem", 1246, 83],
+ ["nem", 1246, 13],
+ ["anem", 1300, 10],
+ ["astanem", 1301, 110],
+ ["istanem", 1301, 111],
+ ["ostanem", 1301, 112],
+ ["enem", 1300, 87],
+ ["snem", 1300, 159],
+ ["\u0161nem", 1300, 88],
+ ["basem", 1246, 135],
+ ["gasem", 1246, 131],
+ ["jasem", 1246, 129],
+ ["kasem", 1246, 133],
+ ["nasem", 1246, 132],
+ ["tasem", 1246, 130],
+ ["vasem", 1246, 134],
+ ["esem", 1246, 152],
+ ["isem", 1246, 154],
+ ["osem", 1246, 123],
+ ["atem", 1246, 120],
+ ["etem", 1246, 70],
+ ["evitem", 1246, 92],
+ ["ovitem", 1246, 93],
+ ["astem", 1246, 94],
+ ["istem", 1246, 151],
+ ["i\u0161tem", 1246, 75],
+ ["avem", 1246, 77],
+ ["evem", 1246, 78],
+ ["ivem", 1246, 79],
+ ["a\u0107em", 1246, 14],
+ ["e\u0107em", 1246, 15],
+ ["u\u0107em", 1246, 16],
+ ["ba\u0161em", 1246, 63],
+ ["ga\u0161em", 1246, 64],
+ ["ja\u0161em", 1246, 61],
+ ["ka\u0161em", 1246, 62],
+ ["na\u0161em", 1246, 60],
+ ["ta\u0161em", 1246, 59],
+ ["va\u0161em", 1246, 65],
+ ["e\u0161em", 1246, 66],
+ ["i\u0161em", 1246, 67],
+ ["o\u0161em", 1246, 91],
+ ["im", -1, 116],
+ ["acim", 1341, 124],
+ ["ecim", 1341, 125],
+ ["ucim", 1341, 126],
+ ["lucim", 1344, 121],
+ ["anjijim", 1341, 84],
+ ["enjijim", 1341, 85],
+ ["snjijim", 1341, 122],
+ ["\u0161njijim", 1341, 86],
+ ["kijim", 1341, 95],
+ ["skijim", 1350, 1],
+ ["\u0161kijim", 1350, 2],
+ ["elijim", 1341, 83],
+ ["nijim", 1341, 13],
+ ["osijim", 1341, 123],
+ ["atijim", 1341, 120],
+ ["evitijim", 1341, 92],
+ ["ovitijim", 1341, 93],
+ ["astijim", 1341, 94],
+ ["avijim", 1341, 77],
+ ["evijim", 1341, 78],
+ ["ivijim", 1341, 79],
+ ["ovijim", 1341, 80],
+ ["o\u0161ijim", 1341, 91],
+ ["anjim", 1341, 84],
+ ["enjim", 1341, 85],
+ ["snjim", 1341, 122],
+ ["\u0161njim", 1341, 86],
+ ["kim", 1341, 95],
+ ["skim", 1369, 1],
+ ["\u0161kim", 1369, 2],
+ ["elim", 1341, 83],
+ ["nim", 1341, 13],
+ ["cinim", 1373, 137],
+ ["\u010Dinim", 1373, 89],
+ ["osim", 1341, 123],
+ ["rosim", 1376, 127],
+ ["atim", 1341, 120],
+ ["jetim", 1341, 118],
+ ["evitim", 1341, 92],
+ ["ovitim", 1341, 93],
+ ["astim", 1341, 94],
+ ["avim", 1341, 77],
+ ["evim", 1341, 78],
+ ["ivim", 1341, 79],
+ ["ovim", 1341, 80],
+ ["a\u0107im", 1341, 14],
+ ["e\u0107im", 1341, 15],
+ ["u\u0107im", 1341, 16],
+ ["a\u010Dim", 1341, 101],
+ ["lu\u010Dim", 1341, 117],
+ ["o\u0161im", 1341, 91],
+ ["ro\u0161im", 1392, 90],
+ ["acom", -1, 124],
+ ["ecom", -1, 125],
+ ["ucom", -1, 126],
+ ["gom", -1, 20],
+ ["logom", 1397, 19],
+ ["ugom", 1397, 18],
+ ["bijom", -1, 32],
+ ["cijom", -1, 33],
+ ["dijom", -1, 34],
+ ["fijom", -1, 40],
+ ["gijom", -1, 39],
+ ["lijom", -1, 35],
+ ["mijom", -1, 37],
+ ["nijom", -1, 36],
+ ["ganijom", 1407, 9],
+ ["manijom", 1407, 6],
+ ["panijom", 1407, 7],
+ ["ranijom", 1407, 8],
+ ["tanijom", 1407, 5],
+ ["pijom", -1, 41],
+ ["rijom", -1, 42],
+ ["sijom", -1, 43],
+ ["tijom", -1, 44],
+ ["zijom", -1, 45],
+ ["\u017Eijom", -1, 38],
+ ["anjom", -1, 84],
+ ["enjom", -1, 85],
+ ["snjom", -1, 122],
+ ["\u0161njom", -1, 86],
+ ["kom", -1, 95],
+ ["skom", 1423, 1],
+ ["\u0161kom", 1423, 2],
+ ["alom", -1, 104],
+ ["ijalom", 1426, 47],
+ ["nalom", 1426, 46],
+ ["elom", -1, 83],
+ ["ilom", -1, 116],
+ ["ozilom", 1430, 48],
+ ["olom", -1, 50],
+ ["ramom", -1, 52],
+ ["lemom", -1, 51],
+ ["nom", -1, 13],
+ ["anom", 1435, 10],
+ ["inom", 1435, 11],
+ ["cinom", 1437, 137],
+ ["aninom", 1437, 10],
+ ["\u010Dinom", 1437, 89],
+ ["onom", 1435, 12],
+ ["arom", -1, 53],
+ ["drom", -1, 54],
+ ["erom", -1, 55],
+ ["orom", -1, 56],
+ ["basom", -1, 135],
+ ["gasom", -1, 131],
+ ["jasom", -1, 129],
+ ["kasom", -1, 133],
+ ["nasom", -1, 132],
+ ["tasom", -1, 130],
+ ["vasom", -1, 134],
+ ["esom", -1, 57],
+ ["isom", -1, 58],
+ ["osom", -1, 123],
+ ["atom", -1, 120],
+ ["ikatom", 1456, 68],
+ ["latom", 1456, 69],
+ ["etom", -1, 70],
+ ["evitom", -1, 92],
+ ["ovitom", -1, 93],
+ ["astom", -1, 94],
+ ["estom", -1, 71],
+ ["istom", -1, 72],
+ ["kstom", -1, 73],
+ ["ostom", -1, 74],
+ ["avom", -1, 77],
+ ["evom", -1, 78],
+ ["ivom", -1, 79],
+ ["ovom", -1, 80],
+ ["lovom", 1470, 82],
+ ["movom", 1470, 81],
+ ["stvom", -1, 3],
+ ["\u0161tvom", -1, 4],
+ ["a\u0107om", -1, 14],
+ ["e\u0107om", -1, 15],
+ ["u\u0107om", -1, 16],
+ ["ba\u0161om", -1, 63],
+ ["ga\u0161om", -1, 64],
+ ["ja\u0161om", -1, 61],
+ ["ka\u0161om", -1, 62],
+ ["na\u0161om", -1, 60],
+ ["ta\u0161om", -1, 59],
+ ["va\u0161om", -1, 65],
+ ["e\u0161om", -1, 66],
+ ["i\u0161om", -1, 67],
+ ["o\u0161om", -1, 91],
+ ["an", -1, 104],
+ ["acan", 1488, 128],
+ ["iran", 1488, 100],
+ ["uran", 1488, 105],
+ ["tan", 1488, 113],
+ ["avan", 1488, 97],
+ ["evan", 1488, 96],
+ ["ivan", 1488, 98],
+ ["uvan", 1488, 99],
+ ["a\u010Dan", 1488, 102],
+ ["acen", -1, 124],
+ ["lucen", -1, 121],
+ ["a\u010Den", -1, 101],
+ ["lu\u010Den", -1, 117],
+ ["anin", -1, 10],
+ ["ao", -1, 104],
+ ["acao", 1503, 128],
+ ["astajao", 1503, 106],
+ ["istajao", 1503, 107],
+ ["ostajao", 1503, 108],
+ ["injao", 1503, 114],
+ ["irao", 1503, 100],
+ ["urao", 1503, 105],
+ ["tao", 1503, 113],
+ ["astao", 1511, 110],
+ ["istao", 1511, 111],
+ ["ostao", 1511, 112],
+ ["avao", 1503, 97],
+ ["evao", 1503, 96],
+ ["ivao", 1503, 98],
+ ["ovao", 1503, 76],
+ ["uvao", 1503, 99],
+ ["a\u010Dao", 1503, 102],
+ ["go", -1, 20],
+ ["ugo", 1521, 18],
+ ["io", -1, 116],
+ ["acio", 1523, 124],
+ ["lucio", 1523, 121],
+ ["lio", 1523, 24],
+ ["nio", 1523, 103],
+ ["rario", 1523, 21],
+ ["sio", 1523, 23],
+ ["rosio", 1529, 127],
+ ["jetio", 1523, 118],
+ ["otio", 1523, 22],
+ ["a\u010Dio", 1523, 101],
+ ["lu\u010Dio", 1523, 117],
+ ["ro\u0161io", 1523, 90],
+ ["bijo", -1, 32],
+ ["cijo", -1, 33],
+ ["dijo", -1, 34],
+ ["fijo", -1, 40],
+ ["gijo", -1, 39],
+ ["lijo", -1, 35],
+ ["mijo", -1, 37],
+ ["nijo", -1, 36],
+ ["pijo", -1, 41],
+ ["rijo", -1, 42],
+ ["sijo", -1, 43],
+ ["tijo", -1, 44],
+ ["zijo", -1, 45],
+ ["\u017Eijo", -1, 38],
+ ["anjo", -1, 84],
+ ["enjo", -1, 85],
+ ["snjo", -1, 122],
+ ["\u0161njo", -1, 86],
+ ["ko", -1, 95],
+ ["sko", 1554, 1],
+ ["\u0161ko", 1554, 2],
+ ["alo", -1, 104],
+ ["acalo", 1557, 128],
+ ["astajalo", 1557, 106],
+ ["istajalo", 1557, 107],
+ ["ostajalo", 1557, 108],
+ ["ijalo", 1557, 47],
+ ["injalo", 1557, 114],
+ ["nalo", 1557, 46],
+ ["iralo", 1557, 100],
+ ["uralo", 1557, 105],
+ ["talo", 1557, 113],
+ ["astalo", 1567, 110],
+ ["istalo", 1567, 111],
+ ["ostalo", 1567, 112],
+ ["avalo", 1557, 97],
+ ["evalo", 1557, 96],
+ ["ivalo", 1557, 98],
+ ["ovalo", 1557, 76],
+ ["uvalo", 1557, 99],
+ ["a\u010Dalo", 1557, 102],
+ ["elo", -1, 83],
+ ["ilo", -1, 116],
+ ["acilo", 1578, 124],
+ ["lucilo", 1578, 121],
+ ["nilo", 1578, 103],
+ ["rosilo", 1578, 127],
+ ["jetilo", 1578, 118],
+ ["a\u010Dilo", 1578, 101],
+ ["lu\u010Dilo", 1578, 117],
+ ["ro\u0161ilo", 1578, 90],
+ ["aslo", -1, 115],
+ ["nulo", -1, 13],
+ ["amo", -1, 104],
+ ["acamo", 1589, 128],
+ ["ramo", 1589, 52],
+ ["iramo", 1591, 100],
+ ["uramo", 1591, 105],
+ ["tamo", 1589, 113],
+ ["avamo", 1589, 97],
+ ["evamo", 1589, 96],
+ ["ivamo", 1589, 98],
+ ["uvamo", 1589, 99],
+ ["a\u010Damo", 1589, 102],
+ ["emo", -1, 119],
+ ["astademo", 1600, 110],
+ ["istademo", 1600, 111],
+ ["ostademo", 1600, 112],
+ ["astajemo", 1600, 106],
+ ["istajemo", 1600, 107],
+ ["ostajemo", 1600, 108],
+ ["ijemo", 1600, 116],
+ ["injemo", 1600, 114],
+ ["ujemo", 1600, 25],
+ ["lucujemo", 1609, 121],
+ ["irujemo", 1609, 100],
+ ["lu\u010Dujemo", 1609, 117],
+ ["lemo", 1600, 51],
+ ["nemo", 1600, 13],
+ ["astanemo", 1614, 110],
+ ["istanemo", 1614, 111],
+ ["ostanemo", 1614, 112],
+ ["etemo", 1600, 70],
+ ["astemo", 1600, 115],
+ ["imo", -1, 116],
+ ["acimo", 1620, 124],
+ ["lucimo", 1620, 121],
+ ["nimo", 1620, 13],
+ ["astanimo", 1623, 110],
+ ["istanimo", 1623, 111],
+ ["ostanimo", 1623, 112],
+ ["rosimo", 1620, 127],
+ ["etimo", 1620, 70],
+ ["jetimo", 1628, 118],
+ ["astimo", 1620, 115],
+ ["a\u010Dimo", 1620, 101],
+ ["lu\u010Dimo", 1620, 117],
+ ["ro\u0161imo", 1620, 90],
+ ["ajmo", -1, 104],
+ ["urajmo", 1634, 105],
+ ["tajmo", 1634, 113],
+ ["astajmo", 1636, 106],
+ ["istajmo", 1636, 107],
+ ["ostajmo", 1636, 108],
+ ["avajmo", 1634, 97],
+ ["evajmo", 1634, 96],
+ ["ivajmo", 1634, 98],
+ ["uvajmo", 1634, 99],
+ ["ijmo", -1, 116],
+ ["ujmo", -1, 25],
+ ["lucujmo", 1645, 121],
+ ["irujmo", 1645, 100],
+ ["lu\u010Dujmo", 1645, 117],
+ ["asmo", -1, 104],
+ ["acasmo", 1649, 128],
+ ["astajasmo", 1649, 106],
+ ["istajasmo", 1649, 107],
+ ["ostajasmo", 1649, 108],
+ ["injasmo", 1649, 114],
+ ["irasmo", 1649, 100],
+ ["urasmo", 1649, 105],
+ ["tasmo", 1649, 113],
+ ["avasmo", 1649, 97],
+ ["evasmo", 1649, 96],
+ ["ivasmo", 1649, 98],
+ ["ovasmo", 1649, 76],
+ ["uvasmo", 1649, 99],
+ ["a\u010Dasmo", 1649, 102],
+ ["ismo", -1, 116],
+ ["acismo", 1664, 124],
+ ["lucismo", 1664, 121],
+ ["nismo", 1664, 103],
+ ["rosismo", 1664, 127],
+ ["jetismo", 1664, 118],
+ ["a\u010Dismo", 1664, 101],
+ ["lu\u010Dismo", 1664, 117],
+ ["ro\u0161ismo", 1664, 90],
+ ["astadosmo", -1, 110],
+ ["istadosmo", -1, 111],
+ ["ostadosmo", -1, 112],
+ ["nusmo", -1, 13],
+ ["no", -1, 13],
+ ["ano", 1677, 104],
+ ["acano", 1678, 128],
+ ["urano", 1678, 105],
+ ["tano", 1678, 113],
+ ["avano", 1678, 97],
+ ["evano", 1678, 96],
+ ["ivano", 1678, 98],
+ ["uvano", 1678, 99],
+ ["a\u010Dano", 1678, 102],
+ ["aceno", 1677, 124],
+ ["luceno", 1677, 121],
+ ["a\u010Deno", 1677, 101],
+ ["lu\u010Deno", 1677, 117],
+ ["ino", 1677, 11],
+ ["cino", 1691, 137],
+ ["\u010Dino", 1691, 89],
+ ["ato", -1, 120],
+ ["ikato", 1694, 68],
+ ["lato", 1694, 69],
+ ["eto", -1, 70],
+ ["evito", -1, 92],
+ ["ovito", -1, 93],
+ ["asto", -1, 94],
+ ["esto", -1, 71],
+ ["isto", -1, 72],
+ ["ksto", -1, 73],
+ ["osto", -1, 74],
+ ["nuto", -1, 13],
+ ["nuo", -1, 13],
+ ["avo", -1, 77],
+ ["evo", -1, 78],
+ ["ivo", -1, 79],
+ ["ovo", -1, 80],
+ ["stvo", -1, 3],
+ ["\u0161tvo", -1, 4],
+ ["as", -1, 161],
+ ["acas", 1713, 128],
+ ["iras", 1713, 155],
+ ["uras", 1713, 156],
+ ["tas", 1713, 160],
+ ["avas", 1713, 144],
+ ["evas", 1713, 145],
+ ["ivas", 1713, 146],
+ ["uvas", 1713, 147],
+ ["es", -1, 163],
+ ["astades", 1722, 141],
+ ["istades", 1722, 142],
+ ["ostades", 1722, 143],
+ ["astajes", 1722, 138],
+ ["istajes", 1722, 139],
+ ["ostajes", 1722, 140],
+ ["ijes", 1722, 162],
+ ["injes", 1722, 150],
+ ["ujes", 1722, 157],
+ ["lucujes", 1731, 121],
+ ["irujes", 1731, 155],
+ ["nes", 1722, 164],
+ ["astanes", 1734, 141],
+ ["istanes", 1734, 142],
+ ["ostanes", 1734, 143],
+ ["etes", 1722, 153],
+ ["astes", 1722, 136],
+ ["is", -1, 162],
+ ["acis", 1740, 124],
+ ["lucis", 1740, 121],
+ ["nis", 1740, 158],
+ ["rosis", 1740, 127],
+ ["jetis", 1740, 149],
+ ["at", -1, 104],
+ ["acat", 1746, 128],
+ ["astajat", 1746, 106],
+ ["istajat", 1746, 107],
+ ["ostajat", 1746, 108],
+ ["injat", 1746, 114],
+ ["irat", 1746, 100],
+ ["urat", 1746, 105],
+ ["tat", 1746, 113],
+ ["astat", 1754, 110],
+ ["istat", 1754, 111],
+ ["ostat", 1754, 112],
+ ["avat", 1746, 97],
+ ["evat", 1746, 96],
+ ["ivat", 1746, 98],
+ ["irivat", 1760, 100],
+ ["ovat", 1746, 76],
+ ["uvat", 1746, 99],
+ ["a\u010Dat", 1746, 102],
+ ["it", -1, 116],
+ ["acit", 1765, 124],
+ ["lucit", 1765, 121],
+ ["rosit", 1765, 127],
+ ["jetit", 1765, 118],
+ ["a\u010Dit", 1765, 101],
+ ["lu\u010Dit", 1765, 117],
+ ["ro\u0161it", 1765, 90],
+ ["nut", -1, 13],
+ ["astadu", -1, 110],
+ ["istadu", -1, 111],
+ ["ostadu", -1, 112],
+ ["gu", -1, 20],
+ ["logu", 1777, 19],
+ ["ugu", 1777, 18],
+ ["ahu", -1, 104],
+ ["acahu", 1780, 128],
+ ["astajahu", 1780, 106],
+ ["istajahu", 1780, 107],
+ ["ostajahu", 1780, 108],
+ ["injahu", 1780, 114],
+ ["irahu", 1780, 100],
+ ["urahu", 1780, 105],
+ ["avahu", 1780, 97],
+ ["evahu", 1780, 96],
+ ["ivahu", 1780, 98],
+ ["ovahu", 1780, 76],
+ ["uvahu", 1780, 99],
+ ["a\u010Dahu", 1780, 102],
+ ["aju", -1, 104],
+ ["caju", 1794, 26],
+ ["acaju", 1795, 128],
+ ["laju", 1794, 30],
+ ["raju", 1794, 31],
+ ["iraju", 1798, 100],
+ ["uraju", 1798, 105],
+ ["taju", 1794, 113],
+ ["astaju", 1801, 106],
+ ["istaju", 1801, 107],
+ ["ostaju", 1801, 108],
+ ["avaju", 1794, 97],
+ ["evaju", 1794, 96],
+ ["ivaju", 1794, 98],
+ ["uvaju", 1794, 99],
+ ["\u0107aju", 1794, 28],
+ ["\u010Daju", 1794, 27],
+ ["a\u010Daju", 1810, 102],
+ ["\u0111aju", 1794, 29],
+ ["iju", -1, 116],
+ ["biju", 1813, 32],
+ ["ciju", 1813, 33],
+ ["diju", 1813, 34],
+ ["fiju", 1813, 40],
+ ["giju", 1813, 39],
+ ["anjiju", 1813, 84],
+ ["enjiju", 1813, 85],
+ ["snjiju", 1813, 122],
+ ["\u0161njiju", 1813, 86],
+ ["kiju", 1813, 95],
+ ["liju", 1813, 24],
+ ["eliju", 1824, 83],
+ ["miju", 1813, 37],
+ ["niju", 1813, 13],
+ ["ganiju", 1827, 9],
+ ["maniju", 1827, 6],
+ ["paniju", 1827, 7],
+ ["raniju", 1827, 8],
+ ["taniju", 1827, 5],
+ ["piju", 1813, 41],
+ ["riju", 1813, 42],
+ ["rariju", 1834, 21],
+ ["siju", 1813, 23],
+ ["osiju", 1836, 123],
+ ["tiju", 1813, 44],
+ ["atiju", 1838, 120],
+ ["otiju", 1838, 22],
+ ["aviju", 1813, 77],
+ ["eviju", 1813, 78],
+ ["iviju", 1813, 79],
+ ["oviju", 1813, 80],
+ ["ziju", 1813, 45],
+ ["o\u0161iju", 1813, 91],
+ ["\u017Eiju", 1813, 38],
+ ["anju", -1, 84],
+ ["enju", -1, 85],
+ ["snju", -1, 122],
+ ["\u0161nju", -1, 86],
+ ["uju", -1, 25],
+ ["lucuju", 1852, 121],
+ ["iruju", 1852, 100],
+ ["lu\u010Duju", 1852, 117],
+ ["ku", -1, 95],
+ ["sku", 1856, 1],
+ ["\u0161ku", 1856, 2],
+ ["alu", -1, 104],
+ ["ijalu", 1859, 47],
+ ["nalu", 1859, 46],
+ ["elu", -1, 83],
+ ["ilu", -1, 116],
+ ["ozilu", 1863, 48],
+ ["olu", -1, 50],
+ ["ramu", -1, 52],
+ ["acemu", -1, 124],
+ ["ecemu", -1, 125],
+ ["ucemu", -1, 126],
+ ["anjijemu", -1, 84],
+ ["enjijemu", -1, 85],
+ ["snjijemu", -1, 122],
+ ["\u0161njijemu", -1, 86],
+ ["kijemu", -1, 95],
+ ["skijemu", 1874, 1],
+ ["\u0161kijemu", 1874, 2],
+ ["elijemu", -1, 83],
+ ["nijemu", -1, 13],
+ ["osijemu", -1, 123],
+ ["atijemu", -1, 120],
+ ["evitijemu", -1, 92],
+ ["ovitijemu", -1, 93],
+ ["astijemu", -1, 94],
+ ["avijemu", -1, 77],
+ ["evijemu", -1, 78],
+ ["ivijemu", -1, 79],
+ ["ovijemu", -1, 80],
+ ["o\u0161ijemu", -1, 91],
+ ["anjemu", -1, 84],
+ ["enjemu", -1, 85],
+ ["snjemu", -1, 122],
+ ["\u0161njemu", -1, 86],
+ ["kemu", -1, 95],
+ ["skemu", 1893, 1],
+ ["\u0161kemu", 1893, 2],
+ ["lemu", -1, 51],
+ ["elemu", 1896, 83],
+ ["nemu", -1, 13],
+ ["anemu", 1898, 10],
+ ["enemu", 1898, 87],
+ ["snemu", 1898, 159],
+ ["\u0161nemu", 1898, 88],
+ ["osemu", -1, 123],
+ ["atemu", -1, 120],
+ ["evitemu", -1, 92],
+ ["ovitemu", -1, 93],
+ ["astemu", -1, 94],
+ ["avemu", -1, 77],
+ ["evemu", -1, 78],
+ ["ivemu", -1, 79],
+ ["ovemu", -1, 80],
+ ["a\u0107emu", -1, 14],
+ ["e\u0107emu", -1, 15],
+ ["u\u0107emu", -1, 16],
+ ["o\u0161emu", -1, 91],
+ ["acomu", -1, 124],
+ ["ecomu", -1, 125],
+ ["ucomu", -1, 126],
+ ["anjomu", -1, 84],
+ ["enjomu", -1, 85],
+ ["snjomu", -1, 122],
+ ["\u0161njomu", -1, 86],
+ ["komu", -1, 95],
+ ["skomu", 1923, 1],
+ ["\u0161komu", 1923, 2],
+ ["elomu", -1, 83],
+ ["nomu", -1, 13],
+ ["cinomu", 1927, 137],
+ ["\u010Dinomu", 1927, 89],
+ ["osomu", -1, 123],
+ ["atomu", -1, 120],
+ ["evitomu", -1, 92],
+ ["ovitomu", -1, 93],
+ ["astomu", -1, 94],
+ ["avomu", -1, 77],
+ ["evomu", -1, 78],
+ ["ivomu", -1, 79],
+ ["ovomu", -1, 80],
+ ["a\u0107omu", -1, 14],
+ ["e\u0107omu", -1, 15],
+ ["u\u0107omu", -1, 16],
+ ["o\u0161omu", -1, 91],
+ ["nu", -1, 13],
+ ["anu", 1943, 10],
+ ["astanu", 1944, 110],
+ ["istanu", 1944, 111],
+ ["ostanu", 1944, 112],
+ ["inu", 1943, 11],
+ ["cinu", 1948, 137],
+ ["aninu", 1948, 10],
+ ["\u010Dinu", 1948, 89],
+ ["onu", 1943, 12],
+ ["aru", -1, 53],
+ ["dru", -1, 54],
+ ["eru", -1, 55],
+ ["oru", -1, 56],
+ ["basu", -1, 135],
+ ["gasu", -1, 131],
+ ["jasu", -1, 129],
+ ["kasu", -1, 133],
+ ["nasu", -1, 132],
+ ["tasu", -1, 130],
+ ["vasu", -1, 134],
+ ["esu", -1, 57],
+ ["isu", -1, 58],
+ ["osu", -1, 123],
+ ["atu", -1, 120],
+ ["ikatu", 1967, 68],
+ ["latu", 1967, 69],
+ ["etu", -1, 70],
+ ["evitu", -1, 92],
+ ["ovitu", -1, 93],
+ ["astu", -1, 94],
+ ["estu", -1, 71],
+ ["istu", -1, 72],
+ ["kstu", -1, 73],
+ ["ostu", -1, 74],
+ ["i\u0161tu", -1, 75],
+ ["avu", -1, 77],
+ ["evu", -1, 78],
+ ["ivu", -1, 79],
+ ["ovu", -1, 80],
+ ["lovu", 1982, 82],
+ ["movu", 1982, 81],
+ ["stvu", -1, 3],
+ ["\u0161tvu", -1, 4],
+ ["ba\u0161u", -1, 63],
+ ["ga\u0161u", -1, 64],
+ ["ja\u0161u", -1, 61],
+ ["ka\u0161u", -1, 62],
+ ["na\u0161u", -1, 60],
+ ["ta\u0161u", -1, 59],
+ ["va\u0161u", -1, 65],
+ ["e\u0161u", -1, 66],
+ ["i\u0161u", -1, 67],
+ ["o\u0161u", -1, 91],
+ ["avav", -1, 97],
+ ["evav", -1, 96],
+ ["ivav", -1, 98],
+ ["uvav", -1, 99],
+ ["kov", -1, 95],
+ ["a\u0161", -1, 104],
+ ["ira\u0161", 2002, 100],
+ ["ura\u0161", 2002, 105],
+ ["ta\u0161", 2002, 113],
+ ["ava\u0161", 2002, 97],
+ ["eva\u0161", 2002, 96],
+ ["iva\u0161", 2002, 98],
+ ["uva\u0161", 2002, 99],
+ ["a\u010Da\u0161", 2002, 102],
+ ["e\u0161", -1, 119],
+ ["astade\u0161", 2011, 110],
+ ["istade\u0161", 2011, 111],
+ ["ostade\u0161", 2011, 112],
+ ["astaje\u0161", 2011, 106],
+ ["istaje\u0161", 2011, 107],
+ ["ostaje\u0161", 2011, 108],
+ ["ije\u0161", 2011, 116],
+ ["inje\u0161", 2011, 114],
+ ["uje\u0161", 2011, 25],
+ ["iruje\u0161", 2020, 100],
+ ["lu\u010Duje\u0161", 2020, 117],
+ ["ne\u0161", 2011, 13],
+ ["astane\u0161", 2023, 110],
+ ["istane\u0161", 2023, 111],
+ ["ostane\u0161", 2023, 112],
+ ["ete\u0161", 2011, 70],
+ ["aste\u0161", 2011, 115],
+ ["i\u0161", -1, 116],
+ ["ni\u0161", 2029, 103],
+ ["jeti\u0161", 2029, 118],
+ ["a\u010Di\u0161", 2029, 101],
+ ["lu\u010Di\u0161", 2029, 117],
+ ["ro\u0161i\u0161", 2029, 90]
+ ];
+
+ /** @const */ var a_3 = [
+ ["a", -1, 1],
+ ["oga", 0, 1],
+ ["ama", 0, 1],
+ ["ima", 0, 1],
+ ["ena", 0, 1],
+ ["e", -1, 1],
+ ["og", -1, 1],
+ ["anog", 6, 1],
+ ["enog", 6, 1],
+ ["anih", -1, 1],
+ ["enih", -1, 1],
+ ["i", -1, 1],
+ ["ani", 11, 1],
+ ["eni", 11, 1],
+ ["anoj", -1, 1],
+ ["enoj", -1, 1],
+ ["anim", -1, 1],
+ ["enim", -1, 1],
+ ["om", -1, 1],
+ ["enom", 18, 1],
+ ["o", -1, 1],
+ ["ano", 20, 1],
+ ["eno", 20, 1],
+ ["ost", -1, 1],
+ ["u", -1, 1],
+ ["enu", 24, 1]
+ ];
+
+ /** @const */ var /** Array */ g_v = [17, 65, 16];
+
+ /** @const */ var /** Array */ g_sa = [65, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 128];
+
+ /** @const */ var /** Array */ g_ca = [119, 95, 23, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 136, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 0, 0, 0, 16];
+
+ /** @const */ var /** Array */ g_rg = [1];
+
+ var /** number */ I_p1 = 0;
+ var /** boolean */ B_no_diacritics = false;
+
+
+ /** @return {boolean} */
+ function r_cyr_to_lat() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ golab2: while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("b"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("v"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("d"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u0111"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u017E"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("z"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("j"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("l"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("lj"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("m"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("nj"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("o"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("p"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("r"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("s"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("t"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u0107"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("u"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("f"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("h"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("c"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("\u010D"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("d\u017E"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("\u0161"))
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = v_3;
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_prelude() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ golab2: while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ if (!(base.in_grouping(g_ca, 98, 382)))
+ {
+ break lab3;
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s("ije")))
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ if (!(base.in_grouping(g_ca, 98, 382)))
+ {
+ break lab3;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ base.cursor = v_3;
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab5: {
+ golab6: while(true)
+ {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab7: {
+ if (!(base.in_grouping(g_ca, 98, 382)))
+ {
+ break lab7;
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s("je")))
+ {
+ break lab7;
+ }
+ base.ket = base.cursor;
+ if (!(base.in_grouping(g_ca, 98, 382)))
+ {
+ break lab7;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ base.cursor = v_6;
+ break golab6;
+ }
+ base.cursor = v_6;
+ if (base.cursor >= base.limit)
+ {
+ break lab5;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_5;
+ break;
+ }
+ }
+ base.cursor = v_4;
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab8: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_8 = base.cursor;
+ lab9: {
+ golab10: while(true)
+ {
+ /** @const */ var /** number */ v_9 = base.cursor;
+ lab11: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("dj")))
+ {
+ break lab11;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("\u0111"))
+ {
+ return false;
+ }
+ base.cursor = v_9;
+ break golab10;
+ }
+ base.cursor = v_9;
+ if (base.cursor >= base.limit)
+ {
+ break lab9;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_8;
+ break;
+ }
+ }
+ base.cursor = v_7;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ B_no_diacritics = true;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!base.go_out_grouping(g_sa, 263, 382))
+ {
+ break lab0;
+ }
+ base.cursor++;
+ B_no_diacritics = false;
+ }
+ base.cursor = v_1;
+ I_p1 = base.limit;
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ if (!base.go_out_grouping(g_v, 97, 117))
+ {
+ break lab1;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ if (I_p1 >= 2)
+ {
+ break lab1;
+ }
+ if (!base.go_in_grouping(g_v, 97, 117))
+ {
+ break lab1;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ }
+ base.cursor = v_2;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab2: {
+ golab3: while(true)
+ {
+ lab4: {
+ if (!(base.eq_s("r")))
+ {
+ break lab4;
+ }
+ break golab3;
+ }
+ if (base.cursor >= base.limit)
+ {
+ break lab2;
+ }
+ base.cursor++;
+ }
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
+ if (base.cursor < 2)
+ {
+ break lab6;
+ }
+ break lab5;
+ }
+ base.cursor = v_4;
+ if (!base.go_in_grouping(g_rg, 114, 114))
+ {
+ break lab2;
+ }
+ base.cursor++;
+ }
+ if ((I_p1 - base.cursor) <= 1)
+ {
+ break lab2;
+ }
+ I_p1 = base.cursor;
+ }
+ base.cursor = v_3;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_Step_1() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_1);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("loga"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("peh"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("vojka"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("bojka"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("jak"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u010Dajni"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("cajni"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("erni"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("larni"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("esni"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("anjca"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("ajca"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("ljca"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("ejca"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("ojca"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("ajka"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("ojka"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u0161ca"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("ing"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("tvenik"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("tetika"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("nstva"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("nik"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("tik"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("zik"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("snik"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("kusi"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("kusni"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("kustva"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("du\u0161ni"))
+ {
+ return false;
+ }
+ break;
+ case 31:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("dusni"))
+ {
+ return false;
+ }
+ break;
+ case 32:
+ if (!base.slice_from("antni"))
+ {
+ return false;
+ }
+ break;
+ case 33:
+ if (!base.slice_from("bilni"))
+ {
+ return false;
+ }
+ break;
+ case 34:
+ if (!base.slice_from("tilni"))
+ {
+ return false;
+ }
+ break;
+ case 35:
+ if (!base.slice_from("avilni"))
+ {
+ return false;
+ }
+ break;
+ case 36:
+ if (!base.slice_from("silni"))
+ {
+ return false;
+ }
+ break;
+ case 37:
+ if (!base.slice_from("gilni"))
+ {
+ return false;
+ }
+ break;
+ case 38:
+ if (!base.slice_from("rilni"))
+ {
+ return false;
+ }
+ break;
+ case 39:
+ if (!base.slice_from("nilni"))
+ {
+ return false;
+ }
+ break;
+ case 40:
+ if (!base.slice_from("alni"))
+ {
+ return false;
+ }
+ break;
+ case 41:
+ if (!base.slice_from("ozni"))
+ {
+ return false;
+ }
+ break;
+ case 42:
+ if (!base.slice_from("ravi"))
+ {
+ return false;
+ }
+ break;
+ case 43:
+ if (!base.slice_from("stavni"))
+ {
+ return false;
+ }
+ break;
+ case 44:
+ if (!base.slice_from("pravni"))
+ {
+ return false;
+ }
+ break;
+ case 45:
+ if (!base.slice_from("tivni"))
+ {
+ return false;
+ }
+ break;
+ case 46:
+ if (!base.slice_from("sivni"))
+ {
+ return false;
+ }
+ break;
+ case 47:
+ if (!base.slice_from("atni"))
+ {
+ return false;
+ }
+ break;
+ case 48:
+ if (!base.slice_from("enta"))
+ {
+ return false;
+ }
+ break;
+ case 49:
+ if (!base.slice_from("tetni"))
+ {
+ return false;
+ }
+ break;
+ case 50:
+ if (!base.slice_from("pletni"))
+ {
+ return false;
+ }
+ break;
+ case 51:
+ if (!base.slice_from("\u0161avi"))
+ {
+ return false;
+ }
+ break;
+ case 52:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("savi"))
+ {
+ return false;
+ }
+ break;
+ case 53:
+ if (!base.slice_from("anta"))
+ {
+ return false;
+ }
+ break;
+ case 54:
+ if (!base.slice_from("a\u010Dka"))
+ {
+ return false;
+ }
+ break;
+ case 55:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("acka"))
+ {
+ return false;
+ }
+ break;
+ case 56:
+ if (!base.slice_from("u\u0161ka"))
+ {
+ return false;
+ }
+ break;
+ case 57:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("uska"))
+ {
+ return false;
+ }
+ break;
+ case 58:
+ if (!base.slice_from("atka"))
+ {
+ return false;
+ }
+ break;
+ case 59:
+ if (!base.slice_from("etka"))
+ {
+ return false;
+ }
+ break;
+ case 60:
+ if (!base.slice_from("itka"))
+ {
+ return false;
+ }
+ break;
+ case 61:
+ if (!base.slice_from("otka"))
+ {
+ return false;
+ }
+ break;
+ case 62:
+ if (!base.slice_from("utka"))
+ {
+ return false;
+ }
+ break;
+ case 63:
+ if (!base.slice_from("eskna"))
+ {
+ return false;
+ }
+ break;
+ case 64:
+ if (!base.slice_from("ti\u010Dni"))
+ {
+ return false;
+ }
+ break;
+ case 65:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ticni"))
+ {
+ return false;
+ }
+ break;
+ case 66:
+ if (!base.slice_from("ojska"))
+ {
+ return false;
+ }
+ break;
+ case 67:
+ if (!base.slice_from("esma"))
+ {
+ return false;
+ }
+ break;
+ case 68:
+ if (!base.slice_from("metra"))
+ {
+ return false;
+ }
+ break;
+ case 69:
+ if (!base.slice_from("centra"))
+ {
+ return false;
+ }
+ break;
+ case 70:
+ if (!base.slice_from("istra"))
+ {
+ return false;
+ }
+ break;
+ case 71:
+ if (!base.slice_from("osti"))
+ {
+ return false;
+ }
+ break;
+ case 72:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("osti"))
+ {
+ return false;
+ }
+ break;
+ case 73:
+ if (!base.slice_from("dba"))
+ {
+ return false;
+ }
+ break;
+ case 74:
+ if (!base.slice_from("\u010Dka"))
+ {
+ return false;
+ }
+ break;
+ case 75:
+ if (!base.slice_from("mca"))
+ {
+ return false;
+ }
+ break;
+ case 76:
+ if (!base.slice_from("nca"))
+ {
+ return false;
+ }
+ break;
+ case 77:
+ if (!base.slice_from("voljni"))
+ {
+ return false;
+ }
+ break;
+ case 78:
+ if (!base.slice_from("anki"))
+ {
+ return false;
+ }
+ break;
+ case 79:
+ if (!base.slice_from("vca"))
+ {
+ return false;
+ }
+ break;
+ case 80:
+ if (!base.slice_from("sca"))
+ {
+ return false;
+ }
+ break;
+ case 81:
+ if (!base.slice_from("rca"))
+ {
+ return false;
+ }
+ break;
+ case 82:
+ if (!base.slice_from("alca"))
+ {
+ return false;
+ }
+ break;
+ case 83:
+ if (!base.slice_from("elca"))
+ {
+ return false;
+ }
+ break;
+ case 84:
+ if (!base.slice_from("olca"))
+ {
+ return false;
+ }
+ break;
+ case 85:
+ if (!base.slice_from("njca"))
+ {
+ return false;
+ }
+ break;
+ case 86:
+ if (!base.slice_from("ekta"))
+ {
+ return false;
+ }
+ break;
+ case 87:
+ if (!base.slice_from("izma"))
+ {
+ return false;
+ }
+ break;
+ case 88:
+ if (!base.slice_from("jebi"))
+ {
+ return false;
+ }
+ break;
+ case 89:
+ if (!base.slice_from("baci"))
+ {
+ return false;
+ }
+ break;
+ case 90:
+ if (!base.slice_from("a\u0161ni"))
+ {
+ return false;
+ }
+ break;
+ case 91:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("asni"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_2() {
+ var /** number */ among_var;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_2);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("sk"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0161k"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("stv"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u0161tv"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("tanij"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("manij"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("panij"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("ranij"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("ganij"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("an"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("in"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("on"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("a\u0107"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("e\u0107"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("u\u0107"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("ugov"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("ug"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("log"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("g"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("rari"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("oti"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("si"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("li"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("uj"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("caj"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("\u010Daj"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("\u0107aj"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("\u0111aj"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("laj"))
+ {
+ return false;
+ }
+ break;
+ case 31:
+ if (!base.slice_from("raj"))
+ {
+ return false;
+ }
+ break;
+ case 32:
+ if (!base.slice_from("bij"))
+ {
+ return false;
+ }
+ break;
+ case 33:
+ if (!base.slice_from("cij"))
+ {
+ return false;
+ }
+ break;
+ case 34:
+ if (!base.slice_from("dij"))
+ {
+ return false;
+ }
+ break;
+ case 35:
+ if (!base.slice_from("lij"))
+ {
+ return false;
+ }
+ break;
+ case 36:
+ if (!base.slice_from("nij"))
+ {
+ return false;
+ }
+ break;
+ case 37:
+ if (!base.slice_from("mij"))
+ {
+ return false;
+ }
+ break;
+ case 38:
+ if (!base.slice_from("\u017Eij"))
+ {
+ return false;
+ }
+ break;
+ case 39:
+ if (!base.slice_from("gij"))
+ {
+ return false;
+ }
+ break;
+ case 40:
+ if (!base.slice_from("fij"))
+ {
+ return false;
+ }
+ break;
+ case 41:
+ if (!base.slice_from("pij"))
+ {
+ return false;
+ }
+ break;
+ case 42:
+ if (!base.slice_from("rij"))
+ {
+ return false;
+ }
+ break;
+ case 43:
+ if (!base.slice_from("sij"))
+ {
+ return false;
+ }
+ break;
+ case 44:
+ if (!base.slice_from("tij"))
+ {
+ return false;
+ }
+ break;
+ case 45:
+ if (!base.slice_from("zij"))
+ {
+ return false;
+ }
+ break;
+ case 46:
+ if (!base.slice_from("nal"))
+ {
+ return false;
+ }
+ break;
+ case 47:
+ if (!base.slice_from("ijal"))
+ {
+ return false;
+ }
+ break;
+ case 48:
+ if (!base.slice_from("ozil"))
+ {
+ return false;
+ }
+ break;
+ case 49:
+ if (!base.slice_from("olov"))
+ {
+ return false;
+ }
+ break;
+ case 50:
+ if (!base.slice_from("ol"))
+ {
+ return false;
+ }
+ break;
+ case 51:
+ if (!base.slice_from("lem"))
+ {
+ return false;
+ }
+ break;
+ case 52:
+ if (!base.slice_from("ram"))
+ {
+ return false;
+ }
+ break;
+ case 53:
+ if (!base.slice_from("ar"))
+ {
+ return false;
+ }
+ break;
+ case 54:
+ if (!base.slice_from("dr"))
+ {
+ return false;
+ }
+ break;
+ case 55:
+ if (!base.slice_from("er"))
+ {
+ return false;
+ }
+ break;
+ case 56:
+ if (!base.slice_from("or"))
+ {
+ return false;
+ }
+ break;
+ case 57:
+ if (!base.slice_from("es"))
+ {
+ return false;
+ }
+ break;
+ case 58:
+ if (!base.slice_from("is"))
+ {
+ return false;
+ }
+ break;
+ case 59:
+ if (!base.slice_from("ta\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 60:
+ if (!base.slice_from("na\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 61:
+ if (!base.slice_from("ja\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 62:
+ if (!base.slice_from("ka\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 63:
+ if (!base.slice_from("ba\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 64:
+ if (!base.slice_from("ga\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 65:
+ if (!base.slice_from("va\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 66:
+ if (!base.slice_from("e\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 67:
+ if (!base.slice_from("i\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 68:
+ if (!base.slice_from("ikat"))
+ {
+ return false;
+ }
+ break;
+ case 69:
+ if (!base.slice_from("lat"))
+ {
+ return false;
+ }
+ break;
+ case 70:
+ if (!base.slice_from("et"))
+ {
+ return false;
+ }
+ break;
+ case 71:
+ if (!base.slice_from("est"))
+ {
+ return false;
+ }
+ break;
+ case 72:
+ if (!base.slice_from("ist"))
+ {
+ return false;
+ }
+ break;
+ case 73:
+ if (!base.slice_from("kst"))
+ {
+ return false;
+ }
+ break;
+ case 74:
+ if (!base.slice_from("ost"))
+ {
+ return false;
+ }
+ break;
+ case 75:
+ if (!base.slice_from("i\u0161t"))
+ {
+ return false;
+ }
+ break;
+ case 76:
+ if (!base.slice_from("ova"))
+ {
+ return false;
+ }
+ break;
+ case 77:
+ if (!base.slice_from("av"))
+ {
+ return false;
+ }
+ break;
+ case 78:
+ if (!base.slice_from("ev"))
+ {
+ return false;
+ }
+ break;
+ case 79:
+ if (!base.slice_from("iv"))
+ {
+ return false;
+ }
+ break;
+ case 80:
+ if (!base.slice_from("ov"))
+ {
+ return false;
+ }
+ break;
+ case 81:
+ if (!base.slice_from("mov"))
+ {
+ return false;
+ }
+ break;
+ case 82:
+ if (!base.slice_from("lov"))
+ {
+ return false;
+ }
+ break;
+ case 83:
+ if (!base.slice_from("el"))
+ {
+ return false;
+ }
+ break;
+ case 84:
+ if (!base.slice_from("anj"))
+ {
+ return false;
+ }
+ break;
+ case 85:
+ if (!base.slice_from("enj"))
+ {
+ return false;
+ }
+ break;
+ case 86:
+ if (!base.slice_from("\u0161nj"))
+ {
+ return false;
+ }
+ break;
+ case 87:
+ if (!base.slice_from("en"))
+ {
+ return false;
+ }
+ break;
+ case 88:
+ if (!base.slice_from("\u0161n"))
+ {
+ return false;
+ }
+ break;
+ case 89:
+ if (!base.slice_from("\u010Din"))
+ {
+ return false;
+ }
+ break;
+ case 90:
+ if (!base.slice_from("ro\u0161i"))
+ {
+ return false;
+ }
+ break;
+ case 91:
+ if (!base.slice_from("o\u0161"))
+ {
+ return false;
+ }
+ break;
+ case 92:
+ if (!base.slice_from("evit"))
+ {
+ return false;
+ }
+ break;
+ case 93:
+ if (!base.slice_from("ovit"))
+ {
+ return false;
+ }
+ break;
+ case 94:
+ if (!base.slice_from("ast"))
+ {
+ return false;
+ }
+ break;
+ case 95:
+ if (!base.slice_from("k"))
+ {
+ return false;
+ }
+ break;
+ case 96:
+ if (!base.slice_from("eva"))
+ {
+ return false;
+ }
+ break;
+ case 97:
+ if (!base.slice_from("ava"))
+ {
+ return false;
+ }
+ break;
+ case 98:
+ if (!base.slice_from("iva"))
+ {
+ return false;
+ }
+ break;
+ case 99:
+ if (!base.slice_from("uva"))
+ {
+ return false;
+ }
+ break;
+ case 100:
+ if (!base.slice_from("ir"))
+ {
+ return false;
+ }
+ break;
+ case 101:
+ if (!base.slice_from("a\u010D"))
+ {
+ return false;
+ }
+ break;
+ case 102:
+ if (!base.slice_from("a\u010Da"))
+ {
+ return false;
+ }
+ break;
+ case 103:
+ if (!base.slice_from("ni"))
+ {
+ return false;
+ }
+ break;
+ case 104:
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 105:
+ if (!base.slice_from("ur"))
+ {
+ return false;
+ }
+ break;
+ case 106:
+ if (!base.slice_from("astaj"))
+ {
+ return false;
+ }
+ break;
+ case 107:
+ if (!base.slice_from("istaj"))
+ {
+ return false;
+ }
+ break;
+ case 108:
+ if (!base.slice_from("ostaj"))
+ {
+ return false;
+ }
+ break;
+ case 109:
+ if (!base.slice_from("aj"))
+ {
+ return false;
+ }
+ break;
+ case 110:
+ if (!base.slice_from("asta"))
+ {
+ return false;
+ }
+ break;
+ case 111:
+ if (!base.slice_from("ista"))
+ {
+ return false;
+ }
+ break;
+ case 112:
+ if (!base.slice_from("osta"))
+ {
+ return false;
+ }
+ break;
+ case 113:
+ if (!base.slice_from("ta"))
+ {
+ return false;
+ }
+ break;
+ case 114:
+ if (!base.slice_from("inj"))
+ {
+ return false;
+ }
+ break;
+ case 115:
+ if (!base.slice_from("as"))
+ {
+ return false;
+ }
+ break;
+ case 116:
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 117:
+ if (!base.slice_from("lu\u010D"))
+ {
+ return false;
+ }
+ break;
+ case 118:
+ if (!base.slice_from("jeti"))
+ {
+ return false;
+ }
+ break;
+ case 119:
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 120:
+ if (!base.slice_from("at"))
+ {
+ return false;
+ }
+ break;
+ case 121:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("luc"))
+ {
+ return false;
+ }
+ break;
+ case 122:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("snj"))
+ {
+ return false;
+ }
+ break;
+ case 123:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("os"))
+ {
+ return false;
+ }
+ break;
+ case 124:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ac"))
+ {
+ return false;
+ }
+ break;
+ case 125:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ec"))
+ {
+ return false;
+ }
+ break;
+ case 126:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("uc"))
+ {
+ return false;
+ }
+ break;
+ case 127:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("rosi"))
+ {
+ return false;
+ }
+ break;
+ case 128:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("aca"))
+ {
+ return false;
+ }
+ break;
+ case 129:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("jas"))
+ {
+ return false;
+ }
+ break;
+ case 130:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("tas"))
+ {
+ return false;
+ }
+ break;
+ case 131:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("gas"))
+ {
+ return false;
+ }
+ break;
+ case 132:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("nas"))
+ {
+ return false;
+ }
+ break;
+ case 133:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("kas"))
+ {
+ return false;
+ }
+ break;
+ case 134:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("vas"))
+ {
+ return false;
+ }
+ break;
+ case 135:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("bas"))
+ {
+ return false;
+ }
+ break;
+ case 136:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("as"))
+ {
+ return false;
+ }
+ break;
+ case 137:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("cin"))
+ {
+ return false;
+ }
+ break;
+ case 138:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("astaj"))
+ {
+ return false;
+ }
+ break;
+ case 139:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("istaj"))
+ {
+ return false;
+ }
+ break;
+ case 140:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ostaj"))
+ {
+ return false;
+ }
+ break;
+ case 141:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("asta"))
+ {
+ return false;
+ }
+ break;
+ case 142:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ista"))
+ {
+ return false;
+ }
+ break;
+ case 143:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("osta"))
+ {
+ return false;
+ }
+ break;
+ case 144:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ava"))
+ {
+ return false;
+ }
+ break;
+ case 145:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("eva"))
+ {
+ return false;
+ }
+ break;
+ case 146:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("iva"))
+ {
+ return false;
+ }
+ break;
+ case 147:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("uva"))
+ {
+ return false;
+ }
+ break;
+ case 148:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ova"))
+ {
+ return false;
+ }
+ break;
+ case 149:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("jeti"))
+ {
+ return false;
+ }
+ break;
+ case 150:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("inj"))
+ {
+ return false;
+ }
+ break;
+ case 151:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ist"))
+ {
+ return false;
+ }
+ break;
+ case 152:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("es"))
+ {
+ return false;
+ }
+ break;
+ case 153:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("et"))
+ {
+ return false;
+ }
+ break;
+ case 154:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("is"))
+ {
+ return false;
+ }
+ break;
+ case 155:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ir"))
+ {
+ return false;
+ }
+ break;
+ case 156:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ur"))
+ {
+ return false;
+ }
+ break;
+ case 157:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("uj"))
+ {
+ return false;
+ }
+ break;
+ case 158:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ni"))
+ {
+ return false;
+ }
+ break;
+ case 159:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("sn"))
+ {
+ return false;
+ }
+ break;
+ case 160:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("ta"))
+ {
+ return false;
+ }
+ break;
+ case 161:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("a"))
+ {
+ return false;
+ }
+ break;
+ case 162:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("i"))
+ {
+ return false;
+ }
+ break;
+ case 163:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("e"))
+ {
+ return false;
+ }
+ break;
+ case 164:
+ if (!B_no_diacritics)
+ {
+ return false;
+ }
+ if (!base.slice_from("n"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_Step_3() {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_3) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!r_R1())
+ {
+ return false;
+ }
+ if (!base.slice_from(""))
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ r_cyr_to_lat();
+ r_prelude();
+ r_mark_regions();
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ r_Step_1();
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab0: {
+ lab1: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!r_Step_2())
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ if (!r_Step_3())
+ {
+ break lab0;
+ }
+ }
+ }
+ base.cursor = base.limit - v_2;
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/spanish-stemmer.js b/sphinx/search/non-minified-js/spanish-stemmer.js
index fffd6160b13..f800db7467d 100644
--- a/sphinx/search/non-minified-js/spanish-stemmer.js
+++ b/sphinx/search/non-minified-js/spanish-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from spanish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-SpanishStemmer = function() {
+var SpanishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["", -1, 6],
["\u00E1", 0, 1],
@@ -77,6 +78,8 @@ SpanishStemmer = function() {
["ante", -1, 2],
["mente", -1, 7],
["amente", 13, 6],
+ ["acion", -1, 2],
+ ["ucion", -1, 4],
["aci\u00F3n", -1, 2],
["uci\u00F3n", -1, 4],
["ico", -1, 1],
@@ -247,37 +250,27 @@ SpanishStemmer = function() {
I_pV = base.limit;
I_p1 = base.limit;
I_p2 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
lab1: {
- var /** number */ v_2 = base.cursor;
+ /** @const */ var /** number */ v_2 = base.cursor;
lab2: {
if (!(base.in_grouping(g_v, 97, 252)))
{
break lab2;
}
lab3: {
- var /** number */ v_3 = base.cursor;
+ /** @const */ var /** number */ v_3 = base.cursor;
lab4: {
if (!(base.out_grouping(g_v, 97, 252)))
{
break lab4;
}
- golab5: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab6: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab6;
- }
- break golab5;
- }
- if (base.cursor >= base.limit)
- {
- break lab4;
- }
- base.cursor++;
+ break lab4;
}
+ base.cursor++;
break lab3;
}
base.cursor = v_3;
@@ -285,21 +278,11 @@ SpanishStemmer = function() {
{
break lab2;
}
- golab7: while(true)
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab8: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab8;
- }
- break golab7;
- }
- if (base.cursor >= base.limit)
- {
- break lab2;
- }
- base.cursor++;
+ break lab2;
}
+ base.cursor++;
}
break lab1;
}
@@ -308,31 +291,21 @@ SpanishStemmer = function() {
{
break lab0;
}
- lab9: {
- var /** number */ v_6 = base.cursor;
- lab10: {
+ lab5: {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab6: {
if (!(base.out_grouping(g_v, 97, 252)))
{
- break lab10;
+ break lab6;
}
- golab11: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab12: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab12;
- }
- break golab11;
- }
- if (base.cursor >= base.limit)
- {
- break lab10;
- }
- base.cursor++;
+ break lab6;
}
- break lab9;
+ base.cursor++;
+ break lab5;
}
- base.cursor = v_6;
+ base.cursor = v_4;
if (!(base.in_grouping(g_v, 97, 252)))
{
break lab0;
@@ -347,72 +320,32 @@ SpanishStemmer = function() {
I_pV = base.cursor;
}
base.cursor = v_1;
- var /** number */ v_8 = base.cursor;
- lab13: {
- golab14: while(true)
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab15: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab15;
- }
- break golab14;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab16: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab17: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab17;
- }
- break golab16;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p1 = base.cursor;
- golab18: while(true)
+ if (!base.go_out_grouping(g_v, 97, 252))
{
- lab19: {
- if (!(base.in_grouping(g_v, 97, 252)))
- {
- break lab19;
- }
- break golab18;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
- golab20: while(true)
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 252))
{
- lab21: {
- if (!(base.out_grouping(g_v, 97, 252)))
- {
- break lab21;
- }
- break golab20;
- }
- if (base.cursor >= base.limit)
- {
- break lab13;
- }
- base.cursor++;
+ break lab7;
}
+ base.cursor++;
I_p2 = base.cursor;
}
- base.cursor = v_8;
+ base.cursor = v_5;
return true;
};
@@ -421,14 +354,10 @@ SpanishStemmer = function() {
var /** number */ among_var;
while(true)
{
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
lab0: {
base.bra = base.cursor;
among_var = base.find_among(a_0);
- if (among_var == 0)
- {
- break lab0;
- }
base.ket = base.cursor;
switch (among_var) {
case 1:
@@ -479,29 +408,17 @@ SpanishStemmer = function() {
/** @return {boolean} */
function r_RV() {
- if (!(I_pV <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_pV <= base.cursor;
};
/** @return {boolean} */
function r_R1() {
- if (!(I_p1 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p1 <= base.cursor;
};
/** @return {boolean} */
function r_R2() {
- if (!(I_p2 <= base.cursor))
- {
- return false;
- }
- return true;
+ return I_p2 <= base.cursor;
};
/** @return {boolean} */
@@ -608,7 +525,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("ic")))
@@ -667,7 +584,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
among_var = base.find_among_b(a_3);
@@ -717,7 +634,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
if (base.find_among_b(a_4) == 0)
@@ -746,7 +663,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab3: {
base.ket = base.cursor;
if (base.find_among_b(a_5) == 0)
@@ -775,7 +692,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab4: {
base.ket = base.cursor;
if (!(base.eq_s_b("at")))
@@ -805,16 +722,16 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
if (base.find_among_b(a_7) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
if (!(base.eq_s_b("u")))
{
return false;
@@ -833,33 +750,33 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_pV;
base.ket = base.cursor;
among_var = base.find_among_b(a_8);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("u")))
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
if (!(base.eq_s_b("g")))
{
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
break lab0;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
}
base.bra = base.cursor;
if (!base.slice_del())
@@ -907,7 +824,7 @@ SpanishStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
base.ket = base.cursor;
if (!(base.eq_s_b("u")))
@@ -916,7 +833,7 @@ SpanishStemmer = function() {
break lab0;
}
base.bra = base.cursor;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.eq_s_b("g")))
{
base.cursor = base.limit - v_1;
@@ -941,13 +858,13 @@ SpanishStemmer = function() {
this.stem = /** @return {boolean} */ function() {
r_mark_regions();
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
r_attached_pronoun();
- base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab0: {
lab1: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
if (!r_standard_suffix())
{
@@ -955,7 +872,7 @@ SpanishStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
lab3: {
if (!r_y_verb_suffix())
{
@@ -963,21 +880,21 @@ SpanishStemmer = function() {
}
break lab1;
}
- base.cursor = base.limit - v_4;
+ base.cursor = base.limit - v_3;
if (!r_verb_suffix())
{
break lab0;
}
}
}
- base.cursor = base.limit - v_3;
- var /** number */ v_5 = base.limit - base.cursor;
+ base.cursor = base.limit - v_2;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_residual_suffix();
- base.cursor = base.limit - v_5;
+ base.cursor = base.limit - v_4;
base.cursor = base.limit_backward;
- var /** number */ v_6 = base.cursor;
+ /** @const */ var /** number */ v_5 = base.cursor;
r_postlude();
- base.cursor = v_6;
+ base.cursor = v_5;
return true;
};
diff --git a/sphinx/search/non-minified-js/swedish-stemmer.js b/sphinx/search/non-minified-js/swedish-stemmer.js
index 4d7d49fc0e5..bf1a64268d2 100644
--- a/sphinx/search/non-minified-js/swedish-stemmer.js
+++ b/sphinx/search/non-minified-js/swedish-stemmer.js
@@ -1,9 +1,34 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from swedish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-SwedishStemmer = function() {
+var SwedishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
+ ["fab", -1, -1],
+ ["h", -1, -1],
+ ["pak", -1, -1],
+ ["rak", -1, -1],
+ ["stak", -1, -1],
+ ["kom", -1, -1],
+ ["iet", -1, -1],
+ ["cit", -1, -1],
+ ["dit", -1, -1],
+ ["alit", -1, -1],
+ ["ilit", -1, -1],
+ ["mit", -1, -1],
+ ["nit", -1, -1],
+ ["pit", -1, -1],
+ ["rit", -1, -1],
+ ["sit", -1, -1],
+ ["tit", -1, -1],
+ ["uit", -1, -1],
+ ["ivit", -1, -1],
+ ["kvit", -1, -1],
+ ["xit", -1, -1]
+ ];
+
+ /** @const */ var a_1 = [
["a", -1, 1],
["arna", 0, 1],
["erna", 0, 1],
@@ -38,12 +63,13 @@ SwedishStemmer = function() {
["hetens", 29, 1],
["erns", 21, 1],
["at", -1, 1],
- ["andet", -1, 1],
- ["het", -1, 1],
+ ["et", -1, 3],
+ ["andet", 34, 1],
+ ["het", 34, 1],
["ast", -1, 1]
];
- /** @const */ var a_1 = [
+ /** @const */ var a_2 = [
["dd", -1, -1],
["gd", -1, -1],
["nn", -1, -1],
@@ -53,18 +79,20 @@ SwedishStemmer = function() {
["tt", -1, -1]
];
- /** @const */ var a_2 = [
+ /** @const */ var a_3 = [
["ig", -1, 1],
["lig", 0, 1],
["els", -1, 1],
["fullt", -1, 3],
- ["l\u00F6st", -1, 2]
+ ["\u00F6st", -1, 2]
];
/** @const */ var /** Array */ g_v = [17, 65, 16, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 24, 0, 32];
/** @const */ var /** Array */ g_s_ending = [119, 127, 149];
+ /** @const */ var /** Array */ g_ost_ending = [173, 58];
+
var /** number */ I_x = 0;
var /** number */ I_p1 = 0;
@@ -72,9 +100,9 @@ SwedishStemmer = function() {
/** @return {boolean} */
function r_mark_regions() {
I_p1 = base.limit;
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
{
- var /** number */ c1 = base.cursor + 3;
+ /** @const */ var /** number */ c1 = base.cursor + 3;
if (c1 > base.limit)
{
return false;
@@ -83,46 +111,56 @@ SwedishStemmer = function() {
}
I_x = base.cursor;
base.cursor = v_1;
- golab0: while(true)
+ if (!base.go_out_grouping(g_v, 97, 246))
{
- var /** number */ v_2 = base.cursor;
- lab1: {
- if (!(base.in_grouping(g_v, 97, 246)))
- {
- break lab1;
- }
- base.cursor = v_2;
- break golab0;
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_v, 97, 246))
+ {
+ return false;
+ }
+ base.cursor++;
+ I_p1 = base.cursor;
+ lab0: {
+ if (I_p1 >= I_x)
+ {
+ break lab0;
}
- base.cursor = v_2;
- if (base.cursor >= base.limit)
+ I_p1 = I_x;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_et_condition() {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!(base.out_grouping_b(g_v, 97, 246)))
+ {
+ return false;
+ }
+ if (!(base.in_grouping_b(g_v, 97, 246)))
+ {
+ return false;
+ }
+ lab0: {
+ if (base.cursor > base.limit_backward)
{
- return false;
+ break lab0;
}
- base.cursor++;
+ return false;
}
- golab2: while(true)
+ base.cursor = base.limit - v_1;
{
- lab3: {
- if (!(base.out_grouping(g_v, 97, 246)))
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (base.find_among_b(a_0) == 0)
{
- break lab3;
+ break lab1;
}
- break golab2;
- }
- if (base.cursor >= base.limit)
- {
return false;
}
- base.cursor++;
- }
- I_p1 = base.cursor;
- lab4: {
- if (!(I_p1 < I_x))
- {
- break lab4;
- }
- I_p1 = I_x;
+ base.cursor = base.limit - v_2;
}
return true;
};
@@ -134,17 +172,17 @@ SwedishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- among_var = base.find_among_b(a_0);
+ among_var = base.find_among_b(a_1);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!base.slice_del())
@@ -153,7 +191,33 @@ SwedishStemmer = function() {
}
break;
case 2:
- if (!(base.in_grouping_b(g_s_ending, 98, 121)))
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
+ if (!(base.eq_s_b("et")))
+ {
+ break lab1;
+ }
+ if (!r_et_condition())
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ break lab0;
+ }
+ base.cursor = base.limit - v_2;
+ if (!(base.in_grouping_b(g_s_ending, 98, 121)))
+ {
+ return false;
+ }
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_et_condition())
{
return false;
}
@@ -172,19 +236,19 @@ SwedishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
- var /** number */ v_3 = base.limit - base.cursor;
- if (base.find_among_b(a_1) == 0)
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ if (base.find_among_b(a_2) == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
base.ket = base.cursor;
if (base.cursor <= base.limit_backward)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.cursor--;
@@ -193,7 +257,7 @@ SwedishStemmer = function() {
{
return false;
}
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return true;
};
@@ -204,16 +268,17 @@ SwedishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.limit_backward;
base.limit_backward = I_p1;
base.ket = base.cursor;
- among_var = base.find_among_b(a_2);
+ among_var = base.find_among_b(a_3);
if (among_var == 0)
{
- base.limit_backward = v_2;
+ base.limit_backward = v_1;
return false;
}
base.bra = base.cursor;
+ base.limit_backward = v_1;
switch (among_var) {
case 1:
if (!base.slice_del())
@@ -222,7 +287,11 @@ SwedishStemmer = function() {
}
break;
case 2:
- if (!base.slice_from("l\u00F6s"))
+ if (!(base.in_grouping_b(g_ost_ending, 105, 118)))
+ {
+ return false;
+ }
+ if (!base.slice_from("\u00F6s"))
{
return false;
}
@@ -234,22 +303,21 @@ SwedishStemmer = function() {
}
break;
}
- base.limit_backward = v_2;
return true;
};
this.stem = /** @return {boolean} */ function() {
- var /** number */ v_1 = base.cursor;
+ /** @const */ var /** number */ v_1 = base.cursor;
r_mark_regions();
base.cursor = v_1;
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_main_suffix();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_consonant_pair();
base.cursor = base.limit - v_3;
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
r_other_suffix();
base.cursor = base.limit - v_4;
base.cursor = base.limit_backward;
diff --git a/sphinx/search/non-minified-js/tamil-stemmer.js b/sphinx/search/non-minified-js/tamil-stemmer.js
new file mode 100644
index 00000000000..2ae474784d7
--- /dev/null
+++ b/sphinx/search/non-minified-js/tamil-stemmer.js
@@ -0,0 +1,1189 @@
+// Generated from tamil.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var TamilStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u0BB5\u0BC1", -1, 3],
+ ["\u0BB5\u0BC2", -1, 4],
+ ["\u0BB5\u0BCA", -1, 2],
+ ["\u0BB5\u0BCB", -1, 1]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u0B95", -1, -1],
+ ["\u0B99", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9E", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BA8", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BAE", -1, -1],
+ ["\u0BAF", -1, -1],
+ ["\u0BB5", -1, -1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_4 = [
+ ["", -1, 2],
+ ["\u0BC8", 0, 1],
+ ["\u0BCD", 0, 1]
+ ];
+
+ /** @const */ var a_5 = [
+ ["\u0BA8\u0BCD\u0BA4", -1, 1],
+ ["\u0BAF", -1, 1],
+ ["\u0BB5", -1, 1],
+ ["\u0BA9\u0BC1", -1, 8],
+ ["\u0BC1\u0B95\u0BCD", -1, 7],
+ ["\u0BC1\u0B95\u0BCD\u0B95\u0BCD", -1, 7],
+ ["\u0B9F\u0BCD\u0B95\u0BCD", -1, 3],
+ ["\u0BB1\u0BCD\u0B95\u0BCD", -1, 4],
+ ["\u0B99\u0BCD", -1, 9],
+ ["\u0B9F\u0BCD\u0B9F\u0BCD", -1, 5],
+ ["\u0BA4\u0BCD\u0BA4\u0BCD", -1, 6],
+ ["\u0BA8\u0BCD\u0BA4\u0BCD", -1, 1],
+ ["\u0BA8\u0BCD", -1, 1],
+ ["\u0B9F\u0BCD\u0BAA\u0BCD", -1, 3],
+ ["\u0BAF\u0BCD", -1, 2],
+ ["\u0BA9\u0BCD\u0BB1\u0BCD", -1, 4],
+ ["\u0BB5\u0BCD", -1, 1]
+ ];
+
+ /** @const */ var a_6 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_7 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_8 = [
+ ["\u0B9E", -1, -1],
+ ["\u0BA3", -1, -1],
+ ["\u0BA8", -1, -1],
+ ["\u0BA9", -1, -1],
+ ["\u0BAE", -1, -1],
+ ["\u0BAF", -1, -1],
+ ["\u0BB0", -1, -1],
+ ["\u0BB2", -1, -1],
+ ["\u0BB3", -1, -1],
+ ["\u0BB4", -1, -1],
+ ["\u0BB5", -1, -1]
+ ];
+
+ /** @const */ var a_9 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1],
+ ["\u0BCD", -1, -1]
+ ];
+
+ /** @const */ var a_10 = [
+ ["\u0B85", -1, -1],
+ ["\u0B87", -1, -1],
+ ["\u0B89", -1, -1]
+ ];
+
+ /** @const */ var a_11 = [
+ ["\u0B95", -1, -1],
+ ["\u0B99", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9E", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BA8", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BAE", -1, -1],
+ ["\u0BAF", -1, -1],
+ ["\u0BB5", -1, -1]
+ ];
+
+ /** @const */ var a_12 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_13 = [
+ ["\u0B95\u0BB3\u0BCD", -1, 4],
+ ["\u0BC1\u0B99\u0BCD\u0B95\u0BB3\u0BCD", 0, 1],
+ ["\u0B9F\u0BCD\u0B95\u0BB3\u0BCD", 0, 3],
+ ["\u0BB1\u0BCD\u0B95\u0BB3\u0BCD", 0, 2]
+ ];
+
+ /** @const */ var a_14 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BCB", -1, -1]
+ ];
+
+ /** @const */ var a_15 = [
+ ["\u0BAA\u0BBF", -1, -1],
+ ["\u0BB5\u0BBF", -1, -1]
+ ];
+
+ /** @const */ var a_16 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_17 = [
+ ["\u0BAA\u0B9F\u0BCD\u0B9F", -1, 3],
+ ["\u0BAA\u0B9F\u0BCD\u0B9F\u0BA3", -1, 3],
+ ["\u0BA4\u0BBE\u0BA9", -1, 3],
+ ["\u0BAA\u0B9F\u0BBF\u0BA4\u0BBE\u0BA9", 2, 3],
+ ["\u0BC6\u0BA9", -1, 1],
+ ["\u0BBE\u0B95\u0BBF\u0BAF", -1, 1],
+ ["\u0B95\u0BC1\u0BB0\u0BBF\u0BAF", -1, 3],
+ ["\u0BC1\u0B9F\u0BC8\u0BAF", -1, 1],
+ ["\u0BB2\u0BCD\u0BB2", -1, 2],
+ ["\u0BC1\u0BB3\u0BCD\u0BB3", -1, 1],
+ ["\u0BBE\u0B95\u0BBF", -1, 1],
+ ["\u0BAA\u0B9F\u0BBF", -1, 3],
+ ["\u0BBF\u0BA9\u0BCD\u0BB1\u0BBF", -1, 1],
+ ["\u0BAA\u0BB1\u0BCD\u0BB1\u0BBF", -1, 3],
+ ["\u0BAA\u0B9F\u0BC1", -1, 3],
+ ["\u0BB5\u0BBF\u0B9F\u0BC1", -1, 3],
+ ["\u0BAA\u0B9F\u0BCD\u0B9F\u0BC1", -1, 3],
+ ["\u0BB5\u0BBF\u0B9F\u0BCD\u0B9F\u0BC1", -1, 3],
+ ["\u0BAA\u0B9F\u0BCD\u0B9F\u0BA4\u0BC1", -1, 3],
+ ["\u0BC6\u0BA9\u0BCD\u0BB1\u0BC1", -1, 1],
+ ["\u0BC1\u0B9F\u0BC8", -1, 1],
+ ["\u0BBF\u0BB2\u0BCD\u0BB2\u0BC8", -1, 1],
+ ["\u0BC1\u0B9F\u0BA9\u0BCD", -1, 1],
+ ["\u0BBF\u0B9F\u0BAE\u0BCD", -1, 1],
+ ["\u0BC6\u0BB2\u0BCD\u0BB2\u0BBE\u0BAE\u0BCD", -1, 3],
+ ["\u0BC6\u0BA9\u0BC1\u0BAE\u0BCD", -1, 1]
+ ];
+
+ /** @const */ var a_18 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_19 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_20 = [
+ ["\u0BB5\u0BBF\u0B9F", -1, 2],
+ ["\u0BC0", -1, 7],
+ ["\u0BCA\u0B9F\u0BC1", -1, 2],
+ ["\u0BCB\u0B9F\u0BC1", -1, 2],
+ ["\u0BA4\u0BC1", -1, 6],
+ ["\u0BBF\u0BB0\u0BC1\u0BA8\u0BCD\u0BA4\u0BC1", 4, 2],
+ ["\u0BBF\u0BA9\u0BCD\u0BB1\u0BC1", -1, 2],
+ ["\u0BC1\u0B9F\u0BC8", -1, 2],
+ ["\u0BA9\u0BC8", -1, 1],
+ ["\u0B95\u0BA3\u0BCD", -1, 1],
+ ["\u0BBF\u0BA9\u0BCD", -1, 3],
+ ["\u0BAE\u0BC1\u0BA9\u0BCD", -1, 1],
+ ["\u0BBF\u0B9F\u0BAE\u0BCD", -1, 4],
+ ["\u0BBF\u0BB1\u0BCD", -1, 2],
+ ["\u0BAE\u0BC7\u0BB1\u0BCD", -1, 1],
+ ["\u0BB2\u0BCD", -1, 5],
+ ["\u0BBE\u0BAE\u0BB2\u0BCD", 15, 2],
+ ["\u0BBE\u0BB2\u0BCD", 15, 2],
+ ["\u0BBF\u0BB2\u0BCD", 15, 2],
+ ["\u0BAE\u0BC7\u0BB2\u0BCD", 15, 1],
+ ["\u0BC1\u0BB3\u0BCD", -1, 2],
+ ["\u0B95\u0BC0\u0BB4\u0BCD", -1, 1]
+ ];
+
+ /** @const */ var a_21 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_22 = [
+ ["\u0B95", -1, -1],
+ ["\u0B9A", -1, -1],
+ ["\u0B9F", -1, -1],
+ ["\u0BA4", -1, -1],
+ ["\u0BAA", -1, -1],
+ ["\u0BB1", -1, -1]
+ ];
+
+ /** @const */ var a_23 = [
+ ["\u0B85", -1, -1],
+ ["\u0B86", -1, -1],
+ ["\u0B87", -1, -1],
+ ["\u0B88", -1, -1],
+ ["\u0B89", -1, -1],
+ ["\u0B8A", -1, -1],
+ ["\u0B8E", -1, -1],
+ ["\u0B8F", -1, -1],
+ ["\u0B90", -1, -1],
+ ["\u0B92", -1, -1],
+ ["\u0B93", -1, -1],
+ ["\u0B94", -1, -1]
+ ];
+
+ /** @const */ var a_24 = [
+ ["\u0BBE", -1, -1],
+ ["\u0BBF", -1, -1],
+ ["\u0BC0", -1, -1],
+ ["\u0BC1", -1, -1],
+ ["\u0BC2", -1, -1],
+ ["\u0BC6", -1, -1],
+ ["\u0BC7", -1, -1],
+ ["\u0BC8", -1, -1]
+ ];
+
+ /** @const */ var a_25 = [
+ ["\u0B95", -1, 1],
+ ["\u0BA4", -1, 1],
+ ["\u0BA9", -1, 1],
+ ["\u0BAA", -1, 1],
+ ["\u0BAF", -1, 1],
+ ["\u0BBE", -1, 5],
+ ["\u0B95\u0BC1", -1, 6],
+ ["\u0BAA\u0B9F\u0BC1", -1, 1],
+ ["\u0BA4\u0BC1", -1, 3],
+ ["\u0BBF\u0BB1\u0BCD\u0BB1\u0BC1", -1, 1],
+ ["\u0BA9\u0BC8", -1, 1],
+ ["\u0BB5\u0BC8", -1, 1],
+ ["\u0BA9\u0BA9\u0BCD", -1, 1],
+ ["\u0BAA\u0BA9\u0BCD", -1, 1],
+ ["\u0BB5\u0BA9\u0BCD", -1, 2],
+ ["\u0BBE\u0BA9\u0BCD", -1, 4],
+ ["\u0BA9\u0BBE\u0BA9\u0BCD", 15, 1],
+ ["\u0BAE\u0BBF\u0BA9\u0BCD", -1, 1],
+ ["\u0BA9\u0BC6\u0BA9\u0BCD", -1, 1],
+ ["\u0BC7\u0BA9\u0BCD", -1, 5],
+ ["\u0BA9\u0BAE\u0BCD", -1, 1],
+ ["\u0BAA\u0BAE\u0BCD", -1, 1],
+ ["\u0BBE\u0BAE\u0BCD", -1, 5],
+ ["\u0B95\u0BC1\u0BAE\u0BCD", -1, 1],
+ ["\u0B9F\u0BC1\u0BAE\u0BCD", -1, 5],
+ ["\u0BA4\u0BC1\u0BAE\u0BCD", -1, 1],
+ ["\u0BB1\u0BC1\u0BAE\u0BCD", -1, 1],
+ ["\u0BC6\u0BAE\u0BCD", -1, 5],
+ ["\u0BC7\u0BAE\u0BCD", -1, 5],
+ ["\u0BCB\u0BAE\u0BCD", -1, 5],
+ ["\u0BBE\u0BAF\u0BCD", -1, 5],
+ ["\u0BA9\u0BB0\u0BCD", -1, 1],
+ ["\u0BAA\u0BB0\u0BCD", -1, 1],
+ ["\u0BC0\u0BAF\u0BB0\u0BCD", -1, 5],
+ ["\u0BB5\u0BB0\u0BCD", -1, 1],
+ ["\u0BBE\u0BB0\u0BCD", -1, 5],
+ ["\u0BA9\u0BBE\u0BB0\u0BCD", 35, 1],
+ ["\u0BAE\u0BBE\u0BB0\u0BCD", 35, 1],
+ ["\u0B95\u0BCA\u0BA3\u0BCD\u0B9F\u0BBF\u0BB0\u0BCD", -1, 1],
+ ["\u0BA9\u0BBF\u0BB0\u0BCD", -1, 5],
+ ["\u0BC0\u0BB0\u0BCD", -1, 5],
+ ["\u0BA9\u0BB3\u0BCD", -1, 1],
+ ["\u0BAA\u0BB3\u0BCD", -1, 1],
+ ["\u0BB5\u0BB3\u0BCD", -1, 1],
+ ["\u0BBE\u0BB3\u0BCD", -1, 5],
+ ["\u0BA9\u0BBE\u0BB3\u0BCD", 44, 1]
+ ];
+
+ /** @const */ var a_26 = [
+ ["\u0B95\u0BBF\u0BB1", -1, -1],
+ ["\u0B95\u0BBF\u0BA9\u0BCD\u0BB1", -1, -1],
+ ["\u0BBE\u0BA8\u0BBF\u0BA9\u0BCD\u0BB1", -1, -1],
+ ["\u0B95\u0BBF\u0BB1\u0BCD", -1, -1],
+ ["\u0B95\u0BBF\u0BA9\u0BCD\u0BB1\u0BCD", -1, -1],
+ ["\u0BBE\u0BA8\u0BBF\u0BA9\u0BCD\u0BB1\u0BCD", -1, -1]
+ ];
+
+ var /** boolean */ B_found_vetrumai_urupu = false;
+ var /** boolean */ B_found_a_match = false;
+
+
+ /** @return {boolean} */
+ function r_has_min_length() {
+ return base.current.length > 4;
+ };
+
+ /** @return {boolean} */
+ function r_fix_va_start() {
+ var /** number */ among_var;
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0B93"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0B92"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0B89"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u0B8A"))
+ {
+ return false;
+ }
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_endings() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ if (!r_fix_ending())
+ {
+ break lab1;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_question_prefixes() {
+ base.bra = base.cursor;
+ if (!(base.eq_s("\u0B8E")))
+ {
+ return false;
+ }
+ if (base.find_among(a_1) == 0)
+ {
+ return false;
+ }
+ if (!(base.eq_s("\u0BCD")))
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_fix_va_start();
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_fix_ending() {
+ var /** number */ among_var;
+ if (base.current.length <= 3)
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_5);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ if (base.find_among_b(a_2) == 0)
+ {
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0BB3\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u0BB2\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u0B9F\u0BC1"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!B_found_vetrumai_urupu)
+ {
+ break lab1;
+ }
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("\u0BC8")))
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ if (!base.slice_from("\u0BAE\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
+ if (base.find_among_b(a_3) == 0)
+ {
+ break lab3;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 9:
+ among_var = base.find_among_b(a_4);
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0BAE\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ }
+ break;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ return false;
+ }
+ lab4: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab5: {
+ if (base.find_among_b(a_6) == 0)
+ {
+ break lab5;
+ }
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab6: {
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ base.cursor = base.limit - v_6;
+ break lab6;
+ }
+ if (base.find_among_b(a_7) == 0)
+ {
+ base.cursor = base.limit - v_6;
+ break lab6;
+ }
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ lab7: {
+ if (base.find_among_b(a_8) == 0)
+ {
+ break lab7;
+ }
+ base.bra = base.cursor;
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ break lab7;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_5;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ if (base.find_among_b(a_9) == 0)
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_7;
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_pronoun_prefixes() {
+ base.bra = base.cursor;
+ if (base.find_among(a_10) == 0)
+ {
+ return false;
+ }
+ if (base.find_among(a_11) == 0)
+ {
+ return false;
+ }
+ if (!(base.eq_s("\u0BCD")))
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_fix_va_start();
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_plural_suffix() {
+ var /** number */ among_var;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_13);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ if (base.find_among_b(a_12) == 0)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u0BC1\u0B99\u0BCD"))
+ {
+ return false;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0BB2\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u0BB3\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_question_suffixes() {
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_14) == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ base.cursor = base.limit_backward;
+ r_fix_endings();
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_command_suffixes() {
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (base.find_among_b(a_15) == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_um() {
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u0BC1\u0BAE\u0BCD")))
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ base.cursor = base.limit_backward;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_fix_ending();
+ base.cursor = v_1;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_common_word_endings() {
+ var /** number */ among_var;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_17);
+ if (among_var == 0)
+ {
+ return false;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ if (base.find_among_b(a_16) == 0)
+ {
+ break lab0;
+ }
+ return false;
+ }
+ base.cursor = base.limit - v_1;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = base.limit_backward;
+ r_fix_endings();
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_vetrumai_urupukal() {
+ var /** number */ among_var;
+ B_found_vetrumai_urupu = false;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ lab0: {
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_20);
+ if (among_var == 0)
+ {
+ break lab1;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab2: {
+ if (!(base.eq_s_b("\u0BAE")))
+ {
+ break lab2;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (base.current.length < 7)
+ {
+ break lab1;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab3: {
+ if (base.find_among_b(a_18) == 0)
+ {
+ break lab3;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ if (base.find_among_b(a_19) == 0)
+ {
+ break lab4;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_5;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u0BBF"))
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = base.limit - v_2;
+ break lab0;
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u0BC8")))
+ {
+ return false;
+ }
+ lab5: {
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab6: {
+ {
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab7: {
+ if (base.find_among_b(a_21) == 0)
+ {
+ break lab7;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_8;
+ }
+ break lab5;
+ }
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ if (base.find_among_b(a_22) == 0)
+ {
+ return false;
+ }
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_9;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_6;
+ }
+ B_found_vetrumai_urupu = true;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab8: {
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u0BBF\u0BA9\u0BCD")))
+ {
+ break lab8;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ }
+ base.cursor = base.limit - v_10;
+ base.cursor = base.limit_backward;
+ r_fix_endings();
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_tense_suffixes() {
+ while(true)
+ {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ if (!r_remove_tense_suffix())
+ {
+ break lab0;
+ }
+ continue;
+ }
+ base.cursor = v_1;
+ break;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_remove_tense_suffix() {
+ var /** number */ among_var;
+ B_found_a_match = false;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_25);
+ if (among_var == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab1: {
+ if (base.find_among_b(a_23) == 0)
+ {
+ break lab1;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_3;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 3:
+ {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab2: {
+ if (base.find_among_b(a_24) == 0)
+ {
+ break lab2;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_4;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 4:
+ {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("\u0B9A")))
+ {
+ break lab3;
+ }
+ break lab0;
+ }
+ base.cursor = base.limit - v_5;
+ }
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u0BCD"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ if (!(base.eq_s_b("\u0BCD")))
+ {
+ break lab0;
+ }
+ base.cursor = base.limit - v_6;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ B_found_a_match = true;
+ base.cursor = base.limit - v_2;
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab4: {
+ base.ket = base.cursor;
+ if (base.find_among_b(a_26) == 0)
+ {
+ break lab4;
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ B_found_a_match = true;
+ }
+ base.cursor = base.limit - v_7;
+ base.cursor = base.limit_backward;
+ r_fix_endings();
+ if (!B_found_a_match)
+ {
+ return false;
+ }
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ B_found_vetrumai_urupu = false;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_fix_ending();
+ base.cursor = v_1;
+ if (!r_has_min_length())
+ {
+ return false;
+ }
+ /** @const */ var /** number */ v_2 = base.cursor;
+ r_remove_question_prefixes();
+ base.cursor = v_2;
+ /** @const */ var /** number */ v_3 = base.cursor;
+ r_remove_pronoun_prefixes();
+ base.cursor = v_3;
+ /** @const */ var /** number */ v_4 = base.cursor;
+ r_remove_question_suffixes();
+ base.cursor = v_4;
+ /** @const */ var /** number */ v_5 = base.cursor;
+ r_remove_um();
+ base.cursor = v_5;
+ /** @const */ var /** number */ v_6 = base.cursor;
+ r_remove_common_word_endings();
+ base.cursor = v_6;
+ /** @const */ var /** number */ v_7 = base.cursor;
+ r_remove_vetrumai_urupukal();
+ base.cursor = v_7;
+ /** @const */ var /** number */ v_8 = base.cursor;
+ r_remove_plural_suffix();
+ base.cursor = v_8;
+ /** @const */ var /** number */ v_9 = base.cursor;
+ r_remove_command_suffixes();
+ base.cursor = v_9;
+ /** @const */ var /** number */ v_10 = base.cursor;
+ r_remove_tense_suffixes();
+ base.cursor = v_10;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/non-minified-js/turkish-stemmer.js b/sphinx/search/non-minified-js/turkish-stemmer.js
index 8ba74b9218e..c57ba798066 100644
--- a/sphinx/search/non-minified-js/turkish-stemmer.js
+++ b/sphinx/search/non-minified-js/turkish-stemmer.js
@@ -1,8 +1,9 @@
-// Generated by Snowball 2.1.0 - https://snowballstem.org/
+// Generated from turkish.sbl by Snowball 3.0.1 - https://snowballstem.org/
/**@constructor*/
-TurkishStemmer = function() {
+var TurkishStemmer = function() {
var base = new BaseStemmer();
+
/** @const */ var a_0 = [
["m", -1, -1],
["n", -1, -1],
@@ -216,230 +217,104 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_check_vowel_harmony() {
- var /** number */ v_1 = base.limit - base.cursor;
- golab0: while(true)
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ if (!base.go_out_grouping_b(g_vowel, 97, 305))
{
- var /** number */ v_2 = base.limit - base.cursor;
- lab1: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab1;
- }
- base.cursor = base.limit - v_2;
- break golab0;
- }
- base.cursor = base.limit - v_2;
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
- lab2: {
- var /** number */ v_3 = base.limit - base.cursor;
- lab3: {
+ lab0: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab1: {
if (!(base.eq_s_b("a")))
{
- break lab3;
+ break lab1;
}
- golab4: while(true)
+ if (!base.go_out_grouping_b(g_vowel1, 97, 305))
{
- var /** number */ v_4 = base.limit - base.cursor;
- lab5: {
- if (!(base.in_grouping_b(g_vowel1, 97, 305)))
- {
- break lab5;
- }
- base.cursor = base.limit - v_4;
- break golab4;
- }
- base.cursor = base.limit - v_4;
- if (base.cursor <= base.limit_backward)
- {
- break lab3;
- }
- base.cursor--;
+ break lab1;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab6: {
+ base.cursor = base.limit - v_2;
+ lab2: {
if (!(base.eq_s_b("e")))
{
- break lab6;
+ break lab2;
}
- golab7: while(true)
+ if (!base.go_out_grouping_b(g_vowel2, 101, 252))
{
- var /** number */ v_5 = base.limit - base.cursor;
- lab8: {
- if (!(base.in_grouping_b(g_vowel2, 101, 252)))
- {
- break lab8;
- }
- base.cursor = base.limit - v_5;
- break golab7;
- }
- base.cursor = base.limit - v_5;
- if (base.cursor <= base.limit_backward)
- {
- break lab6;
- }
- base.cursor--;
+ break lab2;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab9: {
+ base.cursor = base.limit - v_2;
+ lab3: {
if (!(base.eq_s_b("\u0131")))
{
- break lab9;
+ break lab3;
}
- golab10: while(true)
+ if (!base.go_out_grouping_b(g_vowel3, 97, 305))
{
- var /** number */ v_6 = base.limit - base.cursor;
- lab11: {
- if (!(base.in_grouping_b(g_vowel3, 97, 305)))
- {
- break lab11;
- }
- base.cursor = base.limit - v_6;
- break golab10;
- }
- base.cursor = base.limit - v_6;
- if (base.cursor <= base.limit_backward)
- {
- break lab9;
- }
- base.cursor--;
+ break lab3;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab12: {
+ base.cursor = base.limit - v_2;
+ lab4: {
if (!(base.eq_s_b("i")))
{
- break lab12;
+ break lab4;
}
- golab13: while(true)
+ if (!base.go_out_grouping_b(g_vowel4, 101, 105))
{
- var /** number */ v_7 = base.limit - base.cursor;
- lab14: {
- if (!(base.in_grouping_b(g_vowel4, 101, 105)))
- {
- break lab14;
- }
- base.cursor = base.limit - v_7;
- break golab13;
- }
- base.cursor = base.limit - v_7;
- if (base.cursor <= base.limit_backward)
- {
- break lab12;
- }
- base.cursor--;
+ break lab4;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab15: {
+ base.cursor = base.limit - v_2;
+ lab5: {
if (!(base.eq_s_b("o")))
{
- break lab15;
+ break lab5;
}
- golab16: while(true)
+ if (!base.go_out_grouping_b(g_vowel5, 111, 117))
{
- var /** number */ v_8 = base.limit - base.cursor;
- lab17: {
- if (!(base.in_grouping_b(g_vowel5, 111, 117)))
- {
- break lab17;
- }
- base.cursor = base.limit - v_8;
- break golab16;
- }
- base.cursor = base.limit - v_8;
- if (base.cursor <= base.limit_backward)
- {
- break lab15;
- }
- base.cursor--;
+ break lab5;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab18: {
+ base.cursor = base.limit - v_2;
+ lab6: {
if (!(base.eq_s_b("\u00F6")))
{
- break lab18;
+ break lab6;
}
- golab19: while(true)
+ if (!base.go_out_grouping_b(g_vowel6, 246, 252))
{
- var /** number */ v_9 = base.limit - base.cursor;
- lab20: {
- if (!(base.in_grouping_b(g_vowel6, 246, 252)))
- {
- break lab20;
- }
- base.cursor = base.limit - v_9;
- break golab19;
- }
- base.cursor = base.limit - v_9;
- if (base.cursor <= base.limit_backward)
- {
- break lab18;
- }
- base.cursor--;
+ break lab6;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
- lab21: {
+ base.cursor = base.limit - v_2;
+ lab7: {
if (!(base.eq_s_b("u")))
{
- break lab21;
+ break lab7;
}
- golab22: while(true)
+ if (!base.go_out_grouping_b(g_vowel5, 111, 117))
{
- var /** number */ v_10 = base.limit - base.cursor;
- lab23: {
- if (!(base.in_grouping_b(g_vowel5, 111, 117)))
- {
- break lab23;
- }
- base.cursor = base.limit - v_10;
- break golab22;
- }
- base.cursor = base.limit - v_10;
- if (base.cursor <= base.limit_backward)
- {
- break lab21;
- }
- base.cursor--;
+ break lab7;
}
- break lab2;
+ break lab0;
}
- base.cursor = base.limit - v_3;
+ base.cursor = base.limit - v_2;
if (!(base.eq_s_b("\u00FC")))
{
return false;
}
- golab24: while(true)
+ if (!base.go_out_grouping_b(g_vowel6, 246, 252))
{
- var /** number */ v_11 = base.limit - base.cursor;
- lab25: {
- if (!(base.in_grouping_b(g_vowel6, 246, 252)))
- {
- break lab25;
- }
- base.cursor = base.limit - v_11;
- break golab24;
- }
- base.cursor = base.limit - v_11;
- if (base.cursor <= base.limit_backward)
- {
- return false;
- }
- base.cursor--;
+ return false;
}
}
base.cursor = base.limit - v_1;
@@ -449,13 +324,13 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_mark_suffix_with_optional_n_consonant() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("n")))
{
break lab1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_vowel, 97, 305)))
{
break lab1;
@@ -465,9 +340,9 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
if (!(base.eq_s_b("n")))
{
break lab2;
@@ -477,7 +352,7 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_3;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
if (base.cursor <= base.limit_backward)
{
return false;
@@ -495,13 +370,13 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_mark_suffix_with_optional_s_consonant() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("s")))
{
break lab1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_vowel, 97, 305)))
{
break lab1;
@@ -511,9 +386,9 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
if (!(base.eq_s_b("s")))
{
break lab2;
@@ -523,7 +398,7 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_3;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
if (base.cursor <= base.limit_backward)
{
return false;
@@ -541,13 +416,13 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_mark_suffix_with_optional_y_consonant() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("y")))
{
break lab1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_vowel, 97, 305)))
{
break lab1;
@@ -557,9 +432,9 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
if (!(base.eq_s_b("y")))
{
break lab2;
@@ -569,7 +444,7 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_3;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
if (base.cursor <= base.limit_backward)
{
return false;
@@ -587,13 +462,13 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_mark_suffix_with_optional_U_vowel() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.in_grouping_b(g_U, 105, 305)))
{
break lab1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
if (!(base.out_grouping_b(g_vowel, 97, 305)))
{
break lab1;
@@ -603,9 +478,9 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
{
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab2: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
if (!(base.in_grouping_b(g_U, 105, 305)))
{
break lab2;
@@ -615,7 +490,7 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_3;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
if (base.cursor <= base.limit_backward)
{
return false;
@@ -1010,10 +885,10 @@ TurkishStemmer = function() {
base.ket = base.cursor;
B_continue_stemming_noun_suffixes = true;
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
lab2: {
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab3: {
if (!r_mark_ymUs_())
{
@@ -1052,7 +927,7 @@ TurkishStemmer = function() {
break lab6;
}
lab7: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab8: {
if (!r_mark_sUnUz())
{
@@ -1111,11 +986,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab14: {
base.ket = base.cursor;
lab15: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab16: {
if (!r_mark_DUr())
{
@@ -1157,7 +1032,7 @@ TurkishStemmer = function() {
break lab19;
}
lab20: {
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab21: {
if (!r_mark_yDU())
{
@@ -1176,7 +1051,7 @@ TurkishStemmer = function() {
base.cursor = base.limit - v_1;
lab22: {
lab23: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab24: {
if (!r_mark_sUnUz())
{
@@ -1211,7 +1086,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab27: {
base.ket = base.cursor;
if (!r_mark_ymUs_())
@@ -1232,11 +1107,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab28: {
base.ket = base.cursor;
lab29: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
lab30: {
if (!r_mark_sUnUz())
{
@@ -1301,7 +1176,7 @@ TurkishStemmer = function() {
return false;
}
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!r_mark_DA())
{
@@ -1312,11 +1187,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
base.ket = base.cursor;
lab3: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab4: {
if (!r_mark_lAr())
{
@@ -1327,7 +1202,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab5: {
if (!r_stem_suffix_chain_before_ki())
{
@@ -1348,7 +1223,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab6: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1382,11 +1257,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab8: {
base.ket = base.cursor;
lab9: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab10: {
if (!r_mark_lArI())
{
@@ -1403,7 +1278,7 @@ TurkishStemmer = function() {
lab11: {
base.ket = base.cursor;
lab12: {
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab13: {
if (!r_mark_possessives())
{
@@ -1422,7 +1297,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab14: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1459,7 +1334,7 @@ TurkishStemmer = function() {
return false;
}
lab15: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
lab16: {
if (!r_mark_lArI())
{
@@ -1483,7 +1358,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_11 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
lab18: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1517,7 +1392,7 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_stem_noun_suffixes() {
lab0: {
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1529,7 +1404,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab2: {
if (!r_stem_suffix_chain_before_ki())
{
@@ -1551,10 +1426,10 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab4: {
lab5: {
- var /** number */ v_4 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
lab6: {
base.ket = base.cursor;
if (!r_mark_lArI())
@@ -1572,7 +1447,7 @@ TurkishStemmer = function() {
lab7: {
base.ket = base.cursor;
lab8: {
- var /** number */ v_5 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
lab9: {
if (!r_mark_possessives())
{
@@ -1591,7 +1466,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_6 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
lab10: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1637,7 +1512,7 @@ TurkishStemmer = function() {
lab11: {
base.ket = base.cursor;
lab12: {
- var /** number */ v_7 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
lab13: {
if (!r_mark_ndA())
{
@@ -1652,7 +1527,7 @@ TurkishStemmer = function() {
}
}
lab14: {
- var /** number */ v_8 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
lab15: {
if (!r_mark_lArI())
{
@@ -1676,7 +1551,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_9 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
lab17: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1709,7 +1584,7 @@ TurkishStemmer = function() {
lab18: {
base.ket = base.cursor;
lab19: {
- var /** number */ v_10 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
lab20: {
if (!r_mark_ndAn())
{
@@ -1724,7 +1599,7 @@ TurkishStemmer = function() {
}
}
lab21: {
- var /** number */ v_11 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
lab22: {
if (!r_mark_sU())
{
@@ -1735,7 +1610,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_12 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
lab23: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1776,11 +1651,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_13 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_13 = base.limit - base.cursor;
lab25: {
base.ket = base.cursor;
lab26: {
- var /** number */ v_14 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_14 = base.limit - base.cursor;
lab27: {
if (!r_mark_possessives())
{
@@ -1791,7 +1666,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_15 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_15 = base.limit - base.cursor;
lab28: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1823,7 +1698,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_16 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_16 = base.limit - base.cursor;
lab30: {
if (!r_stem_suffix_chain_before_ki())
{
@@ -1847,7 +1722,7 @@ TurkishStemmer = function() {
lab31: {
base.ket = base.cursor;
lab32: {
- var /** number */ v_17 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_17 = base.limit - base.cursor;
lab33: {
if (!r_mark_nUn())
{
@@ -1866,10 +1741,10 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_18 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_18 = base.limit - base.cursor;
lab34: {
lab35: {
- var /** number */ v_19 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_19 = base.limit - base.cursor;
lab36: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1891,7 +1766,7 @@ TurkishStemmer = function() {
lab37: {
base.ket = base.cursor;
lab38: {
- var /** number */ v_20 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_20 = base.limit - base.cursor;
lab39: {
if (!r_mark_possessives())
{
@@ -1910,7 +1785,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_21 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_21 = base.limit - base.cursor;
lab40: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -1967,7 +1842,7 @@ TurkishStemmer = function() {
lab43: {
base.ket = base.cursor;
lab44: {
- var /** number */ v_22 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_22 = base.limit - base.cursor;
lab45: {
if (!r_mark_DA())
{
@@ -1994,11 +1869,11 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_23 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_23 = base.limit - base.cursor;
lab47: {
base.ket = base.cursor;
lab48: {
- var /** number */ v_24 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_24 = base.limit - base.cursor;
lab49: {
if (!r_mark_possessives())
{
@@ -2009,7 +1884,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_25 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_25 = base.limit - base.cursor;
lab50: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -2044,7 +1919,7 @@ TurkishStemmer = function() {
base.cursor = base.limit - v_1;
base.ket = base.cursor;
lab51: {
- var /** number */ v_26 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_26 = base.limit - base.cursor;
lab52: {
if (!r_mark_possessives())
{
@@ -2063,7 +1938,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_27 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_27 = base.limit - base.cursor;
lab53: {
base.ket = base.cursor;
if (!r_mark_lAr())
@@ -2127,9 +2002,10 @@ TurkishStemmer = function() {
/** @return {boolean} */
function r_append_U_to_stems_ending_with_d_or_g() {
- var /** number */ v_1 = base.limit - base.cursor;
+ base.ket = base.cursor;
+ base.bra = base.cursor;
lab0: {
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab1: {
if (!(base.eq_s_b("d")))
{
@@ -2137,186 +2013,105 @@ TurkishStemmer = function() {
}
break lab0;
}
- base.cursor = base.limit - v_2;
+ base.cursor = base.limit - v_1;
if (!(base.eq_s_b("g")))
{
return false;
}
}
- base.cursor = base.limit - v_1;
+ if (!base.go_out_grouping_b(g_vowel, 97, 305))
+ {
+ return false;
+ }
lab2: {
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
lab3: {
- var /** number */ v_4 = base.limit - base.cursor;
- golab4: while(true)
- {
- var /** number */ v_5 = base.limit - base.cursor;
+ lab4: {
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
lab5: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab5;
- }
- base.cursor = base.limit - v_5;
- break golab4;
- }
- base.cursor = base.limit - v_5;
- if (base.cursor <= base.limit_backward)
- {
- break lab3;
- }
- base.cursor--;
- }
- lab6: {
- var /** number */ v_6 = base.limit - base.cursor;
- lab7: {
if (!(base.eq_s_b("a")))
{
- break lab7;
+ break lab5;
}
- break lab6;
+ break lab4;
}
- base.cursor = base.limit - v_6;
+ base.cursor = base.limit - v_3;
if (!(base.eq_s_b("\u0131")))
{
break lab3;
}
}
- base.cursor = base.limit - v_4;
+ if (!base.slice_from("\u0131"))
{
- var /** number */ c1 = base.cursor;
- base.insert(base.cursor, base.cursor, "\u0131");
- base.cursor = c1;
+ return false;
}
break lab2;
}
- base.cursor = base.limit - v_3;
- lab8: {
- var /** number */ v_7 = base.limit - base.cursor;
- golab9: while(true)
- {
- var /** number */ v_8 = base.limit - base.cursor;
- lab10: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab10;
- }
- base.cursor = base.limit - v_8;
- break golab9;
- }
- base.cursor = base.limit - v_8;
- if (base.cursor <= base.limit_backward)
- {
- break lab8;
- }
- base.cursor--;
- }
- lab11: {
- var /** number */ v_9 = base.limit - base.cursor;
- lab12: {
+ base.cursor = base.limit - v_2;
+ lab6: {
+ lab7: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab8: {
if (!(base.eq_s_b("e")))
{
- break lab12;
+ break lab8;
}
- break lab11;
+ break lab7;
}
- base.cursor = base.limit - v_9;
+ base.cursor = base.limit - v_4;
if (!(base.eq_s_b("i")))
{
- break lab8;
+ break lab6;
}
}
- base.cursor = base.limit - v_7;
+ if (!base.slice_from("i"))
{
- var /** number */ c2 = base.cursor;
- base.insert(base.cursor, base.cursor, "i");
- base.cursor = c2;
+ return false;
}
break lab2;
}
- base.cursor = base.limit - v_3;
- lab13: {
- var /** number */ v_10 = base.limit - base.cursor;
- golab14: while(true)
- {
- var /** number */ v_11 = base.limit - base.cursor;
- lab15: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab15;
- }
- base.cursor = base.limit - v_11;
- break golab14;
- }
- base.cursor = base.limit - v_11;
- if (base.cursor <= base.limit_backward)
- {
- break lab13;
- }
- base.cursor--;
- }
- lab16: {
- var /** number */ v_12 = base.limit - base.cursor;
- lab17: {
+ base.cursor = base.limit - v_2;
+ lab9: {
+ lab10: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab11: {
if (!(base.eq_s_b("o")))
{
- break lab17;
+ break lab11;
}
- break lab16;
+ break lab10;
}
- base.cursor = base.limit - v_12;
+ base.cursor = base.limit - v_5;
if (!(base.eq_s_b("u")))
{
- break lab13;
- }
- }
- base.cursor = base.limit - v_10;
- {
- var /** number */ c3 = base.cursor;
- base.insert(base.cursor, base.cursor, "u");
- base.cursor = c3;
- }
- break lab2;
- }
- base.cursor = base.limit - v_3;
- var /** number */ v_13 = base.limit - base.cursor;
- golab18: while(true)
- {
- var /** number */ v_14 = base.limit - base.cursor;
- lab19: {
- if (!(base.in_grouping_b(g_vowel, 97, 305)))
- {
- break lab19;
+ break lab9;
}
- base.cursor = base.limit - v_14;
- break golab18;
}
- base.cursor = base.limit - v_14;
- if (base.cursor <= base.limit_backward)
+ if (!base.slice_from("u"))
{
return false;
}
- base.cursor--;
+ break lab2;
}
- lab20: {
- var /** number */ v_15 = base.limit - base.cursor;
- lab21: {
+ base.cursor = base.limit - v_2;
+ lab12: {
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab13: {
if (!(base.eq_s_b("\u00F6")))
{
- break lab21;
+ break lab13;
}
- break lab20;
+ break lab12;
}
- base.cursor = base.limit - v_15;
+ base.cursor = base.limit - v_6;
if (!(base.eq_s_b("\u00FC")))
{
return false;
}
}
- base.cursor = base.limit - v_13;
+ if (!base.slice_from("\u00FC"))
{
- var /** number */ c4 = base.cursor;
- base.insert(base.cursor, base.cursor, "\u00FC");
- base.cursor = c4;
+ return false;
}
}
return true;
@@ -2328,7 +2123,7 @@ TurkishStemmer = function() {
{
return false;
}
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
if (!(base.eq_s_b("soy")))
{
@@ -2344,39 +2139,92 @@ TurkishStemmer = function() {
};
/** @return {boolean} */
- function r_more_than_one_syllable_word() {
- var /** number */ v_1 = base.cursor;
- {
- var v_2 = 2;
- while(true)
+ function r_remove_proper_noun_suffix() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ golab1: while(true)
{
- var /** number */ v_3 = base.cursor;
- lab0: {
- golab1: while(true)
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab2: {
{
- lab2: {
- if (!(base.in_grouping(g_vowel, 97, 305)))
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ if (!(base.eq_s("'")))
{
- break lab2;
+ break lab3;
}
- break golab1;
- }
- if (base.cursor >= base.limit)
- {
- break lab0;
+ break lab2;
}
- base.cursor++;
+ base.cursor = v_3;
}
- v_2--;
- continue;
+ base.cursor = v_2;
+ break golab1;
}
- base.cursor = v_3;
- break;
+ base.cursor = v_2;
+ if (base.cursor >= base.limit)
+ {
+ break lab0;
+ }
+ base.cursor++;
}
- if (v_2 > 0)
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 2;
+ if (c1 > base.limit)
+ {
+ break lab4;
+ }
+ base.cursor = c1;
+ }
+ golab5: while(true)
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab6: {
+ if (!(base.eq_s("'")))
+ {
+ break lab6;
+ }
+ base.cursor = v_5;
+ break golab5;
+ }
+ base.cursor = v_5;
+ if (base.cursor >= base.limit)
+ {
+ break lab4;
+ }
+ base.cursor++;
+ }
+ base.bra = base.cursor;
+ base.cursor = base.limit;
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ base.cursor = v_4;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_more_than_one_syllable_word() {
+ /** @const */ var /** number */ v_1 = base.cursor;
+ for (var /** number */ v_2 = 2; v_2 > 0; v_2--)
+ {
+ if (!base.go_out_grouping(g_vowel, 97, 305))
{
return false;
}
+ base.cursor++;
}
base.cursor = v_1;
return true;
@@ -2386,7 +2234,7 @@ TurkishStemmer = function() {
function r_postlude() {
base.limit_backward = base.cursor; base.cursor = base.limit;
{
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
lab0: {
if (!r_is_reserved_word())
{
@@ -2396,10 +2244,10 @@ TurkishStemmer = function() {
}
base.cursor = base.limit - v_1;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_append_U_to_stems_ending_with_d_or_g();
base.cursor = base.limit - v_2;
- var /** number */ v_3 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
r_post_process_last_consonants();
base.cursor = base.limit - v_3;
base.cursor = base.limit_backward;
@@ -2407,19 +2255,20 @@ TurkishStemmer = function() {
};
this.stem = /** @return {boolean} */ function() {
+ r_remove_proper_noun_suffix();
if (!r_more_than_one_syllable_word())
{
return false;
}
base.limit_backward = base.cursor; base.cursor = base.limit;
- var /** number */ v_1 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
r_stem_nominal_verb_suffixes();
base.cursor = base.limit - v_1;
if (!B_continue_stemming_noun_suffixes)
{
return false;
}
- var /** number */ v_2 = base.limit - base.cursor;
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
r_stem_noun_suffixes();
base.cursor = base.limit - v_2;
base.cursor = base.limit_backward;
diff --git a/sphinx/search/non-minified-js/yiddish-stemmer.js b/sphinx/search/non-minified-js/yiddish-stemmer.js
new file mode 100644
index 00000000000..b9a7ddb411c
--- /dev/null
+++ b/sphinx/search/non-minified-js/yiddish-stemmer.js
@@ -0,0 +1,1160 @@
+// Generated from yiddish.sbl by Snowball 3.0.1 - https://snowballstem.org/
+
+/**@constructor*/
+var YiddishStemmer = function() {
+ var base = new BaseStemmer();
+
+ /** @const */ var a_0 = [
+ ["\u05D5\u05D5", -1, 1],
+ ["\u05D5\u05D9", -1, 2],
+ ["\u05D9\u05D9", -1, 3],
+ ["\u05DA", -1, 4],
+ ["\u05DD", -1, 5],
+ ["\u05DF", -1, 6],
+ ["\u05E3", -1, 7],
+ ["\u05E5", -1, 8]
+ ];
+
+ /** @const */ var a_1 = [
+ ["\u05D0\u05D3\u05D5\u05E8\u05DB", -1, 1],
+ ["\u05D0\u05D4\u05D9\u05E0", -1, 1],
+ ["\u05D0\u05D4\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05D4\u05F2\u05DE", -1, 1],
+ ["\u05D0\u05D5\u05DE", -1, 1],
+ ["\u05D0\u05D5\u05E0\u05D8\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05D9\u05D1\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05E0", -1, 1],
+ ["\u05D0\u05E0\u05D8", 7, 1],
+ ["\u05D0\u05E0\u05D8\u05E7\u05E2\u05D2\u05E0", 8, 1],
+ ["\u05D0\u05E0\u05D9\u05D3\u05E2\u05E8", 7, 1],
+ ["\u05D0\u05E4", -1, 1],
+ ["\u05D0\u05E4\u05D9\u05E8", 11, 1],
+ ["\u05D0\u05E7\u05E2\u05D2\u05E0", -1, 1],
+ ["\u05D0\u05E8\u05D0\u05E4", -1, 1],
+ ["\u05D0\u05E8\u05D5\u05DE", -1, 1],
+ ["\u05D0\u05E8\u05D5\u05E0\u05D8\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05E8\u05D9\u05D1\u05E2\u05E8", -1, 1],
+ ["\u05D0\u05E8\u05F1\u05E1", -1, 1],
+ ["\u05D0\u05E8\u05F1\u05E4", -1, 1],
+ ["\u05D0\u05E8\u05F2\u05E0", -1, 1],
+ ["\u05D0\u05F0\u05E2\u05E7", -1, 1],
+ ["\u05D0\u05F1\u05E1", -1, 1],
+ ["\u05D0\u05F1\u05E4", -1, 1],
+ ["\u05D0\u05F2\u05E0", -1, 1],
+ ["\u05D1\u05D0", -1, 1],
+ ["\u05D1\u05F2", -1, 1],
+ ["\u05D3\u05D5\u05E8\u05DB", -1, 1],
+ ["\u05D3\u05E2\u05E8", -1, 1],
+ ["\u05DE\u05D9\u05D8", -1, 1],
+ ["\u05E0\u05D0\u05DB", -1, 1],
+ ["\u05E4\u05D0\u05E8", -1, 1],
+ ["\u05E4\u05D0\u05E8\u05D1\u05F2", 31, 1],
+ ["\u05E4\u05D0\u05E8\u05F1\u05E1", 31, 1],
+ ["\u05E4\u05D5\u05E0\u05D0\u05E0\u05D3\u05E2\u05E8", -1, 1],
+ ["\u05E6\u05D5", -1, 1],
+ ["\u05E6\u05D5\u05D6\u05D0\u05DE\u05E2\u05E0", 35, 1],
+ ["\u05E6\u05D5\u05E0\u05F1\u05E4", 35, 1],
+ ["\u05E6\u05D5\u05E8\u05D9\u05E7", 35, 1],
+ ["\u05E6\u05E2", -1, 1]
+ ];
+
+ /** @const */ var a_2 = [
+ ["\u05D3\u05D6\u05E9", -1, -1],
+ ["\u05E9\u05D8\u05E8", -1, -1],
+ ["\u05E9\u05D8\u05E9", -1, -1],
+ ["\u05E9\u05E4\u05E8", -1, -1]
+ ];
+
+ /** @const */ var a_3 = [
+ ["\u05E7\u05DC\u05D9\u05D1", -1, 9],
+ ["\u05E8\u05D9\u05D1", -1, 10],
+ ["\u05D8\u05E8\u05D9\u05D1", 1, 7],
+ ["\u05E9\u05E8\u05D9\u05D1", 1, 15],
+ ["\u05D4\u05F1\u05D1", -1, 23],
+ ["\u05E9\u05F0\u05D9\u05D2", -1, 12],
+ ["\u05D2\u05D0\u05E0\u05D2", -1, 1],
+ ["\u05D6\u05D5\u05E0\u05D2", -1, 18],
+ ["\u05E9\u05DC\u05D5\u05E0\u05D2", -1, 21],
+ ["\u05E6\u05F0\u05D5\u05E0\u05D2", -1, 20],
+ ["\u05D1\u05F1\u05D2", -1, 22],
+ ["\u05D1\u05D5\u05E0\u05D3", -1, 16],
+ ["\u05F0\u05D9\u05D6", -1, 6],
+ ["\u05D1\u05D9\u05D8", -1, 4],
+ ["\u05DC\u05D9\u05D8", -1, 8],
+ ["\u05DE\u05D9\u05D8", -1, 3],
+ ["\u05E9\u05E0\u05D9\u05D8", -1, 14],
+ ["\u05E0\u05D5\u05DE", -1, 2],
+ ["\u05E9\u05D8\u05D0\u05E0", -1, 25],
+ ["\u05D1\u05D9\u05E1", -1, 5],
+ ["\u05E9\u05DE\u05D9\u05E1", -1, 13],
+ ["\u05E8\u05D9\u05E1", -1, 11],
+ ["\u05D8\u05E8\u05D5\u05E0\u05E7", -1, 19],
+ ["\u05E4\u05D0\u05E8\u05DC\u05F1\u05E8", -1, 24],
+ ["\u05E9\u05F0\u05F1\u05E8", -1, 26],
+ ["\u05F0\u05D5\u05D8\u05E9", -1, 17]
+ ];
+
+ /** @const */ var a_4 = [
+ ["\u05D5\u05E0\u05D2", -1, 1],
+ ["\u05E1\u05D8\u05D5", -1, 1],
+ ["\u05D8", -1, 1],
+ ["\u05D1\u05E8\u05D0\u05DB\u05D8", 2, 31],
+ ["\u05E1\u05D8", 2, 1],
+ ["\u05D9\u05E1\u05D8", 4, 33],
+ ["\u05E2\u05D8", 2, 1],
+ ["\u05E9\u05D0\u05E4\u05D8", 2, 1],
+ ["\u05D4\u05F2\u05D8", 2, 1],
+ ["\u05E7\u05F2\u05D8", 2, 1],
+ ["\u05D9\u05E7\u05F2\u05D8", 9, 1],
+ ["\u05DC\u05E2\u05DB", -1, 1],
+ ["\u05E2\u05DC\u05E2\u05DB", 11, 1],
+ ["\u05D9\u05D6\u05DE", -1, 1],
+ ["\u05D9\u05DE", -1, 1],
+ ["\u05E2\u05DE", -1, 1],
+ ["\u05E2\u05E0\u05E2\u05DE", 15, 3],
+ ["\u05D8\u05E2\u05E0\u05E2\u05DE", 16, 4],
+ ["\u05E0", -1, 1],
+ ["\u05E7\u05DC\u05D9\u05D1\u05E0", 18, 14],
+ ["\u05E8\u05D9\u05D1\u05E0", 18, 15],
+ ["\u05D8\u05E8\u05D9\u05D1\u05E0", 20, 12],
+ ["\u05E9\u05E8\u05D9\u05D1\u05E0", 20, 7],
+ ["\u05D4\u05F1\u05D1\u05E0", 18, 27],
+ ["\u05E9\u05F0\u05D9\u05D2\u05E0", 18, 17],
+ ["\u05D6\u05D5\u05E0\u05D2\u05E0", 18, 22],
+ ["\u05E9\u05DC\u05D5\u05E0\u05D2\u05E0", 18, 25],
+ ["\u05E6\u05F0\u05D5\u05E0\u05D2\u05E0", 18, 24],
+ ["\u05D1\u05F1\u05D2\u05E0", 18, 26],
+ ["\u05D1\u05D5\u05E0\u05D3\u05E0", 18, 20],
+ ["\u05F0\u05D9\u05D6\u05E0", 18, 11],
+ ["\u05D8\u05E0", 18, 4],
+ ["GE\u05D1\u05D9\u05D8\u05E0", 31, 9],
+ ["GE\u05DC\u05D9\u05D8\u05E0", 31, 13],
+ ["GE\u05DE\u05D9\u05D8\u05E0", 31, 8],
+ ["\u05E9\u05E0\u05D9\u05D8\u05E0", 31, 19],
+ ["\u05E1\u05D8\u05E0", 31, 1],
+ ["\u05D9\u05E1\u05D8\u05E0", 36, 1],
+ ["\u05E2\u05D8\u05E0", 31, 1],
+ ["GE\u05D1\u05D9\u05E1\u05E0", 18, 10],
+ ["\u05E9\u05DE\u05D9\u05E1\u05E0", 18, 18],
+ ["GE\u05E8\u05D9\u05E1\u05E0", 18, 16],
+ ["\u05E2\u05E0", 18, 1],
+ ["\u05D2\u05D0\u05E0\u05D2\u05E2\u05E0", 42, 5],
+ ["\u05E2\u05DC\u05E2\u05E0", 42, 1],
+ ["\u05E0\u05D5\u05DE\u05E2\u05E0", 42, 6],
+ ["\u05D9\u05D6\u05DE\u05E2\u05E0", 42, 1],
+ ["\u05E9\u05D8\u05D0\u05E0\u05E2\u05E0", 42, 29],
+ ["\u05D8\u05E8\u05D5\u05E0\u05E7\u05E0", 18, 23],
+ ["\u05E4\u05D0\u05E8\u05DC\u05F1\u05E8\u05E0", 18, 28],
+ ["\u05E9\u05F0\u05F1\u05E8\u05E0", 18, 30],
+ ["\u05F0\u05D5\u05D8\u05E9\u05E0", 18, 21],
+ ["\u05D2\u05F2\u05E0", 18, 5],
+ ["\u05E1", -1, 1],
+ ["\u05D8\u05E1", 53, 4],
+ ["\u05E2\u05D8\u05E1", 54, 1],
+ ["\u05E0\u05E1", 53, 1],
+ ["\u05D8\u05E0\u05E1", 56, 4],
+ ["\u05E2\u05E0\u05E1", 56, 3],
+ ["\u05E2\u05E1", 53, 1],
+ ["\u05D9\u05E2\u05E1", 59, 2],
+ ["\u05E2\u05DC\u05E2\u05E1", 59, 1],
+ ["\u05E2\u05E8\u05E1", 53, 1],
+ ["\u05E2\u05E0\u05E2\u05E8\u05E1", 62, 1],
+ ["\u05E2", -1, 1],
+ ["\u05D8\u05E2", 64, 4],
+ ["\u05E1\u05D8\u05E2", 65, 1],
+ ["\u05E2\u05D8\u05E2", 65, 1],
+ ["\u05D9\u05E2", 64, -1],
+ ["\u05E2\u05DC\u05E2", 64, 1],
+ ["\u05E2\u05E0\u05E2", 64, 3],
+ ["\u05D8\u05E2\u05E0\u05E2", 70, 4],
+ ["\u05E2\u05E8", -1, 1],
+ ["\u05D8\u05E2\u05E8", 72, 4],
+ ["\u05E1\u05D8\u05E2\u05E8", 73, 1],
+ ["\u05E2\u05D8\u05E2\u05E8", 73, 1],
+ ["\u05E2\u05E0\u05E2\u05E8", 72, 3],
+ ["\u05D8\u05E2\u05E0\u05E2\u05E8", 76, 4],
+ ["\u05D5\u05EA", -1, 32]
+ ];
+
+ /** @const */ var a_5 = [
+ ["\u05D5\u05E0\u05D2", -1, 1],
+ ["\u05E9\u05D0\u05E4\u05D8", -1, 1],
+ ["\u05D4\u05F2\u05D8", -1, 1],
+ ["\u05E7\u05F2\u05D8", -1, 1],
+ ["\u05D9\u05E7\u05F2\u05D8", 3, 1],
+ ["\u05DC", -1, 2]
+ ];
+
+ /** @const */ var a_6 = [
+ ["\u05D9\u05D2", -1, 1],
+ ["\u05D9\u05E7", -1, 1],
+ ["\u05D3\u05D9\u05E7", 1, 1],
+ ["\u05E0\u05D3\u05D9\u05E7", 2, 1],
+ ["\u05E2\u05E0\u05D3\u05D9\u05E7", 3, 1],
+ ["\u05D1\u05DC\u05D9\u05E7", 1, -1],
+ ["\u05D2\u05DC\u05D9\u05E7", 1, -1],
+ ["\u05E0\u05D9\u05E7", 1, 1],
+ ["\u05D9\u05E9", -1, 1]
+ ];
+
+ /** @const */ var /** Array */ g_niked = [255, 155, 6];
+
+ /** @const */ var /** Array */ g_vowel = [33, 2, 4, 0, 6];
+
+ /** @const */ var /** Array */ g_consonant = [239, 254, 253, 131];
+
+ var /** number */ I_x = 0;
+ var /** number */ I_p1 = 0;
+
+
+ /** @return {boolean} */
+ function r_prelude() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ golab2: while(true)
+ {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ base.bra = base.cursor;
+ among_var = base.find_among(a_0);
+ if (among_var == 0)
+ {
+ break lab3;
+ }
+ base.ket = base.cursor;
+ switch (among_var) {
+ case 1:
+ {
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab4: {
+ if (!(base.eq_s("\u05BC")))
+ {
+ break lab4;
+ }
+ break lab3;
+ }
+ base.cursor = v_4;
+ }
+ if (!base.slice_from("\u05F0"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab5: {
+ if (!(base.eq_s("\u05B4")))
+ {
+ break lab5;
+ }
+ break lab3;
+ }
+ base.cursor = v_5;
+ }
+ if (!base.slice_from("\u05F1"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab6: {
+ if (!(base.eq_s("\u05B4")))
+ {
+ break lab6;
+ }
+ break lab3;
+ }
+ base.cursor = v_6;
+ }
+ if (!base.slice_from("\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u05DB"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u05DE"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u05E0"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u05E4"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u05E6"))
+ {
+ return false;
+ }
+ break;
+ }
+ base.cursor = v_3;
+ break golab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor >= base.limit)
+ {
+ break lab1;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_2;
+ break;
+ }
+ }
+ base.cursor = v_1;
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab7: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_8 = base.cursor;
+ lab8: {
+ golab9: while(true)
+ {
+ /** @const */ var /** number */ v_9 = base.cursor;
+ lab10: {
+ base.bra = base.cursor;
+ if (!(base.in_grouping(g_niked, 1456, 1474)))
+ {
+ break lab10;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.cursor = v_9;
+ break golab9;
+ }
+ base.cursor = v_9;
+ if (base.cursor >= base.limit)
+ {
+ break lab8;
+ }
+ base.cursor++;
+ }
+ continue;
+ }
+ base.cursor = v_8;
+ break;
+ }
+ }
+ base.cursor = v_7;
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_mark_regions() {
+ I_p1 = base.limit;
+ /** @const */ var /** number */ v_1 = base.cursor;
+ lab0: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("\u05D2\u05E2")))
+ {
+ base.cursor = v_1;
+ break lab0;
+ }
+ base.ket = base.cursor;
+ {
+ /** @const */ var /** number */ v_2 = base.cursor;
+ lab1: {
+ lab2: {
+ /** @const */ var /** number */ v_3 = base.cursor;
+ lab3: {
+ if (!(base.eq_s("\u05DC\u05D8")))
+ {
+ break lab3;
+ }
+ break lab2;
+ }
+ base.cursor = v_3;
+ lab4: {
+ if (!(base.eq_s("\u05D1\u05E0")))
+ {
+ break lab4;
+ }
+ break lab2;
+ }
+ base.cursor = v_3;
+ if (base.cursor < base.limit)
+ {
+ break lab1;
+ }
+ }
+ base.cursor = v_1;
+ break lab0;
+ }
+ base.cursor = v_2;
+ }
+ if (!base.slice_from("GE"))
+ {
+ return false;
+ }
+ }
+ /** @const */ var /** number */ v_4 = base.cursor;
+ lab5: {
+ if (base.find_among(a_1) == 0)
+ {
+ base.cursor = v_4;
+ break lab5;
+ }
+ lab6: {
+ /** @const */ var /** number */ v_5 = base.cursor;
+ lab7: {
+ /** @const */ var /** number */ v_6 = base.cursor;
+ lab8: {
+ /** @const */ var /** number */ v_7 = base.cursor;
+ lab9: {
+ if (!(base.eq_s("\u05E6\u05D5\u05D2\u05E0")))
+ {
+ break lab9;
+ }
+ break lab8;
+ }
+ base.cursor = v_7;
+ lab10: {
+ if (!(base.eq_s("\u05E6\u05D5\u05E7\u05D8")))
+ {
+ break lab10;
+ }
+ break lab8;
+ }
+ base.cursor = v_7;
+ if (!(base.eq_s("\u05E6\u05D5\u05E7\u05E0")))
+ {
+ break lab7;
+ }
+ }
+ if (base.cursor < base.limit)
+ {
+ break lab7;
+ }
+ base.cursor = v_6;
+ break lab6;
+ }
+ base.cursor = v_5;
+ lab11: {
+ /** @const */ var /** number */ v_8 = base.cursor;
+ if (!(base.eq_s("\u05D2\u05E2\u05D1\u05E0")))
+ {
+ break lab11;
+ }
+ base.cursor = v_8;
+ break lab6;
+ }
+ base.cursor = v_5;
+ lab12: {
+ base.bra = base.cursor;
+ if (!(base.eq_s("\u05D2\u05E2")))
+ {
+ break lab12;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("GE"))
+ {
+ return false;
+ }
+ break lab6;
+ }
+ base.cursor = v_5;
+ base.bra = base.cursor;
+ if (!(base.eq_s("\u05E6\u05D5")))
+ {
+ base.cursor = v_4;
+ break lab5;
+ }
+ base.ket = base.cursor;
+ if (!base.slice_from("TSU"))
+ {
+ return false;
+ }
+ }
+ }
+ /** @const */ var /** number */ v_9 = base.cursor;
+ {
+ /** @const */ var /** number */ c1 = base.cursor + 3;
+ if (c1 > base.limit)
+ {
+ return false;
+ }
+ base.cursor = c1;
+ }
+ I_x = base.cursor;
+ base.cursor = v_9;
+ /** @const */ var /** number */ v_10 = base.cursor;
+ lab13: {
+ if (base.find_among(a_2) == 0)
+ {
+ base.cursor = v_10;
+ break lab13;
+ }
+ }
+ {
+ /** @const */ var /** number */ v_11 = base.cursor;
+ lab14: {
+ if (!(base.in_grouping(g_consonant, 1489, 1520)))
+ {
+ break lab14;
+ }
+ if (!(base.in_grouping(g_consonant, 1489, 1520)))
+ {
+ break lab14;
+ }
+ if (!(base.in_grouping(g_consonant, 1489, 1520)))
+ {
+ break lab14;
+ }
+ I_p1 = base.cursor;
+ return false;
+ }
+ base.cursor = v_11;
+ }
+ if (!base.go_out_grouping(g_vowel, 1488, 1522))
+ {
+ return false;
+ }
+ base.cursor++;
+ if (!base.go_in_grouping(g_vowel, 1488, 1522))
+ {
+ return false;
+ }
+ I_p1 = base.cursor;
+ lab15: {
+ if (I_p1 >= I_x)
+ {
+ break lab15;
+ }
+ I_p1 = I_x;
+ }
+ return true;
+ };
+
+ /** @return {boolean} */
+ function r_R1() {
+ return I_p1 <= base.cursor;
+ };
+
+ /** @return {boolean} */
+ function r_R1plus3() {
+ return I_p1 <= (base.cursor + 3);
+ };
+
+ /** @return {boolean} */
+ function r_standard_suffix() {
+ var /** number */ among_var;
+ /** @const */ var /** number */ v_1 = base.limit - base.cursor;
+ lab0: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_4);
+ if (among_var == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u05D9\u05E2"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_3);
+ if (among_var == 0)
+ {
+ break lab0;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!base.slice_from("\u05D2\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!base.slice_from("\u05E0\u05E2\u05DE"))
+ {
+ return false;
+ }
+ break;
+ case 3:
+ if (!base.slice_from("\u05DE\u05F2\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 4:
+ if (!base.slice_from("\u05D1\u05F2\u05D8"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u05D1\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u05F0\u05F2\u05D6"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u05D8\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u05DC\u05F2\u05D8"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u05E7\u05DC\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("\u05E8\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("\u05E9\u05F0\u05F2\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("\u05E9\u05DE\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("\u05E9\u05E0\u05F2\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("\u05E9\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("\u05D1\u05D9\u05E0\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("\u05F0\u05D9\u05D8\u05E9"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u05D6\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("\u05D8\u05E8\u05D9\u05E0\u05E7"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("\u05E6\u05F0\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("\u05E9\u05DC\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("\u05D1\u05F2\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u05D4\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("\u05E4\u05D0\u05E8\u05DC\u05D9\u05E8"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("\u05E9\u05D8\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("\u05E9\u05F0\u05E2\u05E8"))
+ {
+ return false;
+ }
+ break;
+ }
+ break;
+ case 4:
+ lab1: {
+ /** @const */ var /** number */ v_2 = base.limit - base.cursor;
+ lab2: {
+ if (!r_R1())
+ {
+ break lab2;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break lab1;
+ }
+ base.cursor = base.limit - v_2;
+ if (!base.slice_from("\u05D8"))
+ {
+ return false;
+ }
+ }
+ base.ket = base.cursor;
+ if (!(base.eq_s_b("\u05D1\u05E8\u05D0\u05DB")))
+ {
+ break lab0;
+ }
+ /** @const */ var /** number */ v_3 = base.limit - base.cursor;
+ lab3: {
+ if (!(base.eq_s_b("\u05D2\u05E2")))
+ {
+ base.cursor = base.limit - v_3;
+ break lab3;
+ }
+ }
+ base.bra = base.cursor;
+ if (!base.slice_from("\u05D1\u05E8\u05E2\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 5:
+ if (!base.slice_from("\u05D2\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 6:
+ if (!base.slice_from("\u05E0\u05E2\u05DE"))
+ {
+ return false;
+ }
+ break;
+ case 7:
+ if (!base.slice_from("\u05E9\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 8:
+ if (!base.slice_from("\u05DE\u05F2\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 9:
+ if (!base.slice_from("\u05D1\u05F2\u05D8"))
+ {
+ return false;
+ }
+ break;
+ case 10:
+ if (!base.slice_from("\u05D1\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 11:
+ if (!base.slice_from("\u05F0\u05F2\u05D6"))
+ {
+ return false;
+ }
+ break;
+ case 12:
+ if (!base.slice_from("\u05D8\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 13:
+ if (!base.slice_from("\u05DC\u05F2\u05D8"))
+ {
+ return false;
+ }
+ break;
+ case 14:
+ if (!base.slice_from("\u05E7\u05DC\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 15:
+ if (!base.slice_from("\u05E8\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 16:
+ if (!base.slice_from("\u05E8\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 17:
+ if (!base.slice_from("\u05E9\u05F0\u05F2\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 18:
+ if (!base.slice_from("\u05E9\u05DE\u05F2\u05E1"))
+ {
+ return false;
+ }
+ break;
+ case 19:
+ if (!base.slice_from("\u05E9\u05E0\u05F2\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 20:
+ if (!base.slice_from("\u05D1\u05D9\u05E0\u05D3"))
+ {
+ return false;
+ }
+ break;
+ case 21:
+ if (!base.slice_from("\u05F0\u05D9\u05D8\u05E9"))
+ {
+ return false;
+ }
+ break;
+ case 22:
+ if (!base.slice_from("\u05D6\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 23:
+ if (!base.slice_from("\u05D8\u05E8\u05D9\u05E0\u05E7"))
+ {
+ return false;
+ }
+ break;
+ case 24:
+ if (!base.slice_from("\u05E6\u05F0\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 25:
+ if (!base.slice_from("\u05E9\u05DC\u05D9\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 26:
+ if (!base.slice_from("\u05D1\u05F2\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 27:
+ if (!base.slice_from("\u05D4\u05F2\u05D1"))
+ {
+ return false;
+ }
+ break;
+ case 28:
+ if (!base.slice_from("\u05E4\u05D0\u05E8\u05DC\u05D9\u05E8"))
+ {
+ return false;
+ }
+ break;
+ case 29:
+ if (!base.slice_from("\u05E9\u05D8\u05F2"))
+ {
+ return false;
+ }
+ break;
+ case 30:
+ if (!base.slice_from("\u05E9\u05F0\u05E2\u05E8"))
+ {
+ return false;
+ }
+ break;
+ case 31:
+ if (!base.slice_from("\u05D1\u05E8\u05E2\u05E0\u05D2"))
+ {
+ return false;
+ }
+ break;
+ case 32:
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_from("\u05D4"))
+ {
+ return false;
+ }
+ break;
+ case 33:
+ lab4: {
+ /** @const */ var /** number */ v_4 = base.limit - base.cursor;
+ lab5: {
+ lab6: {
+ /** @const */ var /** number */ v_5 = base.limit - base.cursor;
+ lab7: {
+ if (!(base.eq_s_b("\u05D2")))
+ {
+ break lab7;
+ }
+ break lab6;
+ }
+ base.cursor = base.limit - v_5;
+ if (!(base.eq_s_b("\u05E9")))
+ {
+ break lab5;
+ }
+ }
+ /** @const */ var /** number */ v_6 = base.limit - base.cursor;
+ lab8: {
+ if (!r_R1plus3())
+ {
+ base.cursor = base.limit - v_6;
+ break lab8;
+ }
+ if (!base.slice_from("\u05D9\u05E1"))
+ {
+ return false;
+ }
+ }
+ break lab4;
+ }
+ base.cursor = base.limit - v_4;
+ if (!r_R1())
+ {
+ break lab0;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_1;
+ /** @const */ var /** number */ v_7 = base.limit - base.cursor;
+ lab9: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_5);
+ if (among_var == 0)
+ {
+ break lab9;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ break lab9;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ case 2:
+ if (!r_R1())
+ {
+ break lab9;
+ }
+ if (!(base.in_grouping_b(g_consonant, 1489, 1520)))
+ {
+ break lab9;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_7;
+ /** @const */ var /** number */ v_8 = base.limit - base.cursor;
+ lab10: {
+ base.ket = base.cursor;
+ among_var = base.find_among_b(a_6);
+ if (among_var == 0)
+ {
+ break lab10;
+ }
+ base.bra = base.cursor;
+ switch (among_var) {
+ case 1:
+ if (!r_R1())
+ {
+ break lab10;
+ }
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ break;
+ }
+ }
+ base.cursor = base.limit - v_8;
+ /** @const */ var /** number */ v_9 = base.limit - base.cursor;
+ lab11: {
+ while(true)
+ {
+ /** @const */ var /** number */ v_10 = base.limit - base.cursor;
+ lab12: {
+ golab13: while(true)
+ {
+ /** @const */ var /** number */ v_11 = base.limit - base.cursor;
+ lab14: {
+ base.ket = base.cursor;
+ lab15: {
+ /** @const */ var /** number */ v_12 = base.limit - base.cursor;
+ lab16: {
+ if (!(base.eq_s_b("GE")))
+ {
+ break lab16;
+ }
+ break lab15;
+ }
+ base.cursor = base.limit - v_12;
+ if (!(base.eq_s_b("TSU")))
+ {
+ break lab14;
+ }
+ }
+ base.bra = base.cursor;
+ if (!base.slice_del())
+ {
+ return false;
+ }
+ base.cursor = base.limit - v_11;
+ break golab13;
+ }
+ base.cursor = base.limit - v_11;
+ if (base.cursor <= base.limit_backward)
+ {
+ break lab12;
+ }
+ base.cursor--;
+ }
+ continue;
+ }
+ base.cursor = base.limit - v_10;
+ break;
+ }
+ }
+ base.cursor = base.limit - v_9;
+ return true;
+ };
+
+ this.stem = /** @return {boolean} */ function() {
+ r_prelude();
+ /** @const */ var /** number */ v_1 = base.cursor;
+ r_mark_regions();
+ base.cursor = v_1;
+ base.limit_backward = base.cursor; base.cursor = base.limit;
+ r_standard_suffix();
+ base.cursor = base.limit_backward;
+ return true;
+ };
+
+ /**@return{string}*/
+ this['stemWord'] = function(/**string*/word) {
+ base.setCurrent(word);
+ this.stem();
+ return base.getCurrent();
+ };
+};
diff --git a/sphinx/search/pt.py b/sphinx/search/pt.py
index bf9b7a3a2f8..a10e4cd2b53 100644
--- a/sphinx/search/pt.py
+++ b/sphinx/search/pt.py
@@ -1,258 +1,21 @@
-"""Portuguese search language: includes the JS Portuguese stemmer."""
+"""Portuguese search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-portuguese_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/portuguese/stop.txt
-de | of, from
-a | the; to, at; her
-o | the; him
-que | who, that
-e | and
-do | de + o
-da | de + a
-em | in
-um | a
-para | for
- | é from SER
-com | with
-não | not, no
-uma | a
-os | the; them
-no | em + o
-se | himself etc
-na | em + a
-por | for
-mais | more
-as | the; them
-dos | de + os
-como | as, like
-mas | but
- | foi from SER
-ao | a + o
-ele | he
-das | de + as
- | tem from TER
-à | a + a
-seu | his
-sua | her
-ou | or
- | ser from SER
-quando | when
-muito | much
- | há from HAV
-nos | em + os; us
-já | already, now
- | está from EST
-eu | I
-também | also
-só | only, just
-pelo | per + o
-pela | per + a
-até | up to
-isso | that
-ela | he
-entre | between
- | era from SER
-depois | after
-sem | without
-mesmo | same
-aos | a + os
- | ter from TER
-seus | his
-quem | whom
-nas | em + as
-me | me
-esse | that
-eles | they
- | estão from EST
-você | you
- | tinha from TER
- | foram from SER
-essa | that
-num | em + um
-nem | nor
-suas | her
-meu | my
-às | a + as
-minha | my
- | têm from TER
-numa | em + uma
-pelos | per + os
-elas | they
- | havia from HAV
- | seja from SER
-qual | which
- | será from SER
-nós | we
- | tenho from TER
-lhe | to him, her
-deles | of them
-essas | those
-esses | those
-pelas | per + as
-este | this
- | fosse from SER
-dele | of him
-
- | other words. There are many contractions such as naquele = em+aquele,
- | mo = me+o, but they are rare.
- | Indefinite article plural forms are also rare.
-
-tu | thou
-te | thee
-vocês | you (plural)
-vos | you
-lhes | to them
-meus | my
-minhas
-teu | thy
-tua
-teus
-tuas
-nosso | our
-nossa
-nossos
-nossas
-
-dela | of her
-delas | of them
-
-esta | this
-estes | these
-estas | these
-aquele | that
-aquela | that
-aqueles | those
-aquelas | those
-isto | this
-aquilo | that
-
- | forms of estar, to be (not including the infinitive):
-estou
-está
-estamos
-estão
-estive
-esteve
-estivemos
-estiveram
-estava
-estávamos
-estavam
-estivera
-estivéramos
-esteja
-estejamos
-estejam
-estivesse
-estivéssemos
-estivessem
-estiver
-estivermos
-estiverem
-
- | forms of haver, to have (not including the infinitive):
-hei
-há
-havemos
-hão
-houve
-houvemos
-houveram
-houvera
-houvéramos
-haja
-hajamos
-hajam
-houvesse
-houvéssemos
-houvessem
-houver
-houvermos
-houverem
-houverei
-houverá
-houveremos
-houverão
-houveria
-houveríamos
-houveriam
-
- | forms of ser, to be (not including the infinitive):
-sou
-somos
-são
-era
-éramos
-eram
-fui
-foi
-fomos
-foram
-fora
-fôramos
-seja
-sejamos
-sejam
-fosse
-fôssemos
-fossem
-for
-formos
-forem
-serei
-será
-seremos
-serão
-seria
-seríamos
-seriam
-
- | forms of ter, to have (not including the infinitive):
-tenho
-tem
-temos
-tém
-tinha
-tínhamos
-tinham
-tive
-teve
-tivemos
-tiveram
-tivera
-tivéramos
-tenha
-tenhamos
-tenham
-tivesse
-tivéssemos
-tivessem
-tiver
-tivermos
-tiverem
-terei
-terá
-teremos
-terão
-teria
-teríamos
-teriam
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.pt import PORTUGUESE_STOPWORDS
class SearchPortuguese(SearchLanguage):
lang = 'pt'
language_name = 'Portuguese'
js_stemmer_rawcode = 'portuguese-stemmer.js'
- stopwords = portuguese_stopwords
+ stopwords = PORTUGUESE_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('portuguese')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/ro.py b/sphinx/search/ro.py
index 0c00486319a..e9d29602f4e 100644
--- a/sphinx/search/ro.py
+++ b/sphinx/search/ro.py
@@ -1,4 +1,4 @@
-"""Romanian search language: includes the JS Romanian stemmer."""
+"""Romanian search language."""
from __future__ import annotations
@@ -11,9 +11,10 @@ class SearchRomanian(SearchLanguage):
lang = 'ro'
language_name = 'Romanian'
js_stemmer_rawcode = 'romanian-stemmer.js'
- stopwords: set[str] = set()
+ stopwords = frozenset()
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('romanian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/ru.py b/sphinx/search/ru.py
index e93046cba94..584b19b9f79 100644
--- a/sphinx/search/ru.py
+++ b/sphinx/search/ru.py
@@ -1,248 +1,21 @@
-"""Russian search language: includes the JS Russian stemmer."""
+"""Russian search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-russian_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/russian/stop.txt
-и | and
-в | in/into
-во | alternative form
-не | not
-что | what/that
-он | he
-на | on/onto
-я | i
-с | from
-со | alternative form
-как | how
-а | milder form of `no' (but)
-то | conjunction and form of `that'
-все | all
-она | she
-так | so, thus
-его | him
-но | but
-да | yes/and
-ты | thou
-к | towards, by
-у | around, chez
-же | intensifier particle
-вы | you
-за | beyond, behind
-бы | conditional/subj. particle
-по | up to, along
-только | only
-ее | her
-мне | to me
-было | it was
-вот | here is/are, particle
-от | away from
-меня | me
-еще | still, yet, more
-нет | no, there isnt/arent
-о | about
-из | out of
-ему | to him
-теперь | now
-когда | when
-даже | even
-ну | so, well
-вдруг | suddenly
-ли | interrogative particle
-если | if
-уже | already, but homonym of `narrower'
-или | or
-ни | neither
-быть | to be
-был | he was
-него | prepositional form of его
-до | up to
-вас | you accusative
-нибудь | indef. suffix preceded by hyphen
-опять | again
-уж | already, but homonym of `adder'
-вам | to you
-сказал | he said
-ведь | particle `after all'
-там | there
-потом | then
-себя | oneself
-ничего | nothing
-ей | to her
-может | usually with `быть' as `maybe'
-они | they
-тут | here
-где | where
-есть | there is/are
-надо | got to, must
-ней | prepositional form of ей
-для | for
-мы | we
-тебя | thee
-их | them, their
-чем | than
-была | she was
-сам | self
-чтоб | in order to
-без | without
-будто | as if
-человек | man, person, one
-чего | genitive form of `what'
-раз | once
-тоже | also
-себе | to oneself
-под | beneath
-жизнь | life
-будет | will be
-ж | short form of intensifer particle `же'
-тогда | then
-кто | who
-этот | this
-говорил | was saying
-того | genitive form of `that'
-потому | for that reason
-этого | genitive form of `this'
-какой | which
-совсем | altogether
-ним | prepositional form of `его', `они'
-здесь | here
-этом | prepositional form of `этот'
-один | one
-почти | almost
-мой | my
-тем | instrumental/dative plural of `тот', `то'
-чтобы | full form of `in order that'
-нее | her (acc.)
-кажется | it seems
-сейчас | now
-были | they were
-куда | where to
-зачем | why
-сказать | to say
-всех | all (acc., gen. preposn. plural)
-никогда | never
-сегодня | today
-можно | possible, one can
-при | by
-наконец | finally
-два | two
-об | alternative form of `о', about
-другой | another
-хоть | even
-после | after
-над | above
-больше | more
-тот | that one (masc.)
-через | across, in
-эти | these
-нас | us
-про | about
-всего | in all, only, of all
-них | prepositional form of `они' (they)
-какая | which, feminine
-много | lots
-разве | interrogative particle
-сказала | she said
-три | three
-эту | this, acc. fem. sing.
-моя | my, feminine
-впрочем | moreover, besides
-хорошо | good
-свою | ones own, acc. fem. sing.
-этой | oblique form of `эта', fem. `this'
-перед | in front of
-иногда | sometimes
-лучше | better
-чуть | a little
-том | preposn. form of `that one'
-нельзя | one must not
-такой | such a one
-им | to them
-более | more
-всегда | always
-конечно | of course
-всю | acc. fem. sing of `all'
-между | between
-
-
- | b: some paradigms
- |
- | personal pronouns
- |
- | я меня мне мной [мною]
- | ты тебя тебе тобой [тобою]
- | он его ему им [него, нему, ним]
- | она ее эи ею [нее, нэи, нею]
- | оно его ему им [него, нему, ним]
- |
- | мы нас нам нами
- | вы вас вам вами
- | они их им ими [них, ним, ними]
- |
- | себя себе собой [собою]
- |
- | demonstrative pronouns: этот (this), тот (that)
- |
- | этот эта это эти
- | этого эты это эти
- | этого этой этого этих
- | этому этой этому этим
- | этим этой этим [этою] этими
- | этом этой этом этих
- |
- | тот та то те
- | того ту то те
- | того той того тех
- | тому той тому тем
- | тем той тем [тою] теми
- | том той том тех
- |
- | determinative pronouns
- |
- | (a) весь (all)
- |
- | весь вся все все
- | всего всю все все
- | всего всей всего всех
- | всему всей всему всем
- | всем всей всем [всею] всеми
- | всем всей всем всех
- |
- | (b) сам (himself etc)
- |
- | сам сама само сами
- | самого саму само самих
- | самого самой самого самих
- | самому самой самому самим
- | самим самой самим [самою] самими
- | самом самой самом самих
- |
- | stems of verbs `to be', `to have', `to do' and modal
- |
- | быть бы буд быв есть суть
- | име
- | дел
- | мог мож мочь
- | уме
- | хоч хот
- | долж
- | можн
- | нужн
- | нельзя
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.ru import RUSSIAN_STOPWORDS
class SearchRussian(SearchLanguage):
lang = 'ru'
language_name = 'Russian'
js_stemmer_rawcode = 'russian-stemmer.js'
- stopwords = russian_stopwords
+ stopwords = RUSSIAN_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('russian')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/sv.py b/sphinx/search/sv.py
index b4fa1bd06a2..8b138ebdf80 100644
--- a/sphinx/search/sv.py
+++ b/sphinx/search/sv.py
@@ -1,137 +1,21 @@
-"""Swedish search language: includes the JS Swedish stemmer."""
+"""Swedish search language."""
from __future__ import annotations
import snowballstemmer
-from sphinx.search import SearchLanguage, parse_stop_word
-
-swedish_stopwords = parse_stop_word("""
-| source: https://snowball.tartarus.org/algorithms/swedish/stop.txt
-och | and
-det | it, this/that
-att | to (with infinitive)
-i | in, at
-en | a
-jag | I
-hon | she
-som | who, that
-han | he
-på | on
-den | it, this/that
-med | with
-var | where, each
-sig | him(self) etc
-för | for
-så | so (also: seed)
-till | to
-är | is
-men | but
-ett | a
-om | if; around, about
-hade | had
-de | they, these/those
-av | of
-icke | not, no
-mig | me
-du | you
-henne | her
-då | then, when
-sin | his
-nu | now
-har | have
-inte | inte någon = no one
-hans | his
-honom | him
-skulle | 'sake'
-hennes | her
-där | there
-min | my
-man | one (pronoun)
-ej | nor
-vid | at, by, on (also: vast)
-kunde | could
-något | some etc
-från | from, off
-ut | out
-när | when
-efter | after, behind
-upp | up
-vi | we
-dem | them
-vara | be
-vad | what
-över | over
-än | than
-dig | you
-kan | can
-sina | his
-här | here
-ha | have
-mot | towards
-alla | all
-under | under (also: wonder)
-någon | some etc
-eller | or (else)
-allt | all
-mycket | much
-sedan | since
-ju | why
-denna | this/that
-själv | myself, yourself etc
-detta | this/that
-åt | to
-utan | without
-varit | was
-hur | how
-ingen | no
-mitt | my
-ni | you
-bli | to be, become
-blev | from bli
-oss | us
-din | thy
-dessa | these/those
-några | some etc
-deras | their
-blir | from bli
-mina | my
-samma | (the) same
-vilken | who, that
-er | you, your
-sådan | such a
-vår | our
-blivit | from bli
-dess | its
-inom | within
-mellan | between
-sådant | such a
-varför | why
-varje | each
-vilka | who, that
-ditt | thy
-vem | who
-vilket | who, that
-sitta | his
-sådana | such a
-vart | each
-dina | thy
-vars | whose
-vårt | our
-våra | our
-ert | your
-era | your
-vilkas | whose
-""")
+from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.sv import SWEDISH_STOPWORDS
class SearchSwedish(SearchLanguage):
lang = 'sv'
language_name = 'Swedish'
js_stemmer_rawcode = 'swedish-stemmer.js'
- stopwords = swedish_stopwords
+ stopwords = SWEDISH_STOPWORDS
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('swedish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/tr.py b/sphinx/search/tr.py
index b999e1d96d8..40131f0e372 100644
--- a/sphinx/search/tr.py
+++ b/sphinx/search/tr.py
@@ -1,4 +1,4 @@
-"""Turkish search language: includes the JS Turkish stemmer."""
+"""Turkish search language."""
from __future__ import annotations
@@ -11,9 +11,10 @@ class SearchTurkish(SearchLanguage):
lang = 'tr'
language_name = 'Turkish'
js_stemmer_rawcode = 'turkish-stemmer.js'
- stopwords: set[str] = set()
+ stopwords = frozenset()
- def init(self, options: dict[str, str]) -> None:
+ def __init__(self, options: dict[str, str]) -> None:
+ super().__init__(options)
self.stemmer = snowballstemmer.stemmer('turkish')
def stem(self, word: str) -> str:
diff --git a/sphinx/search/zh.py b/sphinx/search/zh.py
index 0f7e4dfd5f9..464e1e1fd47 100644
--- a/sphinx/search/zh.py
+++ b/sphinx/search/zh.py
@@ -9,6 +9,7 @@
import snowballstemmer
from sphinx.search import SearchLanguage
+from sphinx.search._stopwords.en import ENGLISH_STOPWORDS
if TYPE_CHECKING:
from collections.abc import Iterator
@@ -32,219 +33,19 @@ def cut_for_search(sentence: str, HMM: bool = True) -> Iterator[str]:
)
del jieba
-english_stopwords = {
- 'a', 'and', 'are', 'as', 'at',
- 'be', 'but', 'by',
- 'for',
- 'if', 'in', 'into', 'is', 'it',
- 'near', 'no', 'not',
- 'of', 'on', 'or',
- 'such',
- 'that', 'the', 'their', 'then', 'there', 'these', 'they', 'this', 'to',
- 'was', 'will', 'with',
-} # fmt: skip
-
-js_porter_stemmer = """
-/**
- * Porter Stemmer
- */
-var Stemmer = function() {
-
- var step2list = {
- ational: 'ate',
- tional: 'tion',
- enci: 'ence',
- anci: 'ance',
- izer: 'ize',
- bli: 'ble',
- alli: 'al',
- entli: 'ent',
- eli: 'e',
- ousli: 'ous',
- ization: 'ize',
- ation: 'ate',
- ator: 'ate',
- alism: 'al',
- iveness: 'ive',
- fulness: 'ful',
- ousness: 'ous',
- aliti: 'al',
- iviti: 'ive',
- biliti: 'ble',
- logi: 'log'
- };
-
- var step3list = {
- icate: 'ic',
- ative: '',
- alize: 'al',
- iciti: 'ic',
- ical: 'ic',
- ful: '',
- ness: ''
- };
-
- var c = "[^aeiou]"; // consonant
- var v = "[aeiouy]"; // vowel
- var C = c + "[^aeiouy]*"; // consonant sequence
- var V = v + "[aeiou]*"; // vowel sequence
-
- var mgr0 = "^(" + C + ")?" + V + C; // [C]VC... is m>0
- var meq1 = "^(" + C + ")?" + V + C + "(" + V + ")?$"; // [C]VC[V] is m=1
- var mgr1 = "^(" + C + ")?" + V + C + V + C; // [C]VCVC... is m>1
- var s_v = "^(" + C + ")?" + v; // vowel in stem
-
- this.stemWord = function (w) {
- var stem;
- var suffix;
- var firstch;
- var origword = w;
-
- if (w.length < 3)
- return w;
-
- var re;
- var re2;
- var re3;
- var re4;
-
- firstch = w.substr(0,1);
- if (firstch == "y")
- w = firstch.toUpperCase() + w.substr(1);
-
- // Step 1a
- re = /^(.+?)(ss|i)es$/;
- re2 = /^(.+?)([^s])s$/;
-
- if (re.test(w))
- w = w.replace(re,"$1$2");
- else if (re2.test(w))
- w = w.replace(re2,"$1$2");
-
- // Step 1b
- re = /^(.+?)eed$/;
- re2 = /^(.+?)(ed|ing)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- re = new RegExp(mgr0);
- if (re.test(fp[1])) {
- re = /.$/;
- w = w.replace(re,"");
- }
- }
- else if (re2.test(w)) {
- var fp = re2.exec(w);
- stem = fp[1];
- re2 = new RegExp(s_v);
- if (re2.test(stem)) {
- w = stem;
- re2 = /(at|bl|iz)$/;
- re3 = new RegExp("([^aeiouylsz])\\\\1$");
- re4 = new RegExp("^" + C + v + "[^aeiouwxy]$");
- if (re2.test(w))
- w = w + "e";
- else if (re3.test(w)) {
- re = /.$/;
- w = w.replace(re,"");
- }
- else if (re4.test(w))
- w = w + "e";
- }
- }
-
- // Step 1c
- re = /^(.+?)y$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(s_v);
- if (re.test(stem))
- w = stem + "i";
- }
-
- // Step 2
- re = /^(.+?)(ational|tional|enci|anci|izer|bli|alli|entli|eli|ousli|\
-ization|ation|ator|alism|iveness|fulness|ousness|aliti|iviti|biliti|logi)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- suffix = fp[2];
- re = new RegExp(mgr0);
- if (re.test(stem))
- w = stem + step2list[suffix];
- }
-
- // Step 3
- re = /^(.+?)(icate|ative|alize|iciti|ical|ful|ness)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- suffix = fp[2];
- re = new RegExp(mgr0);
- if (re.test(stem))
- w = stem + step3list[suffix];
- }
-
- // Step 4
- re = /^(.+?)(al|ance|ence|er|ic|able|ible|ant|ement|ment|ent|ou|ism|ate|\
-iti|ous|ive|ize)$/;
- re2 = /^(.+?)(s|t)(ion)$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(mgr1);
- if (re.test(stem))
- w = stem;
- }
- else if (re2.test(w)) {
- var fp = re2.exec(w);
- stem = fp[1] + fp[2];
- re2 = new RegExp(mgr1);
- if (re2.test(stem))
- w = stem;
- }
-
- // Step 5
- re = /^(.+?)e$/;
- if (re.test(w)) {
- var fp = re.exec(w);
- stem = fp[1];
- re = new RegExp(mgr1);
- re2 = new RegExp(meq1);
- re3 = new RegExp("^" + C + v + "[^aeiouwxy]$");
- if (re.test(stem) || (re2.test(stem) && !(re3.test(stem))))
- w = stem;
- }
- re = /ll$/;
- re2 = new RegExp(mgr1);
- if (re.test(w) && re2.test(w)) {
- re = /.$/;
- w = w.replace(re,"");
- }
-
- // and turn initial Y back to y
- if (firstch == "y")
- w = firstch.toLowerCase() + w.substr(1);
- return w;
- }
-}
-"""
-
class SearchChinese(SearchLanguage):
"""Chinese search implementation"""
lang = 'zh'
language_name = 'Chinese'
- js_stemmer_code = js_porter_stemmer
- stopwords = english_stopwords
+ js_stemmer_rawcode = 'english-stemmer.js'
+ stopwords = ENGLISH_STOPWORDS
latin1_letters = re.compile(r'[a-zA-Z0-9_]+')
def __init__(self, options: dict[str, str]) -> None:
super().__init__(options)
self.latin_terms: set[str] = set()
-
- def init(self, options: dict[str, str]) -> None:
dict_path = options.get('dict', JIEBA_DEFAULT_DICT)
if dict_path and Path(dict_path).is_file():
jieba_load_userdict(str(dict_path))
diff --git a/sphinx/templates/latex/latex.tex.jinja b/sphinx/templates/latex/latex.tex.jinja
index deb030504db..4ba2c46a793 100644
--- a/sphinx/templates/latex/latex.tex.jinja
+++ b/sphinx/templates/latex/latex.tex.jinja
@@ -16,6 +16,7 @@
\ifdefined\pdfimageresolution
\pdfimageresolution= \numexpr \dimexpr1in\relax/\sphinxpxdimen\relax
\fi
+\newdimen\sphinxremdimen\sphinxremdimen = <%= pointsize%>
%% let collapsible pdf bookmarks panel have high depth per default
\PassOptionsToPackage{bookmarksdepth=5}{hyperref}
<% if use_xindy -%>
diff --git a/sphinx/templates/latex/tabulary.tex.jinja b/sphinx/templates/latex/tabulary.tex.jinja
index 6ebcec6d264..7ba065ed1a7 100644
--- a/sphinx/templates/latex/tabulary.tex.jinja
+++ b/sphinx/templates/latex/tabulary.tex.jinja
@@ -21,6 +21,9 @@
<% if 'nocolorrows' in table.styles -%>
\sphinxthistablewithnocolorrowsstyle
<% endif -%>
+<% if table.is_nested -%>
+\sphinxthistabularywithnohlinesifinlongtable
+<% endif -%>
<% if table.align -%>
<%- if table.align in ('center', 'default') -%>
\centering
diff --git a/sphinx/testing/fixtures.py b/sphinx/testing/fixtures.py
index ec143faccf4..255bc589dee 100644
--- a/sphinx/testing/fixtures.py
+++ b/sphinx/testing/fixtures.py
@@ -151,7 +151,7 @@ def test_params(request: Any) -> dict[str, Any]:
def app(
test_params: dict[str, Any],
app_params: _app_params,
- make_app: Callable[[], SphinxTestApp],
+ make_app: Callable[..., SphinxTestApp],
shared_result: SharedResult,
) -> Iterator[SphinxTestApp]:
"""Provides the 'sphinx.application.Sphinx' object"""
@@ -183,7 +183,7 @@ def warning(app: SphinxTestApp) -> StringIO:
@pytest.fixture
-def make_app(test_params: dict[str, Any]) -> Iterator[Callable[[], SphinxTestApp]]:
+def make_app(test_params: dict[str, Any]) -> Iterator[Callable[..., SphinxTestApp]]:
"""Provides make_app function to initialize SphinxTestApp instance.
if you want to initialize 'app' in your test function. please use this
instead of using SphinxTestApp class directory.
diff --git a/sphinx/testing/restructuredtext.py b/sphinx/testing/restructuredtext.py
index b04b61a4021..548ef5f27b4 100644
--- a/sphinx/testing/restructuredtext.py
+++ b/sphinx/testing/restructuredtext.py
@@ -2,11 +2,8 @@
from typing import TYPE_CHECKING
-from docutils.core import publish_doctree
-
-from sphinx.io import SphinxStandaloneReader
from sphinx.parsers import RSTParser
-from sphinx.util.docutils import sphinx_domains
+from sphinx.util.docutils import _parse_str_to_doctree
if TYPE_CHECKING:
from docutils import nodes
@@ -15,27 +12,35 @@
def parse(app: Sphinx, text: str, docname: str = 'index') -> nodes.document:
- """Parse a string as reStructuredText with Sphinx application."""
+ """Parse a string as reStructuredText with Sphinx."""
+ config = app.config
env = app.env
+ registry = app.registry
+ srcdir = app.srcdir
+
+ # Get settings
+ settings_overrides = {
+ 'env': env,
+ 'gettext_compact': True,
+ 'input_encoding': 'utf-8',
+ 'output_encoding': 'unicode',
+ 'traceback': True,
+ }
+
+ # Create parser
+ parser = RSTParser()
+ parser._config = config
+ parser._env = env
+
+ env.current_document.docname = docname
try:
- app.env.current_document.docname = docname
- reader = SphinxStandaloneReader()
- reader.setup(app)
- parser = RSTParser()
- parser.set_application(app)
- with sphinx_domains(env):
- return publish_doctree(
- text,
- str(app.srcdir / f'{docname}.rst'),
- reader=reader,
- parser=parser,
- settings_overrides={
- 'env': env,
- 'gettext_compact': True,
- 'input_encoding': 'utf-8',
- 'output_encoding': 'unicode',
- 'traceback': True,
- },
- )
+ return _parse_str_to_doctree(
+ text,
+ filename=srcdir / f'{docname}.rst',
+ default_settings=settings_overrides,
+ env=env,
+ parser=parser,
+ transforms=registry.get_transforms(),
+ )
finally:
env.current_document.docname = ''
diff --git a/sphinx/testing/util.py b/sphinx/testing/util.py
index a7244bb32d5..e59b7248e6d 100644
--- a/sphinx/testing/util.py
+++ b/sphinx/testing/util.py
@@ -45,7 +45,7 @@ def assert_node(node: Node, cls: Any = None, xpath: str = '', **kwargs: Any) ->
assert_node(node[0], cls[1:], xpath=xpath + '[0]', **kwargs)
elif isinstance(cls, tuple):
assert (
- isinstance(node, list | nodes.Element)
+ isinstance(node, (list, nodes.Element))
), f'The node{xpath} does not have any items' # fmt: skip
assert (
len(node) == len(cls)
diff --git a/sphinx/texinputs/sphinx.sty b/sphinx/texinputs/sphinx.sty
index 8837485c5f7..109c07d1a9a 100644
--- a/sphinx/texinputs/sphinx.sty
+++ b/sphinx/texinputs/sphinx.sty
@@ -9,7 +9,7 @@
% by the Sphinx LaTeX writer.
\NeedsTeXFormat{LaTeX2e}[1995/12/01]
-\ProvidesPackage{sphinx}[2024/11/23 v8.2.0 Sphinx LaTeX package (sphinx-doc)]
+\ProvidesPackage{sphinx}[2025/08/03 v8.3.0 Sphinx LaTeX package (sphinx-doc)]
% provides \ltx@ifundefined
% (many packages load ltxcmds: graphicx does for pdftex and lualatex but
@@ -67,7 +67,7 @@
Footnote rendering may have had problems, due to extra package or
document class; check latex log for instructions}%
\@namedef{sphinx_buildwarning_badiconpackage}{%
- You have set iconpackage=\spx@opt@iconpackage, but this LaTeX package
+ You have set iconpackage=\spx@usr@iconpackage, but this LaTeX package
is not found}%
%% OPTION HANDLING
@@ -672,7 +672,7 @@
% defaults for them remain not to have specific colour.
%
% 7.4.0 adds keys for admonition titles: for background and foreground colors,
-% and for icons (whose defaults are picked from Free Fontawesome 5).
+% and for icons.
\def\spx@tempa#1{%
\expandafter\spx@tempb
\csname if#1withshadowcolor\expandafter\endcsname
@@ -869,80 +869,125 @@
}
% 7.4.0 Support for icons in admonition titles
-% We try to
-% - get Sphinx PDF builds to process fine in absence of fontawesome5
-% - use fontawesome5 if present, but not if user prefers another package
-% - provide an interface for using other LaTeX code for icons
-% - provide an interface for using some other package than fontawesome5
-% Indeed we can't load fontawesome5 unconditionally even if available,
-% as it proves incompatible with fontawesome package.
-% We thus must delay its loading.
-\IfFileExists{fontawesome5.sty}{%
- \DeclareStringOption[fontawesome5]{iconpackage}%
+%
+% We let Sphinx use, listed in order of priority:
+% - a user-specified package via iconpackage key,
+% - fontawesome7 (supported since 8.3.0),
+% - fontawesome6 (supported since 8.3.0),
+% - fontawesome5,
+% - fontawesome,
+% - or nothing at all, and then icons are not drawn.
+% To allow a user-specified package, an extra interface
+% is provided for specifying the icon-drawing LaTeX code.
+%
+% We can't load fontawesome7 (or 6 or 5) unconditionally even if available, as it
+% is incompatible with fontawesome package which may be preferred by user. We
+% thus must delay loading the package to at begin document, and for now can
+% only set the default value of iconpackage key.
+\IfFileExists{fontawesome7.sty}{%
+ \DeclareStringOption[fontawesome7]{iconpackage}%
}%
{%
- \IfFileExists{fontawesome.sty}
- {\DeclareStringOption[fontawesome]{iconpackage}}
- {\DeclareStringOption[none]{iconpackage}}%
+ \IfFileExists{fontawesome6.sty}{%
+ \DeclareStringOption[fontawesome6]{iconpackage}%
+ }%
+ {%
+ \IfFileExists{fontawesome5.sty}{%
+ \DeclareStringOption[fontawesome5]{iconpackage}%
+ }%
+ {%
+ \IfFileExists{fontawesome.sty}
+ {\DeclareStringOption[fontawesome]{iconpackage}}
+ {\DeclareStringOption[none]{iconpackage}}%
+ }%
+ }%
}%
-\newcommand\spx@faIcon[2][]{}%
-% The above \spx@faIcon which gobbles one mandatory and one optional
-% argument is put into use only if both fontawesome5 and fontawesome
-% LaTeX packages are not available, as part of the defaults for the
-% div.*_title-icon keys (these keys can be redefined via the sphinxsetup
-% interface).
-%
-\def\spxstring@fontawesome{fontawesome}
-\def\spxstring@fontawesomev{fontawesome5}
+% Unfortunately icon macro names are not the same across fontawesome,
+% fontawesome5, fontawesome6 and fontawesome7 LaTeX packages.
+% At 8.3.0 we refactor the
+% icon support code into something easier to maintain in future in case of a
+% fontawesome8, etc...
+%
+% TODO: Handle spaces possibly caused by bad user usage of iconpackage key?
+% This would need to check how LaTeX handle spaces in package name
+% in \RequirePackage command. Things in this area may have changed
+% recently (2025/04).
\AtBeginDocument{%
\ifx\spx@opt@iconpackage\spxstring@none
\else
\IfFileExists{\spx@opt@iconpackage.sty}
- {\RequirePackage{\spx@opt@iconpackage}%
- \ifx\spx@opt@iconpackage\spxstring@fontawesomev
- \renewcommand\spx@faIcon{\faIcon}%
- \else
- \ifx\spx@opt@iconpackage\spxstring@fontawesome
- \renewcommand\spx@faIcon[2][]{\faicon{##2}}%
- % The \ifdefined's are a bit silly because we know that
- % fontawesome.sty does not provide it, but perhaps
- % there can be some new release of that package?
- \ifdefined\faicon@lightbulb\else
- \let\faicon@lightbulb\faLightbulbO
- \fi
- \ifdefined\faicon@radiation\else
- \let\faicon@radiation\faBolt
- \fi
- \ifdefined\faicon@pen\else
- \let\faicon@pen\faPencil
- \fi
- % if neither has been required, \spx@faIcon will simply swallow
- % its argument and it is up to user
- % to set the various div.*_title-icon keys appropriately.
- \fi\fi %
- }%
+ {\RequirePackage{\spx@opt@iconpackage}}%
{%
- \sphinxbuildwarning{badiconpackage}%
- \PackageWarningNoLine{sphinx}{%
- You have set iconpackage=\spx@opt@iconpackage\MessageBreak
- But \spx@opt@iconpackage.sty is not found by LaTeX}
+ \let\spx@usr@iconpackage\spx@opt@iconpackage
+ \sphinxbuildwarning{badiconpackage}%
+ \PackageWarningNoLine{sphinx}{%
+ You have set iconpackage=\spx@usr@iconpackage\MessageBreak
+ But \spx@usr@iconpackage.sty is not found by LaTeX}
+ \let\spx@opt@iconpackage\spxstring@none
}%
\fi
}
+% Icon defaults depending on package used.
+% Attention! no extra spaces (to align here) when using \@namedef!
+
+\@namedef{spx@fontawesome6@note}{\faIcon{circle-info}}
+\@namedef{spx@fontawesome6@hint}{\faIcon[regular]{lightbulb}}
+\@namedef{spx@fontawesome6@tip}{\faIcon[regular]{lightbulb}}
+\@namedef{spx@fontawesome6@seealso}{\faIcon{share}}
+\@namedef{spx@fontawesome6@todo}{\faIcon{pen}}
+\@namedef{spx@fontawesome6@important}{\faIcon{circle-pause}}
+\@namedef{spx@fontawesome6@caution}{\faIcon{radiation}}
+\@namedef{spx@fontawesome6@warning}{\faIcon{triangle-exclamation}}
+\@namedef{spx@fontawesome6@attention}{\faIcon{triangle-exclamation}}
+\@namedef{spx@fontawesome6@danger}{\faIcon{radiation}}
+\@namedef{spx@fontawesome6@error}{\faIcon{circle-xmark}}
+
+% Turns out that fontawesome7 commands for our target icons are
+% same as with fontawesome6. So make a loop to spare a few lines here.
+\@for\x:=note,hint,tip,seealso,todo,important,caution,warning,attention,%
+ danger,error%
+\do{\expandafter\let\csname spx@fontawesome7@\x\expandafter\endcsname
+ \csname spx@fontawesome6@\x\endcsname}
+
+\@namedef{spx@fontawesome5@note}{\faIcon{info-circle}}
+\@namedef{spx@fontawesome5@hint}{\faIcon[regular]{lightbulb}}
+\@namedef{spx@fontawesome5@tip}{\faIcon[regular]{lightbulb}}
+\@namedef{spx@fontawesome5@seealso}{\faIcon{share}}
+\@namedef{spx@fontawesome5@todo}{\faIcon{pen}}
+\@namedef{spx@fontawesome5@important}{\faIcon{pause-circle}}
+\@namedef{spx@fontawesome5@caution}{\faIcon{radiation}}
+\@namedef{spx@fontawesome5@warning}{\faIcon{exclamation-triangle}}
+\@namedef{spx@fontawesome5@attention}{\faIcon{exclamation-triangle}}
+\@namedef{spx@fontawesome5@danger}{\faIcon{radiation}}
+\@namedef{spx@fontawesome5@error}{\faIcon{times-circle}}
+
+\def\spx@fontawesome@note {\faicon{info-circle}}
+\def\spx@fontawesome@hint {\faicon{lightbulb-o}}
+\def\spx@fontawesome@tip {\faicon{lightbulb-o}}
+\def\spx@fontawesome@seealso {\faicon{share}}
+\def\spx@fontawesome@todo {\faicon{pencil}}
+\def\spx@fontawesome@important{\faicon{pause-circle}}
+\def\spx@fontawesome@caution {\faicon{bolt}}
+\def\spx@fontawesome@warning {\faicon{exclamation-triangle}}
+\def\spx@fontawesome@attention{\faicon{exclamation-triangle}}
+\def\spx@fontawesome@danger {\faicon{bolt}}
+\def\spx@fontawesome@error {\faicon{times-circle}}
+
+% \spx@none@{note,hint,...} left undefined, the \@nameuse will be \relax
+\def\spx@titleicon@default#1{\@nameuse{spx@\spx@opt@iconpackage @#1}}
\setkeys{sphinx}{
-% Icon defaults.
- div.note_title-icon = \spx@faIcon{info-circle},
- div.hint_title-icon = \spx@faIcon[regular]{lightbulb},
- div.tip_title-icon = \spx@faIcon[regular]{lightbulb},
- div.seealso_title-icon = \spx@faIcon{share},
- div.todo_title-icon = \spx@faIcon{pen},
- div.important_title-icon = \spx@faIcon{pause-circle},
- div.caution_title-icon = \spx@faIcon{radiation},
- div.warning_title-icon = \spx@faIcon{exclamation-triangle},
- div.attention_title-icon = \spx@faIcon{exclamation-triangle},
- div.danger_title-icon = \spx@faIcon{radiation},
- div.error_title-icon = \spx@faIcon{times-circle},
+ div.note_title-icon = \spx@titleicon@default{note},
+ div.hint_title-icon = \spx@titleicon@default{hint},
+ div.tip_title-icon = \spx@titleicon@default{tip},
+ div.seealso_title-icon = \spx@titleicon@default{seealso},
+ div.todo_title-icon = \spx@titleicon@default{todo},
+ div.important_title-icon = \spx@titleicon@default{important},
+ div.caution_title-icon = \spx@titleicon@default{caution},
+ div.warning_title-icon = \spx@titleicon@default{warning},
+ div.attention_title-icon = \spx@titleicon@default{attention},
+ div.danger_title-icon = \spx@titleicon@default{danger},
+ div.error_title-icon = \spx@titleicon@default{error},
% MEMO: the new at 8.1.0 defaults for contents/topic/sidebar directives
% use no icons, they use \sphinxdotitlerow which detects automatically
% whether title-icon key has been set or not.
@@ -1192,5 +1237,25 @@
% FIXME: this line should be dropped, as "9" is default anyhow.
\ifdefined\pdfcompresslevel\pdfcompresslevel = 9 \fi
-
+%%% SUPPORT FOR CSS3 EXTRA LENGTH UNITS
+% cf rstdim_to_latexdim in latex.py
+%
+\def\sphinxchdimen{\dimexpr\fontcharwd\font`0\relax}
+% TODO: decide if we want rather \textwidth/\textheight.
+\newdimen\sphinxvwdimen
+ \sphinxvwdimen=\dimexpr0.01\paperwidth\relax
+\newdimen\sphinxvhdimen
+ \sphinxvhdimen=\dimexpr0.01\paperheight\relax
+\newdimen\sphinxvmindimen
+ \sphinxvmindimen=\dimexpr
+ \ifdim\paperwidth<\paperheight\sphinxvwdimen\else\sphinxvhdimen\fi
+ \relax
+\newdimen\sphinxvmaxdimen
+ \sphinxvmaxdimen=\dimexpr
+ \ifdim\paperwidth<\paperheight\sphinxvhdimen\else\sphinxvwdimen\fi
+ \relax
+\newdimen\sphinxQdimen
+ \sphinxQdimen=0.25mm
+% MEMO: \sphinxremdimen is defined in the template as it needs
+% the config variable pointsize.
\endinput
diff --git a/sphinx/texinputs/sphinxlatexadmonitions.sty b/sphinx/texinputs/sphinxlatexadmonitions.sty
index 0519903591b..6b2d45779ae 100644
--- a/sphinx/texinputs/sphinxlatexadmonitions.sty
+++ b/sphinx/texinputs/sphinxlatexadmonitions.sty
@@ -1,7 +1,7 @@
%% NOTICES AND ADMONITIONS
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatexadmonitions}[2024/10/11 v8.1.1 admonitions]
+\ProvidesPackage{sphinxlatexadmonitions}[2025/10/24 v8.3.0 admonitions]
% Provides support for this output mark-up from Sphinx latex writer:
%
@@ -53,6 +53,7 @@
% - \spx@boxes@fcolorbox@setup from sphinxpackageboxes.sty
%
\RequirePackage{framed}
+\RequirePackage{needspace}
% Those are required either before or after by sphinx.sty anyhow, but for
% clarity we list them here:
\RequirePackage{sphinxlatexgraphics}
@@ -107,7 +108,12 @@
%
% Code adapted from framed.sty's "snugshade" environment.
% Nesting works (inner frames do not allow page breaks).
+%
+% At 8.3.0, avoid admonition title getting separated from contents at a
+% page break.
+\newcommand\sphinxheavyboxneedspacecommand{\needspace{5\baselineskip}}
\newenvironment{sphinxheavybox}{\par
+ \sphinxheavyboxneedspacecommand
% (MEMO: it is not a problem here if there is no sphinxShadowColor,
% as it used only if set)
\spx@boxes@fcolorbox@setup{\spx@noticetype}%
@@ -342,8 +348,7 @@
\textcolor{sphinx#1TtlFgColor}{%
\@nameuse{sphinx#1TtlIcon}%
% The next macro is located here for legacy reasons of earlier
- % functioning of \spx@faIcon. When fontawesome{5,}.sty both
- % are unavailable, it (formerly) gobbled this next macro.
+ % functioning of sphinx.sty now removed \spx@faIcon macro.
% We leave it here now although it could be moved to after
% the closing brace.
\sphinxtitlerowaftericonspacecmd
diff --git a/sphinx/texinputs/sphinxlatexliterals.sty b/sphinx/texinputs/sphinxlatexliterals.sty
index 11991d9c3e8..8b0036b464b 100644
--- a/sphinx/texinputs/sphinxlatexliterals.sty
+++ b/sphinx/texinputs/sphinxlatexliterals.sty
@@ -1,7 +1,7 @@
%% LITERAL BLOCKS
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatexliterals}[2024/07/01 v7.4.0 code-blocks and parsed literals]
+\ProvidesPackage{sphinxlatexliterals}[2025/08/06 v8.3.0 code-blocks and parsed literals]
% Provides support for this output mark-up from Sphinx latex writer:
%
@@ -241,6 +241,8 @@
% _, }, ^, &, >, -, ~, and \: stay at end of broken line.
% Use of \textquotesingle for straight quote.
% FIXME: convert this to package options ?
+% MEMO: "beforelist" and "afterlist" hold no relation with lists,
+% they are just "\do lists" in the inherited TeX sense.
\newcommand*\sphinxbreaksbeforelist {%
\do\PYGZob\{\do\PYGZlt\<\do\PYGZsh\#\do\PYGZpc\%% {, <, #, %,
\do\PYGZdl\$\do\PYGZdq\"% $, "
@@ -278,6 +280,8 @@
\catcode`##1\active}%
\sphinxbreaksafteractivelist
\lccode`\~`\~
+ % visit_desc_name will insert non TeX-escaped ~ in the source
+ \let~\spx@verbatim@space
}
% If the linebreak is at a space, the latter will be displayed as visible
@@ -962,7 +966,13 @@
\let\sphinxhyphen\sphinxhyphenininlineliteral
\ifspx@opt@inlineliteralwraps
% break at . , ; ? ! /
+ % and also at ~ which will be handled like are spaces in verbatim
\sphinxbreaksviaactive
+ % but for this we need to set this box which is empty by default:
+ % MEMO: it looks suboptimal to redo this each time but this is
+ % to obey a \sphinxsetup via raw LaTeX to set verbatimvisiblespace,
+ % a possibility which however will be used by 0% of Sphinx users...
+ \sbox\sphinxvisiblespacebox {\spx@opt@verbatimvisiblespace}%
% break also at \
\setbox8=\hbox{\textbackslash}%
\def\sphinx@textbackslash{\copy8}%
diff --git a/sphinx/texinputs/sphinxlatexobjects.sty b/sphinx/texinputs/sphinxlatexobjects.sty
index 1147a016227..2a05dd6de8c 100644
--- a/sphinx/texinputs/sphinxlatexobjects.sty
+++ b/sphinx/texinputs/sphinxlatexobjects.sty
@@ -1,7 +1,7 @@
%% MODULE RELEASE DATA AND OBJECT DESCRIPTIONS
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatexobjects}[2025/02/11 documentation environments]
+\ProvidesPackage{sphinxlatexobjects}[2025/06/06 documentation environments]
% Provides support for this output mark-up from Sphinx latex writer:
%
@@ -155,20 +155,23 @@
\pysigadjustitemsep
}
\newcommand{\pysiglinewithargsret}[3]{%
- % as #1 may contain a footnote using \label we need to make \label
- % a no-op here to avoid LaTeX complaining about duplicates
-\let\spx@label\label\let\label\@gobble
- \settowidth{\py@argswidth}{#1\pysigarglistopen}%
-\let\label\spx@label
+ % #1 may contain a footnote (especially with latex_show_urls='footnote'
+ % and some intersphinx added hyperlinking). Here we want to measure
+ % a width but not typeset such a footnote (else #13619).
+ % Miraculously a sphinxpackagefootnote.sty tabulary compatibility
+ % layer employing the amsmath \ifmeasuring@ can be used here to let
+ % a footnote influence the actual width up to opening brace but not
+ % actually get typeset at this stage...
+ % MEMO: "argswidth" is misleading here, this code measures the name
+ % not the arguments.
+ \settowidth{\py@argswidth}{\measuring@true#1\pysigarglistopen}%
\py@argswidth=\dimexpr\linewidth+\labelwidth-\py@argswidth\relax\relax
\item[{#1\pysigarglistopen\py@sigparams{#2}{#3}\strut}]
\pysigadjustitemsep
}
-\newcommand{\pysiglinewithargsretwithtypelist}[4]{
-% #1 = name, #2 = typelist, #3 = arglist, #4 = retann
-\let\spx@label\label\let\label\@gobble
- \settowidth{\py@argswidth}{#1\pysigtypelistopen}%
-\let\label\spx@label
+\newcommand{\pysiglinewithargsretwithtypelist}[4]{%
+ % same comment as in \pysiglinewithargsret
+ \settowidth{\py@argswidth}{\measuring@true#1\pysigtypelistopen}%
\py@argswidth=\dimexpr\linewidth+\labelwidth-\py@argswidth\relax\relax
\item[{#1\pysigtypelistopen\py@sigparamswithtypelist{#2}{#3}{#4}\strut}]
\pysigadjustitemsep
@@ -244,9 +247,8 @@
\newcommand{\pysigwithonelineperargwithtypelist}[4]{
% #1 = name, #2 = typelist, #3 = arglist, #4 = retann
% render the type parameters list on one line, but each argument is rendered on its own line
-\let\spx@label\label\let\label\@gobble
- \settowidth{\py@argswidth}{#1\pysigtypelistopen}%
-\let\label\spx@label
+ % for \measuring@true see comment in \pysiglinewithargsret
+ \settowidth{\py@argswidth}{\measuring@true#1\pysigtypelistopen}%
\py@argswidth=\dimexpr\linewidth+\labelwidth-\py@argswidth\relax\relax
\item[{#1\pysigtypelistopen\parbox[t]{\py@argswidth}{%
\raggedright #2\pysigtypelistclose\pysigarglistopen\strut}\strut}]
diff --git a/sphinx/texinputs/sphinxlatexstyletext.sty b/sphinx/texinputs/sphinxlatexstyletext.sty
index d083cd96a83..6c80ce64b43 100644
--- a/sphinx/texinputs/sphinxlatexstyletext.sty
+++ b/sphinx/texinputs/sphinxlatexstyletext.sty
@@ -1,7 +1,7 @@
%% TEXT STYLING
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatexstyletext}[2024/07/28 v8.1.0 text styling]
+\ProvidesPackage{sphinxlatexstyletext}[2025/05/24 v8.3.0 text styling]
% 7.4.0 has moved all that is related to admonitions to sphinxlatexadmonitions.sty
% 8.1.0 has moved topic/contents/sidebar to sphinxlatexshadowbox.sty
@@ -57,7 +57,11 @@
% reduce hyperref "Token not allowed in a PDF string" warnings on PDF builds
\AtBeginDocument{\pdfstringdefDisableCommands{%
% all "protected" macros possibly ending up in section titles should be here
-% TODO: examine if \sphinxhref, \sphinxurl, \sphinnolinkurl should be handled
+% next four were added so that URLs and internal links in titles can be allowed
+ \let\sphinxurl \@firstofone
+ \let\sphinxnolinkurl\@firstofone
+ \let\sphinxhref \@secondoftwo
+ \def\hyperref[#1]#2{#2}% for PDF bookmark to ignore #1
\let\sphinxstyleemphasis \@firstofone
\let\sphinxstyleliteralemphasis \@firstofone
\let\sphinxstylestrong \@firstofone
diff --git a/sphinx/texinputs/sphinxlatextables.sty b/sphinx/texinputs/sphinxlatextables.sty
index 4114955e071..b80cc83be8d 100644
--- a/sphinx/texinputs/sphinxlatextables.sty
+++ b/sphinx/texinputs/sphinxlatextables.sty
@@ -1,7 +1,7 @@
%% TABLES (WITH SUPPORT FOR MERGED CELLS OF GENERAL CONTENTS)
%
% change this info string if making any custom modification
-\ProvidesPackage{sphinxlatextables}[2024/07/01 v7.4.0 tables]%
+\ProvidesPackage{sphinxlatextables}[2025/06/30 v8.3.0 tables]%
% Provides support for this output mark-up from Sphinx latex writer
% and table templates:
@@ -42,6 +42,29 @@
% - \sphinxthistablewithnocolorrowsstyle
% - \sphinxthistablewithvlinesstyle
% - \sphinxthistablewithnovlinesstyle
+% - \sphinxbeforeendvarwidth
+
+% At 8.3.0, ALL table cell contents are wrapped into a varwidth environment.
+% This helps solve issues such as #3447, #8828, and helps use tabulary
+% in many more cases hence obtain better looking tables.
+\def\sphinxbeforeendvarwidth{\par\vskip-\baselineskip\hbox{\strut}}
+% MEMO: Mark-up uses the above macro right before all \end{varwdith} so that
+% if the cell in a row extends lower than the others, its last line acquires
+% standard "depth". Else it may lack any depth if without descenders such as
+% "p" or "q" letters and the horizontal line or color panel will look strange.
+% It originates in PR #3435 from 2017 which solved *many* table issues for
+% merged cells (and injected the varwidth technique now at 8.3.0 applied to
+% all cells). The original used \vbox{\hbox{\strut}} but that \vbox appears
+% to do nothing, and it was decided after some testing (July 2025) to remove
+% it, the original rationale for it being now lost.
+
+% These conditionals added at 8.3.0 for nested tables not to break row colors
+% (#13635). Nested tables are only partially supported by Sphinx LaTeX.
+% The method here is with no changes to neither writer nor templates.
+\newif\ifspx@intable
+\newif\ifspx@thistableisnested
+% Try to allow nested tables in a longtable. But tabulary causes problems.
+\newif\ifspx@longtable
%
% Also provides user command (see docs)
% - \sphixncolorblend
@@ -62,8 +85,8 @@
\RequirePackage{tabulary}
% tabulary has a bug with its re-definition of \multicolumn in its first pass
% which is not \long. But now Sphinx does not use LaTeX's \multicolumn but its
-% own macro. Hence we don't even need to patch tabulary. See
-% sphinxpackagemulticell.sty
+% own macro. Hence we don't even need to patch tabulary.
+%
% X or S (Sphinx) may have meanings if some table package is loaded hence
% \X was chosen to avoid possibility of conflict
\newcolumntype{\X}[2]{p{\dimexpr
@@ -109,7 +132,9 @@
\LTpre\z@skip\LTpost\z@skip % set to zero longtable's own skips
\edef\sphinxbaselineskip{\dimexpr\the\dimexpr\baselineskip\relax\relax}%
\spx@inframedtrue % message to sphinxheavybox
- }%
+ \spx@table@setnestedflags
+ \spx@longtabletrue
+}
% Compatibility with caption package
\def\sphinxthelongtablecaptionisattop{%
\spx@ifcaptionpackage{\noalign{\vskip-\belowcaptionskip}}{}%
@@ -122,10 +147,29 @@
\def\sphinxatlongtableend{\@nobreakfalse % latex3/latex2e#173
\prevdepth\z@\vskip\sphinxtablepost\relax}%
% B. Table with tabular or tabulary
-\def\sphinxattablestart{\par\vskip\dimexpr\sphinxtablepre\relax
+\def\sphinxattablestart{\par
+ \ifvmode % guard agains being nested in a table cell
+ \vskip\dimexpr\sphinxtablepre\relax
+ \fi
\spx@inframedtrue % message to sphinxheavybox
+ \spx@table@setnestedflags
}%
-\let\sphinxattableend\sphinxatlongtableend
+% MEMO: this happens inside a savenotes environment and hence flags
+% are reset on exit of it.
+\def\spx@table@setnestedflags{% Issue #13635
+ \ifspx@intable
+ \let\spx@table@resetcolortbl\spx@nestedtable@resetcolortbl
+ \spx@thistableisnestedtrue
+ \else
+ \spx@intabletrue
+ \fi
+ }%
+\def\sphinxattableend{%
+ \@nobreakfalse % <- probably unneeded as this is not a longtable
+ \ifvmode % guard against being nested in a table cell
+ \prevdepth\z@\vskip\sphinxtablepost\relax
+ \fi
+}%
% This is used by tabular and tabulary templates
\newcommand*\sphinxcapstartof[1]{%
\vskip\parskip
@@ -223,7 +267,32 @@
%
% configuration of tabulary
\setlength{\tymin}{3\fontcharwd\font`0 }% minimal width of "squeezed" columns
-\setlength{\tymax}{10000pt}% allow enough room for paragraphs to "compete"
+\setlength{\tymax}{3000pt}% allow enough room for paragraphs to "compete"
+%
+% MEMO: tabulary initially renders cell contents "horizontally" to measure
+% them and compare their relative importance. Its goal is to choose the
+% column width so that, roughly, all columns will look about evenly
+% filled. "Horizontal" rendering is incompatible with many LaTeX
+% structures such as lists, so prior to Sphinx 8.3.0 cells with such
+% "problematic" contents caused Sphinx to use tabular not tabulary; the
+% tabular would then render each column in absence of :widths: option or
+% tabularcolumns directive the same width equal to available text width
+% divided by number of columns. At 8.3.0, "problematic" contents is
+% wrapped into a "varwidth" environment, as was already done formerly
+% for merged cells, and this avoids tabulary causing errors such as
+% "incompatible with LR mode"; \sphinxcolwidth is used which sets
+% the initial horizontal width for "varwidth". In the first tabulary
+% pass, \sphinxcolwidth is configured (by us) to use \tymax.
+%
+% During testing, it was determined that the former 10000pt setting for
+% \tymax would cause "Dimension too large" TeX error if two columns or
+% more had cells containing admonitions (such contents does not allow
+% "varwidth" to reduce the width automatically). So we use now 3000pt
+% which allows up to 5 such columns while being large enough for
+% tabulary algorithm to give good results for cells containing a few
+% dozen words. The tabulary default of 2\textwidth proves to be too
+% small for that.
+%
% we need access to tabulary's final computed width. \@tempdima is too volatile
% to hope it has kept tabulary's value when \sphinxcolwidth needs it.
\newdimen\sphinx@TY@tablewidth
@@ -270,10 +339,11 @@
% cells (the code does inserts & tokens, see TN1b). It was decided to keep it
% simple with \sphinxstartmulticolumn...\sphinxstopmulticolumn.
%
+% **** ATTENTION: Sphinx does generate at least some nested tables in LaTeX
+% **** TODO: clarify if next paragraph means we must raise an
+% **** if LaTeX writer detects a merged cell inside nested table.
% MEMO about nesting: if sphinxmulticolumn is encountered in a nested tabular
-% inside a tabulary it will think to be at top level in the tabulary. But
-% Sphinx generates no nested tables, and if some LaTeX macro uses internally a
-% tabular this will not have a \sphinxstartmulticolumn within it!
+% inside a tabulary it will think to be at top level in the tabulary.
%
% 5.3.0 adds a check for multirow as single-row multi-column will allow a row
% colour but multi-row multi-column should not.
@@ -376,8 +446,7 @@
}%
\newcommand*\sphinxcolwidth[2]{%
% this dimension will always be used for varwidth, and serves as maximum
- % width when cells are merged either via multirow or multicolumn or both,
- % as always their contents is wrapped in varwidth environment.
+ % width for cells whose contents are wrapped in varwidth environment.
\ifnum#1>\@ne % multi-column (and possibly also multi-row)
% we wrote our own multicolumn code especially to handle that (and allow
% verbatim contents)
@@ -857,7 +926,32 @@
}%
\the\everycr
}%
- \global\rownum\@ne % is done from inside table so ok with tabulary two passes
+ \ifspx@thistableisnested
+ % Attention that tabulary does two passes so we need to push the
+ % initial rownum and, after the first pass, we must reset it!
+ % Fortunately Sphinx LaTeX writer makes parent table tabular or
+ % longtable if a nested table is a tabulary. So we don't need to
+ % worry about distinguishing if this or parent is tabulary.
+ \ifx\TY@final\@undefined % tabular
+ \spx@gpush@rownum
+ \else
+ \ifx\equation$% tabulary, first pass
+ \spx@gpush@rownum
+ \else % tabulary, second pass
+ \spx@gpop@rownum % reset \rownum
+ \spx@gpush@rownum% and push it again.
+ \fi
+ \fi
+ % To make nested tables stand out in a color row, we toggle the parity.
+ % TODO: Double-check if compatible with method for color of header
+ % row.
+ % TODO: Perhaps better to use specific colors for nested tables?
+ % This would mean though adding new sphinxsetup parameters
+ % and extending the documentation...
+ \ifodd\rownum\global\rownum\z@\else\global\rownum\@ne\fi
+ \else
+ \global\rownum\@ne
+ \fi
\sphinxSwitchCaseRowColor\rownum % set up color for the first body row
\sphinxrowcolorON % has been done from \sphinxtoprule location but let's do
% it again in case \sphinxtabletoprulehook has been used
@@ -883,20 +977,39 @@
\let\sphinxtabledecrementrownum\@empty
% \sphinxtableafterendhook will be modified by colorrows class to execute
-% this after the table
+% this after the table.
\def\spx@table@resetcolortbl{%
\sphinxrowcolorOFF
- \spx@table@reset@CTeverycr
+ \spx@table@reset@CT@everycr
% this last bit is done in order for the \sphinxbottomrule from the "foot"
-% longtable template to be able to use same code as the \sphinxbottomrule
-% at end of table body; see \sphinxbooktabsspecialbottomrule code
+% part of the longtable template to be able to use same code as the
+% \sphinxbottomrule at end of table body; see \sphinxbooktabsspecialbottomrule.
\global\rownum\z@
+ \global\let\spx@rownum@stack\@empty
}
-\def\spx@table@reset@CTeverycr{%
+% Most of \spx@table@resetcolortbl must be avoided if the table is nested.
+% Besides the sphinxTableRowColor must be reset because it has been
+% redefined by the cells of the nested table. So this is the alternative
+% macro which is executed on exit of nested table.
+\def\spx@nestedtable@resetcolortbl{%
+ \ifx\spx@rownum@stack\@empty\else
+% The stack can be empty if this is executed on exit of a nested table,
+% and the parent table has received the "nocolorrows" class, but globally
+% colorrows are activated (default). So we protected against that case.
+ \spx@gpop@rownum
+ \fi
+ \sphinxSwitchCaseRowColor\rownum
+}
+\def\spx@table@reset@CT@everycr{%
% we should probably be more cautious and not hard-code here the colortbl
-% set-up; so the macro is defined without @ to fac
+% set-up.
\global\CT@everycr{\noalign{\global\let\CT@row@color\relax}\the\everycr}%
}
+\let\spx@rownum@stack\@empty
+\def\spx@gpush@rownum{\xdef\spx@rownum@stack{\the\rownum.\spx@rownum@stack}}%
+\def\spx@gpop@rownum{\afterassignment\spx@gpop@rownum@i
+ \global\rownum=\spx@rownum@stack\relax}
+\def\spx@gpop@rownum@i.#1\relax{\gdef\spx@rownum@stack{#1}}
% At last the style macros \sphinxthistablewithstandardstyle etc...
@@ -1019,6 +1132,10 @@ local use of booktabs table style}%
% borderless style
\def\sphinxthistablewithborderlessstyle{%
+ \sphinxthistablewithnohlines
+ \def\spx@arrayrulewidth{\z@}%
+}%
+\def\sphinxthistablewithnohlines{%
\let\sphinxhline \@empty
\let\sphinxcline \@gobble
\let\sphinxvlinecrossing\@gobble
@@ -1026,7 +1143,9 @@ local use of booktabs table style}%
\let\spx@toprule \@empty
\let\sphinxmidrule \@empty
\let\sphinxbottomrule \@empty
- \def\spx@arrayrulewidth{\z@}%
+}%
+\def\sphinxthistabularywithnohlinesifinlongtable{%
+ \ifspx@longtable\sphinxthistablewithnohlines\fi
}%
% colorrows style
@@ -1047,10 +1166,13 @@ local use of booktabs table style}%
%
% this one is not set to \@empty by nocolorrows, because it looks harmless
% to execute it always, as it simply resets to standard colortbl state after
-% the table; so we don't need an @@ version for this one
+% the table [^1]; so we don't need an @@ version for this one.
+% .. [1]: which is bad if nested in another table. This is taken care of
+% at level of \sphinxattablestart and \sphinxatlongtablestart.
\spx@prepend\spx@table@resetcolortbl\to\sphinxtableafterendhook
}
\def\spx@prepend#1\to#2{% attention about using this only with #2 "storage macro"
+% MEMO: #1 is prepended with no expansion, i.e. "as is".
\toks@{#1}%
\toks@\expandafter\expandafter\expandafter{\expandafter\the\expandafter\toks@#2}%
\edef#2{\the\toks@}%
@@ -1064,9 +1186,10 @@ local use of booktabs table style}%
\let\spx@table@startbodycolorrows\@empty
\let\sphinxtabledecrementrownum \@empty
% we don't worry about \sphinxtableafterendhook as the \spx@table@resetcolortbl
-% done at end can not do harm; and we could also have not bothered with the
+% done at end can not do harm [^1]; and we could also have not bothered with the
% \sphinxtabledecrementrownum as its \rownum decrement, if active, is harmless
-% in non-colorrows context
+% in non-colorrows context.
+% .. [1]: if nested in another table it is modified to do no harm.
}
% (not so easy) implementation of the booktabscolorgaps option. This option
diff --git a/sphinx/texinputs/sphinxpackagefootnote.sty b/sphinx/texinputs/sphinxpackagefootnote.sty
index 7f2e2913874..1bde37f1045 100644
--- a/sphinx/texinputs/sphinxpackagefootnote.sty
+++ b/sphinx/texinputs/sphinxpackagefootnote.sty
@@ -1,6 +1,6 @@
\NeedsTeXFormat{LaTeX2e}
\ProvidesPackage{sphinxpackagefootnote}%
- [2024/05/17 v7.3.x Sphinx custom footnotehyper package (Sphinx team)]
+ [2025/11/15 v8.3.0 Sphinx custom footnotehyper package (Sphinx team)]
%%
%% Package: sphinxpackagefootnote
%% Version: based on footnotehyper.sty 2021/02/04 v1.1d
@@ -19,11 +19,13 @@
% - \sphinxfootnotemark[N]
% where N is a number.
%
-%% Some small differences from upstream footnotehyper.sty:
+%% Some small differences from footnotehyper.sty 2021/02/04 v1.1d:
%% - a tabulary compatibility layer (partial but enough for Sphinx),
%% - usage of \spx@opt@BeforeFootnote
%% - usage of \sphinxunactivateextrasandspace from sphinx.sty,
%% - \sphinxlongtablepatch
+%% - fix for a change of babel-french from late June 2025
+%% (\localleftbox undefined with pdflatex/xelatex, issue #14059)
%%
%% Starting with Sphinx v4.5.0, inherited footnotehyper macros for
%% footnote/footnotetext receive some Sphinx specific extras to
@@ -274,8 +276,11 @@
\fi
}%
\def\FNH@check{%
- \ifx\@makefntextFB\@undefined\expandafter\FNH@check@
- \else\expandafter\FNH@frenchb@
+ \ifx\@makefntextFB\@undefined
+ \expandafter\FNH@check@
+ \else
+ \providecommand\localleftbox[1]{}%
+ \expandafter\FNH@frenchb@
\fi
}%
\def\FNH@frenchb@{%
diff --git a/sphinx/themes/basic/static/doctools.js b/sphinx/themes/basic/static/doctools.js
index 0398ebb9f03..807cdb176c0 100644
--- a/sphinx/themes/basic/static/doctools.js
+++ b/sphinx/themes/basic/static/doctools.js
@@ -59,7 +59,7 @@ const Documentation = {
Object.assign(Documentation.TRANSLATIONS, catalog.messages);
Documentation.PLURAL_EXPR = new Function(
"n",
- `return (${catalog.plural_expr})`
+ `return (${catalog.plural_expr})`,
);
Documentation.LOCALE = catalog.locale;
},
@@ -89,7 +89,7 @@ const Documentation = {
const togglerElements = document.querySelectorAll("img.toggler");
togglerElements.forEach((el) =>
- el.addEventListener("click", (event) => toggler(event.currentTarget))
+ el.addEventListener("click", (event) => toggler(event.currentTarget)),
);
togglerElements.forEach((el) => (el.style.display = ""));
if (DOCUMENTATION_OPTIONS.COLLAPSE_INDEX) togglerElements.forEach(toggler);
@@ -98,14 +98,15 @@ const Documentation = {
initOnKeyListeners: () => {
// only install a listener if it is really needed
if (
- !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS &&
- !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
+ !DOCUMENTATION_OPTIONS.NAVIGATION_WITH_KEYS
+ && !DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
)
return;
document.addEventListener("keydown", (event) => {
// bail for input elements
- if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
+ if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName))
+ return;
// bail with special keys
if (event.altKey || event.ctrlKey || event.metaKey) return;
diff --git a/sphinx/themes/basic/static/language_data.js.jinja b/sphinx/themes/basic/static/language_data.js.jinja
index 64aefa798c7..daefea7eb9e 100644
--- a/sphinx/themes/basic/static/language_data.js.jinja
+++ b/sphinx/themes/basic/static/language_data.js.jinja
@@ -1,12 +1,13 @@
/*
* This script contains the language-specific data used by searchtools.js,
- * namely the list of stopwords, stemmer, scorer and splitter.
+ * namely the set of stopwords, stemmer, scorer and splitter.
*/
-var stopwords = {{ search_language_stop_words }};
+const stopwords = new Set({{ search_language_stop_words }});
+window.stopwords = stopwords; // Export to global scope
{% if search_language_stemming_code %}
-/* Non-minified version is copied as a separate JS file, if available */
+/* Non-minified versions are copied as separate JavaScript files, if available */
{{ search_language_stemming_code|safe }}
{% endif -%}
diff --git a/sphinx/themes/basic/static/searchtools.js b/sphinx/themes/basic/static/searchtools.js
index 91f4be57fc8..5a7628a18a2 100644
--- a/sphinx/themes/basic/static/searchtools.js
+++ b/sphinx/themes/basic/static/searchtools.js
@@ -41,11 +41,12 @@ if (typeof Scorer === "undefined") {
}
// Global search result kind enum, used by themes to style search results.
+// prettier-ignore
class SearchResultKind {
- static get index() { return "index"; }
- static get object() { return "object"; }
- static get text() { return "text"; }
- static get title() { return "title"; }
+ static get index() { return "index"; }
+ static get object() { return "object"; }
+ static get text() { return "text"; }
+ static get title() { return "title"; }
}
const _removeChildren = (element) => {
@@ -95,20 +96,25 @@ const _displayItem = (item, searchTerms, highlightTerms) => {
listItem.appendChild(document.createElement("span")).innerHTML =
" (" + descr + ")";
// highlight search terms in the description
- if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
- highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
- }
- else if (showSearchSummary)
+ if (SPHINX_HIGHLIGHT_ENABLED)
+ // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+ highlightTerms.forEach((term) =>
+ _highlightText(listItem, term, "highlighted"),
+ );
+ } else if (showSearchSummary)
fetch(requestUrl)
.then((responseData) => responseData.text())
.then((data) => {
if (data)
listItem.appendChild(
- Search.makeSearchSummary(data, searchTerms, anchor)
+ Search.makeSearchSummary(data, searchTerms, anchor),
);
// highlight search terms in the summary
- if (SPHINX_HIGHLIGHT_ENABLED) // set in sphinx_highlight.js
- highlightTerms.forEach((term) => _highlightText(listItem, term, "highlighted"));
+ if (SPHINX_HIGHLIGHT_ENABLED)
+ // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+ highlightTerms.forEach((term) =>
+ _highlightText(listItem, term, "highlighted"),
+ );
});
Search.output.appendChild(listItem);
};
@@ -117,14 +123,14 @@ const _finishSearch = (resultCount) => {
Search.title.innerText = _("Search Results");
if (!resultCount)
Search.status.innerText = Documentation.gettext(
- "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories."
+ "Your search did not match any documents. Please make sure that all words are spelled correctly and that you've selected enough categories.",
);
else
Search.status.innerText = Documentation.ngettext(
"Search finished, found one page matching the search query.",
"Search finished, found ${resultCount} pages matching the search query.",
resultCount,
- ).replace('${resultCount}', resultCount);
+ ).replace("${resultCount}", resultCount);
};
const _displayNextItem = (
results,
@@ -138,7 +144,7 @@ const _displayNextItem = (
_displayItem(results.pop(), searchTerms, highlightTerms);
setTimeout(
() => _displayNextItem(results, resultCount, searchTerms, highlightTerms),
- 5
+ 5,
);
}
// search finished, update title and status message
@@ -170,9 +176,10 @@ const _orderResultsByScoreThenName = (a, b) => {
* This is the same as ``\W+`` in Python, preserving the surrogate pair area.
*/
if (typeof splitQuery === "undefined") {
- var splitQuery = (query) => query
+ var splitQuery = (query) =>
+ query
.split(/[^\p{Letter}\p{Number}_\p{Emoji_Presentation}]+/gu)
- .filter(term => term) // remove remaining empty strings
+ .filter((term) => term); // remove remaining empty strings
}
/**
@@ -184,16 +191,23 @@ const Search = {
_pulse_status: -1,
htmlToText: (htmlString, anchor) => {
- const htmlElement = new DOMParser().parseFromString(htmlString, 'text/html');
+ const htmlElement = new DOMParser().parseFromString(
+ htmlString,
+ "text/html",
+ );
for (const removalQuery of [".headerlink", "script", "style"]) {
- htmlElement.querySelectorAll(removalQuery).forEach((el) => { el.remove() });
+ htmlElement.querySelectorAll(removalQuery).forEach((el) => {
+ el.remove();
+ });
}
if (anchor) {
- const anchorContent = htmlElement.querySelector(`[role="main"] ${anchor}`);
+ const anchorContent = htmlElement.querySelector(
+ `[role="main"] ${anchor}`,
+ );
if (anchorContent) return anchorContent.textContent;
console.warn(
- `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.`
+ `Anchored content block not found. Sphinx search tries to obtain it via DOM query '[role=main] ${anchor}'. Check your theme or template.`,
);
}
@@ -202,7 +216,7 @@ const Search = {
if (docContent) return docContent.textContent;
console.warn(
- "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template."
+ "Content block not found. Sphinx search tries to obtain it via DOM query '[role=main]'. Check your theme or template.",
);
return "";
},
@@ -287,12 +301,8 @@ const Search = {
const queryTermLower = queryTerm.toLowerCase();
// maybe skip this "word"
- // stopwords array is from language_data.js
- if (
- stopwords.indexOf(queryTermLower) !== -1 ||
- queryTerm.match(/^\d+$/)
- )
- return;
+ // stopwords set is from language_data.js
+ if (stopwords.has(queryTermLower) || queryTerm.match(/^\d+$/)) return;
// stem the word
let word = stemmer.stemWord(queryTermLower);
@@ -304,8 +314,12 @@ const Search = {
}
});
- if (SPHINX_HIGHLIGHT_ENABLED) { // set in sphinx_highlight.js
- localStorage.setItem("sphinx_highlight_terms", [...highlightTerms].join(" "))
+ if (SPHINX_HIGHLIGHT_ENABLED) {
+ // SPHINX_HIGHLIGHT_ENABLED is set in sphinx_highlight.js
+ localStorage.setItem(
+ "sphinx_highlight_terms",
+ [...highlightTerms].join(" "),
+ );
}
// console.debug("SEARCH: searching for:");
@@ -318,7 +332,13 @@ const Search = {
/**
* execute search (requires search index to be loaded)
*/
- _performSearch: (query, searchTerms, excludedTerms, highlightTerms, objectTerms) => {
+ _performSearch: (
+ query,
+ searchTerms,
+ excludedTerms,
+ highlightTerms,
+ objectTerms,
+ ) => {
const filenames = Search._index.filenames;
const docNames = Search._index.docnames;
const titles = Search._index.titles;
@@ -334,10 +354,15 @@ const Search = {
const queryLower = query.toLowerCase().trim();
for (const [title, foundTitles] of Object.entries(allTitles)) {
- if (title.toLowerCase().trim().includes(queryLower) && (queryLower.length >= title.length/2)) {
+ if (
+ title.toLowerCase().trim().includes(queryLower)
+ && queryLower.length >= title.length / 2
+ ) {
for (const [file, id] of foundTitles) {
- const score = Math.round(Scorer.title * queryLower.length / title.length);
- const boost = titles[file] === title ? 1 : 0; // add a boost for document titles
+ const score = Math.round(
+ (Scorer.title * queryLower.length) / title.length,
+ );
+ const boost = titles[file] === title ? 1 : 0; // add a boost for document titles
normalResults.push([
docNames[file],
titles[file] !== title ? `${titles[file]} > ${title}` : title,
@@ -353,9 +378,9 @@ const Search = {
// search for explicit entries in index directives
for (const [entry, foundEntries] of Object.entries(indexEntries)) {
- if (entry.includes(queryLower) && (queryLower.length >= entry.length/2)) {
+ if (entry.includes(queryLower) && queryLower.length >= entry.length / 2) {
for (const [file, id, isMain] of foundEntries) {
- const score = Math.round(100 * queryLower.length / entry.length);
+ const score = Math.round((100 * queryLower.length) / entry.length);
const result = [
docNames[file],
titles[file],
@@ -376,11 +401,13 @@ const Search = {
// lookup as object
objectTerms.forEach((term) =>
- normalResults.push(...Search.performObjectSearch(term, objectTerms))
+ normalResults.push(...Search.performObjectSearch(term, objectTerms)),
);
// lookup as search terms in fulltext
- normalResults.push(...Search.performTermsSearch(searchTerms, excludedTerms));
+ normalResults.push(
+ ...Search.performTermsSearch(searchTerms, excludedTerms),
+ );
// let the scorer override scores with a custom scoring function
if (Scorer.score) {
@@ -401,7 +428,11 @@ const Search = {
// note the reversing of results, so that in the case of duplicates, the highest-scoring entry is kept
let seen = new Set();
results = results.reverse().reduce((acc, result) => {
- let resultStr = result.slice(0, 4).concat([result[5]]).map(v => String(v)).join(',');
+ let resultStr = result
+ .slice(0, 4)
+ .concat([result[5]])
+ .map((v) => String(v))
+ .join(",");
if (!seen.has(resultStr)) {
acc.push(result);
seen.add(resultStr);
@@ -413,8 +444,20 @@ const Search = {
},
query: (query) => {
- const [searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms] = Search._parseQuery(query);
- const results = Search._performSearch(searchQuery, searchTerms, excludedTerms, highlightTerms, objectTerms);
+ const [
+ searchQuery,
+ searchTerms,
+ excludedTerms,
+ highlightTerms,
+ objectTerms,
+ ] = Search._parseQuery(query);
+ const results = Search._performSearch(
+ searchQuery,
+ searchTerms,
+ excludedTerms,
+ highlightTerms,
+ objectTerms,
+ );
// for debugging
//Search.lastresults = results.slice(); // a copy
@@ -437,7 +480,7 @@ const Search = {
const results = [];
const objectSearchCallback = (prefix, match) => {
- const name = match[4]
+ const name = match[4];
const fullname = (prefix ? prefix + "." : "") + name;
const fullnameLower = fullname.toLowerCase();
if (fullnameLower.indexOf(object) < 0) return;
@@ -489,9 +532,7 @@ const Search = {
]);
};
Object.keys(objects).forEach((prefix) =>
- objects[prefix].forEach((array) =>
- objectSearchCallback(prefix, array)
- )
+ objects[prefix].forEach((array) => objectSearchCallback(prefix, array)),
);
return results;
},
@@ -516,8 +557,14 @@ const Search = {
// find documents, if any, containing the query word in their text/title term indices
// use Object.hasOwnProperty to avoid mismatching against prototype properties
const arr = [
- { files: terms.hasOwnProperty(word) ? terms[word] : undefined, score: Scorer.term },
- { files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined, score: Scorer.title },
+ {
+ files: terms.hasOwnProperty(word) ? terms[word] : undefined,
+ score: Scorer.term,
+ },
+ {
+ files: titleTerms.hasOwnProperty(word) ? titleTerms[word] : undefined,
+ score: Scorer.title,
+ },
];
// add support for partial matches
if (word.length > 2) {
@@ -558,7 +605,8 @@ const Search = {
// create the mapping
files.forEach((file) => {
if (!fileMap.has(file)) fileMap.set(file, [word]);
- else if (fileMap.get(file).indexOf(word) === -1) fileMap.get(file).push(word);
+ else if (fileMap.get(file).indexOf(word) === -1)
+ fileMap.get(file).push(word);
});
});
@@ -569,11 +617,11 @@ const Search = {
// as search terms with length < 3 are discarded
const filteredTermCount = [...searchTerms].filter(
- (term) => term.length > 2
+ (term) => term.length > 2,
).length;
if (
- wordList.length !== searchTerms.size &&
- wordList.length !== filteredTermCount
+ wordList.length !== searchTerms.size
+ && wordList.length !== filteredTermCount
)
continue;
@@ -581,10 +629,10 @@ const Search = {
if (
[...excludedTerms].some(
(term) =>
- terms[term] === file ||
- titleTerms[term] === file ||
- (terms[term] || []).includes(file) ||
- (titleTerms[term] || []).includes(file)
+ terms[term] === file
+ || titleTerms[term] === file
+ || (terms[term] || []).includes(file)
+ || (titleTerms[term] || []).includes(file),
)
)
break;
@@ -626,7 +674,8 @@ const Search = {
let summary = document.createElement("p");
summary.classList.add("context");
- summary.textContent = top + text.substr(startWithContext, 240).trim() + tail;
+ summary.textContent =
+ top + text.substr(startWithContext, 240).trim() + tail;
return summary;
},
diff --git a/sphinx/themes/basic/static/sphinx_highlight.js b/sphinx/themes/basic/static/sphinx_highlight.js
index 8a96c69a194..ce735d52ee4 100644
--- a/sphinx/themes/basic/static/sphinx_highlight.js
+++ b/sphinx/themes/basic/static/sphinx_highlight.js
@@ -1,7 +1,7 @@
/* Highlighting utilities for Sphinx HTML documentation. */
"use strict";
-const SPHINX_HIGHLIGHT_ENABLED = true
+const SPHINX_HIGHLIGHT_ENABLED = true;
/**
* highlight a given string on a node by wrapping it in
@@ -13,9 +13,9 @@ const _highlight = (node, addItems, text, className) => {
const parent = node.parentNode;
const pos = val.toLowerCase().indexOf(text);
if (
- pos >= 0 &&
- !parent.classList.contains(className) &&
- !parent.classList.contains("nohighlight")
+ pos >= 0
+ && !parent.classList.contains(className)
+ && !parent.classList.contains("nohighlight")
) {
let span;
@@ -30,13 +30,7 @@ const _highlight = (node, addItems, text, className) => {
span.appendChild(document.createTextNode(val.substr(pos, text.length)));
const rest = document.createTextNode(val.substr(pos + text.length));
- parent.insertBefore(
- span,
- parent.insertBefore(
- rest,
- node.nextSibling
- )
- );
+ parent.insertBefore(span, parent.insertBefore(rest, node.nextSibling));
node.nodeValue = val.substr(0, pos);
/* There may be more occurrences of search term in this node. So call this
* function recursively on the remaining fragment.
@@ -46,7 +40,7 @@ const _highlight = (node, addItems, text, className) => {
if (isInSVG) {
const rect = document.createElementNS(
"http://www.w3.org/2000/svg",
- "rect"
+ "rect",
);
const bbox = parent.getBBox();
rect.x.baseVal.value = bbox.x;
@@ -65,7 +59,7 @@ const _highlightText = (thisNode, text, className) => {
let addItems = [];
_highlight(thisNode, addItems, text, className);
addItems.forEach((obj) =>
- obj.parent.insertAdjacentElement("beforebegin", obj.target)
+ obj.parent.insertAdjacentElement("beforebegin", obj.target),
);
};
@@ -73,25 +67,27 @@ const _highlightText = (thisNode, text, className) => {
* Small JavaScript module for the documentation.
*/
const SphinxHighlight = {
-
/**
* highlight the search words provided in localstorage in the text
*/
highlightSearchWords: () => {
- if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
+ if (!SPHINX_HIGHLIGHT_ENABLED) return; // bail if no highlight
// get and clear terms from localstorage
const url = new URL(window.location);
const highlight =
- localStorage.getItem("sphinx_highlight_terms")
- || url.searchParams.get("highlight")
- || "";
- localStorage.removeItem("sphinx_highlight_terms")
+ localStorage.getItem("sphinx_highlight_terms")
+ || url.searchParams.get("highlight")
+ || "";
+ localStorage.removeItem("sphinx_highlight_terms");
url.searchParams.delete("highlight");
window.history.replaceState({}, "", url);
// get individual terms from highlight string
- const terms = highlight.toLowerCase().split(/\s+/).filter(x => x);
+ const terms = highlight
+ .toLowerCase()
+ .split(/\s+/)
+ .filter((x) => x);
if (terms.length === 0) return; // nothing to do
// There should never be more than one element matching "div.body"
@@ -107,11 +103,11 @@ const SphinxHighlight = {
document
.createRange()
.createContextualFragment(
- '' +
- '' +
- _("Hide Search Matches") +
- "
"
- )
+ ''
+ + ''
+ + _("Hide Search Matches")
+ + "
",
+ ),
);
},
@@ -125,7 +121,7 @@ const SphinxHighlight = {
document
.querySelectorAll("span.highlighted")
.forEach((el) => el.classList.remove("highlighted"));
- localStorage.removeItem("sphinx_highlight_terms")
+ localStorage.removeItem("sphinx_highlight_terms");
},
initEscapeListener: () => {
@@ -134,10 +130,15 @@ const SphinxHighlight = {
document.addEventListener("keydown", (event) => {
// bail for input elements
- if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName)) return;
+ if (BLACKLISTED_KEY_CONTROL_ELEMENTS.has(document.activeElement.tagName))
+ return;
// bail with special keys
- if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey) return;
- if (DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS && (event.key === "Escape")) {
+ if (event.shiftKey || event.altKey || event.ctrlKey || event.metaKey)
+ return;
+ if (
+ DOCUMENTATION_OPTIONS.ENABLE_SEARCH_SHORTCUTS
+ && event.key === "Escape"
+ ) {
SphinxHighlight.hideSearchWords();
event.preventDefault();
}
diff --git a/sphinx/themes/scrolls/static/theme_extras.js b/sphinx/themes/scrolls/static/theme_extras.js
index df2be407339..84cc1509808 100644
--- a/sphinx/themes/scrolls/static/theme_extras.js
+++ b/sphinx/themes/scrolls/static/theme_extras.js
@@ -1,12 +1,12 @@
const initialiseThemeExtras = () => {
- const toc = document.getElementById("toc")
- toc.style.display = ""
- const items = toc.getElementsByTagName("ul")[0]
- items.style.display = "none"
- toc.getElementsByTagName("h3").addEventListener("click", () => {
- if (items.style.display !== "none") toc.classList.remove("expandedtoc")
- else toc.classList.add("expandedtoc");
- })
-}
-if (document.readyState !== "loading") initialiseThemeExtras()
-else document.addEventListener("DOMContentLoaded", initialiseThemeExtras)
+ const toc = document.getElementById("toc");
+ toc.style.display = "";
+ const items = toc.getElementsByTagName("ul")[0];
+ items.style.display = "none";
+ toc.getElementsByTagName("h3").addEventListener("click", () => {
+ if (items.style.display !== "none") toc.classList.remove("expandedtoc");
+ else toc.classList.add("expandedtoc");
+ });
+};
+if (document.readyState !== "loading") initialiseThemeExtras();
+else document.addEventListener("DOMContentLoaded", initialiseThemeExtras);
diff --git a/sphinx/theming.py b/sphinx/theming.py
index a27dbfe0973..9e06faaeffc 100644
--- a/sphinx/theming.py
+++ b/sphinx/theming.py
@@ -28,6 +28,8 @@
from typing import Any, Required, TypedDict
from sphinx.application import Sphinx
+ from sphinx.config import Config
+ from sphinx.registry import SphinxComponentRegistry
class _ThemeToml(TypedDict, total=False):
theme: Required[_ThemeTomlTheme]
@@ -148,13 +150,21 @@ def _cleanup(self) -> None:
class HTMLThemeFactory:
"""A factory class for HTML Themes."""
- def __init__(self, app: Sphinx) -> None:
+ def __init__(
+ self,
+ *,
+ confdir: Path,
+ app: Sphinx,
+ config: Config,
+ registry: SphinxComponentRegistry,
+ ) -> None:
self._app = app
- self._themes = app.registry.html_themes
+ self._confdir = confdir
+ self._themes = registry.html_themes
self._entry_point_themes: dict[str, Callable[[], None]] = {}
self._load_builtin_themes()
- if getattr(app.config, 'html_theme_path', None):
- self._load_additional_themes(app.config.html_theme_path)
+ if html_theme_path := getattr(config, 'html_theme_path', None):
+ self._load_additional_themes(html_theme_path)
self._load_entry_point_themes()
def _load_builtin_themes(self) -> None:
@@ -166,7 +176,7 @@ def _load_builtin_themes(self) -> None:
def _load_additional_themes(self, theme_paths: list[str]) -> None:
"""Load additional themes placed at specified directories."""
for theme_path in theme_paths:
- abs_theme_path = (self._app.confdir / theme_path).resolve()
+ abs_theme_path = (self._confdir / theme_path).resolve()
themes = self._find_themes(abs_theme_path)
for name, theme in themes.items():
self._themes[name] = _StrPath(theme)
diff --git a/sphinx/transforms/__init__.py b/sphinx/transforms/__init__.py
index e1f905d2d0f..b0bfbf281cf 100644
--- a/sphinx/transforms/__init__.py
+++ b/sphinx/transforms/__init__.py
@@ -15,9 +15,9 @@
from docutils.utils.smartquotes import smartchars
from sphinx import addnodes
+from sphinx.deprecation import _deprecation_warning
from sphinx.locale import _, __
from sphinx.util import logging
-from sphinx.util.docutils import new_document
from sphinx.util.i18n import format_date
from sphinx.util.nodes import apply_source_workaround, is_smartquotable
@@ -62,7 +62,10 @@ class SphinxTransform(Transform):
@property
def app(self) -> Sphinx:
"""Reference to the :class:`.Sphinx` object."""
- return self.env.app
+ cls_module = self.__class__.__module__
+ cls_name = self.__class__.__qualname__
+ _deprecation_warning(cls_module, f'{cls_name}.app', remove=(10, 0))
+ return self.env._app
@property
def env(self) -> BuildEnvironment:
@@ -93,6 +96,8 @@ def apply_transforms(self) -> None:
else:
# wrap the target node by document node during transforming
try:
+ from sphinx.util.docutils import new_document
+
document = new_document('')
if self.env:
document.settings.env = self.env
@@ -217,7 +222,7 @@ class SortIds(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
for node in self.document.findall(nodes.section):
if len(node['ids']) > 1 and node['ids'][0].startswith('id'):
- node['ids'] = node['ids'][1:] + [node['ids'][0]]
+ node['ids'] = [*node['ids'][1:], node['ids'][0]]
TRANSLATABLE_NODES = {
@@ -236,7 +241,7 @@ class ApplySourceWorkaround(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
for node in self.document.findall():
- if isinstance(node, nodes.TextElement | nodes.image | nodes.topic):
+ if isinstance(node, (nodes.TextElement, nodes.image, nodes.topic)):
apply_source_workaround(node)
@@ -367,7 +372,7 @@ def apply(self, **kwargs: Any) -> None:
# override default settings with :confval:`smartquotes_action`
self.smartquotes_action = self.config.smartquotes_action
- super().apply() # type: ignore[no-untyped-call]
+ super().apply()
def is_available(self) -> bool:
builders = self.config.smartquotes_excludes.get('builders', [])
@@ -379,7 +384,7 @@ def is_available(self) -> bool:
if self.config.smartquotes is False:
# disabled by confval smartquotes
return False
- if self.app.builder.name in builders:
+ if self.env._builder_cls.name in builders:
# disabled by confval smartquotes_excludes['builders']
return False
if self.config.language in languages:
@@ -409,7 +414,7 @@ class DoctreeReadEvent(SphinxTransform):
default_priority = 880
def apply(self, **kwargs: Any) -> None:
- self.app.events.emit('doctree-read', self.document)
+ self.env.events.emit('doctree-read', self.document)
class GlossarySorter(SphinxTransform):
@@ -477,7 +482,7 @@ def _reorder_index_target_nodes(start_node: nodes.target) -> None:
# as we want *consecutive* target & index nodes.
node: nodes.Node
for node in start_node.findall(descend=False, siblings=True):
- if isinstance(node, nodes.target | addnodes.index):
+ if isinstance(node, (nodes.target, addnodes.index)):
nodes_to_reorder.append(node)
continue
break # must be a consecutive run of target or index nodes
diff --git a/sphinx/transforms/i18n.py b/sphinx/transforms/i18n.py
index 815ca606bce..570154185e9 100644
--- a/sphinx/transforms/i18n.py
+++ b/sphinx/transforms/i18n.py
@@ -2,13 +2,12 @@
from __future__ import annotations
-import contextlib
from re import DOTALL, match
from textwrap import indent
from typing import TYPE_CHECKING, Any, TypeVar
+import docutils.utils
from docutils import nodes
-from docutils.io import StringInput
from sphinx import addnodes
from sphinx.domains.std import make_glossary_term, split_term_classifiers
@@ -17,6 +16,7 @@
from sphinx.locale import init as init_locale
from sphinx.transforms import SphinxTransform
from sphinx.util import get_filetype, logging
+from sphinx.util.docutils import LoggingReporter
from sphinx.util.i18n import docname_to_domain
from sphinx.util.index_entries import split_index_msg
from sphinx.util.nodes import (
@@ -30,8 +30,12 @@
if TYPE_CHECKING:
from collections.abc import Sequence
+ from docutils.frontend import Values
+
from sphinx.application import Sphinx
from sphinx.config import Config
+ from sphinx.environment import BuildEnvironment
+ from sphinx.registry import SphinxComponentRegistry
from sphinx.util.typing import ExtensionMetadata
@@ -47,49 +51,49 @@
N = TypeVar('N', bound=nodes.Node)
-def publish_msgstr(
- app: Sphinx,
+def _publish_msgstr(
source: str,
source_path: str,
source_line: int,
+ *,
config: Config,
- settings: Any,
+ env: BuildEnvironment,
+ registry: SphinxComponentRegistry,
+ settings: Values,
) -> nodes.Element:
"""Publish msgstr (single line) into docutils document
- :param sphinx.application.Sphinx app: sphinx application
:param str source: source text
:param str source_path: source path for warning indication
:param source_line: source line for warning indication
- :param sphinx.config.Config config: sphinx config
:param docutils.frontend.Values settings: docutils settings
+ :param sphinx.config.Config config: sphinx config
+ :param sphinx.environment.BuildEnvironment env: sphinx environment
+ :param sphinx.registry.SphinxComponentRegistry registry: sphinx registry
:return: document
:rtype: docutils.nodes.document
"""
+ filetype = get_filetype(config.source_suffix, source_path)
+ doc = docutils.utils.new_document(
+ f'{source_path}:{source_line}:', settings
+ )
+ doc.reporter = LoggingReporter.from_reporter(doc.reporter)
+
+ # clear rst_prolog temporarily
+ rst_prolog = config.rst_prolog
+ config.rst_prolog = None
try:
- # clear rst_prolog temporarily
- rst_prolog = config.rst_prolog
- config.rst_prolog = None
-
- from sphinx.io import SphinxI18nReader
-
- reader = SphinxI18nReader()
- reader.setup(app)
- filetype = get_filetype(config.source_suffix, source_path)
- parser = app.registry.create_source_parser(app, filetype)
- doc = reader.read(
- source=StringInput(
- source=source, source_path=f'{source_path}:{source_line}:'
- ),
- parser=parser,
- settings=settings,
- )
- with contextlib.suppress(IndexError): # empty node
- return doc[0]
- return doc
+ parser = registry.create_source_parser(filetype, config=config, env=env)
+ parser.parse(source, doc)
+ doc.current_source = doc.current_line = None
finally:
config.rst_prolog = rst_prolog
+ try:
+ return doc[0] # type: ignore[return-value]
+ except IndexError: # empty node
+ return doc
+
def parse_noqa(source: str) -> tuple[str, bool]:
m = match(r'(.*)(? None:
settings, source = self.document.settings, self.document['source']
msgstr = ''
- textdomain = docname_to_domain(self.env.docname, self.config.gettext_compact)
+ textdomain = docname_to_domain(
+ self.env.current_document.docname, self.config.gettext_compact
+ )
# fetch translations
srcdir = self.env.srcdir
@@ -435,13 +441,14 @@ def apply(self, **kwargs: Any) -> None:
if isinstance(node, LITERAL_TYPE_NODES):
msgstr = '::\n\n' + indent(msgstr, ' ' * 3)
- patch = publish_msgstr(
- self.app,
+ patch = _publish_msgstr(
msgstr,
source,
node.line, # type: ignore[arg-type]
- self.config,
- settings,
+ config=self.config,
+ env=self.env,
+ registry=self.env._registry,
+ settings=settings,
)
# FIXME: no warnings about inconsistent references in this part
# XXX doctest and other block markup
@@ -455,13 +462,14 @@ def apply(self, **kwargs: Any) -> None:
if isinstance(node, nodes.term):
for _id in node['ids']:
term, first_classifier = split_term_classifiers(msgstr)
- patch = publish_msgstr(
- self.app,
+ patch = _publish_msgstr(
term or '',
source,
node.line, # type: ignore[arg-type]
- self.config,
- settings,
+ config=self.config,
+ env=self.env,
+ registry=self.env._registry,
+ settings=settings,
)
updater.patch = make_glossary_term(
self.env,
@@ -532,13 +540,14 @@ def apply(self, **kwargs: Any) -> None:
# This generates:
msgstr = msgstr + '\n' + '=' * len(msgstr) * 2
- patch = publish_msgstr(
- self.app,
+ patch = _publish_msgstr(
msgstr,
source,
node.line, # type: ignore[arg-type]
- self.config,
- settings,
+ config=self.config,
+ env=self.env,
+ registry=self.env._registry,
+ settings=settings,
)
# Structural Subelements phase2
if isinstance(node, nodes.title):
@@ -612,7 +621,7 @@ class TranslationProgressTotaliser(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
from sphinx.builders.gettext import MessageCatalogBuilder
- if isinstance(self.app.builder, MessageCatalogBuilder):
+ if issubclass(self.env._builder_cls, MessageCatalogBuilder):
return
total = translated = 0
@@ -635,7 +644,7 @@ class AddTranslationClasses(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
from sphinx.builders.gettext import MessageCatalogBuilder
- if isinstance(self.app.builder, MessageCatalogBuilder):
+ if issubclass(self.env._builder_cls, MessageCatalogBuilder):
return
if not self.config.translation_progress_classes:
@@ -673,7 +682,7 @@ class RemoveTranslatableInline(SphinxTransform):
def apply(self, **kwargs: Any) -> None:
from sphinx.builders.gettext import MessageCatalogBuilder
- if isinstance(self.app.builder, MessageCatalogBuilder):
+ if issubclass(self.env._builder_cls, MessageCatalogBuilder):
return
matcher = NodeMatcher(nodes.inline, translatable=Any)
diff --git a/sphinx/transforms/post_transforms/__init__.py b/sphinx/transforms/post_transforms/__init__.py
index d9c5031b31c..ac95f56102a 100644
--- a/sphinx/transforms/post_transforms/__init__.py
+++ b/sphinx/transforms/post_transforms/__init__.py
@@ -47,9 +47,9 @@ def apply(self, **kwargs: Any) -> None:
def is_supported(self) -> bool:
"""Check this transform working for current builder."""
- if self.builders and self.app.builder.name not in self.builders:
+ if self.builders and self.env._builder_cls.name not in self.builders:
return False
- return not self.formats or self.app.builder.format in self.formats
+ return not self.formats or self.env._builder_cls.format in self.formats
def run(self, **kwargs: Any) -> None:
"""Main method of post transforms.
@@ -98,7 +98,7 @@ def _resolve_pending_xref(
new_node: nodes.reference | None
typ = node['reftype']
target = node['reftarget']
- ref_doc = node.setdefault('refdoc', self.env.docname)
+ ref_doc = node.setdefault('refdoc', self.env.current_document.docname)
ref_domain = node.get('refdomain', '')
domain: Domain | None
if ref_domain:
@@ -125,7 +125,7 @@ def _resolve_pending_xref(
try:
# no new node found? try the missing-reference event
- new_node = self.app.events.emit_firstresult(
+ new_node = self.env.events.emit_firstresult(
'missing-reference',
self.env,
node,
@@ -169,10 +169,11 @@ def _resolve_pending_xref_in_domain(
typ: str,
target: str,
) -> nodes.reference | None:
+ builder = self.env._app.builder
# let the domain try to resolve the reference
if domain is not None:
return domain.resolve_xref(
- self.env, ref_doc, self.app.builder, typ, target, node, contnode
+ self.env, ref_doc, builder, typ, target, node, contnode
)
# really hardwired reference types
@@ -193,7 +194,7 @@ def _resolve_pending_any_xref(
) -> nodes.reference | None:
"""Resolve reference generated by the "any" role."""
env = self.env
- builder = self.app.builder
+ builder = self.env._app.builder
domains = env.domains
results: list[tuple[str, nodes.reference]] = []
@@ -227,12 +228,7 @@ def _resolve_pending_any_xref(
if not results:
return None
if len(results) > 1:
-
- def stringify(name: str, node: Element) -> str:
- reftitle = node.get('reftitle', node.astext())
- return f':{name}:`{reftitle}`'
-
- candidates = ' or '.join(starmap(stringify, results))
+ candidates = ' or '.join(starmap(self._stringify, results))
msg = __(
"more than one target found for 'any' cross-reference %r: could be %s"
)
@@ -251,6 +247,11 @@ def stringify(name: str, node: Element) -> str:
new_node[0]['classes'].extend((res_domain, res_role.replace(':', '-')))
return new_node
+ @staticmethod
+ def _stringify(name: str, node: Element) -> str:
+ reftitle = node.get('reftitle', node.astext())
+ return f':{name}:`{reftitle}`'
+
def warn_missing_reference(
self,
refdoc: str,
@@ -273,25 +274,16 @@ def warn_missing_reference(
): # fmt: skip
warn = False
if self.config.nitpick_ignore_regex:
-
- def matches_ignore(entry_type: str, entry_target: str) -> bool:
- return any(
- (
- re.fullmatch(ignore_type, entry_type)
- and re.fullmatch(ignore_target, entry_target)
- )
- for ignore_type, ignore_target in self.config.nitpick_ignore_regex
- )
-
- if matches_ignore(dtype, target):
+ if _matches_ignore(self.config.nitpick_ignore_regex, dtype, target):
warn = False
# for "std" types also try without domain name
- if (not domain or domain.name == 'std') and matches_ignore(typ, target):
- warn = False
+ if not domain or domain.name == 'std':
+ if _matches_ignore(self.config.nitpick_ignore_regex, typ, target):
+ warn = False
if not warn:
return
- if self.app.events.emit_firstresult('warn-missing-reference', domain, node):
+ if self.env.events.emit_firstresult('warn-missing-reference', domain, node):
return
elif domain and typ in domain.dangling_warnings:
msg = domain.dangling_warnings[typ] % {'target': target}
@@ -317,6 +309,18 @@ def find_pending_xref_condition(
return None
+def _matches_ignore(
+ ignore_patterns: Sequence[tuple[str, str]], entry_type: str, entry_target: str
+) -> bool:
+ return any(
+ (
+ re.fullmatch(ignore_type, entry_type)
+ and re.fullmatch(ignore_target, entry_target)
+ )
+ for ignore_type, ignore_target in ignore_patterns
+ )
+
+
class OnlyNodeTransform(SphinxPostTransform):
default_priority = 50
@@ -325,7 +329,7 @@ def run(self, **kwargs: Any) -> None:
# result in a "Losing ids" exception if there is a target node before
# the only node, so we make sure docutils can transfer the id to
# something, even if it's just a comment and will lose the id anyway...
- process_only_nodes(self.document, self.app.tags)
+ process_only_nodes(self.document, self.env._tags)
class SigElementFallbackTransform(SphinxPostTransform):
@@ -340,7 +344,7 @@ def has_visitor(
return hasattr(translator, 'visit_%s' % node.__name__)
try:
- translator = self.app.builder.get_translator_class()
+ translator = self.env._registry.get_translator_class(self.env._builder_cls)
except AttributeError:
# do nothing if no translator class is specified (e.g., on a dummy builder)
return
diff --git a/sphinx/transforms/post_transforms/images.py b/sphinx/transforms/post_transforms/images.py
index d4c6262e529..6e6e9becb20 100644
--- a/sphinx/transforms/post_transforms/images.py
+++ b/sphinx/transforms/post_transforms/images.py
@@ -45,16 +45,16 @@ def handle(self, node: nodes.image) -> None:
@property
def imagedir(self) -> _StrPath:
- return self.app.doctreedir / 'images'
+ return self.env.doctreedir / 'images'
class ImageDownloader(BaseImageConverter):
default_priority = 100
def match(self, node: nodes.image) -> bool:
- if not self.app.builder.supported_image_types:
+ if not self.env._builder_cls.supported_image_types:
return False
- if self.app.builder.supported_remote_images:
+ if self.env._builder_cls.supported_remote_images:
return False
return '://' in node['uri']
@@ -123,14 +123,14 @@ def _process_image(self, node: nodes.image, path: Path) -> None:
node['candidates'].pop('?')
node['candidates'][mimetype] = path_str
node['uri'] = path_str
- self.env.images.add_file(self.env.docname, path_str)
+ self.env.images.add_file(self.env.current_document.docname, path_str)
class DataURIExtractor(BaseImageConverter):
default_priority = 150
def match(self, node: nodes.image) -> bool:
- if self.app.builder.supported_data_uri_images is True:
+ if self.env._builder_cls.supported_data_uri_images is True:
return False # do not transform the image; data URIs are valid in the build output
return node['uri'].startswith('data:')
@@ -156,7 +156,7 @@ def handle(self, node: nodes.image) -> None:
node['candidates'].pop('?')
node['candidates'][image.mimetype] = path_str
node['uri'] = path_str
- self.env.images.add_file(self.env.docname, path_str)
+ self.env.images.add_file(self.env.current_document.docname, path_str)
def get_filename_for(filename: str, mimetype: str) -> str:
@@ -208,12 +208,12 @@ class ImageConverter(BaseImageConverter):
conversion_rules: list[tuple[str, str]] = []
def match(self, node: nodes.image) -> bool:
- if not self.app.builder.supported_image_types:
+ if not self.env._builder_cls.supported_image_types:
return False
if '?' in node['candidates']:
return False
node_mime_types = set(self.guess_mimetypes(node))
- supported_image_types = set(self.app.builder.supported_image_types)
+ supported_image_types = set(self.env._builder_cls.supported_image_types)
if node_mime_types & supported_image_types:
# builder supports the image; no need to convert
return False
@@ -233,7 +233,7 @@ def match(self, node: nodes.image) -> bool:
def get_conversion_rule(self, node: nodes.image) -> tuple[str, str]:
for candidate in self.guess_mimetypes(node):
- for supported in self.app.builder.supported_image_types:
+ for supported in self.env._builder_cls.supported_image_types:
rule = (candidate, supported)
if rule in self.conversion_rules:
return rule
@@ -250,7 +250,7 @@ def guess_mimetypes(self, node: nodes.image) -> list[str]:
if '?' in node['candidates']:
return []
elif '*' in node['candidates']:
- path = self.app.srcdir / node['uri']
+ path = self.env.srcdir / node['uri']
guessed = guess_mimetype(path)
return [guessed] if guessed is not None else []
else:
@@ -269,7 +269,7 @@ def handle(self, node: nodes.image) -> None:
ensuredir(self.imagedir)
destpath = self.imagedir / filename
- abs_srcpath = self.app.srcdir / srcpath
+ abs_srcpath = self.env.srcdir / srcpath
if self.convert(abs_srcpath, destpath):
if '*' in node['candidates']:
node['candidates']['*'] = str(destpath)
@@ -278,7 +278,7 @@ def handle(self, node: nodes.image) -> None:
node['uri'] = str(destpath)
self.env.original_image_uri[destpath] = srcpath
- self.env.images.add_file(self.env.docname, destpath)
+ self.env.images.add_file(self.env.current_document.docname, destpath)
def convert(
self, _from: str | os.PathLike[str], _to: str | os.PathLike[str]
diff --git a/sphinx/transforms/references.py b/sphinx/transforms/references.py
index 447e9ded568..8655dbfa5f3 100644
--- a/sphinx/transforms/references.py
+++ b/sphinx/transforms/references.py
@@ -25,7 +25,7 @@ def apply(self, **kwargs: Any) -> None:
# suppress INFO level messages for a while
reporter.report_level = max(reporter.WARNING_LEVEL, reporter.report_level)
- super().apply() # type: ignore[no-untyped-call]
+ super().apply()
finally:
reporter.report_level = report_level
@@ -36,7 +36,9 @@ class SphinxDomains(SphinxTransform):
default_priority = 850
def apply(self, **kwargs: Any) -> None:
- self.env.domains._process_doc(self.env, self.env.docname, self.document)
+ self.env.domains._process_doc(
+ self.env, self.env.current_document.docname, self.document
+ )
def setup(app: Sphinx) -> ExtensionMetadata:
diff --git a/sphinx/util/_serialise.py b/sphinx/util/_serialise.py
index df2d66c717d..501a13bd713 100644
--- a/sphinx/util/_serialise.py
+++ b/sphinx/util/_serialise.py
@@ -19,9 +19,9 @@ def stable_hash(obj: Any) -> str:
"""
if isinstance(obj, dict):
obj = sorted(map(stable_hash, obj.items()))
- if isinstance(obj, list | tuple | set | frozenset):
+ if isinstance(obj, (list, tuple, set, frozenset)):
obj = sorted(map(stable_hash, obj))
- elif isinstance(obj, type | types.FunctionType):
+ elif isinstance(obj, (type, types.FunctionType)):
# The default repr() of functions includes the ID, which is not ideal.
# We use the fully qualified name instead.
obj = f'{obj.__module__}.{obj.__qualname__}'
@@ -42,10 +42,10 @@ def _stable_str_prep(obj: Any) -> dict[str, Any] | list[Any] | str:
obj = [(_stable_str_prep(k), _stable_str_prep(v)) for k, v in obj.items()]
obj.sort()
return dict(obj)
- if isinstance(obj, list | tuple | set | frozenset):
+ if isinstance(obj, (list, tuple, set, frozenset)):
# Convert to a sorted list
return sorted(map(_stable_str_prep, obj), key=str)
- if isinstance(obj, type | types.FunctionType):
+ if isinstance(obj, (type, types.FunctionType)):
# The default repr() of functions includes the ID, which is not ideal.
# We use the fully qualified name instead.
return f'{obj.__module__}.{obj.__qualname__}'
diff --git a/sphinx/util/display.py b/sphinx/util/display.py
index 1edca4e6c7d..8a2767bedb0 100644
--- a/sphinx/util/display.py
+++ b/sphinx/util/display.py
@@ -20,7 +20,7 @@
def display_chunk(chunk: Any) -> str:
- if isinstance(chunk, list | tuple):
+ if isinstance(chunk, (list, tuple)):
if len(chunk) == 1:
return str(chunk[0])
return f'{chunk[0]} .. {chunk[-1]}'
diff --git a/sphinx/util/docfields.py b/sphinx/util/docfields.py
index 1c24a73bf2e..3eaebbcf519 100644
--- a/sphinx/util/docfields.py
+++ b/sphinx/util/docfields.py
@@ -118,7 +118,7 @@ def make_xref(
if location is not None:
with contextlib.suppress(ValueError):
lineno = get_node_line(location)
- ns, messages = role(rolename, target, target, lineno, inliner, {}, [])
+ ns, _messages = role(rolename, target, target, lineno, inliner, {}, [])
return nodes.inline(target, '', *ns)
def make_xrefs(
@@ -386,7 +386,7 @@ def _transform_step_1(
field_body = cast('nodes.field_body', field[1])
try:
# split into field type and argument
- fieldtype_name, fieldarg = field_name.astext().split(None, 1)
+ fieldtype_name, fieldarg = field_name.astext().split(None, maxsplit=1)
except ValueError:
# maybe an argument-less field type?
fieldtype_name, fieldarg = field_name.astext(), ''
@@ -442,7 +442,7 @@ def _transform_step_1(
if is_typefield:
# filter out only inline nodes; others will result in invalid
# markup being written out
- content = [n for n in content if isinstance(n, nodes.Inline | nodes.Text)]
+ content = [n for n in content if isinstance(n, (nodes.Inline, nodes.Text))]
if content:
types.setdefault(typename, {})[fieldarg] = content
return
diff --git a/sphinx/util/docstrings.py b/sphinx/util/docstrings.py
index 53e7620edc2..6f23096d92b 100644
--- a/sphinx/util/docstrings.py
+++ b/sphinx/util/docstrings.py
@@ -26,7 +26,7 @@ def separate_metadata(s: str | None) -> tuple[str | None, dict[str, str]]:
else:
matched = field_list_item_re.match(line)
if matched and not in_other_element:
- field_name = matched.group()[1:].split(':', 1)[0]
+ field_name = matched.group()[1:].partition(':')[0]
if field_name.startswith('meta '):
name = field_name[5:].strip()
metadata[name] = line[matched.end() :].strip()
diff --git a/sphinx/util/docutils.py b/sphinx/util/docutils.py
index 70d8e69be6f..ff64906c7d5 100644
--- a/sphinx/util/docutils.py
+++ b/sphinx/util/docutils.py
@@ -4,21 +4,26 @@
import os
import re
-from contextlib import contextmanager
+import warnings
+from contextlib import contextmanager, nullcontext
from copy import copy
from pathlib import Path
from typing import TYPE_CHECKING
import docutils
from docutils import nodes
+from docutils.frontend import OptionParser
from docutils.io import FileOutput
from docutils.parsers.rst import Directive, directives, roles
+from docutils.readers import standalone
from docutils.statemachine import StateMachine
+from docutils.transforms.references import DanglingReferences
from docutils.utils import Reporter, unescape
from sphinx.errors import SphinxError
from sphinx.locale import __
-from sphinx.util import logging
+from sphinx.transforms import SphinxTransformer
+from sphinx.util import logging, rst
from sphinx.util.parsing import nested_parse_to_nodes
logger = logging.getLogger(__name__)
@@ -27,18 +32,22 @@
)
if TYPE_CHECKING:
- from collections.abc import Iterator, Sequence
+ from collections.abc import Iterator, Mapping, Sequence
from types import ModuleType, TracebackType
from typing import Any, Protocol
+ from docutils import Component
from docutils.frontend import Values
from docutils.nodes import Element, Node, system_message
+ from docutils.parsers import Parser
from docutils.parsers.rst.states import Inliner
from docutils.statemachine import State, StringList
+ from docutils.transforms import Transform
from sphinx.builders import Builder
from sphinx.config import Config
from sphinx.environment import BuildEnvironment
+ from sphinx.events import EventManager
from sphinx.util.typing import RoleFunction
class _LanguageModule(Protocol):
@@ -66,6 +75,13 @@ def __call__(
) -> tuple[RoleFunction | None, list[system_message]]: ...
+_READER_TRANSFORMS = [
+ transform
+ for transform in standalone.Reader().get_transforms()
+ if transform is not DanglingReferences
+]
+
+
additional_nodes: set[type[Element]] = set()
@@ -370,7 +386,7 @@ def write(self, text: str) -> None:
if not matched:
logger.warning(text.rstrip('\r\n'), type='docutils')
else:
- location, type, level = matched.groups()
+ location, type, _level = matched.groups()
message = report_re.sub('', text).rstrip()
logger.log(type, message, location=location, type='docutils')
@@ -816,3 +832,99 @@ def new_document(source_path: str, settings: Any = None) -> nodes.document:
document = nodes.document(settings, reporter, source=source_path)
document.note_source(source_path, -1)
return document
+
+
+def _parse_str_to_doctree(
+ content: str,
+ *,
+ filename: Path,
+ default_role: str = '',
+ default_settings: Mapping[str, Any],
+ env: BuildEnvironment,
+ events: EventManager | None = None,
+ parser: Parser,
+ transforms: Sequence[type[Transform]] = (),
+) -> nodes.document:
+ env.current_document._parser = parser
+
+ # Propagate exceptions by default when used programmatically:
+ defaults = {'traceback': True, **default_settings}
+ settings = _get_settings(
+ standalone.Reader, parser, defaults=defaults, read_config_files=True
+ )
+ settings._source = str(filename)
+
+ # Create root document node
+ reporter = LoggingReporter(
+ source=str(filename),
+ report_level=settings.report_level,
+ halt_level=settings.halt_level,
+ debug=settings.debug,
+ error_handler=settings.error_encoding_error_handler,
+ )
+ document = nodes.document(settings, reporter, source=str(filename))
+ document.note_source(str(filename), -1)
+
+ # substitute transformer
+ document.transformer = transformer = SphinxTransformer(document)
+ transformer.add_transforms(_READER_TRANSFORMS)
+ transformer.add_transforms(transforms)
+ transformer.add_transforms(parser.get_transforms())
+
+ if default_role:
+ default_role_cm = rst.default_role(env.current_document.docname, default_role)
+ else:
+ default_role_cm = nullcontext() # type: ignore[assignment]
+ with sphinx_domains(env), default_role_cm:
+ # TODO: Move the stanza below to Builder.read_doc(), within
+ # a sphinx_domains() context manager.
+ # This will require changes to IntersphinxDispatcher and/or
+ # CustomReSTDispatcher.
+ if events is not None:
+ # emit "source-read" event
+ arg = [content]
+ events.emit('source-read', env.current_document.docname, arg)
+ content = arg[0]
+
+ # parse content to abstract syntax tree
+ parser.parse(content, document)
+ document.current_source = document.current_line = None
+
+ # run transforms
+ transformer.apply_transforms()
+
+ return document
+
+
+def _get_settings(
+ *components: Component | type[Component],
+ defaults: Mapping[str, Any],
+ read_config_files: bool = False,
+) -> Values:
+ with warnings.catch_warnings(action='ignore', category=DeprecationWarning):
+ # DeprecationWarning: The frontend.OptionParser class will be replaced
+ # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
+ # DeprecationWarning: The frontend.Option class will be removed
+ # in Docutils 0.21 or later.
+ option_parser = OptionParser(
+ components=components,
+ defaults=defaults,
+ read_config_files=read_config_files,
+ )
+ return option_parser.get_default_values() # type: ignore[return-value]
+
+
+if docutils.__version_info__[:2] < (0, 22):
+ from docutils.parsers.rst.roles import set_classes
+
+ def _normalize_options(options: dict[str, Any] | None) -> dict[str, Any]:
+ if options is None:
+ return {}
+ n_options = options.copy()
+ set_classes(n_options)
+ return n_options
+
+else:
+ from docutils.parsers.rst.roles import ( # type: ignore[attr-defined, no-redef]
+ normalize_options as _normalize_options, # NoQA: F401
+ )
diff --git a/sphinx/util/i18n.py b/sphinx/util/i18n.py
index 6cc4b31766e..7553119334b 100644
--- a/sphinx/util/i18n.py
+++ b/sphinx/util/i18n.py
@@ -168,7 +168,7 @@ def docname_to_domain(docname: str, compaction: bool | str) -> str:
if isinstance(compaction, str):
return compaction
if compaction:
- return docname.split(SEP, 1)[0]
+ return docname.partition(SEP)[0]
else:
return docname
@@ -228,6 +228,14 @@ def babel_format_date(
if not hasattr(date, 'tzinfo'):
formatter = babel.dates.format_date
+ if not locale:
+ # Babel would not accept a falsy locale
+ # (or would try to fall back to the LC_TIME
+ # locale, which would be not what was requested),
+ # so we can just short-cut to English, as we
+ # would for the `"fallback to English"` case.
+ locale = 'en'
+
try:
return formatter(date, format, locale=locale)
except (ValueError, babel.core.UnknownLocaleError):
@@ -311,7 +319,7 @@ def get_image_filename_for_language(
) -> str:
root, ext = os.path.splitext(filename)
dirname = os.path.dirname(root)
- docpath = os.path.dirname(env.docname)
+ docpath = os.path.dirname(env.current_document.docname)
try:
return env.config.figure_language_filename.format(
root=root,
diff --git a/sphinx/util/images.py b/sphinx/util/images.py
index b43a0705d36..ca6cb66764b 100644
--- a/sphinx/util/images.py
+++ b/sphinx/util/images.py
@@ -90,12 +90,17 @@ def get_image_extension(mimetype: str) -> str | None:
def parse_data_uri(uri: str) -> DataURI | None:
if not uri.startswith('data:'):
return None
+ uri = uri[5:]
+
+ if ',' not in uri:
+ msg = 'malformed data URI'
+ raise ValueError(msg)
# data:[][;charset=][;base64],
mimetype = 'text/plain'
charset = 'US-ASCII'
- properties, data = uri[5:].split(',', 1)
+ properties, _, data = uri.partition(',')
for prop in properties.split(';'):
if prop == 'base64':
pass # skip
diff --git a/sphinx/util/index_entries.py b/sphinx/util/index_entries.py
index 10046842976..36dba388146 100644
--- a/sphinx/util/index_entries.py
+++ b/sphinx/util/index_entries.py
@@ -20,7 +20,7 @@ def split_index_msg(entry_type: str, value: str) -> list[str]:
def _split_into(n: int, type: str, value: str) -> list[str]:
"""Split an index entry into a given number of parts at semicolons."""
- parts = [x.strip() for x in value.split(';', n - 1)]
+ parts = [x.strip() for x in value.split(';', maxsplit=n - 1)]
if len(list(filter(None, parts))) < n:
msg = f'invalid {type} index entry {value!r}'
raise ValueError(msg)
diff --git a/sphinx/util/inspect.py b/sphinx/util/inspect.py
index d2cba2b8ddc..4c5accfb6f1 100644
--- a/sphinx/util/inspect.py
+++ b/sphinx/util/inspect.py
@@ -103,7 +103,7 @@ class methods and static methods.
if ispartial(obj):
obj = obj.func
elif inspect.isroutine(obj) and hasattr(obj, '__wrapped__'):
- obj = obj.__wrapped__
+ obj = obj.__wrapped__ # pyright: ignore[reportFunctionMemberAccess]
elif isclassmethod(obj) or isstaticmethod(obj):
obj = obj.__func__
else:
@@ -114,7 +114,7 @@ class methods and static methods.
if ispartial(obj):
obj = obj.func
elif inspect.isroutine(obj) and hasattr(obj, '__wrapped__'):
- obj = obj.__wrapped__
+ obj = obj.__wrapped__ # pyright: ignore[reportFunctionMemberAccess]
elif isclassmethod(obj) or isstaticmethod(obj):
obj = obj.__func__
else:
@@ -131,7 +131,7 @@ def getall(obj: Any) -> Sequence[str] | None:
__all__ = safe_getattr(obj, '__all__', None)
if __all__ is None:
return None
- if isinstance(__all__, list | tuple) and all(isinstance(e, str) for e in __all__):
+ if isinstance(__all__, (list, tuple)) and all(isinstance(e, str) for e in __all__):
return __all__
raise ValueError(__all__)
@@ -160,7 +160,7 @@ def getmro(obj: Any) -> tuple[type, ...]:
return ()
-def getorigbases(obj: Any) -> tuple[Any, ...] | None:
+def getorigbases(obj: Any) -> tuple[type, ...] | None:
"""Safely get ``obj.__orig_bases__``.
This returns ``None`` if the object is not a class or if ``__orig_bases__``
@@ -195,7 +195,7 @@ def getslots(obj: Any) -> dict[str, Any] | dict[str, None] | None:
return __slots__
elif isinstance(__slots__, str):
return {__slots__: None}
- elif isinstance(__slots__, list | tuple):
+ elif isinstance(__slots__, (list, tuple)):
return dict.fromkeys(__slots__)
else:
raise ValueError
@@ -225,7 +225,7 @@ def unpartial(obj: Any) -> Any:
def ispartial(obj: Any) -> TypeIs[partial[Any] | partialmethod[Any]]:
"""Check if the object is a partial function or method."""
- return isinstance(obj, partial | partialmethod)
+ return isinstance(obj, (partial, partialmethod))
def isclassmethod(
@@ -374,8 +374,8 @@ def isattributedescriptor(obj: Any) -> bool:
if isinstance(unwrapped, _DESCRIPTOR_LIKE):
# attribute must not be a method descriptor
return False
- # attribute must not be an instancemethod (C-API)
- return type(unwrapped).__name__ != 'instancemethod'
+ # attribute must not be an instancemethod (C-API) nor nb_method (specific for nanobind)
+ return type(unwrapped).__name__ not in {'instancemethod', 'nb_method'}
return False
@@ -385,7 +385,7 @@ def is_singledispatch_function(obj: Any) -> bool:
inspect.isfunction(obj)
and hasattr(obj, 'dispatch')
and hasattr(obj, 'register')
- and obj.dispatch.__module__ == 'functools'
+ and obj.dispatch.__module__ == 'functools' # pyright: ignore[reportFunctionMemberAccess]
)
@@ -443,12 +443,12 @@ def _is_wrapped_coroutine(obj: Any) -> bool:
def isproperty(obj: Any) -> TypeIs[property | cached_property[Any]]:
"""Check if the object is property (possibly cached)."""
- return isinstance(obj, property | cached_property)
+ return isinstance(obj, (property, cached_property))
def isgenericalias(obj: Any) -> TypeIs[types.GenericAlias]:
"""Check if the object is a generic alias."""
- return isinstance(obj, types.GenericAlias | typing._BaseGenericAlias) # type: ignore[attr-defined]
+ return isinstance(obj, (types.GenericAlias, typing._BaseGenericAlias)) # type: ignore[attr-defined]
def safe_getattr(obj: Any, name: str, *defargs: Any) -> Any:
@@ -616,6 +616,14 @@ def __hash__(self) -> int:
def __repr__(self) -> str:
return f'{self.__class__.__name__}({self.name!r})'
+ def __or__(self, other: Any) -> Any:
+ # When evaluating type hints, our forward ref can appear in type expressions,
+ # i.e. `Alias | None`. This means it needs to support ``__or__`` and ``__ror__``.
+ return typing.Union[self, other] # NoQA: UP007
+
+ def __ror__(self, other: Any) -> Any:
+ return typing.Union[other, self] # NoQA: UP007
+
class TypeAliasModule:
"""Pseudo module class for :confval:`autodoc_type_aliases`."""
@@ -772,7 +780,7 @@ def signature(
def evaluate_signature(
sig: Signature,
globalns: dict[str, Any] | None = None,
- localns: dict[str, Any] | None = None,
+ localns: Mapping[str, Any] | None = None,
) -> Signature:
"""Evaluate unresolved type annotations in a signature object."""
if globalns is None:
@@ -796,7 +804,7 @@ def evaluate_signature(
def _evaluate_forwardref(
ref: ForwardRef,
globalns: dict[str, Any] | None,
- localns: dict[str, Any] | None,
+ localns: Mapping[str, Any] | None,
) -> Any:
"""Evaluate a forward reference."""
if sys.version_info[:2] >= (3, 14):
@@ -818,7 +826,7 @@ def _evaluate_forwardref(
def _evaluate(
annotation: Any,
globalns: dict[str, Any],
- localns: dict[str, Any],
+ localns: Mapping[str, Any],
) -> Any:
"""Evaluate unresolved type annotation."""
try:
@@ -854,6 +862,25 @@ def stringify_signature(
(ex. io.StringIO -> StringIO)
:param short_literals: If enabled, use short literal types.
"""
+ args, retann = _stringify_signature_to_parts(
+ sig=sig,
+ show_annotation=show_annotation,
+ show_return_annotation=show_return_annotation,
+ unqualified_typehints=unqualified_typehints,
+ short_literals=short_literals,
+ )
+ if retann:
+ return f'{args} -> {retann}'
+ return str(args)
+
+
+def _stringify_signature_to_parts(
+ sig: Signature,
+ show_annotation: bool = True,
+ show_return_annotation: bool = True,
+ unqualified_typehints: bool = False,
+ short_literals: bool = False,
+) -> tuple[str, str]:
mode: _StringifyMode
if unqualified_typehints:
mode = 'smart'
@@ -909,17 +936,18 @@ def stringify_signature(
args.append('/')
concatenated_args = ', '.join(args)
+ concatenated_args = f'({concatenated_args})'
if (
sig.return_annotation is EMPTY
or not show_annotation
or not show_return_annotation
):
- return f'({concatenated_args})'
+ retann = ''
else:
retann = stringify_annotation(
sig.return_annotation, mode, short_literals=short_literals
)
- return f'({concatenated_args}) -> {retann}'
+ return concatenated_args, retann
def signature_from_str(signature: str) -> Signature:
@@ -1051,10 +1079,8 @@ def getdoc(
return doc
-def _getdoc_internal(
- obj: Any, attrgetter: Callable[[Any, str, Any], Any] = safe_getattr
-) -> str | None:
- doc = attrgetter(obj, '__doc__', None)
+def _getdoc_internal(obj: Any, /) -> str | None:
+ doc = safe_getattr(obj, '__doc__', None)
if isinstance(doc, str):
return doc
return None
diff --git a/sphinx/util/inventory.py b/sphinx/util/inventory.py
index d37398a5562..6e4c648bd3f 100644
--- a/sphinx/util/inventory.py
+++ b/sphinx/util/inventory.py
@@ -75,7 +75,7 @@ def _loads_v1(cls, lines: Sequence[str], *, uri: str) -> _Inventory:
projname = lines[0].rstrip()[11:] # Project name
version = lines[1].rstrip()[11:] # Project version
for line in lines[2:]:
- name, item_type, location = line.rstrip().split(None, 2)
+ name, item_type, location = line.rstrip().split(None, maxsplit=2)
location = posixpath.join(uri, location)
# version 1 did not add anchors to the location
if item_type == 'mod':
diff --git a/sphinx/util/logging.py b/sphinx/util/logging.py
index fab8acc3b90..9ad035c49af 100644
--- a/sphinx/util/logging.py
+++ b/sphinx/util/logging.py
@@ -430,7 +430,7 @@ class WarningSuppressor(logging.Filter):
"""Filter logs by `suppress_warnings`."""
def __init__(self, app: Sphinx) -> None:
- self.app = app
+ self._app = app
super().__init__()
def filter(self, record: logging.LogRecord) -> bool:
@@ -438,7 +438,7 @@ def filter(self, record: logging.LogRecord) -> bool:
subtype = getattr(record, 'subtype', '')
try:
- suppress_warnings = self.app.config.suppress_warnings
+ suppress_warnings = self._app.config.suppress_warnings
except AttributeError:
# config is not initialized yet (ex. in conf.py)
suppress_warnings = ()
@@ -446,7 +446,7 @@ def filter(self, record: logging.LogRecord) -> bool:
if is_suppressed_warning(type, subtype, suppress_warnings):
return False
else:
- self.app._warncount += 1
+ self._app._warncount += 1
return True
@@ -496,7 +496,7 @@ class SphinxLogRecordTranslator(logging.Filter):
LogRecordClass: type[logging.LogRecord]
def __init__(self, app: Sphinx) -> None:
- self.app = app
+ self._app = app
super().__init__()
def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore[override]
@@ -509,15 +509,15 @@ def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore[overri
docname, lineno = location
if docname:
if lineno:
- record.location = f'{self.app.env.doc2path(docname)}:{lineno}'
+ record.location = f'{self._app.env.doc2path(docname)}:{lineno}'
else:
- record.location = f'{self.app.env.doc2path(docname)}'
+ record.location = f'{self._app.env.doc2path(docname)}'
else:
record.location = None
elif isinstance(location, nodes.Node):
record.location = get_node_location(location)
elif location and ':' not in location:
- record.location = f'{self.app.env.doc2path(location)}'
+ record.location = f'{self._app.env.doc2path(location)}'
return True
@@ -537,7 +537,7 @@ def filter(self, record: SphinxWarningLogRecord) -> bool: # type: ignore[overri
ret = super().filter(record)
try:
- show_warning_types = self.app.config.show_warning_types
+ show_warning_types = self._app.config.show_warning_types
except AttributeError:
# config is not initialized yet (ex. in conf.py)
show_warning_types = False
@@ -602,14 +602,18 @@ class LastMessagesWriter:
"""Stream writer storing last 10 messages in memory to save trackback"""
def __init__(self, app: Sphinx, stream: IO[str]) -> None:
- self.app = app
+ self._app = app
def write(self, data: str) -> None:
- self.app.messagelog.append(data)
+ self._app.messagelog.append(data)
-def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None:
+def setup(
+ app: Sphinx, status: IO[str], warning: IO[str], *, verbosity: int = 0
+) -> None:
"""Setup root logger for Sphinx"""
+ log_level = VERBOSITY_MAP[max(verbosity, 0)]
+
logger = logging.getLogger(NAMESPACE)
logger.setLevel(logging.DEBUG)
logger.propagate = False
@@ -621,7 +625,7 @@ def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None:
info_handler = NewLineStreamHandler(SafeEncodingWriter(status))
info_handler.addFilter(InfoFilter())
info_handler.addFilter(InfoLogRecordTranslator(app))
- info_handler.setLevel(VERBOSITY_MAP[app.verbosity])
+ info_handler.setLevel(log_level)
info_handler.setFormatter(ColorizeFormatter())
warning_handler = WarningStreamHandler(SafeEncodingWriter(warning))
@@ -635,7 +639,7 @@ def setup(app: Sphinx, status: IO[str], warning: IO[str]) -> None:
messagelog_handler = logging.StreamHandler(LastMessagesWriter(app, status))
messagelog_handler.addFilter(InfoFilter())
- messagelog_handler.setLevel(VERBOSITY_MAP[app.verbosity])
+ messagelog_handler.setLevel(log_level)
logger.addHandler(info_handler)
logger.addHandler(warning_handler)
diff --git a/sphinx/util/math.py b/sphinx/util/math.py
index f482e0c4dac..898aab2d617 100644
--- a/sphinx/util/math.py
+++ b/sphinx/util/math.py
@@ -14,7 +14,7 @@ def get_node_equation_number(writer: HTML5Translator, node: nodes.math_block) ->
if writer.builder.config.math_numfig and writer.builder.config.numfig:
figtype = 'displaymath'
if writer.builder.name == 'singlehtml':
- key = f'{writer.docnames[-1]}/{figtype}' # type: ignore[has-type]
+ key = f'{writer.docnames[-1]}/{figtype}'
else:
key = figtype
diff --git a/sphinx/util/nodes.py b/sphinx/util/nodes.py
index e29dc376884..1d316b9f9bf 100644
--- a/sphinx/util/nodes.py
+++ b/sphinx/util/nodes.py
@@ -5,6 +5,7 @@
import contextlib
import re
import unicodedata
+from io import StringIO
from typing import TYPE_CHECKING, Any, Generic, TypeVar, cast
from docutils import nodes
@@ -197,13 +198,13 @@ def apply_source_workaround(node: Element) -> None:
node,
(
# https://github.com/sphinx-doc/sphinx/issues/1305 rubric directive
- nodes.rubric
+ nodes.rubric,
# https://github.com/sphinx-doc/sphinx/issues/1477 line node
- | nodes.line
+ nodes.line,
# https://github.com/sphinx-doc/sphinx/issues/3093 image directive in substitution
- | nodes.image
+ nodes.image,
# https://github.com/sphinx-doc/sphinx/issues/3335 field list syntax
- | nodes.field_name
+ nodes.field_name,
),
):
logger.debug(
@@ -289,6 +290,35 @@ def is_translatable(node: Node) -> bool:
) # fmt: skip
+def _clean_extracted_message(text: str) -> str:
+ """Remove trailing backslashes from each line of *text*."""
+ if '\\' in text:
+ # TODO(picnixz): if possible, find a regex alternative
+ # that is not vulnerable to a ReDOS (the code below is
+ # equivalent to re.sub(r'[ \t]*\\[ \t]*$', text, re.MULTILINE)).
+ buffer = StringIO()
+ for line in text.splitlines(keepends=True):
+ split = line.rsplit('\\', maxsplit=1)
+ if len(split) == 2:
+ prefix, suffix = split
+ if re.match(r'^[ \t]*\s$', suffix):
+ # The line ends with some NL character, preceded by
+ # one or more whitespaces (to be dropped), the backslash,
+ # and possibly other whitespaces on its left.
+ buffer.write(prefix.rstrip(' \t'))
+ buffer.write(suffix.lstrip(' \t'))
+ elif not suffix:
+ # backslash is at the end of the LAST line
+ buffer.write(prefix.rstrip(' \t'))
+ else:
+ # backslash is is in the middle of the line
+ buffer.write(line)
+ else:
+ buffer.write(line)
+ text = buffer.getvalue()
+ return text.replace('\n', ' ').strip()
+
+
def extract_messages(doctree: Element) -> Iterable[tuple[Element, str]]:
"""Extract translatable messages from a document tree."""
for node in doctree.findall(is_translatable):
@@ -311,7 +341,8 @@ def extract_messages(doctree: Element) -> Iterable[tuple[Element, str]]:
elif isinstance(node, nodes.meta):
msg = node['content']
else:
- msg = node.rawsource.replace('\n', ' ').strip() # type: ignore[attr-defined]
+ text = node.rawsource # type: ignore[attr-defined]
+ msg = _clean_extracted_message(text)
# XXX nodes rendering empty are likely a bug in sphinx.addnodes
if msg:
diff --git a/sphinx/util/osutil.py b/sphinx/util/osutil.py
index 807db899af9..374c59ae904 100644
--- a/sphinx/util/osutil.py
+++ b/sphinx/util/osutil.py
@@ -47,8 +47,8 @@ def relative_uri(base: str, to: str) -> str:
"""Return a relative URL from ``base`` to ``to``."""
if to.startswith(SEP):
return to
- b2 = base.split('#')[0].split(SEP)
- t2 = to.split('#')[0].split(SEP)
+ b2 = base.partition('#')[0].split(SEP)
+ t2 = to.partition('#')[0].split(SEP)
# remove common segments (except the last segment)
for x, y in zip(b2[:-1], t2[:-1], strict=False):
if x != y:
diff --git a/sphinx/util/requests.py b/sphinx/util/requests.py
index b439ce437e8..dd39ca83fff 100644
--- a/sphinx/util/requests.py
+++ b/sphinx/util/requests.py
@@ -35,12 +35,12 @@ def _get_tls_cacert(url: str, certs: str | dict[str, str] | None) -> str | bool:
"""Get additional CA cert for a specific URL."""
if not certs:
return True
- elif isinstance(certs, str | tuple):
+ elif isinstance(certs, (str, tuple)):
return certs
else:
hostname = urlsplit(url).netloc
if '@' in hostname:
- _, hostname = hostname.split('@', 1)
+ hostname = hostname.partition('@')[-1]
return certs.get(hostname, True)
diff --git a/sphinx/util/rst.py b/sphinx/util/rst.py
index c848a9b3657..485f369766e 100644
--- a/sphinx/util/rst.py
+++ b/sphinx/util/rst.py
@@ -1,15 +1,15 @@
-"""reST helper functions."""
+"""reStructuredText helper functions."""
from __future__ import annotations
import re
from collections import defaultdict
from contextlib import contextmanager
-from typing import TYPE_CHECKING, cast
+from typing import TYPE_CHECKING
from unicodedata import east_asian_width
from docutils.parsers.rst import roles
-from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined]
+from docutils.parsers.rst.languages import en as english
from docutils.parsers.rst.states import Body
from docutils.utils import Reporter
from jinja2 import pass_environment
@@ -25,7 +25,7 @@
logger = logging.getLogger(__name__)
-FIELD_NAME_RE = re.compile(Body.patterns['field_marker'])
+_FIELD_NAME_RE = re.compile(Body.patterns['field_marker'])
symbols_re = re.compile(r'([!-\-/:-@\[-`{-~])') # symbols without dot(0x2e)
SECTIONING_CHARS = ['=', '-', '~']
@@ -66,7 +66,7 @@ def heading(env: Environment, text: str, level: int = 1) -> str:
def default_role(docname: str, name: str) -> Iterator[None]:
if name:
dummy_reporter = Reporter('', 4, 4)
- role_fn, _ = roles.role(name, english, 0, dummy_reporter)
+ role_fn, _ = roles.role(name, english, 0, dummy_reporter) # type: ignore[arg-type]
if role_fn:
docutils.register_role('', role_fn) # type: ignore[arg-type]
else:
@@ -77,39 +77,39 @@ def default_role(docname: str, name: str) -> Iterator[None]:
docutils.unregister_role('')
-def prepend_prolog(content: StringList, prolog: str) -> None:
- """Prepend a string to content body as prolog."""
- if prolog:
- pos = 0
- for line in content:
- if FIELD_NAME_RE.match(line):
- pos += 1
- else:
- break
-
- if pos > 0:
- # insert a blank line after docinfo
- content.insert(pos, '', '', 0)
+def _prepend_prologue(content: StringList, prologue: str) -> None:
+ """Prepend a string to content body as a prologue."""
+ if not prologue:
+ return
+ pos = 0
+ for line in content:
+ if _FIELD_NAME_RE.match(line):
pos += 1
+ else:
+ break
- # insert prolog (after docinfo if exists)
- lineno = 0
- for lineno, line in enumerate(prolog.splitlines()):
- content.insert(pos + lineno, line, '', lineno)
+ if pos > 0:
+ # insert a blank line after docinfo
+ content.insert(pos, '', '', 0)
+ pos += 1
- content.insert(pos + lineno + 1, '', '', 0)
+ # insert prologue (after docinfo if exists)
+ lineno = 0
+ for lineno, line in enumerate(prologue.splitlines()):
+ content.insert(pos + lineno, line, '', lineno)
+ content.insert(pos + lineno + 1, '', '', 0)
-def append_epilog(content: StringList, epilog: str) -> None:
- """Append a string to content body as epilog."""
- if epilog:
- if len(content) > 0:
- source, lineno = content.info(-1)
- # lineno will never be None, since len(content) > 0
- lineno = cast('int', lineno)
- else:
- source = ''
- lineno = 0
- content.append('', source, lineno + 1)
- for lineno, line in enumerate(epilog.splitlines()):
- content.append(line, '', lineno)
+
+def _append_epilogue(content: StringList, epilogue: str) -> None:
+ """Append a string to content body as an epilogue."""
+ if not epilogue:
+ return
+ if len(content) > 0:
+ source, lineno = content.items[-1]
+ else:
+ source = ''
+ lineno = 0
+ content.append('', source, lineno + 1)
+ for lineno, line in enumerate(epilogue.splitlines()):
+ content.append(line, '', lineno)
diff --git a/sphinx/util/tags.py b/sphinx/util/tags.py
index 4467534a945..ded965c31fa 100644
--- a/sphinx/util/tags.py
+++ b/sphinx/util/tags.py
@@ -10,7 +10,7 @@
from sphinx.deprecation import RemovedInSphinx90Warning
if TYPE_CHECKING:
- from collections.abc import Iterator, Sequence
+ from collections.abc import Collection, Iterator
from typing import Literal
_ENV = jinja2.environment.Environment()
@@ -42,7 +42,7 @@ def parse_compare(self) -> jinja2.nodes.Expr:
class Tags:
- def __init__(self, tags: Sequence[str] = ()) -> None:
+ def __init__(self, tags: Collection[str] = ()) -> None:
self._tags = set(tags or ())
self._condition_cache: dict[str, bool] = {}
diff --git a/sphinx/util/template.py b/sphinx/util/template.py
index 03a9649345a..c7bb3993302 100644
--- a/sphinx/util/template.py
+++ b/sphinx/util/template.py
@@ -42,7 +42,7 @@ def render_string(self, source: str, context: dict[str, Any]) -> str:
class FileRenderer(BaseRenderer):
def __init__(self, search_path: Sequence[str | os.PathLike[str]]) -> None:
- if isinstance(search_path, str | os.PathLike):
+ if isinstance(search_path, (str, os.PathLike)):
search_path = [search_path]
else:
# filter "None" paths
diff --git a/sphinx/util/typing.py b/sphinx/util/typing.py
index 1a68a18e29a..cedffb46a02 100644
--- a/sphinx/util/typing.py
+++ b/sphinx/util/typing.py
@@ -33,6 +33,19 @@
'smart',
]
+AnyTypeAliasType: tuple[type, ...] = ()
+if sys.version_info[:2] >= (3, 12):
+ from typing import TypeAliasType
+
+ AnyTypeAliasType += (TypeAliasType,)
+
+try:
+ import typing_extensions
+except ImportError:
+ pass
+else:
+ AnyTypeAliasType += (typing_extensions.TypeAliasType,)
+
logger = logging.getLogger(__name__)
@@ -180,7 +193,7 @@ class ExtensionMetadata(typing.TypedDict, total=False):
def get_type_hints(
obj: Any,
globalns: dict[str, Any] | None = None,
- localns: dict[str, Any] | None = None,
+ localns: Mapping[str, Any] | None = None,
include_extras: bool = False,
) -> dict[str, Any]:
"""Return a dictionary containing type hints for a function, method, module or class
@@ -309,6 +322,11 @@ def restify(cls: Any, mode: _RestifyMode = 'fully-qualified-except-typing') -> s
# are printed natively and ``None``-like types are kept as is.
# *cls* is defined in ``typing``, and thus ``__args__`` must exist
return ' | '.join(restify(a, mode) for a in cls.__args__)
+ elif isinstance(cls, AnyTypeAliasType):
+ # TODO: Use ``__qualname__`` here unconditionally (not yet supported)
+ if hasattr(cls, '__qualname__'):
+ return f':py:type:`{module_prefix}{cls.__module__}.{cls.__qualname__}`'
+ return f':py:type:`{module_prefix}{cls.__module__}.{cls.__name__}`' # type: ignore[attr-defined]
elif cls.__module__ in {'__builtin__', 'builtins'}:
if hasattr(cls, '__args__'):
if not cls.__args__: # Empty tuple, list, ...
@@ -440,7 +458,9 @@ def stringify_annotation(
annotation_module_is_typing = True
# Extract the annotation's base type by considering formattable cases
- if isinstance(annotation, typing.TypeVar) and not _is_unpack_form(annotation):
+ if isinstance(
+ annotation, (typing.TypeVar, AnyTypeAliasType)
+ ) and not _is_unpack_form(annotation):
# typing_extensions.Unpack is incorrectly determined as a TypeVar
if annotation_module_is_typing and mode in {
'fully-qualified-except-typing',
@@ -524,7 +544,7 @@ def stringify_annotation(
# Process the generic arguments (if any).
# They must be a list or a tuple, otherwise they are considered 'broken'.
annotation_args = getattr(annotation, '__args__', ())
- if annotation_args and isinstance(annotation_args, list | tuple):
+ if annotation_args and isinstance(annotation_args, (list, tuple)):
if (
qualname in {'Union', 'types.UnionType'}
and all(getattr(a, '__origin__', ...) is typing.Literal for a in annotation_args)
diff --git a/sphinx/versioning.py b/sphinx/versioning.py
index 3de5a17ec9c..02bc6edd055 100644
--- a/sphinx/versioning.py
+++ b/sphinx/versioning.py
@@ -160,7 +160,7 @@ def apply(self, **kwargs: Any) -> None:
if env.versioning_compare:
# get old doctree
- filename = env.doctreedir / f'{env.docname}.doctree'
+ filename = env.doctreedir / f'{env.current_document.docname}.doctree'
try:
with open(filename, 'rb') as f:
old_doctree = pickle.load(f)
diff --git a/sphinx/writers/html.py b/sphinx/writers/html.py
index e2c04ca32db..04f9af122a4 100644
--- a/sphinx/writers/html.py
+++ b/sphinx/writers/html.py
@@ -4,7 +4,7 @@
from typing import TYPE_CHECKING, cast
-from docutils.writers.html4css1 import Writer
+from docutils.writers import html4css1
from sphinx.util import logging
from sphinx.writers.html5 import HTML5Translator
@@ -20,7 +20,7 @@
# https://www.arnebrodowski.de/blog/write-your-own-restructuredtext-writer.html
-class HTMLWriter(Writer): # type: ignore[misc]
+class HTMLWriter(html4css1.Writer): # type: ignore[misc]
# override embed-stylesheet default value to False.
settings_default_overrides = {'embed_stylesheet': False}
diff --git a/sphinx/writers/html5.py b/sphinx/writers/html5.py
index b39b463d6db..bbcd247e33c 100644
--- a/sphinx/writers/html5.py
+++ b/sphinx/writers/html5.py
@@ -305,10 +305,10 @@ def depart_desc_optional(self, node: Element) -> None:
self.param_group_index += 1
def visit_desc_annotation(self, node: Element) -> None:
- self.body.append(self.starttag(node, 'em', '', CLASS='property'))
+ self.body.append(self.starttag(node, 'span', '', CLASS='property'))
def depart_desc_annotation(self, node: Element) -> None:
- self.body.append('')
+ self.body.append('')
##############################################
@@ -357,7 +357,7 @@ def visit_reference(self, node: Element) -> None:
def visit_number_reference(self, node: Element) -> None:
self.visit_reference(node)
- def depart_number_reference(self, node: Element) -> None:
+ def depart_number_reference(self, node: nodes.reference) -> None:
self.depart_reference(node)
# overwritten -- we don't want source comments to show up in the HTML
@@ -451,7 +451,7 @@ def add_permalink_ref(self, node: Element, title: str) -> None:
)
# overwritten
- def visit_bullet_list(self, node: Element) -> None:
+ def visit_bullet_list(self, node: nodes.bullet_list) -> None:
if len(node) == 1 and isinstance(node[0], addnodes.toctree):
# avoid emitting empty
raise nodes.SkipNode
@@ -498,7 +498,7 @@ def depart_term(self, node: Element) -> None:
self.body.append('')
# overwritten
- def visit_title(self, node: Element) -> None:
+ def visit_title(self, node: nodes.title) -> None:
if (
isinstance(node.parent, addnodes.compact_paragraph)
and node.parent.get('toctree')
@@ -535,7 +535,7 @@ def visit_title(self, node: Element) -> None:
self.body.pop()
self.context[-1] = '
\n'
- def depart_title(self, node: Element) -> None:
+ def depart_title(self, node: nodes.title) -> None:
close_tag = self.context[-1]
if (
self.config.html_permalinks
@@ -586,7 +586,7 @@ def depart_rubric(self, node: nodes.rubric) -> None:
super().depart_rubric(node)
# overwritten
- def visit_literal_block(self, node: Element) -> None:
+ def visit_literal_block(self, node: nodes.literal_block) -> None:
if node.rawsource != node.astext():
# most probably a parsed-literal block -- don't highlight
return super().visit_literal_block(node)
@@ -614,7 +614,7 @@ def visit_literal_block(self, node: Element) -> None:
self.body.append(starttag + highlighted + '
\n')
raise nodes.SkipNode
- def visit_caption(self, node: Element) -> None:
+ def visit_caption(self, node: nodes.caption) -> None:
if (
isinstance(node.parent, nodes.container)
and node.parent.get('literal_block')
@@ -625,7 +625,7 @@ def visit_caption(self, node: Element) -> None:
self.add_fignumber(node.parent)
self.body.append(self.starttag(node, 'span', '', CLASS='caption-text'))
- def depart_caption(self, node: Element) -> None:
+ def depart_caption(self, node: nodes.caption) -> None:
self.body.append('')
# append permalink if available
@@ -648,7 +648,7 @@ def depart_caption(self, node: Element) -> None:
super().depart_caption(node)
def visit_doctest_block(self, node: Element) -> None:
- self.visit_literal_block(node)
+ self.visit_literal_block(node) # type: ignore[arg-type]
# overwritten to add the (for XHTML compliance)
def visit_block_quote(self, node: Element) -> None:
@@ -740,14 +740,14 @@ def depart_download_reference(self, node: Element) -> None:
self.body.append(self.context.pop())
# overwritten
- def visit_figure(self, node: Element) -> None:
+ def visit_figure(self, node: nodes.figure) -> None:
# set align=default if align not specified to give a default style
node.setdefault('align', 'default')
return super().visit_figure(node)
# overwritten
- def visit_image(self, node: Element) -> None:
+ def visit_image(self, node: nodes.image) -> None:
olduri = node['uri']
# rewrite the URI if the environment knows about it
if olduri in self.builder.images:
@@ -775,7 +775,7 @@ def visit_image(self, node: Element) -> None:
super().visit_image(node)
# overwritten
- def depart_image(self, node: Element) -> None:
+ def depart_image(self, node: nodes.image) -> None:
if node['uri'].lower().endswith(('svg', 'svgz')):
pass
else:
@@ -892,16 +892,16 @@ def visit_tip(self, node: Element) -> None:
def depart_tip(self, node: Element) -> None:
self.depart_admonition(node)
- def visit_literal_emphasis(self, node: Element) -> None:
+ def visit_literal_emphasis(self, node: nodes.emphasis) -> None:
return self.visit_emphasis(node)
- def depart_literal_emphasis(self, node: Element) -> None:
+ def depart_literal_emphasis(self, node: nodes.emphasis) -> None:
return self.depart_emphasis(node)
- def visit_literal_strong(self, node: Element) -> None:
+ def visit_literal_strong(self, node: nodes.strong) -> None:
return self.visit_strong(node)
- def depart_literal_strong(self, node: Element) -> None:
+ def depart_literal_strong(self, node: nodes.strong) -> None:
return self.depart_strong(node)
def visit_abbreviation(self, node: Element) -> None:
@@ -913,15 +913,15 @@ def visit_abbreviation(self, node: Element) -> None:
def depart_abbreviation(self, node: Element) -> None:
self.body.append('')
- def visit_manpage(self, node: Element) -> None:
+ def visit_manpage(self, node: nodes.emphasis) -> None:
self.visit_literal_emphasis(node)
- def depart_manpage(self, node: Element) -> None:
+ def depart_manpage(self, node: nodes.emphasis) -> None:
self.depart_literal_emphasis(node)
# overwritten to add even/odd classes
- def visit_table(self, node: Element) -> None:
+ def visit_table(self, node: nodes.table) -> None:
self._table_row_indices.append(0)
atts = {}
@@ -936,7 +936,7 @@ def visit_table(self, node: Element) -> None:
tag = self.starttag(node, 'table', CLASS=' '.join(classes), **atts)
self.body.append(tag)
- def depart_table(self, node: Element) -> None:
+ def depart_table(self, node: nodes.table) -> None:
self._table_row_indices.pop()
super().depart_table(node)
@@ -949,11 +949,11 @@ def visit_row(self, node: Element) -> None:
self.body.append(self.starttag(node, 'tr', ''))
node.column = 0 # type: ignore[attr-defined]
- def visit_field_list(self, node: Element) -> None:
+ def visit_field_list(self, node: nodes.field_list) -> None:
self._fieldlist_row_indices.append(0)
return super().visit_field_list(node)
- def depart_field_list(self, node: Element) -> None:
+ def depart_field_list(self, node: nodes.field_list) -> None:
self._fieldlist_row_indices.pop()
return super().depart_field_list(node)
diff --git a/sphinx/writers/latex.py b/sphinx/writers/latex.py
index a2a17855c18..f204f585f6a 100644
--- a/sphinx/writers/latex.py
+++ b/sphinx/writers/latex.py
@@ -132,8 +132,9 @@ def __init__(self, node: Element) -> None:
self.colsep = None
self.colwidths: list[int] = []
self.has_problematic = False
- self.has_oldproblematic = False
self.has_verbatim = False
+ # cf https://github.com/sphinx-doc/sphinx/issues/13646#issuecomment-2958309632
+ self.is_nested = False
self.caption: list[str] = []
self.stubs: list[int] = []
@@ -146,29 +147,49 @@ def __init__(self, node: Element) -> None:
self.cell_id = 0 # last assigned cell_id
def is_longtable(self) -> bool:
- """True if and only if table uses longtable environment."""
+ """True if and only if table uses longtable environment.
+
+ In absence of longtable class can only be used trustfully on departing
+ the table, as the number of rows is not known until then.
+ """
return self.row > 30 or 'longtable' in self.classes
def get_table_type(self) -> str:
"""Returns the LaTeX environment name for the table.
+ It is used at time of ``depart_table()`` and again via ``get_colspec()``.
The class currently supports:
* longtable
* tabular
* tabulary
"""
- if self.is_longtable():
+ if self.is_longtable() and not self.is_nested:
return 'longtable'
elif self.has_verbatim:
return 'tabular'
elif self.colspec:
- return 'tabulary'
+ # tabulary complains (only a LaTeX warning) if none of its column
+ # types is used. The next test will have false positive from
+ # syntax such as >{\RaggedRight} but it will catch *{3}{J} which
+ # does require tabulary and would crash tabular
+ # It is user responsability not to use a tabulary column type for
+ # a column having a problematic cell.
+ if any(c in 'LRCJT' for c in self.colspec):
+ return 'tabulary'
+ else:
+ return 'tabular'
elif self.has_problematic or (
self.colwidths and 'colwidths-given' in self.classes
):
return 'tabular'
else:
+ # A nested tabulary in a longtable can not use any \hline's,
+ # i.e. it can not use "booktabs" or "standard" styles (due to a
+ # LaTeX upstream bug we do not try to solve). But we can't know
+ # here if it ends up in a tabular or longtable. So it is via
+ # LaTeX macros inserted by the tabulary template that the problem
+ # will be solved.
return 'tabulary'
def get_colspec(self) -> str:
@@ -178,6 +199,7 @@ def get_colspec(self) -> str:
.. note::
+ This is used by the template renderer at time of depart_table().
The ``\\X`` and ``T`` column type specifiers are defined in
``sphinxlatextables.sty``.
"""
@@ -199,12 +221,6 @@ def get_colspec(self) -> str:
elif self.get_table_type() == 'tabulary':
# sphinx.sty sets T to be J by default.
return '{' + _colsep + (('T' + _colsep) * self.colcount) + '}' + CR
- elif self.has_oldproblematic:
- return (
- r'{%s*{%d}{\X{1}{%d}%s}}'
- % (_colsep, self.colcount, self.colcount, _colsep)
- + CR
- )
else:
return '{' + _colsep + (('l' + _colsep) * self.colcount) + '}' + CR
@@ -281,6 +297,9 @@ def escape_abbr(text: str) -> str:
def rstdim_to_latexdim(width_str: str, scale: int = 100) -> str:
"""Convert `width_str` with rst length to LaTeX length."""
+ # MEMO: the percent unit is interpreted here as a percentage
+ # of \linewidth. Let's keep in mind though that \linewidth
+ # is dynamic in LaTeX, e.g. it is smaller in lists.
match = re.match(r'^(\d*\.?\d*)\s*(\S*)$', width_str)
if not match:
raise ValueError
@@ -294,6 +313,8 @@ def rstdim_to_latexdim(width_str: str, scale: int = 100) -> str:
res = '%sbp' % amount # convert to 'bp'
elif unit == '%':
res = r'%.3f\linewidth' % (float(amount) / 100.0)
+ elif unit in {'ch', 'rem', 'vw', 'vh', 'vmin', 'vmax', 'Q'}:
+ res = rf'{amount}\sphinx{unit}dimen'
else:
amount_float = float(amount) * scale / 100.0
if unit in {'', 'px'}:
@@ -302,8 +323,13 @@ def rstdim_to_latexdim(width_str: str, scale: int = 100) -> str:
res = '%.5fbp' % amount_float
elif unit == '%':
res = r'%.5f\linewidth' % (amount_float / 100.0)
+ elif unit in {'ch', 'rem', 'vw', 'vh', 'vmin', 'vmax', 'Q'}:
+ res = rf'{amount_float:.5f}\sphinx{unit}dimen'
else:
res = f'{amount_float:.5f}{unit}'
+ # Those further units are passed through and accepted "as is" by TeX:
+ # em and ex (both font dependent), bp, cm, mm, in, and pc.
+ # Non-CSS units (TeX only presumably) are cc, nc, dd, nd, and sp.
return res
@@ -327,7 +353,6 @@ def __init__(
self.in_footnote = 0
self.in_caption = 0
self.in_term = 0
- self.needs_linetrimming = 0
self.in_minipage = 0
# only used by figure inside an admonition
self.no_latex_floats = 0
@@ -781,8 +806,6 @@ def visit_desc(self, node: Element) -> None:
else:
self.body.append(BLANKLINE)
self.body.append(r'\begin{fulllineitems}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_desc(self, node: Element) -> None:
if self.in_desc_signature:
@@ -1084,8 +1107,6 @@ def visit_seealso(self, node: Element) -> None:
r'\begin{sphinxseealso}{%s:}' % admonitionlabels['seealso'] + CR
)
self.no_latex_floats += 1
- if self.table:
- self.table.has_problematic = True
def depart_seealso(self, node: Element) -> None:
self.body.append(BLANKLINE)
@@ -1146,23 +1167,17 @@ def visit_tabular_col_spec(self, node: Element) -> None:
raise nodes.SkipNode
def visit_table(self, node: Element) -> None:
- if len(self.tables) == 1:
- assert self.table is not None
- if self.table.get_table_type() == 'longtable':
- raise UnsupportedError(
- '%s:%s: longtable does not support nesting a table.'
- % (self.curfilestack[-1], node.line or '')
- )
- # change type of parent table to tabular
- # see https://groups.google.com/d/msg/sphinx-users/7m3NeOBixeo/9LKP2B4WBQAJ
- self.table.has_problematic = True
- elif len(self.tables) > 2:
+ table = Table(node)
+ assert table is not None
+ if len(self.tables) >= 1:
+ table.is_nested = True
+ # TODO: do we want > 2, > 1, or actually nothing here?
+ if len(self.tables) > 2:
raise UnsupportedError(
'%s:%s: deeply nested tables are not implemented.'
% (self.curfilestack[-1], node.line or '')
)
- table = Table(node)
self.tables.append(table)
if table.colsep is None:
table.colsep = '|' * (
@@ -1191,6 +1206,35 @@ def depart_table(self, node: Element) -> None:
assert self.table is not None
labels = self.hypertarget_to(node)
table_type = self.table.get_table_type()
+ if table_type == 'tabulary':
+ if len(self.tables) > 1:
+ # tell parents to not be tabulary
+ for _ in self.tables[:-1]:
+ _.has_problematic = True
+ else:
+ # We try to catch a tabularcolumns using L, R, J, C, or T.
+ # We can not simply test for presence in the colspec of
+ # one of those letters due to syntax such as >{\RaggedRight}.
+ # The test will not catch *{3}{J} syntax, but it would be
+ # overkill to try to implement LaTeX preamble mini-language.
+ if self.table.colspec:
+ assert len(self.table.colspec) > 2
+ # cf how self.table.colspec got set in visit_table().
+ _colspec_as_given = self.table.colspec[1:-2]
+ _colspec_stripped = re.sub(r'\{.*?\}', '', _colspec_as_given)
+ if any(c in _colspec_stripped for c in 'LRJCT'):
+ logger.warning(
+ __(
+ 'colspec %s was given which appears to use '
+ 'tabulary syntax. But this table can not be '
+ 'rendered as a tabulary; the given colspec will '
+ 'be ignored.'
+ ),
+ _colspec_as_given,
+ type='latex',
+ location=node,
+ )
+ self.table.colspec = ''
table = self.render(
table_type + '.tex.jinja', {'table': self.table, 'labels': labels}
)
@@ -1321,19 +1365,25 @@ def visit_entry(self, node: Element) -> None:
r'\sphinxmultirow{%d}{%d}{%%' % (cell.height, cell.cell_id) + CR
)
context = '}%' + CR + context
- if cell.width > 1 or cell.height > 1:
- self.body.append(
- r'\begin{varwidth}[t]{\sphinxcolwidth{%d}{%d}}'
- % (cell.width, self.table.colcount)
- + CR
- )
- context = (
- r'\par' + CR + r'\vskip-\baselineskip'
- r'\vbox{\hbox{\strut}}\end{varwidth}%' + CR + context
- )
- self.needs_linetrimming = 1
- if len(list(node.findall(nodes.paragraph))) >= 2:
- self.table.has_oldproblematic = True
+ # 8.3.0 wraps ALL cells contents in "varwidth". This fixes a
+ # number of issues and allows more usage of tabulary.
+ #
+ # "varwidth" usage allows a *tight fit* to multiple paragraphs,
+ # line blocks, bullet lists, enumerated lists; it is less
+ # successful at finding a tight fit for object descriptions or
+ # admonitions: the table will then probably occupy full-width, and
+ # columns containing such cells will auto-divide the total width
+ # equally.
+ #
+ # "\sphinxcolwidth" has an appropriate definition in
+ # sphinxlatextables.sty which in particular takes into account
+ # tabulary "two-pass" system.
+ self.body.append(
+ r'\begin{varwidth}[t]{\sphinxcolwidth{%d}{%d}}'
+ % (cell.width, self.table.colcount)
+ + CR
+ )
+ context = r'\sphinxbeforeendvarwidth' + CR + r'\end{varwidth}%' + CR + context
if (
isinstance(node.parent.parent, nodes.thead)
or (cell.col in self.table.stubs)
@@ -1346,23 +1396,20 @@ def visit_entry(self, node: Element) -> None:
pass
else:
self.body.append(r'\sphinxstyletheadfamily ')
- if self.needs_linetrimming:
- self.pushbody([])
+ self.pushbody([])
self.context.append(context)
def depart_entry(self, node: Element) -> None:
- if self.needs_linetrimming:
- self.needs_linetrimming = 0
- body = self.popbody()
+ assert self.table is not None
+ body = self.popbody()
- # Remove empty lines from top of merged cell
- while body and body[0] == CR:
- body.pop(0)
- self.body.extend(body)
+ # Remove empty lines from top of merged cell
+ while body and body[0] == CR:
+ body.pop(0)
+ self.body.extend(body)
self.body.append(self.context.pop())
- assert self.table is not None
cell = self.table.cell()
assert cell is not None
self.table.col += cell.width
@@ -1400,8 +1447,6 @@ def visit_acks(self, node: Element) -> None:
def visit_bullet_list(self, node: Element) -> None:
if not self.compact_list:
self.body.append(r'\begin{itemize}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_bullet_list(self, node: Element) -> None:
if not self.compact_list:
@@ -1439,8 +1484,6 @@ def get_nested_level(node: Element) -> int:
)
if 'start' in node:
self.body.append(r'\setcounter{%s}{%d}' % (enum, node['start'] - 1) + CR)
- if self.table:
- self.table.has_problematic = True
def depart_enumerated_list(self, node: Element) -> None:
self.body.append(r'\end{enumerate}' + CR)
@@ -1455,8 +1498,6 @@ def depart_list_item(self, node: Element) -> None:
def visit_definition_list(self, node: Element) -> None:
self.body.append(r'\begin{description}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_definition_list(self, node: Element) -> None:
self.body.append(r'\end{description}' + CR)
@@ -1496,8 +1537,6 @@ def depart_definition(self, node: Element) -> None:
def visit_field_list(self, node: Element) -> None:
self.body.append(r'\begin{quote}\begin{description}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_field_list(self, node: Element) -> None:
self.body.append(r'\end{description}\end{quote}' + CR)
@@ -1524,7 +1563,7 @@ def visit_paragraph(self, node: Element) -> None:
):
# insert blank line, if the paragraph follows a non-paragraph node in a compound
self.body.append(r'\noindent' + CR)
- elif index == 1 and isinstance(node.parent, nodes.footnote | footnotetext):
+ elif index == 1 and isinstance(node.parent, (nodes.footnote, footnotetext)):
# don't insert blank line, if the paragraph is second child of a footnote
# (first one is label node)
pass
@@ -1539,8 +1578,6 @@ def depart_paragraph(self, node: Element) -> None:
def visit_centered(self, node: Element) -> None:
self.body.append(CR + r'\begin{center}')
- if self.table:
- self.table.has_problematic = True
def depart_centered(self, node: Element) -> None:
self.body.append(CR + r'\end{center}')
@@ -1555,8 +1592,6 @@ def visit_hlist(self, node: Element) -> None:
r'\begin{itemize}\setlength{\itemsep}{0pt}'
r'\setlength{\parskip}{0pt}' + CR
)
- if self.table:
- self.table.has_problematic = True
def depart_hlist(self, node: Element) -> None:
self.compact_list -= 1
@@ -1752,8 +1787,6 @@ def depart_legend(self, node: Element) -> None:
def visit_admonition(self, node: Element) -> None:
self.body.append(CR + r'\begin{sphinxadmonition}{note}')
self.no_latex_floats += 1
- if self.table:
- self.table.has_problematic = True
def depart_admonition(self, node: Element) -> None:
self.body.append(r'\end{sphinxadmonition}' + CR)
@@ -1765,8 +1798,6 @@ def _visit_named_admonition(self, node: Element) -> None:
CR + r'\begin{sphinxadmonition}{%s}{%s:}' % (node.tagname, label)
)
self.no_latex_floats += 1
- if self.table:
- self.table.has_problematic = True
def _depart_named_admonition(self, node: Element) -> None:
self.body.append(r'\end{sphinxadmonition}' + CR)
@@ -1843,30 +1874,10 @@ def add_target(id: str) -> None:
and node['refid'] == prev_node['refid']
):
# a target for a hyperlink reference having alias
- pass
+ return
else:
add_target(node['refid'])
- # Temporary fix for https://github.com/sphinx-doc/sphinx/issues/11093
- # TODO: investigate if a more elegant solution exists
- # (see comments of https://github.com/sphinx-doc/sphinx/issues/11093)
- if node.get('ismod', False):
- # Detect if the previous nodes are label targets. If so, remove
- # the refid thereof from node['ids'] to avoid duplicated ids.
- def has_dup_label(sib: Node | None) -> bool:
- return isinstance(sib, nodes.target) and sib.get('refid') in node['ids']
-
- prev = get_prev_node(node)
- if has_dup_label(prev):
- ids = node['ids'][:] # copy to avoid side-effects
- while has_dup_label(prev):
- ids.remove(prev['refid']) # type: ignore[index]
- prev = get_prev_node(prev) # type: ignore[arg-type]
- else:
- ids = iter(node['ids']) # read-only iterator
- else:
- ids = iter(node['ids']) # read-only iterator
-
- for id in ids:
+ for id in node['ids']:
add_target(id)
def depart_target(self, node: Element) -> None:
@@ -1962,7 +1973,7 @@ def visit_reference(self, node: Element) -> None:
uri = node.get('refuri', '')
if not uri and node.get('refid'):
uri = '%' + self.curfilestack[-1] + '#' + node['refid']
- if self.in_title or not uri:
+ if not uri:
self.context.append('')
elif uri.startswith('#'):
# references to labels in the same document
@@ -2265,8 +2276,6 @@ def visit_line_block(self, node: Element) -> None:
self.body.append(r'\begin{DUlineblock}{\DUlineblockindent}' + CR)
else:
self.body.append(CR + r'\begin{DUlineblock}{0em}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_line_block(self, node: Element) -> None:
self.body.append(r'\end{DUlineblock}' + CR)
@@ -2278,18 +2287,16 @@ def visit_block_quote(self, node: Element) -> None:
done = 0
if len(node.children) == 1:
child = node.children[0]
- if isinstance(child, nodes.bullet_list | nodes.enumerated_list):
+ if isinstance(child, (nodes.bullet_list, nodes.enumerated_list)):
done = 1
if not done:
self.body.append(r'\begin{quote}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_block_quote(self, node: Element) -> None:
done = 0
if len(node.children) == 1:
child = node.children[0]
- if isinstance(child, nodes.bullet_list | nodes.enumerated_list):
+ if isinstance(child, (nodes.bullet_list, nodes.enumerated_list)):
done = 1
if not done:
self.body.append(r'\end{quote}' + CR)
@@ -2323,8 +2330,6 @@ def depart_option_group(self, node: Element) -> None:
def visit_option_list(self, node: Element) -> None:
self.body.append(r'\begin{optionlist}{3cm}' + CR)
- if self.table:
- self.table.has_problematic = True
def depart_option_list(self, node: Element) -> None:
self.body.append(r'\end{optionlist}' + CR)
diff --git a/sphinx/writers/manpage.py b/sphinx/writers/manpage.py
index 171761fa2b0..282cd0ed14c 100644
--- a/sphinx/writers/manpage.py
+++ b/sphinx/writers/manpage.py
@@ -5,8 +5,7 @@
from typing import TYPE_CHECKING, cast
from docutils import nodes
-from docutils.writers.manpage import Translator as BaseTranslator
-from docutils.writers.manpage import Writer
+from docutils.writers import manpage
from sphinx import addnodes
from sphinx.locale import _, admonitionlabels
@@ -26,7 +25,7 @@
logger = logging.getLogger(__name__)
-class ManualPageWriter(Writer): # type: ignore[misc]
+class ManualPageWriter(manpage.Writer): # type: ignore[misc]
def __init__(self, builder: Builder) -> None:
super().__init__()
self.builder = builder
@@ -71,7 +70,7 @@ def apply(self, **kwargs: Any) -> None:
node.parent.remove(node)
-class ManualPageTranslator(SphinxTranslator, BaseTranslator): # type: ignore[misc]
+class ManualPageTranslator(SphinxTranslator, manpage.Translator):
"""Custom man page translator."""
_docinfo: dict[str, Any] = {}
@@ -130,17 +129,17 @@ def depart_start_of_file(self, node: Element) -> None:
# Top-level nodes for descriptions
##################################
- def visit_desc(self, node: Element) -> None:
+ def visit_desc(self, node: nodes.definition_list) -> None:
self.visit_definition_list(node)
- def depart_desc(self, node: Element) -> None:
+ def depart_desc(self, node: nodes.definition_list) -> None:
self.depart_definition_list(node)
- def visit_desc_signature(self, node: Element) -> None:
- self.visit_definition_list_item(node)
+ def visit_desc_signature(self, node: nodes.term) -> None:
+ self.visit_definition_list_item(node) # type: ignore[arg-type]
self.visit_term(node)
- def depart_desc_signature(self, node: Element) -> None:
+ def depart_desc_signature(self, node: nodes.term) -> None:
self.depart_term(node)
def visit_desc_signature_line(self, node: Element) -> None:
@@ -149,10 +148,10 @@ def visit_desc_signature_line(self, node: Element) -> None:
def depart_desc_signature_line(self, node: Element) -> None:
self.body.append(' ')
- def visit_desc_content(self, node: Element) -> None:
+ def visit_desc_content(self, node: nodes.definition) -> None:
self.visit_definition(node)
- def depart_desc_content(self, node: Element) -> None:
+ def depart_desc_content(self, node: nodes.definition) -> None:
self.depart_definition(node)
def visit_desc_inline(self, node: Element) -> None:
@@ -231,25 +230,25 @@ def depart_desc_annotation(self, node: Element) -> None:
##############################################
- def visit_versionmodified(self, node: Element) -> None:
+ def visit_versionmodified(self, node: nodes.paragraph) -> None:
self.visit_paragraph(node)
- def depart_versionmodified(self, node: Element) -> None:
+ def depart_versionmodified(self, node: nodes.paragraph) -> None:
self.depart_paragraph(node)
# overwritten -- don't make whole of term bold if it includes strong node
- def visit_term(self, node: Element) -> None:
+ def visit_term(self, node: nodes.term) -> None:
if any(node.findall(nodes.strong)):
self.body.append('\n')
else:
super().visit_term(node)
# overwritten -- we don't want source comments to show up
- def visit_comment(self, node: Element) -> None:
+ def visit_comment(self, node: Element) -> None: # type: ignore[override]
raise nodes.SkipNode
# overwritten -- added ensure_eol()
- def visit_footnote(self, node: Element) -> None:
+ def visit_footnote(self, node: nodes.footnote) -> None:
self.ensure_eol()
super().visit_footnote(node)
@@ -264,10 +263,10 @@ def visit_rubric(self, node: Element) -> None:
def depart_rubric(self, node: Element) -> None:
self.body.append('\n')
- def visit_seealso(self, node: Element) -> None:
+ def visit_seealso(self, node: nodes.admonition) -> None:
self.visit_admonition(node, 'seealso')
- def depart_seealso(self, node: Element) -> None:
+ def depart_seealso(self, node: nodes.admonition) -> None:
self.depart_admonition(node)
def visit_productionlist(self, node: Element) -> None:
@@ -291,7 +290,7 @@ def visit_image(self, node: Element) -> None:
raise nodes.SkipNode
# overwritten -- don't visit inner marked up nodes
- def visit_reference(self, node: Element) -> None:
+ def visit_reference(self, node: nodes.reference) -> None:
uri = node.get('refuri', '')
is_safe_to_click = uri.startswith(('mailto:', 'http:', 'https:', 'ftp:'))
if is_safe_to_click:
@@ -301,7 +300,7 @@ def visit_reference(self, node: Element) -> None:
self.body.append(self.defs['reference'][0])
# avoid repeating escaping code... fine since
# visit_Text calls astext() and only works on that afterwards
- self.visit_Text(node)
+ self.visit_Text(node) # type: ignore[arg-type]
self.body.append(self.defs['reference'][1])
if uri and not uri.startswith('#'):
@@ -369,10 +368,10 @@ def visit_acks(self, node: Element) -> None:
self.body.append('\n')
raise nodes.SkipNode
- def visit_hlist(self, node: Element) -> None:
+ def visit_hlist(self, node: nodes.bullet_list) -> None:
self.visit_bullet_list(node)
- def depart_hlist(self, node: Element) -> None:
+ def depart_hlist(self, node: nodes.bullet_list) -> None:
self.depart_bullet_list(node)
def visit_hlistcol(self, node: Element) -> None:
@@ -381,16 +380,16 @@ def visit_hlistcol(self, node: Element) -> None:
def depart_hlistcol(self, node: Element) -> None:
pass
- def visit_literal_emphasis(self, node: Element) -> None:
+ def visit_literal_emphasis(self, node: nodes.emphasis) -> None:
return self.visit_emphasis(node)
- def depart_literal_emphasis(self, node: Element) -> None:
+ def depart_literal_emphasis(self, node: nodes.emphasis) -> None:
return self.depart_emphasis(node)
- def visit_literal_strong(self, node: Element) -> None:
+ def visit_literal_strong(self, node: nodes.strong) -> None:
return self.visit_strong(node)
- def depart_literal_strong(self, node: Element) -> None:
+ def depart_literal_strong(self, node: nodes.strong) -> None:
return self.depart_strong(node)
def visit_abbreviation(self, node: Element) -> None:
@@ -399,14 +398,14 @@ def visit_abbreviation(self, node: Element) -> None:
def depart_abbreviation(self, node: Element) -> None:
pass
- def visit_manpage(self, node: Element) -> None:
+ def visit_manpage(self, node: nodes.strong) -> None:
return self.visit_strong(node)
- def depart_manpage(self, node: Element) -> None:
+ def depart_manpage(self, node: nodes.strong) -> None:
return self.depart_strong(node)
# overwritten: handle section titles better than in 0.6 release
- def visit_caption(self, node: Element) -> None:
+ def visit_caption(self, node: nodes.caption) -> None:
if (
isinstance(node.parent, nodes.container)
and node.parent.get('literal_block')
@@ -415,7 +414,7 @@ def visit_caption(self, node: Element) -> None:
else:
super().visit_caption(node)
- def depart_caption(self, node: Element) -> None:
+ def depart_caption(self, node: nodes.caption) -> None:
if (
isinstance(node.parent, nodes.container)
and node.parent.get('literal_block')
@@ -425,7 +424,7 @@ def depart_caption(self, node: Element) -> None:
super().depart_caption(node)
# overwritten: handle section titles better than in 0.6 release
- def visit_title(self, node: Element) -> None:
+ def visit_title(self, node: nodes.title) -> None:
if isinstance(node.parent, addnodes.seealso):
self.body.append('.IP "')
return None
@@ -438,7 +437,7 @@ def visit_title(self, node: Element) -> None:
raise nodes.SkipNode
return super().visit_title(node)
- def depart_title(self, node: Element) -> None:
+ def depart_title(self, node: nodes.title) -> None:
if isinstance(node.parent, addnodes.seealso):
self.body.append('"\n')
return None
diff --git a/sphinx/writers/texinfo.py b/sphinx/writers/texinfo.py
index b4f1272d49d..0731f168733 100644
--- a/sphinx/writers/texinfo.py
+++ b/sphinx/writers/texinfo.py
@@ -306,7 +306,7 @@ def collect_node_menus(self) -> None:
# try to find a suitable "Top" node
title = self.document.next_node(nodes.title)
top = title.parent if title else self.document
- if not isinstance(top, nodes.document | nodes.section):
+ if not isinstance(top, (nodes.document, nodes.section)):
top = self.document
if top is not self.document:
entries = node_menus[top['node_name']]
@@ -416,7 +416,7 @@ def add_menu_entries(
name = self.node_names[entry]
# special formatting for entries that are divided by an em-dash
try:
- parts = reg.split(name, 1)
+ parts = reg.split(name, maxsplit=1)
except TypeError:
# could be a gettext proxy
parts = [name]
@@ -638,7 +638,7 @@ def visit_title(self, node: Element) -> None:
parent = node.parent
if isinstance(parent, nodes.table):
return
- if isinstance(parent, nodes.Admonition | nodes.sidebar | nodes.topic):
+ if isinstance(parent, (nodes.Admonition, nodes.sidebar, nodes.topic)):
raise nodes.SkipNode
if not isinstance(parent, nodes.section):
logger.warning(
@@ -711,7 +711,7 @@ def depart_target(self, node: Element) -> None:
def visit_reference(self, node: Element) -> None:
# an xref's target is displayed in Info so we ignore a few
# cases for the sake of appearance
- if isinstance(node.parent, nodes.title | addnodes.desc_type):
+ if isinstance(node.parent, (nodes.title, addnodes.desc_type)):
return
if len(node) != 0 and isinstance(node[0], nodes.image):
return
@@ -748,7 +748,7 @@ def visit_reference(self, node: Element) -> None:
uri = self.escape_arg(uri)
id = 'Top'
if '#' in uri:
- uri, id = uri.split('#', 1)
+ uri, _, id = uri.partition('#')
id = self.escape_id(id)
name = self.escape_menu(name)
if name == id:
@@ -886,7 +886,7 @@ def depart_collected_footnote(self, node: Element) -> None:
def visit_footnote_reference(self, node: Element) -> None:
num = node.astext().strip()
try:
- footnode, used = self.footnotestack[-1][num]
+ footnode, _used = self.footnotestack[-1][num]
except (KeyError, IndexError) as exc:
raise nodes.SkipNode from exc
# footnotes are repeated for each reference
@@ -1002,7 +1002,7 @@ def visit_term(self, node: Element) -> None:
self.add_anchor(id, node)
# anchors and indexes need to go in front
for n in node[::]:
- if isinstance(n, addnodes.index | nodes.target):
+ if isinstance(n, (addnodes.index, nodes.target)):
n.walkabout(self)
node.remove(n)
self.body.append('\n%s ' % self.at_item_x)
diff --git a/sphinx/writers/xml.py b/sphinx/writers/xml.py
index 51f77ee2f01..e9877825de6 100644
--- a/sphinx/writers/xml.py
+++ b/sphinx/writers/xml.py
@@ -4,7 +4,7 @@
from typing import TYPE_CHECKING
-from docutils.writers.docutils_xml import Writer as BaseXMLWriter
+from docutils.writers import docutils_xml
if TYPE_CHECKING:
from typing import Any
@@ -12,7 +12,7 @@
from sphinx.builders import Builder
-class XMLWriter(BaseXMLWriter): # type: ignore[misc]
+class XMLWriter(docutils_xml.Writer): # type: ignore[misc]
output: str
def __init__(self, builder: Builder) -> None:
@@ -34,7 +34,7 @@ def translate(self, *args: Any, **kwargs: Any) -> None:
self.output = ''.join(visitor.output) # type: ignore[attr-defined]
-class PseudoXMLWriter(BaseXMLWriter): # type: ignore[misc]
+class PseudoXMLWriter(docutils_xml.Writer): # type: ignore[misc]
supported = ('pprint', 'pformat', 'pseudoxml')
"""Formats this writer supports."""
diff --git a/tests/conftest.py b/tests/conftest.py
index 8860e24e65c..b884f132f79 100644
--- a/tests/conftest.py
+++ b/tests/conftest.py
@@ -12,21 +12,16 @@
import sphinx
import sphinx.locale
-import sphinx.pycode
from sphinx.testing.util import _clean_up_global_state
+from tests.utils import TEST_ROOTS_DIR
+
if TYPE_CHECKING:
from collections.abc import Callable, Iterator
from typing import Any
from sphinx.testing.util import SphinxTestApp
-_TESTS_ROOT = Path(__file__).resolve().parent
-if 'CI' in os.environ and (_TESTS_ROOT / 'roots-read-only').is_dir():
- _ROOTS_DIR = _TESTS_ROOT / 'roots-read-only'
-else:
- _ROOTS_DIR = _TESTS_ROOT / 'roots'
-
def _init_console(
locale_dir: str | os.PathLike[str] | None = sphinx.locale._LOCALE_DIR,
@@ -46,14 +41,14 @@ def _init_console(
pytest_plugins = ['sphinx.testing.fixtures']
# Exclude 'roots' dirs for pytest test collector
-collect_ignore = ['roots']
+collect_ignore = ['roots', 'roots-read-only']
os.environ['SPHINX_AUTODOC_RELOAD_MODULES'] = '1'
@pytest.fixture(scope='session')
def rootdir() -> Path:
- return _ROOTS_DIR
+ return TEST_ROOTS_DIR
def pytest_report_header(config: pytest.Config) -> str:
@@ -62,7 +57,7 @@ def pytest_report_header(config: pytest.Config) -> str:
]
if sys.version_info[:2] >= (3, 13):
lines.append(f'GIL enabled?: {sys._is_gil_enabled()}')
- lines.append(f'test roots directory: {_ROOTS_DIR}')
+ lines.append(f'test roots directory: {TEST_ROOTS_DIR}')
if hasattr(config, '_tmp_path_factory'):
lines.append(f'base tmp_path: {config._tmp_path_factory.getbasetemp()}')
return '\n'.join(lines)
diff --git a/tests/js/fixtures/cpp/searchindex.js b/tests/js/fixtures/cpp/searchindex.js
index 42adb88db92..10ed84d7ff0 100644
--- a/tests/js/fixtures/cpp/searchindex.js
+++ b/tests/js/fixtures/cpp/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"gener":0,"i":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"thei":0,"thi":0,"token":0,"us":0,"web":0,"would":0},"titles":["<no title>"],"titleterms":{}})
\ No newline at end of file
+Search.setIndex({"alltitles":{},"docnames":["index"],"envversion":{"sphinx":66,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{"sphinx (c++ class)":[[0,"_CPPv46Sphinx",false]]},"objects":{"":[[0,0,1,"_CPPv46Sphinx","Sphinx"]]},"objnames":{"0":["cpp","class","C++ class"]},"objtypes":{"0":"cpp:class"},"terms":{"The":0,"This":0,"becaus":0,"c":0,"can":0,"cardin":0,"challeng":0,"charact":0,"class":0,"descript":0,"drop":0,"engin":0,"fixtur":0,"frequent":0,"generat":0,"index":0,"inflat":0,"mathemat":0,"occur":0,"often":0,"project":0,"punctuat":0,"queri":0,"relat":0,"sampl":0,"search":0,"size":0,"sphinx":0,"term":0,"token":0,"use":0,"web":0},"titles":["<no title>"],"titleterms":{}})
\ No newline at end of file
diff --git a/tests/js/fixtures/multiterm/searchindex.js b/tests/js/fixtures/multiterm/searchindex.js
index 6f27d39329b..bd732522b3d 100644
--- a/tests/js/fixtures/multiterm/searchindex.js
+++ b/tests/js/fixtures/multiterm/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"adjac":0,"all":0,"an":0,"appear":0,"applic":0,"ar":0,"built":0,"can":0,"check":0,"contain":0,"do":0,"document":0,"doesn":0,"each":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"i":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"other":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"same":0,"search":0,"successfulli":0,"support":0,"t":0,"term":0,"test":0,"thi":0,"time":0,"us":0,"when":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"Main Page":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":66,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"At":0,"This":0,"adjac":0,"appear":0,"applic":0,"built":0,"can":0,"check":0,"contain":0,"document":0,"doesn":0,"fixtur":0,"format":0,"function":0,"futur":0,"html":0,"includ":0,"match":0,"messag":0,"multipl":0,"multiterm":0,"order":0,"output":0,"perform":0,"perhap":0,"phrase":0,"project":0,"queri":0,"requir":0,"search":0,"success":0,"support":0,"t":0,"term":0,"test":0,"time":0,"use":0,"will":0,"write":0},"titles":["Main Page"],"titleterms":{"main":0,"page":0}})
\ No newline at end of file
diff --git a/tests/js/fixtures/partial/searchindex.js b/tests/js/fixtures/partial/searchindex.js
index cd9dbabb149..8a65718e2ea 100644
--- a/tests/js/fixtures/partial/searchindex.js
+++ b/tests/js/fixtures/partial/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"ar":0,"both":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"i":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"should":0,"term":0,"thi":0,"titl":0,"us":0,"when":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"sphinx_utils module":[[0,null]]},"docnames":["index"],"envversion":{"sphinx":66,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst"],"indexentries":{},"objects":{},"objnames":{},"objtypes":{},"terms":{"This":0,"built":0,"confirm":0,"document":0,"function":0,"html":0,"includ":0,"input":0,"javascript":0,"match":0,"partial":0,"possibl":0,"project":0,"provid":0,"restructuredtext":0,"sampl":0,"search":0,"term":0,"titl":0,"use":0},"titles":["sphinx_utils module"],"titleterms":{"modul":0,"sphinx_util":0}})
\ No newline at end of file
diff --git a/tests/js/fixtures/titles/searchindex.js b/tests/js/fixtures/titles/searchindex.js
index cb9abd1da07..fe325c7742d 100644
--- a/tests/js/fixtures/titles/searchindex.js
+++ b/tests/js/fixtures/titles/searchindex.js
@@ -1 +1 @@
-Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":65,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"":[0,1],"A":1,"By":0,"For":[0,1],"In":[0,1],"against":0,"align":0,"also":1,"an":0,"answer":0,"appear":1,"ar":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"both":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpu":1,"could":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"from":0,"function":1,"ha":1,"handl":0,"happen":1,"head":0,"help":0,"highli":[0,1],"how":0,"i":[0,1],"improv":0,"inform":0,"intend":0,"issu":[0,1],"itself":1,"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"more":0,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"other":0,"over":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"rel":0,"research":0,"result":1,"retriev":0,"sai":0,"same":1,"search":[0,1],"seem":0,"softwar":1,"some":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"than":[0,1],"thei":0,"them":0,"thi":0,"time":0,"titl":0,"two":0,"typic":0,"us":0,"user":[0,1],"we":[0,1],"when":0,"whether":1,"which":0,"within":0,"word":0,"would":[0,1]},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}})
\ No newline at end of file
+Search.setIndex({"alltitles":{"Main Page":[[0,null]],"Relevance":[[0,"relevance"],[1,null]],"Result Scoring":[[0,"result-scoring"]]},"docnames":["index","relevance"],"envversion":{"sphinx":66,"sphinx.domains.c":3,"sphinx.domains.changeset":1,"sphinx.domains.citation":1,"sphinx.domains.cpp":9,"sphinx.domains.index":1,"sphinx.domains.javascript":3,"sphinx.domains.math":2,"sphinx.domains.python":4,"sphinx.domains.rst":2,"sphinx.domains.std":2},"filenames":["index.rst","relevance.rst"],"indexentries":{"example (class in relevance)":[[0,"relevance.Example",false]],"module":[[0,"module-relevance",false]],"relevance":[[0,"index-1",false],[0,"module-relevance",false]],"relevance (relevance.example attribute)":[[0,"relevance.Example.relevance",false]],"scoring":[[0,"index-0",true]]},"objects":{"":[[0,0,0,"-","relevance"]],"relevance":[[0,1,1,"","Example"]],"relevance.Example":[[0,2,1,"","relevance"]]},"objnames":{"0":["py","module","Python module"],"1":["py","class","Python class"],"2":["py","attribute","Python attribute"]},"objtypes":{"0":"py:module","1":"py:class","2":"py:attribute"},"terms":{"A":1,"By":0,"For":[0,1],"In":[0,1],"This":0,"align":0,"also":1,"answer":0,"appear":1,"area":0,"ask":0,"assign":0,"attempt":0,"attribut":0,"built":1,"can":[0,1],"class":0,"code":[0,1],"collect":0,"consid":1,"contain":0,"context":0,"corpus":1,"demonstr":0,"describ":1,"detail":1,"determin":[0,1],"docstr":0,"document":[0,1],"domain":1,"dure":0,"engin":0,"evalu":0,"exampl":[0,1],"extract":0,"feedback":0,"find":0,"found":0,"function":1,"handl":0,"happen":1,"head":0,"help":0,"high":[0,1],"improv":0,"inform":0,"intend":0,"issu":[0,1],"knowledg":0,"languag":1,"less":1,"like":[0,1],"mani":0,"match":0,"mention":1,"name":[0,1],"numer":0,"object":0,"often":0,"one":[0,1],"onli":[0,1],"order":0,"page":1,"part":1,"particular":0,"present":0,"printf":1,"program":1,"project":0,"queri":[0,1],"question":0,"re":0,"relat":0,"research":0,"result":1,"retriev":0,"s":[0,1],"say":0,"search":[0,1],"seem":0,"softwar":1,"sphinx":0,"straightforward":1,"subject":0,"subsect":0,"term":[0,1],"test":0,"text":0,"time":0,"titl":0,"two":0,"typic":0,"use":0,"user":[0,1],"whether":1,"will":0,"within":0,"word":0},"titles":["Main Page","Relevance"],"titleterms":{"main":0,"page":0,"relev":[0,1],"result":0,"score":0}})
\ No newline at end of file
diff --git a/tests/js/jasmine-browser.mjs b/tests/js/jasmine-browser.mjs
index b84217fd8c5..f11c04b95b7 100644
--- a/tests/js/jasmine-browser.mjs
+++ b/tests/js/jasmine-browser.mjs
@@ -1,28 +1,26 @@
export default {
srcDir: ".",
srcFiles: [
- 'sphinx/themes/basic/static/doctools.js',
- 'sphinx/themes/basic/static/searchtools.js',
- 'sphinx/themes/basic/static/sphinx_highlight.js',
- 'tests/js/fixtures/**/*.js',
- 'tests/js/documentation_options.js',
- 'tests/js/language_data.js',
+ "sphinx/themes/basic/static/doctools.js",
+ "sphinx/themes/basic/static/searchtools.js",
+ "sphinx/themes/basic/static/sphinx_highlight.js",
+ "tests/js/fixtures/**/*.js",
+ "tests/js/documentation_options.js",
+ "tests/js/language_data.js",
],
specDir: "tests/js",
- specFiles: [
- '**/*.spec.js',
- ],
+ specFiles: ["**/*.spec.js"],
helpers: [],
env: {
stopSpecOnExpectationFailure: false,
stopOnSpecFailure: false,
- random: true
+ random: true,
},
listenAddress: "127.0.0.1",
hostname: "127.0.0.1",
browser: {
- name: "headlessFirefox"
- }
+ name: "headlessFirefox",
+ },
};
diff --git a/tests/js/language_data.js b/tests/js/language_data.js
index 15e4a8447f0..c98e91ff6e2 100644
--- a/tests/js/language_data.js
+++ b/tests/js/language_data.js
@@ -3,17 +3,16 @@
* namely the list of stopwords, stemmer, scorer and splitter.
*/
-var stopwords = [];
+const stopwords = new Set([]);
+window.stopwords = stopwords; // Export to global scope
-
-/* Non-minified version is copied as a separate JS file, if available */
+/* Non-minified versions are copied as separate JavaScript files, if available */
/**
* Dummy stemmer for languages without stemming rules.
*/
-var Stemmer = function() {
- this.stemWord = function(w) {
+var Stemmer = function () {
+ this.stemWord = function (w) {
return w;
- }
-}
-
+ };
+};
diff --git a/tests/js/searchtools.spec.js b/tests/js/searchtools.spec.js
index 809fd19d0f4..d00689c907c 100644
--- a/tests/js/searchtools.spec.js
+++ b/tests/js/searchtools.spec.js
@@ -1,10 +1,9 @@
-describe('Basic html theme search', function() {
-
+describe("Basic html theme search", function () {
function loadFixture(name) {
- req = new XMLHttpRequest();
- req.open("GET", `__src__/tests/js/fixtures/${name}`, false);
- req.send(null);
- return req.responseText;
+ req = new XMLHttpRequest();
+ req.open("GET", `__src__/tests/js/fixtures/${name}`, false);
+ req.send(null);
+ return req.responseText;
}
function checkRanking(expectedRanking, results) {
@@ -16,7 +15,11 @@ describe('Basic html theme search', function() {
let [expectedPage, expectedTitle, expectedTarget] = nextExpected;
let [page, title, target] = result;
- if (page == expectedPage && title == expectedTitle && target == expectedTarget) {
+ if (
+ page == expectedPage
+ && title == expectedTitle
+ && target == expectedTarget
+ ) {
[nextExpected, ...remainingItems] = remainingItems;
}
}
@@ -25,13 +28,14 @@ describe('Basic html theme search', function() {
expect(nextExpected).toEqual(undefined);
}
- describe('terms search', function() {
-
- it('should find "C++" when in index', function() {
+ describe("terms search", function () {
+ it('should find "C++" when in index', function () {
eval(loadFixture("cpp/searchindex.js"));
- [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('C++');
+ [_searchQuery, searchterms, excluded, ..._remainingItems] =
+ Search._parseQuery("C++");
+ // prettier-ignore
hits = [[
"index",
"<no title>",
@@ -44,10 +48,12 @@ describe('Basic html theme search', function() {
expect(Search.performTermsSearch(searchterms, excluded)).toEqual(hits);
});
- it('should be able to search for multiple terms', function() {
+ it("should be able to search for multiple terms", function () {
eval(loadFixture("multiterm/searchindex.js"));
- [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('main page');
+ [_searchQuery, searchterms, excluded, ..._remainingItems] =
+ Search._parseQuery("main page");
+ // prettier-ignore
hits = [[
'index',
'Main Page',
@@ -60,11 +66,13 @@ describe('Basic html theme search', function() {
expect(Search.performTermsSearch(searchterms, excluded)).toEqual(hits);
});
- it('should partially-match "sphinx" when in title index', function() {
+ it('should partially-match "sphinx" when in title index', function () {
eval(loadFixture("partial/searchindex.js"));
- [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('sphinx');
+ [_searchQuery, searchterms, excluded, ..._remainingItems] =
+ Search._parseQuery("sphinx");
+ // prettier-ignore
hits = [[
"index",
"sphinx_utils module",
@@ -77,13 +85,15 @@ describe('Basic html theme search', function() {
expect(Search.performTermsSearch(searchterms, excluded)).toEqual(hits);
});
- it('should partially-match within "possible" when in term index', function() {
+ it('should partially-match within "possible" when in term index', function () {
eval(loadFixture("partial/searchindex.js"));
- [_searchQuery, searchterms, excluded, ..._remainingItems] = Search._parseQuery('ossibl');
+ [_searchQuery, searchterms, excluded, ..._remainingItems] =
+ Search._parseQuery("ossibl");
terms = Search._index.terms;
titleterms = Search._index.titleterms;
+ // prettier-ignore
hits = [[
"index",
"sphinx_utils module",
@@ -93,18 +103,19 @@ describe('Basic html theme search', function() {
"index.rst",
"text"
]];
- expect(Search.performTermsSearch(searchterms, excluded, terms, titleterms)).toEqual(hits);
+ expect(
+ Search.performTermsSearch(searchterms, excluded, terms, titleterms),
+ ).toEqual(hits);
});
-
});
- describe('aggregation of search results', function() {
-
- it('should combine document title and document term matches', function() {
+ describe("aggregation of search results", function () {
+ it("should combine document title and document term matches", function () {
eval(loadFixture("multiterm/searchindex.js"));
- searchParameters = Search._parseQuery('main page');
+ searchParameters = Search._parseQuery("main page");
+ // prettier-ignore
hits = [
[
'index',
@@ -118,11 +129,9 @@ describe('Basic html theme search', function() {
];
expect(Search._performSearch(...searchParameters)).toEqual(hits);
});
-
});
- describe('search result ranking', function() {
-
+ describe("search result ranking", function () {
/*
* These tests should not proscribe precise expected ordering of search
* results; instead each test case should describe a single relevance rule
@@ -137,95 +146,96 @@ describe('Basic html theme search', function() {
* [1] - https://github.com/sphinx-doc/sphinx.git/
*/
- it('should score a code module match above a page-title match', function() {
+ it("should score a code module match above a page-title match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['index', 'relevance', '#module-relevance'], /* py:module documentation */
['relevance', 'Relevance', ''], /* main title */
];
- searchParameters = Search._parseQuery('relevance');
+ searchParameters = Search._parseQuery("relevance");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
- it('should score a main-title match above an object member match', function() {
+ it("should score a main-title match above an object member match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['relevance', 'Relevance', ''], /* main title */
['index', 'relevance.Example.relevance', '#relevance.Example.relevance'], /* py:class attribute */
];
- searchParameters = Search._parseQuery('relevance');
+ searchParameters = Search._parseQuery("relevance");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
- it('should score a title match above a standard index entry match', function() {
+ it("should score a title match above a standard index entry match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['relevance', 'Relevance', ''], /* title */
['index', 'Main Page', '#index-1'], /* index entry */
];
- searchParameters = Search._parseQuery('relevance');
+ searchParameters = Search._parseQuery("relevance");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
- it('should score a priority index entry match above a title match', function() {
+ it("should score a priority index entry match above a title match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['index', 'Main Page', '#index-0'], /* index entry */
['index', 'Main Page > Result Scoring', '#result-scoring'], /* title */
];
- searchParameters = Search._parseQuery('scoring');
+ searchParameters = Search._parseQuery("scoring");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
- it('should score a main-title match above a subheading-title match', function() {
+ it("should score a main-title match above a subheading-title match", function () {
eval(loadFixture("titles/searchindex.js"));
+ // prettier-ignore
expectedRanking = [
['relevance', 'Relevance', ''], /* main title */
['index', 'Main Page > Relevance', '#relevance'], /* subsection heading title */
];
- searchParameters = Search._parseQuery('relevance');
+ searchParameters = Search._parseQuery("relevance");
results = Search._performSearch(...searchParameters);
checkRanking(expectedRanking, results);
});
-
});
- describe('can handle edge-case search queries', function() {
-
- it('does not find the javascript prototype property in unrelated documents', function() {
+ describe("can handle edge-case search queries", function () {
+ it("does not find the javascript prototype property in unrelated documents", function () {
eval(loadFixture("partial/searchindex.js"));
- searchParameters = Search._parseQuery('__proto__');
+ searchParameters = Search._parseQuery("__proto__");
+ // prettier-ignore
hits = [];
expect(Search._performSearch(...searchParameters)).toEqual(hits);
});
-
});
-
});
-describe("htmlToText", function() {
-
+describe("htmlToText", function () {
const testHTML = `
@@ -257,44 +267,47 @@ describe("htmlToText", function() {
`;
it("basic case", () => {
- expect(Search.htmlToText(testHTML).trim().split(/\s+/)).toEqual([
- 'Getting', 'Started', 'Some', 'text',
- 'Other', 'Section', 'Other', 'text',
- 'Yet', 'Another', 'Section', 'More', 'text'
- ]);
+ expect(Search.htmlToText(testHTML).trim().split(/\s+/)).toEqual(
+ /* prettier-ignore */ [
+ "Getting", "Started", "Some", "text",
+ "Other", "Section", "Other", "text",
+ "Yet", "Another", "Section", "More", "text"
+ ],
+ );
});
it("will start reading from the anchor", () => {
- expect(Search.htmlToText(testHTML, '#other-section').trim().split(/\s+/)).toEqual(['Other', 'Section', 'Other', 'text']);
+ expect(
+ Search.htmlToText(testHTML, "#other-section").trim().split(/\s+/),
+ ).toEqual(["Other", "Section", "Other", "text"]);
});
});
// Regression test for https://github.com/sphinx-doc/sphinx/issues/3150
-describe('splitQuery regression tests', () => {
-
- it('can split English words', () => {
- const parts = splitQuery(' Hello World ')
- expect(parts).toEqual(['Hello', 'World'])
- })
-
- it('can split special characters', () => {
- const parts = splitQuery('Pin-Code')
- expect(parts).toEqual(['Pin', 'Code'])
- })
-
- it('can split Chinese characters', () => {
- const parts = splitQuery('Hello from 中国 上海')
- expect(parts).toEqual(['Hello', 'from', '中国', '上海'])
- })
-
- it('can split Emoji (surrogate pair) characters. It should keep emojis.', () => {
- const parts = splitQuery('😁😁')
- expect(parts).toEqual(['😁😁'])
- })
-
- it('can split umlauts. It should keep umlauts.', () => {
- const parts = splitQuery('Löschen Prüfung Abändern ærlig spørsmål')
- expect(parts).toEqual(['Löschen', 'Prüfung', 'Abändern', 'ærlig', 'spørsmål'])
- })
-
-})
+describe("splitQuery regression tests", () => {
+ it("can split English words", () => {
+ const parts = splitQuery(" Hello World ");
+ expect(parts).toEqual(["Hello", "World"]);
+ });
+
+ it("can split special characters", () => {
+ const parts = splitQuery("Pin-Code");
+ expect(parts).toEqual(["Pin", "Code"]);
+ });
+
+ it("can split Chinese characters", () => {
+ const parts = splitQuery("Hello from 中国 上海");
+ expect(parts).toEqual(["Hello", "from", "中国", "上海"]);
+ });
+
+ it("can split Emoji (surrogate pair) characters. It should keep emojis.", () => {
+ const parts = splitQuery("😁😁");
+ expect(parts).toEqual(["😁😁"]);
+ });
+
+ it("can split umlauts. It should keep umlauts.", () => {
+ const parts = splitQuery("Löschen Prüfung Abändern ærlig spørsmål");
+ // prettier-ignore
+ expect(parts).toEqual(["Löschen", "Prüfung", "Abändern", "ærlig", "spørsmål"])
+ });
+});
diff --git a/tests/js/sphinx_highlight.spec.js b/tests/js/sphinx_highlight.spec.js
index 1f52eabb96f..4d57d867745 100644
--- a/tests/js/sphinx_highlight.spec.js
+++ b/tests/js/sphinx_highlight.spec.js
@@ -1,30 +1,33 @@
-describe('highlightText', function() {
+describe("highlightText", function () {
+ const cyrillicTerm = "шеллы";
+ const umlautTerm = "gänsefüßchen";
- const cyrillicTerm = 'шеллы';
- const umlautTerm = 'gänsefüßchen';
-
- it('should highlight text incl. special characters correctly in HTML', function() {
+ it("should highlight text incl. special characters correctly in HTML", function () {
const highlightTestSpan = new DOMParser().parseFromString(
- '
This is the шеллы and Gänsefüßchen test!', 'text/html').body.firstChild
- _highlightText(highlightTestSpan, cyrillicTerm, 'highlighted');
- _highlightText(highlightTestSpan, umlautTerm, 'highlighted');
+ "
This is the шеллы and Gänsefüßchen test!",
+ "text/html",
+ ).body.firstChild;
+ _highlightText(highlightTestSpan, cyrillicTerm, "highlighted");
+ _highlightText(highlightTestSpan, umlautTerm, "highlighted");
const expectedHtmlString =
- 'This is the
шеллы and ' +
- '
Gänsefüßchen test!';
+ 'This is the
шеллы and '
+ + '
Gänsefüßchen test!';
expect(highlightTestSpan.innerHTML).toEqual(expectedHtmlString);
});
- it('should highlight text incl. special characters correctly in SVG', function() {
+ it("should highlight text incl. special characters correctly in SVG", function () {
const highlightTestSvg = new DOMParser().parseFromString(
- '
' +
- '' +
- '', 'text/html').body.firstChild
- _highlightText(highlightTestSvg, cyrillicTerm, 'highlighted');
- _highlightText(highlightTestSvg, umlautTerm, 'highlighted');
+ '
'
+ + '"
+ + "",
+ "text/html",
+ ).body.firstChild;
+ _highlightText(highlightTestSvg, cyrillicTerm, "highlighted");
+ _highlightText(highlightTestSvg, umlautTerm, "highlighted");
/* Note wild cards and ``toMatch``; allowing for some variability
seems to be necessary, even between different FF versions */
const expectedSvgString =
@@ -32,8 +35,9 @@ describe('highlightText', function() {
+ '
'
+ '
'
+ '
This is the шеллы and '
- + 'Gänsefüßchen test!';
- expect(new XMLSerializer().serializeToString(highlightTestSvg.firstChild)).toMatch(new RegExp(expectedSvgString));
+ + "
Gänsefüßchen test!";
+ expect(
+ new XMLSerializer().serializeToString(highlightTestSvg.firstChild),
+ ).toMatch(new RegExp(expectedSvgString));
});
-
});
diff --git a/tests/roots/test-build-html-translator/conf.py b/tests/roots/test-build-html-translator/conf.py
deleted file mode 100644
index 89448d45741..00000000000
--- a/tests/roots/test-build-html-translator/conf.py
+++ /dev/null
@@ -1,16 +0,0 @@
-from sphinx.writers.html import HTML5Translator
-
-project = 'test'
-
-
-class ConfHTMLTranslator(HTML5Translator):
- depart_with_node = 0
-
- def depart_admonition(self, node=None):
- if node is not None:
- self.depart_with_node += 1
- HTML5Translator.depart_admonition(self, node)
-
-
-def setup(app):
- app.set_translator('html', ConfHTMLTranslator)
diff --git a/tests/roots/test-build-html-translator/index.rst b/tests/roots/test-build-html-translator/index.rst
deleted file mode 100644
index 1610d2b4501..00000000000
--- a/tests/roots/test-build-html-translator/index.rst
+++ /dev/null
@@ -1,24 +0,0 @@
-=======================
-Test HTML admonitions
-=======================
-
-.. seealso:: test
-
-.. note:: test
-
-.. warning:: test
-
-.. attention:: test
-
-.. caution:: test
-
-.. danger:: test
-
-.. error:: test
-
-.. hint:: test
-
-.. important:: test
-
-.. tip:: test
-
diff --git a/tests/roots/test-changes/base.rst b/tests/roots/test-changes/base.rst
index 81d90e66ef4..a6a83e4a282 100644
--- a/tests/roots/test-changes/base.rst
+++ b/tests/roots/test-changes/base.rst
@@ -1,23 +1,38 @@
Version markup
--------------
-.. versionadded:: 0.6
+.. version-added:: 0.6
Some funny **stuff**.
-.. versionchanged:: 0.6
+.. version-changed:: 0.6
Even more funny stuff.
-.. deprecated:: 0.6
+.. version-deprecated:: 0.6
Boring stuff.
-.. versionremoved:: 0.6
+.. version-removed:: 0.6
Goodbye boring stuff.
-.. versionadded:: 1.2
+.. version-added:: 1.2
+
+ First paragraph of version-added.
+
+.. version-changed:: 1.2
+ First paragraph of version-changed.
+
+ Second paragraph of version-changed.
- First paragraph of versionadded.
+.. version-deprecated:: 1.3
+ First paragraph of version-deprecated.
-.. versionchanged:: 1.2
- First paragraph of versionchanged.
+.. version-added:: 0.6
+ Deprecated alias
- Second paragraph of versionchanged.
+.. version-changed:: 0.6
+ Deprecated alias
+
+.. versionremoved:: 0.6
+ Deprecated alias
+
+.. deprecated:: 0.6
+ Deprecated alias
diff --git a/tests/roots/test-changes/c-api.rst b/tests/roots/test-changes/c-api.rst
index f0ad413cd2e..6ded4afebd5 100644
--- a/tests/roots/test-changes/c-api.rst
+++ b/tests/roots/test-changes/c-api.rst
@@ -8,7 +8,7 @@ Memory
Allocate *n* bytes of memory.
- .. versionchanged:: 0.6
+ .. version-changed:: 0.6
Can now be replaced with a different allocator.
@@ -17,7 +17,7 @@ System
Access to the system allocator.
-.. versionadded:: 0.6
+.. version-added:: 0.6
.. c:function:: void* Test_SysMalloc(size_t n)
diff --git a/tests/roots/test-changes/library/utils.rst b/tests/roots/test-changes/library/utils.rst
index 86446995b2f..56675715593 100644
--- a/tests/roots/test-changes/library/utils.rst
+++ b/tests/roots/test-changes/library/utils.rst
@@ -16,10 +16,10 @@ Classes
Class for handling paths.
- .. versionadded:: 0.5
+ .. version-added:: 0.5
- Innovative new way to handle paths.
+ Innovative new way to handle paths.
- .. deprecated:: 0.6
+ .. version-deprecated:: 0.6
So, that was a bad idea it turns out.
diff --git a/tests/roots/test-domain-py-xref-type-alias/conf.py b/tests/roots/test-domain-py-xref-type-alias/conf.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/roots/test-domain-py-xref-type-alias/index.rst b/tests/roots/test-domain-py-xref-type-alias/index.rst
new file mode 100644
index 00000000000..5d436e90102
--- /dev/null
+++ b/tests/roots/test-domain-py-xref-type-alias/index.rst
@@ -0,0 +1,47 @@
+Type Alias Cross-Reference Test
+===============================
+
+This tests that type aliases in function signatures can be cross-referenced properly.
+
+
+.. py:module:: alias_module
+
+ Module to test type alias cross-reference resolution.
+
+
+.. py:data:: Handler
+ :module: alias_module
+
+ A generic type alias for error handlers
+
+ alias of :py:class:`type`\ [:py:class:`Exception`]
+
+
+.. py:type:: HandlerType
+ :module: alias_module
+ :canonical: type[Exception]
+
+ A PEP 695 type alias for error handlers
+
+
+.. py:data:: pathlike
+ :module: alias_module
+ :value: str | pathlib.Path
+
+ Any type of path
+
+
+.. py:function:: process_error(handler: Handler, other: ~alias_module.HandlerType) -> str
+ :module: alias_module
+
+ Process an error with a custom handler type.
+
+ Tests generic type alias cross-reference resolution.
+
+
+.. py:function:: read_file(path: pathlike) -> bytes
+ :module: alias_module
+
+ Read a file and return its contents.
+
+ Tests Union type alias cross-reference resolution.
diff --git a/tests/roots/test-ext-autodoc/conf.py b/tests/roots/test-ext-autodoc/conf.py
index f134359a32a..98cfc20effa 100644
--- a/tests/roots/test-ext-autodoc/conf.py
+++ b/tests/roots/test-ext-autodoc/conf.py
@@ -9,4 +9,10 @@
'dummy',
]
+autodoc_type_aliases = {
+ 'buffer_like': 'buffer_like',
+ 'pathlike': 'pathlike',
+ 'Handler': 'Handler',
+}
+
nitpicky = True
diff --git a/tests/roots/test-ext-autodoc/target/enums.py b/tests/roots/test-ext-autodoc/target/enums.py
index 6b2731672d2..9d6bcdbc97f 100644
--- a/tests/roots/test-ext-autodoc/target/enums.py
+++ b/tests/roots/test-ext-autodoc/target/enums.py
@@ -1,4 +1,4 @@
-# ruff: NoQA: D403, PIE796
+# ruff: NoQA: PIE796
import enum
from typing import final
diff --git a/tests/roots/test-ext-autodoc/target/final.py b/tests/roots/test-ext-autodoc/target/final.py
index a8c3860e384..bd233abb580 100644
--- a/tests/roots/test-ext-autodoc/target/final.py
+++ b/tests/roots/test-ext-autodoc/target/final.py
@@ -3,6 +3,9 @@
import typing
from typing import final
+import typing_extensions
+from typing_extensions import final as final_ext # noqa: UP035
+
@typing.final
class Class:
@@ -14,3 +17,11 @@ def meth1(self):
def meth2(self):
"""docstring"""
+
+ @final_ext
+ def meth3(self):
+ """docstring"""
+
+ @typing_extensions.final
+ def meth4(self):
+ """docstring"""
diff --git a/tests/roots/test-ext-autodoc/target/need_mocks.py b/tests/roots/test-ext-autodoc/target/need_mocks.py
index 1b8af7055d6..73782a2fde8 100644
--- a/tests/roots/test-ext-autodoc/target/need_mocks.py
+++ b/tests/roots/test-ext-autodoc/target/need_mocks.py
@@ -1,10 +1,9 @@
import missing_module
import missing_package1.missing_module1
+import sphinx.missing_module4
from missing_module import missing_name
from missing_package2 import missing_module2
from missing_package3.missing_module3 import missing_name # NoQA: F811
-
-import sphinx.missing_module4
from sphinx.missing_module4 import missing_name2
diff --git a/tests/roots/test-ext-autodoc/target/overload3.py b/tests/roots/test-ext-autodoc/target/overload3.py
new file mode 100644
index 00000000000..a3cc34a9f85
--- /dev/null
+++ b/tests/roots/test-ext-autodoc/target/overload3.py
@@ -0,0 +1,18 @@
+import typing
+from typing import TYPE_CHECKING, overload
+
+import typing_extensions
+from typing_extensions import overload as over_ext # noqa: UP035
+
+
+@overload
+def test(x: int) -> int: ...
+@typing.overload
+def test(x: list[int]) -> list[int]: ...
+@over_ext
+def test(x: str) -> str: ...
+@typing_extensions.overload
+def test(x: float) -> float: ...
+def test(x):
+ """Documentation."""
+ return x
diff --git a/tests/roots/test-ext-autodoc/target/pep695.py b/tests/roots/test-ext-autodoc/target/pep695.py
new file mode 100644
index 00000000000..8aa30aaaaba
--- /dev/null
+++ b/tests/roots/test-ext-autodoc/target/pep695.py
@@ -0,0 +1,70 @@
+from __future__ import annotations
+
+import pathlib
+from typing import NewType, TypeAliasType
+
+import typing_extensions
+
+#: Some buffer-like object
+buffer_like = bytes | bytearray | memoryview
+
+#: Any type of path
+pathlike = str | pathlib.Path
+
+#: A generic type alias
+Handler = type[Exception]
+
+
+class Foo:
+ """This is class Foo."""
+
+
+type Pep695Alias = Foo
+"""This is PEP695 type alias."""
+
+TypeAliasTypeExplicit = TypeAliasType('TypeAliasTypeExplicit', Foo) # NoQA: UP040
+"""This is an explicitly constructed typing.TypeAlias."""
+
+HandlerTypeAliasType = TypeAliasType('HandlerTypeAliasType', type[Exception]) # NoQA: UP040
+"""This is an explicitly constructed generic alias typing.TypeAlias."""
+
+TypeAliasTypeExtension = typing_extensions.TypeAliasType('TypeAliasTypeExtension', Foo) # NoQA: UP040
+"""This is an explicitly constructed typing_extensions.TypeAlias."""
+
+#: This is PEP695 complex type alias with doc comment.
+type Pep695AliasC = dict[str, Foo]
+
+type Pep695AliasUnion = str | int
+"""This is PEP695 type alias for union."""
+
+type Pep695AliasOfAlias = Pep695AliasC
+"""This is PEP695 type alias of PEP695 alias."""
+
+Bar = NewType('Bar', Pep695Alias)
+"""This is newtype of Pep695Alias."""
+
+
+def ret_pep695(a: Pep695Alias) -> Pep695Alias:
+ """This fn accepts and returns PEP695 alias."""
+ ...
+
+
+def read_file(path: pathlike) -> bytes:
+ """Read a file and return its contents.
+
+ Tests Union type alias cross-reference resolution.
+ """
+
+
+def process_error(handler: Handler, other: HandlerTypeAliasType) -> str:
+ """Process an error with a custom handler type.
+
+ Tests generic type alias cross-reference resolution.
+ """
+
+
+def buffer_len(data: buffer_like) -> int:
+ """Return length of a buffer-like object.
+
+ Tests Union type alias cross-reference resolution.
+ """
diff --git a/tests/roots/test-ext-doctest-fail-fast/conf.py b/tests/roots/test-ext-doctest-fail-fast/conf.py
new file mode 100644
index 00000000000..227afbb2c95
--- /dev/null
+++ b/tests/roots/test-ext-doctest-fail-fast/conf.py
@@ -0,0 +1,11 @@
+extensions = ['sphinx.ext.doctest']
+
+project = 'test project for doctest'
+root_doc = 'fail-fast'
+source_suffix = {
+ '.txt': 'restructuredtext',
+}
+exclude_patterns = ['_build']
+
+# Set in tests.
+# doctest_fail_fast = ...
diff --git a/tests/roots/test-ext-doctest-fail-fast/fail-fast.txt b/tests/roots/test-ext-doctest-fail-fast/fail-fast.txt
new file mode 100644
index 00000000000..70a05af487b
--- /dev/null
+++ b/tests/roots/test-ext-doctest-fail-fast/fail-fast.txt
@@ -0,0 +1,11 @@
+Testing fast failure in the doctest extension
+=============================================
+
+>>> 1 + 1
+2
+
+>>> 1 + 1
+3
+
+>>> 1 + 1
+3
diff --git a/tests/roots/test-ext-math-duplicate-label/conf.py b/tests/roots/test-ext-math-duplicate-label/conf.py
new file mode 100644
index 00000000000..635774bdfa4
--- /dev/null
+++ b/tests/roots/test-ext-math-duplicate-label/conf.py
@@ -0,0 +1 @@
+extensions = ['sphinx.ext.mathjax']
diff --git a/tests/roots/test-ext-math-duplicate-label/index.rst b/tests/roots/test-ext-math-duplicate-label/index.rst
new file mode 100644
index 00000000000..fa4304d096e
--- /dev/null
+++ b/tests/roots/test-ext-math-duplicate-label/index.rst
@@ -0,0 +1,16 @@
+Test duplicate equation labels
+===============================
+
+First equation with label:
+
+.. math::
+ :label: duplicated
+
+ a^2 + b^2 = c^2
+
+Second equation with the same label:
+
+.. math::
+ :label: duplicated
+
+ e^{i\pi} + 1 = 0
diff --git a/tests/roots/test-intl/backslashes.txt b/tests/roots/test-intl/backslashes.txt
new file mode 100644
index 00000000000..7a16b6f6b62
--- /dev/null
+++ b/tests/roots/test-intl/backslashes.txt
@@ -0,0 +1,38 @@
+:tocdepth: 2
+
+i18n with backslashes
+=====================
+
+line 1\
+line 2 \
+line 3 \
+line 4a \ and 4b \
+line with spaces after backslash \
+last line with spaces \
+and done 1
+
+.. gettext parses the following lines as "a
bc",
+ while a C pre-processor would have produced "abc".
+
+a \
+b\
+c \
+
+last trailing \ \ \
+is ignored
+
+
+See [#]_
+
+.. [#] footnote with backslashes \
+ and done 2
+
+
+.. note:: directive with \
+ backslashes
+
+
+.. function:: foo(a, \
+ b, \
+ c, d, e, f)
+ the foo
diff --git a/tests/roots/test-intl/index.txt b/tests/roots/test-intl/index.txt
index ac68314f97d..52644e34be1 100644
--- a/tests/roots/test-intl/index.txt
+++ b/tests/roots/test-intl/index.txt
@@ -32,6 +32,7 @@ CONTENTS
translation_progress
topic
markup
+ backslashes
.. toctree::
:maxdepth: 2
diff --git a/tests/roots/test-intl/versionchange.txt b/tests/roots/test-intl/versionchange.txt
index 764534246c0..69b9ada1858 100644
--- a/tests/roots/test-intl/versionchange.txt
+++ b/tests/roots/test-intl/versionchange.txt
@@ -3,16 +3,16 @@
i18n with versionchange
============================
-.. deprecated:: 1.0
- This is the *first* paragraph of deprecated.
+.. version-deprecated:: 1.0
+ This is the *first* paragraph of version-deprecated.
- This is the *second* paragraph of deprecated.
+ This is the *second* paragraph of version-deprecated.
-.. versionadded:: 1.0
- This is the *first* paragraph of versionadded.
+.. version-added:: 1.0
+ This is the *first* paragraph of version-added.
-.. versionchanged:: 1.0
+.. version-changed:: 1.0
- This is the *first* paragraph of versionchanged.
+ This is the *first* paragraph of version-changed.
-.. versionremoved:: 1.0 This is the *first* paragraph of versionremoved.
+.. version-removed:: 1.0 This is the *first* paragraph of version-removed.
diff --git a/tests/roots/test-intl/xx/LC_MESSAGES/versionchange.po b/tests/roots/test-intl/xx/LC_MESSAGES/versionchange.po
index b1d786580f0..8f1b88bcaca 100644
--- a/tests/roots/test-intl/xx/LC_MESSAGES/versionchange.po
+++ b/tests/roots/test-intl/xx/LC_MESSAGES/versionchange.po
@@ -19,12 +19,24 @@ msgstr ""
msgid "i18n with versionchange"
msgstr "I18N WITH VERSIONCHANGE"
+msgid "This is the *first* paragraph of version-deprecated."
+msgstr "THIS IS THE *FIRST* PARAGRAPH OF VERSION-DEPRECATED."
+
+msgid "This is the *second* paragraph of version-deprecated."
+msgstr "THIS IS THE *SECOND* PARAGRAPH OF VERSION-DEPRECATED."
+
+msgid "This is the *first* paragraph of version-added."
+msgstr "THIS IS THE *FIRST* PARAGRAPH OF VERSION-ADDED."
+
+msgid "This is the *first* paragraph of version-changed."
+msgstr "THIS IS THE *FIRST* PARAGRAPH OF VERSION-CHANGED."
+
+msgid "This is the *first* paragraph of version-removed."
+msgstr "THIS IS THE *FIRST* PARAGRAPH OF VERSION-REMOVED."
+
msgid "This is the *first* paragraph of deprecated."
msgstr "THIS IS THE *FIRST* PARAGRAPH OF DEPRECATED."
-msgid "This is the *second* paragraph of deprecated."
-msgstr "THIS IS THE *SECOND* PARAGRAPH OF DEPRECATED."
-
msgid "This is the *first* paragraph of versionadded."
msgstr "THIS IS THE *FIRST* PARAGRAPH OF VERSIONADDED."
diff --git a/tests/roots/test-latex-images-css3-lengths/conf.py b/tests/roots/test-latex-images-css3-lengths/conf.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/roots/test-latex-images-css3-lengths/img.png b/tests/roots/test-latex-images-css3-lengths/img.png
new file mode 100644
index 00000000000..a97e86d66af
Binary files /dev/null and b/tests/roots/test-latex-images-css3-lengths/img.png differ
diff --git a/tests/roots/test-latex-images-css3-lengths/index.rst b/tests/roots/test-latex-images-css3-lengths/index.rst
new file mode 100644
index 00000000000..52255262b1c
--- /dev/null
+++ b/tests/roots/test-latex-images-css3-lengths/index.rst
@@ -0,0 +1,25 @@
+=============
+ TEST IMAGES
+=============
+
+test-latex-images-css3-lengths
+==============================
+
+.. image:: img.png
+ :width: 10.03ch
+ :height: 9.97rem
+
+.. image:: img.png
+ :width: 60vw
+ :height: 10vh
+
+.. image:: img.png
+ :width: 10.5vmin
+ :height: 10.5vmax
+
+.. image:: img.png
+ :width: 195.345Q
+
+.. image:: img.png
+ :width: 195.345Q
+ :scale: 50%
diff --git a/tests/roots/test-latex-labels/index.rst b/tests/roots/test-latex-labels/index.rst
index 0021d5d4215..4abf72e2e03 100644
--- a/tests/roots/test-latex-labels/index.rst
+++ b/tests/roots/test-latex-labels/index.rst
@@ -69,6 +69,6 @@ subsubsection
otherdoc
-* Embedded standalone hyperlink reference: `subsection `_.
+* Named hyperlink reference with embedded alias reference: `subsection `_.
.. See: https://github.com/sphinx-doc/sphinx/issues/5948
diff --git a/tests/roots/test-latex-table/complex.rst b/tests/roots/test-latex-table/complex.rst
index d648ff194c4..d35927a96e0 100644
--- a/tests/roots/test-latex-table/complex.rst
+++ b/tests/roots/test-latex-table/complex.rst
@@ -20,8 +20,8 @@ grid table
| cell5-1 |
+---------+---------+---------+
-grid table with tabularcolumns having no vline
-----------------------------------------------
+grid table with tabularcolumns
+------------------------------
.. tabularcolumns:: TTT
diff --git a/tests/roots/test-latex-table/expects/complex_spanning_cell.tex b/tests/roots/test-latex-table/expects/complex_spanning_cell.tex
index d2d61894251..645d84b30b9 100644
--- a/tests/roots/test-latex-table/expects/complex_spanning_cell.tex
+++ b/tests/roots/test-latex-table/expects/complex_spanning_cell.tex
@@ -23,45 +23,49 @@
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
&\sphinxmultirow{3}{2}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\sphinxmultirow{3}{4}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}4
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
&\sphinxmultirow{2}{5}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell1\sphinxhyphen{}5
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
\\
\sphinxvlinecrossing{1}\sphinxcline{3-3}\sphinxvlinecrossing{4}\sphinxfixclines{5}\sphinxtablestrut{1}&\sphinxtablestrut{2}&\sphinxmultirow{2}{6}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell2\sphinxhyphen{}3
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
&\sphinxtablestrut{4}&\sphinxtablestrut{5}\\
-\sphinxvlinecrossing{1}\sphinxvlinecrossing{2}\sphinxvlinecrossing{3}\sphinxcline{5-5}\sphinxfixclines{5}\sphinxtablestrut{1}&\sphinxtablestrut{2}&\sphinxtablestrut{6}&\sphinxtablestrut{4}&
+\sphinxvlinecrossing{1}\sphinxvlinecrossing{2}\sphinxvlinecrossing{3}\sphinxcline{5-5}\sphinxfixclines{5}\sphinxtablestrut{1}&\sphinxtablestrut{2}&\sphinxtablestrut{6}&\sphinxtablestrut{4}&\begin{varwidth}[t]{\sphinxcolwidth{1}{5}}
\sphinxAtStartPar
cell3\sphinxhyphen{}5
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabulary}
diff --git a/tests/roots/test-latex-table/expects/gridtable.tex b/tests/roots/test-latex-table/expects/grid_table.tex
similarity index 55%
rename from tests/roots/test-latex-table/expects/gridtable.tex
rename to tests/roots/test-latex-table/expects/grid_table.tex
index 407abe7f2c8..e5b3a6bda31 100644
--- a/tests/roots/test-latex-table/expects/gridtable.tex
+++ b/tests/roots/test-latex-table/expects/grid_table.tex
@@ -6,41 +6,53 @@
\centering
\begin{tabulary}{\linewidth}[t]{|T|T|T|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\sphinxmultirow{2}{5}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxcline{1-1}\sphinxcline{3-3}\sphinxfixclines{3}\sphinxmultirow{2}{7}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&\sphinxtablestrut{5}&
+&\sphinxtablestrut{5}&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell2\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxcline{2-3}\sphinxfixclines{3}\sphinxtablestrut{7}&\sphinxstartmulticolumn{2}%
\sphinxmultirow{2}{9}{%
@@ -50,24 +62,26 @@
\sphinxAtStartPar
cell3\sphinxhyphen{}2\sphinxhyphen{}par2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
\sphinxstopmulticolumn
\\
-\sphinxcline{1-1}\sphinxfixclines{3}
+\sphinxcline{1-1}\sphinxfixclines{3}\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell4\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\multicolumn{2}{l|}{\sphinxtablestrut{9}}\\
\sphinxhline\sphinxstartmulticolumn{3}%
\begin{varwidth}[t]{\sphinxcolwidth{3}{3}}
\sphinxAtStartPar
cell5\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\sphinxstopmulticolumn
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/gridtable_with_tabularcolumn.tex b/tests/roots/test-latex-table/expects/grid_table_with_tabularcolumns.tex
similarity index 53%
rename from tests/roots/test-latex-table/expects/gridtable_with_tabularcolumn.tex
rename to tests/roots/test-latex-table/expects/grid_table_with_tabularcolumns.tex
index c77b99041ff..1fb5bf3f34e 100644
--- a/tests/roots/test-latex-table/expects/gridtable_with_tabularcolumn.tex
+++ b/tests/roots/test-latex-table/expects/grid_table_with_tabularcolumns.tex
@@ -1,4 +1,4 @@
-\label{\detokenize{complex:grid-table-with-tabularcolumns-having-no-vline}}
+\label{\detokenize{complex:grid-table-with-tabularcolumns}}
\begin{savenotes}\sphinxattablestart
\sphinxthistablewithglobalstyle
@@ -6,41 +6,53 @@
\centering
\begin{tabulary}{\linewidth}[t]{TTT}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\sphinxmultirow{2}{5}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxcline{1-1}\sphinxcline{3-3}\sphinxfixclines{3}\sphinxmultirow{2}{7}{%
\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
-&\sphinxtablestrut{5}&
+&\sphinxtablestrut{5}&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell2\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxcline{2-3}\sphinxfixclines{3}\sphinxtablestrut{7}&\sphinxstartmulticolumn{2}%
\sphinxmultirow{2}{9}{%
@@ -50,24 +62,26 @@
\sphinxAtStartPar
cell3\sphinxhyphen{}2\sphinxhyphen{}par2
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
}%
\sphinxstopmulticolumn
\\
-\sphinxcline{1-1}\sphinxfixclines{3}
+\sphinxcline{1-1}\sphinxfixclines{3}\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
\sphinxAtStartPar
cell4\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
&\multicolumn{2}{l}{\sphinxtablestrut{9}}\\
\sphinxhline\sphinxstartmulticolumn{3}%
\begin{varwidth}[t]{\sphinxcolwidth{3}{3}}
\sphinxAtStartPar
cell5\sphinxhyphen{}1
-\par
-\vskip-\baselineskip\vbox{\hbox{\strut}}\end{varwidth}%
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\sphinxstopmulticolumn
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable.tex b/tests/roots/test-latex-table/expects/longtable.tex
index 1fe1022b7d5..518b2cc0196 100644
--- a/tests/roots/test-latex-table/expects/longtable.tex
+++ b/tests/roots/test-latex-table/expects/longtable.tex
@@ -10,12 +10,16 @@
\makeatother
\begin{longtable}{ll}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -24,12 +28,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -42,29 +50,41 @@
\endlastfoot
\sphinxtableatstartofbodyhook
-
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
\sphinxtableafterendhook
\sphinxatlongtableend
-\end{savenotes}
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_align.tex b/tests/roots/test-latex-table/expects/longtable_having_align_option.tex
similarity index 50%
rename from tests/roots/test-latex-table/expects/longtable_having_align.tex
rename to tests/roots/test-latex-table/expects/longtable_having_align_option.tex
index 4a4df1824e7..90f975bf21c 100644
--- a/tests/roots/test-latex-table/expects/longtable_having_align.tex
+++ b/tests/roots/test-latex-table/expects/longtable_having_align_option.tex
@@ -9,12 +9,16 @@
\makeatother
\begin{longtable}{|l|l|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -23,12 +27,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -41,29 +49,41 @@
\endlastfoot
\sphinxtableatstartofbodyhook
-
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
\sphinxtableafterendhook
\sphinxatlongtableend
-\end{savenotes}
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_caption.tex b/tests/roots/test-latex-table/expects/longtable_having_caption.tex
index a1aa65d7a8b..17617ab5929 100644
--- a/tests/roots/test-latex-table/expects/longtable_having_caption.tex
+++ b/tests/roots/test-latex-table/expects/longtable_having_caption.tex
@@ -11,12 +11,16 @@
\sphinxthelongtablecaptionisattop
\caption{caption for longtable\strut}\label{\detokenize{longtable:id1}}\\*[\sphinxlongtablecapskipadjust]
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -25,12 +29,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -43,29 +51,41 @@
\endlastfoot
\sphinxtableatstartofbodyhook
-
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
\sphinxtableafterendhook
\sphinxatlongtableend
-\end{savenotes}
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_formerly_problematic.tex b/tests/roots/test-latex-table/expects/longtable_having_formerly_problematic.tex
new file mode 100644
index 00000000000..2298fbc7a1d
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/longtable_having_formerly_problematic.tex
@@ -0,0 +1,97 @@
+\label{\detokenize{longtable:longtable-having-formerly-problematic}}
+
+\begin{savenotes}
+\sphinxatlongtablestart
+\sphinxthistablewithglobalstyle
+\makeatletter
+ \LTleft \@totalleftmargin plus1fill
+ \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
+\makeatother
+\begin{longtable}{|l|l|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endfirsthead
+
+\multicolumn{2}{c}{\sphinxnorowcolor
+ \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
+}\\
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endhead
+
+\sphinxbottomrule
+\multicolumn{2}{r}{\sphinxnorowcolor
+ \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
+}\\
+\endfoot
+
+\endlastfoot
+\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\begin{itemize}
+\item {}
+\sphinxAtStartPar
+item1
+
+\item {}
+\sphinxAtStartPar
+item2
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{longtable}
+\sphinxtableafterendhook
+\sphinxatlongtableend
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex b/tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex
deleted file mode 100644
index 240a7609384..00000000000
--- a/tests/roots/test-latex-table/expects/longtable_having_problematic_cell.tex
+++ /dev/null
@@ -1,76 +0,0 @@
-\label{\detokenize{longtable:longtable-having-problematic-cell}}
-
-\begin{savenotes}
-\sphinxatlongtablestart
-\sphinxthistablewithglobalstyle
-\makeatletter
- \LTleft \@totalleftmargin plus1fill
- \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
-\makeatother
-\begin{longtable}{|*{2}{\X{1}{2}|}}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endfirsthead
-
-\multicolumn{2}{c}{\sphinxnorowcolor
- \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
-}\\
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endhead
-
-\sphinxbottomrule
-\multicolumn{2}{r}{\sphinxnorowcolor
- \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
-}\\
-\endfoot
-
-\endlastfoot
-\sphinxtableatstartofbodyhook
-\begin{itemize}
-\item {}
-\sphinxAtStartPar
-item1
-
-\item {}
-\sphinxAtStartPar
-item2
-
-\end{itemize}
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{longtable}
-\sphinxtableafterendhook
-\sphinxatlongtableend
-\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_formerly_problematic.tex b/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_formerly_problematic.tex
new file mode 100644
index 00000000000..8a95833e326
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_formerly_problematic.tex
@@ -0,0 +1,106 @@
+\label{\detokenize{longtable:longtable-having-stub-columns-and-formerly-problematic}}
+
+\begin{savenotes}
+\sphinxatlongtablestart
+\sphinxthistablewithglobalstyle
+\makeatletter
+ \LTleft \@totalleftmargin plus1fill
+ \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
+\makeatother
+\begin{longtable}{|l|l|l|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endfirsthead
+
+\multicolumn{3}{c}{\sphinxnorowcolor
+ \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
+}\\
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endhead
+
+\sphinxbottomrule
+\multicolumn{3}{r}{\sphinxnorowcolor
+ \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
+}\\
+\endfoot
+
+\endlastfoot
+\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \begin{itemize}
+\item {}
+\sphinxAtStartPar
+instub1\sphinxhyphen{}1a
+
+\item {}
+\sphinxAtStartPar
+instub1\sphinxhyphen{}1b
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+instub1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxAtStartPar
+notinstub1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{longtable}
+\sphinxtableafterendhook
+\sphinxatlongtableend
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex
deleted file mode 100644
index 897830b1c02..00000000000
--- a/tests/roots/test-latex-table/expects/longtable_having_stub_columns_and_problematic_cell.tex
+++ /dev/null
@@ -1,81 +0,0 @@
-\label{\detokenize{longtable:longtable-having-both-stub-columns-and-problematic-cell}}
-
-\begin{savenotes}
-\sphinxatlongtablestart
-\sphinxthistablewithglobalstyle
-\makeatletter
- \LTleft \@totalleftmargin plus1fill
- \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
-\makeatother
-\begin{longtable}{|*{3}{\X{1}{3}|}}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header3
-\\
-\sphinxmidrule
-\endfirsthead
-
-\multicolumn{3}{c}{\sphinxnorowcolor
- \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
-}\\
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header3
-\\
-\sphinxmidrule
-\endhead
-
-\sphinxbottomrule
-\multicolumn{3}{r}{\sphinxnorowcolor
- \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
-}\\
-\endfoot
-
-\endlastfoot
-\sphinxtableatstartofbodyhook
-\sphinxstyletheadfamily \begin{itemize}
-\item {}
-\sphinxAtStartPar
-instub1\sphinxhyphen{}1a
-
-\item {}
-\sphinxAtStartPar
-instub1\sphinxhyphen{}1b
-
-\end{itemize}
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-instub1\sphinxhyphen{}2
-&
-\sphinxAtStartPar
-notinstub1\sphinxhyphen{}3
-\\
-\sphinxhline\sphinxstyletheadfamily
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}3
-\\
-\sphinxbottomrule
-\end{longtable}
-\sphinxtableafterendhook
-\sphinxatlongtableend
-\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex b/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex
index b9f75129c68..d7a86633d75 100644
--- a/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex
+++ b/tests/roots/test-latex-table/expects/longtable_having_verbatim.tex
@@ -9,12 +9,16 @@
\makeatother
\begin{longtable}{|*{2}{\X{1}{2}|}}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -23,12 +27,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -41,30 +49,43 @@
\endlastfoot
\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\begin{sphinxVerbatimintable}[commandchars=\\\{\}]
\PYG{n}{hello} \PYG{n}{world}
\end{sphinxVerbatimintable}
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
\sphinxtableafterendhook
\sphinxatlongtableend
-\end{savenotes}
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_widths_and_formerly_problematic.tex b/tests/roots/test-latex-table/expects/longtable_having_widths_and_formerly_problematic.tex
new file mode 100644
index 00000000000..fb9f39f5f30
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/longtable_having_widths_and_formerly_problematic.tex
@@ -0,0 +1,97 @@
+\label{\detokenize{longtable:longtable-having-widths-and-formerly-problematic}}
+
+\begin{savenotes}
+\sphinxatlongtablestart
+\sphinxthistablewithglobalstyle
+\makeatletter
+ \LTleft \@totalleftmargin plus1fill
+ \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
+\makeatother
+\begin{longtable}{|\X{30}{100}|\X{70}{100}|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endfirsthead
+
+\multicolumn{2}{c}{\sphinxnorowcolor
+ \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
+}\\
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endhead
+
+\sphinxbottomrule
+\multicolumn{2}{r}{\sphinxnorowcolor
+ \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
+}\\
+\endfoot
+
+\endlastfoot
+\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\begin{itemize}
+\item {}
+\sphinxAtStartPar
+item1
+
+\item {}
+\sphinxAtStartPar
+item2
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{longtable}
+\sphinxtableafterendhook
+\sphinxatlongtableend
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex
deleted file mode 100644
index b4758caa08e..00000000000
--- a/tests/roots/test-latex-table/expects/longtable_having_widths_and_problematic_cell.tex
+++ /dev/null
@@ -1,76 +0,0 @@
-\label{\detokenize{longtable:longtable-having-both-widths-and-problematic-cell}}
-
-\begin{savenotes}
-\sphinxatlongtablestart
-\sphinxthistablewithglobalstyle
-\makeatletter
- \LTleft \@totalleftmargin plus1fill
- \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
-\makeatother
-\begin{longtable}{|\X{30}{100}|\X{70}{100}|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endfirsthead
-
-\multicolumn{2}{c}{\sphinxnorowcolor
- \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
-}\\
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endhead
-
-\sphinxbottomrule
-\multicolumn{2}{r}{\sphinxnorowcolor
- \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
-}\\
-\endfoot
-
-\endlastfoot
-\sphinxtableatstartofbodyhook
-\begin{itemize}
-\item {}
-\sphinxAtStartPar
-item1
-
-\item {}
-\sphinxAtStartPar
-item2
-
-\end{itemize}
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{longtable}
-\sphinxtableafterendhook
-\sphinxatlongtableend
-\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/longtable_having_widths.tex b/tests/roots/test-latex-table/expects/longtable_having_widths_option.tex
similarity index 58%
rename from tests/roots/test-latex-table/expects/longtable_having_widths.tex
rename to tests/roots/test-latex-table/expects/longtable_having_widths_option.tex
index bcad23be4f0..d09b56d6900 100644
--- a/tests/roots/test-latex-table/expects/longtable_having_widths.tex
+++ b/tests/roots/test-latex-table/expects/longtable_having_widths_option.tex
@@ -10,12 +10,16 @@
\begin{longtable}{|\X{30}{100}|\X{70}{100}|}
\noalign{\phantomsection\label{\detokenize{longtable:namedlongtable}}\label{\detokenize{longtable:mylongtable}}}%
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endfirsthead
@@ -24,12 +28,16 @@
\makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
}\\
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
\endhead
@@ -42,26 +50,38 @@
\endlastfoot
\sphinxtableatstartofbodyhook
-
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{longtable}
@@ -70,4 +90,4 @@
\end{savenotes}
\sphinxAtStartPar
-See {\hyperref[\detokenize{longtable:mylongtable}]{\sphinxcrossref{mylongtable}}}, same as {\hyperref[\detokenize{longtable:namedlongtable}]{\sphinxcrossref{\DUrole{std}{\DUrole{std-ref}{this one}}}}}.
+See {\hyperref[\detokenize{longtable:mylongtable}]{\sphinxcrossref{mylongtable}}}, same as {\hyperref[\detokenize{longtable:namedlongtable}]{\sphinxcrossref{\DUrole{std}{\DUrole{std-ref}{this one}}}}}.
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex b/tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex
deleted file mode 100644
index 4c380fed7a4..00000000000
--- a/tests/roots/test-latex-table/expects/longtable_with_tabularcolumn.tex
+++ /dev/null
@@ -1,70 +0,0 @@
-\label{\detokenize{longtable:longtable-with-tabularcolumn}}
-
-\begin{savenotes}
-\sphinxatlongtablestart
-\sphinxthistablewithglobalstyle
-\sphinxthistablewithvlinesstyle
-\makeatletter
- \LTleft \@totalleftmargin plus1fill
- \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
-\makeatother
-\begin{longtable}{|c|c|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endfirsthead
-
-\multicolumn{2}{c}{\sphinxnorowcolor
- \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
-}\\
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\endhead
-
-\sphinxbottomrule
-\multicolumn{2}{r}{\sphinxnorowcolor
- \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
-}\\
-\endfoot
-
-\endlastfoot
-\sphinxtableatstartofbodyhook
-
-\sphinxAtStartPar
-cell1\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{longtable}
-\sphinxtableafterendhook
-\sphinxatlongtableend
-\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/longtable_with_tabularcolumns.tex b/tests/roots/test-latex-table/expects/longtable_with_tabularcolumns.tex
new file mode 100644
index 00000000000..764dfd8ff9b
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/longtable_with_tabularcolumns.tex
@@ -0,0 +1,90 @@
+\label{\detokenize{longtable:longtable-with-tabularcolumns}}
+
+\begin{savenotes}
+\sphinxatlongtablestart
+\sphinxthistablewithglobalstyle
+\sphinxthistablewithvlinesstyle
+\makeatletter
+ \LTleft \@totalleftmargin plus1fill
+ \LTright\dimexpr\columnwidth-\@totalleftmargin-\linewidth\relax plus1fill
+\makeatother
+\begin{longtable}{|c|c|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endfirsthead
+
+\multicolumn{2}{c}{\sphinxnorowcolor
+ \makebox[0pt]{\sphinxtablecontinued{\tablename\ \thetable{} \textendash{} continued from previous page}}%
+}\\
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\endhead
+
+\sphinxbottomrule
+\multicolumn{2}{r}{\sphinxnorowcolor
+ \makebox[0pt][r]{\sphinxtablecontinued{continues on next page}}%
+}\\
+\endfoot
+
+\endlastfoot
+\sphinxtableatstartofbodyhook
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{longtable}
+\sphinxtableafterendhook
+\sphinxatlongtableend
+\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/simple_table.tex b/tests/roots/test-latex-table/expects/simple_table.tex
index 7bd85c737b2..8a17635fe64 100644
--- a/tests/roots/test-latex-table/expects/simple_table.tex
+++ b/tests/roots/test-latex-table/expects/simple_table.tex
@@ -5,36 +5,52 @@
\centering
\begin{tabulary}{\linewidth}[t]{|T|T|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_caption.tex b/tests/roots/test-latex-table/expects/table_having_caption.tex
index f2ce5536021..450d370e1ba 100644
--- a/tests/roots/test-latex-table/expects/table_having_caption.tex
+++ b/tests/roots/test-latex-table/expects/table_having_caption.tex
@@ -9,36 +9,52 @@
\sphinxaftertopcaption
\begin{tabulary}{\linewidth}[t]{|T|T|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_formerly_problematic.tex b/tests/roots/test-latex-table/expects/table_having_formerly_problematic.tex
new file mode 100644
index 00000000000..fb882fea57f
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/table_having_formerly_problematic.tex
@@ -0,0 +1,64 @@
+\label{\detokenize{tabular:table-having-formerly-problematic}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\centering
+\begin{tabulary}{\linewidth}[t]{|T|T|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\begin{itemize}
+\item {}
+\sphinxAtStartPar
+item1
+
+\item {}
+\sphinxAtStartPar
+item2
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabulary}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_problematic_cell.tex b/tests/roots/test-latex-table/expects/table_having_problematic_cell.tex
deleted file mode 100644
index 7d7ad4b715b..00000000000
--- a/tests/roots/test-latex-table/expects/table_having_problematic_cell.tex
+++ /dev/null
@@ -1,47 +0,0 @@
-\label{\detokenize{tabular:table-having-problematic-cell}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\centering
-\begin{tabular}[t]{|*{2}{\X{1}{2}|}}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook\begin{itemize}
-\item {}
-\sphinxAtStartPar
-item1
-
-\item {}
-\sphinxAtStartPar
-item2
-
-\end{itemize}
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabular}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/table_having_stub_columns_and_formerly_problematic.tex b/tests/roots/test-latex-table/expects/table_having_stub_columns_and_formerly_problematic.tex
new file mode 100644
index 00000000000..548008a2379
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/table_having_stub_columns_and_formerly_problematic.tex
@@ -0,0 +1,68 @@
+\label{\detokenize{tabular:table-having-stub-columns-and-formerly-problematic}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\centering
+\begin{tabulary}{\linewidth}[t]{|T|T|T|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \begin{itemize}
+\item {}
+\sphinxAtStartPar
+instub1\sphinxhyphen{}1a
+
+\item {}
+\sphinxAtStartPar
+instub1\sphinxhyphen{}1b
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+instub1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxAtStartPar
+notinstub1\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{3}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabulary}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex
deleted file mode 100644
index fbd797a1bd3..00000000000
--- a/tests/roots/test-latex-table/expects/table_having_stub_columns_and_problematic_cell.tex
+++ /dev/null
@@ -1,49 +0,0 @@
-\label{\detokenize{tabular:table-having-both-stub-columns-and-problematic-cell}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\centering
-\begin{tabular}[t]{|*{3}{\X{1}{3}|}}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header3
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook\sphinxstyletheadfamily \begin{itemize}
-\item {}
-\sphinxAtStartPar
-instub1\sphinxhyphen{}1a
-
-\item {}
-\sphinxAtStartPar
-instub1\sphinxhyphen{}1b
-
-\end{itemize}
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-instub1\sphinxhyphen{}2
-&
-\sphinxAtStartPar
-notinstub1\sphinxhyphen{}3
-\\
-\sphinxhline\sphinxstyletheadfamily
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}3
-\\
-\sphinxbottomrule
-\end{tabular}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/table_having_threeparagraphs_cell_in_first_col.tex b/tests/roots/test-latex-table/expects/table_having_three_paragraphs_cell_in_first_col.tex
similarity index 53%
rename from tests/roots/test-latex-table/expects/table_having_threeparagraphs_cell_in_first_col.tex
rename to tests/roots/test-latex-table/expects/table_having_three_paragraphs_cell_in_first_col.tex
index 9acd9a86d46..ad1d9ee79b7 100644
--- a/tests/roots/test-latex-table/expects/table_having_threeparagraphs_cell_in_first_col.tex
+++ b/tests/roots/test-latex-table/expects/table_having_three_paragraphs_cell_in_first_col.tex
@@ -1,16 +1,18 @@
-\label{\detokenize{tabular:table-with-cell-in-first-column-having-three-paragraphs}}
+\label{\detokenize{tabular:table-having-three-paragraphs-cell-in-first-col}}
\begin{savenotes}\sphinxattablestart
\sphinxthistablewithglobalstyle
\centering
\begin{tabulary}{\linewidth}[t]{|T|}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{1}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{1}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1\sphinxhyphen{}par1
@@ -19,8 +21,10 @@
\sphinxAtStartPar
cell1\sphinxhyphen{}1\sphinxhyphen{}par3
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabulary}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_verbatim.tex b/tests/roots/test-latex-table/expects/table_having_verbatim.tex
index a002de58618..adc6baf6ceb 100644
--- a/tests/roots/test-latex-table/expects/table_having_verbatim.tex
+++ b/tests/roots/test-latex-table/expects/table_having_verbatim.tex
@@ -5,37 +5,54 @@
\centering
\begin{tabular}[t]{|*{2}{\X{1}{2}|}}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+
\begin{sphinxVerbatimintable}[commandchars=\\\{\}]
\PYG{n}{hello} \PYG{n}{world}
\end{sphinxVerbatimintable}
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabular}
\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_widths_and_formerly_problematic.tex b/tests/roots/test-latex-table/expects/table_having_widths_and_formerly_problematic.tex
new file mode 100644
index 00000000000..c103ab0a8aa
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/table_having_widths_and_formerly_problematic.tex
@@ -0,0 +1,64 @@
+\label{\detokenize{tabular:table-having-widths-and-formerly-problematic}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\centering
+\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\begin{itemize}
+\item {}
+\sphinxAtStartPar
+item1
+
+\item {}
+\sphinxAtStartPar
+item2
+
+\end{itemize}
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabular}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex b/tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex
deleted file mode 100644
index 1baf92c1ae6..00000000000
--- a/tests/roots/test-latex-table/expects/table_having_widths_and_problematic_cell.tex
+++ /dev/null
@@ -1,47 +0,0 @@
-\label{\detokenize{tabular:table-having-both-widths-and-problematic-cell}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\centering
-\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook\begin{itemize}
-\item {}
-\sphinxAtStartPar
-item1
-
-\item {}
-\sphinxAtStartPar
-item2
-
-\end{itemize}
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabular}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/table_having_widths.tex b/tests/roots/test-latex-table/expects/table_having_widths_option.tex
similarity index 50%
rename from tests/roots/test-latex-table/expects/table_having_widths.tex
rename to tests/roots/test-latex-table/expects/table_having_widths_option.tex
index e9863d277f6..668f4c63206 100644
--- a/tests/roots/test-latex-table/expects/table_having_widths.tex
+++ b/tests/roots/test-latex-table/expects/table_having_widths_option.tex
@@ -8,34 +8,50 @@
\phantomsection\label{\detokenize{tabular:namedtabular}}\label{\detokenize{tabular:mytabular}}\nobreak
\begin{tabular}[t]{\X{30}{100}\X{70}{100}}
\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxmidrule
-\sphinxtableatstartofbodyhook
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
-\sphinxhline
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}1
-&
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
\sphinxAtStartPar
cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
\\
\sphinxbottomrule
\end{tabular}
@@ -43,4 +59,4 @@
\sphinxattableend\end{savenotes}
\sphinxAtStartPar
-See {\hyperref[\detokenize{tabular:mytabular}]{\sphinxcrossref{\DUrole{std}{\DUrole{std-ref}{this}}}}}, same as {\hyperref[\detokenize{tabular:namedtabular}]{\sphinxcrossref{namedtabular}}}.
+See {\hyperref[\detokenize{tabular:mytabular}]{\sphinxcrossref{\DUrole{std}{\DUrole{std-ref}{this}}}}}, same as {\hyperref[\detokenize{tabular:namedtabular}]{\sphinxcrossref{namedtabular}}}.
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/table_with_tabularcolumns.tex b/tests/roots/test-latex-table/expects/table_with_tabularcolumns.tex
new file mode 100644
index 00000000000..64edba98e89
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/table_with_tabularcolumns.tex
@@ -0,0 +1,57 @@
+\label{\detokenize{tabular:table-with-tabularcolumns}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\sphinxthistablewithnovlinesstyle
+\centering
+\begin{tabular}[t]{cc}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabular}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/tabular_having_align_option.tex b/tests/roots/test-latex-table/expects/tabular_having_align_option.tex
new file mode 100644
index 00000000000..ba61c6bde2c
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/tabular_having_align_option.tex
@@ -0,0 +1,56 @@
+\label{\detokenize{tabular:tabular-having-align-option}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\raggedright
+\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabular}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/tabular_having_widths.tex b/tests/roots/test-latex-table/expects/tabular_having_widths.tex
deleted file mode 100644
index 15321d693cf..00000000000
--- a/tests/roots/test-latex-table/expects/tabular_having_widths.tex
+++ /dev/null
@@ -1,40 +0,0 @@
-\label{\detokenize{tabular:table-having-align-option-tabular}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\raggedright
-\begin{tabular}[t]{|\X{30}{100}|\X{70}{100}|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook
-\sphinxAtStartPar
-cell1\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabular}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/tabularcolumn.tex b/tests/roots/test-latex-table/expects/tabularcolumn.tex
deleted file mode 100644
index fcb01be3f50..00000000000
--- a/tests/roots/test-latex-table/expects/tabularcolumn.tex
+++ /dev/null
@@ -1,41 +0,0 @@
-\label{\detokenize{tabular:table-with-tabularcolumn}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\sphinxthistablewithnovlinesstyle
-\centering
-\begin{tabulary}{\linewidth}[t]{cc}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook
-\sphinxAtStartPar
-cell1\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabulary}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/expects/tabulary_having_align_option.tex b/tests/roots/test-latex-table/expects/tabulary_having_align_option.tex
new file mode 100644
index 00000000000..2d62b0e3ff1
--- /dev/null
+++ b/tests/roots/test-latex-table/expects/tabulary_having_align_option.tex
@@ -0,0 +1,56 @@
+\label{\detokenize{tabular:tabulary-having-align-option}}
+
+\begin{savenotes}\sphinxattablestart
+\sphinxthistablewithglobalstyle
+\raggedleft
+\begin{tabulary}{\linewidth}[t]{|T|T|}
+\sphinxtoprule
+\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxstyletheadfamily \sphinxAtStartPar
+header2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxmidrule
+\sphinxtableatstartofbodyhook\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell1\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell2\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxhline\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}1
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+&\begin{varwidth}[t]{\sphinxcolwidth{1}{2}}
+\sphinxAtStartPar
+cell3\sphinxhyphen{}2
+\sphinxbeforeendvarwidth
+\end{varwidth}%
+\\
+\sphinxbottomrule
+\end{tabulary}
+\sphinxtableafterendhook\par
+\sphinxattableend\end{savenotes}
\ No newline at end of file
diff --git a/tests/roots/test-latex-table/expects/tabulary_having_widths.tex b/tests/roots/test-latex-table/expects/tabulary_having_widths.tex
deleted file mode 100644
index 24634163010..00000000000
--- a/tests/roots/test-latex-table/expects/tabulary_having_widths.tex
+++ /dev/null
@@ -1,40 +0,0 @@
-\label{\detokenize{tabular:table-having-align-option-tabulary}}
-
-\begin{savenotes}\sphinxattablestart
-\sphinxthistablewithglobalstyle
-\raggedleft
-\begin{tabulary}{\linewidth}[t]{|T|T|}
-\sphinxtoprule
-\sphinxstyletheadfamily
-\sphinxAtStartPar
-header1
-&\sphinxstyletheadfamily
-\sphinxAtStartPar
-header2
-\\
-\sphinxmidrule
-\sphinxtableatstartofbodyhook
-\sphinxAtStartPar
-cell1\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell1\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell2\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell2\sphinxhyphen{}2
-\\
-\sphinxhline
-\sphinxAtStartPar
-cell3\sphinxhyphen{}1
-&
-\sphinxAtStartPar
-cell3\sphinxhyphen{}2
-\\
-\sphinxbottomrule
-\end{tabulary}
-\sphinxtableafterendhook\par
-\sphinxattableend\end{savenotes}
diff --git a/tests/roots/test-latex-table/longtable.rst b/tests/roots/test-latex-table/longtable.rst
index da6fa5c5cec..89a37b8ed8d 100644
--- a/tests/roots/test-latex-table/longtable.rst
+++ b/tests/roots/test-latex-table/longtable.rst
@@ -15,8 +15,8 @@ longtable
cell3-1 cell3-2
======= =======
-longtable having :widths: option
---------------------------------
+longtable having widths option
+------------------------------
.. _mylongtable:
@@ -35,8 +35,8 @@ longtable having :widths: option
See mylongtable_, same as :ref:`this one `.
-longtable having :align: option
--------------------------------
+longtable having align option
+-----------------------------
.. table::
:align: right
@@ -50,8 +50,8 @@ longtable having :align: option
cell3-1 cell3-2
======= =======
-longtable with tabularcolumn
-----------------------------
+longtable with tabularcolumns
+-----------------------------
.. tabularcolumns:: |c|c|
@@ -101,13 +101,12 @@ longtable having verbatim
* - cell3-1
- cell3-2
-longtable having both :widths: and problematic cell
----------------------------------------------------
+longtable having formerly problematic
+-------------------------------------
.. list-table::
:class: longtable
:header-rows: 1
- :widths: 30,70
* - header1
- header2
@@ -119,12 +118,13 @@ longtable having both :widths: and problematic cell
* - cell3-1
- cell3-2
-longtable having problematic cell
----------------------------------
+longtable having widths and formerly problematic
+------------------------------------------------
.. list-table::
:class: longtable
:header-rows: 1
+ :widths: 30,70
* - header1
- header2
@@ -136,8 +136,8 @@ longtable having problematic cell
* - cell3-1
- cell3-2
-longtable having both stub columns and problematic cell
--------------------------------------------------------
+longtable having stub columns and formerly problematic
+------------------------------------------------------
.. list-table::
:class: longtable
diff --git a/tests/roots/test-latex-table/tabular.rst b/tests/roots/test-latex-table/tabular.rst
index 15db823a05b..b5011539795 100644
--- a/tests/roots/test-latex-table/tabular.rst
+++ b/tests/roots/test-latex-table/tabular.rst
@@ -12,8 +12,8 @@ cell2-1 cell2-2
cell3-1 cell3-2
======= =======
-table having :widths: option
-----------------------------
+table having widths option
+--------------------------
.. _mytabular:
@@ -32,8 +32,8 @@ table having :widths: option
See :ref:`this `, same as namedtabular_.
-table having :align: option (tabulary)
---------------------------------------
+tabulary having align option
+----------------------------
.. table::
:align: right
@@ -46,8 +46,8 @@ table having :align: option (tabulary)
cell3-1 cell3-2
======= =======
-table having :align: option (tabular)
--------------------------------------
+tabular having align option
+---------------------------
.. table::
:align: left
@@ -61,8 +61,8 @@ table having :align: option (tabular)
cell3-1 cell3-2
======= =======
-table with tabularcolumn
-------------------------
+table with tabularcolumns
+-------------------------
.. tabularcolumns:: cc
@@ -74,8 +74,8 @@ cell2-1 cell2-2
cell3-1 cell3-2
======= =======
-table with cell in first column having three paragraphs
--------------------------------------------------------
+table having three paragraphs cell in first col
+-----------------------------------------------
+--------------+
| header1 |
@@ -121,12 +121,11 @@ table having verbatim
* - cell3-1
- cell3-2
-table having both :widths: and problematic cell
------------------------------------------------
+table having formerly problematic
+---------------------------------
.. list-table::
:header-rows: 1
- :widths: 30,70
* - header1
- header2
@@ -138,11 +137,12 @@ table having both :widths: and problematic cell
* - cell3-1
- cell3-2
-table having problematic cell
------------------------------
+table having widths and formerly problematic
+--------------------------------------------
.. list-table::
:header-rows: 1
+ :widths: 30,70
* - header1
- header2
@@ -154,8 +154,8 @@ table having problematic cell
* - cell3-1
- cell3-2
-table having both stub columns and problematic cell
----------------------------------------------------
+table having stub columns and formerly problematic
+--------------------------------------------------
.. list-table::
:header-rows: 1
diff --git a/tests/roots/test-root/markup.txt b/tests/roots/test-root/markup.txt
index 91f41946620..8f5e026a25f 100644
--- a/tests/roots/test-root/markup.txt
+++ b/tests/roots/test-root/markup.txt
@@ -182,7 +182,7 @@ With
Tables
------
-.. tabularcolumns:: |L|p{5cm}|R|
+.. tabularcolumns:: |*{1}{L|}p{5cm}|*{1}{R}|
.. _my-table:
@@ -223,6 +223,20 @@ Tables with multirow and multicol:
| |
+----+
+ +---+---+
+ | +---+ |
+ | | h | |
+ | +---+ |
+ +---+---+
+
+ .. rst-class:: longtable
+
+ +---+---+
+ | +---+ |
+ | | h | |
+ | +---+ |
+ +---+---+
+
.. list-table::
:header-rows: 0
@@ -278,27 +292,38 @@ Figures
Version markup
--------------
-.. versionadded:: 0.6
+.. version-added:: 0.6
Some funny **stuff**.
-.. versionchanged:: 0.6
+.. version-changed:: 0.6
Even more funny stuff.
-.. deprecated:: 0.6
+.. version-deprecated:: 0.6
Boring stuff.
-.. versionremoved:: 0.6
+.. version-removed:: 0.6
Goodbye boring stuff.
-.. versionadded:: 1.2
+.. version-added:: 1.2
+
+ First paragraph of version-added.
- First paragraph of versionadded.
+.. version-changed:: 1.2
+ First paragraph of version-changed.
-.. versionchanged:: 1.2
- First paragraph of versionchanged.
+ Second paragraph of version-changed.
- Second paragraph of versionchanged.
+.. version-added:: 0.6
+ Deprecated alias for version-added.
+.. version-changed:: 0.6
+ Deprecated alias for version-changed.
+
+.. deprecated:: 0.6
+ Deprecated alias for version-deprecated.
+
+.. versionremoved:: 0.6
+ Deprecated alias for version-removed.
Code blocks
-----------
@@ -469,3 +494,9 @@ Smart quotes
.. [#] Like footnotes.
+
+Link in a title: `Field lists `_
+---------------------------------------------------------------------------------------------------------------------
+
+Again: https://www.sphinx-doc.org/en/master/usage/restructuredtext/basics.html#field-lists
+------------------------------------------------------------------------------------------
diff --git a/tests/roots/test-toctree-only/conf.py b/tests/roots/test-toctree-only/conf.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/roots/test-toctree-only/index.rst b/tests/roots/test-toctree-only/index.rst
new file mode 100644
index 00000000000..cbfb903a588
--- /dev/null
+++ b/tests/roots/test-toctree-only/index.rst
@@ -0,0 +1,26 @@
+test-toctree-only
+=================
+
+.. only:: not nonexistent
+
+ hello world
+
+ .. only:: text or not text
+
+ .. js:data:: test_toctree_only1
+
+ lorem ipsum dolor sit amet...
+
+ .. only:: not lorem
+
+ .. only:: not ipsum
+
+ .. js:data:: test_toctree_only2
+
+ lorem ipsum dolor sit amet...
+
+ after ``only:: not ipsum``
+
+ .. js:data:: test_toctree_only2
+
+we're just normal men; we're just innocent men
diff --git a/tests/test_addnodes.py b/tests/test_addnodes.py
index b3f77ad2bb9..8cac53b2828 100644
--- a/tests/test_addnodes.py
+++ b/tests/test_addnodes.py
@@ -20,7 +20,9 @@ def sig_elements() -> Iterator[set[type[addnodes.desc_sig_element]]]:
addnodes.SIG_ELEMENTS = original # restore the previous value
-def test_desc_sig_element_nodes(sig_elements):
+def test_desc_sig_element_nodes(
+ sig_elements: set[type[addnodes.desc_sig_element]],
+) -> None:
"""Test the registration of ``desc_sig_element`` subclasses."""
# expected desc_sig_* node classes (must be declared *after* reloading
# the module since otherwise the objects are not the correct ones)
diff --git a/tests/test_application.py b/tests/test_application.py
index b2bd7bbc66c..73c3f3556ca 100644
--- a/tests/test_application.py
+++ b/tests/test_application.py
@@ -20,6 +20,7 @@
if TYPE_CHECKING:
import os
+ from typing import Any
def test_instantiation(
@@ -50,7 +51,7 @@ def test_instantiation(
@pytest.mark.sphinx('html', testroot='root')
def test_events(app: SphinxTestApp) -> None:
- def empty():
+ def empty() -> None:
pass
with pytest.raises(ExtensionError) as excinfo:
@@ -62,7 +63,7 @@ def empty():
app.add_event('my_event')
assert "Event 'my_event' already present" in str(excinfo.value)
- def mock_callback(a_app, *args):
+ def mock_callback(a_app: SphinxTestApp, *args: Any) -> str:
assert a_app is app
assert emit_args == args
return 'ret'
diff --git a/tests/test_builders/test_build_gettext.py b/tests/test_builders/test_build_gettext.py
index 08c6e07d9ac..30798cc5070 100644
--- a/tests/test_builders/test_build_gettext.py
+++ b/tests/test_builders/test_build_gettext.py
@@ -323,3 +323,29 @@ def test_gettext_literalblock_additional(app: SphinxTestApp) -> None:
"stdout object\\n>>>\\n>>> if __name__ == '__main__': # if run this py "
'file as python script\\n... main() # call main',
]
+
+
+@pytest.mark.sphinx('gettext', testroot='intl', srcdir='gettext')
+def test_gettext_trailing_backslashes(app: SphinxTestApp) -> None:
+ app.build(force_all=True)
+
+ assert (app.outdir / 'backslashes.pot').is_file()
+ pot = (app.outdir / 'backslashes.pot').read_text(encoding='utf8')
+ msg_ids = get_msgids(pot)
+ assert msg_ids == [
+ 'i18n with backslashes',
+ (
+ 'line 1 line 2 line 3 '
+ # middle backslashes are escaped normally
+ 'line 4a \\\\ and 4b '
+ # whitespaces after backslashes are dropped
+ 'line with spaces after backslash '
+ 'last line with spaces '
+ 'and done 1'
+ ),
+ 'a b c',
+ 'last trailing \\\\ \\\\ is ignored',
+ 'See [#]_',
+ 'footnote with backslashes and done 2',
+ 'directive with backslashes',
+ ]
diff --git a/tests/test_builders/test_build_html.py b/tests/test_builders/test_build_html.py
index d374ff93177..a0db51415ad 100644
--- a/tests/test_builders/test_build_html.py
+++ b/tests/test_builders/test_build_html.py
@@ -8,7 +8,12 @@
from typing import TYPE_CHECKING
import pytest
+from docutils import nodes
+from docutils.parsers import rst
+from docutils.readers import standalone
+from docutils.writers import html5_polyglot
+from sphinx import addnodes
from sphinx._cli.util.errors import strip_escape_sequences
from sphinx.builders.html import (
StandaloneHTMLBuilder,
@@ -17,14 +22,14 @@
)
from sphinx.errors import ConfigError
from sphinx.testing.util import etree_parse
+from sphinx.util.docutils import _get_settings, new_document
from sphinx.util.inventory import InventoryFile, _InventoryItem
+from sphinx.writers.html5 import HTML5Translator
from tests.test_builders.xpath_data import FIGURE_CAPTION
from tests.test_builders.xpath_util import check_xpath
if TYPE_CHECKING:
- from typing import Any
-
from sphinx.testing.util import SphinxTestApp
@@ -146,11 +151,38 @@ def test_html_parallel(app: SphinxTestApp) -> None:
app.build()
-@pytest.mark.sphinx('html', testroot='build-html-translator')
+class ConfHTMLTranslator(HTML5Translator):
+ depart_with_node = 0
+
+ def depart_admonition(self, node: nodes.Element | None = None) -> None:
+ if node is not None:
+ self.depart_with_node += 1
+ super().depart_admonition(node)
+
+
+@pytest.mark.sphinx('html', testroot='_blank')
def test_html_translator(app: SphinxTestApp) -> None:
- app.build()
- assert isinstance(app.builder, StandaloneHTMLBuilder) # type-checking
- assert app.builder.docwriter.visitor.depart_with_node == 10
+ settings = _get_settings(
+ standalone.Reader, rst.Parser, html5_polyglot.Writer, defaults={}
+ )
+ doctree = new_document(__file__, settings)
+ doctree.append(addnodes.seealso('test', nodes.Text('test')))
+ doctree.append(nodes.note('test', nodes.Text('test')))
+ doctree.append(nodes.warning('test', nodes.Text('test')))
+ doctree.append(nodes.attention('test', nodes.Text('test')))
+ doctree.append(nodes.caution('test', nodes.Text('test')))
+ doctree.append(nodes.danger('test', nodes.Text('test')))
+ doctree.append(nodes.error('test', nodes.Text('test')))
+ doctree.append(nodes.hint('test', nodes.Text('test')))
+ doctree.append(nodes.important('test', nodes.Text('test')))
+ doctree.append(nodes.tip('test', nodes.Text('test')))
+
+ visitor = ConfHTMLTranslator(doctree, app.builder)
+ assert isinstance(visitor, ConfHTMLTranslator)
+ assert isinstance(visitor, HTML5Translator)
+ doctree.walkabout(visitor)
+
+ assert visitor.depart_with_node == 10
@pytest.mark.parametrize(
@@ -382,8 +414,6 @@ def test_html_style(app: SphinxTestApp) -> None:
},
)
def test_html_sidebar(app: SphinxTestApp) -> None:
- ctx: dict[str, Any] = {}
-
# default for alabaster
app.build(force_all=True)
result = (app.outdir / 'index.html').read_text(encoding='utf8')
@@ -399,12 +429,12 @@ def test_html_sidebar(app: SphinxTestApp) -> None:
assert 'This Page
' in result
assert isinstance(app.builder, StandaloneHTMLBuilder) # type-checking
- app.builder.add_sidebars('index', ctx)
- assert ctx['sidebars'] == [
+ sidebars = app.builder._get_sidebars('index')
+ assert sidebars == (
'localtoc.html',
'searchfield.html',
'sourcelink.html',
- ]
+ )
# only sourcelink.html
app.config.html_sidebars = {'**': ['sourcelink.html']}
@@ -422,8 +452,8 @@ def test_html_sidebar(app: SphinxTestApp) -> None:
assert 'This Page
' in result
assert isinstance(app.builder, StandaloneHTMLBuilder) # type-checking
- app.builder.add_sidebars('index', ctx)
- assert ctx['sidebars'] == ['sourcelink.html']
+ sidebars = app.builder._get_sidebars('index')
+ assert sidebars == ('sourcelink.html',)
# no sidebars
app.config.html_sidebars = {'**': []}
@@ -443,8 +473,8 @@ def test_html_sidebar(app: SphinxTestApp) -> None:
assert 'This Page
' not in result
assert isinstance(app.builder, StandaloneHTMLBuilder) # type-checking
- app.builder.add_sidebars('index', ctx)
- assert ctx['sidebars'] == []
+ sidebars = app.builder._get_sidebars('index')
+ assert sidebars == ()
@pytest.mark.parametrize(
diff --git a/tests/test_builders/test_build_html_5_output.py b/tests/test_builders/test_build_html_5_output.py
index db9dd8a749c..6798482f628 100644
--- a/tests/test_builders/test_build_html_5_output.py
+++ b/tests/test_builders/test_build_html_5_output.py
@@ -205,23 +205,53 @@ def checker(nodes: Iterable[Element]) -> Literal[True]:
(
'markup.html',
".//div[@class='versionadded']/p/span",
- tail_check('First paragraph of versionadded'),
+ tail_check('First paragraph of version-added'),
+ ),
+ (
+ 'markup.html',
+ ".//div[@class='versionadded']/p/span",
+ tail_check('Deprecated alias for version-added'),
),
(
'markup.html',
".//div[@class='versionchanged']/p/span",
- tail_check('First paragraph of versionchanged'),
+ tail_check('First paragraph of version-changed'),
),
(
'markup.html',
".//div[@class='versionchanged']/p",
- 'Second paragraph of versionchanged',
+ 'Second paragraph of version-changed',
+ ),
+ (
+ 'markup.html',
+ ".//div[@class='versionchanged']/p/span",
+ tail_check('Deprecated alias for version-changed'),
+ ),
+ (
+ 'markup.html',
+ ".//div[@class='deprecated']/p/span",
+ 'Deprecated since version 0.6: ',
+ ),
+ (
+ 'markup.html',
+ ".//div[@class='deprecated']/p/span",
+ tail_check('Boring stuff.'),
+ ),
+ (
+ 'markup.html',
+ ".//div[@class='deprecated']/p/span",
+ tail_check('Deprecated alias for version-deprecated'),
),
(
'markup.html',
".//div[@class='versionremoved']/p/span",
'Removed in version 0.6: ',
),
+ (
+ 'markup.html',
+ ".//div[@class='versionremoved']/p/span",
+ tail_check('Deprecated alias for version-removed'),
+ ),
# footnote reference
('markup.html', ".//a[@class='footnote-reference brackets']", r'1'),
# created by reference lookup
diff --git a/tests/test_builders/test_build_html_assets.py b/tests/test_builders/test_build_html_assets.py
index 7478f41829d..9059b423224 100644
--- a/tests/test_builders/test_build_html_assets.py
+++ b/tests/test_builders/test_build_html_assets.py
@@ -68,7 +68,7 @@ def test_html_assets(app: SphinxTestApp) -> None:
@pytest.mark.sphinx('html', testroot='html_assets')
-def test_assets_order(app, monkeypatch):
+def test_assets_order(app: SphinxTestApp, monkeypatch: pytest.MonkeyPatch) -> None:
monkeypatch.setattr(sphinx.builders.html, '_file_checksum', lambda o, f: '')
app.add_css_file('normal.css')
diff --git a/tests/test_builders/test_build_html_maths.py b/tests/test_builders/test_build_html_maths.py
index cc21142b355..8654ca99604 100644
--- a/tests/test_builders/test_build_html_maths.py
+++ b/tests/test_builders/test_build_html_maths.py
@@ -8,6 +8,9 @@
from sphinx.errors import ConfigError
if TYPE_CHECKING:
+ from collections.abc import Callable
+
+ from sphinx.testing.fixtures import _app_params
from sphinx.testing.util import SphinxTestApp
@@ -42,7 +45,9 @@ def test_html_math_renderer_is_imgmath(app: SphinxTestApp) -> None:
testroot='basic',
confoverrides={'extensions': ['sphinxcontrib.jsmath', 'sphinx.ext.imgmath']},
)
-def test_html_math_renderer_is_duplicated(make_app, app_params):
+def test_html_math_renderer_is_duplicated(
+ make_app: Callable[..., SphinxTestApp], app_params: _app_params
+) -> None:
args, kwargs = app_params
with pytest.raises(
ConfigError,
@@ -83,7 +88,9 @@ def test_html_math_renderer_is_chosen(app: SphinxTestApp) -> None:
'html_math_renderer': 'imgmath',
},
)
-def test_html_math_renderer_is_mismatched(make_app, app_params):
+def test_html_math_renderer_is_mismatched(
+ make_app: Callable[..., SphinxTestApp], app_params: _app_params
+) -> None:
args, kwargs = app_params
with pytest.raises(
ConfigError,
diff --git a/tests/test_builders/test_build_html_numfig.py b/tests/test_builders/test_build_html_numfig.py
index e338c5b92e5..144d9958d0d 100644
--- a/tests/test_builders/test_build_html_numfig.py
+++ b/tests/test_builders/test_build_html_numfig.py
@@ -11,6 +11,10 @@
from tests.test_builders.xpath_util import check_xpath
if TYPE_CHECKING:
+ from collections.abc import Callable
+ from pathlib import Path
+ from xml.etree.ElementTree import ElementTree
+
from sphinx.testing.util import SphinxTestApp
@@ -73,7 +77,14 @@ def test_numfig_disabled_warn(app: SphinxTestApp) -> None:
)
@pytest.mark.sphinx('html', testroot='numfig')
@pytest.mark.test_params(shared_result='test_build_html_numfig')
-def test_numfig_disabled(app, cached_etree_parse, fname, path, check, be_found):
+def test_numfig_disabled(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -305,8 +316,13 @@ def test_numfig_without_numbered_toctree_warn(app: SphinxTestApp) -> None:
confoverrides={'numfig': True},
)
def test_numfig_without_numbered_toctree(
- app, cached_etree_parse, fname, path, check, be_found
-):
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
# remove :numbered: option
index = (app.srcdir / 'index.rst').read_text(encoding='utf8')
index = re.sub(':numbered:.*', '', index)
@@ -538,8 +554,13 @@ def test_numfig_with_numbered_toctree_warn(app: SphinxTestApp) -> None:
)
@pytest.mark.test_params(shared_result='test_build_html_numfig_on')
def test_numfig_with_numbered_toctree(
- app, cached_etree_parse, fname, path, check, be_found
-):
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -780,7 +801,14 @@ def test_numfig_with_prefix_warn(app: SphinxTestApp) -> None:
},
)
@pytest.mark.test_params(shared_result='test_build_html_numfig_format_warn')
-def test_numfig_with_prefix(app, cached_etree_parse, fname, path, check, be_found):
+def test_numfig_with_prefix(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -1006,8 +1034,13 @@ def test_numfig_with_secnum_depth_warn(app: SphinxTestApp) -> None:
)
@pytest.mark.test_params(shared_result='test_build_html_numfig_depth_2')
def test_numfig_with_secnum_depth(
- app, cached_etree_parse, fname, path, check, be_found
-):
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str | None,
+ be_found: bool,
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -1103,6 +1136,10 @@ def test_numfig_with_secnum_depth(
confoverrides={'numfig': True},
)
@pytest.mark.test_params(shared_result='test_build_html_numfig_on')
-def test_numfig_with_singlehtml(app, cached_etree_parse, expect):
+def test_numfig_with_singlehtml(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ expect: tuple[str, str, bool],
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / 'index.html'), 'index.html', *expect)
diff --git a/tests/test_builders/test_build_html_tocdepth.py b/tests/test_builders/test_build_html_tocdepth.py
index 003ba02e5f0..0fe83e0ff34 100644
--- a/tests/test_builders/test_build_html_tocdepth.py
+++ b/tests/test_builders/test_build_html_tocdepth.py
@@ -2,11 +2,20 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
import pytest
from tests.test_builders.xpath_html_util import _intradocument_hyperlink_check
from tests.test_builders.xpath_util import check_xpath
+if TYPE_CHECKING:
+ from collections.abc import Callable
+ from pathlib import Path
+ from xml.etree.ElementTree import ElementTree
+
+ from sphinx.testing.util import SphinxTestApp
+
@pytest.mark.parametrize(
('fname', 'path', 'check', 'be_found'),
@@ -68,7 +77,14 @@
)
@pytest.mark.sphinx('html', testroot='tocdepth')
@pytest.mark.test_params(shared_result='test_build_html_tocdepth')
-def test_tocdepth(app, cached_etree_parse, fname, path, check, be_found):
+def test_tocdepth(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ fname: str,
+ path: str,
+ check: str,
+ be_found: bool,
+) -> None:
app.build()
# https://github.com/sphinx-doc/sphinx/issues/1251
check_xpath(cached_etree_parse(app.outdir / fname), fname, path, check, be_found)
@@ -111,6 +127,10 @@ def test_tocdepth(app, cached_etree_parse, fname, path, check, be_found):
)
@pytest.mark.sphinx('singlehtml', testroot='tocdepth')
@pytest.mark.test_params(shared_result='test_build_html_tocdepth')
-def test_tocdepth_singlehtml(app, cached_etree_parse, expect):
+def test_tocdepth_singlehtml(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ expect: tuple[str, str, bool],
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / 'index.html'), 'index.html', *expect)
diff --git a/tests/test_builders/test_build_html_toctree.py b/tests/test_builders/test_build_html_toctree.py
index 255a2001960..ae0dc04fc08 100644
--- a/tests/test_builders/test_build_html_toctree.py
+++ b/tests/test_builders/test_build_html_toctree.py
@@ -14,6 +14,10 @@
from tests.test_builders.xpath_util import check_xpath
if TYPE_CHECKING:
+ from collections.abc import Callable, Sequence
+ from pathlib import Path
+ from xml.etree.ElementTree import Element, ElementTree
+
from sphinx.testing.util import SphinxTestApp
@@ -71,7 +75,11 @@ def test_numbered_toctree(app: SphinxTestApp) -> None:
],
)
@pytest.mark.sphinx('singlehtml', testroot='toctree')
-def test_singlehtml_hyperlinks(app, cached_etree_parse, expect):
+def test_singlehtml_hyperlinks(
+ app: SphinxTestApp,
+ cached_etree_parse: Callable[[Path], ElementTree],
+ expect: tuple[str, str | Callable[[Sequence[Element]], None]],
+) -> None:
app.build()
check_xpath(cached_etree_parse(app.outdir / 'index.html'), 'index.html', *expect)
@@ -81,7 +89,9 @@ def test_singlehtml_hyperlinks(app, cached_etree_parse, expect):
testroot='toctree-multiple-parents',
confoverrides={'html_theme': 'alabaster'},
)
-def test_toctree_multiple_parents(app, cached_etree_parse):
+def test_toctree_multiple_parents(
+ app: SphinxTestApp, cached_etree_parse: Callable[[Path], ElementTree]
+) -> None:
# The lexicographically greatest parent of the document in global toctree
# should be chosen, regardless of the order in which files are read
with patch.object(app.builder, '_read_serial') as m:
diff --git a/tests/test_builders/test_build_latex.py b/tests/test_builders/test_build_latex.py
index ea585cd6f21..7a51a82e51a 100644
--- a/tests/test_builders/test_build_latex.py
+++ b/tests/test_builders/test_build_latex.py
@@ -12,6 +12,7 @@
from subprocess import CalledProcessError
from typing import TYPE_CHECKING
+import docutils
import pygments
import pytest
@@ -54,7 +55,9 @@ def kpsetest(*filenames):
# compile latex document with app.config.latex_engine
-def compile_latex_document(app, filename='projectnamenotset.tex', docclass='manual'):
+def compile_latex_document(
+ app, filename='projectnamenotset.tex', docclass='manual', runtwice=False
+):
# now, try to run latex over it
try:
with chdir(app.outdir):
@@ -72,6 +75,17 @@ def compile_latex_document(app, filename='projectnamenotset.tex', docclass='manu
filename,
]
subprocess.run(args, capture_output=True, check=True)
+ # Run a second time (if engine is pdflatex), to have a chance to
+ # detect problems caused on second LaTeX pass (for example, this
+ # is required for the TOC in PDF to show up, for internal
+ # hyperlinks to actually work). Of course, this increases
+ # duration of test, but also its usefulness.
+ # TODO: in theory the correct way is to run Latexmk with options
+ # as configured in the Makefile and in presence of latexmkrc
+ # or latexmkjarc and also sphinx.xdy and other xindy support.
+ # And two passes are not enough except for simplest documents.
+ if runtwice:
+ subprocess.run(args, capture_output=True, check=True)
except OSError as exc: # most likely the latex executable was not found
raise pytest.skip.Exception from exc
except CalledProcessError as exc:
@@ -89,6 +103,10 @@ def compile_latex_document(app, filename='projectnamenotset.tex', docclass='manu
not kpsetest(*STYLEFILES),
reason='not running latex, the required styles do not seem to be installed',
)
+skip_if_docutils_not_at_least_at_0_22 = pytest.mark.skipif(
+ docutils.__version_info__[:2] < (0, 22),
+ reason='this test requires Docutils at least at 0.22',
+)
class RemoteImageHandler(http.server.BaseHTTPRequestHandler):
@@ -116,17 +134,17 @@ def do_GET(self):
@skip_if_requested
@skip_if_stylefiles_notfound
@pytest.mark.parametrize(
- ('engine', 'docclass', 'python_maximum_signature_line_length'),
+ ('engine', 'docclass', 'python_maximum_signature_line_length', 'runtwice'),
# Only running test with `python_maximum_signature_line_length` not None with last
# LaTeX engine to reduce testing time, as if this configuration does not fail with
# one engine, it's almost impossible it would fail with another.
[
- ('pdflatex', 'manual', None),
- ('pdflatex', 'howto', None),
- ('lualatex', 'manual', None),
- ('lualatex', 'howto', None),
- ('xelatex', 'manual', 1),
- ('xelatex', 'howto', 1),
+ ('pdflatex', 'manual', None, True),
+ ('pdflatex', 'howto', None, True),
+ ('lualatex', 'manual', None, False),
+ ('lualatex', 'howto', None, False),
+ ('xelatex', 'manual', 1, False),
+ ('xelatex', 'howto', 1, False),
],
)
@pytest.mark.sphinx(
@@ -134,7 +152,9 @@ def do_GET(self):
testroot='root',
freshenv=True,
)
-def test_build_latex_doc(app, engine, docclass, python_maximum_signature_line_length):
+def test_build_latex_doc(
+ app, engine, docclass, python_maximum_signature_line_length, runtwice
+):
app.config.python_maximum_signature_line_length = (
python_maximum_signature_line_length
)
@@ -143,7 +163,7 @@ def test_build_latex_doc(app, engine, docclass, python_maximum_signature_line_le
}
intersphinx_setup(app)
app.config.latex_engine = engine
- app.config.latex_documents = [app.config.latex_documents[0][:4] + (docclass,)]
+ app.config.latex_documents = [(*app.config.latex_documents[0][:4], docclass)]
if engine == 'xelatex':
app.config.latex_table_style = ['booktabs']
elif engine == 'lualatex':
@@ -158,7 +178,23 @@ def test_build_latex_doc(app, engine, docclass, python_maximum_signature_line_le
# file from latex_additional_files
assert (app.outdir / 'svgimg.svg').is_file()
- compile_latex_document(app, 'sphinxtests.tex', docclass)
+ compile_latex_document(app, 'sphinxtests.tex', docclass, runtwice)
+
+
+@skip_if_requested
+@skip_if_stylefiles_notfound
+@skip_if_docutils_not_at_least_at_0_22
+@pytest.mark.parametrize('engine', ['pdflatex', 'lualatex', 'xelatex'])
+@pytest.mark.sphinx(
+ 'latex',
+ testroot='latex-images-css3-lengths',
+)
+def test_build_latex_with_css3_lengths(app, engine):
+ app.config.latex_engine = engine
+ app.config.latex_documents = [(*app.config.latex_documents[0][:4], 'howto')]
+ app.builder.init()
+ app.build(force_all=True)
+ compile_latex_document(app, docclass='howto')
@pytest.mark.sphinx('latex', testroot='root')
@@ -944,13 +980,20 @@ def test_footnote(app: SphinxTestApp) -> None:
'footnote in table caption\n%\n\\end{footnotetext}\\ignorespaces %\n'
'\\begin{footnotetext}[5]\\sphinxAtStartFootnote\n'
'footnote in table header\n%\n\\end{footnotetext}\\ignorespaces '
+ '\\begin{varwidth}[t]{\\sphinxcolwidth{1}{2}}'
'\n\\sphinxAtStartPar\n'
- 'VIDIOC\\_CROPCAP\n&\n\\sphinxAtStartPar\n'
+ 'VIDIOC\\_CROPCAP\n'
+ '\\sphinxbeforeendvarwidth\n'
+ '\\end{varwidth}%\n'
) in result
assert (
+ '&\\begin{varwidth}[t]{\\sphinxcolwidth{1}{2}}\n'
+ '\\sphinxAtStartPar\n'
'Information about VIDIOC\\_CROPCAP %\n'
'\\begin{footnote}[6]\\sphinxAtStartFootnote\n'
- 'footnote in table not in header\n%\n\\end{footnote}\n\\\\\n'
+ 'footnote in table not in header\n%\n\\end{footnote}\n'
+ '\\sphinxbeforeendvarwidth\n'
+ '\\end{varwidth}%\n\\\\\n'
'\\sphinxbottomrule\n\\end{tabulary}\n'
'\\sphinxtableafterendhook\\par\n\\sphinxattableend\\end{savenotes}\n'
) in result
@@ -1008,11 +1051,15 @@ def test_reference_in_caption_and_codeblock_in_footnote(app: SphinxTestApp) -> N
'{I am in a footnote}}}}}'
) in result
assert (
- '&\n\\sphinxAtStartPar\nThis is one more footnote with some code in it %\n'
+ '&\\begin{varwidth}[t]{\\sphinxcolwidth{1}{2}}\n'
+ '\\sphinxAtStartPar\nThis is one more footnote with some code in it %\n'
'\\begin{footnote}[12]\\sphinxAtStartFootnote\n'
'Third footnote in longtable\n'
) in result
- assert '\\end{sphinxVerbatim}\n%\n\\end{footnote}.\n' in result
+ assert (
+ '\\end{sphinxVerbatim}\n%\n\\end{footnote}.\n'
+ '\\sphinxbeforeendvarwidth\n\\end{varwidth}%\n\\\\'
+ ) in result
assert '\\begin{sphinxVerbatim}[commandchars=\\\\\\{\\}]' in result
@@ -1554,7 +1601,7 @@ def test_latex_table_tabulars(app: SphinxTestApp) -> None:
result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
tables = {}
for chap in re.split(r'\\(?:section|chapter){', result)[1:]:
- sectname, content = chap.split('}', 1)
+ sectname, _, content = chap.partition('}')
content = re.sub(r'\\sphinxstepscope', '', content) # filter a separator
tables[sectname] = content.strip()
@@ -1565,60 +1612,22 @@ def get_expected(name):
.strip()
)
- # simple_table
- actual = tables['simple table']
- expected = get_expected('simple_table')
- assert actual == expected
-
- # table having :widths: option
- actual = tables['table having :widths: option']
- expected = get_expected('table_having_widths')
- assert actual == expected
-
- # table having :align: option (tabulary)
- actual = tables['table having :align: option (tabulary)']
- expected = get_expected('tabulary_having_widths')
- assert actual == expected
-
- # table having :align: option (tabular)
- actual = tables['table having :align: option (tabular)']
- expected = get_expected('tabular_having_widths')
- assert actual == expected
-
- # table with tabularcolumn
- actual = tables['table with tabularcolumn']
- expected = get_expected('tabularcolumn')
- assert actual == expected
-
- # table with cell in first column having three paragraphs
- actual = tables['table with cell in first column having three paragraphs']
- expected = get_expected('table_having_threeparagraphs_cell_in_first_col')
- assert actual == expected
-
- # table having caption
- actual = tables['table having caption']
- expected = get_expected('table_having_caption')
- assert actual == expected
-
- # table having verbatim
- actual = tables['table having verbatim']
- expected = get_expected('table_having_verbatim')
- assert actual == expected
-
- # table having problematic cell
- actual = tables['table having problematic cell']
- expected = get_expected('table_having_problematic_cell')
- assert actual == expected
-
- # table having both :widths: and problematic cell
- actual = tables['table having both :widths: and problematic cell']
- expected = get_expected('table_having_widths_and_problematic_cell')
- assert actual == expected
-
- # table having both stub columns and problematic cell
- actual = tables['table having both stub columns and problematic cell']
- expected = get_expected('table_having_stub_columns_and_problematic_cell')
- assert actual == expected
+ for sectname in (
+ 'simple table',
+ 'table having widths option',
+ 'tabulary having align option',
+ 'tabular having align option',
+ 'table with tabularcolumns',
+ 'table having three paragraphs cell in first col',
+ 'table having caption',
+ 'table having verbatim',
+ 'table having formerly problematic',
+ 'table having widths and formerly problematic',
+ 'table having stub columns and formerly problematic',
+ ):
+ actual = tables[sectname]
+ expected = get_expected(sectname.replace(' ', '_'))
+ assert actual == expected
@pytest.mark.sphinx(
@@ -1632,7 +1641,7 @@ def test_latex_table_longtable(app: SphinxTestApp) -> None:
result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
tables = {}
for chap in re.split(r'\\(?:section|chapter){', result)[1:]:
- sectname, content = chap.split('}', 1)
+ sectname, _, content = chap.partition('}')
content = re.sub(r'\\sphinxstepscope', '', content) # filter a separator
tables[sectname] = content.strip()
@@ -1643,50 +1652,20 @@ def get_expected(name):
.strip()
)
- # longtable
- actual = tables['longtable']
- expected = get_expected('longtable')
- assert actual == expected
-
- # longtable having :widths: option
- actual = tables['longtable having :widths: option']
- expected = get_expected('longtable_having_widths')
- assert actual == expected
-
- # longtable having :align: option
- actual = tables['longtable having :align: option']
- expected = get_expected('longtable_having_align')
- assert actual == expected
-
- # longtable with tabularcolumn
- actual = tables['longtable with tabularcolumn']
- expected = get_expected('longtable_with_tabularcolumn')
- assert actual == expected
-
- # longtable having caption
- actual = tables['longtable having caption']
- expected = get_expected('longtable_having_caption')
- assert actual == expected
-
- # longtable having verbatim
- actual = tables['longtable having verbatim']
- expected = get_expected('longtable_having_verbatim')
- assert actual == expected
-
- # longtable having problematic cell
- actual = tables['longtable having problematic cell']
- expected = get_expected('longtable_having_problematic_cell')
- assert actual == expected
-
- # longtable having both :widths: and problematic cell
- actual = tables['longtable having both :widths: and problematic cell']
- expected = get_expected('longtable_having_widths_and_problematic_cell')
- assert actual == expected
-
- # longtable having both stub columns and problematic cell
- actual = tables['longtable having both stub columns and problematic cell']
- expected = get_expected('longtable_having_stub_columns_and_problematic_cell')
- assert actual == expected
+ for sectname in (
+ 'longtable',
+ 'longtable having widths option',
+ 'longtable having align option',
+ 'longtable with tabularcolumns',
+ 'longtable having caption',
+ 'longtable having verbatim',
+ 'longtable having formerly problematic',
+ 'longtable having widths and formerly problematic',
+ 'longtable having stub columns and formerly problematic',
+ ):
+ actual = tables[sectname]
+ expected = get_expected(sectname.replace(' ', '_'))
+ assert actual == expected
@pytest.mark.sphinx(
@@ -1700,7 +1679,7 @@ def test_latex_table_complex_tables(app: SphinxTestApp) -> None:
result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
tables = {}
for chap in re.split(r'\\(?:section|renewcommand){', result)[1:]:
- sectname, content = chap.split('}', 1)
+ sectname, _, content = chap.partition('}')
tables[sectname] = content.strip()
def get_expected(name):
@@ -1710,22 +1689,14 @@ def get_expected(name):
.strip()
)
- # grid table
- actual = tables['grid table']
- expected = get_expected('gridtable')
- assert actual == expected
-
- # grid table with tabularcolumns
- # MEMO: filename should end with tabularcolumns but tabularcolumn has been
- # used in existing other cases
- actual = tables['grid table with tabularcolumns having no vline']
- expected = get_expected('gridtable_with_tabularcolumn')
- assert actual == expected
-
- # complex spanning cell
- actual = tables['complex spanning cell']
- expected = get_expected('complex_spanning_cell')
- assert actual == expected
+ for sectname in (
+ 'grid table',
+ 'grid table with tabularcolumns',
+ 'complex spanning cell',
+ ):
+ actual = tables[sectname]
+ expected = get_expected(sectname.replace(' ', '_'))
+ assert actual == expected
@pytest.mark.sphinx('latex', testroot='latex-table')
@@ -1948,10 +1919,16 @@ def test_latex_labels(app: SphinxTestApp) -> None:
result = (app.outdir / 'projectnamenotset.tex').read_text(encoding='utf8')
+ # ref: docutils r10151
+ if docutils.__version_info__[:2] < (0, 22):
+ figure_id, table_id = 'id1', 'id2'
+ else:
+ figure_id, table_id = 'id2', 'id3'
+
# figures
assert (
r'\caption{labeled figure}'
- r'\label{\detokenize{index:id1}}'
+ r'\label{\detokenize{index:' + figure_id + '}}'
r'\label{\detokenize{index:figure2}}'
r'\label{\detokenize{index:figure1}}'
r'\end{figure}'
@@ -1977,7 +1954,7 @@ def test_latex_labels(app: SphinxTestApp) -> None:
# tables
assert (
r'\sphinxcaption{table caption}'
- r'\label{\detokenize{index:id2}}'
+ r'\label{\detokenize{index:' + table_id + '}}'
r'\label{\detokenize{index:table2}}'
r'\label{\detokenize{index:table1}}'
) in result
@@ -2001,9 +1978,11 @@ def test_latex_labels(app: SphinxTestApp) -> None:
r'\label{\detokenize{otherdoc::doc}}'
) in result
- # Embedded standalone hyperlink reference
+ # Named hyperlink reference with embedded alias reference
# See: https://github.com/sphinx-doc/sphinx/issues/5948
assert result.count(r'\label{\detokenize{index:section1}}') == 1
+ # https://github.com/sphinx-doc/sphinx/issues/13609
+ assert r'\phantomsection\label{\detokenize{index:id' not in result
@pytest.mark.sphinx('latex', testroot='latex-figure-in-admonition')
diff --git a/tests/test_builders/test_build_linkcheck.py b/tests/test_builders/test_build_linkcheck.py
index 32b7ae79ff7..a09a4a42216 100644
--- a/tests/test_builders/test_build_linkcheck.py
+++ b/tests/test_builders/test_build_linkcheck.py
@@ -10,6 +10,7 @@
import wsgiref.handlers
from base64 import b64encode
from http.server import BaseHTTPRequestHandler
+from io import StringIO
from queue import Queue
from typing import TYPE_CHECKING
from unittest import mock
@@ -27,6 +28,7 @@
RateLimit,
compile_linkcheck_allowed_redirects,
)
+from sphinx.errors import ConfigError
from sphinx.testing.util import SphinxTestApp
from sphinx.util import requests
from sphinx.util._pathlib import _StrPath
@@ -37,6 +39,7 @@
if TYPE_CHECKING:
from collections.abc import Callable, Iterable
+ from pathlib import Path
from typing import Any
from urllib3 import HTTPConnectionPool
@@ -677,7 +680,7 @@ def check_headers(self):
assert content['status'] == 'working'
-def make_redirect_handler(*, support_head: bool) -> type[BaseHTTPRequestHandler]:
+def make_redirect_handler(*, support_head: bool = True) -> type[BaseHTTPRequestHandler]:
class RedirectOnceHandler(BaseHTTPRequestHandler):
protocol_version = 'HTTP/1.1'
@@ -712,8 +715,9 @@ def log_date_time_string(self):
)
def test_follows_redirects_on_HEAD(app, capsys):
with serve_application(app, make_redirect_handler(support_head=True)) as address:
+ compile_linkcheck_allowed_redirects(app, app.config)
app.build()
- stdout, stderr = capsys.readouterr()
+ _stdout, stderr = capsys.readouterr()
content = (app.outdir / 'output.txt').read_text(encoding='utf8')
assert content == (
'index.rst:1: [redirected with Found] '
@@ -725,6 +729,9 @@ def test_follows_redirects_on_HEAD(app, capsys):
127.0.0.1 - - [] "HEAD /?redirected=1 HTTP/1.1" 204 -
""",
)
+ assert (
+ f'redirect http://{address}/ - with Found to http://{address}/?redirected=1\n'
+ ) in strip_escape_sequences(app.status.getvalue())
assert app.warning.getvalue() == ''
@@ -735,8 +742,9 @@ def test_follows_redirects_on_HEAD(app, capsys):
)
def test_follows_redirects_on_GET(app, capsys):
with serve_application(app, make_redirect_handler(support_head=False)) as address:
+ compile_linkcheck_allowed_redirects(app, app.config)
app.build()
- stdout, stderr = capsys.readouterr()
+ _stdout, stderr = capsys.readouterr()
content = (app.outdir / 'output.txt').read_text(encoding='utf8')
assert content == (
'index.rst:1: [redirected with Found] '
@@ -749,9 +757,65 @@ def test_follows_redirects_on_GET(app, capsys):
127.0.0.1 - - [] "GET /?redirected=1 HTTP/1.1" 204 -
""",
)
+ assert (
+ f'redirect http://{address}/ - with Found to http://{address}/?redirected=1\n'
+ ) in strip_escape_sequences(app.status.getvalue())
assert app.warning.getvalue() == ''
+@pytest.mark.sphinx(
+ 'linkcheck',
+ testroot='linkcheck-localserver',
+ freshenv=True,
+ confoverrides={'linkcheck_allowed_redirects': {}}, # warn about any redirects
+)
+def test_warns_disallowed_redirects(app, capsys):
+ with serve_application(app, make_redirect_handler()) as address:
+ compile_linkcheck_allowed_redirects(app, app.config)
+ app.build()
+ _stdout, stderr = capsys.readouterr()
+ content = (app.outdir / 'output.txt').read_text(encoding='utf8')
+ assert content == (
+ 'index.rst:1: [redirected with Found] '
+ f'http://{address}/ to http://{address}/?redirected=1\n'
+ )
+ assert stderr == textwrap.dedent(
+ """\
+ 127.0.0.1 - - [] "HEAD / HTTP/1.1" 302 -
+ 127.0.0.1 - - [] "HEAD /?redirected=1 HTTP/1.1" 204 -
+ """,
+ )
+ assert len(app.warning.getvalue().splitlines()) == 1
+
+
+def test_linkcheck_allowed_redirects_config(
+ make_app: Callable[..., SphinxTestApp], tmp_path: Path
+) -> None:
+ tmp_path.joinpath('conf.py').touch()
+ tmp_path.joinpath('index.rst').touch()
+
+ # ``linkcheck_allowed_redirects = None`` is rejected
+ warning_stream = StringIO()
+ with pytest.raises(ConfigError):
+ make_app(
+ 'linkcheck',
+ srcdir=tmp_path,
+ confoverrides={'linkcheck_allowed_redirects': None},
+ warning=warning_stream,
+ )
+ assert strip_escape_sequences(warning_stream.getvalue()).splitlines() == [
+ "WARNING: The config value `linkcheck_allowed_redirects' has type `NoneType'; expected `dict'."
+ ]
+
+ # ``linkcheck_allowed_redirects = {}`` is permitted
+ app = make_app(
+ 'linkcheck',
+ srcdir=tmp_path,
+ confoverrides={'linkcheck_allowed_redirects': {}},
+ )
+ assert strip_escape_sequences(app.warning.getvalue()) == ''
+
+
@pytest.mark.sphinx('linkcheck', testroot='linkcheck-localserver-warn-redirects')
def test_linkcheck_allowed_redirects(app: SphinxTestApp) -> None:
with serve_application(app, make_redirect_handler(support_head=False)) as address:
@@ -1096,6 +1160,12 @@ def test_too_many_requests_retry_after_HTTP_date(tz, app, monkeypatch, capsys):
) as address:
app.build()
+ # Undo side-effects: the monkeypatch context manager clears the TZ environment
+ # variable, but we also need to reset Python's internal notion of the current
+ # timezone.
+ if sys.platform != 'win32':
+ time.tzset()
+
content = (app.outdir / 'output.json').read_text(encoding='utf8')
assert json.loads(content) == {
'filename': 'index.rst',
diff --git a/tests/test_builders/test_build_warnings.py b/tests/test_builders/test_build_warnings.py
index 65e359ad666..adf5647dbd4 100644
--- a/tests/test_builders/test_build_warnings.py
+++ b/tests/test_builders/test_build_warnings.py
@@ -12,6 +12,9 @@
from sphinx.errors import SphinxError
if TYPE_CHECKING:
+ from collections.abc import Callable
+ from pathlib import Path
+
from sphinx.testing.util import SphinxTestApp
ENV_WARNINGS = """\
@@ -23,7 +26,7 @@
{root}/index.rst:\\d+: WARNING: image file not readable: foo.png \\[image.not_readable\\]
{root}/index.rst:\\d+: WARNING: download file not readable: {root}/nonexisting.png \\[download.not_readable\\]
{root}/undecodable.rst:\\d+: WARNING: undecodable source characters, replacing \
-with "\\?": b?'here: >>>(\\\\|/)xbb<<<((\\\\|/)r)?'
+with '\\?': 'here: >>>(\\\\|/)xbb<<<'\\. This will become an error in Sphinx 9\\.0\\.
"""
HTML_WARNINGS = (
@@ -117,7 +120,9 @@ def test_texinfo_warnings(app: SphinxTestApp) -> None:
_check_warnings(warnings_exp, app.warning.getvalue())
-def test_uncacheable_config_warning(make_app, tmp_path):
+def test_uncacheable_config_warning(
+ make_app: Callable[..., SphinxTestApp], tmp_path: Path
+) -> None:
"""Test that an unpickleable config value raises a warning."""
tmp_path.joinpath('conf.py').write_text(
"""\
diff --git a/tests/test_command_line.py b/tests/test_command_line.py
index 3f35a495fcc..11e3d6a7341 100644
--- a/tests/test_command_line.py
+++ b/tests/test_command_line.py
@@ -6,6 +6,7 @@
import pytest
+from sphinx._cli.util.errors import strip_escape_sequences
from sphinx.cmd import make_mode
from sphinx.cmd.build import get_parser
from sphinx.cmd.make_mode import run_make_mode
@@ -150,7 +151,7 @@ def test_build_main_parse_arguments_pos_intermixed(
if broken_argparse:
with pytest.raises(SystemExit):
parse_arguments(args)
- stderr = capsys.readouterr().err.splitlines()
+ stderr = strip_escape_sequences(capsys.readouterr().err).splitlines()
assert stderr[-1].endswith('error: unrecognized arguments: filename1 filename2')
else:
assert parse_arguments(args) == EXPECTED_BUILD_MAIN
@@ -178,7 +179,7 @@ def test_make_mode_parse_arguments_pos_last(
]
with pytest.raises(SystemExit):
run_make_mode(args)
- stderr = capsys.readouterr().err.splitlines()
+ stderr = strip_escape_sequences(capsys.readouterr().err).splitlines()
assert stderr[-1].endswith('error: argument --builder/-b: expected one argument')
@@ -195,7 +196,7 @@ def test_make_mode_parse_arguments_pos_middle(
]
with pytest.raises(SystemExit):
run_make_mode(args)
- stderr = capsys.readouterr().err.splitlines()
+ stderr = strip_escape_sequences(capsys.readouterr().err).splitlines()
assert stderr[-1].endswith('error: argument --builder/-b: expected one argument')
@@ -232,5 +233,5 @@ def test_make_mode_parse_arguments_pos_intermixed(
]
with pytest.raises(SystemExit):
run_make_mode(args)
- stderr = capsys.readouterr().err.splitlines()
+ stderr = strip_escape_sequences(capsys.readouterr().err).splitlines()
assert stderr[-1].endswith('error: argument --builder/-b: expected one argument')
diff --git a/tests/test_config/test_config.py b/tests/test_config/test_config.py
index d297af6f2ee..3a7e58c146c 100644
--- a/tests/test_config/test_config.py
+++ b/tests/test_config/test_config.py
@@ -19,13 +19,14 @@
)
from sphinx.deprecation import RemovedInSphinx90Warning
from sphinx.errors import ConfigError, ExtensionError, VersionRequirementError
+from sphinx.testing.util import SphinxTestApp
+from sphinx.util.tags import Tags
if TYPE_CHECKING:
from collections.abc import Iterable
+ from pathlib import Path
from typing import TypeAlias
- from sphinx.testing.util import SphinxTestApp
-
CircularList: TypeAlias = list[int | 'CircularList']
CircularDict: TypeAlias = dict[str, int | 'CircularDict']
@@ -68,7 +69,7 @@ def test_config_opt_deprecated(recwarn):
opt = _Opt('default', '', ())
with pytest.warns(RemovedInSphinx90Warning):
- default, rebuild, valid_types = opt
+ _default, _rebuild, _valid_types = opt
with pytest.warns(RemovedInSphinx90Warning):
_ = opt[0]
@@ -139,11 +140,11 @@ def test_core_config(app: SphinxTestApp) -> None:
def test_config_not_found(tmp_path):
with pytest.raises(ConfigError):
- Config.read(tmp_path)
+ Config.read(tmp_path, overrides={}, tags=Tags())
@pytest.mark.parametrize('protocol', list(range(pickle.HIGHEST_PROTOCOL)))
-def test_config_pickle_protocol(tmp_path, protocol: int):
+def test_config_pickle_protocol(protocol: int) -> None:
config = Config()
pickled_config = pickle.loads(pickle.dumps(config, protocol))
@@ -394,12 +395,12 @@ def test_errors_warnings(logger, tmp_path):
# test the error for syntax errors in the config file
(tmp_path / 'conf.py').write_text('project = \n', encoding='ascii')
with pytest.raises(ConfigError) as excinfo:
- Config.read(tmp_path, {}, None)
+ Config.read(tmp_path, overrides={}, tags=Tags())
assert 'conf.py' in str(excinfo.value)
# test the automatic conversion of 2.x only code in configs
(tmp_path / 'conf.py').write_text('project = u"Jägermeister"\n', encoding='utf8')
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
assert cfg.project == 'Jägermeister'
assert logger.called is False
@@ -440,7 +441,7 @@ def test_config_eol(logger, tmp_path):
configfile = tmp_path / 'conf.py'
for eol in (b'\n', b'\r\n'):
configfile.write_bytes(b'project = "spam"' + eol)
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
assert cfg.project == 'spam'
assert logger.called is False
@@ -678,7 +679,7 @@ def test_conf_py_language_none(tmp_path):
(tmp_path / 'conf.py').write_text('language = None', encoding='utf-8')
# When we load conf.py into a Config object
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
# Then the language is coerced to English
assert cfg.language == 'en'
@@ -691,7 +692,7 @@ def test_conf_py_language_none_warning(logger, tmp_path):
(tmp_path / 'conf.py').write_text('language = None', encoding='utf-8')
# When we load conf.py into a Config object
- Config.read(tmp_path, {}, None)
+ Config.read(tmp_path, overrides={}, tags=Tags())
# Then a warning is raised
assert logger.warning.called
@@ -708,7 +709,7 @@ def test_conf_py_no_language(tmp_path):
(tmp_path / 'conf.py').touch()
# When we load conf.py into a Config object
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
# Then the language is coerced to English
assert cfg.language == 'en'
@@ -720,7 +721,7 @@ def test_conf_py_nitpick_ignore_list(tmp_path):
(tmp_path / 'conf.py').touch()
# When we load conf.py into a Config object
- cfg = Config.read(tmp_path, {}, None)
+ cfg = Config.read(tmp_path, overrides={}, tags=Tags())
# Then the default nitpick_ignore[_regex] is an empty list
assert cfg.nitpick_ignore == []
@@ -810,3 +811,14 @@ def test_root_doc_and_master_doc_are_synchronized() -> None:
c.root_doc = '1234'
assert c.master_doc == '1234'
assert c.root_doc == c.master_doc
+
+
+def test_source_encoding_deprecation(tmp_path: Path) -> None:
+ (tmp_path / 'conf.py').touch()
+ app = SphinxTestApp(
+ buildername='dummy',
+ srcdir=tmp_path,
+ confoverrides={'source_encoding': 'latin-1'},
+ )
+ expected = 'Support for source encodings other than UTF-8 is deprecated and will be removed'
+ assert expected in app.warning.getvalue()
diff --git a/tests/test_directives/test_directive_code.py b/tests/test_directives/test_directive_code.py
index 625d15c1f31..525071f9e45 100644
--- a/tests/test_directives/test_directive_code.py
+++ b/tests/test_directives/test_directive_code.py
@@ -20,17 +20,17 @@
@pytest.fixture(scope='module')
-def testroot(rootdir):
+def testroot(rootdir: Path) -> Path:
testroot_path = rootdir / 'test-directive-code'
return testroot_path
@pytest.fixture(scope='module')
-def literal_inc_path(testroot):
+def literal_inc_path(testroot: Path) -> Path:
return testroot / 'literal.inc'
-def test_LiteralIncludeReader(literal_inc_path):
+def test_LiteralIncludeReader(literal_inc_path: Path) -> None:
options = {'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, lines = reader.read()
@@ -39,7 +39,7 @@ def test_LiteralIncludeReader(literal_inc_path):
assert reader.lineno_start == 1
-def test_LiteralIncludeReader_lineno_start(literal_inc_path):
+def test_LiteralIncludeReader_lineno_start(literal_inc_path: Path) -> None:
options = {'lineno-start': 4}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
content, lines = reader.read()
@@ -48,40 +48,40 @@ def test_LiteralIncludeReader_lineno_start(literal_inc_path):
assert reader.lineno_start == 4
-def test_LiteralIncludeReader_pyobject1(literal_inc_path):
+def test_LiteralIncludeReader_pyobject1(literal_inc_path: Path) -> None:
options = {'lineno-match': True, 'pyobject': 'Foo'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Foo:\n pass\n'
assert reader.lineno_start == 5
-def test_LiteralIncludeReader_pyobject2(literal_inc_path):
+def test_LiteralIncludeReader_pyobject2(literal_inc_path: Path) -> None:
options = {'pyobject': 'Bar'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Bar:\n def baz():\n pass\n'
assert reader.lineno_start == 1 # no lineno-match
-def test_LiteralIncludeReader_pyobject3(literal_inc_path):
+def test_LiteralIncludeReader_pyobject3(literal_inc_path: Path) -> None:
options = {'pyobject': 'Bar.baz'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == ' def baz():\n pass\n'
-def test_LiteralIncludeReader_pyobject_and_lines(literal_inc_path):
+def test_LiteralIncludeReader_pyobject_and_lines(literal_inc_path: Path) -> None:
options = {'pyobject': 'Bar', 'lines': '2-'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == ' def baz():\n pass\n'
-def test_LiteralIncludeReader_lines1(literal_inc_path):
+def test_LiteralIncludeReader_lines1(literal_inc_path: Path) -> None:
options = {'lines': '1-3'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == (
'# Literally included file using Python highlighting\n'
'\n'
@@ -89,10 +89,10 @@ def test_LiteralIncludeReader_lines1(literal_inc_path):
)
-def test_LiteralIncludeReader_lines2(literal_inc_path):
+def test_LiteralIncludeReader_lines2(literal_inc_path: Path) -> None:
options = {'lines': '1,3,5'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == (
'# Literally included file using Python highlighting\n'
'foo = "Including Unicode characters: üöä"\n'
@@ -100,16 +100,18 @@ def test_LiteralIncludeReader_lines2(literal_inc_path):
)
-def test_LiteralIncludeReader_lines_and_lineno_match1(literal_inc_path):
+def test_LiteralIncludeReader_lines_and_lineno_match1(literal_inc_path: Path) -> None:
options = {'lines': '3-5', 'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'foo = "Including Unicode characters: üöä"\n\nclass Foo:\n'
assert reader.lineno_start == 3
@pytest.mark.sphinx('html', testroot='root') # init locale for errors
-def test_LiteralIncludeReader_lines_and_lineno_match2(literal_inc_path, app):
+def test_LiteralIncludeReader_lines_and_lineno_match2(
+ literal_inc_path: Path, app: SphinxTestApp
+) -> None:
options = {'lines': '0,3,5', 'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
with pytest.raises(
@@ -120,7 +122,9 @@ def test_LiteralIncludeReader_lines_and_lineno_match2(literal_inc_path, app):
@pytest.mark.sphinx('html', testroot='root') # init locale for errors
-def test_LiteralIncludeReader_lines_and_lineno_match3(literal_inc_path, app):
+def test_LiteralIncludeReader_lines_and_lineno_match3(
+ literal_inc_path: Path, app: SphinxTestApp
+) -> None:
options = {'lines': '100-', 'lineno-match': True}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
with pytest.raises(
@@ -130,23 +134,23 @@ def test_LiteralIncludeReader_lines_and_lineno_match3(literal_inc_path, app):
reader.read()
-def test_LiteralIncludeReader_start_at(literal_inc_path):
+def test_LiteralIncludeReader_start_at(literal_inc_path: Path) -> None:
options = {'lineno-match': True, 'start-at': 'Foo', 'end-at': 'Bar'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Foo:\n pass\n\nclass Bar:\n'
assert reader.lineno_start == 5
-def test_LiteralIncludeReader_start_after(literal_inc_path):
+def test_LiteralIncludeReader_start_after(literal_inc_path: Path) -> None:
options = {'lineno-match': True, 'start-after': 'Foo', 'end-before': 'Bar'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == ' pass\n\n'
assert reader.lineno_start == 6
-def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path):
+def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path: Path) -> None:
options = {
'lineno-match': True,
'lines': '6-',
@@ -154,20 +158,20 @@ def test_LiteralIncludeReader_start_after_and_lines(literal_inc_path):
'end-before': 'comment',
}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == '\nclass Bar:\n def baz():\n pass\n\n'
assert reader.lineno_start == 7
-def test_LiteralIncludeReader_start_at_and_lines(literal_inc_path):
+def test_LiteralIncludeReader_start_at_and_lines(literal_inc_path: Path) -> None:
options = {'lines': '2, 3, 5', 'start-at': 'foo', 'end-before': '#'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == '\nclass Foo:\n\n'
assert reader.lineno_start == 1
-def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path):
+def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path: Path) -> None:
options = {'start-at': 'NOTHING'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
with pytest.raises(ValueError, match='start-at pattern not found: NOTHING'):
@@ -189,49 +193,51 @@ def test_LiteralIncludeReader_missing_start_and_end(literal_inc_path):
reader.read()
-def test_LiteralIncludeReader_end_before(literal_inc_path):
+def test_LiteralIncludeReader_end_before(literal_inc_path: Path) -> None:
options = {'end-before': 'nclud'} # *nclud* matches first and third lines.
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == '# Literally included file using Python highlighting\n\n'
-def test_LiteralIncludeReader_prepend(literal_inc_path):
+def test_LiteralIncludeReader_prepend(literal_inc_path: Path) -> None:
options = {'lines': '1', 'prepend': 'Hello', 'append': 'Sphinx'}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == (
'Hello\n# Literally included file using Python highlighting\nSphinx\n'
)
-def test_LiteralIncludeReader_dedent(literal_inc_path):
+def test_LiteralIncludeReader_dedent(literal_inc_path: Path) -> None:
# dedent: 2
options = {'lines': '9-11', 'dedent': 2}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == ' def baz():\n pass\n\n'
# dedent: 4
options = {'lines': '9-11', 'dedent': 4}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'def baz():\n pass\n\n'
# dedent: 6
options = {'lines': '9-11', 'dedent': 6}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'f baz():\n pass\n\n'
# dedent: None
options = {'lines': '9-11', 'dedent': None}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'def baz():\n pass\n\n'
-def test_LiteralIncludeReader_dedent_and_append_and_prepend(literal_inc_path):
+def test_LiteralIncludeReader_dedent_and_append_and_prepend(
+ literal_inc_path: Path,
+) -> None:
# dedent: 2
options = {
'lines': '9-11',
@@ -240,7 +246,7 @@ def test_LiteralIncludeReader_dedent_and_append_and_prepend(literal_inc_path):
'append': '# comment',
}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Foo:\n def baz():\n pass\n\n# comment\n'
@@ -248,20 +254,20 @@ def test_LiteralIncludeReader_tabwidth(testroot):
# tab-width: 4
options = {'tab-width': 4, 'pyobject': 'Qux'}
reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Qux:\n def quux(self):\n pass\n'
# tab-width: 8
options = {'tab-width': 8, 'pyobject': 'Qux'}
reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'class Qux:\n def quux(self):\n pass\n'
def test_LiteralIncludeReader_tabwidth_dedent(testroot):
options = {'tab-width': 4, 'dedent': 4, 'pyobject': 'Qux.quux'}
reader = LiteralIncludeReader(testroot / 'target.py', options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == 'def quux(self):\n pass\n'
@@ -269,7 +275,7 @@ def test_LiteralIncludeReader_diff(testroot, literal_inc_path):
literal_diff_path = testroot / 'literal-diff.inc'
options = {'diff': literal_diff_path}
reader = LiteralIncludeReader(literal_inc_path, options, DUMMY_CONFIG)
- content, lines = reader.read()
+ content, _lines = reader.read()
assert content == (
f'--- {literal_diff_path}\n'
f'+++ {literal_inc_path}\n'
diff --git a/tests/test_directives/test_directive_object_description.py b/tests/test_directives/test_directive_object_description.py
index 210b9aac381..4cbe3e26697 100644
--- a/tests/test_directives/test_directive_object_description.py
+++ b/tests/test_directives/test_directive_object_description.py
@@ -9,9 +9,8 @@
from docutils import nodes
from sphinx import addnodes
-from sphinx.io import create_publisher
from sphinx.testing import restructuredtext
-from sphinx.util.docutils import sphinx_domains
+from sphinx.util.docutils import _parse_str_to_doctree
if TYPE_CHECKING:
from sphinx.application import Sphinx
@@ -22,12 +21,22 @@
def _doctree_for_test(
app: Sphinx, env: BuildEnvironment, docname: str
) -> nodes.document:
+ config = app.config
+ registry = app.registry
+
+ filename = env.doc2path(docname)
+ content = filename.read_text(encoding='utf-8')
+
env.prepare_settings(docname)
- publisher = create_publisher(app, 'restructuredtext')
- with sphinx_domains(env):
- publisher.set_source(source_path=str(env.doc2path(docname)))
- publisher.publish()
- return publisher.document
+ parser = registry.create_source_parser('restructuredtext', config=config, env=env)
+ return _parse_str_to_doctree(
+ content,
+ filename=filename,
+ default_settings={'env': env},
+ env=env,
+ parser=parser,
+ transforms=registry.get_transforms(),
+ )
@pytest.mark.sphinx('text', testroot='object-description-sections')
diff --git a/tests/test_directives/test_directive_only.py b/tests/test_directives/test_directive_only.py
index 9e62f4cb3eb..dbabffaa5d5 100644
--- a/tests/test_directives/test_directive_only.py
+++ b/tests/test_directives/test_directive_only.py
@@ -9,44 +9,53 @@
from docutils import nodes
if TYPE_CHECKING:
+ from typing import Any
+
from sphinx.testing.util import SphinxTestApp
@pytest.mark.sphinx('text', testroot='directive-only')
def test_sectioning(app: SphinxTestApp) -> None:
- def getsects(section):
- if not isinstance(section, nodes.section):
- return [getsects(n) for n in section.children]
- title = section.next_node(nodes.title).astext().strip()
- subsects = []
- children = section.children[:]
- while children:
- node = children.pop(0)
- if isinstance(node, nodes.section):
- subsects.append(node)
- continue
- children = list(node.children) + children
- return [title, [getsects(subsect) for subsect in subsects]]
-
- def testsects(prefix, sects, indent=0):
- title = sects[0]
- parent_num = title.split()[0]
- assert prefix == parent_num, f'Section out of place: {title!r}'
- for i, subsect in enumerate(sects[1]):
- num = subsect[0].split()[0]
- assert re.match('[0-9]+[.0-9]*[.]', num), (
- f'Unnumbered section: {subsect[0]!r}'
- )
- testsects(prefix + str(i + 1) + '.', subsect, indent + 4)
-
app.build(filenames=[app.srcdir / 'only.rst'])
doctree = app.env.get_doctree('only')
app.env.apply_post_transforms(doctree, 'only')
- parts = [getsects(n) for n in doctree.children if isinstance(n, nodes.section)]
- for i, s in enumerate(parts):
- testsects(str(i + 1) + '.', s, 4)
- actual_headings = '\n'.join(p[0] for p in parts)
+ parts = [_get_sections(n) for n in doctree.children if isinstance(n, nodes.section)]
+ for i, section in enumerate(parts):
+ _test_sections(f'{i + 1}.', section, 4)
+ actual_headings = '\n'.join(p[0] for p in parts) # type: ignore[misc]
assert len(parts) == 4, (
f'Expected 4 document level headings, got:\n{actual_headings}'
)
+
+
+def _get_sections(section: nodes.Node) -> list[str | list[Any]]:
+ if not isinstance(section, nodes.section):
+ return list(map(_get_sections, section.children))
+ title = section.next_node(nodes.title).astext().strip()
+ subsections = []
+ children = section.children.copy()
+ while children:
+ node = children.pop(0)
+ if isinstance(node, nodes.section):
+ subsections.append(node)
+ continue
+ children = list(node.children) + children
+ return [title, list(map(_get_sections, subsections))]
+
+
+def _test_sections(
+ prefix: str, sections: list[str | list[Any]], indent: int = 0
+) -> None:
+ title = sections[0]
+ assert isinstance(title, str)
+ parent_num = title.partition(' ')[0]
+ assert prefix == parent_num, f'Section out of place: {title!r}'
+ for i, subsection in enumerate(sections[1]):
+ subsection_title = subsection[0]
+ assert isinstance(subsection_title, str)
+ num = subsection_title.partition(' ')[0]
+ assert re.match('[0-9]+[.0-9]*[.]', num), (
+ f'Unnumbered section: {subsection[0]!r}'
+ )
+ _test_sections(f'{prefix}{i + 1}.', subsection, indent + 4)
diff --git a/tests/test_domains/test_domain_c.py b/tests/test_domains/test_domain_c.py
index 23ee25ffa83..d83693c09dd 100644
--- a/tests/test_domains/test_domain_c.py
+++ b/tests/test_domains/test_domain_c.py
@@ -700,14 +700,14 @@ def test_extra_keywords() -> None:
# raise DefinitionError
-def split_warnings(warning: StringIO):
+def split_warnings(warning: StringIO) -> list[str]:
ws = warning.getvalue().split('\n')
assert len(ws) >= 1
assert ws[-1] == ''
return ws[:-1]
-def filter_warnings(warning: StringIO, file):
+def filter_warnings(warning: StringIO, file: str) -> list[str]:
lines = split_warnings(warning)
res = [
l
diff --git a/tests/test_domains/test_domain_cpp.py b/tests/test_domains/test_domain_cpp.py
index 88505a4aa91..2e88625a9fe 100644
--- a/tests/test_domains/test_domain_cpp.py
+++ b/tests/test_domains/test_domain_cpp.py
@@ -35,6 +35,8 @@
if TYPE_CHECKING:
from io import StringIO
+ from sphinx.domains.cpp._ast import ASTTemplateParamType
+
def parse(name, string):
class Config:
@@ -1515,8 +1517,8 @@ def test_domain_cpp_ast_xref_parsing() -> None:
('template class...', True),
],
)
-def test_domain_cpp_template_parameters_is_pack(param: str, is_pack: bool):
- def parse_template_parameter(param: str):
+def test_domain_cpp_template_parameters_is_pack(param: str, is_pack: bool) -> None:
+ def parse_template_parameter(param: str) -> ASTTemplateParamType:
ast = parse('type', 'template<' + param + '> X')
return ast.templatePrefix.templates[0].params[0]
@@ -1531,7 +1533,7 @@ def parse_template_parameter(param: str):
# raise DefinitionError
-def filter_warnings(warning: StringIO, file):
+def filter_warnings(warning: StringIO, file: str) -> list[str]:
lines = warning.getvalue().split('\n')
res = [
l
diff --git a/tests/test_domains/test_domain_py.py b/tests/test_domains/test_domain_py.py
index 151fb4494f7..20192b638b8 100644
--- a/tests/test_domains/test_domain_py.py
+++ b/tests/test_domains/test_domain_py.py
@@ -38,20 +38,26 @@
from sphinx.testing.util import assert_node
from sphinx.writers.text import STDINDENT
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from sphinx.application import Sphinx
+ from sphinx.environment import BuildEnvironment
+ from sphinx.testing.util import SphinxTestApp
-def parse(sig):
+
+def parse(sig: str, *, env: BuildEnvironment) -> str:
m = py_sig_re.match(sig)
if m is None:
raise ValueError
- name_prefix, tp_list, name, arglist, retann = m.groups()
+ _name_prefix, _tp_list, _name, arglist, _retann = m.groups()
signode = addnodes.desc_signature(sig, '')
- _pseudo_parse_arglist(signode, arglist)
+ _pseudo_parse_arglist(signode, arglist, env=env)
return signode.astext()
-def test_function_signatures() -> None:
- rv = parse("compile(source : string, filename, symbol='file')")
- assert rv == "(source : string, filename, symbol='file')"
+def test_function_signatures(app: Sphinx) -> None:
+ rv = parse("compile(source : string, filename, symbol='file')", env=app.env)
+ assert rv == "(source: string, filename, symbol='file')"
for params, expect in [
('(a=1)', '(a=1)'),
@@ -60,9 +66,9 @@ def test_function_signatures() -> None:
('(a=1[, b=None])', '(a=1, [b=None])'),
('(a=[], [b=None])', '(a=[], [b=None])'),
('(a=[][, b=None])', '(a=[], [b=None])'),
- ('(a: Foo[Bar]=[][, b=None])', '(a: Foo[Bar]=[], [b=None])'),
+ ('(a: Foo[Bar]=[][, b=None])', '(a: Foo[Bar] = [], [b=None])'),
]:
- rv = parse(f'func{params}')
+ rv = parse(f'func{params}', env=app.env)
assert rv == expect
# Note: 'def f[Foo[Bar]]()' is not valid Python but people might write
@@ -70,7 +76,7 @@ def test_function_signatures() -> None:
# variable.
for tparams in ['', '[Foo]', '[Foo[Bar]]']:
for retann in ['', '-> Foo', '-> Foo[Bar]', '-> anything else']:
- rv = parse(f'func{tparams}{params} {retann}'.rstrip())
+ rv = parse(f'func{tparams}{params} {retann}'.rstrip(), env=app.env)
assert rv == expect
@@ -508,6 +514,28 @@ def test_parse_annotation(app):
),
)
+ doctree = _parse_annotation('*tuple[str, int]', app.env)
+ assert_node(
+ doctree,
+ (
+ [desc_sig_operator, '*'],
+ [pending_xref, 'tuple'],
+ [desc_sig_punctuation, '['],
+ [pending_xref, 'str'],
+ [desc_sig_punctuation, ','],
+ desc_sig_space,
+ [pending_xref, 'int'],
+ [desc_sig_punctuation, ']'],
+ ),
+ )
+ assert_node(
+ doctree[1],
+ pending_xref,
+ refdomain='py',
+ reftype='class',
+ reftarget='tuple',
+ )
+
@pytest.mark.sphinx('html', testroot='_blank')
def test_parse_annotation_suppress(app):
@@ -1764,3 +1792,105 @@ def test_pep_695_and_pep_696_whitespaces_in_default(app, tp_list, tptext):
text = f'.. py:function:: f{tp_list}() -> Annotated[T, Qux[int]()]'
doctree = restructuredtext.parse(app, text)
assert doctree.astext() == f'\n\nf{tptext}() -> Annotated[T, Qux[int]()]\n\n'
+
+
+def test_deco_role(app):
+ text = """\
+.. py:decorator:: foo.bar
+ :no-contents-entry:
+ :no-index-entry:
+ :no-typesetting:
+"""
+
+ doctree = restructuredtext.parse(app, text + '\n:py:deco:`foo.bar`')
+ assert doctree.astext() == '\n\n\n\n@foo.bar'
+
+ doctree = restructuredtext.parse(app, text + '\n:py:deco:`~foo.bar`')
+ assert doctree.astext() == '\n\n\n\n@bar'
+
+
+def test_pytype_canonical(app):
+ text = """\
+.. py:type:: A
+ :canonical: int
+
+.. py:type:: B
+ :canonical: int
+ """
+
+ doctree = restructuredtext.parse(app, text)
+ assert not app.warning.getvalue()
+
+
+@pytest.mark.sphinx('html', testroot='domain-py-xref-type-alias')
+def test_type_alias_xref_resolution(app: SphinxTestApp) -> None:
+ """Test that type aliases in function signatures can be cross-referenced.
+
+ This tests the fix for issue https://github.com/sphinx-doc/sphinx/issues/10785
+ where type aliases documented as :py:data: but referenced as :py:class: in
+ function signatures would not resolve properly.
+
+ Tests both a Union type alias and a generic type alias to ensure our
+ domain fallback mechanism works for various type alias patterns.
+ """
+ app.config.nitpicky = True
+ app.build()
+
+ # In nitpicky mode, check that no warnings were generated for type alias cross-references
+ warnings_text = app.warning.getvalue()
+ assert 'py:class reference target not found: pathlike' not in warnings_text, (
+ f'Type alias cross-reference failed in nitpicky mode. Warnings: {warnings_text}'
+ )
+ assert 'py:class reference target not found: Handler' not in warnings_text, (
+ f'Type alias cross-reference failed for Handler. Warnings: {warnings_text}'
+ )
+
+ # Core functionality test: Verify type alias links are generated in function signatures
+ html_content = (app.outdir / 'index.html').read_text(encoding='utf8')
+
+ # Both type aliases should be documented and have anchors
+ assert 'id="alias_module.pathlike"' in html_content, (
+ 'pathlike type alias definition anchor not found in HTML'
+ )
+ assert 'id="alias_module.Handler"' in html_content, (
+ 'Handler type alias definition anchor not found in HTML'
+ )
+
+ # The critical test: type aliases in function signatures should be clickable links
+ # This tests the original issue - function signature type annotations should resolve
+ assert (
+ 'read_file.*?', html_content, re.DOTALL
+ )
+ assert read_file_match is not None, 'Could not find read_file function signature'
+ read_file_signature = read_file_match.group(0)
+ assert (
+ 'process_error.*?', html_content, re.DOTALL
+ )
+ assert process_error_match is not None, (
+ 'Could not find process_error function signature'
+ )
+ process_error_signature = process_error_match.group(0)
+ assert (
+ ' None:
+ text = '.. py:function:: hello(a : ~typing.Any = ) -> None'
+ doctree = restructuredtext.parse(app, text)
+ assert_node(
+ doctree,
+ (
+ addnodes.index,
+ [
+ desc,
+ (
+ [
+ desc_signature,
+ (
+ [desc_name, 'hello'],
+ desc_parameterlist,
+ [desc_returns, pending_xref, 'None'],
+ ),
+ ],
+ desc_content,
+ ),
+ ],
+ ),
+ )
+ assert_node(
+ doctree[1],
+ addnodes.desc,
+ desctype='function',
+ domain='py',
+ objtype='function',
+ no_index=False,
+ )
+ assert_node(
+ doctree[1][0][1], # type: ignore[index]
+ (
+ [
+ desc_parameter,
+ (
+ [desc_sig_name, 'a'],
+ [desc_sig_punctuation, ':'],
+ desc_sig_space,
+ [desc_sig_name, pending_xref, 'Any'],
+ desc_sig_space,
+ [desc_sig_operator, '='],
+ desc_sig_space,
+ [nodes.inline, ''],
+ ),
+ ],
+ ),
+ )
+
+
@pytest.mark.sphinx(
'html',
testroot='root',
diff --git a/tests/test_domains/test_domain_py_pyobject.py b/tests/test_domains/test_domain_py_pyobject.py
index 12b22a04dcc..dec4d71546e 100644
--- a/tests/test_domains/test_domain_py_pyobject.py
+++ b/tests/test_domains/test_domain_py_pyobject.py
@@ -925,17 +925,17 @@ def test_domain_py_type_alias(app):
content = (app.outdir / 'type_alias.html').read_text(encoding='utf8')
assert (
- 'type '
+ 'type '
'module_one.'
'MyAlias'
- ' ='
+ ' ='
' list'
'['
'int '
'| '
''
'module_two.SomeClass'
- ']'
+ ']'
) in content
assert app.warning.getvalue() == ''
diff --git a/tests/test_environment/test_environment.py b/tests/test_environment/test_environment.py
index 872d0b857ce..08e0abdd61b 100644
--- a/tests/test_environment/test_environment.py
+++ b/tests/test_environment/test_environment.py
@@ -21,11 +21,16 @@
)
if TYPE_CHECKING:
+ from collections.abc import Callable
+
+ from sphinx.testing.fixtures import _app_params
from sphinx.testing.util import SphinxTestApp
@pytest.mark.sphinx('dummy', testroot='basic', copy_test_root=True)
-def test_config_status(make_app, app_params):
+def test_config_status(
+ make_app: Callable[..., SphinxTestApp], app_params: _app_params
+) -> None:
args, kwargs = app_params
# clean build
diff --git a/tests/test_environment/test_environment_record_dependencies.py b/tests/test_environment/test_environment_record_dependencies.py
index c3690e410fb..b70242bdb45 100644
--- a/tests/test_environment/test_environment_record_dependencies.py
+++ b/tests/test_environment/test_environment_record_dependencies.py
@@ -6,8 +6,6 @@
import pytest
-from sphinx.util._pathlib import _StrPath
-
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
@@ -16,4 +14,4 @@
def test_record_dependencies_cleared(app: SphinxTestApp) -> None:
app.builder.read()
assert 'index' not in app.env.dependencies
- assert app.env.dependencies['api'] == {_StrPath('example_module.py')}
+ assert app.env.dependencies['api'] == {app.srcdir / 'example_module.py'}
diff --git a/tests/test_environment/test_environment_toctree.py b/tests/test_environment/test_environment_toctree.py
index f6b849c5bec..dcf5f8954da 100644
--- a/tests/test_environment/test_environment_toctree.py
+++ b/tests/test_environment/test_environment_toctree.py
@@ -4,6 +4,7 @@
from typing import TYPE_CHECKING
+import docutils
import pytest
from docutils import nodes
from docutils.nodes import bullet_list, list_item, literal, reference, title
@@ -11,8 +12,13 @@
from sphinx import addnodes
from sphinx.addnodes import compact_paragraph, only
from sphinx.builders.html import StandaloneHTMLBuilder
-from sphinx.environment.adapters.toctree import document_toc, global_toctree_for_doc
+from sphinx.environment.adapters.toctree import (
+ _toctree_copy,
+ document_toc,
+ global_toctree_for_doc,
+)
from sphinx.testing.util import assert_node
+from sphinx.util.tags import Tags
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
@@ -614,7 +620,9 @@ def test_document_toc_tocdepth(app):
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_global_toctree_for_doc(app):
app.build()
- toctree = global_toctree_for_doc(app.env, 'index', app.builder, collapse=False)
+ toctree = global_toctree_for_doc(
+ app.env, 'index', app.builder, tags=app.tags, collapse=False
+ )
assert_node(
toctree,
[
@@ -676,7 +684,9 @@ def test_global_toctree_for_doc(app):
@pytest.mark.test_params(shared_result='test_environment_toctree_basic')
def test_global_toctree_for_doc_collapse(app):
app.build()
- toctree = global_toctree_for_doc(app.env, 'index', app.builder, collapse=True)
+ toctree = global_toctree_for_doc(
+ app.env, 'index', app.builder, tags=app.tags, collapse=True
+ )
assert_node(
toctree,
[
@@ -723,7 +733,7 @@ def test_global_toctree_for_doc_collapse(app):
def test_global_toctree_for_doc_maxdepth(app):
app.build()
toctree = global_toctree_for_doc(
- app.env, 'index', app.builder, collapse=False, maxdepth=3
+ app.env, 'index', app.builder, tags=app.tags, collapse=False, maxdepth=3
)
assert_node(
toctree,
@@ -814,7 +824,12 @@ def test_global_toctree_for_doc_maxdepth(app):
def test_global_toctree_for_doc_includehidden(app):
app.build()
toctree = global_toctree_for_doc(
- app.env, 'index', app.builder, collapse=False, includehidden=False
+ app.env,
+ 'index',
+ app.builder,
+ tags=app.tags,
+ collapse=False,
+ includehidden=False,
)
assert_node(
toctree,
@@ -907,3 +922,87 @@ def test_toctree_index(app):
numbered=0,
entries=[(None, 'genindex'), (None, 'modindex'), (None, 'search')],
)
+
+
+@pytest.mark.sphinx('dummy', testroot='toctree-only')
+def test_toctree_only(app):
+ # regression test for https://github.com/sphinx-doc/sphinx/issues/13022
+ # we mainly care that this doesn't fail
+
+ if docutils.__version_info__[:2] >= (0, 22):
+ true = '1'
+ else:
+ true = 'True'
+ expected_pformat = f"""\
+
+
+
+
+ test-toctree-only
+
+
+
+
+
+ test_toctree_only1
+
+
+
+
+ test_toctree_only2
+
+
+
+
+ test_toctree_only2
+"""
+ app.build()
+ toc = document_toc(app.env, 'index', app.tags)
+ assert toc.pformat(' ') == expected_pformat
+
+
+def test_toctree_copy_only():
+ # regression test for https://github.com/sphinx-doc/sphinx/issues/13022
+ # ensure ``_toctree_copy()`` properly filters out ``only`` nodes,
+ # including nested nodes.
+ node = nodes.literal('lobster!', 'lobster!')
+ node = nodes.reference('', '', node, anchorname='', internal=True, refuri='index')
+ node = addnodes.only('', node, expr='lobster')
+ node = addnodes.compact_paragraph('', '', node, skip_section_number=True)
+ node = nodes.list_item('', node)
+ node = addnodes.only('', node, expr='not spam')
+ node = addnodes.only('', node, expr='lobster')
+ node = addnodes.only('', node, expr='not ham')
+ node = nodes.bullet_list('', node)
+ # this is a tree of the shape:
+ #
+ #
+ #
+ #
+ #
+ #
+ #
+ #
+ #
+ # lobster!
+
+ tags = Tags({'lobster'})
+ toc = _toctree_copy(node, 2, 0, False, tags)
+ # the filtered ToC should look like:
+ #
+ #
+ #
+ #
+ #
+ # lobster!
+
+ # no only nodes should remain
+ assert list(toc.findall(addnodes.only)) == []
+
+ # the tree is preserved
+ assert isinstance(toc, nodes.bullet_list)
+ assert isinstance(toc[0], nodes.list_item)
+ assert isinstance(toc[0][0], addnodes.compact_paragraph)
+ assert isinstance(toc[0][0][0], nodes.reference)
+ assert isinstance(toc[0][0][0][0], nodes.literal)
+ assert toc[0][0][0][0][0] == nodes.Text('lobster!')
diff --git a/tests/test_events.py b/tests/test_events.py
index 56f76511dcb..50b7bb5fd76 100644
--- a/tests/test_events.py
+++ b/tests/test_events.py
@@ -3,16 +3,22 @@
from __future__ import annotations
from types import SimpleNamespace
+from typing import TYPE_CHECKING
import pytest
from sphinx.errors import ExtensionError
from sphinx.events import EventManager
+if TYPE_CHECKING:
+ from typing import NoReturn
+
+ from sphinx.application import Sphinx
+
def test_event_priority() -> None:
result = []
- app = object() # pass a dummy object as an app
+ app = SimpleNamespace(pdb=False) # pass a dummy object as an app
events = EventManager(app) # type: ignore[arg-type]
events.connect('builder-inited', lambda app: result.append(1), priority=500)
events.connect('builder-inited', lambda app: result.append(2), priority=500)
@@ -27,7 +33,7 @@ def test_event_priority() -> None:
def test_event_allowed_exceptions() -> None:
- def raise_error(app):
+ def raise_error(app: Sphinx) -> NoReturn:
raise RuntimeError
app = SimpleNamespace(pdb=False) # pass a dummy object as an app
@@ -44,7 +50,7 @@ def raise_error(app):
def test_event_pdb() -> None:
- def raise_error(app):
+ def raise_error(app: Sphinx) -> NoReturn:
raise RuntimeError
app = SimpleNamespace(pdb=True) # pass a dummy object as an app
diff --git a/tests/test_ext_autodoc/__init__.py b/tests/test_ext_autodoc/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/test_ext_autodoc/autodoc_util.py b/tests/test_ext_autodoc/autodoc_util.py
new file mode 100644
index 00000000000..64b31825a49
--- /dev/null
+++ b/tests/test_ext_autodoc/autodoc_util.py
@@ -0,0 +1,98 @@
+from __future__ import annotations
+
+from types import SimpleNamespace
+from typing import TYPE_CHECKING
+
+from docutils.statemachine import StringList
+
+from sphinx.environment import _CurrentDocument
+from sphinx.events import EventManager
+from sphinx.ext.autodoc._directive_options import _process_documenter_options
+from sphinx.ext.autodoc._generate import _generate_directives
+from sphinx.ext.autodoc._loader import _load_object_by_name
+from sphinx.ext.autodoc._shared import _AutodocConfig
+from sphinx.util.inspect import safe_getattr
+
+if TYPE_CHECKING:
+ from collections.abc import Callable, Mapping
+ from typing import Any
+
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+
+_DEFAULT_CONFIG = _AutodocConfig()
+
+
+class FakeEvents(EventManager):
+ def __init__(self) -> None:
+ super().__init__(SimpleNamespace(pdb=False)) # type: ignore[arg-type]
+
+ self.add('autodoc-before-process-signature')
+ self.add('autodoc-process-docstring')
+ self.add('autodoc-process-signature')
+ self.add('autodoc-skip-member')
+ self.add('autodoc-process-bases')
+ self.add('object-description-transform')
+
+ def connect(
+ self, name: str, callback: Callable[..., Any], priority: int = 500
+ ) -> int:
+ return super().connect(name, callback, priority)
+
+
+def do_autodoc(
+ obj_type: _AutodocObjType,
+ name: str,
+ *,
+ config: _AutodocConfig = _DEFAULT_CONFIG,
+ current_document: _CurrentDocument | None = None,
+ events: FakeEvents | None = None,
+ expect_import_error: bool = False,
+ options: dict[str, Any] | None = None,
+ ref_context: Mapping[str, str | None] | None = None,
+) -> list[str]:
+ if current_document is None:
+ current_document = _CurrentDocument(docname='index')
+ if events is None:
+ events = FakeEvents()
+ if ref_context is None:
+ ref_context = {}
+ reread_always: set[str] = set()
+
+ options = {} if options is None else options.copy()
+ doc_options = _process_documenter_options(
+ obj_type=obj_type,
+ default_options=config.autodoc_default_options,
+ options=options,
+ )
+
+ props = _load_object_by_name(
+ name=name,
+ objtype=obj_type,
+ current_document=current_document,
+ config=config,
+ events=events,
+ get_attr=safe_getattr,
+ options=doc_options,
+ ref_context=ref_context,
+ reread_always=reread_always,
+ )
+ if expect_import_error:
+ assert props is None
+ return []
+
+ assert props is not None
+ result = StringList()
+ _generate_directives(
+ config=config,
+ current_document=current_document,
+ events=events,
+ get_attr=safe_getattr,
+ indent='',
+ options=doc_options,
+ props=props,
+ record_dependencies=set(),
+ ref_context=ref_context,
+ reread_always=reread_always,
+ result=result,
+ )
+ return result.data
diff --git a/tests/test_ext_autodoc/conftest.py b/tests/test_ext_autodoc/conftest.py
new file mode 100644
index 00000000000..22f99cc4e9c
--- /dev/null
+++ b/tests/test_ext_autodoc/conftest.py
@@ -0,0 +1,20 @@
+from __future__ import annotations
+
+import sys
+
+import pytest
+
+from tests.utils import TEST_ROOTS_DIR
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+
+
+@pytest.fixture(scope='module')
+def inject_autodoc_root_into_sys_path() -> Iterator[None]:
+ autodoc_root_path = str(TEST_ROOTS_DIR / 'test-ext-autodoc')
+
+ sys.path.insert(0, autodoc_root_path)
+ yield
+ sys.path[:] = [p for p in sys.path if p != autodoc_root_path]
diff --git a/tests/test_extensions/test_ext_autodoc.py b/tests/test_ext_autodoc/test_ext_autodoc.py
similarity index 63%
rename from tests/test_extensions/test_ext_autodoc.py
rename to tests/test_ext_autodoc/test_ext_autodoc.py
index a06c1bbe30d..03355d1f703 100644
--- a/tests/test_extensions/test_ext_autodoc.py
+++ b/tests/test_ext_autodoc/test_ext_autodoc.py
@@ -6,23 +6,31 @@
from __future__ import annotations
-import functools
import itertools
-import operator
+import logging
+import pathlib
import sys
from typing import TYPE_CHECKING
-from unittest.mock import Mock
from warnings import catch_warnings
import pytest
+from docutils.statemachine import StringList
-from sphinx import addnodes
-from sphinx.ext.autodoc import ALL, ModuleLevelDocumenter, Options
-
-# NEVER import these objects from sphinx.ext.autodoc directly
-from sphinx.ext.autodoc.directive import DocumenterBridge
+from sphinx.environment import _CurrentDocument
+from sphinx.ext.autodoc._directive_options import (
+ _AutoDocumenterOptions,
+ inherited_members_option,
+)
+from sphinx.ext.autodoc._docstrings import _get_docstring_lines
+from sphinx.ext.autodoc._documenters import Documenter
+from sphinx.ext.autodoc._generate import _generate_directives
+from sphinx.ext.autodoc._loader import _load_object_by_name
+from sphinx.ext.autodoc._property_types import _ItemProperties
+from sphinx.ext.autodoc._sentinels import ALL
+from sphinx.ext.autodoc._shared import _AutodocAttrGetter, _AutodocConfig
+from sphinx.util.inspect import safe_getattr
-from tests.test_extensions.autodoc_util import do_autodoc
+from tests.test_ext_autodoc.autodoc_util import FakeEvents, do_autodoc
try:
# Enable pyximport to test cython module
@@ -35,359 +43,60 @@
if TYPE_CHECKING:
from typing import Any
- from sphinx.environment import BuildEnvironment
-
-
-def make_directive_bridge(env: BuildEnvironment) -> DocumenterBridge:
- options = Options(
- inherited_members=False,
- undoc_members=False,
- private_members=False,
- special_members=False,
- imported_members=False,
- show_inheritance=False,
- no_index=False,
- annotation=None,
- synopsis='',
- platform='',
- deprecated=False,
- members=[],
- member_order='alphabetical',
- exclude_members=set(),
- ignore_module_all=False,
- )
-
- directive = DocumenterBridge(
- env=env,
- reporter=None,
- options=options,
- lineno=0,
- state=Mock(),
- )
- directive.state.document.settings.tab_width = 8
-
- return directive
-
-
-processed_signatures = []
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+ from sphinx.ext.autodoc._shared import _AttrGetter
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
-@pytest.mark.sphinx('html', testroot='root')
-def test_parse_name(app):
- def verify(objtype, name, result):
- inst = app.registry.documenters[objtype](directive, name)
- assert inst.parse_name()
- assert (inst.modname, inst.objpath, inst.args, inst.retann) == result
+processed_signatures: list[tuple[str, str]] = []
- directive = make_directive_bridge(app.env)
- # for modules
- verify('module', 'test_ext_autodoc', ('test_ext_autodoc', [], None, None))
- verify('module', 'test.test_ext_autodoc', ('test.test_ext_autodoc', [], None, None))
- verify('module', 'test(arg)', ('test', [], 'arg', None))
- assert 'signature arguments' in app.warning.getvalue()
+def get_docstring_lines(obj_type, obj):
+ config = _AutodocConfig()
- # for functions/classes
- verify(
- 'function',
- 'test_ext_autodoc.raises',
- ('test_ext_autodoc', ['raises'], None, None),
- )
- verify(
- 'function',
- 'test_ext_autodoc.raises(exc) -> None',
- ('test_ext_autodoc', ['raises'], 'exc', 'None'),
- )
- directive.env.current_document.autodoc_module = 'test_ext_autodoc'
- verify('function', 'raises', ('test_ext_autodoc', ['raises'], None, None))
- directive.env.current_document.autodoc_module = ''
-
- directive.env.ref_context['py:module'] = 'test_ext_autodoc'
- verify('function', 'raises', ('test_ext_autodoc', ['raises'], None, None))
- verify('class', 'Base', ('test_ext_autodoc', ['Base'], None, None))
-
- # for members
- directive.env.ref_context['py:module'] = 'sphinx.testing.util'
- verify(
- 'method',
- 'SphinxTestApp.cleanup',
- ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None),
+ parent = object # dummy
+ props = _ItemProperties(
+ obj_type=obj_type,
+ module_name='',
+ parts=(obj.__name__,),
+ docstring_lines=(),
+ _obj=obj,
+ _obj___module__=getattr(obj, '__module__', None),
)
- directive.env.ref_context['py:module'] = 'sphinx.testing.util'
- directive.env.ref_context['py:class'] = 'Foo'
- directive.env.current_document.autodoc_class = 'SphinxTestApp'
- verify(
- 'method',
- 'cleanup',
- ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None),
- )
- verify(
- 'method',
- 'SphinxTestApp.cleanup',
- ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None),
- )
-
-
-@pytest.mark.sphinx('html', testroot='root')
-def test_format_signature(app):
- def process_signature(app, what, name, obj, options, args, retann):
- processed_signatures.append((what, name))
- if name == 'bar':
- return '42', None
- return None
-
- def skip_member(app, what, name, obj, skip, options):
- if name in {'__special1__', '__special2__'}:
- return skip
- if name.startswith('__'):
- return True
- if name == 'skipmeth':
- return True
- return None
-
- app.connect('autodoc-process-signature', process_signature)
- app.connect('autodoc-skip-member', skip_member)
-
- directive = make_directive_bridge(app.env)
-
- def formatsig(objtype, name, obj, args, retann):
- inst = app.registry.documenters[objtype](directive, name)
- inst.fullname = name
- inst.doc_as_attr = False # for class objtype
- inst.parent = object # dummy
- inst.object = obj
- inst.objpath = [name]
- inst.args = args
- inst.retann = retann
- res = inst.format_signature()
- print(res)
- return res
-
- # no signatures for modules
- assert formatsig('module', 'test', None, None, None) == ''
-
- # test for functions
- def f(a, b, c=1, **d):
- pass
-
- def g(a='\n'):
- pass
-
- assert formatsig('function', 'f', f, None, None) == '(a, b, c=1, **d)'
- assert formatsig('function', 'f', f, 'a, b, c, d', None) == '(a, b, c, d)'
- assert formatsig('function', 'g', g, None, None) == r"(a='\n')"
-
- if sys.version_info >= (3, 12):
- for params, expect in [
- ('(a=1)', '(a=1)'),
- ('(a: int=1)', '(a: int = 1)'), # auto whitespace formatting
- ('(a:list[T] =[], b=None)', '(a: list[T] = [], b=None)'), # idem
- ]:
- ns = {}
- exec(f'def f[T]{params}: pass', ns) # NoQA: S102
- f = ns['f']
- assert formatsig('function', 'f', f, None, None) == expect
- assert formatsig('function', 'f', f, '...', None) == '(...)'
- assert formatsig('function', 'f', f, '...', '...') == '(...) -> ...'
-
- exec(f'def f[T]{params} -> list[T]: return []', ns) # NoQA: S102
- f = ns['f']
- assert formatsig('function', 'f', f, None, None) == f'{expect} -> list[T]'
- assert formatsig('function', 'f', f, '...', None) == '(...)'
- assert formatsig('function', 'f', f, '...', '...') == '(...) -> ...'
-
- # TODO(picnixz): add more test cases for PEP-695 classes as well (though
- # complex cases are less likely to appear and are painful to test).
-
- # test for classes
- class D:
- pass
-
- class E:
- def __init__(self):
- pass
-
- # an empty init and no init are the same
- for C in (D, E):
- assert formatsig('class', 'D', C, None, None) == '()'
-
- class SomeMeta(type):
- def __call__(cls, a, b=None):
- return type.__call__(cls, a, b)
-
- # these three are all equivalent
- class F:
- def __init__(self, a, b=None):
- pass
-
- class FNew:
- def __new__(cls, a, b=None): # NoQA: ARG004
- return super().__new__(cls)
-
- class FMeta(metaclass=SomeMeta):
- pass
-
- # and subclasses should always inherit
- class G(F):
- pass
-
- class GNew(FNew):
- pass
-
- class GMeta(FMeta):
- pass
-
- # subclasses inherit
- for C in (F, FNew, FMeta, G, GNew, GMeta):
- assert formatsig('class', 'C', C, None, None) == '(a, b=None)'
- assert formatsig('class', 'C', D, 'a, b', 'X') == '(a, b) -> X'
-
- class ListSubclass(list): # NoQA: FURB189
- pass
-
- # only supported if the python implementation decides to document it
- if getattr(list, '__text_signature__', None) is not None:
- assert formatsig('class', 'C', ListSubclass, None, None) == '(iterable=(), /)'
- else:
- assert formatsig('class', 'C', ListSubclass, None, None) == ''
-
- class ExceptionSubclass(Exception):
- pass
-
- # Exception has no __text_signature__ at least in Python 3.11
- if getattr(Exception, '__text_signature__', None) is None:
- assert formatsig('class', 'C', ExceptionSubclass, None, None) == ''
-
- # __init__ have signature at first line of docstring
- directive.env.config.autoclass_content = 'both'
-
- class F2:
- """some docstring for F2."""
-
- def __init__(self, *args, **kw):
- """__init__(a1, a2, kw1=True, kw2=False)
-
- some docstring for __init__.
- """
-
- class G2(F2):
- pass
-
- assert formatsig('class', 'F2', F2, None, None) == '(a1, a2, kw1=True, kw2=False)'
- assert formatsig('class', 'G2', G2, None, None) == '(a1, a2, kw1=True, kw2=False)'
-
- # test for methods
- class H:
- def foo1(self, b, *c):
- pass
-
- def foo2(b, *c): # NoQA: N805
- pass
-
- def foo3(self, d='\n'):
- pass
-
- assert formatsig('method', 'H.foo', H.foo1, None, None) == '(b, *c)'
- assert formatsig('method', 'H.foo', H.foo1, 'a', None) == '(a)'
- assert formatsig('method', 'H.foo', H.foo2, None, None) == '(*c)'
- assert formatsig('method', 'H.foo', H.foo3, None, None) == r"(d='\n')"
-
- # test bound methods interpreted as functions
- assert formatsig('function', 'foo', H().foo1, None, None) == '(b, *c)'
- assert formatsig('function', 'foo', H().foo2, None, None) == '(*c)'
- assert formatsig('function', 'foo', H().foo3, None, None) == r"(d='\n')"
-
- # test exception handling (exception is caught and args is '')
- directive.env.config.autodoc_docstring_signature = False
- assert formatsig('function', 'int', int, None, None) == ''
-
- # test processing by event handler
- assert formatsig('method', 'bar', H.foo1, None, None) == '42'
-
- # test functions created via functools.partial
- from functools import partial
-
- curried1 = partial(lambda a, b, c: None, 'A')
- assert formatsig('function', 'curried1', curried1, None, None) == '(b, c)'
- curried2 = partial(lambda a, b, c=42: None, 'A')
- assert formatsig('function', 'curried2', curried2, None, None) == '(b, c=42)'
- curried3 = partial(lambda a, b, *c: None, 'A')
- assert formatsig('function', 'curried3', curried3, None, None) == '(b, *c)'
- curried4 = partial(lambda a, b, c=42, *d, **e: None, 'A')
- assert (
- formatsig('function', 'curried4', curried4, None, None) == '(b, c=42, *d, **e)'
+ ds = _get_docstring_lines(
+ props,
+ class_doc_from=config.autoclass_content,
+ get_attr=safe_getattr,
+ inherit_docstrings=config.autodoc_inherit_docstrings,
+ parent=parent,
+ tab_width=8,
)
+ # for testing purposes, concat them and strip the empty line at the end
+ res = list(itertools.chain.from_iterable(ds or ()))
+ if res:
+ res.pop()
+ return tuple(res)
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_process_signature_typing_generic(app):
- actual = do_autodoc(app, 'class', 'target.generic_class.A', {})
-
- assert list(actual) == [
- '',
- '.. py:class:: A(a, b=None)',
- ' :module: target.generic_class',
- '',
- ' docstring for A',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='root')
-def test_autodoc_process_signature_typehints(app):
- captured = []
-
- def process_signature(*args):
- captured.append(args)
-
- app.connect('autodoc-process-signature', process_signature)
-
- def func(x: int, y: int) -> int: # type: ignore[empty-body]
- pass
-
- directive = make_directive_bridge(app.env)
- inst = app.registry.documenters['function'](directive, 'func')
- inst.fullname = 'func'
- inst.object = func
- inst.objpath = ['func']
- inst.format_signature()
- assert captured == [
- (app, 'function', 'func', func, directive.genopt, '(x: int, y: int)', 'int')
- ]
-
-
-@pytest.mark.sphinx('html', testroot='root')
-def test_get_doc(app):
- directive = make_directive_bridge(app.env)
-
- def getdocl(objtype, obj):
- inst = app.registry.documenters[objtype](directive, 'tmp')
- inst.parent = object # dummy
- inst.object = obj
- inst.objpath = [obj.__name__]
- inst.doc_as_attr = False
- inst.format_signature() # handle docstring signatures!
- ds = inst.get_doc()
- # for testing purposes, concat them and strip the empty line at the end
- res = functools.reduce(operator.iadd, ds, [])[:-1]
- print(res)
- return res
-
+def test_get_docstring_lines():
# objects without docstring
def f():
pass
- assert getdocl('function', f) == []
+ assert get_docstring_lines('function', f) == ()
# standard function, diverse docstring styles...
def f():
"""Docstring"""
- def g():
- """Docstring"""
+ assert get_docstring_lines('function', f) == ('Docstring',)
+
+ def f():
+ """
+ Docstring
+ """ # NoQA: D212
- for func in (f, g):
- assert getdocl('function', func) == ['Docstring']
+ assert get_docstring_lines('function', f) == ('Docstring',)
# first line vs. other lines indentation
def f():
@@ -397,13 +106,18 @@ def f():
lines
"""
- assert getdocl('function', f) == ['First line', '', 'Other', ' lines']
+ assert get_docstring_lines('function', f) == (
+ 'First line',
+ '',
+ 'Other',
+ ' lines',
+ )
# charset guessing (this module is encoded in utf-8)
def f():
"""Döcstring"""
- assert getdocl('function', f) == ['Döcstring']
+ assert get_docstring_lines('function', f) == ('Döcstring',)
# verify that method docstrings get extracted in both normal case
# and in case of bound method posing as a function
@@ -411,29 +125,33 @@ class J:
def foo(self):
"""Method docstring"""
- assert getdocl('method', J.foo) == ['Method docstring']
- assert getdocl('function', J().foo) == ['Method docstring']
+ expected = ('Method docstring',)
+ assert get_docstring_lines('method', J.foo) == expected
+ assert get_docstring_lines('function', J().foo) == expected
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_new_documenter(app):
- class MyDocumenter(ModuleLevelDocumenter):
- objtype = 'integer'
- directivetype = 'integer'
- priority = 100
+class _MyDocumenter(Documenter):
+ objtype = 'integer'
+ directivetype = 'integer'
+ priority = 100
- @classmethod
- def can_document_member(cls, member, membername, isattr, parent):
- return isinstance(member, int)
+ @classmethod
+ def can_document_member(cls, member, membername, isattr, parent):
+ return isinstance(member, int)
- def document_members(self, all_members=False):
- return
+ def document_members(self, all_members=False):
+ return
- app.add_autodocumenter(MyDocumenter)
+
+def test_new_documenter():
+ config = _AutodocConfig()
+ # app.add_autodocumenter(_MyDocumenter)
options = {'members': 'integer'}
- actual = do_autodoc(app, 'module', 'target', options)
- assert list(actual) == [
+ # TODO: Fix! Perhaps add a way to signal module/class-level?
+ actual = do_autodoc('module', 'target', config=config, options=options)
+ return
+ assert actual == [
'',
'.. py:module:: target',
'',
@@ -446,24 +164,11 @@ def document_members(self, all_members=False):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_attrgetter_using(app):
- directive = make_directive_bridge(app.env)
- directive.genopt['members'] = ALL
-
- directive.genopt['inherited_members'] = False
- with catch_warnings(record=True):
- _assert_getter_works(app, directive, 'class', 'target.Class', ['meth'])
-
- directive.genopt['inherited_members'] = True
- with catch_warnings(record=True):
- _assert_getter_works(
- app, directive, 'class', 'target.inheritance.Derived', ['inheritedmeth']
- )
+getattr_spy = []
-def _assert_getter_works(app, directive, objtype, name, attrs=(), **kw):
- getattr_spy = []
+def test_attrgetter_using():
+ attrs = []
def _special_getattr(obj, attr_name, *defargs):
if attr_name in attrs:
@@ -471,10 +176,72 @@ def _special_getattr(obj, attr_name, *defargs):
return None
return getattr(obj, attr_name, *defargs)
- app.add_autodoc_attrgetter(type, _special_getattr)
+ # See Sphinx.add_autodoc_attrgetter()
+ autodoc_attrgetters = {type: _special_getattr}
+ get_attr = _AutodocAttrGetter(autodoc_attrgetters)
+ options = _AutoDocumenterOptions(members=ALL)
+
+ options.inherited_members = inherited_members_option(False)
+ attrs[:] = ['meth']
+ with catch_warnings(record=True):
+ _assert_getter_works(
+ 'class',
+ 'target.Class',
+ *attrs,
+ get_attr=get_attr,
+ options=options,
+ )
+ options.inherited_members = inherited_members_option(True)
+ attrs[:] = ['inheritedmeth']
+ with catch_warnings(record=True):
+ _assert_getter_works(
+ 'class',
+ 'target.inheritance.Derived',
+ *attrs,
+ get_attr=get_attr,
+ options=options,
+ )
+
+
+def _assert_getter_works(
+ objtype: _AutodocObjType,
+ name: str,
+ *attrs: str,
+ get_attr: _AttrGetter,
+ options: _AutoDocumenterOptions,
+) -> None:
getattr_spy.clear()
- app.registry.documenters[objtype](directive, name).generate(**kw)
+
+ config = _AutodocConfig()
+ current_document = _CurrentDocument()
+ events = FakeEvents()
+
+ props = _load_object_by_name(
+ name=name,
+ objtype=objtype,
+ current_document=current_document,
+ config=config,
+ events=events,
+ get_attr=get_attr,
+ options=options,
+ ref_context={},
+ reread_always=set(),
+ )
+ if props is not None:
+ _generate_directives(
+ config=config,
+ current_document=current_document,
+ events=events,
+ get_attr=get_attr,
+ indent='',
+ options=options,
+ props=props,
+ record_dependencies=set(),
+ ref_context={},
+ reread_always=set(),
+ result=StringList(),
+ )
hooked_members = {s[1] for s in getattr_spy}
documented_members = {s[1] for s in processed_signatures}
@@ -484,21 +251,25 @@ def _special_getattr(obj, attr_name, *defargs):
assert fullname not in documented_members, f'{fullname!r} not intercepted'
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_py_module(app):
+def test_py_module(caplog: pytest.LogCaptureFixture) -> None:
+ # work around sphinx.util.logging.setup()
+ logger = logging.getLogger('sphinx')
+ logger.handlers[:] = [caplog.handler]
+ caplog.set_level(logging.WARNING)
+
# without py:module
- actual = do_autodoc(app, 'method', 'Class.meth')
- assert list(actual) == []
+ actual = do_autodoc('method', 'Class.meth', expect_import_error=True)
+ assert actual == []
+ assert len(set(caplog.messages)) == 1
assert (
"don't know which module to import for autodocumenting 'Class.meth'"
- ) in app.warning.getvalue()
+ ) in caplog.messages[0]
+ caplog.clear()
# with py:module
- app.env.ref_context['py:module'] = 'target'
- app.warning.truncate(0)
-
- actual = do_autodoc(app, 'method', 'Class.meth')
- assert list(actual) == [
+ ref_context: dict[str, Any] = {'py:module': 'target'}
+ actual = do_autodoc('method', 'Class.meth', ref_context=ref_context)
+ assert actual == [
'',
'.. py:method:: Class.meth()',
' :module: target',
@@ -506,15 +277,12 @@ def test_py_module(app):
' Function.',
'',
]
- assert (
- "don't know which module to import for autodocumenting 'Class.meth'"
- ) not in app.warning.getvalue()
+ assert len(caplog.records) == 0
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_decorator(app):
- actual = do_autodoc(app, 'decorator', 'target.decorator.deco1')
- assert list(actual) == [
+def test_autodoc_decorator() -> None:
+ actual = do_autodoc('decorator', 'target.decorator.deco1')
+ assert actual == [
'',
'.. py:decorator:: deco1',
' :module: target.decorator',
@@ -523,8 +291,8 @@ def test_autodoc_decorator(app):
'',
]
- actual = do_autodoc(app, 'decorator', 'target.decorator.deco2')
- assert list(actual) == [
+ actual = do_autodoc('decorator', 'target.decorator.deco2')
+ assert actual == [
'',
'.. py:decorator:: deco2(condition, message)',
' :module: target.decorator',
@@ -534,10 +302,9 @@ def test_autodoc_decorator(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_exception(app):
- actual = do_autodoc(app, 'exception', 'target.CustomEx')
- assert list(actual) == [
+def test_autodoc_exception() -> None:
+ actual = do_autodoc('exception', 'target.CustomEx')
+ assert actual == [
'',
'.. py:exception:: CustomEx',
' :module: target',
@@ -547,39 +314,63 @@ def test_autodoc_exception(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_warnings(app):
- app.env.current_document.docname = 'dummy'
+def test_autodoc_warnings(caplog: pytest.LogCaptureFixture) -> None:
+ # work around sphinx.util.logging.setup()
+ logger = logging.getLogger('sphinx')
+ logger.handlers[:] = [caplog.handler]
+ caplog.set_level(logging.WARNING)
+
+ current_document = _CurrentDocument(docname='dummy')
# can't import module
- do_autodoc(app, 'module', 'unknown')
- assert "failed to import module 'unknown'" in app.warning.getvalue()
+ caplog.clear()
+ do_autodoc(
+ 'module', 'unknown', current_document=current_document, expect_import_error=True
+ )
+ assert len(set(caplog.messages)) == 1
+ assert "failed to import 'unknown'" in caplog.messages[0]
# missing function
- do_autodoc(app, 'function', 'unknown')
- assert "import for autodocumenting 'unknown'" in app.warning.getvalue()
+ caplog.clear()
+ do_autodoc(
+ 'function',
+ 'unknown',
+ current_document=current_document,
+ expect_import_error=True,
+ )
+ assert len(set(caplog.messages)) == 1
+ assert "import for autodocumenting 'unknown'" in caplog.messages[0]
- do_autodoc(app, 'function', 'target.unknown')
- assert (
- "failed to import function 'unknown' from module 'target'"
- ) in app.warning.getvalue()
+ caplog.clear()
+ do_autodoc(
+ 'function',
+ 'target.unknown',
+ current_document=current_document,
+ expect_import_error=True,
+ )
+ assert len(set(caplog.messages)) == 1
+ assert "failed to import 'unknown' from module 'target'" in caplog.messages[0]
# missing method
- do_autodoc(app, 'method', 'target.Class.unknown')
- assert (
- "failed to import method 'Class.unknown' from module 'target'"
- ) in app.warning.getvalue()
+ caplog.clear()
+ do_autodoc(
+ 'method',
+ 'target.Class.unknown',
+ current_document=current_document,
+ expect_import_error=True,
+ )
+ assert len(set(caplog.messages)) == 1
+ assert "failed to import 'Class.unknown' from module 'target'" in caplog.messages[0]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_attributes(app):
+def test_autodoc_attributes() -> None:
options = {
'synopsis': 'Synopsis',
'platform': 'Platform',
'deprecated': None,
}
- actual = do_autodoc(app, 'module', 'target', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target', options=options)
+ assert actual == [
'',
'.. py:module:: target',
' :synopsis: Synopsis',
@@ -589,18 +380,19 @@ def test_autodoc_attributes(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_members(app):
+def test_autodoc_members() -> None:
+ options: dict[str, Any]
+
# default (no-members)
- actual = do_autodoc(app, 'class', 'target.inheritance.Base')
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.inheritance.Base')
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
]
# default ALL-members
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.inheritance.Base', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:attribute:: Base.inheritedattr',
' .. py:method:: Base.inheritedclassmeth()',
@@ -610,8 +402,8 @@ def test_autodoc_members(app):
# default specific-members
options = {'members': 'inheritedmeth,inheritedstaticmeth'}
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.inheritance.Base', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:method:: Base.inheritedmeth()',
' .. py:method:: Base.inheritedstaticmeth(cls)',
@@ -619,9 +411,11 @@ def test_autodoc_members(app):
# ALL-members override autodoc_default_options
options = {'members': None}
- app.config.autodoc_default_options['members'] = 'inheritedstaticmeth'
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ config = _AutodocConfig(autodoc_default_options={'members': 'inheritedstaticmeth'})
+ actual = do_autodoc(
+ 'class', 'target.inheritance.Base', config=config, options=options
+ )
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:attribute:: Base.inheritedattr',
' .. py:method:: Base.inheritedclassmeth()',
@@ -631,32 +425,33 @@ def test_autodoc_members(app):
# members override autodoc_default_options
options = {'members': 'inheritedmeth'}
- app.config.autodoc_default_options['members'] = 'inheritedstaticmeth'
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc(
+ 'class', 'target.inheritance.Base', config=config, options=options
+ )
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:method:: Base.inheritedmeth()',
]
# members extends autodoc_default_options
options = {'members': '+inheritedmeth'}
- app.config.autodoc_default_options['members'] = 'inheritedstaticmeth'
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc(
+ 'class', 'target.inheritance.Base', config=config, options=options
+ )
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:method:: Base.inheritedmeth()',
' .. py:method:: Base.inheritedstaticmeth(cls)',
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_exclude_members(app):
+def test_autodoc_exclude_members() -> None:
options = {
'members': None,
'exclude-members': 'inheritedmeth,inheritedstaticmeth',
}
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.inheritance.Base', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:attribute:: Base.inheritedattr',
' .. py:method:: Base.inheritedclassmeth()',
@@ -667,8 +462,8 @@ def test_autodoc_exclude_members(app):
'members': 'inheritedmeth',
'exclude-members': 'inheritedmeth',
}
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.inheritance.Base', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
]
@@ -677,8 +472,8 @@ def test_autodoc_exclude_members(app):
'members': None,
'exclude-members': '+inheritedmeth,inheritedstaticmeth',
}
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.inheritance.Base', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:attribute:: Base.inheritedattr',
' .. py:method:: Base.inheritedclassmeth()',
@@ -689,9 +484,13 @@ def test_autodoc_exclude_members(app):
'members': None,
'exclude-members': 'inheritedmeth',
}
- app.config.autodoc_default_options['exclude-members'] = 'inheritedstaticmeth'
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ config = _AutodocConfig(
+ autodoc_default_options={'exclude-members': 'inheritedstaticmeth'}
+ )
+ actual = do_autodoc(
+ 'class', 'target.inheritance.Base', config=config, options=options
+ )
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:attribute:: Base.inheritedattr',
' .. py:method:: Base.inheritedclassmeth()',
@@ -703,9 +502,13 @@ def test_autodoc_exclude_members(app):
'members': None,
'exclude-members': '+inheritedmeth',
}
- app.config.autodoc_default_options['exclude-members'] = 'inheritedstaticmeth'
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ config = _AutodocConfig(
+ autodoc_default_options={'exclude-members': 'inheritedstaticmeth'}
+ )
+ actual = do_autodoc(
+ 'class', 'target.inheritance.Base', config=config, options=options
+ )
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:attribute:: Base.inheritedattr',
' .. py:method:: Base.inheritedclassmeth()',
@@ -713,11 +516,13 @@ def test_autodoc_exclude_members(app):
# no exclude-members causes use autodoc_default_options
options = {'members': None}
- app.config.autodoc_default_options['exclude-members'] = (
- 'inheritedstaticmeth,inheritedmeth'
+ config = _AutodocConfig(
+ autodoc_default_options={'exclude-members': 'inheritedstaticmeth,inheritedmeth'}
+ )
+ actual = do_autodoc(
+ 'class', 'target.inheritance.Base', config=config, options=options
)
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:attribute:: Base.inheritedattr',
' .. py:method:: Base.inheritedclassmeth()',
@@ -728,11 +533,13 @@ def test_autodoc_exclude_members(app):
'members': None,
'exclude-members': None,
}
- app.config.autodoc_default_options['exclude-members'] = (
- 'inheritedstaticmeth,inheritedmeth'
+ config = _AutodocConfig(
+ autodoc_default_options={'exclude-members': 'inheritedstaticmeth,inheritedmeth'}
+ )
+ actual = do_autodoc(
+ 'class', 'target.inheritance.Base', config=config, options=options
)
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Base()',
' .. py:attribute:: Base.inheritedattr',
' .. py:method:: Base.inheritedclassmeth()',
@@ -741,14 +548,13 @@ def test_autodoc_exclude_members(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_undoc_members(app):
+def test_autodoc_undoc_members() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.Class', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.a_staticmeth()',
' .. py:attribute:: Class.attr',
@@ -770,9 +576,9 @@ def test_autodoc_undoc_members(app):
# use autodoc_default_options
options = {'members': None}
- app.config.autodoc_default_options['undoc-members'] = None
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ config = _AutodocConfig(autodoc_default_options={'undoc-members': True})
+ actual = do_autodoc('class', 'target.Class', config=config, options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.a_staticmeth()',
' .. py:attribute:: Class.attr',
@@ -797,9 +603,8 @@ def test_autodoc_undoc_members(app):
'members': None,
'no-undoc-members': None,
}
- app.config.autodoc_default_options['undoc-members'] = None
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.Class', config=config, options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:attribute:: Class.attr',
' .. py:attribute:: Class.docattr',
@@ -815,12 +620,11 @@ def test_autodoc_undoc_members(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_undoc_members_for_metadata_only(app):
+def test_autodoc_undoc_members_for_metadata_only() -> None:
# metadata only member is not displayed
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.metadata', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.metadata', options=options)
+ assert actual == [
'',
'.. py:module:: target.metadata',
'',
@@ -831,8 +635,8 @@ def test_autodoc_undoc_members_for_metadata_only(app):
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.metadata', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.metadata', options=options)
+ assert actual == [
'',
'.. py:module:: target.metadata',
'',
@@ -845,14 +649,13 @@ def test_autodoc_undoc_members_for_metadata_only(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_inherited_members(app):
+def test_autodoc_inherited_members() -> None:
options = {
'members': None,
'inherited-members': None,
}
- actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options)
- assert list(filter(lambda l: 'method::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.inheritance.Derived', options=options)
+ assert [line for line in actual if 'method::' in line] == [
' .. py:method:: Derived.another_inheritedmeth()',
' .. py:method:: Derived.inheritedclassmeth()',
' .. py:method:: Derived.inheritedmeth()',
@@ -860,8 +663,7 @@ def test_autodoc_inherited_members(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_inherited_members_Base(app):
+def test_autodoc_inherited_members_Base() -> None:
options = {
'members': None,
'inherited-members': 'Base',
@@ -869,13 +671,12 @@ def test_autodoc_inherited_members_Base(app):
}
# check methods for object class are shown
- actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options)
+ actual = do_autodoc('class', 'target.inheritance.Derived', options=options)
assert ' .. py:method:: Derived.inheritedmeth()' in actual
assert ' .. py:method:: Derived.inheritedclassmeth' not in actual
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_inherited_members_None(app):
+def test_autodoc_inherited_members_None() -> None:
options = {
'members': None,
'inherited-members': 'None',
@@ -883,33 +684,31 @@ def test_autodoc_inherited_members_None(app):
}
# check methods for object class are shown
- actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options)
+ actual = do_autodoc('class', 'target.inheritance.Derived', options=options)
assert ' .. py:method:: Derived.__init__()' in actual
assert ' .. py:method:: Derived.__str__()' in actual
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_imported_members(app):
+def test_autodoc_imported_members() -> None:
options = {
'members': None,
'imported-members': None,
'ignore-module-all': None,
}
- actual = do_autodoc(app, 'module', 'target', options)
+ actual = do_autodoc('module', 'target', options=options)
assert (
'.. py:function:: function_to_be_imported(app: ~sphinx.application.Sphinx | None) -> str'
) in actual
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_special_members(app):
+def test_autodoc_special_members() -> None:
# specific special methods
options = {
'undoc-members': None,
'special-members': '__init__,__special1__',
}
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.Class', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.__init__(arg)',
' .. py:method:: Class.__special1__()',
@@ -921,8 +720,8 @@ def test_autodoc_special_members(app):
'undoc-members': None,
'special-members': '__init__,__special1__',
}
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.Class', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.__init__(arg)',
' .. py:method:: Class.__special1__()',
@@ -938,10 +737,17 @@ def test_autodoc_special_members(app):
}
if sys.version_info >= (3, 13, 0, 'alpha', 5):
options['exclude-members'] = '__static_attributes__,__firstlineno__'
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ if sys.version_info >= (3, 14, 0, 'alpha', 7):
+ ann_attrs: tuple[str, ...] = (
+ ' .. py:attribute:: Class.__annotate_func__',
+ ' .. py:attribute:: Class.__annotations_cache__',
+ )
+ else:
+ ann_attrs = (' .. py:attribute:: Class.__annotations__',)
+ actual = do_autodoc('class', 'target.Class', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
- ' .. py:attribute:: Class.__annotations__',
+ *ann_attrs,
' .. py:attribute:: Class.__dict__',
' .. py:method:: Class.__init__(arg)',
' .. py:attribute:: Class.__module__',
@@ -968,9 +774,9 @@ def test_autodoc_special_members(app):
# specific special methods from autodoc_default_options
options = {'undoc-members': None}
- app.config.autodoc_default_options['special-members'] = '__special2__'
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ config = _AutodocConfig(autodoc_default_options={'special-members': '__special2__'})
+ actual = do_autodoc('class', 'target.Class', config=config, options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.__special2__()',
]
@@ -980,9 +786,8 @@ def test_autodoc_special_members(app):
'undoc-members': None,
'special-members': '__init__,__special1__',
}
- app.config.autodoc_default_options['special-members'] = '__special2__'
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.Class', config=config, options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.__init__(arg)',
' .. py:method:: Class.__special1__()',
@@ -993,9 +798,8 @@ def test_autodoc_special_members(app):
'undoc-members': None,
'special-members': '+__init__,__special1__',
}
- app.config.autodoc_default_options['special-members'] = '__special2__'
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.Class', config=config, options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.__init__(arg)',
' .. py:method:: Class.__special1__()',
@@ -1003,12 +807,11 @@ def test_autodoc_special_members(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_ignore_module_all(app):
+def test_autodoc_ignore_module_all() -> None:
# default (no-ignore-module-all)
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target', options)
- assert list(filter(lambda l: 'class::' in l, actual)) == [
+ actual = do_autodoc('module', 'target', options=options)
+ assert [line for line in actual if 'class::' in line] == [
'.. py:class:: Class(arg)',
]
@@ -1017,8 +820,8 @@ def test_autodoc_ignore_module_all(app):
'members': None,
'ignore-module-all': None,
}
- actual = do_autodoc(app, 'module', 'target', options)
- assert list(filter(lambda l: 'class::' in l, actual)) == [
+ actual = do_autodoc('module', 'target', options=options)
+ assert [line for line in actual if 'class::' in line] == [
'.. py:class:: Class(arg)',
'.. py:class:: CustomDict',
'.. py:class:: InnerChild()',
@@ -1029,11 +832,10 @@ def test_autodoc_ignore_module_all(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_noindex(app):
+def test_autodoc_noindex() -> None:
options = {'no-index': None}
- actual = do_autodoc(app, 'module', 'target', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target', options=options)
+ assert actual == [
'',
'.. py:module:: target',
' :no-index:',
@@ -1042,8 +844,8 @@ def test_autodoc_noindex(app):
# TODO: :no-index: should be propagated to children of target item.
- actual = do_autodoc(app, 'class', 'target.inheritance.Base', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.inheritance.Base', options=options)
+ assert actual == [
'',
'.. py:class:: Base()',
' :no-index:',
@@ -1052,11 +854,10 @@ def test_autodoc_noindex(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_subclass_of_builtin_class(app):
+def test_autodoc_subclass_of_builtin_class() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.CustomDict', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.CustomDict', options=options)
+ assert actual == [
'',
'.. py:class:: CustomDict',
' :module: target',
@@ -1066,11 +867,10 @@ def test_autodoc_subclass_of_builtin_class(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_inner_class(app):
+def test_autodoc_inner_class() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.Outer', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.Outer', options=options)
+ assert actual == [
'',
'.. py:class:: Outer()',
' :module: target',
@@ -1096,8 +896,8 @@ def test_autodoc_inner_class(app):
' alias of :py:class:`dict`',
]
- actual = do_autodoc(app, 'class', 'target.Outer.Inner', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.Outer.Inner', options=options)
+ assert actual == [
'',
'.. py:class:: Inner()',
' :module: target.Outer',
@@ -1113,8 +913,8 @@ def test_autodoc_inner_class(app):
]
options['show-inheritance'] = None
- actual = do_autodoc(app, 'class', 'target.InnerChild', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.InnerChild', options=options)
+ assert actual == [
'',
'.. py:class:: InnerChild()',
' :module: target',
@@ -1126,10 +926,9 @@ def test_autodoc_inner_class(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_classmethod(app):
- actual = do_autodoc(app, 'method', 'target.inheritance.Base.inheritedclassmeth')
- assert list(actual) == [
+def test_autodoc_classmethod() -> None:
+ actual = do_autodoc('method', 'target.inheritance.Base.inheritedclassmeth')
+ assert actual == [
'',
'.. py:method:: Base.inheritedclassmeth()',
' :module: target.inheritance',
@@ -1140,10 +939,9 @@ def test_autodoc_classmethod(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_staticmethod(app):
- actual = do_autodoc(app, 'method', 'target.inheritance.Base.inheritedstaticmeth')
- assert list(actual) == [
+def test_autodoc_staticmethod() -> None:
+ actual = do_autodoc('method', 'target.inheritance.Base.inheritedstaticmeth')
+ assert actual == [
'',
'.. py:method:: Base.inheritedstaticmeth(cls)',
' :module: target.inheritance',
@@ -1154,14 +952,13 @@ def test_autodoc_staticmethod(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_descriptor(app):
+def test_autodoc_descriptor() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'class', 'target.descriptor.Class', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.descriptor.Class', options=options)
+ assert actual == [
'',
'.. py:class:: Class()',
' :module: target.descriptor',
@@ -1181,14 +978,13 @@ def test_autodoc_descriptor(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_cached_property(app):
+def test_autodoc_cached_property() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'class', 'target.cached_property.Foo', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.cached_property.Foo', options=options)
+ assert actual == [
'',
'.. py:class:: Foo()',
' :module: target.cached_property',
@@ -1206,8 +1002,7 @@ def test_autodoc_cached_property(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_member_order(app):
+def test_autodoc_member_order() -> None:
# case member-order='bysource'
options = {
'members': None,
@@ -1215,8 +1010,8 @@ def test_autodoc_member_order(app):
'undoc-members': None,
'private-members': None,
}
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.Class', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:method:: Class.meth()',
' .. py:method:: Class.undocmeth()',
@@ -1244,8 +1039,8 @@ def test_autodoc_member_order(app):
'undoc-members': None,
'private-members': None,
}
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.Class', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
# class methods
' .. py:method:: Class.moore(a, e, f) -> happiness',
@@ -1275,8 +1070,8 @@ def test_autodoc_member_order(app):
'undoc-members': None,
'private-members': None,
}
- actual = do_autodoc(app, 'class', 'target.Class', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('class', 'target.Class', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:class:: Class(arg)',
' .. py:attribute:: Class._private_inst_attr',
' .. py:method:: Class.a_staticmeth()',
@@ -1298,16 +1093,15 @@ def test_autodoc_member_order(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_module_member_order(app):
+def test_autodoc_module_member_order() -> None:
# case member-order='bysource'
options = {
'members': 'foo, Bar, baz, qux, Quux, foobar',
'member-order': 'bysource',
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.sort_by_all', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('module', 'target.sort_by_all', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:module:: target.sort_by_all',
'.. py:function:: baz()',
'.. py:function:: foo()',
@@ -1324,8 +1118,8 @@ def test_autodoc_module_member_order(app):
'undoc-members': None,
'ignore-module-all': None,
}
- actual = do_autodoc(app, 'module', 'target.sort_by_all', options)
- assert list(filter(lambda l: '::' in l, actual)) == [
+ actual = do_autodoc('module', 'target.sort_by_all', options=options)
+ assert [line for line in actual if '::' in line] == [
'.. py:module:: target.sort_by_all',
'.. py:function:: foo()',
'.. py:class:: Bar()',
@@ -1336,11 +1130,13 @@ def test_autodoc_module_member_order(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_module_scope(app):
- app.env.current_document.autodoc_module = 'target'
- actual = do_autodoc(app, 'attribute', 'Class.mdocattr')
- assert list(actual) == [
+def test_autodoc_module_scope() -> None:
+ current_document = _CurrentDocument(docname='index')
+ current_document.autodoc_module = 'target'
+ actual = do_autodoc(
+ 'attribute', 'Class.mdocattr', current_document=current_document
+ )
+ assert actual == [
'',
'.. py:attribute:: Class.mdocattr',
' :module: target',
@@ -1351,12 +1147,12 @@ def test_autodoc_module_scope(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_class_scope(app):
- app.env.current_document.autodoc_module = 'target'
- app.env.current_document.autodoc_class = 'Class'
- actual = do_autodoc(app, 'attribute', 'mdocattr')
- assert list(actual) == [
+def test_autodoc_class_scope() -> None:
+ current_document = _CurrentDocument(docname='index')
+ current_document.autodoc_module = 'target'
+ current_document.autodoc_class = 'Class'
+ actual = do_autodoc('attribute', 'mdocattr', current_document=current_document)
+ assert actual == [
'',
'.. py:attribute:: Class.mdocattr',
' :module: target',
@@ -1367,14 +1163,13 @@ def test_autodoc_class_scope(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_class_attributes(app):
+def test_class_attributes() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'class', 'target.AttCls', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.AttCls', options=options)
+ assert actual == [
'',
'.. py:class:: AttCls()',
' :module: target',
@@ -1392,11 +1187,11 @@ def test_class_attributes(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoclass_instance_attributes(app):
+def test_autoclass_instance_attributes() -> None:
+ options: dict[str, Any]
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.InstAttCls', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.InstAttCls', options=options)
+ assert actual == [
'',
'.. py:class:: InstAttCls()',
' :module: target',
@@ -1440,11 +1235,9 @@ def test_autoclass_instance_attributes(app):
]
# pick up arbitrary attributes
- options = {
- 'members': 'ca1,ia1',
- }
- actual = do_autodoc(app, 'class', 'target.InstAttCls', options)
- assert list(actual) == [
+ options = {'members': 'ca1,ia1'}
+ actual = do_autodoc('class', 'target.InstAttCls', options=options)
+ assert actual == [
'',
'.. py:class:: InstAttCls()',
' :module: target',
@@ -1468,10 +1261,9 @@ def test_autoclass_instance_attributes(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_instance_attributes(app):
- actual = do_autodoc(app, 'attribute', 'target.InstAttCls.ia1')
- assert list(actual) == [
+def test_autoattribute_instance_attributes() -> None:
+ actual = do_autodoc('attribute', 'target.InstAttCls.ia1')
+ assert actual == [
'',
'.. py:attribute:: InstAttCls.ia1',
' :module: target',
@@ -1481,14 +1273,13 @@ def test_autoattribute_instance_attributes(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_slots(app):
+def test_slots() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.slots', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.slots', options=options)
+ assert actual == [
'',
'.. py:module:: target.slots',
'',
@@ -1621,7 +1412,7 @@ def _preamble(
return self._node('class', self.name, doc, args=args, indent=indent, **options)
@staticmethod
- def _preamble_args(functional_constructor: bool = False):
+ def _preamble_args(functional_constructor: bool = False) -> str:
"""EnumType.__call__() is a dual-purpose method:
* Look an enum member (valid only if the enum has members)
@@ -1674,13 +1465,12 @@ def autodoc_enum_options() -> dict[str, object]:
return {'members': None, 'undoc-members': None}
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_class(app, autodoc_enum_options):
+def test_enum_class(autodoc_enum_options):
fmt = _EnumFormatter('EnumCls')
options = autodoc_enum_options | {'private-members': None}
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method(
'say_goodbye', 'a classmethod says good-bye to you.', 'classmethod'
@@ -1695,8 +1485,10 @@ def test_enum_class(app, autodoc_enum_options):
# Inherited members exclude the native Enum API (in particular
# the 'name' and 'value' properties), unless they were explicitly
# redefined by the user in one of the bases.
- actual = do_autodoc(app, 'class', fmt.target, options | {'inherited-members': None})
- assert list(actual) == [
+ actual = do_autodoc(
+ 'class', fmt.target, options=options | {'inherited-members': None}
+ )
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method(
'say_goodbye', 'a classmethod says good-bye to you.', 'classmethod'
@@ -1709,16 +1501,15 @@ def test_enum_class(app, autodoc_enum_options):
]
# checks for an attribute of EnumCls
- actual = do_autodoc(app, 'attribute', fmt.subtarget('val1'))
- assert list(actual) == fmt.member('val1', 12, 'doc for val1', indent=0)
+ actual = do_autodoc('attribute', fmt.subtarget('val1'))
+ assert actual == fmt.member('val1', 12, 'doc for val1', indent=0)
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_class_with_data_type(app, autodoc_enum_options):
+def test_enum_class_with_data_type(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithDataType')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('say_goodbye', 'docstring', 'classmethod'),
*fmt.method('say_hello', 'docstring'),
@@ -1726,8 +1517,8 @@ def test_enum_class_with_data_type(app, autodoc_enum_options):
]
options = autodoc_enum_options | {'inherited-members': None}
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.entry('dtype', 'docstring', role='property'),
*fmt.method('isupper', 'inherited'),
@@ -1737,12 +1528,11 @@ def test_enum_class_with_data_type(app, autodoc_enum_options):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_class_with_mixin_type(app, autodoc_enum_options):
+def test_enum_class_with_mixin_type(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithMixinType')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('say_goodbye', 'docstring', 'classmethod'),
*fmt.method('say_hello', 'docstring'),
@@ -1750,8 +1540,8 @@ def test_enum_class_with_mixin_type(app, autodoc_enum_options):
]
options = autodoc_enum_options | {'inherited-members': None}
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('say_goodbye', 'docstring', 'classmethod'),
*fmt.method('say_hello', 'docstring'),
@@ -1760,19 +1550,18 @@ def test_enum_class_with_mixin_type(app, autodoc_enum_options):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_class_with_mixin_type_and_inheritence(app, autodoc_enum_options):
+def test_enum_class_with_mixin_type_and_inheritence(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithMixinTypeInherit')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.member('x', 'X', ''),
]
options = autodoc_enum_options | {'inherited-members': None}
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('say_goodbye', 'inherited', 'classmethod'),
*fmt.method('say_hello', 'inherited'),
@@ -1781,12 +1570,11 @@ def test_enum_class_with_mixin_type_and_inheritence(app, autodoc_enum_options):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_class_with_mixin_enum_type(app, autodoc_enum_options):
+def test_enum_class_with_mixin_enum_type(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithMixinEnumType')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
# override() is overridden at the class level so it should be rendered
*fmt.method('override', 'overridden'),
@@ -1795,8 +1583,8 @@ def test_enum_class_with_mixin_enum_type(app, autodoc_enum_options):
]
options = autodoc_enum_options | {'inherited-members': None}
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('override', 'overridden'),
*fmt.method('say_goodbye', 'inherited', 'classmethod'),
@@ -1805,12 +1593,11 @@ def test_enum_class_with_mixin_enum_type(app, autodoc_enum_options):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_class_with_mixin_and_data_type(app, autodoc_enum_options):
+def test_enum_class_with_mixin_and_data_type(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithMixinAndDataType')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('isupper', 'overridden'),
*fmt.method('say_goodbye', 'overridden', 'classmethod'),
@@ -1820,8 +1607,8 @@ def test_enum_class_with_mixin_and_data_type(app, autodoc_enum_options):
# add the special member __str__ (but not the inherited members)
options = autodoc_enum_options | {'special-members': '__str__'}
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('__str__', 'overridden'),
*fmt.method('isupper', 'overridden'),
@@ -1831,8 +1618,8 @@ def test_enum_class_with_mixin_and_data_type(app, autodoc_enum_options):
]
options = autodoc_enum_options | {'inherited-members': None}
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.entry('dtype', 'docstring', role='property'),
*fmt.method('isupper', 'overridden'),
@@ -1843,12 +1630,11 @@ def test_enum_class_with_mixin_and_data_type(app, autodoc_enum_options):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_with_parent_enum(app, autodoc_enum_options):
+def test_enum_with_parent_enum(autodoc_enum_options):
fmt = _EnumFormatter('EnumClassWithParentEnum')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('isupper', 'overridden'),
*fmt.member('x', 'X', ''),
@@ -1856,8 +1642,8 @@ def test_enum_with_parent_enum(app, autodoc_enum_options):
# add the special member __str__ (but not the inherited members)
options = autodoc_enum_options | {'special-members': '__str__'}
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.method('__str__', 'overridden'),
*fmt.method('isupper', 'overridden'),
@@ -1865,8 +1651,8 @@ def test_enum_with_parent_enum(app, autodoc_enum_options):
]
options = autodoc_enum_options | {'inherited-members': None}
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_lookup('this is enum class'),
*fmt.entry('dtype', 'docstring', role='property'),
*fmt.method('isupper', 'overridden'),
@@ -1878,62 +1664,60 @@ def test_enum_with_parent_enum(app, autodoc_enum_options):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_sunder_method(app, autodoc_enum_options):
+def test_enum_sunder_method(autodoc_enum_options):
PRIVATE = {'private-members': None} # sunder methods are recognized as private
fmt = _EnumFormatter('EnumSunderMissingInNonEnumMixin')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options | PRIVATE)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options | PRIVATE)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumSunderMissingInEnumMixin')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options | PRIVATE)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options | PRIVATE)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumSunderMissingInDataType')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options | PRIVATE)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options | PRIVATE)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumSunderMissingInClass')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options | PRIVATE)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options | PRIVATE)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'docstring', 'classmethod', args='(value)'),
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_inherited_sunder_method(app, autodoc_enum_options):
+def test_enum_inherited_sunder_method(autodoc_enum_options):
options = autodoc_enum_options | {
'private-members': None,
'inherited-members': None,
}
fmt = _EnumFormatter('EnumSunderMissingInNonEnumMixin')
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'inherited', 'classmethod', args='(value)'),
]
fmt = _EnumFormatter('EnumSunderMissingInEnumMixin')
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'inherited', 'classmethod', args='(value)'),
]
fmt = _EnumFormatter('EnumSunderMissingInDataType')
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'inherited', 'classmethod', args='(value)'),
*fmt.entry('dtype', 'docstring', role='property'),
@@ -1941,56 +1725,54 @@ def test_enum_inherited_sunder_method(app, autodoc_enum_options):
]
fmt = _EnumFormatter('EnumSunderMissingInClass')
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.method('_missing_', 'docstring', 'classmethod', args='(value)'),
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_custom_name_property(app, autodoc_enum_options):
+def test_enum_custom_name_property(autodoc_enum_options):
fmt = _EnumFormatter('EnumNamePropertyInNonEnumMixin')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumNamePropertyInEnumMixin')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumNamePropertyInDataType')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [*fmt.preamble_constructor('this is enum class')]
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [*fmt.preamble_constructor('this is enum class')]
fmt = _EnumFormatter('EnumNamePropertyInClass')
- actual = do_autodoc(app, 'class', fmt.target, autodoc_enum_options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=autodoc_enum_options)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('name', 'docstring', role='property'),
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_enum_inherited_custom_name_property(app, autodoc_enum_options):
+def test_enum_inherited_custom_name_property(autodoc_enum_options):
options = autodoc_enum_options | {'inherited-members': None}
fmt = _EnumFormatter('EnumNamePropertyInNonEnumMixin')
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('name', 'inherited', role='property'),
]
fmt = _EnumFormatter('EnumNamePropertyInEnumMixin')
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('name', 'inherited', role='property'),
]
fmt = _EnumFormatter('EnumNamePropertyInDataType')
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('dtype', 'docstring', role='property'),
*fmt.method('isupper', 'inherited'),
@@ -1998,20 +1780,17 @@ def test_enum_inherited_custom_name_property(app, autodoc_enum_options):
]
fmt = _EnumFormatter('EnumNamePropertyInClass')
- actual = do_autodoc(app, 'class', fmt.target, options)
- assert list(actual) == [
+ actual = do_autodoc('class', fmt.target, options=options)
+ assert actual == [
*fmt.preamble_constructor('this is enum class'),
*fmt.entry('name', 'docstring', role='property'),
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_descriptor_class(app):
- options = {
- 'members': 'CustomDataDescriptor,CustomDataDescriptor2',
- }
- actual = do_autodoc(app, 'module', 'target.descriptor', options)
- assert list(actual) == [
+def test_descriptor_class() -> None:
+ options = {'members': 'CustomDataDescriptor,CustomDataDescriptor2'}
+ actual = do_autodoc('module', 'target.descriptor', options=options)
+ assert actual == [
'',
'.. py:module:: target.descriptor',
'',
@@ -2036,10 +1815,9 @@ def test_descriptor_class(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_automethod_for_builtin(app):
- actual = do_autodoc(app, 'method', 'builtins.int.__add__')
- assert list(actual) == [
+def test_automethod_for_builtin() -> None:
+ actual = do_autodoc('method', 'builtins.int.__add__')
+ assert actual == [
'',
'.. py:method:: int.__add__(value, /)',
' :module: builtins',
@@ -2049,10 +1827,9 @@ def test_automethod_for_builtin(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_automethod_for_decorated(app):
- actual = do_autodoc(app, 'method', 'target.decorator.Bar.meth')
- assert list(actual) == [
+def test_automethod_for_decorated() -> None:
+ actual = do_autodoc('method', 'target.decorator.Bar.meth')
+ assert actual == [
'',
'.. py:method:: Bar.meth(name=None, age=None)',
' :module: target.decorator',
@@ -2060,14 +1837,13 @@ def test_automethod_for_decorated(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_abstractmethods(app):
+def test_abstractmethods() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.abstractmethods', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.abstractmethods', options=options)
+ assert actual == [
'',
'.. py:module:: target.abstractmethods',
'',
@@ -2110,11 +1886,10 @@ def test_abstractmethods(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_partialfunction(app):
+def test_partialfunction() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.partialfunction', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.partialfunction', options=options)
+ assert actual == [
'',
'.. py:module:: target.partialfunction',
'',
@@ -2145,22 +1920,20 @@ def test_partialfunction(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_imported_partialfunction_should_not_shown_without_imported_members(app):
+def test_imported_partialfunction_should_not_shown_without_imported_members() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.imported_members', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.imported_members', options=options)
+ assert actual == [
'',
'.. py:module:: target.imported_members',
'',
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_bound_method(app):
+def test_bound_method() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.bound_method', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.bound_method', options=options)
+ assert actual == [
'',
'.. py:module:: target.bound_method',
'',
@@ -2173,8 +1946,7 @@ def test_bound_method(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_partialmethod(app):
+def test_partialmethod() -> None:
expected = [
'',
'.. py:class:: Cell()',
@@ -2199,12 +1971,11 @@ def test_partialmethod(app):
]
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.partialmethod.Cell', options)
- assert list(actual) == expected
+ actual = do_autodoc('class', 'target.partialmethod.Cell', options=options)
+ assert actual == expected
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_partialmethod_undoc_members(app):
+def test_partialmethod_undoc_members() -> None:
expected = [
'',
'.. py:class:: Cell()',
@@ -2236,18 +2007,22 @@ def test_partialmethod_undoc_members(app):
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'class', 'target.partialmethod.Cell', options)
- assert list(actual) == expected
+ actual = do_autodoc('class', 'target.partialmethod.Cell', options=options)
+ assert actual == expected
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_typed_instance_variables(app):
+def test_autodoc_typed_instance_variables() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.typed_vars', options)
- assert list(actual) == [
+ # First compute autodoc of a `Derived` member to verify that it
+ # doesn't result in inherited members in
+ # `Derived.__annotations__`.
+ # https://github.com/sphinx-doc/sphinx/issues/13934
+ do_autodoc('attribute', 'target.typed_vars.Derived.attr2')
+ actual = do_autodoc('module', 'target.typed_vars', options=options)
+ assert actual == [
'',
'.. py:module:: target.typed_vars',
'',
@@ -2340,15 +2115,14 @@ def test_autodoc_typed_instance_variables(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_typed_inherited_instance_variables(app):
+def test_autodoc_typed_inherited_instance_variables() -> None:
options = {
'members': None,
'undoc-members': None,
'inherited-members': None,
}
- actual = do_autodoc(app, 'class', 'target.typed_vars.Derived', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.typed_vars.Derived', options=options)
+ assert actual == [
'',
'.. py:class:: Derived()',
' :module: target.typed_vars',
@@ -2404,14 +2178,13 @@ def test_autodoc_typed_inherited_instance_variables(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_GenericAlias(app):
+def test_autodoc_GenericAlias() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.genericalias', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.genericalias', options=options)
+ assert actual == [
'',
'.. py:module:: target.genericalias',
'',
@@ -2447,14 +2220,152 @@ def test_autodoc_GenericAlias(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_TypeVar(app):
+@pytest.mark.skipif(
+ sys.version_info[:2] < (3, 12),
+ reason='type statement introduced in Python 3.12',
+)
+def test_autodoc_pep695_type_alias() -> None:
+ config = _AutodocConfig(
+ autodoc_type_aliases={
+ 'buffer_like': 'buffer_like',
+ 'pathlike': 'pathlike',
+ 'Handler': 'Handler',
+ }
+ )
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc('module', 'target.pep695', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:module:: target.pep695',
+ '',
+ '',
+ '.. py:class:: Bar',
+ ' :module: target.pep695',
+ '',
+ ' This is newtype of Pep695Alias.',
+ '',
+ ' alias of :py:type:`~target.pep695.Pep695Alias`',
+ '',
+ '',
+ '.. py:class:: Foo()',
+ ' :module: target.pep695',
+ '',
+ ' This is class Foo.',
+ '',
+ '',
+ '.. py:data:: Handler',
+ ' :module: target.pep695',
+ '',
+ ' A generic type alias',
+ '',
+ ' alias of :py:class:`type`\\ [:py:class:`Exception`]',
+ '',
+ '',
+ '.. py:type:: HandlerTypeAliasType',
+ ' :module: target.pep695',
+ ' :canonical: type[Exception]',
+ '',
+ ' This is an explicitly constructed generic alias typing.TypeAlias.',
+ '',
+ '',
+ '.. py:type:: Pep695Alias',
+ ' :module: target.pep695',
+ ' :canonical: ~target.pep695.Foo',
+ '',
+ ' This is PEP695 type alias.',
+ '',
+ '',
+ '.. py:type:: Pep695AliasC',
+ ' :module: target.pep695',
+ ' :canonical: dict[str, ~target.pep695.Foo]',
+ '',
+ ' This is PEP695 complex type alias with doc comment.',
+ '',
+ '',
+ '.. py:type:: Pep695AliasOfAlias',
+ ' :module: target.pep695',
+ ' :canonical: ~target.pep695.Pep695AliasC',
+ '',
+ ' This is PEP695 type alias of PEP695 alias.',
+ '',
+ '',
+ '.. py:type:: Pep695AliasUnion',
+ ' :module: target.pep695',
+ ' :canonical: str | int',
+ '',
+ ' This is PEP695 type alias for union.',
+ '',
+ '',
+ '.. py:type:: TypeAliasTypeExplicit',
+ ' :module: target.pep695',
+ ' :canonical: ~target.pep695.Foo',
+ '',
+ ' This is an explicitly constructed typing.TypeAlias.',
+ '',
+ '',
+ '.. py:type:: TypeAliasTypeExtension',
+ ' :module: target.pep695',
+ ' :canonical: ~target.pep695.Foo',
+ '',
+ ' This is an explicitly constructed typing_extensions.TypeAlias.',
+ '',
+ '',
+ '.. py:function:: buffer_len(data: buffer_like) -> int',
+ ' :module: target.pep695',
+ '',
+ ' Return length of a buffer-like object.',
+ '',
+ ' Tests Union type alias cross-reference resolution.',
+ '',
+ '',
+ '.. py:data:: buffer_like',
+ ' :module: target.pep695',
+ ' :value: bytes | bytearray | memoryview',
+ '',
+ ' Some buffer-like object',
+ '',
+ '',
+ '.. py:data:: pathlike',
+ ' :module: target.pep695',
+ f' :value: str | {pathlib.Path.__module__}.Path',
+ '',
+ ' Any type of path',
+ '',
+ '',
+ '.. py:function:: process_error(handler: Handler, other: ~target.pep695.HandlerTypeAliasType) -> str',
+ ' :module: target.pep695',
+ '',
+ ' Process an error with a custom handler type.',
+ '',
+ ' Tests generic type alias cross-reference resolution.',
+ '',
+ '',
+ '.. py:function:: read_file(path: pathlike) -> bytes',
+ ' :module: target.pep695',
+ '',
+ ' Read a file and return its contents.',
+ '',
+ ' Tests Union type alias cross-reference resolution.',
+ '',
+ '',
+ '.. py:function:: ret_pep695(a: ~target.pep695.Pep695Alias) -> ~target.pep695.Pep695Alias',
+ ' :module: target.pep695',
+ '',
+ ' This fn accepts and returns PEP695 alias.',
+ '',
+ ]
+
+
+def test_autodoc_TypeVar() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.typevar', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.typevar', options=options)
+ assert actual == [
'',
'.. py:module:: target.typevar',
'',
@@ -2529,14 +2440,13 @@ def test_autodoc_TypeVar(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_Annotated(app):
+def test_autodoc_Annotated() -> None:
options = {
'members': None,
'member-order': 'bysource',
}
- actual = do_autodoc(app, 'module', 'target.annotated', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.annotated', options=options)
+ assert actual == [
'',
'.. py:module:: target.annotated',
'',
@@ -2593,14 +2503,13 @@ def test_autodoc_Annotated(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_TYPE_CHECKING(app):
+def test_autodoc_TYPE_CHECKING() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.TYPE_CHECKING', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.TYPE_CHECKING', options=options)
+ assert actual == [
'',
'.. py:module:: target.TYPE_CHECKING',
'',
@@ -2620,14 +2529,13 @@ def test_autodoc_TYPE_CHECKING(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_TYPE_CHECKING_circular_import(app):
+def test_autodoc_TYPE_CHECKING_circular_import() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'circular_import', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'circular_import', options=options)
+ assert actual == [
'',
'.. py:module:: circular_import',
'',
@@ -2635,11 +2543,10 @@ def test_autodoc_TYPE_CHECKING_circular_import(app):
assert sys.modules['circular_import'].a is sys.modules['circular_import.a']
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_singledispatch(app):
+def test_singledispatch() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.singledispatch', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.singledispatch', options=options)
+ assert actual == [
'',
'.. py:module:: target.singledispatch',
'',
@@ -2656,11 +2563,10 @@ def test_singledispatch(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_singledispatchmethod(app):
+def test_singledispatchmethod() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.singledispatchmethod', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.singledispatchmethod', options=options)
+ assert actual == [
'',
'.. py:module:: target.singledispatchmethod',
'',
@@ -2683,11 +2589,9 @@ def test_singledispatchmethod(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_singledispatchmethod_automethod(app):
- options = {}
- actual = do_autodoc(app, 'method', 'target.singledispatchmethod.Foo.meth', options)
- assert list(actual) == [
+def test_singledispatchmethod_automethod() -> None:
+ actual = do_autodoc('method', 'target.singledispatchmethod.Foo.meth')
+ assert actual == [
'',
'.. py:method:: Foo.meth(arg, kwarg=None)',
' Foo.meth(arg: float, kwarg=None)',
@@ -2701,14 +2605,12 @@ def test_singledispatchmethod_automethod(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_singledispatchmethod_classmethod(app):
+def test_singledispatchmethod_classmethod() -> None:
options = {'members': None}
actual = do_autodoc(
- app, 'module', 'target.singledispatchmethod_classmethod', options
+ 'module', 'target.singledispatchmethod_classmethod', options=options
)
-
- assert list(actual) == [
+ assert actual == [
'',
'.. py:module:: target.singledispatchmethod_classmethod',
'',
@@ -2732,14 +2634,11 @@ def test_singledispatchmethod_classmethod(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_singledispatchmethod_classmethod_automethod(app):
- options = {}
+def test_singledispatchmethod_classmethod_automethod() -> None:
actual = do_autodoc(
- app, 'method', 'target.singledispatchmethod_classmethod.Foo.class_meth', options
+ 'method', 'target.singledispatchmethod_classmethod.Foo.class_meth'
)
-
- assert list(actual) == [
+ assert actual == [
'',
'.. py:method:: Foo.class_meth(arg, kwarg=None)',
' Foo.class_meth(arg: float, kwarg=None)',
@@ -2759,14 +2658,13 @@ def test_singledispatchmethod_classmethod_automethod(app):
reason='Cython does not support Python 3.13 yet.',
)
@pytest.mark.skipif(pyximport is None, reason='cython is not installed')
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_cython(app):
+def test_cython() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.cython', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.cython', options=options)
+ assert actual == [
'',
'.. py:module:: target.cython',
'',
@@ -2791,11 +2689,10 @@ def test_cython(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_final(app):
+def test_final() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.final', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.final', options=options)
+ assert actual == [
'',
'.. py:module:: target.final',
'',
@@ -2819,14 +2716,27 @@ def test_final(app):
'',
' docstring',
'',
+ '',
+ ' .. py:method:: Class.meth3()',
+ ' :module: target.final',
+ ' :final:',
+ '',
+ ' docstring',
+ '',
+ '',
+ ' .. py:method:: Class.meth4()',
+ ' :module: target.final',
+ ' :final:',
+ '',
+ ' docstring',
+ '',
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_overload(app):
+def test_overload() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.overload', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.overload', options=options)
+ assert actual == [
'',
'.. py:module:: target.overload',
'',
@@ -2876,11 +2786,10 @@ def test_overload(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_overload2(app):
+def test_overload2() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.overload2', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.overload2', options=options)
+ assert actual == [
'',
'.. py:module:: target.overload2',
'',
@@ -2892,11 +2801,29 @@ def test_overload2(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_pymodule_for_ModuleLevelDocumenter(app):
- app.env.ref_context['py:module'] = 'target.classes'
- actual = do_autodoc(app, 'class', 'Foo')
- assert list(actual) == [
+def test_overload3() -> None:
+ options = {'members': None}
+ actual = do_autodoc('module', 'target.overload3', options=options)
+ assert actual == [
+ '',
+ '.. py:module:: target.overload3',
+ '',
+ '',
+ '.. py:function:: test(x: int) -> int',
+ ' test(x: list[int]) -> list[int]',
+ ' test(x: str) -> str',
+ ' test(x: float) -> float',
+ ' :module: target.overload3',
+ '',
+ ' Documentation.',
+ '',
+ ]
+
+
+def test_pymodule_for_ModuleLevelDocumenter() -> None:
+ ref_context: dict[str, Any] = {'py:module': 'target.classes'}
+ actual = do_autodoc('class', 'Foo', ref_context=ref_context)
+ assert actual == [
'',
'.. py:class:: Foo()',
' :module: target.classes',
@@ -2904,11 +2831,10 @@ def test_pymodule_for_ModuleLevelDocumenter(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_pymodule_for_ClassLevelDocumenter(app):
- app.env.ref_context['py:module'] = 'target.methods'
- actual = do_autodoc(app, 'method', 'Base.meth')
- assert list(actual) == [
+def test_pymodule_for_ClassLevelDocumenter() -> None:
+ ref_context: dict[str, Any] = {'py:module': 'target.methods'}
+ actual = do_autodoc('method', 'Base.meth', ref_context=ref_context)
+ assert actual == [
'',
'.. py:method:: Base.meth()',
' :module: target.methods',
@@ -2916,12 +2842,10 @@ def test_pymodule_for_ClassLevelDocumenter(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_pyclass_for_ClassLevelDocumenter(app):
- app.env.ref_context['py:module'] = 'target.methods'
- app.env.ref_context['py:class'] = 'Base'
- actual = do_autodoc(app, 'method', 'meth')
- assert list(actual) == [
+def test_pyclass_for_ClassLevelDocumenter() -> None:
+ ref_context: dict[str, Any] = {'py:module': 'target.methods', 'py:class': 'Base'}
+ actual = do_autodoc('method', 'meth', ref_context=ref_context)
+ assert actual == [
'',
'.. py:method:: Base.meth()',
' :module: target.methods',
@@ -2929,32 +2853,55 @@ def test_pyclass_for_ClassLevelDocumenter(app):
]
-@pytest.mark.sphinx('dummy', testroot='ext-autodoc')
-def test_autodoc(app):
- app.build(force_all=True)
+def test_autodoc(caplog: pytest.LogCaptureFixture) -> None:
+ # work around sphinx.util.logging.setup()
+ logger = logging.getLogger('sphinx')
+ logger.handlers[:] = [caplog.handler]
+ caplog.set_level(logging.WARNING)
- content = app.env.get_doctree('index')
- assert isinstance(content[3], addnodes.desc)
- assert content[3][0].astext() == 'autodoc_dummy_module.test()'
- assert content[3][1].astext() == 'Dummy function using dummy.*'
+ config = _AutodocConfig(autodoc_mock_imports=['dummy'])
+ options = {'members': None}
+ actual = do_autodoc(
+ 'module', 'autodoc_dummy_module', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:module:: autodoc_dummy_module',
+ '',
+ '',
+ '.. py:function:: test()',
+ ' :module: autodoc_dummy_module',
+ '',
+ ' Dummy function using dummy.*',
+ '',
+ ]
# See: https://github.com/sphinx-doc/sphinx/issues/2437
- assert content[11][-1].astext() == (
- """Dummy class Bar with alias.
-
-
-
-my_name
+ do_autodoc('module', 'bug2437.autodoc_dummy_foo', options=options)
+ actual = do_autodoc('module', 'autodoc_dummy_bar', options=options)
+ assert actual == [
+ '',
+ '.. py:module:: autodoc_dummy_bar',
+ '',
+ '',
+ '.. py:class:: Bar()',
+ ' :module: autodoc_dummy_bar',
+ '',
+ ' Dummy class Bar with alias.',
+ '',
+ '',
+ ' .. py:attribute:: Bar.my_name',
+ ' :module: autodoc_dummy_bar',
+ '',
+ ' alias of :py:class:`~bug2437.autodoc_dummy_foo.Foo`',
+ ]
-alias of Foo"""
- )
- assert app.warning.getvalue() == ''
+ assert not caplog.records
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_name_conflict(app):
- actual = do_autodoc(app, 'class', 'target.name_conflict.foo')
- assert list(actual) == [
+def test_name_conflict() -> None:
+ actual = do_autodoc('class', 'target.name_conflict.foo')
+ assert actual == [
'',
'.. py:class:: foo()',
' :module: target.name_conflict',
@@ -2963,8 +2910,8 @@ def test_name_conflict(app):
'',
]
- actual = do_autodoc(app, 'class', 'target.name_conflict.foo.bar')
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.name_conflict.foo.bar')
+ assert actual == [
'',
'.. py:class:: bar()',
' :module: target.name_conflict.foo',
@@ -2974,15 +2921,14 @@ def test_name_conflict(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_name_mangling(app):
+def test_name_mangling() -> None:
options = {
'members': None,
'undoc-members': None,
'private-members': None,
}
- actual = do_autodoc(app, 'module', 'target.name_mangling', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.name_mangling', options=options)
+ assert actual == [
'',
'.. py:module:: target.name_mangling',
'',
@@ -3021,11 +2967,10 @@ def test_name_mangling(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_type_union_operator(app):
+def test_type_union_operator() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.pep604', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.pep604', options=options)
+ assert actual == [
'',
'.. py:module:: target.pep604',
'',
@@ -3064,11 +3009,10 @@ def test_type_union_operator(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_hide_value(app):
+def test_hide_value() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.hide_value', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.hide_value', options=options)
+ assert actual == [
'',
'.. py:module:: target.hide_value',
'',
@@ -3109,14 +3053,13 @@ def test_hide_value(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_canonical(app):
+def test_canonical() -> None:
options = {
'members': None,
'imported-members': None,
}
- actual = do_autodoc(app, 'module', 'target.canonical', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.canonical', options=options)
+ assert actual == [
'',
'.. py:module:: target.canonical',
'',
@@ -3166,8 +3109,9 @@ def function_rst(name, sig):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc', freshenv=True)
-def test_literal_render(app):
+def test_literal_render() -> None:
+ config = _AutodocConfig(autodoc_typehints_format='short')
+
# autodoc_typehints_format can take 'short' or 'fully-qualified' values
# and this will be interpreted as 'smart' or 'fully-qualified-except-typing' by restify()
# and 'smart' or 'fully-qualified' by stringify_annotation().
@@ -3176,9 +3120,8 @@ def test_literal_render(app):
'members': None,
'exclude-members': 'MyEnum',
}
- app.config.autodoc_typehints_format = 'short'
- actual = do_autodoc(app, 'module', 'target.literal', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.literal', config=config, options=options)
+ assert actual == [
'',
'.. py:module:: target.literal',
'',
@@ -3195,9 +3138,9 @@ def test_literal_render(app):
# restify() assumes that 'fully-qualified' is 'fully-qualified-except-typing'
# because it is more likely that a user wants to suppress 'typing.*'
- app.config.autodoc_typehints_format = 'fully-qualified'
- actual = do_autodoc(app, 'module', 'target.literal', options)
- assert list(actual) == [
+ config = _AutodocConfig(autodoc_typehints_format='fully-qualified')
+ actual = do_autodoc('module', 'target.literal', config=config, options=options)
+ assert actual == [
'',
'.. py:module:: target.literal',
'',
@@ -3216,20 +3159,17 @@ def test_literal_render(app):
]
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- freshenv=True,
- confoverrides={'python_display_short_literal_types': True},
-)
-def test_literal_render_pep604(app):
+def test_literal_render_pep604() -> None:
+ config = _AutodocConfig(
+ python_display_short_literal_types=True,
+ autodoc_typehints_format='short',
+ )
options = {
'members': None,
'exclude-members': 'MyEnum',
}
- app.config.autodoc_typehints_format = 'short'
- actual = do_autodoc(app, 'module', 'target.literal', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.literal', config=config, options=options)
+ assert actual == [
'',
'.. py:module:: target.literal',
'',
@@ -3246,9 +3186,12 @@ def test_literal_render_pep604(app):
# restify() assumes that 'fully-qualified' is 'fully-qualified-except-typing'
# because it is more likely that a user wants to suppress 'typing.*'
- app.config.autodoc_typehints_format = 'fully-qualified'
- actual = do_autodoc(app, 'module', 'target.literal', options)
- assert list(actual) == [
+ config = _AutodocConfig(
+ python_display_short_literal_types=True,
+ autodoc_typehints_format='fully-qualified',
+ )
+ actual = do_autodoc('module', 'target.literal', config=config, options=options)
+ assert actual == [
'',
'.. py:module:: target.literal',
'',
@@ -3264,30 +3207,29 @@ def test_literal_render_pep604(app):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_no_index_entry(app):
+def test_no_index_entry() -> None:
# modules can use no-index-entry
options = {'no-index-entry': None}
- actual = do_autodoc(app, 'module', 'target.module', options)
- assert ' :no-index-entry:' in list(actual)
+ actual = do_autodoc('module', 'target.module', options=options)
+ assert ' :no-index-entry:' in actual
# classes can use no-index-entry
- actual = do_autodoc(app, 'class', 'target.classes.Foo', options)
- assert ' :no-index-entry:' in list(actual)
+ actual = do_autodoc('class', 'target.classes.Foo', options=options)
+ assert ' :no-index-entry:' in actual
# functions can use no-index-entry
- actual = do_autodoc(app, 'function', 'target.functions.func', options)
- assert ' :no-index-entry:' in list(actual)
+ actual = do_autodoc('function', 'target.functions.func', options=options)
+ assert ' :no-index-entry:' in actual
# modules respect no-index-entry in autodoc_default_options
- app.config.autodoc_default_options = {'no-index-entry': True}
- actual = do_autodoc(app, 'module', 'target.module')
- assert ' :no-index-entry:' in list(actual)
+ config = _AutodocConfig(autodoc_default_options={'no-index-entry': True})
+ actual = do_autodoc('module', 'target.module', config=config)
+ assert ' :no-index-entry:' in actual
# classes respect config-level no-index-entry
- actual = do_autodoc(app, 'class', 'target.classes.Foo')
- assert ' :no-index-entry:' in list(actual)
+ actual = do_autodoc('class', 'target.classes.Foo', config=config)
+ assert ' :no-index-entry:' in actual
# functions respect config-level no-index-entry
- actual = do_autodoc(app, 'function', 'target.functions.func')
- assert ' :no-index-entry:' in list(actual)
+ actual = do_autodoc('function', 'target.functions.func', config=config)
+ assert ' :no-index-entry:' in actual
diff --git a/tests/test_ext_autodoc/test_ext_autodoc_autoattribute.py b/tests/test_ext_autodoc/test_ext_autodoc_autoattribute.py
new file mode 100644
index 00000000000..ba45fd52b7c
--- /dev/null
+++ b/tests/test_ext_autodoc/test_ext_autodoc_autoattribute.py
@@ -0,0 +1,168 @@
+"""Test the autodoc extension.
+
+This tests mainly the Documenters; the auto directives are tested in a test
+source file translated by test_build.
+"""
+
+from __future__ import annotations
+
+import pytest
+
+from tests.test_ext_autodoc.autodoc_util import do_autodoc
+
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
+
+
+def test_autoattribute() -> None:
+ actual = do_autodoc('attribute', 'target.Class.attr')
+ assert actual == [
+ '',
+ '.. py:attribute:: Class.attr',
+ ' :module: target',
+ " :value: 'bar'",
+ '',
+ ' should be documented -- süß',
+ '',
+ ]
+
+
+def test_autoattribute_novalue() -> None:
+ options = {'no-value': None}
+ actual = do_autodoc('attribute', 'target.Class.attr', options=options)
+ assert actual == [
+ '',
+ '.. py:attribute:: Class.attr',
+ ' :module: target',
+ '',
+ ' should be documented -- süß',
+ '',
+ ]
+
+
+def test_autoattribute_typed_variable() -> None:
+ actual = do_autodoc('attribute', 'target.typed_vars.Class.attr2')
+ assert actual == [
+ '',
+ '.. py:attribute:: Class.attr2',
+ ' :module: target.typed_vars',
+ ' :type: int',
+ '',
+ ]
+
+
+def test_autoattribute_typed_variable_in_alias() -> None:
+ actual = do_autodoc('attribute', 'target.typed_vars.Alias.attr2')
+ assert actual == [
+ '',
+ '.. py:attribute:: Alias.attr2',
+ ' :module: target.typed_vars',
+ ' :type: int',
+ '',
+ ]
+
+
+def test_autoattribute_instance_variable() -> None:
+ actual = do_autodoc('attribute', 'target.typed_vars.Class.attr4')
+ assert actual == [
+ '',
+ '.. py:attribute:: Class.attr4',
+ ' :module: target.typed_vars',
+ ' :type: int',
+ '',
+ ' attr4',
+ '',
+ ]
+
+
+def test_autoattribute_instance_variable_in_alias() -> None:
+ actual = do_autodoc('attribute', 'target.typed_vars.Alias.attr4')
+ assert actual == [
+ '',
+ '.. py:attribute:: Alias.attr4',
+ ' :module: target.typed_vars',
+ ' :type: int',
+ '',
+ ' attr4',
+ '',
+ ]
+
+
+def test_autoattribute_instance_variable_without_comment() -> None:
+ actual = do_autodoc('attribute', 'target.instance_variable.Bar.attr4')
+ assert actual == [
+ '',
+ '.. py:attribute:: Bar.attr4',
+ ' :module: target.instance_variable',
+ '',
+ ]
+
+
+def test_autoattribute_slots_variable_list() -> None:
+ actual = do_autodoc('attribute', 'target.slots.Foo.attr')
+ assert actual == [
+ '',
+ '.. py:attribute:: Foo.attr',
+ ' :module: target.slots',
+ '',
+ ]
+
+
+def test_autoattribute_slots_variable_dict() -> None:
+ actual = do_autodoc('attribute', 'target.slots.Bar.attr1')
+ assert actual == [
+ '',
+ '.. py:attribute:: Bar.attr1',
+ ' :module: target.slots',
+ ' :type: int',
+ '',
+ ' docstring of attr1',
+ '',
+ ]
+
+
+def test_autoattribute_slots_variable_str() -> None:
+ actual = do_autodoc('attribute', 'target.slots.Baz.attr')
+ assert actual == [
+ '',
+ '.. py:attribute:: Baz.attr',
+ ' :module: target.slots',
+ '',
+ ]
+
+
+def test_autoattribute_GenericAlias() -> None:
+ actual = do_autodoc('attribute', 'target.genericalias.Class.T')
+ assert actual == [
+ '',
+ '.. py:attribute:: Class.T',
+ ' :module: target.genericalias',
+ '',
+ ' A list of int',
+ '',
+ ' alias of :py:class:`~typing.List`\\ [:py:class:`int`]',
+ '',
+ ]
+
+
+def test_autoattribute_hide_value() -> None:
+ actual = do_autodoc('attribute', 'target.hide_value.Foo.SENTINEL1')
+ assert actual == [
+ '',
+ '.. py:attribute:: Foo.SENTINEL1',
+ ' :module: target.hide_value',
+ '',
+ ' docstring',
+ '',
+ ' :meta hide-value:',
+ '',
+ ]
+
+ actual = do_autodoc('attribute', 'target.hide_value.Foo.SENTINEL2')
+ assert actual == [
+ '',
+ '.. py:attribute:: Foo.SENTINEL2',
+ ' :module: target.hide_value',
+ '',
+ ' :meta hide-value:',
+ '',
+ ]
diff --git a/tests/test_extensions/test_ext_autodoc_autoclass.py b/tests/test_ext_autodoc/test_ext_autodoc_autoclass.py
similarity index 64%
rename from tests/test_extensions/test_ext_autodoc_autoclass.py
rename to tests/test_ext_autodoc/test_ext_autodoc_autoclass.py
index 04499efd183..e37e112fd5a 100644
--- a/tests/test_extensions/test_ext_autodoc_autoclass.py
+++ b/tests/test_ext_autodoc/test_ext_autodoc_autoclass.py
@@ -10,40 +10,38 @@
import pytest
-from tests.test_extensions.autodoc_util import do_autodoc
+from tests.test_ext_autodoc.autodoc_util import FakeEvents, do_autodoc
-if typing.TYPE_CHECKING:
- from sphinx.testing.util import SphinxTestApp
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_classes(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.classes.Foo')
- assert list(actual) == [
+def test_classes() -> None:
+ actual = do_autodoc('function', 'target.classes.Foo')
+ assert actual == [
'',
'.. py:function:: Foo()',
' :module: target.classes',
'',
]
- actual = do_autodoc(app, 'function', 'target.classes.Bar')
- assert list(actual) == [
+ actual = do_autodoc('function', 'target.classes.Bar')
+ assert actual == [
'',
'.. py:function:: Bar(x, y)',
' :module: target.classes',
'',
]
- actual = do_autodoc(app, 'function', 'target.classes.Baz')
- assert list(actual) == [
+ actual = do_autodoc('function', 'target.classes.Baz')
+ assert actual == [
'',
'.. py:function:: Baz(x, y)',
' :module: target.classes',
'',
]
- actual = do_autodoc(app, 'function', 'target.classes.Qux')
- assert list(actual) == [
+ actual = do_autodoc('function', 'target.classes.Qux')
+ assert actual == [
'',
'.. py:function:: Qux(foo, bar)',
' :module: target.classes',
@@ -51,11 +49,10 @@ def test_classes(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_instance_variable(app: SphinxTestApp) -> None:
+def test_instance_variable() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.instance_variable.Bar', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.instance_variable.Bar', options=options)
+ assert actual == [
'',
'.. py:class:: Bar()',
' :module: target.instance_variable',
@@ -75,14 +72,13 @@ def test_instance_variable(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_inherited_instance_variable(app: SphinxTestApp) -> None:
+def test_inherited_instance_variable() -> None:
options = {
'members': None,
'inherited-members': None,
}
- actual = do_autodoc(app, 'class', 'target.instance_variable.Bar', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.instance_variable.Bar', options=options)
+ assert actual == [
'',
'.. py:class:: Bar()',
' :module: target.instance_variable',
@@ -108,16 +104,15 @@ def test_inherited_instance_variable(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_uninitialized_attributes(app: SphinxTestApp) -> None:
+def test_uninitialized_attributes() -> None:
options = {
'members': None,
'inherited-members': None,
}
actual = do_autodoc(
- app, 'class', 'target.uninitialized_attributes.Derived', options
+ 'class', 'target.uninitialized_attributes.Derived', options=options
)
- assert list(actual) == [
+ assert actual == [
'',
'.. py:class:: Derived()',
' :module: target.uninitialized_attributes',
@@ -139,17 +134,16 @@ def test_uninitialized_attributes(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_undocumented_uninitialized_attributes(app: SphinxTestApp) -> None:
+def test_undocumented_uninitialized_attributes() -> None:
options = {
'members': None,
'inherited-members': None,
'undoc-members': None,
}
actual = do_autodoc(
- app, 'class', 'target.uninitialized_attributes.Derived', options
+ 'class', 'target.uninitialized_attributes.Derived', options=options
)
- assert list(actual) == [
+ assert actual == [
'',
'.. py:class:: Derived()',
' :module: target.uninitialized_attributes',
@@ -181,26 +175,25 @@ def test_undocumented_uninitialized_attributes(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_decorators(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'class', 'target.decorator.Baz')
- assert list(actual) == [
+def test_decorators() -> None:
+ actual = do_autodoc('class', 'target.decorator.Baz')
+ assert actual == [
'',
'.. py:class:: Baz(name=None, age=None)',
' :module: target.decorator',
'',
]
- actual = do_autodoc(app, 'class', 'target.decorator.Qux')
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.decorator.Qux')
+ assert actual == [
'',
'.. py:class:: Qux(name=None, age=None)',
' :module: target.decorator',
'',
]
- actual = do_autodoc(app, 'class', 'target.decorator.Quux')
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.decorator.Quux')
+ assert actual == [
'',
'.. py:class:: Quux(name=None, age=None)',
' :module: target.decorator',
@@ -208,11 +201,10 @@ def test_decorators(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_properties(app: SphinxTestApp) -> None:
+def test_properties() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.properties.Foo', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.properties.Foo', options=options)
+ assert actual == [
'',
'.. py:class:: Foo()',
' :module: target.properties',
@@ -252,11 +244,10 @@ def test_properties(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_slots_attribute(app: SphinxTestApp) -> None:
+def test_slots_attribute() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.slots.Bar', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.slots.Bar', options=options)
+ assert actual == [
'',
'.. py:class:: Bar()',
' :module: target.slots',
@@ -279,11 +270,10 @@ def test_slots_attribute(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_show_inheritance_for_subclass_of_generic_type(app: SphinxTestApp) -> None:
+def test_show_inheritance_for_subclass_of_generic_type() -> None:
options = {'show-inheritance': None}
- actual = do_autodoc(app, 'class', 'target.classes.Quux', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.classes.Quux', options=options)
+ assert actual == [
'',
'.. py:class:: Quux(iterable=(), /)',
' :module: target.classes',
@@ -295,11 +285,10 @@ def test_show_inheritance_for_subclass_of_generic_type(app: SphinxTestApp) -> No
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_show_inheritance_for_decendants_of_generic_type(app: SphinxTestApp) -> None:
+def test_show_inheritance_for_decendants_of_generic_type() -> None:
options = {'show-inheritance': None}
- actual = do_autodoc(app, 'class', 'target.classes.Corge', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.classes.Corge', options=options)
+ assert actual == [
'',
'.. py:class:: Corge(iterable=(), /)',
' :module: target.classes',
@@ -309,23 +298,23 @@ def test_show_inheritance_for_decendants_of_generic_type(app: SphinxTestApp) ->
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_process_bases(app: SphinxTestApp) -> None:
+def test_autodoc_process_bases() -> None:
def autodoc_process_bases(app, name, obj, options, bases):
assert name == 'target.classes.Quux'
assert obj.__module__ == 'target.classes'
assert obj.__name__ == 'Quux'
- assert options == {'show-inheritance': True, 'members': []}
+ assert vars(options) == {}
assert bases == [typing.List[typing.Union[int, float]]] # NoQA: UP006, UP007
bases.pop()
bases.extend([int, str])
- app.connect('autodoc-process-bases', autodoc_process_bases)
+ events = FakeEvents()
+ events.connect('autodoc-process-bases', autodoc_process_bases)
options = {'show-inheritance': None}
- actual = do_autodoc(app, 'class', 'target.classes.Quux', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.classes.Quux', events=events, options=options)
+ assert actual == [
'',
'.. py:class:: Quux(iterable=(), /)',
' :module: target.classes',
@@ -337,14 +326,13 @@ def autodoc_process_bases(app, name, obj, options, bases):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_class_doc_from_class(app: SphinxTestApp) -> None:
+def test_class_doc_from_class() -> None:
options = {
'members': None,
'class-doc-from': 'class',
}
- actual = do_autodoc(app, 'class', 'target.autoclass_content.C', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.autoclass_content.C', options=options)
+ assert actual == [
'',
'.. py:class:: C()',
' :module: target.autoclass_content',
@@ -354,14 +342,13 @@ def test_class_doc_from_class(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_class_doc_from_init(app: SphinxTestApp) -> None:
+def test_class_doc_from_init() -> None:
options = {
'members': None,
'class-doc-from': 'init',
}
- actual = do_autodoc(app, 'class', 'target.autoclass_content.C', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.autoclass_content.C', options=options)
+ assert actual == [
'',
'.. py:class:: C()',
' :module: target.autoclass_content',
@@ -371,14 +358,13 @@ def test_class_doc_from_init(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_class_doc_from_both(app: SphinxTestApp) -> None:
+def test_class_doc_from_both() -> None:
options = {
'members': None,
'class-doc-from': 'both',
}
- actual = do_autodoc(app, 'class', 'target.autoclass_content.C', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.autoclass_content.C', options=options)
+ assert actual == [
'',
'.. py:class:: C()',
' :module: target.autoclass_content',
@@ -390,17 +376,17 @@ def test_class_doc_from_both(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_class_alias(app: SphinxTestApp) -> None:
+def test_class_alias() -> None:
def autodoc_process_docstring(*args):
"""A handler always raises an error.
This confirms this handler is never called for class aliases.
"""
raise RuntimeError
- app.connect('autodoc-process-docstring', autodoc_process_docstring)
- actual = do_autodoc(app, 'class', 'target.classes.Alias')
- assert list(actual) == [
+ events = FakeEvents()
+ events.connect('autodoc-process-docstring', autodoc_process_docstring)
+ actual = do_autodoc('class', 'target.classes.Alias', events=events)
+ assert actual == [
'',
'.. py:attribute:: Alias',
' :module: target.classes',
@@ -409,10 +395,9 @@ def autodoc_process_docstring(*args):
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_class_alias_having_doccomment(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'class', 'target.classes.OtherAlias')
- assert list(actual) == [
+def test_class_alias_having_doccomment() -> None:
+ actual = do_autodoc('class', 'target.classes.OtherAlias')
+ assert actual == [
'',
'.. py:attribute:: OtherAlias',
' :module: target.classes',
@@ -422,10 +407,9 @@ def test_class_alias_having_doccomment(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_class_alias_for_imported_object_having_doccomment(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'class', 'target.classes.IntAlias')
- assert list(actual) == [
+def test_class_alias_for_imported_object_having_doccomment() -> None:
+ actual = do_autodoc('class', 'target.classes.IntAlias')
+ assert actual == [
'',
'.. py:attribute:: IntAlias',
' :module: target.classes',
@@ -435,11 +419,10 @@ def test_class_alias_for_imported_object_having_doccomment(app: SphinxTestApp) -
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_coroutine(app: SphinxTestApp) -> None:
+def test_coroutine() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.coroutine.AsyncClass', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.coroutine.AsyncClass', options=options)
+ assert actual == [
'',
'.. py:class:: AsyncClass()',
' :module: target.coroutine',
@@ -477,10 +460,9 @@ def test_coroutine(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodata_NewType_module_level(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'class', 'target.typevar.T6')
- assert list(actual) == [
+def test_autodata_NewType_module_level() -> None:
+ actual = do_autodoc('class', 'target.typevar.T6')
+ assert actual == [
'',
'.. py:class:: T6',
' :module: target.typevar',
@@ -492,10 +474,9 @@ def test_autodata_NewType_module_level(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_NewType_class_level(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'class', 'target.typevar.Class.T6')
- assert list(actual) == [
+def test_autoattribute_NewType_class_level() -> None:
+ actual = do_autodoc('class', 'target.typevar.Class.T6')
+ assert actual == [
'',
'.. py:class:: Class.T6',
' :module: target.typevar',
@@ -507,10 +488,9 @@ def test_autoattribute_NewType_class_level(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodata_TypeVar_class_level(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'class', 'target.typevar.T1')
- assert list(actual) == [
+def test_autodata_TypeVar_class_level() -> None:
+ actual = do_autodoc('class', 'target.typevar.T1')
+ assert actual == [
'',
'.. py:class:: T1',
' :module: target.typevar',
@@ -522,10 +502,9 @@ def test_autodata_TypeVar_class_level(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_TypeVar_module_level(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'class', 'target.typevar.Class.T1')
- assert list(actual) == [
+def test_autoattribute_TypeVar_module_level() -> None:
+ actual = do_autodoc('class', 'target.typevar.Class.T1')
+ assert actual == [
'',
'.. py:class:: Class.T1',
' :module: target.typevar',
@@ -537,16 +516,15 @@ def test_autoattribute_TypeVar_module_level(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_inherited_instance_variable_with_annotations(app: SphinxTestApp) -> None:
+def test_inherited_instance_variable_with_annotations() -> None:
options = {
'members': None,
'inherited-members': None,
}
actual = do_autodoc(
- app, 'class', 'target.inherited_annotations.NoTypeAnnotation', options
+ 'class', 'target.inherited_annotations.NoTypeAnnotation', options=options
)
- assert list(actual) == [
+ assert actual == [
'',
'.. py:class:: NoTypeAnnotation()',
' :module: target.inherited_annotations',
@@ -568,13 +546,12 @@ def test_inherited_instance_variable_with_annotations(app: SphinxTestApp) -> Non
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_no_inherited_instance_variable_with_annotations(app: SphinxTestApp) -> None:
+def test_no_inherited_instance_variable_with_annotations() -> None:
options = {'members': None}
actual = do_autodoc(
- app, 'class', 'target.inherited_annotations.NoTypeAnnotation2', options
+ 'class', 'target.inherited_annotations.NoTypeAnnotation2', options=options
)
- assert list(actual) == [
+ assert actual == [
'',
'.. py:class:: NoTypeAnnotation2()',
' :module: target.inherited_annotations',
diff --git a/tests/test_ext_autodoc/test_ext_autodoc_autodata.py b/tests/test_ext_autodoc/test_ext_autodoc_autodata.py
new file mode 100644
index 00000000000..853d6dcdc28
--- /dev/null
+++ b/tests/test_ext_autodoc/test_ext_autodoc_autodata.py
@@ -0,0 +1,104 @@
+"""Test the autodoc extension.
+
+This tests mainly the Documenters; the auto directives are tested in a test
+source file translated by test_build.
+"""
+
+from __future__ import annotations
+
+import pytest
+
+from tests.test_ext_autodoc.autodoc_util import do_autodoc
+
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
+
+
+def test_autodata() -> None:
+ actual = do_autodoc('data', 'target.integer')
+ assert actual == [
+ '',
+ '.. py:data:: integer',
+ ' :module: target',
+ ' :value: 1',
+ '',
+ ' documentation for the integer',
+ '',
+ ]
+
+
+def test_autodata_novalue() -> None:
+ options = {'no-value': None}
+ actual = do_autodoc('data', 'target.integer', options=options)
+ assert actual == [
+ '',
+ '.. py:data:: integer',
+ ' :module: target',
+ '',
+ ' documentation for the integer',
+ '',
+ ]
+
+
+def test_autodata_typed_variable() -> None:
+ actual = do_autodoc('data', 'target.typed_vars.attr2')
+ assert actual == [
+ '',
+ '.. py:data:: attr2',
+ ' :module: target.typed_vars',
+ ' :type: str',
+ '',
+ ' attr2',
+ '',
+ ]
+
+
+def test_autodata_type_comment() -> None:
+ actual = do_autodoc('data', 'target.typed_vars.attr3')
+ assert actual == [
+ '',
+ '.. py:data:: attr3',
+ ' :module: target.typed_vars',
+ ' :type: str',
+ " :value: ''",
+ '',
+ ' attr3',
+ '',
+ ]
+
+
+def test_autodata_GenericAlias() -> None:
+ actual = do_autodoc('data', 'target.genericalias.T')
+ assert actual == [
+ '',
+ '.. py:data:: T',
+ ' :module: target.genericalias',
+ '',
+ ' A list of int',
+ '',
+ ' alias of :py:class:`~typing.List`\\ [:py:class:`int`]',
+ '',
+ ]
+
+
+def test_autodata_hide_value() -> None:
+ actual = do_autodoc('data', 'target.hide_value.SENTINEL1')
+ assert actual == [
+ '',
+ '.. py:data:: SENTINEL1',
+ ' :module: target.hide_value',
+ '',
+ ' docstring',
+ '',
+ ' :meta hide-value:',
+ '',
+ ]
+
+ actual = do_autodoc('data', 'target.hide_value.SENTINEL2')
+ assert actual == [
+ '',
+ '.. py:data:: SENTINEL2',
+ ' :module: target.hide_value',
+ '',
+ ' :meta hide-value:',
+ '',
+ ]
diff --git a/tests/test_ext_autodoc/test_ext_autodoc_autofunction.py b/tests/test_ext_autodoc/test_ext_autodoc_autofunction.py
new file mode 100644
index 00000000000..485329ebb37
--- /dev/null
+++ b/tests/test_ext_autodoc/test_ext_autodoc_autofunction.py
@@ -0,0 +1,201 @@
+"""Test the autodoc extension.
+
+This tests mainly the Documenters; the auto directives are tested in a test
+source file translated by test_build.
+"""
+
+from __future__ import annotations
+
+import pytest
+
+from tests.test_ext_autodoc.autodoc_util import do_autodoc
+
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
+
+
+def test_classes() -> None:
+ actual = do_autodoc('function', 'target.classes.Foo')
+ assert actual == [
+ '',
+ '.. py:function:: Foo()',
+ ' :module: target.classes',
+ '',
+ ]
+
+ actual = do_autodoc('function', 'target.classes.Bar')
+ assert actual == [
+ '',
+ '.. py:function:: Bar(x, y)',
+ ' :module: target.classes',
+ '',
+ ]
+
+ actual = do_autodoc('function', 'target.classes.Baz')
+ assert actual == [
+ '',
+ '.. py:function:: Baz(x, y)',
+ ' :module: target.classes',
+ '',
+ ]
+
+ actual = do_autodoc('function', 'target.classes.Qux')
+ assert actual == [
+ '',
+ '.. py:function:: Qux(foo, bar)',
+ ' :module: target.classes',
+ '',
+ ]
+
+
+def test_callable() -> None:
+ actual = do_autodoc('function', 'target.callable.function')
+ assert actual == [
+ '',
+ '.. py:function:: function(arg1, arg2, **kwargs)',
+ ' :module: target.callable',
+ '',
+ ' A callable object that behaves like a function.',
+ '',
+ ]
+
+
+def test_method() -> None:
+ actual = do_autodoc('function', 'target.callable.method')
+ assert actual == [
+ '',
+ '.. py:function:: method(arg1, arg2)',
+ ' :module: target.callable',
+ '',
+ ' docstring of Callable.method().',
+ '',
+ ]
+
+
+def test_builtin_function() -> None:
+ actual = do_autodoc('function', 'os.umask')
+ assert actual == [
+ '',
+ '.. py:function:: umask(mask, /)',
+ ' :module: os',
+ '',
+ ' Set the current numeric umask and return the previous umask.',
+ '',
+ ]
+
+
+def test_methoddescriptor() -> None:
+ actual = do_autodoc('function', 'builtins.int.__add__')
+ assert actual == [
+ '',
+ '.. py:function:: __add__(self, value, /)',
+ ' :module: builtins.int',
+ '',
+ ' Return self+value.',
+ '',
+ ]
+
+
+def test_decorated() -> None:
+ actual = do_autodoc('function', 'target.decorator.foo')
+ assert actual == [
+ '',
+ '.. py:function:: foo(name=None, age=None)',
+ ' :module: target.decorator',
+ '',
+ ]
+
+
+def test_singledispatch() -> None:
+ actual = do_autodoc('function', 'target.singledispatch.func')
+ assert actual == [
+ '',
+ '.. py:function:: func(arg, kwarg=None)',
+ ' func(arg: float, kwarg=None)',
+ ' func(arg: int, kwarg=None)',
+ ' func(arg: str, kwarg=None)',
+ ' func(arg: dict, kwarg=None)',
+ ' :module: target.singledispatch',
+ '',
+ ' A function for general use.',
+ '',
+ ]
+
+
+def test_cfunction() -> None:
+ actual = do_autodoc('function', 'time.asctime')
+ assert actual == [
+ '',
+ '.. py:function:: asctime([tuple]) -> string',
+ ' :module: time',
+ '',
+ " Convert a time tuple to a string, e.g. 'Sat Jun 06 16:26:11 1998'.",
+ ' When the time tuple is not present, current time as returned by localtime()',
+ ' is used.',
+ '',
+ ]
+
+
+def test_wrapped_function() -> None:
+ actual = do_autodoc('function', 'target.wrappedfunction.slow_function')
+ assert actual == [
+ '',
+ '.. py:function:: slow_function(message, timeout)',
+ ' :module: target.wrappedfunction',
+ '',
+ ' This function is slow.',
+ '',
+ ]
+
+
+def test_wrapped_function_contextmanager() -> None:
+ actual = do_autodoc('function', 'target.wrappedfunction.feeling_good')
+ assert actual == [
+ '',
+ '.. py:function:: feeling_good(x: int, y: int) -> ~typing.Generator',
+ ' :module: target.wrappedfunction',
+ '',
+ " You'll feel better in this context!",
+ '',
+ ]
+
+
+def test_coroutine() -> None:
+ actual = do_autodoc('function', 'target.functions.coroutinefunc')
+ assert actual == [
+ '',
+ '.. py:function:: coroutinefunc()',
+ ' :module: target.functions',
+ ' :async:',
+ '',
+ ]
+
+
+def test_synchronized_coroutine() -> None:
+ actual = do_autodoc('function', 'target.coroutine.sync_func')
+ assert actual == [
+ '',
+ '.. py:function:: sync_func()',
+ ' :module: target.coroutine',
+ '',
+ ]
+
+
+def test_async_generator() -> None:
+ actual = do_autodoc('function', 'target.functions.asyncgenerator')
+ assert actual == [
+ '',
+ '.. py:function:: asyncgenerator()',
+ ' :module: target.functions',
+ ' :async:',
+ '',
+ ]
+
+
+def test_slice_function_arg() -> None:
+ actual = do_autodoc('function', 'target.functions.slice_arg_func')
+ assert actual == [
+ '',
+ '.. py:function:: slice_arg_func(arg: float64[:, :])',
+ ' :module: target.functions',
+ '',
+ ]
diff --git a/tests/test_extensions/test_ext_autodoc_automodule.py b/tests/test_ext_autodoc/test_ext_autodoc_automodule.py
similarity index 75%
rename from tests/test_extensions/test_ext_autodoc_automodule.py
rename to tests/test_ext_autodoc/test_ext_autodoc_automodule.py
index 9404e01b730..745447f8f1f 100644
--- a/tests/test_extensions/test_ext_autodoc_automodule.py
+++ b/tests/test_ext_autodoc/test_ext_autodoc_automodule.py
@@ -12,17 +12,18 @@
import pytest
-from tests.test_extensions.autodoc_util import do_autodoc
+from sphinx.ext.autodoc._shared import _AutodocConfig
+from sphinx.ext.autodoc.mock import _MockObject
-if typing.TYPE_CHECKING:
- from sphinx.testing.util import SphinxTestApp
+from tests.test_ext_autodoc.autodoc_util import do_autodoc
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_empty_all(app: SphinxTestApp) -> None:
+
+def test_empty_all() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.empty_all', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.empty_all', options=options)
+ assert actual == [
'',
'.. py:module:: target.empty_all',
'',
@@ -31,11 +32,10 @@ def test_empty_all(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_automodule(app: SphinxTestApp) -> None:
+def test_automodule() -> None:
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.module', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.module', options=options)
+ assert actual == [
'',
'.. py:module:: target.module',
'',
@@ -56,14 +56,13 @@ def test_automodule(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_automodule_undoc_members(app: SphinxTestApp) -> None:
+def test_automodule_undoc_members() -> None:
options = {
'members': None,
'undoc-members': None,
}
- actual = do_autodoc(app, 'module', 'target.module', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.module', options=options)
+ assert actual == [
'',
'.. py:module:: target.module',
'',
@@ -89,14 +88,13 @@ def test_automodule_undoc_members(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_automodule_special_members(app: SphinxTestApp) -> None:
+def test_automodule_special_members() -> None:
options = {
'members': None,
'special-members': None,
}
- actual = do_autodoc(app, 'module', 'target.module', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.module', options=options)
+ assert actual == [
'',
'.. py:module:: target.module',
'',
@@ -124,15 +122,14 @@ def test_automodule_special_members(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_automodule_inherited_members(app: SphinxTestApp) -> None:
+def test_automodule_inherited_members() -> None:
options = {
'members': None,
'undoc-members': None,
'inherited-members': 'Base, list',
}
- actual = do_autodoc(app, 'module', 'target.inheritance', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.inheritance', options=options)
+ assert actual == [
'',
'.. py:module:: target.inheritance',
'',
@@ -206,29 +203,26 @@ def test_automodule_inherited_members(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={
- 'autodoc_mock_imports': [
+@pytest.mark.usefixtures('rollback_sysmodules')
+def test_subclass_of_mocked_object() -> None:
+ config = _AutodocConfig(
+ autodoc_mock_imports=[
'missing_module',
'missing_package1',
'missing_package2',
'missing_package3',
'sphinx.missing_module4',
]
- },
-)
-@pytest.mark.usefixtures('rollback_sysmodules')
-def test_subclass_of_mocked_object(app: SphinxTestApp) -> None:
- from sphinx.ext.autodoc.mock import _MockObject
+ )
sys.modules.pop('target', None) # unload target module to clear the module cache
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.need_mocks', options)
+ actual = do_autodoc('module', 'target.need_mocks', config=config, options=options)
# ``typing.Any`` is not available at runtime on ``_MockObject.__new__``
- assert '.. py:class:: Inherited(*args: Any, **kwargs: Any)' in actual
+ assert actual[10:11] == [
+ '.. py:class:: Inherited(*args: Any, **kwargs: Any)',
+ ]
# make ``typing.Any`` available at runtime on ``_MockObject.__new__``
sig = inspect.signature(_MockObject.__new__)
@@ -239,7 +233,7 @@ def test_subclass_of_mocked_object(app: SphinxTestApp) -> None:
_MockObject.__new__.__signature__ = sig # type: ignore[attr-defined]
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.need_mocks', options)
- assert (
- '.. py:class:: Inherited(*args: ~typing.Any, **kwargs: ~typing.Any)'
- ) in actual
+ actual = do_autodoc('module', 'target.need_mocks', config=config, options=options)
+ assert actual[10:11] == [
+ '.. py:class:: Inherited(*args: ~typing.Any, **kwargs: ~typing.Any)',
+ ]
diff --git a/tests/test_ext_autodoc/test_ext_autodoc_autoproperty.py b/tests/test_ext_autodoc/test_ext_autodoc_autoproperty.py
new file mode 100644
index 00000000000..7fdd57ad7f8
--- /dev/null
+++ b/tests/test_ext_autodoc/test_ext_autodoc_autoproperty.py
@@ -0,0 +1,89 @@
+"""Test the autodoc extension.
+
+This tests mainly the Documenters; the auto directives are tested in a test
+source file translated by test_build.
+"""
+
+from __future__ import annotations
+
+import pytest
+
+from tests.test_ext_autodoc.autodoc_util import do_autodoc
+
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
+
+
+def test_properties() -> None:
+ actual = do_autodoc('property', 'target.properties.Foo.prop1')
+ assert actual == [
+ '',
+ '.. py:property:: Foo.prop1',
+ ' :module: target.properties',
+ ' :type: int',
+ '',
+ ' docstring',
+ '',
+ ]
+
+
+def test_class_properties() -> None:
+ actual = do_autodoc('property', 'target.properties.Foo.prop2')
+ assert actual == [
+ '',
+ '.. py:property:: Foo.prop2',
+ ' :module: target.properties',
+ ' :classmethod:',
+ ' :type: int',
+ '',
+ ' docstring',
+ '',
+ ]
+
+
+def test_properties_with_type_comment() -> None:
+ actual = do_autodoc('property', 'target.properties.Foo.prop1_with_type_comment')
+ assert actual == [
+ '',
+ '.. py:property:: Foo.prop1_with_type_comment',
+ ' :module: target.properties',
+ ' :type: int',
+ '',
+ ' docstring',
+ '',
+ ]
+
+
+def test_class_properties_with_type_comment() -> None:
+ actual = do_autodoc('property', 'target.properties.Foo.prop2_with_type_comment')
+ assert actual == [
+ '',
+ '.. py:property:: Foo.prop2_with_type_comment',
+ ' :module: target.properties',
+ ' :classmethod:',
+ ' :type: int',
+ '',
+ ' docstring',
+ '',
+ ]
+
+
+def test_cached_properties() -> None:
+ actual = do_autodoc('property', 'target.cached_property.Foo.prop')
+ assert actual == [
+ '',
+ '.. py:property:: Foo.prop',
+ ' :module: target.cached_property',
+ ' :type: int',
+ '',
+ ]
+
+
+def test_cached_properties_with_type_comment() -> None:
+ actual = do_autodoc('property', 'target.cached_property.Foo.prop_with_type_comment')
+ assert actual == [
+ '',
+ '.. py:property:: Foo.prop_with_type_comment',
+ ' :module: target.cached_property',
+ ' :type: int',
+ '',
+ ]
diff --git a/tests/test_ext_autodoc/test_ext_autodoc_configs.py b/tests/test_ext_autodoc/test_ext_autodoc_configs.py
new file mode 100644
index 00000000000..65c650cdce8
--- /dev/null
+++ b/tests/test_ext_autodoc/test_ext_autodoc_configs.py
@@ -0,0 +1,1009 @@
+"""Test the autodoc extension. This tests mainly for config variables"""
+
+from __future__ import annotations
+
+import logging
+import platform
+import sys
+
+import pytest
+
+from sphinx.ext.autodoc._shared import _AutodocConfig
+
+from tests.test_ext_autodoc.autodoc_util import do_autodoc
+
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
+
+IS_PYPY = platform.python_implementation() == 'PyPy'
+
+
+def test_autoclass_content_class() -> None:
+ config = _AutodocConfig(autoclass_content='class')
+ options = {'members': None}
+ actual = do_autodoc(
+ 'module', 'target.autoclass_content', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:module:: target.autoclass_content',
+ '',
+ '',
+ '.. py:class:: A()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, no __new__',
+ '',
+ '',
+ '.. py:class:: B()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__(no docstring), no __new__',
+ '',
+ '',
+ '.. py:class:: C()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__, no __new__',
+ '',
+ '',
+ '.. py:class:: D()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__(no docstring)',
+ '',
+ '',
+ '.. py:class:: E()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__',
+ '',
+ '',
+ '.. py:class:: F()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having both __init__ and __new__',
+ '',
+ '',
+ '.. py:class:: G()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class inherits __init__ without docstring.',
+ '',
+ '',
+ '.. py:class:: H()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class inherits __new__ without docstring.',
+ '',
+ ]
+
+
+def test_autoclass_content_init() -> None:
+ config = _AutodocConfig(autoclass_content='init')
+ options = {'members': None}
+ actual = do_autodoc(
+ 'module', 'target.autoclass_content', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:module:: target.autoclass_content',
+ '',
+ '',
+ '.. py:class:: A()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, no __new__',
+ '',
+ '',
+ '.. py:class:: B()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__(no docstring), no __new__',
+ '',
+ '',
+ '.. py:class:: C()',
+ ' :module: target.autoclass_content',
+ '',
+ ' __init__ docstring',
+ '',
+ '',
+ '.. py:class:: D()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__(no docstring)',
+ '',
+ '',
+ '.. py:class:: E()',
+ ' :module: target.autoclass_content',
+ '',
+ ' __new__ docstring',
+ '',
+ '',
+ '.. py:class:: F()',
+ ' :module: target.autoclass_content',
+ '',
+ ' __init__ docstring',
+ '',
+ '',
+ '.. py:class:: G()',
+ ' :module: target.autoclass_content',
+ '',
+ ' __init__ docstring',
+ '',
+ '',
+ '.. py:class:: H()',
+ ' :module: target.autoclass_content',
+ '',
+ ' __new__ docstring',
+ '',
+ ]
+
+
+def test_autodoc_class_signature_mixed() -> None:
+ config = _AutodocConfig(autodoc_class_signature='mixed')
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc('class', 'target.classes.Bar', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:class:: Bar(x, y)',
+ ' :module: target.classes',
+ '',
+ ]
+
+
+def test_autodoc_class_signature_separated_init() -> None:
+ config = _AutodocConfig(autodoc_class_signature='separated')
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc('class', 'target.classes.Bar', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:class:: Bar',
+ ' :module: target.classes',
+ '',
+ '',
+ ' .. py:method:: Bar.__init__(x, y)',
+ ' :module: target.classes',
+ '',
+ ]
+
+
+def test_autodoc_class_signature_separated_new() -> None:
+ config = _AutodocConfig(autodoc_class_signature='separated')
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc('class', 'target.classes.Baz', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:class:: Baz',
+ ' :module: target.classes',
+ '',
+ '',
+ ' .. py:method:: Baz.__new__(cls, x, y)',
+ ' :module: target.classes',
+ ' :staticmethod:',
+ '',
+ ]
+
+
+def test_autoclass_content_both() -> None:
+ config = _AutodocConfig(autoclass_content='both')
+ options = {'members': None}
+ actual = do_autodoc(
+ 'module', 'target.autoclass_content', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:module:: target.autoclass_content',
+ '',
+ '',
+ '.. py:class:: A()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, no __new__',
+ '',
+ '',
+ '.. py:class:: B()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__(no docstring), no __new__',
+ '',
+ '',
+ '.. py:class:: C()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having __init__, no __new__',
+ '',
+ ' __init__ docstring',
+ '',
+ '',
+ '.. py:class:: D()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__(no docstring)',
+ '',
+ '',
+ '.. py:class:: E()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having no __init__, __new__',
+ '',
+ ' __new__ docstring',
+ '',
+ '',
+ '.. py:class:: F()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class having both __init__ and __new__',
+ '',
+ ' __init__ docstring',
+ '',
+ '',
+ '.. py:class:: G()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class inherits __init__ without docstring.',
+ '',
+ ' __init__ docstring',
+ '',
+ '',
+ '.. py:class:: H()',
+ ' :module: target.autoclass_content',
+ '',
+ ' A class inherits __new__ without docstring.',
+ '',
+ ' __new__ docstring',
+ '',
+ ]
+
+
+def test_autodoc_inherit_docstrings() -> None:
+ config = _AutodocConfig()
+ assert config.autodoc_inherit_docstrings is True # default
+ actual = do_autodoc(
+ 'method', 'target.inheritance.Derived.inheritedmeth', config=config
+ )
+ assert actual == [
+ '',
+ '.. py:method:: Derived.inheritedmeth()',
+ ' :module: target.inheritance',
+ '',
+ ' Inherited function.',
+ '',
+ ]
+
+ # disable autodoc_inherit_docstrings
+ config = _AutodocConfig(autodoc_inherit_docstrings=False)
+ actual = do_autodoc(
+ 'method', 'target.inheritance.Derived.inheritedmeth', config=config
+ )
+ assert actual == [
+ '',
+ '.. py:method:: Derived.inheritedmeth()',
+ ' :module: target.inheritance',
+ '',
+ ]
+
+
+def test_autodoc_inherit_docstrings_for_inherited_members() -> None:
+ config = _AutodocConfig()
+ options = {
+ 'members': None,
+ 'inherited-members': None,
+ }
+
+ assert config.autodoc_inherit_docstrings is True # default
+ actual = do_autodoc(
+ 'class', 'target.inheritance.Derived', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:class:: Derived()',
+ ' :module: target.inheritance',
+ '',
+ '',
+ ' .. py:method:: Derived.another_inheritedmeth()',
+ ' :module: target.inheritance',
+ '',
+ ' Another inherited function.',
+ '',
+ '',
+ ' .. py:attribute:: Derived.inheritedattr',
+ ' :module: target.inheritance',
+ ' :value: None',
+ '',
+ ' docstring',
+ '',
+ '',
+ ' .. py:method:: Derived.inheritedclassmeth()',
+ ' :module: target.inheritance',
+ ' :classmethod:',
+ '',
+ ' Inherited class method.',
+ '',
+ '',
+ ' .. py:method:: Derived.inheritedmeth()',
+ ' :module: target.inheritance',
+ '',
+ ' Inherited function.',
+ '',
+ '',
+ ' .. py:method:: Derived.inheritedstaticmeth(cls)',
+ ' :module: target.inheritance',
+ ' :staticmethod:',
+ '',
+ ' Inherited static method.',
+ '',
+ ]
+
+ # disable autodoc_inherit_docstrings
+ config = _AutodocConfig(autodoc_inherit_docstrings=False)
+ actual = do_autodoc(
+ 'class', 'target.inheritance.Derived', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:class:: Derived()',
+ ' :module: target.inheritance',
+ '',
+ '',
+ ' .. py:method:: Derived.another_inheritedmeth()',
+ ' :module: target.inheritance',
+ '',
+ ' Another inherited function.',
+ '',
+ '',
+ ' .. py:method:: Derived.inheritedclassmeth()',
+ ' :module: target.inheritance',
+ ' :classmethod:',
+ '',
+ ' Inherited class method.',
+ '',
+ '',
+ ' .. py:method:: Derived.inheritedstaticmeth(cls)',
+ ' :module: target.inheritance',
+ ' :staticmethod:',
+ '',
+ ' Inherited static method.',
+ '',
+ ]
+
+
+def test_autodoc_docstring_signature() -> None:
+ options = {
+ 'members': None,
+ 'special-members': '__init__, __new__',
+ }
+ actual = do_autodoc('class', 'target.DocstringSig', options=options)
+ assert actual == [
+ '',
+ # FIXME: Ideally this would instead be: `DocstringSig(d, e=1)` but
+ # currently `ClassDocumenter` does not apply the docstring signature
+ # logic when extracting a signature from a __new__ or __init__ method.
+ '.. py:class:: DocstringSig(*new_args, **new_kwargs)',
+ ' :module: target',
+ '',
+ '',
+ ' .. py:method:: DocstringSig.__init__(self, a, b=1) -> None',
+ ' :module: target',
+ '',
+ ' First line of docstring',
+ '',
+ ' rest of docstring',
+ '',
+ '',
+ ' .. py:method:: DocstringSig.__new__(cls, d, e=1) -> DocstringSig',
+ ' :module: target',
+ ' :staticmethod:',
+ '',
+ ' First line of docstring',
+ '',
+ ' rest of docstring',
+ '',
+ '',
+ ' .. py:method:: DocstringSig.meth(FOO, BAR=1) -> BAZ',
+ ' :module: target',
+ '',
+ ' First line of docstring',
+ '',
+ ' rest of docstring',
+ '',
+ '',
+ ' .. py:method:: DocstringSig.meth2()',
+ ' :module: target',
+ '',
+ ' First line, no signature',
+ ' Second line followed by indentation::',
+ '',
+ ' indented line',
+ '',
+ '',
+ ' .. py:property:: DocstringSig.prop1',
+ ' :module: target',
+ '',
+ ' First line of docstring',
+ '',
+ '',
+ ' .. py:property:: DocstringSig.prop2',
+ ' :module: target',
+ '',
+ ' First line of docstring',
+ ' Second line of docstring',
+ '',
+ ]
+
+ # disable autodoc_docstring_signature
+ config = _AutodocConfig(autodoc_docstring_signature=False)
+ actual = do_autodoc('class', 'target.DocstringSig', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:class:: DocstringSig(*new_args, **new_kwargs)',
+ ' :module: target',
+ '',
+ '',
+ ' .. py:method:: DocstringSig.__init__(*init_args, **init_kwargs)',
+ ' :module: target',
+ '',
+ ' __init__(self, a, b=1) -> None',
+ ' First line of docstring',
+ '',
+ ' rest of docstring',
+ '',
+ '',
+ ' .. py:method:: DocstringSig.__new__(cls, *new_args, **new_kwargs)',
+ ' :module: target',
+ ' :staticmethod:',
+ '',
+ ' __new__(cls, d, e=1) -> DocstringSig',
+ ' First line of docstring',
+ '',
+ ' rest of docstring',
+ '',
+ '',
+ ' .. py:method:: DocstringSig.meth()',
+ ' :module: target',
+ '',
+ ' meth(FOO, BAR=1) -> BAZ',
+ ' First line of docstring',
+ '',
+ ' rest of docstring',
+ '',
+ '',
+ ' .. py:method:: DocstringSig.meth2()',
+ ' :module: target',
+ '',
+ ' First line, no signature',
+ ' Second line followed by indentation::',
+ '',
+ ' indented line',
+ '',
+ '',
+ ' .. py:property:: DocstringSig.prop1',
+ ' :module: target',
+ '',
+ ' DocstringSig.prop1(self)',
+ ' First line of docstring',
+ '',
+ '',
+ ' .. py:property:: DocstringSig.prop2',
+ ' :module: target',
+ '',
+ ' First line of docstring',
+ ' Second line of docstring',
+ '',
+ ]
+
+
+def test_autoclass_content_and_docstring_signature_class() -> None:
+ config = _AutodocConfig(autoclass_content='class')
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc(
+ 'module', 'target.docstring_signature', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:module:: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: A(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: B(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: C(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: D()',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: E()',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: F()',
+ ' :module: target.docstring_signature',
+ '',
+ ]
+
+
+def test_autoclass_content_and_docstring_signature_init() -> None:
+ config = _AutodocConfig(autoclass_content='init')
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc(
+ 'module', 'target.docstring_signature', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:module:: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: A(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: B(foo, bar, baz)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: C(foo, bar, baz)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: D(foo, bar, baz)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: E(foo: int, bar: int, baz: int)',
+ ' E(foo: str, bar: str, baz: str)',
+ ' E(foo: float, bar: float, baz: float)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: F(foo: int, bar: int, baz: int)',
+ ' F(foo: str, bar: str, baz: str)',
+ ' F(foo: float, bar: float, baz: float)',
+ ' :module: target.docstring_signature',
+ '',
+ ]
+
+
+def test_autoclass_content_and_docstring_signature_both() -> None:
+ config = _AutodocConfig(autoclass_content='both')
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc(
+ 'module', 'target.docstring_signature', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:module:: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: A(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: B(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ ' B(foo, bar, baz)',
+ '',
+ '',
+ '.. py:class:: C(foo, bar)',
+ ' :module: target.docstring_signature',
+ '',
+ ' C(foo, bar, baz)',
+ '',
+ '',
+ '.. py:class:: D(foo, bar, baz)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: E(foo: int, bar: int, baz: int)',
+ ' E(foo: str, bar: str, baz: str)',
+ ' E(foo: float, bar: float, baz: float)',
+ ' :module: target.docstring_signature',
+ '',
+ '',
+ '.. py:class:: F(foo: int, bar: int, baz: int)',
+ ' F(foo: str, bar: str, baz: str)',
+ ' F(foo: float, bar: float, baz: float)',
+ ' :module: target.docstring_signature',
+ '',
+ ]
+
+
+@pytest.mark.usefixtures('rollback_sysmodules')
+def test_mocked_module_imports(caplog: pytest.LogCaptureFixture) -> None:
+ # work around sphinx.util.logging.setup()
+ logger = logging.getLogger('sphinx')
+ logger.handlers[:] = [caplog.handler]
+ caplog.set_level(logging.WARNING)
+
+ sys.modules.pop('target', None) # unload target module to clear the module cache
+
+ # no autodoc_mock_imports
+ options = {'members': 'TestAutodoc,decorated_function,func,Alias'}
+ actual = do_autodoc(
+ 'module', 'target.need_mocks', expect_import_error=True, options=options
+ )
+ assert actual == []
+ assert len(set(caplog.messages)) == 1
+ assert "autodoc: failed to import 'need_mocks'" in caplog.messages[0]
+
+ # with autodoc_mock_imports
+ config = _AutodocConfig(
+ autodoc_mock_imports=[
+ 'missing_module',
+ 'missing_package1',
+ 'missing_package2',
+ 'missing_package3',
+ 'sphinx.missing_module4',
+ ]
+ )
+
+ caplog.clear()
+ actual = do_autodoc('module', 'target.need_mocks', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:module:: target.need_mocks',
+ '',
+ '',
+ '.. py:data:: Alias',
+ ' :module: target.need_mocks',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:class:: TestAutodoc()',
+ ' :module: target.need_mocks',
+ '',
+ ' TestAutodoc docstring.',
+ '',
+ '',
+ ' .. py:attribute:: TestAutodoc.Alias',
+ ' :module: target.need_mocks',
+ '',
+ ' docstring',
+ '',
+ '',
+ ' .. py:method:: TestAutodoc.decorated_method()',
+ ' :module: target.need_mocks',
+ '',
+ ' TestAutodoc::decorated_method docstring',
+ '',
+ '',
+ '.. py:function:: decorated_function()',
+ ' :module: target.need_mocks',
+ '',
+ ' decorated_function docstring',
+ '',
+ '',
+ '.. py:function:: func(arg: missing_module.Class)',
+ ' :module: target.need_mocks',
+ '',
+ ' a function takes mocked object as an argument',
+ '',
+ ]
+ assert len(caplog.records) == 0
+
+
+def test_autodoc_type_aliases() -> None:
+ # default
+ options = {'members': None}
+ actual = do_autodoc('module', 'target.autodoc_type_aliases', options=options)
+ assert actual == [
+ '',
+ '.. py:module:: target.autodoc_type_aliases',
+ '',
+ '',
+ '.. py:class:: Foo()',
+ ' :module: target.autodoc_type_aliases',
+ '',
+ ' docstring',
+ '',
+ '',
+ ' .. py:attribute:: Foo.attr1',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: int',
+ '',
+ ' docstring',
+ '',
+ '',
+ ' .. py:attribute:: Foo.attr2',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: int',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:function:: mult(x: int, y: int) -> int',
+ ' mult(x: float, y: float) -> float',
+ ' :module: target.autodoc_type_aliases',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:function:: read(r: ~io.BytesIO) -> ~io.StringIO',
+ ' :module: target.autodoc_type_aliases',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:function:: sum(x: int, y: int) -> int',
+ ' :module: target.autodoc_type_aliases',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:data:: variable',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: int',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:data:: variable2',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: int',
+ ' :value: None',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:data:: variable3',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: int | None',
+ '',
+ ' docstring',
+ '',
+ ]
+
+ # define aliases
+ config = _AutodocConfig(
+ autodoc_type_aliases={
+ 'myint': 'myint',
+ 'io.StringIO': 'my.module.StringIO',
+ }
+ )
+ actual = do_autodoc(
+ 'module', 'target.autodoc_type_aliases', config=config, options=options
+ )
+ assert actual == [
+ '',
+ '.. py:module:: target.autodoc_type_aliases',
+ '',
+ '',
+ '.. py:class:: Foo()',
+ ' :module: target.autodoc_type_aliases',
+ '',
+ ' docstring',
+ '',
+ '',
+ ' .. py:attribute:: Foo.attr1',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: myint',
+ '',
+ ' docstring',
+ '',
+ '',
+ ' .. py:attribute:: Foo.attr2',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: myint',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:function:: mult(x: myint, y: myint) -> myint',
+ ' mult(x: float, y: float) -> float',
+ ' :module: target.autodoc_type_aliases',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:function:: read(r: ~io.BytesIO) -> my.module.StringIO',
+ ' :module: target.autodoc_type_aliases',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:function:: sum(x: myint, y: myint) -> myint',
+ ' :module: target.autodoc_type_aliases',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:data:: variable',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: myint',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:data:: variable2',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: myint',
+ ' :value: None',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:data:: variable3',
+ ' :module: target.autodoc_type_aliases',
+ ' :type: myint | None',
+ '',
+ ' docstring',
+ '',
+ ]
+
+
+def test_autodoc_default_options() -> None:
+ if (3, 11, 7) <= sys.version_info < (3, 12) or sys.version_info >= (3, 12, 1):
+ list_of_weak_references = ' list of weak references to the object'
+ else:
+ list_of_weak_references = ' list of weak references to the object (if defined)' # fmt: skip
+
+ # no settings
+ actual = do_autodoc('class', 'target.enums.EnumCls')
+ assert ' .. py:attribute:: EnumCls.val1' not in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+ actual = do_autodoc('class', 'target.CustomIter')
+ assert ' .. py:method:: target.CustomIter' not in actual
+ actual = do_autodoc('module', 'target')
+ assert '.. py:function:: function_to_be_imported(app)' not in actual
+
+ # with :members:
+ config = _AutodocConfig(autodoc_default_options={'members': True})
+ actual = do_autodoc('class', 'target.enums.EnumCls', config=config)
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+
+ # with :members: = True
+ config = _AutodocConfig(autodoc_default_options={'members': True})
+ actual = do_autodoc('class', 'target.enums.EnumCls', config=config)
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+
+ # with :members: and :undoc-members:
+ config = _AutodocConfig(
+ autodoc_default_options={'members': True, 'undoc-members': True}
+ )
+ actual = do_autodoc('class', 'target.enums.EnumCls', config=config)
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val4' in actual
+
+ # with :special-members:
+ # Note that :members: must be *on* for :special-members: to work.
+ config = _AutodocConfig(
+ autodoc_default_options={'members': True, 'special-members': True}
+ )
+ actual = do_autodoc('class', 'target.CustomIter', config=config)
+ assert ' .. py:method:: CustomIter.__init__()' in actual
+ assert ' Create a new `CustomIter`.' in actual
+ assert ' .. py:method:: CustomIter.__iter__()' in actual
+ assert ' Iterate squares of each value.' in actual
+ if not IS_PYPY:
+ assert ' .. py:attribute:: CustomIter.__weakref__' in actual
+ assert list_of_weak_references in actual
+
+ # :exclude-members: None - has no effect. Unlike :members:,
+ # :special-members:, etc. where None == "include all", here None means
+ # "no/false/off".
+ config = _AutodocConfig(
+ autodoc_default_options={'members': True, 'exclude-members': True}
+ )
+ actual = do_autodoc('class', 'target.enums.EnumCls', config=config)
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+ config = _AutodocConfig(
+ autodoc_default_options={
+ 'members': True,
+ 'special-members': True,
+ 'exclude-members': True,
+ }
+ )
+ actual = do_autodoc('class', 'target.CustomIter', config=config)
+ assert ' .. py:method:: CustomIter.__init__()' in actual
+ assert ' Create a new `CustomIter`.' in actual
+ assert ' .. py:method:: CustomIter.__iter__()' in actual
+ assert ' Iterate squares of each value.' in actual
+ if not IS_PYPY:
+ assert ' .. py:attribute:: CustomIter.__weakref__' in actual
+ assert list_of_weak_references in actual
+ assert ' .. py:method:: CustomIter.snafucate()' in actual
+ assert ' Makes this snafucated.' in actual
+
+
+def test_autodoc_default_options_with_values() -> None:
+ if (3, 11, 7) <= sys.version_info < (3, 12) or sys.version_info >= (3, 12, 1):
+ list_of_weak_references = ' list of weak references to the object'
+ else:
+ list_of_weak_references = ' list of weak references to the object (if defined)' # fmt: skip
+
+ # with :members:
+ config = _AutodocConfig(autodoc_default_options={'members': 'val1,val2'})
+ actual = do_autodoc('class', 'target.enums.EnumCls', config=config)
+ assert ' .. py:attribute:: EnumCls.val1' in actual
+ assert ' .. py:attribute:: EnumCls.val2' in actual
+ assert ' .. py:attribute:: EnumCls.val3' not in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+
+ # with :member-order:
+ config = _AutodocConfig(
+ autodoc_default_options={'members': True, 'member-order': 'bysource'}
+ )
+ actual = do_autodoc('class', 'target.Class', config=config)
+ assert [line for line in actual if '::' in line] == [
+ '.. py:class:: Class(arg)',
+ ' .. py:method:: Class.meth()',
+ ' .. py:method:: Class.skipmeth()',
+ ' .. py:method:: Class.excludemeth()',
+ ' .. py:attribute:: Class.attr',
+ ' .. py:attribute:: Class.docattr',
+ ' .. py:attribute:: Class.udocattr',
+ ' .. py:attribute:: Class.mdocattr',
+ ' .. py:method:: Class.moore(a, e, f) -> happiness',
+ ' .. py:attribute:: Class.inst_attr_inline',
+ ' .. py:attribute:: Class.inst_attr_comment',
+ ' .. py:attribute:: Class.inst_attr_string',
+ ]
+
+ # with :special-members:
+ config = _AutodocConfig(
+ autodoc_default_options={'special-members': '__init__,__iter__'}
+ )
+ actual = do_autodoc('class', 'target.CustomIter', config=config)
+ assert ' .. py:method:: CustomIter.__init__()' in actual
+ assert ' Create a new `CustomIter`.' in actual
+ assert ' .. py:method:: CustomIter.__iter__()' in actual
+ assert ' Iterate squares of each value.' in actual
+ if not IS_PYPY:
+ assert ' .. py:attribute:: CustomIter.__weakref__' not in actual
+ assert list_of_weak_references not in actual
+
+ # with :exclude-members:
+ config = _AutodocConfig(
+ autodoc_default_options={'members': True, 'exclude-members': 'val1'}
+ )
+ actual = do_autodoc('class', 'target.enums.EnumCls', config=config)
+ assert ' .. py:attribute:: EnumCls.val1' not in actual
+ assert ' .. py:attribute:: EnumCls.val2' in actual
+ assert ' .. py:attribute:: EnumCls.val3' in actual
+ assert ' .. py:attribute:: EnumCls.val4' not in actual
+ config = _AutodocConfig(
+ autodoc_default_options={
+ 'members': True,
+ 'special-members': True,
+ 'exclude-members': '__weakref__,snafucate',
+ }
+ )
+ actual = do_autodoc('class', 'target.CustomIter', config=config)
+ assert ' .. py:method:: CustomIter.__init__()' in actual
+ assert ' Create a new `CustomIter`.' in actual
+ assert ' .. py:method:: CustomIter.__iter__()' in actual
+ assert ' Iterate squares of each value.' in actual
+ if not IS_PYPY:
+ assert ' .. py:attribute:: CustomIter.__weakref__' not in actual
+ assert list_of_weak_references not in actual
+ assert ' .. py:method:: CustomIter.snafucate()' not in actual
+ assert ' Makes this snafucated.' not in actual
diff --git a/tests/test_ext_autodoc/test_ext_autodoc_events.py b/tests/test_ext_autodoc/test_ext_autodoc_events.py
new file mode 100644
index 00000000000..af57488f344
--- /dev/null
+++ b/tests/test_ext_autodoc/test_ext_autodoc_events.py
@@ -0,0 +1,142 @@
+"""Test the autodoc extension. This tests mainly for autodoc events"""
+
+from __future__ import annotations
+
+import pytest
+
+from sphinx.ext.autodoc import between, cut_lines
+
+from tests.test_ext_autodoc.autodoc_util import FakeEvents, do_autodoc
+
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
+
+
+def test_process_docstring() -> None:
+ def on_process_docstring(app, what, name, obj, options, lines):
+ lines.clear()
+ lines.append('my docstring')
+
+ events = FakeEvents()
+ events.connect('autodoc-process-docstring', on_process_docstring)
+
+ actual = do_autodoc('function', 'target.process_docstring.func', events=events)
+ assert actual == [
+ '',
+ '.. py:function:: func()',
+ ' :module: target.process_docstring',
+ '',
+ ' my docstring',
+ '',
+ ]
+
+
+def test_process_docstring_for_nondatadescriptor() -> None:
+ def on_process_docstring(app, what, name, obj, options, lines):
+ raise RuntimeError
+
+ events = FakeEvents()
+ events.connect('autodoc-process-docstring', on_process_docstring)
+
+ actual = do_autodoc('attribute', 'target.AttCls.a1', events=events)
+ assert actual == [
+ '',
+ '.. py:attribute:: AttCls.a1',
+ ' :module: target',
+ ' :value: hello world',
+ '',
+ ]
+
+
+def test_cut_lines() -> None:
+ events = FakeEvents()
+ events.connect('autodoc-process-docstring', cut_lines(2, 2, ['function']))
+
+ actual = do_autodoc('function', 'target.process_docstring.func', events=events)
+ assert actual == [
+ '',
+ '.. py:function:: func()',
+ ' :module: target.process_docstring',
+ '',
+ ' second line',
+ '',
+ ]
+
+
+def test_cut_lines_no_objtype():
+ docstring_lines = [
+ 'first line',
+ '---',
+ 'second line',
+ '---',
+ 'third line ',
+ '',
+ ]
+ process = cut_lines(2)
+
+ process(None, 'function', 'func', None, {}, docstring_lines)
+ assert docstring_lines == [
+ 'second line',
+ '---',
+ 'third line ',
+ '',
+ ]
+
+
+def test_between() -> None:
+ events = FakeEvents()
+ events.connect('autodoc-process-docstring', between('---', ['function']))
+
+ actual = do_autodoc('function', 'target.process_docstring.func', events=events)
+ assert actual == [
+ '',
+ '.. py:function:: func()',
+ ' :module: target.process_docstring',
+ '',
+ ' second line',
+ '',
+ ]
+
+
+def test_between_exclude() -> None:
+ events = FakeEvents()
+ events.connect(
+ 'autodoc-process-docstring', between('---', ['function'], exclude=True)
+ )
+
+ actual = do_autodoc('function', 'target.process_docstring.func', events=events)
+ assert actual == [
+ '',
+ '.. py:function:: func()',
+ ' :module: target.process_docstring',
+ '',
+ ' first line',
+ ' third line',
+ '',
+ ]
+
+
+def test_skip_module_member() -> None:
+ def autodoc_skip_member(app, what, name, obj, skip, options):
+ if name == 'Class':
+ return True # Skip "Class" class in __all__
+ elif name == 'raises':
+ return False # Show "raises()" function (not in __all__)
+ return None
+
+ events = FakeEvents()
+ events.connect('autodoc-skip-member', autodoc_skip_member)
+
+ options = {'members': None}
+ actual = do_autodoc('module', 'target', events=events, options=options)
+ assert actual == [
+ '',
+ '.. py:module:: target',
+ '',
+ # TODO: consider.
+ # '',
+ # '.. py:function:: raises(exc, func, *args, **kwds)',
+ # ' :module: target',
+ # '',
+ # ' Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*.',
+ # '',
+ ]
diff --git a/tests/test_extensions/test_ext_autodoc_importer.py b/tests/test_ext_autodoc/test_ext_autodoc_importer.py
similarity index 84%
rename from tests/test_extensions/test_ext_autodoc_importer.py
rename to tests/test_ext_autodoc/test_ext_autodoc_importer.py
index f14b8256c14..e61fd8b6594 100644
--- a/tests/test_extensions/test_ext_autodoc_importer.py
+++ b/tests/test_ext_autodoc/test_ext_autodoc_importer.py
@@ -3,7 +3,7 @@
import sys
from pathlib import Path
-from sphinx.ext.autodoc.importer import import_module
+from sphinx.ext.autodoc._importer import _import_module
def test_import_native_module_stubs(rootdir: Path) -> None:
@@ -11,7 +11,7 @@ def test_import_native_module_stubs(rootdir: Path) -> None:
sys_path = list(sys.path)
sys.path.insert(0, str(fish_licence_root))
- halibut = import_module('fish_licence.halibut')
+ halibut = _import_module('fish_licence.halibut')
sys.path[:] = sys_path
assert halibut.__file__.endswith('halibut.pyi')
diff --git a/tests/test_extensions/test_ext_autodoc_mock.py b/tests/test_ext_autodoc/test_ext_autodoc_mock.py
similarity index 100%
rename from tests/test_extensions/test_ext_autodoc_mock.py
rename to tests/test_ext_autodoc/test_ext_autodoc_mock.py
diff --git a/tests/test_ext_autodoc/test_ext_autodoc_names.py b/tests/test_ext_autodoc/test_ext_autodoc_names.py
new file mode 100644
index 00000000000..e2ac9316c84
--- /dev/null
+++ b/tests/test_ext_autodoc/test_ext_autodoc_names.py
@@ -0,0 +1,100 @@
+"""Test the autodoc extension. This mainly tests name resolution & parsing."""
+
+from __future__ import annotations
+
+import logging
+
+from sphinx.environment import _CurrentDocument
+from sphinx.ext.autodoc._names import _parse_name
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from collections.abc import Mapping
+ from typing import Any
+
+ import pytest
+
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+
+
+def parse_name(
+ objtype: _AutodocObjType,
+ name: str,
+ *,
+ current_document: _CurrentDocument | None = None,
+ ref_context: Mapping[str, str | None] | None = None,
+) -> tuple[str, list[str], str | None, str | None] | None:
+ if current_document is None:
+ current_document = _CurrentDocument()
+ if ref_context is None:
+ ref_context = {}
+
+ parsed = _parse_name(
+ name=name,
+ objtype=objtype,
+ current_document=current_document,
+ ref_context=ref_context,
+ )
+ if parsed is None:
+ return None
+ module_name, parts, args, retann = parsed
+ return module_name, list(parts), args, retann
+
+
+def test_parse_module_names(caplog: pytest.LogCaptureFixture) -> None:
+ # work around sphinx.util.logging.setup()
+ logger = logging.getLogger('sphinx')
+ logger.handlers[:] = [caplog.handler]
+ caplog.set_level(logging.WARNING)
+
+ parsed = parse_name('module', 'test_ext_autodoc')
+ assert parsed == ('test_ext_autodoc', [], None, None)
+ parsed = parse_name('module', 'test.test_ext_autodoc')
+ assert parsed == ('test.test_ext_autodoc', [], None, None)
+ parsed = parse_name('module', 'test(arg)')
+ assert parsed is None
+ assert 'signature arguments given for automodule' in caplog.messages[0]
+
+
+def test_parse_function_names() -> None:
+ parsed = parse_name('function', 'test_ext_autodoc.raises')
+ assert parsed == ('test_ext_autodoc', ['raises'], None, None)
+ parsed = parse_name('function', 'test_ext_autodoc.raises(exc) -> None')
+ assert parsed == ('test_ext_autodoc', ['raises'], '(exc)', 'None')
+
+
+def test_parse_function_names_current_document() -> None:
+ current_document = _CurrentDocument()
+ current_document.autodoc_module = 'test_ext_autodoc'
+ parsed = parse_name('function', 'raises', current_document=current_document)
+ assert parsed == ('test_ext_autodoc', ['raises'], None, None)
+
+
+def test_parse_function_names_ref_context() -> None:
+ ref_context: dict[str, Any] = {'py:module': 'test_ext_autodoc'}
+ parsed = parse_name('function', 'raises', ref_context=ref_context)
+ assert parsed == ('test_ext_autodoc', ['raises'], None, None)
+ parsed = parse_name('class', 'Base', ref_context=ref_context)
+ assert parsed == ('test_ext_autodoc', ['Base'], None, None)
+
+
+def test_parse_name_members() -> None:
+ # for members
+ ref_context: dict[str, Any] = {'py:module': 'sphinx.testing.util'}
+ parsed = parse_name('method', 'SphinxTestApp.cleanup', ref_context=ref_context)
+ assert parsed == ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None)
+
+ current_document = _CurrentDocument()
+ current_document.autodoc_class = 'SphinxTestApp'
+ ref_context['py:class'] = 'Foo'
+ parsed = parse_name(
+ 'method', 'cleanup', current_document=current_document, ref_context=ref_context
+ )
+ assert parsed == ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None)
+ parsed = parse_name(
+ 'method',
+ 'SphinxTestApp.cleanup',
+ current_document=current_document,
+ ref_context=ref_context,
+ )
+ assert parsed == ('sphinx.testing.util', ['SphinxTestApp', 'cleanup'], None, None)
diff --git a/tests/test_extensions/test_ext_autodoc_preserve_defaults.py b/tests/test_ext_autodoc/test_ext_autodoc_preserve_defaults.py
similarity index 88%
rename from tests/test_extensions/test_ext_autodoc_preserve_defaults.py
rename to tests/test_ext_autodoc/test_ext_autodoc_preserve_defaults.py
index 8fdc7de79d3..6246d6bf2ea 100644
--- a/tests/test_extensions/test_ext_autodoc_preserve_defaults.py
+++ b/tests/test_ext_autodoc/test_ext_autodoc_preserve_defaults.py
@@ -2,27 +2,25 @@
from __future__ import annotations
-from typing import TYPE_CHECKING
-
import pytest
-from tests.test_extensions.autodoc_util import do_autodoc
+from sphinx.ext.autodoc._shared import _AutodocConfig
+
+from tests.test_ext_autodoc.autodoc_util import do_autodoc
-if TYPE_CHECKING:
- from sphinx.testing.util import SphinxTestApp
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={'autodoc_preserve_defaults': True},
-)
-def test_preserve_defaults(app: SphinxTestApp) -> None:
+def test_preserve_defaults() -> None:
+ config = _AutodocConfig(autodoc_preserve_defaults=True)
+
color = '0xFFFFFF'
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.preserve_defaults', options)
- assert list(actual) == [
+ actual = do_autodoc(
+ 'module', 'target.preserve_defaults', config=config, options=options
+ )
+ assert actual == [
'',
'.. py:module:: target.preserve_defaults',
'',
@@ -102,15 +100,15 @@ def test_preserve_defaults(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={'autodoc_preserve_defaults': True},
-)
-def test_preserve_defaults_special_constructs(app: SphinxTestApp) -> None:
+def test_preserve_defaults_special_constructs() -> None:
+ config = _AutodocConfig(autodoc_preserve_defaults=True)
+
options = {'members': None}
actual = do_autodoc(
- app, 'module', 'target.preserve_defaults_special_constructs', options
+ 'module',
+ 'target.preserve_defaults_special_constructs',
+ config=config,
+ options=options,
)
# * dataclasses.dataclass:
@@ -127,8 +125,7 @@ def test_preserve_defaults_special_constructs(app: SphinxTestApp) -> None:
# In the future, it might be possible to find some additional default
# values by parsing the source code of the annotations but the task is
# rather complex.
-
- assert list(actual) == [
+ assert actual == [
'',
'.. py:module:: target.preserve_defaults_special_constructs',
'',
diff --git a/tests/test_extensions/test_ext_autodoc_private_members.py b/tests/test_ext_autodoc/test_ext_autodoc_private_members.py
similarity index 69%
rename from tests/test_extensions/test_ext_autodoc_private_members.py
rename to tests/test_ext_autodoc/test_ext_autodoc_private_members.py
index 9c144ea40af..b4deb25f60f 100644
--- a/tests/test_extensions/test_ext_autodoc_private_members.py
+++ b/tests/test_ext_autodoc/test_ext_autodoc_private_members.py
@@ -2,22 +2,20 @@
from __future__ import annotations
-from typing import TYPE_CHECKING
-
import pytest
-from tests.test_extensions.autodoc_util import do_autodoc
+from sphinx.ext.autodoc._shared import _AutodocConfig
+
+from tests.test_ext_autodoc.autodoc_util import do_autodoc
-if TYPE_CHECKING:
- from sphinx.testing.util import SphinxTestApp
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_private_field(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'class'
+def test_private_field() -> None:
+ config = _AutodocConfig(autoclass_content='class')
options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.private', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.private', config=config, options=options)
+ assert actual == [
'',
'.. py:module:: target.private',
'',
@@ -39,15 +37,14 @@ def test_private_field(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_private_field_and_private_members(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'class'
+def test_private_field_and_private_members() -> None:
+ config = _AutodocConfig(autoclass_content='class')
options = {
'members': None,
'private-members': None,
}
- actual = do_autodoc(app, 'module', 'target.private', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.private', config=config, options=options)
+ assert actual == [
'',
'.. py:module:: target.private',
'',
@@ -84,15 +81,14 @@ def test_private_field_and_private_members(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_private_members(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'class'
+def test_private_members() -> None:
+ config = _AutodocConfig(autoclass_content='class')
options = {
'members': None,
'private-members': '_PUBLIC_CONSTANT,_public_function',
}
- actual = do_autodoc(app, 'module', 'target.private', options)
- assert list(actual) == [
+ actual = do_autodoc('module', 'target.private', config=config, options=options)
+ assert actual == [
'',
'.. py:module:: target.private',
'',
@@ -114,12 +110,11 @@ def test_private_members(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_private_attributes(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'class'
+def test_private_attributes() -> None:
+ config = _AutodocConfig(autoclass_content='class')
options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.private.Foo', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.private.Foo', config=config, options=options)
+ assert actual == [
'',
'.. py:class:: Foo()',
' :module: target.private',
@@ -136,15 +131,14 @@ def test_private_attributes(app: SphinxTestApp) -> None:
]
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_private_attributes_and_private_members(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'class'
+def test_private_attributes_and_private_members() -> None:
+ config = _AutodocConfig(autoclass_content='class')
options = {
'members': None,
'private-members': None,
}
- actual = do_autodoc(app, 'class', 'target.private.Foo', options)
- assert list(actual) == [
+ actual = do_autodoc('class', 'target.private.Foo', config=config, options=options)
+ assert actual == [
'',
'.. py:class:: Foo()',
' :module: target.private',
diff --git a/tests/test_ext_autodoc/test_ext_autodoc_signatures.py b/tests/test_ext_autodoc/test_ext_autodoc_signatures.py
new file mode 100644
index 00000000000..9404d41386f
--- /dev/null
+++ b/tests/test_ext_autodoc/test_ext_autodoc_signatures.py
@@ -0,0 +1,362 @@
+"""Test the autodoc extension. This mainly tests the signature utilities."""
+
+from __future__ import annotations
+
+import sys
+from typing import Generic, TypeVar
+
+import pytest
+
+from sphinx.ext.autodoc._directive_options import _AutoDocumenterOptions
+from sphinx.ext.autodoc._docstrings import _get_docstring_lines
+from sphinx.ext.autodoc._property_types import (
+ _ClassDefProperties,
+ _FunctionDefProperties,
+)
+from sphinx.ext.autodoc._shared import _AutodocConfig
+from sphinx.ext.autodoc._signatures import _format_signatures
+from sphinx.util.inspect import safe_getattr
+
+from tests.test_ext_autodoc.autodoc_util import FakeEvents
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from typing import Any
+
+ from sphinx.application import Sphinx
+ from sphinx.events import EventManager
+ from sphinx.ext.autodoc._property_types import _AutodocObjType
+
+processed_signatures = []
+
+
+def format_sig(
+ obj_type: _AutodocObjType,
+ name: str,
+ obj: Any,
+ *,
+ config: _AutodocConfig | None = None,
+ events: EventManager | None = None,
+ args: str | None = None,
+ retann: str | None = None,
+) -> tuple[str, str] | tuple[()]:
+ if config is None:
+ config = _AutodocConfig()
+ if events is None:
+ events = FakeEvents()
+ options = _AutoDocumenterOptions()
+
+ parent = object # dummy
+ props = _ClassDefProperties(
+ obj_type=obj_type, # type: ignore[arg-type]
+ module_name='',
+ parts=(name,),
+ docstring_lines=(),
+ bases=getattr(obj, '__bases__', None),
+ _obj=obj,
+ _obj___module__=getattr(obj, '__module__', None),
+ _obj___qualname__=getattr(obj, '__qualname__', None),
+ _obj___name__=name,
+ _obj_bases=(),
+ _obj_is_new_type=False,
+ _obj_is_typevar=False,
+ )
+ docstrings = _get_docstring_lines(
+ props,
+ class_doc_from=config.autoclass_content,
+ get_attr=safe_getattr,
+ inherit_docstrings=config.autodoc_inherit_docstrings,
+ parent=parent,
+ tab_width=8,
+ )
+ signatures = _format_signatures(
+ autodoc_annotations={},
+ config=config,
+ docstrings=docstrings,
+ events=events,
+ get_attr=safe_getattr,
+ options=options,
+ parent=parent,
+ props=props,
+ args=args,
+ retann=retann,
+ )
+ if not signatures:
+ return ()
+ assert len(signatures) == 1
+ return signatures[0]
+
+
+def _process_signature(
+ _app: Sphinx,
+ what: str,
+ name: str,
+ _obj: Any,
+ _options: Any,
+ _args: str,
+ _retann: str,
+) -> tuple[str | None, str | None] | None:
+ processed_signatures.append((what, name))
+ if name == '.bar':
+ return '42', None
+ return None
+
+
+def test_format_module_signatures() -> None:
+ # no signatures for modules
+ assert format_sig('module', 'test', None) == ()
+
+
+def test_format_function_signatures() -> None:
+ # test for functions
+ def f(a, b, c=1, **d): # type: ignore[no-untyped-def]
+ pass
+
+ def g(a='\n'): # type: ignore[no-untyped-def]
+ pass
+
+ assert format_sig('function', 'f', f) == ('(a, b, c=1, **d)', '')
+ assert format_sig('function', 'f', f, args='(a, b, c, d)') == ('(a, b, c, d)', '')
+ assert format_sig('function', 'g', g) == (r"(a='\n')", '')
+
+
+@pytest.mark.skipif(
+ sys.version_info[:2] < (3, 12),
+ reason='type statement introduced in Python 3.12',
+)
+@pytest.mark.parametrize(
+ ('params', 'expect'),
+ [
+ ('(a=1)', '(a=1)'),
+ ('(a: int=1)', '(a: int = 1)'), # auto whitespace formatting
+ ('(a:list[T] =[], b=None)', '(a: list[T] = [], b=None)'), # idem
+ ],
+)
+def test_format_function_signatures_pep695(params: str, expect: str) -> None:
+ ns: dict[str, Any] = {}
+ exec(f'def f[T]{params}: pass', ns) # NoQA: S102
+ f = ns['f']
+ assert format_sig('function', 'f', f) == (expect, '')
+ assert format_sig('function', 'f', f, args='(...)') == ('(...)', '')
+ assert format_sig('function', 'f', f, args='(...)', retann='...') == (
+ '(...)',
+ '...',
+ )
+
+ exec(f'def f[T]{params} -> list[T]: return []', ns) # NoQA: S102
+ f = ns['f']
+ assert format_sig('function', 'f', f) == (expect, 'list[T]')
+ assert format_sig('function', 'f', f, args='(...)') == ('(...)', '')
+ assert format_sig('function', 'f', f, args='(...)', retann='...') == (
+ '(...)',
+ '...',
+ )
+
+ # TODO(picnixz): add more test cases for PEP-695 classes as well (though
+ # complex cases are less likely to appear and are painful to test).
+
+
+def test_format_class_signatures() -> None:
+ # test for classes
+ class D:
+ pass
+
+ class E:
+ def __init__(self): # type: ignore[no-untyped-def]
+ pass
+
+ # an empty init and no init are the same
+ for C in (D, E):
+ assert format_sig('class', 'D', C) == ('()', '')
+
+ class SomeMeta(type):
+ def __call__(cls, a, b=None): # type: ignore[no-untyped-def]
+ return type.__call__(cls, a, b)
+
+ # these three are all equivalent
+ class F:
+ def __init__(self, a, b=None): # type: ignore[no-untyped-def]
+ pass
+
+ class FNew:
+ def __new__(cls, a, b=None): # type: ignore[no-untyped-def] # NoQA: ARG004
+ return super().__new__(cls)
+
+ class FMeta(metaclass=SomeMeta):
+ pass
+
+ # and subclasses should always inherit
+ class G(F):
+ pass
+
+ class GNew(FNew):
+ pass
+
+ class GMeta(FMeta):
+ pass
+
+ # subclasses inherit
+ assert format_sig('class', 'C', F) == ('(a, b=None)', '')
+ assert format_sig('class', 'C', FNew) == ('(a, b=None)', '')
+ assert format_sig('class', 'C', FMeta) == ('(a, b=None)', '')
+ assert format_sig('class', 'C', G) == ('(a, b=None)', '')
+ assert format_sig('class', 'C', GNew) == ('(a, b=None)', '')
+ assert format_sig('class', 'C', GMeta) == ('(a, b=None)', '')
+ assert format_sig('class', 'C', D, args='(a, b)', retann='X') == ('(a, b)', 'X')
+
+
+def test_format_class_signatures_text_signature() -> None:
+ class ListSubclass(list): # type: ignore[type-arg] # NoQA: FURB189
+ pass
+
+ # only supported if the python implementation decides to document it
+ if getattr(list, '__text_signature__', None) is not None:
+ assert format_sig('class', 'C', ListSubclass) == ('(iterable=(), /)', '')
+ else:
+ assert format_sig('class', 'C', ListSubclass) == ()
+
+
+def test_format_class_signatures_no_text_signature() -> None:
+ class ExceptionSubclass(Exception):
+ pass
+
+ # Exception has no __text_signature__ at least in Python 3.11
+ if getattr(Exception, '__text_signature__', None) is not None:
+ pytest.skip()
+ assert format_sig('class', 'C', ExceptionSubclass) == ()
+
+
+def test_format_class_signatures_init_both() -> None:
+ # __init__ have signature at first line of docstring
+ config = _AutodocConfig(autoclass_content='both')
+
+ class F2:
+ """some docstring for F2."""
+
+ def __init__(self, *args, **kw): # type: ignore[no-untyped-def]
+ """
+ __init__(a1, a2, kw1=True, kw2=False)
+
+ some docstring for __init__.
+ """ # NoQA: D212
+
+ class G2(F2):
+ pass
+
+ assert format_sig('class', 'F2', F2, config=config) == (
+ '(a1, a2, kw1=True, kw2=False)',
+ '',
+ )
+ assert format_sig('class', 'G2', G2, config=config) == (
+ '(a1, a2, kw1=True, kw2=False)',
+ '',
+ )
+
+
+def test_format_method_signatures() -> None:
+ # test for methods
+ class H:
+ def foo1(self, b, *c): # type: ignore[no-untyped-def]
+ pass
+
+ def foo2(b, *c): # type: ignore[no-untyped-def] # NoQA: N805
+ pass
+
+ def foo3(self, d='\n'): # type: ignore[no-untyped-def]
+ pass
+
+ assert format_sig('method', 'H.foo', H.foo1) == ('(b, *c)', '')
+ assert format_sig('method', 'H.foo', H.foo1, args='(a)') == ('(a)', '')
+ assert format_sig('method', 'H.foo', H.foo2) == ('(*c)', '')
+ assert format_sig('method', 'H.foo', H.foo3) == (r"(d='\n')", '')
+
+ # test bound methods interpreted as functions
+ assert format_sig('function', 'foo', H().foo1) == ('(b, *c)', '')
+ assert format_sig('function', 'foo', H().foo2) == ('(*c)', '')
+ assert format_sig('function', 'foo', H().foo3) == (r"(d='\n')", '')
+
+
+def test_format_method_signatures_error_handling() -> None:
+ # test exception handling (exception is caught and args is '')
+ config = _AutodocConfig(autodoc_docstring_signature=False)
+ assert format_sig('function', 'int', int, config=config) == ()
+
+
+def test_format_signatures_event_handler() -> None:
+ events = FakeEvents()
+ events.connect('autodoc-process-signature', _process_signature)
+
+ class H:
+ def foo1(self, b, *c): # type: ignore[no-untyped-def]
+ pass
+
+ # test processing by event handler
+ assert format_sig('method', 'bar', H.foo1, events=events) == ('42', '')
+
+
+def test_format_functools_partial_signatures() -> None:
+ # test functions created via functools.partial
+ from functools import partial
+
+ curried1 = partial(lambda a, b, c: None, 'A')
+ assert format_sig('function', 'curried1', curried1) == ('(b, c)', '')
+ curried2 = partial(lambda a, b, c=42: None, 'A')
+ assert format_sig('function', 'curried2', curried2) == ('(b, c=42)', '')
+ curried3 = partial(lambda a, b, *c: None, 'A')
+ assert format_sig('function', 'curried3', curried3) == ('(b, *c)', '')
+ curried4 = partial(lambda a, b, c=42, *d, **e: None, 'A')
+ assert format_sig('function', 'curried4', curried4) == ('(b, c=42, *d, **e)', '')
+
+
+def test_autodoc_process_signature_typing_generic() -> None:
+ T = TypeVar('T')
+
+ class A(Generic[T]):
+ def __init__(self, a, b=None): # type: ignore[no-untyped-def]
+ pass
+
+ # Test that typing.Generic's __new__ method does not mask
+ # the class's __init__ signature.
+ assert format_sig('class', 'A', A) == ('(a, b=None)', '')
+
+
+def test_autodoc_process_signature_typehints() -> None:
+ captured = []
+
+ def process_signature(*args: Any) -> None:
+ captured.append(args)
+
+ events = FakeEvents()
+ events.connect('autodoc-process-signature', process_signature)
+
+ def func(x: int, y: int) -> int: # type: ignore[empty-body]
+ pass
+
+ props = _FunctionDefProperties(
+ obj_type='function',
+ module_name='',
+ parts=('func',),
+ docstring_lines=(),
+ _obj=func,
+ _obj___module__=None,
+ _obj___qualname__=None,
+ _obj___name__=None,
+ properties=frozenset(),
+ )
+
+ options = _AutoDocumenterOptions()
+ _format_signatures(
+ autodoc_annotations={},
+ config=_AutodocConfig(),
+ docstrings=None,
+ events=events,
+ get_attr=safe_getattr,
+ options=options,
+ parent=None,
+ props=props,
+ )
+
+ app = events._app
+ assert captured == [
+ (app, 'function', '.func', func, options, '(x: int, y: int)', 'int')
+ ]
diff --git a/tests/test_ext_autodoc/test_ext_autodoc_typehints.py b/tests/test_ext_autodoc/test_ext_autodoc_typehints.py
new file mode 100644
index 00000000000..c1d7a7e401b
--- /dev/null
+++ b/tests/test_ext_autodoc/test_ext_autodoc_typehints.py
@@ -0,0 +1,842 @@
+"""Test the autodoc extension."""
+
+from __future__ import annotations
+
+from contextlib import contextmanager
+from typing import TYPE_CHECKING
+
+import pytest
+
+from sphinx.ext.autodoc._shared import _AutodocConfig
+from sphinx.testing import restructuredtext
+
+from tests.test_ext_autodoc.autodoc_util import do_autodoc
+
+if TYPE_CHECKING:
+ from collections.abc import Iterator
+ from pathlib import Path
+
+ from sphinx.testing.util import SphinxTestApp
+
+pytestmark = pytest.mark.usefixtures('inject_autodoc_root_into_sys_path')
+
+
+@contextmanager
+def overwrite_file(path: Path, content: str) -> Iterator[None]:
+ current_content = path.read_bytes() if path.exists() else None
+ try:
+ path.write_text(content, encoding='utf-8')
+ yield
+ finally:
+ if current_content is not None:
+ path.write_bytes(current_content)
+ else:
+ path.unlink()
+
+
+def test_autodoc_typehints_signature() -> None:
+ config = _AutodocConfig(autodoc_typehints='signature')
+
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc('module', 'target.typehints', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:module:: target.typehints',
+ '',
+ '',
+ '.. py:data:: CONST1',
+ ' :module: target.typehints',
+ ' :type: int',
+ '',
+ '',
+ '.. py:data:: CONST2',
+ ' :module: target.typehints',
+ ' :type: int',
+ ' :value: 1',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:data:: CONST3',
+ ' :module: target.typehints',
+ ' :type: ~pathlib.PurePosixPath',
+ " :value: PurePosixPath('/a/b/c')",
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:class:: Math(s: str, o: ~typing.Any = None)',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:attribute:: Math.CONST1',
+ ' :module: target.typehints',
+ ' :type: int',
+ '',
+ '',
+ ' .. py:attribute:: Math.CONST2',
+ ' :module: target.typehints',
+ ' :type: int',
+ ' :value: 1',
+ '',
+ '',
+ ' .. py:attribute:: Math.CONST3',
+ ' :module: target.typehints',
+ ' :type: ~pathlib.PurePosixPath',
+ " :value: PurePosixPath('/a/b/c')",
+ '',
+ '',
+ ' .. py:method:: Math.decr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:method:: Math.horse(a: str, b: int) -> None',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:method:: Math.incr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:method:: Math.nothing() -> None',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:property:: Math.path',
+ ' :module: target.typehints',
+ ' :type: ~pathlib.PurePosixPath',
+ '',
+ '',
+ ' .. py:property:: Math.prop',
+ ' :module: target.typehints',
+ ' :type: int',
+ '',
+ '',
+ '.. py:class:: NewAnnotation(i: int)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: NewComment(i: int)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: SignatureFromMetaclass(a: int)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: T',
+ ' :module: target.typehints',
+ '',
+ ' docstring',
+ '',
+ " alias of TypeVar('T', bound=\\ :py:class:`~pathlib.PurePosixPath`)",
+ '',
+ '',
+ '.. py:function:: complex_func(arg1: str, arg2: List[int], arg3: Tuple[int, '
+ 'Union[str, Unknown]] = None, *args: str, **kwargs: str) -> None',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: decr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: incr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: missing_attr(c, a: str, b: Optional[str] = None) -> str',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: tuple_args(x: tuple[int, int | str]) -> tuple[int, int]',
+ ' :module: target.typehints',
+ '',
+ ]
+
+
+def test_autodoc_typehints_none() -> None:
+ config = _AutodocConfig(autodoc_typehints='none')
+
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc('module', 'target.typehints', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:module:: target.typehints',
+ '',
+ '',
+ '.. py:data:: CONST1',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:data:: CONST2',
+ ' :module: target.typehints',
+ ' :value: 1',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:data:: CONST3',
+ ' :module: target.typehints',
+ " :value: PurePosixPath('/a/b/c')",
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:class:: Math(s, o=None)',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:attribute:: Math.CONST1',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:attribute:: Math.CONST2',
+ ' :module: target.typehints',
+ ' :value: 1',
+ '',
+ '',
+ ' .. py:attribute:: Math.CONST3',
+ ' :module: target.typehints',
+ " :value: PurePosixPath('/a/b/c')",
+ '',
+ '',
+ ' .. py:method:: Math.decr(a, b=1)',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:method:: Math.horse(a, b)',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:method:: Math.incr(a, b=1)',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:method:: Math.nothing()',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:property:: Math.path',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:property:: Math.prop',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: NewAnnotation(i)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: NewComment(i)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: SignatureFromMetaclass(a)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: T',
+ ' :module: target.typehints',
+ '',
+ ' docstring',
+ '',
+ " alias of TypeVar('T', bound=\\ :py:class:`~pathlib.PurePosixPath`)",
+ '',
+ '',
+ '.. py:function:: complex_func(arg1, arg2, arg3=None, *args, **kwargs)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: decr(a, b=1)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: incr(a, b=1)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: missing_attr(c, a, b=None)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: tuple_args(x)',
+ ' :module: target.typehints',
+ '',
+ ]
+
+
+def test_autodoc_typehints_none_for_overload() -> None:
+ config = _AutodocConfig(autodoc_typehints='none')
+
+ options = {'members': None}
+ actual = do_autodoc('module', 'target.overload', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:module:: target.overload',
+ '',
+ '',
+ '.. py:class:: Bar(x, y)',
+ ' :module: target.overload',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:class:: Baz(x, y)',
+ ' :module: target.overload',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:class:: Foo(x, y)',
+ ' :module: target.overload',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:class:: Math()',
+ ' :module: target.overload',
+ '',
+ ' docstring',
+ '',
+ '',
+ ' .. py:method:: Math.sum(x, y=None)',
+ ' :module: target.overload',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:function:: sum(x, y=None)',
+ ' :module: target.overload',
+ '',
+ ' docstring',
+ '',
+ ]
+
+
+@pytest.mark.sphinx('text', testroot='ext-autodoc', freshenv=True)
+def test_autodoc_typehints_description(app: SphinxTestApp) -> None:
+ app.config.autodoc_typehints = 'description'
+
+ app.build()
+ context = (app.outdir / 'index.txt').read_text(encoding='utf8')
+ expect = '\n'.join(( # NoQA: FLY002
+ 'target.typehints.incr(a, b=1)',
+ '',
+ ' Parameters:',
+ ' * **a** (*int*)',
+ '',
+ ' * **b** (*int*)',
+ '',
+ ' Return type:',
+ ' int',
+ '',
+ ))
+ assert expect in context
+ expect = '\n'.join(( # NoQA: FLY002
+ 'target.typehints.tuple_args(x)',
+ '',
+ ' Parameters:',
+ ' **x** (*tuple**[**int**, **int** | **str**]*)',
+ '',
+ ' Return type:',
+ ' tuple[int, int]',
+ '',
+ ))
+ assert expect in context
+
+ # Overloads still get displayed in the signature
+ expect = '\n'.join(( # NoQA: FLY002
+ 'target.overload.sum(x: int, y: int = 0) -> int',
+ 'target.overload.sum(x: float, y: float = 0.0) -> float',
+ 'target.overload.sum(x: str, y: str = None) -> str',
+ '',
+ ' docstring',
+ '',
+ ))
+ assert expect in context
+
+
+@pytest.mark.sphinx('text', testroot='ext-autodoc', copy_test_root=True)
+def test_autodoc_typehints_description_no_undoc(app: SphinxTestApp) -> None:
+ app.config.autodoc_typehints = 'description'
+ app.config.autodoc_typehints_description_target = 'documented'
+
+ # No :type: or :rtype: will be injected for `incr`, which does not have
+ # a description for its parameters or its return. `tuple_args` does
+ # describe them, so :type: and :rtype: will be added.
+ with overwrite_file(
+ app.srcdir / 'index.rst',
+ '.. autofunction:: target.typehints.incr\n'
+ '\n'
+ '.. autofunction:: target.typehints.decr\n'
+ '\n'
+ ' :returns: decremented number\n'
+ '\n'
+ '.. autofunction:: target.typehints.tuple_args\n'
+ '\n'
+ ' :param x: arg\n'
+ ' :return: another tuple\n',
+ ):
+ app.build()
+ # Restore the original content of the file
+ context = (app.outdir / 'index.txt').read_text(encoding='utf8')
+ expect = '\n'.join(( # NoQA: FLY002
+ 'target.typehints.incr(a, b=1)',
+ '',
+ 'target.typehints.decr(a, b=1)',
+ '',
+ ' Returns:',
+ ' decremented number',
+ '',
+ ' Return type:',
+ ' int',
+ '',
+ 'target.typehints.tuple_args(x)',
+ '',
+ ' Parameters:',
+ ' **x** (*tuple**[**int**, **int** | **str**]*) -- arg',
+ '',
+ ' Returns:',
+ ' another tuple',
+ '',
+ ' Return type:',
+ ' tuple[int, int]',
+ '',
+ ))
+ assert expect in context
+
+
+@pytest.mark.sphinx('text', testroot='ext-autodoc', copy_test_root=True)
+def test_autodoc_typehints_description_no_undoc_doc_rtype(app: SphinxTestApp) -> None:
+ app.config.autodoc_typehints = 'description'
+ app.config.autodoc_typehints_description_target = 'documented_params'
+
+ # No :type: will be injected for `incr`, which does not have a description
+ # for its parameters or its return, just :rtype: will be injected due to
+ # autodoc_typehints_description_target. `tuple_args` does describe both, so
+ # :type: and :rtype: will be added. `nothing` has no parameters but a return
+ # type of None, which will be added.
+ with overwrite_file(
+ app.srcdir / 'index.rst',
+ '.. autofunction:: target.typehints.incr\n'
+ '\n'
+ '.. autofunction:: target.typehints.decr\n'
+ '\n'
+ ' :returns: decremented number\n'
+ '\n'
+ '.. autofunction:: target.typehints.tuple_args\n'
+ '\n'
+ ' :param x: arg\n'
+ ' :return: another tuple\n'
+ '\n'
+ '.. autofunction:: target.typehints.Math.nothing\n'
+ '\n'
+ '.. autofunction:: target.typehints.Math.horse\n'
+ '\n'
+ ' :return: nothing\n',
+ ):
+ app.build()
+ context = (app.outdir / 'index.txt').read_text(encoding='utf8')
+ assert context == (
+ 'target.typehints.incr(a, b=1)\n'
+ '\n'
+ ' Return type:\n'
+ ' int\n'
+ '\n'
+ 'target.typehints.decr(a, b=1)\n'
+ '\n'
+ ' Returns:\n'
+ ' decremented number\n'
+ '\n'
+ ' Return type:\n'
+ ' int\n'
+ '\n'
+ 'target.typehints.tuple_args(x)\n'
+ '\n'
+ ' Parameters:\n'
+ ' **x** (*tuple**[**int**, **int** | **str**]*) -- arg\n'
+ '\n'
+ ' Returns:\n'
+ ' another tuple\n'
+ '\n'
+ ' Return type:\n'
+ ' tuple[int, int]\n'
+ '\n'
+ 'target.typehints.Math.nothing(self)\n'
+ '\n'
+ 'target.typehints.Math.horse(self, a, b)\n'
+ '\n'
+ ' Returns:\n'
+ ' nothing\n'
+ '\n'
+ ' Return type:\n'
+ ' None\n'
+ )
+
+
+@pytest.mark.sphinx('text', testroot='ext-autodoc', copy_test_root=True)
+def test_autodoc_typehints_description_with_documented_init(app: SphinxTestApp) -> None:
+ app.config.autodoc_typehints = 'description'
+
+ with overwrite_file(
+ app.srcdir / 'index.rst',
+ '.. autoclass:: target.typehints._ClassWithDocumentedInit\n'
+ ' :special-members: __init__\n',
+ ):
+ app.build()
+ context = (app.outdir / 'index.txt').read_text(encoding='utf8')
+ assert context == (
+ 'class target.typehints._ClassWithDocumentedInit(x, *args, **kwargs)\n'
+ '\n'
+ ' Class docstring.\n'
+ '\n'
+ ' Parameters:\n'
+ ' * **x** (*int*)\n'
+ '\n'
+ ' * **args** (*int*)\n'
+ '\n'
+ ' * **kwargs** (*int*)\n'
+ '\n'
+ ' __init__(x, *args, **kwargs)\n'
+ '\n'
+ ' Init docstring.\n'
+ '\n'
+ ' Parameters:\n'
+ ' * **x** (*int*) -- Some integer\n'
+ '\n'
+ ' * **args** (*int*) -- Some integer\n'
+ '\n'
+ ' * **kwargs** (*int*) -- Some integer\n'
+ '\n'
+ ' Return type:\n'
+ ' None\n'
+ )
+
+
+@pytest.mark.sphinx('text', testroot='ext-autodoc', copy_test_root=True)
+def test_autodoc_typehints_description_with_documented_init_no_undoc(
+ app: SphinxTestApp,
+) -> None:
+ app.config.autodoc_typehints = 'description'
+ app.config.autodoc_typehints_description_target = 'documented'
+
+ with overwrite_file(
+ app.srcdir / 'index.rst',
+ '.. autoclass:: target.typehints._ClassWithDocumentedInit\n'
+ ' :special-members: __init__\n',
+ ):
+ app.build()
+ context = (app.outdir / 'index.txt').read_text(encoding='utf8')
+ assert context == (
+ 'class target.typehints._ClassWithDocumentedInit(x, *args, **kwargs)\n'
+ '\n'
+ ' Class docstring.\n'
+ '\n'
+ ' __init__(x, *args, **kwargs)\n'
+ '\n'
+ ' Init docstring.\n'
+ '\n'
+ ' Parameters:\n'
+ ' * **x** (*int*) -- Some integer\n'
+ '\n'
+ ' * **args** (*int*) -- Some integer\n'
+ '\n'
+ ' * **kwargs** (*int*) -- Some integer\n'
+ )
+
+
+@pytest.mark.sphinx('text', testroot='ext-autodoc', copy_test_root=True)
+def test_autodoc_typehints_description_with_documented_init_no_undoc_doc_rtype(
+ app: SphinxTestApp,
+) -> None:
+ app.config.autodoc_typehints = 'description'
+ app.config.autodoc_typehints_description_target = 'documented_params'
+
+ # see test_autodoc_typehints_description_with_documented_init_no_undoc
+ # returnvalue_and_documented_params should not change class or method
+ # docstring.
+ with overwrite_file(
+ app.srcdir / 'index.rst',
+ '.. autoclass:: target.typehints._ClassWithDocumentedInit\n'
+ ' :special-members: __init__\n',
+ ):
+ app.build()
+ context = (app.outdir / 'index.txt').read_text(encoding='utf8')
+ assert context == (
+ 'class target.typehints._ClassWithDocumentedInit(x, *args, **kwargs)\n'
+ '\n'
+ ' Class docstring.\n'
+ '\n'
+ ' __init__(x, *args, **kwargs)\n'
+ '\n'
+ ' Init docstring.\n'
+ '\n'
+ ' Parameters:\n'
+ ' * **x** (*int*) -- Some integer\n'
+ '\n'
+ ' * **args** (*int*) -- Some integer\n'
+ '\n'
+ ' * **kwargs** (*int*) -- Some integer\n'
+ )
+
+
+@pytest.mark.sphinx('text', testroot='ext-autodoc')
+def test_autodoc_typehints_description_for_invalid_node(app: SphinxTestApp) -> None:
+ app.config.autodoc_typehints = 'description'
+
+ text = '.. py:function:: hello; world'
+ restructuredtext.parse(app, text) # raises no error
+
+
+@pytest.mark.sphinx('text', testroot='ext-autodoc', copy_test_root=True)
+def test_autodoc_typehints_both(app: SphinxTestApp) -> None:
+ app.config.autodoc_typehints = 'both'
+
+ with overwrite_file(
+ app.srcdir / 'index.rst',
+ '.. autofunction:: target.typehints.incr\n'
+ '\n'
+ '.. autofunction:: target.typehints.tuple_args\n'
+ '\n'
+ '.. autofunction:: target.overload.sum\n',
+ ):
+ app.build()
+ context = (app.outdir / 'index.txt').read_text(encoding='utf8')
+ expect = '\n'.join(( # NoQA: FLY002
+ 'target.typehints.incr(a: int, b: int = 1) -> int',
+ '',
+ ' Parameters:',
+ ' * **a** (*int*)',
+ '',
+ ' * **b** (*int*)',
+ '',
+ ' Return type:',
+ ' int',
+ '',
+ ))
+ assert expect in context
+ expect = '\n'.join(( # NoQA: FLY002
+ 'target.typehints.tuple_args(x: tuple[int, int | str]) -> tuple[int, int]',
+ '',
+ ' Parameters:',
+ ' **x** (*tuple**[**int**, **int** | **str**]*)',
+ '',
+ ' Return type:',
+ ' tuple[int, int]',
+ '',
+ ))
+ assert expect in context
+
+ # Overloads still get displayed in the signature
+ expect = '\n'.join(( # NoQA: FLY002
+ 'target.overload.sum(x: int, y: int = 0) -> int',
+ 'target.overload.sum(x: float, y: float = 0.0) -> float',
+ 'target.overload.sum(x: str, y: str = None) -> str',
+ '',
+ ' docstring',
+ '',
+ ))
+ assert expect in context
+
+
+@pytest.mark.sphinx('text', testroot='ext-autodoc', copy_test_root=True)
+def test_autodoc_typehints_description_and_type_aliases(app: SphinxTestApp) -> None:
+ app.config.autodoc_typehints = 'description'
+ app.config.autodoc_type_aliases = {'myint': 'myint'}
+
+ with overwrite_file(
+ app.srcdir / 'autodoc_type_aliases.rst',
+ '.. autofunction:: target.autodoc_type_aliases.sum',
+ ):
+ app.build()
+ context = (app.outdir / 'autodoc_type_aliases.txt').read_text(encoding='utf8')
+ assert context == (
+ 'target.autodoc_type_aliases.sum(x, y)\n'
+ '\n'
+ ' docstring\n'
+ '\n'
+ ' Parameters:\n'
+ ' * **x** (*myint*)\n'
+ '\n'
+ ' * **y** (*myint*)\n'
+ '\n'
+ ' Return type:\n'
+ ' myint\n'
+ )
+
+
+def test_autodoc_typehints_format_fully_qualified() -> None:
+ config = _AutodocConfig(autodoc_typehints_format='fully-qualified')
+
+ options = {
+ 'members': None,
+ 'undoc-members': None,
+ }
+ actual = do_autodoc('module', 'target.typehints', config=config, options=options)
+ assert actual == [
+ '',
+ '.. py:module:: target.typehints',
+ '',
+ '',
+ '.. py:data:: CONST1',
+ ' :module: target.typehints',
+ ' :type: int',
+ '',
+ '',
+ '.. py:data:: CONST2',
+ ' :module: target.typehints',
+ ' :type: int',
+ ' :value: 1',
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:data:: CONST3',
+ ' :module: target.typehints',
+ ' :type: pathlib.PurePosixPath',
+ " :value: PurePosixPath('/a/b/c')",
+ '',
+ ' docstring',
+ '',
+ '',
+ '.. py:class:: Math(s: str, o: typing.Any = None)',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:attribute:: Math.CONST1',
+ ' :module: target.typehints',
+ ' :type: int',
+ '',
+ '',
+ ' .. py:attribute:: Math.CONST2',
+ ' :module: target.typehints',
+ ' :type: int',
+ ' :value: 1',
+ '',
+ '',
+ ' .. py:attribute:: Math.CONST3',
+ ' :module: target.typehints',
+ ' :type: pathlib.PurePosixPath',
+ " :value: PurePosixPath('/a/b/c')",
+ '',
+ '',
+ ' .. py:method:: Math.decr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:method:: Math.horse(a: str, b: int) -> None',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:method:: Math.incr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:method:: Math.nothing() -> None',
+ ' :module: target.typehints',
+ '',
+ '',
+ ' .. py:property:: Math.path',
+ ' :module: target.typehints',
+ ' :type: pathlib.PurePosixPath',
+ '',
+ '',
+ ' .. py:property:: Math.prop',
+ ' :module: target.typehints',
+ ' :type: int',
+ '',
+ '',
+ '.. py:class:: NewAnnotation(i: int)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: NewComment(i: int)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: SignatureFromMetaclass(a: int)',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:class:: T',
+ ' :module: target.typehints',
+ '',
+ ' docstring',
+ '',
+ " alias of TypeVar('T', bound=\\ :py:class:`pathlib.PurePosixPath`)",
+ '',
+ '',
+ '.. py:function:: complex_func(arg1: str, arg2: List[int], arg3: Tuple[int, '
+ 'Union[str, Unknown]] = None, *args: str, **kwargs: str) -> None',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: decr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: incr(a: int, b: int = 1) -> int',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: missing_attr(c, a: str, b: Optional[str] = None) -> str',
+ ' :module: target.typehints',
+ '',
+ '',
+ '.. py:function:: tuple_args(x: tuple[int, int | str]) -> tuple[int, int]',
+ ' :module: target.typehints',
+ '',
+ ]
+
+
+def test_autodoc_typehints_format_fully_qualified_for_class_alias() -> None:
+ config = _AutodocConfig(autodoc_typehints_format='fully-qualified')
+
+ actual = do_autodoc('class', 'target.classes.Alias', config=config)
+ assert actual == [
+ '',
+ '.. py:attribute:: Alias',
+ ' :module: target.classes',
+ '',
+ ' alias of :py:class:`target.classes.Foo`',
+ ]
+
+
+def test_autodoc_typehints_format_fully_qualified_for_generic_alias() -> None:
+ config = _AutodocConfig(autodoc_typehints_format='fully-qualified')
+
+ actual = do_autodoc('data', 'target.genericalias.L', config=config)
+ assert actual == [
+ '',
+ '.. py:data:: L',
+ ' :module: target.genericalias',
+ '',
+ ' A list of Class',
+ '',
+ ' alias of :py:class:`~typing.List`\\ [:py:class:`target.genericalias.Class`]',
+ '',
+ ]
+
+
+def test_autodoc_typehints_format_fully_qualified_for_newtype_alias() -> None:
+ config = _AutodocConfig(autodoc_typehints_format='fully-qualified')
+
+ actual = do_autodoc('class', 'target.typevar.T6', config=config)
+ assert actual == [
+ '',
+ '.. py:class:: T6',
+ ' :module: target.typevar',
+ '',
+ ' T6',
+ '',
+ ' alias of :py:class:`datetime.date`',
+ '',
+ ]
diff --git a/tests/test_ext_autosummary/__init__.py b/tests/test_ext_autosummary/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/test_extensions/test_ext_autosummary.py b/tests/test_ext_autosummary/test_ext_autosummary.py
similarity index 96%
rename from tests/test_extensions/test_ext_autosummary.py
rename to tests/test_ext_autosummary/test_ext_autosummary.py
index c807ddba3d1..9749cd8f050 100644
--- a/tests/test_extensions/test_ext_autosummary.py
+++ b/tests/test_ext_autosummary/test_ext_autosummary.py
@@ -26,7 +26,6 @@
)
from sphinx.ext.autosummary.generate import main as autogen_main
from sphinx.testing.util import assert_node, etree_parse
-from sphinx.util.docutils import new_document
if TYPE_CHECKING:
from xml.etree.ElementTree import Element
@@ -86,7 +85,6 @@ def test_extract_summary(capsys):
pep_reference=False,
rfc_reference=False,
)
- document = new_document('', settings)
# normal case
doc = [
@@ -95,52 +93,52 @@ def test_extract_summary(capsys):
'',
'Second block is here',
]
- assert extract_summary(doc, document) == 'This is a first sentence.'
+ assert extract_summary(doc, settings) == 'This is a first sentence.'
# inliner case
doc = [
'This sentence contains *emphasis text having dots.*,',
'it does not break sentence.',
]
- assert extract_summary(doc, document) == ' '.join(doc)
+ assert extract_summary(doc, settings) == ' '.join(doc)
# abbreviations
doc = ['Blabla, i.e. bla.']
- assert extract_summary(doc, document) == ' '.join(doc)
+ assert extract_summary(doc, settings) == ' '.join(doc)
doc = ['Blabla, (i.e. bla).']
- assert extract_summary(doc, document) == ' '.join(doc)
+ assert extract_summary(doc, settings) == ' '.join(doc)
doc = ['Blabla, e.g. bla.']
- assert extract_summary(doc, document) == ' '.join(doc)
+ assert extract_summary(doc, settings) == ' '.join(doc)
doc = ['Blabla, (e.g. bla).']
- assert extract_summary(doc, document) == ' '.join(doc)
+ assert extract_summary(doc, settings) == ' '.join(doc)
doc = ['Blabla, et al. bla.']
- assert extract_summary(doc, document) == ' '.join(doc)
+ assert extract_summary(doc, settings) == ' '.join(doc)
# literal
doc = ['blah blah::']
- assert extract_summary(doc, document) == 'blah blah.'
+ assert extract_summary(doc, settings) == 'blah blah.'
# heading
doc = [
'blah blah',
'=========',
]
- assert extract_summary(doc, document) == 'blah blah'
+ assert extract_summary(doc, settings) == 'blah blah'
doc = [
'=========',
'blah blah',
'=========',
]
- assert extract_summary(doc, document) == 'blah blah'
+ assert extract_summary(doc, settings) == 'blah blah'
# hyperlink target
doc = ['Do `this `_ and that. blah blah blah.']
- extracted = extract_summary(doc, document)
+ extracted = extract_summary(doc, settings)
assert extracted == 'Do `this `_ and that.'
_, err = capsys.readouterr()
@@ -155,11 +153,9 @@ def test_extract_summary(capsys):
)
def test_get_items_summary(make_app, app_params):
import sphinx.ext.autosummary
- import sphinx.ext.autosummary.generate
args, kwargs = app_params
app = make_app(*args, **kwargs)
- sphinx.ext.autosummary.generate.setup_documenters(app)
# monkey-patch Autosummary.get_items so we can easily get access to it's
# results..
orig_get_items = sphinx.ext.autosummary.Autosummary.get_items
@@ -257,7 +253,6 @@ def test_autosummary_generate_content_for_module(app):
{},
config=app.config,
events=app.events,
- registry=app.registry,
)
assert template.render.call_args[0][0] == 'module'
@@ -320,7 +315,6 @@ def test_autosummary_generate_content_for_module___all__(app):
{},
config=app.config,
events=app.events,
- registry=app.registry,
)
assert template.render.call_args[0][0] == 'module'
@@ -372,7 +366,6 @@ def skip_member(app, what, name, obj, skip, options):
{},
config=app.config,
events=app.events,
- registry=app.registry,
)
context = template.render.call_args[0][1]
assert context['members'] == [
@@ -414,7 +407,6 @@ def test_autosummary_generate_content_for_module_imported_members(app):
{},
config=app.config,
events=app.events,
- registry=app.registry,
)
assert template.render.call_args[0][0] == 'module'
@@ -486,7 +478,6 @@ def test_autosummary_generate_content_for_module_imported_members_inherited_modu
{},
config=app.config,
events=app.events,
- registry=app.registry,
)
assert template.render.call_args[0][0] == 'module'
diff --git a/tests/test_extensions/test_ext_autosummary_imports.py b/tests/test_ext_autosummary/test_ext_autosummary_imports.py
similarity index 100%
rename from tests/test_extensions/test_ext_autosummary_imports.py
rename to tests/test_ext_autosummary/test_ext_autosummary_imports.py
diff --git a/tests/test_ext_imgconverter/__init__.py b/tests/test_ext_imgconverter/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/test_extensions/test_ext_imgconverter.py b/tests/test_ext_imgconverter/test_ext_imgconverter.py
similarity index 96%
rename from tests/test_extensions/test_ext_imgconverter.py
rename to tests/test_ext_imgconverter/test_ext_imgconverter.py
index a2078f9fe11..c39f885e97e 100644
--- a/tests/test_extensions/test_ext_imgconverter.py
+++ b/tests/test_ext_imgconverter/test_ext_imgconverter.py
@@ -12,7 +12,7 @@
@pytest.fixture
-def _if_converter_found(app):
+def _if_converter_found(app: SphinxTestApp) -> None:
image_converter = getattr(app.config, 'image_converter', '')
try:
if image_converter:
diff --git a/tests/test_extensions/test_ext_imgmockconverter.py b/tests/test_ext_imgconverter/test_ext_imgmockconverter.py
similarity index 100%
rename from tests/test_extensions/test_ext_imgmockconverter.py
rename to tests/test_ext_imgconverter/test_ext_imgmockconverter.py
diff --git a/tests/test_ext_intersphinx/__init__.py b/tests/test_ext_intersphinx/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/test_extensions/test_ext_intersphinx.py b/tests/test_ext_intersphinx/test_ext_intersphinx.py
similarity index 92%
rename from tests/test_extensions/test_ext_intersphinx.py
rename to tests/test_ext_intersphinx/test_ext_intersphinx.py
index c2b17d36a78..a4d7f4fe6fe 100644
--- a/tests/test_extensions/test_ext_intersphinx.py
+++ b/tests/test_ext_intersphinx/test_ext_intersphinx.py
@@ -18,10 +18,11 @@
from sphinx.ext.intersphinx import setup as intersphinx_setup
from sphinx.ext.intersphinx._cli import inspect_main
from sphinx.ext.intersphinx._load import (
- _fetch_inventory,
+ _fetch_inventory_data,
_fetch_inventory_group,
_get_safe_url,
_InvConfig,
+ _load_inventory,
_strip_basic_auth,
load_mappings,
validate_intersphinx_mapping,
@@ -34,6 +35,7 @@
INVENTORY_V2,
INVENTORY_V2_AMBIGUOUS_TERMS,
INVENTORY_V2_NO_VERSION,
+ INVENTORY_V2_TEXT_VERSION,
)
from tests.utils import http_server
@@ -85,12 +87,15 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
# same uri and inv, not redirected
mocked_get.url = 'https://hostname/' + INVENTORY_FILENAME
- _fetch_inventory(
- target_uri='https://hostname/',
+ target_uri = 'https://hostname/'
+ raw_data, target_uri = _fetch_inventory_data(
+ target_uri=target_uri,
inv_location='https://hostname/' + INVENTORY_FILENAME,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_path=None,
)
+ _load_inventory(raw_data, target_uri=target_uri)
assert 'intersphinx inventory has moved' not in app.status.getvalue()
assert InventoryFile.loads.call_args[1]['uri'] == 'https://hostname/'
@@ -99,12 +104,15 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
app.status.truncate(0)
mocked_get.url = 'https://hostname/new/' + INVENTORY_FILENAME
- _fetch_inventory(
- target_uri='https://hostname/',
+ target_uri = 'https://hostname/'
+ raw_data, target_uri = _fetch_inventory_data(
+ target_uri=target_uri,
inv_location='https://hostname/' + INVENTORY_FILENAME,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_path=None,
)
+ _load_inventory(raw_data, target_uri=target_uri)
assert app.status.getvalue() == (
'intersphinx inventory has moved: '
'https://hostname/%s -> https://hostname/new/%s\n'
@@ -117,12 +125,15 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
app.status.truncate(0)
mocked_get.url = 'https://hostname/new/' + INVENTORY_FILENAME
- _fetch_inventory(
- target_uri='https://hostname/',
+ target_uri = 'https://hostname/'
+ raw_data, target_uri = _fetch_inventory_data(
+ target_uri=target_uri,
inv_location='https://hostname/new/' + INVENTORY_FILENAME,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_path=None,
)
+ _load_inventory(raw_data, target_uri=target_uri)
assert 'intersphinx inventory has moved' not in app.status.getvalue()
assert InventoryFile.loads.call_args[1]['uri'] == 'https://hostname/'
@@ -131,12 +142,15 @@ def test_fetch_inventory_redirection(get_request, InventoryFile, app):
app.status.truncate(0)
mocked_get.url = 'https://hostname/other/' + INVENTORY_FILENAME
- _fetch_inventory(
- target_uri='https://hostname/',
+ target_uri = 'https://hostname/'
+ raw_data, target_uri = _fetch_inventory_data(
+ target_uri=target_uri,
inv_location='https://hostname/new/' + INVENTORY_FILENAME,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_path=None,
)
+ _load_inventory(raw_data, target_uri=target_uri)
assert app.status.getvalue() == (
'intersphinx inventory has moved: '
'https://hostname/new/%s -> https://hostname/other/%s\n'
@@ -774,13 +788,16 @@ def test_intersphinx_cache_limit(app, monkeypatch, cache_limit, expected_expired
now = 2 * 86400
monkeypatch.setattr('time.time', lambda: now)
- # `_fetch_inventory_group` calls `_fetch_inventory`.
+ # `_fetch_inventory_group` calls `_fetch_inventory_data`.
# We replace it with a mock to test whether it has been called.
# If it has been called, it means the cache had expired.
- mock_fake_inventory = _Inventory({}) # must be truthy
- mock_fetch_inventory = mock.Mock(return_value=mock_fake_inventory)
monkeypatch.setattr(
- 'sphinx.ext.intersphinx._load._fetch_inventory', mock_fetch_inventory
+ 'sphinx.ext.intersphinx._load._fetch_inventory_data',
+ mock.Mock(return_value=(b'', '')),
+ )
+ mock_fetch_inventory = mock.Mock(return_value=_Inventory({}))
+ monkeypatch.setattr(
+ 'sphinx.ext.intersphinx._load._load_inventory', mock_fetch_inventory
)
for name, (uri, locations) in app.config.intersphinx_mapping.values():
@@ -791,8 +808,9 @@ def test_intersphinx_cache_limit(app, monkeypatch, cache_limit, expected_expired
now=now,
config=_InvConfig.from_config(app.config),
srcdir=app.srcdir,
+ cache_dir=None,
)
- # If we hadn't mocked `_fetch_inventory`, it would've made
+ # If we hadn't mocked `_fetch_inventory_data`, it would've made
# a request to `https://example.org/` and found no inventory
# file. That would've been an error, and `updated` would've been
# False even if the cache had expired. The mock makes it behave
@@ -826,8 +844,14 @@ def log_message(*args, **kwargs):
}
now = int(time.time())
- # we can use 'srcdir=None' since we are raising in _fetch_inventory
- kwds = {'cache': {}, 'now': now, 'config': config, 'srcdir': None}
+ # we can use 'srcdir=None' since we are raising in _fetch_inventory_data
+ kwds = {
+ 'cache': {},
+ 'now': now,
+ 'config': config,
+ 'srcdir': None,
+ 'cache_dir': None,
+ }
# We need an exception with its 'args' attribute set (see error
# handling in sphinx.ext.intersphinx._load._fetch_inventory_group).
side_effect = ValueError('')
@@ -836,7 +860,8 @@ def log_message(*args, **kwargs):
name='1', target_uri=url1, locations=(url1, None)
)
with mock.patch(
- 'sphinx.ext.intersphinx._load._fetch_inventory', side_effect=side_effect
+ 'sphinx.ext.intersphinx._load._fetch_inventory_data',
+ side_effect=side_effect,
) as mockfn:
assert not _fetch_inventory_group(project=project1, **kwds)
mockfn.assert_any_call(
@@ -844,19 +869,22 @@ def log_message(*args, **kwargs):
inv_location=url1,
config=config,
srcdir=None,
+ cache_path=None,
)
mockfn.assert_any_call(
target_uri=url1,
inv_location=url1 + '/' + INVENTORY_FILENAME,
config=config,
srcdir=None,
+ cache_path=None,
)
project2 = _IntersphinxProject(
name='2', target_uri=url2, locations=(url2, None)
)
with mock.patch(
- 'sphinx.ext.intersphinx._load._fetch_inventory', side_effect=side_effect
+ 'sphinx.ext.intersphinx._load._fetch_inventory_data',
+ side_effect=side_effect,
) as mockfn:
assert not _fetch_inventory_group(project=project2, **kwds)
mockfn.assert_any_call(
@@ -864,10 +892,34 @@ def log_message(*args, **kwargs):
inv_location=url2,
config=config,
srcdir=None,
+ cache_path=None,
)
mockfn.assert_any_call(
target_uri=url2,
inv_location=url2 + INVENTORY_FILENAME,
config=config,
srcdir=None,
+ cache_path=None,
)
+
+
+@pytest.mark.sphinx('html', testroot='root')
+def test_inventory_text_version(tmp_path, app):
+ inv_file = tmp_path / 'inventory'
+ inv_file.write_bytes(INVENTORY_V2_TEXT_VERSION)
+ set_config(
+ app,
+ {
+ 'python': ('https://docs.python.org/', str(inv_file)),
+ },
+ )
+
+ # load the inventory and check if non-numeric version is handled correctly
+ validate_intersphinx_mapping(app, app.config)
+ load_mappings(app)
+
+ rn = reference_check(app, 'py', 'mod', 'module1', 'foo')
+ assert isinstance(rn, nodes.reference)
+ assert rn['refuri'] == 'https://docs.python.org/foo.html#module-module1'
+ assert rn['reftitle'] == '(in foo stable)'
+ assert rn[0].astext() == 'Long Module desc'
diff --git a/tests/test_extensions/test_ext_intersphinx_cache.py b/tests/test_ext_intersphinx/test_ext_intersphinx_cache.py
similarity index 95%
rename from tests/test_extensions/test_ext_intersphinx_cache.py
rename to tests/test_ext_intersphinx/test_ext_intersphinx_cache.py
index b2e52a5b180..7290e4430ba 100644
--- a/tests/test_extensions/test_ext_intersphinx_cache.py
+++ b/tests/test_ext_intersphinx/test_ext_intersphinx_cache.py
@@ -4,6 +4,7 @@
import posixpath
import re
+import shutil
import zlib
from http.server import BaseHTTPRequestHandler
from io import BytesIO
@@ -17,7 +18,7 @@
if TYPE_CHECKING:
from collections.abc import Iterable
- from typing import BinaryIO
+ from typing import Any, BinaryIO
BASE_CONFIG = {
@@ -212,7 +213,7 @@ def do_GET(self):
self.end_headers()
self.wfile.write(data)
- def log_message(*args, **kwargs):
+ def log_message(*args: Any, **kwargs: Any) -> None:
pass
return InventoryHandler
@@ -242,7 +243,7 @@ def test_load_mappings_cache(tmp_path):
item = dict((project.normalise(entry),))
inventories = InventoryAdapter(app.env)
assert list(inventories.cache) == ['http://localhost:9341/a']
- e_name, e_time, e_inv = inventories.cache['http://localhost:9341/a']
+ e_name, _e_time, e_inv = inventories.cache['http://localhost:9341/a']
assert e_name == 'spam'
assert e_inv == {'py:module': item}
assert inventories.named_inventory == {'spam': {'py:module': item}}
@@ -261,19 +262,21 @@ def test_load_mappings_cache_update(tmp_path):
app1 = SphinxTestApp('dummy', srcdir=tmp_path, confoverrides=confoverrides1)
app1.build()
app1.cleanup()
+ shutil.rmtree(app1.doctreedir / '__intersphinx_cache__', ignore_errors=True)
# switch to new url and assert that the old URL is no more stored
confoverrides2 = BASE_CONFIG | {'intersphinx_mapping': new_project.record}
app2 = SphinxTestApp('dummy', srcdir=tmp_path, confoverrides=confoverrides2)
app2.build()
app2.cleanup()
+ shutil.rmtree(app2.doctreedir / '__intersphinx_cache__', ignore_errors=True)
entry = new_project.make_entry()
item = dict((new_project.normalise(entry),))
inventories = InventoryAdapter(app2.env)
# check that the URLs were changed accordingly
assert list(inventories.cache) == ['http://localhost:9341/new']
- e_name, e_time, e_inv = inventories.cache['http://localhost:9341/new']
+ e_name, _e_time, e_inv = inventories.cache['http://localhost:9341/new']
assert e_name == 'spam'
assert e_inv == {'py:module': item}
assert inventories.named_inventory == {'spam': {'py:module': item}}
@@ -310,7 +313,7 @@ def test_load_mappings_cache_revert_update(tmp_path):
inventories = InventoryAdapter(app3.env)
# check that the URLs were changed accordingly
assert list(inventories.cache) == ['http://localhost:9341/old']
- e_name, e_time, e_inv = inventories.cache['http://localhost:9341/old']
+ e_name, _e_time, e_inv = inventories.cache['http://localhost:9341/old']
assert e_name == 'spam'
assert e_inv == {'py:module': item}
assert inventories.named_inventory == {'spam': {'py:module': item}}
diff --git a/tests/test_ext_napoleon/__init__.py b/tests/test_ext_napoleon/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/tests/test_extensions/ext_napoleon_pep526_data_google.py b/tests/test_ext_napoleon/pep526_data_google.py
similarity index 100%
rename from tests/test_extensions/ext_napoleon_pep526_data_google.py
rename to tests/test_ext_napoleon/pep526_data_google.py
diff --git a/tests/test_extensions/ext_napoleon_pep526_data_numpy.py b/tests/test_ext_napoleon/pep526_data_numpy.py
similarity index 100%
rename from tests/test_extensions/ext_napoleon_pep526_data_numpy.py
rename to tests/test_ext_napoleon/pep526_data_numpy.py
diff --git a/tests/test_extensions/test_ext_napoleon.py b/tests/test_ext_napoleon/test_ext_napoleon.py
similarity index 100%
rename from tests/test_extensions/test_ext_napoleon.py
rename to tests/test_ext_napoleon/test_ext_napoleon.py
diff --git a/tests/test_extensions/test_ext_napoleon_docstring.py b/tests/test_ext_napoleon/test_ext_napoleon_docstring.py
similarity index 99%
rename from tests/test_extensions/test_ext_napoleon_docstring.py
rename to tests/test_ext_napoleon/test_ext_napoleon_docstring.py
index a6e9820e5c9..62e103d6a14 100644
--- a/tests/test_extensions/test_ext_napoleon_docstring.py
+++ b/tests/test_ext_napoleon/test_ext_napoleon_docstring.py
@@ -25,8 +25,8 @@
)
from sphinx.testing.util import etree_parse
-from tests.test_extensions.ext_napoleon_pep526_data_google import PEP526GoogleClass
-from tests.test_extensions.ext_napoleon_pep526_data_numpy import PEP526NumpyClass
+from tests.test_ext_napoleon.pep526_data_google import PEP526GoogleClass
+from tests.test_ext_napoleon.pep526_data_numpy import PEP526NumpyClass
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
diff --git a/tests/test_extensions/autodoc_util.py b/tests/test_extensions/autodoc_util.py
deleted file mode 100644
index 3d08c739300..00000000000
--- a/tests/test_extensions/autodoc_util.py
+++ /dev/null
@@ -1,34 +0,0 @@
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-from unittest.mock import Mock
-
-# NEVER import those objects from sphinx.ext.autodoc directly
-from sphinx.ext.autodoc.directive import DocumenterBridge, process_documenter_options
-from sphinx.util.docutils import LoggingReporter
-
-if TYPE_CHECKING:
- from typing import Any
-
- from docutils.statemachine import StringList
-
- from sphinx.application import Sphinx
-
-
-def do_autodoc(
- app: Sphinx,
- objtype: str,
- name: str,
- options: dict[str, Any] | None = None,
-) -> StringList:
- options = {} if options is None else options.copy()
- if not app.env.current_document.docname:
- app.env.current_document.docname = 'index' # set dummy docname
- doccls = app.registry.documenters[objtype]
- docoptions = process_documenter_options(doccls, app.config, options)
- state = Mock()
- state.document.settings.tab_width = 8
- bridge = DocumenterBridge(app.env, LoggingReporter(''), docoptions, 1, state)
- documenter = doccls(bridge, name)
- documenter.generate()
- return bridge.result
diff --git a/tests/test_extensions/test_ext_apidoc.py b/tests/test_extensions/test_ext_apidoc.py
index a9742337013..0052a4740bb 100644
--- a/tests/test_extensions/test_ext_apidoc.py
+++ b/tests/test_extensions/test_ext_apidoc.py
@@ -752,7 +752,7 @@ def test_no_duplicates(rootdir, tmp_path):
sphinx.ext.apidoc._generate.PY_SUFFIXES = original_suffixes
-def test_remove_old_files(tmp_path: Path):
+def test_remove_old_files(tmp_path: Path) -> None:
"""Test that old files are removed when using the -r option.
Also ensure that pre-existing files are not re-written, if unchanged.
diff --git a/tests/test_extensions/test_ext_autodoc_autoattribute.py b/tests/test_extensions/test_ext_autodoc_autoattribute.py
deleted file mode 100644
index 878f5b6e31b..00000000000
--- a/tests/test_extensions/test_ext_autodoc_autoattribute.py
+++ /dev/null
@@ -1,183 +0,0 @@
-"""Test the autodoc extension.
-
-This tests mainly the Documenters; the auto directives are tested in a test
-source file translated by test_build.
-"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-import pytest
-
-from tests.test_extensions.autodoc_util import do_autodoc
-
-if TYPE_CHECKING:
- from sphinx.testing.util import SphinxTestApp
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.Class.attr')
- assert list(actual) == [
- '',
- '.. py:attribute:: Class.attr',
- ' :module: target',
- " :value: 'bar'",
- '',
- ' should be documented -- süß',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_novalue(app: SphinxTestApp) -> None:
- options = {'no-value': None}
- actual = do_autodoc(app, 'attribute', 'target.Class.attr', options)
- assert list(actual) == [
- '',
- '.. py:attribute:: Class.attr',
- ' :module: target',
- '',
- ' should be documented -- süß',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_typed_variable(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.typed_vars.Class.attr2')
- assert list(actual) == [
- '',
- '.. py:attribute:: Class.attr2',
- ' :module: target.typed_vars',
- ' :type: int',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_typed_variable_in_alias(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.typed_vars.Alias.attr2')
- assert list(actual) == [
- '',
- '.. py:attribute:: Alias.attr2',
- ' :module: target.typed_vars',
- ' :type: int',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_instance_variable(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.typed_vars.Class.attr4')
- assert list(actual) == [
- '',
- '.. py:attribute:: Class.attr4',
- ' :module: target.typed_vars',
- ' :type: int',
- '',
- ' attr4',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_instance_variable_in_alias(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.typed_vars.Alias.attr4')
- assert list(actual) == [
- '',
- '.. py:attribute:: Alias.attr4',
- ' :module: target.typed_vars',
- ' :type: int',
- '',
- ' attr4',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_instance_variable_without_comment(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.instance_variable.Bar.attr4')
- assert list(actual) == [
- '',
- '.. py:attribute:: Bar.attr4',
- ' :module: target.instance_variable',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_slots_variable_list(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.slots.Foo.attr')
- assert list(actual) == [
- '',
- '.. py:attribute:: Foo.attr',
- ' :module: target.slots',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_slots_variable_dict(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.slots.Bar.attr1')
- assert list(actual) == [
- '',
- '.. py:attribute:: Bar.attr1',
- ' :module: target.slots',
- ' :type: int',
- '',
- ' docstring of attr1',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_slots_variable_str(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.slots.Baz.attr')
- assert list(actual) == [
- '',
- '.. py:attribute:: Baz.attr',
- ' :module: target.slots',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_GenericAlias(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.genericalias.Class.T')
- assert list(actual) == [
- '',
- '.. py:attribute:: Class.T',
- ' :module: target.genericalias',
- '',
- ' A list of int',
- '',
- ' alias of :py:class:`~typing.List`\\ [:py:class:`int`]',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoattribute_hide_value(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'attribute', 'target.hide_value.Foo.SENTINEL1')
- assert list(actual) == [
- '',
- '.. py:attribute:: Foo.SENTINEL1',
- ' :module: target.hide_value',
- '',
- ' docstring',
- '',
- ' :meta hide-value:',
- '',
- ]
-
- actual = do_autodoc(app, 'attribute', 'target.hide_value.Foo.SENTINEL2')
- assert list(actual) == [
- '',
- '.. py:attribute:: Foo.SENTINEL2',
- ' :module: target.hide_value',
- '',
- ' :meta hide-value:',
- '',
- ]
diff --git a/tests/test_extensions/test_ext_autodoc_autodata.py b/tests/test_extensions/test_ext_autodoc_autodata.py
deleted file mode 100644
index 1474d70dc56..00000000000
--- a/tests/test_extensions/test_ext_autodoc_autodata.py
+++ /dev/null
@@ -1,115 +0,0 @@
-"""Test the autodoc extension.
-
-This tests mainly the Documenters; the auto directives are tested in a test
-source file translated by test_build.
-"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-import pytest
-
-from sphinx.testing.util import SphinxTestApp
-
-from tests.test_extensions.autodoc_util import do_autodoc
-
-if TYPE_CHECKING:
- from sphinx.testing.util import SphinxTestApp
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodata(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'data', 'target.integer')
- assert list(actual) == [
- '',
- '.. py:data:: integer',
- ' :module: target',
- ' :value: 1',
- '',
- ' documentation for the integer',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodata_novalue(app: SphinxTestApp) -> None:
- options = {'no-value': None}
- actual = do_autodoc(app, 'data', 'target.integer', options)
- assert list(actual) == [
- '',
- '.. py:data:: integer',
- ' :module: target',
- '',
- ' documentation for the integer',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodata_typed_variable(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'data', 'target.typed_vars.attr2')
- assert list(actual) == [
- '',
- '.. py:data:: attr2',
- ' :module: target.typed_vars',
- ' :type: str',
- '',
- ' attr2',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodata_type_comment(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'data', 'target.typed_vars.attr3')
- assert list(actual) == [
- '',
- '.. py:data:: attr3',
- ' :module: target.typed_vars',
- ' :type: str',
- " :value: ''",
- '',
- ' attr3',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodata_GenericAlias(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'data', 'target.genericalias.T')
- assert list(actual) == [
- '',
- '.. py:data:: T',
- ' :module: target.genericalias',
- '',
- ' A list of int',
- '',
- ' alias of :py:class:`~typing.List`\\ [:py:class:`int`]',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodata_hide_value(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'data', 'target.hide_value.SENTINEL1')
- assert list(actual) == [
- '',
- '.. py:data:: SENTINEL1',
- ' :module: target.hide_value',
- '',
- ' docstring',
- '',
- ' :meta hide-value:',
- '',
- ]
-
- actual = do_autodoc(app, 'data', 'target.hide_value.SENTINEL2')
- assert list(actual) == [
- '',
- '.. py:data:: SENTINEL2',
- ' :module: target.hide_value',
- '',
- ' :meta hide-value:',
- '',
- ]
diff --git a/tests/test_extensions/test_ext_autodoc_autofunction.py b/tests/test_extensions/test_ext_autodoc_autofunction.py
deleted file mode 100644
index 946d051d570..00000000000
--- a/tests/test_extensions/test_ext_autodoc_autofunction.py
+++ /dev/null
@@ -1,221 +0,0 @@
-"""Test the autodoc extension.
-
-This tests mainly the Documenters; the auto directives are tested in a test
-source file translated by test_build.
-"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-if TYPE_CHECKING:
- from typing import Any
-
- from sphinx.testing.util import SphinxTestApp
-
-import pytest
-
-from tests.test_extensions.autodoc_util import do_autodoc
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_classes(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.classes.Foo')
- assert list(actual) == [
- '',
- '.. py:function:: Foo()',
- ' :module: target.classes',
- '',
- ]
-
- actual = do_autodoc(app, 'function', 'target.classes.Bar')
- assert list(actual) == [
- '',
- '.. py:function:: Bar(x, y)',
- ' :module: target.classes',
- '',
- ]
-
- actual = do_autodoc(app, 'function', 'target.classes.Baz')
- assert list(actual) == [
- '',
- '.. py:function:: Baz(x, y)',
- ' :module: target.classes',
- '',
- ]
-
- actual = do_autodoc(app, 'function', 'target.classes.Qux')
- assert list(actual) == [
- '',
- '.. py:function:: Qux(foo, bar)',
- ' :module: target.classes',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_callable(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.callable.function')
- assert list(actual) == [
- '',
- '.. py:function:: function(arg1, arg2, **kwargs)',
- ' :module: target.callable',
- '',
- ' A callable object that behaves like a function.',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_method(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.callable.method')
- assert list(actual) == [
- '',
- '.. py:function:: method(arg1, arg2)',
- ' :module: target.callable',
- '',
- ' docstring of Callable.method().',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_builtin_function(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'os.umask')
- assert list(actual) == [
- '',
- '.. py:function:: umask(mask, /)',
- ' :module: os',
- '',
- ' Set the current numeric umask and return the previous umask.',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_methoddescriptor(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'builtins.int.__add__')
- assert list(actual) == [
- '',
- '.. py:function:: __add__(self, value, /)',
- ' :module: builtins.int',
- '',
- ' Return self+value.',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_decorated(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.decorator.foo')
- assert list(actual) == [
- '',
- '.. py:function:: foo(name=None, age=None)',
- ' :module: target.decorator',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_singledispatch(app: SphinxTestApp) -> None:
- options: dict[str, Any] = {}
- actual = do_autodoc(app, 'function', 'target.singledispatch.func', options)
- assert list(actual) == [
- '',
- '.. py:function:: func(arg, kwarg=None)',
- ' func(arg: float, kwarg=None)',
- ' func(arg: int, kwarg=None)',
- ' func(arg: str, kwarg=None)',
- ' func(arg: dict, kwarg=None)',
- ' :module: target.singledispatch',
- '',
- ' A function for general use.',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_cfunction(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'time.asctime')
- assert list(actual) == [
- '',
- '.. py:function:: asctime([tuple]) -> string',
- ' :module: time',
- '',
- " Convert a time tuple to a string, e.g. 'Sat Jun 06 16:26:11 1998'.",
- ' When the time tuple is not present, current time as returned by localtime()',
- ' is used.',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_wrapped_function(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.wrappedfunction.slow_function')
- assert list(actual) == [
- '',
- '.. py:function:: slow_function(message, timeout)',
- ' :module: target.wrappedfunction',
- '',
- ' This function is slow.',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_wrapped_function_contextmanager(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.wrappedfunction.feeling_good')
- assert list(actual) == [
- '',
- '.. py:function:: feeling_good(x: int, y: int) -> ~typing.Generator',
- ' :module: target.wrappedfunction',
- '',
- " You'll feel better in this context!",
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_coroutine(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.functions.coroutinefunc')
- assert list(actual) == [
- '',
- '.. py:function:: coroutinefunc()',
- ' :module: target.functions',
- ' :async:',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_synchronized_coroutine(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.coroutine.sync_func')
- assert list(actual) == [
- '',
- '.. py:function:: sync_func()',
- ' :module: target.coroutine',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_async_generator(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.functions.asyncgenerator')
- assert list(actual) == [
- '',
- '.. py:function:: asyncgenerator()',
- ' :module: target.functions',
- ' :async:',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_slice_function_arg(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'function', 'target.functions.slice_arg_func')
- assert list(actual) == [
- '',
- '.. py:function:: slice_arg_func(arg: float64[:, :])',
- ' :module: target.functions',
- '',
- ]
diff --git a/tests/test_extensions/test_ext_autodoc_autoproperty.py b/tests/test_extensions/test_ext_autodoc_autoproperty.py
deleted file mode 100644
index 6698d4c8fc2..00000000000
--- a/tests/test_extensions/test_ext_autodoc_autoproperty.py
+++ /dev/null
@@ -1,104 +0,0 @@
-"""Test the autodoc extension.
-
-This tests mainly the Documenters; the auto directives are tested in a test
-source file translated by test_build.
-"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-import pytest
-
-from tests.test_extensions.autodoc_util import do_autodoc
-
-if TYPE_CHECKING:
- from sphinx.testing.util import SphinxTestApp
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_properties(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'property', 'target.properties.Foo.prop1')
- assert list(actual) == [
- '',
- '.. py:property:: Foo.prop1',
- ' :module: target.properties',
- ' :type: int',
- '',
- ' docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_class_properties(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'property', 'target.properties.Foo.prop2')
- assert list(actual) == [
- '',
- '.. py:property:: Foo.prop2',
- ' :module: target.properties',
- ' :classmethod:',
- ' :type: int',
- '',
- ' docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_properties_with_type_comment(app: SphinxTestApp) -> None:
- actual = do_autodoc(
- app, 'property', 'target.properties.Foo.prop1_with_type_comment'
- )
- assert list(actual) == [
- '',
- '.. py:property:: Foo.prop1_with_type_comment',
- ' :module: target.properties',
- ' :type: int',
- '',
- ' docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_class_properties_with_type_comment(app: SphinxTestApp) -> None:
- actual = do_autodoc(
- app, 'property', 'target.properties.Foo.prop2_with_type_comment'
- )
- assert list(actual) == [
- '',
- '.. py:property:: Foo.prop2_with_type_comment',
- ' :module: target.properties',
- ' :classmethod:',
- ' :type: int',
- '',
- ' docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_cached_properties(app: SphinxTestApp) -> None:
- actual = do_autodoc(app, 'property', 'target.cached_property.Foo.prop')
- assert list(actual) == [
- '',
- '.. py:property:: Foo.prop',
- ' :module: target.cached_property',
- ' :type: int',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_cached_properties_with_type_comment(app: SphinxTestApp) -> None:
- actual = do_autodoc(
- app, 'property', 'target.cached_property.Foo.prop_with_type_comment'
- )
- assert list(actual) == [
- '',
- '.. py:property:: Foo.prop_with_type_comment',
- ' :module: target.cached_property',
- ' :type: int',
- '',
- ]
diff --git a/tests/test_extensions/test_ext_autodoc_configs.py b/tests/test_extensions/test_ext_autodoc_configs.py
deleted file mode 100644
index ab7539190e0..00000000000
--- a/tests/test_extensions/test_ext_autodoc_configs.py
+++ /dev/null
@@ -1,1862 +0,0 @@
-"""Test the autodoc extension. This tests mainly for config variables"""
-
-from __future__ import annotations
-
-import platform
-import sys
-from contextlib import contextmanager
-from typing import TYPE_CHECKING
-
-import pytest
-
-from sphinx.testing import restructuredtext
-
-from tests.test_extensions.autodoc_util import do_autodoc
-
-if TYPE_CHECKING:
- from collections.abc import Iterator
- from pathlib import Path
-
- from sphinx.testing.util import SphinxTestApp
-
-IS_PYPY = platform.python_implementation() == 'PyPy'
-
-
-@contextmanager
-def overwrite_file(path: Path, content: str) -> Iterator[None]:
- current_content = path.read_bytes() if path.exists() else None
- try:
- path.write_text(content, encoding='utf-8')
- yield
- finally:
- if current_content is not None:
- path.write_bytes(current_content)
- else:
- path.unlink()
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoclass_content_class(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'class'
- options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.autoclass_content', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.autoclass_content',
- '',
- '',
- '.. py:class:: A()',
- ' :module: target.autoclass_content',
- '',
- ' A class having no __init__, no __new__',
- '',
- '',
- '.. py:class:: B()',
- ' :module: target.autoclass_content',
- '',
- ' A class having __init__(no docstring), no __new__',
- '',
- '',
- '.. py:class:: C()',
- ' :module: target.autoclass_content',
- '',
- ' A class having __init__, no __new__',
- '',
- '',
- '.. py:class:: D()',
- ' :module: target.autoclass_content',
- '',
- ' A class having no __init__, __new__(no docstring)',
- '',
- '',
- '.. py:class:: E()',
- ' :module: target.autoclass_content',
- '',
- ' A class having no __init__, __new__',
- '',
- '',
- '.. py:class:: F()',
- ' :module: target.autoclass_content',
- '',
- ' A class having both __init__ and __new__',
- '',
- '',
- '.. py:class:: G()',
- ' :module: target.autoclass_content',
- '',
- ' A class inherits __init__ without docstring.',
- '',
- '',
- '.. py:class:: H()',
- ' :module: target.autoclass_content',
- '',
- ' A class inherits __new__ without docstring.',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoclass_content_init(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'init'
- options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.autoclass_content', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.autoclass_content',
- '',
- '',
- '.. py:class:: A()',
- ' :module: target.autoclass_content',
- '',
- ' A class having no __init__, no __new__',
- '',
- '',
- '.. py:class:: B()',
- ' :module: target.autoclass_content',
- '',
- ' A class having __init__(no docstring), no __new__',
- '',
- '',
- '.. py:class:: C()',
- ' :module: target.autoclass_content',
- '',
- ' __init__ docstring',
- '',
- '',
- '.. py:class:: D()',
- ' :module: target.autoclass_content',
- '',
- ' A class having no __init__, __new__(no docstring)',
- '',
- '',
- '.. py:class:: E()',
- ' :module: target.autoclass_content',
- '',
- ' __new__ docstring',
- '',
- '',
- '.. py:class:: F()',
- ' :module: target.autoclass_content',
- '',
- ' __init__ docstring',
- '',
- '',
- '.. py:class:: G()',
- ' :module: target.autoclass_content',
- '',
- ' __init__ docstring',
- '',
- '',
- '.. py:class:: H()',
- ' :module: target.autoclass_content',
- '',
- ' __new__ docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_class_signature_mixed(app: SphinxTestApp) -> None:
- app.config.autodoc_class_signature = 'mixed'
- options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.classes.Bar', options)
- assert list(actual) == [
- '',
- '.. py:class:: Bar(x, y)',
- ' :module: target.classes',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_class_signature_separated_init(app: SphinxTestApp) -> None:
- app.config.autodoc_class_signature = 'separated'
- options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.classes.Bar', options)
- assert list(actual) == [
- '',
- '.. py:class:: Bar',
- ' :module: target.classes',
- '',
- '',
- ' .. py:method:: Bar.__init__(x, y)',
- ' :module: target.classes',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_class_signature_separated_new(app: SphinxTestApp) -> None:
- app.config.autodoc_class_signature = 'separated'
- options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.classes.Baz', options)
- assert list(actual) == [
- '',
- '.. py:class:: Baz',
- ' :module: target.classes',
- '',
- '',
- ' .. py:method:: Baz.__new__(cls, x, y)',
- ' :module: target.classes',
- ' :staticmethod:',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoclass_content_both(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'both'
- options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.autoclass_content', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.autoclass_content',
- '',
- '',
- '.. py:class:: A()',
- ' :module: target.autoclass_content',
- '',
- ' A class having no __init__, no __new__',
- '',
- '',
- '.. py:class:: B()',
- ' :module: target.autoclass_content',
- '',
- ' A class having __init__(no docstring), no __new__',
- '',
- '',
- '.. py:class:: C()',
- ' :module: target.autoclass_content',
- '',
- ' A class having __init__, no __new__',
- '',
- ' __init__ docstring',
- '',
- '',
- '.. py:class:: D()',
- ' :module: target.autoclass_content',
- '',
- ' A class having no __init__, __new__(no docstring)',
- '',
- '',
- '.. py:class:: E()',
- ' :module: target.autoclass_content',
- '',
- ' A class having no __init__, __new__',
- '',
- ' __new__ docstring',
- '',
- '',
- '.. py:class:: F()',
- ' :module: target.autoclass_content',
- '',
- ' A class having both __init__ and __new__',
- '',
- ' __init__ docstring',
- '',
- '',
- '.. py:class:: G()',
- ' :module: target.autoclass_content',
- '',
- ' A class inherits __init__ without docstring.',
- '',
- ' __init__ docstring',
- '',
- '',
- '.. py:class:: H()',
- ' :module: target.autoclass_content',
- '',
- ' A class inherits __new__ without docstring.',
- '',
- ' __new__ docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_inherit_docstrings(app: SphinxTestApp) -> None:
- assert app.config.autodoc_inherit_docstrings is True # default
- actual = do_autodoc(app, 'method', 'target.inheritance.Derived.inheritedmeth')
- assert list(actual) == [
- '',
- '.. py:method:: Derived.inheritedmeth()',
- ' :module: target.inheritance',
- '',
- ' Inherited function.',
- '',
- ]
-
- # disable autodoc_inherit_docstrings
- app.config.autodoc_inherit_docstrings = False
- actual = do_autodoc(app, 'method', 'target.inheritance.Derived.inheritedmeth')
- assert list(actual) == [
- '',
- '.. py:method:: Derived.inheritedmeth()',
- ' :module: target.inheritance',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_inherit_docstrings_for_inherited_members(app: SphinxTestApp) -> None:
- options = {
- 'members': None,
- 'inherited-members': None,
- }
-
- assert app.config.autodoc_inherit_docstrings is True # default
- actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options)
- assert list(actual) == [
- '',
- '.. py:class:: Derived()',
- ' :module: target.inheritance',
- '',
- '',
- ' .. py:method:: Derived.another_inheritedmeth()',
- ' :module: target.inheritance',
- '',
- ' Another inherited function.',
- '',
- '',
- ' .. py:attribute:: Derived.inheritedattr',
- ' :module: target.inheritance',
- ' :value: None',
- '',
- ' docstring',
- '',
- '',
- ' .. py:method:: Derived.inheritedclassmeth()',
- ' :module: target.inheritance',
- ' :classmethod:',
- '',
- ' Inherited class method.',
- '',
- '',
- ' .. py:method:: Derived.inheritedmeth()',
- ' :module: target.inheritance',
- '',
- ' Inherited function.',
- '',
- '',
- ' .. py:method:: Derived.inheritedstaticmeth(cls)',
- ' :module: target.inheritance',
- ' :staticmethod:',
- '',
- ' Inherited static method.',
- '',
- ]
-
- # disable autodoc_inherit_docstrings
- app.config.autodoc_inherit_docstrings = False
- actual = do_autodoc(app, 'class', 'target.inheritance.Derived', options)
- assert list(actual) == [
- '',
- '.. py:class:: Derived()',
- ' :module: target.inheritance',
- '',
- '',
- ' .. py:method:: Derived.another_inheritedmeth()',
- ' :module: target.inheritance',
- '',
- ' Another inherited function.',
- '',
- '',
- ' .. py:method:: Derived.inheritedclassmeth()',
- ' :module: target.inheritance',
- ' :classmethod:',
- '',
- ' Inherited class method.',
- '',
- '',
- ' .. py:method:: Derived.inheritedstaticmeth(cls)',
- ' :module: target.inheritance',
- ' :staticmethod:',
- '',
- ' Inherited static method.',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_docstring_signature(app: SphinxTestApp) -> None:
- options = {'members': None, 'special-members': '__init__, __new__'}
- actual = do_autodoc(app, 'class', 'target.DocstringSig', options)
- assert list(actual) == [
- '',
- # FIXME: Ideally this would instead be: `DocstringSig(d, e=1)` but
- # currently `ClassDocumenter` does not apply the docstring signature
- # logic when extracting a signature from a __new__ or __init__ method.
- '.. py:class:: DocstringSig(*new_args, **new_kwargs)',
- ' :module: target',
- '',
- '',
- ' .. py:method:: DocstringSig.__init__(self, a, b=1) -> None',
- ' :module: target',
- '',
- ' First line of docstring',
- '',
- ' rest of docstring',
- '',
- '',
- ' .. py:method:: DocstringSig.__new__(cls, d, e=1) -> DocstringSig',
- ' :module: target',
- ' :staticmethod:',
- '',
- ' First line of docstring',
- '',
- ' rest of docstring',
- '',
- '',
- ' .. py:method:: DocstringSig.meth(FOO, BAR=1) -> BAZ',
- ' :module: target',
- '',
- ' First line of docstring',
- '',
- ' rest of docstring',
- '',
- '',
- ' .. py:method:: DocstringSig.meth2()',
- ' :module: target',
- '',
- ' First line, no signature',
- ' Second line followed by indentation::',
- '',
- ' indented line',
- '',
- '',
- ' .. py:property:: DocstringSig.prop1',
- ' :module: target',
- '',
- ' First line of docstring',
- '',
- '',
- ' .. py:property:: DocstringSig.prop2',
- ' :module: target',
- '',
- ' First line of docstring',
- ' Second line of docstring',
- '',
- ]
-
- # disable autodoc_docstring_signature
- app.config.autodoc_docstring_signature = False
- actual = do_autodoc(app, 'class', 'target.DocstringSig', options)
- assert list(actual) == [
- '',
- '.. py:class:: DocstringSig(*new_args, **new_kwargs)',
- ' :module: target',
- '',
- '',
- ' .. py:method:: DocstringSig.__init__(*init_args, **init_kwargs)',
- ' :module: target',
- '',
- ' __init__(self, a, b=1) -> None',
- ' First line of docstring',
- '',
- ' rest of docstring',
- '',
- '',
- ' .. py:method:: DocstringSig.__new__(cls, *new_args, **new_kwargs)',
- ' :module: target',
- ' :staticmethod:',
- '',
- ' __new__(cls, d, e=1) -> DocstringSig',
- ' First line of docstring',
- '',
- ' rest of docstring',
- '',
- '',
- ' .. py:method:: DocstringSig.meth()',
- ' :module: target',
- '',
- ' meth(FOO, BAR=1) -> BAZ',
- ' First line of docstring',
- '',
- ' rest of docstring',
- '',
- '',
- ' .. py:method:: DocstringSig.meth2()',
- ' :module: target',
- '',
- ' First line, no signature',
- ' Second line followed by indentation::',
- '',
- ' indented line',
- '',
- '',
- ' .. py:property:: DocstringSig.prop1',
- ' :module: target',
- '',
- ' DocstringSig.prop1(self)',
- ' First line of docstring',
- '',
- '',
- ' .. py:property:: DocstringSig.prop2',
- ' :module: target',
- '',
- ' First line of docstring',
- ' Second line of docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoclass_content_and_docstring_signature_class(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'class'
- options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'module', 'target.docstring_signature', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.docstring_signature',
- '',
- '',
- '.. py:class:: A(foo, bar)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: B(foo, bar)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: C(foo, bar)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: D()',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: E()',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: F()',
- ' :module: target.docstring_signature',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoclass_content_and_docstring_signature_init(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'init'
- options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'module', 'target.docstring_signature', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.docstring_signature',
- '',
- '',
- '.. py:class:: A(foo, bar)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: B(foo, bar, baz)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: C(foo, bar, baz)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: D(foo, bar, baz)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: E(foo: int, bar: int, baz: int)',
- ' E(foo: str, bar: str, baz: str)',
- ' E(foo: float, bar: float, baz: float)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: F(foo: int, bar: int, baz: int)',
- ' F(foo: str, bar: str, baz: str)',
- ' F(foo: float, bar: float, baz: float)',
- ' :module: target.docstring_signature',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autoclass_content_and_docstring_signature_both(app: SphinxTestApp) -> None:
- app.config.autoclass_content = 'both'
- options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'module', 'target.docstring_signature', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.docstring_signature',
- '',
- '',
- '.. py:class:: A(foo, bar)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: B(foo, bar)',
- ' :module: target.docstring_signature',
- '',
- ' B(foo, bar, baz)',
- '',
- '',
- '.. py:class:: C(foo, bar)',
- ' :module: target.docstring_signature',
- '',
- ' C(foo, bar, baz)',
- '',
- '',
- '.. py:class:: D(foo, bar, baz)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: E(foo: int, bar: int, baz: int)',
- ' E(foo: str, bar: str, baz: str)',
- ' E(foo: float, bar: float, baz: float)',
- ' :module: target.docstring_signature',
- '',
- '',
- '.. py:class:: F(foo: int, bar: int, baz: int)',
- ' F(foo: str, bar: str, baz: str)',
- ' F(foo: float, bar: float, baz: float)',
- ' :module: target.docstring_signature',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-@pytest.mark.usefixtures('rollback_sysmodules')
-def test_mocked_module_imports(app: SphinxTestApp) -> None:
- sys.modules.pop('target', None) # unload target module to clear the module cache
-
- # no autodoc_mock_imports
- options = {'members': 'TestAutodoc,decorated_function,func,Alias'}
- actual = do_autodoc(app, 'module', 'target.need_mocks', options)
- assert list(actual) == []
- assert "autodoc: failed to import module 'need_mocks'" in app.warning.getvalue()
-
- # with autodoc_mock_imports
- app.config.autodoc_mock_imports = [
- 'missing_module',
- 'missing_package1',
- 'missing_package2',
- 'missing_package3',
- 'sphinx.missing_module4',
- ]
-
- app.warning.truncate(0)
- actual = do_autodoc(app, 'module', 'target.need_mocks', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.need_mocks',
- '',
- '',
- '.. py:data:: Alias',
- ' :module: target.need_mocks',
- '',
- ' docstring',
- '',
- '',
- '.. py:class:: TestAutodoc()',
- ' :module: target.need_mocks',
- '',
- ' TestAutodoc docstring.',
- '',
- '',
- ' .. py:attribute:: TestAutodoc.Alias',
- ' :module: target.need_mocks',
- '',
- ' docstring',
- '',
- '',
- ' .. py:method:: TestAutodoc.decorated_method()',
- ' :module: target.need_mocks',
- '',
- ' TestAutodoc::decorated_method docstring',
- '',
- '',
- '.. py:function:: decorated_function()',
- ' :module: target.need_mocks',
- '',
- ' decorated_function docstring',
- '',
- '',
- '.. py:function:: func(arg: missing_module.Class)',
- ' :module: target.need_mocks',
- '',
- ' a function takes mocked object as an argument',
- '',
- ]
- assert app.warning.getvalue() == ''
-
-
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints': 'signature'},
-)
-def test_autodoc_typehints_signature(app: SphinxTestApp) -> None:
- options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'module', 'target.typehints', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.typehints',
- '',
- '',
- '.. py:data:: CONST1',
- ' :module: target.typehints',
- ' :type: int',
- '',
- '',
- '.. py:data:: CONST2',
- ' :module: target.typehints',
- ' :type: int',
- ' :value: 1',
- '',
- ' docstring',
- '',
- '',
- '.. py:data:: CONST3',
- ' :module: target.typehints',
- ' :type: ~pathlib.PurePosixPath',
- " :value: PurePosixPath('/a/b/c')",
- '',
- ' docstring',
- '',
- '',
- '.. py:class:: Math(s: str, o: ~typing.Any = None)',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:attribute:: Math.CONST1',
- ' :module: target.typehints',
- ' :type: int',
- '',
- '',
- ' .. py:attribute:: Math.CONST2',
- ' :module: target.typehints',
- ' :type: int',
- ' :value: 1',
- '',
- '',
- ' .. py:attribute:: Math.CONST3',
- ' :module: target.typehints',
- ' :type: ~pathlib.PurePosixPath',
- " :value: PurePosixPath('/a/b/c')",
- '',
- '',
- ' .. py:method:: Math.decr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:method:: Math.horse(a: str, b: int) -> None',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:method:: Math.incr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:method:: Math.nothing() -> None',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:property:: Math.path',
- ' :module: target.typehints',
- ' :type: ~pathlib.PurePosixPath',
- '',
- '',
- ' .. py:property:: Math.prop',
- ' :module: target.typehints',
- ' :type: int',
- '',
- '',
- '.. py:class:: NewAnnotation(i: int)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: NewComment(i: int)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: SignatureFromMetaclass(a: int)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: T',
- ' :module: target.typehints',
- '',
- ' docstring',
- '',
- " alias of TypeVar('T', bound=\\ :py:class:`~pathlib.PurePosixPath`)",
- '',
- '',
- '.. py:function:: complex_func(arg1: str, arg2: List[int], arg3: Tuple[int, '
- 'Union[str, Unknown]] = None, *args: str, **kwargs: str) -> None',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: decr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: incr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: missing_attr(c, a: str, b: Optional[str] = None) -> str',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: tuple_args(x: tuple[int, int | str]) -> tuple[int, int]',
- ' :module: target.typehints',
- '',
- ]
-
-
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints': 'none'},
-)
-def test_autodoc_typehints_none(app: SphinxTestApp) -> None:
- options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'module', 'target.typehints', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.typehints',
- '',
- '',
- '.. py:data:: CONST1',
- ' :module: target.typehints',
- '',
- '',
- '.. py:data:: CONST2',
- ' :module: target.typehints',
- ' :value: 1',
- '',
- ' docstring',
- '',
- '',
- '.. py:data:: CONST3',
- ' :module: target.typehints',
- " :value: PurePosixPath('/a/b/c')",
- '',
- ' docstring',
- '',
- '',
- '.. py:class:: Math(s, o=None)',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:attribute:: Math.CONST1',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:attribute:: Math.CONST2',
- ' :module: target.typehints',
- ' :value: 1',
- '',
- '',
- ' .. py:attribute:: Math.CONST3',
- ' :module: target.typehints',
- " :value: PurePosixPath('/a/b/c')",
- '',
- '',
- ' .. py:method:: Math.decr(a, b=1)',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:method:: Math.horse(a, b)',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:method:: Math.incr(a, b=1)',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:method:: Math.nothing()',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:property:: Math.path',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:property:: Math.prop',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: NewAnnotation(i)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: NewComment(i)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: SignatureFromMetaclass(a)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: T',
- ' :module: target.typehints',
- '',
- ' docstring',
- '',
- " alias of TypeVar('T', bound=\\ :py:class:`~pathlib.PurePosixPath`)",
- '',
- '',
- '.. py:function:: complex_func(arg1, arg2, arg3=None, *args, **kwargs)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: decr(a, b=1)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: incr(a, b=1)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: missing_attr(c, a, b=None)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: tuple_args(x)',
- ' :module: target.typehints',
- '',
- ]
-
-
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints': 'none'},
-)
-def test_autodoc_typehints_none_for_overload(app: SphinxTestApp) -> None:
- options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.overload', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.overload',
- '',
- '',
- '.. py:class:: Bar(x, y)',
- ' :module: target.overload',
- '',
- ' docstring',
- '',
- '',
- '.. py:class:: Baz(x, y)',
- ' :module: target.overload',
- '',
- ' docstring',
- '',
- '',
- '.. py:class:: Foo(x, y)',
- ' :module: target.overload',
- '',
- ' docstring',
- '',
- '',
- '.. py:class:: Math()',
- ' :module: target.overload',
- '',
- ' docstring',
- '',
- '',
- ' .. py:method:: Math.sum(x, y=None)',
- ' :module: target.overload',
- '',
- ' docstring',
- '',
- '',
- '.. py:function:: sum(x, y=None)',
- ' :module: target.overload',
- '',
- ' docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx(
- 'text',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints': 'description'},
- freshenv=True,
-)
-def test_autodoc_typehints_description(app: SphinxTestApp) -> None:
- app.build()
- context = (app.outdir / 'index.txt').read_text(encoding='utf8')
- assert (
- 'target.typehints.incr(a, b=1)\n'
- '\n'
- ' Parameters:\n'
- ' * **a** (*int*)\n'
- '\n'
- ' * **b** (*int*)\n'
- '\n'
- ' Return type:\n'
- ' int\n'
- ) in context
- assert (
- 'target.typehints.tuple_args(x)\n'
- '\n'
- ' Parameters:\n'
- ' **x** (*tuple**[**int**, **int** | **str**]*)\n'
- '\n'
- ' Return type:\n'
- ' tuple[int, int]\n'
- ) in context
-
- # Overloads still get displayed in the signature
- assert (
- 'target.overload.sum(x: int, y: int = 0) -> int\n'
- 'target.overload.sum(x: float, y: float = 0.0) -> float\n'
- 'target.overload.sum(x: str, y: str = None) -> str\n'
- '\n'
- ' docstring\n'
- ) in context
-
-
-@pytest.mark.sphinx(
- 'text',
- testroot='ext-autodoc',
- confoverrides={
- 'autodoc_typehints': 'description',
- 'autodoc_typehints_description_target': 'documented',
- },
- copy_test_root=True,
-)
-def test_autodoc_typehints_description_no_undoc(app: SphinxTestApp) -> None:
- # No :type: or :rtype: will be injected for `incr`, which does not have
- # a description for its parameters or its return. `tuple_args` does
- # describe them, so :type: and :rtype: will be added.
- with overwrite_file(
- app.srcdir / 'index.rst',
- '.. autofunction:: target.typehints.incr\n'
- '\n'
- '.. autofunction:: target.typehints.decr\n'
- '\n'
- ' :returns: decremented number\n'
- '\n'
- '.. autofunction:: target.typehints.tuple_args\n'
- '\n'
- ' :param x: arg\n'
- ' :return: another tuple\n',
- ):
- app.build()
- # Restore the original content of the file
- context = (app.outdir / 'index.txt').read_text(encoding='utf8')
- assert (
- 'target.typehints.incr(a, b=1)\n'
- '\n'
- 'target.typehints.decr(a, b=1)\n'
- '\n'
- ' Returns:\n'
- ' decremented number\n'
- '\n'
- ' Return type:\n'
- ' int\n'
- '\n'
- 'target.typehints.tuple_args(x)\n'
- '\n'
- ' Parameters:\n'
- ' **x** (*tuple**[**int**, **int** | **str**]*) -- arg\n'
- '\n'
- ' Returns:\n'
- ' another tuple\n'
- '\n'
- ' Return type:\n'
- ' tuple[int, int]\n'
- ) in context
-
-
-@pytest.mark.sphinx(
- 'text',
- testroot='ext-autodoc',
- confoverrides={
- 'autodoc_typehints': 'description',
- 'autodoc_typehints_description_target': 'documented_params',
- },
- copy_test_root=True,
-)
-def test_autodoc_typehints_description_no_undoc_doc_rtype(app: SphinxTestApp) -> None:
- # No :type: will be injected for `incr`, which does not have a description
- # for its parameters or its return, just :rtype: will be injected due to
- # autodoc_typehints_description_target. `tuple_args` does describe both, so
- # :type: and :rtype: will be added. `nothing` has no parameters but a return
- # type of None, which will be added.
- with overwrite_file(
- app.srcdir / 'index.rst',
- '.. autofunction:: target.typehints.incr\n'
- '\n'
- '.. autofunction:: target.typehints.decr\n'
- '\n'
- ' :returns: decremented number\n'
- '\n'
- '.. autofunction:: target.typehints.tuple_args\n'
- '\n'
- ' :param x: arg\n'
- ' :return: another tuple\n'
- '\n'
- '.. autofunction:: target.typehints.Math.nothing\n'
- '\n'
- '.. autofunction:: target.typehints.Math.horse\n'
- '\n'
- ' :return: nothing\n',
- ):
- app.build()
- context = (app.outdir / 'index.txt').read_text(encoding='utf8')
- assert context == (
- 'target.typehints.incr(a, b=1)\n'
- '\n'
- ' Return type:\n'
- ' int\n'
- '\n'
- 'target.typehints.decr(a, b=1)\n'
- '\n'
- ' Returns:\n'
- ' decremented number\n'
- '\n'
- ' Return type:\n'
- ' int\n'
- '\n'
- 'target.typehints.tuple_args(x)\n'
- '\n'
- ' Parameters:\n'
- ' **x** (*tuple**[**int**, **int** | **str**]*) -- arg\n'
- '\n'
- ' Returns:\n'
- ' another tuple\n'
- '\n'
- ' Return type:\n'
- ' tuple[int, int]\n'
- '\n'
- 'target.typehints.Math.nothing(self)\n'
- '\n'
- 'target.typehints.Math.horse(self, a, b)\n'
- '\n'
- ' Returns:\n'
- ' nothing\n'
- '\n'
- ' Return type:\n'
- ' None\n'
- )
-
-
-@pytest.mark.sphinx(
- 'text',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints': 'description'},
- copy_test_root=True,
-)
-def test_autodoc_typehints_description_with_documented_init(app: SphinxTestApp) -> None:
- with overwrite_file(
- app.srcdir / 'index.rst',
- '.. autoclass:: target.typehints._ClassWithDocumentedInit\n'
- ' :special-members: __init__\n',
- ):
- app.build()
- context = (app.outdir / 'index.txt').read_text(encoding='utf8')
- assert context == (
- 'class target.typehints._ClassWithDocumentedInit(x, *args, **kwargs)\n'
- '\n'
- ' Class docstring.\n'
- '\n'
- ' Parameters:\n'
- ' * **x** (*int*)\n'
- '\n'
- ' * **args** (*int*)\n'
- '\n'
- ' * **kwargs** (*int*)\n'
- '\n'
- ' __init__(x, *args, **kwargs)\n'
- '\n'
- ' Init docstring.\n'
- '\n'
- ' Parameters:\n'
- ' * **x** (*int*) -- Some integer\n'
- '\n'
- ' * **args** (*int*) -- Some integer\n'
- '\n'
- ' * **kwargs** (*int*) -- Some integer\n'
- '\n'
- ' Return type:\n'
- ' None\n'
- )
-
-
-@pytest.mark.sphinx(
- 'text',
- testroot='ext-autodoc',
- confoverrides={
- 'autodoc_typehints': 'description',
- 'autodoc_typehints_description_target': 'documented',
- },
- copy_test_root=True,
-)
-def test_autodoc_typehints_description_with_documented_init_no_undoc(
- app: SphinxTestApp,
-) -> None:
- with overwrite_file(
- app.srcdir / 'index.rst',
- '.. autoclass:: target.typehints._ClassWithDocumentedInit\n'
- ' :special-members: __init__\n',
- ):
- app.build()
- context = (app.outdir / 'index.txt').read_text(encoding='utf8')
- assert context == (
- 'class target.typehints._ClassWithDocumentedInit(x, *args, **kwargs)\n'
- '\n'
- ' Class docstring.\n'
- '\n'
- ' __init__(x, *args, **kwargs)\n'
- '\n'
- ' Init docstring.\n'
- '\n'
- ' Parameters:\n'
- ' * **x** (*int*) -- Some integer\n'
- '\n'
- ' * **args** (*int*) -- Some integer\n'
- '\n'
- ' * **kwargs** (*int*) -- Some integer\n'
- )
-
-
-@pytest.mark.sphinx(
- 'text',
- testroot='ext-autodoc',
- confoverrides={
- 'autodoc_typehints': 'description',
- 'autodoc_typehints_description_target': 'documented_params',
- },
- copy_test_root=True,
-)
-def test_autodoc_typehints_description_with_documented_init_no_undoc_doc_rtype(
- app: SphinxTestApp,
-) -> None:
- # see test_autodoc_typehints_description_with_documented_init_no_undoc
- # returnvalue_and_documented_params should not change class or method
- # docstring.
- with overwrite_file(
- app.srcdir / 'index.rst',
- '.. autoclass:: target.typehints._ClassWithDocumentedInit\n'
- ' :special-members: __init__\n',
- ):
- app.build()
- context = (app.outdir / 'index.txt').read_text(encoding='utf8')
- assert context == (
- 'class target.typehints._ClassWithDocumentedInit(x, *args, **kwargs)\n'
- '\n'
- ' Class docstring.\n'
- '\n'
- ' __init__(x, *args, **kwargs)\n'
- '\n'
- ' Init docstring.\n'
- '\n'
- ' Parameters:\n'
- ' * **x** (*int*) -- Some integer\n'
- '\n'
- ' * **args** (*int*) -- Some integer\n'
- '\n'
- ' * **kwargs** (*int*) -- Some integer\n'
- )
-
-
-@pytest.mark.sphinx(
- 'text',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints': 'description'},
-)
-def test_autodoc_typehints_description_for_invalid_node(app: SphinxTestApp) -> None:
- text = '.. py:function:: hello; world'
- restructuredtext.parse(app, text) # raises no error
-
-
-@pytest.mark.sphinx(
- 'text',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints': 'both'},
- copy_test_root=True,
-)
-def test_autodoc_typehints_both(app: SphinxTestApp) -> None:
- with overwrite_file(
- app.srcdir / 'index.rst',
- '.. autofunction:: target.typehints.incr\n'
- '\n'
- '.. autofunction:: target.typehints.tuple_args\n'
- '\n'
- '.. autofunction:: target.overload.sum\n',
- ):
- app.build()
- context = (app.outdir / 'index.txt').read_text(encoding='utf8')
- assert (
- 'target.typehints.incr(a: int, b: int = 1) -> int\n'
- '\n'
- ' Parameters:\n'
- ' * **a** (*int*)\n'
- '\n'
- ' * **b** (*int*)\n'
- '\n'
- ' Return type:\n'
- ' int\n'
- ) in context
- assert (
- 'target.typehints.tuple_args(x: tuple[int, int | str]) -> tuple[int, int]\n'
- '\n'
- ' Parameters:\n'
- ' **x** (*tuple**[**int**, **int** | **str**]*)\n'
- '\n'
- ' Return type:\n'
- ' tuple[int, int]\n'
- ) in context
-
- # Overloads still get displayed in the signature
- assert (
- 'target.overload.sum(x: int, y: int = 0) -> int\n'
- 'target.overload.sum(x: float, y: float = 0.0) -> float\n'
- 'target.overload.sum(x: str, y: str = None) -> str\n'
- '\n'
- ' docstring\n'
- ) in context
-
-
-@pytest.mark.sphinx('text', testroot='ext-autodoc')
-def test_autodoc_type_aliases(app: SphinxTestApp) -> None:
- # default
- options = {'members': None}
- actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.autodoc_type_aliases',
- '',
- '',
- '.. py:class:: Foo()',
- ' :module: target.autodoc_type_aliases',
- '',
- ' docstring',
- '',
- '',
- ' .. py:attribute:: Foo.attr1',
- ' :module: target.autodoc_type_aliases',
- ' :type: int',
- '',
- ' docstring',
- '',
- '',
- ' .. py:attribute:: Foo.attr2',
- ' :module: target.autodoc_type_aliases',
- ' :type: int',
- '',
- ' docstring',
- '',
- '',
- '.. py:function:: mult(x: int, y: int) -> int',
- ' mult(x: float, y: float) -> float',
- ' :module: target.autodoc_type_aliases',
- '',
- ' docstring',
- '',
- '',
- '.. py:function:: read(r: ~io.BytesIO) -> ~io.StringIO',
- ' :module: target.autodoc_type_aliases',
- '',
- ' docstring',
- '',
- '',
- '.. py:function:: sum(x: int, y: int) -> int',
- ' :module: target.autodoc_type_aliases',
- '',
- ' docstring',
- '',
- '',
- '.. py:data:: variable',
- ' :module: target.autodoc_type_aliases',
- ' :type: int',
- '',
- ' docstring',
- '',
- '',
- '.. py:data:: variable2',
- ' :module: target.autodoc_type_aliases',
- ' :type: int',
- ' :value: None',
- '',
- ' docstring',
- '',
- '',
- '.. py:data:: variable3',
- ' :module: target.autodoc_type_aliases',
- ' :type: int | None',
- '',
- ' docstring',
- '',
- ]
-
- # define aliases
- app.config.autodoc_type_aliases = {
- 'myint': 'myint',
- 'io.StringIO': 'my.module.StringIO',
- }
- actual = do_autodoc(app, 'module', 'target.autodoc_type_aliases', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.autodoc_type_aliases',
- '',
- '',
- '.. py:class:: Foo()',
- ' :module: target.autodoc_type_aliases',
- '',
- ' docstring',
- '',
- '',
- ' .. py:attribute:: Foo.attr1',
- ' :module: target.autodoc_type_aliases',
- ' :type: myint',
- '',
- ' docstring',
- '',
- '',
- ' .. py:attribute:: Foo.attr2',
- ' :module: target.autodoc_type_aliases',
- ' :type: myint',
- '',
- ' docstring',
- '',
- '',
- '.. py:function:: mult(x: myint, y: myint) -> myint',
- ' mult(x: float, y: float) -> float',
- ' :module: target.autodoc_type_aliases',
- '',
- ' docstring',
- '',
- '',
- '.. py:function:: read(r: ~io.BytesIO) -> my.module.StringIO',
- ' :module: target.autodoc_type_aliases',
- '',
- ' docstring',
- '',
- '',
- '.. py:function:: sum(x: myint, y: myint) -> myint',
- ' :module: target.autodoc_type_aliases',
- '',
- ' docstring',
- '',
- '',
- '.. py:data:: variable',
- ' :module: target.autodoc_type_aliases',
- ' :type: myint',
- '',
- ' docstring',
- '',
- '',
- '.. py:data:: variable2',
- ' :module: target.autodoc_type_aliases',
- ' :type: myint',
- ' :value: None',
- '',
- ' docstring',
- '',
- '',
- '.. py:data:: variable3',
- ' :module: target.autodoc_type_aliases',
- ' :type: myint | None',
- '',
- ' docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx(
- 'text',
- testroot='ext-autodoc',
- srcdir='autodoc_typehints_description_and_type_aliases',
- confoverrides={
- 'autodoc_typehints': 'description',
- 'autodoc_type_aliases': {'myint': 'myint'},
- },
-)
-def test_autodoc_typehints_description_and_type_aliases(app: SphinxTestApp) -> None:
- with overwrite_file(
- app.srcdir / 'autodoc_type_aliases.rst',
- '.. autofunction:: target.autodoc_type_aliases.sum',
- ):
- app.build()
- context = (app.outdir / 'autodoc_type_aliases.txt').read_text(encoding='utf8')
- assert context == (
- 'target.autodoc_type_aliases.sum(x, y)\n'
- '\n'
- ' docstring\n'
- '\n'
- ' Parameters:\n'
- ' * **x** (*myint*)\n'
- '\n'
- ' * **y** (*myint*)\n'
- '\n'
- ' Return type:\n'
- ' myint\n'
- )
-
-
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints_format': 'fully-qualified'},
-)
-def test_autodoc_typehints_format_fully_qualified(app: SphinxTestApp) -> None:
- options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'module', 'target.typehints', options)
- assert list(actual) == [
- '',
- '.. py:module:: target.typehints',
- '',
- '',
- '.. py:data:: CONST1',
- ' :module: target.typehints',
- ' :type: int',
- '',
- '',
- '.. py:data:: CONST2',
- ' :module: target.typehints',
- ' :type: int',
- ' :value: 1',
- '',
- ' docstring',
- '',
- '',
- '.. py:data:: CONST3',
- ' :module: target.typehints',
- ' :type: pathlib.PurePosixPath',
- " :value: PurePosixPath('/a/b/c')",
- '',
- ' docstring',
- '',
- '',
- '.. py:class:: Math(s: str, o: typing.Any = None)',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:attribute:: Math.CONST1',
- ' :module: target.typehints',
- ' :type: int',
- '',
- '',
- ' .. py:attribute:: Math.CONST2',
- ' :module: target.typehints',
- ' :type: int',
- ' :value: 1',
- '',
- '',
- ' .. py:attribute:: Math.CONST3',
- ' :module: target.typehints',
- ' :type: pathlib.PurePosixPath',
- " :value: PurePosixPath('/a/b/c')",
- '',
- '',
- ' .. py:method:: Math.decr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:method:: Math.horse(a: str, b: int) -> None',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:method:: Math.incr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:method:: Math.nothing() -> None',
- ' :module: target.typehints',
- '',
- '',
- ' .. py:property:: Math.path',
- ' :module: target.typehints',
- ' :type: pathlib.PurePosixPath',
- '',
- '',
- ' .. py:property:: Math.prop',
- ' :module: target.typehints',
- ' :type: int',
- '',
- '',
- '.. py:class:: NewAnnotation(i: int)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: NewComment(i: int)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: SignatureFromMetaclass(a: int)',
- ' :module: target.typehints',
- '',
- '',
- '.. py:class:: T',
- ' :module: target.typehints',
- '',
- ' docstring',
- '',
- " alias of TypeVar('T', bound=\\ :py:class:`pathlib.PurePosixPath`)",
- '',
- '',
- '.. py:function:: complex_func(arg1: str, arg2: List[int], arg3: Tuple[int, '
- 'Union[str, Unknown]] = None, *args: str, **kwargs: str) -> None',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: decr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: incr(a: int, b: int = 1) -> int',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: missing_attr(c, a: str, b: Optional[str] = None) -> str',
- ' :module: target.typehints',
- '',
- '',
- '.. py:function:: tuple_args(x: tuple[int, int | str]) -> tuple[int, int]',
- ' :module: target.typehints',
- '',
- ]
-
-
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints_format': 'fully-qualified'},
-)
-def test_autodoc_typehints_format_fully_qualified_for_class_alias(
- app: SphinxTestApp,
-) -> None:
- actual = do_autodoc(app, 'class', 'target.classes.Alias')
- assert list(actual) == [
- '',
- '.. py:attribute:: Alias',
- ' :module: target.classes',
- '',
- ' alias of :py:class:`target.classes.Foo`',
- ]
-
-
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints_format': 'fully-qualified'},
-)
-def test_autodoc_typehints_format_fully_qualified_for_generic_alias(
- app: SphinxTestApp,
-) -> None:
- actual = do_autodoc(app, 'data', 'target.genericalias.L')
- assert list(actual) == [
- '',
- '.. py:data:: L',
- ' :module: target.genericalias',
- '',
- ' A list of Class',
- '',
- ' alias of :py:class:`~typing.List`\\ [:py:class:`target.genericalias.Class`]',
- '',
- ]
-
-
-@pytest.mark.sphinx(
- 'html',
- testroot='ext-autodoc',
- confoverrides={'autodoc_typehints_format': 'fully-qualified'},
-)
-def test_autodoc_typehints_format_fully_qualified_for_newtype_alias(
- app: SphinxTestApp,
-) -> None:
- actual = do_autodoc(app, 'class', 'target.typevar.T6')
- assert list(actual) == [
- '',
- '.. py:class:: T6',
- ' :module: target.typevar',
- '',
- ' T6',
- '',
- ' alias of :py:class:`datetime.date`',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_default_options(app: SphinxTestApp) -> None:
- if (3, 11, 7) <= sys.version_info < (3, 12) or sys.version_info >= (3, 12, 1):
- list_of_weak_references = ' list of weak references to the object'
- else:
- list_of_weak_references = ' list of weak references to the object (if defined)' # fmt: skip
-
- # no settings
- actual = do_autodoc(app, 'class', 'target.enums.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' not in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: target.CustomIter' not in actual
- actual = do_autodoc(app, 'module', 'target')
- assert '.. py:function:: function_to_be_imported(app)' not in actual
-
- # with :members:
- app.config.autodoc_default_options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.enums.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
-
- # with :members: = True
- app.config.autodoc_default_options = {'members': None}
- actual = do_autodoc(app, 'class', 'target.enums.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
-
- # with :members: and :undoc-members:
- app.config.autodoc_default_options = {
- 'members': None,
- 'undoc-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.enums.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val4' in actual
-
- # with :special-members:
- # Note that :members: must be *on* for :special-members: to work.
- app.config.autodoc_default_options = {
- 'members': None,
- 'special-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: CustomIter.__init__()' in actual
- assert ' Create a new `CustomIter`.' in actual
- assert ' .. py:method:: CustomIter.__iter__()' in actual
- assert ' Iterate squares of each value.' in actual
- if not IS_PYPY:
- assert ' .. py:attribute:: CustomIter.__weakref__' in actual
- assert list_of_weak_references in actual
-
- # :exclude-members: None - has no effect. Unlike :members:,
- # :special-members:, etc. where None == "include all", here None means
- # "no/false/off".
- app.config.autodoc_default_options = {
- 'members': None,
- 'exclude-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.enums.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
- app.config.autodoc_default_options = {
- 'members': None,
- 'special-members': None,
- 'exclude-members': None,
- }
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: CustomIter.__init__()' in actual
- assert ' Create a new `CustomIter`.' in actual
- assert ' .. py:method:: CustomIter.__iter__()' in actual
- assert ' Iterate squares of each value.' in actual
- if not IS_PYPY:
- assert ' .. py:attribute:: CustomIter.__weakref__' in actual
- assert list_of_weak_references in actual
- assert ' .. py:method:: CustomIter.snafucate()' in actual
- assert ' Makes this snafucated.' in actual
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_autodoc_default_options_with_values(app: SphinxTestApp) -> None:
- if (3, 11, 7) <= sys.version_info < (3, 12) or sys.version_info >= (3, 12, 1):
- list_of_weak_references = ' list of weak references to the object'
- else:
- list_of_weak_references = ' list of weak references to the object (if defined)' # fmt: skip
-
- # with :members:
- app.config.autodoc_default_options = {'members': 'val1,val2'}
- actual = do_autodoc(app, 'class', 'target.enums.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' in actual
- assert ' .. py:attribute:: EnumCls.val2' in actual
- assert ' .. py:attribute:: EnumCls.val3' not in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
-
- # with :member-order:
- app.config.autodoc_default_options = {
- 'members': None,
- 'member-order': 'bysource',
- }
- actual = do_autodoc(app, 'class', 'target.Class')
- assert list(filter(lambda l: '::' in l, actual)) == [
- '.. py:class:: Class(arg)',
- ' .. py:method:: Class.meth()',
- ' .. py:method:: Class.skipmeth()',
- ' .. py:method:: Class.excludemeth()',
- ' .. py:attribute:: Class.attr',
- ' .. py:attribute:: Class.docattr',
- ' .. py:attribute:: Class.udocattr',
- ' .. py:attribute:: Class.mdocattr',
- ' .. py:method:: Class.moore(a, e, f) -> happiness',
- ' .. py:attribute:: Class.inst_attr_inline',
- ' .. py:attribute:: Class.inst_attr_comment',
- ' .. py:attribute:: Class.inst_attr_string',
- ]
-
- # with :special-members:
- app.config.autodoc_default_options = {
- 'special-members': '__init__,__iter__',
- }
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: CustomIter.__init__()' in actual
- assert ' Create a new `CustomIter`.' in actual
- assert ' .. py:method:: CustomIter.__iter__()' in actual
- assert ' Iterate squares of each value.' in actual
- if not IS_PYPY:
- assert ' .. py:attribute:: CustomIter.__weakref__' not in actual
- assert list_of_weak_references not in actual
-
- # with :exclude-members:
- app.config.autodoc_default_options = {
- 'members': None,
- 'exclude-members': 'val1',
- }
- actual = do_autodoc(app, 'class', 'target.enums.EnumCls')
- assert ' .. py:attribute:: EnumCls.val1' not in actual
- assert ' .. py:attribute:: EnumCls.val2' in actual
- assert ' .. py:attribute:: EnumCls.val3' in actual
- assert ' .. py:attribute:: EnumCls.val4' not in actual
- app.config.autodoc_default_options = {
- 'members': None,
- 'special-members': None,
- 'exclude-members': '__weakref__,snafucate',
- }
- actual = do_autodoc(app, 'class', 'target.CustomIter')
- assert ' .. py:method:: CustomIter.__init__()' in actual
- assert ' Create a new `CustomIter`.' in actual
- assert ' .. py:method:: CustomIter.__iter__()' in actual
- assert ' Iterate squares of each value.' in actual
- if not IS_PYPY:
- assert ' .. py:attribute:: CustomIter.__weakref__' not in actual
- assert list_of_weak_references not in actual
- assert ' .. py:method:: CustomIter.snafucate()' not in actual
- assert ' Makes this snafucated.' not in actual
diff --git a/tests/test_extensions/test_ext_autodoc_events.py b/tests/test_extensions/test_ext_autodoc_events.py
deleted file mode 100644
index ddc00476ad0..00000000000
--- a/tests/test_extensions/test_ext_autodoc_events.py
+++ /dev/null
@@ -1,142 +0,0 @@
-"""Test the autodoc extension. This tests mainly for autodoc events"""
-
-from __future__ import annotations
-
-from typing import TYPE_CHECKING
-
-import pytest
-
-from sphinx.ext.autodoc import between, cut_lines
-
-from tests.test_extensions.autodoc_util import do_autodoc
-
-if TYPE_CHECKING:
- from sphinx.testing.util import SphinxTestApp
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_process_docstring(app: SphinxTestApp) -> None:
- def on_process_docstring(app, what, name, obj, options, lines):
- lines.clear()
- lines.append('my docstring')
-
- app.connect('autodoc-process-docstring', on_process_docstring)
-
- actual = do_autodoc(app, 'function', 'target.process_docstring.func')
- assert list(actual) == [
- '',
- '.. py:function:: func()',
- ' :module: target.process_docstring',
- '',
- ' my docstring',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_process_docstring_for_nondatadescriptor(app: SphinxTestApp) -> None:
- def on_process_docstring(app, what, name, obj, options, lines):
- raise RuntimeError
-
- app.connect('autodoc-process-docstring', on_process_docstring)
-
- actual = do_autodoc(app, 'attribute', 'target.AttCls.a1')
- assert list(actual) == [
- '',
- '.. py:attribute:: AttCls.a1',
- ' :module: target',
- ' :value: hello world',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_cut_lines(app: SphinxTestApp) -> None:
- app.connect('autodoc-process-docstring', cut_lines(2, 2, ['function']))
-
- actual = do_autodoc(app, 'function', 'target.process_docstring.func')
- assert list(actual) == [
- '',
- '.. py:function:: func()',
- ' :module: target.process_docstring',
- '',
- ' second line',
- '',
- ]
-
-
-def test_cut_lines_no_objtype():
- docstring_lines = [
- 'first line',
- '---',
- 'second line',
- '---',
- 'third line ',
- '',
- ]
- process = cut_lines(2)
-
- process(None, 'function', 'func', None, {}, docstring_lines)
- assert docstring_lines == [
- 'second line',
- '---',
- 'third line ',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_between(app: SphinxTestApp) -> None:
- app.connect('autodoc-process-docstring', between('---', ['function']))
-
- actual = do_autodoc(app, 'function', 'target.process_docstring.func')
- assert list(actual) == [
- '',
- '.. py:function:: func()',
- ' :module: target.process_docstring',
- '',
- ' second line',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_between_exclude(app: SphinxTestApp) -> None:
- app.connect('autodoc-process-docstring', between('---', ['function'], exclude=True))
-
- actual = do_autodoc(app, 'function', 'target.process_docstring.func')
- assert list(actual) == [
- '',
- '.. py:function:: func()',
- ' :module: target.process_docstring',
- '',
- ' first line',
- ' third line',
- '',
- ]
-
-
-@pytest.mark.sphinx('html', testroot='ext-autodoc')
-def test_skip_module_member(app: SphinxTestApp) -> None:
- def autodoc_skip_member(app, what, name, obj, skip, options):
- if name == 'Class':
- return True # Skip "Class" class in __all__
- elif name == 'raises':
- return False # Show "raises()" function (not in __all__)
- return None
-
- app.connect('autodoc-skip-member', autodoc_skip_member)
-
- options = {'members': None}
- actual = do_autodoc(app, 'module', 'target', options)
- assert list(actual) == [
- '',
- '.. py:module:: target',
- '',
- '',
- '.. py:function:: raises(exc, func, *args, **kwds)',
- ' :module: target',
- '',
- ' Raise AssertionError if ``func(*args, **kwds)`` does not raise *exc*.',
- '',
- ]
diff --git a/tests/test_extensions/test_ext_coverage.py b/tests/test_extensions/test_ext_coverage.py
index 3c40d3dfaea..32fc2dba2d7 100644
--- a/tests/test_extensions/test_ext_coverage.py
+++ b/tests/test_extensions/test_ext_coverage.py
@@ -38,7 +38,7 @@ def test_build(app: SphinxTestApp) -> None:
assert 'api.h' in c_undoc
assert ' * Py_SphinxTest' in c_undoc
- undoc_py, undoc_c, py_undocumented, py_documented = pickle.loads(
+ undoc_py, undoc_c, _py_undocumented, _py_documented = pickle.loads(
(app.outdir / 'undoc.pickle').read_bytes()
)
assert len(undoc_c) == 1
@@ -117,7 +117,7 @@ def test_show_missing_items(app: SphinxTestApp) -> None:
'coverage', testroot='root', confoverrides={'coverage_show_missing_items': True}
)
def test_show_missing_items_quiet(app: SphinxTestApp) -> None:
- app.quiet = True
+ app.config._verbosity = -1 # mimics status=None / app.quiet = True
app.build(force_all=True)
assert (
diff --git a/tests/test_extensions/test_ext_doctest.py b/tests/test_extensions/test_ext_doctest.py
index cb540fda7ec..89d2b8e49a5 100644
--- a/tests/test_extensions/test_ext_doctest.py
+++ b/tests/test_extensions/test_ext_doctest.py
@@ -147,3 +147,49 @@ def test_reporting_with_autodoc(app, capfd):
assert 'File "dir/bar.py", line ?, in default' in failures
assert 'File "foo.py", line ?, in default' in failures
assert 'File "index.rst", line 4, in default' in failures
+
+
+@pytest.mark.sphinx('doctest', testroot='ext-doctest-fail-fast')
+@pytest.mark.parametrize('fail_fast', [False, True, None])
+def test_fail_fast(app, fail_fast, capsys):
+ if fail_fast is not None:
+ app.config.doctest_fail_fast = fail_fast
+ # Patch builder to get a copy of the output
+ written = []
+ app.builder._out = written.append
+ app.build(force_all=True)
+ assert app.statuscode
+
+ written = ''.join(written)
+ if fail_fast:
+ assert 'Doctest summary (exiting after first failed test)' in written
+ assert '1 failure in tests' in written
+ else:
+ assert 'Doctest summary\n' in written
+ assert '2 failures in tests' in written
+
+
+@pytest.mark.sphinx('doctest', testroot='ext-doctest-with-autodoc')
+@pytest.mark.parametrize(
+ ('test_doctest_blocks', 'group_name'),
+ [(None, 'default'), ('CustomGroupName', 'CustomGroupName')],
+)
+def test_doctest_block_group_name(app, test_doctest_blocks, group_name, capfd):
+ if test_doctest_blocks is not None:
+ app.config.doctest_test_doctest_blocks = test_doctest_blocks
+
+ # Patch builder to get a copy of the output
+ written = []
+ app.builder._warn_out = written.append
+ app.build(force_all=True)
+
+ failures = [
+ line.replace(os.sep, '/')
+ for line in '\n'.join(written).splitlines()
+ if line.startswith('File')
+ ]
+
+ assert f'File "dir/inner.rst", line 1, in {group_name}' in failures
+ assert f'File "dir/bar.py", line ?, in {group_name}' in failures
+ assert f'File "foo.py", line ?, in {group_name}' in failures
+ assert f'File "index.rst", line 4, in {group_name}' in failures
diff --git a/tests/test_extensions/test_ext_inheritance_diagram.py b/tests/test_extensions/test_ext_inheritance_diagram.py
index 4153113d12e..f98d424eda3 100644
--- a/tests/test_extensions/test_ext_inheritance_diagram.py
+++ b/tests/test_extensions/test_ext_inheritance_diagram.py
@@ -293,7 +293,7 @@ def test_inheritance_diagram_latex_alias(app):
app.config.inheritance_alias = {'test.Foo': 'alias.Foo'}
app.build(force_all=True)
- doc = app.env.get_and_resolve_doctree('index', app)
+ doc = app.env.get_and_resolve_doctree('index', app.builder, tags=app.tags)
aliased_graph = doc.children[0].children[3]['graph'].class_info
assert len(aliased_graph) == 4
assert (
diff --git a/tests/test_extensions/test_ext_math.py b/tests/test_extensions/test_ext_math.py
index 02d215bef01..9c8e620d655 100644
--- a/tests/test_extensions/test_ext_math.py
+++ b/tests/test_extensions/test_ext_math.py
@@ -323,7 +323,7 @@ def test_imgmath_numfig_html(app: SphinxTestApp) -> None:
def test_math_compat(app):
with warnings.catch_warnings(record=True):
app.build(force_all=True)
- doctree = app.env.get_and_resolve_doctree('index', app.builder)
+ doctree = app.env.get_and_resolve_doctree('index', app.builder, tags=app.tags)
assert_node(
doctree,
@@ -566,3 +566,19 @@ def test_mathjax_is_installed_if_included_file_has_equations_singlehtml(
content = (app.outdir / 'index.html').read_text(encoding='utf8')
assert MATHJAX_URL in content
+
+
+@pytest.mark.sphinx(
+ 'html',
+ testroot='ext-math-duplicate-label',
+ confoverrides={'extensions': ['sphinx.ext.mathjax'], 'show_warning_types': True},
+)
+def test_duplicate_equation_label_warning_type(app: SphinxTestApp) -> None:
+ """Test that duplicate equation labels emit warnings with type ref.equation."""
+ app.build(force_all=True)
+
+ from sphinx._cli.util.errors import strip_escape_sequences
+
+ warnings = strip_escape_sequences(app.warning.getvalue())
+ assert 'WARNING: duplicate label of equation duplicated' in warnings
+ assert '[ref.equation]' in warnings
diff --git a/tests/test_extensions/test_ext_todo.py b/tests/test_extensions/test_ext_todo.py
index a19c880b47b..38a6f81e16b 100644
--- a/tests/test_extensions/test_ext_todo.py
+++ b/tests/test_extensions/test_ext_todo.py
@@ -8,6 +8,8 @@
import pytest
if TYPE_CHECKING:
+ from sphinx.application import Sphinx
+ from sphinx.ext.todo import todo_node
from sphinx.testing.util import SphinxTestApp
@@ -20,7 +22,7 @@
def test_todo(app: SphinxTestApp) -> None:
todos = []
- def on_todo_defined(app, node):
+ def on_todo_defined(app: Sphinx, node: todo_node) -> None:
todos.append(node)
app.connect('todo-defined', on_todo_defined)
@@ -62,7 +64,7 @@ def on_todo_defined(app, node):
def test_todo_not_included(app: SphinxTestApp) -> None:
todos = []
- def on_todo_defined(app, node):
+ def on_todo_defined(app: Sphinx, node: todo_node) -> None:
todos.append(node)
app.connect('todo-defined', on_todo_defined)
diff --git a/tests/test_extensions/test_ext_viewcode.py b/tests/test_extensions/test_ext_viewcode.py
index eb8b5b141d5..03493dda4ff 100644
--- a/tests/test_extensions/test_ext_viewcode.py
+++ b/tests/test_extensions/test_ext_viewcode.py
@@ -9,9 +9,8 @@
import pygments
import pytest
-from sphinx.testing.util import SphinxTestApp
-
if TYPE_CHECKING:
+ from sphinx.application import Sphinx
from sphinx.testing.util import SphinxTestApp
@@ -130,7 +129,9 @@ def test_linkcode(app: SphinxTestApp) -> None:
@pytest.mark.sphinx('html', testroot='ext-viewcode-find', freshenv=True)
def test_local_source_files(app: SphinxTestApp) -> None:
- def find_source(app, modname):
+ def find_source(
+ app: Sphinx, modname: str
+ ) -> tuple[str, dict[str, tuple[str, int, int]]]:
if modname == 'not_a_package':
source = app.srcdir / 'not_a_package/__init__.py'
tags = {
@@ -174,7 +175,7 @@ def find_source(app, modname):
@pytest.mark.sphinx('html', testroot='ext-viewcode-find-package', freshenv=True)
-def test_find_local_package_import_path(app, status, warning):
+def test_find_local_package_import_path(app: Sphinx) -> None:
app.build(force_all=True)
result = (app.outdir / 'index.html').read_text(encoding='utf8')
diff --git a/tests/test_highlighting.py b/tests/test_highlighting.py
index 5b7ee2c8def..41dd8395f90 100644
--- a/tests/test_highlighting.py
+++ b/tests/test_highlighting.py
@@ -14,6 +14,11 @@
from sphinx.highlighting import PygmentsBridge
if TYPE_CHECKING:
+ from collections.abc import Iterable
+ from typing import TextIO
+
+ from pygments.token import _TokenType
+
from sphinx.testing.util import SphinxTestApp
if tuple(map(int, pygments.__version__.split('.')[:2])) < (2, 18):
@@ -34,14 +39,12 @@ class MyLexer(RegexLexer):
class MyFormatter(HtmlFormatter[str]):
- def format(self, tokensource, outfile):
- for tok in tokensource:
- outfile.write(tok[1])
-
-
-class ComplainOnUnhighlighted(PygmentsBridge):
- def unhighlighted(self, source):
- raise AssertionError('should highlight %r' % source)
+ def format(
+ self,
+ tokensource: Iterable[tuple[_TokenType, str]],
+ outfile: TextIO,
+ ) -> None:
+ outfile.writelines(tok[1] for tok in tokensource)
@pytest.mark.sphinx('html', testroot='root')
@@ -54,7 +57,7 @@ def test_add_lexer(app: SphinxTestApp) -> None:
def test_detect_interactive() -> None:
- bridge = ComplainOnUnhighlighted('html')
+ bridge = PygmentsBridge('html')
blocks = [
"""
>>> testing()
@@ -83,7 +86,7 @@ def test_set_formatter() -> None:
@mock.patch('sphinx.highlighting.logger')
-def test_default_highlight(logger):
+def test_default_highlight(logger: mock.Mock) -> None:
bridge = PygmentsBridge('html')
# default: highlights as python3
diff --git a/tests/test_intl/test_catalogs.py b/tests/test_intl/test_catalogs.py
index 022d5c6a64f..200e8d95286 100644
--- a/tests/test_intl/test_catalogs.py
+++ b/tests/test_intl/test_catalogs.py
@@ -9,11 +9,14 @@
import pytest
if TYPE_CHECKING:
+ from collections.abc import Iterator
+
+ from sphinx.testing.fixtures import _app_params
from sphinx.testing.util import SphinxTestApp
@pytest.fixture
-def _setup_test(app_params):
+def _setup_test(app_params: _app_params) -> Iterator[None]:
assert isinstance(app_params.kwargs['srcdir'], Path)
srcdir = app_params.kwargs['srcdir']
src_locale_dir = srcdir / 'xx' / 'LC_MESSAGES'
diff --git a/tests/test_intl/test_intl.py b/tests/test_intl/test_intl.py
index d43c029d4d6..2375c1a6687 100644
--- a/tests/test_intl/test_intl.py
+++ b/tests/test_intl/test_intl.py
@@ -1101,29 +1101,29 @@ def get_content(result, name):
expect1 = (
"""Deprecated since version 1.0: """
- """THIS IS THE FIRST PARAGRAPH OF DEPRECATED.
\n"""
- """THIS IS THE SECOND PARAGRAPH OF DEPRECATED.
\n"""
+ """THIS IS THE FIRST PARAGRAPH OF VERSION-DEPRECATED.\n"""
+ """THIS IS THE SECOND PARAGRAPH OF VERSION-DEPRECATED.
\n"""
)
matched_content = get_content(result, 'deprecated')
assert matched_content == expect1
expect2 = (
"""Added in version 1.0: """
- """THIS IS THE FIRST PARAGRAPH OF VERSIONADDED.
\n"""
+ """THIS IS THE FIRST PARAGRAPH OF VERSION-ADDED.\n"""
)
matched_content = get_content(result, 'versionadded')
assert matched_content == expect2
expect3 = (
"""Changed in version 1.0: """
- """THIS IS THE FIRST PARAGRAPH OF VERSIONCHANGED.
\n"""
+ """THIS IS THE FIRST PARAGRAPH OF VERSION-CHANGED.\n"""
)
matched_content = get_content(result, 'versionchanged')
assert matched_content == expect3
expect4 = (
"""Removed in version 1.0: """
- """THIS IS THE FIRST PARAGRAPH OF VERSIONREMOVED.
\n"""
+ """THIS IS THE FIRST PARAGRAPH OF VERSION-REMOVED.\n"""
)
matched_content = get_content(result, 'versionremoved')
assert matched_content == expect4
diff --git a/tests/test_intl/test_locale.py b/tests/test_intl/test_locale.py
index c85c9f7078e..2addbfbfa24 100644
--- a/tests/test_intl/test_locale.py
+++ b/tests/test_intl/test_locale.py
@@ -9,17 +9,19 @@
from sphinx import locale
if TYPE_CHECKING:
- from collections.abc import Callable
+ from collections.abc import Callable, Iterator
from pathlib import Path
+ from sphinx.testing.util import SphinxTestApp
+
@pytest.fixture(autouse=True)
-def _cleanup_translations():
+def _cleanup_translations() -> Iterator[None]:
yield
locale.translators.clear()
-def test_init(rootdir):
+def test_init(rootdir: Path) -> None:
# not initialized yet
_ = locale.get_translation('myext')
assert _('Hello world') == 'Hello world'
@@ -48,7 +50,7 @@ def test_init(rootdir):
assert _('Hello reST') == 'Hello reST'
-def test_init_with_unknown_language(rootdir):
+def test_init_with_unknown_language(rootdir: Path) -> None:
locale.init([rootdir / 'test-locale' / 'locale1'], 'unknown', 'myext')
_ = locale.get_translation('myext')
assert _('Hello world') == 'Hello world'
@@ -57,7 +59,7 @@ def test_init_with_unknown_language(rootdir):
@pytest.mark.sphinx('html', testroot='root')
-def test_add_message_catalog(app, rootdir):
+def test_add_message_catalog(app: SphinxTestApp, rootdir: Path) -> None:
app.config.language = 'en'
app.add_message_catalog('myext', rootdir / 'test-locale' / 'locale1')
_ = locale.get_translation('myext')
@@ -73,7 +75,9 @@ def _empty_language_translation(rootdir: Path) -> Callable[[str], str]:
return locale.get_translation(catalog)
-def test_init_environment_language(rootdir, monkeypatch):
+def test_init_environment_language(
+ rootdir: Path, monkeypatch: pytest.MonkeyPatch
+) -> None:
with monkeypatch.context() as m:
m.setenv('LANGUAGE', 'en_US:en')
_ = _empty_language_translation(rootdir)
diff --git a/tests/test_markup/test_markup.py b/tests/test_markup/test_markup.py
index 3a370ee46ad..fb4df4c400b 100644
--- a/tests/test_markup/test_markup.py
+++ b/tests/test_markup/test_markup.py
@@ -3,11 +3,10 @@
from __future__ import annotations
import re
-import warnings
from types import SimpleNamespace
import pytest
-from docutils import frontend, nodes, utils
+from docutils import nodes, utils
from docutils.parsers.rst import Parser as RstParser
from sphinx import addnodes
@@ -17,7 +16,7 @@
from sphinx.testing.util import assert_node
from sphinx.transforms import SphinxSmartQuotes
from sphinx.util import texescape
-from sphinx.util.docutils import sphinx_domains
+from sphinx.util.docutils import _get_settings, sphinx_domains
from sphinx.writers.html import HTMLWriter
from sphinx.writers.html5 import HTML5Translator
from sphinx.writers.latex import LaTeXTranslator, LaTeXWriter
@@ -27,15 +26,9 @@
def settings(app):
env = app.env
texescape.init() # otherwise done by the latex builder
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- optparser = frontend.OptionParser(
- components=(RstParser, HTMLWriter, LaTeXWriter),
- defaults=default_settings,
- )
- settings = optparser.get_default_values()
+ settings = _get_settings(
+ RstParser, HTMLWriter, LaTeXWriter, defaults=default_settings
+ )
settings.smart_quotes = True
settings.env = env
settings.env.current_document.docname = 'dummy'
@@ -69,7 +62,7 @@ def parse_(rst):
document = new_document()
parser = RstParser()
parser.parse(rst, document)
- SphinxSmartQuotes(document, startnode=None).apply() # type: ignore[no-untyped-call]
+ SphinxSmartQuotes(document, startnode=None).apply()
for msg in list(document.findall(nodes.system_message)):
if msg['level'] == 1:
msg.replace_self([])
diff --git a/tests/test_markup/test_parser.py b/tests/test_markup/test_parser.py
index eb8ccf24f1d..215fdf04dd6 100644
--- a/tests/test_markup/test_parser.py
+++ b/tests/test_markup/test_parser.py
@@ -2,6 +2,7 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
from unittest.mock import Mock, patch
import pytest
@@ -9,14 +10,18 @@
from sphinx.parsers import RSTParser
from sphinx.util.docutils import new_document
+if TYPE_CHECKING:
+ from sphinx.testing.util import SphinxTestApp
+
@pytest.mark.sphinx('html', testroot='basic')
@patch('docutils.parsers.rst.states.RSTStateMachine')
-def test_RSTParser_prolog_epilog(RSTStateMachine, app):
+def test_RSTParser_prolog_epilog(RSTStateMachine: Mock, app: SphinxTestApp) -> None:
document = new_document('dummy.rst')
document.settings = Mock(tab_width=8, language_code='')
parser = RSTParser()
- parser.set_application(app)
+ parser._config = app.config
+ parser._env = app.env
# normal case
text = 'hello Sphinx world\nSphinx is a document generator'
@@ -33,8 +38,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app):
parser.parse(text, document)
(content, _), _ = RSTStateMachine().run.call_args
assert list(content.xitems()) == [
- ('', 0, 'this is rst_prolog'),
- ('', 1, 'hello reST!'),
+ ('', 0, 'this is rst_prolog'),
+ ('', 1, 'hello reST!'),
('', 0, ''),
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
@@ -49,8 +54,8 @@ def test_RSTParser_prolog_epilog(RSTStateMachine, app):
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
('dummy.rst', 2, ''),
- ('', 0, 'this is rst_epilog'),
- ('', 1, 'good-bye reST!'),
+ ('', 0, 'this is rst_epilog'),
+ ('', 1, 'good-bye reST!'),
]
# expandtabs / convert whitespaces
diff --git a/tests/test_project.py b/tests/test_project.py
index 5c4fb5cbafb..6d638265ff9 100644
--- a/tests/test_project.py
+++ b/tests/test_project.py
@@ -32,13 +32,13 @@
SUBDIR_DOCNAMES = {'subdir/excluded', 'subdir/images', 'subdir/includes'}
-def test_project_discover_basic(rootdir):
+def test_project_discover_basic(rootdir: Path) -> None:
# basic case
project = Project(rootdir / 'test-root', ['.txt'])
assert project.discover() == DOCNAMES
-def test_project_discover_exclude_patterns(rootdir):
+def test_project_discover_exclude_patterns(rootdir: Path) -> None:
project = Project(rootdir / 'test-root', ['.txt'])
# exclude_paths option
@@ -46,19 +46,19 @@ def test_project_discover_exclude_patterns(rootdir):
assert project.discover(['.txt', 'subdir/*']) == DOCNAMES - SUBDIR_DOCNAMES
-def test_project_discover_multiple_suffixes(rootdir):
+def test_project_discover_multiple_suffixes(rootdir: Path) -> None:
# multiple source_suffixes
project = Project(rootdir / 'test-root', ['.txt', '.foo'])
assert project.discover() == DOCNAMES | {'otherext'}
-def test_project_discover_complicated_suffix(rootdir):
+def test_project_discover_complicated_suffix(rootdir: Path) -> None:
# complicated source_suffix
project = Project(rootdir / 'test-root', ['.foo.png'])
assert project.discover() == {'img'}
-def test_project_discover_templates_path(rootdir):
+def test_project_discover_templates_path(rootdir: Path) -> None:
# templates_path
project = Project(rootdir / 'test-root', ['.html'])
assert project.discover() == {
@@ -70,7 +70,7 @@ def test_project_discover_templates_path(rootdir):
assert project.discover(['_templates']) == set()
-def test_project_path2doc(rootdir):
+def test_project_path2doc(rootdir: Path) -> None:
project = Project(rootdir / 'test-basic', {'.rst': 'restructuredtext'})
assert project.path2doc('index.rst') == 'index'
assert project.path2doc('index.foo') is None # unknown extension
diff --git a/tests/test_pycode/test_pycode.py b/tests/test_pycode/test_pycode.py
index 51b525f7b5b..4caf5019b94 100644
--- a/tests/test_pycode/test_pycode.py
+++ b/tests/test_pycode/test_pycode.py
@@ -41,7 +41,7 @@ def test_ModuleAnalyzer_for_file() -> None:
assert analyzer.srcname == str(SPHINX_MODULE_PATH)
-def test_ModuleAnalyzer_for_module(rootdir):
+def test_ModuleAnalyzer_for_module(rootdir: Path) -> None:
analyzer = ModuleAnalyzer.for_module('sphinx')
assert analyzer.modname == 'sphinx'
assert analyzer.srcname == str(SPHINX_MODULE_PATH)
diff --git a/tests/test_pycode/test_pycode_ast.py b/tests/test_pycode/test_pycode_ast.py
index 6ebc1a91099..9dd8c8f5d17 100644
--- a/tests/test_pycode/test_pycode_ast.py
+++ b/tests/test_pycode/test_pycode_ast.py
@@ -62,11 +62,13 @@
'x[:, np.newaxis, :, :]'), # Index, Subscript, numpy extended syntax
('y[:, 1:3][np.array([0, 2, 4]), :]',
'y[:, 1:3][np.array([0, 2, 4]), :]'), # Index, 2x Subscript, numpy extended syntax
+ ('*tuple[str, int]', '*tuple[str, int]'), # Starred
],
) # fmt: skip
-def test_unparse(source, expected):
- module = ast.parse(source)
- assert ast_unparse(module.body[0].value, source) == expected
+def test_unparse(source: str, expected: str) -> None:
+ expr = ast.parse(source).body[0]
+ assert isinstance(expr, ast.Expr)
+ assert ast_unparse(expr.value, source) == expected
def test_unparse_None() -> None:
diff --git a/tests/test_quickstart.py b/tests/test_quickstart.py
index a66b7c58128..3688984d9c2 100644
--- a/tests/test_quickstart.py
+++ b/tests/test_quickstart.py
@@ -20,7 +20,7 @@
warnfile = StringIO()
-def setup_module():
+def setup_module() -> None:
disable_colour()
@@ -48,7 +48,7 @@ def input_(prompt: str) -> str:
real_input: Callable[[str], str] = input
-def teardown_module():
+def teardown_module() -> None:
qs.term_input = real_input
enable_colour()
@@ -61,7 +61,7 @@ def test_do_prompt() -> None:
'Q5': 'no',
'Q6': 'foo',
}
- qs.term_input = mock_input(answers) # type: ignore[assignment]
+ qs.term_input = mock_input(answers)
assert qs.do_prompt('Q1', default='v1') == 'v1'
assert qs.do_prompt('Q3', default='v3_default') == 'v3'
@@ -79,7 +79,7 @@ def test_do_prompt_inputstrip() -> None:
'Q3': 'N',
'Q4': 'N ',
}
- qs.term_input = mock_input(answers) # type: ignore[assignment]
+ qs.term_input = mock_input(answers)
assert qs.do_prompt('Q1') == 'Y'
assert qs.do_prompt('Q2') == 'Yes'
@@ -91,12 +91,12 @@ def test_do_prompt_with_nonascii() -> None:
answers = {
'Q1': '\u30c9\u30a4\u30c4',
}
- qs.term_input = mock_input(answers) # type: ignore[assignment]
+ qs.term_input = mock_input(answers)
result = qs.do_prompt('Q1', default='\u65e5\u672c')
assert result == '\u30c9\u30a4\u30c4'
-def test_quickstart_defaults(tmp_path):
+def test_quickstart_defaults(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Project name': 'Sphinx Test',
@@ -127,7 +127,7 @@ def test_quickstart_defaults(tmp_path):
assert (tmp_path / 'make.bat').is_file()
-def test_quickstart_all_answers(tmp_path):
+def test_quickstart_all_answers(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Separate source and build': 'y',
@@ -185,7 +185,7 @@ def test_quickstart_all_answers(tmp_path):
assert (tmp_path / 'source' / 'contents.txt').is_file()
-def test_generated_files_eol(tmp_path):
+def test_generated_files_eol(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Project name': 'Sphinx Test',
@@ -205,7 +205,7 @@ def assert_eol(filename: Path, eol: str) -> None:
assert_eol(tmp_path / 'Makefile', '\n')
-def test_quickstart_and_build(tmp_path):
+def test_quickstart_and_build(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Project name': 'Fullwidth characters: \u30c9\u30a4\u30c4',
@@ -224,7 +224,7 @@ def test_quickstart_and_build(tmp_path):
assert not warnings
-def test_default_filename(tmp_path):
+def test_default_filename(tmp_path: Path) -> None:
answers = {
'Root path': str(tmp_path),
'Project name': '\u30c9\u30a4\u30c4', # Fullwidth characters only
@@ -242,7 +242,7 @@ def test_default_filename(tmp_path):
exec(conffile.read_text(encoding='utf8'), ns) # NoQA: S102
-def test_extensions(tmp_path):
+def test_extensions(tmp_path: Path) -> None:
qs.main([
'-q',
'-p',
@@ -261,7 +261,7 @@ def test_extensions(tmp_path):
assert ns['extensions'] == ['foo', 'bar', 'baz']
-def test_exits_when_existing_confpy(monkeypatch):
+def test_exits_when_existing_confpy(monkeypatch: pytest.MonkeyPatch) -> None:
# The code detects existing conf.py with path.is_file()
# so we mock it as True with pytest's monkeypatch
monkeypatch.setattr('os.path.isfile', lambda path: True)
diff --git a/tests/test_search.py b/tests/test_search.py
index 22fa6ab7616..0dd4d643ef1 100644
--- a/tests/test_search.py
+++ b/tests/test_search.py
@@ -3,7 +3,6 @@
from __future__ import annotations
import json
-import warnings
from io import BytesIO
from typing import TYPE_CHECKING
@@ -44,7 +43,7 @@ def __init__(self, version: str, domains: DummyDomainsContainer) -> None:
self.version = version
self.domains = domains
- def __getattr__(self, name: str):
+ def __getattr__(self, name: str) -> Any:
if name.startswith('_search_index_'):
setattr(self, name, {})
return getattr(self, name, {})
@@ -107,7 +106,7 @@ def test_meta_keys_are_handled_for_language_en(app: SphinxTestApp) -> None:
searchindex = load_searchindex(app.outdir / 'searchindex.js')
assert not is_registered_term(searchindex, 'thisnoteith')
assert is_registered_term(searchindex, 'thisonetoo')
- assert is_registered_term(searchindex, 'findthiskei')
+ assert is_registered_term(searchindex, 'findthiskey')
assert is_registered_term(searchindex, 'thistoo')
assert not is_registered_term(searchindex, 'onlygerman')
assert is_registered_term(searchindex, 'notgerman')
@@ -125,7 +124,7 @@ def test_meta_keys_are_handled_for_language_de(app: SphinxTestApp) -> None:
searchindex = load_searchindex(app.outdir / 'searchindex.js')
assert not is_registered_term(searchindex, 'thisnoteith')
assert is_registered_term(searchindex, 'thisonetoo')
- assert not is_registered_term(searchindex, 'findthiskei')
+ assert not is_registered_term(searchindex, 'findthiskey')
assert not is_registered_term(searchindex, 'thistoo')
assert is_registered_term(searchindex, 'onlygerman')
assert not is_registered_term(searchindex, 'notgerman')
@@ -144,7 +143,7 @@ def test_stemmer(app: SphinxTestApp) -> None:
app.build(force_all=True)
searchindex = load_searchindex(app.outdir / 'searchindex.js')
print(searchindex)
- assert is_registered_term(searchindex, 'findthisstemmedkei')
+ assert is_registered_term(searchindex, 'findthisstemmedkey')
assert is_registered_term(searchindex, 'intern')
@@ -169,12 +168,7 @@ def test_term_in_raw_directive(app: SphinxTestApp) -> None:
def test_IndexBuilder():
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- optparser = frontend.OptionParser(components=(rst.Parser,))
- settings = optparser.get_default_values()
+ settings = frontend.get_default_settings(rst.Parser)
parser = rst.Parser()
domain1 = DummyDomain(
@@ -219,7 +213,6 @@ def test_IndexBuilder():
# dictionaries below may be iterated in arbitrary order by Python at
# runtime.
assert index._mapping == {
- 'ar': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'fermion': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'comment': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
'non': {'docname1_1', 'docname1_2', 'docname2_1', 'docname2_2'},
@@ -250,7 +243,6 @@ def test_IndexBuilder():
},
'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'},
'terms': {
- 'ar': [0, 1, 2, 3],
'comment': [0, 1, 2, 3],
'fermion': [0, 1, 2, 3],
'index': [0, 1, 2, 3],
@@ -309,7 +301,6 @@ def test_IndexBuilder():
'docname2_2': 'filename2_2',
}
assert index._mapping == {
- 'ar': {'docname1_2', 'docname2_2'},
'fermion': {'docname1_2', 'docname2_2'},
'comment': {'docname1_2', 'docname2_2'},
'non': {'docname1_2', 'docname2_2'},
@@ -338,7 +329,6 @@ def test_IndexBuilder():
},
'objtypes': {0: 'dummy1:objtype1', 1: 'dummy2:objtype1'},
'terms': {
- 'ar': [0, 1],
'comment': [0, 1],
'fermion': [0, 1],
'index': [0, 1],
@@ -466,7 +456,7 @@ def assert_is_sorted(
assert_is_sorted(child, f'{path}[{i}]')
-@pytest.mark.parametrize('directory', JAVASCRIPT_TEST_ROOTS)
+@pytest.mark.parametrize('directory', JAVASCRIPT_TEST_ROOTS, ids=lambda p: p.name)
def test_check_js_search_indexes(make_app, sphinx_test_tempdir, directory):
app = make_app(
'html',
diff --git a/tests/test_theming/test_templating.py b/tests/test_theming/test_templating.py
index b2d6f0d6eae..e2b61617c33 100644
--- a/tests/test_theming/test_templating.py
+++ b/tests/test_theming/test_templating.py
@@ -4,14 +4,11 @@
import pytest
-from sphinx.ext.autosummary.generate import setup_documenters
-
@pytest.mark.sphinx('html', testroot='templating', copy_test_root=True)
def test_layout_overloading(make_app, app_params):
args, kwargs = app_params
app = make_app(*args, **kwargs)
- setup_documenters(app)
app.build()
result = (app.outdir / 'index.html').read_text(encoding='utf8')
@@ -22,7 +19,6 @@ def test_layout_overloading(make_app, app_params):
def test_autosummary_class_template_overloading(make_app, app_params):
args, kwargs = app_params
app = make_app(*args, **kwargs)
- setup_documenters(app)
app.build()
result = (
@@ -41,7 +37,6 @@ def test_autosummary_class_template_overloading(make_app, app_params):
def test_autosummary_context(make_app, app_params):
args, kwargs = app_params
app = make_app(*args, **kwargs)
- setup_documenters(app)
app.build()
result = (
diff --git a/tests/test_theming/test_theming.py b/tests/test_theming/test_theming.py
index 173e0c9c64b..8ff3919c967 100644
--- a/tests/test_theming/test_theming.py
+++ b/tests/test_theming/test_theming.py
@@ -159,10 +159,10 @@ def test_dark_style(app, monkeypatch):
app.build()
assert (app.outdir / '_static' / 'pygments_dark.css').exists()
- css_file, properties = app.registry.css_files[0]
- assert css_file == 'pygments_dark.css'
- assert 'media' in properties
- assert properties['media'] == '(prefers-color-scheme: dark)'
+ css_file = app.builder._css_files[1]
+ assert css_file.filename == '_static/pygments_dark.css'
+ assert 'media' in css_file.attributes
+ assert css_file.attributes['media'] == '(prefers-color-scheme: dark)'
assert sorted(f.filename for f in app.builder._css_files) == [
'_static/classic.css',
diff --git a/tests/test_transforms/test_transforms_move_module_targets.py b/tests/test_transforms/test_transforms_move_module_targets.py
index f64b7d6a500..839eb615adc 100644
--- a/tests/test_transforms/test_transforms_move_module_targets.py
+++ b/tests/test_transforms/test_transforms_move_module_targets.py
@@ -1,5 +1,7 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
import pytest
from docutils import nodes
@@ -7,6 +9,9 @@
from sphinx.testing.util import SphinxTestApp
from sphinx.transforms import MoveModuleTargets
+if TYPE_CHECKING:
+ from pathlib import Path
+
CONTENT_PY = """\
move-module-targets
===================
@@ -29,7 +34,7 @@
],
)
@pytest.mark.usefixtures('rollback_sysmodules')
-def test_move_module_targets(tmp_path, content):
+def test_move_module_targets(tmp_path: Path, content: str) -> None:
# Test for the MoveModuleTargets transform
tmp_path.joinpath('conf.py').touch()
tmp_path.joinpath('index.rst').write_text(content, encoding='utf-8')
@@ -48,7 +53,7 @@ def test_move_module_targets(tmp_path, content):
@pytest.mark.usefixtures('rollback_sysmodules')
-def test_move_module_targets_no_section(tmp_path):
+def test_move_module_targets_no_section(tmp_path: Path) -> None:
# Test for the MoveModuleTargets transform
tmp_path.joinpath('conf.py').touch()
tmp_path.joinpath('index.rst').write_text(
@@ -63,7 +68,7 @@ def test_move_module_targets_no_section(tmp_path):
@pytest.mark.usefixtures('rollback_sysmodules')
-def test_move_module_targets_disabled(tmp_path):
+def test_move_module_targets_disabled(tmp_path: Path) -> None:
# Test for the MoveModuleTargets transform
tmp_path.joinpath('conf.py').touch()
tmp_path.joinpath('index.rst').write_text(CONTENT_PY, encoding='utf-8')
diff --git a/tests/test_transforms/test_transforms_post_transforms_images.py b/tests/test_transforms/test_transforms_post_transforms_images.py
index 800fb3b986b..c6c80f59c62 100644
--- a/tests/test_transforms/test_transforms_post_transforms_images.py
+++ b/tests/test_transforms/test_transforms_post_transforms_images.py
@@ -35,7 +35,7 @@
def test_guess_mimetype_webp(tmp_path):
document = new_document('')
- document.settings.env = SimpleNamespace(app=SimpleNamespace(srcdir=tmp_path))
+ document.settings.env = SimpleNamespace(srcdir=tmp_path)
converter = ImageConverter(document)
file_webp = 'webp-image.webp'
diff --git a/tests/test_transforms/test_transforms_reorder_nodes.py b/tests/test_transforms/test_transforms_reorder_nodes.py
index aa9bee43d04..b81a47f5b86 100644
--- a/tests/test_transforms/test_transforms_reorder_nodes.py
+++ b/tests/test_transforms/test_transforms_reorder_nodes.py
@@ -56,8 +56,8 @@ def test_transforms_reorder_consecutive_target_and_index_nodes_preserve_order(
@pytest.mark.sphinx('html', testroot='_blank')
def test_transforms_reorder_consecutive_target_and_index_nodes_no_merge_across_other_nodes(
- app,
-):
+ app: SphinxTestApp,
+) -> None:
text = (
'.. index:: abc\n'
'.. index:: def\n'
diff --git a/tests/test_util/intersphinx_data.py b/tests/test_util/intersphinx_data.py
index 2b3489da465..24fcc4ae311 100644
--- a/tests/test_util/intersphinx_data.py
+++ b/tests/test_util/intersphinx_data.py
@@ -62,3 +62,12 @@
b term std:term -1 document.html#id5 -
B term std:term -1 document.html#B -
""")
+
+INVENTORY_V2_TEXT_VERSION: Final[bytes] = b"""\
+# Sphinx inventory version 2
+# Project: foo
+# Version: stable
+# The remainder of this file is compressed with zlib.
+""" + zlib.compress(b"""\
+module1 py:module 0 foo.html#module-module1 Long Module desc
+""")
diff --git a/tests/test_util/test_util.py b/tests/test_util/test_util.py
index e4881764680..ce403afd0ed 100644
--- a/tests/test_util/test_util.py
+++ b/tests/test_util/test_util.py
@@ -2,6 +2,8 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
import pytest
import sphinx.util
@@ -29,8 +31,11 @@
relative_uri,
)
+if TYPE_CHECKING:
+ from pathlib import Path
+
-def test_ensuredir(tmp_path):
+def test_ensuredir(tmp_path: Path) -> None:
# Does not raise an exception for an existing directory.
ensuredir(tmp_path)
diff --git a/tests/test_util/test_util_display.py b/tests/test_util/test_util_display.py
index a3dda71b999..f4fa9c997ca 100644
--- a/tests/test_util/test_util_display.py
+++ b/tests/test_util/test_util_display.py
@@ -41,7 +41,9 @@ def test_status_iterator_length_0(app: SphinxTestApp) -> None:
@pytest.mark.sphinx('dummy', testroot='root')
-def test_status_iterator_verbosity_0(app, monkeypatch):
+def test_status_iterator_verbosity_0(
+ app: SphinxTestApp, monkeypatch: pytest.MonkeyPatch
+) -> None:
monkeypatch.setenv('FORCE_COLOR', '1')
logging.setup(app, app.status, app.warning)
@@ -59,7 +61,9 @@ def test_status_iterator_verbosity_0(app, monkeypatch):
@pytest.mark.sphinx('dummy', testroot='root')
-def test_status_iterator_verbosity_1(app, monkeypatch):
+def test_status_iterator_verbosity_1(
+ app: SphinxTestApp, monkeypatch: pytest.MonkeyPatch
+) -> None:
monkeypatch.setenv('FORCE_COLOR', '1')
logging.setup(app, app.status, app.warning)
@@ -107,7 +111,7 @@ def test_progress_message(app: SphinxTestApp) -> None:
# decorator
@progress_message('testing')
- def func():
+ def func() -> None:
logger.info('in func ', nonl=True)
func()
diff --git a/tests/test_util/test_util_docutils.py b/tests/test_util/test_util_docutils.py
index e44d508bfaa..ef281c1852a 100644
--- a/tests/test_util/test_util_docutils.py
+++ b/tests/test_util/test_util_docutils.py
@@ -17,6 +17,8 @@
)
if TYPE_CHECKING:
+ from pathlib import Path
+
from sphinx.builders import Builder
from sphinx.testing.util import SphinxTestApp
@@ -41,7 +43,7 @@ class custom_node(nodes.Element):
assert not hasattr(nodes.SparseNodeVisitor, 'depart_custom_node')
-def test_SphinxFileOutput(tmp_path):
+def test_SphinxFileOutput(tmp_path: Path) -> None:
content = 'Hello Sphinx World'
# write test.txt at first
@@ -79,16 +81,16 @@ def __init__(self, document: nodes.document, builder: Builder):
self.called: list[str] = []
super().__init__(document, builder)
- def visit_document(self, node):
+ def visit_document(self, node: nodes.document) -> None:
pass
- def depart_document(self, node):
+ def depart_document(self, node: nodes.document) -> None:
pass
- def visit_inline(self, node):
+ def visit_inline(self, node: nodes.inline) -> None:
self.called.append('visit_inline')
- def depart_inline(self, node):
+ def depart_inline(self, node: nodes.inline) -> None:
self.called.append('depart_inline')
document = new_document('')
diff --git a/tests/test_util/test_util_docutils_sphinx_directive.py b/tests/test_util/test_util_docutils_sphinx_directive.py
index ecfcab0b489..0f4e558d885 100644
--- a/tests/test_util/test_util_docutils_sphinx_directive.py
+++ b/tests/test_util/test_util_docutils_sphinx_directive.py
@@ -3,7 +3,7 @@
from types import SimpleNamespace
from docutils import nodes
-from docutils.parsers.rst.languages import en as english # type: ignore[attr-defined]
+from docutils.parsers.rst.languages import en as english
from docutils.parsers.rst.states import (
Inliner,
RSTState,
@@ -30,22 +30,23 @@ def make_directive_and_state(
if input_lines is not None:
sm.input_lines = input_lines
state = RSTState(sm)
- state.document = new_document('')
- state.document.settings.env = env
- state.document.settings.tab_width = 4
- state.document.settings.pep_references = None
- state.document.settings.rfc_references = None
+ document = state.document = new_document('')
+ document.settings.env = env
+ document.settings.tab_width = 4
+ document.settings.pep_references = None
+ document.settings.rfc_references = None
inliner = Inliner()
- inliner.init_customizations(state.document.settings)
+ inliner.init_customizations(document.settings)
state.inliner = inliner
- state.parent = None
+ state.parent = document
state.memo = SimpleNamespace(
- document=state.document,
+ document=document,
+ reporter=document.reporter,
language=english,
- inliner=state.inliner,
- reporter=state.document.reporter,
- section_level=0,
title_styles=[],
+ section_level=0,
+ section_bubble_up_kludge=False,
+ inliner=inliner,
)
directive = SphinxDirective(
name='test_directive',
diff --git a/tests/test_util/test_util_fileutil.py b/tests/test_util/test_util_fileutil.py
index 26b75d82e05..9311be58153 100644
--- a/tests/test_util/test_util_fileutil.py
+++ b/tests/test_util/test_util_fileutil.py
@@ -12,12 +12,13 @@
from sphinx._cli.util.errors import strip_escape_sequences
from sphinx.jinja2glue import BuiltinTemplateLoader
from sphinx.util.fileutil import _template_basename, copy_asset, copy_asset_file
+from sphinx.util.template import BaseRenderer
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
-class DummyTemplateLoader(BuiltinTemplateLoader):
+class DummyTemplateLoader(BuiltinTemplateLoader, BaseRenderer):
def __init__(self) -> None:
super().__init__()
builder = mock.Mock()
@@ -26,7 +27,7 @@ def __init__(self) -> None:
self.init(builder)
-def test_copy_asset_file(tmp_path):
+def test_copy_asset_file(tmp_path: Path) -> None:
renderer = DummyTemplateLoader()
# copy normal file
@@ -69,7 +70,7 @@ def test_copy_asset_file(tmp_path):
assert (subdir2 / 'asset.txt.jinja').read_text(encoding='utf8') == '# {{var1}} data'
-def test_copy_asset(tmp_path):
+def test_copy_asset(tmp_path: Path) -> None:
renderer = DummyTemplateLoader()
# prepare source files
@@ -113,7 +114,7 @@ def test_copy_asset(tmp_path):
assert sidebar == 'sidebar: baz'
# copy with exclusion
- def excluded(path):
+ def excluded(path: str) -> bool:
return 'sidebar.html' in path or 'basic.css' in path
destdir = tmp_path / 'test3'
diff --git a/tests/test_util/test_util_i18n.py b/tests/test_util/test_util_i18n.py
index 4326b4382dd..31e774b7932 100644
--- a/tests/test_util/test_util_i18n.py
+++ b/tests/test_util/test_util_i18n.py
@@ -4,12 +4,10 @@
import datetime
import os
-import sys
import time
from pathlib import Path
from typing import TYPE_CHECKING
-import babel
import pytest
from babel.messages.mofile import read_mo
@@ -60,11 +58,6 @@ def test_catalog_write_mo(tmp_path):
assert read_mo(f) is not None
-# https://github.com/python-babel/babel/issues/1183
-@pytest.mark.xfail(
- sys.platform == 'win32' and babel.__version__ == '2.17.0',
- reason='Windows tests fail with Babel 2.17',
-)
def test_format_date():
date = datetime.date(2016, 2, 7)
diff --git a/tests/test_util/test_util_images.py b/tests/test_util/test_util_images.py
index b56d68c1083..d0b4f918afc 100644
--- a/tests/test_util/test_util_images.py
+++ b/tests/test_util/test_util_images.py
@@ -11,13 +11,17 @@
parse_data_uri,
)
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+ from pathlib import Path
+
GIF_FILENAME = 'img.gif'
PNG_FILENAME = 'img.png'
PDF_FILENAME = 'img.pdf'
TXT_FILENAME = 'index.txt'
-def test_get_image_size(rootdir):
+def test_get_image_size(rootdir: Path) -> None:
assert get_image_size(rootdir / 'test-root' / GIF_FILENAME) == (200, 181)
assert get_image_size(rootdir / 'test-root' / PNG_FILENAME) == (200, 181)
assert get_image_size(rootdir / 'test-root' / PDF_FILENAME) is None
@@ -80,8 +84,5 @@ def test_parse_data_uri() -> None:
'data:iVBORw0KGgoAAAANSUhEUgAAAAUAAAAFCAYAAACNbyblAAAAHElEQVQI12P4'
'//8/w38GIAXDIBKE0DHxgljNBAAO9TXL0Y4OHwAAAABJRU5ErkJggg=='
)
- with pytest.raises(
- ValueError,
- match=r'not enough values to unpack \(expected 2, got 1\)',
- ):
+ with pytest.raises(ValueError, match=r'malformed data URI'):
parse_data_uri(uri)
diff --git a/tests/test_util/test_util_inspect.py b/tests/test_util/test_util_inspect.py
index 2ff37091fd2..1c864ba4544 100644
--- a/tests/test_util/test_util_inspect.py
+++ b/tests/test_util/test_util_inspect.py
@@ -115,6 +115,14 @@ def test_TypeAliasForwardRef():
sig_str = stringify_annotation(alias, 'fully-qualified-except-typing')
assert sig_str == "TypeAliasForwardRef('example') | None"
+ alias = alias | None
+ sig_str = stringify_annotation(alias, 'fully-qualified-except-typing')
+ assert sig_str == "TypeAliasForwardRef('example') | None"
+
+ alias = None | alias # NoQA: RUF036
+ sig_str = stringify_annotation(alias, 'fully-qualified-except-typing')
+ assert sig_str == "None | TypeAliasForwardRef('example')"
+
def test_TypeAliasNamespace() -> None:
import logging.config
diff --git a/tests/test_util/test_util_inventory.py b/tests/test_util/test_util_inventory.py
index 0cab37d7904..5432d8cc5ca 100644
--- a/tests/test_util/test_util_inventory.py
+++ b/tests/test_util/test_util_inventory.py
@@ -107,7 +107,7 @@ def _build_inventory(srcdir: Path) -> Path:
return app.outdir / 'objects.inv'
-def test_inventory_localization(tmp_path):
+def test_inventory_localization(tmp_path: Path) -> None:
# Build an app using Estonian (EE) locale
srcdir_et = _write_appconfig(tmp_path, 'et')
inventory_et = _build_inventory(srcdir_et)
diff --git a/tests/test_util/test_util_logging.py b/tests/test_util/test_util_logging.py
index a9ef7f6c4c7..2eee35ce847 100644
--- a/tests/test_util/test_util_logging.py
+++ b/tests/test_util/test_util_logging.py
@@ -18,7 +18,7 @@
from sphinx.util.logging import is_suppressed_warning, prefixed_warnings
from sphinx.util.parallel import ParallelTasks
-from tests.utils import TESTS_ROOT
+from tests.utils import TEST_ROOTS_DIR
if TYPE_CHECKING:
from sphinx.testing.util import SphinxTestApp
@@ -26,8 +26,7 @@
@pytest.mark.sphinx('html', testroot='root')
def test_info_and_warning(app: SphinxTestApp) -> None:
- app.verbosity = 2
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=2)
logger = logging.getLogger(__name__)
logger.debug('message1')
@@ -61,8 +60,7 @@ def test_Exception(app: SphinxTestApp) -> None:
@pytest.mark.sphinx('html', testroot='root')
def test_verbosity_filter(app: SphinxTestApp) -> None:
# verbosity = 0: INFO
- app.verbosity = 0
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=0)
logger = logging.getLogger(__name__)
logger.info('message1')
@@ -75,8 +73,7 @@ def test_verbosity_filter(app: SphinxTestApp) -> None:
assert 'message4' not in app.status.getvalue()
# verbosity = 1: VERBOSE
- app.verbosity = 1
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=1)
logger = logging.getLogger(__name__)
logger.info('message1')
@@ -89,8 +86,7 @@ def test_verbosity_filter(app: SphinxTestApp) -> None:
assert 'message4' not in app.status.getvalue()
# verbosity = 2: DEBUG
- app.verbosity = 2
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=2)
logger = logging.getLogger(__name__)
logger.info('message1')
@@ -301,7 +297,7 @@ def force_colors():
def test_log_no_ansi_colors(tmp_path):
with force_colors():
wfile = tmp_path / 'warnings.txt'
- srcdir = TESTS_ROOT / 'roots' / 'test-nitpicky-warnings'
+ srcdir = TEST_ROOTS_DIR / 'test-nitpicky-warnings'
argv = list(map(str, ['-b', 'html', srcdir, tmp_path, '-n', '-w', wfile]))
retcode = build_main(argv)
assert retcode == 0
@@ -312,8 +308,7 @@ def test_log_no_ansi_colors(tmp_path):
@pytest.mark.sphinx('html', testroot='root')
def test_colored_logs(app: SphinxTestApp) -> None:
- app.verbosity = 2
- logging.setup(app, app.status, app.warning)
+ logging.setup(app, app.status, app.warning, verbosity=2)
logger = logging.getLogger(__name__)
# default colors
@@ -363,7 +358,7 @@ class StreamWriter(codecs.StreamWriter):
def write(self, object):
self.stream.write(object.encode('cp1252').decode('cp1252'))
- logging.setup(app, StreamWriter(app.status), app.warning)
+ logging.setup(app, StreamWriter(app.status), app.warning, verbosity=0)
logger = logging.getLogger(__name__)
# info with UnicodeEncodeError
diff --git a/tests/test_util/test_util_matching.py b/tests/test_util/test_util_matching.py
index 0c17280aa6e..a2ad1ff5ba6 100644
--- a/tests/test_util/test_util_matching.py
+++ b/tests/test_util/test_util_matching.py
@@ -2,8 +2,13 @@
from __future__ import annotations
+from typing import TYPE_CHECKING
+
from sphinx.util.matching import Matcher, compile_matchers, get_matching_files
+if TYPE_CHECKING:
+ from pathlib import Path
+
def test_compile_matchers() -> None:
# exact matching
@@ -85,7 +90,7 @@ def test_Matcher() -> None:
assert matcher('subdir/world.py')
-def test_get_matching_files_all(rootdir):
+def test_get_matching_files_all(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root')
assert sorted(files) == [
'Makefile',
@@ -133,7 +138,7 @@ def test_get_matching_files_all(rootdir):
]
-def test_get_matching_files_all_exclude_single(rootdir):
+def test_get_matching_files_all_exclude_single(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', exclude_patterns=['**.html'])
assert sorted(files) == [
'Makefile',
@@ -178,7 +183,7 @@ def test_get_matching_files_all_exclude_single(rootdir):
]
-def test_get_matching_files_all_exclude_multiple(rootdir):
+def test_get_matching_files_all_exclude_multiple(rootdir: Path) -> None:
files = get_matching_files(
rootdir / 'test-root', exclude_patterns=['**.html', '**.inc']
)
@@ -218,7 +223,7 @@ def test_get_matching_files_all_exclude_multiple(rootdir):
]
-def test_get_matching_files_all_exclude_nonexistent(rootdir):
+def test_get_matching_files_all_exclude_nonexistent(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', exclude_patterns=['halibut/**'])
assert sorted(files) == [
'Makefile',
@@ -266,7 +271,7 @@ def test_get_matching_files_all_exclude_nonexistent(rootdir):
]
-def test_get_matching_files_all_include_single(rootdir):
+def test_get_matching_files_all_include_single(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', include_patterns=['subdir/**'])
assert sorted(files) == [
'subdir/excluded.txt',
@@ -278,7 +283,7 @@ def test_get_matching_files_all_include_single(rootdir):
]
-def test_get_matching_files_all_include_multiple(rootdir):
+def test_get_matching_files_all_include_multiple(rootdir: Path) -> None:
files = get_matching_files(
rootdir / 'test-root', include_patterns=['special/**', 'subdir/**']
)
@@ -294,12 +299,12 @@ def test_get_matching_files_all_include_multiple(rootdir):
]
-def test_get_matching_files_all_include_nonexistent(rootdir):
+def test_get_matching_files_all_include_nonexistent(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', include_patterns=['halibut/**'])
assert sorted(files) == []
-def test_get_matching_files_all_include_prefix(rootdir):
+def test_get_matching_files_all_include_prefix(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', include_patterns=['autodoc*'])
assert sorted(files) == [
'autodoc.txt',
@@ -307,7 +312,7 @@ def test_get_matching_files_all_include_prefix(rootdir):
]
-def test_get_matching_files_all_include_question_mark(rootdir):
+def test_get_matching_files_all_include_question_mark(rootdir: Path) -> None:
files = get_matching_files(rootdir / 'test-root', include_patterns=['img.???'])
assert sorted(files) == [
'img.gif',
diff --git a/tests/test_util/test_util_nodes.py b/tests/test_util/test_util_nodes.py
index 61342efdb1b..406e08c815c 100644
--- a/tests/test_util/test_util_nodes.py
+++ b/tests/test_util/test_util_nodes.py
@@ -2,7 +2,6 @@
from __future__ import annotations
-import warnings
from textwrap import dedent
from typing import TYPE_CHECKING, Any
@@ -22,19 +21,17 @@
)
if TYPE_CHECKING:
+ from collections.abc import Iterable
+
from docutils.nodes import document
-def _transform(doctree) -> None:
+def _transform(doctree: nodes.document) -> None:
ApplySourceWorkaround(doctree).apply()
def create_new_document() -> document:
- with warnings.catch_warnings():
- warnings.filterwarnings('ignore', category=DeprecationWarning)
- # DeprecationWarning: The frontend.OptionParser class will be replaced
- # by a subclass of argparse.ArgumentParser in Docutils 0.21 or later.
- settings = frontend.OptionParser(components=(rst.Parser,)).get_default_values()
+ settings = frontend.get_default_settings(rst.Parser)
settings.id_prefix = 'id'
document = new_document('dummy.txt', settings)
return document
@@ -47,7 +44,11 @@ def _get_doctree(text):
return document
-def assert_node_count(messages, node_type, expect_count) -> None:
+def assert_node_count(
+ messages: Iterable[tuple[nodes.Element, str]],
+ node_type: type[nodes.Node],
+ expect_count: int,
+) -> None:
count = 0
node_list = [node for node, msg in messages]
for node in node_list:
@@ -155,7 +156,7 @@ def test_NodeMatcher():
),
],
)
-def test_extract_messages(rst, node_cls, count):
+def test_extract_messages(rst: str, node_cls: type[nodes.Element], count: int) -> None:
msg = extract_messages(_get_doctree(dedent(rst)))
assert_node_count(msg, node_cls, count)
@@ -182,7 +183,8 @@ def test_extract_messages_without_rawsource() -> None:
assert next(m for n, m in extract_messages(document)), 'text sentence'
-def test_clean_astext():
+def test_clean_astext() -> None:
+ node: nodes.Element
node = nodes.paragraph(text='hello world')
assert clean_astext(node) == 'hello world'
diff --git a/tests/test_util/test_util_rst.py b/tests/test_util/test_util_rst.py
index d8fcf8d12d0..41854c2aecc 100644
--- a/tests/test_util/test_util_rst.py
+++ b/tests/test_util/test_util_rst.py
@@ -5,7 +5,13 @@
from docutils.statemachine import StringList
from jinja2 import Environment
-from sphinx.util.rst import append_epilog, escape, heading, prepend_prolog, textwidth
+from sphinx.util.rst import (
+ _append_epilogue,
+ _prepend_prologue,
+ escape,
+ heading,
+ textwidth,
+)
def test_escape() -> None:
@@ -15,25 +21,25 @@ def test_escape() -> None:
assert escape('.. toctree::') == r'\.. toctree\:\:'
-def test_append_epilog() -> None:
+def test_append_epilogue() -> None:
epilog = 'this is rst_epilog\ngood-bye reST!'
content = StringList(
['hello Sphinx world', 'Sphinx is a document generator'],
'dummy.rst',
)
- append_epilog(content, epilog)
+ _append_epilogue(content, epilog)
assert list(content.xitems()) == [
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
('dummy.rst', 2, ''),
- ('', 0, 'this is rst_epilog'),
- ('', 1, 'good-bye reST!'),
+ ('', 0, 'this is rst_epilog'),
+ ('', 1, 'good-bye reST!'),
]
-def test_prepend_prolog() -> None:
- prolog = 'this is rst_prolog\nhello reST!'
+def test_prepend_prologue() -> None:
+ prologue = 'this is rst_prolog\nhello reST!'
content = StringList(
[
':title: test of SphinxFileInput',
@@ -44,14 +50,14 @@ def test_prepend_prolog() -> None:
],
'dummy.rst',
)
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
('dummy.rst', 0, ':title: test of SphinxFileInput'),
('dummy.rst', 1, ':author: Sphinx team'),
('', 0, ''),
- ('', 0, 'this is rst_prolog'),
- ('', 1, 'hello reST!'),
+ ('', 0, 'this is rst_prolog'),
+ ('', 1, 'hello reST!'),
('', 0, ''),
('dummy.rst', 2, ''),
('dummy.rst', 3, 'hello Sphinx world'),
@@ -60,17 +66,17 @@ def test_prepend_prolog() -> None:
def test_prepend_prolog_with_CR() -> None:
- # prolog having CR at tail
- prolog = 'this is rst_prolog\nhello reST!\n'
+ # prologue having CR at tail
+ prologue = 'this is rst_prolog\nhello reST!\n'
content = StringList(
['hello Sphinx world', 'Sphinx is a document generator'],
'dummy.rst',
)
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
- ('', 0, 'this is rst_prolog'),
- ('', 1, 'hello reST!'),
+ ('', 0, 'this is rst_prolog'),
+ ('', 1, 'hello reST!'),
('', 0, ''),
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
@@ -78,17 +84,17 @@ def test_prepend_prolog_with_CR() -> None:
def test_prepend_prolog_without_CR() -> None:
- # prolog not having CR at tail
- prolog = 'this is rst_prolog\nhello reST!'
+ # prologue not having CR at tail
+ prologue = 'this is rst_prolog\nhello reST!'
content = StringList(
['hello Sphinx world', 'Sphinx is a document generator'],
'dummy.rst',
)
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
- ('', 0, 'this is rst_prolog'),
- ('', 1, 'hello reST!'),
+ ('', 0, 'this is rst_prolog'),
+ ('', 1, 'hello reST!'),
('', 0, ''),
('dummy.rst', 0, 'hello Sphinx world'),
('dummy.rst', 1, 'Sphinx is a document generator'),
@@ -96,7 +102,7 @@ def test_prepend_prolog_without_CR() -> None:
def test_prepend_prolog_with_roles_in_sections() -> None:
- prolog = 'this is rst_prolog\nhello reST!'
+ prologue = 'this is rst_prolog\nhello reST!'
content = StringList(
[
':title: test of SphinxFileInput',
@@ -109,14 +115,14 @@ def test_prepend_prolog_with_roles_in_sections() -> None:
],
'dummy.rst',
)
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
('dummy.rst', 0, ':title: test of SphinxFileInput'),
('dummy.rst', 1, ':author: Sphinx team'),
('', 0, ''),
- ('', 0, 'this is rst_prolog'),
- ('', 1, 'hello reST!'),
+ ('', 0, 'this is rst_prolog'),
+ ('', 1, 'hello reST!'),
('', 0, ''),
('dummy.rst', 2, ''),
('dummy.rst', 3, ':mod:`foo`'),
@@ -128,13 +134,13 @@ def test_prepend_prolog_with_roles_in_sections() -> None:
def test_prepend_prolog_with_roles_in_sections_with_newline() -> None:
# prologue with trailing line break
- prolog = 'this is rst_prolog\nhello reST!\n'
+ prologue = 'this is rst_prolog\nhello reST!\n'
content = StringList([':mod:`foo`', '-' * 10, '', 'hello'], 'dummy.rst')
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
- ('', 0, 'this is rst_prolog'),
- ('', 1, 'hello reST!'),
+ ('', 0, 'this is rst_prolog'),
+ ('', 1, 'hello reST!'),
('', 0, ''),
('dummy.rst', 0, ':mod:`foo`'),
('dummy.rst', 1, '----------'),
@@ -145,13 +151,13 @@ def test_prepend_prolog_with_roles_in_sections_with_newline() -> None:
def test_prepend_prolog_with_roles_in_sections_without_newline() -> None:
# prologue with no trailing line break
- prolog = 'this is rst_prolog\nhello reST!'
+ prologue = 'this is rst_prolog\nhello reST!'
content = StringList([':mod:`foo`', '-' * 10, '', 'hello'], 'dummy.rst')
- prepend_prolog(content, prolog)
+ _prepend_prologue(content, prologue)
assert list(content.xitems()) == [
- ('', 0, 'this is rst_prolog'),
- ('', 1, 'hello reST!'),
+ ('', 0, 'this is rst_prolog'),
+ ('', 1, 'hello reST!'),
('', 0, ''),
('dummy.rst', 0, ':mod:`foo`'),
('dummy.rst', 1, '----------'),
diff --git a/tests/test_util/test_util_typing.py b/tests/test_util/test_util_typing.py
index 35ee240f7b8..8a561c378ed 100644
--- a/tests/test_util/test_util_typing.py
+++ b/tests/test_util/test_util_typing.py
@@ -205,7 +205,7 @@ def test_is_invalid_builtin_class() -> None:
zipfile.Path,
zipfile.CompleteDirs,
)
- if sys.version_info[:2] >= (3, 13):
+ if sys.version_info[:2] == (3, 13):
invalid_types += (
# pathlib
Path,
@@ -217,7 +217,7 @@ def test_is_invalid_builtin_class() -> None:
)
invalid_names = {(cls.__module__, cls.__qualname__) for cls in invalid_types}
- if sys.version_info[:2] < (3, 13):
+ if sys.version_info[:2] != (3, 13):
invalid_names |= {
('pathlib._local', 'Path'),
('pathlib._local', 'PosixPath'),
@@ -231,7 +231,7 @@ def test_is_invalid_builtin_class() -> None:
('zipfile._path', 'Path'),
('zipfile._path', 'CompleteDirs'),
}
- assert _INVALID_BUILTIN_CLASSES.keys() == invalid_names
+ assert set(_INVALID_BUILTIN_CLASSES) == invalid_names
def test_restify_type_hints_containers():
diff --git a/tests/test_versioning.py b/tests/test_versioning.py
index 58e3b224c58..7b27106b98e 100644
--- a/tests/test_versioning.py
+++ b/tests/test_versioning.py
@@ -20,7 +20,7 @@ def _setup_module(rootdir, sphinx_test_tempdir):
if not srcdir.exists():
shutil.copytree(rootdir / 'test-versioning', srcdir)
app = SphinxTestApp(srcdir=srcdir)
- app.builder.env.app = app
+ app.builder.env._app = app
app.connect('doctree-resolved', on_doctree_resolved)
app.build()
original = doctrees['original']
diff --git a/tests/utils.py b/tests/utils.py
index 59757edbd99..b75f9cc1c4b 100644
--- a/tests/utils.py
+++ b/tests/utils.py
@@ -2,6 +2,7 @@
__all__ = ('http_server',)
+import os
import socket
from contextlib import contextmanager
from http.server import ThreadingHTTPServer
@@ -19,10 +20,16 @@
from sphinx.application import Sphinx
+TESTS_ROOT: Final[Path] = Path(__file__).resolve().parent
+TEST_ROOTS_DIR: Final[Path] = TESTS_ROOT / (
+ 'roots-read-only'
+ if 'CI' in os.environ and (TESTS_ROOT / 'roots-read-only').is_dir()
+ else 'roots'
+)
+
# Generated with:
# $ openssl req -new -x509 -days 3650 -nodes -out cert.pem \
# -keyout cert.pem -addext "subjectAltName = DNS:localhost"
-TESTS_ROOT: Final[Path] = Path(__file__).resolve().parent
CERT_FILE: Final[str] = str(TESTS_ROOT / 'certs' / 'cert.pem')
diff --git a/tox.ini b/tox.ini
index 674013fdc08..58c2b140351 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
minversion = 4.2.0
-envlist = py{311,312,313,314}
+envlist = py{311,312,313,314,315}
[testenv]
usedevelop = True
@@ -19,8 +19,8 @@ passenv =
BUILDER
READTHEDOCS
description =
- py{311,312,313,314}: Run unit tests against {envname}.
-extras =
+ py{311,312,313,314,315}: Run unit tests against {envname}.
+dependency_groups =
test
setenv =
PYTHONWARNINGS = error
@@ -31,8 +31,9 @@ commands=
[testenv:lint]
description =
Run linters.
-extras =
+dependency_groups =
lint
+ types
# If you update any of these commands, don't forget to update the equivalent
# GitHub Workflow step
commands =
@@ -43,7 +44,7 @@ commands =
[testenv:docs]
description =
Build documentation.
-extras =
+dependency_groups =
docs
commands =
python -c "import shutil; shutil.rmtree('./build/sphinx', ignore_errors=True) if '{env:CLEAN:}' else None"
@@ -52,7 +53,7 @@ commands =
[testenv:docs-live]
description =
Build documentation.
-extras =
+dependency_groups =
docs
deps =
sphinx-autobuild
@@ -70,7 +71,7 @@ commands =
[testenv:ruff]
description =
Run ruff formatting and linting.
-extras =
+dependency_groups =
lint
commands =
ruff format .
@@ -79,8 +80,13 @@ commands =
[testenv:mypy]
description =
Run mypy type checking.
-extras =
- lint
- test
+dependency_groups =
+ types
commands =
mypy {posargs}
+
+[testenv:prettier]
+description =
+ Run the Prettier JavaScript formatter.
+commands =
+ npx prettier@3.5 --write "sphinx/themes/**/*.js" "!sphinx/themes/bizstyle/static/css3-mediaqueries*.js" "tests/js/**/*.{js,mjs}" "!tests/js/fixtures/**"
diff --git a/ty.toml b/ty.toml
new file mode 100644
index 00000000000..50f28f49900
--- /dev/null
+++ b/ty.toml
@@ -0,0 +1,21 @@
+# Configuration file for Ty_.
+# n.b. Ty is early in development.
+# Sphinx's current primary/reference type-checker is mypy.
+#
+# .. _Ty: https://docs.astral.sh/ty/
+
+[src]
+include = [
+ "doc/conf.py",
+ "doc/development/tutorials/examples/autodoc_intenum.py",
+ "doc/development/tutorials/examples/helloworld.py",
+ "sphinx",
+ "tests",
+ "utils",
+]
+exclude = [
+ "tests/roots/test-pycode/cp_1251_coded.py", # Not UTF-8
+ # This panics (2025-08-18; ty 0.0.1-alpha.18).
+ # See https://github.com/astral-sh/ty/issues/256
+ "tests/test_config/test_config.py",
+]
diff --git a/utils/babel_runner.py b/utils/babel_runner.py
index 82bf79fd5b3..63fe8ab13e8 100644
--- a/utils/babel_runner.py
+++ b/utils/babel_runner.py
@@ -233,7 +233,7 @@ def run_compile() -> None:
for x in message.locations
):
msgid = message.id
- if isinstance(msgid, list | tuple):
+ if isinstance(msgid, (list, tuple)):
msgid = msgid[0]
js_catalogue[msgid] = message.string
diff --git a/utils/convert_attestations.py b/utils/convert_attestations.py
index 7e227c21358..e015da7d5c0 100644
--- a/utils/convert_attestations.py
+++ b/utils/convert_attestations.py
@@ -7,7 +7,7 @@
# /// script
# requires-python = ">=3.11"
# dependencies = [
-# "pypi-attestations==0.0.21",
+# "pypi-attestations==0.0.27",
# "betterproto==2.0.0b6",
# ]
# ///
diff --git a/utils/generate_snowball.py b/utils/generate_snowball.py
new file mode 100755
index 00000000000..181cb01a1c9
--- /dev/null
+++ b/utils/generate_snowball.py
@@ -0,0 +1,138 @@
+#!/usr/bin/env python3
+
+"""Refresh and update language stemming data from the Snowball project."""
+
+# /// script
+# requires-python = ">=3.11"
+# dependencies = [
+# "requests>=2.30",
+# ]
+# ///
+
+from __future__ import annotations
+
+import hashlib
+import shutil
+import subprocess
+import sys
+import tarfile
+import tempfile
+from io import BytesIO
+from pathlib import Path
+
+import requests
+
+SNOWBALL_VERSION = '3.0.1'
+SNOWBALL_URL = f'https://github.com/snowballstem/snowball/archive/refs/tags/v{SNOWBALL_VERSION}.tar.gz'
+SNOWBALL_SHA256 = '80ac10ce40dc4fcfbfed8d085c457b5613da0e86a73611a3d5527d044a142d60'
+
+ROOT = Path(__file__).resolve().parent.parent
+SEARCH_DIR = ROOT / 'sphinx' / 'search'
+STOPWORDS_DIR = SEARCH_DIR / '_stopwords'
+NON_MINIFIED_JS_DIR = SEARCH_DIR / 'non-minified-js'
+
+STOPWORD_URLS = (
+ ('da', 'danish', 'https://snowballstem.org/algorithms/danish/stop.txt'),
+ ('de', 'german', 'https://snowballstem.org/algorithms/german/stop.txt'),
+ ('en', 'english', 'https://snowballstem.org/algorithms/english/stop.txt'),
+ ('es', 'spanish', 'https://snowballstem.org/algorithms/spanish/stop.txt'),
+ ('fi', 'finnish', 'https://snowballstem.org/algorithms/finnish/stop.txt'),
+ ('fr', 'french', 'https://snowballstem.org/algorithms/french/stop.txt'),
+ ('hu', 'hungarian', 'https://snowballstem.org/algorithms/hungarian/stop.txt'),
+ ('it', 'italian', 'https://snowballstem.org/algorithms/italian/stop.txt'),
+ ('nl', 'dutch', 'https://snowballstem.org/algorithms/dutch/stop.txt'),
+ ('no', 'norwegian', 'https://snowballstem.org/algorithms/norwegian/stop.txt'),
+ ('pt', 'portuguese', 'https://snowballstem.org/algorithms/portuguese/stop.txt'),
+ # ('ro', 'romanian', ''),
+ ('ru', 'russian', 'https://snowballstem.org/algorithms/russian/stop.txt'),
+ ('sv', 'swedish', 'https://snowballstem.org/algorithms/swedish/stop.txt'),
+ # ('tr', 'turkish', ''),
+)
+
+
+def regenerate_stopwords() -> None:
+ STOPWORDS_DIR.mkdir(parents=True, exist_ok=True)
+ STOPWORDS_DIR.joinpath('__init__.py').touch()
+
+ for lang_code, lang_name, url in STOPWORD_URLS:
+ content = requests.get(url, timeout=5).content
+ try:
+ data = content.decode('utf-8')
+ except UnicodeDecodeError:
+ if lang_code == 'ru':
+ data = content.decode('koi8-r')
+ else:
+ data = content.decode('latin-1')
+
+ # record the original source of the stopwords list
+ txt_path = STOPWORDS_DIR / f'{lang_code}.txt'
+ txt_path.write_text(data.rstrip() + '\n', encoding='utf-8')
+
+ # generate the Python stopwords set
+ stopwords = parse_stop_word(data)
+ with (STOPWORDS_DIR / f'{lang_code}.py').open('w', encoding='utf-8') as f:
+ f.write('# automatically generated by utils/generate-snowball.py\n')
+ f.write(f'# from {url}\n\n')
+ f.write('from __future__ import annotations\n\n')
+ f.write(f'{lang_name.upper()}_STOPWORDS = frozenset(')
+ if stopwords:
+ f.write('{\n')
+ for word in sorted(stopwords, key=str.casefold):
+ f.write(f' {word!r},\n')
+ f.write('}')
+ f.write(')\n')
+
+
+def parse_stop_word(source: str) -> frozenset[str]:
+ """Collect the stopwords from a snowball style word list:
+
+ .. code:: text
+
+ list of space separated stop words | optional comment
+ """
+ stop_words: set[str] = set()
+ for line in source.splitlines():
+ stop_words.update(line.partition('|')[0].split()) # remove comment
+ return frozenset(stop_words)
+
+
+def regenerate_javascript() -> None:
+ tmp_root = Path(tempfile.mkdtemp())
+
+ # Download and verify the snowball release
+ archive = requests.get(SNOWBALL_URL, timeout=60).content
+ digest = hashlib.sha256(archive).hexdigest()
+ if digest != SNOWBALL_SHA256:
+ msg = (
+ f'data does not match expected checksum '
+ f'(expected {SNOWBALL_SHA256}, saw {digest}).'
+ )
+ raise RuntimeError(msg)
+
+ # Extract the release archive
+ with tarfile.TarFile.gzopen(
+ 'snowball.tar.gz', mode='r', fileobj=BytesIO(archive)
+ ) as tar:
+ tar.extractall(tmp_root, filter='data')
+ snowball_root = tmp_root / f'snowball-{SNOWBALL_VERSION}'
+ snowball_dist = snowball_root / 'dist'
+
+ # Generate JS stemmer files
+ cmd = ('make', '--jobs=8', 'dist_libstemmer_js')
+ subprocess.run(cmd, check=True, cwd=snowball_root)
+ with tarfile.open(snowball_dist / f'jsstemmer-{SNOWBALL_VERSION}.tar.gz') as tar:
+ tar.extractall(snowball_dist, filter='data')
+
+ # Copy generated JS to sphinx/search/
+ NON_MINIFIED_JS_DIR.mkdir(exist_ok=True)
+ js_dir = snowball_dist / f'jsstemmer-{SNOWBALL_VERSION}' / 'javascript'
+ shutil.copytree(js_dir, NON_MINIFIED_JS_DIR, dirs_exist_ok=True)
+
+ # Clean up
+ shutil.rmtree(snowball_root)
+
+
+if __name__ == '__main__':
+ regenerate_stopwords()
+ if sys.platform != 'win32':
+ regenerate_javascript()