diff --git a/.git-blame-ignore-revs b/.git-blame-ignore-revs new file mode 100644 index 00000000..827c6271 --- /dev/null +++ b/.git-blame-ignore-revs @@ -0,0 +1,5 @@ +# Reformatting with Black +d47dc3c1fd1f2bafcc079006c3283e465b372f75 + +# Reformatting with Ruff +fc2851a23b840481eb92ac522da5bc85305e9d8e \ No newline at end of file diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index c02c5c37..c42d0fab 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -17,7 +17,7 @@ jobs: strategy: fail-fast: false matrix: - PYTHON_VERSION: ['3.8'] + PYTHON_VERSION: ['3.9'] timeout-minutes: 10 steps: - uses: actions/cache@v1 diff --git a/.github/workflows/static.yml b/.github/workflows/static.yml index 37e2b763..881a0aa6 100644 --- a/.github/workflows/static.yml +++ b/.github/workflows/static.yml @@ -9,6 +9,10 @@ on: branches: - '*' +concurrency: + group: static-${{ github.ref }} + cancel-in-progress: true + jobs: build: name: Static code analysis @@ -18,26 +22,26 @@ jobs: OS: 'linux' timeout-minutes: 2 steps: - - uses: actions/cache@v1 + - uses: actions/cache@v4 with: path: ~/.cache/pip - key: static-pip-${{ hashFiles('setup.py') }} + key: static-pip-${{ hashFiles('pyproject.toml') }} restore-keys: static-pip- - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: - # TODO: check with Python 3, but need to fix the - # errors first - python-version: '3.7' + python-version: '3.9' architecture: 'x64' - run: python -m pip install --upgrade pip setuptools jsonschema - - run: pip install -e .[pylint,pycodestyle,pyflakes] - - name: Pylint checks - run: pylint pylsp test - - name: Code style checks - run: pycodestyle pylsp test - - name: Pyflakes checks - run: pyflakes pylsp test + # If we don't install pycodestyle, pylint will throw an unused-argument error in pylsp/plugins/pycodestyle_lint.py:72 + # This error cannot be resolved by adding a pylint: 
disable=unused-argument comment ... + - run: | + pip install -e .[pylint,pycodestyle] + pip install ruff==0.9.4 + - name: ruff linter and code style checks + run: ruff check pylsp test + - name: ruff code formatter check + run: ruff format --check pylsp test - name: Validate JSON schema run: echo {} | jsonschema pylsp/config/schema.json - name: Ensure JSON schema and Markdown docs are in sync diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index 861f423b..543744a6 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -9,6 +9,10 @@ on: branches: - '*' +concurrency: + group: test-linux-${{ github.ref }} + cancel-in-progress: true + jobs: build: name: Linux Py${{ matrix.PYTHON_VERSION }} @@ -20,21 +24,20 @@ jobs: strategy: fail-fast: false matrix: - PYTHON_VERSION: ['3.9', '3.8', '3.7'] + PYTHON_VERSION: ['3.14', '3.13', '3.12', '3.11', '3.10', '3.9'] timeout-minutes: 10 steps: - - uses: actions/cache@v1 + - uses: actions/cache@v4 with: path: ~/.cache/pip - key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('setup.py') }} + key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('pyproject.toml') }} restore-keys: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip- - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.PYTHON_VERSION }} architecture: 'x64' - name: Create Jedi environment for testing - if: matrix.PYTHON_VERSION != '2.7' run: | python3 -m venv /tmp/pyenv /tmp/pyenv/bin/python -m pip install loghub @@ -42,7 +45,7 @@ jobs: - run: pip install -e .[all,test] - name: Show test environment run: pip list - - run: pytest -v test/ + - run: pytest --color=yes -v test/ # Enable this if SSH debugging is required # - name: Setup tmate session # uses: mxschmitt/action-tmate@v3 diff --git a/.github/workflows/test-mac.yml b/.github/workflows/test-mac.yml index 
29084068..7b06ad26 100644 --- a/.github/workflows/test-mac.yml +++ b/.github/workflows/test-mac.yml @@ -9,10 +9,14 @@ on: branches: - '*' +concurrency: + group: test-mac-${{ github.ref }} + cancel-in-progress: true + jobs: build: name: Mac Py${{ matrix.PYTHON_VERSION }} - runs-on: macos-latest + runs-on: macos-13 env: CI: 'true' OS: 'macos' @@ -20,21 +24,20 @@ jobs: strategy: fail-fast: false matrix: - PYTHON_VERSION: ['3.9', '3.8', '3.7'] + PYTHON_VERSION: ['3.14', '3.12', '3.9'] timeout-minutes: 10 steps: - - uses: actions/cache@v1 + - uses: actions/cache@v4 with: path: ~/Library/Caches/pip - key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('setup.py') }} + key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('pyproject.toml') }} restore-keys: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip- - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.PYTHON_VERSION }} architecture: 'x64' - name: Create Jedi environment for testing - if: matrix.PYTHON_VERSION != '2.7' run: | python3 -m venv /tmp/pyenv /tmp/pyenv/bin/python -m pip install loghub @@ -42,7 +45,7 @@ jobs: - run: pip install -e .[all,test] - name: Show test environment run: pip list - - run: pytest -v test/ + - run: pytest --color=yes -v test/ # Enable this if SSH debugging is required # - name: Setup tmate session # uses: mxschmitt/action-tmate@v3 diff --git a/.github/workflows/test-win.yml b/.github/workflows/test-win.yml index 85f1013e..fa71c5ce 100644 --- a/.github/workflows/test-win.yml +++ b/.github/workflows/test-win.yml @@ -9,6 +9,10 @@ on: branches: - '*' +concurrency: + group: test-win-${{ github.ref }} + cancel-in-progress: true + jobs: build: name: Win Py${{ matrix.PYTHON_VERSION }} @@ -20,16 +24,16 @@ jobs: strategy: fail-fast: false matrix: - PYTHON_VERSION: ['3.9', '3.8', '3.7'] + PYTHON_VERSION: ['3.14', '3.12', '3.9'] timeout-minutes: 10 steps: - - 
uses: actions/cache@v1 + - uses: actions/cache@v4 with: path: ~\AppData\Local\pip\Cache - key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('setup.py') }} + key: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip-${{ hashFiles('pyproject.toml') }} restore-keys: ${{ runner.os }}-${{ matrix.PYTHON_VERSION }}-pip- - - uses: actions/checkout@v2 - - uses: actions/setup-python@v2 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: ${{ matrix.PYTHON_VERSION }} architecture: 'x64' @@ -37,4 +41,4 @@ jobs: - run: pip install -e .[all,test] - name: Show test environment run: pip list - - run: pytest -v test/ + - run: pytest --color=yes -v test/ diff --git a/.gitignore b/.gitignore index 3c4093d1..fe35f067 100644 --- a/.gitignore +++ b/.gitignore @@ -100,6 +100,7 @@ ENV/ # Spyder project settings .spyderproject +.spyproject # Rope project settings .ropeproject diff --git a/.pylintrc b/.pylintrc index ebe4f30f..fe36d8cb 100644 --- a/.pylintrc +++ b/.pylintrc @@ -28,4 +28,4 @@ reports = no generated-members = pylsp_* - cache_clear + cache_clear \ No newline at end of file diff --git a/.well-known/funding-manifest-urls b/.well-known/funding-manifest-urls new file mode 100644 index 00000000..dc9cf163 --- /dev/null +++ b/.well-known/funding-manifest-urls @@ -0,0 +1 @@ +https://www.spyder-ide.org/funding.json diff --git a/CHANGELOG.md b/CHANGELOG.md index 857d9e1b..1fa1aacf 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,554 @@ # History of changes +## Version 1.14.0 (2025/12/06) + +### New features +* Add `pylsp.signature.include_docstring` to hide docstring in signatures. +* Add support for Pylint 4. +* Fix support for Python 3.14. 
+ +### Pull Requests Merged + +* [PR 690](https://github.com/python-lsp/python-lsp-server/pull/690) - Fix starting the server for Python 3.14 and run tests with that version, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 689](https://github.com/python-lsp/python-lsp-server/pull/689) - Adjust test based on Jedi version, by [@aeliton](https://github.com/aeliton) +* [PR 687](https://github.com/python-lsp/python-lsp-server/pull/687) - Bump allowed pylint to less than 4.1, by [@tiltingpenguin](https://github.com/tiltingpenguin) +* [PR 686](https://github.com/python-lsp/python-lsp-server/pull/686) - Add option to hide docstring in signatures, by [@tomekwojcik](https://github.com/tomekwojcik) + +In this release 4 pull requests were closed. + +---- + +## Version 1.13.2 (2025/11/19) + +### Pull Requests Merged + +* [PR 683](https://github.com/python-lsp/python-lsp-server/pull/683) - Prevent showing cmd on Windows when running flake8, by [@dalthviz](https://github.com/dalthviz) +* [PR 669](https://github.com/python-lsp/python-lsp-server/pull/669) - Fix license entries in `pyproject.toml` due to pep 639, by [@ccordoba12](https://github.com/ccordoba12) + +In this release 2 pull requests were closed. + +---- + +## Version 1.13.1 (2025/08/26) + +### Pull Requests Merged + +* [PR 667](https://github.com/python-lsp/python-lsp-server/pull/667) - Use PyQt6 for testing, by [@WhyNotHugo](https://github.com/WhyNotHugo) +* [PR 666](https://github.com/python-lsp/python-lsp-server/pull/666) - Expose a shutdown hook, by [@dlax](https://github.com/dlax) +* [PR 663](https://github.com/python-lsp/python-lsp-server/pull/663) - Copy `LAST_JEDI_COMPLETIONS` to cell document so that `completionItem/resolve` will work, by [@hjr265](https://github.com/hjr265) + +In this release 3 pull requests were closed. + +---- + +## Version 1.13.0 (2025/07/07) + +### New features +* Format signatures in docstrings. +* Add support for type definition. +* Send websocket payload using a queue. 
+* Fix getting symbols with inline comments that include the `import` word. +* Drop support for Python 3.8 + +### Issues Closed + +* [Issue 640](https://github.com/python-lsp/python-lsp-server/issues/640) - Should we add `py.typed` marker? ([PR 641](https://github.com/python-lsp/python-lsp-server/pull/641) by [@krassowski](https://github.com/krassowski)) +* [Issue 630](https://github.com/python-lsp/python-lsp-server/issues/630) - Formatting of signatures in docstrings +* [Issue 627](https://github.com/python-lsp/python-lsp-server/issues/627) - Do not call str.splitlines() twice in the same function +* [Issue 97](https://github.com/python-lsp/python-lsp-server/issues/97) - Failed to run lsp-goto-type-definition and lsp-goto-implementation. + +In this release 4 issues were closed. + +### Pull Requests Merged + +* [PR 656](https://github.com/python-lsp/python-lsp-server/pull/656) - Add space between punctuation and next sentence, by [@spenserblack](https://github.com/spenserblack) +* [PR 650](https://github.com/python-lsp/python-lsp-server/pull/650) - Drop Python 3.8, add Python 3.11 to CI and run `pyupgrade`, by [@krassowski](https://github.com/krassowski) +* [PR 646](https://github.com/python-lsp/python-lsp-server/pull/646) - Enforce `setuptools` 69 or newer to ensure `py.typed` marker gets included, by [@krassowski](https://github.com/krassowski) +* [PR 645](https://github.com/python-lsp/python-lsp-server/pull/645) - Add support for type definition, by [@Hoblovski](https://github.com/Hoblovski) +* [PR 641](https://github.com/python-lsp/python-lsp-server/pull/641) - Add `py.typed` marker to `pylsp` imports to be analysed with `mypy`, by [@krassowski](https://github.com/krassowski) ([640](https://github.com/python-lsp/python-lsp-server/issues/640)) +* [PR 639](https://github.com/python-lsp/python-lsp-server/pull/639) - Fix inline comments that include text with `import`, by [@jsbautista](https://github.com/jsbautista) +* [PR 
633](https://github.com/python-lsp/python-lsp-server/pull/633) - Send websocket payload using a queue, by [@Raekkeri](https://github.com/Raekkeri) +* [PR 631](https://github.com/python-lsp/python-lsp-server/pull/631) - Allow to format signatures in docstrings, by [@krassowski](https://github.com/krassowski) +* [PR 628](https://github.com/python-lsp/python-lsp-server/pull/628) - Do not call `str.splitlines()` twice in the same function., by [@fukanchik](https://github.com/fukanchik) + +In this release 9 pull requests were closed. + +---- + +## Version 1.12.2 (2025/02/07) + +### Pull Requests Merged + +* [PR 608](https://github.com/python-lsp/python-lsp-server/pull/608) - Fix putting `extra_paths` in front of `sys.path`, by [@cmashinho](https://github.com/cmashinho) + +In this release 1 pull request was closed. + +---- + +## Version 1.12.1 (2025/02/06) + +### Issues Closed + +* [Issue 602](https://github.com/python-lsp/python-lsp-server/issues/602) - `test_jedi_completion_with_fuzzy_enabled` fails with jedi 0.19.2 + +In this release 1 issue was closed. + +### Pull Requests Merged + +* [PR 616](https://github.com/python-lsp/python-lsp-server/pull/616) - Fix formatting issues reported by the latest Ruff version, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 609](https://github.com/python-lsp/python-lsp-server/pull/609) - Fix test for Jedi 0.19.2, by [@cmashinho](https://github.com/cmashinho) +* [PR 604](https://github.com/python-lsp/python-lsp-server/pull/604) - Correctly handle null value for `ropeFolder` config, by [@osiewicz](https://github.com/osiewicz) + +In this release 3 pull requests were closed. + +---- + +## Version 1.12.0 (2024/08/25) + +### New features + +* Add support for `window/logMessage`. +* Add version support to `workspace/publishDiagnostics`. +* Add `extendSelect` option to flake8 plugin. +* Allow Jedi's `extra_paths` to be placed in front of `sys.path`. 
+* Bump flake8 to 7.1 + +### Pull Requests Merged + +* [PR 586](https://github.com/python-lsp/python-lsp-server/pull/586) - Update versions of Github actions used on CI, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 585](https://github.com/python-lsp/python-lsp-server/pull/585) - Fix linting issues reported by the latest version of Ruff, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 584](https://github.com/python-lsp/python-lsp-server/pull/584) - Use `%r` to have a better log, by [@tebeka](https://github.com/tebeka) +* [PR 581](https://github.com/python-lsp/python-lsp-server/pull/581) - Set return type to `None` for functions without returns, by [@agserrano3](https://github.com/agserrano3) +* [PR 576](https://github.com/python-lsp/python-lsp-server/pull/576) - Bump flake8 to 7.1, by [@bnavigator](https://github.com/bnavigator) +* [PR 573](https://github.com/python-lsp/python-lsp-server/pull/573) - Add `window/logMessage` support, by [@Dylmay](https://github.com/Dylmay) +* [PR 570](https://github.com/python-lsp/python-lsp-server/pull/570) - Fix Fedora instructions, by [@penguinpee](https://github.com/penguinpee) +* [PR 565](https://github.com/python-lsp/python-lsp-server/pull/565) - Add version support to `workspace/publishDiagnostics`, by [@Dylmay](https://github.com/Dylmay) +* [PR 560](https://github.com/python-lsp/python-lsp-server/pull/560) - Use macOS 13 to run our tests on CI, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 559](https://github.com/python-lsp/python-lsp-server/pull/559) - Add `extendSelect` option to flake8 plugin, by [@Susensio](https://github.com/Susensio) +* [PR 547](https://github.com/python-lsp/python-lsp-server/pull/547) - Infer end position for Pylint diagnostics, by [@Wuestengecko](https://github.com/Wuestengecko) +* [PR 527](https://github.com/python-lsp/python-lsp-server/pull/527) - Allow `extra_paths` to be placed in front of `sys.path`, by [@mrclary](https://github.com/mrclary) + +In this release 12 pull 
requests were closed. + +---- + +## Version 1.11.0 (2024/03/29) + +### New features + +* Remove the `rope_rename` plugin. People that were using it need to install + the `pylsp-rope` third-party plugin instead. +* Add support for Pylint 3.1 + +### Issues Closed + +* [Issue 255](https://github.com/python-lsp/python-lsp-server/issues/255) - Confusion about rename support ([PR 515](https://github.com/python-lsp/python-lsp-server/pull/515) by [@doolio](https://github.com/doolio)) + +In this release 1 issue was closed. + +### Pull Requests Merged + +* [PR 543](https://github.com/python-lsp/python-lsp-server/pull/543) - Bump pylint to `>=3.1,<4`, by [@bnavigator](https://github.com/bnavigator) +* [PR 541](https://github.com/python-lsp/python-lsp-server/pull/541) - Add fallback for `ujson` import, by [@Savalek](https://github.com/Savalek) +* [PR 538](https://github.com/python-lsp/python-lsp-server/pull/538) - Remove `.config/flake8` reference in Readme, by [@justin-f-perez](https://github.com/justin-f-perez) +* [PR 536](https://github.com/python-lsp/python-lsp-server/pull/536) - Fix isort plugin name in Readme, by [@Piraty](https://github.com/Piraty) +* [PR 515](https://github.com/python-lsp/python-lsp-server/pull/515) - Remove built-in `rope_rename` plugin, by [@doolio](https://github.com/doolio) ([255](https://github.com/python-lsp/python-lsp-server/issues/255)) +* [PR 470](https://github.com/python-lsp/python-lsp-server/pull/470) - Add contributing guide to setup dev environment, by [@staticf0x](https://github.com/staticf0x) + +In this release 6 pull requests were closed. + +---- + +## Version 1.10.1 (2024/03/12) + +### Issues Closed + +* [Issue 529](https://github.com/python-lsp/python-lsp-server/issues/529) - Autoimports: sqlite3.OperationalError: database is locked ([PR 530](https://github.com/python-lsp/python-lsp-server/pull/530) by [@last-partizan](https://github.com/last-partizan)) + +In this release 1 issue was closed. 
+ +### Pull Requests Merged + +* [PR 530](https://github.com/python-lsp/python-lsp-server/pull/530) - Fix progress reporting with autoimport plugin, by [@last-partizan](https://github.com/last-partizan) ([529](https://github.com/python-lsp/python-lsp-server/issues/529)) +* [PR 528](https://github.com/python-lsp/python-lsp-server/pull/528) - Improve error message about missing `websockets` module, by [@tomplus](https://github.com/tomplus) + +In this release 2 pull requests were closed. + +---- + +## Version 1.10.0 (2024/01/21) + +### New features + +* Add support for notebook document completions. +* Add support for flake8 version 7. + +### Issues Closed + +* [Issue 513](https://github.com/python-lsp/python-lsp-server/issues/513) - Different versions of autopep can be installed as optional dependencies ([PR 514](https://github.com/python-lsp/python-lsp-server/pull/514) by [@doolio](https://github.com/doolio)) +* [Issue 478](https://github.com/python-lsp/python-lsp-server/issues/478) - Considering pointing to python-lsp-isort rather than pyls-isort in the README ([PR 483](https://github.com/python-lsp/python-lsp-server/pull/483) by [@doolio](https://github.com/doolio)) +* [Issue 474](https://github.com/python-lsp/python-lsp-server/issues/474) - AutoImport can break when being called by multiple threads ([PR 498](https://github.com/python-lsp/python-lsp-server/pull/498) by [@tkrabel](https://github.com/tkrabel)) +* [Issue 373](https://github.com/python-lsp/python-lsp-server/issues/373) - file path auto completion add \ in path string ([PR 497](https://github.com/python-lsp/python-lsp-server/pull/497) by [@i-aki-y](https://github.com/i-aki-y)) +* [Issue 256](https://github.com/python-lsp/python-lsp-server/issues/256) - Flake8 Severity too high ([PR 490](https://github.com/python-lsp/python-lsp-server/pull/490) by [@kunhtkun](https://github.com/kunhtkun)) + +In this release 5 issues were closed. 
+ +### Pull Requests Merged + +* [PR 517](https://github.com/python-lsp/python-lsp-server/pull/517) - Combine ruff.toml into pyproject.toml, by [@doolio](https://github.com/doolio) +* [PR 514](https://github.com/python-lsp/python-lsp-server/pull/514) - Fix optional dependency version for autopep8, by [@doolio](https://github.com/doolio) ([513](https://github.com/python-lsp/python-lsp-server/issues/513)) +* [PR 510](https://github.com/python-lsp/python-lsp-server/pull/510) - Bump flake8 to version 7, by [@bnavigator](https://github.com/bnavigator) +* [PR 507](https://github.com/python-lsp/python-lsp-server/pull/507) - Fix extra end line increment in autopep8 plugin, by [@remisalmon](https://github.com/remisalmon) +* [PR 502](https://github.com/python-lsp/python-lsp-server/pull/502) - Use ruff as linter and code formatter, by [@tkrabel](https://github.com/tkrabel) +* [PR 499](https://github.com/python-lsp/python-lsp-server/pull/499) - Make autoimport cache generation non-blocking, by [@tkrabel](https://github.com/tkrabel) +* [PR 498](https://github.com/python-lsp/python-lsp-server/pull/498) - Update rope to 1.11.0 for multi-threading capabilities, by [@tkrabel](https://github.com/tkrabel) ([474](https://github.com/python-lsp/python-lsp-server/issues/474)) +* [PR 497](https://github.com/python-lsp/python-lsp-server/pull/497) - Fix path completion when client doesn't support code snippets, by [@i-aki-y](https://github.com/i-aki-y) ([373](https://github.com/python-lsp/python-lsp-server/issues/373)) +* [PR 490](https://github.com/python-lsp/python-lsp-server/pull/490) - Refine diagnostic severity for flake8, by [@kunhtkun](https://github.com/kunhtkun) ([256](https://github.com/python-lsp/python-lsp-server/issues/256)) +* [PR 487](https://github.com/python-lsp/python-lsp-server/pull/487) - Replace call to `python` with `sys.executable` in Pylint plugin, by [@jspricke](https://github.com/jspricke) +* [PR 486](https://github.com/python-lsp/python-lsp-server/pull/486) - Add 
support for notebook document completions, by [@smacke](https://github.com/smacke) +* [PR 483](https://github.com/python-lsp/python-lsp-server/pull/483) - Point to a more up to date isort plugin in README, by [@doolio](https://github.com/doolio) ([478](https://github.com/python-lsp/python-lsp-server/issues/478)) + +In this release 12 pull requests were closed. + +---- + +## Version 1.9.0 (2023/11/06) + +### New features + +* Support `initializationOptions` to configure the server. +* Add code completions to the autoimport plugin. +* Add support for Pylint 3. +* Pass `extendIgnore` argument to Flake8. +* Add new `pylsp_workspace_configuration_changed` hookspec so that plugins can + react when client sends a configuration change to the server. + +### Issues Closed + +* [Issue 460](https://github.com/python-lsp/python-lsp-server/issues/460) - rope_autoimport doesn't initialize after `workspace/didChangeConfiguration` message ([PR 461](https://github.com/python-lsp/python-lsp-server/pull/461) by [@tkrabel-db](https://github.com/tkrabel-db)) +* [Issue 403](https://github.com/python-lsp/python-lsp-server/issues/403) - Add code action for implementing auto-import ([PR 471](https://github.com/python-lsp/python-lsp-server/pull/471) by [@tkrabel-db](https://github.com/tkrabel-db)) +* [Issue 195](https://github.com/python-lsp/python-lsp-server/issues/195) - Maybe use initializationOptions as additional source of settings ([PR 459](https://github.com/python-lsp/python-lsp-server/pull/459) by [@tkrabel-db](https://github.com/tkrabel-db)) + +In this release 3 issues were closed. 
+ +### Pull Requests Merged + +* [PR 481](https://github.com/python-lsp/python-lsp-server/pull/481) - Revert "Rename `_utils` module to `utils`", by [@ccordoba12](https://github.com/ccordoba12) +* [PR 480](https://github.com/python-lsp/python-lsp-server/pull/480) - Rename `_utils` module to `utils`, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 475](https://github.com/python-lsp/python-lsp-server/pull/475) - Raise supported Pylint upper version, by [@bnavigator](https://github.com/bnavigator) +* [PR 473](https://github.com/python-lsp/python-lsp-server/pull/473) - Improve/simplify README Development section, by [@tkrabel](https://github.com/tkrabel) +* [PR 471](https://github.com/python-lsp/python-lsp-server/pull/471) - Add code completions to `rope_autoimport` plugin, by [@tkrabel-db](https://github.com/tkrabel-db) ([403](https://github.com/python-lsp/python-lsp-server/issues/403)) +* [PR 469](https://github.com/python-lsp/python-lsp-server/pull/469) - Pass argument `extendIgnore` to flake8, by [@UnkwUsr](https://github.com/UnkwUsr) +* [PR 466](https://github.com/python-lsp/python-lsp-server/pull/466) - Ignore notebook names on cell completion for autoimport, by [@tkrabel-db](https://github.com/tkrabel-db) +* [PR 464](https://github.com/python-lsp/python-lsp-server/pull/464) - Minor bug fix in Rope autoimport plugin, by [@tkrabel-db](https://github.com/tkrabel-db) +* [PR 462](https://github.com/python-lsp/python-lsp-server/pull/462) - Make workspace/didChangeConfig work with notebook documents, by [@tkrabel-db](https://github.com/tkrabel-db) +* [PR 461](https://github.com/python-lsp/python-lsp-server/pull/461) - Load `rope_autoimport` cache on `workspace/didChangeConfiguration`, by [@tkrabel-db](https://github.com/tkrabel-db) ([460](https://github.com/python-lsp/python-lsp-server/issues/460)) +* [PR 459](https://github.com/python-lsp/python-lsp-server/pull/459) - Support `initializationOptions` to configure the server, by 
[@tkrabel-db](https://github.com/tkrabel-db) ([195](https://github.com/python-lsp/python-lsp-server/issues/195)) +* [PR 457](https://github.com/python-lsp/python-lsp-server/pull/457) - Fix missing signatures for docstrings in Markdown, by [@staticf0x](https://github.com/staticf0x) + +In this release 12 pull requests were closed. + +---- + +## Version 1.8.2 (2023/10/09) + +### Issues Closed + +* [Issue 453](https://github.com/python-lsp/python-lsp-server/issues/453) - notebookDocumentSync notebookSelector type error ([PR 454](https://github.com/python-lsp/python-lsp-server/pull/454) by [@smacke](https://github.com/smacke)) + +In this release 1 issue was closed. + +### Pull Requests Merged + +* [PR 454](https://github.com/python-lsp/python-lsp-server/pull/454) - Fix notebook document selector not being a list in capabilities, by [@smacke](https://github.com/smacke) ([453](https://github.com/python-lsp/python-lsp-server/issues/453)) + +In this release 1 pull request was closed. + +---- + +## Version 1.8.1 (2023/10/05) + +### Issues Closed + +* [Issue 439](https://github.com/python-lsp/python-lsp-server/issues/439) - `includeDeclaration` is no longer respected in `textDocument/references` ([PR 440](https://github.com/python-lsp/python-lsp-server/pull/440) by [@krassowski](https://github.com/krassowski)) +* [Issue 438](https://github.com/python-lsp/python-lsp-server/issues/438) - flake8 can error out when deleting lines ([PR 441](https://github.com/python-lsp/python-lsp-server/pull/441) by [@krassowski](https://github.com/krassowski)) +* [Issue 413](https://github.com/python-lsp/python-lsp-server/issues/413) - textDocument/rename reports positions outside of the document ([PR 450](https://github.com/python-lsp/python-lsp-server/pull/450) by [@ccordoba12](https://github.com/ccordoba12)) + +In this release 3 issues were closed. 
+ +### Pull Requests Merged + +* [PR 450](https://github.com/python-lsp/python-lsp-server/pull/450) - Fix renaming when file has no EOLs, by [@ccordoba12](https://github.com/ccordoba12) ([413](https://github.com/python-lsp/python-lsp-server/issues/413)) +* [PR 449](https://github.com/python-lsp/python-lsp-server/pull/449) - Increase minimal required version of autopep8 to `>=2.0.4,<2.1.0`, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 447](https://github.com/python-lsp/python-lsp-server/pull/447) - Fix numpy go-to-definition by taking it off autoimport list for this case, by [@smacke](https://github.com/smacke) +* [PR 443](https://github.com/python-lsp/python-lsp-server/pull/443) - Allow Jedi "goto" to perform multiple hops for "go to definition", by [@smacke](https://github.com/smacke) +* [PR 441](https://github.com/python-lsp/python-lsp-server/pull/441) - Pass a single copy of the document's source around for flake8, by [@krassowski](https://github.com/krassowski) ([438](https://github.com/python-lsp/python-lsp-server/issues/438)) +* [PR 440](https://github.com/python-lsp/python-lsp-server/pull/440) - Fix `include_declaration` handling in references request, by [@krassowski](https://github.com/krassowski) ([439](https://github.com/python-lsp/python-lsp-server/issues/439)) +* [PR 436](https://github.com/python-lsp/python-lsp-server/pull/436) - Add black reformatting commit to `.git-blame-ignore-revs`, by [@krassowski](https://github.com/krassowski) + +In this release 7 pull requests were closed. + +---- + +## Version 1.8.0 (2023/09/08) + +### New features + +* Add notebooks suppport and make go-to-definition work for them. +* Add support for Pyflakes 3.1, Pycodestyle 2.11 and Jedi 0.19. +* Drop support for Python 3.7. 
+ +### Issues Closed + +* [Issue 429](https://github.com/python-lsp/python-lsp-server/issues/429) - Error in Pyflakes plugin: 'NoneType' has no len() ([PR 433](https://github.com/python-lsp/python-lsp-server/pull/433) by [@smacke](https://github.com/smacke)) +* [Issue 414](https://github.com/python-lsp/python-lsp-server/issues/414) - Support Jedi 0.19 ([PR 416](https://github.com/python-lsp/python-lsp-server/pull/416) by [@bnavigator](https://github.com/bnavigator)) +* [Issue 412](https://github.com/python-lsp/python-lsp-server/issues/412) - Add support for pyflakes 3.1 ([PR 415](https://github.com/python-lsp/python-lsp-server/pull/415) by [@yan12125](https://github.com/yan12125)) +* [Issue 406](https://github.com/python-lsp/python-lsp-server/issues/406) - flake8_lint plugin: Popen fails when no workspace given by language server client on Windows ([PR 434](https://github.com/python-lsp/python-lsp-server/pull/434) by [@smacke](https://github.com/smacke)) +* [Issue 392](https://github.com/python-lsp/python-lsp-server/issues/392) - Using black as an autoformatter ([PR 419](https://github.com/python-lsp/python-lsp-server/pull/419) by [@tkrabel-db](https://github.com/tkrabel-db)) +* [Issue 384](https://github.com/python-lsp/python-lsp-server/issues/384) - Replace `setuptools`/`pkg_resources` with `importlib(.|_)metadata` ([PR 385](https://github.com/python-lsp/python-lsp-server/pull/385) by [@bollwyvl](https://github.com/bollwyvl)) +* [Issue 314](https://github.com/python-lsp/python-lsp-server/issues/314) - Failed to handle requests after exit ([PR 432](https://github.com/python-lsp/python-lsp-server/pull/432) by [@smacke](https://github.com/smacke)) + +In this release 7 issues were closed. 
+ +### Pull Requests Merged + +* [PR 434](https://github.com/python-lsp/python-lsp-server/pull/434) - Don't set cwd in Popen kwargs when document root is empty (flake8), by [@smacke](https://github.com/smacke) ([406](https://github.com/python-lsp/python-lsp-server/issues/406)) +* [PR 433](https://github.com/python-lsp/python-lsp-server/pull/433) - Fix null reference for syntax errors due to invalid encodings (Pyflakes), by [@smacke](https://github.com/smacke) ([429](https://github.com/python-lsp/python-lsp-server/issues/429), [429](https://github.com/python-lsp/python-lsp-server/issues/429)) +* [PR 432](https://github.com/python-lsp/python-lsp-server/pull/432) - Use invalid request handler rather than raising key error for requests after shutdown, by [@smacke](https://github.com/smacke) ([314](https://github.com/python-lsp/python-lsp-server/issues/314)) +* [PR 419](https://github.com/python-lsp/python-lsp-server/pull/419) - Format the whole repo with Black, by [@tkrabel-db](https://github.com/tkrabel-db) ([392](https://github.com/python-lsp/python-lsp-server/issues/392)) +* [PR 418](https://github.com/python-lsp/python-lsp-server/pull/418) - Converge unit tests for test_language_server and test_notebook_document, by [@tkrabel-db](https://github.com/tkrabel-db) +* [PR 417](https://github.com/python-lsp/python-lsp-server/pull/417) - Drop support for Python 3.7, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 416](https://github.com/python-lsp/python-lsp-server/pull/416) - Bump Jedi upper pin to <0.20, by [@bnavigator](https://github.com/bnavigator) ([414](https://github.com/python-lsp/python-lsp-server/issues/414)) +* [PR 415](https://github.com/python-lsp/python-lsp-server/pull/415) - Add support for pyflakes 3.1 and pycodestyle 2.11, by [@yan12125](https://github.com/yan12125) ([412](https://github.com/python-lsp/python-lsp-server/issues/412)) +* [PR 408](https://github.com/python-lsp/python-lsp-server/pull/408) - Notebook protocol go-to-definition support, 
by [@jasongrout](https://github.com/jasongrout) +* [PR 389](https://github.com/python-lsp/python-lsp-server/pull/389) - Add notebooks support to pylsp, by [@tkrabel-db](https://github.com/tkrabel-db) +* [PR 385](https://github.com/python-lsp/python-lsp-server/pull/385) - Find `entry_points` with `importlib(.|_)metadata`, drop `setuptools` from `dependencies`, by [@bollwyvl](https://github.com/bollwyvl) ([384](https://github.com/python-lsp/python-lsp-server/issues/384)) + +In this release 11 pull requests were closed. + +---- + +## Version 1.7.4 (2023/06/29) + +### Issues Closed + +* [Issue 393](https://github.com/python-lsp/python-lsp-server/issues/393) - Environment path doesn't expand user directory + +In this release 1 issue was closed. + +### Pull Requests Merged + +* [PR 394](https://github.com/python-lsp/python-lsp-server/pull/394) - Resolve homedir references in Jedi environment path, by [@odiroot](https://github.com/odiroot) +* [PR 381](https://github.com/python-lsp/python-lsp-server/pull/381) - Report progress even when initialization fails, by [@syphar](https://github.com/syphar) +* [PR 380](https://github.com/python-lsp/python-lsp-server/pull/380) - Fix pylint hang on file with many errors, by [@hetmankp](https://github.com/hetmankp) + +In this release 3 pull requests were closed. + +---- + +## Version 1.7.3 (2023/05/15) + +### Issues Closed + +* [Issue 369](https://github.com/python-lsp/python-lsp-server/issues/369) - Failed to load hook pylsp_lint: [Errno 2] No such file or directory: '' ([PR 371](https://github.com/python-lsp/python-lsp-server/pull/371) by [@Ultimator14](https://github.com/Ultimator14)) + +In this release 1 issue was closed. 
+ +### Pull Requests Merged + +* [PR 377](https://github.com/python-lsp/python-lsp-server/pull/377) - Update yapf requirement to 0.33+, by [@bnavigator](https://github.com/bnavigator) +* [PR 371](https://github.com/python-lsp/python-lsp-server/pull/371) - Fix empty cwd value for pylint, by [@Ultimator14](https://github.com/Ultimator14) ([369](https://github.com/python-lsp/python-lsp-server/issues/369)) +* [PR 364](https://github.com/python-lsp/python-lsp-server/pull/364) - Add Arch Linux installation command to Readme, by [@GNVageesh](https://github.com/GNVageesh) + +In this release 3 pull requests were closed. + +---- + +## Version 1.7.2 (2023/04/02) + +### Issues Closed + +* [Issue 325](https://github.com/python-lsp/python-lsp-server/issues/325) - WorkDoneProgress tokens not initialized properly by the server ([PR 328](https://github.com/python-lsp/python-lsp-server/pull/328) by [@syphar](https://github.com/syphar)) +* [Issue 260](https://github.com/python-lsp/python-lsp-server/issues/260) - yapf formatting fails when pyproject.toml is in the workspace ([PR 346](https://github.com/python-lsp/python-lsp-server/pull/346) by [@bnavigator](https://github.com/bnavigator)) + +In this release 2 issues were closed. 
+ +### Pull Requests Merged + +* [PR 346](https://github.com/python-lsp/python-lsp-server/pull/346) - Add toml dependency for yapf and constrain yapf to be less than 0.32, by [@bnavigator](https://github.com/bnavigator) ([260](https://github.com/python-lsp/python-lsp-server/issues/260)) +* [PR 345](https://github.com/python-lsp/python-lsp-server/pull/345) - Raise upper bound of autopep8, by [@bnavigator](https://github.com/bnavigator) +* [PR 340](https://github.com/python-lsp/python-lsp-server/pull/340) - Bump pydocstyle to 6.3, by [@bnavigator](https://github.com/bnavigator) +* [PR 328](https://github.com/python-lsp/python-lsp-server/pull/328) - Initialize LSP progress token before using it and remove progress for sync plugins, by [@syphar](https://github.com/syphar) ([325](https://github.com/python-lsp/python-lsp-server/issues/325)) + +In this release 4 pull requests were closed. + +---- + +## Version 1.7.1 (2023/01/17) + +### Issues Closed + +* [Issue 332](https://github.com/python-lsp/python-lsp-server/issues/332) - Failed to load hook pylsp_lint: too many values to unpack (expected 3) ([PR 329](https://github.com/python-lsp/python-lsp-server/pull/329) by [@ccordoba12](https://github.com/ccordoba12)) + +In this release 1 issue was closed. 
+ +### Pull Requests Merged + +* [PR 338](https://github.com/python-lsp/python-lsp-server/pull/338) - Use shlex.split() to split pylint flags, by [@hfrentzel](https://github.com/hfrentzel) +* [PR 337](https://github.com/python-lsp/python-lsp-server/pull/337) - Improve Jedi file completions for directories, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 334](https://github.com/python-lsp/python-lsp-server/pull/334) - Include missing Pylint "information" category, by [@juliangilbey](https://github.com/juliangilbey) +* [PR 333](https://github.com/python-lsp/python-lsp-server/pull/333) - Add top constraint to Pylint and fix constraint for `whatthepatch`, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 329](https://github.com/python-lsp/python-lsp-server/pull/329) - Fix pydocstyle linting with its 6.2.0 version, by [@ccordoba12](https://github.com/ccordoba12) ([332](https://github.com/python-lsp/python-lsp-server/issues/332)) +* [PR 327](https://github.com/python-lsp/python-lsp-server/pull/327) - Use `sys.executable` instead of `python` in Pylint plugin, by [@bnavigator](https://github.com/bnavigator) + +In this release 6 pull requests were closed. + +---- + +## Version 1.7.0 (2022/12/29) + +### New features + +* Add a new plugin to provide autoimport functionality (disabled by default). +* Add progress reporting. +* Make `jedi_definition` plugin follow definitions to `pyi` files. +* Add support for flake8 version 6. +* Add support for Yapf ignore patterns. +* Add mccabe setting to flake8 plugin. + +### Issues Closed + +* [Issue 317](https://github.com/python-lsp/python-lsp-server/issues/317) - Is there a configuration option to enable jumping to builtin module stubs? 
([PR 321](https://github.com/python-lsp/python-lsp-server/pull/321) by [@bzoracler](https://github.com/bzoracler)) +* [Issue 307](https://github.com/python-lsp/python-lsp-server/issues/307) - Autoimport keep throwing exception when delete a line ([PR 309](https://github.com/python-lsp/python-lsp-server/pull/309) by [@douo](https://github.com/douo)) +* [Issue 301](https://github.com/python-lsp/python-lsp-server/issues/301) - `textDocument/documentSymbol` returns empty result for non-existing files ([PR 302](https://github.com/python-lsp/python-lsp-server/pull/302) by [@rear1019](https://github.com/rear1019)) +* [Issue 292](https://github.com/python-lsp/python-lsp-server/issues/292) - List of allowed values for pylsp.plugins.pydocstyle.convention in CONFIGURATION.md incorrect ([PR 295](https://github.com/python-lsp/python-lsp-server/pull/295) by [@doolio](https://github.com/doolio)) +* [Issue 201](https://github.com/python-lsp/python-lsp-server/issues/201) - Progress support ([PR 236](https://github.com/python-lsp/python-lsp-server/pull/236) by [@syphar](https://github.com/syphar)) +* [Issue 34](https://github.com/python-lsp/python-lsp-server/issues/34) - Auto-import? ([PR 199](https://github.com/python-lsp/python-lsp-server/pull/199) by [@bagel897](https://github.com/bagel897)) + +In this release 6 issues were closed. 
+ +### Pull Requests Merged + +* [PR 323](https://github.com/python-lsp/python-lsp-server/pull/323) - Don't show signature for modules in hovers, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 322](https://github.com/python-lsp/python-lsp-server/pull/322) - Change Pylint run to set cwd correctly, by [@Corentin-pro](https://github.com/Corentin-pro) +* [PR 321](https://github.com/python-lsp/python-lsp-server/pull/321) - Expose setting to follow builtin and extension definitions to stub files, by [@bzoracler](https://github.com/bzoracler) ([317](https://github.com/python-lsp/python-lsp-server/issues/317)) +* [PR 319](https://github.com/python-lsp/python-lsp-server/pull/319) - Fix Pycodestyle linting with line endings other than LF, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 318](https://github.com/python-lsp/python-lsp-server/pull/318) - Ensure proper document match to avoid empty outline (Symbols), by [@mnauw](https://github.com/mnauw) +* [PR 316](https://github.com/python-lsp/python-lsp-server/pull/316) - Support Flake8 version 6, by [@bnavigator](https://github.com/bnavigator) +* [PR 312](https://github.com/python-lsp/python-lsp-server/pull/312) - Update Readme with link to python-lsp-ruff and mention to code actions, by [@jhossbach](https://github.com/jhossbach) +* [PR 311](https://github.com/python-lsp/python-lsp-server/pull/311) - Make flake8 respect configuration, by [@delfick](https://github.com/delfick) +* [PR 309](https://github.com/python-lsp/python-lsp-server/pull/309) - Fix autoimport raising AttributeError in some cases, by [@douo](https://github.com/douo) ([307](https://github.com/python-lsp/python-lsp-server/issues/307)) +* [PR 306](https://github.com/python-lsp/python-lsp-server/pull/306) - Fix the completion of `include_function_objects`, by [@llan-ml](https://github.com/llan-ml) +* [PR 305](https://github.com/python-lsp/python-lsp-server/pull/305) - Report autoimport progress, by [@bagel897](https://github.com/bagel897) +* [PR 
302](https://github.com/python-lsp/python-lsp-server/pull/302) - Fix symbols for non-existing (unsaved) files, by [@rear1019](https://github.com/rear1019) ([301](https://github.com/python-lsp/python-lsp-server/issues/301)) +* [PR 300](https://github.com/python-lsp/python-lsp-server/pull/300) - Fix autoimport plugin not being disabled by default, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 296](https://github.com/python-lsp/python-lsp-server/pull/296) - Update outdated username in docs, by [@bagel897](https://github.com/bagel897) +* [PR 295](https://github.com/python-lsp/python-lsp-server/pull/295) - Update allowed values for pydocstyle convention in CONFIGURATION.md, by [@doolio](https://github.com/doolio) ([292](https://github.com/python-lsp/python-lsp-server/issues/292)) +* [PR 290](https://github.com/python-lsp/python-lsp-server/pull/290) - Fix Debian package name, by [@jspricke](https://github.com/jspricke) +* [PR 236](https://github.com/python-lsp/python-lsp-server/pull/236) - Add progress reporting, by [@syphar](https://github.com/syphar) ([201](https://github.com/python-lsp/python-lsp-server/issues/201)) +* [PR 199](https://github.com/python-lsp/python-lsp-server/pull/199) - Add a plugin to provide autoimport functionality, by [@bagel897](https://github.com/bagel897) ([34](https://github.com/python-lsp/python-lsp-server/issues/34)) +* [PR 63](https://github.com/python-lsp/python-lsp-server/pull/63) - Add mccabe setting to flake8, by [@baco](https://github.com/baco) +* [PR 60](https://github.com/python-lsp/python-lsp-server/pull/60) - Add support for Yapf ignore patterns, by [@jjlorenzo](https://github.com/jjlorenzo) + +In this release 20 pull requests were closed. + +---- + +## Version 1.6.0 (2022/11/02) + +### New features + +* Migrate to MarkupContent and convert docstrings to Markdown by default. +* Add support for flake8 version 5. +* Add function objects to Jedi completions. 
+* Don't include class and functions objects by default in Jedi completions. + +### Issues Closed + +* [Issue 273](https://github.com/python-lsp/python-lsp-server/issues/273) - Completion result have "typeParameter" duplicates ([PR 274](https://github.com/python-lsp/python-lsp-server/pull/274) by [@airportyh](https://github.com/airportyh)) +* [Issue 265](https://github.com/python-lsp/python-lsp-server/issues/265) - Server warns when optional modules do not exist ([PR 266](https://github.com/python-lsp/python-lsp-server/pull/266) by [@doolio](https://github.com/doolio)) +* [Issue 264](https://github.com/python-lsp/python-lsp-server/issues/264) - Errors in CONFIGURATION.md? ([PR 267](https://github.com/python-lsp/python-lsp-server/pull/267) by [@doolio](https://github.com/doolio)) +* [Issue 263](https://github.com/python-lsp/python-lsp-server/issues/263) - Conflict between README and CONFIGURATION ([PR 267](https://github.com/python-lsp/python-lsp-server/pull/267) by [@doolio](https://github.com/doolio)) +* [Issue 245](https://github.com/python-lsp/python-lsp-server/issues/245) - Add alternative ways to install python-lsp-server ([PR 248](https://github.com/python-lsp/python-lsp-server/pull/248) by [@nougcat](https://github.com/nougcat)) +* [Issue 244](https://github.com/python-lsp/python-lsp-server/issues/244) - Add function objects to completions ([PR 246](https://github.com/python-lsp/python-lsp-server/pull/246) by [@llan-ml](https://github.com/llan-ml)) +* [Issue 243](https://github.com/python-lsp/python-lsp-server/issues/243) - `Failed to load hook pylsp_completions: 'NoneType' object has no attribute 'type'` when working with Numpy 1.23 ([PR 281](https://github.com/python-lsp/python-lsp-server/pull/281) by [@gav451](https://github.com/gav451)) +* [Issue 22](https://github.com/python-lsp/python-lsp-server/issues/22) - Consider using docstring_to_markdown for markdown hover and documentation ([PR 80](https://github.com/python-lsp/python-lsp-server/pull/80) by 
[@krassowski](https://github.com/krassowski)) +* [Issue 21](https://github.com/python-lsp/python-lsp-server/issues/21) - Migrate from deprecated MarkedString to MarkupContent ([PR 80](https://github.com/python-lsp/python-lsp-server/pull/80) by [@krassowski](https://github.com/krassowski)) + +In this release 9 issues were closed. + +### Pull Requests Merged + +* [PR 285](https://github.com/python-lsp/python-lsp-server/pull/285) - Don't include class objects by default in completions, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 281](https://github.com/python-lsp/python-lsp-server/pull/281) - Improve how Jedi handles Numpy, by [@gav451](https://github.com/gav451) ([243](https://github.com/python-lsp/python-lsp-server/issues/243)) +* [PR 274](https://github.com/python-lsp/python-lsp-server/pull/274) - Make default for `include_function_objects` false, by [@airportyh](https://github.com/airportyh) ([273](https://github.com/python-lsp/python-lsp-server/issues/273)) +* [PR 272](https://github.com/python-lsp/python-lsp-server/pull/272) - Include params only for classes and functions, by [@llan-ml](https://github.com/llan-ml) +* [PR 267](https://github.com/python-lsp/python-lsp-server/pull/267) - Update the configuration schema for consistency, by [@doolio](https://github.com/doolio) ([264](https://github.com/python-lsp/python-lsp-server/issues/264), [263](https://github.com/python-lsp/python-lsp-server/issues/263)) +* [PR 266](https://github.com/python-lsp/python-lsp-server/pull/266) - Prefer info log message for missing optional modules, by [@doolio](https://github.com/doolio) ([265](https://github.com/python-lsp/python-lsp-server/issues/265)) +* [PR 262](https://github.com/python-lsp/python-lsp-server/pull/262) - Fix options not being passed to yapf format, by [@masad-frost](https://github.com/masad-frost) +* [PR 261](https://github.com/python-lsp/python-lsp-server/pull/261) - PR: Include all symbols that Jedi reports as declared in a file when 
`add_import_symbols` is `False`, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 258](https://github.com/python-lsp/python-lsp-server/pull/258) - Fix pylint message in tests, by [@bnavigator](https://github.com/bnavigator) +* [PR 257](https://github.com/python-lsp/python-lsp-server/pull/257) - Add support for flake8 version 5, by [@bnavigator](https://github.com/bnavigator) +* [PR 250](https://github.com/python-lsp/python-lsp-server/pull/250) - Include traceback when plugin fails to load, by [@j2kun](https://github.com/j2kun) +* [PR 248](https://github.com/python-lsp/python-lsp-server/pull/248) - Add more installation instructions to Readme, by [@nougcat](https://github.com/nougcat) ([245](https://github.com/python-lsp/python-lsp-server/issues/245)) +* [PR 246](https://github.com/python-lsp/python-lsp-server/pull/246) - Add support for including function objects, by [@llan-ml](https://github.com/llan-ml) ([244](https://github.com/python-lsp/python-lsp-server/issues/244)) +* [PR 242](https://github.com/python-lsp/python-lsp-server/pull/242) - Remove redundant wheel dep from pyproject.toml, by [@mgorny](https://github.com/mgorny) +* [PR 241](https://github.com/python-lsp/python-lsp-server/pull/241) - Update release instructions to use new build mechanism, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 80](https://github.com/python-lsp/python-lsp-server/pull/80) - Migrate to MarkupContent and convert docstrings to Markdown, by [@krassowski](https://github.com/krassowski) ([22](https://github.com/python-lsp/python-lsp-server/issues/22), [21](https://github.com/python-lsp/python-lsp-server/issues/21)) + +In this release 16 pull requests were closed. + +---- + +## Version 1.5.0 (2022/07/10) + +### New features + +* Add `DiagnosticTag` tags for Pylint, Pycodestyle, and Flake8 plugins. +* Add support to connect to the server through websockets. +* Allow multiple per-file-ignores for the same pattern in Flake8 plugin. +* Parse YAPF diffs into TextEdits. 
+* Add support for LSP formatting `options` parameter. + +### Issues Closed + +* [Issue 230](https://github.com/python-lsp/python-lsp-server/issues/230) - Flake8 reports wrong severity level for code Fxxx ([PR 234](https://github.com/python-lsp/python-lsp-server/pull/234) by [@lcheylus](https://github.com/lcheylus)) +* [Issue 220](https://github.com/python-lsp/python-lsp-server/issues/220) - Flake8 reports wrong severity level for E999 ([PR 223](https://github.com/python-lsp/python-lsp-server/pull/223) by [@jhossbach](https://github.com/jhossbach)) +* [Issue 219](https://github.com/python-lsp/python-lsp-server/issues/219) - Add .flake8 to the discovery paths ([PR 233](https://github.com/python-lsp/python-lsp-server/pull/233) by [@lcheylus](https://github.com/lcheylus)) +* [Issue 209](https://github.com/python-lsp/python-lsp-server/issues/209) - Rope completions enabled or disabled by default? ([PR 210](https://github.com/python-lsp/python-lsp-server/pull/210) by [@rchl](https://github.com/rchl)) +* [Issue 157](https://github.com/python-lsp/python-lsp-server/issues/157) - Please add basic usage documentation ([PR 185](https://github.com/python-lsp/python-lsp-server/pull/185) by [@jgollenz](https://github.com/jgollenz)) +* [Issue 144](https://github.com/python-lsp/python-lsp-server/issues/144) - Add `DiagnosticTag` tags for pylint, pycodestyle, and flake8 ([PR 229](https://github.com/python-lsp/python-lsp-server/pull/229) by [@krassowski](https://github.com/krassowski)) +* [Issue 140](https://github.com/python-lsp/python-lsp-server/issues/140) - Flake8 plugins issues ([PR 215](https://github.com/python-lsp/python-lsp-server/pull/215) by [@yeraydiazdiaz](https://github.com/yeraydiazdiaz)) +* [Issue 117](https://github.com/python-lsp/python-lsp-server/issues/117) - Websockets built-in support ([PR 128](https://github.com/python-lsp/python-lsp-server/pull/128) by [@npradeep357](https://github.com/npradeep357)) + +In this release 8 issues were closed. 
+ +### Pull Requests Merged + +* [PR 234](https://github.com/python-lsp/python-lsp-server/pull/234) - Report Flake8 errors with Error severity level, by [@lcheylus](https://github.com/lcheylus) ([230](https://github.com/python-lsp/python-lsp-server/issues/230)) +* [PR 233](https://github.com/python-lsp/python-lsp-server/pull/233) - Fix documentation for location of Flake8 configuration files, by [@lcheylus](https://github.com/lcheylus) ([219](https://github.com/python-lsp/python-lsp-server/issues/219)) +* [PR 231](https://github.com/python-lsp/python-lsp-server/pull/231) - Use Numpy less than 1.23 in our tests, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 229](https://github.com/python-lsp/python-lsp-server/pull/229) - Add `DiagnosticTag` support, by [@krassowski](https://github.com/krassowski) ([144](https://github.com/python-lsp/python-lsp-server/issues/144)) +* [PR 228](https://github.com/python-lsp/python-lsp-server/pull/228) - Improve schema type compliance, improve CONFIGURATION.md, by [@krassowski](https://github.com/krassowski) +* [PR 225](https://github.com/python-lsp/python-lsp-server/pull/225) - Add autopep8.enabled to the configuration schema, by [@j2kun](https://github.com/j2kun) +* [PR 223](https://github.com/python-lsp/python-lsp-server/pull/223) - Change severity level for flake8 errors, by [@jhossbach](https://github.com/jhossbach) ([220](https://github.com/python-lsp/python-lsp-server/issues/220)) +* [PR 221](https://github.com/python-lsp/python-lsp-server/pull/221) - Remove preload module from Readme, by [@bageljrkhanofemus](https://github.com/bageljrkhanofemus) +* [PR 217](https://github.com/python-lsp/python-lsp-server/pull/217) - Allow multiple per-file-ignores for the same pattern in flake8 plugin, by [@dedi](https://github.com/dedi) +* [PR 215](https://github.com/python-lsp/python-lsp-server/pull/215) - Remove reference to pyls-flake8 in Readme, by [@yeraydiazdiaz](https://github.com/yeraydiazdiaz) 
([140](https://github.com/python-lsp/python-lsp-server/issues/140)) +* [PR 211](https://github.com/python-lsp/python-lsp-server/pull/211) - Restore the copyright headers in `setup.cfg` and `pyproject.toml`, by [@KOLANICH](https://github.com/KOLANICH) +* [PR 210](https://github.com/python-lsp/python-lsp-server/pull/210) - Match rope_completions setting documentation with reality, by [@rchl](https://github.com/rchl) ([209](https://github.com/python-lsp/python-lsp-server/issues/209)) +* [PR 207](https://github.com/python-lsp/python-lsp-server/pull/207) - Move the project metadata into `PEP 621`-compliant `pyproject.toml`, by [@KOLANICH](https://github.com/KOLANICH) +* [PR 187](https://github.com/python-lsp/python-lsp-server/pull/187) - Add plugins for pylint and flake8 to readme, by [@bageljrkhanofemus](https://github.com/bageljrkhanofemus) +* [PR 185](https://github.com/python-lsp/python-lsp-server/pull/185) - Mention `pylsp` command in README, by [@jgollenz](https://github.com/jgollenz) ([157](https://github.com/python-lsp/python-lsp-server/issues/157)) +* [PR 181](https://github.com/python-lsp/python-lsp-server/pull/181) - Fix section that was misplaced in changelog, by [@ccordoba12](https://github.com/ccordoba12) +* [PR 136](https://github.com/python-lsp/python-lsp-server/pull/136) - Parse YAPF diffs into TextEdits (instead of sending the full doc), by [@masad-frost](https://github.com/masad-frost) +* [PR 134](https://github.com/python-lsp/python-lsp-server/pull/134) - Add support for LSP formatting `options` parameter, by [@masad-frost](https://github.com/masad-frost) +* [PR 128](https://github.com/python-lsp/python-lsp-server/pull/128) - Add web sockets support, by [@npradeep357](https://github.com/npradeep357) ([117](https://github.com/python-lsp/python-lsp-server/issues/117)) + +In this release 19 pull requests were closed. 
+ +---- + ## Version 1.4.1 (2022/03/27) ### Pull Requests Merged diff --git a/CONFIGURATION.md b/CONFIGURATION.md index 7ba70cf9..53d3b2f1 100644 --- a/CONFIGURATION.md +++ b/CONFIGURATION.md @@ -1,67 +1,83 @@ # Python Language Server Configuration -This server can be configured using `workspace/didChangeConfiguration` method. Each configuration option is described below: +This server can be configured using the `workspace/didChangeConfiguration` method. Each configuration option is described below. Note, a value of `null` means that we do not set a value and thus use the plugin's default value. | **Configuration Key** | **Type** | **Description** | **Default** |----|----|----|----| -| `pylsp.configurationSources` | `array` of unique `string` items | List of configuration sources to use. | `["pycodestyle"]` | +| `pylsp.configurationSources` | `array` of unique `string` (one of: `'pycodestyle'`, `'flake8'`) items | List of configuration sources to use. | `["pycodestyle"]` | +| `pylsp.plugins.autopep8.enabled` | `boolean` | Enable or disable the plugin (disabling required to use `yapf`). | `true` | | `pylsp.plugins.flake8.config` | `string` | Path to the config file that will be the authoritative config source. | `null` | | `pylsp.plugins.flake8.enabled` | `boolean` | Enable or disable the plugin. | `false` | -| `pylsp.plugins.flake8.exclude` | `array` | List of files or directories to exclude. | `null` | +| `pylsp.plugins.flake8.exclude` | `array` of `string` items | List of files or directories to exclude. | `[]` | +| `pylsp.plugins.flake8.extendIgnore` | `array` of `string` items | List of errors and warnings to append to ignore list. | `[]` | +| `pylsp.plugins.flake8.extendSelect` | `array` of `string` items | List of errors and warnings to append to select list. | `[]` | | `pylsp.plugins.flake8.executable` | `string` | Path to the flake8 executable. 
| `"flake8"` | | `pylsp.plugins.flake8.filename` | `string` | Only check for filenames matching the patterns in this list. | `null` | | `pylsp.plugins.flake8.hangClosing` | `boolean` | Hang closing bracket instead of matching indentation of opening bracket's line. | `null` | -| `pylsp.plugins.flake8.ignore` | `array` | List of errors and warnings to ignore (or skip). | `null` | +| `pylsp.plugins.flake8.ignore` | `array` of `string` items | List of errors and warnings to ignore (or skip). | `[]` | +| `pylsp.plugins.flake8.maxComplexity` | `integer` | Maximum allowed complexity threshold. | `null` | | `pylsp.plugins.flake8.maxLineLength` | `integer` | Maximum allowed line length for the entirety of this run. | `null` | | `pylsp.plugins.flake8.indentSize` | `integer` | Set indentation spaces. | `null` | -| `pylsp.plugins.flake8.perFileIgnores` | `array` | A pairing of filenames and violation codes that defines which violations to ignore in a particular file, for example: `["file_path.py:W305,W304"]`). | `null` | -| `pylsp.plugins.flake8.select` | `array` | List of errors and warnings to enable. | `null` | -| `pylsp.plugins.jedi.extra_paths` | `array` | Define extra paths for jedi.Script. | `[]` | +| `pylsp.plugins.flake8.perFileIgnores` | `array` of `string` items | A pairing of filenames and violation codes that defines which violations to ignore in a particular file, for example: `["file_path.py:W305,W304"]`). | `[]` | +| `pylsp.plugins.flake8.select` | `array` of unique `string` items | List of errors and warnings to enable. | `null` | +| `pylsp.plugins.jedi.auto_import_modules` | `array` of `string` items | List of module names for jedi.settings.auto_import_modules. | `["numpy"]` | +| `pylsp.plugins.jedi.extra_paths` | `array` of `string` items | Define extra paths for jedi.Script. 
| `[]` | +| `pylsp.plugins.jedi.prioritize_extra_paths` | `boolean` | Whether to place extra_paths at the beginning (true) or end (false) of `sys.path` | `false` | | `pylsp.plugins.jedi.env_vars` | `object` | Define environment variables for jedi.Script and Jedi.names. | `null` | | `pylsp.plugins.jedi.environment` | `string` | Define environment for jedi.Script and Jedi.names. | `null` | | `pylsp.plugins.jedi_completion.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_completion.include_params` | `boolean` | Auto-completes methods and classes with tabstops for each parameter. | `true` | -| `pylsp.plugins.jedi_completion.include_class_objects` | `boolean` | Adds class objects as a separate completion item. | `true` | +| `pylsp.plugins.jedi_completion.include_class_objects` | `boolean` | Adds class objects as a separate completion item. | `false` | +| `pylsp.plugins.jedi_completion.include_function_objects` | `boolean` | Adds function objects as a separate completion item. | `false` | | `pylsp.plugins.jedi_completion.fuzzy` | `boolean` | Enable fuzzy when requesting autocomplete. | `false` | | `pylsp.plugins.jedi_completion.eager` | `boolean` | Resolve documentation and detail eagerly. | `false` | -| `pylsp.plugins.jedi_completion.resolve_at_most` | `number` | How many labels and snippets (at most) should be resolved? | `25` | -| `pylsp.plugins.jedi_completion.cache_for` | `array` of `string` items | Modules for which labels and snippets should be cached. | `["pandas", "numpy", "tensorflow", "matplotlib"]` | +| `pylsp.plugins.jedi_completion.resolve_at_most` | `integer` | How many labels and snippets (at most) should be resolved? | `25` | +| `pylsp.plugins.jedi_completion.cache_for` | `array` of `string` items | Modules for which labels and snippets should be cached. | `["pandas", "numpy", "tensorflow", "matplotlib"]` | | `pylsp.plugins.jedi_definition.enabled` | `boolean` | Enable or disable the plugin. 
| `true` | | `pylsp.plugins.jedi_definition.follow_imports` | `boolean` | The goto call will follow imports. | `true` | | `pylsp.plugins.jedi_definition.follow_builtin_imports` | `boolean` | If follow_imports is True will decide if it follow builtin imports. | `true` | +| `pylsp.plugins.jedi_definition.follow_builtin_definitions` | `boolean` | Follow builtin and extension definitions to stubs. | `true` | | `pylsp.plugins.jedi_hover.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_references.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_signature_help.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_symbols.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.jedi_symbols.all_scopes` | `boolean` | If True lists the names of all scopes instead of only the module namespace. | `true` | | `pylsp.plugins.jedi_symbols.include_import_symbols` | `boolean` | If True includes symbols imported from other libraries. | `true` | +| `pylsp.plugins.jedi_type_definition.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.mccabe.enabled` | `boolean` | Enable or disable the plugin. | `true` | -| `pylsp.plugins.mccabe.threshold` | `number` | The minimum threshold that triggers warnings about cyclomatic complexity. | `15` | +| `pylsp.plugins.mccabe.threshold` | `integer` | The minimum threshold that triggers warnings about cyclomatic complexity. | `15` | | `pylsp.plugins.preload.enabled` | `boolean` | Enable or disable the plugin. | `true` | -| `pylsp.plugins.preload.modules` | `array` of unique `string` items | List of modules to import on startup | `null` | +| `pylsp.plugins.preload.modules` | `array` of unique `string` items | List of modules to import on startup | `[]` | | `pylsp.plugins.pycodestyle.enabled` | `boolean` | Enable or disable the plugin. 
| `true` | -| `pylsp.plugins.pycodestyle.exclude` | `array` of unique `string` items | Exclude files or directories which match these patterns. | `null` | -| `pylsp.plugins.pycodestyle.filename` | `array` of unique `string` items | When parsing directories, only check filenames matching these patterns. | `null` | -| `pylsp.plugins.pycodestyle.select` | `array` of unique `string` items | Select errors and warnings | `null` | -| `pylsp.plugins.pycodestyle.ignore` | `array` of unique `string` items | Ignore errors and warnings | `null` | +| `pylsp.plugins.pycodestyle.exclude` | `array` of unique `string` items | Exclude files or directories which match these patterns. | `[]` | +| `pylsp.plugins.pycodestyle.filename` | `array` of unique `string` items | When parsing directories, only check filenames matching these patterns. | `[]` | +| `pylsp.plugins.pycodestyle.select` | `array` of unique `string` items | Select errors and warnings | `null` | +| `pylsp.plugins.pycodestyle.ignore` | `array` of unique `string` items | Ignore errors and warnings | `[]` | | `pylsp.plugins.pycodestyle.hangClosing` | `boolean` | Hang closing bracket instead of matching indentation of opening bracket's line. | `null` | -| `pylsp.plugins.pycodestyle.maxLineLength` | `number` | Set maximum allowed line length. | `null` | +| `pylsp.plugins.pycodestyle.maxLineLength` | `integer` | Set maximum allowed line length. | `null` | | `pylsp.plugins.pycodestyle.indentSize` | `integer` | Set indentation spaces. | `null` | | `pylsp.plugins.pydocstyle.enabled` | `boolean` | Enable or disable the plugin. | `false` | -| `pylsp.plugins.pydocstyle.convention` | `string` | Choose the basic list of checked errors by specifying an existing convention. | `null` | -| `pylsp.plugins.pydocstyle.addIgnore` | `array` of unique `string` items | Ignore errors and warnings in addition to the specified convention. 
| `null` | -| `pylsp.plugins.pydocstyle.addSelect` | `array` of unique `string` items | Select errors and warnings in addition to the specified convention. | `null` | -| `pylsp.plugins.pydocstyle.ignore` | `array` of unique `string` items | Ignore errors and warnings | `null` | -| `pylsp.plugins.pydocstyle.select` | `array` of unique `string` items | Select errors and warnings | `null` | +| `pylsp.plugins.pydocstyle.convention` | `string` (one of: `'pep257'`, `'numpy'`, `'google'`, `None`) | Choose the basic list of checked errors by specifying an existing convention. | `null` | +| `pylsp.plugins.pydocstyle.addIgnore` | `array` of unique `string` items | Ignore errors and warnings in addition to the specified convention. | `[]` | +| `pylsp.plugins.pydocstyle.addSelect` | `array` of unique `string` items | Select errors and warnings in addition to the specified convention. | `[]` | +| `pylsp.plugins.pydocstyle.ignore` | `array` of unique `string` items | Ignore errors and warnings | `[]` | +| `pylsp.plugins.pydocstyle.select` | `array` of unique `string` items | Select errors and warnings | `null` | | `pylsp.plugins.pydocstyle.match` | `string` | Check only files that exactly match the given regular expression; default is to match files that don't start with 'test_' but end with '.py'. | `"(?!test_).*\\.py"` | | `pylsp.plugins.pydocstyle.matchDir` | `string` | Search only dirs that exactly match the given regular expression; default is to match dirs which do not begin with a dot. | `"[^\\.].*"` | | `pylsp.plugins.pyflakes.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.plugins.pylint.enabled` | `boolean` | Enable or disable the plugin. | `false` | -| `pylsp.plugins.pylint.args` | `array` of non-unique `string` items | Arguments to pass to pylint. | `null` | +| `pylsp.plugins.pylint.args` | `array` of non-unique `string` items | Arguments to pass to pylint. 
| `[]` | | `pylsp.plugins.pylint.executable` | `string` | Executable to run pylint with. Enabling this will run pylint on unsaved files via stdin. Can slow down workflow. Only works with python3. | `null` | -| `pylsp.plugins.rope_completion.enabled` | `boolean` | Enable or disable the plugin. | `true` | +| `pylsp.plugins.rope_autoimport.enabled` | `boolean` | Enable or disable autoimport. If false, neither completions nor code actions are enabled. If true, the respective features can be enabled or disabled individually. | `false` | +| `pylsp.plugins.rope_autoimport.completions.enabled` | `boolean` | Enable or disable autoimport completions. | `true` | +| `pylsp.plugins.rope_autoimport.code_actions.enabled` | `boolean` | Enable or disable autoimport code actions (e.g. for quick fixes). | `true` | +| `pylsp.plugins.rope_autoimport.memory` | `boolean` | Make the autoimport database memory only. Drastically increases startup time. | `false` | +| `pylsp.plugins.rope_completion.enabled` | `boolean` | Enable or disable the plugin. | `false` | | `pylsp.plugins.rope_completion.eager` | `boolean` | Resolve documentation and detail eagerly. | `false` | | `pylsp.plugins.yapf.enabled` | `boolean` | Enable or disable the plugin. | `true` | | `pylsp.rope.extensionModules` | `string` | Builtin and c-extension modules that are allowed to be imported and inspected by rope. | `null` | -| `pylsp.rope.ropeFolder` | `array` of unique `string` items | The name of the folder in which rope stores project configurations and data. Pass `null` for not using such a folder at all. | `null` | +| `pylsp.rope.ropeFolder` | `array` of unique `string` items | The name of the folder in which rope stores project configurations and data. Pass `null` for not using such a folder at all. | `null` | +| `pylsp.signature.formatter` | `string` (one of: `'black'`, `'ruff'`, `None`) | Formatter to use for reformatting signatures in docstrings. 
| `"black"` | +| `pylsp.signature.include_docstring` | `boolean` | Include signature docstring. | `true` | +| `pylsp.signature.line_length` | `number` | Maximum line length in signatures. | `88` | This documentation was generated from `pylsp/config/schema.json`. Please do not edit this file directly. diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md new file mode 100644 index 00000000..5a24957b --- /dev/null +++ b/CONTRIBUTING.md @@ -0,0 +1,40 @@ +# Setup the environment + +1. Clone the repo: `git clone git@github.com:python-lsp/python-lsp-server.git` +2. Create the virtual environment: `python -m venv .venv` +3. Activate: `source .venv/bin/activate` +4. Install an editable installation: `pip install -e .` + - This will ensure you'll see your edits immediately without reinstalling the project +5. Configure your editor to point the pylsp executable to the one in `.venv` + +## Trying out if it works + +Go to file `pylsp/python_lsp.py`, function `start_io_lang_server`, +and on the first line of the function, add some logging: + +```py +log.info("It works!") +``` + +Save the file, restart the LSP server and you should see the log line: + +``` +2023-10-12 16:46:38,320 CEST - INFO - pylsp._utils - It works! +``` + +Now the project is set up in a way that lets you quickly iterate on the changes you want to add. + +# Running tests + +1. Install runtime dependencies: `pip install .[all]` +2. Install test dependencies: `pip install .[test]` +3. 
Run `pytest`: `pytest -v` + +## Useful pytest options + +- To run a specific test file, use `pytest test/test_utils.py` +- To run a specific test function within a test file, + use `pytest test/test_utils.py::test_debounce` +- To run tests matching a certain expression, use `pytest -k format` +- To increase verbosity of pytest, use `pytest -v` or `pytest -vv` +- To enter a debugger on failed tests, use `pytest --pdb` diff --git a/MANIFEST.in b/MANIFEST.in index e82d1f50..52e77143 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,5 +2,6 @@ include README.md include versioneer.py include pylsp/_version.py include LICENSE +include ruff.toml include .pylintrc recursive-include test *.py diff --git a/README.md b/README.md index 4fe74032..daca7dc6 100644 --- a/README.md +++ b/README.md @@ -2,8 +2,7 @@ [![image](https://github.com/python-ls/python-ls/workflows/Linux%20tests/badge.svg)](https://github.com/python-ls/python-ls/actions?query=workflow%3A%22Linux+tests%22) [![image](https://github.com/python-ls/python-ls/workflows/Mac%20tests/badge.svg)](https://github.com/python-ls/python-ls/actions?query=workflow%3A%22Mac+tests%22) [![image](https://github.com/python-ls/python-ls/workflows/Windows%20tests/badge.svg)](https://github.com/python-ls/python-ls/actions?query=workflow%3A%22Windows+tests%22) [![image](https://img.shields.io/github/license/python-ls/python-ls.svg)](https://github.com/python-ls/python-ls/blob/master/LICENSE) -A Python 3.7+ implementation of the [Language Server Protocol](https://github.com/Microsoft/language-server-protocol). -(Note: versions <1.4 should still work with Python 3.6) +A Python 3.9+ implementation of the [Language Server Protocol](https://github.com/Microsoft/language-server-protocol). 
## Installation @@ -24,7 +23,6 @@ If the respective dependencies are found, the following optional providers will - [YAPF](https://github.com/google/yapf) for code formatting (preferred over autopep8) - [flake8](https://github.com/pycqa/flake8) for error checking (disabled by default) - [pylint](https://github.com/PyCQA/pylint) for code linting (disabled by default) -- [preload](https://github.com/tfiers/preload) for heavy modules (not included by default) Optional providers can be installed using the `extras` syntax. To install [YAPF](https://github.com/google/yapf) formatting for example: @@ -44,16 +42,50 @@ If you get an error similar to `'install_requires' must be a string or list of s pip install -U setuptools ``` +### Windows and Linux installation + +If you use Anaconda/Miniconda, you can install `python-lsp-server` using this conda command + +``` +conda install -c conda-forge python-lsp-server +``` + +Python-lsp-server is available in the repos of every major Linux distribution, and it is usually called `python-lsp-server` or `python3-pylsp`. + +For example, here is how to install it in Debian and Debian-based distributions (E.g. Ubuntu, Pop!_OS, Linux Mint) + +``` +sudo apt-get install python3-pylsp +``` + +or Fedora Linux + +``` +sudo dnf install python3-lsp-server +``` + +or Arch Linux + +``` +sudo pacman -S python-lsp-server +``` + +Only on Alpine Linux the package is named differently. You can install it there by typing this command in your terminal: + +``` +apk add py3-lsp-server +``` + ### 3rd Party Plugins Installing these plugins will add extra functionality to the language server: -- [pyls-flake8](https://github.com/emanspeaks/pyls-flake8/): Error checking using [flake8](https://flake8.pycqa.org/en/latest/). -- [pylsp-mypy](https://github.com/Richardk2n/pylsp-mypy): [MyPy](http://mypy-lang.org/) type checking for Python >=3.7. 
-- [pyls-isort](https://github.com/paradoxxxzero/pyls-isort): code formatting using [isort](https://github.com/PyCQA/isort) (automatic import sorting). +- [pylsp-mypy](https://github.com/Richardk2n/pylsp-mypy): [MyPy](http://mypy-lang.org/) type checking for Python >=3.8. +- [python-lsp-isort](https://github.com/chantera/python-lsp-isort): code formatting using [isort](https://github.com/PyCQA/isort) (automatic import sorting). - [python-lsp-black](https://github.com/python-lsp/python-lsp-black): code formatting using [Black](https://github.com/psf/black). - [pyls-memestra](https://github.com/QuantStack/pyls-memestra): detecting the use of deprecated APIs. - [pylsp-rope](https://github.com/python-rope/pylsp-rope): Extended refactoring capabilities using [Rope](https://github.com/python-rope/rope). +- [python-lsp-ruff](https://github.com/python-lsp/python-lsp-ruff): Extensive and fast linting using [ruff](https://github.com/charliermarsh/ruff). Please see the above repositories for examples on how to write plugins for the Python LSP Server. @@ -68,9 +100,13 @@ Like all language servers, configuration can be passed from the client that talk `python-lsp-server` depends on other tools, like flake8 and pycodestyle. These tools can be configured via settings passed from the client (as above), or alternatively from other configuration sources. The following sources are available: - `pycodestyle`: discovered in `~/.config/pycodestyle`, `setup.cfg`, `tox.ini` and `pycodestyle.cfg`. -- `flake8`: discovered in `~/.config/flake8`, `setup.cfg`, `tox.ini` and `flake8.cfg` +- `flake8`: discovered in `.flake8`, `setup.cfg` and `tox.ini` + +The default configuration sources are `pycodestyle` and `pyflakes`. If you would like to use `flake8`, you will need to: -The default configuration source is `pycodestyle`. Change the `pylsp.configurationSources` setting (in the value passed in from your client) to `['flake8']` in order to use the flake8 configuration instead. +1. 
Disable `pycodestyle`, `mccabe`, and `pyflakes`, by setting their corresponding `enabled` configurations, e.g. `pylsp.plugins.pycodestyle.enabled`, to `false`. This will prevent duplicate linting messages as flake8 includes these tools. +1. Set `pylsp.plugins.flake8.enabled` to `true`. +1. Change the `pylsp.configurationSources` setting (in the value passed in from your client) to `['flake8']` in order to use the flake8 configuration instead. The configuration options available in these config files (`setup.cfg` etc) are documented in the relevant tools: @@ -92,10 +128,27 @@ As an example, to change the list of errors that pycodestyle will ignore, assumi 3. Same as 1, but add to `setup.cfg` file in the root of the project. +Python LSP Server can communicate over WebSockets when configured as follows: + +``` +pylsp --ws --port [port] +``` + +The following libraries are required for Web Sockets support: +- [websockets](https://websockets.readthedocs.io/en/stable/) for Python LSP Server Web sockets using websockets library. 
refer [Websockets installation](https://websockets.readthedocs.io/en/stable/intro/index.html#installation) for more details + +You can install this dependency with command below: + +``` +pip install 'python-lsp-server[websockets]' +``` + ## LSP Server Features * Auto Completion +* [Autoimport](docs/autoimport.md) * Code Linting +* Code actions * Signature Help * Go to definition * Hover @@ -107,10 +160,35 @@ As an example, to change the list of errors that pycodestyle will ignore, assumi ## Development +Dev install + +``` +# (optional) create conda env +conda create --name python-lsp-server python=3.11 -y +conda activate python-lsp-server + +pip install -e ".[all,websockets,test]" +``` + +Run server with ws + +``` +pylsp --ws -v # Info level logging +pylsp --ws -vv # Debug level logging +``` + To run the test suite: ```sh -pip install ".[test]" && pytest +# requires: pip install ".[test]" (see above) +pytest +``` + +Running ruff as a linter and code formatter on the repo: +```sh +ruff check . # linter +ruff check --fix . # fix all auto-fixable lint issues +ruff format . # format the document ``` After adding configuration options to `schema.json`, refresh the `CONFIGURATION.md` file with diff --git a/RELEASE.md b/RELEASE.md index 399bf7f7..bf7b7086 100644 --- a/RELEASE.md +++ b/RELEASE.md @@ -6,14 +6,17 @@ This is necessary to run our tests before the release, so we can be sure everything is in order. + ## To release a new version of python-lsp-server: 1. git fetch upstream && git checkout upstream/master 2. Close milestone on GitHub 3. git clean -xfdi 4. git tag -a vX.X.X -m "Release vX.X.X" -5. python setup.py sdist -6. python setup.py bdist_wheel -7. twine check dist/* -8. twine upload dist/* -9. git push upstream --tags +5. python -m pip install --upgrade pip +6. pip install --upgrade --upgrade-strategy eager build setuptools twine wheel +7. python -bb -X dev -W error -m build +8. twine check --strict dist/* +9. twine upload dist/* +10. 
git push upstream --tags +11. Create release on Github diff --git a/SECURITY.md b/SECURITY.md new file mode 100644 index 00000000..eb6e57df --- /dev/null +++ b/SECURITY.md @@ -0,0 +1,17 @@ +# Security Policy + + +## Supported Versions + +We normally support only the most recently released version with bug fixes, security updates and compatibility improvements. + + +## Reporting a Vulnerability + +If you believe you've discovered a security vulnerability in this project, please open a new security advisory with [our GitHub repo's private vulnerability reporting](https://github.com/python-lsp/python-lsp-server/security/advisories/new). +Please be sure to carefully document the vulnerability, including a summary, describing the impacts, identifying the line(s) of code affected, stating the conditions under which it is exploitable and including a minimal reproducible test case. +Further information and advice or patches on how to mitigate it is always welcome. +You can usually expect to hear back within 1 week, at which point we'll inform you of our evaluation of the vulnerability and what steps we plan to take, and will reach out if we need further clarification from you. +We'll discuss and update the advisory thread, and are happy to update you on its status should you further inquire. +While this is a volunteer project and we don't have financial compensation to offer, we can certainly publicly thank and credit you for your help if you would like. +Thanks! diff --git a/docs/autoimport.md b/docs/autoimport.md new file mode 100644 index 00000000..893a5e98 --- /dev/null +++ b/docs/autoimport.md @@ -0,0 +1,26 @@ +# Autoimport for pylsp + +Requirements: + +1. install `python-lsp-server[rope]` +2. set `pylsp.plugins.rope_autoimport.enabled` to `true` +3. This enables both completions and code actions. 
You can switch them off by setting `pylsp.plugins.rope_autoimport.completions.enabled` and/or `pylsp.plugins.rope_autoimport.code_actions.enabled` to `false` + +## Startup + +Autoimport will generate an autoimport sqlite3 database in .ropefolder/autoimport.db on startup. +This will take a few seconds but should be much quicker on future runs. + +## Usage + +Autoimport will provide suggestions to import names from everything in `sys.path`. You can change this by changing where pylsp is running or by setting rope's 'python_path' option. +It will suggest modules, submodules, keywords, functions, and classes. + +Since autoimport inserts everything towards the end of the import group, it's recommended you use the isort [plugin](https://github.com/paradoxxxzero/pyls-isort). + +## Credits + +- Most of the code was written by me, @bagel897 +- [lyz-code](https://github.com/lyz-code/autoimport) for inspiration and some ideas +- [rope](https://github.com/python-rope/rope), especially @lieryan +- [pyright](https://github.com/Microsoft/pyright) for details on language server implementation diff --git a/pylsp/__init__.py b/pylsp/__init__.py index 151dddc0..f363ad86 100644 --- a/pylsp/__init__.py +++ b/pylsp/__init__.py @@ -2,7 +2,9 @@ # Copyright 2021- Python Language Server Contributors. import os + import pluggy + from . 
import _version from ._version import __version__ @@ -22,8 +24,8 @@ def convert_version_info(version: str) -> (int, ..., str): _version.VERSION_INFO = convert_version_info(__version__) -PYLSP = 'pylsp' -IS_WIN = os.name == 'nt' +PYLSP = "pylsp" +IS_WIN = os.name == "nt" hookspec = pluggy.HookspecMarker(PYLSP) hookimpl = pluggy.HookimplMarker(PYLSP) diff --git a/pylsp/__main__.py b/pylsp/__main__.py index 4698d5c9..760f8829 100644 --- a/pylsp/__main__.py +++ b/pylsp/__main__.py @@ -9,73 +9,79 @@ try: import ujson as json -except Exception: # pylint: disable=broad-except +except Exception: import json -from .python_lsp import (PythonLSPServer, start_io_lang_server, - start_tcp_lang_server) from ._version import __version__ +from .python_lsp import ( + PythonLSPServer, + start_io_lang_server, + start_tcp_lang_server, + start_ws_lang_server, +) -LOG_FORMAT = "%(asctime)s {0} - %(levelname)s - %(name)s - %(message)s".format( - time.localtime().tm_zone) +LOG_FORMAT = "%(asctime)s {} - %(levelname)s - %(name)s - %(message)s".format( + time.localtime().tm_zone +) -def add_arguments(parser): +def add_arguments(parser) -> None: parser.description = "Python Language Server" parser.add_argument( - "--tcp", action="store_true", - help="Use TCP server instead of stdio" + "--tcp", action="store_true", help="Use TCP server instead of stdio" ) parser.add_argument( - "--host", default="127.0.0.1", - help="Bind to this address" + "--ws", action="store_true", help="Use Web Sockets server instead of stdio" ) + parser.add_argument("--host", default="127.0.0.1", help="Bind to this address") + parser.add_argument("--port", type=int, default=2087, help="Bind to this port") parser.add_argument( - "--port", type=int, default=2087, - help="Bind to this port" - ) - parser.add_argument( - '--check-parent-process', action="store_true", + "--check-parent-process", + action="store_true", help="Check whether parent process is still alive using os.kill(ppid, 0) " - "and auto shut down language 
server process when parent process is not alive." - "Note that this may not work on a Windows machine." + "and auto shut down language server process when parent process is not alive. " + "Note that this may not work on a Windows machine.", ) log_group = parser.add_mutually_exclusive_group() log_group.add_argument( - "--log-config", - help="Path to a JSON file containing Python logging config." + "--log-config", help="Path to a JSON file containing Python logging config." ) log_group.add_argument( "--log-file", - help="Redirect logs to the given file instead of writing to stderr." - "Has no effect if used with --log-config." + help="Redirect logs to the given file instead of writing to stderr. " + "Has no effect if used with --log-config.", ) parser.add_argument( - '-v', '--verbose', action='count', default=0, - help="Increase verbosity of log output, overrides log config file" + "-v", + "--verbose", + action="count", + default=0, + help="Increase verbosity of log output, overrides log config file", ) parser.add_argument( - '-V', '--version', action='version', version='%(prog)s v' + __version__ + "-V", "--version", action="version", version="%(prog)s v" + __version__ ) -def main(): +def main() -> None: parser = argparse.ArgumentParser() add_arguments(parser) args = parser.parse_args() _configure_logger(args.verbose, args.log_config, args.log_file) if args.tcp: - start_tcp_lang_server(args.host, args.port, args.check_parent_process, - PythonLSPServer) + start_tcp_lang_server( + args.host, args.port, args.check_parent_process, PythonLSPServer + ) + elif args.ws: + start_ws_lang_server(args.port, args.check_parent_process, PythonLSPServer) else: stdin, stdout = _binary_stdio() - start_io_lang_server(stdin, stdout, args.check_parent_process, - PythonLSPServer) + start_io_lang_server(stdin, stdout, args.check_parent_process, PythonLSPServer) def _binary_stdio(): @@ -88,18 +94,22 @@ def _binary_stdio(): return stdin, stdout -def _configure_logger(verbose=0, 
log_config=None, log_file=None): +def _configure_logger(verbose=0, log_config=None, log_file=None) -> None: root_logger = logging.root if log_config: - with open(log_config, 'r', encoding='utf-8') as f: + with open(log_config, encoding="utf-8") as f: logging.config.dictConfig(json.load(f)) else: formatter = logging.Formatter(LOG_FORMAT) if log_file: log_handler = logging.handlers.RotatingFileHandler( - log_file, mode='a', maxBytes=50*1024*1024, - backupCount=10, encoding=None, delay=0 + log_file, + mode="a", + maxBytes=50 * 1024 * 1024, + backupCount=10, + encoding=None, + delay=0, ) else: log_handler = logging.StreamHandler() @@ -116,5 +126,5 @@ def _configure_logger(verbose=0, log_config=None, log_file=None): root_logger.setLevel(level) -if __name__ == '__main__': +if __name__ == "__main__": main() diff --git a/pylsp/_utils.py b/pylsp/_utils.py index 0732067a..c9eb6fb1 100644 --- a/pylsp/_utils.py +++ b/pylsp/_utils.py @@ -7,21 +7,28 @@ import os import pathlib import re +import subprocess +import sys import threading +import time +from typing import Optional +import docstring_to_markdown import jedi JEDI_VERSION = jedi.__version__ # Eol chars accepted by the LSP protocol -EOL_CHARS = ['\r\n', '\r', '\n'] -EOL_REGEX = re.compile(f'({"|".join(EOL_CHARS)})') +# the ordering affects performance +EOL_CHARS = ["\r\n", "\r", "\n"] +EOL_REGEX = re.compile(f"({'|'.join(EOL_CHARS)})") log = logging.getLogger(__name__) def debounce(interval_s, keyed_by=None): """Debounce calls to this function until interval_s seconds have passed.""" + def wrapper(func): timers = {} lock = threading.Lock() @@ -45,16 +52,35 @@ def run(): timer = threading.Timer(interval_s, run) timers[key] = timer timer.start() + return debounced + return wrapper +def throttle(seconds=1): + """Throttles calls to a function every `seconds` seconds.""" + + def decorator(func): + @functools.wraps(func) + def wrapper(*args, **kwargs): + if not hasattr(wrapper, "last_call"): + wrapper.last_call = 0 + if 
time.time() - wrapper.last_call >= seconds: + wrapper.last_call = time.time() + return func(*args, **kwargs) + + return wrapper + + return decorator + + def find_parents(root, path, names): """Find files matching the given names relative to the given path. Args: path (str): The file path to start searching up from. - names (List[str]): The file/directory names to look for. + names (list[str]): The file/directory names to look for. root (str): The directory at which to stop recursing upwards. Note: @@ -64,7 +90,7 @@ def find_parents(root, path, names): return [] if not os.path.commonprefix((root, path)): - log.warning("Path %s not in %s", path, root) + log.warning("Path %r not in %r", path, root) return [] # Split the relative by directory, generate all the parent directories, then check each of them. @@ -75,7 +101,9 @@ def find_parents(root, path, names): # Search each of /a/b/c, /a/b, /a while dirs: search_dir = os.path.join(*dirs) - existing = list(filter(os.path.exists, [os.path.join(search_dir, n) for n in names])) + existing = list( + filter(os.path.exists, [os.path.join(search_dir, n) for n in names]) + ) if existing: return existing dirs.pop() @@ -89,11 +117,11 @@ def path_to_dot_name(path): directory = os.path.dirname(path) module_name, _ = os.path.splitext(os.path.basename(path)) full_name = [module_name] - while os.path.exists(os.path.join(directory, '__init__.py')): + while os.path.exists(os.path.join(directory, "__init__.py")): this_directory = os.path.basename(directory) directory = os.path.dirname(directory) full_name = [this_directory] + full_name - return '.'.join(full_name) + return ".".join(full_name) def match_uri_to_workspace(uri, workspaces): @@ -125,6 +153,7 @@ def merge_dicts(dict_a, dict_b): If override_nones is True, then """ + def _merge_dicts_(a, b): for key in set(a.keys()).union(b.keys()): if key in a and key in b: @@ -140,27 +169,187 @@ def _merge_dicts_(a, b): yield (key, a[key]) elif b[key] is not None: yield (key, b[key]) + return 
dict(_merge_dicts_(dict_a, dict_b)) -def format_docstring(contents): - """Python doc strings come in a number of formats, but LSP wants markdown. +def escape_plain_text(contents: str) -> str: + """ + Format plain text to display nicely in environments which do not respect whitespaces. + """ + contents = contents.replace("\t", "\u00a0" * 4) + contents = contents.replace(" ", "\u00a0" * 2) + return contents + - Until we can find a fast enough way of discovering and parsing each format, - we can do a little better by at least preserving indentation. +def escape_markdown(contents: str) -> str: + """ + Format plain text to display nicely in Markdown environment. """ - contents = contents.replace('\t', '\u00A0' * 4) - contents = contents.replace(' ', '\u00A0' * 2) + # escape markdown syntax + contents = re.sub(r"([\\*_#[\]])", r"\\\1", contents) + # preserve white space characters + contents = escape_plain_text(contents) return contents +def wrap_signature(signature): + return "```python\n" + signature + "\n```\n" + + +SERVER_SUPPORTED_MARKUP_KINDS = {"markdown", "plaintext"} + + +def choose_markup_kind(client_supported_markup_kinds: list[str]): + """Choose a markup kind supported by both client and the server. + + This gives priority to the markup kinds provided earlier on the client preference list. 
+ """ + for kind in client_supported_markup_kinds: + if kind in SERVER_SUPPORTED_MARKUP_KINDS: + return kind + return "markdown" + + +class Formatter: + command: list[str] + + @property + def is_installed(self) -> bool: + """Returns whether formatter is available""" + if not hasattr(self, "_is_installed"): + self._is_installed = self._is_available_via_cli() + return self._is_installed + + def format(self, code: str, line_length: int) -> str: + """Formats code""" + return subprocess.check_output( + [ + sys.executable, + "-m", + *self.command, + "--line-length", + str(line_length), + "-", + ], + input=code, + text=True, + ).strip() + + def _is_available_via_cli(self) -> bool: + try: + subprocess.check_output( + [ + sys.executable, + "-m", + *self.command, + "--help", + ], + ) + return True + except subprocess.CalledProcessError: + return False + + +class RuffFormatter(Formatter): + command = ["ruff", "format"] + + +class BlackFormatter(Formatter): + command = ["black"] + + +formatters = {"ruff": RuffFormatter(), "black": BlackFormatter()} + + +def format_signature(signature: str, config: dict, signature_formatter: str) -> str: + """Formats signature using ruff or black if either is available.""" + as_func = f"def {signature.strip()}:\n pass" + line_length = config.get("line_length", 88) + formatter = formatters[signature_formatter] + if formatter.is_installed: + try: + return ( + formatter.format(as_func, line_length=line_length) + .removeprefix("def ") + .removesuffix(":\n pass") + ) + except subprocess.CalledProcessError as e: + log.warning("Signature formatter failed %s", e) + else: + log.warning( + "Formatter %s was requested but it does not appear to be installed", + signature_formatter, + ) + return signature + + +def convert_signatures_to_markdown(signatures: list[str], config: dict) -> str: + signature_formatter = config.get("formatter", "black") + if signature_formatter: + signatures = [ + format_signature( + signature, 
signature_formatter=signature_formatter, config=config + ) + for signature in signatures + ] + return wrap_signature("\n".join(signatures)) + + +def format_docstring( + contents: str, + markup_kind: str, + signatures: Optional[list[str]] = None, + signature_config: Optional[dict] = None, +): + """Transform the provided docstring into a MarkupContent object. + + If `markup_kind` is 'markdown' the docstring will get converted to + markdown representation using `docstring-to-markdown`; if it is + `plaintext`, it will be returned as plain text. + Call signatures of functions (or equivalent code summaries) + provided in optional `signatures` argument will be prepended + to the provided contents of the docstring if given. + """ + if not isinstance(contents, str): + contents = "" + + if markup_kind == "markdown": + wrapped_signatures = convert_signatures_to_markdown( + signatures if signatures is not None else [], config=signature_config or {} + ) + + if contents != "": + try: + value = docstring_to_markdown.convert(contents) + except docstring_to_markdown.UnknownFormatError: + # try to escape the Markdown syntax instead: + value = escape_markdown(contents) + + if signatures: + value = wrapped_signatures + "\n\n" + value + else: + value = contents + + if signatures: + value = wrapped_signatures + + return {"kind": "markdown", "value": value} + value = contents + if signatures: + value = "\n".join(signatures) + "\n\n" + value + return {"kind": "plaintext", "value": escape_plain_text(value)} + + def clip_column(column, lines, line_number): """ Normalise the position as per the LSP that accepts character positions > line length https://microsoft.github.io/language-server-protocol/specification#position """ - max_column = len(lines[line_number].rstrip('\r\n')) if len(lines) > line_number else 0 + max_column = ( + len(lines[line_number].rstrip("\r\n")) if len(lines) > line_number else 0 + ) return min(column, max_column) @@ -172,14 +361,16 @@ def 
position_to_jedi_linecolumn(document, position): """ code_position = {} if position: - code_position = {'line': position['line'] + 1, - 'column': clip_column(position['character'], - document.lines, - position['line'])} + code_position = { + "line": position["line"] + 1, + "column": clip_column( + position["character"], document.lines, position["line"] + ), + } return code_position -if os.name == 'nt': +if os.name == "nt": import ctypes kernel32 = ctypes.windll.kernel32 @@ -223,8 +414,7 @@ def is_process_alive(pid): os.kill(pid, 0) except OSError as e: return e.errno == errno.EPERM - else: - return True + return True def get_eol_chars(text): diff --git a/pylsp/config/config.py b/pylsp/config/config.py index 27a76bde..7b201824 100644 --- a/pylsp/config/config.py +++ b/pylsp/config/config.py @@ -1,43 +1,50 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. -# pylint: disable=import-outside-toplevel import logging +import sys +from collections.abc import Mapping, Sequence from functools import lru_cache -from typing import List, Mapping, Sequence, Union +from typing import Union -import pkg_resources import pluggy from pluggy._hooks import HookImpl -from pylsp import _utils, hookspecs, uris, PYLSP +from pylsp import PYLSP, _utils, hookspecs, uris + +# See compatibility note on `group` keyword: +# https://docs.python.org/3/library/importlib.metadata.html#entry-points +if sys.version_info < (3, 10): # pragma: no cover + from importlib_metadata import entry_points +else: # pragma: no cover + from importlib.metadata import entry_points + log = logging.getLogger(__name__) # Sources of config, first source overrides next source -DEFAULT_CONFIG_SOURCES = ['pycodestyle'] +DEFAULT_CONFIG_SOURCES = ["pycodestyle"] class PluginManager(pluggy.PluginManager): - def _hookexec( self, hook_name: str, methods: Sequence[HookImpl], kwargs: Mapping[str, object], firstresult: bool, - ) -> Union[object, List[object]]: + ) -> 
Union[object, list[object]]: # called from all hookcaller instances. # enable_tracing will set its own wrapping function at self._inner_hookexec try: return self._inner_hookexec(hook_name, methods, kwargs, firstresult) - except Exception as e: # pylint: disable=broad-except - log.warning(f"Failed to load hook {hook_name}: {e}") + except Exception as e: + log.warning(f"Failed to load hook {hook_name}: {e}", exc_info=True) return [] class Config: - def __init__(self, root_uri, init_opts, process_id, capabilities): + def __init__(self, root_uri, init_opts, process_id, capabilities) -> None: self._root_path = uris.to_fs_path(root_uri) self._root_uri = root_uri self._init_opts = init_opts @@ -50,12 +57,14 @@ def __init__(self, root_uri, init_opts, process_id, capabilities): self._config_sources = {} try: from .flake8_conf import Flake8Config - self._config_sources['flake8'] = Flake8Config(self._root_path) + + self._config_sources["flake8"] = Flake8Config(self._root_path) except ImportError: pass try: from .pycodestyle_conf import PyCodeStyleConfig - self._config_sources['pycodestyle'] = PyCodeStyleConfig(self._root_path) + + self._config_sources["pycodestyle"] = PyCodeStyleConfig(self._root_path) except ImportError: pass @@ -67,14 +76,17 @@ def __init__(self, root_uri, init_opts, process_id, capabilities): # Pluggy will skip loading a plugin if it throws a DistributionNotFound exception. # However I don't want all plugins to have to catch ImportError and re-throw. So here we'll filter # out any entry points that throw ImportError assuming one or more of their dependencies isn't present. 
- for entry_point in pkg_resources.iter_entry_points(PYLSP): + for entry_point in entry_points(group=PYLSP): try: entry_point.load() - except Exception as e: # pylint: disable=broad-except - log.warning("Failed to load %s entry point '%s': %s", PYLSP, entry_point.name, e) + except Exception as e: + log.info( + "Failed to load %s entry point '%s': %s", PYLSP, entry_point.name, e + ) self._pm.set_blocked(entry_point.name) - # Load the entry points into pluggy, having blocked any failing ones + # Load the entry points into pluggy, having blocked any failing ones. + # Despite the API name, recent Pluggy versions will use ``importlib_metadata``. self._pm.load_setuptools_entrypoints(PYLSP) for name, plugin in self._pm.list_name_plugin(): @@ -82,7 +94,13 @@ def __init__(self, root_uri, init_opts, process_id, capabilities): log.info("Loaded pylsp plugin %s from %s", name, plugin) for plugin_conf in self._pm.hook.pylsp_settings(config=self): - self._plugin_settings = _utils.merge_dicts(self._plugin_settings, plugin_conf) + self._plugin_settings = _utils.merge_dicts( + self._plugin_settings, plugin_conf + ) + + self._plugin_settings = _utils.merge_dicts( + self._plugin_settings, self._init_opts.get("pylsp", {}) + ) self._update_disabled_plugins() @@ -123,7 +141,7 @@ def settings(self, document_path=None): settings.cache_clear() when the config is updated """ settings = {} - sources = self._settings.get('configurationSources', DEFAULT_CONFIG_SOURCES) + sources = self._settings.get("configurationSources", DEFAULT_CONFIG_SOURCES) # Plugin configuration settings = _utils.merge_dicts(settings, self._plugin_settings) @@ -137,7 +155,9 @@ def settings(self, document_path=None): if not source: continue source_conf = source.user_config() - log.debug("Got user config from %s: %s", source.__class__.__name__, source_conf) + log.debug( + "Got user config from %s: %s", source.__class__.__name__, source_conf + ) settings = _utils.merge_dicts(settings, source_conf) # Project configuration @@ 
-146,7 +166,9 @@ def settings(self, document_path=None): if not source: continue source_conf = source.project_config(document_path or self._root_path) - log.debug("Got project config from %s: %s", source.__class__.__name__, source_conf) + log.debug( + "Got project config from %s: %s", source.__class__.__name__, source_conf + ) settings = _utils.merge_dicts(settings, source_conf) log.debug("With configuration: %s", settings) @@ -158,19 +180,24 @@ def find_parents(self, path, names): return _utils.find_parents(root_path, path, names) def plugin_settings(self, plugin, document_path=None): - return self.settings(document_path=document_path).get('plugins', {}).get(plugin, {}) + return ( + self.settings(document_path=document_path) + .get("plugins", {}) + .get(plugin, {}) + ) - def update(self, settings): + def update(self, settings) -> None: """Recursively merge the given settings into the current settings.""" self.settings.cache_clear() self._settings = settings log.info("Updated settings to %s", self._settings) self._update_disabled_plugins() - def _update_disabled_plugins(self): + def _update_disabled_plugins(self) -> None: # All plugins default to enabled self._disabled_plugins = [ - plugin for name, plugin in self.plugin_manager.list_name_plugin() - if not self.settings().get('plugins', {}).get(name, {}).get('enabled', True) + plugin + for name, plugin in self.plugin_manager.list_name_plugin() + if not self.settings().get("plugins", {}).get(name, {}).get("enabled", True) ] log.info("Disabled plugins: %s", self._disabled_plugins) diff --git a/pylsp/config/flake8_conf.py b/pylsp/config/flake8_conf.py index bdc34767..74258709 100644 --- a/pylsp/config/flake8_conf.py +++ b/pylsp/config/flake8_conf.py @@ -3,34 +3,39 @@ import logging import os + from pylsp._utils import find_parents + from .source import ConfigSource log = logging.getLogger(__name__) -CONFIG_KEY = 'flake8' -PROJECT_CONFIGS = ['.flake8', 'setup.cfg', 'tox.ini'] +CONFIG_KEY = "flake8" +PROJECT_CONFIGS = 
[".flake8", "setup.cfg", "tox.ini"] OPTIONS = [ # mccabe - ('max-complexity', 'plugins.mccabe.threshold', int), + ("max-complexity", "plugins.mccabe.threshold", int), # pycodestyle - ('exclude', 'plugins.pycodestyle.exclude', list), - ('filename', 'plugins.pycodestyle.filename', list), - ('hang-closing', 'plugins.pycodestyle.hangClosing', bool), - ('ignore', 'plugins.pycodestyle.ignore', list), - ('max-line-length', 'plugins.pycodestyle.maxLineLength', int), - ('indent-size', 'plugins.pycodestyle.indentSize', int), - ('select', 'plugins.pycodestyle.select', list), + ("exclude", "plugins.pycodestyle.exclude", list), + ("filename", "plugins.pycodestyle.filename", list), + ("hang-closing", "plugins.pycodestyle.hangClosing", bool), + ("ignore", "plugins.pycodestyle.ignore", list), + ("max-line-length", "plugins.pycodestyle.maxLineLength", int), + ("indent-size", "plugins.pycodestyle.indentSize", int), + ("select", "plugins.pycodestyle.select", list), # flake8 - ('exclude', 'plugins.flake8.exclude', list), - ('filename', 'plugins.flake8.filename', list), - ('hang-closing', 'plugins.flake8.hangClosing', bool), - ('ignore', 'plugins.flake8.ignore', list), - ('max-line-length', 'plugins.flake8.maxLineLength', int), - ('indent-size', 'plugins.flake8.indentSize', int), - ('select', 'plugins.flake8.select', list), - ('per-file-ignores', 'plugins.flake8.perFileIgnores', list), + ("exclude", "plugins.flake8.exclude", list), + ("extend-ignore", "plugins.flake8.extendIgnore", list), + ("extend-select", "plugins.flake8.extendSelect", list), + ("filename", "plugins.flake8.filename", list), + ("hang-closing", "plugins.flake8.hangClosing", bool), + ("ignore", "plugins.flake8.ignore", list), + ("max-complexity", "plugins.flake8.maxComplexity", int), + ("max-line-length", "plugins.flake8.maxLineLength", int), + ("indent-size", "plugins.flake8.indentSize", int), + ("select", "plugins.flake8.select", list), + ("per-file-ignores", "plugins.flake8.perFileIgnores", list), ] @@ -44,8 +49,8 
@@ def user_config(self): def _user_config_file(self): if self.is_windows: - return os.path.expanduser('~\\.flake8') - return os.path.join(self.xdg_home, 'flake8') + return os.path.expanduser("~\\.flake8") + return os.path.join(self.xdg_home, "flake8") def project_config(self, document_path): files = find_parents(self.root_path, document_path, PROJECT_CONFIGS) diff --git a/pylsp/config/pycodestyle_conf.py b/pylsp/config/pycodestyle_conf.py index 6ac5941e..ed15a802 100644 --- a/pylsp/config/pycodestyle_conf.py +++ b/pylsp/config/pycodestyle_conf.py @@ -2,28 +2,28 @@ # Copyright 2021- Python Language Server Contributors. import pycodestyle + from pylsp._utils import find_parents -from .source import ConfigSource +from .source import ConfigSource -CONFIG_KEY = 'pycodestyle' +CONFIG_KEY = "pycodestyle" USER_CONFIGS = [pycodestyle.USER_CONFIG] if pycodestyle.USER_CONFIG else [] -PROJECT_CONFIGS = ['pycodestyle.cfg', 'setup.cfg', 'tox.ini'] +PROJECT_CONFIGS = ["pycodestyle.cfg", "setup.cfg", "tox.ini"] OPTIONS = [ - ('exclude', 'plugins.pycodestyle.exclude', list), - ('filename', 'plugins.pycodestyle.filename', list), - ('hang-closing', 'plugins.pycodestyle.hangClosing', bool), - ('ignore', 'plugins.pycodestyle.ignore', list), - ('max-line-length', 'plugins.pycodestyle.maxLineLength', int), - ('indent-size', 'plugins.pycodestyle.indentSize', int), - ('select', 'plugins.pycodestyle.select', list), - ('aggressive', 'plugins.pycodestyle.aggressive', int), + ("exclude", "plugins.pycodestyle.exclude", list), + ("filename", "plugins.pycodestyle.filename", list), + ("hang-closing", "plugins.pycodestyle.hangClosing", bool), + ("ignore", "plugins.pycodestyle.ignore", list), + ("max-line-length", "plugins.pycodestyle.maxLineLength", int), + ("indent-size", "plugins.pycodestyle.indentSize", int), + ("select", "plugins.pycodestyle.select", list), + ("aggressive", "plugins.pycodestyle.aggressive", int), ] class PyCodeStyleConfig(ConfigSource): - def user_config(self): config = 
self.read_config_from_files(USER_CONFIGS) return self.parse_config(config, CONFIG_KEY, OPTIONS) diff --git a/pylsp/config/schema.json b/pylsp/config/schema.json index c29d78bd..67289d96 100644 --- a/pylsp/config/schema.json +++ b/pylsp/config/schema.json @@ -1,21 +1,34 @@ { "$schema": "http://json-schema.org/draft-07/schema#", "title": "Python Language Server Configuration", - "description": "This server can be configured using `workspace/didChangeConfiguration` method. Each configuration option is described below:", + "description": "This server can be configured using the `workspace/didChangeConfiguration` method. Each configuration option is described below. Note, a value of `null` means that we do not set a value and thus use the plugin's default value.", "type": "object", "properties": { "pylsp.configurationSources": { "type": "array", - "default": ["pycodestyle"], + "default": [ + "pycodestyle" + ], "description": "List of configuration sources to use.", "items": { "type": "string", - "enum": ["pycodestyle", "pyflakes"] + "enum": [ + "pycodestyle", + "flake8" + ] }, "uniqueItems": true }, + "pylsp.plugins.autopep8.enabled": { + "type": "boolean", + "default": true, + "description": "Enable or disable the plugin (disabling required to use `yapf`)." + }, "pylsp.plugins.flake8.config": { - "type": "string", + "type": [ + "string", + "null" + ], "default": null, "description": "Path to the config file that will be the authoritative config source." }, @@ -26,61 +39,136 @@ }, "pylsp.plugins.flake8.exclude": { "type": "array", - "default": null, + "default": [], + "items": { + "type": "string" + }, "description": "List of files or directories to exclude." }, + "pylsp.plugins.flake8.extendIgnore": { + "type": "array", + "default": [], + "items": { + "type": "string" + }, + "description": "List of errors and warnings to append to ignore list." 
+ }, + "pylsp.plugins.flake8.extendSelect": { + "type": "array", + "default": [], + "items": { + "type": "string" + }, + "description": "List of errors and warnings to append to select list." + }, "pylsp.plugins.flake8.executable": { "type": "string", "default": "flake8", "description": "Path to the flake8 executable." }, "pylsp.plugins.flake8.filename": { - "type": "string", + "type": [ + "string", + "null" + ], "default": null, "description": "Only check for filenames matching the patterns in this list." }, "pylsp.plugins.flake8.hangClosing": { - "type": "boolean", + "type": [ + "boolean", + "null" + ], "default": null, "description": "Hang closing bracket instead of matching indentation of opening bracket's line." }, "pylsp.plugins.flake8.ignore": { "type": "array", - "default": null, + "default": [], + "items": { + "type": "string" + }, "description": "List of errors and warnings to ignore (or skip)." }, - "pylsp.plugins.flake8.maxLineLength": { + "pylsp.plugins.flake8.maxComplexity": { "type": "integer", "default": null, + "description": "Maximum allowed complexity threshold." + }, + "pylsp.plugins.flake8.maxLineLength": { + "type": [ + "integer", + "null" + ], + "default": null, "description": "Maximum allowed line length for the entirety of this run." }, "pylsp.plugins.flake8.indentSize": { - "type": "integer", + "type": [ + "integer", + "null" + ], "default": null, "description": "Set indentation spaces." }, "pylsp.plugins.flake8.perFileIgnores": { - "type": "array", - "default": null, + "type": [ + "array" + ], + "default": [], + "items": { + "type": "string" + }, "description": "A pairing of filenames and violation codes that defines which violations to ignore in a particular file, for example: `[\"file_path.py:W305,W304\"]`)." }, "pylsp.plugins.flake8.select": { - "type": "array", + "type": [ + "array", + "null" + ], "default": null, + "items": { + "type": "string" + }, + "uniqueItems": true, "description": "List of errors and warnings to enable." 
}, + "pylsp.plugins.jedi.auto_import_modules": { + "type": "array", + "default": [ + "numpy" + ], + "items": { + "type": "string" + }, + "description": "List of module names for jedi.settings.auto_import_modules." + }, "pylsp.plugins.jedi.extra_paths": { "type": "array", "default": [], + "items": { + "type": "string" + }, "description": "Define extra paths for jedi.Script." }, + "pylsp.plugins.jedi.prioritize_extra_paths": { + "type": "boolean", + "default": false, + "description": "Whether to place extra_paths at the beginning (true) or end (false) of `sys.path`" + }, "pylsp.plugins.jedi.env_vars": { - "type": "object", + "type": [ + "object", + "null" + ], "default": null, "description": "Define environment variables for jedi.Script and Jedi.names." }, "pylsp.plugins.jedi.environment": { - "type": "string", + "type": [ + "string", + "null" + ], "default": null, "description": "Define environment for jedi.Script and Jedi.names." }, @@ -96,9 +184,14 @@ }, "pylsp.plugins.jedi_completion.include_class_objects": { "type": "boolean", - "default": true, + "default": false, "description": "Adds class objects as a separate completion item." }, + "pylsp.plugins.jedi_completion.include_function_objects": { + "type": "boolean", + "default": false, + "description": "Adds function objects as a separate completion item." + }, "pylsp.plugins.jedi_completion.fuzzy": { "type": "boolean", "default": false, @@ -110,7 +203,7 @@ "description": "Resolve documentation and detail eagerly." }, "pylsp.plugins.jedi_completion.resolve_at_most": { - "type": "number", + "type": "integer", "default": 25, "description": "How many labels and snippets (at most) should be resolved?" }, @@ -119,7 +212,12 @@ "items": { "type": "string" }, - "default": ["pandas", "numpy", "tensorflow", "matplotlib"], + "default": [ + "pandas", + "numpy", + "tensorflow", + "matplotlib" + ], "description": "Modules for which labels and snippets should be cached." 
}, "pylsp.plugins.jedi_definition.enabled": { @@ -137,6 +235,11 @@ "default": true, "description": "If follow_imports is True will decide if it follow builtin imports." }, + "pylsp.plugins.jedi_definition.follow_builtin_definitions": { + "type": "boolean", + "default": true, + "description": "Follow builtin and extension definitions to stubs." + }, "pylsp.plugins.jedi_hover.enabled": { "type": "boolean", "default": true, @@ -167,13 +270,18 @@ "default": true, "description": "If True includes symbols imported from other libraries." }, + "pylsp.plugins.jedi_type_definition.enabled": { + "type": "boolean", + "default": true, + "description": "Enable or disable the plugin." + }, "pylsp.plugins.mccabe.enabled": { "type": "boolean", "default": true, "description": "Enable or disable the plugin." }, "pylsp.plugins.mccabe.threshold": { - "type": "number", + "type": "integer", "default": 15, "description": "The minimum threshold that triggers warnings about cyclomatic complexity." }, @@ -184,7 +292,7 @@ }, "pylsp.plugins.preload.modules": { "type": "array", - "default": null, + "default": [], "items": { "type": "string" }, @@ -198,7 +306,7 @@ }, "pylsp.plugins.pycodestyle.exclude": { "type": "array", - "default": null, + "default": [], "items": { "type": "string" }, @@ -207,7 +315,7 @@ }, "pylsp.plugins.pycodestyle.filename": { "type": "array", - "default": null, + "default": [], "items": { "type": "string" }, @@ -215,7 +323,10 @@ "description": "When parsing directories, only check filenames matching these patterns." 
}, "pylsp.plugins.pycodestyle.select": { - "type": "array", + "type": [ + "array", + "null" + ], "default": null, "items": { "type": "string" @@ -225,7 +336,7 @@ }, "pylsp.plugins.pycodestyle.ignore": { "type": "array", - "default": null, + "default": [], "items": { "type": "string" }, @@ -233,17 +344,26 @@ "description": "Ignore errors and warnings" }, "pylsp.plugins.pycodestyle.hangClosing": { - "type": "boolean", + "type": [ + "boolean", + "null" + ], "default": null, "description": "Hang closing bracket instead of matching indentation of opening bracket's line." }, "pylsp.plugins.pycodestyle.maxLineLength": { - "type": "number", + "type": [ + "integer", + "null" + ], "default": null, "description": "Set maximum allowed line length." }, "pylsp.plugins.pycodestyle.indentSize": { - "type": "integer", + "type": [ + "integer", + "null" + ], "default": null, "description": "Set indentation spaces." }, @@ -253,17 +373,22 @@ "description": "Enable or disable the plugin." }, "pylsp.plugins.pydocstyle.convention": { - "type": "string", + "type": [ + "string", + "null" + ], "default": null, "enum": [ "pep257", - "numpy" + "numpy", + "google", + null ], "description": "Choose the basic list of checked errors by specifying an existing convention." 
}, "pylsp.plugins.pydocstyle.addIgnore": { "type": "array", - "default": null, + "default": [], "items": { "type": "string" }, @@ -272,7 +397,7 @@ }, "pylsp.plugins.pydocstyle.addSelect": { "type": "array", - "default": null, + "default": [], "items": { "type": "string" }, @@ -281,7 +406,7 @@ }, "pylsp.plugins.pydocstyle.ignore": { "type": "array", - "default": null, + "default": [], "items": { "type": "string" }, @@ -289,7 +414,10 @@ "description": "Ignore errors and warnings" }, "pylsp.plugins.pydocstyle.select": { - "type": "array", + "type": [ + "array", + "null" + ], "default": null, "items": { "type": "string" @@ -319,7 +447,7 @@ }, "pylsp.plugins.pylint.args": { "type": "array", - "default": null, + "default": [], "items": { "type": "string" }, @@ -327,13 +455,36 @@ "description": "Arguments to pass to pylint." }, "pylsp.plugins.pylint.executable": { - "type": "string", + "type": [ + "string", + "null" + ], "default": null, "description": "Executable to run pylint with. Enabling this will run pylint on unsaved files via stdin. Can slow down workflow. Only works with python3." }, - "pylsp.plugins.rope_completion.enabled": { + "pylsp.plugins.rope_autoimport.enabled": { + "type": "boolean", + "default": false, + "description": "Enable or disable autoimport. If false, neither completions nor code actions are enabled. If true, the respective features can be enabled or disabled individually." + }, + "pylsp.plugins.rope_autoimport.completions.enabled": { "type": "boolean", "default": true, + "description": "Enable or disable autoimport completions." + }, + "pylsp.plugins.rope_autoimport.code_actions.enabled": { + "type": "boolean", + "default": true, + "description": "Enable or disable autoimport code actions (e.g. for quick fixes)." + }, + "pylsp.plugins.rope_autoimport.memory": { + "type": "boolean", + "default": false, + "description": "Make the autoimport database memory only. Drastically increases startup time." 
+ }, + "pylsp.plugins.rope_completion.enabled": { + "type": "boolean", + "default": false, "description": "Enable or disable the plugin." }, "pylsp.plugins.rope_completion.eager": { @@ -347,18 +498,47 @@ "description": "Enable or disable the plugin." }, "pylsp.rope.extensionModules": { - "type": "string", + "type": [ + "string", + "null" + ], "default": null, "description": "Builtin and c-extension modules that are allowed to be imported and inspected by rope." }, "pylsp.rope.ropeFolder": { - "type": "array", + "type": [ + "array", + "null" + ], "default": null, "items": { "type": "string" }, "uniqueItems": true, "description": "The name of the folder in which rope stores project configurations and data. Pass `null` for not using such a folder at all." + }, + "pylsp.signature.formatter": { + "type": [ + "string", + "null" + ], + "enum": [ + "black", + "ruff", + null + ], + "default": "black", + "description": "Formatter to use for reformatting signatures in docstrings." + }, + "pylsp.signature.include_docstring": { + "type": "boolean", + "default": true, + "description": "Include signature docstring." + }, + "pylsp.signature.line_length": { + "type": "number", + "default": 88, + "description": "Maximum line length in signatures." } } } diff --git a/pylsp/config/source.py b/pylsp/config/source.py index 6a21a84c..8ffc8b71 100644 --- a/pylsp/config/source.py +++ b/pylsp/config/source.py @@ -12,18 +12,18 @@ class ConfigSource: """Base class for implementing a config source.""" - def __init__(self, root_path): + def __init__(self, root_path) -> None: self.root_path = root_path - self.is_windows = sys.platform == 'win32' + self.is_windows = sys.platform == "win32" self.xdg_home = os.environ.get( - 'XDG_CONFIG_HOME', os.path.expanduser('~/.config') + "XDG_CONFIG_HOME", os.path.expanduser("~/.config") ) - def user_config(self): + def user_config(self) -> None: """Return user-level (i.e. 
home directory) configuration.""" raise NotImplementedError() - def project_config(self, document_path): + def project_config(self, document_path) -> None: """Return project-level (i.e. workspace directory) configuration.""" raise NotImplementedError() @@ -49,20 +49,20 @@ def parse_config(cls, config, key, options): @classmethod def _get_opt(cls, config, key, option, opt_type): """Get an option from a configparser with the given type.""" - for opt_key in [option, option.replace('-', '_')]: + for opt_key in [option, option.replace("-", "_")]: if not config.has_option(key, opt_key): continue - if opt_type == bool: + if opt_type is bool: return config.getboolean(key, opt_key) - if opt_type == int: + if opt_type is int: return config.getint(key, opt_key) - if opt_type == str: + if opt_type is str: return config.get(key, opt_key) - if opt_type == list: + if opt_type is list: return cls._parse_list_opt(config.get(key, opt_key)) raise ValueError("Unknown option type: %s" % opt_type) @@ -77,7 +77,7 @@ def _set_opt(cls, config_dict, path, value): if value is None: return - if '.' not in path: + if "." not in path: config_dict[path] = value return diff --git a/pylsp/hookspecs.py b/pylsp/hookspecs.py index d1a2458e..cf97c745 100644 --- a/pylsp/hookspecs.py +++ b/pylsp/hookspecs.py @@ -1,7 +1,6 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. -# pylint: disable=redefined-builtin, unused-argument from pylsp import hookspec @@ -11,12 +10,12 @@ def pylsp_code_actions(config, workspace, document, range, context): @hookspec -def pylsp_code_lens(config, workspace, document): +def pylsp_code_lens(config, workspace, document) -> None: pass @hookspec -def pylsp_commands(config, workspace): +def pylsp_commands(config, workspace) -> None: """The list of command strings supported by the server. 
Returns: @@ -25,105 +24,122 @@ def pylsp_commands(config, workspace): @hookspec -def pylsp_completions(config, workspace, document, position): +def pylsp_completions(config, workspace, document, position, ignored_names) -> None: pass @hookspec(firstresult=True) -def pylsp_completion_item_resolve(config, workspace, document, completion_item): +def pylsp_completion_item_resolve(config, workspace, document, completion_item) -> None: pass @hookspec -def pylsp_definitions(config, workspace, document, position): +def pylsp_definitions(config, workspace, document, position) -> None: + pass + + +@hookspec(firstresult=True) +def pylsp_type_definition(config, document, position): pass @hookspec -def pylsp_dispatchers(config, workspace): +def pylsp_dispatchers(config, workspace) -> None: pass @hookspec -def pylsp_document_did_open(config, workspace, document): +def pylsp_document_did_open(config, workspace, document) -> None: pass @hookspec -def pylsp_document_did_save(config, workspace, document): +def pylsp_document_did_save(config, workspace, document) -> None: pass @hookspec -def pylsp_document_highlight(config, workspace, document, position): +def pylsp_document_highlight(config, workspace, document, position) -> None: pass @hookspec -def pylsp_document_symbols(config, workspace, document): +def pylsp_document_symbols(config, workspace, document) -> None: pass @hookspec(firstresult=True) -def pylsp_execute_command(config, workspace, command, arguments): +def pylsp_execute_command(config, workspace, command, arguments) -> None: pass @hookspec -def pylsp_experimental_capabilities(config, workspace): +def pylsp_experimental_capabilities(config, workspace) -> None: pass @hookspec -def pylsp_folding_range(config, workspace, document): +def pylsp_folding_range(config, workspace, document) -> None: pass @hookspec(firstresult=True) -def pylsp_format_document(config, workspace, document, options): +def pylsp_format_document(config, workspace, document, options) -> None: pass 
@hookspec(firstresult=True) -def pylsp_format_range(config, workspace, document, range, options): +def pylsp_format_range(config, workspace, document, range, options) -> None: pass @hookspec(firstresult=True) -def pylsp_hover(config, workspace, document, position): +def pylsp_hover(config, workspace, document, position) -> None: pass @hookspec -def pylsp_initialize(config, workspace): +def pylsp_initialize(config, workspace) -> None: pass @hookspec -def pylsp_initialized(): +def pylsp_initialized() -> None: pass @hookspec -def pylsp_lint(config, workspace, document, is_saved): +def pylsp_lint(config, workspace, document, is_saved) -> None: pass @hookspec -def pylsp_references(config, workspace, document, position, exclude_declaration): +def pylsp_references( + config, workspace, document, position, exclude_declaration +) -> None: pass @hookspec(firstresult=True) -def pylsp_rename(config, workspace, document, position, new_name): +def pylsp_rename(config, workspace, document, position, new_name) -> None: pass @hookspec -def pylsp_settings(config): +def pylsp_settings(config) -> None: pass @hookspec(firstresult=True) -def pylsp_signature_help(config, workspace, document, position): +def pylsp_signature_help(config, workspace, document, position) -> None: + pass + + +@hookspec +def pylsp_workspace_configuration_changed(config, workspace) -> None: + pass + + +@hookspec +def pylsp_shutdown(config, workspace) -> None: pass diff --git a/pylsp/lsp.py b/pylsp/lsp.py index f97a0a2e..7b3f02ee 100644 --- a/pylsp/lsp.py +++ b/pylsp/lsp.py @@ -90,3 +90,26 @@ class TextDocumentSyncKind: NONE = 0 FULL = 1 INCREMENTAL = 2 + + +class NotebookCellKind: + Markup = 1 + Code = 2 + + +# https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#errorCodes +class ErrorCodes: + ParseError = -32700 + InvalidRequest = -32600 + MethodNotFound = -32601 + InvalidParams = -32602 + InternalError = -32603 + jsonrpcReservedErrorRangeStart = -32099 + 
ServerNotInitialized = -32002 + UnknownErrorCode = -32001 + jsonrpcReservedErrorRangeEnd = -32000 + lspReservedErrorRangeStart = -32899 + ServerCancelled = -32802 + ContentModified = -32801 + RequestCancelled = -32800 + lspReservedErrorRangeEnd = -32800 diff --git a/pylsp/plugins/_resolvers.py b/pylsp/plugins/_resolvers.py index 3483d3dc..dcfd06ab 100644 --- a/pylsp/plugins/_resolvers.py +++ b/pylsp/plugins/_resolvers.py @@ -1,23 +1,21 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. -from collections import defaultdict import logging +from collections import defaultdict from time import time from jedi.api.classes import Completion from pylsp import lsp - log = logging.getLogger(__name__) # ---- Base class # ----------------------------------------------------------------------------- class Resolver: - - def __init__(self, callback, resolve_on_error, time_to_live=60 * 30): + def __init__(self, callback, resolve_on_error, time_to_live=60 * 30) -> None: self.callback = callback self.resolve_on_error = resolve_on_error self._cache = {} @@ -25,7 +23,7 @@ def __init__(self, callback, resolve_on_error, time_to_live=60 * 30): self._cache_ttl = defaultdict(set) self._clear_every = 2 # see https://github.com/davidhalter/jedi/blob/master/jedi/inference/helpers.py#L194-L202 - self._cached_modules = {'pandas', 'numpy', 'tensorflow', 'matplotlib'} + self._cached_modules = {"pandas", "numpy", "tensorflow", "matplotlib"} @property def cached_modules(self): @@ -35,13 +33,9 @@ def cached_modules(self): def cached_modules(self, new_value): self._cached_modules = set(new_value) - def clear_outdated(self): + def clear_outdated(self) -> None: now = self.time_key() - to_clear = [ - timestamp - for timestamp in self._cache_ttl - if timestamp < now - ] + to_clear = [timestamp for timestamp in self._cache_ttl if timestamp < now] for time_key in to_clear: for key in self._cache_ttl[time_key]: del self._cache[key] @@ -54,7 +48,7 @@ 
def get_or_create(self, completion: Completion): if not completion.full_name: use_cache = False else: - module_parts = completion.full_name.split('.') + module_parts = completion.full_name.split(".") use_cache = module_parts and module_parts[0] in self._cached_modules if use_cache: @@ -71,31 +65,35 @@ def get_or_create(self, completion: Completion): def _create_completion_id(self, completion: Completion): return ( - completion.full_name, completion.module_path, - completion.line, completion.column, - self.time_key() + completion.full_name, + completion.module_path, + completion.line, + completion.column, + self.time_key(), ) def resolve(self, completion): try: sig = completion.get_signatures() return self.callback(completion, sig) - except Exception as e: # pylint: disable=broad-except - log.warning(f'Something went wrong when resolving label for {completion}: {e}') + except Exception as e: + log.warning( + f"Something went wrong when resolving label for {completion}: {e}" + ) return self.resolve_on_error # ---- Label resolver # ----------------------------------------------------------------------------- def format_label(completion, sig): - if sig and completion.type in ('function', 'method'): - params = ', '.join(param.name for param in sig[0].params) - label = '{}({})'.format(completion.name, params) + if sig and completion.type in ("function", "method"): + params = ", ".join(param.name for param in sig[0].params) + label = f"{completion.name}({params})" return label return completion.name -LABEL_RESOLVER = Resolver(callback=format_label, resolve_on_error='') +LABEL_RESOLVER = Resolver(callback=format_label, resolve_on_error="") # ---- Snippets resolver @@ -106,25 +104,27 @@ def format_snippet(completion, sig): snippet_completion = {} - positional_args = [param for param in sig[0].params - if '=' not in param.description and - param.name not in {'/', '*'}] + positional_args = [ + param + for param in sig[0].params + if "=" not in param.description and param.name 
not in {"/", "*"} + ] if len(positional_args) > 1: # For completions with params, we can generate a snippet instead - snippet_completion['insertTextFormat'] = lsp.InsertTextFormat.Snippet - snippet = completion.name + '(' + snippet_completion["insertTextFormat"] = lsp.InsertTextFormat.Snippet + snippet = completion.name + "(" for i, param in enumerate(positional_args): - snippet += '${%s:%s}' % (i + 1, param.name) + snippet += "${{{}:{}}}".format(i + 1, param.name) if i < len(positional_args) - 1: - snippet += ', ' - snippet += ')$0' - snippet_completion['insertText'] = snippet + snippet += ", " + snippet += ")$0" + snippet_completion["insertText"] = snippet elif len(positional_args) == 1: - snippet_completion['insertTextFormat'] = lsp.InsertTextFormat.Snippet - snippet_completion['insertText'] = completion.name + '($0)' + snippet_completion["insertTextFormat"] = lsp.InsertTextFormat.Snippet + snippet_completion["insertText"] = completion.name + "($0)" else: - snippet_completion['insertText'] = completion.name + '()' + snippet_completion["insertText"] = completion.name + "()" return snippet_completion diff --git a/pylsp/plugins/_rope_task_handle.py b/pylsp/plugins/_rope_task_handle.py new file mode 100644 index 00000000..5e278ee5 --- /dev/null +++ b/pylsp/plugins/_rope_task_handle.py @@ -0,0 +1,107 @@ +from __future__ import annotations + +import logging +from collections.abc import Sequence +from typing import Callable, ContextManager + +from rope.base.taskhandle import BaseJobSet, BaseTaskHandle + +from pylsp._utils import throttle +from pylsp.workspace import Workspace + +log = logging.getLogger(__name__) +Report = Callable[[str, int], None] + + +class PylspJobSet(BaseJobSet): + count: int = 0 + done: int = 0 + _reporter: Report + _report_iter: ContextManager + job_name: str = "" + + def __init__(self, count: int | None, report_iter: ContextManager) -> None: + if count is not None: + self.count = count + self._reporter = report_iter.__enter__() + 
self._report_iter = report_iter + + def started_job(self, name: str | None) -> None: + if name: + self.job_name = name + + def finished_job(self) -> None: + self.done += 1 + if self.get_percent_done() is not None and int(self.get_percent_done()) >= 100: + if self._report_iter is None: + return + self._report_iter.__exit__(None, None, None) + self._report_iter = None + else: + self._report() + + def check_status(self) -> None: + pass + + def get_percent_done(self) -> float | None: + if self.count == 0: + return 0 + return (self.done / self.count) * 100 + + def increment(self) -> None: + """ + Increment the number of tasks to complete. + + This is used if the number is not known ahead of time. + """ + self.count += 1 + self._report() + + @throttle(0.5) + def _report(self) -> None: + percent = int(self.get_percent_done()) + message = f"{self.job_name} {self.done}/{self.count}" + log.debug(f"Reporting {message} {percent}%") + self._reporter(message, percent) + + +class PylspTaskHandle(BaseTaskHandle): + name: str + observers: list + job_sets: list[PylspJobSet] + stopped: bool + workspace: Workspace + _report: Callable[[str, str], None] + + def __init__(self, workspace: Workspace) -> None: + self.workspace = workspace + self.job_sets = [] + self.observers = [] + + def create_jobset(self, name="JobSet", count: int | None = None): + report_iter = self.workspace.report_progress( + name, None, None, skip_token_initialization=True + ) + result = PylspJobSet(count, report_iter) + self.job_sets.append(result) + self._inform_observers() + return result + + def stop(self) -> None: + pass + + def current_jobset(self) -> BaseJobSet | None: + pass + + def add_observer(self) -> None: + pass + + def is_stopped(self) -> bool: + pass + + def get_jobsets(self) -> Sequence[BaseJobSet]: + pass + + def _inform_observers(self) -> None: + for observer in self.observers: + observer() diff --git a/pylsp/plugins/autopep8_format.py b/pylsp/plugins/autopep8_format.py index f605f830..2b3491da 
100644 --- a/pylsp/plugins/autopep8_format.py +++ b/pylsp/plugins/autopep8_format.py @@ -4,7 +4,8 @@ import logging import pycodestyle -from autopep8 import fix_code, continued_indentation as autopep8_c_i +from autopep8 import continued_indentation as autopep8_c_i +from autopep8 import fix_code from pylsp import hookimpl from pylsp._utils import get_eol_chars @@ -13,32 +14,33 @@ @hookimpl(tryfirst=True) # Prefer autopep8 over YAPF -def pylsp_format_document(config, document, options=None): # pylint: disable=unused-argument - log.info("Formatting document %s with autopep8", document) - return _format(config, document) +def pylsp_format_document(config, workspace, document, options): + with workspace.report_progress("format: autopep8"): + log.info("Formatting document %s with autopep8", document) + return _format(config, document) @hookimpl(tryfirst=True) # Prefer autopep8 over YAPF -def pylsp_format_range(config, document, range, options=None): # pylint: disable=redefined-builtin,unused-argument +def pylsp_format_range(config, workspace, document, range, options): log.info("Formatting document %s in range %s with autopep8", document, range) # First we 'round' the range up/down to full lines only - range['start']['character'] = 0 - range['end']['line'] += 1 - range['end']['character'] = 0 + range["start"]["character"] = 0 + range["end"]["line"] += 1 + range["end"]["character"] = 0 # Add 1 for 1-indexing vs LSP's 0-indexing - line_range = (range['start']['line'] + 1, range['end']['line'] + 1) + line_range = (range["start"]["line"] + 1, range["end"]["line"]) return _format(config, document, line_range=line_range) def _format(config, document, line_range=None): options = _autopep8_config(config, document) if line_range: - options['line_range'] = list(line_range) + options["line_range"] = list(line_range) # Temporarily re-monkey-patch the continued_indentation checker - #771 - del pycodestyle._checks['logical_line'][pycodestyle.continued_indentation] + del 
pycodestyle._checks["logical_line"][pycodestyle.continued_indentation] pycodestyle.register_check(autopep8_c_i) # Autopep8 doesn't work with CR line endings, so we replace them by '\n' @@ -46,45 +48,47 @@ def _format(config, document, line_range=None): replace_cr = False source = document.source eol_chars = get_eol_chars(source) - if eol_chars == '\r': + if eol_chars == "\r": replace_cr = True - source = source.replace('\r', '\n') + source = source.replace("\r", "\n") new_source = fix_code(source, options=options) # Switch it back - del pycodestyle._checks['logical_line'][autopep8_c_i] + del pycodestyle._checks["logical_line"][autopep8_c_i] pycodestyle.register_check(pycodestyle.continued_indentation) if new_source == source: return [] if replace_cr: - new_source = new_source.replace('\n', '\r') + new_source = new_source.replace("\n", "\r") # I'm too lazy at the moment to parse diffs into TextEdit items # So let's just return the entire file... - return [{ - 'range': { - 'start': {'line': 0, 'character': 0}, - # End char 0 of the line after our document - 'end': {'line': len(document.lines), 'character': 0} - }, - 'newText': new_source - }] + return [ + { + "range": { + "start": {"line": 0, "character": 0}, + # End char 0 of the line after our document + "end": {"line": len(document.lines), "character": 0}, + }, + "newText": new_source, + } + ] def _autopep8_config(config, document=None): # We user pycodestyle settings to avoid redefining things path = document.path if document is not None else None - settings = config.plugin_settings('pycodestyle', document_path=path) + settings = config.plugin_settings("pycodestyle", document_path=path) options = { - 'exclude': settings.get('exclude'), - 'hang_closing': settings.get('hangClosing'), - 'ignore': settings.get('ignore'), - 'max_line_length': settings.get('maxLineLength'), - 'select': settings.get('select'), - 'aggressive': settings.get('aggressive'), + "exclude": settings.get("exclude"), + "hang_closing": 
settings.get("hangClosing"), + "ignore": settings.get("ignore"), + "max_line_length": settings.get("maxLineLength"), + "select": settings.get("select"), + "aggressive": settings.get("aggressive"), } # Filter out null options diff --git a/pylsp/plugins/definition.py b/pylsp/plugins/definition.py index 98265fdb..1ddc03a0 100644 --- a/pylsp/plugins/definition.py +++ b/pylsp/plugins/definition.py @@ -1,37 +1,84 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. +from __future__ import annotations import logging -from pylsp import hookimpl, uris, _utils +from typing import TYPE_CHECKING, Any + +import jedi + +from pylsp import _utils, hookimpl, uris + +if TYPE_CHECKING: + from jedi.api import Script + from jedi.api.classes import Name + + from pylsp.config.config import Config + from pylsp.workspace import Document log = logging.getLogger(__name__) +MAX_JEDI_GOTO_HOPS = 100 + + +def _resolve_definition( + maybe_defn: Name, script: Script, settings: dict[str, Any] +) -> Name: + for _ in range(MAX_JEDI_GOTO_HOPS): + if maybe_defn.is_definition() or maybe_defn.module_path != script.path: + break + defns = script.goto( + follow_imports=settings.get("follow_imports", True), + follow_builtin_imports=settings.get("follow_builtin_imports", True), + line=maybe_defn.line, + column=maybe_defn.column, + ) + if len(defns) == 1: + maybe_defn = defns[0] + else: + break + return maybe_defn + + @hookimpl -def pylsp_definitions(config, document, position): - settings = config.plugin_settings('jedi_definition') +def pylsp_definitions( + config: Config, document: Document, position: dict[str, int] +) -> list[dict[str, Any]]: + settings = config.plugin_settings("jedi_definition") code_position = _utils.position_to_jedi_linecolumn(document, position) - definitions = document.jedi_script(use_document_path=True).goto( - follow_imports=settings.get('follow_imports', True), - follow_builtin_imports=settings.get('follow_builtin_imports', 
True), - **code_position) + script = document.jedi_script(use_document_path=True) + auto_import_modules = jedi.settings.auto_import_modules + + try: + jedi.settings.auto_import_modules = [] + definitions = script.goto( + follow_imports=settings.get("follow_imports", True), + follow_builtin_imports=settings.get("follow_builtin_imports", True), + **code_position, + ) + definitions = [_resolve_definition(d, script, settings) for d in definitions] + finally: + jedi.settings.auto_import_modules = auto_import_modules + follow_builtin_defns = settings.get("follow_builtin_definitions", True) return [ { - 'uri': uris.uri_with(document.uri, path=str(d.module_path)), - 'range': { - 'start': {'line': d.line - 1, 'character': d.column}, - 'end': {'line': d.line - 1, 'character': d.column + len(d.name)}, - } + "uri": uris.uri_with(document.uri, path=str(d.module_path)), + "range": { + "start": {"line": d.line - 1, "character": d.column}, + "end": {"line": d.line - 1, "character": d.column + len(d.name)}, + }, } - for d in definitions if d.is_definition() and _not_internal_definition(d) + for d in definitions + if d.is_definition() and (follow_builtin_defns or _not_internal_definition(d)) ] -def _not_internal_definition(definition): +def _not_internal_definition(definition: Name) -> bool: return ( - definition.line is not None and - definition.column is not None and - definition.module_path is not None and - not definition.in_builtin_module() + definition.line is not None + and definition.column is not None + and definition.module_path is not None + and not definition.in_builtin_module() ) diff --git a/pylsp/plugins/flake8_lint.py b/pylsp/plugins/flake8_lint.py index 3707222f..b0a71b88 100644 --- a/pylsp/plugins/flake8_lint.py +++ b/pylsp/plugins/flake8_lint.py @@ -2,6 +2,7 @@ # Copyright 2021- Python Language Server Contributors. 
"""Linter pluging for flake8""" + import logging import os.path import re @@ -9,67 +10,123 @@ from pathlib import PurePath from subprocess import PIPE, Popen +from flake8.plugins.pyflakes import FLAKE8_PYFLAKES_CODES + from pylsp import hookimpl, lsp +from pylsp.plugins.pyflakes_lint import PYFLAKES_ERROR_MESSAGES log = logging.getLogger(__name__) -FIX_IGNORES_RE = re.compile(r'([^a-zA-Z0-9_,]*;.*(\W+||$))') + +FIX_IGNORES_RE = re.compile(r"([^a-zA-Z0-9_,]*;.*(\W+||$))") +UNNECESSITY_CODES = { + "F401", # `module` imported but unused + "F504", # % format unused named arguments + "F522", # .format(...) unused named arguments + "F523", # .format(...) unused positional arguments + "F841", # local variable `name` is assigned to but never used +} +# NOTE: If the user sets the flake8 executable with workspace configuration, the +# error codes in this set may be inaccurate. +ERROR_CODES = ( + # Errors from the pyflakes plugin of flake8 + {FLAKE8_PYFLAKES_CODES.get(m.__name__, "E999") for m in PYFLAKES_ERROR_MESSAGES} + # Syntax error from flake8 itself + | {"E999"} +) + +if sys.platform == "win32": + from subprocess import CREATE_NO_WINDOW +else: + # CREATE_NO_WINDOW flag only available on Windows. 
+ # Set constant as default `Popen` `creationflag` kwarg value (`0`) + CREATE_NO_WINDOW = 0 @hookimpl def pylsp_settings(): # Default flake8 to disabled - return {'plugins': {'flake8': {'enabled': False}}} + return {"plugins": {"flake8": {"enabled": False}}} @hookimpl def pylsp_lint(workspace, document): - config = workspace._config - settings = config.plugin_settings('flake8', document_path=document.path) - log.debug("Got flake8 settings: %s", settings) - - ignores = settings.get("ignore", []) - per_file_ignores = settings.get("perFileIgnores") - - if per_file_ignores: - for path in per_file_ignores: - file_pat, errors = path.split(":") - if PurePath(document.path).match(file_pat): - ignores.extend(errors.split(",")) - - opts = { - 'config': settings.get('config'), - 'exclude': settings.get('exclude'), - 'filename': settings.get('filename'), - 'hang-closing': settings.get('hangClosing'), - 'ignore': ignores or None, - 'max-line-length': settings.get('maxLineLength'), - 'indent-size': settings.get('indentSize'), - 'select': settings.get('select'), - } - - # flake takes only absolute path to the config. 
So we should check and - # convert if necessary - if opts.get('config') and not os.path.isabs(opts.get('config')): - opts['config'] = os.path.abspath(os.path.expanduser(os.path.expandvars( - opts.get('config') - ))) - log.debug("using flake8 with config: %s", opts['config']) - - # Call the flake8 utility then parse diagnostics from stdout - flake8_executable = settings.get('executable', 'flake8') - - args = build_args(opts) - output = run_flake8(flake8_executable, args, document) - return parse_stdout(document, output) - - -def run_flake8(flake8_executable, args, document): + with workspace.report_progress("lint: flake8"): + config = workspace._config + settings = config.plugin_settings("flake8", document_path=document.path) + log.debug("Got flake8 settings: %s", settings) + + ignores = settings.get("ignore", []) + per_file_ignores = settings.get("perFileIgnores") + + if per_file_ignores: + prev_file_pat = None + for path in per_file_ignores: + try: + file_pat, errors = path.split(":") + prev_file_pat = file_pat + except ValueError: + # It's legal to just specify another error type for the same + # file pattern: + if prev_file_pat is None: + log.warning("skipping a Per-file-ignore with no file pattern") + continue + file_pat = prev_file_pat + errors = path + if PurePath(document.path).match(file_pat): + ignores.extend(errors.split(",")) + + opts = { + "config": settings.get("config"), + "exclude": settings.get("exclude"), + "extend-ignore": settings.get("extendIgnore"), + "extend-select": settings.get("extendSelect"), + "filename": settings.get("filename"), + "hang-closing": settings.get("hangClosing"), + "ignore": ignores or None, + "max-complexity": settings.get("maxComplexity"), + "max-line-length": settings.get("maxLineLength"), + "indent-size": settings.get("indentSize"), + "select": settings.get("select"), + } + + # flake takes only absolute path to the config. 
So we should check and + # convert if necessary + if opts.get("config") and not os.path.isabs(opts.get("config")): + opts["config"] = os.path.abspath( + os.path.expanduser(os.path.expandvars(opts.get("config"))) + ) + log.debug("using flake8 with config: %s", opts["config"]) + + # Call the flake8 utility then parse diagnostics from stdout + flake8_executable = settings.get("executable", "flake8") + + args = build_args(opts) + + # ensure the same source is used for flake8 execution and result parsing; + # single source access improves performance as it is only one disk access + source = document.source + output = run_flake8(flake8_executable, args, document, source) + return parse_stdout(source, output) + + +def run_flake8(flake8_executable, args, document, source): """Run flake8 with the provided arguments, logs errors from stderr if any. """ # a quick temporary fix to deal with Atom - args = [(i if not i.startswith('--ignore=') else FIX_IGNORES_RE.sub('', i)) - for i in args if i is not None] + args = [ + (i if not i.startswith("--ignore=") else FIX_IGNORES_RE.sub("", i)) + for i in args + if i is not None + ] + + if document.path and document.path.startswith(document._workspace.root_path): + args.extend( + [ + "--stdin-display-name", + os.path.relpath(document.path, document._workspace.root_path), + ] + ) # if executable looks like a path resolve it if not os.path.isfile(flake8_executable) and os.sep in flake8_executable: @@ -78,16 +135,23 @@ def run_flake8(flake8_executable, args, document): ) log.debug("Calling %s with args: '%s'", flake8_executable, args) + popen_kwargs = {"creationflags": CREATE_NO_WINDOW} + if cwd := document._workspace.root_path: + popen_kwargs["cwd"] = cwd try: cmd = [flake8_executable] cmd.extend(args) - p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) - except IOError: - log.debug("Can't execute %s. 
Trying with '%s -m flake8'", flake8_executable, sys.executable) - cmd = [sys.executable, '-m', 'flake8'] + p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, **popen_kwargs) + except OSError: + log.debug( + "Can't execute %s. Trying with '%s -m flake8'", + flake8_executable, + sys.executable, + ) + cmd = [sys.executable, "-m", "flake8"] cmd.extend(args) - p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) # pylint: disable=consider-using-with - (stdout, stderr) = p.communicate(document.source.encode()) + p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, **popen_kwargs) + (stdout, stderr) = p.communicate(source.encode()) if stderr: log.error("Error while running flake8 '%s'", stderr.decode()) return stdout.decode() @@ -99,23 +163,23 @@ def build_args(options): Args: options: dictionary of argument names and their values. """ - args = ['-'] # use stdin + args = ["-"] # use stdin for arg_name, arg_val in options.items(): if arg_val is None: continue arg = None if isinstance(arg_val, list): - arg = '--{}={}'.format(arg_name, ','.join(arg_val)) + arg = "--{}={}".format(arg_name, ",".join(arg_val)) elif isinstance(arg_val, bool): if arg_val: - arg = '--{}'.format(arg_name) + arg = f"--{arg_name}" else: - arg = '--{}={}'.format(arg_name, arg_val) + arg = f"--{arg_name}={arg_val}" args.append(arg) return args -def parse_stdout(document, stdout): +def parse_stdout(source, stdout): """ Build a diagnostics from flake8's output, it should extract every result and format it into a dict that looks like this: @@ -143,10 +207,11 @@ def parse_stdout(document, stdout): A list of dictionaries. 
""" + document_lines = source.splitlines(True) diagnostics = [] lines = stdout.splitlines() for raw_line in lines: - parsed_line = re.match(r'(.*):(\d*):(\d*): (\w*) (.*)', raw_line) + parsed_line = re.match(r"(.*):(\d*):(\d*): (\w*) (.*)", raw_line) if not parsed_line: log.debug("Flake8 output parser can't parse line '%s'", raw_line) continue @@ -160,25 +225,26 @@ def parse_stdout(document, stdout): line = int(line) - 1 character = int(character) - 1 # show also the code in message - msg = code + ' ' + msg - diagnostics.append( - { - 'source': 'flake8', - 'code': code, - 'range': { - 'start': { - 'line': line, - 'character': character - }, - 'end': { - 'line': line, - # no way to determine the column - 'character': len(document.lines[line]) - } + msg = code + " " + msg + severity = lsp.DiagnosticSeverity.Warning + if code in ERROR_CODES: + severity = lsp.DiagnosticSeverity.Error + diagnostic = { + "source": "flake8", + "code": code, + "range": { + "start": {"line": line, "character": character}, + "end": { + "line": line, + # no way to determine the column + "character": len(document_lines[line]), }, - 'message': msg, - 'severity': lsp.DiagnosticSeverity.Warning, - } - ) + }, + "message": msg, + "severity": severity, + } + if code in UNNECESSITY_CODES: + diagnostic["tags"] = [lsp.DiagnosticTag.Unnecessary] + diagnostics.append(diagnostic) return diagnostics diff --git a/pylsp/plugins/folding.py b/pylsp/plugins/folding.py index 91469d3d..123ba4a8 100644 --- a/pylsp/plugins/folding.py +++ b/pylsp/plugins/folding.py @@ -9,26 +9,28 @@ from pylsp import hookimpl SKIP_NODES = (tree_nodes.Module, tree_nodes.IfStmt, tree_nodes.TryStmt) -IDENTATION_REGEX = re.compile(r'(\s+).+') +IDENTATION_REGEX = re.compile(r"(\s+).+") @hookimpl def pylsp_folding_range(document): - program = document.source + '\n' + program = document.source + "\n" lines = program.splitlines() tree = parso.parse(program) ranges = __compute_folding_ranges(tree, lines) results = [] - for (start_line, 
end_line) in ranges: + for start_line, end_line in ranges: start_line -= 1 end_line -= 1 # If start/end character is not defined, then it defaults to the # corresponding line last character - results.append({ - 'startLine': start_line, - 'endLine': end_line, - }) + results.append( + { + "startLine": start_line, + "endLine": end_line, + } + ) return results @@ -41,8 +43,9 @@ def __merge_folding_ranges(left, right): return left -def __empty_identation_stack(identation_stack, level_limits, - current_line, folding_ranges): +def __empty_identation_stack( + identation_stack, level_limits, current_line, folding_ranges +): while identation_stack != []: upper_level = identation_stack.pop(0) level_start = level_limits.pop(upper_level) @@ -50,8 +53,9 @@ def __empty_identation_stack(identation_stack, level_limits, return folding_ranges -def __match_identation_stack(identation_stack, level, level_limits, - folding_ranges, current_line): +def __match_identation_stack( + identation_stack, level, level_limits, folding_ranges, current_line +): upper_level = identation_stack.pop(0) while upper_level >= level: level_start = level_limits.pop(upper_level) @@ -68,7 +72,7 @@ def __compute_folding_ranges_identation(text): level_limits = {} current_level = 0 current_line = 0 - while lines[current_line] == '': + while lines[current_line] == "": current_line += 1 for i, line in enumerate(lines): if i < current_line: @@ -84,17 +88,19 @@ def __compute_folding_ranges_identation(text): current_level = level elif level < current_level: identation_stack, folding_ranges = __match_identation_stack( - identation_stack, level, level_limits, folding_ranges, - current_line) + identation_stack, level, level_limits, folding_ranges, current_line + ) current_level = level else: folding_ranges = __empty_identation_stack( - identation_stack, level_limits, current_line, folding_ranges) + identation_stack, level_limits, current_line, folding_ranges + ) current_level = 0 - if line.strip() != '': + if 
line.strip() != "": current_line = i folding_ranges = __empty_identation_stack( - identation_stack, level_limits, current_line, folding_ranges) + identation_stack, level_limits, current_line, folding_ranges + ) return dict(folding_ranges) @@ -102,9 +108,13 @@ def __check_if_node_is_valid(node): valid = True if isinstance(node, tree_nodes.PythonNode): kind = node.type - valid = kind not in {'decorated', 'parameters', 'dictorsetmaker', - 'testlist_comp'} - if kind == 'suite': + valid = kind not in { + "decorated", + "parameters", + "dictorsetmaker", + "testlist_comp", + } + if kind == "suite": if isinstance(node.parent, tree_nodes.Function): valid = False return valid @@ -113,9 +123,9 @@ def __check_if_node_is_valid(node): def __handle_skip(stack, skip): body = stack[skip] children = [body] - if hasattr(body, 'children'): + if hasattr(body, "children"): children = body.children - stack = stack[:skip] + children + stack[skip + 1:] + stack = stack[:skip] + children + stack[skip + 1 :] node = body end_line, _ = body.end_pos return node, end_line @@ -125,17 +135,17 @@ def __handle_flow_nodes(node, end_line, stack): from_keyword = False if isinstance(node, tree_nodes.Keyword): from_keyword = True - if node.value in {'if', 'elif', 'with', 'while'}: + if node.value in {"if", "elif", "with", "while"}: node, end_line = __handle_skip(stack, 2) - elif node.value in {'except'}: + elif node.value in {"except"}: first_node = stack[0] if isinstance(first_node, tree_nodes.Operator): node, end_line = __handle_skip(stack, 1) else: node, end_line = __handle_skip(stack, 2) - elif node.value in {'for'}: + elif node.value in {"for"}: node, end_line = __handle_skip(stack, 4) - elif node.value in {'else'}: + elif node.value in {"else"}: node, end_line = __handle_skip(stack, 1) return end_line, from_keyword, node, stack @@ -144,8 +154,7 @@ def __compute_start_end_lines(node, stack): start_line, _ = node.start_pos end_line, _ = node.end_pos modified = False - end_line, from_keyword, node, 
stack = __handle_flow_nodes( - node, end_line, stack) + end_line, from_keyword, node, stack = __handle_flow_nodes(node, end_line, stack) last_leaf = node.get_last_leaf() last_newline = isinstance(last_leaf, tree_nodes.Newline) @@ -157,7 +166,7 @@ def __compute_start_end_lines(node, stack): if isinstance(node.parent, tree_nodes.PythonNode) and not from_keyword: kind = node.type - if kind in {'suite', 'atom', 'atom_expr', 'arglist'}: + if kind in {"suite", "atom", "atom_expr", "arglist"}: if len(stack) > 0: next_node = stack[0] next_line, _ = next_node.start_pos @@ -182,21 +191,19 @@ def __compute_folding_ranges(tree, lines): # Fallback to indentation-based (best-effort) folding start_line, _ = node.start_pos start_line -= 1 - padding = [''] * start_line - text = '\n'.join(padding + lines[start_line:]) + '\n' + padding = [""] * start_line + text = "\n".join(padding + lines[start_line:]) + "\n" identation_ranges = __compute_folding_ranges_identation(text) - folding_ranges = __merge_folding_ranges( - folding_ranges, identation_ranges) + folding_ranges = __merge_folding_ranges(folding_ranges, identation_ranges) break if not isinstance(node, SKIP_NODES): valid = __check_if_node_is_valid(node) if valid: - start_line, end_line, stack = __compute_start_end_lines( - node, stack) + start_line, end_line, stack = __compute_start_end_lines(node, stack) if end_line > start_line: current_end = folding_ranges.get(start_line, -1) folding_ranges[start_line] = max(current_end, end_line) - if hasattr(node, 'children'): + if hasattr(node, "children"): stack = node.children + stack folding_ranges = sorted(folding_ranges.items()) diff --git a/pylsp/plugins/highlight.py b/pylsp/plugins/highlight.py index a67979ac..c4c12406 100644 --- a/pylsp/plugins/highlight.py +++ b/pylsp/plugins/highlight.py @@ -2,7 +2,8 @@ # Copyright 2021- Python Language Server Contributors. 
import logging -from pylsp import hookimpl, lsp, _utils + +from pylsp import _utils, hookimpl, lsp log = logging.getLogger(__name__) @@ -16,12 +17,20 @@ def is_valid(definition): return definition.line is not None and definition.column is not None def local_to_document(definition): - return not definition.module_path or str(definition.module_path) == document.path - - return [{ - 'range': { - 'start': {'line': d.line - 1, 'character': d.column}, - 'end': {'line': d.line - 1, 'character': d.column + len(d.name)} - }, - 'kind': lsp.DocumentHighlightKind.Write if d.is_definition() else lsp.DocumentHighlightKind.Read - } for d in usages if is_valid(d) and local_to_document(d)] + return ( + not definition.module_path or str(definition.module_path) == document.path + ) + + return [ + { + "range": { + "start": {"line": d.line - 1, "character": d.column}, + "end": {"line": d.line - 1, "character": d.column + len(d.name)}, + }, + "kind": lsp.DocumentHighlightKind.Write + if d.is_definition() + else lsp.DocumentHighlightKind.Read, + } + for d in usages + if is_valid(d) and local_to_document(d) + ] diff --git a/pylsp/plugins/hover.py b/pylsp/plugins/hover.py index a4d45d1c..a23d9036 100644 --- a/pylsp/plugins/hover.py +++ b/pylsp/plugins/hover.py @@ -3,13 +3,14 @@ import logging -from pylsp import hookimpl, _utils +from pylsp import _utils, hookimpl log = logging.getLogger(__name__) @hookimpl -def pylsp_hover(document, position): +def pylsp_hover(config, document, position): + signature_config = config.settings().get("signature", {}) code_position = _utils.position_to_jedi_linecolumn(document, position) definitions = document.jedi_script(use_document_path=True).infer(**code_position) word = document.word_at_position(position) @@ -24,26 +25,37 @@ def pylsp_hover(document, position): definition = definitions[0] if not definition: - return {'contents': ''} + return {"contents": ""} - # raw docstring returns only doc, without signature - doc = 
_utils.format_docstring(definition.docstring(raw=True)) + hover_capabilities = config.capabilities.get("textDocument", {}).get("hover", {}) + supported_markup_kinds = hover_capabilities.get("contentFormat", ["markdown"]) + preferred_markup_kind = _utils.choose_markup_kind(supported_markup_kinds) # Find first exact matching signature - signature = next((x.to_string() for x in definition.get_signatures() - if x.name == word), '') - - contents = [] - if signature: - contents.append({ - 'language': 'python', - 'value': signature, - }) + signature = next( + ( + x.to_string() + for x in definition.get_signatures() + if (x.name == word and x.type not in ["module"]) + ), + "", + ) - if doc: - contents.append(doc) + include_docstring = signature_config.get("include_docstring", True) - if not contents: - return {'contents': ''} - - return {'contents': contents} + # raw docstring returns only doc, without signature + docstring = definition.docstring(raw=True) + if not include_docstring: + if signature: + docstring = "" + else: + docstring = docstring.strip().split("\n")[0].strip() + + return { + "contents": _utils.format_docstring( + docstring, + preferred_markup_kind, + signatures=[signature] if signature else None, + signature_config=signature_config, + ) + } diff --git a/pylsp/plugins/jedi_completion.py b/pylsp/plugins/jedi_completion.py index a98f3d27..51c3589c 100644 --- a/pylsp/plugins/jedi_completion.py +++ b/pylsp/plugins/jedi_completion.py @@ -2,7 +2,7 @@ # Copyright 2021- Python Language Server Contributors. import logging -import os.path as osp +import os import parso @@ -16,60 +16,80 @@ # > ``param``, ``path``, ``keyword``, ``property`` and ``statement``. 
# see: https://jedi.readthedocs.io/en/latest/docs/api-classes.html#jedi.api.classes.BaseName.type _TYPE_MAP = { - 'module': lsp.CompletionItemKind.Module, - 'namespace': lsp.CompletionItemKind.Module, # to be added in Jedi 0.18+ - 'class': lsp.CompletionItemKind.Class, - 'instance': lsp.CompletionItemKind.Reference, - 'function': lsp.CompletionItemKind.Function, - 'param': lsp.CompletionItemKind.Variable, - 'path': lsp.CompletionItemKind.File, - 'keyword': lsp.CompletionItemKind.Keyword, - 'property': lsp.CompletionItemKind.Property, # added in Jedi 0.18 - 'statement': lsp.CompletionItemKind.Variable + "module": lsp.CompletionItemKind.Module, + "namespace": lsp.CompletionItemKind.Module, # to be added in Jedi 0.18+ + "class": lsp.CompletionItemKind.Class, + "instance": lsp.CompletionItemKind.Reference, + "function": lsp.CompletionItemKind.Function, + "param": lsp.CompletionItemKind.Variable, + "path": lsp.CompletionItemKind.File, + "keyword": lsp.CompletionItemKind.Keyword, + "property": lsp.CompletionItemKind.Property, # added in Jedi 0.18 + "statement": lsp.CompletionItemKind.Variable, } # Types of parso nodes for which snippet is not included in the completion -_IMPORTS = ('import_name', 'import_from') +_IMPORTS = ("import_name", "import_from") # Types of parso node for errors -_ERRORS = ('error_node', ) +_ERRORS = ("error_node",) @hookimpl def pylsp_completions(config, document, position): """Get formatted completions for current code position""" - # pylint: disable=too-many-locals - settings = config.plugin_settings('jedi_completion', document_path=document.path) - resolve_eagerly = settings.get('eager', False) - code_position = _utils.position_to_jedi_linecolumn(document, position) + settings = config.plugin_settings("jedi_completion", document_path=document.path) + resolve_eagerly = settings.get("eager", False) + signature_config = config.settings().get("signature", {}) - code_position['fuzzy'] = settings.get('fuzzy', False) + code_position = 
_utils.position_to_jedi_linecolumn(document, position) + code_position["fuzzy"] = settings.get("fuzzy", False) completions = document.jedi_script(use_document_path=True).complete(**code_position) if not completions: return None - completion_capabilities = config.capabilities.get('textDocument', {}).get('completion', {}) - snippet_support = completion_capabilities.get('completionItem', {}).get('snippetSupport') + completion_capabilities = config.capabilities.get("textDocument", {}).get( + "completion", {} + ) + item_capabilities = completion_capabilities.get("completionItem", {}) + snippet_support = item_capabilities.get("snippetSupport") + supported_markup_kinds = item_capabilities.get("documentationFormat", ["markdown"]) + preferred_markup_kind = _utils.choose_markup_kind(supported_markup_kinds) - should_include_params = settings.get('include_params') - should_include_class_objects = settings.get('include_class_objects', True) + should_include_params = settings.get("include_params") + should_include_class_objects = settings.get("include_class_objects", False) + should_include_function_objects = settings.get("include_function_objects", False) - max_to_resolve = settings.get('resolve_at_most', 25) - modules_to_cache_for = settings.get('cache_for', None) + max_to_resolve = settings.get("resolve_at_most", 25) + modules_to_cache_for = settings.get("cache_for", None) if modules_to_cache_for is not None: LABEL_RESOLVER.cached_modules = modules_to_cache_for SNIPPET_RESOLVER.cached_modules = modules_to_cache_for - include_params = snippet_support and should_include_params and use_snippets(document, position) - include_class_objects = snippet_support and should_include_class_objects and use_snippets(document, position) + include_params = ( + snippet_support and should_include_params and use_snippets(document, position) + ) + include_class_objects = ( + snippet_support + and should_include_class_objects + and use_snippets(document, position) + ) + include_function_objects = 
( + snippet_support + and should_include_function_objects + and use_snippets(document, position) + ) ready_completions = [ _format_completion( c, - include_params, + markup_kind=preferred_markup_kind, + include_params=include_params if c.type in ["class", "function"] else False, resolve=resolve_eagerly, - resolve_label_or_snippet=(i < max_to_resolve) + resolve_label_or_snippet=(i < max_to_resolve), + snippet_support=snippet_support, + signature_config=signature_config, ) for i, c in enumerate(completions) ] @@ -77,26 +97,43 @@ def pylsp_completions(config, document, position): # TODO split up once other improvements are merged if include_class_objects: for i, c in enumerate(completions): - if c.type == 'class': + if c.type == "class": + completion_dict = _format_completion( + c, + markup_kind=preferred_markup_kind, + include_params=False, + resolve=resolve_eagerly, + resolve_label_or_snippet=(i < max_to_resolve), + snippet_support=snippet_support, + signature_config=signature_config, + ) + completion_dict["kind"] = lsp.CompletionItemKind.TypeParameter + completion_dict["label"] += " object" + ready_completions.append(completion_dict) + + if include_function_objects: + for i, c in enumerate(completions): + if c.type == "function": completion_dict = _format_completion( c, - False, + markup_kind=preferred_markup_kind, + include_params=False, resolve=resolve_eagerly, - resolve_label_or_snippet=(i < max_to_resolve) + resolve_label_or_snippet=(i < max_to_resolve), + snippet_support=snippet_support, + signature_config=signature_config, ) - completion_dict['kind'] = lsp.CompletionItemKind.TypeParameter - completion_dict['label'] += ' object' + completion_dict["kind"] = lsp.CompletionItemKind.TypeParameter + completion_dict["label"] += " object" ready_completions.append(completion_dict) for completion_dict in ready_completions: - completion_dict['data'] = { - 'doc_uri': document.uri - } + completion_dict["data"] = {"doc_uri": document.uri} # most recently retrieved 
completion items, used for resolution - document.shared_data['LAST_JEDI_COMPLETIONS'] = { + document.shared_data["LAST_JEDI_COMPLETIONS"] = { # label is the only required property; here it is assumed to be unique - completion['label']: (completion, data) + completion["label"]: (completion, data) for completion, data in zip(ready_completions, completions) } @@ -104,12 +141,31 @@ def pylsp_completions(config, document, position): @hookimpl -def pylsp_completion_item_resolve(completion_item, document): +def pylsp_completion_item_resolve( + config, + completion_item, + document, +): """Resolve formatted completion for given non-resolved completion""" - shared_data = document.shared_data['LAST_JEDI_COMPLETIONS'].get(completion_item['label']) + shared_data = document.shared_data["LAST_JEDI_COMPLETIONS"].get( + completion_item["label"] + ) + + completion_capabilities = config.capabilities.get("textDocument", {}).get( + "completion", {} + ) + item_capabilities = completion_capabilities.get("completionItem", {}) + supported_markup_kinds = item_capabilities.get("documentationFormat", ["markdown"]) + preferred_markup_kind = _utils.choose_markup_kind(supported_markup_kinds) + if shared_data: completion, data = shared_data - return _resolve_completion(completion, data) + return _resolve_completion( + completion, + data, + markup_kind=preferred_markup_kind, + signature_config=config.settings().get("signature", {}), + ) return completion_item @@ -135,61 +191,90 @@ def use_snippets(document, position): This returns `False` if a completion is being requested on an import statement, `True` otherwise. 
""" - line = position['line'] - lines = document.source.split('\n', line) - act_lines = [lines[line][:position['character']]] + line = position["line"] + lines = document.source.split("\n", line) + act_lines = [lines[line][: position["character"]]] line -= 1 - last_character = '' + last_character = "" while line > -1: act_line = lines[line] - if (act_line.rstrip().endswith('\\') or - act_line.rstrip().endswith('(') or - act_line.rstrip().endswith(',')): + if ( + act_line.rstrip().endswith("\\") + or act_line.rstrip().endswith("(") + or act_line.rstrip().endswith(",") + ): act_lines.insert(0, act_line) line -= 1 - if act_line.rstrip().endswith('('): + if act_line.rstrip().endswith("("): # Needs to be added to the end of the code before parsing # to make it valid, otherwise the node type could end # being an 'error_node' for multi-line imports that use '(' - last_character = ')' + last_character = ")" else: break - if '(' in act_lines[-1].strip(): - last_character = ')' - code = '\n'.join(act_lines).rsplit(';', maxsplit=1)[-1].strip() + last_character + if "(" in act_lines[-1].strip(): + last_character = ")" + code = "\n".join(act_lines).rsplit(";", maxsplit=1)[-1].strip() + last_character tokens = parso.parse(code) expr_type = tokens.children[0].type - return (expr_type not in _IMPORTS and - not (expr_type in _ERRORS and 'import' in code)) + return expr_type not in _IMPORTS and not (expr_type in _ERRORS and "import" in code) -def _resolve_completion(completion, d): - # pylint: disable=broad-except - completion['detail'] = _detail(d) +def _resolve_completion(completion, d, markup_kind: str, signature_config: dict): + completion["detail"] = _detail(d) try: - docs = _utils.format_docstring(d.docstring()) + docs = _utils.format_docstring( + d.docstring(raw=True), + signatures=[signature.to_string() for signature in d.get_signatures()], + markup_kind=markup_kind, + signature_config=signature_config, + ) except Exception: - docs = '' - completion['documentation'] = docs + 
docs = "" + completion["documentation"] = docs return completion -def _format_completion(d, include_params=True, resolve=False, resolve_label_or_snippet=False): +def _format_completion( + d, + markup_kind: str, + include_params=True, + resolve=False, + resolve_label_or_snippet=False, + snippet_support=False, + signature_config=None, +): completion = { - 'label': _label(d, resolve_label_or_snippet), - 'kind': _TYPE_MAP.get(d.type), - 'sortText': _sort_text(d), - 'insertText': d.name + "label": _label(d, resolve_label_or_snippet), + "kind": _TYPE_MAP.get(d.type), + "sortText": _sort_text(d), + "insertText": d.name, } if resolve: - completion = _resolve_completion(completion, d) + completion = _resolve_completion( + completion, d, markup_kind, signature_config=signature_config + ) + + # Adjustments for file completions + if d.type == "path": + path = os.path.normpath(d.name) + + # If the completion ends with os.sep, it means it's a directory. So we add os.sep at the end + # to ease additional file completions. + if d.name.endswith(os.sep): + if os.name == "nt": + path = path + "\\" + else: + path = path + "/" + + # Escape to prevent conflicts with the code snippets grammer + # See also https://github.com/python-lsp/python-lsp-server/issues/373 + if snippet_support: + path = path.replace("\\", "\\\\") + path = path.replace("/", "\\/") - if d.type == 'path': - path = osp.normpath(d.name) - path = path.replace('\\', '\\\\') - path = path.replace('/', '\\/') - completion['insertText'] = path + completion["insertText"] = path if include_params and not is_exception_class(d.name): snippet = _snippet(d, resolve_label_or_snippet) @@ -216,16 +301,16 @@ def _snippet(definition, resolve=False): def _detail(definition): try: - return definition.parent().full_name or '' + return definition.parent().full_name or "" except AttributeError: - return definition.full_name or '' + return definition.full_name or "" def _sort_text(definition): - """ Ensure builtins appear at the bottom. 
+ """Ensure builtins appear at the bottom. Description is of format : . """ # If its 'hidden', put it next last - prefix = 'z{}' if definition.name.startswith('_') else 'a{}' + prefix = "z{}" if definition.name.startswith("_") else "a{}" return prefix.format(definition.name) diff --git a/pylsp/plugins/jedi_rename.py b/pylsp/plugins/jedi_rename.py index c1edc75f..b35e321a 100644 --- a/pylsp/plugins/jedi_rename.py +++ b/pylsp/plugins/jedi_rename.py @@ -3,47 +3,54 @@ import logging -from pylsp import hookimpl, uris, _utils +from pylsp import _utils, hookimpl, uris log = logging.getLogger(__name__) @hookimpl -def pylsp_rename(config, workspace, document, position, new_name): # pylint: disable=unused-argument - log.debug('Executing rename of %s to %s', document.word_at_position(position), new_name) +def pylsp_rename(config, workspace, document, position, new_name): + log.debug( + "Executing rename of %s to %s", document.word_at_position(position), new_name + ) kwargs = _utils.position_to_jedi_linecolumn(document, position) - kwargs['new_name'] = new_name + kwargs["new_name"] = new_name try: refactoring = document.jedi_script().rename(**kwargs) except NotImplementedError as exc: - raise Exception('No support for renaming in Python 2/3.5 with Jedi. ' - 'Consider using the rope_rename plugin instead') from exc - log.debug('Finished rename: %s', refactoring.get_diff()) + raise Exception( + "No support for renaming in Python 2/3.5 with Jedi. 
" + "Consider using the pylsp-rope plugin instead" + ) from exc + log.debug("Finished rename: %s", refactoring.get_diff()) changes = [] - for file_path, changed_file in refactoring.get_changed_files().items(): + + changed_files = refactoring.get_changed_files() + for file_path, changed_file in changed_files.items(): uri = uris.from_fs_path(str(file_path)) doc = workspace.get_maybe_document(uri) - changes.append({ - 'textDocument': { - 'uri': uri, - 'version': doc.version if doc else None - }, - 'edits': [ - { - 'range': { - 'start': {'line': 0, 'character': 0}, - 'end': { - 'line': _num_lines(changed_file.get_new_code()), - 'character': 0, + changes.append( + { + "textDocument": {"uri": uri, "version": doc.version if doc else None}, + "edits": [ + { + "range": { + "start": {"line": 0, "character": 0}, + "end": { + "line": _num_lines(changed_file.get_new_code()), + "character": 0, + }, }, - }, - 'newText': changed_file.get_new_code(), - } - ], - }) - return {'documentChanges': changes} + "newText": changed_file.get_new_code(), + } + ], + } + ) + return {"documentChanges": changes} def _num_lines(file_contents): - 'Count the number of lines in the given string.' - return len(file_contents.splitlines()) + "Count the number of lines in the given string." 
+ if _utils.get_eol_chars(file_contents): + return len(file_contents.splitlines()) + return 0 diff --git a/pylsp/plugins/mccabe_lint.py b/pylsp/plugins/mccabe_lint.py index 77ff3a05..0e2cba2e 100644 --- a/pylsp/plugins/mccabe_lint.py +++ b/pylsp/plugins/mccabe_lint.py @@ -3,40 +3,54 @@ import ast import logging + import mccabe + from pylsp import hookimpl, lsp log = logging.getLogger(__name__) -THRESHOLD = 'threshold' +THRESHOLD = "threshold" DEFAULT_THRESHOLD = 15 @hookimpl -def pylsp_lint(config, document): - threshold = config.plugin_settings('mccabe', document_path=document.path).get(THRESHOLD, DEFAULT_THRESHOLD) - log.debug("Running mccabe lint with threshold: %s", threshold) - - try: - tree = compile(document.source, document.path, "exec", ast.PyCF_ONLY_AST) - except SyntaxError: - # We'll let the other linters point this one out - return None - - visitor = mccabe.PathGraphingAstVisitor() - visitor.preorder(tree, visitor) - - diags = [] - for graph in visitor.graphs.values(): - if graph.complexity() >= threshold: - diags.append({ - 'source': 'mccabe', - 'range': { - 'start': {'line': graph.lineno - 1, 'character': graph.column}, - 'end': {'line': graph.lineno - 1, 'character': len(document.lines[graph.lineno])}, - }, - 'message': 'Cyclomatic complexity too high: %s (threshold %s)' % (graph.complexity(), threshold), - 'severity': lsp.DiagnosticSeverity.Warning - }) - - return diags +def pylsp_lint(config, workspace, document): + with workspace.report_progress("lint: mccabe"): + threshold = config.plugin_settings("mccabe", document_path=document.path).get( + THRESHOLD, DEFAULT_THRESHOLD + ) + log.debug("Running mccabe lint with threshold: %s", threshold) + + try: + tree = compile(document.source, document.path, "exec", ast.PyCF_ONLY_AST) + except SyntaxError: + # We'll let the other linters point this one out + return None + + visitor = mccabe.PathGraphingAstVisitor() + visitor.preorder(tree, visitor) + + diags = [] + for graph in visitor.graphs.values(): + if 
graph.complexity() >= threshold: + diags.append( + { + "source": "mccabe", + "range": { + "start": { + "line": graph.lineno - 1, + "character": graph.column, + }, + "end": { + "line": graph.lineno - 1, + "character": len(document.lines[graph.lineno]), + }, + }, + "message": "Cyclomatic complexity too high: %s (threshold %s)" + % (graph.complexity(), threshold), + "severity": lsp.DiagnosticSeverity.Warning, + } + ) + + return diags diff --git a/pylsp/plugins/preload_imports.py b/pylsp/plugins/preload_imports.py index b3994606..ebcd9adb 100644 --- a/pylsp/plugins/preload_imports.py +++ b/pylsp/plugins/preload_imports.py @@ -2,18 +2,58 @@ # Copyright 2021- Python Language Server Contributors. import logging + from pylsp import hookimpl log = logging.getLogger(__name__) MODULES = [ - "OpenGL", "PIL", - "array", "audioop", "binascii", "cPickle", "cStringIO", "cmath", "collections", - "datetime", "errno", "exceptions", "gc", "imageop", "imp", "itertools", - "marshal", "math", "matplotlib", "mmap", "mpmath", "msvcrt", "networkx", "nose", "nt", - "numpy", "operator", "os", "os.path", "pandas", "parser", "rgbimg", "scipy", "signal", - "skimage", "sklearn", "statsmodels", "strop", "sympy", "sys", "thread", "time", - "wx", "xxsubtype", "zipimport", "zlib" + "OpenGL", + "PIL", + "array", + "audioop", + "binascii", + "cPickle", + "cStringIO", + "cmath", + "collections", + "datetime", + "errno", + "exceptions", + "gc", + "imageop", + "imp", + "itertools", + "marshal", + "math", + "matplotlib", + "mmap", + "mpmath", + "msvcrt", + "networkx", + "nose", + "nt", + "numpy", + "operator", + "os", + "os.path", + "pandas", + "parser", + "rgbimg", + "scipy", + "signal", + "skimage", + "sklearn", + "statsmodels", + "strop", + "sympy", + "sys", + "thread", + "time", + "wx", + "xxsubtype", + "zipimport", + "zlib", ] @@ -21,18 +61,18 @@ def pylsp_settings(): # Setup default modules to preload, and rope extension modules return { - 'plugins': {'preload': {'modules': MODULES}}, - 'rope': 
{'extensionModules': MODULES} + "plugins": {"preload": {"modules": MODULES}}, + "rope": {"extensionModules": MODULES}, } @hookimpl -def pylsp_initialize(config): - for mod_name in config.plugin_settings('preload').get('modules', []): +def pylsp_initialize(config) -> None: + for mod_name in config.plugin_settings("preload").get("modules", []): try: __import__(mod_name) log.debug("Preloaded module %s", mod_name) - except Exception: # pylint: disable=broad-except + except Exception: # Catch any exception since not only ImportError can be raised here # For example, old versions of NumPy can cause a ValueError. # See spyder-ide/spyder#13985 diff --git a/pylsp/plugins/pycodestyle_lint.py b/pylsp/plugins/pycodestyle_lint.py index 99a6f074..7a514adf 100644 --- a/pylsp/plugins/pycodestyle_lint.py +++ b/pylsp/plugins/pycodestyle_lint.py @@ -2,8 +2,11 @@ # Copyright 2021- Python Language Server Contributors. import logging + import pycodestyle + from pylsp import hookimpl, lsp +from pylsp._utils import get_eol_chars try: from autopep8 import continued_indentation as autopep8_c_i @@ -13,8 +16,8 @@ # Check if autopep8's continued_indentation implementation # is overriding pycodestyle's and if so, re-register # the check using pycodestyle's implementation as expected - if autopep8_c_i in pycodestyle._checks['logical_line']: - del pycodestyle._checks['logical_line'][autopep8_c_i] + if autopep8_c_i in pycodestyle._checks["logical_line"]: + del pycodestyle._checks["logical_line"][autopep8_c_i] pycodestyle.register_check(pycodestyle.continued_indentation) log = logging.getLogger(__name__) @@ -22,35 +25,47 @@ @hookimpl def pylsp_lint(workspace, document): - config = workspace._config - settings = config.plugin_settings('pycodestyle', document_path=document.path) - log.debug("Got pycodestyle settings: %s", settings) - - opts = { - 'exclude': settings.get('exclude'), - 'filename': settings.get('filename'), - 'hang_closing': settings.get('hangClosing'), - 'ignore': 
settings.get('ignore'), - 'max_line_length': settings.get('maxLineLength'), - 'indent_size': settings.get('indentSize'), - 'select': settings.get('select'), - } - kwargs = {k: v for k, v in opts.items() if v} - styleguide = pycodestyle.StyleGuide(kwargs) - - c = pycodestyle.Checker( - filename=document.uri, lines=document.lines, options=styleguide.options, - report=PyCodeStyleDiagnosticReport(styleguide.options) - ) - c.check_all() - diagnostics = c.report.diagnostics - - return diagnostics + with workspace.report_progress("lint: pycodestyle"): + config = workspace._config + settings = config.plugin_settings("pycodestyle", document_path=document.path) + log.debug("Got pycodestyle settings: %s", settings) + + opts = { + "exclude": settings.get("exclude"), + "filename": settings.get("filename"), + "hang_closing": settings.get("hangClosing"), + "ignore": settings.get("ignore"), + "max_line_length": settings.get("maxLineLength"), + "indent_size": settings.get("indentSize"), + "select": settings.get("select"), + } + kwargs = {k: v for k, v in opts.items() if v} + styleguide = pycodestyle.StyleGuide(kwargs) + + # Use LF to lint file because other line endings can give false positives. + # See spyder-ide/spyder#19565 for context. 
+ source = document.source + eol_chars = get_eol_chars(source) + if eol_chars in ["\r", "\r\n"]: + source = source.replace(eol_chars, "\n") + lines = source.splitlines(keepends=True) + else: + lines = document.lines + + c = pycodestyle.Checker( + filename=document.path, + lines=lines, + options=styleguide.options, + report=PyCodeStyleDiagnosticReport(styleguide.options), + ) + c.check_all() + diagnostics = c.report.diagnostics + + return diagnostics class PyCodeStyleDiagnosticReport(pycodestyle.BaseReport): - - def __init__(self, options): + def __init__(self, options) -> None: self.diagnostics = [] super().__init__(options=options) @@ -68,26 +83,31 @@ def error(self, line_number, offset, text, check): # In that case, the end offset should just be some number ~100 # (because why not? There's nothing to underline anyways) err_range = { - 'start': {'line': line_number - 1, 'character': offset}, - 'end': { + "start": {"line": line_number - 1, "character": offset}, + "end": { # FIXME: It's a little naiive to mark until the end of the line, can we not easily do better? - 'line': line_number - 1, - 'character': 100 if line_number > len(self.lines) else len(self.lines[line_number - 1]) + "line": line_number - 1, + "character": 100 + if line_number > len(self.lines) + else len(self.lines[line_number - 1]), }, } - self.diagnostics.append({ - 'source': 'pycodestyle', - 'range': err_range, - 'message': text, - 'code': code, + diagnostic = { + "source": "pycodestyle", + "range": err_range, + "message": text, + "code": code, # Are style errors really ever errors? - 'severity': _get_severity(code) - }) + "severity": _get_severity(code), + } + if code.startswith("W6"): + diagnostic["tags"] = [lsp.DiagnosticTag.Deprecated] + self.diagnostics.append(diagnostic) def _get_severity(code): # Are style errors ever really errors? 
- if code[0] == 'E' or code[0] == 'W': + if code[0] == "E" or code[0] == "W": return lsp.DiagnosticSeverity.Warning # If no severity is specified, why wouldn't this be informational only? return lsp.DiagnosticSeverity.Information diff --git a/pylsp/plugins/pydocstyle_lint.py b/pylsp/plugins/pydocstyle_lint.py index 7a986aa6..a310ac84 100644 --- a/pylsp/plugins/pydocstyle_lint.py +++ b/pylsp/plugins/pydocstyle_lint.py @@ -8,6 +8,7 @@ import sys import pydocstyle + from pylsp import hookimpl, lsp log = logging.getLogger(__name__) @@ -23,64 +24,75 @@ @hookimpl def pylsp_settings(): # Default pydocstyle to disabled - return {'plugins': {'pydocstyle': {'enabled': False}}} + return {"plugins": {"pydocstyle": {"enabled": False}}} @hookimpl -def pylsp_lint(config, document): - settings = config.plugin_settings('pydocstyle', document_path=document.path) - log.debug("Got pydocstyle settings: %s", settings) - - # Explicitly passing a path to pydocstyle means it doesn't respect the --match flag, so do it ourselves - filename_match_re = re.compile(settings.get('match', DEFAULT_MATCH_RE) + '$') - if not filename_match_re.match(os.path.basename(document.path)): - return [] - - # Likewise with --match-dir - dir_match_re = re.compile(settings.get('matchDir', DEFAULT_MATCH_DIR_RE) + '$') - if not dir_match_re.match(os.path.basename(os.path.dirname(document.path))): - return [] - - args = [document.path] - - if settings.get('convention'): - args.append('--convention=' + settings['convention']) - - if settings.get('addSelect'): - args.append('--add-select=' + ','.join(settings['addSelect'])) - if settings.get('addIgnore'): - args.append('--add-ignore=' + ','.join(settings['addIgnore'])) - - elif settings.get('select'): - args.append('--select=' + ','.join(settings['select'])) - elif settings.get('ignore'): - args.append('--ignore=' + ','.join(settings['ignore'])) - - log.info("Using pydocstyle args: %s", args) - - conf = pydocstyle.config.ConfigurationParser() - with 
_patch_sys_argv(args): - # TODO(gatesn): We can add more pydocstyle args here from our pylsp config - conf.parse() - - # Will only yield a single filename, the document path - diags = [] - for filename, checked_codes, ignore_decorators in conf.get_files_to_check(): - errors = pydocstyle.checker.ConventionChecker().check_source( - document.source, filename, ignore_decorators=ignore_decorators - ) - - try: - for error in errors: - if error.code not in checked_codes: - continue - diags.append(_parse_diagnostic(document, error)) - except pydocstyle.parser.ParseError: - # In the case we cannot parse the Python file, just continue - pass - - log.debug("Got pydocstyle errors: %s", diags) - return diags +def pylsp_lint(config, workspace, document): + with workspace.report_progress("lint: pydocstyle"): + settings = config.plugin_settings("pydocstyle", document_path=document.path) + log.debug("Got pydocstyle settings: %s", settings) + + # Explicitly passing a path to pydocstyle means it doesn't respect the --match flag, so do it ourselves + filename_match_re = re.compile(settings.get("match", DEFAULT_MATCH_RE) + "$") + if not filename_match_re.match(os.path.basename(document.path)): + return [] + + # Likewise with --match-dir + dir_match_re = re.compile(settings.get("matchDir", DEFAULT_MATCH_DIR_RE) + "$") + if not dir_match_re.match(os.path.basename(os.path.dirname(document.path))): + return [] + + args = [document.path] + + if settings.get("convention"): + args.append("--convention=" + settings["convention"]) + + if settings.get("addSelect"): + args.append("--add-select=" + ",".join(settings["addSelect"])) + if settings.get("addIgnore"): + args.append("--add-ignore=" + ",".join(settings["addIgnore"])) + + elif settings.get("select"): + args.append("--select=" + ",".join(settings["select"])) + elif settings.get("ignore"): + args.append("--ignore=" + ",".join(settings["ignore"])) + + log.info("Using pydocstyle args: %s", args) + + conf = 
pydocstyle.config.ConfigurationParser() + with _patch_sys_argv(args): + # TODO(gatesn): We can add more pydocstyle args here from our pylsp config + conf.parse() + + # Will only yield a single filename, the document path + diags = [] + for ( + filename, + checked_codes, + ignore_decorators, + property_decorators, + ignore_self_only_init, + ) in conf.get_files_to_check(): + errors = pydocstyle.checker.ConventionChecker().check_source( + document.source, + filename, + ignore_decorators=ignore_decorators, + property_decorators=property_decorators, + ignore_self_only_init=ignore_self_only_init, + ) + + try: + for error in errors: + if error.code not in checked_codes: + continue + diags.append(_parse_diagnostic(document, error)) + except pydocstyle.parser.ParseError: + # In the case we cannot parse the Python file, just continue + pass + + log.debug("Got pydocstyle errors: %s", diags) + return diags def _parse_diagnostic(document, error): @@ -91,25 +103,19 @@ def _parse_diagnostic(document, error): end_character = len(line) return { - 'source': 'pydocstyle', - 'code': error.code, - 'message': error.message, - 'severity': lsp.DiagnosticSeverity.Warning, - 'range': { - 'start': { - 'line': lineno, - 'character': start_character - }, - 'end': { - 'line': lineno, - 'character': end_character - } - } + "source": "pydocstyle", + "code": error.code, + "message": error.message, + "severity": lsp.DiagnosticSeverity.Warning, + "range": { + "start": {"line": lineno, "character": start_character}, + "end": {"line": lineno, "character": end_character}, + }, } @contextlib.contextmanager -def _patch_sys_argv(arguments): +def _patch_sys_argv(arguments) -> None: old_args = sys.argv # Preserve argv[0] since it's the executable diff --git a/pylsp/plugins/pyflakes_lint.py b/pylsp/plugins/pyflakes_lint.py index da0ee66b..8a04276c 100644 --- a/pylsp/plugins/pyflakes_lint.py +++ b/pylsp/plugins/pyflakes_lint.py @@ -1,7 +1,9 @@ # Copyright 2017-2020 Palantir Technologies, Inc. 
# Copyright 2021- Python Language Server Contributors. -from pyflakes import api as pyflakes_api, messages +from pyflakes import api as pyflakes_api +from pyflakes import messages + from pylsp import hookimpl, lsp # Pyflakes messages that should be reported as Errors instead of Warns @@ -15,57 +17,68 @@ messages.YieldOutsideFunction, messages.ContinueOutsideLoop, messages.BreakOutsideLoop, - messages.ContinueInFinally, messages.TwoStarredExpressions, ) @hookimpl -def pylsp_lint(document): - reporter = PyflakesDiagnosticReport(document.lines) - pyflakes_api.check(document.source.encode('utf-8'), document.path, reporter=reporter) - return reporter.diagnostics +def pylsp_lint(workspace, document): + with workspace.report_progress("lint: pyflakes"): + reporter = PyflakesDiagnosticReport(document.lines) + pyflakes_api.check( + document.source.encode("utf-8"), document.path, reporter=reporter + ) + return reporter.diagnostics class PyflakesDiagnosticReport: - - def __init__(self, lines): + def __init__(self, lines) -> None: self.lines = lines self.diagnostics = [] - def unexpectedError(self, _filename, msg): # pragma: no cover + def unexpectedError(self, _filename, msg) -> None: # pragma: no cover err_range = { - 'start': {'line': 0, 'character': 0}, - 'end': {'line': 0, 'character': 0}, + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 0}, } - self.diagnostics.append({ - 'source': 'pyflakes', - 'range': err_range, - 'message': msg, - 'severity': lsp.DiagnosticSeverity.Error, - }) + self.diagnostics.append( + { + "source": "pyflakes", + "range": err_range, + "message": msg, + "severity": lsp.DiagnosticSeverity.Error, + } + ) - def syntaxError(self, _filename, msg, lineno, offset, text): + def syntaxError(self, _filename, msg, lineno, offset, text) -> None: # We've seen that lineno and offset can sometimes be None lineno = lineno or 1 offset = offset or 0 + # could be None if the error is due to an invalid encoding + # see e.g. 
https://github.com/python-lsp/python-lsp-server/issues/429 + text = text or "" err_range = { - 'start': {'line': lineno - 1, 'character': offset}, - 'end': {'line': lineno - 1, 'character': offset + len(text)}, + "start": {"line": lineno - 1, "character": offset}, + "end": {"line": lineno - 1, "character": offset + len(text)}, } - self.diagnostics.append({ - 'source': 'pyflakes', - 'range': err_range, - 'message': msg, - 'severity': lsp.DiagnosticSeverity.Error, - }) + self.diagnostics.append( + { + "source": "pyflakes", + "range": err_range, + "message": msg, + "severity": lsp.DiagnosticSeverity.Error, + } + ) - def flake(self, message): - """ Get message like :: """ + def flake(self, message) -> None: + """Get message like :: """ err_range = { - 'start': {'line': message.lineno - 1, 'character': message.col}, - 'end': {'line': message.lineno - 1, 'character': len(self.lines[message.lineno - 1])}, + "start": {"line": message.lineno - 1, "character": message.col}, + "end": { + "line": message.lineno - 1, + "character": len(self.lines[message.lineno - 1]), + }, } severity = lsp.DiagnosticSeverity.Warning @@ -74,9 +87,11 @@ def flake(self, message): severity = lsp.DiagnosticSeverity.Error break - self.diagnostics.append({ - 'source': 'pyflakes', - 'range': err_range, - 'message': message.message % message.message_args, - 'severity': severity - }) + self.diagnostics.append( + { + "source": "pyflakes", + "range": err_range, + "message": message.message % message.message_args, + "severity": severity, + } + ) diff --git a/pylsp/plugins/pylint_lint.py b/pylsp/plugins/pylint_lint.py index d974a2f8..f3415c8a 100644 --- a/pylsp/plugins/pylint_lint.py +++ b/pylsp/plugins/pylint_lint.py @@ -3,19 +3,20 @@ # Copyright 2021- Python Language Server Contributors. 
"""Linter plugin for pylint.""" + import collections import logging -import sys -import re -from subprocess import Popen, PIPE import os +import re +import shlex +import sys +from subprocess import PIPE, Popen -from pylint.epylint import py_run from pylsp import hookimpl, lsp try: import ujson as json -except Exception: # pylint: disable=broad-except +except Exception: import json log = logging.getLogger(__name__) @@ -27,14 +28,28 @@ # import via an (otherwise harmless) environment variable. This is an ad-hoc # fix for a very specific upstream issue. # Related: https://github.com/PyCQA/pylint/issues/3518 -os.environ['PYGAME_HIDE_SUPPORT_PROMPT'] = 'hide' +os.environ["PYGAME_HIDE_SUPPORT_PROMPT"] = "hide" +DEPRECATION_CODES = { + "W0402", # Uses of a deprecated module %r + "W1505", # Using deprecated method %s() + "W1511", # Using deprecated argument %s of method %s() + "W1512", # Using deprecated class %s of module %s + "W1513", # Using deprecated decorator %s() +} +UNNECESSITY_CODES = { + "W0611", # Unused import %s + "W0612", # Unused variable %r + "W0613", # Unused argument %r + "W0614", # Unused import %s from wildcard import + "W1304", # Unused-format-string-argument +} class PylintLinter: last_diags = collections.defaultdict(list) @classmethod - def lint(cls, document, is_saved, flags=''): + def lint(cls, document, is_saved, flags=""): """Plugin interface to pylsp linter. Args: @@ -71,22 +86,26 @@ def lint(cls, document, is_saved, flags=''): # save. return cls.last_diags[document.path] - # py_run will call shlex.split on its arguments, and shlex.split does - # not handle Windows paths (it will try to perform escaping). Turn - # backslashes into forward slashes first to avoid this issue. 
- path = document.path - if sys.platform.startswith('win'): - path = path.replace('\\', '/') - - pylint_call = '{} -f json {}'.format(path, flags) - log.debug("Calling pylint with '%s'", pylint_call) - json_out, err = py_run(pylint_call, return_std=True) - - # Get strings - json_out = json_out.getvalue() - err = err.getvalue() - - if err != '': + cmd = [ + sys.executable, + "-c", + "import sys; from pylint.lint import Run; Run(sys.argv[1:])", + "-f", + "json", + document.path, + ] + (shlex.split(str(flags)) if flags else []) + log.debug("Calling pylint with '%s'", " ".join(cmd)) + + cwd = document._workspace.root_path + if not cwd: + cwd = os.path.dirname(__file__) + + with Popen( + cmd, stdout=PIPE, stderr=PIPE, cwd=cwd, universal_newlines=True + ) as process: + json_out, err = process.communicate() + + if err != "": log.error("Error calling pylint: '%s'", err) # pylint prints nothing rather than [] when there are no diagnostics. @@ -112,6 +131,7 @@ def lint(cls, document, is_saved, flags=''): # The type can be any of: # # * convention + # * information # * error # * fatal # * refactor @@ -119,77 +139,97 @@ def lint(cls, document, is_saved, flags=''): diagnostics = [] for diag in json.loads(json_out): # pylint lines index from 1, pylsp lines index from 0 - line = diag['line'] - 1 + line = diag["line"] - 1 err_range = { - 'start': { - 'line': line, + "start": { + "line": line, # Index columns start from 0 - 'character': diag['column'], + "character": diag["column"], }, - 'end': { - 'line': line, + "end": { + "line": line, # It's possible that we're linting an empty file. Even an empty # file might fail linting if it isn't named properly. 
- 'character': len(document.lines[line]) if document.lines else 0, + "character": ( + _find_end_of_identifier(document.lines[line], diag["column"]) + if document.lines + else 0 + ), }, } - if diag['type'] == 'convention': + if diag["type"] == "convention": + severity = lsp.DiagnosticSeverity.Information + elif diag["type"] == "information": severity = lsp.DiagnosticSeverity.Information - elif diag['type'] == 'error': + elif diag["type"] == "error": severity = lsp.DiagnosticSeverity.Error - elif diag['type'] == 'fatal': + elif diag["type"] == "fatal": severity = lsp.DiagnosticSeverity.Error - elif diag['type'] == 'refactor': + elif diag["type"] == "refactor": severity = lsp.DiagnosticSeverity.Hint - elif diag['type'] == 'warning': + elif diag["type"] == "warning": severity = lsp.DiagnosticSeverity.Warning - diagnostics.append({ - 'source': 'pylint', - 'range': err_range, - 'message': '[{}] {}'.format(diag['symbol'], diag['message']), - 'severity': severity, - 'code': diag['message-id'] - }) + code = diag["message-id"] + + diagnostic = { + "source": "pylint", + "range": err_range, + "message": "[{}] {}".format(diag["symbol"], diag["message"]), + "severity": severity, + "code": code, + } + + if code in UNNECESSITY_CODES: + diagnostic["tags"] = [lsp.DiagnosticTag.Unnecessary] + if code in DEPRECATION_CODES: + diagnostic["tags"] = [lsp.DiagnosticTag.Deprecated] + + diagnostics.append(diagnostic) cls.last_diags[document.path] = diagnostics return diagnostics def _build_pylint_flags(settings): """Build arguments for calling pylint.""" - pylint_args = settings.get('args') + pylint_args = settings.get("args") if pylint_args is None: - return '' - return ' '.join(pylint_args) + return "" + return " ".join(pylint_args) @hookimpl def pylsp_settings(): # Default pylint to disabled because it requires a config # file to be useful. 
- return {'plugins': {'pylint': { - 'enabled': False, - 'args': [], - # disabled by default as it can slow down the workflow - 'executable': None, - }}} + return { + "plugins": { + "pylint": { + "enabled": False, + "args": [], + # disabled by default as it can slow down the workflow + "executable": None, + } + } + } @hookimpl -def pylsp_lint(config, document, is_saved): +def pylsp_lint(config, workspace, document, is_saved): """Run pylint linter.""" - settings = config.plugin_settings('pylint') - log.debug("Got pylint settings: %s", settings) - # pylint >= 2.5.0 is required for working through stdin and only - # available with python3 - if settings.get('executable') and sys.version_info[0] >= 3: - flags = build_args_stdio(settings) - pylint_executable = settings.get('executable', 'pylint') - return pylint_lint_stdin(pylint_executable, document, flags) - flags = _build_pylint_flags(settings) - return PylintLinter.lint(document, is_saved, flags=flags) + with workspace.report_progress("lint: pylint"): + settings = config.plugin_settings("pylint") + log.debug("Got pylint settings: %s", settings) + # pylint >= 2.5.0 is required for working through stdin and only + # available with python3 + if settings.get("executable") and sys.version_info[0] >= 3: + flags = build_args_stdio(settings) + pylint_executable = settings.get("executable", "pylint") + return pylint_lint_stdin(pylint_executable, document, flags) + flags = _build_pylint_flags(settings) + return PylintLinter.lint(document, is_saved, flags=flags) def build_args_stdio(settings): @@ -201,7 +241,7 @@ def build_args_stdio(settings): :return: arguments to path to pylint :rtype: list """ - pylint_args = settings.get('args') + pylint_args = settings.get("args") if pylint_args is None: return [] return pylint_args @@ -245,14 +285,14 @@ def _run_pylint_stdio(pylint_executable, document, flags): try: cmd = [pylint_executable] cmd.extend(flags) - cmd.extend(['--from-stdin', document.path]) + cmd.extend(["--from-stdin", 
document.path]) p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) - except IOError: + except OSError: log.debug("Can't execute %s. Trying with 'python -m pylint'", pylint_executable) - cmd = ['python', '-m', 'pylint'] + cmd = [sys.executable, "-m", "pylint"] cmd.extend(flags) - cmd.extend(['--from-stdin', document.path]) - p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) # pylint: disable=consider-using-with + cmd.extend(["--from-stdin", document.path]) + p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE) (stdout, stderr) = p.communicate(document.source.encode()) if stderr: log.error("Error while running pylint '%s'", stderr.decode()) @@ -273,7 +313,7 @@ def _parse_pylint_stdio_result(document, stdout): diagnostics = [] lines = stdout.splitlines() for raw_line in lines: - parsed_line = re.match(r'(.*):(\d*):(\d*): (\w*): (.*)', raw_line) + parsed_line = re.match(r"(.*):(\d*):(\d*): (\w*): (.*)", raw_line) if not parsed_line: log.debug("Pylint output parser can't parse line '%s'", raw_line) continue @@ -287,32 +327,41 @@ def _parse_pylint_stdio_result(document, stdout): line = int(line) - 1 character = int(character) severity_map = { - 'C': lsp.DiagnosticSeverity.Information, - 'E': lsp.DiagnosticSeverity.Error, - 'F': lsp.DiagnosticSeverity.Error, - 'I': lsp.DiagnosticSeverity.Information, - 'R': lsp.DiagnosticSeverity.Hint, - 'W': lsp.DiagnosticSeverity.Warning, + "C": lsp.DiagnosticSeverity.Information, + "E": lsp.DiagnosticSeverity.Error, + "F": lsp.DiagnosticSeverity.Error, + "I": lsp.DiagnosticSeverity.Information, + "R": lsp.DiagnosticSeverity.Hint, + "W": lsp.DiagnosticSeverity.Warning, } severity = severity_map[code[0]] - diagnostics.append( - { - 'source': 'pylint', - 'code': code, - 'range': { - 'start': { - 'line': line, - 'character': character - }, - 'end': { - 'line': line, - # no way to determine the column - 'character': len(document.lines[line]) - 1 - } + diagnostic = { + "source": "pylint", + "code": code, + "range": { + "start": {"line": 
line, "character": character}, + "end": { + "line": line, + "character": _find_end_of_identifier( + document.lines[line], character + ), }, - 'message': msg, - 'severity': severity, - } - ) + }, + "message": msg, + "severity": severity, + } + if code in UNNECESSITY_CODES: + diagnostic["tags"] = [lsp.DiagnosticTag.Unnecessary] + if code in DEPRECATION_CODES: + diagnostic["tags"] = [lsp.DiagnosticTag.Deprecated] + diagnostics.append(diagnostic) return diagnostics + + +def _find_end_of_identifier(string, start): + """Find the end of the identifier starting at the given position.""" + for i in range(len(string), start, -1): + if string[start:i].isidentifier(): + return i + return len(string) - 1 diff --git a/pylsp/plugins/references.py b/pylsp/plugins/references.py index 4ef2072a..a4c61b52 100644 --- a/pylsp/plugins/references.py +++ b/pylsp/plugins/references.py @@ -2,13 +2,14 @@ # Copyright 2021- Python Language Server Contributors. import logging -from pylsp import hookimpl, uris, _utils + +from pylsp import _utils, hookimpl, uris log = logging.getLogger(__name__) @hookimpl -def pylsp_references(document, position, exclude_declaration=False): +def pylsp_references(document, position, exclude_declaration): code_position = _utils.position_to_jedi_linecolumn(document, position) usages = document.jedi_script().get_references(**code_position) @@ -17,10 +18,16 @@ def pylsp_references(document, position, exclude_declaration=False): usages = [d for d in usages if not d.is_definition()] # Filter out builtin modules - return [{ - 'uri': uris.uri_with(document.uri, path=str(d.module_path)) if d.module_path else document.uri, - 'range': { - 'start': {'line': d.line - 1, 'character': d.column}, - 'end': {'line': d.line - 1, 'character': d.column + len(d.name)} + return [ + { + "uri": uris.uri_with(document.uri, path=str(d.module_path)) + if d.module_path + else document.uri, + "range": { + "start": {"line": d.line - 1, "character": d.column}, + "end": {"line": d.line - 1, 
"character": d.column + len(d.name)}, + }, } - } for d in usages if not d.in_builtin_module()] + for d in usages + if not d.in_builtin_module() + ] diff --git a/pylsp/plugins/rope_autoimport.py b/pylsp/plugins/rope_autoimport.py new file mode 100644 index 00000000..8ba951f7 --- /dev/null +++ b/pylsp/plugins/rope_autoimport.py @@ -0,0 +1,408 @@ +# Copyright 2022- Python Language Server Contributors. + +import logging +import threading +from collections.abc import Generator +from typing import Any, Optional, Union + +import parso +from jedi import Script +from parso.python import tree +from parso.tree import NodeOrLeaf +from rope.base.resources import Resource +from rope.contrib.autoimport.defs import SearchResult +from rope.contrib.autoimport.sqlite import AutoImport + +from pylsp import hookimpl +from pylsp.config.config import Config +from pylsp.workspace import Document, Workspace + +from ._rope_task_handle import PylspTaskHandle + +log = logging.getLogger(__name__) + +_score_pow = 5 +_score_max = 10**_score_pow +MAX_RESULTS_COMPLETIONS = 1000 +MAX_RESULTS_CODE_ACTIONS = 5 + + +class AutoimportCache: + """Handles the cache creation.""" + + def __init__(self) -> None: + self.thread = None + + def reload_cache( + self, + config: Config, + workspace: Workspace, + files: Optional[list[Document]] = None, + single_thread: Optional[bool] = True, + ): + if self.is_blocked(): + return + + memory: bool = config.plugin_settings("rope_autoimport").get("memory", False) + rope_config = config.settings().get("rope", {}) + autoimport = workspace._rope_autoimport(rope_config, memory) + resources: Optional[list[Resource]] = ( + None + if files is None + else [document._rope_resource(rope_config) for document in files] + ) + + if single_thread: + self._reload_cache(workspace, autoimport, resources) + else: + # Creating the cache may take 10-20s for a environment with 5k python modules. That's + # why we decided to move cache creation into its own thread. 
+ self.thread = threading.Thread( + target=self._reload_cache, args=(workspace, autoimport, resources) + ) + self.thread.start() + + def _reload_cache( + self, + workspace: Workspace, + autoimport: AutoImport, + resources: Optional[list[Resource]] = None, + ) -> None: + task_handle = PylspTaskHandle(workspace) + autoimport.generate_cache(task_handle=task_handle, resources=resources) + autoimport.generate_modules_cache(task_handle=task_handle) + + def is_blocked(self): + return self.thread and self.thread.is_alive() + + +@hookimpl +def pylsp_settings() -> dict[str, dict[str, dict[str, Any]]]: + # Default rope_completion to disabled + return { + "plugins": { + "rope_autoimport": { + "enabled": False, + "memory": False, + "completions": { + "enabled": True, + }, + "code_actions": { + "enabled": True, + }, + } + } + } + + +def _should_insert(expr: tree.BaseNode, word_node: tree.Leaf) -> bool: + """ + Check if we should insert the word_node on the given expr. + + Works for both correct and incorrect code. This is because the + user is often working on the code as they write it. + """ + if not word_node: + return False + if len(expr.children) == 0: + return True + first_child = expr.children[0] + if isinstance(first_child, tree.EndMarker): + if "#" in first_child.prefix: + return False # Check for single line comment + if first_child == word_node: + return True # If the word is the first word then its fine + if len(expr.children) > 1: + if any( + node.type == "operator" and "." 
in node.value or node.type == "trailer" + for node in expr.children + ): + return False # Check if we're on a method of a function + if isinstance(first_child, (tree.PythonErrorNode, tree.PythonNode)): + # The tree will often include error nodes like this to indicate errors + # we want to ignore errors since the code is being written + return _should_insert(first_child, word_node) + return _handle_first_child(first_child, expr, word_node) + + +def _handle_first_child( + first_child: NodeOrLeaf, expr: tree.BaseNode, word_node: tree.Leaf +) -> bool: + """Check if we suggest imports given the following first child.""" + if isinstance(first_child, tree.Import): + return False + if isinstance(first_child, (tree.PythonLeaf, tree.PythonErrorLeaf)): + # Check if the first item is a from or import statement even when incomplete + if first_child.value in ("import", "from"): + return False + if isinstance(first_child, tree.Keyword): + if first_child.value == "def": + return _should_import_function(word_node, expr) + if first_child.value == "class": + return _should_import_class(word_node, expr) + return True + + +def _should_import_class(word_node: tree.Leaf, expr: tree.BaseNode) -> bool: + prev_node = None + for node in expr.children: + if isinstance(node, tree.Name): + if isinstance(prev_node, tree.Operator): + if node == word_node and prev_node.value == "(": + return True + prev_node = node + + return False + + +def _should_import_function(word_node: tree.Leaf, expr: tree.BaseNode) -> bool: + prev_node = None + for node in expr.children: + if _handle_argument(node, word_node): + return True + if isinstance(prev_node, tree.Operator): + if prev_node.value == "->": + if node == word_node: + return True + prev_node = node + return False + + +def _handle_argument(node: NodeOrLeaf, word_node: tree.Leaf): + if isinstance(node, tree.PythonNode): + if node.type == "tfpdef": + if node.children[2] == word_node: + return True + if node.type == "parameters": + for parameter in 
node.children: + if _handle_argument(parameter, word_node): + return True + return False + + +def _process_statements( + suggestions: list[SearchResult], + doc_uri: str, + word: str, + autoimport: AutoImport, + document: Document, + feature: str = "completions", +) -> Generator[dict[str, Any], None, None]: + for suggestion in suggestions: + insert_line = autoimport.find_insertion_line(document.source) - 1 + start = {"line": insert_line, "character": 0} + edit_range = {"start": start, "end": start} + edit = {"range": edit_range, "newText": suggestion.import_statement + "\n"} + score = _get_score( + suggestion.source, suggestion.import_statement, suggestion.name, word + ) + if score > _score_max: + continue + # TODO make this markdown + if feature == "completions": + yield { + "label": suggestion.name, + "kind": suggestion.itemkind, + "sortText": _sort_import(score), + "data": {"doc_uri": doc_uri}, + "detail": _document(suggestion.import_statement), + "additionalTextEdits": [edit], + } + elif feature == "code_actions": + yield { + "title": suggestion.import_statement, + "kind": "quickfix", + "edit": {"changes": {doc_uri: [edit]}}, + # data is a supported field for codeAction responses + # See https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#textDocument_codeAction + "data": {"sortText": _sort_import(score)}, + } + else: + raise ValueError(f"Unknown feature: {feature}") + + +def get_names(script: Script) -> set[str]: + """Get all names to ignore from the current file.""" + raw_names = script.get_names(definitions=True) + log.debug(raw_names) + return {name.name for name in raw_names} + + +@hookimpl +def pylsp_completions( + config: Config, + workspace: Workspace, + document: Document, + position, + ignored_names: Union[set[str], None], +): + """Get autoimport suggestions.""" + if ( + not config.plugin_settings("rope_autoimport") + .get("completions", {}) + .get("enabled", True) + ) or cache.is_blocked(): + return [] + + line 
= document.lines[position["line"]] + expr = parso.parse(line) + word_node = expr.get_leaf_for_position((1, position["character"])) + if not _should_insert(expr, word_node): + return [] + word = word_node.value + log.debug(f"autoimport: searching for word: {word}") + rope_config = config.settings(document_path=document.path).get("rope", {}) + ignored_names: set[str] = ignored_names or get_names( + document.jedi_script(use_document_path=True) + ) + autoimport = workspace._rope_autoimport(rope_config) + suggestions = list(autoimport.search_full(word, ignored_names=ignored_names)) + results = sorted( + _process_statements( + suggestions, document.uri, word, autoimport, document, "completions" + ), + key=lambda statement: statement["sortText"], + ) + if len(results) > MAX_RESULTS_COMPLETIONS: + results = results[:MAX_RESULTS_COMPLETIONS] + return results + + +def _document(import_statement: str) -> str: + return """# Auto-Import\n""" + import_statement + + +def _get_score( + source: int, full_statement: str, suggested_name: str, desired_name +) -> int: + import_length = len("import") + full_statement_score = len(full_statement) - import_length + suggested_name_score = (len(suggested_name) - len(desired_name)) ** 2 + source_score = 20 * source + return suggested_name_score + full_statement_score + source_score + + +def _sort_import(score: int) -> str: + score = max(min(score, (_score_max) - 1), 0) + # Since we are using ints, we need to pad them. + # We also want to prioritize autoimport behind everything since its the last priority. 
+ # The minimum is to prevent score from overflowing the pad + return "[z" + str(score).rjust(_score_pow, "0") + + +def get_name_or_module(document, diagnostic) -> str: + start = diagnostic["range"]["start"] + return ( + parso.parse(document.lines[start["line"]]) + .get_leaf_for_position((1, start["character"] + 1)) + .value + ) + + +@hookimpl +def pylsp_code_actions( + config: Config, + workspace: Workspace, + document: Document, + range: dict, + context: dict, +) -> list[dict]: + """ + Provide code actions through rope. + + Parameters + ---------- + config : pylsp.config.config.Config + Current config. + workspace : pylsp.workspace.Workspace + Current workspace. + document : pylsp.workspace.Document + Document to apply code actions on. + range : dict + Range argument given by pylsp. Not used here. + context : dict + CodeActionContext given as dict. + + Returns + ------- + List of dicts containing the code actions. + """ + if ( + not config.plugin_settings("rope_autoimport") + .get("code_actions", {}) + .get("enabled", True) + ) or cache.is_blocked(): + return [] + + log.debug(f"textDocument/codeAction: {document} {range} {context}") + code_actions = [] + for diagnostic in context.get("diagnostics", []): + if "undefined name" not in diagnostic.get("message", "").lower(): + continue + + word = get_name_or_module(document, diagnostic) + log.debug(f"autoimport: searching for word: {word}") + rope_config = config.settings(document_path=document.path).get("rope", {}) + autoimport = workspace._rope_autoimport(rope_config) + suggestions = list(autoimport.search_full(word)) + log.debug("autoimport: suggestions: %s", suggestions) + results = sorted( + _process_statements( + suggestions, + document.uri, + word, + autoimport, + document, + "code_actions", + ), + key=lambda statement: statement["data"]["sortText"], + ) + + if len(results) > MAX_RESULTS_CODE_ACTIONS: + results = results[:MAX_RESULTS_CODE_ACTIONS] + code_actions.extend(results) + + return code_actions + + 
+@hookimpl +def pylsp_initialize(config: Config, workspace: Workspace) -> None: + """Initialize AutoImport. + + Generates the cache for local and global items. + """ + cache.reload_cache(config, workspace) + + +@hookimpl +def pylsp_document_did_open(config: Config, workspace: Workspace) -> None: + """Initialize AutoImport. + + Generates the cache for local and global items. + """ + cache.reload_cache(config, workspace) + + +@hookimpl +def pylsp_document_did_save( + config: Config, workspace: Workspace, document: Document +) -> None: + """Update the names associated with this document.""" + cache.reload_cache(config, workspace, [document]) + + +@hookimpl +def pylsp_workspace_configuration_changed(config: Config, workspace: Workspace) -> None: + """ + Initialize autoimport if it has been enabled through a + workspace/didChangeConfiguration message from the frontend. + + Generates the cache for local and global items. + """ + if config.plugin_settings("rope_autoimport").get("enabled", False): + cache.reload_cache(config, workspace) + else: + log.debug("autoimport: Skipping cache reload.") + + +cache: AutoimportCache = AutoimportCache() diff --git a/pylsp/plugins/rope_completion.py b/pylsp/plugins/rope_completion.py index 502d2390..dc94ddea 100644 --- a/pylsp/plugins/rope_completion.py +++ b/pylsp/plugins/rope_completion.py @@ -2,10 +2,10 @@ # Copyright 2021- Python Language Server Contributors. 
import logging -from rope.contrib.codeassist import code_assist, sorted_proposals -from pylsp import hookimpl, lsp +from rope.contrib.codeassist import code_assist, sorted_proposals +from pylsp import _utils, hookimpl, lsp log = logging.getLogger(__name__) @@ -13,45 +13,54 @@ @hookimpl def pylsp_settings(): # Default rope_completion to disabled - return {'plugins': {'rope_completion': {'enabled': False, 'eager': False}}} + return {"plugins": {"rope_completion": {"enabled": False, "eager": False}}} -def _resolve_completion(completion, data): - # pylint: disable=broad-except +def _resolve_completion(completion, data, markup_kind): try: - doc = data.get_doc() + doc = _utils.format_docstring(data.get_doc(), markup_kind=markup_kind) except Exception as e: log.debug("Failed to resolve Rope completion: %s", e) doc = "" - completion['detail'] = '{0} {1}'.format(data.scope or "", data.name) - completion['documentation'] = doc + completion["detail"] = "{} {}".format(data.scope or "", data.name) + completion["documentation"] = doc return completion @hookimpl def pylsp_completions(config, workspace, document, position): - # pylint: disable=too-many-locals - - settings = config.plugin_settings('rope_completion', document_path=document.path) - resolve_eagerly = settings.get('eager', False) + settings = config.plugin_settings("rope_completion", document_path=document.path) + resolve_eagerly = settings.get("eager", False) # Rope is a bit rubbish at completing module imports, so we'll return None - word = document.word_at_position({ - # The -1 should really be trying to look at the previous word, but that might be quite expensive - # So we only skip import completions when the cursor is one space after `import` - 'line': position['line'], 'character': max(position['character'] - 1, 0), - }) - if word == 'import': + word = document.word_at_position( + { + # The -1 should really be trying to look at the previous word, but that might be quite expensive + # So we only skip import 
completions when the cursor is one space after `import` + "line": position["line"], + "character": max(position["character"] - 1, 0), + } + ) + if word == "import": return None offset = document.offset_at_position(position) - rope_config = config.settings(document_path=document.path).get('rope', {}) + rope_config = config.settings(document_path=document.path).get("rope", {}) rope_project = workspace._rope_project_builder(rope_config) document_rope = document._rope_resource(rope_config) + completion_capabilities = config.capabilities.get("textDocument", {}).get( + "completion", {} + ) + item_capabilities = completion_capabilities.get("completionItem", {}) + supported_markup_kinds = item_capabilities.get("documentationFormat", ["markdown"]) + preferred_markup_kind = _utils.choose_markup_kind(supported_markup_kinds) + try: - definitions = code_assist(rope_project, document.source, offset, document_rope, maxfixes=3) - except Exception as e: # pylint: disable=broad-except + definitions = code_assist( + rope_project, document.source, offset, document_rope, maxfixes=3 + ) + except Exception as e: log.debug("Failed to run Rope code assist: %s", e) return [] @@ -59,21 +68,19 @@ def pylsp_completions(config, workspace, document, position): new_definitions = [] for d in definitions: item = { - 'label': d.name, - 'kind': _kind(d), - 'sortText': _sort_text(d), - 'data': { - 'doc_uri': document.uri - } + "label": d.name, + "kind": _kind(d), + "sortText": _sort_text(d), + "data": {"doc_uri": document.uri}, } if resolve_eagerly: - item = _resolve_completion(item, d) + item = _resolve_completion(item, d, preferred_markup_kind) new_definitions.append(item) # most recently retrieved completion items, used for resolution - document.shared_data['LAST_ROPE_COMPLETIONS'] = { + document.shared_data["LAST_ROPE_COMPLETIONS"] = { # label is the only required property; here it is assumed to be unique - completion['label']: (completion, data) + completion["label"]: (completion, data) for 
completion, data in zip(new_definitions, definitions) } @@ -83,62 +90,72 @@ def pylsp_completions(config, workspace, document, position): @hookimpl -def pylsp_completion_item_resolve(completion_item, document): +def pylsp_completion_item_resolve(config, completion_item, document): """Resolve formatted completion for given non-resolved completion""" - shared_data = document.shared_data['LAST_ROPE_COMPLETIONS'].get(completion_item['label']) + shared_data = document.shared_data["LAST_ROPE_COMPLETIONS"].get( + completion_item["label"] + ) + + completion_capabilities = config.capabilities.get("textDocument", {}).get( + "completion", {} + ) + item_capabilities = completion_capabilities.get("completionItem", {}) + supported_markup_kinds = item_capabilities.get("documentationFormat", ["markdown"]) + preferred_markup_kind = _utils.choose_markup_kind(supported_markup_kinds) + if shared_data: completion, data = shared_data - return _resolve_completion(completion, data) + return _resolve_completion(completion, data, preferred_markup_kind) return completion_item def _sort_text(definition): - """ Ensure builtins appear at the bottom. + """Ensure builtins appear at the bottom. Description is of format : . 
""" if definition.name.startswith("_"): # It's a 'hidden' func, put it next last - return 'z' + definition.name - if definition.scope == 'builtin': - return 'y' + definition.name + return "z" + definition.name + if definition.scope == "builtin": + return "y" + definition.name # Else put it at the front - return 'a' + definition.name + return "a" + definition.name def _kind(d): - """ Return the LSP type """ + """Return the LSP type""" MAP = { - 'none': lsp.CompletionItemKind.Value, - 'type': lsp.CompletionItemKind.Class, - 'tuple': lsp.CompletionItemKind.Class, - 'dict': lsp.CompletionItemKind.Class, - 'dictionary': lsp.CompletionItemKind.Class, - 'function': lsp.CompletionItemKind.Function, - 'lambda': lsp.CompletionItemKind.Function, - 'generator': lsp.CompletionItemKind.Function, - 'class': lsp.CompletionItemKind.Class, - 'instance': lsp.CompletionItemKind.Reference, - 'method': lsp.CompletionItemKind.Method, - 'builtin': lsp.CompletionItemKind.Class, - 'builtinfunction': lsp.CompletionItemKind.Function, - 'module': lsp.CompletionItemKind.Module, - 'file': lsp.CompletionItemKind.File, - 'xrange': lsp.CompletionItemKind.Class, - 'slice': lsp.CompletionItemKind.Class, - 'traceback': lsp.CompletionItemKind.Class, - 'frame': lsp.CompletionItemKind.Class, - 'buffer': lsp.CompletionItemKind.Class, - 'dictproxy': lsp.CompletionItemKind.Class, - 'funcdef': lsp.CompletionItemKind.Function, - 'property': lsp.CompletionItemKind.Property, - 'import': lsp.CompletionItemKind.Module, - 'keyword': lsp.CompletionItemKind.Keyword, - 'constant': lsp.CompletionItemKind.Variable, - 'variable': lsp.CompletionItemKind.Variable, - 'value': lsp.CompletionItemKind.Value, - 'param': lsp.CompletionItemKind.Variable, - 'statement': lsp.CompletionItemKind.Keyword, + "none": lsp.CompletionItemKind.Value, + "type": lsp.CompletionItemKind.Class, + "tuple": lsp.CompletionItemKind.Class, + "dict": lsp.CompletionItemKind.Class, + "dictionary": lsp.CompletionItemKind.Class, + "function": 
lsp.CompletionItemKind.Function, + "lambda": lsp.CompletionItemKind.Function, + "generator": lsp.CompletionItemKind.Function, + "class": lsp.CompletionItemKind.Class, + "instance": lsp.CompletionItemKind.Reference, + "method": lsp.CompletionItemKind.Method, + "builtin": lsp.CompletionItemKind.Class, + "builtinfunction": lsp.CompletionItemKind.Function, + "module": lsp.CompletionItemKind.Module, + "file": lsp.CompletionItemKind.File, + "xrange": lsp.CompletionItemKind.Class, + "slice": lsp.CompletionItemKind.Class, + "traceback": lsp.CompletionItemKind.Class, + "frame": lsp.CompletionItemKind.Class, + "buffer": lsp.CompletionItemKind.Class, + "dictproxy": lsp.CompletionItemKind.Class, + "funcdef": lsp.CompletionItemKind.Function, + "property": lsp.CompletionItemKind.Property, + "import": lsp.CompletionItemKind.Module, + "keyword": lsp.CompletionItemKind.Keyword, + "constant": lsp.CompletionItemKind.Variable, + "variable": lsp.CompletionItemKind.Variable, + "value": lsp.CompletionItemKind.Value, + "param": lsp.CompletionItemKind.Variable, + "statement": lsp.CompletionItemKind.Keyword, } return MAP.get(d.type) diff --git a/pylsp/plugins/rope_rename.py b/pylsp/plugins/rope_rename.py deleted file mode 100644 index d9ebab5c..00000000 --- a/pylsp/plugins/rope_rename.py +++ /dev/null @@ -1,61 +0,0 @@ -# Copyright 2017-2020 Palantir Technologies, Inc. -# Copyright 2021- Python Language Server Contributors. 
- -import logging - -from rope.base import libutils -from rope.refactor.rename import Rename - -from pylsp import hookimpl, uris - -log = logging.getLogger(__name__) - - -@hookimpl -def pylsp_settings(): - # Default rope_rename to disabled - return {'plugins': {'rope_rename': {'enabled': False}}} - - -@hookimpl -def pylsp_rename(config, workspace, document, position, new_name): - rope_config = config.settings(document_path=document.path).get('rope', {}) - rope_project = workspace._rope_project_builder(rope_config) - - rename = Rename( - rope_project, - libutils.path_to_resource(rope_project, document.path), - document.offset_at_position(position) - ) - - log.debug("Executing rename of %s to %s", document.word_at_position(position), new_name) - changeset = rename.get_changes(new_name, in_hierarchy=True, docs=True) - log.debug("Finished rename: %s", changeset.changes) - changes = [] - for change in changeset.changes: - uri = uris.from_fs_path(change.resource.path) - doc = workspace.get_maybe_document(uri) - changes.append({ - 'textDocument': { - 'uri': uri, - 'version': doc.version if doc else None - }, - 'edits': [ - { - 'range': { - 'start': {'line': 0, 'character': 0}, - 'end': { - 'line': _num_lines(change.resource), - 'character': 0, - }, - }, - 'newText': change.new_contents, - } - ] - }) - return {'documentChanges': changes} - - -def _num_lines(resource): - "Count the number of lines in a `File` resource." 
- return len(resource.read().splitlines()) diff --git a/pylsp/plugins/signature.py b/pylsp/plugins/signature.py index c4c3048f..c9a473fe 100644 --- a/pylsp/plugins/signature.py +++ b/pylsp/plugins/signature.py @@ -3,7 +3,8 @@ import logging import re -from pylsp import hookimpl, _utils + +from pylsp import _utils, hookimpl log = logging.getLogger(__name__) @@ -15,36 +16,62 @@ @hookimpl -def pylsp_signature_help(document, position): +def pylsp_signature_help(config, document, position): + signature_config = config.settings().get("signature", {}) code_position = _utils.position_to_jedi_linecolumn(document, position) signatures = document.jedi_script().get_signatures(**code_position) if not signatures: - return {'signatures': []} + return {"signatures": []} + + signature_capabilities = config.capabilities.get("textDocument", {}).get( + "signatureHelp", {} + ) + signature_information_support = signature_capabilities.get( + "signatureInformation", {} + ) + supported_markup_kinds = signature_information_support.get( + "documentationFormat", ["markdown"] + ) + preferred_markup_kind = _utils.choose_markup_kind(supported_markup_kinds) s = signatures[0] + docstring = s.docstring() + # Docstring contains one or more lines of signature, followed by empty line, followed by docstring - function_sig_lines = (s.docstring().split('\n\n') or [''])[0].splitlines() - function_sig = ' '.join([line.strip() for line in function_sig_lines]) + function_sig_lines = (docstring.split("\n\n") or [""])[0].splitlines() + function_sig = " ".join([line.strip() for line in function_sig_lines]) + + signature_docstring = s.docstring(raw=True) + if not signature_config.get("include_docstring", True): + signature_docstring = "" + sig = { - 'label': function_sig, - 'documentation': _utils.format_docstring(s.docstring(raw=True)) + "label": function_sig, + "documentation": _utils.format_docstring( + signature_docstring, markup_kind=preferred_markup_kind + ), } # If there are params, add those if s.params: 
- sig['parameters'] = [{ - 'label': p.name, - 'documentation': _param_docs(s.docstring(), p.name) - } for p in s.params] + sig["parameters"] = [ + { + "label": p.name, + "documentation": _utils.format_docstring( + _param_docs(docstring, p.name), markup_kind=preferred_markup_kind + ), + } + for p in s.params + ] # We only return a single signature because Python doesn't allow overloading - sig_info = {'signatures': [sig], 'activeSignature': 0} + sig_info = {"signatures": [sig], "activeSignature": 0} if s.index is not None and s.params: # Then we know which parameter we're looking at - sig_info['activeParameter'] = s.index + sig_info["activeParameter"] = s.index return sig_info @@ -55,6 +82,6 @@ def _param_docs(docstring, param_name): m = regex.match(line) if not m: continue - if m.group('param') != param_name: + if m.group("param") != param_name: continue - return m.group('doc') or "" + return m.group("doc") or "" diff --git a/pylsp/plugins/symbols.py b/pylsp/plugins/symbols.py index 2a00e612..3a7beb07 100644 --- a/pylsp/plugins/symbols.py +++ b/pylsp/plugins/symbols.py @@ -2,7 +2,8 @@ # Copyright 2021- Python Language Server Contributors. 
import logging -import os +import re +from pathlib import Path from pylsp import hookimpl from pylsp.lsp import SymbolKind @@ -12,19 +13,16 @@ @hookimpl def pylsp_document_symbols(config, document): - # pylint: disable=broad-except - # pylint: disable=too-many-nested-blocks - # pylint: disable=too-many-locals - # pylint: disable=too-many-branches - # pylint: disable=too-many-statements - - symbols_settings = config.plugin_settings('jedi_symbols') - all_scopes = symbols_settings.get('all_scopes', True) - add_import_symbols = symbols_settings.get('include_import_symbols', True) + symbols_settings = config.plugin_settings("jedi_symbols") + all_scopes = symbols_settings.get("all_scopes", True) + add_import_symbols = symbols_settings.get("include_import_symbols", True) definitions = document.jedi_names(all_scopes=all_scopes) symbols = [] exclude = set({}) redefinitions = {} + pattern_import = re.compile( + r"^\s*(?!#)\s*(from\s+[.\w]+(\.[\w]+)*\s+import\s+[\w\s,()*]+|import\s+[\w\s,.*]+)" + ) while definitions != []: d = definitions.pop(0) @@ -33,40 +31,44 @@ def pylsp_document_symbols(config, document): if not add_import_symbols: # Skip if there's an import in the code the symbol is defined. code = d.get_line_code() - if ' import ' in code or 'import ' in code: + + if pattern_import.match(code): continue # Skip imported symbols comparing module names. sym_full_name = d.full_name - document_dot_path = document.dot_path if sym_full_name is not None: + document_dot_path = document.dot_path + # We assume a symbol is imported from another module to start # with. imported_symbol = True # The last element of sym_full_name is the symbol itself, so # we need to discard it to do module comparisons below. - if '.' in sym_full_name: - sym_module_name = sym_full_name.rpartition('.')[0] + if "." 
in sym_full_name: + sym_module_name = sym_full_name.rpartition(".")[0] + else: + sym_module_name = sym_full_name # This is necessary to display symbols in init files (the checks # below fail without it). - if document_dot_path.endswith('__init__'): - document_dot_path = document_dot_path.rpartition('.')[0] + if document_dot_path.endswith("__init__"): + document_dot_path = document_dot_path.rpartition(".")[0] # document_dot_path is the module where the symbol is imported, # whereas sym_module_name is the one where it was declared. - if sym_module_name.startswith(document_dot_path): - # If sym_module_name starts with the same string as document_dot_path, - # we can safely assume it was declared in the document. + if document_dot_path in sym_module_name: + # If document_dot_path is in sym_module_name, we can safely assume + # that the symbol was declared in the document. imported_symbol = False - elif sym_module_name.split('.')[0] in document_dot_path.split('.'): + elif sym_module_name.split(".")[0] in document_dot_path.split("."): # If the first module in sym_module_name is one of the modules in # document_dot_path, we need to check if sym_module_name starts # with the modules in document_dot_path. - document_mods = document_dot_path.split('.') + document_mods = document_dot_path.split(".") for i in range(1, len(document_mods) + 1): - submod = '.'.join(document_mods[-i:]) + submod = ".".join(document_mods[-i:]) if sym_module_name.startswith(submod): imported_symbol = False break @@ -74,19 +76,21 @@ def pylsp_document_symbols(config, document): # When there's no __init__.py next to a file or in one of its # parents, the checks above fail. However, Jedi has a nice way # to tell if the symbol was declared in the same file: if - # full_name starts by __main__. + # sym_module_name starts by __main__. 
if imported_symbol: - if not sym_module_name.startswith('__main__'): + if not sym_module_name.startswith("__main__"): continue + else: + # We need to skip symbols if their definition doesn't have `full_name` info, they + # are detected as a definition, but their description (e.g. `class Foo`) doesn't + # match the code where they're detected by Jedi. This happens for relative imports. + if _include_def(d): + if d.description not in d.get_line_code(): + continue + else: + continue - try: - docismodule = os.path.samefile(document.path, d.module_path) - except (TypeError, FileNotFoundError): - # Python 2 on Windows has no .samefile, but then these are - # strings for sure - docismodule = document.path == d.module_path - - if _include_def(d) and docismodule: + if _include_def(d) and Path(document.path) == Path(d.module_path): tuple_range = _tuple_range(d) if tuple_range in exclude: continue @@ -95,29 +99,29 @@ def pylsp_document_symbols(config, document): if kind is not None: exclude |= {tuple_range} - if d.type == 'statement': - if d.description.startswith('self'): - kind = 'field' + if d.type == "statement": + if d.description.startswith("self"): + kind = "field" symbol = { - 'name': d.name, - 'containerName': _container(d), - 'location': { - 'uri': document.uri, - 'range': _range(d), + "name": d.name, + "containerName": _container(d), + "location": { + "uri": document.uri, + "range": _range(d), }, - 'kind': _kind(d) if kind is None else _SYMBOL_KIND_MAP[kind], + "kind": _kind(d) if kind is None else _SYMBOL_KIND_MAP[kind], } symbols.append(symbol) - if d.type == 'class': + if d.type == "class": try: defined_names = list(d.defined_names()) for method in defined_names: - if method.type == 'function': - redefinitions[_tuple_range(method)] = 'method' - elif method.type == 'statement': - redefinitions[_tuple_range(method)] = 'field' + if method.type == "function": + redefinitions[_tuple_range(method)] = "method" + elif method.type == "statement": + 
redefinitions[_tuple_range(method)] = "field" else: redefinitions[_tuple_range(method)] = method.type definitions = list(defined_names) + definitions @@ -129,10 +133,11 @@ def pylsp_document_symbols(config, document): def _include_def(definition): return ( # Don't tend to include parameters as symbols - definition.type != 'param' and + definition.type != "param" + and # Unused vars should also be skipped - definition.name != '_' and - _kind(definition) is not None + definition.name != "_" + and _kind(definition) is not None ) @@ -144,7 +149,7 @@ def _container(definition): # as children of the module. if parent.parent(): return parent.name - except: # pylint: disable=bare-except + except: return None return None @@ -156,8 +161,8 @@ def _range(definition): (start_line, start_column) = definition.start_pos (end_line, end_column) = definition.end_pos return { - 'start': {'line': start_line - 1, 'character': start_column}, - 'end': {'line': end_line - 1, 'character': end_column} + "start": {"line": start_line - 1, "character": start_column}, + "end": {"line": end_line - 1, "character": end_column}, } @@ -167,48 +172,48 @@ def _tuple_range(definition): _SYMBOL_KIND_MAP = { - 'none': SymbolKind.Variable, - 'type': SymbolKind.Class, - 'tuple': SymbolKind.Class, - 'dict': SymbolKind.Class, - 'dictionary': SymbolKind.Class, - 'function': SymbolKind.Function, - 'lambda': SymbolKind.Function, - 'generator': SymbolKind.Function, - 'class': SymbolKind.Class, - 'instance': SymbolKind.Class, - 'method': SymbolKind.Method, - 'builtin': SymbolKind.Class, - 'builtinfunction': SymbolKind.Function, - 'module': SymbolKind.Module, - 'file': SymbolKind.File, - 'xrange': SymbolKind.Array, - 'slice': SymbolKind.Class, - 'traceback': SymbolKind.Class, - 'frame': SymbolKind.Class, - 'buffer': SymbolKind.Array, - 'dictproxy': SymbolKind.Class, - 'funcdef': SymbolKind.Function, - 'property': SymbolKind.Property, - 'import': SymbolKind.Module, - 'keyword': SymbolKind.Variable, - 'constant': 
SymbolKind.Constant, - 'variable': SymbolKind.Variable, - 'value': SymbolKind.Variable, - 'param': SymbolKind.Variable, - 'statement': SymbolKind.Variable, - 'boolean': SymbolKind.Boolean, - 'int': SymbolKind.Number, - 'longlean': SymbolKind.Number, - 'float': SymbolKind.Number, - 'complex': SymbolKind.Number, - 'string': SymbolKind.String, - 'unicode': SymbolKind.String, - 'list': SymbolKind.Array, - 'field': SymbolKind.Field + "none": SymbolKind.Variable, + "type": SymbolKind.Class, + "tuple": SymbolKind.Class, + "dict": SymbolKind.Class, + "dictionary": SymbolKind.Class, + "function": SymbolKind.Function, + "lambda": SymbolKind.Function, + "generator": SymbolKind.Function, + "class": SymbolKind.Class, + "instance": SymbolKind.Class, + "method": SymbolKind.Method, + "builtin": SymbolKind.Class, + "builtinfunction": SymbolKind.Function, + "module": SymbolKind.Module, + "file": SymbolKind.File, + "xrange": SymbolKind.Array, + "slice": SymbolKind.Class, + "traceback": SymbolKind.Class, + "frame": SymbolKind.Class, + "buffer": SymbolKind.Array, + "dictproxy": SymbolKind.Class, + "funcdef": SymbolKind.Function, + "property": SymbolKind.Property, + "import": SymbolKind.Module, + "keyword": SymbolKind.Variable, + "constant": SymbolKind.Constant, + "variable": SymbolKind.Variable, + "value": SymbolKind.Variable, + "param": SymbolKind.Variable, + "statement": SymbolKind.Variable, + "boolean": SymbolKind.Boolean, + "int": SymbolKind.Number, + "longlean": SymbolKind.Number, + "float": SymbolKind.Number, + "complex": SymbolKind.Number, + "string": SymbolKind.String, + "unicode": SymbolKind.String, + "list": SymbolKind.Array, + "field": SymbolKind.Field, } def _kind(d): - """ Return the VSCode Symbol Type """ + """Return the VSCode Symbol Type""" return _SYMBOL_KIND_MAP.get(d.type) diff --git a/pylsp/plugins/type_definition.py b/pylsp/plugins/type_definition.py new file mode 100644 index 00000000..5fe0a890 --- /dev/null +++ b/pylsp/plugins/type_definition.py @@ -0,0 +1,38 @@ 
+# Copyright 2021- Python Language Server Contributors. + +import logging + +from pylsp import _utils, hookimpl + +log = logging.getLogger(__name__) + + +def lsp_location(name): + module_path = name.module_path + if module_path is None or name.line is None or name.column is None: + return None + uri = module_path.as_uri() + return { + "uri": str(uri), + "range": { + "start": {"line": name.line - 1, "character": name.column}, + "end": {"line": name.line - 1, "character": name.column + len(name.name)}, + }, + } + + +@hookimpl +def pylsp_type_definition(config, document, position): + try: + kwargs = _utils.position_to_jedi_linecolumn(document, position) + script = document.jedi_script() + names = script.infer(**kwargs) + definitions = [ + definition + for definition in [lsp_location(name) for name in names] + if definition is not None + ] + return definitions + except Exception as e: + log.debug("Failed to run type_definition: %s", e) + return [] diff --git a/pylsp/plugins/yapf_format.py b/pylsp/plugins/yapf_format.py index e4267a7c..72aa7404 100644 --- a/pylsp/plugins/yapf_format.py +++ b/pylsp/plugins/yapf_format.py @@ -4,6 +4,7 @@ import logging import os +import whatthepatch from yapf.yapflib import file_resources, style from yapf.yapflib.yapf_api import FormatCode @@ -14,16 +15,19 @@ @hookimpl -def pylsp_format_document(document, options=None): - return _format(document, options=options) +def pylsp_format_document(workspace, document, options): + log.info("Formatting document %s with yapf", document) + with workspace.report_progress("format: yapf"): + return _format(document, options=options) @hookimpl -def pylsp_format_range(document, range, options=None): # pylint: disable=redefined-builtin +def pylsp_format_range(document, range, options): + log.info("Formatting document %s in range %s with yapf", document, range) # First we 'round' the range up/down to full lines only - range['start']['character'] = 0 - range['end']['line'] += 1 - range['end']['character'] = 
0 + range["start"]["character"] = 0 + range["end"]["line"] += 1 + range["end"]["character"] = 0 # From Yapf docs: # lines: (list of tuples of integers) A list of tuples of lines, [start, end], @@ -32,79 +36,164 @@ def pylsp_format_range(document, range, options=None): # pylint: disable=redefi # than a whole file. # Add 1 for 1-indexing vs LSP's 0-indexing - lines = [(range['start']['line'] + 1, range['end']['line'] + 1)] + lines = [(range["start"]["line"] + 1, range["end"]["line"] + 1)] return _format(document, lines=lines, options=options) -def _format(document, lines=None, options=None): - # Yapf doesn't work with CRLF/CR line endings, so we replace them by '\n' - # and restore them below. - replace_eols = False - source = document.source - eol_chars = get_eol_chars(source) - if eol_chars in ['\r', '\r\n']: - replace_eols = True - source = source.replace(eol_chars, '\n') +def get_style_config(document_path, options=None): + # Exclude file if it follows the patterns for that + exclude_patterns_from_ignore_file = file_resources.GetExcludePatternsForDir( + os.getcwd() + ) + if file_resources.IsIgnored(document_path, exclude_patterns_from_ignore_file): + return [] # Get the default styles as a string # for a preset configuration, i.e. "pep8" - style_config = file_resources.GetDefaultStyleForDir( - os.path.dirname(document.path) + style_config = file_resources.GetDefaultStyleForDir(os.path.dirname(document_path)) + if options is None: + return style_config + + # We have options passed from LSP format request + # let's pass them to the formatter. 
+ # First we want to get a dictionary of the preset style + # to pass instead of a string so that we can modify it + style_config = style.CreateStyleFromConfig(style_config) + + use_tabs = style_config["USE_TABS"] + indent_width = style_config["INDENT_WIDTH"] + + if options.get("tabSize") is not None: + indent_width = max(int(options.get("tabSize")), 1) + + if options.get("insertSpaces") is not None: + # TODO is it guaranteed to be a boolean, or can it be a string? + use_tabs = not options.get("insertSpaces") + + if use_tabs: + # Indent width doesn't make sense when using tabs + # the specifications state: "Size of a tab in spaces" + indent_width = 1 + + style_config["USE_TABS"] = use_tabs + style_config["INDENT_WIDTH"] = indent_width + style_config["CONTINUATION_INDENT_WIDTH"] = indent_width + + for style_option, value in options.items(): + # Apply arbitrary options passed as formatter options + if style_option not in style_config: + # ignore if it's not a known yapf config + continue + + style_config[style_option] = value + + return style_config + + +def diff_to_text_edits(diff, eol_chars): + # To keep things simple our text edits will be line based. + # We will also return the edits uncompacted, meaning a + # line replacement will come in as a line remove followed + # by a line add instead of a line replace. + text_edits = [] + # keep track of line number since additions + # don't include the line number it's being added + # to in diffs. 
lsp is 0-indexed so we'll start with -1 + prev_line_no = -1 + + for change in diff.changes: + if change.old and change.new: + # old and new are the same line, no change + # diffs are 1-indexed + prev_line_no = change.old - 1 + elif change.new: + # addition + text_edits.append( + { + "range": { + "start": {"line": prev_line_no + 1, "character": 0}, + "end": {"line": prev_line_no + 1, "character": 0}, + }, + "newText": change.line + eol_chars, + } + ) + elif change.old: + # remove + lsp_line_no = change.old - 1 + text_edits.append( + { + "range": { + "start": {"line": lsp_line_no, "character": 0}, + "end": { + # From LSP spec: + # If you want to specify a range that contains a line + # including the line ending character(s) then use an + # end position denoting the start of the next line. + "line": lsp_line_no + 1, + "character": 0, + }, + }, + "newText": "", + } + ) + prev_line_no = lsp_line_no + + return text_edits + + +def ensure_eof_new_line(document, eol_chars, text_edits): + # diffs don't include EOF newline https://github.com/google/yapf/issues/1008 + # we'll add it ourselves if our document doesn't already have it and the diff + # does not change the last line. + if document.source.endswith(eol_chars): + return + + lines = document.lines + last_line_number = len(lines) - 1 + + if text_edits and text_edits[-1]["range"]["start"]["line"] >= last_line_number: + return + + text_edits.append( + { + "range": { + "start": {"line": last_line_number, "character": 0}, + "end": {"line": last_line_number + 1, "character": 0}, + }, + "newText": lines[-1] + eol_chars, + } ) - if options is not None: - # We have options passed from LSP format request - # let's pass them to the formatter. 
- # First we want to get a dictionary of the preset style - # to pass instead of a string so that we can modify it - style_config = style.CreateStyleFromConfig(style_config) - - use_tabs = style_config['USE_TABS'] - indent_width = style_config['INDENT_WIDTH'] - - if options.get('tabSize') is not None: - indent_width = max(int(options.get('tabSize')), 1) - - if options.get('insertSpaces') is not None: - # TODO is it guaranteed to be a boolean, or can it be a string? - use_tabs = not options.get('insertSpaces') - if use_tabs: - # Indent width doesn't make sense when using tabs - # the specifications state: "Size of a tab in spaces" - indent_width = 1 - style_config['USE_TABS'] = use_tabs - style_config['INDENT_WIDTH'] = indent_width - style_config['CONTINUATION_INDENT_WIDTH'] = indent_width - - for style_option, value in options.items(): - # Apply arbitrary options passed as formatter options - if style_option not in style_config: - # ignore if it's not a known yapf config - continue +def _format(document, lines=None, options=None): + source = document.source + # Yapf doesn't work with CRLF/CR line endings, so we replace them by '\n' + # and restore them below when adding new lines + eol_chars = get_eol_chars(source) + if eol_chars in ["\r", "\r\n"]: + source = source.replace(eol_chars, "\n") + else: + eol_chars = "\n" - style_config[style_option] = value + style_config = get_style_config(document_path=document.path, options=options) - new_source, changed = FormatCode( + diff_txt, changed = FormatCode( source, lines=lines, filename=document.filename, - style_config=style_config + print_diff=True, + style_config=style_config, ) if not changed: return [] - if replace_eols: - new_source = new_source.replace('\n', eol_chars) - - # I'm too lazy at the moment to parse diffs into TextEdit items - # So let's just return the entire file... 
- return [{ - 'range': { - 'start': {'line': 0, 'character': 0}, - # End char 0 of the line after our document - 'end': {'line': len(document.lines), 'character': 0} - }, - 'newText': new_source - }] + patch_generator = whatthepatch.parse_patch(diff_txt) + diff = next(patch_generator) + patch_generator.close() + + text_edits = diff_to_text_edits(diff=diff, eol_chars=eol_chars) + + ensure_eof_new_line(document=document, eol_chars=eol_chars, text_edits=text_edits) + + return text_edits diff --git a/pylsp/py.typed b/pylsp/py.typed new file mode 100644 index 00000000..e69de29b diff --git a/pylsp/python_lsp.py b/pylsp/python_lsp.py index 81e93bdc..bdc072d4 100644 --- a/pylsp/python_lsp.py +++ b/pylsp/python_lsp.py @@ -1,20 +1,28 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. -from functools import partial import logging import os import socketserver +import sys import threading +import uuid +from functools import partial +from typing import Any + +try: + import ujson as json +except Exception: + import json from pylsp_jsonrpc.dispatchers import MethodDispatcher from pylsp_jsonrpc.endpoint import Endpoint from pylsp_jsonrpc.streams import JsonRpcStreamReader, JsonRpcStreamWriter -from . import lsp, _utils, uris -from .config import config -from .workspace import Workspace +from . 
import _utils, lsp, uris from ._version import __version__ +from .config import config +from .workspace import Cell, Document, Notebook, Workspace log = logging.getLogger(__name__) @@ -22,8 +30,8 @@ LINT_DEBOUNCE_S = 0.5 # 500 ms PARENT_PROCESS_WATCH_INTERVAL = 10 # 10 s MAX_WORKERS = 64 -PYTHON_FILE_EXTENSIONS = ('.py', '.pyi') -CONFIG_FILEs = ('pycodestyle.cfg', 'setup.cfg', 'tox.ini', '.flake8') +PYTHON_FILE_EXTENSIONS = (".py", ".pyi") +CONFIG_FILEs = ("pycodestyle.cfg", "setup.cfg", "tox.ini", ".flake8") class _StreamHandlerWrapper(socketserver.StreamRequestHandler): @@ -31,74 +39,146 @@ class _StreamHandlerWrapper(socketserver.StreamRequestHandler): delegate = None - def setup(self): + def setup(self) -> None: super().setup() self.delegate = self.DELEGATE_CLASS(self.rfile, self.wfile) - def handle(self): + def handle(self) -> None: try: self.delegate.start() except OSError as e: - if os.name == 'nt': + if os.name == "nt": # Catch and pass on ConnectionResetError when parent process # dies - # pylint: disable=no-member, undefined-variable if isinstance(e, WindowsError) and e.winerror == 10054: pass self.SHUTDOWN_CALL() -def start_tcp_lang_server(bind_addr, port, check_parent_process, handler_class): +def start_tcp_lang_server(bind_addr, port, check_parent_process, handler_class) -> None: if not issubclass(handler_class, PythonLSPServer): - raise ValueError('Handler class must be an instance of PythonLSPServer') + raise ValueError("Handler class must be an instance of PythonLSPServer") def shutdown_server(check_parent_process, *args): - # pylint: disable=unused-argument if check_parent_process: - log.debug('Shutting down server') + log.debug("Shutting down server") # Shutdown call must be done on a thread, to prevent deadlocks stop_thread = threading.Thread(target=server.shutdown) stop_thread.start() # Construct a custom wrapper class around the user's handler_class wrapper_class = type( - handler_class.__name__ + 'Handler', + handler_class.__name__ + 
"Handler", (_StreamHandlerWrapper,), - {'DELEGATE_CLASS': partial(handler_class, - check_parent_process=check_parent_process), - 'SHUTDOWN_CALL': partial(shutdown_server, check_parent_process)} + { + # We need to wrap this in staticmethod due to the changes to + # functools.partial in Python 3.14+ + "DELEGATE_CLASS": staticmethod( + partial(handler_class, check_parent_process=check_parent_process) + ) + if sys.version_info >= (3, 14) + else partial(handler_class, check_parent_process=check_parent_process), + "SHUTDOWN_CALL": partial(shutdown_server, check_parent_process), + }, ) - server = socketserver.TCPServer((bind_addr, port), wrapper_class, bind_and_activate=False) + server = socketserver.TCPServer( + (bind_addr, port), wrapper_class, bind_and_activate=False + ) server.allow_reuse_address = True try: server.server_bind() server.server_activate() - log.info('Serving %s on (%s, %s)', handler_class.__name__, bind_addr, port) + log.info("Serving %s on (%s, %s)", handler_class.__name__, bind_addr, port) server.serve_forever() finally: - log.info('Shutting down') + log.info("Shutting down") server.server_close() -def start_io_lang_server(rfile, wfile, check_parent_process, handler_class): +def start_io_lang_server(rfile, wfile, check_parent_process, handler_class) -> None: if not issubclass(handler_class, PythonLSPServer): - raise ValueError('Handler class must be an instance of PythonLSPServer') - log.info('Starting %s IO language server', handler_class.__name__) + raise ValueError("Handler class must be an instance of PythonLSPServer") + log.info("Starting %s IO language server", handler_class.__name__) server = handler_class(rfile, wfile, check_parent_process) server.start() +def start_ws_lang_server(port, check_parent_process, handler_class) -> None: + if not issubclass(handler_class, PythonLSPServer): + raise ValueError("Handler class must be an instance of PythonLSPServer") + + # imports needed only for websockets based server + try: + import asyncio + from 
concurrent.futures import ThreadPoolExecutor + + import websockets + except ImportError as e: + raise ImportError( + "websocket modules missing. Please run: pip install 'python-lsp-server[websockets]'" + ) from e + + with ThreadPoolExecutor(max_workers=10) as tpool: + send_queue = None + loop = None + + async def pylsp_ws(websocket): + log.debug("Creating LSP object") + + # creating a partial function and suppling the websocket connection + response_handler = partial(send_message, websocket=websocket) + + # Not using default stream reader and writer. + # Instead using a consumer based approach to handle processed requests + pylsp_handler = handler_class( + rx=None, + tx=None, + consumer=response_handler, + check_parent_process=check_parent_process, + ) + + async for message in websocket: + try: + log.debug("consuming payload and feeding it to LSP handler") + request = json.loads(message) + loop = asyncio.get_running_loop() + await loop.run_in_executor(tpool, pylsp_handler.consume, request) + except Exception as e: + log.exception("Failed to process request %s, %s", message, str(e)) + + def send_message(message, websocket): + """Handler to send responses of processed requests to respective web socket clients""" + try: + payload = json.dumps(message, ensure_ascii=False) + loop.call_soon_threadsafe(send_queue.put_nowait, (payload, websocket)) + except Exception as e: + log.exception("Failed to write message %s, %s", message, str(e)) + + async def run_server(): + nonlocal send_queue, loop + send_queue = asyncio.Queue() + loop = asyncio.get_running_loop() + + async with websockets.serve(pylsp_ws, port=port): + while 1: + # Wait until payload is available for sending + payload, websocket = await send_queue.get() + await websocket.send(payload) + + asyncio.run(run_server()) + + class PythonLSPServer(MethodDispatcher): - """ Implementation of the Microsoft VSCode Language Server Protocol + """Implementation of the Microsoft VSCode Language Server Protocol 
https://github.com/Microsoft/language-server-protocol/blob/master/versions/protocol-1-x.md """ - # pylint: disable=too-many-public-methods,redefined-builtin - - def __init__(self, rx, tx, check_parent_process=False): + def __init__( + self, rx, tx, check_parent_process=False, consumer=None, *, endpoint_cls=None + ) -> None: self.workspace = None self.config = None self.root_uri = None @@ -106,23 +186,46 @@ def __init__(self, rx, tx, check_parent_process=False): self.workspaces = {} self.uri_workspace_mapper = {} - self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) - self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) self._check_parent_process = check_parent_process - self._endpoint = Endpoint(self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS) + + if rx is not None: + self._jsonrpc_stream_reader = JsonRpcStreamReader(rx) + else: + self._jsonrpc_stream_reader = None + + if tx is not None: + self._jsonrpc_stream_writer = JsonRpcStreamWriter(tx) + else: + self._jsonrpc_stream_writer = None + + endpoint_cls = endpoint_cls or Endpoint + + # if consumer is None, it is assumed that the default streams-based approach is being used + if consumer is None: + self._endpoint = endpoint_cls( + self, self._jsonrpc_stream_writer.write, max_workers=MAX_WORKERS + ) + else: + self._endpoint = endpoint_cls(self, consumer, max_workers=MAX_WORKERS) + self._dispatchers = [] self._shutdown = False - def start(self): + def start(self) -> None: """Entry point for the server.""" self._jsonrpc_stream_reader.listen(self._endpoint.consume) + def consume(self, message) -> None: + """Entry point for consumer based server. 
Alternative to stream listeners.""" + # assuming message will be JSON + self._endpoint.consume(message) + def __getitem__(self, item): """Override getitem to fallback through multiple dispatchers.""" - if self._shutdown and item != 'exit': + if self._shutdown and item != "exit": # exit is the only allowed method during shutdown log.debug("Ignoring non-exit method during shutdown: %s", item) - raise KeyError + item = "invalid_request_after_shutdown" try: return super().__getitem__(item) @@ -136,13 +239,26 @@ def __getitem__(self, item): raise KeyError() - def m_shutdown(self, **_kwargs): + def m_shutdown(self, **_kwargs) -> None: + for workspace in self.workspaces.values(): + workspace.close() + self._hook("pylsp_shutdown") self._shutdown = True - def m_exit(self, **_kwargs): + def m_invalid_request_after_shutdown(self, **_kwargs): + return { + "error": { + "code": lsp.ErrorCodes.InvalidRequest, + "message": "Requests after shutdown are not valid", + } + } + + def m_exit(self, **_kwargs) -> None: self._endpoint.shutdown() - self._jsonrpc_stream_reader.close() - self._jsonrpc_stream_writer.close() + if self._jsonrpc_stream_reader is not None: + self._jsonrpc_stream_reader.close() + if self._jsonrpc_stream_writer is not None: + self._jsonrpc_stream_writer.close() def _match_uri_to_workspace(self, uri): workspace_uri = _utils.match_uri_to_workspace(uri, self.workspaces) @@ -152,268 +268,607 @@ def _hook(self, hook_name, doc_uri=None, **kwargs): """Calls hook_name and returns a list of results from all registered handlers""" workspace = self._match_uri_to_workspace(doc_uri) doc = workspace.get_document(doc_uri) if doc_uri else None - hook_handlers = self.config.plugin_manager.subset_hook_caller(hook_name, self.config.disabled_plugins) - return hook_handlers(config=self.config, workspace=workspace, document=doc, **kwargs) + hook_handlers = self.config.plugin_manager.subset_hook_caller( + hook_name, self.config.disabled_plugins + ) + return hook_handlers( + 
config=self.config, workspace=workspace, document=doc, **kwargs + ) def capabilities(self): server_capabilities = { - 'codeActionProvider': True, - 'codeLensProvider': { - 'resolveProvider': False, # We may need to make this configurable + "codeActionProvider": True, + "codeLensProvider": { + "resolveProvider": False, # We may need to make this configurable }, - 'completionProvider': { - 'resolveProvider': True, # We could know everything ahead of time, but this takes time to transfer - 'triggerCharacters': ['.'], + "completionProvider": { + "resolveProvider": True, # We could know everything ahead of time, but this takes time to transfer + "triggerCharacters": ["."], }, - 'documentFormattingProvider': True, - 'documentHighlightProvider': True, - 'documentRangeFormattingProvider': True, - 'documentSymbolProvider': True, - 'definitionProvider': True, - 'executeCommandProvider': { - 'commands': flatten(self._hook('pylsp_commands')) + "documentFormattingProvider": True, + "documentHighlightProvider": True, + "documentRangeFormattingProvider": True, + "documentSymbolProvider": True, + "definitionProvider": True, + "typeDefinitionProvider": True, + "executeCommandProvider": { + "commands": flatten(self._hook("pylsp_commands")) }, - 'hoverProvider': True, - 'referencesProvider': True, - 'renameProvider': True, - 'foldingRangeProvider': True, - 'signatureHelpProvider': { - 'triggerCharacters': ['(', ',', '='] - }, - 'textDocumentSync': { - 'change': lsp.TextDocumentSyncKind.INCREMENTAL, - 'save': { - 'includeText': True, + "hoverProvider": True, + "referencesProvider": True, + "renameProvider": True, + "foldingRangeProvider": True, + "signatureHelpProvider": {"triggerCharacters": ["(", ",", "="]}, + "textDocumentSync": { + "change": lsp.TextDocumentSyncKind.INCREMENTAL, + "save": { + "includeText": True, }, - 'openClose': True, + "openClose": True, + }, + "notebookDocumentSync": { + "notebookSelector": [{"cells": [{"language": "python"}]}] }, - 'workspace': { - 
'workspaceFolders': { - 'supported': True, - 'changeNotifications': True - } + "workspace": { + "workspaceFolders": {"supported": True, "changeNotifications": True} }, - 'experimental': merge( - self._hook('pylsp_experimental_capabilities')) + "experimental": merge(self._hook("pylsp_experimental_capabilities")), } - log.info('Server capabilities: %s', server_capabilities) + log.info("Server capabilities: %s", server_capabilities) return server_capabilities - def m_initialize(self, processId=None, rootUri=None, rootPath=None, - initializationOptions=None, workspaceFolders=None, **_kwargs): - log.debug('Language server initialized with %s %s %s %s', processId, rootUri, rootPath, initializationOptions) + def m_initialize( + self, + processId=None, + rootUri=None, + rootPath=None, + initializationOptions=None, + workspaceFolders=None, + **_kwargs, + ): + log.debug( + "Language server initialized with %s %s %s %s", + processId, + rootUri, + rootPath, + initializationOptions, + ) if rootUri is None: - rootUri = uris.from_fs_path(rootPath) if rootPath is not None else '' + rootUri = uris.from_fs_path(rootPath) if rootPath is not None else "" self.workspaces.pop(self.root_uri, None) self.root_uri = rootUri - self.config = config.Config(rootUri, initializationOptions or {}, - processId, _kwargs.get('capabilities', {})) + self.config = config.Config( + rootUri, + initializationOptions or {}, + processId, + _kwargs.get("capabilities", {}), + ) self.workspace = Workspace(rootUri, self._endpoint, self.config) self.workspaces[rootUri] = self.workspace if workspaceFolders: for folder in workspaceFolders: - uri = folder['uri'] + uri = folder["uri"] if uri == rootUri: # Already created continue workspace_config = config.Config( - uri, self.config._init_opts, - self.config._process_id, self.config._capabilities) + uri, + self.config._init_opts, + self.config._process_id, + self.config._capabilities, + ) workspace_config.update(self.config._settings) - self.workspaces[uri] = 
Workspace( - uri, self._endpoint, workspace_config) + self.workspaces[uri] = Workspace(uri, self._endpoint, workspace_config) - self._dispatchers = self._hook('pylsp_dispatchers') - self._hook('pylsp_initialize') + self._dispatchers = self._hook("pylsp_dispatchers") + self._hook("pylsp_initialize") + + if ( + self._check_parent_process + and processId is not None + and self.watching_thread is None + ): - if self._check_parent_process and processId is not None and self.watching_thread is None: def watch_parent_process(pid): # exit when the given pid is not alive if not _utils.is_process_alive(pid): log.info("parent process %s is not alive, exiting!", pid) self.m_exit() else: - threading.Timer(PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid]).start() + threading.Timer( + PARENT_PROCESS_WATCH_INTERVAL, watch_parent_process, args=[pid] + ).start() - self.watching_thread = threading.Thread(target=watch_parent_process, args=(processId,)) + self.watching_thread = threading.Thread( + target=watch_parent_process, args=(processId,) + ) self.watching_thread.daemon = True self.watching_thread.start() # Get our capabilities return { - 'capabilities': self.capabilities(), - 'serverInfo': { - 'name': 'pylsp', - 'version': __version__, + "capabilities": self.capabilities(), + "serverInfo": { + "name": "pylsp", + "version": __version__, }, } - def m_initialized(self, **_kwargs): - self._hook('pylsp_initialized') + def m_initialized(self, **_kwargs) -> None: + self._hook("pylsp_initialized") - def code_actions(self, doc_uri, range, context): - return flatten(self._hook('pylsp_code_actions', doc_uri, range=range, context=context)) + def code_actions(self, doc_uri: str, range: dict, context: dict): + return flatten( + self._hook("pylsp_code_actions", doc_uri, range=range, context=context) + ) def code_lens(self, doc_uri): - return flatten(self._hook('pylsp_code_lens', doc_uri)) + return flatten(self._hook("pylsp_code_lens", doc_uri)) def completions(self, doc_uri, 
position): - completions = self._hook('pylsp_completions', doc_uri, position=position) - return { - 'isIncomplete': False, - 'items': flatten(completions) - } + workspace = self._match_uri_to_workspace(doc_uri) + document = workspace.get_document(doc_uri) + ignored_names = None + if isinstance(document, Cell): + # We need to get the ignored names from the whole notebook document + notebook_document = workspace.get_maybe_document(document.notebook_uri) + ignored_names = notebook_document.jedi_names(doc_uri) + completions = self._hook( + "pylsp_completions", doc_uri, position=position, ignored_names=ignored_names + ) + return {"isIncomplete": False, "items": flatten(completions)} def completion_item_resolve(self, completion_item): - doc_uri = completion_item.get('data', {}).get('doc_uri', None) - return self._hook('pylsp_completion_item_resolve', doc_uri, completion_item=completion_item) + doc_uri = completion_item.get("data", {}).get("doc_uri", None) + return self._hook( + "pylsp_completion_item_resolve", doc_uri, completion_item=completion_item + ) def definitions(self, doc_uri, position): - return flatten(self._hook('pylsp_definitions', doc_uri, position=position)) + return flatten(self._hook("pylsp_definitions", doc_uri, position=position)) + + def type_definition(self, doc_uri, position): + return self._hook("pylsp_type_definition", doc_uri, position=position) def document_symbols(self, doc_uri): - return flatten(self._hook('pylsp_document_symbols', doc_uri)) + return flatten(self._hook("pylsp_document_symbols", doc_uri)) + + def document_did_save(self, doc_uri): + return self._hook("pylsp_document_did_save", doc_uri) def execute_command(self, command, arguments): - return self._hook('pylsp_execute_command', command=command, arguments=arguments) + return self._hook("pylsp_execute_command", command=command, arguments=arguments) def format_document(self, doc_uri, options): - return self._hook('pylsp_format_document', doc_uri, options=options) + return lambda: 
self._hook("pylsp_format_document", doc_uri, options=options) def format_range(self, doc_uri, range, options): - return self._hook('pylsp_format_range', doc_uri, range=range, options=options) + return self._hook("pylsp_format_range", doc_uri, range=range, options=options) def highlight(self, doc_uri, position): - return flatten(self._hook('pylsp_document_highlight', doc_uri, position=position)) or None + return ( + flatten(self._hook("pylsp_document_highlight", doc_uri, position=position)) + or None + ) def hover(self, doc_uri, position): - return self._hook('pylsp_hover', doc_uri, position=position) or {'contents': ''} + return self._hook("pylsp_hover", doc_uri, position=position) or {"contents": ""} - @_utils.debounce(LINT_DEBOUNCE_S, keyed_by='doc_uri') - def lint(self, doc_uri, is_saved): + @_utils.debounce(LINT_DEBOUNCE_S, keyed_by="doc_uri") + def lint(self, doc_uri, is_saved) -> None: # Since we're debounced, the document may no longer be open workspace = self._match_uri_to_workspace(doc_uri) - if doc_uri in workspace.documents: - workspace.publish_diagnostics( - doc_uri, - flatten(self._hook('pylsp_lint', doc_uri, is_saved=is_saved)) + document_object = workspace.documents.get(doc_uri, None) + if isinstance(document_object, Document): + self._lint_text_document( + doc_uri, workspace, is_saved, document_object.version ) + elif isinstance(document_object, Notebook): + self._lint_notebook_document(document_object, workspace) + + def _lint_text_document( + self, doc_uri, workspace, is_saved, doc_version=None + ) -> None: + workspace.publish_diagnostics( + doc_uri, + flatten(self._hook("pylsp_lint", doc_uri, is_saved=is_saved)), + doc_version, + ) + + def _lint_notebook_document(self, notebook_document, workspace) -> None: + """ + Lint a notebook document. 
+ + This is a bit more complicated than linting a text document, because we need to + send the entire notebook document to the pylsp_lint hook, but we need to send + the diagnostics back to the client on a per-cell basis. + """ + + # First, we create a temp TextDocument that represents the whole notebook + # contents. We'll use this to send to the pylsp_lint hook. + random_uri = str(uuid.uuid4()) + + # cell_list helps us map the diagnostics back to the correct cell later. + cell_list: list[dict[str, Any]] = [] + + offset = 0 + total_source = "" + for cell in notebook_document.cells: + cell_uri = cell["document"] + cell_document = workspace.get_cell_document(cell_uri) + + num_lines = cell_document.line_count + + data = { + "uri": cell_uri, + "line_start": offset, + "line_end": offset + num_lines - 1, + "source": cell_document.source, + } + + cell_list.append(data) + if offset == 0: + total_source = cell_document.source + else: + total_source += "\n" + cell_document.source + + offset += num_lines + + workspace.put_document(random_uri, total_source) + + try: + document_diagnostics = flatten( + self._hook("pylsp_lint", random_uri, is_saved=True) + ) + + # Now we need to map the diagnostics back to the correct cell and publish them. + # Note: this is O(n*m) in the number of cells and diagnostics, respectively. 
+ for cell in cell_list: + cell_diagnostics = [] + for diagnostic in document_diagnostics: + start_line = diagnostic["range"]["start"]["line"] + end_line = diagnostic["range"]["end"]["line"] + + if start_line > cell["line_end"] or end_line < cell["line_start"]: + continue + diagnostic["range"]["start"]["line"] = ( + start_line - cell["line_start"] + ) + diagnostic["range"]["end"]["line"] = end_line - cell["line_start"] + cell_diagnostics.append(diagnostic) + + workspace.publish_diagnostics(cell["uri"], cell_diagnostics) + finally: + workspace.rm_document(random_uri) def references(self, doc_uri, position, exclude_declaration): - return flatten(self._hook( - 'pylsp_references', doc_uri, position=position, - exclude_declaration=exclude_declaration - )) + return flatten( + self._hook( + "pylsp_references", + doc_uri, + position=position, + exclude_declaration=exclude_declaration, + ) + ) def rename(self, doc_uri, position, new_name): - return self._hook('pylsp_rename', doc_uri, position=position, new_name=new_name) + return self._hook("pylsp_rename", doc_uri, position=position, new_name=new_name) def signature_help(self, doc_uri, position): - return self._hook('pylsp_signature_help', doc_uri, position=position) + return self._hook("pylsp_signature_help", doc_uri, position=position) def folding(self, doc_uri): - return flatten(self._hook('pylsp_folding_range', doc_uri)) + return flatten(self._hook("pylsp_folding_range", doc_uri)) def m_completion_item__resolve(self, **completionItem): return self.completion_item_resolve(completionItem) - def m_text_document__did_close(self, textDocument=None, **_kwargs): - workspace = self._match_uri_to_workspace(textDocument['uri']) - workspace.publish_diagnostics(textDocument['uri'], []) - workspace.rm_document(textDocument['uri']) + def m_notebook_document__did_open( + self, notebookDocument=None, cellTextDocuments=None, **_kwargs + ) -> None: + workspace = self._match_uri_to_workspace(notebookDocument["uri"]) + 
workspace.put_notebook_document( + notebookDocument["uri"], + notebookDocument["notebookType"], + cells=notebookDocument["cells"], + version=notebookDocument.get("version"), + metadata=notebookDocument.get("metadata"), + ) + for cell in cellTextDocuments or []: + workspace.put_cell_document( + cell["uri"], + notebookDocument["uri"], + cell["languageId"], + cell["text"], + version=cell.get("version"), + ) + self.lint(notebookDocument["uri"], is_saved=True) + + def m_notebook_document__did_close( + self, notebookDocument=None, cellTextDocuments=None, **_kwargs + ) -> None: + workspace = self._match_uri_to_workspace(notebookDocument["uri"]) + for cell in cellTextDocuments or []: + workspace.publish_diagnostics(cell["uri"], []) + workspace.rm_document(cell["uri"]) + workspace.rm_document(notebookDocument["uri"]) + + def m_notebook_document__did_change( + self, notebookDocument=None, change=None, **_kwargs + ) -> None: + """ + Changes to the notebook document. + + This could be one of the following: + 1. Notebook metadata changed + 2. Cell(s) added + 3. Cell(s) deleted + 4. 
Cell(s) data changed + 4.1 Cell metadata changed + 4.2 Cell source changed + """ + workspace = self._match_uri_to_workspace(notebookDocument["uri"]) + + if change.get("metadata"): + # Case 1 + workspace.update_notebook_metadata( + notebookDocument["uri"], change.get("metadata") + ) - def m_text_document__did_open(self, textDocument=None, **_kwargs): - workspace = self._match_uri_to_workspace(textDocument['uri']) - workspace.put_document(textDocument['uri'], textDocument['text'], version=textDocument.get('version')) - self._hook('pylsp_document_did_open', textDocument['uri']) - self.lint(textDocument['uri'], is_saved=True) + cells = change.get("cells") + if cells: + # Change to cells + structure = cells.get("structure") + if structure: + # Case 2 or 3 + notebook_cell_array_change = structure["array"] + start = notebook_cell_array_change["start"] + cell_delete_count = notebook_cell_array_change["deleteCount"] + if cell_delete_count == 0: + # Case 2 + # Cell documents + for cell_document in structure["didOpen"]: + workspace.put_cell_document( + cell_document["uri"], + notebookDocument["uri"], + cell_document["languageId"], + cell_document["text"], + cell_document.get("version"), + ) + # Cell metadata which is added to Notebook + workspace.add_notebook_cells( + notebookDocument["uri"], + notebook_cell_array_change["cells"], + start, + ) + else: + # Case 3 + # Cell documents + for cell_document in structure["didClose"]: + workspace.rm_document(cell_document["uri"]) + workspace.publish_diagnostics(cell_document["uri"], []) + # Cell metadata which is removed from Notebook + workspace.remove_notebook_cells( + notebookDocument["uri"], start, cell_delete_count + ) + + data = cells.get("data") + if data: + # Case 4.1 + for cell in data: + # update NotebookDocument.cells properties + pass - def m_text_document__did_change(self, contentChanges=None, textDocument=None, **_kwargs): - workspace = self._match_uri_to_workspace(textDocument['uri']) + text_content = 
cells.get("textContent") + if text_content: + # Case 4.2 + for cell in text_content: + cell_uri = cell["document"]["uri"] + # Even though the protocol says that `changes` is an array, we assume that it's always a single + # element array that contains the last change to the cell source. + workspace.update_document(cell_uri, cell["changes"][0]) + self.lint(notebookDocument["uri"], is_saved=True) + + def m_text_document__did_close(self, textDocument=None, **_kwargs) -> None: + workspace = self._match_uri_to_workspace(textDocument["uri"]) + workspace.publish_diagnostics(textDocument["uri"], []) + workspace.rm_document(textDocument["uri"]) + + def m_text_document__did_open(self, textDocument=None, **_kwargs) -> None: + workspace = self._match_uri_to_workspace(textDocument["uri"]) + workspace.put_document( + textDocument["uri"], + textDocument["text"], + version=textDocument.get("version"), + ) + self._hook("pylsp_document_did_open", textDocument["uri"]) + self.lint(textDocument["uri"], is_saved=True) + + def m_text_document__did_change( + self, contentChanges=None, textDocument=None, **_kwargs + ) -> None: + workspace = self._match_uri_to_workspace(textDocument["uri"]) for change in contentChanges: workspace.update_document( - textDocument['uri'], - change, - version=textDocument.get('version') + textDocument["uri"], change, version=textDocument.get("version") ) - self.lint(textDocument['uri'], is_saved=False) + self.lint(textDocument["uri"], is_saved=False) - def m_text_document__did_save(self, textDocument=None, **_kwargs): - self.lint(textDocument['uri'], is_saved=True) + def m_text_document__did_save(self, textDocument=None, **_kwargs) -> None: + self.lint(textDocument["uri"], is_saved=True) + self.document_did_save(textDocument["uri"]) - def m_text_document__code_action(self, textDocument=None, range=None, context=None, **_kwargs): - return self.code_actions(textDocument['uri'], range, context) + def m_text_document__code_action( + self, textDocument=None, 
range=None, context=None, **_kwargs + ): + return self.code_actions(textDocument["uri"], range, context) def m_text_document__code_lens(self, textDocument=None, **_kwargs): - return self.code_lens(textDocument['uri']) + return self.code_lens(textDocument["uri"]) + + def _cell_document__completion(self, cellDocument, position=None, **_kwargs): + workspace = self._match_uri_to_workspace(cellDocument.notebook_uri) + notebookDocument = workspace.get_maybe_document(cellDocument.notebook_uri) + if notebookDocument is None: + raise ValueError("Invalid notebook document") + + cell_data = notebookDocument.cell_data() + + # Concatenate all cells to be a single temporary document + total_source = "\n".join(data["source"] for data in cell_data.values()) + with workspace.temp_document(total_source) as temp_uri: + # update position to be the position in the temp document + if position is not None: + position["line"] += cell_data[cellDocument.uri]["line_start"] + + completions = self.completions(temp_uri, position) + + # Translate temp_uri locations to cell document locations + for item in completions.get("items", []): + if item.get("data", {}).get("doc_uri") == temp_uri: + item["data"]["doc_uri"] = cellDocument.uri + + # Copy LAST_JEDI_COMPLETIONS to cell document so that completionItem/resolve will work + tempDocument = workspace.get_document(temp_uri) + cellDocument.shared_data["LAST_JEDI_COMPLETIONS"] = ( + tempDocument.shared_data.get("LAST_JEDI_COMPLETIONS", None) + ) + + return completions def m_text_document__completion(self, textDocument=None, position=None, **_kwargs): - return self.completions(textDocument['uri'], position) + # textDocument here is just a dict with a uri + workspace = self._match_uri_to_workspace(textDocument["uri"]) + document = workspace.get_document(textDocument["uri"]) + if isinstance(document, Cell): + return self._cell_document__completion(document, position, **_kwargs) + return self.completions(textDocument["uri"], position) + + def 
_cell_document__definition(self, cellDocument, position=None, **_kwargs): + workspace = self._match_uri_to_workspace(cellDocument.notebook_uri) + notebookDocument = workspace.get_maybe_document(cellDocument.notebook_uri) + if notebookDocument is None: + raise ValueError("Invalid notebook document") + + cell_data = notebookDocument.cell_data() + + # Concatenate all cells to be a single temporary document + total_source = "\n".join(data["source"] for data in cell_data.values()) + with workspace.temp_document(total_source) as temp_uri: + # update position to be the position in the temp document + if position is not None: + position["line"] += cell_data[cellDocument.uri]["line_start"] + + definitions = self.definitions(temp_uri, position) + + # Translate temp_uri locations to cell document locations + for definition in definitions: + if definition["uri"] == temp_uri: + # Find the cell the start line is in and adjust the uri and line numbers + for cell_uri, data in cell_data.items(): + if ( + data["line_start"] + <= definition["range"]["start"]["line"] + <= data["line_end"] + ): + definition["uri"] = cell_uri + definition["range"]["start"]["line"] -= data["line_start"] + definition["range"]["end"]["line"] -= data["line_start"] + break + + return definitions def m_text_document__definition(self, textDocument=None, position=None, **_kwargs): - return self.definitions(textDocument['uri'], position) - - def m_text_document__document_highlight(self, textDocument=None, position=None, **_kwargs): - return self.highlight(textDocument['uri'], position) + # textDocument here is just a dict with a uri + workspace = self._match_uri_to_workspace(textDocument["uri"]) + document = workspace.get_document(textDocument["uri"]) + if isinstance(document, Cell): + return self._cell_document__definition(document, position, **_kwargs) + return self.definitions(textDocument["uri"], position) + + def m_text_document__type_definition( + self, textDocument=None, position=None, **_kwargs + ): + 
return self.type_definition(textDocument["uri"], position) + + def m_text_document__document_highlight( + self, textDocument=None, position=None, **_kwargs + ): + return self.highlight(textDocument["uri"], position) def m_text_document__hover(self, textDocument=None, position=None, **_kwargs): - return self.hover(textDocument['uri'], position) + return self.hover(textDocument["uri"], position) def m_text_document__document_symbol(self, textDocument=None, **_kwargs): - return self.document_symbols(textDocument['uri']) + return self.document_symbols(textDocument["uri"]) def m_text_document__formatting(self, textDocument=None, options=None, **_kwargs): - return self.format_document(textDocument['uri'], options) + return self.format_document(textDocument["uri"], options) - def m_text_document__rename(self, textDocument=None, position=None, newName=None, **_kwargs): - return self.rename(textDocument['uri'], position, newName) + def m_text_document__rename( + self, textDocument=None, position=None, newName=None, **_kwargs + ): + return self.rename(textDocument["uri"], position, newName) def m_text_document__folding_range(self, textDocument=None, **_kwargs): - return self.folding(textDocument['uri']) + return self.folding(textDocument["uri"]) - def m_text_document__range_formatting(self, textDocument=None, range=None, options=None, **_kwargs): - return self.format_range(textDocument['uri'], range, options) + def m_text_document__range_formatting( + self, textDocument=None, range=None, options=None, **_kwargs + ): + return self.format_range(textDocument["uri"], range, options) - def m_text_document__references(self, textDocument=None, position=None, context=None, **_kwargs): - exclude_declaration = not context['includeDeclaration'] - return self.references(textDocument['uri'], position, exclude_declaration) + def m_text_document__references( + self, textDocument=None, position=None, context=None, **_kwargs + ): + exclude_declaration = not context["includeDeclaration"] + 
return self.references(textDocument["uri"], position, exclude_declaration) - def m_text_document__signature_help(self, textDocument=None, position=None, **_kwargs): - return self.signature_help(textDocument['uri'], position) + def m_text_document__signature_help( + self, textDocument=None, position=None, **_kwargs + ): + return self.signature_help(textDocument["uri"], position) - def m_workspace__did_change_configuration(self, settings=None): + def m_workspace__did_change_configuration(self, settings=None) -> None: if self.config is not None: - self.config.update((settings or {}).get('pylsp', {})) + self.config.update((settings or {}).get("pylsp", {})) for workspace in self.workspaces.values(): workspace.update_config(settings) + self._hook("pylsp_workspace_configuration_changed") for doc_uri in workspace.documents: self.lint(doc_uri, is_saved=False) - def m_workspace__did_change_workspace_folders(self, event=None, **_kwargs): # pylint: disable=too-many-locals + def m_workspace__did_change_workspace_folders(self, event=None, **_kwargs): if event is None: return - added = event.get('added', []) - removed = event.get('removed', []) + added = event.get("added", []) + removed = event.get("removed", []) for removed_info in removed: - if 'uri' in removed_info: - removed_uri = removed_info['uri'] + if "uri" in removed_info: + removed_uri = removed_info["uri"] self.workspaces.pop(removed_uri, None) for added_info in added: - if 'uri' in added_info: - added_uri = added_info['uri'] + if "uri" in added_info: + added_uri = added_info["uri"] workspace_config = config.Config( - added_uri, self.config._init_opts, - self.config._process_id, self.config._capabilities) + added_uri, + self.config._init_opts, + self.config._process_id, + self.config._capabilities, + ) workspace_config.update(self.config._settings) self.workspaces[added_uri] = Workspace( - added_uri, self._endpoint, workspace_config) + added_uri, self._endpoint, workspace_config + ) - root_workspace_removed = 
any(removed_info['uri'] == self.root_uri for removed_info in removed) - workspace_added = len(added) > 0 and 'uri' in added[0] + root_workspace_removed = any( + removed_info["uri"] == self.root_uri for removed_info in removed + ) + workspace_added = len(added) > 0 and "uri" in added[0] if root_workspace_removed and workspace_added: - added_uri = added[0]['uri'] + added_uri = added[0]["uri"] self.root_uri = added_uri new_root_workspace = self.workspaces[added_uri] self.config = new_root_workspace._config @@ -422,7 +877,7 @@ def m_workspace__did_change_workspace_folders(self, event=None, **_kwargs): # p # NOTE: Removing the root workspace can only happen when the server # is closed, thus the else condition of this if can never happen. if self.workspaces: - log.debug('Root workspace deleted!') + log.debug("Root workspace deleted!") available_workspaces = sorted(self.workspaces) first_workspace = available_workspaces[0] new_root_workspace = self.workspaces[first_workspace] @@ -441,10 +896,10 @@ def m_workspace__did_change_workspace_folders(self, event=None, **_kwargs): # p def m_workspace__did_change_watched_files(self, changes=None, **_kwargs): changed_py_files = set() config_changed = False - for d in (changes or []): - if d['uri'].endswith(PYTHON_FILE_EXTENSIONS): - changed_py_files.add(d['uri']) - elif d['uri'].endswith(CONFIG_FILEs): + for d in changes or []: + if d["uri"].endswith(PYTHON_FILE_EXTENSIONS): + changed_py_files.add(d["uri"]) + elif d["uri"].endswith(CONFIG_FILEs): config_changed = True if config_changed: diff --git a/pylsp/text_edit.py b/pylsp/text_edit.py new file mode 100644 index 00000000..07be2aa4 --- /dev/null +++ b/pylsp/text_edit.py @@ -0,0 +1,97 @@ +# Copyright 2017-2020 Palantir Technologies, Inc. +# Copyright 2021- Python Language Server Contributors. 
+ + +def get_well_formatted_range(lsp_range): + start = lsp_range["start"] + end = lsp_range["end"] + + if start["line"] > end["line"] or ( + start["line"] == end["line"] and start["character"] > end["character"] + ): + return {"start": end, "end": start} + + return lsp_range + + +def get_well_formatted_edit(text_edit): + lsp_range = get_well_formatted_range(text_edit["range"]) + if lsp_range != text_edit["range"]: + return {"newText": text_edit["newText"], "range": lsp_range} + + return text_edit + + +def compare_text_edits(a, b): + diff = a["range"]["start"]["line"] - b["range"]["start"]["line"] + if diff == 0: + return a["range"]["start"]["character"] - b["range"]["start"]["character"] + + return diff + + +def merge_sort_text_edits(text_edits): + if len(text_edits) <= 1: + return text_edits + + p = len(text_edits) // 2 + left = text_edits[:p] + right = text_edits[p:] + + merge_sort_text_edits(left) + merge_sort_text_edits(right) + + left_idx = 0 + right_idx = 0 + i = 0 + while left_idx < len(left) and right_idx < len(right): + ret = compare_text_edits(left[left_idx], right[right_idx]) + if ret <= 0: + # smaller_equal -> take left to preserve order + text_edits[i] = left[left_idx] + i += 1 + left_idx += 1 + else: + # greater -> take right + text_edits[i] = right[right_idx] + i += 1 + right_idx += 1 + while left_idx < len(left): + text_edits[i] = left[left_idx] + i += 1 + left_idx += 1 + while right_idx < len(right): + text_edits[i] = right[right_idx] + i += 1 + right_idx += 1 + return text_edits + + +class OverLappingTextEditException(Exception): + """ + Text edits are expected to be sorted + and compressed instead of overlapping. + This error is raised when two edits + are overlapping. 
+ """ + + +def apply_text_edits(doc, text_edits): + text = doc.source + sorted_edits = merge_sort_text_edits(list(map(get_well_formatted_edit, text_edits))) + last_modified_offset = 0 + spans = [] + for e in sorted_edits: + start_offset = doc.offset_at_position(e["range"]["start"]) + if start_offset < last_modified_offset: + raise OverLappingTextEditException("overlapping edit") + + if start_offset > last_modified_offset: + spans.append(text[last_modified_offset:start_offset]) + + if len(e["newText"]): + spans.append(e["newText"]) + last_modified_offset = doc.offset_at_position(e["range"]["end"]) + + spans.append(text[last_modified_offset:]) + return "".join(spans) diff --git a/pylsp/uris.py b/pylsp/uris.py index 552761fc..8ebd8e31 100644 --- a/pylsp/uris.py +++ b/pylsp/uris.py @@ -5,11 +5,13 @@ https://github.com/Microsoft/vscode-uri/blob/e59cab84f5df6265aed18ae5f43552d3eef13bb9/lib/index.ts """ + import re from urllib import parse + from pylsp import IS_WIN -RE_DRIVE_LETTER_PATH = re.compile(r'^\/[a-zA-Z]:') +RE_DRIVE_LETTER_PATH = re.compile(r"^\/[a-zA-Z]:") def urlparse(uri): @@ -21,7 +23,7 @@ def urlparse(uri): parse.unquote(path), parse.unquote(params), parse.unquote(query), - parse.unquote(fragment) + parse.unquote(fragment), ) @@ -35,14 +37,16 @@ def urlunparse(parts): else: quoted_path = parse.quote(path) - return parse.urlunparse(( - parse.quote(scheme), - parse.quote(netloc), - quoted_path, - parse.quote(params), - parse.quote(query), - parse.quote(fragment) - )) + return parse.urlunparse( + ( + parse.quote(scheme), + parse.quote(netloc), + quoted_path, + parse.quote(params), + parse.quote(query), + parse.quote(fragment), + ) + ) def to_fs_path(uri): @@ -55,9 +59,9 @@ def to_fs_path(uri): # scheme://netloc/path;parameters?query#fragment scheme, netloc, path, _params, _query, _fragment = urlparse(uri) - if netloc and path and scheme == 'file': + if netloc and path and scheme == "file": # unc path: file://shares/c$/far/boo - value = "//{}{}".format(netloc, 
path) + value = f"//{netloc}{path}" elif RE_DRIVE_LETTER_PATH.match(path): # windows drive letter: file:///C:/far/boo @@ -68,49 +72,55 @@ def to_fs_path(uri): value = path if IS_WIN: - value = value.replace('/', '\\') + value = value.replace("/", "\\") return value def from_fs_path(path): """Returns a URI for the given filesystem path.""" - scheme = 'file' - params, query, fragment = '', '', '' + scheme = "file" + params, query, fragment = "", "", "" path, netloc = _normalize_win_path(path) return urlunparse((scheme, netloc, path, params, query, fragment)) -def uri_with(uri, scheme=None, netloc=None, path=None, params=None, query=None, fragment=None): +def uri_with( + uri, scheme=None, netloc=None, path=None, params=None, query=None, fragment=None +): """Return a URI with the given part(s) replaced. Parts are decoded / encoded. """ - old_scheme, old_netloc, old_path, old_params, old_query, old_fragment = urlparse(uri) + old_scheme, old_netloc, old_path, old_params, old_query, old_fragment = urlparse( + uri + ) path, _netloc = _normalize_win_path(path) - return urlunparse(( - scheme or old_scheme, - netloc or old_netloc, - path or old_path, - params or old_params, - query or old_query, - fragment or old_fragment - )) + return urlunparse( + ( + scheme or old_scheme, + netloc or old_netloc, + path or old_path, + params or old_params, + query or old_query, + fragment or old_fragment, + ) + ) def _normalize_win_path(path): - netloc = '' + netloc = "" # normalize to fwd-slashes on windows, # on other systems bwd-slaches are valid # filename character, eg /f\oo/ba\r.txt if IS_WIN: - path = path.replace('\\', '/') + path = path.replace("\\", "/") # check for authority as used in UNC shares # or use the path as given - if path[:2] == '//': - idx = path.index('/', 2) + if path[:2] == "//": + idx = path.index("/", 2) if idx == -1: netloc = path[2:] else: @@ -119,8 +129,8 @@ def _normalize_win_path(path): # Ensure that path starts with a slash # or that it is at least a slash 
- if not path.startswith('/'): - path = '/' + path + if not path.startswith("/"): + path = "/" + path # Normalize drive paths to lower case if RE_DRIVE_LETTER_PATH.match(path): diff --git a/pylsp/workspace.py b/pylsp/workspace.py index bf312f62..290b95ee 100644 --- a/pylsp/workspace.py +++ b/pylsp/workspace.py @@ -1,40 +1,50 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. +import functools import io import logging import os import re -import functools +import uuid +from collections.abc import Generator +from contextlib import contextmanager from threading import RLock +from typing import Callable, Optional import jedi -from . import lsp, uris, _utils +from . import _utils, lsp, uris log = logging.getLogger(__name__) +DEFAULT_AUTO_IMPORT_MODULES = ["numpy"] + # TODO: this is not the best e.g. we capture numbers -RE_START_WORD = re.compile('[A-Za-z_0-9]*$') -RE_END_WORD = re.compile('^[A-Za-z_0-9]*') +RE_START_WORD = re.compile("[A-Za-z_0-9]*$") +RE_END_WORD = re.compile("^[A-Za-z_0-9]*") def lock(method): """Define an atomic region over a method.""" + @functools.wraps(method) def wrapper(self, *args, **kwargs): with self._lock: return method(self, *args, **kwargs) + return wrapper class Workspace: - - M_PUBLISH_DIAGNOSTICS = 'textDocument/publishDiagnostics' - M_APPLY_EDIT = 'workspace/applyEdit' - M_SHOW_MESSAGE = 'window/showMessage' - - def __init__(self, root_uri, endpoint, config=None): + M_PUBLISH_DIAGNOSTICS = "textDocument/publishDiagnostics" + M_PROGRESS = "$/progress" + M_INITIALIZE_PROGRESS = "window/workDoneProgress/create" + M_APPLY_EDIT = "workspace/applyEdit" + M_SHOW_MESSAGE = "window/showMessage" + M_LOG_MESSAGE = "window/logMessage" + + def __init__(self, root_uri, endpoint, config=None) -> None: self._config = config self._root_uri = root_uri self._endpoint = endpoint @@ -48,18 +58,35 @@ def __init__(self, root_uri, endpoint, config=None): # Whilst incubating, keep rope private 
self.__rope = None self.__rope_config = None + self.__rope_autoimport = None + + def _rope_autoimport( + self, + rope_config: Optional, + memory: bool = False, + ): + from rope.contrib.autoimport.sqlite import AutoImport + + if self.__rope_autoimport is None: + project = self._rope_project_builder(rope_config) + self.__rope_autoimport = AutoImport(project, memory=memory) + return self.__rope_autoimport def _rope_project_builder(self, rope_config): - # pylint: disable=import-outside-toplevel from rope.base.project import Project # TODO: we could keep track of dirty files and validate only those if self.__rope is None or self.__rope_config != rope_config: - rope_folder = rope_config.get('ropeFolder') - self.__rope = Project(self._root_path, ropefolder=rope_folder) - self.__rope.prefs.set('extension_modules', rope_config.get('extensionModules', [])) - self.__rope.prefs.set('ignore_syntax_errors', True) - self.__rope.prefs.set('ignore_bad_imports', True) + rope_folder = rope_config.get("ropeFolder") + if "ropeFolder" in rope_config: + self.__rope = Project(self._root_path, ropefolder=rope_folder) + else: + self.__rope = Project(self._root_path) + self.__rope.prefs.set( + "extension_modules", rope_config.get("extensionModules", []) + ) + self.__rope.prefs.set("ignore_syntax_errors", True) + self.__rope.prefs.set("ignore_bad_imports", True) self.__rope.validate() return self.__rope @@ -76,7 +103,9 @@ def root_uri(self): return self._root_uri def is_local(self): - return (self._root_uri_scheme in ['', 'file']) and os.path.exists(self._root_path) + return (self._root_uri_scheme in ["", "file"]) and os.path.exists( + self._root_path + ) def get_document(self, doc_uri): """Return a managed document if-present, else create one pointing at disk. 
@@ -85,37 +114,239 @@ def get_document(self, doc_uri): """ return self._docs.get(doc_uri) or self._create_document(doc_uri) + def get_cell_document(self, doc_uri): + return self._docs.get(doc_uri) + def get_maybe_document(self, doc_uri): return self._docs.get(doc_uri) - def put_document(self, doc_uri, source, version=None): - self._docs[doc_uri] = self._create_document(doc_uri, source=source, version=version) + def put_document(self, doc_uri, source, version=None) -> None: + self._docs[doc_uri] = self._create_document( + doc_uri, source=source, version=version + ) + + def put_notebook_document( + self, doc_uri, notebook_type, cells, version=None, metadata=None + ) -> None: + self._docs[doc_uri] = self._create_notebook_document( + doc_uri, notebook_type, cells, version, metadata + ) + + @contextmanager + def temp_document(self, source, path=None) -> None: + if path is None: + path = self.root_path + uri = uris.from_fs_path(os.path.join(path, str(uuid.uuid4()))) + try: + self.put_document(uri, source) + yield uri + finally: + self.rm_document(uri) + + def add_notebook_cells(self, doc_uri, cells, start) -> None: + self._docs[doc_uri].add_cells(cells, start) + + def remove_notebook_cells(self, doc_uri, start, delete_count) -> None: + self._docs[doc_uri].remove_cells(start, delete_count) + + def update_notebook_metadata(self, doc_uri, metadata) -> None: + self._docs[doc_uri].metadata = metadata + + def put_cell_document( + self, doc_uri, notebook_uri, language_id, source, version=None + ) -> None: + self._docs[doc_uri] = self._create_cell_document( + doc_uri, notebook_uri, language_id, source, version + ) - def rm_document(self, doc_uri): + def rm_document(self, doc_uri) -> None: self._docs.pop(doc_uri) - def update_document(self, doc_uri, change, version=None): + def update_document(self, doc_uri, change, version=None) -> None: self._docs[doc_uri].apply_change(change) self._docs[doc_uri].version = version def update_config(self, settings): - 
self._config.update((settings or {}).get('pylsp', {})) + self._config.update((settings or {}).get("pylsp", {})) for doc_uri in self.documents: - self.get_document(doc_uri).update_config(settings) + if isinstance(document := self.get_document(doc_uri), Notebook): + # Notebook documents don't have a config. The config is + # handled at the cell level. + return + document.update_config(settings) def apply_edit(self, edit): - return self._endpoint.request(self.M_APPLY_EDIT, {'edit': edit}) + return self._endpoint.request(self.M_APPLY_EDIT, {"edit": edit}) - def publish_diagnostics(self, doc_uri, diagnostics): - self._endpoint.notify(self.M_PUBLISH_DIAGNOSTICS, params={'uri': doc_uri, 'diagnostics': diagnostics}) + def publish_diagnostics(self, doc_uri, diagnostics, doc_version=None) -> None: + params = { + "uri": doc_uri, + "diagnostics": diagnostics, + } + + if doc_version: + params["version"] = doc_version + + self._endpoint.notify( + self.M_PUBLISH_DIAGNOSTICS, + params=params, + ) + + @contextmanager + def report_progress( + self, + title: str, + message: Optional[str] = None, + percentage: Optional[int] = None, + skip_token_initialization: bool = False, + ) -> Generator[Callable[[str, Optional[int]], None], None, None]: + """ + Report progress to the editor / client. - def show_message(self, message, msg_type=lsp.MessageType.Info): - self._endpoint.notify(self.M_SHOW_MESSAGE, params={'type': msg_type, 'message': message}) + ``skip_token_initialization` is necessary due to some current + limitations of our LSP implementation. When `report_progress` + is used from a synchronous LSP handler, the token initialization + will time out because we can't receive the response. + + Many editors will still correctly show the progress messages though, which + is why we are giving progress users the option to skip the initialization + of the progress token. 
+ """ + if self._config: + client_supports_progress_reporting = self._config.capabilities.get( + "window", {} + ).get("workDoneProgress", False) + else: + client_supports_progress_reporting = False + + if client_supports_progress_reporting: + token = self._progress_begin( + title, message, percentage, skip_token_initialization + ) + + def progress_message( + message: str, percentage: Optional[int] = None + ) -> None: + self._progress_report(token, message, percentage) + + try: + yield progress_message + finally: + self._progress_end(token) + + return + + # FALLBACK: + # If the client doesn't support progress reporting, we have a dummy method + # for the caller to use. + def dummy_progress_message( + message: str, percentage: Optional[int] = None + ) -> None: + pass + + yield dummy_progress_message + + def _progress_begin( + self, + title: str, + message: Optional[str] = None, + percentage: Optional[int] = None, + skip_token_initialization: bool = False, + ) -> str: + token = str(uuid.uuid4()) + + if not skip_token_initialization: + try: + self._endpoint.request( + self.M_INITIALIZE_PROGRESS, {"token": token} + ).result(timeout=1.0) + except Exception: + log.warning( + "There was an error while trying to initialize progress reporting." + "Likely progress reporting was used in a synchronous LSP handler, " + "which is not supported by progress reporting yet. " + "To prevent waiting for the timeout you can set " + "`skip_token_initialization=True`. 
" + "Not every editor will show progress then, but many will.", + exc_info=True, + ) + + value = { + "kind": "begin", + "title": title, + } + if message is not None: + value["message"] = message + if percentage is not None: + value["percentage"] = percentage + + self._endpoint.notify( + self.M_PROGRESS, + params={ + "token": token, + "value": value, + }, + ) + return token + + def _progress_report( + self, + token: str, + message: Optional[str] = None, + percentage: Optional[int] = None, + ) -> None: + value = { + "kind": "report", + } + if message: + value["message"] = message + if percentage: + value["percentage"] = percentage + + self._endpoint.notify( + self.M_PROGRESS, + params={ + "token": token, + "value": value, + }, + ) + + def _progress_end(self, token: str, message: Optional[str] = None) -> None: + value = { + "kind": "end", + } + if message: + value["message"] = message + + self._endpoint.notify( + self.M_PROGRESS, + params={ + "token": token, + "value": value, + }, + ) + + def log_message(self, message, msg_type=lsp.MessageType.Info): + self._endpoint.notify( + self.M_LOG_MESSAGE, params={"type": msg_type, "message": message} + ) + + def show_message(self, message, msg_type=lsp.MessageType.Info) -> None: + self._endpoint.notify( + self.M_SHOW_MESSAGE, params={"type": msg_type, "message": message} + ) def source_roots(self, document_path): """Return the source roots for the given document.""" - files = _utils.find_parents(self._root_path, document_path, ['setup.py', 'pyproject.toml']) or [] - return list({os.path.dirname(project_file) for project_file in files}) or [self._root_path] + files = ( + _utils.find_parents( + self._root_path, document_path, ["setup.py", "pyproject.toml"] + ) + or [] + ) + return list({os.path.dirname(project_file) for project_file in files}) or [ + self._root_path + ] def _create_document(self, doc_uri, source=None, version=None): path = uris.to_fs_path(doc_uri) @@ -128,11 +359,50 @@ def _create_document(self, doc_uri, 
source=None, version=None): rope_project_builder=self._rope_project_builder, ) + def _create_notebook_document( + self, doc_uri, notebook_type, cells, version=None, metadata=None + ): + return Notebook( + doc_uri, + notebook_type, + self, + cells=cells, + version=version, + metadata=metadata, + ) -class Document: + def _create_cell_document( + self, doc_uri, notebook_uri, language_id, source=None, version=None + ): + # TODO: remove what is unnecessary here. + path = uris.to_fs_path(doc_uri) + return Cell( + doc_uri, + notebook_uri=notebook_uri, + language_id=language_id, + workspace=self, + source=source, + version=version, + extra_sys_path=self.source_roots(path), + rope_project_builder=self._rope_project_builder, + ) + + def close(self) -> None: + if self.__rope_autoimport: + self.__rope_autoimport.close() - def __init__(self, uri, workspace, source=None, version=None, local=True, extra_sys_path=None, - rope_project_builder=None): + +class Document: + def __init__( + self, + uri, + workspace, + source=None, + version=None, + local=True, + extra_sys_path=None, + rope_project_builder=None, + ) -> None: self.uri = uri self.version = version self.path = uris.to_fs_path(uri) @@ -152,9 +422,11 @@ def __str__(self): return str(self.uri) def _rope_resource(self, rope_config): - # pylint: disable=import-outside-toplevel from rope.base import libutils - return libutils.path_to_resource(self._rope_project_builder(rope_config), self.path) + + return libutils.path_to_resource( + self._rope_project_builder(rope_config), self.path + ) @property @lock @@ -165,31 +437,32 @@ def lines(self): @lock def source(self): if self._source is None: - with io.open(self.path, 'r', encoding='utf-8') as f: + with open(self.path, encoding="utf-8") as f: return f.read() return self._source - def update_config(self, settings): - self._config.update((settings or {}).get('pylsp', {})) + def update_config(self, settings) -> None: + self._config.update((settings or {}).get("pylsp", {})) @lock def 
apply_change(self, change): """Apply a change to the document.""" - text = change['text'] - change_range = change.get('range') + text = change["text"] + change_range = change.get("range") if not change_range: # The whole file has changed self._source = text return - start_line = change_range['start']['line'] - start_col = change_range['start']['character'] - end_line = change_range['end']['line'] - end_col = change_range['end']['character'] + start_line = change_range["start"]["line"] + start_col = change_range["start"]["character"] + end_line = change_range["end"]["line"] + end_col = change_range["end"]["character"] # Check for an edit occuring at the very end of the file - if start_line == len(self.lines): + lines = self.lines + if start_line == len(lines): self._source = self.source + text return @@ -198,7 +471,7 @@ def apply_change(self, change): # Iterate over the existing document until we hit the edit range, # at which point we write the new text, then loop until we hit # the end of the range and continue writing. 
- for i, line in enumerate(self.lines): + for i, line in enumerate(lines): if i < start_line: new.write(line) continue @@ -218,15 +491,16 @@ def apply_change(self, change): def offset_at_position(self, position): """Return the byte-offset pointed at by the given position.""" - return position['character'] + len(''.join(self.lines[:position['line']])) + return position["character"] + len("".join(self.lines[: position["line"]])) def word_at_position(self, position): """Get the word under the cursor returning the start and end positions.""" - if position['line'] >= len(self.lines): - return '' + lines = self.lines + if position["line"] >= len(lines): + return "" - line = self.lines[position['line']] - i = position['character'] + line = lines[position["line"]] + i = position["character"] # Split word in two start = line[:i] end = line[i:] @@ -241,29 +515,45 @@ def word_at_position(self, position): @lock def jedi_names(self, all_scopes=False, definitions=True, references=False): script = self.jedi_script() - return script.get_names(all_scopes=all_scopes, definitions=definitions, - references=references) + return script.get_names( + all_scopes=all_scopes, definitions=definitions, references=references + ) @lock def jedi_script(self, position=None, use_document_path=False): extra_paths = [] environment_path = None env_vars = None + prioritize_extra_paths = False if self._config: - jedi_settings = self._config.plugin_settings('jedi', document_path=self.path) - environment_path = jedi_settings.get('environment') - extra_paths = jedi_settings.get('extra_paths') or [] - env_vars = jedi_settings.get('env_vars') - - # Drop PYTHONPATH from env_vars before creating the environment because that makes - # Jedi throw an error. 
+ jedi_settings = self._config.plugin_settings( + "jedi", document_path=self.path + ) + jedi.settings.auto_import_modules = jedi_settings.get( + "auto_import_modules", DEFAULT_AUTO_IMPORT_MODULES + ) + environment_path = jedi_settings.get("environment") + # Jedi itself cannot deal with homedir-relative paths. + # On systems, where it is expected, expand the home directory. + if environment_path and os.name != "nt": + environment_path = os.path.expanduser(environment_path) + + extra_paths = jedi_settings.get("extra_paths") or [] + env_vars = jedi_settings.get("env_vars") + prioritize_extra_paths = jedi_settings.get("prioritize_extra_paths") + + # Drop PYTHONPATH from env_vars before creating the environment to + # ensure that Jedi can startup properly without module name collision. if env_vars is None: env_vars = os.environ.copy() - env_vars.pop('PYTHONPATH', None) + env_vars.pop("PYTHONPATH", None) + + environment = self.get_enviroment(environment_path, env_vars=env_vars) + sys_path = self.sys_path( + environment_path, env_vars, prioritize_extra_paths, extra_paths + ) - environment = self.get_enviroment(environment_path, env_vars=env_vars) if environment_path else None - sys_path = self.sys_path(environment_path, env_vars=env_vars) + extra_paths project_path = self._workspace.root_path # Extend sys_path with document's path if requested @@ -271,10 +561,10 @@ def jedi_script(self, position=None, use_document_path=False): sys_path += [os.path.normpath(os.path.dirname(self.path))] kwargs = { - 'code': self.source, - 'path': self.path, - 'environment': environment, - 'project': jedi.Project(path=project_path, sys_path=sys_path), + "code": self.source, + "path": self.path, + "environment": environment if environment_path else None, + "project": jedi.Project(path=project_path, sys_path=sys_path), } if position: @@ -291,17 +581,133 @@ def get_enviroment(self, environment_path=None, env_vars=None): if environment_path in self._workspace._environments: environment = 
self._workspace._environments[environment_path] else: - environment = jedi.api.environment.create_environment(path=environment_path, - safe=False, - env_vars=env_vars) + environment = jedi.api.environment.create_environment( + path=environment_path, safe=False, env_vars=env_vars + ) self._workspace._environments[environment_path] = environment return environment - def sys_path(self, environment_path=None, env_vars=None): + def sys_path( + self, + environment_path=None, + env_vars=None, + prioritize_extra_paths=False, + extra_paths=[], + ): # Copy our extra sys path - # TODO: when safe to break API, use env_vars explicitly to pass to create_environment path = list(self._extra_sys_path) - environment = self.get_enviroment(environment_path=environment_path, env_vars=env_vars) + environment = self.get_enviroment( + environment_path=environment_path, env_vars=env_vars + ) path.extend(environment.get_sys_path()) + if prioritize_extra_paths: + path = extra_paths + path + else: + path = path + extra_paths + return path + + +class Notebook: + """Represents a notebook.""" + + def __init__( + self, uri, notebook_type, workspace, cells=None, version=None, metadata=None + ) -> None: + self.uri = uri + self.notebook_type = notebook_type + self.workspace = workspace + self.version = version + self.cells = cells or [] + self.metadata = metadata or {} + self._lock = RLock() + + def __str__(self): + return "Notebook with URI '%s'" % str(self.uri) + + def add_cells(self, new_cells: list, start: int) -> None: + self.cells[start:start] = new_cells + + def remove_cells(self, start: int, delete_count: int) -> None: + del self.cells[start : start + delete_count] + + def cell_data(self): + """Extract current cell data. + + Returns a dict (ordered by cell position) where the key is the cell uri and the + value is a dict with line_start, line_end, and source attributes. 
+ """ + cell_data = {} + offset = 0 + for cell in self.cells: + cell_uri = cell["document"] + cell_document = self.workspace.get_cell_document(cell_uri) + num_lines = cell_document.line_count + cell_data[cell_uri] = { + "line_start": offset, + "line_end": offset + num_lines - 1, + "source": cell_document.source, + } + offset += num_lines + return cell_data + + @lock + def jedi_names( + self, + up_to_cell_uri: Optional[str] = None, + all_scopes=False, + definitions=True, + references=False, + ): + """ + Get the names in the notebook up to a certain cell. + + Parameters + ---------- + up_to_cell_uri: str, optional + The cell uri to stop at. If None, all cells are considered. + """ + names = set() + for cell in self.cells: + cell_uri = cell["document"] + cell_document = self.workspace.get_cell_document(cell_uri) + names.update(cell_document.jedi_names(all_scopes, definitions, references)) + if cell_uri == up_to_cell_uri: + break + return {name.name for name in names} + + +class Cell(Document): + """ + Represents a cell in a notebook. + + Notes + ----- + We inherit from Document for now to get the same API. However, a cell document differs from text documents in that + they have a language id. + """ + + def __init__( + self, + uri, + notebook_uri, + language_id, + workspace, + source=None, + version=None, + local=True, + extra_sys_path=None, + rope_project_builder=None, + ) -> None: + super().__init__( + uri, workspace, source, version, local, extra_sys_path, rope_project_builder + ) + self.language_id = language_id + self.notebook_uri = notebook_uri + + @property + @lock + def line_count(self): + """ "Return the number of lines in the cell document.""" + return len(self.source.split("\n")) diff --git a/pyproject.toml b/pyproject.toml index cbad00d4..8fe9c574 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,7 +1,189 @@ +# Copyright 2017-2020 Palantir Technologies, Inc. +# Copyright 2021- Python Language Server Contributors. 
+ [build-system] -requires = ["setuptools>=44", "wheel", "setuptools_scm[toml]>=3.4.3"] +requires = ["setuptools>=69.0.0", "setuptools_scm[toml]>=3.4.3"] build-backend = "setuptools.build_meta" +[project] +name = "python-lsp-server" +authors = [{name = "Python Language Server Contributors"}] +description = "Python Language Server for the Language Server Protocol" +readme = "README.md" +license = "MIT" +license-files = ["LICENSE"] +requires-python = ">=3.9" +dependencies = [ + "docstring-to-markdown", + "importlib_metadata>=4.8.3;python_version<\"3.10\"", + "jedi>=0.17.2,<0.20.0", + "pluggy>=1.0.0", + "python-lsp-jsonrpc>=1.1.0,<2.0.0", + "ujson>=3.0.0", + "black" +] +dynamic = ["version"] + +[project.urls] +Homepage = "https://github.com/python-lsp/python-lsp-server" + +[project.optional-dependencies] +all = [ + "autopep8>=2.0.4,<2.1.0", + "flake8>=7.1,<8", + "mccabe>=0.7.0,<0.8.0", + "pycodestyle>=2.12.0,<2.13.0", + "pydocstyle>=6.3.0,<6.4.0", + "pyflakes>=3.2.0,<3.3.0", + "pylint>=3.1,<4.1.0", + "rope>=1.11.0", + "yapf>=0.33.0", + "whatthepatch>=1.0.2,<2.0.0" +] +autopep8 = ["autopep8>=2.0.4,<2.1.0"] +flake8 = ["flake8>=7.1,<8"] +mccabe = ["mccabe>=0.7.0,<0.8.0"] +pycodestyle = ["pycodestyle>=2.12.0,<2.13.0"] +pydocstyle = ["pydocstyle>=6.3.0,<6.4.0"] +pyflakes = ["pyflakes>=3.2.0,<3.3.0"] +pylint = ["pylint>=3.1,<4.1.0"] +rope = ["rope>=1.11.0"] +yapf = ["yapf>=0.33.0", "whatthepatch>=1.0.2,<2.0.0"] +websockets = ["websockets>=10.3"] +test = [ + "pylint>=3.1,<4.1.0", + "pytest", + "pytest-cov", + "coverage", + "numpy", + "pandas", + "matplotlib", + "pyqt6", + "flaky", + "websockets>=10.3", +] + +[project.entry-points.pylsp] +autopep8 = "pylsp.plugins.autopep8_format" +folding = "pylsp.plugins.folding" +flake8 = "pylsp.plugins.flake8_lint" +jedi_completion = "pylsp.plugins.jedi_completion" +jedi_definition = "pylsp.plugins.definition" +jedi_type_definition = "pylsp.plugins.type_definition" +jedi_hover = "pylsp.plugins.hover" +jedi_highlight = 
"pylsp.plugins.highlight" +jedi_references = "pylsp.plugins.references" +jedi_rename = "pylsp.plugins.jedi_rename" +jedi_signature_help = "pylsp.plugins.signature" +jedi_symbols = "pylsp.plugins.symbols" +mccabe = "pylsp.plugins.mccabe_lint" +preload = "pylsp.plugins.preload_imports" +pycodestyle = "pylsp.plugins.pycodestyle_lint" +pydocstyle = "pylsp.plugins.pydocstyle_lint" +pyflakes = "pylsp.plugins.pyflakes_lint" +pylint = "pylsp.plugins.pylint_lint" +rope_completion = "pylsp.plugins.rope_completion" +rope_autoimport = "pylsp.plugins.rope_autoimport" +yapf = "pylsp.plugins.yapf_format" + +[project.scripts] +pylsp = "pylsp.__main__:main" + +[tool.ruff] +# Exclude a variety of commonly ignored directories. +exclude = [ + ".bzr", + ".direnv", + ".eggs", + ".git", + ".git-rewrite", + ".hg", + ".ipynb_checkpoints", + ".mypy_cache", + ".nox", + ".pants.d", + ".pyenv", + ".pytest_cache", + ".pytype", + ".ruff_cache", + ".svn", + ".tox", + ".venv", + ".vscode", + "__pypackages__", + "_build", + "buck-out", + "build", + "dist", + "node_modules", + "site-packages", + "venv", +] + +# Same as Black. +line-length = 88 +indent-width = 4 + +# Assume Python 3.9 +target-version = "py39" + +[tool.ruff.lint] +# https://docs.astral.sh/ruff/rules/ +select = ["E", "F", "W", "C", "I"] +ignore = [ + "C901", # McCabe complexity warning + "E501", # Line too long + "E722", # Do not use bare `except` +] + +# Allow fix for all enabled rules (when `--fix`) is provided. +fixable = ["ALL"] +unfixable = [] + +# Allow unused variables when underscore-prefixed. +dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$" + +[tool.ruff.format] +# Like Black, use double quotes for strings. +quote-style = "double" + +# Like Black, indent with spaces, rather than tabs. +indent-style = "space" + +# Like Black, respect magic trailing commas. +skip-magic-trailing-comma = false + +# Like Black, automatically detect the appropriate line ending. 
+line-ending = "auto" + +# Enable auto-formatting of code examples in docstrings. Markdown, +# reStructuredText code/literal blocks and doctests are all supported. +# +# This is currently disabled by default, but it is planned for this +# to be opt-out in the future. +docstring-code-format = false + +# Set the line length limit used when formatting code snippets in +# docstrings. +# +# This only has an effect when the `docstring-code-format` setting is +# enabled. +docstring-code-line-length = "dynamic" + +[tool.setuptools] +include-package-data = false + +[tool.setuptools.packages.find] +exclude = ["contrib", "docs", "test", "test.*", "test.plugins", "test.plugins.*"] +namespaces = false + [tool.setuptools_scm] write_to = "pylsp/_version.py" write_to_template = "__version__ = \"{version}\"\n" # VERSION_INFO is populated in __main__ + +[tool.pytest.ini_options] +testpaths = ["test"] +addopts = "--cov-report html --cov-report term --junitxml=pytest.xml --cov pylsp --cov test" + +[tool.coverage.run] +concurrency = ["multiprocessing", "thread"] diff --git a/scripts/jsonschema2md.py b/scripts/jsonschema2md.py index b10de886..c0a00759 100644 --- a/scripts/jsonschema2md.py +++ b/scripts/jsonschema2md.py @@ -10,7 +10,7 @@ def describe_array(prop: dict) -> str: if "uniqueItems" in prop: unique_qualifier = "unique" if prop["uniqueItems"] else "non-unique" item_type = describe_type(prop["items"]) - extra += f" of {unique_qualifier} {item_type} items" + extra = " ".join(filter(bool, ["of", unique_qualifier, item_type, "items"])) return extra @@ -31,13 +31,19 @@ def describe_number(prop: dict) -> str: def describe_type(prop: dict) -> str: prop_type = prop["type"] - label = f"`{prop_type}`" - if prop_type in EXTRA_DESCRIPTORS: - label += " " + EXTRA_DESCRIPTORS[prop_type](prop) - if "enum" in prop: - allowed_values = [f"`{value}`" for value in prop["enum"]] - label += "one of: " + ", ".join(allowed_values) - return label + types = prop_type if isinstance(prop_type, list) else 
[prop_type] + if "null" in types: + types.remove("null") + if len(types) == 1: + prop_type = types[0] + parts = [f"`{prop_type}`"] + for option in types: + if option in EXTRA_DESCRIPTORS: + parts.append(EXTRA_DESCRIPTORS[option](prop)) + if "enum" in prop: + allowed_values = [f"`{value!r}`" for value in prop["enum"]] + parts.append("(one of: " + ", ".join(allowed_values) + ")") + return " ".join(parts) def convert_schema(schema: dict, source: str = None) -> str: @@ -67,7 +73,7 @@ def convert_schema(schema: dict, source: str = None) -> str: return "\n".join(lines) -def main(argv): +def main(argv) -> None: parser = ArgumentParser() parser.add_argument("schema", type=FileType()) parser.add_argument("markdown", type=FileType("w+"), default=sys.stdout) diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index 17db54d9..00000000 --- a/setup.cfg +++ /dev/null @@ -1,91 +0,0 @@ -[metadata] -name = python-lsp-server -author = Python Language Server Contributors -description = Python Language Server for the Language Server Protocol -url = https://github.com/python-lsp/python-lsp-server -long_description = file: README.md -long_description_content_type = text/markdown -license = MIT -license_file = LICENSE - -[options] -packages = find: -python_requires = >=3.7 -install_requires = - jedi>=0.17.2,<0.19.0 - python-lsp-jsonrpc>=1.0.0 - pluggy>=1.0.0 - ujson>=3.0.0 - setuptools>=39.0.0 -setup_requires = setuptools>=44; wheel; setuptools_scm[toml]>=3.4.3 - -[options.packages.find] -exclude = contrib; docs; test; test.*; test.plugins; test.plugins.* - -[options.extras_require] -all = - autopep8>=1.6.0,<1.7.0 - flake8>=4.0.0,<4.1.0 - mccabe>=0.6.0,<0.7.0 - pycodestyle>=2.8.0,<2.9.0 - pydocstyle>=2.0.0 - pyflakes>=2.4.0,<2.5.0 - pylint>=2.5.0 - rope>=0.10.5 - yapf -autopep8 = autopep8>=1.6.0,<1.7.0 -flake8 = flake8>=4.0.0,<4.1.0 -mccabe = mccabe>=0.6.0,<0.7.0 -pycodestyle = pycodestyle>=2.8.0,<2.9.0 -pydocstyle = pydocstyle>=2.0.0 -pyflakes = pyflakes>=2.4.0,<2.5.0 -pylint = 
pylint>=2.5.0 -rope = rope>0.10.5 -yapf = yapf -test = - pylint>=2.5.0 - pytest - pytest-cov - coverage - numpy - pandas - matplotlib - pyqt5 - flaky - -[options.entry_points] -console_scripts = pylsp = pylsp.__main__:main -pylsp = - autopep8 = pylsp.plugins.autopep8_format - folding = pylsp.plugins.folding - flake8 = pylsp.plugins.flake8_lint - jedi_completion = pylsp.plugins.jedi_completion - jedi_definition = pylsp.plugins.definition - jedi_hover = pylsp.plugins.hover - jedi_highlight = pylsp.plugins.highlight - jedi_references = pylsp.plugins.references - jedi_rename = pylsp.plugins.jedi_rename - jedi_signature_help = pylsp.plugins.signature - jedi_symbols = pylsp.plugins.symbols - mccabe = pylsp.plugins.mccabe_lint - preload = pylsp.plugins.preload_imports - pycodestyle = pylsp.plugins.pycodestyle_lint - pydocstyle = pylsp.plugins.pydocstyle_lint - pyflakes = pylsp.plugins.pyflakes_lint - pylint = pylsp.plugins.pylint_lint - rope_completion = pylsp.plugins.rope_completion - rope_rename = pylsp.plugins.rope_rename - yapf = pylsp.plugins.yapf_format - -[pycodestyle] -ignore = E226, E722, W504 -max-line-length = 120 -exclude = test/plugins/.ropeproject,test/.ropeproject - - -[tool:pytest] -testpaths = test -addopts = - --cov-report html --cov-report term --junitxml=pytest.xml - --cov pylsp --cov test - diff --git a/setup.py b/setup.py index 28d2d305..762deb8a 100755 --- a/setup.py +++ b/setup.py @@ -3,10 +3,9 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
-from setuptools import setup, find_packages +from setuptools import setup if __name__ == "__main__": setup( name="python-lsp-server", # to allow GitHub dependency tracking work - packages=find_packages(exclude=["contrib", "docs", "test", "test.*"]), # https://github.com/pypa/setuptools/issues/2688 ) diff --git a/test/__init__.py b/test/__init__.py index ab920d25..0936e783 100644 --- a/test/__init__.py +++ b/test/__init__.py @@ -1,13 +1,9 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. -import sys import pytest -from pylsp import IS_WIN -IS_PY3 = sys.version_info.major == 3 +from pylsp import IS_WIN unix_only = pytest.mark.skipif(IS_WIN, reason="Unix only") windows_only = pytest.mark.skipif(not IS_WIN, reason="Windows only") -py3_only = pytest.mark.skipif(not IS_PY3, reason="Python3 only") -py2_only = pytest.mark.skipif(IS_PY3, reason="Python2 only") diff --git a/test/conftest.py b/test/conftest.py index 839fd126..a9010517 100644 --- a/test/conftest.py +++ b/test/conftest.py @@ -1,13 +1,13 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
-""" py.test configuration""" +"""pytest configuration""" + import logging + from pylsp.__main__ import LOG_FORMAT logging.basicConfig(level=logging.DEBUG, format=LOG_FORMAT) -pytest_plugins = [ - 'test.fixtures' -] +pytest_plugins = ["test.fixtures"] diff --git a/test/data/publish_diagnostics_message_examples/example_1.json b/test/data/publish_diagnostics_message_examples/example_1.json new file mode 100644 index 00000000..25d43a17 --- /dev/null +++ b/test/data/publish_diagnostics_message_examples/example_1.json @@ -0,0 +1,36 @@ +{ + "diagnostics": [ + { + "message": "invalid syntax", + "range": { + "end": { + "character": 15, + "line": 1 + }, + "start": { + "character": 7, + "line": 1 + } + }, + "severity": 1, + "source": "pyflakes" + }, + { + "code": "W292", + "message": "W292 no newline at end of file", + "range": { + "end": { + "character": 7, + "line": 1 + }, + "start": { + "character": 7, + "line": 1 + } + }, + "severity": 2, + "source": "pycodestyle" + } + ], + "uri": "/Users/.../code/python-lsp-server/test" +} diff --git a/test/data/publish_diagnostics_message_examples/example_2.json b/test/data/publish_diagnostics_message_examples/example_2.json new file mode 100644 index 00000000..006f95a6 --- /dev/null +++ b/test/data/publish_diagnostics_message_examples/example_2.json @@ -0,0 +1,68 @@ +{ + "diagnostics": [ + { + "message": "'sys' imported but unused", + "range": { + "end": { + "character": 11, + "line": 0 + }, + "start": { + "character": 0, + "line": 0 + } + }, + "severity": 2, + "source": "pyflakes" + }, + { + "code": "E225", + "message": "E225 missing whitespace around operator", + "range": { + "end": { + "character": 4, + "line": 1 + }, + "start": { + "character": 1, + "line": 1 + } + }, + "severity": 2, + "source": "pycodestyle" + }, + { + "code": "W292", + "message": "W292 no newline at end of file", + "range": { + "end": { + "character": 5, + "line": 2 + }, + "start": { + "character": 5, + "line": 2 + } + }, + "severity": 2, + "source": 
"pycodestyle" + }, + { + "code": "E225", + "message": "E225 missing whitespace around operator", + "range": { + "end": { + "character": 5, + "line": 2 + }, + "start": { + "character": 1, + "line": 2 + } + }, + "severity": 2, + "source": "pycodestyle" + } + ], + uri: "/Users/.../code/python-lsp-server/test" +} \ No newline at end of file diff --git a/test/fixtures.py b/test/fixtures.py index e57bda6b..258781f9 100644 --- a/test/fixtures.py +++ b/test/fixtures.py @@ -3,14 +3,18 @@ import os from io import StringIO -from unittest.mock import Mock +from unittest.mock import MagicMock + import pytest +from pylsp_jsonrpc.dispatchers import MethodDispatcher +from pylsp_jsonrpc.endpoint import Endpoint +from pylsp_jsonrpc.exceptions import JsonRpcException from pylsp import uris from pylsp.config.config import Config from pylsp.python_lsp import PythonLSPServer -from pylsp.workspace import Workspace, Document - +from pylsp.workspace import Document, Workspace +from test.test_utils import CALL_TIMEOUT_IN_SECONDS, ClientServerPair DOC_URI = uris.from_fs_path(__file__) DOC = """import sys @@ -20,15 +24,53 @@ def main(): """ +class FakeEditorMethodsMixin: + """ + Represents the methods to be added to a dispatcher class when faking an editor. + """ + + def m_window__work_done_progress__create(self, *_args, **_kwargs): + """ + Fake editor method `window/workDoneProgress/create`. + + related spec: + https://microsoft.github.io/language-server-protocol/specifications/lsp/3.17/specification/#window_workDoneProgress_create + """ + return None + + +class FakePythonLSPServer(FakeEditorMethodsMixin, PythonLSPServer): + pass + + +class FakeEndpoint(Endpoint): + """ + Fake Endpoint representing the editor / LSP client. + + The `dispatcher` dict will be used to synchronously calculate the responses + for calls to `.request` and resolve the futures with the value or errors. + + Fake methods in the `dispatcher` should raise `JsonRpcException` for any + error. 
+ """ + + def request(self, method, params=None): + request_future = super().request(method, params) + try: + request_future.set_result(self._dispatcher[method](params)) + except JsonRpcException as e: + request_future.set_exception(e) + + return request_future + + @pytest.fixture def pylsp(tmpdir): - """ Return an initialized python LS """ - ls = PythonLSPServer(StringIO, StringIO) + """Return an initialized python LS""" + ls = FakePythonLSPServer(StringIO, StringIO, endpoint_cls=FakeEndpoint) ls.m_initialize( - processId=1, - rootUri=uris.from_fs_path(str(tmpdir)), - initializationOptions={} + processId=1, rootUri=uris.from_fs_path(str(tmpdir)), initializationOptions={} ) return ls @@ -36,72 +78,83 @@ def pylsp(tmpdir): @pytest.fixture def pylsp_w_workspace_folders(tmpdir): - """ Return an initialized python LS """ - ls = PythonLSPServer(StringIO, StringIO) + """Return an initialized python LS""" + ls = FakePythonLSPServer(StringIO, StringIO, endpoint_cls=FakeEndpoint) - folder1 = tmpdir.mkdir('folder1') - folder2 = tmpdir.mkdir('folder2') + folder1 = tmpdir.mkdir("folder1") + folder2 = tmpdir.mkdir("folder2") ls.m_initialize( processId=1, rootUri=uris.from_fs_path(str(folder1)), initializationOptions={}, workspaceFolders=[ - { - 'uri': uris.from_fs_path(str(folder1)), - 'name': 'folder1' - }, - { - 'uri': uris.from_fs_path(str(folder2)), - 'name': 'folder2' - } - ] + {"uri": uris.from_fs_path(str(folder1)), "name": "folder1"}, + {"uri": uris.from_fs_path(str(folder2)), "name": "folder2"}, + ], ) workspace_folders = [folder1, folder2] return (ls, workspace_folders) +@pytest.fixture() +def consumer(): + return MagicMock() + + +@pytest.fixture() +def endpoint(consumer): + class Dispatcher(FakeEditorMethodsMixin, MethodDispatcher): + pass + + return FakeEndpoint(Dispatcher(), consumer, id_generator=lambda: "id") + + @pytest.fixture -def workspace(tmpdir): +def workspace(tmpdir, endpoint) -> None: """Return a workspace.""" - ws = 
Workspace(uris.from_fs_path(str(tmpdir)), Mock()) + ws = Workspace(uris.from_fs_path(str(tmpdir)), endpoint) ws._config = Config(ws.root_uri, {}, 0, {}) - return ws + yield ws + ws.close() @pytest.fixture -def workspace_other_root_path(tmpdir): +def workspace_other_root_path(tmpdir, endpoint): """Return a workspace with a root_path other than tmpdir.""" - ws_path = str(tmpdir.mkdir('test123').mkdir('test456')) - ws = Workspace(uris.from_fs_path(ws_path), Mock()) + ws_path = str(tmpdir.mkdir("test123").mkdir("test456")) + ws = Workspace(uris.from_fs_path(ws_path), endpoint) ws._config = Config(ws.root_uri, {}, 0, {}) return ws @pytest.fixture -def config(workspace): # pylint: disable=redefined-outer-name +def config(workspace): """Return a config object.""" cfg = Config(workspace.root_uri, {}, 0, {}) - cfg._plugin_settings = {'plugins': {'pylint': {'enabled': False, 'args': [], 'executable': None}}} + cfg._plugin_settings = { + "plugins": {"pylint": {"enabled": False, "args": [], "executable": None}} + } return cfg @pytest.fixture -def doc(workspace): # pylint: disable=redefined-outer-name +def doc(workspace): return Document(DOC_URI, workspace, DOC) @pytest.fixture -def temp_workspace_factory(workspace): # pylint: disable=redefined-outer-name - ''' +def temp_workspace_factory(workspace): + """ Returns a function that creates a temporary workspace from the files dict. 
The dict is in the format {"file_name": "file_contents"} - ''' + """ + def fn(files): def create_file(name, content): fn = os.path.join(workspace.root_path, name) - with open(fn, 'w', encoding='utf-8') as f: + with open(fn, "w", encoding="utf-8") as f: f.write(content) workspace.put_document(uris.from_fs_path(fn), content) @@ -110,3 +163,31 @@ def create_file(name, content): return workspace return fn + + +@pytest.fixture +def client_server_pair() -> None: + """A fixture that sets up a client/server pair and shuts down the server""" + client_server_pair_obj = ClientServerPair() + + yield (client_server_pair_obj.client, client_server_pair_obj.server) + + shutdown_response = client_server_pair_obj.client._endpoint.request( + "shutdown" + ).result(timeout=CALL_TIMEOUT_IN_SECONDS) + assert shutdown_response is None + client_server_pair_obj.client._endpoint.notify("exit") + + +@pytest.fixture +def workspace_with_signature_docstring_disabled(workspace) -> None: + workspace._config.update( + { + "signature": { + **workspace._config.settings().get("signature", {}), + "include_docstring": False, + }, + } + ) + + yield workspace diff --git a/test/plugins/test_autoimport.py b/test/plugins/test_autoimport.py new file mode 100644 index 00000000..cbe3dde1 --- /dev/null +++ b/test/plugins/test_autoimport.py @@ -0,0 +1,348 @@ +# Copyright 2022- Python Language Server Contributors. 
+ +from typing import Any +from unittest.mock import Mock, patch + +import jedi +import parso +import pytest + +from pylsp import IS_WIN, lsp, uris +from pylsp.config.config import Config +from pylsp.plugins.rope_autoimport import ( + _get_score, + _should_insert, + cache, + get_name_or_module, + get_names, +) +from pylsp.plugins.rope_autoimport import ( + pylsp_completions as pylsp_autoimport_completions, +) +from pylsp.workspace import Workspace +from test.test_notebook_document import wait_for_condition +from test.test_utils import send_initialize_request, send_notebook_did_open + +DOC_URI = uris.from_fs_path(__file__) + + +def contains_autoimport_completion(suggestion: dict[str, Any], module: str) -> bool: + """Checks if `suggestion` contains an autoimport completion for `module`.""" + return suggestion.get("label", "") == module and "import" in suggestion.get( + "detail", "" + ) + + +def contains_autoimport_quickfix(suggestion: dict[str, Any], module: str) -> bool: + """Checks if `suggestion` contains an autoimport quick fix for `module`.""" + return suggestion.get("title", "") == f"import {module}" + + +@pytest.fixture(scope="session") +def autoimport_workspace(tmp_path_factory) -> Workspace: + "Special autoimport workspace. Persists across sessions to make in-memory sqlite3 database fast." 
+ workspace = Workspace( + uris.from_fs_path(str(tmp_path_factory.mktemp("pylsp"))), Mock() + ) + workspace._config = Config(workspace.root_uri, {}, 0, {}) + workspace._config.update( + { + "rope_autoimport": { + "memory": True, + "enabled": True, + "completions": {"enabled": True}, + "code_actions": {"enabled": True}, + } + } + ) + cache.reload_cache(workspace._config, workspace, single_thread=True) + yield workspace + workspace.close() + + +@pytest.fixture +def completions(config: Config, autoimport_workspace: Workspace, request) -> None: + document, position = request.param + com_position = {"line": 0, "character": position} + autoimport_workspace.put_document(DOC_URI, source=document) + doc = autoimport_workspace.get_document(DOC_URI) + yield pylsp_autoimport_completions( + config, autoimport_workspace, doc, com_position, None + ) + autoimport_workspace.rm_document(DOC_URI) + + +def should_insert(phrase: str, position: int): + expr = parso.parse(phrase) + word_node = expr.get_leaf_for_position((1, position)) + return _should_insert(expr, word_node) + + +def check_dict(query: dict, results: list[dict]) -> bool: + for result in results: + if all(result[key] == query[key] for key in query.keys()): + return True + return False + + +@pytest.mark.parametrize("completions", [("""pathli """, 6)], indirect=True) +def test_autoimport_completion(completions) -> None: + assert completions + assert check_dict( + {"label": "pathlib", "kind": lsp.CompletionItemKind.Module}, completions + ) + + +@pytest.mark.parametrize("completions", [("""import """, 7)], indirect=True) +def test_autoimport_import(completions) -> None: + assert len(completions) == 0 + + +@pytest.mark.parametrize("completions", [("""pathlib""", 2)], indirect=True) +def test_autoimport_pathlib(completions) -> None: + assert completions[0]["label"] == "pathlib" + + start = {"line": 0, "character": 0} + edit_range = {"start": start, "end": start} + assert completions[0]["additionalTextEdits"] == [ + {"range": 
edit_range, "newText": "import pathlib\n"} + ] + + +@pytest.mark.parametrize("completions", [("""import test\n""", 10)], indirect=True) +def test_autoimport_import_with_name(completions) -> None: + assert len(completions) == 0 + + +@pytest.mark.parametrize("completions", [("""def func(s""", 10)], indirect=True) +def test_autoimport_function(completions) -> None: + assert len(completions) == 0 + + +@pytest.mark.parametrize("completions", [("""class Test""", 10)], indirect=True) +def test_autoimport_class(completions) -> None: + assert len(completions) == 0 + + +@pytest.mark.parametrize("completions", [("""\n""", 0)], indirect=True) +def test_autoimport_empty_line(completions) -> None: + assert len(completions) == 0 + + +@pytest.mark.parametrize( + "completions", [("""class Test(NamedTupl):""", 20)], indirect=True +) +def test_autoimport_class_complete(completions) -> None: + assert len(completions) > 0 + + +@pytest.mark.parametrize( + "completions", [("""class Test(NamedTupl""", 20)], indirect=True +) +def test_autoimport_class_incomplete(completions) -> None: + assert len(completions) > 0 + + +@pytest.mark.parametrize("completions", [("""def func(s:Lis""", 12)], indirect=True) +def test_autoimport_function_typing(completions) -> None: + assert len(completions) > 0 + assert check_dict({"label": "List"}, completions) + + +@pytest.mark.parametrize( + "completions", [("""def func(s : Lis ):""", 16)], indirect=True +) +def test_autoimport_function_typing_complete(completions) -> None: + assert len(completions) > 0 + assert check_dict({"label": "List"}, completions) + + +@pytest.mark.parametrize( + "completions", [("""def func(s : Lis ) -> Generat:""", 29)], indirect=True +) +def test_autoimport_function_typing_return(completions) -> None: + assert len(completions) > 0 + assert check_dict({"label": "Generator"}, completions) + + +def test_autoimport_defined_name(config, workspace) -> None: + document = """List = "hi"\nLis""" + com_position = {"line": 1, "character": 3} + 
workspace.put_document(DOC_URI, source=document) + doc = workspace.get_document(DOC_URI) + completions = pylsp_autoimport_completions( + config, workspace, doc, com_position, None + ) + workspace.rm_document(DOC_URI) + assert not check_dict({"label": "List"}, completions) + + +class TestShouldInsert: + def test_dot(self) -> None: + assert not should_insert("""str.""", 4) + + def test_dot_partial(self) -> None: + assert not should_insert("""str.metho\n""", 9) + + def test_comment(self) -> None: + assert not should_insert("""#""", 1) + + def test_comment_indent(self) -> None: + assert not should_insert(""" # """, 5) + + def test_from(self) -> None: + assert not should_insert("""from """, 5) + assert should_insert("""from """, 4) + + +def test_sort_sources() -> None: + result1 = _get_score(1, "import pathlib", "pathlib", "pathli") + result2 = _get_score(2, "import pathlib", "pathlib", "pathli") + assert result1 < result2 + + +def test_sort_statements() -> None: + result1 = _get_score( + 2, "from importlib_metadata import pathlib", "pathlib", "pathli" + ) + result2 = _get_score(2, "import pathlib", "pathlib", "pathli") + assert result1 > result2 + + +def test_sort_both() -> None: + result1 = _get_score( + 3, "from importlib_metadata import pathlib", "pathlib", "pathli" + ) + result2 = _get_score(2, "import pathlib", "pathlib", "pathli") + assert result1 > result2 + + +def test_get_names() -> None: + source = """ + from a import s as e + import blah, bleh + hello = "str" + a, b = 1, 2 + def someone(): + soemthing + class sfa: + sfiosifo + """ + results = get_names(jedi.Script(code=source)) + assert results == {"blah", "bleh", "e", "hello", "someone", "sfa", "a", "b"} + + +# Tests ruff, flake8 and pyflakes messages +@pytest.mark.parametrize( + "message", + ["Undefined name `os`", "F821 undefined name 'numpy'", "undefined name 'numpy'"], +) +def test_autoimport_code_actions_get_correct_module_name( + autoimport_workspace, message +) -> None: + source = "os.path.join('a', 
'b')" + autoimport_workspace.put_document(DOC_URI, source=source) + doc = autoimport_workspace.get_document(DOC_URI) + diagnostic = { + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 2}, + }, + "message": message, + } + module_name = get_name_or_module(doc, diagnostic) + autoimport_workspace.rm_document(DOC_URI) + assert module_name == "os" + + +def make_context(module_name, line, character_start, character_end): + return { + "diagnostics": [ + { + "message": f"undefined name '{module_name}'", + "range": { + "start": {"line": line, "character": character_start}, + "end": {"line": line, "character": character_end}, + }, + } + ] + } + + +def position(line, character): + return {"line": line, "character": character} + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_autoimport_code_actions_and_completions_for_notebook_document( + client_server_pair, +) -> None: + client, server = client_server_pair + send_initialize_request( + client, + { + "pylsp": { + "plugins": { + "rope_autoimport": { + "memory": True, + "enabled": True, + "completions": {"enabled": True}, + }, + } + } + }, + ) + with patch.object(server._endpoint, "notify") as mock_notify: + # Expectations: + # 1. We receive an autoimport suggestion for "os" in the first cell because + # os is imported after that. + # 2. We don't receive an autoimport suggestion for "os" in the second cell because it's + # already imported in the second cell. + # 3. We don't receive an autoimport suggestion for "os" in the third cell because it's + # already imported in the second cell. + # 4. We receive an autoimport suggestion for "sys" because it's not already imported. + # 5. If diagnostics doesn't contain "undefined name ...", we send empty quick fix suggestions. 
+ send_notebook_did_open(client, ["os", "import os\nos", "os", "sys"]) + wait_for_condition(lambda: mock_notify.call_count >= 4) + # We received diagnostics messages for every cell + assert all( + "textDocument/publishDiagnostics" in c.args + for c in mock_notify.call_args_list + ) + + rope_autoimport_settings = server.workspace._config.plugin_settings( + "rope_autoimport" + ) + assert rope_autoimport_settings.get("completions", {}).get("enabled", False) is True + assert rope_autoimport_settings.get("memory", False) is True + wait_for_condition(lambda: not cache.is_blocked()) + + # 1. + quick_fixes = server.code_actions("cell_1_uri", {}, make_context("os", 0, 0, 2)) + assert any(s for s in quick_fixes if contains_autoimport_quickfix(s, "os")) + + completions = server.completions("cell_1_uri", position(0, 2)).get("items") + assert any(s for s in completions if contains_autoimport_completion(s, "os")) + + # 2. + # We don't test code actions here as in this case, there would be no code actions sent bc + # there wouldn't be a diagnostics message. + completions = server.completions("cell_2_uri", position(1, 2)).get("items") + assert not any(s for s in completions if contains_autoimport_completion(s, "os")) + + # 3. + # Same as in 2. + completions = server.completions("cell_3_uri", position(0, 2)).get("items") + assert not any(s for s in completions if contains_autoimport_completion(s, "os")) + + # 4. + quick_fixes = server.code_actions("cell_4_uri", {}, make_context("sys", 0, 0, 3)) + assert any(s for s in quick_fixes if contains_autoimport_quickfix(s, "sys")) + + completions = server.completions("cell_4_uri", position(0, 3)).get("items") + assert any(s for s in completions if contains_autoimport_completion(s, "sys")) + + # 5. 
+ context = {"diagnostics": [{"message": "A random message"}]} + quick_fixes = server.code_actions("cell_4_uri", {}, context) + assert len(quick_fixes) == 0 diff --git a/test/plugins/test_autopep8_format.py b/test/plugins/test_autopep8_format.py index bb5bc31b..4966b89d 100644 --- a/test/plugins/test_autopep8_format.py +++ b/test/plugins/test_autopep8_format.py @@ -39,45 +39,48 @@ def func(): """ -def test_format(config, workspace): +def test_format(config, workspace) -> None: doc = Document(DOC_URI, workspace, DOC) - res = pylsp_format_document(config, doc) + res = pylsp_format_document(config, workspace, doc, options=None) assert len(res) == 1 - assert res[0]['newText'] == "a = 123\n\n\ndef func():\n pass\n" + assert res[0]["newText"] == "a = 123\n\n\ndef func():\n pass\n" -def test_range_format(config, workspace): +def test_range_format(config, workspace) -> None: doc = Document(DOC_URI, workspace, DOC) def_range = { - 'start': {'line': 0, 'character': 0}, - 'end': {'line': 2, 'character': 0} + "start": {"line": 0, "character": 0}, + "end": {"line": 2, "character": 0}, } - res = pylsp_format_range(config, doc, def_range) + res = pylsp_format_range(config, workspace, doc, def_range, options=None) assert len(res) == 1 # Make sure the func is still badly formatted - assert res[0]['newText'] == "a = 123\n\n\n\n\ndef func():\n pass\n" + assert res[0]["newText"] == "a = 123\n\n\n\n\ndef func():\n pass\n" -def test_no_change(config, workspace): +def test_no_change(config, workspace) -> None: doc = Document(DOC_URI, workspace, GOOD_DOC) - assert not pylsp_format_document(config, doc) + assert not pylsp_format_document(config, workspace, doc, options=None) -def test_hanging_indentation(config, workspace): +def test_hanging_indentation(config, workspace) -> None: doc = Document(DOC_URI, workspace, INDENTED_DOC) - res = pylsp_format_document(config, doc) + res = pylsp_format_document(config, workspace, doc, options=None) assert len(res) == 1 - assert res[0]['newText'] == 
CORRECT_INDENTED_DOC + assert res[0]["newText"] == CORRECT_INDENTED_DOC -@pytest.mark.parametrize('newline', ['\r\n', '\r']) -def test_line_endings(config, workspace, newline): - doc = Document(DOC_URI, workspace, f'import os;import sys{2 * newline}dict(a=1)') - res = pylsp_format_document(config, doc) +@pytest.mark.parametrize("newline", ["\r\n", "\r"]) +def test_line_endings(config, workspace, newline) -> None: + doc = Document(DOC_URI, workspace, f"import os;import sys{2 * newline}dict(a=1)") + res = pylsp_format_document(config, workspace, doc, options=None) - assert res[0]['newText'] == f'import os{newline}import sys{2 * newline}dict(a=1){newline}' + assert ( + res[0]["newText"] + == f"import os{newline}import sys{2 * newline}dict(a=1){newline}" + ) diff --git a/test/plugins/test_completion.py b/test/plugins/test_completion.py index f70990d5..ae5021f5 100644 --- a/test/plugins/test_completion.py +++ b/test/plugins/test_completion.py @@ -4,26 +4,24 @@ import math import os import sys - from pathlib import Path -from typing import NamedTuple, Dict +from typing import NamedTuple import pytest -from pylsp import uris, lsp -from pylsp.workspace import Document +from pylsp import lsp, uris +from pylsp._utils import JEDI_VERSION +from pylsp.plugins.jedi_completion import ( + pylsp_completion_item_resolve as pylsp_jedi_completion_item_resolve, +) from pylsp.plugins.jedi_completion import pylsp_completions as pylsp_jedi_completions -from pylsp.plugins.jedi_completion import pylsp_completion_item_resolve as pylsp_jedi_completion_item_resolve from pylsp.plugins.rope_completion import pylsp_completions as pylsp_rope_completions -from pylsp._utils import JEDI_VERSION - +from pylsp.workspace import Document -PY2 = sys.version[0] == '2' -LINUX = sys.platform.startswith('linux') -CI = os.environ.get('CI') -LOCATION = os.path.realpath( - os.path.join(os.getcwd(), os.path.dirname(__file__)) -) +PY2 = sys.version[0] == "2" +LINUX = sys.platform.startswith("linux") +CI = 
os.environ.get("CI") +LOCATION = os.path.realpath(os.path.join(os.getcwd(), os.path.dirname(__file__))) DOC_URI = uris.from_fs_path(__file__) DOC = """import os print os.path.isabs("/tmp") @@ -53,8 +51,8 @@ def documented_hello(): """ -def test_rope_import_completion(config, workspace): - com_position = {'line': 0, 'character': 7} +def test_rope_import_completion(config, workspace) -> None: + com_position = {"line": 0, "character": 7} doc = Document(DOC_URI, workspace, DOC) items = pylsp_rope_completions(config, workspace, doc, com_position) assert items is None @@ -67,366 +65,423 @@ class TypeCase(NamedTuple): expected: lsp.CompletionItemKind -TYPE_CASES: Dict[str, TypeCase] = { - 'variable': TypeCase( - document='test = 1\ntes', - position={'line': 1, 'character': 3}, - label='test', - expected=lsp.CompletionItemKind.Variable +# fmt: off +TYPE_CASES: dict[str, TypeCase] = { + "variable": TypeCase( + document="test = 1\ntes", + position={"line": 1, "character": 3}, + label="test", + expected=lsp.CompletionItemKind.Variable, ), - 'function': TypeCase( - document='def test():\n pass\ntes', - position={'line': 2, 'character': 3}, - label='test()', - expected=lsp.CompletionItemKind.Function + "function": TypeCase( + document="def test():\n pass\ntes", + position={"line": 2, "character": 3}, + label="test()", + expected=lsp.CompletionItemKind.Function, ), - 'keyword': TypeCase( - document='fro', - position={'line': 0, 'character': 3}, - label='from', - expected=lsp.CompletionItemKind.Keyword + "keyword": TypeCase( + document="fro", + position={"line": 0, "character": 3}, + label="from", + expected=lsp.CompletionItemKind.Keyword, ), - 'file': TypeCase( + "file": TypeCase( document='"' + __file__[:-2].replace('"', '\\"') + '"', - position={'line': 0, 'character': len(__file__) - 2}, + position={"line": 0, "character": len(__file__) - 2}, label=Path(__file__).name + '"', - expected=lsp.CompletionItemKind.File + expected=lsp.CompletionItemKind.File, ), - 'module': 
TypeCase( - document='import statis', - position={'line': 0, 'character': 13}, - label='statistics', - expected=lsp.CompletionItemKind.Module + "module": TypeCase( + document="import statis", + position={"line": 0, "character": 13}, + label="statistics", + expected=lsp.CompletionItemKind.Module, ), - 'class': TypeCase( - document='KeyErr', - position={'line': 0, 'character': 6}, - label='KeyError', - expected=lsp.CompletionItemKind.Class + "class": TypeCase( + document="KeyErr", + position={"line": 0, "character": 6}, + label="KeyError", + expected=lsp.CompletionItemKind.Class, ), - 'property': TypeCase( + "property": TypeCase( document=( - 'class A:\n' - ' @property\n' - ' def test(self):\n' - ' pass\n' - 'A().tes' + "class A:\n" + " @property\n" + " def test(self):\n" + " pass\n" + "A().tes" ), - position={'line': 4, 'character': 5}, - label='test', - expected=lsp.CompletionItemKind.Property - ) + position={"line": 4, "character": 5}, + label="test", + expected=lsp.CompletionItemKind.Property, + ), } +# fmt: on -@pytest.mark.parametrize('case', list(TYPE_CASES.values()), ids=list(TYPE_CASES.keys())) +@pytest.mark.parametrize("case", list(TYPE_CASES.values()), ids=list(TYPE_CASES.keys())) def test_jedi_completion_type(case, config, workspace): # property support was introduced in 0.18 - if case.expected == lsp.CompletionItemKind.Property and JEDI_VERSION.startswith('0.17'): + if case.expected == lsp.CompletionItemKind.Property and JEDI_VERSION.startswith( + "0.17" + ): return doc = Document(DOC_URI, workspace, case.document) items = pylsp_jedi_completions(config, doc, case.position) - items = {i['label']: i for i in items} - assert items[case.label]['kind'] == case.expected + items = {i["label"]: i for i in items} + assert items[case.label]["kind"] == case.expected -def test_jedi_completion(config, workspace): +def test_jedi_completion(config, workspace) -> None: # Over 'i' in os.path.isabs(...) 
- com_position = {'line': 1, 'character': 15} + com_position = {"line": 1, "character": 15} doc = Document(DOC_URI, workspace, DOC) items = pylsp_jedi_completions(config, doc, com_position) assert items - labels = [i['label'] for i in items] - assert 'isfile(path)' in labels + labels = [i["label"] for i in items] + assert "isfile(path)" in labels # Test we don't throw with big character - pylsp_jedi_completions(config, doc, {'line': 1, 'character': 1000}) + pylsp_jedi_completions(config, doc, {"line": 1, "character": 1000}) -def test_jedi_completion_item_resolve(config, workspace): +def test_jedi_completion_item_resolve(config, workspace) -> None: # Over the blank line - com_position = {'line': 8, 'character': 0} + com_position = {"line": 8, "character": 0} doc = Document(DOC_URI, workspace, DOC) - config.update({'plugins': {'jedi_completion': {'resolve_at_most': math.inf}}}) + config.update({"plugins": {"jedi_completion": {"resolve_at_most": math.inf}}}) completions = pylsp_jedi_completions(config, doc, com_position) - items = {c['label']: c for c in completions} + items = {c["label"]: c for c in completions} - documented_hello_item = items['documented_hello()'] + documented_hello_item = items["documented_hello()"] - assert 'documentation' not in documented_hello_item - assert 'detail' not in documented_hello_item + assert "documentation" not in documented_hello_item + assert "detail" not in documented_hello_item resolved_documented_hello = pylsp_jedi_completion_item_resolve( - completion_item=documented_hello_item, - document=doc + doc._config, completion_item=documented_hello_item, document=doc ) - assert 'Sends a polite greeting' in resolved_documented_hello['documentation'] + expected_doc = { + "kind": "markdown", + "value": "```python\ndocumented_hello()\n```\n\n\nSends a polite greeting", + } + assert resolved_documented_hello["documentation"] == expected_doc -def test_jedi_completion_with_fuzzy_enabled(config, workspace): +def 
test_jedi_completion_with_fuzzy_enabled(config, workspace) -> None: # Over 'i' in os.path.isabs(...) - config.update({'plugins': {'jedi_completion': {'fuzzy': True}}}) - com_position = {'line': 1, 'character': 15} + config.update({"plugins": {"jedi_completion": {"fuzzy": True}}}) + com_position = {"line": 1, "character": 15} doc = Document(DOC_URI, workspace, DOC) items = pylsp_jedi_completions(config, doc, com_position) assert items - expected = 'commonprefix(m)' - if JEDI_VERSION == '0.18.0': - expected = 'commonprefix(list)' - assert items[0]['label'] == expected + expected = "commonprefix(m)" if JEDI_VERSION < "0.19.2" else "isabs(s)" + assert items[0]["label"] == expected # Test we don't throw with big character - pylsp_jedi_completions(config, doc, {'line': 1, 'character': 1000}) + pylsp_jedi_completions(config, doc, {"line": 1, "character": 1000}) -def test_jedi_completion_resolve_at_most(config, workspace): +def test_jedi_completion_resolve_at_most(config, workspace) -> None: # Over 'i' in os.path.isabs(...) 
- com_position = {'line': 1, 'character': 15} + com_position = {"line": 1, "character": 15} doc = Document(DOC_URI, workspace, DOC) # Do not resolve any labels - config.update({'plugins': {'jedi_completion': {'resolve_at_most': 0}}}) + config.update({"plugins": {"jedi_completion": {"resolve_at_most": 0}}}) items = pylsp_jedi_completions(config, doc, com_position) - labels = {i['label'] for i in items} - assert 'isabs' in labels + labels = {i["label"] for i in items} + assert "isabs" in labels # Resolve all items - config.update({'plugins': {'jedi_completion': {'resolve_at_most': math.inf}}}) + config.update({"plugins": {"jedi_completion": {"resolve_at_most": math.inf}}}) items = pylsp_jedi_completions(config, doc, com_position) - labels = {i['label'] for i in items} - assert 'isfile(path)' in labels + labels = {i["label"] for i in items} + assert "isfile(path)" in labels -def test_rope_completion(config, workspace): +def test_rope_completion(config, workspace) -> None: # Over 'i' in os.path.isabs(...) 
- com_position = {'line': 1, 'character': 15} + com_position = {"line": 1, "character": 15} workspace.put_document(DOC_URI, source=DOC) doc = workspace.get_document(DOC_URI) items = pylsp_rope_completions(config, workspace, doc, com_position) assert items - assert items[0]['label'] == 'isabs' + assert items[0]["label"] == "isabs" -def test_jedi_completion_ordering(config, workspace): +def test_jedi_completion_ordering(config, workspace) -> None: # Over the blank line - com_position = {'line': 8, 'character': 0} + com_position = {"line": 8, "character": 0} doc = Document(DOC_URI, workspace, DOC) - config.update({'plugins': {'jedi_completion': {'resolve_at_most': math.inf}}}) + config.update({"plugins": {"jedi_completion": {"resolve_at_most": math.inf}}}) completions = pylsp_jedi_completions(config, doc, com_position) - items = {c['label']: c['sortText'] for c in completions} + items = {c["label"]: c["sortText"] for c in completions} # And that 'hidden' functions come after unhidden ones - assert items['hello()'] < items['_a_hello()'] + assert items["hello()"] < items["_a_hello()"] -def test_jedi_property_completion(config, workspace): +def test_jedi_property_completion(config, workspace) -> None: # Over the 'w' in 'print Hello().world' - com_position = {'line': 18, 'character': 15} + com_position = {"line": 18, "character": 15} doc = Document(DOC_URI, workspace, DOC) completions = pylsp_jedi_completions(config, doc, com_position) - items = {c['label']: c['sortText'] for c in completions} + items = {c["label"]: c["sortText"] for c in completions} # Ensure we can complete the 'world' property - assert 'world' in list(items.keys())[0] + assert "world" in list(items.keys())[0] -def test_jedi_method_completion(config, workspace): +def test_jedi_method_completion(config, workspace) -> None: # Over the 'y' in 'print Hello().every' - com_position = {'line': 20, 'character': 19} + com_position = {"line": 20, "character": 19} doc = Document(DOC_URI, workspace, DOC) - 
config.capabilities['textDocument'] = {'completion': {'completionItem': {'snippetSupport': True}}} - config.update({'plugins': {'jedi_completion': {'include_params': True}}}) + config.capabilities["textDocument"] = { + "completion": {"completionItem": {"snippetSupport": True}} + } + config.update({"plugins": {"jedi_completion": {"include_params": True}}}) completions = pylsp_jedi_completions(config, doc, com_position) - everyone_method = [completion for completion in completions if completion['label'] == 'everyone(a, b, c, d)'][0] + everyone_method = [ + completion + for completion in completions + if completion["label"] == "everyone(a, b, c, d)" + ][0] # Ensure we only generate snippets for positional args - assert everyone_method['insertTextFormat'] == lsp.InsertTextFormat.Snippet - assert everyone_method['insertText'] == 'everyone(${1:a}, ${2:b})$0' + assert everyone_method["insertTextFormat"] == lsp.InsertTextFormat.Snippet + assert everyone_method["insertText"] == "everyone(${1:a}, ${2:b})$0" # Disable param snippets - config.update({'plugins': {'jedi_completion': {'include_params': False}}}) + config.update({"plugins": {"jedi_completion": {"include_params": False}}}) completions = pylsp_jedi_completions(config, doc, com_position) - everyone_method = [completion for completion in completions if completion['label'] == 'everyone(a, b, c, d)'][0] + everyone_method = [ + completion + for completion in completions + if completion["label"] == "everyone(a, b, c, d)" + ][0] - assert 'insertTextFormat' not in everyone_method - assert everyone_method['insertText'] == 'everyone' + assert "insertTextFormat" not in everyone_method + assert everyone_method["insertText"] == "everyone" -@pytest.mark.skipif(PY2 or (sys.platform.startswith('linux') and os.environ.get('CI') is not None), - reason="Test in Python 3 and not on CIs on Linux because wheels don't work on them.") -def test_pyqt_completion(config, workspace): - # Over 'QA' in 'from PyQt5.QtWidgets import QApplication' 
- doc_pyqt = "from PyQt5.QtWidgets import QA" - com_position = {'line': 0, 'character': len(doc_pyqt)} +@pytest.mark.skipif( + PY2 or (sys.platform.startswith("linux") and os.environ.get("CI") is not None), + reason="Test in Python 3 and not on CIs on Linux because wheels don't work on them.", +) +def test_pyqt_completion(config, workspace) -> None: + # Over 'QA' in 'from PyQt6.QtWidgets import QApplication' + doc_pyqt = "from PyQt6.QtWidgets import QA" + com_position = {"line": 0, "character": len(doc_pyqt)} doc = Document(DOC_URI, workspace, doc_pyqt) completions = pylsp_jedi_completions(config, doc, com_position) assert completions is not None -def test_numpy_completions(config, workspace): +def test_numpy_completions(config, workspace) -> None: doc_numpy = "import numpy as np; np." - com_position = {'line': 0, 'character': len(doc_numpy)} + com_position = {"line": 0, "character": len(doc_numpy)} doc = Document(DOC_URI, workspace, doc_numpy) items = pylsp_jedi_completions(config, doc, com_position) assert items - assert any('array' in i['label'] for i in items) + assert any("array" in i["label"] for i in items) -def test_pandas_completions(config, workspace): +def test_pandas_completions(config, workspace) -> None: doc_pandas = "import pandas as pd; pd." - com_position = {'line': 0, 'character': len(doc_pandas)} + com_position = {"line": 0, "character": len(doc_pandas)} doc = Document(DOC_URI, workspace, doc_pandas) items = pylsp_jedi_completions(config, doc, com_position) assert items - assert any('DataFrame' in i['label'] for i in items) + assert any("DataFrame" in i["label"] for i in items) -def test_matplotlib_completions(config, workspace): +def test_matplotlib_completions(config, workspace) -> None: doc_mpl = "import matplotlib.pyplot as plt; plt." 
- com_position = {'line': 0, 'character': len(doc_mpl)} + com_position = {"line": 0, "character": len(doc_mpl)} doc = Document(DOC_URI, workspace, doc_mpl) items = pylsp_jedi_completions(config, doc, com_position) assert items - assert any('plot' in i['label'] for i in items) + assert any("plot" in i["label"] for i in items) -def test_snippets_completion(config, workspace): - doc_snippets = 'from collections import defaultdict \na=defaultdict' - com_position = {'line': 0, 'character': 35} +def test_snippets_completion(config, workspace) -> None: + doc_snippets = "from collections import defaultdict \na=defaultdict" + com_position = {"line": 0, "character": 35} doc = Document(DOC_URI, workspace, doc_snippets) - config.capabilities['textDocument'] = { - 'completion': {'completionItem': {'snippetSupport': True}}} - config.update({'plugins': {'jedi_completion': {'include_params': True}}}) + config.capabilities["textDocument"] = { + "completion": {"completionItem": {"snippetSupport": True}} + } + config.update({"plugins": {"jedi_completion": {"include_params": True}}}) completions = pylsp_jedi_completions(config, doc, com_position) - assert completions[0]['insertText'] == 'defaultdict' + assert completions[0]["insertText"] == "defaultdict" - com_position = {'line': 1, 'character': len(doc_snippets)} + com_position = {"line": 1, "character": len(doc_snippets)} completions = pylsp_jedi_completions(config, doc, com_position) - assert completions[0]['insertText'] == 'defaultdict($0)' - assert completions[0]['insertTextFormat'] == lsp.InsertTextFormat.Snippet + assert completions[0]["insertText"] == "defaultdict($0)" + assert completions[0]["insertTextFormat"] == lsp.InsertTextFormat.Snippet -def test_snippets_completion_at_most(config, workspace): - doc_snippets = 'from collections import defaultdict \na=defaultdict' +def test_snippets_completion_at_most(config, workspace) -> None: + doc_snippets = "from collections import defaultdict \na=defaultdict" doc = 
Document(DOC_URI, workspace, doc_snippets) - config.capabilities['textDocument'] = { - 'completion': {'completionItem': {'snippetSupport': True}}} - config.update({'plugins': {'jedi_completion': {'include_params': True}}}) - config.update({'plugins': {'jedi_completion': {'resolve_at_most': 0}}}) + config.capabilities["textDocument"] = { + "completion": {"completionItem": {"snippetSupport": True}} + } + config.update({"plugins": {"jedi_completion": {"include_params": True}}}) + config.update({"plugins": {"jedi_completion": {"resolve_at_most": 0}}}) - com_position = {'line': 1, 'character': len(doc_snippets)} + com_position = {"line": 1, "character": len(doc_snippets)} completions = pylsp_jedi_completions(config, doc, com_position) - assert completions[0]['insertText'] == 'defaultdict' - assert not completions[0].get('insertTextFormat', None) + assert completions[0]["insertText"] == "defaultdict" + assert not completions[0].get("insertTextFormat", None) -def test_completion_with_class_objects(config, workspace): - doc_text = 'class FOOBAR(Object): pass\nFOOB' - com_position = {'line': 1, 'character': 4} +def test_completion_with_class_objects(config, workspace) -> None: + doc_text = "class FOOBAR(Object): pass\nFOOB" + com_position = {"line": 1, "character": 4} doc = Document(DOC_URI, workspace, doc_text) - config.capabilities['textDocument'] = { - 'completion': {'completionItem': {'snippetSupport': True}}} - config.update({'plugins': {'jedi_completion': { - 'include_params': True, - 'include_class_objects': True, - }}}) + config.capabilities["textDocument"] = { + "completion": {"completionItem": {"snippetSupport": True}} + } + config.update( + { + "plugins": { + "jedi_completion": { + "include_params": True, + "include_class_objects": True, + } + } + } + ) completions = pylsp_jedi_completions(config, doc, com_position) assert len(completions) == 2 - assert completions[0]['label'] == 'FOOBAR' - assert completions[0]['kind'] == lsp.CompletionItemKind.Class + assert 
completions[0]["label"] == "FOOBAR" + assert completions[0]["kind"] == lsp.CompletionItemKind.Class + + assert completions[1]["label"] == "FOOBAR object" + assert completions[1]["kind"] == lsp.CompletionItemKind.TypeParameter + + +def test_completion_with_function_objects(config, workspace) -> None: + doc_text = "def foobar(): pass\nfoob" + com_position = {"line": 1, "character": 4} + doc = Document(DOC_URI, workspace, doc_text) + config.capabilities["textDocument"] = { + "completion": {"completionItem": {"snippetSupport": True}} + } + config.update( + { + "plugins": { + "jedi_completion": { + "include_params": True, + "include_function_objects": True, + } + } + } + ) + completions = pylsp_jedi_completions(config, doc, com_position) + assert len(completions) == 2 - assert completions[1]['label'] == 'FOOBAR object' - assert completions[1]['kind'] == lsp.CompletionItemKind.TypeParameter + assert completions[0]["label"] == "foobar()" + assert completions[0]["kind"] == lsp.CompletionItemKind.Function + assert completions[1]["label"] == "foobar() object" + assert completions[1]["kind"] == lsp.CompletionItemKind.TypeParameter -def test_snippet_parsing(config, workspace): - doc = 'divmod' - completion_position = {'line': 0, 'character': 6} + +def test_snippet_parsing(config, workspace) -> None: + doc = "divmod" + completion_position = {"line": 0, "character": 6} doc = Document(DOC_URI, workspace, doc) - config.capabilities['textDocument'] = { - 'completion': {'completionItem': {'snippetSupport': True}}} - config.update({'plugins': {'jedi_completion': {'include_params': True}}}) + config.capabilities["textDocument"] = { + "completion": {"completionItem": {"snippetSupport": True}} + } + config.update({"plugins": {"jedi_completion": {"include_params": True}}}) completions = pylsp_jedi_completions(config, doc, completion_position) - out = 'divmod(${1:x}, ${2:y})$0' - if JEDI_VERSION == '0.18.0': - out = 'divmod(${1:a}, ${2:b})$0' - assert completions[0]['insertText'] == out + 
out = "divmod(${1:x}, ${2:y})$0" + if JEDI_VERSION == "0.18.0": + out = "divmod(${1:a}, ${2:b})$0" + assert completions[0]["insertText"] == out -def test_multiline_import_snippets(config, workspace): - document = 'from datetime import(\n date,\n datetime)\na=date' +def test_multiline_import_snippets(config, workspace) -> None: + document = "from datetime import(\n date,\n datetime)\na=date" doc = Document(DOC_URI, workspace, document) - config.capabilities['textDocument'] = { - 'completion': {'completionItem': {'snippetSupport': True}}} - config.update({'plugins': {'jedi_completion': {'include_params': True}}}) + config.capabilities["textDocument"] = { + "completion": {"completionItem": {"snippetSupport": True}} + } + config.update({"plugins": {"jedi_completion": {"include_params": True}}}) - position = {'line': 1, 'character': 5} + position = {"line": 1, "character": 5} completions = pylsp_jedi_completions(config, doc, position) - assert completions[0]['insertText'] == 'date' + assert completions[0]["insertText"] == "date" - position = {'line': 2, 'character': 9} + position = {"line": 2, "character": 9} completions = pylsp_jedi_completions(config, doc, position) - assert completions[0]['insertText'] == 'datetime' + assert completions[0]["insertText"] == "datetime" -def test_multiline_snippets(config, workspace): - document = 'from datetime import\\\n date,\\\n datetime \na=date' +def test_multiline_snippets(config, workspace) -> None: + document = "from datetime import\\\n date,\\\n datetime \na=date" doc = Document(DOC_URI, workspace, document) - config.capabilities['textDocument'] = { - 'completion': {'completionItem': {'snippetSupport': True}}} - config.update({'plugins': {'jedi_completion': {'include_params': True}}}) + config.capabilities["textDocument"] = { + "completion": {"completionItem": {"snippetSupport": True}} + } + config.update({"plugins": {"jedi_completion": {"include_params": True}}}) - position = {'line': 1, 'character': 5} + position = {"line": 
1, "character": 5} completions = pylsp_jedi_completions(config, doc, position) - assert completions[0]['insertText'] == 'date' + assert completions[0]["insertText"] == "date" - position = {'line': 2, 'character': 9} + position = {"line": 2, "character": 9} completions = pylsp_jedi_completions(config, doc, position) - assert completions[0]['insertText'] == 'datetime' + assert completions[0]["insertText"] == "datetime" -def test_multistatement_snippet(config, workspace): - config.capabilities['textDocument'] = { - 'completion': {'completionItem': {'snippetSupport': True}}} - config.update({'plugins': {'jedi_completion': {'include_params': True}}}) +def test_multistatement_snippet(config, workspace) -> None: + config.capabilities["textDocument"] = { + "completion": {"completionItem": {"snippetSupport": True}} + } + config.update({"plugins": {"jedi_completion": {"include_params": True}}}) - document = 'a = 1; from datetime import date' + document = "a = 1; from datetime import date" doc = Document(DOC_URI, workspace, document) - position = {'line': 0, 'character': len(document)} + position = {"line": 0, "character": len(document)} completions = pylsp_jedi_completions(config, doc, position) - assert completions[0]['insertText'] == 'date' + assert completions[0]["insertText"] == "date" - document = 'from math import fmod; a = fmod' + document = "from math import fmod; a = fmod" doc = Document(DOC_URI, workspace, document) - position = {'line': 0, 'character': len(document)} + position = {"line": 0, "character": len(document)} completions = pylsp_jedi_completions(config, doc, position) - assert completions[0]['insertText'] == 'fmod(${1:x}, ${2:y})$0' + assert completions[0]["insertText"] == "fmod(${1:x}, ${2:y})$0" -def test_jedi_completion_extra_paths(tmpdir, workspace): +def test_jedi_completion_extra_paths(tmpdir, workspace) -> None: # Create a tempfile with some content and pass to extra_paths - temp_doc_content = ''' + temp_doc_content = """ def spam(): pass -''' 
+""" p = tmpdir.mkdir("extra_path") extra_paths = [str(p)] p = p.join("foo.py") @@ -438,57 +493,59 @@ def spam(): doc = Document(DOC_URI, workspace, doc_content) # After 'foo.s' without extra paths - com_position = {'line': 1, 'character': 5} + com_position = {"line": 1, "character": 5} completions = pylsp_jedi_completions(doc._config, doc, com_position) assert completions is None # Update config extra paths - settings = {'pylsp': {'plugins': {'jedi': {'extra_paths': extra_paths}}}} + settings = {"pylsp": {"plugins": {"jedi": {"extra_paths": extra_paths}}}} doc.update_config(settings) # After 'foo.s' with extra paths - com_position = {'line': 1, 'character': 5} + com_position = {"line": 1, "character": 5} completions = pylsp_jedi_completions(doc._config, doc, com_position) - assert completions[0]['label'] == 'spam()' + assert completions[0]["label"] == "spam()" -@pytest.mark.skipif(PY2 or not LINUX or not CI, reason="tested on linux and python 3 only") -def test_jedi_completion_environment(workspace): +@pytest.mark.skipif( + PY2 or not LINUX or not CI, reason="tested on linux and python 3 only" +) +def test_jedi_completion_environment(workspace) -> None: # Content of doc to test completion - doc_content = '''import logh -''' + doc_content = """import logh +""" doc = Document(DOC_URI, workspace, doc_content) # After 'import logh' with default environment - com_position = {'line': 0, 'character': 11} + com_position = {"line": 0, "character": 11} - assert os.path.isdir('/tmp/pyenv/') + assert os.path.isdir("/tmp/pyenv/") - settings = {'pylsp': {'plugins': {'jedi': {'environment': None}}}} + settings = {"pylsp": {"plugins": {"jedi": {"environment": None}}}} doc.update_config(settings) completions = pylsp_jedi_completions(doc._config, doc, com_position) assert completions is None # Update config extra environment - env_path = '/tmp/pyenv/bin/python' - settings = {'pylsp': {'plugins': {'jedi': {'environment': env_path}}}} + env_path = "/tmp/pyenv/bin/python" + settings = 
{"pylsp": {"plugins": {"jedi": {"environment": env_path}}}} doc.update_config(settings) # After 'import logh' with new environment completions = pylsp_jedi_completions(doc._config, doc, com_position) - assert completions[0]['label'] == 'loghub' + assert completions[0]["label"] == "loghub" - resolved = pylsp_jedi_completion_item_resolve(completions[0], doc) - assert 'changelog generator' in resolved['documentation'].lower() + resolved = pylsp_jedi_completion_item_resolve(doc._config, completions[0], doc) + assert "changelog generator" in resolved["documentation"]["value"].lower() -def test_document_path_completions(tmpdir, workspace_other_root_path): +def test_document_path_completions(tmpdir, workspace_other_root_path) -> None: # Create a dummy module out of the workspace's root_path and try to get # completions for it in another file placed next to it. - module_content = ''' + module_content = """ def foo(): pass -''' +""" p = tmpdir.join("mymodule.py") p.write(module_content) @@ -496,10 +553,46 @@ def foo(): # Content of doc to test completion doc_content = """import mymodule mymodule.f""" - doc_path = str(tmpdir) + os.path.sep + 'myfile.py' + doc_path = str(tmpdir) + os.path.sep + "myfile.py" doc_uri = uris.from_fs_path(doc_path) doc = Document(doc_uri, workspace_other_root_path, doc_content) - com_position = {'line': 1, 'character': 10} + com_position = {"line": 1, "character": 10} + completions = pylsp_jedi_completions(doc._config, doc, com_position) + assert completions[0]["label"] == "foo()" + + +def test_file_completions(workspace, tmpdir) -> None: + # Create directory and a file to get completions for them. + # Note: `tmpdir`` is the root dir of the `workspace` fixture. That's why we use + # it here. 
+ tmpdir.mkdir("bar") + file = tmpdir.join("foo.txt") + file.write("baz") + + # Content of doc to test completion + doc_content = '"' + doc = Document(DOC_URI, workspace, doc_content) + + # Request for completions + com_position = {"line": 0, "character": 1} completions = pylsp_jedi_completions(doc._config, doc, com_position) - assert completions[0]['label'] == 'foo()' + + # Check completions + assert len(completions) == 2 + assert [c["kind"] == lsp.CompletionItemKind.File for c in completions] + assert completions[0]["insertText"] == ( + ("bar" + "\\") if os.name == "nt" else ("bar" + "/") + ) + assert completions[1]["insertText"] == 'foo.txt"' + + # When snippets are supported, ensure that path separators are escaped. + support_snippet = { + "textDocument": {"completion": {"completionItem": {"snippetSupport": True}}} + } + doc._config.capabilities.update(support_snippet) + completions = pylsp_jedi_completions(doc._config, doc, com_position) + assert completions[0]["insertText"] == ( + ("bar" + "\\\\") if os.name == "nt" else ("bar" + "\\/") + ) + assert completions[1]["insertText"] == 'foo.txt"' diff --git a/test/plugins/test_definitions.py b/test/plugins/test_definitions.py index 488f5452..7923524b 100644 --- a/test/plugins/test_definitions.py +++ b/test/plugins/test_definitions.py @@ -7,12 +7,11 @@ from pylsp.plugins.definition import pylsp_definitions from pylsp.workspace import Document - DOC_URI = uris.from_fs_path(__file__) DOC = """def a(): pass -print a() +print(a()) class Directory(object): @@ -21,74 +20,156 @@ def __init__(self): def add_member(self, id, name): self.members[id] = name + + +subscripted_before_reference = {} +subscripted_before_reference[0] = 0 +subscripted_before_reference + + +def my_func(): + print('called') + +alias = my_func +my_list = [1, None, alias] +inception = my_list[2] + +inception() + +import numpy +numpy.ones """ -def test_definitions(config, workspace): +def test_definitions(config, workspace) -> None: # Over 'a' in print a 
- cursor_pos = {'line': 3, 'character': 6} + cursor_pos = {"line": 3, "character": 6} # The definition of 'a' def_range = { - 'start': {'line': 0, 'character': 4}, - 'end': {'line': 0, 'character': 5} + "start": {"line": 0, "character": 4}, + "end": {"line": 0, "character": 5}, + } + + doc = Document(DOC_URI, workspace, DOC) + assert [{"uri": DOC_URI, "range": def_range}] == pylsp_definitions( + config, doc, cursor_pos + ) + + +def test_indirect_definitions(config, workspace) -> None: + # Over 'subscripted_before_reference' + cursor_pos = {"line": 16, "character": 0} + + # The definition of 'subscripted_before_reference', + # skipping intermediate writes to the most recent definition + def_range = { + "start": {"line": 14, "character": 0}, + "end": {"line": 14, "character": len("subscripted_before_reference")}, } doc = Document(DOC_URI, workspace, DOC) - assert [{'uri': DOC_URI, 'range': def_range}] == pylsp_definitions(config, doc, cursor_pos) + assert [{"uri": DOC_URI, "range": def_range}] == pylsp_definitions( + config, doc, cursor_pos + ) -def test_builtin_definition(config, workspace): +def test_definition_with_multihop_inference_goto(config, workspace) -> None: + # Over 'inception()' + cursor_pos = {"line": 26, "character": 0} + + # The most recent definition of 'inception', + # ignoring alias hops + def_range = { + "start": {"line": 24, "character": 0}, + "end": {"line": 24, "character": len("inception")}, + } + + doc = Document(DOC_URI, workspace, DOC) + assert [{"uri": DOC_URI, "range": def_range}] == pylsp_definitions( + config, doc, cursor_pos + ) + + +def test_numpy_definition(config, workspace) -> None: + # Over numpy.ones + cursor_pos = {"line": 29, "character": 8} + + doc = Document(DOC_URI, workspace, DOC) + defns = pylsp_definitions(config, doc, cursor_pos) + assert len(defns) > 0, defns + + +def test_builtin_definition(config, workspace) -> None: # Over 'i' in dict - cursor_pos = {'line': 8, 'character': 24} + cursor_pos = {"line": 8, "character": 
24} - # No go-to def for builtins doc = Document(DOC_URI, workspace, DOC) - assert not pylsp_definitions(config, doc, cursor_pos) + orig_settings = config.settings() + + # Check definition for `dict` goes to `builtins.pyi::dict` + follow_defns_setting = {"follow_builtin_definitions": True} + settings = {"plugins": {"jedi_definition": follow_defns_setting}} + config.update(settings) + defns = pylsp_definitions(config, doc, cursor_pos) + assert len(defns) == 1 + assert defns[0]["uri"].endswith("builtins.pyi") + # Check no definitions for `dict` + follow_defns_setting["follow_builtin_definitions"] = False + config.update(settings) + defns = pylsp_definitions(config, doc, cursor_pos) + assert not defns -def test_assignment(config, workspace): + config.update(orig_settings) + + +def test_assignment(config, workspace) -> None: # Over 's' in self.members[id] - cursor_pos = {'line': 11, 'character': 19} + cursor_pos = {"line": 11, "character": 19} # The assignment of 'self.members' def_range = { - 'start': {'line': 8, 'character': 13}, - 'end': {'line': 8, 'character': 20} + "start": {"line": 8, "character": 13}, + "end": {"line": 8, "character": 20}, } doc = Document(DOC_URI, workspace, DOC) - assert [{'uri': DOC_URI, 'range': def_range}] == pylsp_definitions(config, doc, cursor_pos) + assert [{"uri": DOC_URI, "range": def_range}] == pylsp_definitions( + config, doc, cursor_pos + ) -def test_document_path_definitions(config, workspace_other_root_path, tmpdir): +def test_document_path_definitions(config, workspace_other_root_path, tmpdir) -> None: # Create a dummy module out of the workspace's root_path and try to get # a definition on it in another file placed next to it. 
- module_content = ''' + module_content = """ def foo(): pass -''' +""" p = tmpdir.join("mymodule.py") p.write(module_content) # Content of doc to test definition doc_content = """from mymodule import foo""" - doc_path = str(tmpdir) + os.path.sep + 'myfile.py' + doc_path = str(tmpdir) + os.path.sep + "myfile.py" doc_uri = uris.from_fs_path(doc_path) doc = Document(doc_uri, workspace_other_root_path, doc_content) # The range where is defined in mymodule.py def_range = { - 'start': {'line': 1, 'character': 4}, - 'end': {'line': 1, 'character': 7} + "start": {"line": 1, "character": 4}, + "end": {"line": 1, "character": 7}, } # The position where foo is called in myfile.py - cursor_pos = {'line': 0, 'character': 24} + cursor_pos = {"line": 0, "character": 24} # The uri for mymodule.py module_path = str(p) module_uri = uris.from_fs_path(module_path) - assert [{'uri': module_uri, 'range': def_range}] == pylsp_definitions(config, doc, cursor_pos) + assert [{"uri": module_uri, "range": def_range}] == pylsp_definitions( + config, doc, cursor_pos + ) diff --git a/test/plugins/test_flake8_lint.py b/test/plugins/test_flake8_lint.py index 59a776a1..ad1dc4ff 100644 --- a/test/plugins/test_flake8_lint.py +++ b/test/plugins/test_flake8_lint.py @@ -1,14 +1,15 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
-import tempfile import os +import tempfile +from textwrap import dedent from unittest.mock import patch + from pylsp import lsp, uris from pylsp.plugins import flake8_lint from pylsp.workspace import Document - DOC_URI = uris.from_fs_path(__file__) DOC = """import pylsp @@ -21,7 +22,7 @@ def using_const(): def temp_document(doc_text, workspace): - with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_file: name = temp_file.name temp_file.write(doc_text) doc = Document(uris.from_fs_path(name), workspace) @@ -29,55 +30,120 @@ def temp_document(doc_text, workspace): return name, doc -def test_flake8_unsaved(workspace): - doc = Document('', workspace, DOC) +def test_flake8_unsaved(workspace) -> None: + doc = Document("", workspace, DOC) diags = flake8_lint.pylsp_lint(workspace, doc) - msg = 'F841 local variable \'a\' is assigned to but never used' - unused_var = [d for d in diags if d['message'] == msg][0] + msg = "F841 local variable 'a' is assigned to but never used" + unused_var = [d for d in diags if d["message"] == msg][0] - assert unused_var['source'] == 'flake8' - assert unused_var['code'] == 'F841' - assert unused_var['range']['start'] == {'line': 5, 'character': 1} - assert unused_var['range']['end'] == {'line': 5, 'character': 11} - assert unused_var['severity'] == lsp.DiagnosticSeverity.Warning + assert unused_var["source"] == "flake8" + assert unused_var["code"] == "F841" + assert unused_var["range"]["start"] == {"line": 5, "character": 1} + assert unused_var["range"]["end"] == {"line": 5, "character": 11} + assert unused_var["severity"] == lsp.DiagnosticSeverity.Warning + assert unused_var["tags"] == [lsp.DiagnosticTag.Unnecessary] -def test_flake8_lint(workspace): +def test_flake8_lint(workspace) -> None: name, doc = temp_document(DOC, workspace) try: diags = flake8_lint.pylsp_lint(workspace, doc) - msg = 'F841 local variable \'a\' is assigned to but never used' - 
unused_var = [d for d in diags if d['message'] == msg][0] - - assert unused_var['source'] == 'flake8' - assert unused_var['code'] == 'F841' - assert unused_var['range']['start'] == {'line': 5, 'character': 1} - assert unused_var['range']['end'] == {'line': 5, 'character': 11} - assert unused_var['severity'] == lsp.DiagnosticSeverity.Warning + msg = "F841 local variable 'a' is assigned to but never used" + unused_var = [d for d in diags if d["message"] == msg][0] + + assert unused_var["source"] == "flake8" + assert unused_var["code"] == "F841" + assert unused_var["range"]["start"] == {"line": 5, "character": 1} + assert unused_var["range"]["end"] == {"line": 5, "character": 11} + assert unused_var["severity"] == lsp.DiagnosticSeverity.Warning finally: os.remove(name) -def test_flake8_config_param(workspace): - with patch('pylsp.plugins.flake8_lint.Popen') as popen_mock: +def test_flake8_respecting_configuration(workspace) -> None: + docs = [ + ("src/__init__.py", ""), + ("src/a.py", DOC), + ("src/b.py", "import os"), + ( + "setup.cfg", + dedent( + """ + [flake8] + ignore = E302,W191 + per-file-ignores = + src/a.py:F401 + src/b.py:W292 + """ + ), + ), + ] + + made = {} + for rel, contents in docs: + location = os.path.join(workspace.root_path, rel) + made[rel] = {"uri": uris.from_fs_path(location)} + + os.makedirs(os.path.dirname(location), exist_ok=True) + with open(location, "w", encoding="utf-8") as fle: + fle.write(contents) + + workspace.put_document(made[rel]["uri"], contents) + made[rel]["document"] = workspace._docs[made[rel]["uri"]] + + diags = flake8_lint.pylsp_lint(workspace, made["src/a.py"]["document"]) + assert diags == [ + { + "source": "flake8", + "code": "F841", + "range": { + "start": {"line": 5, "character": 1}, + "end": {"line": 5, "character": 11}, + }, + "message": "F841 local variable 'a' is assigned to but never used", + "severity": 2, + "tags": [1], + }, + ] + + diags = flake8_lint.pylsp_lint(workspace, made["src/b.py"]["document"]) + assert 
diags == [ + { + "source": "flake8", + "code": "F401", + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 9}, + }, + "message": "F401 'os' imported but unused", + "severity": 2, + "tags": [1], + } + ] + + +def test_flake8_config_param(workspace) -> None: + with patch("pylsp.plugins.flake8_lint.Popen") as popen_mock: mock_instance = popen_mock.return_value - mock_instance.communicate.return_value = [bytes(), bytes()] - flake8_conf = '/tmp/some.cfg' - workspace._config.update({'plugins': {'flake8': {'config': flake8_conf}}}) + mock_instance.communicate.return_value = [b"", b""] + flake8_conf = "C:\\some.cfg" if os.name == "nt" else "/tmp/some.cfg" + workspace._config.update({"plugins": {"flake8": {"config": flake8_conf}}}) _name, doc = temp_document(DOC, workspace) flake8_lint.pylsp_lint(workspace, doc) (call_args,) = popen_mock.call_args[0] - assert 'flake8' in call_args - assert '--config={}'.format(flake8_conf) in call_args + assert "flake8" in call_args + assert f"--config={flake8_conf}" in call_args -def test_flake8_executable_param(workspace): - with patch('pylsp.plugins.flake8_lint.Popen') as popen_mock: +def test_flake8_executable_param(workspace) -> None: + with patch("pylsp.plugins.flake8_lint.Popen") as popen_mock: mock_instance = popen_mock.return_value - mock_instance.communicate.return_value = [bytes(), bytes()] + mock_instance.communicate.return_value = [b"", b""] - flake8_executable = '/tmp/flake8' - workspace._config.update({'plugins': {'flake8': {'executable': flake8_executable}}}) + flake8_executable = "/tmp/flake8" + workspace._config.update( + {"plugins": {"flake8": {"executable": flake8_executable}}} + ) _name, doc = temp_document(DOC, workspace) flake8_lint.pylsp_lint(workspace, doc) @@ -92,7 +158,9 @@ def get_flake8_cfg_settings(workspace, config_str): This function creates a ``setup.cfg``; you'll have to delete it yourself. 
""" - with open(os.path.join(workspace.root_path, "setup.cfg"), "w+", encoding='utf-8') as f: + with open( + os.path.join(workspace.root_path, "setup.cfg"), "w+", encoding="utf-8" + ) as f: f.write(config_str) workspace.update_config({"pylsp": {"configurationSources": ["flake8"]}}) @@ -100,7 +168,7 @@ def get_flake8_cfg_settings(workspace, config_str): return workspace._config.plugin_settings("flake8") -def test_flake8_multiline(workspace): +def test_flake8_multiline(workspace) -> None: config_str = r"""[flake8] exclude = blah/, @@ -117,20 +185,28 @@ def test_flake8_multiline(workspace): assert "exclude" in flake8_settings assert len(flake8_settings["exclude"]) == 2 - with patch('pylsp.plugins.flake8_lint.Popen') as popen_mock: + with patch("pylsp.plugins.flake8_lint.Popen") as popen_mock: mock_instance = popen_mock.return_value - mock_instance.communicate.return_value = [bytes(), bytes()] + mock_instance.communicate.return_value = [b"", b""] doc = workspace.get_document(doc_uri) flake8_lint.pylsp_lint(workspace, doc) call_args = popen_mock.call_args[0][0] - assert call_args == ["flake8", "-", "--exclude=blah/,file_2.py"] + + init_file = os.path.join("blah", "__init__.py") + assert call_args == [ + "flake8", + "-", + "--exclude=blah/,file_2.py", + "--stdin-display-name", + init_file, + ] os.unlink(os.path.join(workspace.root_path, "setup.cfg")) -def test_flake8_per_file_ignores(workspace): +def test_flake8_per_file_ignores(workspace) -> None: config_str = r"""[flake8] ignores = F403 per-file-ignores = @@ -158,3 +234,25 @@ def test_flake8_per_file_ignores(workspace): assert not res os.unlink(os.path.join(workspace.root_path, "setup.cfg")) + + +def test_per_file_ignores_alternative_syntax(workspace) -> None: + config_str = r"""[flake8] +per-file-ignores = **/__init__.py:F401,E402 + """ + + doc_str = "print('hi')\nimport os\n" + + doc_uri = uris.from_fs_path(os.path.join(workspace.root_path, "blah/__init__.py")) + workspace.put_document(doc_uri, doc_str) + + 
flake8_settings = get_flake8_cfg_settings(workspace, config_str) + + assert "perFileIgnores" in flake8_settings + assert len(flake8_settings["perFileIgnores"]) == 2 + + doc = workspace.get_document(doc_uri) + res = flake8_lint.pylsp_lint(workspace, doc) + assert not res + + os.unlink(os.path.join(workspace.root_path, "setup.cfg")) diff --git a/test/plugins/test_folding.py b/test/plugins/test_folding.py index 57d6e2e9..1f0d34c8 100644 --- a/test/plugins/test_folding.py +++ b/test/plugins/test_folding.py @@ -9,7 +9,8 @@ from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) -DOC = dedent(""" +DOC = dedent( + """ def func(arg1, arg2, arg3, arg4, arg5, default=func( 2, 3, 4 @@ -78,9 +79,11 @@ def inner(): def testC(): pass -""") +""" +) -SYNTAX_ERR = dedent(""" +SYNTAX_ERR = dedent( + """ def func(arg1, arg2, arg3, arg4, arg5, default=func( 2, 3, 4 @@ -109,65 +112,70 @@ class A(: for j in range(0, i): if i % 2 == 1: pass -""") +""" +) -def test_folding(workspace): +def test_folding(workspace) -> None: doc = Document(DOC_URI, workspace, DOC) ranges = pylsp_folding_range(doc) - expected = [{'startLine': 1, 'endLine': 6}, - {'startLine': 2, 'endLine': 3}, - {'startLine': 5, 'endLine': 6}, - {'startLine': 8, 'endLine': 11}, - {'startLine': 12, 'endLine': 20}, - {'startLine': 13, 'endLine': 14}, - {'startLine': 15, 'endLine': 16}, - {'startLine': 17, 'endLine': 18}, - {'startLine': 19, 'endLine': 20}, - {'startLine': 22, 'endLine': 35}, - {'startLine': 23, 'endLine': 35}, - {'startLine': 24, 'endLine': 25}, - {'startLine': 27, 'endLine': 29}, - {'startLine': 28, 'endLine': 29}, - {'startLine': 30, 'endLine': 31}, - {'startLine': 32, 'endLine': 34}, - {'startLine': 33, 'endLine': 34}, - {'startLine': 38, 'endLine': 39}, - {'startLine': 41, 'endLine': 43}, - {'startLine': 42, 'endLine': 43}, - {'startLine': 45, 'endLine': 54}, - {'startLine': 47, 'endLine': 51}, - {'startLine': 49, 'endLine': 51}, - {'startLine': 50, 'endLine': 51}, - {'startLine': 52, 
'endLine': 54}, - {'startLine': 53, 'endLine': 54}, - {'startLine': 56, 'endLine': 57}, - {'startLine': 59, 'endLine': 65}, - {'startLine': 60, 'endLine': 61}, - {'startLine': 62, 'endLine': 63}, - {'startLine': 64, 'endLine': 65}, - {'startLine': 67, 'endLine': 68}] + expected = [ + {"startLine": 1, "endLine": 6}, + {"startLine": 2, "endLine": 3}, + {"startLine": 5, "endLine": 6}, + {"startLine": 8, "endLine": 11}, + {"startLine": 12, "endLine": 20}, + {"startLine": 13, "endLine": 14}, + {"startLine": 15, "endLine": 16}, + {"startLine": 17, "endLine": 18}, + {"startLine": 19, "endLine": 20}, + {"startLine": 22, "endLine": 35}, + {"startLine": 23, "endLine": 35}, + {"startLine": 24, "endLine": 25}, + {"startLine": 27, "endLine": 29}, + {"startLine": 28, "endLine": 29}, + {"startLine": 30, "endLine": 31}, + {"startLine": 32, "endLine": 34}, + {"startLine": 33, "endLine": 34}, + {"startLine": 38, "endLine": 39}, + {"startLine": 41, "endLine": 43}, + {"startLine": 42, "endLine": 43}, + {"startLine": 45, "endLine": 54}, + {"startLine": 47, "endLine": 51}, + {"startLine": 49, "endLine": 51}, + {"startLine": 50, "endLine": 51}, + {"startLine": 52, "endLine": 54}, + {"startLine": 53, "endLine": 54}, + {"startLine": 56, "endLine": 57}, + {"startLine": 59, "endLine": 65}, + {"startLine": 60, "endLine": 61}, + {"startLine": 62, "endLine": 63}, + {"startLine": 64, "endLine": 65}, + {"startLine": 67, "endLine": 68}, + ] if sys.version_info[:2] >= (3, 9): # the argument list of the decorator is also folded in Python >= 3.9 - expected.insert(4, {'startLine': 9, 'endLine': 10}) + expected.insert(4, {"startLine": 9, "endLine": 10}) assert ranges == expected -def test_folding_syntax_error(workspace): +def test_folding_syntax_error(workspace) -> None: doc = Document(DOC_URI, workspace, SYNTAX_ERR) ranges = pylsp_folding_range(doc) - expected = [{'startLine': 1, 'endLine': 6}, - {'startLine': 2, 'endLine': 3}, - {'startLine': 5, 'endLine': 6}, - {'startLine': 8, 'endLine': 9}, - 
{'startLine': 12, 'endLine': 13}, - {'startLine': 15, 'endLine': 17}, - {'startLine': 16, 'endLine': 17}, - {'startLine': 19, 'endLine': 28}, - {'startLine': 21, 'endLine': 25}, - {'startLine': 23, 'endLine': 25}, - {'startLine': 24, 'endLine': 25}, - {'startLine': 26, 'endLine': 28}, - {'startLine': 27, 'endLine': 28}] + expected = [ + {"startLine": 1, "endLine": 6}, + {"startLine": 2, "endLine": 3}, + {"startLine": 5, "endLine": 6}, + {"startLine": 8, "endLine": 9}, + {"startLine": 12, "endLine": 13}, + {"startLine": 15, "endLine": 17}, + {"startLine": 16, "endLine": 17}, + {"startLine": 19, "endLine": 28}, + {"startLine": 21, "endLine": 25}, + {"startLine": 23, "endLine": 25}, + {"startLine": 24, "endLine": 25}, + {"startLine": 26, "endLine": 28}, + {"startLine": 27, "endLine": 28}, + ] assert ranges == expected diff --git a/test/plugins/test_highlight.py b/test/plugins/test_highlight.py index b1baa008..eb5485bb 100644 --- a/test/plugins/test_highlight.py +++ b/test/plugins/test_highlight.py @@ -2,9 +2,8 @@ # Copyright 2021- Python Language Server Contributors. 
from pylsp import lsp, uris -from pylsp.workspace import Document from pylsp.plugins.highlight import pylsp_document_highlight - +from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """a = "hello" @@ -12,47 +11,53 @@ """ -def test_highlight(workspace): +def test_highlight(workspace) -> None: # Over 'a' in a.startswith - cursor_pos = {'line': 1, 'character': 0} + cursor_pos = {"line": 1, "character": 0} doc = Document(DOC_URI, workspace, DOC) - assert pylsp_document_highlight(doc, cursor_pos) == [{ - 'range': { - 'start': {'line': 0, 'character': 0}, - 'end': {'line': 0, 'character': 1}, + assert pylsp_document_highlight(doc, cursor_pos) == [ + { + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 1}, + }, + # The first usage is Write + "kind": lsp.DocumentHighlightKind.Write, }, - # The first usage is Write - 'kind': lsp.DocumentHighlightKind.Write - }, { - 'range': { - 'start': {'line': 1, 'character': 0}, - 'end': {'line': 1, 'character': 1}, + { + "range": { + "start": {"line": 1, "character": 0}, + "end": {"line": 1, "character": 1}, + }, + # The second usage is Read + "kind": lsp.DocumentHighlightKind.Read, }, - # The second usage is Read - 'kind': lsp.DocumentHighlightKind.Read - }] + ] -SYS_DOC = '''import sys +SYS_DOC = """import sys print sys.path -''' +""" -def test_sys_highlight(workspace): - cursor_pos = {'line': 0, 'character': 8} +def test_sys_highlight(workspace) -> None: + cursor_pos = {"line": 0, "character": 8} doc = Document(DOC_URI, workspace, SYS_DOC) - assert pylsp_document_highlight(doc, cursor_pos) == [{ - 'range': { - 'start': {'line': 0, 'character': 7}, - 'end': {'line': 0, 'character': 10} + assert pylsp_document_highlight(doc, cursor_pos) == [ + { + "range": { + "start": {"line": 0, "character": 7}, + "end": {"line": 0, "character": 10}, + }, + "kind": lsp.DocumentHighlightKind.Write, }, - 'kind': lsp.DocumentHighlightKind.Write - }, { - 'range': { - 'start': {'line': 1, 
'character': 6}, - 'end': {'line': 1, 'character': 9} + { + "range": { + "start": {"line": 1, "character": 6}, + "end": {"line": 1, "character": 9}, + }, + "kind": lsp.DocumentHighlightKind.Read, }, - 'kind': lsp.DocumentHighlightKind.Read - }] + ] diff --git a/test/plugins/test_hover.py b/test/plugins/test_hover.py index 7ac6e071..4c0d75e8 100644 --- a/test/plugins/test_hover.py +++ b/test/plugins/test_hover.py @@ -10,7 +10,7 @@ DOC_URI = uris.from_fs_path(__file__) DOC = """ -def main(): +def main(a: float, b: float): \"\"\"hello world\"\"\" pass """ @@ -23,60 +23,104 @@ def main(): """ -def test_numpy_hover(workspace): +def test_numpy_hover(workspace) -> None: # Over the blank line - no_hov_position = {'line': 1, 'character': 0} + no_hov_position = {"line": 1, "character": 0} # Over 'numpy' in import numpy as np - numpy_hov_position_1 = {'line': 2, 'character': 8} + numpy_hov_position_1 = {"line": 2, "character": 8} # Over 'np' in import numpy as np - numpy_hov_position_2 = {'line': 2, 'character': 17} + numpy_hov_position_2 = {"line": 2, "character": 17} # Over 'np' in np.sin - numpy_hov_position_3 = {'line': 3, 'character': 1} + numpy_hov_position_3 = {"line": 3, "character": 1} # Over 'sin' in np.sin - numpy_sin_hov_position = {'line': 3, 'character': 4} + numpy_sin_hov_position = {"line": 3, "character": 4} doc = Document(DOC_URI, workspace, NUMPY_DOC) - contents = '' - assert contents in pylsp_hover(doc, no_hov_position)['contents'] + contents = "" + assert contents in pylsp_hover(doc._config, doc, no_hov_position)["contents"] - contents = 'NumPy\n=====\n\nProvides\n' - assert contents in pylsp_hover(doc, numpy_hov_position_1)['contents'][0] + contents = "NumPy\n=====\n\nProvides\n" + assert ( + contents + in pylsp_hover(doc._config, doc, numpy_hov_position_1)["contents"]["value"] + ) - contents = 'NumPy\n=====\n\nProvides\n' - assert contents in pylsp_hover(doc, numpy_hov_position_2)['contents'][0] + contents = "NumPy\n=====\n\nProvides\n" + assert ( + 
contents + in pylsp_hover(doc._config, doc, numpy_hov_position_2)["contents"]["value"] + ) - contents = 'NumPy\n=====\n\nProvides\n' - assert contents in pylsp_hover(doc, numpy_hov_position_3)['contents'][0] + contents = "NumPy\n=====\n\nProvides\n" + assert ( + contents + in pylsp_hover(doc._config, doc, numpy_hov_position_3)["contents"]["value"] + ) # https://github.com/davidhalter/jedi/issues/1746 - # pylint: disable=import-outside-toplevel import numpy as np - if np.lib.NumpyVersion(np.__version__) < '1.20.0': - contents = 'Trigonometric sine, element-wise.\n\n' - assert contents in pylsp_hover( - doc, numpy_sin_hov_position)['contents'][0] + if np.lib.NumpyVersion(np.__version__) < "1.20.0": + contents = "Trigonometric sine, element-wise.\n\n" + assert ( + contents + in pylsp_hover(doc._config, doc, numpy_sin_hov_position)["contents"][ + "value" + ] + ) -def test_hover(workspace): +def test_hover(workspace) -> None: # Over 'main' in def main(): - hov_position = {'line': 2, 'character': 6} + hov_position = {"line": 2, "character": 6} # Over the blank second line - no_hov_position = {'line': 1, 'character': 0} + no_hov_position = {"line": 1, "character": 0} doc = Document(DOC_URI, workspace, DOC) - contents = [{'language': 'python', 'value': 'main()'}, 'hello world'] + contents = { + "kind": "markdown", + "value": "```python\nmain(a: float, b: float)\n```\n\n\nhello world", + } - assert { - 'contents': contents - } == pylsp_hover(doc, hov_position) + assert {"contents": contents} == pylsp_hover(doc._config, doc, hov_position) - assert {'contents': ''} == pylsp_hover(doc, no_hov_position) + assert {"contents": ""} == pylsp_hover(doc._config, doc, no_hov_position) -def test_document_path_hover(workspace_other_root_path, tmpdir): +def test_hover_signature_formatting(workspace) -> None: + # Over 'main' in def main(): + hov_position = {"line": 2, "character": 6} + + doc = Document(DOC_URI, workspace, DOC) + # setting low line length should trigger reflow to multiple 
lines + doc._config.update({"signature": {"line_length": 10}}) + + contents = { + "kind": "markdown", + "value": "```python\nmain(\n a: float,\n b: float,\n)\n```\n\n\nhello world", + } + + assert {"contents": contents} == pylsp_hover(doc._config, doc, hov_position) + + +def test_hover_signature_formatting_opt_out(workspace) -> None: + # Over 'main' in def main(): + hov_position = {"line": 2, "character": 6} + + doc = Document(DOC_URI, workspace, DOC) + doc._config.update({"signature": {"line_length": 10, "formatter": None}}) + + contents = { + "kind": "markdown", + "value": "```python\nmain(a: float, b: float)\n```\n\n\nhello world", + } + + assert {"contents": contents} == pylsp_hover(doc._config, doc, hov_position) + + +def test_document_path_hover(workspace_other_root_path, tmpdir) -> None: # Create a dummy module out of the workspace's root_path and try to get # a definition on it in another file placed next to it. module_content = ''' @@ -91,11 +135,25 @@ def foo(): # Content of doc to test definition doc_content = """from mymodule import foo foo""" - doc_path = str(tmpdir) + os.path.sep + 'myfile.py' + doc_path = str(tmpdir) + os.path.sep + "myfile.py" doc_uri = uris.from_fs_path(doc_path) doc = Document(doc_uri, workspace_other_root_path, doc_content) - cursor_pos = {'line': 1, 'character': 3} - contents = pylsp_hover(doc, cursor_pos)['contents'] + cursor_pos = {"line": 1, "character": 3} + contents = pylsp_hover(doc._config, doc, cursor_pos)["contents"] + + assert "A docstring for foo." in contents["value"] + + +def test_hover_without_docstring(workspace_with_signature_docstring_disabled) -> None: + # Over 'main' in def main(): + hov_position = {"line": 2, "character": 6} + + doc = Document(DOC_URI, workspace_with_signature_docstring_disabled, DOC) + + contents = { + "kind": "markdown", + "value": "```python\nmain(a: float, b: float)\n```\n", + } - assert contents[1] == 'A docstring for foo.' 
+ assert {"contents": contents} == pylsp_hover(doc._config, doc, hov_position) diff --git a/test/plugins/test_jedi_rename.py b/test/plugins/test_jedi_rename.py index fb2f97f1..349274be 100644 --- a/test/plugins/test_jedi_rename.py +++ b/test/plugins/test_jedi_rename.py @@ -2,78 +2,104 @@ # Copyright 2021- Python Language Server Contributors. import os -import sys import pytest + from pylsp import uris from pylsp.plugins.jedi_rename import pylsp_rename from pylsp.workspace import Document -LT_PY36 = sys.version_info.major < 3 or (sys.version_info.major == 3 and sys.version_info.minor < 6) - -DOC_NAME = 'test1.py' -DOC = '''class Test1(): +DOC_NAME = "test1.py" +DOC = """class Test1(): pass class Test2(Test1): pass -''' +""" -DOC_NAME_EXTRA = 'test2.py' -DOC_EXTRA = '''from test1 import Test1 +DOC_NAME_EXTRA = "test2.py" +DOC_EXTRA = """from test1 import Test1 x = Test1() -''' +""" + +DOC_NAME_SIMPLE = "test3.py" +DOC_SIMPLE = "foo = 12" @pytest.fixture def tmp_workspace(temp_workspace_factory): - return temp_workspace_factory({ - DOC_NAME: DOC, - DOC_NAME_EXTRA: DOC_EXTRA - }) + return temp_workspace_factory( + {DOC_NAME: DOC, DOC_NAME_EXTRA: DOC_EXTRA, DOC_NAME_SIMPLE: DOC_SIMPLE} + ) -@pytest.mark.skipif(LT_PY36, reason='Jedi refactoring isnt supported on Python 2.x/3.5') -def test_jedi_rename(tmp_workspace, config): # pylint: disable=redefined-outer-name +def test_jedi_rename(tmp_workspace, config) -> None: # rename the `Test1` class - position = {'line': 0, 'character': 6} + position = {"line": 0, "character": 6} DOC_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC_NAME)) doc = Document(DOC_URI, tmp_workspace) - result = pylsp_rename(config, tmp_workspace, doc, position, 'ShouldBeRenamed') + result = pylsp_rename(config, tmp_workspace, doc, position, "ShouldBeRenamed") assert len(result.keys()) == 1 - changes = result.get('documentChanges') + changes = result.get("documentChanges") assert len(changes) == 2 - assert 
changes[0]['textDocument']['uri'] == doc.uri - assert changes[0]['textDocument']['version'] == doc.version - assert changes[0].get('edits') == [ + assert changes[0]["textDocument"]["uri"] == doc.uri + assert changes[0]["textDocument"]["version"] == doc.version + assert changes[0].get("edits") == [ { - 'range': { - 'start': {'line': 0, 'character': 0}, - 'end': {'line': 5, 'character': 0}, + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 5, "character": 0}, }, - 'newText': 'class ShouldBeRenamed():\n pass\n\nclass Test2(ShouldBeRenamed):\n pass\n', + "newText": "class ShouldBeRenamed():\n pass\n\nclass Test2(ShouldBeRenamed):\n pass\n", } ] + path = os.path.join(tmp_workspace.root_path, DOC_NAME_EXTRA) uri_extra = uris.from_fs_path(path) - assert changes[1]['textDocument']['uri'] == uri_extra + assert changes[1]["textDocument"]["uri"] == uri_extra # This also checks whether documents not yet added via textDocument/didOpen # but that do need to be renamed in the project have a `null` version # number. - assert changes[1]['textDocument']['version'] is None - expected = 'from test1 import ShouldBeRenamed\nx = ShouldBeRenamed()\n' - if os.name == 'nt': + assert changes[1]["textDocument"]["version"] is None + + expected = "from test1 import ShouldBeRenamed\nx = ShouldBeRenamed()\n" + if os.name == "nt": # The .write method in the temp_workspace_factory functions writes # Windows-style line-endings. 
- expected = expected.replace('\n', '\r\n') - assert changes[1].get('edits') == [ + expected = expected.replace("\n", "\r\n") + assert changes[1].get("edits") == [ + { + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 2, "character": 0}, + }, + "newText": expected, + } + ] + + # Regression test for issue python-lsp/python-lsp-server#413 + # rename foo + position = {"line": 0, "character": 0} + DOC_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC_NAME_SIMPLE)) + doc = Document(DOC_URI, tmp_workspace) + + result = pylsp_rename(config, tmp_workspace, doc, position, "bar") + assert len(result.keys()) == 1 + + changes = result.get("documentChanges") + assert len(changes) == 1 + + assert changes[0]["textDocument"]["uri"] == doc.uri + assert changes[0]["textDocument"]["version"] == doc.version + assert changes[0].get("edits") == [ { - 'range': { - 'start': {'line': 0, 'character': 0}, - 'end': {'line': 2, 'character': 0}}, - 'newText': expected + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 0}, + }, + "newText": "bar = 12", } ] diff --git a/test/plugins/test_mccabe_lint.py b/test/plugins/test_mccabe_lint.py index c85a9965..f4df0c2c 100644 --- a/test/plugins/test_mccabe_lint.py +++ b/test/plugins/test_mccabe_lint.py @@ -2,8 +2,8 @@ # Copyright 2021- Python Language Server Contributors. 
from pylsp import lsp, uris -from pylsp.workspace import Document from pylsp.plugins import mccabe_lint +from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """def hello(): @@ -14,26 +14,26 @@ \tpass""" -def test_mccabe(config, workspace): +def test_mccabe(config, workspace) -> None: old_settings = config.settings try: - config.update({'plugins': {'mccabe': {'threshold': 1}}}) + config.update({"plugins": {"mccabe": {"threshold": 1}}}) doc = Document(DOC_URI, workspace, DOC) - diags = mccabe_lint.pylsp_lint(config, doc) + diags = mccabe_lint.pylsp_lint(config, workspace, doc) - assert all(d['source'] == 'mccabe' for d in diags) + assert all(d["source"] == "mccabe" for d in diags) # One we're expecting is: - msg = 'Cyclomatic complexity too high: 1 (threshold 1)' - mod_import = [d for d in diags if d['message'] == msg][0] + msg = "Cyclomatic complexity too high: 1 (threshold 1)" + mod_import = [d for d in diags if d["message"] == msg][0] - assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning - assert mod_import['range']['start'] == {'line': 0, 'character': 0} - assert mod_import['range']['end'] == {'line': 0, 'character': 6} + assert mod_import["severity"] == lsp.DiagnosticSeverity.Warning + assert mod_import["range"]["start"] == {"line": 0, "character": 0} + assert mod_import["range"]["end"] == {"line": 0, "character": 6} finally: config._settings = old_settings -def test_mccabe_syntax_error(config, workspace): +def test_mccabe_syntax_error(config, workspace) -> None: doc = Document(DOC_URI, workspace, DOC_SYNTAX_ERR) - assert mccabe_lint.pylsp_lint(config, doc) is None + assert mccabe_lint.pylsp_lint(config, workspace, doc) is None diff --git a/test/plugins/test_pycodestyle_lint.py b/test/plugins/test_pycodestyle_lint.py index e2381472..eea0b7de 100644 --- a/test/plugins/test_pycodestyle_lint.py +++ b/test/plugins/test_pycodestyle_lint.py @@ -2,9 +2,12 @@ # Copyright 2021- Python Language Server Contributors. 
import os + +import pytest + from pylsp import lsp, uris -from pylsp.workspace import Document from pylsp.plugins import pycodestyle_lint +from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """import sys @@ -21,48 +24,48 @@ def hello( ): """ -def test_pycodestyle(workspace): +def test_pycodestyle(workspace) -> None: doc = Document(DOC_URI, workspace, DOC) diags = pycodestyle_lint.pylsp_lint(workspace, doc) - assert all(d['source'] == 'pycodestyle' for d in diags) + assert all(d["source"] == "pycodestyle" for d in diags) # One we're expecting is: - msg = 'W191 indentation contains tabs' - mod_import = [d for d in diags if d['message'] == msg][0] + msg = "W191 indentation contains tabs" + mod_import = [d for d in diags if d["message"] == msg][0] - assert mod_import['code'] == 'W191' - assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning - assert mod_import['range']['start'] == {'line': 3, 'character': 0} - assert mod_import['range']['end'] == {'line': 3, 'character': 6} + assert mod_import["code"] == "W191" + assert mod_import["severity"] == lsp.DiagnosticSeverity.Warning + assert mod_import["range"]["start"] == {"line": 3, "character": 0} + assert mod_import["range"]["end"] == {"line": 3, "character": 6} - msg = 'W391 blank line at end of file' - mod_import = [d for d in diags if d['message'] == msg][0] + msg = "W391 blank line at end of file" + mod_import = [d for d in diags if d["message"] == msg][0] - assert mod_import['code'] == 'W391' - assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning - assert mod_import['range']['start'] == {'line': 10, 'character': 0} - assert mod_import['range']['end'] == {'line': 10, 'character': 1} + assert mod_import["code"] == "W391" + assert mod_import["severity"] == lsp.DiagnosticSeverity.Warning + assert mod_import["range"]["start"] == {"line": 10, "character": 0} + assert mod_import["range"]["end"] == {"line": 10, "character": 1} msg = "E201 whitespace after '('" - mod_import = 
[d for d in diags if d['message'] == msg][0] + mod_import = [d for d in diags if d["message"] == msg][0] - assert mod_import['code'] == 'E201' - assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning - assert mod_import['range']['start'] == {'line': 2, 'character': 10} - assert mod_import['range']['end'] == {'line': 2, 'character': 14} + assert mod_import["code"] == "E201" + assert mod_import["severity"] == lsp.DiagnosticSeverity.Warning + assert mod_import["range"]["start"] == {"line": 2, "character": 10} + assert mod_import["range"]["end"] == {"line": 2, "character": 14} msg = "E128 continuation line under-indented for visual indent" - mod_import = [d for d in diags if d['message'] == msg][0] + mod_import = [d for d in diags if d["message"] == msg][0] - assert mod_import['code'] == 'E128' - assert mod_import['severity'] == lsp.DiagnosticSeverity.Warning - assert mod_import['range']['start'] == {'line': 5, 'character': 1} - assert mod_import['range']['end'] == {'line': 5, 'character': 10} + assert mod_import["code"] == "E128" + assert mod_import["severity"] == lsp.DiagnosticSeverity.Warning + assert mod_import["range"]["start"] == {"line": 5, "character": 1} + assert mod_import["range"]["end"] == {"line": 5, "character": 10} -def test_pycodestyle_config(workspace): - """ Test that we load config files properly. +def test_pycodestyle_config(workspace) -> None: + """Test that we load config files properly. 
Config files are loaded in the following order: tox.ini pep8.cfg setup.cfg pycodestyle.cfg @@ -76,37 +79,58 @@ def test_pycodestyle_config(workspace): If any section called 'pycodestyle' exists that will be solely used and any config in a 'pep8' section will be ignored """ - doc_uri = uris.from_fs_path(os.path.join(workspace.root_path, 'test.py')) + doc_uri = uris.from_fs_path(os.path.join(workspace.root_path, "test.py")) workspace.put_document(doc_uri, DOC) doc = workspace.get_document(doc_uri) # Make sure we get a warning for 'indentation contains tabs' diags = pycodestyle_lint.pylsp_lint(workspace, doc) - assert [d for d in diags if d['code'] == 'W191'] + assert [d for d in diags if d["code"] == "W191"] content = { - 'setup.cfg': ('[pycodestyle]\nignore = W191, E201, E128', True), - 'tox.ini': ('', False) + "setup.cfg": ("[pycodestyle]\nignore = W191, E201, E128", True), + "tox.ini": ("", False), } for conf_file, (content, working) in list(content.items()): # Now we'll add config file to ignore it - with open(os.path.join(workspace.root_path, conf_file), 'w+', encoding='utf-8') as f: + with open( + os.path.join(workspace.root_path, conf_file), "w+", encoding="utf-8" + ) as f: f.write(content) workspace._config.settings.cache_clear() # And make sure we don't get any warnings diags = pycodestyle_lint.pylsp_lint(workspace, doc) - assert len([d for d in diags if d['code'] == 'W191']) == (0 if working else 1) - assert len([d for d in diags if d['code'] == 'E201']) == (0 if working else 1) - assert [d for d in diags if d['code'] == 'W391'] + assert len([d for d in diags if d["code"] == "W191"]) == (0 if working else 1) + assert len([d for d in diags if d["code"] == "E201"]) == (0 if working else 1) + assert [d for d in diags if d["code"] == "W391"] os.unlink(os.path.join(workspace.root_path, conf_file)) # Make sure we can ignore via the PYLS config as well - workspace._config.update({'plugins': {'pycodestyle': {'ignore': ['W191', 'E201']}}}) + 
workspace._config.update({"plugins": {"pycodestyle": {"ignore": ["W191", "E201"]}}}) # And make sure we only get one warning diags = pycodestyle_lint.pylsp_lint(workspace, doc) - assert not [d for d in diags if d['code'] == 'W191'] - assert not [d for d in diags if d['code'] == 'E201'] - assert [d for d in diags if d['code'] == 'W391'] + assert not [d for d in diags if d["code"] == "W191"] + assert not [d for d in diags if d["code"] == "E201"] + assert [d for d in diags if d["code"] == "W391"] + + +@pytest.mark.parametrize("newline", ["\r\n", "\r"]) +def test_line_endings(workspace, newline) -> None: + """ + Check that Pycodestyle doesn't generate false positives with line endings + other than LF. + """ + # Create simple source that should give false positives + source = f"try:{newline} 1/0{newline}except Exception:{newline} pass{newline}" + + # Create document + doc = Document(DOC_URI, workspace, source) + + # Get diagnostics + diags = pycodestyle_lint.pylsp_lint(workspace, doc) + + # Assert no diagnostics were given + assert len(diags) == 0 diff --git a/test/plugins/test_pydocstyle_lint.py b/test/plugins/test_pydocstyle_lint.py index c6d8fa11..383aaf1f 100644 --- a/test/plugins/test_pydocstyle_lint.py +++ b/test/plugins/test_pydocstyle_lint.py @@ -2,9 +2,10 @@ # Copyright 2021- Python Language Server Contributors. 
import os + from pylsp import lsp, uris -from pylsp.workspace import Document from pylsp.plugins import pydocstyle_lint +from pylsp.workspace import Document DOC_URI = uris.from_fs_path(os.path.join(os.path.dirname(__file__), "pydocstyle.py")) TEST_DOC_URI = uris.from_fs_path(__file__) @@ -18,41 +19,41 @@ def hello(): """ -def test_pydocstyle(config, workspace): +def test_pydocstyle(config, workspace) -> None: doc = Document(DOC_URI, workspace, DOC) - diags = pydocstyle_lint.pylsp_lint(config, doc) + diags = pydocstyle_lint.pylsp_lint(config, workspace, doc) - assert all(d['source'] == 'pydocstyle' for d in diags) + assert all(d["source"] == "pydocstyle" for d in diags) # One we're expecting is: assert diags[0] == { - 'code': 'D100', - 'message': 'D100: Missing docstring in public module', - 'severity': lsp.DiagnosticSeverity.Warning, - 'range': { - 'start': {'line': 0, 'character': 0}, - 'end': {'line': 0, 'character': 11}, + "code": "D100", + "message": "D100: Missing docstring in public module", + "severity": lsp.DiagnosticSeverity.Warning, + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 11}, }, - 'source': 'pydocstyle' + "source": "pydocstyle", } -def test_pydocstyle_test_document(config, workspace): +def test_pydocstyle_test_document(config, workspace) -> None: # The default --match argument excludes test_* documents. 
doc = Document(TEST_DOC_URI, workspace, "") - diags = pydocstyle_lint.pylsp_lint(config, doc) + diags = pydocstyle_lint.pylsp_lint(config, workspace, doc) assert not diags -def test_pydocstyle_empty_source(config, workspace): +def test_pydocstyle_empty_source(config, workspace) -> None: doc = Document(DOC_URI, workspace, "") - diags = pydocstyle_lint.pylsp_lint(config, doc) - assert diags[0]['message'] == 'D100: Missing docstring in public module' + diags = pydocstyle_lint.pylsp_lint(config, workspace, doc) + assert diags[0]["message"] == "D100: Missing docstring in public module" assert len(diags) == 1 -def test_pydocstyle_invalid_source(config, workspace): +def test_pydocstyle_invalid_source(config, workspace) -> None: doc = Document(DOC_URI, workspace, "bad syntax") - diags = pydocstyle_lint.pylsp_lint(config, doc) + diags = pydocstyle_lint.pylsp_lint(config, workspace, doc) # We're unable to parse the file, so can't get any pydocstyle diagnostics assert not diags diff --git a/test/plugins/test_pyflakes_lint.py b/test/plugins/test_pyflakes_lint.py index aa2086ce..8ab36320 100644 --- a/test/plugins/test_pyflakes_lint.py +++ b/test/plugins/test_pyflakes_lint.py @@ -4,8 +4,8 @@ import sys from pylsp import lsp, uris -from pylsp.workspace import Document from pylsp.plugins import pyflakes_lint +from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) DOC = """import sys @@ -28,42 +28,42 @@ def hello(): """ -def test_pyflakes(workspace): +def test_pyflakes(workspace) -> None: doc = Document(DOC_URI, workspace, DOC) - diags = pyflakes_lint.pylsp_lint(doc) + diags = pyflakes_lint.pylsp_lint(workspace, doc) # One we're expecting is: - msg = '\'sys\' imported but unused' - unused_import = [d for d in diags if d['message'] == msg][0] + msg = "'sys' imported but unused" + unused_import = [d for d in diags if d["message"] == msg][0] - assert unused_import['range']['start'] == {'line': 0, 'character': 0} - assert unused_import['severity'] == 
lsp.DiagnosticSeverity.Warning + assert unused_import["range"]["start"] == {"line": 0, "character": 0} + assert unused_import["severity"] == lsp.DiagnosticSeverity.Warning -def test_syntax_error_pyflakes(workspace): +def test_syntax_error_pyflakes(workspace) -> None: doc = Document(DOC_URI, workspace, DOC_SYNTAX_ERR) - diag = pyflakes_lint.pylsp_lint(doc)[0] + diag = pyflakes_lint.pylsp_lint(workspace, doc)[0] if sys.version_info[:2] >= (3, 10): - assert diag['message'] == "expected ':'" + assert diag["message"] == "expected ':'" else: - assert diag['message'] == 'invalid syntax' - assert diag['range']['start'] == {'line': 0, 'character': 12} - assert diag['severity'] == lsp.DiagnosticSeverity.Error + assert diag["message"] == "invalid syntax" + assert diag["range"]["start"] == {"line": 0, "character": 12} + assert diag["severity"] == lsp.DiagnosticSeverity.Error -def test_undefined_name_pyflakes(workspace): +def test_undefined_name_pyflakes(workspace) -> None: doc = Document(DOC_URI, workspace, DOC_UNDEFINED_NAME_ERR) - diag = pyflakes_lint.pylsp_lint(doc)[0] + diag = pyflakes_lint.pylsp_lint(workspace, doc)[0] - assert diag['message'] == 'undefined name \'b\'' - assert diag['range']['start'] == {'line': 0, 'character': 4} - assert diag['severity'] == lsp.DiagnosticSeverity.Error + assert diag["message"] == "undefined name 'b'" + assert diag["range"]["start"] == {"line": 0, "character": 4} + assert diag["severity"] == lsp.DiagnosticSeverity.Error -def test_unicode_encoding(workspace): +def test_unicode_encoding(workspace) -> None: doc = Document(DOC_URI, workspace, DOC_ENCODING) - diags = pyflakes_lint.pylsp_lint(doc) + diags = pyflakes_lint.pylsp_lint(workspace, doc) assert len(diags) == 1 - assert diags[0]['message'] == '\'sys\' imported but unused' + assert diags[0]["message"] == "'sys' imported but unused" diff --git a/test/plugins/test_pylint_lint.py b/test/plugins/test_pylint_lint.py index cbad9c3b..b4d511d0 100644 --- a/test/plugins/test_pylint_lint.py +++ 
b/test/plugins/test_pylint_lint.py @@ -4,13 +4,12 @@ import contextlib import os -import sys import tempfile +from pathlib import Path -from test import py2_only, py3_only, IS_PY3 from pylsp import lsp, uris -from pylsp.workspace import Document from pylsp.plugins import pylint_lint +from pylsp.workspace import Document, Workspace DOC_URI = uris.from_fs_path(__file__) DOC = """import sys @@ -27,9 +26,9 @@ def hello(): @contextlib.contextmanager -def temp_document(doc_text, workspace): +def temp_document(doc_text, workspace) -> None: try: - with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file: + with tempfile.NamedTemporaryFile(mode="w", delete=False) as temp_file: name = temp_file.name temp_file.write(doc_text) yield Document(uris.from_fs_path(name), workspace) @@ -37,79 +36,72 @@ def temp_document(doc_text, workspace): os.remove(name) -def write_temp_doc(document, contents): - with open(document.path, 'w', encoding='utf-8') as temp_file: +def write_temp_doc(document, contents) -> None: + with open(document.path, "w", encoding="utf-8") as temp_file: temp_file.write(contents) -def test_pylint(config, workspace): +def test_pylint(config, workspace) -> None: with temp_document(DOC, workspace) as doc: - diags = pylint_lint.pylsp_lint(config, doc, True) + diags = pylint_lint.pylsp_lint(config, workspace, doc, True) - msg = '[unused-import] Unused import sys' - unused_import = [d for d in diags if d['message'] == msg][0] + msg = "[unused-import] Unused import sys" + unused_import = [d for d in diags if d["message"] == msg][0] - assert unused_import['range']['start'] == {'line': 0, 'character': 0} - assert unused_import['severity'] == lsp.DiagnosticSeverity.Warning + assert unused_import["range"]["start"] == {"line": 0, "character": 0} + assert unused_import["severity"] == lsp.DiagnosticSeverity.Warning + assert unused_import["tags"] == [lsp.DiagnosticTag.Unnecessary] - if IS_PY3: - # test running pylint in stdin - 
config.plugin_settings('pylint')['executable'] = 'pylint' - diags = pylint_lint.pylsp_lint(config, doc, True) + # test running pylint in stdin + config.plugin_settings("pylint")["executable"] = "pylint" + diags = pylint_lint.pylsp_lint(config, workspace, doc, True) - msg = 'Unused import sys (unused-import)' - unused_import = [d for d in diags if d['message'] == msg][0] + msg = "Unused import sys (unused-import)" + unused_import = [d for d in diags if d["message"] == msg][0] - assert unused_import['range']['start'] == { - 'line': 0, - 'character': 0, - } - assert unused_import['severity'] == lsp.DiagnosticSeverity.Warning + assert unused_import["range"]["start"] == { + "line": 0, + "character": 0, + } + assert unused_import["severity"] == lsp.DiagnosticSeverity.Warning -@py3_only -def test_syntax_error_pylint_py3(config, workspace): +def test_syntax_error_pylint(config, workspace) -> None: with temp_document(DOC_SYNTAX_ERR, workspace) as doc: - diag = pylint_lint.pylsp_lint(config, doc, True)[0] + diag = pylint_lint.pylsp_lint(config, workspace, doc, True)[0] - if sys.version_info[:2] >= (3, 10): - assert diag['message'].count("[syntax-error] expected ':'") - else: - assert diag['message'].startswith('[syntax-error] invalid syntax') + assert diag["message"].startswith("[syntax-error]") + assert diag["message"].count("expected ':'") or diag["message"].count( + "invalid syntax" + ) # Pylint doesn't give column numbers for invalid syntax. 
- assert diag['range']['start'] == {'line': 0, 'character': 12} - assert diag['severity'] == lsp.DiagnosticSeverity.Error + assert diag["range"]["start"] == {"line": 0, "character": 12} + assert diag["severity"] == lsp.DiagnosticSeverity.Error + assert "tags" not in diag # test running pylint in stdin - config.plugin_settings('pylint')['executable'] = 'pylint' - diag = pylint_lint.pylsp_lint(config, doc, True)[0] - - assert diag['message'].count("expected ':'") or diag['message'].startswith('invalid syntax') - # Pylint doesn't give column numbers for invalid syntax. - assert diag['range']['start'] == {'line': 0, 'character': 12} - assert diag['severity'] == lsp.DiagnosticSeverity.Error - - -@py2_only -def test_syntax_error_pylint_py2(config, workspace): - with temp_document(DOC_SYNTAX_ERR, workspace) as doc: - diag = pylint_lint.pylsp_lint(config, doc, True)[0] + config.plugin_settings("pylint")["executable"] = "pylint" + diag = pylint_lint.pylsp_lint(config, workspace, doc, True)[0] - assert diag['message'].startswith('[syntax-error] invalid syntax') + assert diag["message"].count("expected ':'") or diag["message"].count( + "invalid syntax" + ) # Pylint doesn't give column numbers for invalid syntax. - assert diag['range']['start'] == {'line': 0, 'character': 0} - assert diag['severity'] == lsp.DiagnosticSeverity.Error + assert diag["range"]["start"] == {"line": 0, "character": 12} + assert diag["severity"] == lsp.DiagnosticSeverity.Error -def test_lint_free_pylint(config, workspace): +def test_lint_free_pylint(config, workspace) -> None: # Can't use temp_document because it might give us a file that doesn't # match pylint's naming requirements. We should be keeping this file clean # though, so it works for a test of an empty lint. 
+ ws = Workspace(str(Path(__file__).absolute().parents[2]), workspace._endpoint) assert not pylint_lint.pylsp_lint( - config, Document(uris.from_fs_path(__file__), workspace), True) + config, ws, Document(uris.from_fs_path(__file__), ws), True + ) -def test_lint_caching(workspace): +def test_lint_caching(workspace) -> None: # Pylint can only operate on files, not in-memory contents. We cache the # diagnostics after a run so we can continue displaying them until the file # is saved again. @@ -119,7 +111,7 @@ def test_lint_caching(workspace): # need to ensure that pylint doesn't give us invalid-name when our temp # file has capital letters in its name. - flags = '--disable=invalid-name' + flags = "--disable=invalid-name" with temp_document(DOC, workspace) as doc: # Start with a file with errors. diags = pylint_lint.PylintLinter.lint(doc, True, flags) @@ -127,7 +119,7 @@ def test_lint_caching(workspace): # Fix lint errors and write the changes to disk. Run the linter in the # in-memory mode to check the cached diagnostic behavior. - write_temp_doc(doc, '') + write_temp_doc(doc, "") assert pylint_lint.PylintLinter.lint(doc, False, flags) == diags # Now check the on-disk behavior. @@ -137,10 +129,11 @@ def test_lint_caching(workspace): assert not pylint_lint.PylintLinter.lint(doc, False, flags) -def test_per_file_caching(config, workspace): +def test_per_file_caching(config, workspace) -> None: # Ensure that diagnostics are cached per-file. 
with temp_document(DOC, workspace) as doc: - assert pylint_lint.pylsp_lint(config, doc, True) + assert pylint_lint.pylsp_lint(config, workspace, doc, True) assert not pylint_lint.pylsp_lint( - config, Document(uris.from_fs_path(__file__), workspace), False) + config, workspace, Document(uris.from_fs_path(__file__), workspace), False + ) diff --git a/test/plugins/test_references.py b/test/plugins/test_references.py index c1df037b..f5121693 100644 --- a/test/plugins/test_references.py +++ b/test/plugins/test_references.py @@ -6,12 +6,11 @@ import pytest from pylsp import uris -from pylsp.workspace import Document from pylsp.plugins.references import pylsp_references +from pylsp.workspace import Document - -DOC1_NAME = 'test1.py' -DOC2_NAME = 'test2.py' +DOC1_NAME = "test1.py" +DOC2_NAME = "test2.py" DOC1 = """class Test1(): pass @@ -28,19 +27,21 @@ @pytest.fixture def tmp_workspace(temp_workspace_factory): - return temp_workspace_factory({ - DOC1_NAME: DOC1, - DOC2_NAME: DOC2, - }) + return temp_workspace_factory( + { + DOC1_NAME: DOC1, + DOC2_NAME: DOC2, + } + ) -def test_references(tmp_workspace): # pylint: disable=redefined-outer-name +def test_references(tmp_workspace) -> None: # Over 'Test1' in class Test1(): - position = {'line': 0, 'character': 8} + position = {"line": 0, "character": 8} DOC1_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC1_NAME)) doc1 = Document(DOC1_URI, tmp_workspace) - refs = pylsp_references(doc1, position) + refs = pylsp_references(doc1, position, exclude_declaration=False) # Definition, the import and the instantiation assert len(refs) == 3 @@ -50,30 +51,32 @@ def test_references(tmp_workspace): # pylint: disable=redefined-outer-name assert len(no_def_refs) == 1 # Make sure our definition is correctly located - doc1_ref = [u for u in refs if u['uri'] == DOC1_URI][0] - assert doc1_ref['range']['start'] == {'line': 0, 'character': 6} - assert doc1_ref['range']['end'] == {'line': 0, 'character': 11} + doc1_ref = [u for u 
in refs if u["uri"] == DOC1_URI][0] + assert doc1_ref["range"]["start"] == {"line": 0, "character": 6} + assert doc1_ref["range"]["end"] == {"line": 0, "character": 11} # Make sure our import is correctly located - doc2_import_ref = [u for u in refs if u['uri'] != DOC1_URI][0] - assert doc2_import_ref['range']['start'] == {'line': 0, 'character': 18} - assert doc2_import_ref['range']['end'] == {'line': 0, 'character': 23} + doc2_import_ref = [u for u in refs if u["uri"] != DOC1_URI][0] + assert doc2_import_ref["range"]["start"] == {"line": 0, "character": 18} + assert doc2_import_ref["range"]["end"] == {"line": 0, "character": 23} - doc2_usage_ref = [u for u in refs if u['uri'] != DOC1_URI][1] - assert doc2_usage_ref['range']['start'] == {'line': 3, 'character': 4} - assert doc2_usage_ref['range']['end'] == {'line': 3, 'character': 9} + doc2_usage_ref = [u for u in refs if u["uri"] != DOC1_URI][1] + assert doc2_usage_ref["range"]["start"] == {"line": 3, "character": 4} + assert doc2_usage_ref["range"]["end"] == {"line": 3, "character": 9} -def test_references_builtin(tmp_workspace): # pylint: disable=redefined-outer-name +def test_references_builtin(tmp_workspace) -> None: # Over 'UnicodeError': - position = {'line': 4, 'character': 7} + position = {"line": 4, "character": 7} doc2_uri = uris.from_fs_path(os.path.join(str(tmp_workspace.root_path), DOC2_NAME)) doc2 = Document(doc2_uri, tmp_workspace) - refs = pylsp_references(doc2, position) + refs = pylsp_references(doc2, position, exclude_declaration=False) assert len(refs) >= 1 - expected = {'start': {'line': 4, 'character': 7}, - 'end': {'line': 4, 'character': 19}} - ranges = [r['range'] for r in refs] + expected = { + "start": {"line": 4, "character": 7}, + "end": {"line": 4, "character": 19}, + } + ranges = [r["range"] for r in refs] assert expected in ranges diff --git a/test/plugins/test_rope_rename.py b/test/plugins/test_rope_rename.py deleted file mode 100644 index 285a565e..00000000 --- 
a/test/plugins/test_rope_rename.py +++ /dev/null @@ -1,47 +0,0 @@ -# Copyright 2017-2020 Palantir Technologies, Inc. -# Copyright 2021- Python Language Server Contributors. - -import os - -import pytest -from pylsp import uris -from pylsp.plugins.rope_rename import pylsp_rename -from pylsp.workspace import Document - -DOC_NAME = "test1.py" -DOC = """class Test1(): - pass - -class Test2(Test1): - pass -""" - - -@pytest.fixture -def tmp_workspace(temp_workspace_factory): - return temp_workspace_factory({DOC_NAME: DOC}) - - -def test_rope_rename(tmp_workspace, config): # pylint: disable=redefined-outer-name - position = {"line": 0, "character": 6} - DOC_URI = uris.from_fs_path(os.path.join(tmp_workspace.root_path, DOC_NAME)) - doc = Document(DOC_URI, tmp_workspace) - - result = pylsp_rename(config, tmp_workspace, doc, position, "ShouldBeRenamed") - assert len(result.keys()) == 1 - - changes = result.get("documentChanges") - assert len(changes) == 1 - changes = changes[0] - - # Note that this test differs from test_jedi_rename, because rope does not - # seem to modify files that haven't been opened with textDocument/didOpen. - assert changes.get("edits") == [ - { - "range": { - "start": {"line": 0, "character": 0}, - "end": {"line": 5, "character": 0}, - }, - "newText": "class ShouldBeRenamed():\n pass\n\nclass Test2(ShouldBeRenamed):\n pass\n", - } - ] diff --git a/test/plugins/test_signature.py b/test/plugins/test_signature.py index 51cecb56..82a90fc2 100644 --- a/test/plugins/test_signature.py +++ b/test/plugins/test_signature.py @@ -2,6 +2,7 @@ # Copyright 2021- Python Language Server Contributors. 
import pytest + from pylsp import uris from pylsp.plugins import signature from pylsp.workspace import Document @@ -41,56 +42,79 @@ def main(param1=None, """ -def test_no_signature(workspace): +def test_no_signature(workspace) -> None: # Over blank line - sig_position = {'line': 9, 'character': 0} + sig_position = {"line": 9, "character": 0} doc = Document(DOC_URI, workspace, DOC) - sigs = signature.pylsp_signature_help(doc, sig_position)['signatures'] + sigs = signature.pylsp_signature_help(doc._config, doc, sig_position)["signatures"] assert not sigs -def test_signature(workspace): +def test_signature(workspace) -> None: # Over '( ' in main( - sig_position = {'line': 10, 'character': 5} + sig_position = {"line": 10, "character": 5} doc = Document(DOC_URI, workspace, DOC) - sig_info = signature.pylsp_signature_help(doc, sig_position) + sig_info = signature.pylsp_signature_help(doc._config, doc, sig_position) - sigs = sig_info['signatures'] + sigs = sig_info["signatures"] assert len(sigs) == 1 - assert sigs[0]['label'] == 'main(param1, param2)' - assert sigs[0]['parameters'][0]['label'] == 'param1' - assert sigs[0]['parameters'][0]['documentation'] == 'Docs for param1' + assert sigs[0]["label"] == "main(param1, param2)" + assert sigs[0]["parameters"][0]["label"] == "param1" + assert sigs[0]["parameters"][0]["documentation"] == { + "kind": "markdown", + "value": "Docs for param1", + } - assert sig_info['activeParameter'] == 0 + assert sig_info["activeParameter"] == 0 -def test_multi_line_signature(workspace): +def test_multi_line_signature(workspace) -> None: # Over '( ' in main( - sig_position = {'line': 17, 'character': 5} + sig_position = {"line": 17, "character": 5} doc = Document(DOC_URI, workspace, MULTI_LINE_DOC) - sig_info = signature.pylsp_signature_help(doc, sig_position) + sig_info = signature.pylsp_signature_help(doc._config, doc, sig_position) - sigs = sig_info['signatures'] + sigs = sig_info["signatures"] assert len(sigs) == 1 - assert sigs[0]['label'] 
== ( - 'main(param1=None, param2=None, param3=None, param4=None, ' - 'param5=None, param6=None, param7=None, param8=None)' + assert sigs[0]["label"] == ( + "main(param1=None, param2=None, param3=None, param4=None, " + "param5=None, param6=None, param7=None, param8=None)" ) - assert sigs[0]['parameters'][0]['label'] == 'param1' - assert sigs[0]['parameters'][0]['documentation'] == 'Docs for param1' + assert sigs[0]["parameters"][0]["label"] == "param1" + assert sigs[0]["parameters"][0]["documentation"] == { + "kind": "markdown", + "value": "Docs for param1", + } + + assert sig_info["activeParameter"] == 0 + + +@pytest.mark.parametrize( + "regex,doc", + [ + (signature.SPHINX, " :param test: parameter docstring"), + (signature.EPYDOC, " @param test: parameter docstring"), + (signature.GOOGLE, " test (str): parameter docstring"), + ], +) +def test_docstring_params(regex, doc) -> None: + m = regex.match(doc) + assert m.group("param") == "test" + assert m.group("doc") == "parameter docstring" + - assert sig_info['activeParameter'] == 0 +def test_signature_without_docstring( + workspace_with_signature_docstring_disabled, +) -> None: + # Over '( ' in main( + sig_position = {"line": 10, "character": 5} + doc = Document(DOC_URI, workspace_with_signature_docstring_disabled, DOC) + sig_info = signature.pylsp_signature_help(doc._config, doc, sig_position) -@pytest.mark.parametrize('regex,doc', [ - (signature.SPHINX, " :param test: parameter docstring"), - (signature.EPYDOC, " @param test: parameter docstring"), - (signature.GOOGLE, " test (str): parameter docstring") -]) -def test_docstring_params(regex, doc): - m = regex.match(doc) - assert m.group('param') == "test" - assert m.group('doc') == "parameter docstring" + sigs = sig_info["signatures"] + assert len(sigs) == 1 + assert sigs[0]["documentation"] == {"kind": "markdown", "value": ""} diff --git a/test/plugins/test_symbols.py b/test/plugins/test_symbols.py index a25f5621..242a38a1 100644 --- a/test/plugins/test_symbols.py 
+++ b/test/plugins/test_symbols.py @@ -7,14 +7,13 @@ import pytest from pylsp import uris -from pylsp.plugins.symbols import pylsp_document_symbols from pylsp.lsp import SymbolKind +from pylsp.plugins.symbols import pylsp_document_symbols from pylsp.workspace import Document - -PY2 = sys.version[0] == '2' -LINUX = sys.platform.startswith('linux') -CI = os.environ.get('CI') +PY2 = sys.version[0] == "2" +LINUX = sys.platform.startswith("linux") +CI = os.environ.get("CI") DOC_URI = uris.from_fs_path(__file__) DOC = """import sys @@ -31,27 +30,38 @@ def main(x): """ +DOC_IMPORTS = """from . import something +from ..module import something +from module import (a, b) + +def main(): + # import ignored + print("from module import x") # string with import + return something + +""" + def helper_check_symbols_all_scope(symbols): # All eight symbols (import sys, a, B, __init__, x, y, main, y) assert len(symbols) == 8 def sym(name): - return [s for s in symbols if s['name'] == name][0] + return [s for s in symbols if s["name"] == name][0] # Check we have some sane mappings to VSCode constants - assert sym('a')['kind'] == SymbolKind.Variable - assert sym('B')['kind'] == SymbolKind.Class - assert sym('__init__')['kind'] == SymbolKind.Method - assert sym('main')['kind'] == SymbolKind.Function + assert sym("a")["kind"] == SymbolKind.Variable + assert sym("B")["kind"] == SymbolKind.Class + assert sym("__init__")["kind"] == SymbolKind.Method + assert sym("main")["kind"] == SymbolKind.Function # Not going to get too in-depth here else we're just testing Jedi - assert sym('a')['location']['range']['start'] == {'line': 2, 'character': 0} + assert sym("a")["location"]["range"]["start"] == {"line": 2, "character": 0} def test_symbols(config, workspace): doc = Document(DOC_URI, workspace, DOC) - config.update({'plugins': {'jedi_symbols': {'all_scopes': False}}}) + config.update({"plugins": {"jedi_symbols": {"all_scopes": False}}}) symbols = pylsp_document_symbols(config, doc) # All four 
symbols (import sys, a, B, main) @@ -59,34 +69,64 @@ def test_symbols(config, workspace): assert len(symbols) == 5 def sym(name): - return [s for s in symbols if s['name'] == name][0] + return [s for s in symbols if s["name"] == name][0] # Check we have some sane mappings to VSCode constants - assert sym('a')['kind'] == SymbolKind.Variable - assert sym('B')['kind'] == SymbolKind.Class - assert sym('main')['kind'] == SymbolKind.Function + assert sym("a")["kind"] == SymbolKind.Variable + assert sym("B")["kind"] == SymbolKind.Class + assert sym("main")["kind"] == SymbolKind.Function # Not going to get too in-depth here else we're just testing Jedi - assert sym('a')['location']['range']['start'] == {'line': 2, 'character': 0} + assert sym("a")["location"]["range"]["start"] == {"line": 2, "character": 0} # Ensure that the symbol range spans the whole definition - assert sym('main')['location']['range']['start'] == {'line': 9, 'character': 0} - assert sym('main')['location']['range']['end'] == {'line': 12, 'character': 0} + assert sym("main")["location"]["range"]["start"] == {"line": 9, "character": 0} + assert sym("main")["location"]["range"]["end"] == {"line": 12, "character": 0} + + +def test_symbols_complex_imports(config, workspace): + doc = Document(DOC_URI, workspace, DOC_IMPORTS) + config.update({"plugins": {"jedi_symbols": {"all_scopes": False}}}) + symbols = pylsp_document_symbols(config, doc) + import_symbols = [s for s in symbols if s["kind"] == SymbolKind.Module] -def test_symbols_all_scopes(config, workspace): + assert len(import_symbols) == 4 + + names = [s["name"] for s in import_symbols] + assert "something" in names + assert "a" in names or "b" in names + + assert any( + s["name"] == "main" and s["kind"] == SymbolKind.Function for s in symbols + ) + + +def test_symbols_all_scopes(config, workspace) -> None: doc = Document(DOC_URI, workspace, DOC) symbols = pylsp_document_symbols(config, doc) helper_check_symbols_all_scope(symbols) 
-@pytest.mark.skipif(PY2 or not LINUX or not CI, reason="tested on linux and python 3 only") -def test_symbols_all_scopes_with_jedi_environment(workspace): +def test_symbols_non_existing_file(config, workspace, tmpdir) -> None: + path = tmpdir.join("foo.py") + # Check pre-condition: file must not exist + assert not path.check(exists=1) + + doc = Document(uris.from_fs_path(str(path)), workspace, DOC) + symbols = pylsp_document_symbols(config, doc) + helper_check_symbols_all_scope(symbols) + + +@pytest.mark.skipif( + PY2 or not LINUX or not CI, reason="tested on linux and python 3 only" +) +def test_symbols_all_scopes_with_jedi_environment(workspace) -> None: doc = Document(DOC_URI, workspace, DOC) # Update config extra environment - env_path = '/tmp/pyenv/bin/python' - settings = {'pylsp': {'plugins': {'jedi': {'environment': env_path}}}} + env_path = "/tmp/pyenv/bin/python" + settings = {"pylsp": {"plugins": {"jedi": {"environment": env_path}}}} doc.update_config(settings) symbols = pylsp_document_symbols(doc._config, doc) helper_check_symbols_all_scope(symbols) diff --git a/test/plugins/test_type_definition.py b/test/plugins/test_type_definition.py new file mode 100644 index 00000000..b433fc63 --- /dev/null +++ b/test/plugins/test_type_definition.py @@ -0,0 +1,96 @@ +# Copyright 2021- Python Language Server Contributors. 
+ +from pylsp import uris +from pylsp.plugins.type_definition import pylsp_type_definition +from pylsp.workspace import Document + +DOC_URI = uris.from_fs_path(__file__) +DOC = """\ +from dataclasses import dataclass + +@dataclass +class IntPair: + a: int + b: int + +def main() -> None: + l0 = list(1, 2) + + my_pair = IntPair(a=10, b=20) + print(f"Original pair: {my_pair}") +""" + + +def test_type_definitions(config, workspace) -> None: + # Over 'IntPair' in 'main' + cursor_pos = {"line": 10, "character": 14} + + # The definition of 'IntPair' + def_range = { + "start": {"line": 3, "character": 6}, + "end": {"line": 3, "character": 13}, + } + + doc = Document(DOC_URI, workspace, DOC) + assert [{"uri": DOC_URI, "range": def_range}] == pylsp_type_definition( + config, doc, cursor_pos + ) + + +def test_builtin_definition(config, workspace) -> None: + # Over 'list' in main + cursor_pos = {"line": 8, "character": 9} + + doc = Document(DOC_URI, workspace, DOC) + + defns = pylsp_type_definition(config, doc, cursor_pos) + assert len(defns) == 1 + assert defns[0]["uri"].endswith("builtins.pyi") + + +def test_mutli_file_type_definitions(config, workspace, tmpdir) -> None: + # Create a dummy module out of the workspace's root_path and try to get + # a definition on it in another file placed next to it. 
+ module_content = """\ +from dataclasses import dataclass + +@dataclass +class IntPair: + a: int + b: int +""" + p1 = tmpdir.join("intpair.py") + p1.write(module_content) + # The uri for intpair.py + module_path = str(p1) + module_uri = uris.from_fs_path(module_path) + + # Content of doc to test type definition + doc_content = """\ +from intpair import IntPair + +def main() -> None: + l0 = list(1, 2) + + my_pair = IntPair(a=10, b=20) + print(f"Original pair: {my_pair}") +""" + p2 = tmpdir.join("main.py") + p2.write(doc_content) + doc_path = str(p2) + doc_uri = uris.from_fs_path(doc_path) + + doc = Document(doc_uri, workspace, doc_content) + + # The range where IntPair is defined in intpair.py + def_range = { + "start": {"line": 3, "character": 6}, + "end": {"line": 3, "character": 13}, + } + + # The position where IntPair is called in main.py + cursor_pos = {"line": 5, "character": 14} + + assert [{"uri": module_uri, "range": def_range}] == pylsp_type_definition( + config, doc, cursor_pos + ) diff --git a/test/plugins/test_yapf_format.py b/test/plugins/test_yapf_format.py index cf4d9655..f69541a4 100644 --- a/test/plugins/test_yapf_format.py +++ b/test/plugins/test_yapf_format.py @@ -5,6 +5,7 @@ from pylsp import uris from pylsp.plugins.yapf_format import pylsp_format_document, pylsp_format_range +from pylsp.text_edit import apply_text_edits from pylsp.workspace import Document DOC_URI = uris.from_fs_path(__file__) @@ -26,72 +27,89 @@ """ -def test_format(workspace): +def test_format(workspace) -> None: doc = Document(DOC_URI, workspace, DOC) - res = pylsp_format_document(doc) + res = pylsp_format_document(workspace, doc, None) - assert len(res) == 1 - assert res[0]['newText'] == "A = ['h', 'w', 'a']\n\nB = ['h', 'w']\n" + assert apply_text_edits(doc, res) == "A = ['h', 'w', 'a']\n\nB = ['h', 'w']\n" -def test_range_format(workspace): +def test_range_format(workspace) -> None: doc = Document(DOC_URI, workspace, DOC) def_range = { - 'start': {'line': 0, 
'character': 0}, - 'end': {'line': 4, 'character': 10} + "start": {"line": 0, "character": 0}, + "end": {"line": 4, "character": 10}, } - res = pylsp_format_range(doc, def_range) - - assert len(res) == 1 + res = pylsp_format_range(doc, def_range, None) # Make sure B is still badly formatted - assert res[0]['newText'] == "A = ['h', 'w', 'a']\n\nB = ['h',\n\n\n'w']\n" + assert apply_text_edits(doc, res) == "A = ['h', 'w', 'a']\n\nB = ['h',\n\n\n'w']\n" -def test_no_change(workspace): +def test_no_change(workspace) -> None: doc = Document(DOC_URI, workspace, GOOD_DOC) - assert not pylsp_format_document(doc) + assert not pylsp_format_document(workspace, doc, options=None) -def test_config_file(tmpdir, workspace): +def test_config_file(tmpdir, workspace) -> None: # a config file in the same directory as the source file will be used - conf = tmpdir.join('.style.yapf') - conf.write('[style]\ncolumn_limit = 14') - src = tmpdir.join('test.py') + conf = tmpdir.join(".style.yapf") + conf.write("[style]\ncolumn_limit = 14") + src = tmpdir.join("test.py") doc = Document(uris.from_fs_path(src.strpath), workspace, DOC) + res = pylsp_format_document(workspace, doc, options=None) + # A was split on multiple lines because of column_limit from config file - assert pylsp_format_document(doc)[0]['newText'] == "A = [\n 'h', 'w',\n 'a'\n]\n\nB = ['h', 'w']\n" + assert ( + apply_text_edits(doc, res) + == "A = [\n 'h', 'w',\n 'a'\n]\n\nB = ['h', 'w']\n" + ) -@pytest.mark.parametrize('newline', ['\r\n', '\r']) -def test_line_endings(workspace, newline): - doc = Document(DOC_URI, workspace, f'import os;import sys{2 * newline}dict(a=1)') - res = pylsp_format_document(doc) +@pytest.mark.parametrize("newline", ["\r\n"]) +def test_line_endings(workspace, newline) -> None: + doc = Document(DOC_URI, workspace, f"import os;import sys{2 * newline}dict(a=1)") + res = pylsp_format_document(workspace, doc, options=None) - assert res[0]['newText'] == f'import os{newline}import sys{2 * 
newline}dict(a=1){newline}' + assert ( + apply_text_edits(doc, res) + == f"import os{newline}import sys{2 * newline}dict(a=1){newline}" + ) -def test_format_with_tab_size_option(workspace): +def test_format_with_tab_size_option(workspace) -> None: doc = Document(DOC_URI, workspace, FOUR_SPACE_DOC) - res = pylsp_format_document(doc, {"tabSize": "8"}) + res = pylsp_format_document(workspace, doc, {"tabSize": "8"}) - assert len(res) == 1 - assert res[0]['newText'] == FOUR_SPACE_DOC.replace(" ", " ") + assert apply_text_edits(doc, res) == FOUR_SPACE_DOC.replace(" ", " ") -def test_format_with_insert_spaces_option(workspace): +def test_format_with_insert_spaces_option(workspace) -> None: doc = Document(DOC_URI, workspace, FOUR_SPACE_DOC) - res = pylsp_format_document(doc, {"insertSpaces": False}) + res = pylsp_format_document(workspace, doc, {"insertSpaces": False}) - assert len(res) == 1 - assert res[0]['newText'] == FOUR_SPACE_DOC.replace(" ", "\t") + assert apply_text_edits(doc, res) == FOUR_SPACE_DOC.replace(" ", "\t") -def test_format_with_yapf_specific_option(workspace): +def test_format_with_yapf_specific_option(workspace) -> None: doc = Document(DOC_URI, workspace, FOUR_SPACE_DOC) - res = pylsp_format_document(doc, {"USE_TABS": True}) + res = pylsp_format_document(workspace, doc, {"USE_TABS": True}) + + assert apply_text_edits(doc, res) == FOUR_SPACE_DOC.replace(" ", "\t") + + +def test_format_returns_text_edit_per_line(workspace) -> None: + single_space_indent = """def wow(): + log("x") + log("hi")""" + doc = Document(DOC_URI, workspace, single_space_indent) + res = pylsp_format_document(workspace, doc, options=None) - assert len(res) == 1 - assert res[0]['newText'] == FOUR_SPACE_DOC.replace(" ", "\t") + # two removes and two adds + assert len(res) == 4 + assert res[0]["newText"] == "" + assert res[1]["newText"] == "" + assert res[2]["newText"] == ' log("x")\n' + assert res[3]["newText"] == ' log("hi")\n' diff --git a/test/test_configuration.py 
b/test/test_configuration.py new file mode 100644 index 00000000..e6b40121 --- /dev/null +++ b/test/test_configuration.py @@ -0,0 +1,53 @@ +# Copyright 2021- Python Language Server Contributors. + +from unittest.mock import patch + +import pytest + +from pylsp import IS_WIN +from test.test_notebook_document import wait_for_condition +from test.test_utils import send_initialize_request + +INITIALIZATION_OPTIONS = { + "pylsp": { + "plugins": { + "flake8": {"enabled": True}, + "pycodestyle": {"enabled": False}, + "pyflakes": {"enabled": False}, + }, + } +} + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_set_flake8_using_init_opts(client_server_pair) -> None: + client, server = client_server_pair + send_initialize_request(client, INITIALIZATION_OPTIONS) + for key, value in INITIALIZATION_OPTIONS["pylsp"]["plugins"].items(): + assert server.workspace._config.settings().get("plugins").get(key).get( + "enabled" + ) == value.get("enabled") + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_set_flake8_using_workspace_did_change_configuration( + client_server_pair, +) -> None: + client, server = client_server_pair + send_initialize_request(client, None) + assert ( + server.workspace._config.settings().get("plugins").get("flake8").get("enabled") + is False + ) + + with patch.object(server.workspace, "update_config") as mock_update_config: + client._endpoint.notify( + "workspace/didChangeConfiguration", + {"settings": INITIALIZATION_OPTIONS}, + ) + wait_for_condition(lambda: mock_update_config.call_count >= 1) + + for key, value in INITIALIZATION_OPTIONS["pylsp"]["plugins"].items(): + assert server.workspace._config.settings().get("plugins").get(key).get( + "enabled" + ) == value.get("enabled") diff --git a/test/test_document.py b/test/test_document.py index 3dcabb68..f31d446e 100644 --- a/test/test_document.py +++ b/test/test_document.py @@ -1,99 +1,103 @@ # Copyright 2017-2020 Palantir Technologies, Inc. 
# Copyright 2021- Python Language Server Contributors. -from test.fixtures import DOC_URI, DOC from pylsp.workspace import Document +from test.fixtures import DOC, DOC_URI -def test_document_props(doc): +def test_document_props(doc) -> None: assert doc.uri == DOC_URI assert doc.source == DOC -def test_document_lines(doc): +def test_document_lines(doc) -> None: assert len(doc.lines) == 4 - assert doc.lines[0] == 'import sys\n' + assert doc.lines[0] == "import sys\n" -def test_document_source_unicode(workspace): - document_mem = Document(DOC_URI, workspace, 'my source') +def test_document_source_unicode(workspace) -> None: + document_mem = Document(DOC_URI, workspace, "my source") document_disk = Document(DOC_URI, workspace) assert isinstance(document_mem.source, type(document_disk.source)) -def test_offset_at_position(doc): - assert doc.offset_at_position({'line': 0, 'character': 8}) == 8 - assert doc.offset_at_position({'line': 1, 'character': 5}) == 16 - assert doc.offset_at_position({'line': 2, 'character': 0}) == 12 - assert doc.offset_at_position({'line': 2, 'character': 4}) == 16 - assert doc.offset_at_position({'line': 4, 'character': 0}) == 51 +def test_offset_at_position(doc) -> None: + assert doc.offset_at_position({"line": 0, "character": 8}) == 8 + assert doc.offset_at_position({"line": 1, "character": 5}) == 16 + assert doc.offset_at_position({"line": 2, "character": 0}) == 12 + assert doc.offset_at_position({"line": 2, "character": 4}) == 16 + assert doc.offset_at_position({"line": 4, "character": 0}) == 51 -def test_word_at_position(doc): - """ Return the position under the cursor (or last in line if past the end) """ +def test_word_at_position(doc) -> None: + """Return the position under the cursor (or last in line if past the end)""" # import sys - assert doc.word_at_position({'line': 0, 'character': 8}) == 'sys' + assert doc.word_at_position({"line": 0, "character": 8}) == "sys" # Past end of import sys - assert doc.word_at_position({'line': 0, 
'character': 1000}) == 'sys' + assert doc.word_at_position({"line": 0, "character": 1000}) == "sys" # Empty line - assert doc.word_at_position({'line': 1, 'character': 5}) == '' + assert doc.word_at_position({"line": 1, "character": 5}) == "" # def main(): - assert doc.word_at_position({'line': 2, 'character': 0}) == 'def' + assert doc.word_at_position({"line": 2, "character": 0}) == "def" # Past end of file - assert doc.word_at_position({'line': 4, 'character': 0}) == '' - - -def test_document_empty_edit(workspace): - doc = Document('file:///uri', workspace, '') - doc.apply_change({ - 'range': { - 'start': {'line': 0, 'character': 0}, - 'end': {'line': 0, 'character': 0} - }, - 'text': 'f' - }) - assert doc.source == 'f' - - -def test_document_line_edit(workspace): - doc = Document('file:///uri', workspace, 'itshelloworld') - doc.apply_change({ - 'text': 'goodbye', - 'range': { - 'start': {'line': 0, 'character': 3}, - 'end': {'line': 0, 'character': 8} + assert doc.word_at_position({"line": 4, "character": 0}) == "" + + +def test_document_empty_edit(workspace) -> None: + doc = Document("file:///uri", workspace, "") + doc.apply_change( + { + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 0}, + }, + "text": "f", } - }) - assert doc.source == 'itsgoodbyeworld' - - -def test_document_multiline_edit(workspace): - old = [ - "def hello(a, b):\n", - " print a\n", - " print b\n" - ] - doc = Document('file:///uri', workspace, ''.join(old)) - doc.apply_change({'text': 'print a, b', 'range': { - 'start': {'line': 1, 'character': 4}, - 'end': {'line': 2, 'character': 11} - }}) - assert doc.lines == [ - "def hello(a, b):\n", - " print a, b\n" - ] - - -def test_document_end_of_file_edit(workspace): - old = [ - "print 'a'\n", - "print 'b'\n" - ] - doc = Document('file:///uri', workspace, ''.join(old)) - doc.apply_change({'text': 'o', 'range': { - 'start': {'line': 2, 'character': 0}, - 'end': {'line': 2, 'character': 0} - }}) + ) + assert 
doc.source == "f" + + +def test_document_line_edit(workspace) -> None: + doc = Document("file:///uri", workspace, "itshelloworld") + doc.apply_change( + { + "text": "goodbye", + "range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 8}, + }, + } + ) + assert doc.source == "itsgoodbyeworld" + + +def test_document_multiline_edit(workspace) -> None: + old = ["def hello(a, b):\n", " print a\n", " print b\n"] + doc = Document("file:///uri", workspace, "".join(old)) + doc.apply_change( + { + "text": "print a, b", + "range": { + "start": {"line": 1, "character": 4}, + "end": {"line": 2, "character": 11}, + }, + } + ) + assert doc.lines == ["def hello(a, b):\n", " print a, b\n"] + + +def test_document_end_of_file_edit(workspace) -> None: + old = ["print 'a'\n", "print 'b'\n"] + doc = Document("file:///uri", workspace, "".join(old)) + doc.apply_change( + { + "text": "o", + "range": { + "start": {"line": 2, "character": 0}, + "end": {"line": 2, "character": 0}, + }, + } + ) assert doc.lines == [ "print 'a'\n", "print 'b'\n", diff --git a/test/test_language_server.py b/test/test_language_server.py index 92d1ea84..9b362110 100644 --- a/test/test_language_server.py +++ b/test/test_language_server.py @@ -2,119 +2,85 @@ # Copyright 2021- Python Language Server Contributors. 
import os -import time -import multiprocessing import sys -from threading import Thread +import time +import pytest from flaky import flaky from pylsp_jsonrpc.exceptions import JsonRpcMethodNotFound -import pytest - -from pylsp.python_lsp import start_io_lang_server, PythonLSPServer - -CALL_TIMEOUT = 10 -RUNNING_IN_CI = bool(os.environ.get('CI')) - - -def start_client(client): - client.start() +from test.test_utils import ClientServerPair, send_initialize_request -class _ClientServer: - """ A class to setup a client/server pair """ - def __init__(self, check_parent_process=False): - # Client to Server pipe - csr, csw = os.pipe() - # Server to client pipe - scr, scw = os.pipe() - - if os.name == 'nt': - ParallelKind = Thread - else: - if sys.version_info[:2] >= (3, 8): - ParallelKind = multiprocessing.get_context("fork").Process - else: - ParallelKind = multiprocessing.Process - - self.process = ParallelKind(target=start_io_lang_server, args=( - os.fdopen(csr, 'rb'), os.fdopen(scw, 'wb'), check_parent_process, PythonLSPServer - )) - self.process.start() - - self.client = PythonLSPServer(os.fdopen(scr, 'rb'), os.fdopen(csw, 'wb'), start_io_lang_server) - self.client_thread = Thread(target=start_client, args=[self.client]) - self.client_thread.daemon = True - self.client_thread.start() - - -@pytest.fixture -def client_server(): - """ A fixture that sets up a client/server pair and shuts down the server - This client/server pair does not support checking parent process aliveness - """ - client_server_pair = _ClientServer() - - yield client_server_pair.client +RUNNING_IN_CI = bool(os.environ.get("CI")) - shutdown_response = client_server_pair.client._endpoint.request('shutdown').result(timeout=CALL_TIMEOUT) - assert shutdown_response is None - client_server_pair.client._endpoint.notify('exit') +CALL_TIMEOUT_IN_SECONDS = 10 @pytest.fixture -def client_exited_server(): - """ A fixture that sets up a client/server pair that support checking parent process aliveness +def 
client_exited_server() -> None: + """A fixture that sets up a client/server pair that support checking parent process aliveness and assert the server has already exited """ - client_server_pair = _ClientServer(True) + client_server_pair_obj = ClientServerPair(True, True) - # yield client_server_pair.client - yield client_server_pair + yield client_server_pair_obj - assert client_server_pair.process.is_alive() is False + assert client_server_pair_obj.server_process.is_alive() is False @flaky(max_runs=10, min_passes=1) -@pytest.mark.skipif(sys.platform == 'darwin', reason='Too flaky on Mac') -def test_initialize(client_server): # pylint: disable=redefined-outer-name - response = client_server._endpoint.request('initialize', { - 'rootPath': os.path.dirname(__file__), - 'initializationOptions': {} - }).result(timeout=CALL_TIMEOUT) - assert 'capabilities' in response +@pytest.mark.skipif(sys.platform == "darwin", reason="Too flaky on Mac") +def test_initialize(client_server_pair) -> None: + client, _ = client_server_pair + response = client._endpoint.request( + "initialize", + {"rootPath": os.path.dirname(__file__), "initializationOptions": {}}, + ).result(timeout=CALL_TIMEOUT_IN_SECONDS) + assert "capabilities" in response @flaky(max_runs=10, min_passes=1) -@pytest.mark.skipif(not sys.platform.startswith('Linux'), reason='Skipped on win and flaky on mac') -def test_exit_with_parent_process_died(client_exited_server): # pylint: disable=redefined-outer-name +@pytest.mark.skipif( + not sys.platform.startswith("Linux"), reason="Skipped on win and flaky on mac" +) +def test_exit_with_parent_process_died( + client_exited_server, +) -> None: # language server should have already exited before responding - lsp_server, mock_process = client_exited_server.client, client_exited_server.process + lsp_server, mock_process = ( + client_exited_server.client, + client_exited_server.server_process, + ) # with pytest.raises(Exception): - lsp_server._endpoint.request('initialize', { - 
'processId': mock_process.pid, - 'rootPath': os.path.dirname(__file__), - 'initializationOptions': {} - }).result(timeout=CALL_TIMEOUT) + lsp_server._endpoint.request( + "initialize", + { + "processId": mock_process.pid, + "rootPath": os.path.dirname(__file__), + "initializationOptions": {}, + }, + ).result(timeout=CALL_TIMEOUT_IN_SECONDS) mock_process.terminate() - time.sleep(CALL_TIMEOUT) + time.sleep(CALL_TIMEOUT_IN_SECONDS) assert not client_exited_server.client_thread.is_alive() @flaky(max_runs=10, min_passes=1) -@pytest.mark.skipif(sys.platform.startswith('linux'), reason='Fails on linux') -def test_not_exit_without_check_parent_process_flag(client_server): # pylint: disable=redefined-outer-name - response = client_server._endpoint.request('initialize', { - 'processId': 1234, - 'rootPath': os.path.dirname(__file__), - 'initializationOptions': {} - }).result(timeout=CALL_TIMEOUT) - assert 'capabilities' in response +@pytest.mark.skipif(sys.platform.startswith("linux"), reason="Fails on linux") +def test_not_exit_without_check_parent_process_flag( + client_server_pair, +) -> None: + client, _ = client_server_pair + response = send_initialize_request(client) + assert "capabilities" in response @flaky(max_runs=10, min_passes=1) -@pytest.mark.skipif(RUNNING_IN_CI, reason='This test is hanging on CI') -def test_missing_message(client_server): # pylint: disable=redefined-outer-name +@pytest.mark.skipif(RUNNING_IN_CI, reason="This test is hanging on CI") +def test_missing_message(client_server_pair) -> None: + client, _ = client_server_pair with pytest.raises(JsonRpcMethodNotFound): - client_server._endpoint.request('unknown_method').result(timeout=CALL_TIMEOUT) + client._endpoint.request("unknown_method").result( + timeout=CALL_TIMEOUT_IN_SECONDS + ) diff --git a/test/test_notebook_document.py b/test/test_notebook_document.py new file mode 100644 index 00000000..215258e1 --- /dev/null +++ b/test/test_notebook_document.py @@ -0,0 +1,600 @@ +# Copyright 2021- Python 
Language Server Contributors. + +import time +from unittest.mock import call, patch + +import pytest + +from pylsp import IS_WIN +from pylsp.lsp import NotebookCellKind +from pylsp.workspace import Notebook +from test.test_utils import ( + CALL_TIMEOUT_IN_SECONDS, + send_initialize_request, + send_notebook_did_open, +) + + +def wait_for_condition(condition, timeout=CALL_TIMEOUT_IN_SECONDS) -> None: + """Wait for a condition to be true, or timeout.""" + start_time = time.time() + while not condition(): + time.sleep(0.1) + if time.time() - start_time > timeout: + raise TimeoutError("Timeout waiting for condition") + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_initialize(client_server_pair) -> None: + client, server = client_server_pair + response = send_initialize_request(client) + assert server.workspace is not None + selector = response["capabilities"]["notebookDocumentSync"]["notebookSelector"] + assert isinstance(selector, list) + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_workspace_did_change_configuration(client_server_pair) -> None: + """Test that we can update a workspace config w/o error when a notebook is open.""" + client, server = client_server_pair + send_initialize_request(client) + assert server.workspace is not None + + with patch.object(server._endpoint, "notify") as mock_notify: + client._endpoint.notify( + "notebookDocument/didOpen", + { + "notebookDocument": { + "uri": "notebook_uri", + "notebookType": "jupyter-notebook", + "cells": [ + { + "kind": NotebookCellKind.Code, + "document": "cell_1_uri", + }, + ], + }, + "cellTextDocuments": [ + { + "uri": "cell_1_uri", + "languageId": "python", + "text": "", + }, + ], + }, + ) + wait_for_condition(lambda: mock_notify.call_count >= 1) + assert isinstance(server.workspace.get_document("notebook_uri"), Notebook) + assert len(server.workspace.documents) == 2 + + server.workspace.update_config( + {"pylsp": {"plugins": {"flake8": {"enabled": True}}}} + ) + + 
assert server.config.plugin_settings("flake8").get("enabled") is True + assert ( + server.workspace.get_document("cell_1_uri") + ._config.plugin_settings("flake8") + .get("enabled") + is True + ) + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_notebook_document__did_open( + client_server_pair, +) -> None: + client, server = client_server_pair + send_initialize_request(client) + + with patch.object(server._endpoint, "notify") as mock_notify: + # Test as many edge cases as possible for the diagnostics messages + send_notebook_did_open( + client, ["", "\n", "\nimport sys\n\nabc\n\n", "x", "y\n"] + ) + wait_for_condition(lambda: mock_notify.call_count >= 5) + expected_call_args = [ + call( + "textDocument/publishDiagnostics", + params={ + "uri": "cell_1_uri", + "diagnostics": [], + }, + ), + call( + "textDocument/publishDiagnostics", + params={ + "uri": "cell_2_uri", + "diagnostics": [], + }, + ), + call( + "textDocument/publishDiagnostics", + params={ + "uri": "cell_3_uri", + "diagnostics": [ + { + "source": "pyflakes", + "range": { + "start": {"line": 1, "character": 0}, + "end": {"line": 1, "character": 11}, + }, + "message": "'sys' imported but unused", + "severity": 2, + }, + { + "source": "pyflakes", + "range": { + "start": {"line": 3, "character": 0}, + "end": {"line": 3, "character": 4}, + }, + "message": "undefined name 'abc'", + "severity": 1, + }, + { + "source": "pycodestyle", + "range": { + "start": {"line": 1, "character": 0}, + "end": {"line": 1, "character": 11}, + }, + "message": "E303 too many blank lines (4)", + "code": "E303", + "severity": 2, + }, + ], + }, + ), + call( + "textDocument/publishDiagnostics", + params={ + "uri": "cell_4_uri", + "diagnostics": [ + { + "source": "pyflakes", + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 2}, + }, + "message": "undefined name 'x'", + "severity": 1, + }, + ], + }, + ), + call( + "textDocument/publishDiagnostics", + params={ + "uri": 
"cell_5_uri", + "diagnostics": [ + { + "source": "pyflakes", + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 2}, + }, + "message": "undefined name 'y'", + "severity": 1, + }, + ], + }, + ), + ] + mock_notify.assert_has_calls(expected_call_args) + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_notebook_document__did_change( + client_server_pair, +) -> None: + client, server = client_server_pair + send_initialize_request(client) + + # Open notebook + with patch.object(server._endpoint, "notify") as mock_notify: + send_notebook_did_open(client, ["import sys", ""]) + wait_for_condition(lambda: mock_notify.call_count >= 2) + assert len(server.workspace.documents) == 3 + for uri in ["cell_1_uri", "cell_2_uri", "notebook_uri"]: + assert uri in server.workspace.documents + assert len(server.workspace.get_document("notebook_uri").cells) == 2 + expected_call_args = [ + call( + "textDocument/publishDiagnostics", + params={ + "uri": "cell_1_uri", + "diagnostics": [ + { + "source": "pyflakes", + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 11}, + }, + "message": "'sys' imported but unused", + "severity": 2, + } + ], + }, + ), + call( + "textDocument/publishDiagnostics", + params={"uri": "cell_2_uri", "diagnostics": []}, + ), + ] + mock_notify.assert_has_calls(expected_call_args) + + # Remove second cell + with patch.object(server._endpoint, "notify") as mock_notify: + client._endpoint.notify( + "notebookDocument/didChange", + { + "notebookDocument": { + "uri": "notebook_uri", + }, + "change": { + "cells": { + "structure": { + "array": { + "start": 1, + "deleteCount": 1, + }, + "didClose": [ + { + "uri": "cell_2_uri", + } + ], + }, + } + }, + }, + ) + wait_for_condition(lambda: mock_notify.call_count >= 2) + assert len(server.workspace.documents) == 2 + assert "cell_2_uri" not in server.workspace.documents + assert len(server.workspace.get_document("notebook_uri").cells) == 1 + 
expected_call_args = [ + call( + "textDocument/publishDiagnostics", + params={ + "uri": "cell_1_uri", + "diagnostics": [ + { + "source": "pyflakes", + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 10}, + }, + "message": "'sys' imported but unused", + "severity": 2, + }, + { + "source": "pycodestyle", + "range": { + "start": {"line": 0, "character": 10}, + "end": {"line": 0, "character": 10}, + }, + "message": "W292 no newline at end of file", + "code": "W292", + "severity": 2, + }, + ], + }, + ) + ] + mock_notify.assert_has_calls(expected_call_args) + + # Add second cell + with patch.object(server._endpoint, "notify") as mock_notify: + client._endpoint.notify( + "notebookDocument/didChange", + { + "notebookDocument": { + "uri": "notebook_uri", + }, + "change": { + "cells": { + "structure": { + "array": { + "start": 1, + "deleteCount": 0, + "cells": [ + { + "kind": NotebookCellKind.Code, + "document": "cell_3_uri", + } + ], + }, + "didOpen": [ + { + "uri": "cell_3_uri", + "languageId": "python", + "text": "x", + } + ], + }, + } + }, + }, + ) + wait_for_condition(lambda: mock_notify.call_count >= 2) + assert len(server.workspace.documents) == 3 + assert "cell_3_uri" in server.workspace.documents + assert len(server.workspace.get_document("notebook_uri").cells) == 2 + expected_call_args = [ + call( + "textDocument/publishDiagnostics", + params={ + "uri": "cell_1_uri", + "diagnostics": [ + { + "source": "pyflakes", + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 11}, + }, + "message": "'sys' imported but unused", + "severity": 2, + } + ], + }, + ), + call( + "textDocument/publishDiagnostics", + params={ + "uri": "cell_3_uri", + "diagnostics": [ + { + "source": "pyflakes", + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 1}, + }, + "message": "undefined name 'x'", + "severity": 1, + }, + { + "source": "pycodestyle", + "range": { + "start": {"line": 0, 
"character": 1}, + "end": {"line": 0, "character": 1}, + }, + "message": "W292 no newline at end of file", + "code": "W292", + "severity": 2, + }, + ], + }, + ), + ] + mock_notify.assert_has_calls(expected_call_args) + + # Edit second cell + with patch.object(server._endpoint, "notify") as mock_notify: + client._endpoint.notify( + "notebookDocument/didChange", + { + "notebookDocument": { + "uri": "notebook_uri", + }, + "change": { + "cells": { + "textContent": [ + { + "document": { + "uri": "cell_3_uri", + }, + "changes": [{"text": "sys.path"}], + } + ] + } + }, + }, + ) + wait_for_condition(lambda: mock_notify.call_count >= 2) + expected_call_args = [ + call( + "textDocument/publishDiagnostics", + params={"uri": "cell_1_uri", "diagnostics": []}, + ), + call( + "textDocument/publishDiagnostics", + params={ + "uri": "cell_3_uri", + "diagnostics": [ + { + "source": "pycodestyle", + "range": { + "start": {"line": 0, "character": 8}, + "end": {"line": 0, "character": 8}, + }, + "message": "W292 no newline at end of file", + "code": "W292", + "severity": 2, + } + ], + }, + ), + ] + mock_notify.assert_has_calls(expected_call_args) + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_notebook__did_close( + client_server_pair, +) -> None: + client, server = client_server_pair + send_initialize_request(client) + + # Open notebook + with patch.object(server._endpoint, "notify") as mock_notify: + send_notebook_did_open(client, ["import sys", ""]) + wait_for_condition(lambda: mock_notify.call_count >= 2) + assert len(server.workspace.documents) == 3 + for uri in ["cell_1_uri", "cell_2_uri", "notebook_uri"]: + assert uri in server.workspace.documents + + # Close notebook + with patch.object(server._endpoint, "notify") as mock_notify: + client._endpoint.notify( + "notebookDocument/didClose", + { + "notebookDocument": { + "uri": "notebook_uri", + }, + "cellTextDocuments": [ + { + "uri": "cell_1_uri", + }, + { + "uri": "cell_2_uri", + }, + ], + }, + ) + 
wait_for_condition(lambda: mock_notify.call_count >= 2) + assert len(server.workspace.documents) == 0 + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_notebook_definition(client_server_pair) -> None: + client, server = client_server_pair + send_initialize_request(client) + + # Open notebook + with patch.object(server._endpoint, "notify") as mock_notify: + send_notebook_did_open(client, ["y=2\nx=1", "x"]) + # wait for expected diagnostics messages + wait_for_condition(lambda: mock_notify.call_count >= 2) + assert len(server.workspace.documents) == 3 + for uri in ["cell_1_uri", "cell_2_uri", "notebook_uri"]: + assert uri in server.workspace.documents + + future = client._endpoint.request( + "textDocument/definition", + { + "textDocument": { + "uri": "cell_2_uri", + }, + "position": {"line": 0, "character": 1}, + }, + ) + result = future.result(CALL_TIMEOUT_IN_SECONDS) + assert result == [ + { + "uri": "cell_1_uri", + "range": { + "start": {"line": 1, "character": 0}, + "end": {"line": 1, "character": 1}, + }, + } + ] + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_notebook_completion(client_server_pair) -> None: + """ + Tests that completions work across cell boundaries for notebook document support + """ + client, server = client_server_pair + send_initialize_request(client) + + # Open notebook + with patch.object(server._endpoint, "notify") as mock_notify: + send_notebook_did_open( + client, ["answer_to_life_universe_everything = 42", "answer_"] + ) + # wait for expected diagnostics messages + wait_for_condition(lambda: mock_notify.call_count >= 2) + assert len(server.workspace.documents) == 3 + for uri in ["cell_1_uri", "cell_2_uri", "notebook_uri"]: + assert uri in server.workspace.documents + + future = client._endpoint.request( + "textDocument/completion", + { + "textDocument": { + "uri": "cell_2_uri", + }, + "position": {"line": 0, "character": 7}, + }, + ) + result = future.result(CALL_TIMEOUT_IN_SECONDS) + assert 
result == { + "isIncomplete": False, + "items": [ + { + "data": {"doc_uri": "cell_2_uri"}, + "insertText": "answer_to_life_universe_everything", + "kind": 6, + "label": "answer_to_life_universe_everything", + "sortText": "aanswer_to_life_universe_everything", + }, + ], + } + + +@pytest.mark.skipif(IS_WIN, reason="Flaky on Windows") +def test_notebook_completion_resolve(client_server_pair) -> None: + """ + Tests that completion item resolve works correctly + """ + client, server = client_server_pair + send_initialize_request(client) + + # Open notebook + with patch.object(server._endpoint, "notify") as mock_notify: + send_notebook_did_open( + client, + [ + "def answer():\n\t'''Returns an important number.'''\n\treturn 42", + "ans", + ], + ) + # wait for expected diagnostics messages + wait_for_condition(lambda: mock_notify.call_count >= 2) + assert len(server.workspace.documents) == 3 + for uri in ["cell_1_uri", "cell_2_uri", "notebook_uri"]: + assert uri in server.workspace.documents + + future = client._endpoint.request( + "textDocument/completion", + { + "textDocument": { + "uri": "cell_2_uri", + }, + "position": {"line": 0, "character": 3}, + }, + ) + result = future.result(CALL_TIMEOUT_IN_SECONDS) + assert result == { + "isIncomplete": False, + "items": [ + { + "data": {"doc_uri": "cell_2_uri"}, + "insertText": "answer", + "kind": 3, + "label": "answer()", + "sortText": "aanswer", + }, + ], + } + + future = client._endpoint.request( + "completionItem/resolve", + { + "data": {"doc_uri": "cell_2_uri"}, + "label": "answer()", + }, + ) + result = future.result(CALL_TIMEOUT_IN_SECONDS) + del result["detail"] # The value of this is unpredictable. 
+ assert result == { + "data": {"doc_uri": "cell_2_uri"}, + "insertText": "answer", + "kind": 3, + "label": "answer()", + "sortText": "aanswer", + "documentation": { + "kind": "markdown", + "value": "```python\nanswer()\n```\n\n\nReturns an important number.", + }, + } diff --git a/test/test_python_lsp.py b/test/test_python_lsp.py new file mode 100644 index 00000000..b7b9daec --- /dev/null +++ b/test/test_python_lsp.py @@ -0,0 +1,161 @@ +import asyncio +import json +import os +import socket +import subprocess +import sys +import threading +import time + +import pytest +import websockets + +NUM_CLIENTS = 2 +NUM_REQUESTS = 5 +TEST_PORT = 5102 +HOST = "127.0.0.1" +MAX_STARTUP_SECONDS = 5.0 +CHECK_INTERVAL = 0.1 + + +@pytest.fixture(scope="module", autouse=True) +def ws_server_subprocess(): + cmd = [ + sys.executable, + "-m", + "pylsp.__main__", + "--ws", + "--host", + HOST, + "--port", + str(TEST_PORT), + ] + + proc = subprocess.Popen( + cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + env=os.environ.copy(), + ) + + deadline = time.time() + MAX_STARTUP_SECONDS + while True: + try: + with socket.create_connection( + ("127.0.0.1", TEST_PORT), timeout=CHECK_INTERVAL + ): + break + except (ConnectionRefusedError, OSError): + if time.time() > deadline: + proc.kill() + out, err = proc.communicate(timeout=1) + raise RuntimeError( + f"Server didn’t start listening on port {TEST_PORT} in time.\n" + f"STDOUT:\n{out.decode()}\nSTDERR:\n{err.decode()}" + ) + time.sleep(CHECK_INTERVAL) + + yield # run the tests + + proc.terminate() + try: + proc.wait(timeout=2) + except subprocess.TimeoutExpired: + proc.kill() + + +TEST_DOC = """\ +def test(): + '''Test documentation''' +test() +""" + + +def test_concurrent_ws_requests(): + errors = set() + lock = threading.Lock() + + def thread_target(i: int): + async def do_initialize(idx): + uri = f"ws://{HOST}:{TEST_PORT}" + async with websockets.connect(uri) as ws: + # send initialize + init_request = { + "jsonrpc": "2.0", + "id": 4 
* idx, + "method": "initialize", + "params": {}, + } + did_open_request = { + "jsonrpc": "2.0", + "id": 4 * (idx + 1), + "method": "textDocument/didOpen", + "params": { + "textDocument": { + "uri": "test.py", + "languageId": "python", + "version": 0, + "text": TEST_DOC, + } + }, + } + + async def send_request(request: dict): + await asyncio.wait_for( + ws.send(json.dumps(request, ensure_ascii=False)), timeout=5 + ) + + async def get_json_reply(): + raw = await asyncio.wait_for(ws.recv(), timeout=60) + obj = json.loads(raw) + return obj + + try: + await send_request(init_request) + await get_json_reply() + await send_request(did_open_request) + await get_json_reply() + requests = [] + for i in range(NUM_REQUESTS): + hover_request = { + "jsonrpc": "2.0", + "id": 4 * (idx + 2 + i), + "method": "textDocument/definition", + "params": { + "textDocument": { + "uri": "test.py", + }, + "position": { + "line": 3, + "character": 2, + }, + }, + } + requests.append(send_request(hover_request)) + # send many requests in parallel + await asyncio.gather(*requests) + # collect replies + for i in range(NUM_REQUESTS): + hover = await get_json_reply() + assert hover + except (json.JSONDecodeError, asyncio.TimeoutError) as e: + return e + return None + + error = asyncio.run(do_initialize(i)) + with lock: + errors.add(error) + + # launch threads + threads = [] + for i in range(1, NUM_CLIENTS + 1): + t = threading.Thread(target=thread_target, args=(i,)) + t.start() + threads.append(t) + + # wait for them all + for t in threads: + t.join(timeout=50) + assert not t.is_alive(), f"Worker thread {t} hung!" + + assert not any(filter(bool, errors)) diff --git a/test/test_text_edit.py b/test/test_text_edit.py new file mode 100644 index 00000000..1d9115bf --- /dev/null +++ b/test/test_text_edit.py @@ -0,0 +1,303 @@ +# Copyright 2017-2020 Palantir Technologies, Inc. +# Copyright 2021- Python Language Server Contributors. 
+ +from pylsp import uris +from pylsp.text_edit import OverLappingTextEditException, apply_text_edits + +DOC_URI = uris.from_fs_path(__file__) + + +def test_apply_text_edits_insert(pylsp) -> None: + pylsp.workspace.put_document(DOC_URI, "012345678901234567890123456789") + test_doc = pylsp.workspace.get_document(DOC_URI) + + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 0}, + "end": {"line": 0, "character": 0}, + }, + "newText": "Hello", + } + ], + ) + == "Hello012345678901234567890123456789" + ) + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 1}, + "end": {"line": 0, "character": 1}, + }, + "newText": "Hello", + } + ], + ) + == "0Hello12345678901234567890123456789" + ) + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 1}, + "end": {"line": 0, "character": 1}, + }, + "newText": "Hello", + }, + { + "range": { + "start": {"line": 0, "character": 1}, + "end": {"line": 0, "character": 1}, + }, + "newText": "World", + }, + ], + ) + == "0HelloWorld12345678901234567890123456789" + ) + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 2}, + "end": {"line": 0, "character": 2}, + }, + "newText": "One", + }, + { + "range": { + "start": {"line": 0, "character": 1}, + "end": {"line": 0, "character": 1}, + }, + "newText": "Hello", + }, + { + "range": { + "start": {"line": 0, "character": 1}, + "end": {"line": 0, "character": 1}, + }, + "newText": "World", + }, + { + "range": { + "start": {"line": 0, "character": 2}, + "end": {"line": 0, "character": 2}, + }, + "newText": "Two", + }, + { + "range": { + "start": {"line": 0, "character": 2}, + "end": {"line": 0, "character": 2}, + }, + "newText": "Three", + }, + ], + ) + == "0HelloWorld1OneTwoThree2345678901234567890123456789" + ) + + +def test_apply_text_edits_replace(pylsp) -> None: + pylsp.workspace.put_document(DOC_URI, 
"012345678901234567890123456789") + test_doc = pylsp.workspace.get_document(DOC_URI) + + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 6}, + }, + "newText": "Hello", + } + ], + ) + == "012Hello678901234567890123456789" + ) + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 6}, + }, + "newText": "Hello", + }, + { + "range": { + "start": {"line": 0, "character": 6}, + "end": {"line": 0, "character": 9}, + }, + "newText": "World", + }, + ], + ) + == "012HelloWorld901234567890123456789" + ) + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 6}, + }, + "newText": "Hello", + }, + { + "range": { + "start": {"line": 0, "character": 6}, + "end": {"line": 0, "character": 6}, + }, + "newText": "World", + }, + ], + ) + == "012HelloWorld678901234567890123456789" + ) + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 6}, + "end": {"line": 0, "character": 6}, + }, + "newText": "World", + }, + { + "range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 6}, + }, + "newText": "Hello", + }, + ], + ) + == "012HelloWorld678901234567890123456789" + ) + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 3}, + }, + "newText": "World", + }, + { + "range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 6}, + }, + "newText": "Hello", + }, + ], + ) + == "012WorldHello678901234567890123456789" + ) + + +def test_apply_text_edits_overlap(pylsp) -> None: + pylsp.workspace.put_document(DOC_URI, "012345678901234567890123456789") + test_doc = pylsp.workspace.get_document(DOC_URI) + + did_throw = False + try: + apply_text_edits( + test_doc, + [ + { + 
"range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 6}, + }, + "newText": "Hello", + }, + { + "range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 3}, + }, + "newText": "World", + }, + ], + ) + except OverLappingTextEditException: + did_throw = True + + assert did_throw + + did_throw = False + + try: + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 0, "character": 3}, + "end": {"line": 0, "character": 6}, + }, + "newText": "Hello", + }, + { + "range": { + "start": {"line": 0, "character": 4}, + "end": {"line": 0, "character": 4}, + }, + "newText": "World", + }, + ], + ) + except OverLappingTextEditException: + did_throw = True + + assert did_throw + + +def test_apply_text_edits_multiline(pylsp) -> None: + pylsp.workspace.put_document(DOC_URI, "0\n1\n2\n3\n4") + test_doc = pylsp.workspace.get_document(DOC_URI) + + assert ( + apply_text_edits( + test_doc, + [ + { + "range": { + "start": {"line": 2, "character": 0}, + "end": {"line": 3, "character": 0}, + }, + "newText": "Hello", + }, + { + "range": { + "start": {"line": 1, "character": 1}, + "end": {"line": 1, "character": 1}, + }, + "newText": "World", + }, + ], + ) + == "0\n1World\nHello3\n4" + ) diff --git a/test/test_uris.py b/test/test_uris.py index ad83c090..41c7f54d 100644 --- a/test/test_uris.py +++ b/test/test_uris.py @@ -1,52 +1,72 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. 
-from test import unix_only, windows_only import pytest + from pylsp import uris +from test import unix_only, windows_only @unix_only -@pytest.mark.parametrize('uri,path', [ - ('file:///foo/bar#frag', '/foo/bar'), - ('file:/foo/bar#frag', '/foo/bar'), - ('file:/foo/space%20%3Fbar#frag', '/foo/space ?bar'), -]) -def test_to_fs_path(uri, path): +@pytest.mark.parametrize( + "uri,path", + [ + ("file:///foo/bar#frag", "/foo/bar"), + ("file:/foo/bar#frag", "/foo/bar"), + ("file:/foo/space%20%3Fbar#frag", "/foo/space ?bar"), + ], +) +def test_to_fs_path(uri, path) -> None: assert uris.to_fs_path(uri) == path @windows_only -@pytest.mark.parametrize('uri,path', [ - ('file:///c:/far/boo', 'c:\\far\\boo'), - ('file:///C:/far/boo', 'c:\\far\\boo'), - ('file:///C:/far/space%20%3Fboo', 'c:\\far\\space ?boo'), -]) -def test_win_to_fs_path(uri, path): +@pytest.mark.parametrize( + "uri,path", + [ + ("file:///c:/far/boo", "c:\\far\\boo"), + ("file:///C:/far/boo", "c:\\far\\boo"), + ("file:///C:/far/space%20%3Fboo", "c:\\far\\space ?boo"), + ], +) +def test_win_to_fs_path(uri, path) -> None: assert uris.to_fs_path(uri) == path @unix_only -@pytest.mark.parametrize('path,uri', [ - ('/foo/bar', 'file:///foo/bar'), - ('/foo/space ?bar', 'file:///foo/space%20%3Fbar'), -]) -def test_from_fs_path(path, uri): +@pytest.mark.parametrize( + "path,uri", + [ + ("/foo/bar", "file:///foo/bar"), + ("/foo/space ?bar", "file:///foo/space%20%3Fbar"), + ], +) +def test_from_fs_path(path, uri) -> None: assert uris.from_fs_path(path) == uri @windows_only -@pytest.mark.parametrize('path,uri', [ - ('c:\\far\\boo', 'file:///c:/far/boo'), - ('C:\\far\\space ?boo', 'file:///c:/far/space%20%3Fboo') -]) -def test_win_from_fs_path(path, uri): +@pytest.mark.parametrize( + "path,uri", + [ + ("c:\\far\\boo", "file:///c:/far/boo"), + ("C:\\far\\space ?boo", "file:///c:/far/space%20%3Fboo"), + ], +) +def test_win_from_fs_path(path, uri) -> None: assert uris.from_fs_path(path) == uri 
-@pytest.mark.parametrize('uri,kwargs,new_uri', [ - ('file:///foo/bar', {'path': '/baz/boo'}, 'file:///baz/boo'), - ('file:///D:/hello%20world.py', {'path': 'D:/hello universe.py'}, 'file:///d:/hello%20universe.py') -]) -def test_uri_with(uri, kwargs, new_uri): +@pytest.mark.parametrize( + "uri,kwargs,new_uri", + [ + ("file:///foo/bar", {"path": "/baz/boo"}, "file:///baz/boo"), + ( + "file:///D:/hello%20world.py", + {"path": "D:/hello universe.py"}, + "file:///d:/hello%20universe.py", + ), + ], +) +def test_uri_with(uri, kwargs, new_uri) -> None: assert uris.uri_with(uri, **kwargs) == new_uri diff --git a/test/test_utils.py b/test/test_utils.py index 4b41155b..7ed6214f 100644 --- a/test/test_utils.py +++ b/test/test_utils.py @@ -1,16 +1,131 @@ # Copyright 2017-2020 Palantir Technologies, Inc. # Copyright 2021- Python Language Server Contributors. +import multiprocessing +import os +import sys import time +from threading import Thread +from typing import Any from unittest import mock +from docstring_to_markdown import UnknownFormatError from flaky import flaky from pylsp import _utils +from pylsp.lsp import NotebookCellKind +from pylsp.python_lsp import PythonLSPServer, start_io_lang_server + +CALL_TIMEOUT_IN_SECONDS = 30 + + +def send_notebook_did_open(client, cells: list[str]) -> None: + """ + Sends a notebookDocument/didOpen notification with the given python cells. + + The notebook has the uri "notebook_uri" and the cells have the uris + "cell_1_uri", "cell_2_uri", etc. + """ + client._endpoint.notify( + "notebookDocument/didOpen", notebook_with_python_cells(cells) + ) + + +def notebook_with_python_cells(cells: list[str]): + """ + Create a notebook document with the given python cells. + + The notebook has the uri "notebook_uri" and the cells have the uris + "cell_1_uri", "cell_2_uri", etc. 
+ """ + return { + "notebookDocument": { + "uri": "notebook_uri", + "notebookType": "jupyter-notebook", + "cells": [ + { + "kind": NotebookCellKind.Code, + "document": f"cell_{i + 1}_uri", + } + for i in range(len(cells)) + ], + }, + "cellTextDocuments": [ + { + "uri": f"cell_{i + 1}_uri", + "languageId": "python", + "text": cell, + } + for i, cell in enumerate(cells) + ], + } + + +def send_initialize_request(client, initialization_options: dict[str, Any] = None): + return client._endpoint.request( + "initialize", + { + "processId": 1234, + "rootPath": os.path.dirname(__file__), + "initializationOptions": initialization_options, + }, + ).result(timeout=CALL_TIMEOUT_IN_SECONDS) + + +def start(obj) -> None: + obj.start() + + +class ClientServerPair: + """ + A class to setup a client/server pair. + + args: + start_server_in_process: if True, the server will be started in a process. + check_parent_process: if True, the server_process will check if the parent process is alive. + """ + + def __init__( + self, start_server_in_process=False, check_parent_process=False + ) -> None: + # Client to Server pipe + csr, csw = os.pipe() + # Server to client pipe + scr, scw = os.pipe() + + if start_server_in_process: + ParallelKind = self._get_parallel_kind() + self.server_process = ParallelKind( + target=start_io_lang_server, + args=( + os.fdopen(csr, "rb"), + os.fdopen(scw, "wb"), + check_parent_process, + PythonLSPServer, + ), + ) + self.server_process.start() + else: + self.server = PythonLSPServer(os.fdopen(csr, "rb"), os.fdopen(scw, "wb")) + self.server_thread = Thread(target=start, args=[self.server]) + self.server_thread.start() + + self.client = PythonLSPServer(os.fdopen(scr, "rb"), os.fdopen(csw, "wb")) + self.client_thread = Thread(target=start, args=[self.client]) + self.client_thread.start() + + def _get_parallel_kind(self): + if os.name == "nt": + return Thread + + if sys.version_info[:2] >= (3, 8): + return multiprocessing.get_context("fork").Process + + return 
multiprocessing.Process @flaky(max_runs=6, min_passes=1) -def test_debounce(): +def test_debounce() -> None: interval = 0.1 obj = mock.Mock() @@ -34,11 +149,11 @@ def call_m(): @flaky(max_runs=6, min_passes=1) -def test_debounce_keyed_by(): +def test_debounce_keyed_by() -> None: interval = 0.1 obj = mock.Mock() - @_utils.debounce(0.1, keyed_by='key') + @_utils.debounce(0.1, keyed_by="key") def call_m(key): obj(key) @@ -50,11 +165,14 @@ def call_m(key): assert not obj.mock_calls time.sleep(interval * 2) - obj.assert_has_calls([ - mock.call(1), - mock.call(2), - mock.call(3), - ], any_order=True) + obj.assert_has_calls( + [ + mock.call(1), + mock.call(2), + mock.call(3), + ], + any_order=True, + ) assert len(obj.mock_calls) == 3 call_m(1) @@ -64,33 +182,85 @@ def call_m(key): assert len(obj.mock_calls) == 4 -def test_list_to_string(): +def test_list_to_string() -> None: assert _utils.list_to_string("string") == "string" assert _utils.list_to_string(["a", "r", "r", "a", "y"]) == "a,r,r,a,y" -def test_find_parents(tmpdir): +def test_find_parents(tmpdir) -> None: subsubdir = tmpdir.ensure_dir("subdir", "subsubdir") path = subsubdir.ensure("path.py") test_cfg = tmpdir.ensure("test.cfg") - assert _utils.find_parents(tmpdir.strpath, path.strpath, ["test.cfg"]) == [test_cfg.strpath] + assert _utils.find_parents(tmpdir.strpath, path.strpath, ["test.cfg"]) == [ + test_cfg.strpath + ] -def test_merge_dicts(): +def test_merge_dicts() -> None: assert _utils.merge_dicts( - {'a': True, 'b': {'x': 123, 'y': {'hello': 'world'}}}, - {'a': False, 'b': {'y': [], 'z': 987}} - ) == {'a': False, 'b': {'x': 123, 'y': [], 'z': 987}} + {"a": True, "b": {"x": 123, "y": {"hello": "world"}}}, + {"a": False, "b": {"y": [], "z": 987}}, + ) == {"a": False, "b": {"x": 123, "y": [], "z": 987}} -def test_clip_column(): +def test_clip_column() -> None: assert _utils.clip_column(0, [], 0) == 0 - assert _utils.clip_column(2, ['123'], 0) == 2 - assert _utils.clip_column(3, ['123'], 0) == 3 - assert 
_utils.clip_column(5, ['123'], 0) == 3 - assert _utils.clip_column(0, ['\n', '123'], 0) == 0 - assert _utils.clip_column(1, ['\n', '123'], 0) == 0 - assert _utils.clip_column(2, ['123\n', '123'], 0) == 2 - assert _utils.clip_column(3, ['123\n', '123'], 0) == 3 - assert _utils.clip_column(4, ['123\n', '123'], 1) == 3 + assert _utils.clip_column(2, ["123"], 0) == 2 + assert _utils.clip_column(3, ["123"], 0) == 3 + assert _utils.clip_column(5, ["123"], 0) == 3 + assert _utils.clip_column(0, ["\n", "123"], 0) == 0 + assert _utils.clip_column(1, ["\n", "123"], 0) == 0 + assert _utils.clip_column(2, ["123\n", "123"], 0) == 2 + assert _utils.clip_column(3, ["123\n", "123"], 0) == 3 + assert _utils.clip_column(4, ["123\n", "123"], 1) == 3 + + +@mock.patch("docstring_to_markdown.convert") +def test_format_docstring_valid_rst_signature(mock_convert) -> None: + """Test that a valid RST docstring includes the function signature.""" + docstring = """A function docstring. + + Parameters + ---------- + a : str, something + """ + + # Mock the return value to avoid dependency on the real thing + mock_convert.return_value = """A function docstring. + + #### Parameters + + - `a`: str, something + """ + + markdown = _utils.format_docstring( + docstring, + "markdown", + ["something(a: str) -> str"], + )["value"] + + assert markdown.startswith( + _utils.wrap_signature("something(a: str) -> str"), + ) + + +@mock.patch("docstring_to_markdown.convert", side_effect=UnknownFormatError) +def test_format_docstring_invalid_rst_signature(_) -> None: + """Test that an invalid RST docstring includes the function signature.""" + docstring = """A function docstring.
+ + Parameters + ---------- + a : str, something + """ + + markdown = _utils.format_docstring( + docstring, + "markdown", + ["something(a: str) -> str"], + )["value"] + + assert markdown.startswith( + _utils.wrap_signature("something(a: str) -> str"), + ) diff --git a/test/test_workspace.py b/test/test_workspace.py index 44d754b2..41bac398 100644 --- a/test/test_workspace.py +++ b/test/test_workspace.py @@ -3,73 +3,86 @@ import pathlib import pytest -from pylsp import uris +from pylsp import uris DOC_URI = uris.from_fs_path(__file__) +NOTEBOOK_URI = uris.from_fs_path("notebook_uri") def path_as_uri(path): return pathlib.Path(os.path.abspath(path)).as_uri() -def test_local(pylsp): - """ Since the workspace points to the test directory """ +def test_local(pylsp) -> None: + """Since the workspace points to the test directory""" assert pylsp.workspace.is_local() -def test_put_document(pylsp): - pylsp.workspace.put_document(DOC_URI, 'content') +def test_put_document(pylsp) -> None: + pylsp.workspace.put_document(DOC_URI, "content") assert DOC_URI in pylsp.workspace._docs -def test_get_document(pylsp): - pylsp.workspace.put_document(DOC_URI, 'TEXT') - assert pylsp.workspace.get_document(DOC_URI).source == 'TEXT' +def test_put_notebook_document(pylsp) -> None: + pylsp.workspace.put_notebook_document(DOC_URI, "jupyter-notebook", []) + assert DOC_URI in pylsp.workspace._docs + + +def test_put_cell_document(pylsp) -> None: + pylsp.workspace.put_cell_document(DOC_URI, NOTEBOOK_URI, "python", "content") + assert DOC_URI in pylsp.workspace._docs + +def test_get_document(pylsp) -> None: + pylsp.workspace.put_document(DOC_URI, "TEXT") + assert pylsp.workspace.get_document(DOC_URI).source == "TEXT" -def test_get_missing_document(tmpdir, pylsp): - source = 'TEXT' + +def test_get_missing_document(tmpdir, pylsp) -> None: + source = "TEXT" doc_path = tmpdir.join("test_document.py") doc_path.write(source) doc_uri = uris.from_fs_path(str(doc_path)) - assert 
pylsp.workspace.get_document(doc_uri).source == 'TEXT' + assert pylsp.workspace.get_document(doc_uri).source == "TEXT" -def test_rm_document(pylsp): - pylsp.workspace.put_document(DOC_URI, 'TEXT') - assert pylsp.workspace.get_document(DOC_URI).source == 'TEXT' +def test_rm_document(pylsp) -> None: + pylsp.workspace.put_document(DOC_URI, "TEXT") + assert pylsp.workspace.get_document(DOC_URI).source == "TEXT" pylsp.workspace.rm_document(DOC_URI) assert pylsp.workspace.get_document(DOC_URI)._source is None -@pytest.mark.parametrize('metafiles', [('setup.py',), ('pyproject.toml',), ('setup.py', 'pyproject.toml')]) -def test_non_root_project(pylsp, metafiles): - repo_root = os.path.join(pylsp.workspace.root_path, 'repo-root') +@pytest.mark.parametrize( + "metafiles", [("setup.py",), ("pyproject.toml",), ("setup.py", "pyproject.toml")] +) +def test_non_root_project(pylsp, metafiles) -> None: + repo_root = os.path.join(pylsp.workspace.root_path, "repo-root") os.mkdir(repo_root) - project_root = os.path.join(repo_root, 'project-root') + project_root = os.path.join(repo_root, "project-root") os.mkdir(project_root) for metafile in metafiles: - with open(os.path.join(project_root, metafile), 'w+', encoding='utf-8') as f: - f.write('# ' + metafile) + with open(os.path.join(project_root, metafile), "w+", encoding="utf-8") as f: + f.write("# " + metafile) - test_uri = uris.from_fs_path(os.path.join(project_root, 'hello/test.py')) - pylsp.workspace.put_document(test_uri, 'assert True') + test_uri = uris.from_fs_path(os.path.join(project_root, "hello/test.py")) + pylsp.workspace.put_document(test_uri, "assert True") test_doc = pylsp.workspace.get_document(test_uri) assert project_root in test_doc.sys_path() -def test_root_project_with_no_setup_py(pylsp): +def test_root_project_with_no_setup_py(pylsp) -> None: """Default to workspace root.""" workspace_root = pylsp.workspace.root_path - test_uri = uris.from_fs_path(os.path.join(workspace_root, 'hello/test.py')) - 
pylsp.workspace.put_document(test_uri, 'assert True') + test_uri = uris.from_fs_path(os.path.join(workspace_root, "hello/test.py")) + pylsp.workspace.put_document(test_uri, "assert True") test_doc = pylsp.workspace.get_document(test_uri) assert workspace_root in test_doc.sys_path() -def test_multiple_workspaces_from_initialize(pylsp_w_workspace_folders): +def test_multiple_workspaces_from_initialize(pylsp_w_workspace_folders) -> None: pylsp, workspace_folders = pylsp_w_workspace_folders assert len(pylsp.workspaces) == 2 @@ -82,146 +95,132 @@ def test_multiple_workspaces_from_initialize(pylsp_w_workspace_folders): assert folders_uris[0] == pylsp.root_uri # Create file in the first workspace folder. - file1 = workspace_folders[0].join('file1.py') - file1.write('import os') - msg1 = { - 'uri': path_as_uri(str(file1)), - 'version': 1, - 'text': 'import os' - } + file1 = workspace_folders[0].join("file1.py") + file1.write("import os") + msg1 = {"uri": path_as_uri(str(file1)), "version": 1, "text": "import os"} pylsp.m_text_document__did_open(textDocument=msg1) - assert msg1['uri'] in pylsp.workspace._docs - assert msg1['uri'] in pylsp.workspaces[folders_uris[0]]._docs + assert msg1["uri"] in pylsp.workspace._docs + assert msg1["uri"] in pylsp.workspaces[folders_uris[0]]._docs # Create file in the second workspace folder. 
- file2 = workspace_folders[1].join('file2.py') - file2.write('import sys') - msg2 = { - 'uri': path_as_uri(str(file2)), - 'version': 1, - 'text': 'import sys' - } + file2 = workspace_folders[1].join("file2.py") + file2.write("import sys") + msg2 = {"uri": path_as_uri(str(file2)), "version": 1, "text": "import sys"} pylsp.m_text_document__did_open(textDocument=msg2) - assert msg2['uri'] not in pylsp.workspace._docs - assert msg2['uri'] in pylsp.workspaces[folders_uris[1]]._docs - - -def test_multiple_workspaces(tmpdir, pylsp): - workspace1_dir = tmpdir.mkdir('workspace1') - workspace2_dir = tmpdir.mkdir('workspace2') - file1 = workspace1_dir.join('file1.py') - file2 = workspace2_dir.join('file1.py') - file1.write('import os') - file2.write('import sys') - - msg = { - 'uri': path_as_uri(str(file1)), - 'version': 1, - 'text': 'import os' - } + assert msg2["uri"] not in pylsp.workspace._docs + assert msg2["uri"] in pylsp.workspaces[folders_uris[1]]._docs + + +def test_multiple_workspaces(tmpdir, pylsp) -> None: + workspace1_dir = tmpdir.mkdir("workspace1") + workspace2_dir = tmpdir.mkdir("workspace2") + file1 = workspace1_dir.join("file1.py") + file2 = workspace2_dir.join("file1.py") + file1.write("import os") + file2.write("import sys") + + msg = {"uri": path_as_uri(str(file1)), "version": 1, "text": "import os"} pylsp.m_text_document__did_open(textDocument=msg) - assert msg['uri'] in pylsp.workspace._docs + assert msg["uri"] in pylsp.workspace._docs - added_workspaces = [{'uri': path_as_uri(str(x))} - for x in (workspace1_dir, workspace2_dir)] - event = {'added': added_workspaces, 'removed': []} + added_workspaces = [ + {"uri": path_as_uri(str(x))} for x in (workspace1_dir, workspace2_dir) + ] + event = {"added": added_workspaces, "removed": []} pylsp.m_workspace__did_change_workspace_folders(event) for workspace in added_workspaces: - assert workspace['uri'] in pylsp.workspaces + assert workspace["uri"] in pylsp.workspaces - workspace1_uri = 
added_workspaces[0]['uri'] - assert msg['uri'] not in pylsp.workspace._docs - assert msg['uri'] in pylsp.workspaces[workspace1_uri]._docs + workspace1_uri = added_workspaces[0]["uri"] + assert msg["uri"] not in pylsp.workspace._docs + assert msg["uri"] in pylsp.workspaces[workspace1_uri]._docs - msg = { - 'uri': path_as_uri(str(file2)), - 'version': 1, - 'text': 'import sys' - } + msg = {"uri": path_as_uri(str(file2)), "version": 1, "text": "import sys"} pylsp.m_text_document__did_open(textDocument=msg) - workspace2_uri = added_workspaces[1]['uri'] - assert msg['uri'] in pylsp.workspaces[workspace2_uri]._docs + workspace2_uri = added_workspaces[1]["uri"] + assert msg["uri"] in pylsp.workspaces[workspace2_uri]._docs - event = {'added': [], 'removed': [added_workspaces[0]]} + event = {"added": [], "removed": [added_workspaces[0]]} pylsp.m_workspace__did_change_workspace_folders(event) assert workspace1_uri not in pylsp.workspaces -def test_multiple_workspaces_wrong_removed_uri(pylsp, tmpdir): - workspace = {'uri': str(tmpdir.mkdir('Test123'))} - event = {'added': [], 'removed': [workspace]} +def test_multiple_workspaces_wrong_removed_uri(pylsp, tmpdir) -> None: + workspace = {"uri": str(tmpdir.mkdir("Test123"))} + event = {"added": [], "removed": [workspace]} pylsp.m_workspace__did_change_workspace_folders(event) - assert workspace['uri'] not in pylsp.workspaces + assert workspace["uri"] not in pylsp.workspaces -def test_root_workspace_changed(pylsp, tmpdir): - test_uri = str(tmpdir.mkdir('Test123')) +def test_root_workspace_changed(pylsp, tmpdir) -> None: + test_uri = str(tmpdir.mkdir("Test123")) pylsp.root_uri = test_uri pylsp.workspace._root_uri = test_uri - workspace1 = {'uri': test_uri} - workspace2 = {'uri': str(tmpdir.mkdir('NewTest456'))} + workspace1 = {"uri": test_uri} + workspace2 = {"uri": str(tmpdir.mkdir("NewTest456"))} - event = {'added': [workspace2], 'removed': [workspace1]} + event = {"added": [workspace2], "removed": [workspace1]} 
pylsp.m_workspace__did_change_workspace_folders(event) - assert workspace2['uri'] == pylsp.workspace._root_uri - assert workspace2['uri'] == pylsp.root_uri + assert workspace2["uri"] == pylsp.workspace._root_uri + assert workspace2["uri"] == pylsp.root_uri -def test_root_workspace_not_changed(pylsp, tmpdir): +def test_root_workspace_not_changed(pylsp, tmpdir) -> None: # removed uri != root_uri - test_uri_1 = str(tmpdir.mkdir('Test12')) + test_uri_1 = str(tmpdir.mkdir("Test12")) pylsp.root_uri = test_uri_1 pylsp.workspace._root_uri = test_uri_1 - workspace1 = {'uri': str(tmpdir.mkdir('Test1234'))} - workspace2 = {'uri': str(tmpdir.mkdir('NewTest456'))} - event = {'added': [workspace2], 'removed': [workspace1]} + workspace1 = {"uri": str(tmpdir.mkdir("Test1234"))} + workspace2 = {"uri": str(tmpdir.mkdir("NewTest456"))} + event = {"added": [workspace2], "removed": [workspace1]} pylsp.m_workspace__did_change_workspace_folders(event) assert test_uri_1 == pylsp.workspace._root_uri assert test_uri_1 == pylsp.root_uri # empty 'added' list - test_uri_2 = str(tmpdir.mkdir('Test123')) - new_root_uri = workspace2['uri'] + test_uri_2 = str(tmpdir.mkdir("Test123")) + new_root_uri = workspace2["uri"] pylsp.root_uri = test_uri_2 pylsp.workspace._root_uri = test_uri_2 - workspace1 = {'uri': test_uri_2} - event = {'added': [], 'removed': [workspace1]} + workspace1 = {"uri": test_uri_2} + event = {"added": [], "removed": [workspace1]} pylsp.m_workspace__did_change_workspace_folders(event) assert new_root_uri == pylsp.workspace._root_uri assert new_root_uri == pylsp.root_uri # empty 'removed' list - event = {'added': [workspace1], 'removed': []} + event = {"added": [workspace1], "removed": []} pylsp.m_workspace__did_change_workspace_folders(event) assert new_root_uri == pylsp.workspace._root_uri assert new_root_uri == pylsp.root_uri # 'added' list has no 'uri' - workspace2 = {'TESTuri': 'Test1234'} - event = {'added': [workspace2], 'removed': [workspace1]} + workspace2 = {"TESTuri": 
"Test1234"} + event = {"added": [workspace2], "removed": [workspace1]} pylsp.m_workspace__did_change_workspace_folders(event) assert new_root_uri == pylsp.workspace._root_uri assert new_root_uri == pylsp.root_uri -def test_root_workspace_removed(tmpdir, pylsp): - workspace1_dir = tmpdir.mkdir('workspace1') - workspace2_dir = tmpdir.mkdir('workspace2') +def test_root_workspace_removed(tmpdir, pylsp) -> None: + workspace1_dir = tmpdir.mkdir("workspace1") + workspace2_dir = tmpdir.mkdir("workspace2") root_uri = pylsp.root_uri # Add workspaces to the pylsp - added_workspaces = [{'uri': path_as_uri(str(x))} - for x in (workspace1_dir, workspace2_dir)] - event = {'added': added_workspaces, 'removed': []} + added_workspaces = [ + {"uri": path_as_uri(str(x))} for x in (workspace1_dir, workspace2_dir) + ] + event = {"added": added_workspaces, "removed": []} pylsp.m_workspace__did_change_workspace_folders(event) # Remove the root workspace - removed_workspaces = [{'uri': root_uri}] - event = {'added': [], 'removed': removed_workspaces} + removed_workspaces = [{"uri": root_uri}] + event = {"added": [], "removed": removed_workspaces} pylsp.m_workspace__did_change_workspace_folders(event) # Assert that the first of the workspaces (in alphabetical order) is now @@ -230,66 +229,204 @@ def test_root_workspace_removed(tmpdir, pylsp): assert pylsp.workspace._root_uri == path_as_uri(str(workspace1_dir)) -@pytest.mark.skipif(os.name == 'nt', reason="Fails on Windows") -def test_workspace_loads_pycodestyle_config(pylsp, tmpdir): - workspace1_dir = tmpdir.mkdir('Test123') +@pytest.mark.skipif(os.name == "nt", reason="Fails on Windows") +def test_workspace_loads_pycodestyle_config(pylsp, tmpdir) -> None: + workspace1_dir = tmpdir.mkdir("Test123") pylsp.root_uri = str(workspace1_dir) pylsp.workspace._root_uri = str(workspace1_dir) # Test that project settings are loaded - workspace2_dir = tmpdir.mkdir('NewTest456') + workspace2_dir = tmpdir.mkdir("NewTest456") cfg = 
workspace2_dir.join("pycodestyle.cfg") - cfg.write( - "[pycodestyle]\n" - "max-line-length = 1000" - ) + cfg.write("[pycodestyle]\nmax-line-length = 1000") - workspace1 = {'uri': str(workspace1_dir)} - workspace2 = {'uri': str(workspace2_dir)} + workspace1 = {"uri": str(workspace1_dir)} + workspace2 = {"uri": str(workspace2_dir)} - event = {'added': [workspace2], 'removed': [workspace1]} + event = {"added": [workspace2], "removed": [workspace1]} pylsp.m_workspace__did_change_workspace_folders(event) seetings = pylsp.workspaces[str(workspace2_dir)]._config.settings() - assert seetings['plugins']['pycodestyle']['maxLineLength'] == 1000 + assert seetings["plugins"]["pycodestyle"]["maxLineLength"] == 1000 # Test that project settings prevail over server ones. - server_settings = {'pylsp': {'plugins': {'pycodestyle': {'maxLineLength': 10}}}} + server_settings = {"pylsp": {"plugins": {"pycodestyle": {"maxLineLength": 10}}}} pylsp.m_workspace__did_change_configuration(server_settings) - assert seetings['plugins']['pycodestyle']['maxLineLength'] == 1000 + assert seetings["plugins"]["pycodestyle"]["maxLineLength"] == 1000 # Test switching to another workspace with different settings - workspace3_dir = tmpdir.mkdir('NewTest789') + workspace3_dir = tmpdir.mkdir("NewTest789") cfg1 = workspace3_dir.join("pycodestyle.cfg") - cfg1.write( - "[pycodestyle]\n" - "max-line-length = 20" - ) + cfg1.write("[pycodestyle]\nmax-line-length = 20") - workspace3 = {'uri': str(workspace3_dir)} + workspace3 = {"uri": str(workspace3_dir)} - event = {'added': [workspace3], 'removed': [workspace2]} + event = {"added": [workspace3], "removed": [workspace2]} pylsp.m_workspace__did_change_workspace_folders(event) seetings = pylsp.workspaces[str(workspace3_dir)]._config.settings() - assert seetings['plugins']['pycodestyle']['maxLineLength'] == 20 + assert seetings["plugins"]["pycodestyle"]["maxLineLength"] == 20 -def test_settings_of_added_workspace(pylsp, tmpdir): - test_uri = 
str(tmpdir.mkdir('Test123')) +def test_settings_of_added_workspace(pylsp, tmpdir) -> None: + test_uri = str(tmpdir.mkdir("Test123")) pylsp.root_uri = test_uri pylsp.workspace._root_uri = test_uri # Set some settings for the server. - server_settings = {'pylsp': {'plugins': {'jedi': {'environment': '/usr/bin/python3'}}}} + server_settings = { + "pylsp": {"plugins": {"jedi": {"environment": "/usr/bin/python3"}}} + } pylsp.m_workspace__did_change_configuration(server_settings) # Create a new workspace. - workspace1 = {'uri': str(tmpdir.mkdir('NewTest456'))} - event = {'added': [workspace1]} + workspace1 = {"uri": str(tmpdir.mkdir("NewTest456"))} + event = {"added": [workspace1]} pylsp.m_workspace__did_change_workspace_folders(event) # Assert settings are inherited from the server config. - workspace1_object = pylsp.workspaces[workspace1['uri']] - workspace1_jedi_settings = workspace1_object._config.plugin_settings('jedi') - assert workspace1_jedi_settings == server_settings['pylsp']['plugins']['jedi'] + workspace1_object = pylsp.workspaces[workspace1["uri"]] + workspace1_jedi_settings = workspace1_object._config.plugin_settings("jedi") + assert workspace1_jedi_settings == server_settings["pylsp"]["plugins"]["jedi"] + + +def test_no_progress_without_capability(workspace, consumer) -> None: + workspace._config.capabilities["window"] = {"workDoneProgress": False} + + with workspace.report_progress("some_title"): + pass + + assert len(consumer.call_args_list) == 0 + + +def test_progress_simple(workspace, consumer) -> None: + workspace._config.capabilities["window"] = {"workDoneProgress": True} + + with workspace.report_progress("some_title"): + pass + + init_call, *progress_calls = consumer.call_args_list + + assert init_call[0][0]["method"] == "window/workDoneProgress/create" + + # same method for all calls + assert all(call[0][0]["method"] == "$/progress" for call in progress_calls), ( + consumer.call_args_list + ) + + # same token used in all calls + assert ( + len( + 
{call[0][0]["params"]["token"] for call in progress_calls} + | {init_call[0][0]["params"]["token"]} + ) + == 1 + ) + + assert [call[0][0]["params"]["value"] for call in progress_calls] == [ + {"kind": "begin", "title": "some_title"}, + {"kind": "end"}, + ] + + +@pytest.mark.parametrize("exc", [Exception("something"), TimeoutError()]) +def test_progress_initialization_fails_but_is_skipped( + workspace, consumer, endpoint, exc +) -> None: + def failing_token_initialization(self, *_args, **_kwargs): + raise exc + + endpoint._dispatcher.m_window__work_done_progress__create = ( + failing_token_initialization + ) + + workspace._config.capabilities["window"] = {"workDoneProgress": True} + + with workspace.report_progress("some_title", skip_token_initialization=True): + pass + + # we only see the failing token initialization call, no other calls + progress_calls = consumer.call_args_list + assert all(call[0][0]["method"] == "$/progress" for call in progress_calls) + assert len({call[0][0]["params"]["token"] for call in progress_calls}) == 1 + assert [call[0][0]["params"]["value"] for call in progress_calls] == [ + {"kind": "begin", "title": "some_title"}, + {"kind": "end"}, + ] + + +def test_progress_with_percent(workspace, consumer) -> None: + workspace._config.capabilities["window"] = {"workDoneProgress": True} + + with workspace.report_progress( + "some_title", "initial message", percentage=1 + ) as progress_message: + progress_message("ten", 10) + progress_message("fifty", 50) + progress_message("ninety", 90) + + init_call, *progress_calls = consumer.call_args_list + + assert init_call[0][0]["method"] == "window/workDoneProgress/create" + + # same method for all progress calls + assert all(call[0][0]["method"] == "$/progress" for call in progress_calls) + + # same token used in all calls + assert ( + len( + {call[0][0]["params"]["token"] for call in progress_calls} + | {init_call[0][0]["params"]["token"]} + ) + == 1 + ) + + assert [call[0][0]["params"]["value"] for 
call in progress_calls] == [ + { + "kind": "begin", + "message": "initial message", + "percentage": 1, + "title": "some_title", + }, + {"kind": "report", "message": "ten", "percentage": 10}, + {"kind": "report", "message": "fifty", "percentage": 50}, + {"kind": "report", "message": "ninety", "percentage": 90}, + {"kind": "end"}, + ] + + +def test_progress_with_exception(workspace, consumer) -> None: + workspace._config.capabilities["window"] = {"workDoneProgress": True} + + class DummyError(Exception): + pass + + try: + with workspace.report_progress("some_title"): + raise DummyError("something") + except DummyError: + # we're using a specific exception class here so + # any other exceptions happening in progress + # reporting would correctly be raised in the + # test. + pass + + init_call, *progress_calls = consumer.call_args_list + assert init_call[0][0]["method"] == "window/workDoneProgress/create" + + # same method for all calls + assert all(call[0][0]["method"] == "$/progress" for call in progress_calls) + + # same token used in all calls + assert ( + len( + {call[0][0]["params"]["token"] for call in progress_calls} + | {init_call[0][0]["params"]["token"]} + ) + == 1 + ) + + assert [call[0][0]["params"]["value"] for call in progress_calls] == [ + {"kind": "begin", "title": "some_title"}, + {"kind": "end"}, + ]