diff --git a/.cmake-format.yaml b/.cmake-format.yaml deleted file mode 100644 index 9ff5d51..0000000 --- a/.cmake-format.yaml +++ /dev/null @@ -1,2 +0,0 @@ ---- -line_width: 120 diff --git a/.cmakelintrc b/.cmakelintrc deleted file mode 100644 index 10473ce..0000000 --- a/.cmakelintrc +++ /dev/null @@ -1 +0,0 @@ -filter=-whitespace/indent,-linelength,-readability/wonkycase diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml index 209d62f..3a2c475 100644 --- a/.github/FUNDING.yml +++ b/.github/FUNDING.yml @@ -1,4 +1,5 @@ --- +patreon: user?u=83975719 custom: - "https://user-images.githubusercontent.com/32936898/199681341-1c5cfa61-4411-4b67-b268-7cd87c5867bb.png" - "https://user-images.githubusercontent.com/32936898/199681363-1094a0be-85ca-49cf-a410-19b3d7965120.png" diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 5479697..b32b7a9 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -15,39 +15,50 @@ permissions: contents: write env: - CMAKE_GENERATOR: Ninja PYTHONUTF8: "1" - python-version: 3.x + # https://github.com/tree-sitter/py-tree-sitter/pull/161 + python-version: "3.11" cache: pip jobs: - build-wheels-and-test: + test: strategy: fail-fast: false matrix: runs-on: - ubuntu-latest - macos-latest - # OSError: [WinError 193] %1 is not a valid Win32 application - # - windows-latest - runs-on: ${{ matrix.runs-on }} + - windows-latest + runs-on: ${{matrix.runs-on}} steps: - - uses: actions/checkout@v3 - - uses: docker/setup-qemu-action@v2.2.0 - if: runner.os == 'Linux' - - uses: pypa/cibuildwheel@v2.14.1 - - uses: actions/upload-artifact@v3 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: - path: | - wheelhouse/*.whl - + python-version: ${{env.python-version}} + cache: ${{env.cache}} + cache-dependency-path: |- + requirements.txt + requirements/dev.txt + - name: Install dependencies + run: | + pip install -e '.[dev]' + - name: Test + run: | + pytest --cov + - uses: codecov/codecov-action@v4 build: - needs: - - build-wheels-and-test - runs-on: ubuntu-latest + needs: test + strategy: + fail-fast: false + matrix: + runs-on: + - ubuntu-latest + - macos-latest + - windows-latest + runs-on: ${{matrix.runs-on}} steps: - - uses: actions/checkout@v3 - - uses: actions/setup-python@v4 + - uses: actions/checkout@v4 + - uses: actions/setup-python@v5 with: python-version: ${{env.python-version}} cache: ${{env.cache}} @@ -57,28 +68,24 @@ jobs: - name: Install dependencies run: | pip install build - - name: Build sdist + - name: Build run: | - pyproject-build -s - - uses: actions/upload-artifact@v3 - if: ${{ ! startsWith(github.ref, 'refs/tags/') }} + pyproject-build + - uses: pypa/gh-action-pypi-publish@release/v1 + if: runner.os == 'Linux' && startsWith(github.ref, 'refs/tags/') + with: + password: ${{secrets.PYPI_API_TOKEN}} + - uses: actions/upload-artifact@v4 + if: runner.os == 'Linux' && ! 
startsWith(github.ref, 'refs/tags/') with: path: | dist/* - - uses: actions/download-artifact@v3 - with: - name: artifact - path: dist - - uses: softprops/action-gh-release@v1 - if: startsWith(github.ref, 'refs/tags/') + - uses: softprops/action-gh-release@v2 + if: runner.os == 'Linux' && startsWith(github.ref, 'refs/tags/') with: # body_path: build/CHANGELOG.md files: | dist/* - - uses: pypa/gh-action-pypi-publish@release/v1 - if: startsWith(github.ref, 'refs/tags/') - with: - password: ${{ secrets.PYPI_API_TOKEN }} deploy-aur: needs: build @@ -86,6 +93,7 @@ jobs: if: startsWith(github.ref, 'refs/tags/') steps: - uses: Freed-Wu/update-aur-package@v1.0.11 + if: startsWith(github.ref, 'refs/tags/') with: package_name: termux-language-server ssh_private_key: ${{secrets.AUR_SSH_PRIVATE_KEY}} @@ -99,5 +107,5 @@ jobs: run: > curl -X POST -d '{"ref":"main"}' -H "Accept: application/vnd.github.v3+json" - -H "Authorization: Bearer ${{ secrets.GH_TOKEN }}" + -H "Authorization: Bearer ${{secrets.GH_TOKEN}}" https://api.github.com/repos/Freed-Wu/nur-packages/actions/workflows/version.yml/dispatches diff --git a/.pre-commit-config.yaml b/.pre-commit-config.yaml index 58e3573..4e06c28 100644 --- a/.pre-commit-config.yaml +++ b/.pre-commit-config.yaml @@ -1,9 +1,9 @@ --- -exclude: (^templates/.*|.*\.json$) +exclude: ^templates/|\.json$ repos: - repo: https://github.com/pre-commit/pre-commit-hooks - rev: v4.5.0 + rev: v6.0.0 hooks: - id: check-added-large-files - id: fix-byte-order-marker @@ -23,11 +23,11 @@ repos: - id: check-toml - id: check-json - repo: https://github.com/Lucas-C/pre-commit-hooks - rev: v1.5.4 + rev: v1.5.5 hooks: - id: remove-crlf - repo: https://github.com/codespell-project/codespell - rev: v2.2.6 + rev: v2.4.1 hooks: - id: codespell additional_dependencies: @@ -39,80 +39,60 @@ repos: args: - --msg-filename - repo: https://github.com/editorconfig-checker/editorconfig-checker.python - rev: 2.7.2 + rev: 3.4.1 hooks: - id: editorconfig-checker - repo: https://github.com/jumanjihouse/pre-commit-hooks rev: 3.0.0 hooks: - id: check-mailmap + # https://github.com/koalaman/shellcheck/issues/2909 + - id: shellcheck + exclude_types: + - zsh - repo: https://github.com/rhysd/actionlint - rev: v1.6.26 + rev: v1.7.8 hooks: - id: actionlint - repo: https://github.com/adrienverge/yamllint - rev: v1.32.0 + rev: v1.37.1 hooks: - id: yamllint - repo: https://github.com/executablebooks/mdformat - rev: 0.7.17 + rev: 1.0.0 hooks: - id: mdformat additional_dependencies: - mdformat-pyproject - mdformat-gfm - - mdformat-myst + # - mdformat-myst - mdformat-toc - mdformat-deflist - mdformat-beautysh - - mdformat-black + - mdformat-ruff - mdformat-config + - mdformat-web - repo: https://github.com/DavidAnson/markdownlint-cli2 - rev: v0.10.0 + rev: v0.18.1 hooks: - id: markdownlint-cli2 additional_dependencies: - markdown-it-texmath - - repo: https://github.com/psf/black - rev: 23.9.1 + - repo: https://github.com/scop/pre-commit-shfmt + rev: v3.12.0-2 hooks: - - id: black - - repo: https://github.com/PyCQA/isort - rev: 5.12.0 + - id: shfmt + - repo: https://github.com/astral-sh/ruff-pre-commit + rev: v0.14.3 hooks: - - id: isort - - repo: https://github.com/pycqa/pydocstyle - rev: 6.3.0 - hooks: - - id: pydocstyle - additional_dependencies: - - tomli + - id: ruff-check + - id: ruff-format - repo: https://github.com/kumaraditya303/mirrors-pyright - rev: v1.1.329 + rev: v1.1.407 hooks: - id: pyright - - repo: https://github.com/PyCQA/bandit - rev: 1.7.5 - hooks: - - id: bandit - args: - - -cpyproject.toml - 
additional_dependencies: - - tomli - - repo: https://github.com/cmake-lint/cmake-lint - rev: 1.4.2 - hooks: - - id: cmakelint - - repo: https://github.com/cheshirekow/cmake-format-precommit - rev: v0.6.13 - hooks: - - id: cmake-format - additional_dependencies: - - pyyaml - - id: cmake-lint - additional_dependencies: - - pyyaml ci: skip: + - shellcheck - pyright diff --git a/.readthedocs.yaml b/.readthedocs.yaml index 1da6547..77d51fb 100644 --- a/.readthedocs.yaml +++ b/.readthedocs.yaml @@ -5,7 +5,8 @@ version: 2 build: os: ubuntu-22.04 tools: - python: "3" + # https://github.com/grantjenks/py-tree-sitter-languages/issues/30 + python: "3.11" formats: - htmlzip diff --git a/.shellcheckrc b/.shellcheckrc new file mode 100644 index 0000000..ef97b52 --- /dev/null +++ b/.shellcheckrc @@ -0,0 +1,2 @@ +shell=bash +disable=SC2034,SC2154 diff --git a/CMakeLists.txt b/CMakeLists.txt deleted file mode 100644 index b2ca7e3..0000000 --- a/CMakeLists.txt +++ /dev/null @@ -1,12 +0,0 @@ -# https://github.com/tree-sitter/tree-sitter-bash/issues/227 -cmake_minimum_required(VERSION 3.10) -include(FetchContent) -FetchContent_Declare(tree-sitter-bash GIT_REPOSITORY "https://github.com/tree-sitter/tree-sitter-bash") -FetchContent_MakeAvailable(tree-sitter-bash) -include_directories(${tree-sitter-bash_SOURCE_DIR}/src) -add_library(parser SHARED ${tree-sitter-bash_SOURCE_DIR}/src/parser.c ${tree-sitter-bash_SOURCE_DIR}/src/scanner.c) -if(NOT DEFINED SKBUILD_PLATLIB_DIR) - set(SKBUILD_PLATLIB_DIR ${CMAKE_SOURCE_DIR}/src) -endif() -set(CMAKE_INSTALL_LIBDIR ${SKBUILD_PLATLIB_DIR}/termux_language_server/data/lib) -install(TARGETS parser RUNTIME) diff --git a/README.md b/README.md index d5f9070..a002044 100644 --- a/README.md +++ b/README.md @@ -51,6 +51,10 @@ Language server for some specific bash scripts: - `*.eclass` - [`make.conf`](https://dev.gentoo.org/~zmedico/portage/doc/man/make.conf.5.html) - [`color.map`](https://dev.gentoo.org/~zmedico/portage/doc/man/color.map.5.html) +- Debian/Ubuntu + - [`devscripts.conf` and `.devscripts`](https://manpages.ubuntu.com/manpages/bionic/man5/devscripts.conf.5.html) +- zsh + - [`*.mdd`](https://github.com/zsh-users/zsh/blob/57248b88830ce56adc243a40c7773fb3825cab34/Etc/zsh-development-guide#L285-L288) This language server only provides extra features which [bash-language-server](https://github.com/bash-lsp/bash-language-server) @@ -71,7 +75,9 @@ doesn't support: - [x] sort values - [x] [Document Link](https://microsoft.github.io/language-server-protocol/specifications/specification-current#textDocument_documentLink): - [x] `build.sh`: - - [x] `PKGBUILD`: + - [x] `PKGBUILD`: + - [x] ArchLinux: + - [x] Windows Msys2: - [ ] `ebuild`: - [x] [Hover](https://microsoft.github.io/language-server-protocol/specifications/specification-current#textDocument_hover) - [x] [Completion](https://microsoft.github.io/language-server-protocol/specifications/specification-current#textDocument_completion) @@ -127,20 +133,15 @@ Other features: ![completion](https://github.com/Freed-Wu/pkgbuild-language-server/assets/32936898/c060690c-071b-41a0-bde5-dce338f4e779) -## How Does It Work +![arch](https://github.com/termux/termux-language-server/assets/32936898/e10b40c6-515e-4d50-9526-d32ea26b9238) + +![license](https://github.com/termux/termux-language-server/assets/32936898/13109df3-30ba-4371-ad0a-aa7f46c8e80a) -For every subtype of bash, there exists one -[json](https://github.com/termux/termux-language-server/tree/main/src/termux_language_server/assets/json) -respecting [json schema 
specification](https://json-schema.org/specification) -to provide the following information: +![depends](https://github.com/termux/termux-language-server/assets/32936898/a70b41ae-cf4b-44cc-bb10-a54cb5488f30) -- variable type -- variable description -- variable value enumerate -- is variable required -- ... +## How Does It Work -If you want to support more filetypes, just add a new json schema for it. +See [here](https://github.com/neomutt/lsp-tree-sitter#usage). Read [![readthedocs](https://shields.io/readthedocs/termux-language-server)](https://termux-language-server.readthedocs.io) diff --git a/docs/conf.py b/docs/conf.py index 009c886..7d81290 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -2,6 +2,7 @@ https://www.sphinx-doc.org/en/master/usage/configuration.html """ + from termux_language_server import __version__ as version # type: ignore from termux_language_server._metainfo import ( # type: ignore author, diff --git a/docs/resources/configure.md b/docs/resources/configure.md index c3275ef..2d0454c 100644 --- a/docs/resources/configure.md +++ b/docs/resources/configure.md @@ -1,22 +1,32 @@ # Configure -## (Neo)[Vim](https://www.vim.org) +- For Windows, change `~/.config` to `~/AppData/Local` +- For macOS, change `~/.config` to `~/Library` + +## [Neovim](https://neovim.io) / [Vim](https://www.vim.org) ### [coc.nvim](https://github.com/neoclide/coc.nvim) +`~/.config/nvim/coc-settings.json`: + ```json { "languageserver": { "termux": { "command": "termux-language-server", - "filetypes": ["sh"] - }, + "filetypes": [ + "sh" + ] + } } } ``` ### [vim-lsp](https://github.com/prabirshrestha/vim-lsp) +Neovim - `~/.config/nvim/init.vim`:\ +Vim - `~/.config/vim/vimrc`: + ```vim if executable('termux-language-server') augroup lsp @@ -30,32 +40,66 @@ if executable('termux-language-server') endif ``` -## [Neovim](https://neovim.io) +### [nvim-lspconfig](https://github.com/neovim/nvim-lspconfig) (upstream configs) + +Example via [`lazy.nvim`](https://github.com/folke/lazy.nvim)\ +For other options see: [nvim-lspconfig#install](https://github.com/neovim/nvim-lspconfig#install)\ +Or the documentation for your plugin manager of choice. + +`~/.config/nvim/init.lua`: ```lua -vim.api.nvim_create_autocmd({ "BufEnter" }, { - pattern = { "build.sh" "*.subpackage.sh" }, - callback = function() - vim.lsp.start({ - name = "termux", - cmd = { "termux-language-server" } - }) - end,
}) + +vim.lsp.enable('termux_language_server') ``` ## [Emacs](https://www.gnu.org/software/emacs) +`~/.config/emacs/init.el`: + ```elisp (make-lsp-client :new-connection (lsp-stdio-connection `(,(executable-find "termux-language-server"))) - :activation-fn (lsp-activate-on "build.sh" "*.subpackage.sh") + :activation-fn (lsp-activate-on "build.sh" "*.subpackage.sh" "PKGBUILD" +"*.install" "makepkg.conf" "*.ebuild" "*.eclass" "color.map" "make.conf") :server-id "termux"))) ``` +## [Helix](https://helix-editor.com/) + +`~/.config/helix/languages.toml`: + +```toml +[[language]] +name = "sh" +language-servers = ["termux-language-server"] + +[language_server.termux-language-server] +command = "termux-language-server" +``` + +## [Kakoune](https://kakoune.org/) + +### [kak-lsp](https://github.com/kak-lsp/kak-lsp) + +`~/.config/kak-lsp/kak-lsp.toml`: + +```toml +[language_server.termux-language-server] +filetypes = ["sh"] +command = "termux-language-server" +``` + ## [Sublime](https://www.sublimetext.com) +`~/.config/sublime-text-3/Packages/Preferences.sublime-settings`: + ```json { "clients": { @@ -69,3 +113,18 @@ vim.api.nvim_create_autocmd({ "BufEnter" }, { } } ``` + +## [Visual Studio Code](https://code.visualstudio.com/) + +[Official support for generic LSP clients is pending](https://github.com/microsoft/vscode/issues/137885). + +### [vscode-glspc](https://gitlab.com/ruilvo/vscode-glspc) + +`~/.config/Code/User/settings.json`: + +```json +{ + "glspc.serverPath": "termux-language-server", + "glspc.languageId": "bash" +} +``` diff --git a/docs/resources/install.md b/docs/resources/install.md index 6f8fd0d..284bc1b 100644 --- a/docs/resources/install.md +++ b/docs/resources/install.md @@ -25,25 +25,13 @@ yay -S termux-language-server ( python3.withPackages ( p: with p; [ - nur.repos.termux.termux-language-server + nur.repos.Freed-Wu.termux-language-server ] ) ) } ``` -## [Nix](https://nixos.org) - -```sh -nix shell github:termux/termux-language-server -``` - -Run without installation: - -```sh -nix run github:termux/termux-language-server -- --help -``` - ## [PYPI](https://pypi.org/project/termux-language-server) ```sh diff --git a/pyproject.toml b/pyproject.toml index bbd20b5..6a0f024 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,11 +1,11 @@ [build-system] -requires = ["scikit-build-core"] -build-backend = "scikit_build_core.build" +requires = ["setuptools_scm[toml] >= 6.2", "setuptools-generate >= 0.0.6"] +build-backend = "setuptools.build_meta" # https://setuptools.pypa.io/en/latest/userguide/pyproject_config.html [project] name = "termux-language-server" -description = "termux language server" +description = "Language server for build.sh, PKGBUILD, ebuild" readme = "README.md" # type_a | type_b requires-python = ">= 3.10" @@ -27,28 +27,10 @@ classifiers = [ "Programming Language :: Python :: Implementation :: CPython", "Programming Language :: Python :: Implementation :: PyPy", ] -# dynamic = ["version", "dependencies", "optional-dependencies"] -# https://github.com/pypa/twine/issues/753 -dynamic = ["version"] -dependencies = [ - "colorama", - "fqdn", - "jinja2", - "platformdirs", - "pygls", - "rfc3987", - "tree-sitter", -] - -[project.optional-dependencies] -colorize = ["pygments"] -dev = ["pytest-cov"] -misc = ["beautifulsoup4", "markdown-it-py", "pypandoc"] -# pyproject.toml doesn't support git+git:// -pkgbuild = ["pyalpm"] +dynamic = ["version", "dependencies", "optional-dependencies"] [[project.authors]] -name = "Wu Zhenyu" +name = "Wu, Zhenyu" email = "wuzhenyu@ustc.edu"
[project.license] @@ -65,19 +47,26 @@ termux-language-server = "termux_language_server.__main__:main" pkgbuild-language-server = "termux_language_server.__main__:main" portage-language-server = "termux_language_server.__main__:main" +[tool.setuptools.data-files] +"share/man/man1" = [ + "sdist/termux-language-server.1", + "sdist/pkgbuild-language-server.1", + "sdist/portage-language-server.1", +] +"share/bash-completion/completions" = [ + "sdist/termux-language-server", + "sdist/pkgbuild-language-server", + "sdist/portage-language-server", +] +"share/zsh/site-functions" = [ + "sdist/_termux-language-server", + "sdist/_pkgbuild-language-server", + "sdist/_portage-language-server", +] + [tool.scikit-build] experimental = true -[tool.scikit-build.metadata.version] -provider = "scikit_build_core.metadata.setuptools_scm" - -[tool.scikit-build.sdist] -include = ["src/termux_language_server/_version.py"] - -[[tool.scikit-build.generate]] -path = "termux_language_server/_metainfo.py" -template-path = "templates/metainfo.py" - [tool.setuptools.dynamic.dependencies] file = "requirements.txt" @@ -88,29 +77,52 @@ file = "requirements/colorize.txt" [tool.setuptools.dynamic.optional-dependencies.dev] file = "requirements/dev.txt" -[tool.setuptools.dynamic.optional-dependencies.termux] -file = "requirements/termux.txt" +[tool.setuptools.dynamic.optional-dependencies.misc] +file = "requirements/misc.txt" + +[tool.setuptools.dynamic.optional-dependencies.pkgbuild] +file = "requirements/pkgbuild.txt" # end: scripts/update-pyproject.toml.pl [tool.setuptools_scm] write_to = "src/termux_language_server/_version.py" +[tool.setuptools-generate] +write-to = "src/termux_language_server/_metainfo.py" + +[tool.setuptools-generate.metainfo-template] +file = "templates/metainfo.py.j2" + [tool.mdformat] number = true -[tool.black] -line-length = 79 +[tool.doq] +template_path = "templates" -[tool.isort] -line_length = 79 -profile = "black" +[tool.ruff] +line-length = 79 -# https://github.com/PyCQA/pydocstyle/issues/418 -[tool.pydocstyle] -add_ignore = "D205, D400" +[tool.ruff.lint] +select = [ + # pycodestyle + "E", + # pyflakes + "F", + # pyupgrade + "UP", + # flake8-bugbear + "B", + # flake8-simplify + "SIM", + # isort + "I", +] +ignore = ["D205", "D400"] +preview = true -[tool.doq] -template_path = "templates" +[tool.ruff.format] +docstring-code-format = true +preview = true [tool.coverage.report] exclude_lines = [ @@ -119,15 +131,8 @@ exclude_lines = [ "\\s*import tomli as tomllib", ] -[tool.bandit.assert_used] -skips = ["*_test.py", "*/test_*.py"] - [tool.cibuildwheel] archs = ["all"] -# jsonschema depends on rpds-py which depends on cargo -# disable *-musllinux_* -# Fatal Python error: Segmentation fault -# disable pp* -skip = "*37-* *38-* *39-* *-musllinux_* pp*" +skip = "*37-* *38-* *39-*" before-test = "pip install -rrequirements.txt -rrequirements/dev.txt" test-command = "pytest {project}" diff --git a/requirements.txt b/requirements.txt index 76e6874..93dcfd8 100755 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,12 @@ #!/usr/bin/env -S pip install -r -colorama # https://python-jsonschema.readthedocs.io/en/stable/validate/#validating-formats -# hostname +# validate hostname fqdn -jinja2 -jsonschema -# get nvim-tree-sitter's parser +lsp-tree-sitter >= 0.1.0 +# ~/.config/pacman/template.md.j2 platformdirs -pygls -# uri +pygls >= 2.0.0 +# validate uri rfc3987 -tree-sitter +tree-sitter-bash diff --git a/requirements/colorize.txt b/requirements/colorize.txt index 0b92a1c..b0dfd7e 100755 --- 
a/requirements/colorize.txt +++ b/requirements/colorize.txt @@ -1,3 +1,3 @@ #!/usr/bin/env -S pip install -r -pygments +lsp-tree-sitter[colorize] diff --git a/requirements/ebuild.txt b/requirements/ebuild.txt new file mode 100755 index 0000000..b227c03 --- /dev/null +++ b/requirements/ebuild.txt @@ -0,0 +1,3 @@ +#!/usr/bin/env -S pip install -r + +portage diff --git a/requirements/misc.txt b/requirements/misc.txt index 45b42b2..0402fb8 100755 --- a/requirements/misc.txt +++ b/requirements/misc.txt @@ -1,6 +1,4 @@ #!/usr/bin/env -S pip install -r -# See . -beautifulsoup4 -markdown-it-py -pypandoc +license-expression +lsp-tree-sitter[misc] diff --git a/requirements/pkgbuild.txt b/requirements/pkgbuild.txt index f67bf38..86b6ddf 100755 --- a/requirements/pkgbuild.txt +++ b/requirements/pkgbuild.txt @@ -1,4 +1,4 @@ #!/usr/bin/env -S pip install -r -git+git://gitlab.archlinux.org/pacman/namcap +# git+git://gitlab.archlinux.org/pacman/namcap pyalpm diff --git a/src/termux_language_server/__init__.py b/src/termux_language_server/__init__.py index 75a4216..1f3e0fe 100644 --- a/src/termux_language_server/__init__.py +++ b/src/termux_language_server/__init__.py @@ -1,6 +1,7 @@ r"""Provide ``__version__`` for `importlib.metadata.version() `_. """ + from typing import Literal try: @@ -20,6 +21,8 @@ "ebuild", "make.conf", "color.map", + "mdd", + "devscripts.conf", ] PACKAGE_NAME = r"[a-z][a-z0-9-]*" CSV = f"{PACKAGE_NAME}(, {PACKAGE_NAME})*" diff --git a/src/termux_language_server/__main__.py b/src/termux_language_server/__main__.py index 28471c8..21957b8 100644 --- a/src/termux_language_server/__main__.py +++ b/src/termux_language_server/__main__.py @@ -1,18 +1,22 @@ r"""This module can be called by `python -m `_. """ + from argparse import ArgumentParser, RawDescriptionHelpFormatter -from contextlib import suppress from datetime import datetime -from . import FILETYPE +from . import FILETYPE, __version__ from . import __name__ as NAME -from . import __version__ + +try: + import shtab +except ImportError: + from . import _shtab as shtab NAME = NAME.replace("_", "-") VERSION = rf"""{NAME} {__version__} Copyright (C) {datetime.now().year} -Written by Wu Zhenyu +Written by Wu, Zhenyu """ EPILOG = """ Report bugs to . 
@@ -25,66 +29,92 @@ def get_parser(): epilog=EPILOG, formatter_class=RawDescriptionHelpFormatter, ) - with suppress(ImportError): - import shtab - - shtab.add_argument_to(parser) + shtab.add_argument_to(parser) parser.add_argument("--version", version=VERSION, action="version") parser.add_argument( "--generate-schema", choices=FILETYPE.__args__, # type: ignore - help="generate schema json", + help="generate schema in an output format", + ) + parser.add_argument( + "--output-format", + choices=["json", "yaml", "toml"], + default="json", + help="output format: %(default)s", ) parser.add_argument( "--indent", type=int, default=2, - help="generated json's indent", + help="generated json, yaml's indent, ignored by toml: %(default)s", + ) + parser.add_argument( + "--color", + choices=["auto", "always", "never"], + default="auto", + help="when to display color, default: %(default)s", ) parser.add_argument( "--check", nargs="*", default={}, help="check file's errors and warnings", - ) + ).complete = shtab.FILE # type: ignore parser.add_argument( "--format", nargs="*", default={}, help="format files", - ) + ).complete = shtab.FILE # type: ignore parser.add_argument( - "--color", - choices={"auto", "always", "never"}, - default="auto", - help="when to display color", - ) + "--convert", + nargs="*", + default={}, + help="convert files to output format", + ).complete = shtab.FILE # type: ignore return parser def main(): r"""Parse arguments and provide shell completions.""" - parser = get_parser() - args = parser.parse_args() - - if args.generate_schema: - from .misc import get_schema - from .tree_sitter_lsp.utils import pprint - - pprint(get_schema(args.generate_schema), indent=args.indent) - exit() - from .finders import DIAGNOSTICS_FINDER_CLASSES, FORMAT_FINDER_CLASSES - from .parser import parse - from .tree_sitter_lsp.diagnose import check - from .tree_sitter_lsp.format import format - from .utils import get_filetype - - format(args.format, parse, FORMAT_FINDER_CLASSES, get_filetype) - result = check( - args.check, parse, DIAGNOSTICS_FINDER_CLASSES, get_filetype, args.color - ) - if args.format or args.check: - exit(result) + args = get_parser().parse_args() + + if args.generate_schema or args.format or args.check or args.convert: + from lsp_tree_sitter.diagnose import check + from lsp_tree_sitter.format import format + from lsp_tree_sitter.utils import pprint + + from .finders import DIAGNOSTICS_FINDER_CLASSES, FORMAT_FINDER_CLASSES + from .schema import BashTrie + from .utils import get_filetype, parser + + if args.generate_schema: + from .misc import get_schema + + kwargs = ( + {"indent": args.indent} if args.output_format != "toml" else {} + ) + pprint( + get_schema(args.generate_schema), + filetype=args.output_format, + **kwargs, + ) + for file in args.convert: + pprint( + BashTrie.from_file(file, parser.parse).to_json(), + filetype=args.output_format, + indent=args.indent, + ) + format(args.format, parser.parse, FORMAT_FINDER_CLASSES, get_filetype) + exit( + check( + args.check, + parser.parse, + DIAGNOSTICS_FINDER_CLASSES, + get_filetype, + args.color, + ) + ) from .server import TermuxLanguageServer diff --git a/src/termux_language_server/_shtab.py b/src/termux_language_server/_shtab.py new file mode 100644 index 0000000..8b15ce7 --- /dev/null +++ b/src/termux_language_server/_shtab.py @@ -0,0 +1,16 @@ +r"""Fake shtab +============== +""" + +from argparse import ArgumentParser +from typing import Any + +FILE = None +DIRECTORY = DIR = None + + +def add_argument_to(parser: ArgumentParser, 
*args: Any, **kwargs: Any): + from argparse import Action + + Action.complete = None # type: ignore + return parser diff --git a/src/termux_language_server/assets/json/PKGBUILD.json b/src/termux_language_server/assets/json/PKGBUILD.json index 55282b5..eba9174 100644 --- a/src/termux_language_server/assets/json/PKGBUILD.json +++ b/src/termux_language_server/assets/json/PKGBUILD.json @@ -24,7 +24,7 @@ "properties": { "pkgname": { "description": "Either the name of the package or an array of names for split packages. Valid characters for members of this array are alphanumerics, and any of the following characters: \"@ . \\_ + -\". Additionally, names are not allowed to start with hyphens or dots.", - "anyOf": [ + "oneOf": [ { "type": "array", "items": { @@ -39,7 +39,14 @@ }, "pkgver": { "description": "The version of the software as released from the author (e.g., *2.7.1*). The variable is not allowed to contain colons, forward slashes, hyphens or whitespace.\nThe pkgver variable can be automatically updated by providing a pkgver() function in the PKGBUILD that outputs the new package version. This is run after downloading and extracting the sources and running the prepare() function (if present), so it can use those files in determining the new pkgver. This is most useful when used with sources from version control systems (see below).", - "type": "string" + "oneOf": [ + { + "type": "string" + }, + { + "const": 0 + } + ] }, "pkgrel": { "description": "This is the release number specific to the distribution. This allows package maintainers to make updates to the package's configure flags, for example. This is typically set to *1* for each new upstream software release and incremented for intermediate PKGBUILD updates. The variable is a positive integer, with an optional subrelease level specified by adding another positive integer separated by a period (i.e. in the form x.y).", @@ -55,13 +62,618 @@ }, "url": { "description": "The URL to the VCS repository. This must include the VCS in the URL protocol for makepkg to recognize this as a VCS source. If the protocol does not include the VCS name, it can be added by prefixing the URL with vcs+. For example, using a Git repository over HTTPS would have a source URL in the form: git+https://\\....", - "type": "string" + "type": "string", + "format": "uri" }, "license": { "description": "This field specifies the license(s) that apply to the package. Commonly used licenses can be found in */usr/share/licenses/common*. If you see the package's license there, simply reference it in the license field (e.g., license=(GPL)). If the package provides a license not available in */usr/share/licenses/common*, then you should include it in the package itself and set license=(custom) or license=(custom:LicenseName). The license should be placed in *\\$pkgdir/usr/share/licenses/\\$pkgname/* when building the package. 
If multiple licenses are applicable, list all of them: license=(GPL FDL).", "type": "array", "items": { - "type": "string" + "oneOf": [ + { + "type": "string", + "enum": [ + "389-exception", + "Abstyles", + "CDL-1.0", + "DOC", + "GNAT-exception", + "AdaCore-doc", + "APL-1.0", + "Adobe-Glyph", + "Adobe-2006", + "ADSL", + "AFL-1.1", + "AFL-1.2", + "AFL-2.0", + "AFL-2.1", + "AFL-3.0", + "Afmparse", + "Aladdin", + "AGPL-1.0-only", + "AGPL-1.0-or-later", + "AGPL-3.0-only", + "AGPL-3.0-or-later", + "Giftware", + "LicenseRef-.amazon.com.-AmznSL-1.0", + "AMDPLPA", + "AML", + "AMPAS", + "ANTLR-PD", + "ANTLR-PD-fallback", + "Apache-1.0", + "Apache-1.1", + "Apache-2.0", + "APAFML", + "App-s2p", + "Swift-exception", + "APSL-1.0", + "APSL-1.1", + "APSL-1.2", + "APSL-2.0", + "Arphic-1999", + "Artistic-1.0", + "Artistic-1.0-cl8", + "Artistic-2.0", + "ClArtistic", + "Artistic-1.0-Perl", + "AAL", + "Autoconf-exception-2.0", + "Autoconf-exception-3.0", + "Autoconf-exception-generic", + "Baekmuk", + "Bahyph", + "Barr", + "Beerware", + "Bison-exception-2.2", + "Bitstream-Vera", + "BitTorrent-1.0", + "BitTorrent-1.1", + "blessing", + "BlueOak-1.0.0", + "BSL-1.0", + "Bootloader-exception", + "Borceux", + "Brian-Gladman-3-Clause", + "BSD-1-Clause", + "BSD-2-Clause-Views", + "BSD-3-Clause-No-Military-License", + "BSD-3-Clause-No-Nuclear-Warranty", + "BSD-3-Clause-Open-MPI", + "BSD-4-Clause-Shortened", + "BSD-3-Clause-Attribution", + "BSD-Advertising-Acknowledgement", + "BSD-3-Clause", + "BSD-4-Clause", + "BSD-4-Clause-UC", + "BSD-4.3RENO", + "BSD-2-Clause-Patent", + "BSD-Protection", + "BSD-2-Clause", + "BSD-Source-Code", + "0BSD", + "BUSL-1.1", + "BSD-4.3TAHOE", + "bzip2-1.0.6", + "C-UDA-1.0", + "CATOSL-1.1", + "CAL-1.0", + "CAL-1.0-Combined-Work-Exception", + "Caldera", + "OGL-Canada-2.0", + "CMU-Mach", + "CC-BY-1.0", + "CC-BY-2.0", + "CC-BY-2.5", + "CC-BY-2.5-AU", + "CC-BY-3.0", + "CC-BY-3.0-AT", + "CC-BY-3.0-DE", + "CC-BY-3.0-IGO", + "CC-BY-3.0-NL", + "CC-BY-3.0-US", + "CC-BY-4.0", + "CC-BY-NC-1.0", + "CC-BY-NC-2.0", + "CC-BY-NC-2.5", + "CC-BY-NC-3.0", + "CC-BY-NC-3.0-DE", + "CC-BY-NC-4.0", + "CC-BY-NC-ND-1.0", + "CC-BY-NC-ND-2.0", + "CC-BY-NC-ND-2.5", + "CC-BY-NC-ND-3.0", + "CC-BY-NC-ND-3.0-DE", + "CC-BY-NC-ND-3.0-IGO", + "CC-BY-NC-ND-4.0", + "CC-BY-NC-SA-1.0", + "CC-BY-NC-SA-2.0", + "CC-BY-NC-SA-2.0-DE", + "CC-BY-NC-SA-2.0-FR", + "CC-BY-NC-SA-2.0-UK", + "CC-BY-NC-SA-2.5", + "CC-BY-NC-SA-3.0", + "CC-BY-NC-SA-3.0-DE", + "CC-BY-NC-SA-3.0-IGO", + "CC-BY-NC-SA-4.0", + "CC-BY-ND-1.0", + "CC-BY-ND-2.0", + "CC-BY-ND-2.5", + "CC-BY-ND-3.0", + "CC-BY-ND-3.0-DE", + "CC-BY-ND-4.0", + "CC-BY-SA-1.0", + "CC-BY-SA-2.0", + "CC-BY-SA-2.0-UK", + "CC-BY-SA-2.1-JP", + "CC-BY-SA-2.5", + "CC-BY-SA-3.0", + "CC-BY-SA-3.0-AT", + "CC-BY-SA-3.0-DE", + "CC-BY-SA-4.0", + "CC-PDDC", + "CC0-1.0", + "CDDL-1.0", + "CDDL-1.1", + "CDLA-Permissive-1.0", + "CDLA-Permissive-2.0", + "CDLA-Sharing-1.0", + "CECILL-1.0", + "CECILL-1.1", + "CECILL-2.0", + "CECILL-2.1", + "CECILL-B", + "CECILL-C", + "CERN-OHL-1.1", + "CERN-OHL-1.2", + "CERN-OHL-P-2.0", + "CERN-OHL-S-2.0", + "CERN-OHL-W-2.0", + "CFITSIO", + "checkmk", + "Classpath-exception-2.0", + "BSD-3-Clause-Clear", + "Clips", + "CLISP-exception-2.0", + "BSD-Attribution-HPND-disclaimer", + "MIT-CMU", + "CNRI-Jython", + "CNRI-Python", + "CNRI-Python-GPL-Compatible", + "COIL-1.0", + "Condor-1.1", + "copyleft-next-0.3.0", + "copyleft-next-0.3.1", + "Cornell-Lossless-JPEG", + "CPAL-1.0", + "CPL-1.0", + "CPOL-1.02", + "Crossword", + "CrystalStacker", + "Community-Spec-1.0", + "CUA-OPL-1.0", + "Cube", + 
"curl", + "D-FSL-1.0", + "diffmark", + "DigiRule-FOSS-exception", + "DL-DE-BY-2.0", + "Plexus", + "Dotseqn", + "DRL-1.0", + "DSDP", + "dvipdfm", + "ECL-1.0", + "ECL-2.0", + "eCos-2.0", + "eCos-exception-2.0", + "RHeCos-1.1", + "EFL-1.0", + "EFL-2.0", + "eGenix", + "Elastic-2.0", + "MIT-advertising", + "MIT-enna", + "Entessa", + "EPICS", + "EPL-1.0", + "EPL-2.0", + "ErlPL-1.1", + "etalab-2.0", + "EUDatagrid", + "EUPL-1.0", + "EUPL-1.1", + "EUPL-1.2", + "Eurosym", + "Fair", + "OML", + "Fawkes-Runtime-exception", + "FLTK-exception", + "Font-exception-2.0", + "Frameworx-1.0", + "FDK-AAC", + "FreeBSD-DOC", + "FreeImage", + "freertos-exception-2.0", + "FTL", + "FSFAP", + "FSFUL", + "FSFULLR", + "FSFULLRWD", + "GCC-exception-3.1", + "GCC-exception-2.0", + "GFDL-1.1-only", + "GFDL-1.1-invariants-only", + "GFDL-1.1-invariants-or-later", + "GFDL-1.1-no-invariants-only", + "GFDL-1.1-no-invariants-or-later", + "GFDL-1.1-or-later", + "GFDL-1.2-only", + "GFDL-1.2-invariants-only", + "GFDL-1.2-invariants-or-later", + "GFDL-1.2-no-invariants-only", + "GFDL-1.2-no-invariants-or-later", + "GFDL-1.2-or-later", + "GFDL-1.3-only", + "GFDL-1.3-invariants-only", + "GFDL-1.3-invariants-or-later", + "GFDL-1.3-no-invariants-only", + "GFDL-1.3-no-invariants-or-later", + "GFDL-1.3-or-later", + "GL2PS", + "Glide", + "Glulxe", + "GLWTPL", + "gnu-javamail-exception", + "gnuplot", + "GPL-1.0-only", + "GPL-1.0-or-later", + "GPL-2.0-only", + "GPL-2.0-with-autoconf-exception", + "GPL-2.0-with-classpath-exception", + "GPL-2.0-with-font-exception", + "GPL-2.0-with-GCC-exception", + "GPL-2.0-or-later", + "GPL-3.0-only", + "GPL-3.0-with-autoconf-exception", + "GPL-3.0-with-GCC-exception", + "GPL-3.0-linking-exception", + "GPL-3.0-linking-source-exception", + "GPL-3.0-or-later", + "GPL-CC-1.0", + "Graphics-Gems", + "gSOAP-1.3b", + "GStreamer-exception-2005", + "GStreamer-exception-2008", + "HaskellReport", + "Spencer-99", + "Hippocratic-2.1", + "HPND", + "HPND-export-US", + "HPND-sell-variant-MIT-disclaimer", + "Spencer-94", + "i2p-gpl-java-exception", + "IBM-pibs", + "IPL-1.0", + "IEC-Code-Components-EULA", + "IJG", + "IJG-short", + "ImageMagick", + "Imlib2", + "xpp", + "Info-ZIP", + "QPL-1.0-INRIA-2004-exception", + "Intel-ACPI", + "Intel", + "Interbase-1.0", + "IPA", + "ISC", + "Jam", + "JasPer-2.0", + "JPL-image", + "JPNIC", + "JSON", + "Kazlib", + "KiCad-libraries-exception", + "Knuth-CTAN", + "LAL-1.2", + "LAL-1.3", + "Latex2e", + "BSD-3-Clause-LBNL", + "Leptonica", + "LGPL-2.0-only", + "LGPL-2.0-or-later", + "LGPL-2.1-only", + "LGPL-2.1-or-later", + "LGPL-3.0-only", + "LGPL-3.0-linking-exception", + "LGPL-3.0-or-later", + "LGPLLR", + "GD", + "xlock", + "Libpng", + "libpng-2.0", + "Libtool-exception", + "libutil-David-Nugent", + "LiLiQ-P-1.1", + "LiLiQ-R-1.1", + "LiLiQ-Rplus-1.1", + "Linux-OpenIB", + "Linux-syscall-note", + "LLVM-exception", + "LOOP", + "LPPL-1.0", + "LPPL-1.1", + "LPPL-1.2", + "LPPL-1.3a", + "LPPL-1.3c", + "LPL-1.0", + "LPL-1.02", + "LZMA-exception", + "LZMA-SDK-9.11-to-9.20", + "LZMA-SDK-9.22", + "mplus", + "MakeIndex", + "HPND-Markus-Kuhn", + "Martin-Birgmeier", + "mif-exception", + "Minpack", + "MirOS", + "MIT", + "MIT-0", + "MIT-feh", + "MIT-Wu", + "Xerox", + "MIT-Modern-Variant", + "MITNFA", + "NTP", + "SSPL-1.0", + "Motosoto", + "mpi-permissive", + "mpich2", + "MPL-1.0", + "MPL-1.1", + "MPL-2.0", + "MPL-2.0-no-copyleft-exception", + "MS-LPL", + "MS-PL", + "MS-RL", + "MTLL", + "MulanPSL-1.0", + "MulanPSL-2.0", + "Multics", + "Mup", + "NAIST-2003", + "NASA-1.3", + "Naumen", + "NBPL-1.0", + 
"NCGL-UK-2.0", + "Net-SNMP", + "NetCDF", + "Newsletr", + "NGPL", + "NICTA-1.0", + "NIST-PD", + "NIST-PD-fallback", + "NLOD-1.0", + "NLOD-2.0", + "NLPL", + "Nokia", + "NOSL", + "NPOSL-3.0", + "Noweb", + "NPL-1.0", + "NPL-1.1", + "NRL", + "NTP-0", + "O-UDA-1.0", + "OCaml-LGPL-linking-exception", + "OCCT-exception-1.0", + "OCCT-PL", + "OCLC-2.0", + "ODbL-1.0", + "ODC-By-1.0", + "OFFIS", + "OFL-1.0", + "OFL-1.0-no-RFN", + "OFL-1.0-RFN", + "OFL-1.1", + "OFL-1.1-no-RFN", + "OFL-1.1-RFN", + "OGC-1.0", + "OGDL-Taiwan-1.0", + "OGL-UK-1.0", + "OGL-UK-2.0", + "OGL-UK-3.0", + "OPL-1.0", + "OGTSL", + "OpenJDK-assembly-exception-1.0", + "OLDAP-1.1", + "OLDAP-1.2", + "OLDAP-1.3", + "OLDAP-1.4", + "OLDAP-2.0", + "OLDAP-2.0.1", + "OLDAP-2.1", + "OLDAP-2.2", + "OLDAP-2.2.1", + "OLDAP-2.2.2", + "OLDAP-2.3", + "OLDAP-2.4", + "OLDAP-2.5", + "OLDAP-2.6", + "OLDAP-2.7", + "OLDAP-2.8", + "OpenPBS-2.3", + "OPUBL-1.0", + "SSH-OpenSSH", + "x11vnc-openssl-exception", + "OpenSSL", + "openvpn-openssl-exception", + "BSD-3-Clause-No-Nuclear-License-2014", + "OSET-PL-2.1", + "HP-1986", + "OSL-1.0", + "OSL-1.1", + "OSL-2.0", + "OSL-2.1", + "OSL-3.0", + "Parity-6.0.0", + "Parity-7.0.0", + "PDDL-1.0", + "PHP-3.0", + "PHP-3.01", + "PolyForm-Noncommercial-1.0.0", + "PolyForm-Small-Business-1.0.0", + "PostgreSQL", + "PS-or-PDF-font-exception-20170817", + "PSF-2.0", + "psfrag", + "psutils", + "Python-2.0", + "Python-2.0.1", + "Qhull", + "QPL-1.0", + "QPL-1.0-INRIA-2004", + "Qt-GPL-exception-1.0", + "Qt-LGPL-exception-1.1", + "Qwt-exception-1.0", + "Rdisc", + "Spencer-86", + "BSD-3-Clause-Modification", + "RSCPL", + "RPL-1.1", + "RPL-1.5", + "RPSL-1.0", + "RSA-MD", + "Ruby", + "SAX-PD", + "Saxpath", + "SCEA", + "SchemeReport", + "libselinux-1.0", + "Sendmail", + "Sendmail-8.23", + "iMatix", + "SGI-B-1.1", + "SGI-B-2.0", + "SGI-B-1.0", + "SHL-0.5", + "SHL-0.51", + "SHL-2.0", + "SHL-2.1", + "SimPL-2.0", + "Sleepycat", + "SMPPL", + "SNIA", + "snprintf", + "SPL-1.0", + "SMLNJ", + "SugarCRM-1.1.3", + "BSD-3-Clause-No-Nuclear-License", + "SISSL", + "SISSL-1.2", + "SunPro", + "SWI-exception", + "SWL", + "Watcom-1.0", + "Symlinks", + "TAPR-OHL-1.0", + "SSH-short", + "TCL", + "TCP-wrappers", + "TPL-1.0", + "HTMLTIDY", + "TMate", + "TORQUE-1.1", + "TOSL", + "TPDL", + "TTWL", + "TU-Berlin-1.0", + "TU-Berlin-2.0", + "u-boot-exception-2.0", + "UCAR", + "UCL-1.0", + "Unicode-DFS-2015", + "Unicode-DFS-2016", + "Unicode-TOU", + "Universal-FOSS-exception-1.0", + "Unlicense", + "NCSA", + "UPL-1.0", + "Linux-man-pages-copyleft", + "Vim", + "VOSTROM", + "VSL-1.0", + "W3C", + "W3C-19980720", + "W3C-20150513", + "w3m", + "Wsuipa", + "WTFPL", + "wxWindows", + "WxWindows-exception-3.1", + "ICU", + "Bitstream-Charter", + "X11-distribute-modifications-variant", + "HPND-sell-variant", + "MIT-open-group", + "libtiff", + "X11", + "XFree86-1.1", + "xinetd", + "Xnet", + "XSkat", + "YPL-1.0", + "YPL-1.1", + "Zed", + "Zend-2.0", + "Zimbra-1.3", + "Zimbra-1.4", + "Zlib", + "zlib-acknowledgement", + "ZPL-1.1", + "ZPL-2.0", + "ZPL-2.1", + "AGPL3", + "Apache", + "Artistic2.0", + "Boost", + "CCPL", + "CDDL", + "CPL", + "EPL", + "FDL1.2", + "FDL1.3", + "GPL2", + "GPL3", + "LGPL2.1", + "LGPL3", + "LPPL", + "MPL", + "MPL2", + "PHP", + "PSF", + "PerlArtistic", + "Unlicense", + "W3C", + "ZPL", + "WTFPL", + "AGPL", + "APACHE", + "FDL", + "GPL", + "LGPL", + "RUBY", + "Ruby", + "custom" + ] + }, + { + "type": "string", + "pattern": "custom:.+" + } + ] }, "uniqueItems": true }, @@ -102,64 +714,56 @@ "type": "array", "items": { "type": "string" - }, - "uniqueItems": true + } 
}, "md5sums": { "description": "Alternative integrity checks that makepkg supports; these all behave similar to the cksums option described above. To enable use and generation of these checksums, be sure to set up the INTEGRITY_CHECK option in **makepkg.conf**(5).", "type": "array", "items": { "type": "string" - }, - "uniqueItems": true + } }, "sha1sums": { "description": "Alternative integrity checks that makepkg supports; these all behave similar to the cksums option described above. To enable use and generation of these checksums, be sure to set up the INTEGRITY_CHECK option in **makepkg.conf**(5).", "type": "array", "items": { "type": "string" - }, - "uniqueItems": true + } }, "sha224sums": { "description": "Alternative integrity checks that makepkg supports; these all behave similar to the cksums option described above. To enable use and generation of these checksums, be sure to set up the INTEGRITY_CHECK option in **makepkg.conf**(5).", "type": "array", "items": { "type": "string" - }, - "uniqueItems": true + } }, "sha256sums": { "description": "Alternative integrity checks that makepkg supports; these all behave similar to the cksums option described above. To enable use and generation of these checksums, be sure to set up the INTEGRITY_CHECK option in **makepkg.conf**(5).", "type": "array", "items": { "type": "string" - }, - "uniqueItems": true + } }, "sha384sums": { "description": "Alternative integrity checks that makepkg supports; these all behave similar to the cksums option described above. To enable use and generation of these checksums, be sure to set up the INTEGRITY_CHECK option in **makepkg.conf**(5).", "type": "array", "items": { "type": "string" - }, - "uniqueItems": true + } }, "sha512sums": { "description": "Alternative integrity checks that makepkg supports; these all behave similar to the cksums option described above. To enable use and generation of these checksums, be sure to set up the INTEGRITY_CHECK option in **makepkg.conf**(5).", "type": "array", "items": { "type": "string" - }, - "uniqueItems": true + } }, "b2sums": { "description": "Alternative integrity checks that makepkg supports; these all behave similar to the cksums option described above. To enable use and generation of these checksums, be sure to set up the INTEGRITY_CHECK option in **makepkg.conf**(5).", "type": "array", "items": { "type": "string" - }, - "uniqueItems": true + } }, "groups": { "description": "An array of symbolic names that represent groups of packages, allowing you to install multiple packages by requesting a single target. For example, one could install all KDE packages by installing the *kde* group.", @@ -173,7 +777,20 @@ "description": "Defines on which architectures the given package is available (e.g., arch=(i686 x86_64)). Packages that contain no architecture specific files should use arch=(any). Valid characters for members of this array are alphanumerics and \"\\_\".", "type": "array", "items": { - "type": "string" + "type": "string", + "enum": [ + "any", + "pentium4", + "i486", + "i686", + "x86_64", + "x86_64_v3", + "arm", + "armv6h", + "armv7h", + "armv8", + "aarch64" + ] }, "uniqueItems": true }, @@ -276,6 +893,56 @@ "query": { "description": "(optional) Allows specifying whether a VCS checkout should be checked for PGP-signed revisions. The source line should have the format source=(url#fragment?signed) or source=(url?signed#fragment). 
Currently only supported by Git.", "type": "string" + }, + "mingw_arch": { + "type": "array", + "description": "A list of MSYS2 environments the package is built for. Defaults to an empty list.", + "items": { + "type": "string", + "enum": [ + "mingw32", + "mingw64", + "ucrt64", + "clang64", + "clang32", + "clangarm64" + ] + }, + "uniqueItems": true + }, + "msys2_references": { + "type": "array", + "description": "Maps the package to external resources, such as other package repositories.", + "items": { + "type": "string", + "pattern": "(archlinux|aur|cygwin|cygwin-mingw64|pypi|gentoo|internal)(|: .*)" + }, + "uniqueItems": true + }, + "msys2_changelog_url": { + "type": "string", + "description": "NEWS file in git or the GitHub releases page. In case there are multiple, the one that is more useful for packagers.", + "format": "uri" + }, + "msys2_documentation_url": { + "type": "string", + "description": "URL to the documentation for the API, tools, etc., in case it's a different website than the homepage.", + "format": "uri" + }, + "msys2_repository_url": { + "type": "string", + "description": "URL to the web view of the repository, e.g., on GitHub or GitLab.", + "format": "uri" + }, + "msys2_issue_tracker_url": { + "type": "string", + "description": "URL to the bug tracker, mailing list archive, etc.", + "format": "uri" + }, + "msys2_pgp_keys_url": { + "type": "string", + "description": "URL to a website containing which keys are used to sign releases.", + "format": "uri" } } } diff --git a/src/termux_language_server/assets/json/build.sh.json b/src/termux_language_server/assets/json/build.sh.json index a398b1f..cbe98d3 100644 --- a/src/termux_language_server/assets/json/build.sh.json +++ b/src/termux_language_server/assets/json/build.sh.json @@ -22,7 +22,8 @@ }, "TERMUX_PKG_LICENSE": { "type": "string", - "description": "Package license." 
+ "description": "Package license.", + "pattern": "(389-exception|Abstyles|CDL-1.0|DOC|GNAT-exception|AdaCore-doc|APL-1.0|Adobe-Glyph|Adobe-2006|ADSL|AFL-1.1|AFL-1.2|AFL-2.0|AFL-2.1|AFL-3.0|Afmparse|Aladdin|AGPL-1.0-only|AGPL-1.0-or-later|AGPL-3.0-only|AGPL-3.0-or-later|Giftware|LicenseRef-.amazon.com.-AmznSL-1.0|AMDPLPA|AML|AMPAS|ANTLR-PD|ANTLR-PD-fallback|Apache-1.0|Apache-1.1|Apache-2.0|APAFML|App-s2p|Swift-exception|APSL-1.0|APSL-1.1|APSL-1.2|APSL-2.0|Arphic-1999|Artistic-1.0|Artistic-1.0-cl8|Artistic-2.0|ClArtistic|Artistic-1.0-Perl|AAL|Autoconf-exception-2.0|Autoconf-exception-3.0|Autoconf-exception-generic|Baekmuk|Bahyph|Barr|Beerware|Bison-exception-2.2|Bitstream-Vera|BitTorrent-1.0|BitTorrent-1.1|blessing|BlueOak-1.0.0|BSL-1.0|Bootloader-exception|Borceux|Brian-Gladman-3-Clause|BSD-1-Clause|BSD-2-Clause-Views|BSD-3-Clause-No-Military-License|BSD-3-Clause-No-Nuclear-Warranty|BSD-3-Clause-Open-MPI|BSD-4-Clause-Shortened|BSD-3-Clause-Attribution|BSD-Advertising-Acknowledgement|BSD-3-Clause|BSD-4-Clause|BSD-4-Clause-UC|BSD-4.3RENO|BSD-2-Clause-Patent|BSD-Protection|BSD-2-Clause|BSD-Source-Code|0BSD|BUSL-1.1|BSD-4.3TAHOE|bzip2-1.0.6|C-UDA-1.0|CATOSL-1.1|CAL-1.0|CAL-1.0-Combined-Work-Exception|Caldera|OGL-Canada-2.0|CMU-Mach|CC-BY-1.0|CC-BY-2.0|CC-BY-2.5|CC-BY-2.5-AU|CC-BY-3.0|CC-BY-3.0-AT|CC-BY-3.0-DE|CC-BY-3.0-IGO|CC-BY-3.0-NL|CC-BY-3.0-US|CC-BY-4.0|CC-BY-NC-1.0|CC-BY-NC-2.0|CC-BY-NC-2.5|CC-BY-NC-3.0|CC-BY-NC-3.0-DE|CC-BY-NC-4.0|CC-BY-NC-ND-1.0|CC-BY-NC-ND-2.0|CC-BY-NC-ND-2.5|CC-BY-NC-ND-3.0|CC-BY-NC-ND-3.0-DE|CC-BY-NC-ND-3.0-IGO|CC-BY-NC-ND-4.0|CC-BY-NC-SA-1.0|CC-BY-NC-SA-2.0|CC-BY-NC-SA-2.0-DE|CC-BY-NC-SA-2.0-FR|CC-BY-NC-SA-2.0-UK|CC-BY-NC-SA-2.5|CC-BY-NC-SA-3.0|CC-BY-NC-SA-3.0-DE|CC-BY-NC-SA-3.0-IGO|CC-BY-NC-SA-4.0|CC-BY-ND-1.0|CC-BY-ND-2.0|CC-BY-ND-2.5|CC-BY-ND-3.0|CC-BY-ND-3.0-DE|CC-BY-ND-4.0|CC-BY-SA-1.0|CC-BY-SA-2.0|CC-BY-SA-2.0-UK|CC-BY-SA-2.1-JP|CC-BY-SA-2.5|CC-BY-SA-3.0|CC-BY-SA-3.0-AT|CC-BY-SA-3.0-DE|CC-BY-SA-4.0|CC-PDDC|CC0-1.0|CDDL-1.0|CDDL-1.1|CDLA-Permissive-1.0|CDLA-Permissive-2.0|CDLA-Sharing-1.0|CECILL-1.0|CECILL-1.1|CECILL-2.0|CECILL-2.1|CECILL-B|CECILL-C|CERN-OHL-1.1|CERN-OHL-1.2|CERN-OHL-P-2.0|CERN-OHL-S-2.0|CERN-OHL-W-2.0|CFITSIO|checkmk|Classpath-exception-2.0|BSD-3-Clause-Clear|Clips|CLISP-exception-2.0|BSD-Attribution-HPND-disclaimer|MIT-CMU|CNRI-Jython|CNRI-Python|CNRI-Python-GPL-Compatible|COIL-1.0|Condor-1.1|copyleft-next-0.3.0|copyleft-next-0.3.1|Cornell-Lossless-JPEG|CPAL-1.0|CPL-1.0|CPOL-1.02|Crossword|CrystalStacker|Community-Spec-1.0|CUA-OPL-1.0|Cube|curl|D-FSL-1.0|diffmark|DigiRule-FOSS-exception|DL-DE-BY-2.0|Plexus|Dotseqn|DRL-1.0|DSDP|dvipdfm|ECL-1.0|ECL-2.0|eCos-2.0|eCos-exception-2.0|RHeCos-1.1|EFL-1.0|EFL-2.0|eGenix|Elastic-2.0|MIT-advertising|MIT-enna|Entessa|EPICS|EPL-1.0|EPL-2.0|ErlPL-1.1|etalab-2.0|EUDatagrid|EUPL-1.0|EUPL-1.1|EUPL-1.2|Eurosym|Fair|OML|Fawkes-Runtime-exception|FLTK-exception|Font-exception-2.0|Frameworx-1.0|FDK-AAC|FreeBSD-DOC|FreeImage|freertos-exception-2.0|FTL|FSFAP|FSFUL|FSFULLR|FSFULLRWD|GCC-exception-3.1|GCC-exception-2.0|GFDL-1.1-only|GFDL-1.1-invariants-only|GFDL-1.1-invariants-or-later|GFDL-1.1-no-invariants-only|GFDL-1.1-no-invariants-or-later|GFDL-1.1-or-later|GFDL-1.2-only|GFDL-1.2-invariants-only|GFDL-1.2-invariants-or-later|GFDL-1.2-no-invariants-only|GFDL-1.2-no-invariants-or-later|GFDL-1.2-or-later|GFDL-1.3-only|GFDL-1.3-invariants-only|GFDL-1.3-invariants-or-later|GFDL-1.3-no-invariants-only|GFDL-1.3-no-invariants-or-later|GFDL-1.3-or-later|GL2PS|Glide|Glulxe|GLWTPL|gnu-javamail-exception|gnuplot|GPL-1.0-only|
GPL-1.0-or-later|GPL-2.0-only|GPL-2.0-with-autoconf-exception|GPL-2.0-with-classpath-exception|GPL-2.0-with-font-exception|GPL-2.0-with-GCC-exception|GPL-2.0-or-later|GPL-3.0-only|GPL-3.0-with-autoconf-exception|GPL-3.0-with-GCC-exception|GPL-3.0-linking-exception|GPL-3.0-linking-source-exception|GPL-3.0-or-later|GPL-CC-1.0|Graphics-Gems|gSOAP-1.3b|GStreamer-exception-2005|GStreamer-exception-2008|HaskellReport|Spencer-99|Hippocratic-2.1|HPND|HPND-export-US|HPND-sell-variant-MIT-disclaimer|Spencer-94|i2p-gpl-java-exception|IBM-pibs|IPL-1.0|IEC-Code-Components-EULA|IJG|IJG-short|ImageMagick|Imlib2|xpp|Info-ZIP|QPL-1.0-INRIA-2004-exception|Intel-ACPI|Intel|Interbase-1.0|IPA|ISC|Jam|JasPer-2.0|JPL-image|JPNIC|JSON|Kazlib|KiCad-libraries-exception|Knuth-CTAN|LAL-1.2|LAL-1.3|Latex2e|BSD-3-Clause-LBNL|Leptonica|LGPL-2.0-only|LGPL-2.0-or-later|LGPL-2.1-only|LGPL-2.1-or-later|LGPL-3.0-only|LGPL-3.0-linking-exception|LGPL-3.0-or-later|LGPLLR|GD|xlock|Libpng|libpng-2.0|Libtool-exception|libutil-David-Nugent|LiLiQ-P-1.1|LiLiQ-R-1.1|LiLiQ-Rplus-1.1|Linux-OpenIB|Linux-syscall-note|LLVM-exception|LOOP|LPPL-1.0|LPPL-1.1|LPPL-1.2|LPPL-1.3a|LPPL-1.3c|LPL-1.0|LPL-1.02|LZMA-exception|LZMA-SDK-9.11-to-9.20|LZMA-SDK-9.22|mplus|MakeIndex|HPND-Markus-Kuhn|Martin-Birgmeier|mif-exception|Minpack|MirOS|MIT|MIT-0|MIT-feh|MIT-Wu|Xerox|MIT-Modern-Variant|MITNFA|NTP|SSPL-1.0|Motosoto|mpi-permissive|mpich2|MPL-1.0|MPL-1.1|MPL-2.0|MPL-2.0-no-copyleft-exception|MS-LPL|MS-PL|MS-RL|MTLL|MulanPSL-1.0|MulanPSL-2.0|Multics|Mup|NAIST-2003|NASA-1.3|Naumen|NBPL-1.0|NCGL-UK-2.0|Net-SNMP|NetCDF|Newsletr|NGPL|NICTA-1.0|NIST-PD|NIST-PD-fallback|NLOD-1.0|NLOD-2.0|NLPL|Nokia|NOSL|NPOSL-3.0|Noweb|NPL-1.0|NPL-1.1|NRL|NTP-0|O-UDA-1.0|OCaml-LGPL-linking-exception|OCCT-exception-1.0|OCCT-PL|OCLC-2.0|ODbL-1.0|ODC-By-1.0|OFFIS|OFL-1.0|OFL-1.0-no-RFN|OFL-1.0-RFN|OFL-1.1|OFL-1.1-no-RFN|OFL-1.1-RFN|OGC-1.0|OGDL-Taiwan-1.0|OGL-UK-1.0|OGL-UK-2.0|OGL-UK-3.0|OPL-1.0|OGTSL|OpenJDK-assembly-exception-1.0|OLDAP-1.1|OLDAP-1.2|OLDAP-1.3|OLDAP-1.4|OLDAP-2.0|OLDAP-2.0.1|OLDAP-2.1|OLDAP-2.2|OLDAP-2.2.1|OLDAP-2.2.2|OLDAP-2.3|OLDAP-2.4|OLDAP-2.5|OLDAP-2.6|OLDAP-2.7|OLDAP-2.8|OpenPBS-2.3|OPUBL-1.0|SSH-OpenSSH|x11vnc-openssl-exception|OpenSSL|openvpn-openssl-exception|BSD-3-Clause-No-Nuclear-License-2014|OSET-PL-2.1|HP-1986|OSL-1.0|OSL-1.1|OSL-2.0|OSL-2.1|OSL-3.0|Parity-6.0.0|Parity-7.0.0|PDDL-1.0|PHP-3.0|PHP-3.01|PolyForm-Noncommercial-1.0.0|PolyForm-Small-Business-1.0.0|PostgreSQL|PS-or-PDF-font-exception-20170817|PSF-2.0|psfrag|psutils|Python-2.0|Python-2.0.1|Qhull|QPL-1.0|QPL-1.0-INRIA-2004|Qt-GPL-exception-1.0|Qt-LGPL-exception-1.1|Qwt-exception-1.0|Rdisc|Spencer-86|BSD-3-Clause-Modification|RSCPL|RPL-1.1|RPL-1.5|RPSL-1.0|RSA-MD|Ruby|SAX-PD|Saxpath|SCEA|SchemeReport|libselinux-1.0|Sendmail|Sendmail-8.23|iMatix|SGI-B-1.1|SGI-B-2.0|SGI-B-1.0|SHL-0.5|SHL-0.51|SHL-2.0|SHL-2.1|SimPL-2.0|Sleepycat|SMPPL|SNIA|snprintf|SPL-1.0|SMLNJ|SugarCRM-1.1.3|BSD-3-Clause-No-Nuclear-License|SISSL|SISSL-1.2|SunPro|SWI-exception|SWL|Watcom-1.0|Symlinks|TAPR-OHL-1.0|SSH-short|TCL|TCP-wrappers|TPL-1.0|HTMLTIDY|TMate|TORQUE-1.1|TOSL|TPDL|TTWL|TU-Berlin-1.0|TU-Berlin-2.0|u-boot-exception-2.0|UCAR|UCL-1.0|Unicode-DFS-2015|Unicode-DFS-2016|Unicode-TOU|Universal-FOSS-exception-1.0|Unlicense|NCSA|UPL-1.0|Linux-man-pages-copyleft|Vim|VOSTROM|VSL-1.0|W3C|W3C-19980720|W3C-20150513|w3m|Wsuipa|WTFPL|wxWindows|WxWindows-exception-3.1|ICU|Bitstream-Charter|X11-distribute-modifications-variant|HPND-sell-variant|MIT-open-group|libtiff|X11|XFree86-1.1|xinetd|Xnet|XSkat|YPL-1.0|YPL-1.1|Zed|Zend-2.
0|Zimbra-1.3|Zimbra-1.4|Zlib|zlib-acknowledgement|ZPL-1.1|ZPL-2.0|ZPL-2.1)(,(389-exception|Abstyles|CDL-1.0|DOC|GNAT-exception|AdaCore-doc|APL-1.0|Adobe-Glyph|Adobe-2006|ADSL|AFL-1.1|AFL-1.2|AFL-2.0|AFL-2.1|AFL-3.0|Afmparse|Aladdin|AGPL-1.0-only|AGPL-1.0-or-later|AGPL-3.0-only|AGPL-3.0-or-later|Giftware|LicenseRef-.amazon.com.-AmznSL-1.0|AMDPLPA|AML|AMPAS|ANTLR-PD|ANTLR-PD-fallback|Apache-1.0|Apache-1.1|Apache-2.0|APAFML|App-s2p|Swift-exception|APSL-1.0|APSL-1.1|APSL-1.2|APSL-2.0|Arphic-1999|Artistic-1.0|Artistic-1.0-cl8|Artistic-2.0|ClArtistic|Artistic-1.0-Perl|AAL|Autoconf-exception-2.0|Autoconf-exception-3.0|Autoconf-exception-generic|Baekmuk|Bahyph|Barr|Beerware|Bison-exception-2.2|Bitstream-Vera|BitTorrent-1.0|BitTorrent-1.1|blessing|BlueOak-1.0.0|BSL-1.0|Bootloader-exception|Borceux|Brian-Gladman-3-Clause|BSD-1-Clause|BSD-2-Clause-Views|BSD-3-Clause-No-Military-License|BSD-3-Clause-No-Nuclear-Warranty|BSD-3-Clause-Open-MPI|BSD-4-Clause-Shortened|BSD-3-Clause-Attribution|BSD-Advertising-Acknowledgement|BSD-3-Clause|BSD-4-Clause|BSD-4-Clause-UC|BSD-4.3RENO|BSD-2-Clause-Patent|BSD-Protection|BSD-2-Clause|BSD-Source-Code|0BSD|BUSL-1.1|BSD-4.3TAHOE|bzip2-1.0.6|C-UDA-1.0|CATOSL-1.1|CAL-1.0|CAL-1.0-Combined-Work-Exception|Caldera|OGL-Canada-2.0|CMU-Mach|CC-BY-1.0|CC-BY-2.0|CC-BY-2.5|CC-BY-2.5-AU|CC-BY-3.0|CC-BY-3.0-AT|CC-BY-3.0-DE|CC-BY-3.0-IGO|CC-BY-3.0-NL|CC-BY-3.0-US|CC-BY-4.0|CC-BY-NC-1.0|CC-BY-NC-2.0|CC-BY-NC-2.5|CC-BY-NC-3.0|CC-BY-NC-3.0-DE|CC-BY-NC-4.0|CC-BY-NC-ND-1.0|CC-BY-NC-ND-2.0|CC-BY-NC-ND-2.5|CC-BY-NC-ND-3.0|CC-BY-NC-ND-3.0-DE|CC-BY-NC-ND-3.0-IGO|CC-BY-NC-ND-4.0|CC-BY-NC-SA-1.0|CC-BY-NC-SA-2.0|CC-BY-NC-SA-2.0-DE|CC-BY-NC-SA-2.0-FR|CC-BY-NC-SA-2.0-UK|CC-BY-NC-SA-2.5|CC-BY-NC-SA-3.0|CC-BY-NC-SA-3.0-DE|CC-BY-NC-SA-3.0-IGO|CC-BY-NC-SA-4.0|CC-BY-ND-1.0|CC-BY-ND-2.0|CC-BY-ND-2.5|CC-BY-ND-3.0|CC-BY-ND-3.0-DE|CC-BY-ND-4.0|CC-BY-SA-1.0|CC-BY-SA-2.0|CC-BY-SA-2.0-UK|CC-BY-SA-2.1-JP|CC-BY-SA-2.5|CC-BY-SA-3.0|CC-BY-SA-3.0-AT|CC-BY-SA-3.0-DE|CC-BY-SA-4.0|CC-PDDC|CC0-1.0|CDDL-1.0|CDDL-1.1|CDLA-Permissive-1.0|CDLA-Permissive-2.0|CDLA-Sharing-1.0|CECILL-1.0|CECILL-1.1|CECILL-2.0|CECILL-2.1|CECILL-B|CECILL-C|CERN-OHL-1.1|CERN-OHL-1.2|CERN-OHL-P-2.0|CERN-OHL-S-2.0|CERN-OHL-W-2.0|CFITSIO|checkmk|Classpath-exception-2.0|BSD-3-Clause-Clear|Clips|CLISP-exception-2.0|BSD-Attribution-HPND-disclaimer|MIT-CMU|CNRI-Jython|CNRI-Python|CNRI-Python-GPL-Compatible|COIL-1.0|Condor-1.1|copyleft-next-0.3.0|copyleft-next-0.3.1|Cornell-Lossless-JPEG|CPAL-1.0|CPL-1.0|CPOL-1.02|Crossword|CrystalStacker|Community-Spec-1.0|CUA-OPL-1.0|Cube|curl|D-FSL-1.0|diffmark|DigiRule-FOSS-exception|DL-DE-BY-2.0|Plexus|Dotseqn|DRL-1.0|DSDP|dvipdfm|ECL-1.0|ECL-2.0|eCos-2.0|eCos-exception-2.0|RHeCos-1.1|EFL-1.0|EFL-2.0|eGenix|Elastic-2.0|MIT-advertising|MIT-enna|Entessa|EPICS|EPL-1.0|EPL-2.0|ErlPL-1.1|etalab-2.0|EUDatagrid|EUPL-1.0|EUPL-1.1|EUPL-1.2|Eurosym|Fair|OML|Fawkes-Runtime-exception|FLTK-exception|Font-exception-2.0|Frameworx-1.0|FDK-AAC|FreeBSD-DOC|FreeImage|freertos-exception-2.0|FTL|FSFAP|FSFUL|FSFULLR|FSFULLRWD|GCC-exception-3.1|GCC-exception-2.0|GFDL-1.1-only|GFDL-1.1-invariants-only|GFDL-1.1-invariants-or-later|GFDL-1.1-no-invariants-only|GFDL-1.1-no-invariants-or-later|GFDL-1.1-or-later|GFDL-1.2-only|GFDL-1.2-invariants-only|GFDL-1.2-invariants-or-later|GFDL-1.2-no-invariants-only|GFDL-1.2-no-invariants-or-later|GFDL-1.2-or-later|GFDL-1.3-only|GFDL-1.3-invariants-only|GFDL-1.3-invariants-or-later|GFDL-1.3-no-invariants-only|GFDL-1.3-no-invariants-or-later|GFDL-1.3-or-later|GL2PS|Glide|Glulxe|GLWTPL|gnu-javamail-except
ion|gnuplot|GPL-1.0-only|GPL-1.0-or-later|GPL-2.0-only|GPL-2.0-with-autoconf-exception|GPL-2.0-with-classpath-exception|GPL-2.0-with-font-exception|GPL-2.0-with-GCC-exception|GPL-2.0-or-later|GPL-3.0-only|GPL-3.0-with-autoconf-exception|GPL-3.0-with-GCC-exception|GPL-3.0-linking-exception|GPL-3.0-linking-source-exception|GPL-3.0-or-later|GPL-CC-1.0|Graphics-Gems|gSOAP-1.3b|GStreamer-exception-2005|GStreamer-exception-2008|HaskellReport|Spencer-99|Hippocratic-2.1|HPND|HPND-export-US|HPND-sell-variant-MIT-disclaimer|Spencer-94|i2p-gpl-java-exception|IBM-pibs|IPL-1.0|IEC-Code-Components-EULA|IJG|IJG-short|ImageMagick|Imlib2|xpp|Info-ZIP|QPL-1.0-INRIA-2004-exception|Intel-ACPI|Intel|Interbase-1.0|IPA|ISC|Jam|JasPer-2.0|JPL-image|JPNIC|JSON|Kazlib|KiCad-libraries-exception|Knuth-CTAN|LAL-1.2|LAL-1.3|Latex2e|BSD-3-Clause-LBNL|Leptonica|LGPL-2.0-only|LGPL-2.0-or-later|LGPL-2.1-only|LGPL-2.1-or-later|LGPL-3.0-only|LGPL-3.0-linking-exception|LGPL-3.0-or-later|LGPLLR|GD|xlock|Libpng|libpng-2.0|Libtool-exception|libutil-David-Nugent|LiLiQ-P-1.1|LiLiQ-R-1.1|LiLiQ-Rplus-1.1|Linux-OpenIB|Linux-syscall-note|LLVM-exception|LOOP|LPPL-1.0|LPPL-1.1|LPPL-1.2|LPPL-1.3a|LPPL-1.3c|LPL-1.0|LPL-1.02|LZMA-exception|LZMA-SDK-9.11-to-9.20|LZMA-SDK-9.22|mplus|MakeIndex|HPND-Markus-Kuhn|Martin-Birgmeier|mif-exception|Minpack|MirOS|MIT|MIT-0|MIT-feh|MIT-Wu|Xerox|MIT-Modern-Variant|MITNFA|NTP|SSPL-1.0|Motosoto|mpi-permissive|mpich2|MPL-1.0|MPL-1.1|MPL-2.0|MPL-2.0-no-copyleft-exception|MS-LPL|MS-PL|MS-RL|MTLL|MulanPSL-1.0|MulanPSL-2.0|Multics|Mup|NAIST-2003|NASA-1.3|Naumen|NBPL-1.0|NCGL-UK-2.0|Net-SNMP|NetCDF|Newsletr|NGPL|NICTA-1.0|NIST-PD|NIST-PD-fallback|NLOD-1.0|NLOD-2.0|NLPL|Nokia|NOSL|NPOSL-3.0|Noweb|NPL-1.0|NPL-1.1|NRL|NTP-0|O-UDA-1.0|OCaml-LGPL-linking-exception|OCCT-exception-1.0|OCCT-PL|OCLC-2.0|ODbL-1.0|ODC-By-1.0|OFFIS|OFL-1.0|OFL-1.0-no-RFN|OFL-1.0-RFN|OFL-1.1|OFL-1.1-no-RFN|OFL-1.1-RFN|OGC-1.0|OGDL-Taiwan-1.0|OGL-UK-1.0|OGL-UK-2.0|OGL-UK-3.0|OPL-1.0|OGTSL|OpenJDK-assembly-exception-1.0|OLDAP-1.1|OLDAP-1.2|OLDAP-1.3|OLDAP-1.4|OLDAP-2.0|OLDAP-2.0.1|OLDAP-2.1|OLDAP-2.2|OLDAP-2.2.1|OLDAP-2.2.2|OLDAP-2.3|OLDAP-2.4|OLDAP-2.5|OLDAP-2.6|OLDAP-2.7|OLDAP-2.8|OpenPBS-2.3|OPUBL-1.0|SSH-OpenSSH|x11vnc-openssl-exception|OpenSSL|openvpn-openssl-exception|BSD-3-Clause-No-Nuclear-License-2014|OSET-PL-2.1|HP-1986|OSL-1.0|OSL-1.1|OSL-2.0|OSL-2.1|OSL-3.0|Parity-6.0.0|Parity-7.0.0|PDDL-1.0|PHP-3.0|PHP-3.01|PolyForm-Noncommercial-1.0.0|PolyForm-Small-Business-1.0.0|PostgreSQL|PS-or-PDF-font-exception-20170817|PSF-2.0|psfrag|psutils|Python-2.0|Python-2.0.1|Qhull|QPL-1.0|QPL-1.0-INRIA-2004|Qt-GPL-exception-1.0|Qt-LGPL-exception-1.1|Qwt-exception-1.0|Rdisc|Spencer-86|BSD-3-Clause-Modification|RSCPL|RPL-1.1|RPL-1.5|RPSL-1.0|RSA-MD|Ruby|SAX-PD|Saxpath|SCEA|SchemeReport|libselinux-1.0|Sendmail|Sendmail-8.23|iMatix|SGI-B-1.1|SGI-B-2.0|SGI-B-1.0|SHL-0.5|SHL-0.51|SHL-2.0|SHL-2.1|SimPL-2.0|Sleepycat|SMPPL|SNIA|snprintf|SPL-1.0|SMLNJ|SugarCRM-1.1.3|BSD-3-Clause-No-Nuclear-License|SISSL|SISSL-1.2|SunPro|SWI-exception|SWL|Watcom-1.0|Symlinks|TAPR-OHL-1.0|SSH-short|TCL|TCP-wrappers|TPL-1.0|HTMLTIDY|TMate|TORQUE-1.1|TOSL|TPDL|TTWL|TU-Berlin-1.0|TU-Berlin-2.0|u-boot-exception-2.0|UCAR|UCL-1.0|Unicode-DFS-2015|Unicode-DFS-2016|Unicode-TOU|Universal-FOSS-exception-1.0|Unlicense|NCSA|UPL-1.0|Linux-man-pages-copyleft|Vim|VOSTROM|VSL-1.0|W3C|W3C-19980720|W3C-20150513|w3m|Wsuipa|WTFPL|wxWindows|WxWindows-exception-3.1|ICU|Bitstream-Charter|X11-distribute-modifications-variant|HPND-sell-variant|MIT-open-group|libtiff|X11|XFree86-1.1|xinetd|Xnet|XSkat|YP
L-1.0|YPL-1.1|Zed|Zend-2.0|Zimbra-1.3|Zimbra-1.4|Zlib|zlib-acknowledgement|ZPL-1.1|ZPL-2.0|ZPL-2.1))*" }, "TERMUX_PKG_LICENSE_FILE": { "type": "string", @@ -174,7 +175,7 @@ "false" ] }, - "TERMUX_PKG_BLACKLISTED_ARCHES": { + "TERMUX_PKG_EXCLUDED_ARCHES": { "type": "string", "description": "Comma-separated list of CPU architectures for which package cannot be compiled.", "pattern": "[a-z][a-z0-9-]*(, [a-z][a-z0-9-]*)*" diff --git a/src/termux_language_server/assets/json/devscripts.conf.json b/src/termux_language_server/assets/json/devscripts.conf.json new file mode 100644 index 0000000..0768e96 --- /dev/null +++ b/src/termux_language_server/assets/json/devscripts.conf.json @@ -0,0 +1,634 @@ +{ + "$id": "https://github.com/termux/termux-language-server/blob/main/src/termux_language_server/assets/json/devscripts.conf.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "$comment": "/etc/devscripts.conf", + "type": "object", + "properties": { + "DEVSCRIPTS_CHECK_DIRNAME_LEVEL": { + "type": "string", + "enum": ["0", "1", "2"], + "default": "1", + "description": "Several programs check the directory name and refuse to function if it does not match the name of the package being worked on. (The details are described in the individual manpages.) These two variables control this behaviour, corresponding to the --check-dirname-level and --check-dirname-regex command line options. The possible values of DEVSCRIPTS_CHECK_DIRNAME_LEVEL are:\n 0 never check the directory name\n 1 check the directory name only if the program has changed directory\n 2 always check the directory name" + }, + "DEVSCRIPTS_CHECK_DIRNAME_REGEX": { + "type": "string", + "format": "regex", + "default": "PACKAGE(-.+)?", + "description": "The variable DEVSCRIPTS_CHECK_DIRNAME_REGEX is a Perl regex which defines what is considered a valid directory name for the source package PACKAGE; if it includes a '/', then it must match the full directory path, otherwise it must match the full directory name" + }, + "BTS_OFFLINE": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Default bts show/bugs to run in offline mode?" + }, + "BTS_CACHE": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Cache all visited bug reports once a cache has been established for the first time?" + }, + "BTS_CACHE_MODE": { + "type": "string", + "enum": ["min", "mbox", "full"], + "default": "min", + "description": "How much to mirror when caching? The minimal amount (min), the mbox version as well (mbox) or the whole works (full)?" + }, + "BTS_FORCE_REFRESH": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Always refresh the cache, even if nothing's changed?" + }, + "BTS_MAIL_READER": { + "type": "string", + "default": "mutt -f %s", + "description": "How do we read an mbox? This will be split on whitespace, then %s is replaced by the mbox name and %% by a single %." + }, + "BTS_SENDMAIL_COMMAND": { + "type": "string", + "default": "/usr/sbin/sendmail", + "description": "What sendmail command do we use? This will be split on whitespace." + }, + "BTS_ONLY_NEW": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Download only new bugs when caching? If set to yes, don't check for updates in bugs we already have." + }, + "BTS_SMTP_HOST": { + "type": "string", + "default": "reportbug.debian.org", + "description": "Which SMTP host should be used? 
Note that if both an SMTP host and sendmail command are specified in the configuration file(s), the SMTP host will be used unless overridden by --sendmail on the command line" + }, + "BTS_SMTP_AUTH_USERNAME": { + "type": "string", + "default": "user", + "description": "If the SMTP host specified above requires authentication, the following options may be used to specify the username and password to use. If only a username is provided then the password will be prompted for before sending the e-mail" + }, + "BTS_SMTP_AUTH_PASSWORD": { + "type": "string", + "default": "pass", + "description": "If the SMTP host specified above requires authentication, the following options may be used to specify the username and password to use. If only a username is provided then the password will be prompted for before sending the e-mail" + }, + "BTS_SMTP_HELO": { + "type": "string", + "default": "foo.example.com", + "description": "Specify a HELO to use when connecting to the SMTP host. If not supplied and the file /etc/mailname exists, its contents will be used as the HELO" + }, + "BTS_INCLUDE_RESOLVED": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Include resolved bugs when caching?" + }, + "BTS_SUPPRESS_ACKS": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Suppress BTS acknowledgment e-mails (ignored by the control bot)" + }, + "BTS_INTERACTIVE": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Allow the generated message to be edited and, if necessary, abandoned before sending it to the control bot?\nIf set to yes, prompt for confirmation / edit / abandonment.\nIf set to force, spawn an editor and then proceed as if set to yes" + }, + "BTS_DEFAULT_CC": { + "type": "string", + "default": "example@example.com", + "description": "Specify a list of e-mail addresses to which a carbon copy of the generated e-mail to the control bot should automatically be sent." + }, + "BTS_SERVER": { + "type": "string", + "default": "https://bugs.debian.org", + "description": "Which debbugs server should be used?" + }, + "DEBCHANGE_PRESERVE": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Preserve the source tree dirname if the upstream version changes?" + }, + "DEBCHANGE_QUERY_BTS": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Query the BTS when --closes is being used?" + }, + "DEBCHANGE_RELEASE_HEURISTIC": { + "type": "string", + "enum": ["log", "changelog"], + "default": "log", + "description": "Select a heuristic to use to determine whether the package has released. See the debchange man page for details." + }, + "DEBCHANGE_MULTIMAINT": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Introduce multiple-maintainer markers in changelog sections?" + }, + "DEBCHANGE_MULTIMAINT_MERGE": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "When appending to a multiple-maintainer changelog, if there are existing changes made by the current maintainer, should new changelog entries be appended to the existing entries?" + }, + "DEBCHANGE_MAINTTRAILER": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "When appending entries to the changelog, should the trailer line be maintained as-is?" + }, + "DEBCHANGE_TZ": { + "type": "string", + "default": "UTC", + "description": "Use a fixed timezone in changelog entries?" 
+ }, + "DEBCHANGE_LOWER_VERSION_PATTERN": { + "type": "string", + "default": "bpo", + "description": "Allow a new version to be lower than the current package version if the new version matches the specified regular expression" + }, + "DEBCHANGE_AUTO_NMU": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Attempt to automatically determine whether the current changelog stanza represents an NMU?" + }, + "DEBCHANGE_FORCE_SAVE_ON_RELEASE": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "When --release was used and an editor presented, force the changelog to be explicitly saved in the editor? If this is set to \"no\" then the changes made by --release will be automatically saved." + }, + "DEBCHECKOUT_AUTH_URLS": { + "type": "string", + "default": "", + "description": "List of space-separated pairs REGEXP/REPLACEMENT_TEXT to define custom rules to enable authenticated mode." + }, + "DEBCHECKOUT_SOURCE": { + "type": "string", + "default": "auto", + "description": "For debian-dir-only repositories, also retrieve the source package, unpack it, and move the missing files over." + }, + "DEBCHECKOUT_USER": { + "type": "string", + "default": "", + "description": "Username for authenticated mode, can be overridden with -u|--user." + }, + "DEBCLEAN_CLEANDEBS": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Remove .deb, .changes, .dsc and .upload files?" + }, + "DEBCOMMIT_STRIP_MESSAGE": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Strip a leading \"* \" from commit messages taken from changelogs?" + }, + "DEBCOMMIT_SIGN_TAGS": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Sign created tags using gnupg?" + }, + "DEBCOMMIT_RELEASE_USE_CHANGELOG": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Take any uncommitted changes in the changelog in to account when determining the commit message for a release?" + }, + "DEBCOMMIT_SIGN_COMMITS": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Sign commits using gnupg?" + }, + "DEBDIFF_DIRS": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Show directory names which appear in the filelist?" + }, + "DEBDIFF_CONTROL": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Compare control files?" + }, + "DEBDIFF_CONTROLFILES": { + "type": "string", + "default": "control", + "description": "Which control files to compare? A comma-separated list, with possibilities such as postinst, config and so on; ALL means compare all control files." + }, + "DEBDIFF_SHOW_MOVED": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Show files which have moved between .debs?" + }, + "DEBDIFF_WDIFF_OPT": { + "type": "string", + "default": "", + "description": "Option to pass to wdiff" + }, + "DEBDIFF_SHOW_DIFFSTAT": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Include the output of diffstat?" + }, + "DEBDIFF_WDIFF_SOURCE_CONTROL": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Compare control files in source packages using wdiff?" + }, + "DEBDIFF_AUTO_VER_SORT": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Always compare package in version order, rather than the order specified on the command line?" 
+ }, + "DEBDIFF_UNPACK_TARBALLS": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Unpack tarballs found in the top level source directory." + }, + "DEBDIFF_APPLY_PATCHES": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Apply patches when comparing 3.0 (quilt)-format packages" + }, + "DEBRELEASE_UPLOADER": { + "type": "string", + "enum": ["dupload", "dput"], + "default": "dupload", + "description": "This specifies which uploader program to use. As of devscripts 2.22.1ubuntu1 the recognised values are \"dupload\" (default) and \"dput\". Check the debrelease(1) manpage for any recent changes to this variable" + }, + "DEBRELEASE_DEBS_DIR": { + "type": "string", + "default": "..", + "description": "This specifies the directory, relative to the top of the source tree, in which the .changes and .debs files are to be found. Note that this also affects debc and debi." + }, + "DEBSIGN_ALWAYS_RESIGN": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Always re-sign files even if they are already signed, without prompting." + }, + "DEBSIGN_PROGRAM": { + "type": "string", + "default": "", + "description": "Which signing program to use? gpg and pgp are the usual values; the default is determined as described in the manpage. Corresponds to -p option" + }, + "DEBSIGN_SIGNLIKE": { + "type": "string", + "default": "", + "description": "How the signing program works; must be either gpg or pgp as of devscripts version 2.22.1ubuntu1. The default is described in the manpage. Corresponds to -sgpg and -spgp." + }, + "DEBSIGN_MAINT": { + "type": "string", + "default": "", + "description": "Maintainer name (only used to determine GPG keyid; -m option)" + }, + "DEBSIGN_KEYID": { + "type": "string", + "default": "", + "description": "GPG keyid to use (-k option)" + }, + "DEBSNAP_DESTDIR": { + "type": "string", + "default": "", + "description": "Where to put the directory named -/ default: source-$package_name if unset" + }, + "DEBSNAP_VERBOSE": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Verbosely show messages" + }, + "DEBSNAP_BASE_URL": { + "type": "string", + "default": "https://snapshot.debian.org", + "description": "The base URL of the archive to download from" + }, + "DEBSNAP_CLEAN_REGEX": { + "type": "string", + "format": "regex", + "default": "s@\\([^/]*\\)/[^/]*/\\(.*\\)@\\1/\\2@", + "description": "A sed regexp to transform pool//f/foo into the desired layout default: make the directory from pool//f/foo to pool/f/foo" + }, + "DEBSNAP_SOURCES_GZ_PATH": { + "type": "string", + "default": "source/Sources.gz", + "description": "Where the Sources.gz lives, subdirectory of DEBSNAP_BASE_URL//" + }, + "DEBUILD_PRESERVE_ENV": { + "type": "string", + "enum": ["yes", "no"], + "default": "no", + "description": "Do we preserve the whole environment except for PATH?" + }, + "DEBUILD_PRESERVE_ENVVARS": { + "type": "string", + "default": "", + "description": "Are there any environment variables we should preserve? This should be a comma-separated list." + }, + "DEBUILD_TGZ_CHECK": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Do we check for the existence of the .orig.tar.gz before calling dpkg-buildpackage?" + }, + "DEBUILD_ROOTCMD": { + "type": "string", + "default": "fakeroot", + "description": "Corresponds to the dpkg-buildpackage -r option." 
+ }, + "DEBUILD_DPKG_BUILDPACKAGE_OPTS": { + "type": "string", + "default": "", + "description": "Extra options given to dpkg-buildpackage before any command-line options specified. Single options containing spaces should be quoted, for example \"-m'Julian Gilbey ' -us -uc\" If this contains a -r, -d or -D option, this will also be recognised when running debuild binary|binary-arch|..." + }, + "DEBUILD_LINTIAN": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Do we run lintian at the end of a full run?" + }, + "DEBUILD_LINTIAN_OPTS": { + "type": "string", + "default": "", + "description": "Extra options given to lintian before any command-line options specified." + }, + "DEBUILD_PREPEND_PATH": { + "type": "string", + "default": "/usr/lib/ccache", + "description": "Colon-separated list of options to be added to the beginning of PATH once it has been sanitised" + }, + "DEBUILD_SIGNING_USERNAME": { + "type": "string", + "default": "user@host", + "description": "Credentials to pass to debrsign when signing dsc / changes files Setting this option to a non-blank string implies using debrsign" + }, + "DEBUILD_DPKG_BUILDPACKAGE_HOOK": { + "type": "string", + "default": "", + "description": "Hooks; see the manpage for details of these" + }, + "DEBUILD_CLEAN_HOOK": { + "type": "string", + "default": "", + "description": "Hooks; see the manpage for details of these" + }, + "DEBUILD_DPKG_SOURCE_HOOK": { + "type": "string", + "default": "", + "description": "Hooks; see the manpage for details of these" + }, + "DEBUILD_BUILD_HOOK": { + "type": "string", + "default": "", + "description": "Hooks; see the manpage for details of these" + }, + "DEBUILD_BINARY_HOOK": { + "type": "string", + "default": "", + "description": "Hooks; see the manpage for details of these" + }, + "DEBUILD_FINAL_CLEAN_HOOK": { + "type": "string", + "default": "", + "description": "Hooks; see the manpage for details of these" + }, + "DEBUILD_LINTIAN_HOOK": { + "type": "string", + "default": "", + "description": "Hooks; see the manpage for details of these" + }, + "DEBUILD_SIGNING_HOOK": { + "type": "string", + "default": "", + "description": "Hooks; see the manpage for details of these" + }, + "DEBUILD_POST_DPKG_BUILDPACKAGE_HOOK": { + "type": "string", + "default": "", + "description": "Hooks; see the manpage for details of these" + }, + "DGET_PATH": { + "type": "string", + "default": "", + "description": "Extra directories to search for files in addition to /var/cache/apt/archives. This is a colon-separated list of directories." + }, + "DGET_UNPACK": { + "type": "string", + "default": "yes", + "enum": ["yes", "no"], + "description": "Unpack downloaded source packages" + }, + "DGET_VERIFY": { + "type": "string", + "default": "yes", + "enum": ["yes", "no"], + "description": "Verify source package signatures using dscverify" + }, + "DPKG_DEPCHECK_OPTIONS": { + "type": "string", + "default": "", + "description": "Extra options given to dpkg-depcheck before any command-line options specified. For example: \"-b --features=-catch-alternatives\"" + }, + "DPKGSIG_KEYID": { + "type": "string", + "default": "", + "description": "This key ID takes precedence over the rest" + }, + "DPKGSIG_SIGN_CHANGES": { + "type": "string", + "default": "auto", + "description": "Do we sign the .changes and .dsc files? See the manpage for more info. Valid options are no, auto, yes, full and force_full." 
+ }, + "DPKGSIG_CACHE_PASS": { + "type": "string", + "default": "auto", + "description": "Do we cache the gpg passphrase by default? This can be dangerous!" + }, + "DSCVERIFY_KEYRINGS": { + "type": "string", + "default": "", + "description": "A colon separated list of extra keyrings to read." + }, + "GREP_EXCUSES_MAINTAINER": { + "type": "string", + "default": "", + "description": "This specifies a default maintainer name or email to hunt for" + }, + "GREP_EXCUSES_FTP_MASTER": { + "type": "string", + "default": "", + "description": "Is this running on ftp-master.debian.org? If so, we use the local excuses file" + }, + "MKBUILDDEPS_TOOL": { + "type": "string", + "default": "/usr/bin/apt-get --no-install-recommends", + "description": "Which tool to use for installing build depends?" + }, + "MKBUILDDEPS_REMOVE_AFTER_INSTALL": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Remove package files after install?" + }, + "MKBUILDDEPS_ROOTCMD": { + "type": "string", + "default": "", + "description": "Tool used to gain root privileges to install the deb" + }, + "NMUDIFF_DELAY": { + "type": "string", + "default": "3", + "pattern": "\\d+", + "description": "Number of days to indicate that an NMU upload has been delayed by using the DELAYED upload queue. 0 indicates no delay. Defaults to \"XX\" which adds a placeholder to the e-mail." + }, + "NMUDIFF_MUTT": { + "type": "string", + "enum": ["yes", "no"], + "default": "yes", + "description": "Should we use mutt to edit and send the message or just a plain old editor?" + }, + "NMUDIFF_NEWREPORT": { + "type": "string", + "default": "maybe", + "enum": ["yes", "no", "maybe"], + "description": "Should we always submit a new report (yes), always send to the bugs which are being closed (no), or send to the bug being closed if there is only one of them, otherwise send a new report (maybe)?" + }, + "PLOTCHANGELOG_OPTIONS": { + "type": "string", + "default": "", + "description": "Command line options to use (space separated). None of the options should contain spaces. Use the PLOTCHANGELOG_GNUPLOT variable for the --gnuplot command line option." + }, + "PLOTCHANGELOG_GNUPLOT": { + "type": "string", + "default": "", + "description": "Here we can give gnuplot options. Any command line --gnuplot commands will be appended to these." + }, + "PTS_UNTIL": { + "type": "string", + "default": "now + 30 days", + "description": "How long will we subscribe for by default? The default is 30 days. Setting this to 'forever' means that no unsubscription request will be scheduled." + }, + "RMADISON_URL_MAP_EXAMPLE": { + "type": "string", + "default": "http://example.com/madison.cgi", + "description": "Add a custom URL to the default list of shorthands so one can use it with -u without having to specify the full URL" + }, + "RMADISON_DEFAULT_URL": { + "type": "string", + "default": "debian", + "description": "Default URL to use if none is specified on the command line." + }, + "RMADISON_ARCHITECTURE": { + "type": "string", + "default": "source,i386,amd64,all", + "description": "Default architecture to use if none is specified on the command line. use --architecture='*' to run an unrestricted query when RMADISON_ARCHITECTURE is set." + }, + "USCAN_DOWNLOAD": { + "type": "string", + "default": "yes", + "enum": ["yes", "no"], + "description": "Should we download newer upstream files we come across?" 
+ }, + "USCAN_PASV": { + "type": "string", + "default": "default", + "enum": ["yes", "no", "default"], + "description": "Should we use FTP PASV mode for ftp:// links? 'default' means let Net::FTP(3) make the choice (primarily based on the FTP_PASSIVE environment variable); 'yes' and 'no' override the default" + }, + "USCAN_SYMLINK": { + "type": "string", + "default": "yes", + "enum": ["yes", "no", "rename", "symlink"], + "description": "Should we create a symlink from the downloaded tar.gz file to pkg_version.orig.tar.gz, rename it like this or do nothing? Options are 'symlink'/'yes', 'rename' or 'no'" + }, + "USCAN_DEHS_OUTPUT": { + "type": "string", + "default": "no", + "enum": ["yes", "no"], + "description": "Should we use DEHS style output (XML format)?" + }, + "USCAN_VERBOSE": { + "type": "string", + "default": "no", + "enum": ["yes", "no"], + "description": "Should we give verbose output?" + }, + "USCAN_USER_AGENT": { + "type": "string", + "default": "Debian uscan X.Y.Z", + "description": "What user agent string should we send with requests? (Default is 'Debian uscan X.Y.Z')" + }, + "USCAN_DESTDIR": { + "type": "string", + "default": "..", + "description": "Where should downloaded files be placed?" + }, + "USCAN_REPACK": { + "type": "string", + "default": "no", + "enum": ["yes", "no"], + "description": "Automatically repack bzipped tar or zip archives to gzipped tars?" + }, + "USCAN_EXCLUSION": { + "type": "string", + "default": "yes", + "enum": ["yes", "no"], + "description": "Use the Files-Excluded field in debian/copyright to determine whether the orig tarball needs to be repacked to remove non-DFSG content?" + }, + "UUPDATE_PRISTINE": { + "type": "string", + "default": "yes", + "enum": ["yes", "no"], + "description": "Should we retain the pristine upstream source wherever possible?" + }, + "UUPDATE_SYMLINK_ORIG": { + "type": "string", + "default": "yes", + "enum": ["yes", "no"], + "description": "Should we symlink the .orig.tar.gz file to its new name or copy it instead? yes=symlink, no=copy" + }, + "UUPDATE_ROOTCMD": { + "type": "string", + "default": "", + "description": "Corresponds to the dpkg-buildpackage -r option and debuild DEBUILD_ROOTCMD option. Normally, this can be left empty, as then the debuild setting will be used." + }, + "WHOUPLOADS_DATE": { + "type": "string", + "default": "no", + "enum": ["yes", "no"], + "description": "Display the date of the upload?" + }, + "WHOUPLOADS_MAXUPLOADS": { + "type": "string", + "default": "3", + "pattern": "\\d+", + "description": "Maximum number of uploads to display per package" + }, + "WHOUPLOADS_KEYRINGS": { + "type": "string", + "default": "/usr/share/keyrings/debian-keyring.gpg:/usr/share/keyrings/debian-keyring.pgp:/usr/share/keyrings/debian-maintainers.gpg:/usr/share/keyrings/debian-nonupload.gpg", + "description": "Colon-separated list of keyrings to examine by default" + } + }, + "patternProperties": { + "DEBUILD_SET_ENVVAR_.*": { + "type": "string", + "description": "How to set a preserved environment variable" + } + } +} diff --git a/src/termux_language_server/assets/json/ebuild.json b/src/termux_language_server/assets/json/ebuild.json index ad7cc2b..2a9a460 100644 --- a/src/termux_language_server/assets/json/ebuild.json +++ b/src/termux_language_server/assets/json/ebuild.json @@ -142,7 +142,8 @@ }, "LICENSE": { "description": "This should be a space delimited list of licenses that the package falls under. This _must_ be set to a matching license in /var/db/repos/gentoo/licenses/. 
If the license does not exist in the repository yet, you must add it first.", - "type": "string" + "type": "string", + "pattern": "(0BSD|2dboy-EULA|9wm|Activision|aczoom|adom|AFL-2.1|AFL-3.0|AGPL-3|AGPL-3+|AIFFWriter.m|Aladdin|Alasir|all-rights-reserved|Allegro|alternate|AMD-GPU-PRO-EULA|amiwm|AMPAS|android|AnyDesk-TOS|Apache-1.0|Apache-1.1|Apache-2.0|Apache-2.0-with-LLVM-exceptions|APL-1.0|APSL-2|aquafont|Arkkra|Arphic|Artistic|Artistic-2|Aseprite-EULA|astrolog|Atmel|Autodesk|Avago|AVASYS|AvP|BAEKMUK|bakoma|bass|battalion|baudline|BEER-WARE|bertini|bestcrypt|bh-luxi|BigelowHolmes|BitstreamCyberbit|BitstreamVera|BL|blackshades|blat|boehm-gc|bonnie|Boost-1.0|Broadcom|broadcom_bcm20702|Brother|BSD|BSD-1|BSD-2|BSD-2-with-patent|BSD-4|BSD-with-attribution|BSD-with-disclosure|buddy|bufexplorer.vim|BUILDLIC|bungie-marathon|BUSL-1.1|BZIP2|C3|Canon-IJ|Canon-UFR-II|CAOSL|CAPYBARA-EULA|CARA|CC-BY-1.0|CC-BY-2.0|CC-BY-2.5|CC-BY-3.0|CC-BY-4.0|CC-BY-NC-4.0|CC-BY-NC-ND-2.0|CC-BY-NC-ND-2.5|CC-BY-NC-ND-3.0|CC-BY-NC-ND-4.0|CC-BY-NC-SA-1.0|CC-BY-NC-SA-2.5|CC-BY-NC-SA-3.0|CC-BY-NC-SA-4.0|CC-BY-ND-3.0|CC-BY-ND-4.0|CC-BY-SA-1.0|CC-BY-SA-2.0|CC-BY-SA-2.5|CC-BY-SA-3.0|CC-BY-SA-4.0|CC-PD|CC-SA-1.0|CC-Sampling-Plus-1.0|CC0-1.0|CDDL|CDDL-1.1|CDDL-Schily|CDF|CeCILL-2|CeCILL-B|CeCILL-C|Cenon|charm|ChexQuest3|circlemud|Clarified-Artistic|Clear-BSD|clustalw|CNRI|Cockos|Cockroach|codehaus-classworlds|Coherent-Graphics|coldspringharbor|colt|comi|CPAL-1.0|CPL-0.5|CPL-1.0|CPOL|crafty|CROSSOVER-3|Crypt-IDEA|CSL-2.0|curl|DCC|Dell-EULA|DES|descent-data|dgen-sdl|Digium|docbook|dom4j|DOOM-COLLECTORS-EDITION|drascula|Dreamweb|dropbox|DUKE3D|DUMB-0.9.3|DXX-Rebirth|EAPL|ECL-2.0|Elastic|Elastic-2.0|ElementTree|Elm|Emacs|EPL-1.0|EPL-2.0|EPSON|ErlPL-1.1|eschalon-book-1-demo|eternal_lands|ETQW|EUPL-1.1|EUPL-1.2|exljbris-free|Exolab|f.lux|FAH-EULA-2014|FAH-special-permission|fairuse|fasta|FastCGI|FDL-1.1|FDL-1.1+|FDL-1.2|FDL-1.2+|FDL-1.3|FDL-1.3+|feh|FESTIVAL|File-MMagic|finchtv|FIPL-1.0|Flashpix|FLEX|flexmock|Flightradar24|FLTK|fmdrv|FoilTeX|fping|FraunhoferFDK|Free-Art-1.2|Free-Art-1.3|free-noncomm|freedist|freetts|frozenbyte-eula|FSFAP|FTDI|FTL|FVWM|galaxyhack|Gameplay-Group-EULA|gcc-runtime-library-exception-3.1|gd|geant4|geekbench|genymotion|Geogebra|GIMPS|Glulxe|gmap|gnuplot|GOG-EULA|google-chrome|GPL-1|GPL-1+|GPL-2|GPL-2+|GPL-2+-with-eCos-exception-2|GPL-2+-with-openssl-exception|GPL-2+-with-Pyinstaller-Bootloader-exception|GPL-2-with-classpath-exception|GPL-2-with-exceptions|GPL-2-with-font-exception|GPL-2-with-linking-exception|GPL-2-with-MySQL-FLOSS-exception|GPL-3|GPL-3+|GPL-3+-with-autoconf-exception|GPL-3+-with-font-exception|GPL-3-with-font-exception|GPL-3-with-openssl-exception|grass-ipafonts|GregoryRubin|gsm|gSOAP|gSOAP-1.3b|guild|HappyBunny|Hauppauge-Firmware|HIDAPI|HoMM2-Demo|hp-proliant-essentials|hpe|HPL|hplip-plugin|HPND|HRP|HSL|HTML-Tidy|Hugo|hylafaxplus|HyperSpec|iASL|IBM|icaclient|icu|IDEA|IDEA_Academic|IDEA_Classroom|IDEA_OpenSource|IDEA_Personal|IDPL|IJG|imagemagick|Info-ZIP|Inform|inmon-sflow|inner-net|Intel-SDP|intel-ucode|Interbase-1.0|Introversion|ipadic|IPAfont|ipw2100-fw|ipw2200-fw|ISC|Ispell|ISSL|ITS4|IUPAC-InChi|jardinains|JasPer2.0|JDOM|JetBrains-business|JetBrains-classroom|JetBrains-educational|JetBrains-individual|jfontain|JOVE|JoyPixels|JPRS|JSON|julius|Kermit|Khronos-CLHPP|knights-demo|Kryoflux-MAME|kyocera-mita-ppds|LA_OPT_BASE_LICENSE|lablgtk-examples|LambdaMOO|LaTeX-Calendar|lcc|LDP-1|LDP-1a|Legends|levee|Lexmark-EU2-0111|LGPL-2|LGPL-2+|LGPL-2-with-linking-exception|LGPL-2.1|LGPL-2.1+|LGPL-2.1-w
ith-linking-exception|LGPL-3|LGPL-3+|LGPL-3-with-linking-exception|LGrind-Jacobson|lha|libgcc|LIBGLOSS|libmng|libpng|libpng2|libstdc++|libtiff|LICENSE-BITSTREAM|linux-fw-redistributable|LLGPL-2.1|LogMeIn|LOKI-EULA|LOTW|LPL-1.02|LPPL-1.0|LPPL-1.2|LPPL-1.3|LPPL-1.3a|LPPL-1.3c|LSI|LSI-tw_cli|lsof|lure|mac|MagentaMgOpen|MAJESTY-DEMO|MakeMKV-EULA|man-pages|man-pages-posix-2013|mapm|marginalhacks|Markwardt|master-pdf-editor|matplotlib|MaxMind2|MBROLA-VOICES|mekanix|Mellanox-AS-IS|metapackage|MicroChip-PK2|MicroChip-SDCC|Microsemi|microsoft-azurevpnclient|microsoft-edge|Microsoft-vscode|Midisport|MILO|Mini-XML|minpack|MirOS|MIT|MIT-0|MIT-with-advertising|mm|mmix|modeller|Mojang|MOLDEN|molmol|Moria|Mozart|MPEG-4|mpg123-el|mpich2|MPL-1.0|MPL-1.1|MPL-2.0|mplus-fonts|Ms-PL|Ms-RL|MSMS|MSttfEULA|MTA-0.5|myspell-en_CA-KevinAtkinson|myspell-ru_RU-AlexanderLebedev|namd|NCSA-AMD|NCSA-HDF|netcat|nethack|netlogo|netperf|NEWLIB|ngrep|no-source-code|Nokia-Qt-LGPL-Exception-1.1|nomachine|NOSA|NPL-1.1|NPSL-0.95|NVIDIA-CUDA|NVIDIA-cuDNN|NVIDIA-NVLM|NVIDIA-r1|NVIDIA-r2|NVIDIA-SDK|OAL-1.0.1|OASIS-Open|OFFIS|OFL-1.0|OFL-1.1|OGL-1.0a|Old-MIT|olivia|Ookla|Open-CASCADE-LGPL-2.1-Exception-1.0|openafs-krb5-a|openknights|OPENLDAP|openssl|Openwall|OPERA-2018|OPL|OSGi-Specification-2.0|OSL-1.1|OSL-2.0|OSL-2.1|OTN|otter|PAK128.German|PAPERS-PLEASE|par|PassMark-EULA|PCRE|PEL|penguzzle|perforce|PerlDL|photopc|PHP-2.02|PHP-3|PHP-3.01|phrack|PICO-8|pkcrack|PLAN9|Plex|pngcrush|pngnq|POSTGRESQL|Primate-Plunge|Princeton|procheck|prog-express|PSF-2|PSF-2.4|PSTT|psutils|public-domain|PUEL-11|PyCharm|PyCharm_Academic|PyCharm_Classroom|PyCharm_OpenSource|PyCharm_Preview|PYTHON|Q2EULA|Q3AEULA-20000111|qlogic-fibre-channel-firmware|QPL-1.0|quake1-demodata|quake1-killer|quake1-teamfortress|quake1-textures|quake2-demodata|QUAKE4|queen|qwt|RAR|raspberrypi-videocore-bin|rc|rdisc|regexp-UofT|repoze|Resounding|richardson|rpi-eeprom|RSA|RtMidi|Ruby|Ruby-BSD|rwpng|sash|scanlogd|sdlsasteroids|SDRplay|Sendmail|Sendmail-Open-Source|SFI-SCLA|sfpg|SGI-B-2.0|SGMLUG|shmux|shorten|SIP|SIR-TECH|Skype-TOS|Sleepycat|SMAIL|Snd|Snes9x|SOFA|Soltys|SpeedTouch-USB-Firmware|Spencer-99|spideroak|Spotify|SPS|SSLeay|SSPL-1|Stanford|stardock-images|Steam|STRIDE|Stuffit|Sublime|sun-bcla-j2me|sun-bcla-jai|sun-bcla-jsapi|Sun-BSD-no-nuclear-2005|supermicro|SURF|sus4-copyright|swiss-prot|symlinks|Sympow-BSD|Synology|szip|tablelist|TADS2|TADS3|tanuki-community|tarsnap|tcltk|tcp_wrappers_license|teamspeak3|teamspeak5|TeamViewer|Tenable-Master-Agreement|TeX|TeX-other-free|Texinfo-manual|TextMate-bundle|the-Click-license|THINKTANKS|TIK|Time-Format|Time-modules|timescale|Tinker|tkMOO|tm-align|torque-2.5|Toyoda|Transmission-OpenSSL-exception|trf|trio|truecrypt-3.0|tsm|ttf2pt1|ubiquiti|UbuntuFontLicense-1.0|UCAR-BSD|UCAR-Unidata|unafold|unicode|Unicode-3.0|Unicode-DFS-2016|Unlicense|unRAR|UoI-NCSA|UPL-1.0|UPX-exception|urbanterror-4.2-maps|URI|URT|ut2003|ut2003-demo|VGBA|vim|vim.org|Vivaldi|vlgothic|vmd|VOSTROM|VTK|W3C|w3m|Watcom-1.0|WidePix|wm2|WolframCDFPlayer|worklog-assistant|worldofpadman|WPS-EULA|WTFPL-2|wxWinFDL-3|wxWinLL-3|wxWinLL-3.1|xbatt|xbattle|xboing|XC|Xdebug|xearth|XEphem|xgraph|XMAME|xmlformat|xref.lisp|xrick|xtrs|xv|Yacht-Club-Games-EULA|YDSLA|Zend-2.0|zi-labone|ZLIB|ZPL|ZSH)(( 
|\\n)(0BSD|2dboy-EULA|9wm|Activision|aczoom|adom|AFL-2.1|AFL-3.0|AGPL-3|AGPL-3+|AIFFWriter.m|Aladdin|Alasir|all-rights-reserved|Allegro|alternate|AMD-GPU-PRO-EULA|amiwm|AMPAS|android|AnyDesk-TOS|Apache-1.0|Apache-1.1|Apache-2.0|Apache-2.0-with-LLVM-exceptions|APL-1.0|APSL-2|aquafont|Arkkra|Arphic|Artistic|Artistic-2|Aseprite-EULA|astrolog|Atmel|Autodesk|Avago|AVASYS|AvP|BAEKMUK|bakoma|bass|battalion|baudline|BEER-WARE|bertini|bestcrypt|bh-luxi|BigelowHolmes|BitstreamCyberbit|BitstreamVera|BL|blackshades|blat|boehm-gc|bonnie|Boost-1.0|Broadcom|broadcom_bcm20702|Brother|BSD|BSD-1|BSD-2|BSD-2-with-patent|BSD-4|BSD-with-attribution|BSD-with-disclosure|buddy|bufexplorer.vim|BUILDLIC|bungie-marathon|BUSL-1.1|BZIP2|C3|Canon-IJ|Canon-UFR-II|CAOSL|CAPYBARA-EULA|CARA|CC-BY-1.0|CC-BY-2.0|CC-BY-2.5|CC-BY-3.0|CC-BY-4.0|CC-BY-NC-4.0|CC-BY-NC-ND-2.0|CC-BY-NC-ND-2.5|CC-BY-NC-ND-3.0|CC-BY-NC-ND-4.0|CC-BY-NC-SA-1.0|CC-BY-NC-SA-2.5|CC-BY-NC-SA-3.0|CC-BY-NC-SA-4.0|CC-BY-ND-3.0|CC-BY-ND-4.0|CC-BY-SA-1.0|CC-BY-SA-2.0|CC-BY-SA-2.5|CC-BY-SA-3.0|CC-BY-SA-4.0|CC-PD|CC-SA-1.0|CC-Sampling-Plus-1.0|CC0-1.0|CDDL|CDDL-1.1|CDDL-Schily|CDF|CeCILL-2|CeCILL-B|CeCILL-C|Cenon|charm|ChexQuest3|circlemud|Clarified-Artistic|Clear-BSD|clustalw|CNRI|Cockos|Cockroach|codehaus-classworlds|Coherent-Graphics|coldspringharbor|colt|comi|CPAL-1.0|CPL-0.5|CPL-1.0|CPOL|crafty|CROSSOVER-3|Crypt-IDEA|CSL-2.0|curl|DCC|Dell-EULA|DES|descent-data|dgen-sdl|Digium|docbook|dom4j|DOOM-COLLECTORS-EDITION|drascula|Dreamweb|dropbox|DUKE3D|DUMB-0.9.3|DXX-Rebirth|EAPL|ECL-2.0|Elastic|Elastic-2.0|ElementTree|Elm|Emacs|EPL-1.0|EPL-2.0|EPSON|ErlPL-1.1|eschalon-book-1-demo|eternal_lands|ETQW|EUPL-1.1|EUPL-1.2|exljbris-free|Exolab|f.lux|FAH-EULA-2014|FAH-special-permission|fairuse|fasta|FastCGI|FDL-1.1|FDL-1.1+|FDL-1.2|FDL-1.2+|FDL-1.3|FDL-1.3+|feh|FESTIVAL|File-MMagic|finchtv|FIPL-1.0|Flashpix|FLEX|flexmock|Flightradar24|FLTK|fmdrv|FoilTeX|fping|FraunhoferFDK|Free-Art-1.2|Free-Art-1.3|free-noncomm|freedist|freetts|frozenbyte-eula|FSFAP|FTDI|FTL|FVWM|galaxyhack|Gameplay-Group-EULA|gcc-runtime-library-exception-3.1|gd|geant4|geekbench|genymotion|Geogebra|GIMPS|Glulxe|gmap|gnuplot|GOG-EULA|google-chrome|GPL-1|GPL-1+|GPL-2|GPL-2+|GPL-2+-with-eCos-exception-2|GPL-2+-with-openssl-exception|GPL-2+-with-Pyinstaller-Bootloader-exception|GPL-2-with-classpath-exception|GPL-2-with-exceptions|GPL-2-with-font-exception|GPL-2-with-linking-exception|GPL-2-with-MySQL-FLOSS-exception|GPL-3|GPL-3+|GPL-3+-with-autoconf-exception|GPL-3+-with-font-exception|GPL-3-with-font-exception|GPL-3-with-openssl-exception|grass-ipafonts|GregoryRubin|gsm|gSOAP|gSOAP-1.3b|guild|HappyBunny|Hauppauge-Firmware|HIDAPI|HoMM2-Demo|hp-proliant-essentials|hpe|HPL|hplip-plugin|HPND|HRP|HSL|HTML-Tidy|Hugo|hylafaxplus|HyperSpec|iASL|IBM|icaclient|icu|IDEA|IDEA_Academic|IDEA_Classroom|IDEA_OpenSource|IDEA_Personal|IDPL|IJG|imagemagick|Info-ZIP|Inform|inmon-sflow|inner-net|Intel-SDP|intel-ucode|Interbase-1.0|Introversion|ipadic|IPAfont|ipw2100-fw|ipw2200-fw|ISC|Ispell|ISSL|ITS4|IUPAC-InChi|jardinains|JasPer2.0|JDOM|JetBrains-business|JetBrains-classroom|JetBrains-educational|JetBrains-individual|jfontain|JOVE|JoyPixels|JPRS|JSON|julius|Kermit|Khronos-CLHPP|knights-demo|Kryoflux-MAME|kyocera-mita-ppds|LA_OPT_BASE_LICENSE|lablgtk-examples|LambdaMOO|LaTeX-Calendar|lcc|LDP-1|LDP-1a|Legends|levee|Lexmark-EU2-0111|LGPL-2|LGPL-2+|LGPL-2-with-linking-exception|LGPL-2.1|LGPL-2.1+|LGPL-2.1-with-linking-exception|LGPL-3|LGPL-3+|LGPL-3-with-linking-exception|LGrind-Jacobson|lha|libgcc|LIBGLOSS|libmng|libpng|libpng2|l
ibstdc++|libtiff|LICENSE-BITSTREAM|linux-fw-redistributable|LLGPL-2.1|LogMeIn|LOKI-EULA|LOTW|LPL-1.02|LPPL-1.0|LPPL-1.2|LPPL-1.3|LPPL-1.3a|LPPL-1.3c|LSI|LSI-tw_cli|lsof|lure|mac|MagentaMgOpen|MAJESTY-DEMO|MakeMKV-EULA|man-pages|man-pages-posix-2013|mapm|marginalhacks|Markwardt|master-pdf-editor|matplotlib|MaxMind2|MBROLA-VOICES|mekanix|Mellanox-AS-IS|metapackage|MicroChip-PK2|MicroChip-SDCC|Microsemi|microsoft-azurevpnclient|microsoft-edge|Microsoft-vscode|Midisport|MILO|Mini-XML|minpack|MirOS|MIT|MIT-0|MIT-with-advertising|mm|mmix|modeller|Mojang|MOLDEN|molmol|Moria|Mozart|MPEG-4|mpg123-el|mpich2|MPL-1.0|MPL-1.1|MPL-2.0|mplus-fonts|Ms-PL|Ms-RL|MSMS|MSttfEULA|MTA-0.5|myspell-en_CA-KevinAtkinson|myspell-ru_RU-AlexanderLebedev|namd|NCSA-AMD|NCSA-HDF|netcat|nethack|netlogo|netperf|NEWLIB|ngrep|no-source-code|Nokia-Qt-LGPL-Exception-1.1|nomachine|NOSA|NPL-1.1|NPSL-0.95|NVIDIA-CUDA|NVIDIA-cuDNN|NVIDIA-NVLM|NVIDIA-r1|NVIDIA-r2|NVIDIA-SDK|OAL-1.0.1|OASIS-Open|OFFIS|OFL-1.0|OFL-1.1|OGL-1.0a|Old-MIT|olivia|Ookla|Open-CASCADE-LGPL-2.1-Exception-1.0|openafs-krb5-a|openknights|OPENLDAP|openssl|Openwall|OPERA-2018|OPL|OSGi-Specification-2.0|OSL-1.1|OSL-2.0|OSL-2.1|OTN|otter|PAK128.German|PAPERS-PLEASE|par|PassMark-EULA|PCRE|PEL|penguzzle|perforce|PerlDL|photopc|PHP-2.02|PHP-3|PHP-3.01|phrack|PICO-8|pkcrack|PLAN9|Plex|pngcrush|pngnq|POSTGRESQL|Primate-Plunge|Princeton|procheck|prog-express|PSF-2|PSF-2.4|PSTT|psutils|public-domain|PUEL-11|PyCharm|PyCharm_Academic|PyCharm_Classroom|PyCharm_OpenSource|PyCharm_Preview|PYTHON|Q2EULA|Q3AEULA-20000111|qlogic-fibre-channel-firmware|QPL-1.0|quake1-demodata|quake1-killer|quake1-teamfortress|quake1-textures|quake2-demodata|QUAKE4|queen|qwt|RAR|raspberrypi-videocore-bin|rc|rdisc|regexp-UofT|repoze|Resounding|richardson|rpi-eeprom|RSA|RtMidi|Ruby|Ruby-BSD|rwpng|sash|scanlogd|sdlsasteroids|SDRplay|Sendmail|Sendmail-Open-Source|SFI-SCLA|sfpg|SGI-B-2.0|SGMLUG|shmux|shorten|SIP|SIR-TECH|Skype-TOS|Sleepycat|SMAIL|Snd|Snes9x|SOFA|Soltys|SpeedTouch-USB-Firmware|Spencer-99|spideroak|Spotify|SPS|SSLeay|SSPL-1|Stanford|stardock-images|Steam|STRIDE|Stuffit|Sublime|sun-bcla-j2me|sun-bcla-jai|sun-bcla-jsapi|Sun-BSD-no-nuclear-2005|supermicro|SURF|sus4-copyright|swiss-prot|symlinks|Sympow-BSD|Synology|szip|tablelist|TADS2|TADS3|tanuki-community|tarsnap|tcltk|tcp_wrappers_license|teamspeak3|teamspeak5|TeamViewer|Tenable-Master-Agreement|TeX|TeX-other-free|Texinfo-manual|TextMate-bundle|the-Click-license|THINKTANKS|TIK|Time-Format|Time-modules|timescale|Tinker|tkMOO|tm-align|torque-2.5|Toyoda|Transmission-OpenSSL-exception|trf|trio|truecrypt-3.0|tsm|ttf2pt1|ubiquiti|UbuntuFontLicense-1.0|UCAR-BSD|UCAR-Unidata|unafold|unicode|Unicode-3.0|Unicode-DFS-2016|Unlicense|unRAR|UoI-NCSA|UPL-1.0|UPX-exception|urbanterror-4.2-maps|URI|URT|ut2003|ut2003-demo|VGBA|vim|vim.org|Vivaldi|vlgothic|vmd|VOSTROM|VTK|W3C|w3m|Watcom-1.0|WidePix|wm2|WolframCDFPlayer|worklog-assistant|worldofpadman|WPS-EULA|WTFPL-2|wxWinFDL-3|wxWinLL-3|wxWinLL-3.1|xbatt|xbattle|xboing|XC|Xdebug|xearth|XEphem|xgraph|XMAME|xmlformat|xref.lisp|xrick|xtrs|xv|Yacht-Club-Games-EULA|YDSLA|Zend-2.0|zi-labone|ZLIB|ZPL|ZSH))*" }, "IUSE": { "description": "This should be a list of any and all USE flags that are leveraged within your build script. The only USE flags that should not be listed here are arch related flags (see KEYWORDS). Beginning with EAPI 1, it is possible to prefix flags with + or - in order to create default settings that respectively enable or disable the corresponding USE flags. 
For details about USE flag stacking order, refer to the USE_ORDER variable in make.conf(5). Given the default USE_ORDER setting, negative IUSE default settings are effective only for negation of repo-level USE settings, since profile and user configuration settings override them.", @@ -353,7 +354,7 @@ "const": 0 }, "has_version": { - "description": "```sh\nhas_version [-b] [-d] [-r] [--host-root] \n```\nCheck to see if is installed. The parameter accepts all values that are acceptable in the DEPEND variable. The function returns 0 if is installed, 1 otherwise. The package is searched for in ROOT by default.", + "description": "```sh\nhas_version [-b] [-d] [-r] [--host-root] \n```\nCheck to see if category/package-version is installed. The parameter accepts all values that are acceptable in the DEPEND variable. The function returns 0 if category/package-version is installed, 1 otherwise. The package is searched for in ROOT by default.", "const": 0 }, "best_version": { diff --git a/src/termux_language_server/assets/json/mdd.json b/src/termux_language_server/assets/json/mdd.json new file mode 100644 index 0000000..66eabf6 --- /dev/null +++ b/src/termux_language_server/assets/json/mdd.json @@ -0,0 +1,62 @@ +{ + "$id": "https://github.com/termux/termux-language-server/blob/main/src/termux_language_server/assets/json/mdd.json", + "$schema": "http://json-schema.org/draft-07/schema#", + "$comment": "https://github.com/zsh-users/zsh/blob/57248b88830ce56adc243a40c7773fb3825cab34/Etc/zsh-development-guide#L285-L288", + "type": "object", + "properties": { + "name": { + "type": "string", + "description": "name of the module" + }, + "link": { + "type": "string", + "enum": ["static", "dynamic", "no"], + "description": "as described in INSTALL. In addition, the value `either` is allowed in the .mdd file, which will be converted by configure to `dynamic` if that is available, else `static`. May also be a command string, which will be run within configure and whose output is used to set the value of `link` in config.modules. This allows a system-specific choice of modules. For example, \n```zsh\nlink=`case $host in\n *-hpux*) echo dynamic;\n ;;\n *) echo no;\n ;;\n esac`\n```" + }, + "load": { + "type": "string", + "enum": ["yes", "no"], + "description": "whether the shell should include hooks for loading the module automatically as necessary. (This corresponds to an `L` in xmods.conf in the old mechanism.)" + }, + "moddeps": { + "type": "string", + "description": "modules on which this module depends (default none)" + }, + "nozshdep": { + "type": "string", + "description": "non-empty indicates no dependence on the `zsh/main' pseudo-module" + }, + "alwayslink": { + "type": "string", + "description": "if non-empty, always link the module into the executable" + }, + "autofeatures": { + "type": "string", + "description": "features defined by the module for autoloading, a space-separated list. The syntax for features is as for zmodload -F, e.g. b:mybin refers to the builtin mybin. This replaces the previous mechanism with separate variables for builtins, conditions, math functions and parameters. Note the features are only available in zsh's native mode, not in emulation modes." + }, + "autofeatures_emu": { + "type": "string", + "description": "As autofeatures, but the features so presented are available in modes that are *not* zsh's native mode. The variable autofeatures must also be present." 
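The `mdd.json` schema describes zsh's `.mdd` module-definition files, which are short sh-style fragments read by zsh's configure machinery (see the zsh-development-guide referenced in `$comment`). A rough, hypothetical example using fields from this schema, with the module name and dependency invented for illustration:

```sh
# Hypothetical example.mdd sketch; field names follow the schema above,
# while the module name and dependency are invented.
name=zsh/example
link=dynamic
load=no
moddeps="zsh/zutil"
```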
+ }, + "objects": { + "type": "string", + "description": ".o files making up this module (*must* be defined)" + }, + "proto": { + "type": "string", + "description": ".syms files for this module (default generated from $objects)" + }, + "headers": { + "type": "string", + "description": "extra headers for this module (default none)" + }, + "hdrdeps": { + "type": "string", + "description": "extra headers on which the .mdh depends (default none)" + }, + "otherincs": { + "type": "string", + "description": "extra headers that are included indirectly (default none)" + } + } +} diff --git a/src/termux_language_server/assets/queries/mingw.scm b/src/termux_language_server/assets/queries/mingw.scm new file mode 100644 index 0000000..0e84a35 --- /dev/null +++ b/src/termux_language_server/assets/queries/mingw.scm @@ -0,0 +1,8 @@ +( + (variable_assignment + name: (variable_name) @variable.name + ) + (#match? + @variable.name "^(mingw|msys2)_" + ) + ) diff --git a/src/termux_language_server/assets/queries/package.scm b/src/termux_language_server/assets/queries/package.scm new file mode 100644 index 0000000..dcb6324 --- /dev/null +++ b/src/termux_language_server/assets/queries/package.scm @@ -0,0 +1,9 @@ +( + (variable_assignment + name: (variable_name) @variable.name + value: (array (word) @package) + ) + (#match? + @variable.name "^(depends|optdepends|makedepends|conflicts|provides)$" + ) + ) diff --git a/src/termux_language_server/finders.py b/src/termux_language_server/finders.py index 029a69a..810b57e 100644 --- a/src/termux_language_server/finders.py +++ b/src/termux_language_server/finders.py @@ -1,10 +1,18 @@ r"""Finders =========== """ + from copy import deepcopy from dataclasses import dataclass from jinja2 import Template +from lsp_tree_sitter import UNI, Finder +from lsp_tree_sitter.finders import ( + ErrorFinder, + QueryFinder, + SchemaFinder, + UnFixedOrderFinder, +) from lsprotocol.types import ( CompletionItemKind, DiagnosticSeverity, @@ -13,18 +21,11 @@ Range, TextEdit, ) -from tree_sitter import Tree +from tree_sitter import Node, Tree from . import CSV, FILETYPE from .schema import BashTrie -from .tree_sitter_lsp import UNI, Finder -from .tree_sitter_lsp.finders import ( - ErrorFinder, - MissingFinder, - SchemaFinder, - UnFixedOrderFinder, -) -from .utils import get_schema +from .utils import get_query, get_schema @dataclass(init=False) @@ -49,7 +50,7 @@ class UnsortedKeywordFinder(UnFixedOrderFinder): def __init__( self, filetype: FILETYPE, - message: str = "{{uni.get_text()}}: is unsorted due to {{_uni}}", + message: str = "{{uni.text}}: is unsorted due to {{_uni}}", severity: DiagnosticSeverity = DiagnosticSeverity.Warning, ) -> None: r"""Init. @@ -153,7 +154,7 @@ def filter(self, uni: UNI) -> bool: :type uni: UNI :rtype: bool """ - text = uni.get_text() + text = uni.text return ( text in self.order and text in self.keywords @@ -180,8 +181,8 @@ def get_text_edits(self, uri: str, tree: Tree) -> list[TextEdit]: return [] # swap 2 unis return [ - TextEdit(UNI.node2range(parent), UNI.node2text(_parent)), - TextEdit(UNI.node2range(_parent), UNI.node2text(parent)), + TextEdit(UNI(parent).range, UNI(_parent).text), + TextEdit(UNI(_parent).range, UNI(parent).text), ] return [] @@ -193,7 +194,7 @@ class UnsortedCSVFinder(Finder): def __init__( self, filetype: FILETYPE, - message: str = "{{uni.get_text()}}: unsorted", + message: str = "{{uni.text}}: unsorted", severity: DiagnosticSeverity = DiagnosticSeverity.Warning, ) -> None: r"""Init. 
@@ -230,7 +231,7 @@ def __call__(self, uni: UNI) -> bool: :type uni: UNI :rtype: bool """ - return self.is_csv(uni) and self.sort(uni.get_text()) != uni.get_text() + return self.is_csv(uni) and self.sort(uni.text) != uni.text def is_csv(self, uni: UNI) -> bool: r"""Is csv. @@ -246,7 +247,7 @@ def is_csv(self, uni: UNI) -> bool: parent.type == "variable_assignment" and uni.node == parent.children[-1] and (uni.node.type == "word" or uni.node.type == "string") - and UNI.node2text(parent.children[0]) in self.csvs + and UNI(parent.children[0]).text in self.csvs ) @staticmethod @@ -276,7 +277,7 @@ def get_text_edits(self, uri: str, tree: Tree) -> list[TextEdit]: :rtype: list[TextEdit] """ text_edits = [ - TextEdit(uni.get_range(), self.sort(uni.get_text())) + TextEdit(uni.range, self.sort(uni.text)) for uni in self.find_all(uri, tree) ] return text_edits @@ -291,7 +292,7 @@ def __post_init__(self) -> None: :rtype: None """ - self.csvs -= {"TERMUX_PKG_BLACKLISTED_ARCHES"} + self.csvs -= {"TERMUX_PKG_EXCLUDED_ARCHES"} def __call__(self, uni: UNI) -> bool: r"""Call. @@ -303,7 +304,7 @@ def __call__(self, uni: UNI) -> bool: return self.is_csv(uni) def get_document_links( - self, uri: str, tree: Tree, template: str + self, uri: str, tree: Tree, template: str = "" ) -> list[DocumentLink]: r"""Get document links. @@ -318,7 +319,7 @@ def get_document_links( links = [] for uni in self.find_all(uri, tree): start = list(uni.node.start_point) - text = uni.get_text() + text = uni.text if text.startswith('"'): text = text.strip('"') start[1] += 1 @@ -339,39 +340,63 @@ def get_document_links( return links -class PackageFinder(Finder): +@dataclass(init=False) +class PackageFinder(QueryFinder): r"""Packagefinder.""" - def __call__(self, uni: UNI) -> bool: - r"""Call. + def __init__( + self, + message: str = "{{uni.text}}: no such file", + severity: DiagnosticSeverity = DiagnosticSeverity.Error, + ) -> None: + r"""Init. - :param uni: - :type uni: UNI - :rtype: bool + :param message: + :type message: str + :param severity: + :type severity: DiagnosticSeverity + :rtype: None """ - parent = uni.node.parent - if parent is None: - return False - if parent.parent is None: - return False - return ( - uni.node.type == "word" - and parent.type == "array" - and parent.parent.type == "variable_assignment" - and UNI.node2text(parent.parent.children[0]) - in [ - "depends", - "optdepends", - "makedepends", - "conflicts", - "provides", - ] - ) + query = get_query("package") + super().__init__(query, message, severity) + + def capture2uni(self, capture: tuple[Node, str], uri: str) -> UNI | None: + r"""Capture2uni. + + :param capture: + :type capture: tuple[Node, str] + :param uri: + :type uri: str + :rtype: UNI | None + """ + node, label = capture + uni = UNI(node, uri) + return uni if label == "package" else None + + +@dataclass(init=False) +class MinGWFinder(QueryFinder): + r"""Mingwfinder.""" + + def __init__( + self, + message: str = "{{uni.text}}: no such file", + severity: DiagnosticSeverity = DiagnosticSeverity.Error, + ) -> None: + r"""Init. 
+ + :param message: + :type message: str + :param severity: + :type severity: DiagnosticSeverity + :rtype: None + """ + query = get_query("mingw") + super().__init__(query, message, severity) DIAGNOSTICS_FINDER_CLASSES = [ ErrorFinder, - MissingFinder, BashFinder, UnsortedKeywordFinder, UnsortedCSVFinder, diff --git a/src/termux_language_server/misc/__init__.py b/src/termux_language_server/misc/__init__.py index 4a08b4e..6b74d27 100644 --- a/src/termux_language_server/misc/__init__.py +++ b/src/termux_language_server/misc/__init__.py @@ -1,14 +1,17 @@ r"""Misc ======== """ + from typing import Any +from .. import FILETYPE + -def get_schema(filetype: str) -> dict[str, Any]: +def get_schema(filetype: FILETYPE) -> dict[str, Any]: r"""Get schema. :param filetype: - :type filetype: str + :type filetype: FILETYPE :rtype: dict[str, Any] """ if filetype in {"build.sh", "subpackage.sh"}: diff --git a/src/termux_language_server/misc/color_map.py b/src/termux_language_server/misc/color_map.py index c3b361c..2f6122f 100644 --- a/src/termux_language_server/misc/color_map.py +++ b/src/termux_language_server/misc/color_map.py @@ -1,10 +1,12 @@ r"""Portage's color.map ======================= """ + from typing import Any +from lsp_tree_sitter.misc import get_soup + from .._metainfo import SOURCE, project -from .utils import get_soup def init_schema() -> dict[str, dict[str, Any]]: @@ -12,10 +14,12 @@ def init_schema() -> dict[str, dict[str, Any]]: :rtype: dict[str, dict[str, Any]] """ - schema = {} filetype = "color.map" schema = { - "$id": f"{SOURCE}/blob/main/src/termux_language_server/assets/json/{filetype}.json", + "$id": ( + f"{SOURCE}/blob/main/" + f"src/termux_language_server/assets/json/{filetype}.json" + ), "$schema": "http://json-schema.org/draft-07/schema#", "$comment": ( "Don't edit this file directly! It is generated by " @@ -25,7 +29,7 @@ def init_schema() -> dict[str, dict[str, Any]]: "properties": {}, } dl = get_soup("color.map").find_all("dl")[1] - for dt, dd in zip(dl.find_all("dt"), dl.find_all("dd")): + for dt, dd in zip(dl.find_all("dt"), dl.find_all("dd"), strict=False): name = dt.text.split()[0] description = dd.text.replace("\n", " ").strip() example = dt.text.replace("\n", " ") diff --git a/src/termux_language_server/misc/ebuild.py b/src/termux_language_server/misc/ebuild.py index 4929531..48b36dc 100644 --- a/src/termux_language_server/misc/ebuild.py +++ b/src/termux_language_server/misc/ebuild.py @@ -1,10 +1,15 @@ r"""Portage's ebuild ==================== """ + +import os +from shlex import split +from subprocess import check_output # nosec: B404 from typing import Any +from lsp_tree_sitter.misc import get_soup + from .._metainfo import SOURCE, project -from .utils import get_soup def init_schema() -> dict[str, dict[str, Any]]: @@ -12,10 +17,12 @@ def init_schema() -> dict[str, dict[str, Any]]: :rtype: dict[str, dict[str, Any]] """ - schema = {} filetype = "ebuild" schema = { - "$id": f"{SOURCE}/blob/main/src/termux_language_server/assets/json/{filetype}.json", + "$id": ( + f"{SOURCE}/blob/main/" + f"src/termux_language_server/assets/json/{filetype}.json" + ), "$schema": "http://json-schema.org/draft-07/schema#", "$comment": ( "Don't edit this file directly! 
It is generated by " @@ -25,7 +32,7 @@ def init_schema() -> dict[str, dict[str, Any]]: "properties": {}, } for dl in get_soup("ebuild").find_all("dl")[20:-2]: - for dt, dd in zip(dl.find_all("dt"), dl.find_all("dd")): + for dt, dd in zip(dl.find_all("dt"), dl.find_all("dd"), strict=False): if dt.strong is None or dt.strong.text.endswith(":"): continue name = dt.strong.text.split()[0] @@ -43,4 +50,12 @@ def init_schema() -> dict[str, dict[str, Any]]: schema["properties"][name]["type"] = "string" else: schema["properties"][name]["const"] = 0 + eprefix = os.path.dirname(os.path.dirname(os.getenv("SHELL", ""))) + path = ( + check_output(split(f"portageq get_repo_path {eprefix} gentoo")) + .decode() + .strip() + ) + atom = f"({'|'.join(os.listdir(path))})" + schema["properties"]["LICENSE"]["pattern"] = rf"{atom}(( |\n){atom})*" return {filetype: schema} diff --git a/src/termux_language_server/misc/licenses.py b/src/termux_language_server/misc/licenses.py new file mode 100644 index 0000000..24f72bd --- /dev/null +++ b/src/termux_language_server/misc/licenses.py @@ -0,0 +1,13 @@ +r"""Licenses +============ +""" + +from license_expression import get_license_index + +LICENSES = [ + i["spdx_license_key"] + for i in get_license_index() + if i.get("spdx_license_key") + and not i["spdx_license_key"].startswith("LicenseRef-scancode-") +] +ATOM = f"({'|'.join(LICENSES)})" diff --git a/src/termux_language_server/misc/make_conf.py b/src/termux_language_server/misc/make_conf.py index 0cace15..95b4f3a 100644 --- a/src/termux_language_server/misc/make_conf.py +++ b/src/termux_language_server/misc/make_conf.py @@ -1,10 +1,12 @@ r"""Portage's make.conf ======================= """ + from typing import Any +from lsp_tree_sitter.misc import get_soup + from .._metainfo import SOURCE, project -from .utils import get_soup def init_schema() -> dict[str, dict[str, Any]]: @@ -12,10 +14,12 @@ def init_schema() -> dict[str, dict[str, Any]]: :rtype: dict[str, dict[str, Any]] """ - schema = {} filetype = "make.conf" schema = { - "$id": f"{SOURCE}/blob/main/src/termux_language_server/assets/json/{filetype}.json", + "$id": ( + f"{SOURCE}/blob/main/" + f"src/termux_language_server/assets/json/{filetype}.json" + ), "$schema": "http://json-schema.org/draft-07/schema#", "$comment": ( "Don't edit this file directly! 
It is generated by " @@ -25,7 +29,7 @@ def init_schema() -> dict[str, dict[str, Any]]: "properties": {}, } for dl in get_soup("make.conf").find_all("dl")[:-2]: - for dt, dd in zip(dl.find_all("dt"), dl.find_all("dd")): + for dt, dd in zip(dl.find_all("dt"), dl.find_all("dd"), strict=False): if dt.strong is None: continue name = dt.strong.text.split()[0] diff --git a/src/termux_language_server/misc/makepkg_conf.py b/src/termux_language_server/misc/makepkg_conf.py index 9df45b1..616fdad 100644 --- a/src/termux_language_server/misc/makepkg_conf.py +++ b/src/termux_language_server/misc/makepkg_conf.py @@ -1,10 +1,12 @@ r"""makepkg.conf ================ """ + from typing import Any +from lsp_tree_sitter.misc import get_soup + from .._metainfo import SOURCE, project -from .utils import get_soup def init_schema() -> dict[str, dict[str, Any]]: @@ -12,10 +14,12 @@ def init_schema() -> dict[str, dict[str, Any]]: :rtype: dict[str, dict[str, Any]] """ - schema = {} filetype = "makepkg.conf" schema = { - "$id": f"{SOURCE}/blob/main/src/termux_language_server/assets/json/{filetype}.json", + "$id": ( + f"{SOURCE}/blob/main/" + f"src/termux_language_server/assets/json/{filetype}.json" + ), "$schema": "http://json-schema.org/draft-07/schema#", "$comment": ( "Don't edit this file directly! It is generated by " diff --git a/src/termux_language_server/misc/pkgbuild.py b/src/termux_language_server/misc/pkgbuild.py index d1af2e9..25c02c4 100644 --- a/src/termux_language_server/misc/pkgbuild.py +++ b/src/termux_language_server/misc/pkgbuild.py @@ -1,12 +1,15 @@ r"""PKGBUILD ============ """ + +import os from typing import Any +from lsp_tree_sitter.misc import get_md_tokens, get_soup from markdown_it.token import Token from .._metainfo import SOURCE, project -from .utils import get_md_tokens +from .licenses import LICENSES def get_content(tokens: list[Token]) -> str: @@ -16,9 +19,9 @@ def get_content(tokens: list[Token]) -> str: :type tokens: list[Token] :rtype: str """ - return "\n".join( - [token.content.replace("\n", " ") for token in tokens if token.content] - ) + return "\n".join([ + token.content.replace("\n", " ") for token in tokens if token.content + ]) def init_schema() -> dict[str, Any]: @@ -29,7 +32,10 @@ def init_schema() -> dict[str, Any]: schemas = {} for filetype in {"PKGBUILD", "install"}: schemas[filetype] = { - "$id": f"{SOURCE}/blob/main/src/termux_language_server/assets/json/{filetype}.json", + "$id": ( + f"{SOURCE}/blob/main/" + f"src/termux_language_server/assets/json/{filetype}.json" + ), "$schema": "http://json-schema.org/draft-07/schema#", "$comment": ( "Don't edit this file directly! It is generated by " @@ -61,16 +67,14 @@ def init_schema() -> dict[str, Any]: if token.type == "blockquote_close" ] close_indices = [ - min( - [ - blockquote_close_index - for blockquote_close_index in blockquote_close_indices - if blockquote_close_index > index - ] - ) + min([ + blockquote_close_index + for blockquote_close_index in blockquote_close_indices + if blockquote_close_index > index + ]) for index in indices ] - for index, close_index in zip(indices, close_indices): + for index, close_index in zip(indices, close_indices, strict=False): children = tokens[index].children if children is None: continue @@ -106,11 +110,11 @@ def init_schema() -> dict[str, Any]: schemas[filetype][properties_name][name] = { "description": description } - # makepkg supports building multiple packages from a single PKGBUILD. - # This is achieved by assigning an array of package names to the - # pkgname directive. 
+ # makepkg supports building multiple packages from a single + # PKGBUILD. This is achieved by assigning an array of package names + # to the pkgname directive. if name == "pkgname": - schemas[filetype][properties_name][name]["anyOf"] = [ + schemas[filetype][properties_name][name]["oneOf"] = [ { "type": "array", "items": {"type": "string"}, @@ -127,7 +131,88 @@ def init_schema() -> dict[str, Any]: "uniqueItems": True, } elif kind == "Function": - # Each split package uses a corresponding packaging function with - # name package_foo(), where foo is the name of the split package. + # Each split package uses a corresponding packaging function + # with name package_foo(), where foo is the name of the split + # package. schemas[filetype][properties_name][name]["const"] = 0 + # https://archlinux32.org/architecture/ + # https://archlinux.org/packages/ + # https://archlinuxarm.org/forum/viewforum.php?f=56 + schemas["PKGBUILD"]["properties"]["arch"]["items"]["enum"] = [ + "any", + "pentium4", + "i486", + "i686", + "x86_64", + "x86_64_v3", + "arm", + "armv6h", + "armv7h", + "armv8", + "aarch64", + ] + schemas["PKGBUILD"]["properties"]["url"]["format"] = "uri" + del schemas["PKGBUILD"]["properties"]["pkgver"]["type"] + for name in schemas["PKGBUILD"]["properties"]: + if name.endswith("sums"): + del schemas["PKGBUILD"]["properties"][name]["uniqueItems"] + schemas["PKGBUILD"]["properties"]["pkgver"] |= { + "oneOf": [{"type": "string"}, {"const": 0}] + } + + soup = get_soup("https://www.msys2.org/dev/pkgbuild/") + for tr in soup.find_all("tr")[1:]: + tds = tr.find_all("td") + name = tds[0].code.text + kind = tds[1].text + kind = {"mapping": "array"}.get(kind, kind) + description = tds[2].text + schemas["PKGBUILD"]["properties"][name] = { + "type": kind, + "description": description, + } + if kind == "array": + schemas["PKGBUILD"]["properties"][name] |= { + "items": {"type": "string"}, + "uniqueItems": True, + } + elif kind == "object": + schemas["PKGBUILD"]["properties"][name] |= {"properties": {}} + elif kind == "string" and name.endswith("_url"): + schemas["PKGBUILD"]["properties"][name]["format"] = "uri" + # https://packages.msys2.org/repos + schemas["PKGBUILD"]["properties"]["mingw_arch"]["items"]["enum"] = [ + "mingw32", + "mingw64", + "ucrt64", + "clang64", + "clang32", + "clangarm64", + ] + names = [] + for li in soup.find_all("li"): + code = li.find("code") + if code is None: + continue + name = code.text + if not li.text.startswith(name): + continue + names += [name] + # text = li.text + # _, _, text = text.partition(name) + # description = text.replace("\n", " ").lstrip("- ") + schemas["PKGBUILD"]["properties"]["msys2_references"]["items"][ + "pattern" + ] = f"({'|'.join(names)})(|: .*)" + schemas["PKGBUILD"]["properties"]["license"]["items"] = { + "oneOf": [ + { + "type": "string", + "enum": LICENSES + + os.listdir("/usr/share/licenses/common") + + ["custom"], + }, + {"type": "string", "pattern": "custom:.+"}, + ] + } return schemas diff --git a/src/termux_language_server/misc/termux.py b/src/termux_language_server/misc/termux.py index 3995c12..07bfef9 100644 --- a/src/termux_language_server/misc/termux.py +++ b/src/termux_language_server/misc/termux.py @@ -1,11 +1,14 @@ r"""Termux ========== """ + from typing import Any +from lsp_tree_sitter.misc import get_soup + from .. 
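To see what the pkgname "oneOf" generated above accepts, a minimal sketch assuming the jsonschema package and a simplified fragment of the generated schema (the real one also carries uniqueItems and a description):

from jsonschema import ValidationError, validate

pkgname_schema = {
    "oneOf": [
        {"type": "array", "items": {"type": "string"}},  # split packages
        {"type": "string"},                              # a single package
    ]
}

validate("pkgbuild-language-server", pkgname_schema)     # ok: scalar pkgname
validate(["foo", "foo-docs"], pkgname_schema)            # ok: array of split packages
try:
    validate(0, pkgname_schema)                          # matches neither branch
except ValidationError as error:
    print(error.message)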
import CSV from .._metainfo import SOURCE, project -from .utils import get_soup +from .licenses import ATOM URIS = { "variable": "https://github.com/termux/termux-packages/wiki/Creating-new-package", @@ -22,7 +25,10 @@ def init_schema() -> dict[str, dict[str, Any]]: schemas = {} for filetype in {"build.sh", "subpackage.sh"}: schemas[filetype] = { - "$id": f"{SOURCE}/blob/main/src/termux_language_server/assets/json/{filetype}.json", + "$id": ( + f"{SOURCE}/blob/main/" + f"src/termux_language_server/assets/json/{filetype}.json" + ), "$schema": "http://json-schema.org/draft-07/schema#", "$comment": ( "Don't edit this file directly! It is generated by " @@ -137,24 +143,27 @@ def init_schema() -> dict[str, dict[str, Any]]: schemas["build.sh"]["properties"]["TERMUX_PKG_HOMEPAGE"][ "format" - ] = schemas["build.sh"]["properties"]["TERMUX_PKG_SRCURL"][ - "format" - ] = "uri" - schemas["build.sh"]["properties"]["TERMUX_PKG_MAINTAINER"][ - "default" - ] = "@termux" + ] = schemas["build.sh"]["properties"]["TERMUX_PKG_SRCURL"]["format"] = ( + "uri" + ) + schemas["build.sh"]["properties"]["TERMUX_PKG_MAINTAINER"]["default"] = ( + "@termux" + ) schemas["build.sh"]["properties"]["TERMUX_PKG_UPDATE_METHOD"]["enum"] = [ "github", "gitlab", "repology", ] - schemas["build.sh"]["properties"]["TERMUX_GITLAB_API_HOST"][ - "default" - ] = "gitlab.com" - schemas["build.sh"]["properties"]["TERMUX_GITLAB_API_HOST"][ - "format" - ] = "hostname" + schemas["build.sh"]["properties"]["TERMUX_GITLAB_API_HOST"]["default"] = ( + "gitlab.com" + ) + schemas["build.sh"]["properties"]["TERMUX_GITLAB_API_HOST"]["format"] = ( + "hostname" + ) schemas["build.sh"]["properties"]["TERMUX_PKG_UPDATE_VERSION_REGEXP"][ "format" ] = "regex" + schemas["build.sh"]["properties"]["TERMUX_PKG_LICENSE"]["pattern"] = ( + rf"{ATOM}(,{ATOM})*" + ) return schemas diff --git a/src/termux_language_server/misc/utils.py b/src/termux_language_server/misc/utils.py deleted file mode 100644 index 4146b69..0000000 --- a/src/termux_language_server/misc/utils.py +++ /dev/null @@ -1,83 +0,0 @@ -r"""Utils -========= -""" -from gzip import decompress -from itertools import chain -from urllib import request - -from bs4 import BeautifulSoup, FeatureNotFound -from markdown_it import MarkdownIt -from markdown_it.token import Token -from platformdirs import site_data_path, user_data_path -from pygls.uris import uri_scheme -from pypandoc import convert_text - - -def get_man(filename: str) -> str: - r"""Get man. - - :param filename: - :type filename: str - :rtype: str - """ - filename += ".5*" - text = b"" - path = "" - for path in chain( - (site_data_path("man") / "man5").glob(filename), - (user_data_path("man") / "man5").glob(filename), - ): - try: - with open(path, "rb") as f: - text = f.read() - break - except Exception: # nosec: B112 - continue - if text == b"": - raise FileNotFoundError - _, _, ext = str(path).rpartition(".") - if ext != "5": - text = decompress(text) - return text.decode() - - -def html2soup(html: str) -> BeautifulSoup: - r"""Html2soup. - - :param html: - :type html: str - :rtype: BeautifulSoup - """ - try: - soup = BeautifulSoup(html, "lxml") - except FeatureNotFound: - soup = BeautifulSoup(html, "html.parser") - return soup - - -def get_soup(uri: str) -> BeautifulSoup: - r"""Get soup. 
- - :param uri: - :type uri: str - :rtype: BeautifulSoup - """ - if uri_scheme(uri): - with request.urlopen(uri) as f: # nosec: B310 - html = f.read() - else: - text = get_man(uri) - html = convert_text(text, "html", "man") - return html2soup(html) - - -def get_md_tokens(filename: str) -> list[Token]: - r"""Get markdown tokens. - - :param filename: - :type filename: str - :rtype: list[Token] - """ - md = MarkdownIt("commonmark", {}) - text = get_man(filename) - return md.parse(convert_text(text, "markdown", "man")) diff --git a/src/termux_language_server/packages/__init__.py b/src/termux_language_server/packages/__init__.py index 35d9827..bdc924f 100644 --- a/src/termux_language_server/packages/__init__.py +++ b/src/termux_language_server/packages/__init__.py @@ -1,8 +1,20 @@ r"""Packages ============ """ + from .. import FILETYPE +PACKAGE_VARIABLES = { + "PKGBUILD": { + "depends", + "makedepends", + "optdepends", + "conflicts", + "provides", + "replaces", + } +} + def search_package_document(name: str, filetype: FILETYPE) -> str: r"""Search package document. @@ -20,15 +32,17 @@ def search_package_document(name: str, filetype: FILETYPE) -> str: return get_package_document(name) -def search_package_names(filetype: FILETYPE) -> list[str]: +def search_package_names(name: str, filetype: FILETYPE) -> dict[str, str]: r"""Search package names. + :param name: + :type name: str :param filetype: :type filetype: FILETYPE - :rtype: list[str] + :rtype: dict[str, str] """ if filetype == "PKGBUILD": from .pkgbuild import get_package_names else: raise NotImplementedError - return get_package_names() + return get_package_names(name) diff --git a/src/termux_language_server/packages/pkgbuild.py b/src/termux_language_server/packages/pkgbuild.py index 9e7572c..3e750af 100644 --- a/src/termux_language_server/packages/pkgbuild.py +++ b/src/termux_language_server/packages/pkgbuild.py @@ -1,19 +1,14 @@ r"""PKGBUILD packages ===================== """ + from pathlib import Path from jinja2 import Template from platformdirs import user_config_path +from pyalpm import Handle, Package -try: - from pyalpm import Handle - - DB = Handle(".", "/var/lib/pacman").get_localdb() -except ImportError: - from argparse import Namespace - - DB = Namespace(pkgcache=[]) +DB = Handle(".", "/var/lib/pacman").get_localdb() TEMPLATE_NAME = "template.md.j2" PATH = user_config_path("pacman") / TEMPLATE_NAME if not PATH.exists(): @@ -21,6 +16,18 @@ TEMPLATE = PATH.read_text() +def render_document(pkg: Package, template: str = TEMPLATE) -> str: + r"""Render document. + + :param pkg: + :type pkg: Package + :param template: + :type template: str + :rtype: str + """ + return Template(template).render(pkg=pkg) + + def get_package_document(name: str, template: str = TEMPLATE) -> str: r"""Get package document. @@ -30,15 +37,18 @@ def get_package_document(name: str, template: str = TEMPLATE) -> str: :type template: str :rtype: str """ - for pkg in DB.pkgcache: - if pkg.name == name: - return Template(template).render(pkg=pkg) - return "" + return render_document(DB.get_pkg(name), template) -def get_package_names() -> list[str]: +def get_package_names(name: str) -> dict[str, str]: r"""Get package names. 
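render_document() above simply feeds a pyalpm Package into the user's Jinja2 template; a minimal sketch with a made-up template string and a SimpleNamespace standing in for a real Package (the actual template.md.j2 content is user-provided):

from types import SimpleNamespace

from jinja2 import Template

template = "# {{ pkg.name }} {{ pkg.version }}\n\n{{ pkg.desc }}"       # hypothetical template
pkg = SimpleNamespace(name="vim", version="1.0-1", desc="Vi Improved")  # stands in for pyalpm.Package
print(Template(template).render(pkg=pkg))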
- :rtype: list[str] + :param name: + :type name: str + :rtype: dict[str, str] """ - return [pkg.name for pkg in DB.pkgcache] + return { + pkg.name: render_document(pkg) + for pkg in DB.search(name) + if pkg.name.startswith(name) + } diff --git a/src/termux_language_server/parser.py b/src/termux_language_server/parser.py deleted file mode 100644 index adeda13..0000000 --- a/src/termux_language_server/parser.py +++ /dev/null @@ -1,43 +0,0 @@ -r"""Parser -========== -""" -import os -from glob import glob - -from platformdirs import user_data_path -from tree_sitter import Language, Parser, Tree - -LIBS = glob( - os.path.join( - os.path.join(os.path.join(os.path.dirname(__file__), "data"), "lib"), - "*", - ) -) -if len(LIBS) > 0: - LIB = LIBS[0] -else: - # https://github.com/nvim-treesitter/nvim-treesitter/issues/5493 - LIB = str( - next( - ( - user_data_path("nvim") - / "repos" - / "github.com" - / "nvim-treesitter" - / "nvim-treesitter" - / "parser" - ).glob("bash.*") - ) - ) -PARSER = Parser() -PARSER.set_language(Language(LIB, "bash")) - - -def parse(source: bytes) -> Tree: - r"""Parse. - - :param source: - :type source: bytes - :rtype: Tree - """ - return PARSER.parse(source) diff --git a/src/termux_language_server/schema.py b/src/termux_language_server/schema.py index 6acc66d..8a66da3 100644 --- a/src/termux_language_server/schema.py +++ b/src/termux_language_server/schema.py @@ -1,15 +1,15 @@ r"""Schema ========== """ + from dataclasses import dataclass from typing import Literal +from lsp_tree_sitter import UNI +from lsp_tree_sitter.schema import Trie from lsprotocol.types import Position, Range from tree_sitter import Node -from .tree_sitter_lsp import UNI -from .tree_sitter_lsp.schema import Trie - @dataclass class BashTrie(Trie): @@ -17,6 +17,29 @@ class BashTrie(Trie): value: dict[str, "Trie"] | list["Trie"] | str | Literal[0] = 0 + @classmethod + def from_string_node(cls, node: Node, parent: "Trie | None") -> "Trie": + r"""From string node. + + ``_ + + :param cls: + :param node: + :type node: Node + :param parent: + :type parent: Trie | None + :rtype: "Trie" + """ + if node.type == "string" and node.children == 3: + node = node.children[1] + text = UNI(node).text + _range = UNI(node).range + if node.type in {"string", "raw_string"} and node.children != 3: + text = text.strip("'\"") + _range.start.character += 1 + _range.end.character -= 1 + return cls(_range, parent, text) + @classmethod def from_node(cls, node: Node, parent: "Trie | None") -> "Trie": r"""From node. 
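A rough sketch of the quote-stripping idea behind from_string_node(), using the tree_sitter / tree_sitter_bash setup this diff adopts in utils.py; note the explicit len() when counting the string node's children:

from tree_sitter import Language, Parser
from tree_sitter_bash import language

parser = Parser()
parser.language = Language(language())
tree = parser.parse(b'pkgver="1.0"\n')
value = tree.root_node.children[0].child_by_field_name("value")
if value is not None and value.type == "string" and len(value.children) == 3:
    print(value.children[1].text.decode())    # unquoted string_content: 1.0
elif value is not None:
    print(value.text.decode().strip("'\""))   # raw_string etc.: strip the surrounding quotes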
@@ -30,20 +53,21 @@ def from_node(cls, node: Node, parent: "Trie | None") -> "Trie": string_types = { "word", "string", + "raw_string", "concatenation", "number", "simple_expansion", } if node.type in string_types: - return cls(UNI.node2range(node), parent, UNI.node2text(node)) + return cls.from_string_node(node, parent) if node.type == "function_definition": - return cls(UNI.node2range(node), parent, 0) + return cls(UNI(node).range, parent, 0) if node.type == "variable_assignment": if len(node.children) < 3: - return cls(UNI.node2range(node), parent, "") + return cls(UNI(node).range, parent, "") node = node.children[2] if node.type == "array": - trie = cls(UNI.node2range(node), parent, []) + trie = cls(UNI(node).range, parent, []) value: list[Trie] = trie.value # type: ignore trie.value = [ cls.from_node(child, trie) @@ -52,7 +76,7 @@ def from_node(cls, node: Node, parent: "Trie | None") -> "Trie": ] return trie if node.type in string_types: - return cls(UNI.node2range(node), parent, UNI.node2text(node)) + return cls.from_string_node(node, parent) if node.type == "program": trie = cls(Range(Position(0, 0), Position(1, 0)), parent, {}) value: dict[str, Trie] = trie.value # type: ignore @@ -61,7 +85,7 @@ def from_node(cls, node: Node, parent: "Trie | None") -> "Trie": "variable_assignment", "function_definition", }: - value[UNI.node2text(child.children[0])] = cls.from_node( + value[UNI(child.children[0]).text] = cls.from_node( child, trie ) return trie diff --git a/src/termux_language_server/server.py b/src/termux_language_server/server.py index 887edaf..7454411 100644 --- a/src/termux_language_server/server.py +++ b/src/termux_language_server/server.py @@ -1,15 +1,20 @@ r"""Server ========== """ + import re from typing import Any +from lsp_tree_sitter.complete import get_completion_list_by_enum +from lsp_tree_sitter.diagnose import get_diagnostics +from lsp_tree_sitter.finders import PositionFinder +from lsp_tree_sitter.format import get_text_edits from lsprotocol.types import ( TEXT_DOCUMENT_COMPLETION, TEXT_DOCUMENT_DID_CHANGE, TEXT_DOCUMENT_DID_OPEN, TEXT_DOCUMENT_DOCUMENT_LINK, - TEXT_DOCUMENT_FORMATTING, + # TEXT_DOCUMENT_FORMATTING, TEXT_DOCUMENT_HOVER, CompletionItem, CompletionItemKind, @@ -22,24 +27,25 @@ Hover, MarkupContent, MarkupKind, - Position, + PublishDiagnosticsParams, TextDocumentPositionParams, TextEdit, ) -from pygls.server import LanguageServer +from pygls.lsp.server import LanguageServer from .finders import ( DIAGNOSTICS_FINDER_CLASSES, FORMAT_FINDER_CLASSES, CSVFinder, + MinGWFinder, PackageFinder, ) -from .packages import search_package_document, search_package_names -from .parser import parse -from .tree_sitter_lsp.diagnose import get_diagnostics -from .tree_sitter_lsp.finders import PositionFinder -from .tree_sitter_lsp.format import get_text_edits -from .utils import get_filetype, get_schema +from .packages import ( + PACKAGE_VARIABLES, + search_package_document, + search_package_names, +) +from .utils import get_filetype, get_schema, parser class TermuxLanguageServer(LanguageServer): @@ -67,8 +73,10 @@ def did_change(params: DidChangeTextDocumentParams) -> None: filetype = get_filetype(params.text_document.uri) if filetype == "": return None - document = self.workspace.get_document(params.text_document.uri) - self.trees[document.uri] = parse(document.source.encode()) + document = self.workspace.get_text_document( + params.text_document.uri + ) + self.trees[document.uri] = parser.parse(document.source.encode()) diagnostics = get_diagnostics( document.uri, 
self.trees[document.uri], @@ -79,9 +87,15 @@ def did_change(params: DidChangeTextDocumentParams) -> None: from .tools.namcap import namcap diagnostics += namcap(document.path, document.source) - self.publish_diagnostics(params.text_document.uri, diagnostics) + self.text_document_publish_diagnostics( + PublishDiagnosticsParams( + params.text_document.uri, + diagnostics, + ) + ) - @self.feature(TEXT_DOCUMENT_FORMATTING) + # https://github.com/termux/termux-language-server/issues/19#issuecomment-2413779969 + # @self.feature(TEXT_DOCUMENT_FORMATTING) def format(params: DocumentFormattingParams) -> list[TextEdit]: r"""Format. @@ -92,7 +106,9 @@ def format(params: DocumentFormattingParams) -> list[TextEdit]: filetype = get_filetype(params.text_document.uri) if filetype == "": return [] - document = self.workspace.get_document(params.text_document.uri) + document = self.workspace.get_text_document( + params.text_document.uri + ) return get_text_edits( document.uri, self.trees[document.uri], @@ -111,7 +127,9 @@ def document_link(params: DocumentLinkParams) -> list[DocumentLink]: filetype = get_filetype(params.text_document.uri) if filetype == "": return [] - document = self.workspace.get_document(params.text_document.uri) + document = self.workspace.get_text_document( + params.text_document.uri + ) if filetype in {"build.sh", "subpackage.sh"}: return CSVFinder(filetype).get_document_links( document.uri, @@ -119,10 +137,21 @@ def document_link(params: DocumentLinkParams) -> list[DocumentLink]: "https://github.com/termux/termux-packages/tree/master/packages/{{name}}/build.sh", ) elif filetype in {"PKGBUILD", "install"}: + if ( + len( + MinGWFinder().find_all( + document.uri, self.trees[document.uri] + ) + ) + > 0 + ): + url = "https://packages.msys2.org/base/{{uni.text}}" + else: + url = "https://archlinux.org/packages/{{uni.text}}" return PackageFinder().get_document_links( document.uri, self.trees[document.uri], - "https://archlinux.org/packages/{{uni.get_text()}}", + url, ) raise NotImplementedError @@ -137,7 +166,9 @@ def hover(params: TextDocumentPositionParams) -> Hover | None: filetype = get_filetype(params.text_document.uri) if filetype == "": return None - document = self.workspace.get_document(params.text_document.uri) + document = self.workspace.get_text_document( + params.text_document.uri + ) uni = PositionFinder(params.position).find( document.uri, self.trees[document.uri] ) @@ -146,8 +177,8 @@ def hover(params: TextDocumentPositionParams) -> Hover | None: parent = uni.node.parent if parent is None: return None - text = uni.get_text() - _range = uni.get_range() + text = uni.text + _range = uni.range # we only hover variable names and function names if not ( uni.node.type == "variable_name" @@ -158,8 +189,13 @@ def hover(params: TextDocumentPositionParams) -> Hover | None: "command_name", } ): - # or package names (for PKGBUILD) - if parent.type == "array": + if ( + parent.type == "array" + and parent.parent is not None + and parent.parent.children[0].text is not None + and parent.parent.children[0].text.decode() + in PACKAGE_VARIABLES.get(filetype, set()) + ): result = search_package_document(text, filetype) if result is None: return None @@ -197,30 +233,51 @@ def completions(params: CompletionParams) -> CompletionList: filetype = get_filetype(params.text_document.uri) if filetype == "": return CompletionList(False, []) - document = self.workspace.get_document(params.text_document.uri) - uni = PositionFinder( - Position(params.position.line, params.position.character - 1) - 
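The hover and completion handlers above only treat array elements as package names when the enclosing assignment is one of the dependency-like variables; a tiny sketch of that gate (file and variable names are illustrative):

PACKAGE_VARIABLES = {
    "PKGBUILD": {"depends", "makedepends", "optdepends", "conflicts", "provides", "replaces"},
}

def is_package_array(variable_name: str, filetype: str) -> bool:
    """True when the element should be looked up in pacman's database."""
    return variable_name in PACKAGE_VARIABLES.get(filetype, set())

print(is_package_array("depends", "PKGBUILD"))  # True  -> pyalpm lookup
print(is_package_array("arch", "PKGBUILD"))     # False -> fall back to the JSON schema
print(is_package_array("depends", "build.sh"))  # False -> Termux recipes are not gated here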
).find(document.uri, self.trees[document.uri]) + document = self.workspace.get_text_document( + params.text_document.uri + ) + uni = PositionFinder(params.position, right_equal=True).find( + document.uri, self.trees[document.uri] + ) if uni is None: return CompletionList(False, []) parent = uni.node.parent if parent is None: return CompletionList(False, []) - text = uni.get_text() - if parent.type == "array": + text = uni.text + if ( + parent.type == "array" + and parent.parent is not None + and parent.parent.children[0].text is not None + and parent.parent.children[0].text.decode() + in PACKAGE_VARIABLES.get(filetype, set()) + ): return CompletionList( False, [ CompletionItem( k, kind=CompletionItemKind.Module, + documentation=MarkupContent( + MarkupKind.Markdown, v + ), insert_text=k, ) - for k in search_package_names(filetype) - if k.startswith(text) + for k, v in search_package_names( + text, filetype + ).items() ], ) schema = get_schema(filetype) + if ( + parent.type == "array" + and parent.parent is not None + and parent.parent.children[0].text is not None + ): + property = schema["properties"].get( + parent.parent.children[0].text.decode(), {} + ) + return get_completion_list_by_enum(text, property) return CompletionList( False, [ diff --git a/src/termux_language_server/tools/namcap.py b/src/termux_language_server/tools/namcap.py index 47b7ea3..bbe94e1 100644 --- a/src/termux_language_server/tools/namcap.py +++ b/src/termux_language_server/tools/namcap.py @@ -1,6 +1,7 @@ r"""Namcap ========== """ + from lsprotocol.types import Diagnostic, DiagnosticSeverity, Position, Range diff --git a/src/termux_language_server/tree_sitter_lsp/__init__.py b/src/termux_language_server/tree_sitter_lsp/__init__.py deleted file mode 100644 index 3ae9daf..0000000 --- a/src/termux_language_server/tree_sitter_lsp/__init__.py +++ /dev/null @@ -1,438 +0,0 @@ -r"""Tree-sitter LSP -=================== -""" -import os -from copy import deepcopy -from dataclasses import dataclass -from typing import Any - -from jinja2 import Template -from lsprotocol.types import ( - Diagnostic, - DiagnosticSeverity, - DocumentLink, - Location, - Position, - Range, - TextEdit, -) -from pygls.uris import to_fs_path -from tree_sitter import Node, Tree, TreeCursor - -# maximum of recursive search -LEVEL = 5 - - -@dataclass -class UNI: - r"""Unified node identifier.""" - - uri: str - node: Node - - def __str__(self) -> str: - r"""Str. - - :rtype: str - """ - return f"{self.get_text()}@{self.uri}:{self.node.start_point[0] + 1}:{self.node.start_point[1] + 1}-{self.node.end_point[0] + 1}:{self.node.end_point[1]}" - - def get_text(self) -> str: - r"""Get text. - - :rtype: str - """ - return self.node2text(self.node) - - @staticmethod - def node2text(node: Node) -> str: - r"""Node2text. - - :param node: - :type node: Node - :rtype: str - """ - return node.text.decode() - - def get_location(self) -> Location: - r"""Get location. - - :rtype: Location - """ - return Location(self.uri, self.get_range()) - - def get_range(self) -> Range: - r"""Get range. - - :rtype: Range - """ - return self.node2range(self.node) - - @staticmethod - def node2range(node: Node) -> Range: - r"""Node2range. - - :param node: - :type node: Node - :rtype: Range - """ - return Range(Position(*node.start_point), Position(*node.end_point)) - - def get_path(self) -> str: - r"""Get path. - - :rtype: str - """ - return self.uri2path(self.uri) - - @staticmethod - def uri2path(uri: str) -> str: - r"""Uri2path. 
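When the cursor sits in an array assigned to an ordinary schema property, completion now falls through to the property's enum; a sketch of that call, assuming the lsp_tree_sitter package this diff migrates to and an "arch"-like property fragment (the exact filtering is up to the library):

from lsp_tree_sitter.complete import get_completion_list_by_enum

arch_property = {
    "type": "array",
    "items": {"enum": ["any", "i686", "x86_64", "aarch64"]},
    "uniqueItems": True,
}
completions = get_completion_list_by_enum("a", arch_property)
print([item.label for item in completions.items])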
- - :param uri: - :type uri: str - :rtype: str - """ - if path := to_fs_path(uri): - return path - raise TypeError - - def get_diagnostic( - self, - message: str, - severity: DiagnosticSeverity, - **kwargs: Any, - ) -> Diagnostic: - r"""Get diagnostic. - - :param message: - :type message: str - :param severity: - :type severity: DiagnosticSeverity - :param kwargs: - :type kwargs: Any - :rtype: Diagnostic - """ - _range = self.get_range() - _range.end.character -= 1 - return Diagnostic( - _range, - Template(message).render(uni=self, **kwargs), - severity, - ) - - def get_text_edit(self, new_text: str) -> TextEdit: - r"""Get text edit. - - :param new_text: - :type new_text: str - :rtype: TextEdit - """ - return TextEdit(self.get_range(), new_text) - - def get_document_link(self, target: str, **kwargs) -> DocumentLink: - r"""Get document link. - - :param target: - :type target: str - :param kwargs: - :rtype: DocumentLink - """ - return DocumentLink( - self.get_range(), - Template(target).render(uni=self, **kwargs), - ) - - @staticmethod - def join(path, text) -> str: - r"""Join. - - :param path: - :param text: - :rtype: str - """ - return os.path.join(os.path.dirname(path), text) - - -@dataclass -class Finder: - r"""Finder.""" - - message: str = "" - severity: DiagnosticSeverity = DiagnosticSeverity.Error - - def __post_init__(self) -> None: - r"""Post init. - - :rtype: None - """ - self.reset() - - def __call__(self, uni: UNI) -> bool: - r"""Call. - - :param uni: - :type uni: UNI - :rtype: bool - """ - return True - - def __and__(self, second: "Finder") -> "Finder": - r"""And. - - :param second: - :type second: Finder - :rtype: "Finder" - """ - finder = deepcopy(self) - finder.__call__ = lambda uni: self(uni) and second(uni) - return finder - - def __or__(self, second: "Finder") -> "Finder": - r"""Or. - - :param second: - :type second: Finder - :rtype: "Finder" - """ - finder = deepcopy(self) - finder.__call__ = lambda uni: self(uni) or second(uni) - return finder - - def __minus__(self, second: "Finder") -> "Finder": - r"""Minus. - - :param second: - :type second: Finder - :rtype: "Finder" - """ - finder = deepcopy(self) - finder.__call__ = lambda uni: self(uni) and not second(uni) - return finder - - def is_include_node(self, node: Node) -> bool: - r"""Is include node. - - :param node: - :type node: Node - :rtype: bool - """ - return False - - def parse(self, code: bytes) -> Tree: - r"""Parse. - - :param code: - :type code: bytes - :rtype: Tree - """ - raise NotImplementedError - - def uri2tree(self, uri: str) -> Tree | None: - r"""Convert URI to tree. - - :param uri: - :type uri: str - :rtype: Tree | None - """ - path = UNI.uri2path(uri) - if not os.path.exists(path): - return None - with open(path, "rb") as f: - code = f.read() - return self.parse(code) - - def uni2uri(self, uni: UNI) -> str: - r"""Convert UNI to URI. - - :param uni: - :type uni: UNI - :rtype: str - """ - return uni.join(uni.uri, uni.get_text()) - - def uni2path(self, uni: UNI) -> str: - r"""Convert UNI to path. - - :param self: - :param uni: - :type uni: UNI - :rtype: str - """ - uri = self.uni2uri(uni) - return UNI.uri2path(uri) - - def move_cursor( - self, uri: str, cursor: TreeCursor, is_all: bool = False - ) -> str | None: - r"""Move cursor. 
- - :param self: - :param uri: - :type uri: str - :param cursor: - :type cursor: TreeCursor - :param is_all: - :type is_all: bool - :rtype: str | None - """ - while self(UNI(uri, cursor.node)) is False: - if self.is_include_node(cursor.node) and self.level < LEVEL: - self.level += 1 - old_uri = uri - uri = self.uni2uri(UNI(uri, cursor.node)) - tree = self.uri2tree(uri) - if tree is not None: - if is_all: - self.find_all(uri, tree, False) - else: - result = self.find(uri, tree) - if result is not None: - return - uri = old_uri - self.level -= 1 - if cursor.node.child_count > 0: - cursor.goto_first_child() - continue - while cursor.node.next_sibling is None: - cursor.goto_parent() - # when cannot find new nodes, return - if cursor.node.parent is None: - return None - cursor.goto_next_sibling() - return uri - - def reset(self) -> None: - r"""Reset. - - :rtype: None - """ - self.level = 0 - self.unis = [] - - def prepare( - self, uri: str, tree: Tree | None = None, reset: bool = True - ) -> TreeCursor: - r"""Prepare. - - :param uri: - :type uri: str - :param tree: - :type tree: Tree | None - :param reset: - :type reset: bool - :rtype: TreeCursor - """ - if reset: - self.reset() - if tree is None: - tree = self.uri2tree(uri) - if tree is None: - raise TypeError - return tree.walk() - - def find( - self, uri: str, tree: Tree | None = None, reset: bool = True - ) -> UNI | None: - r"""Find. - - :param uri: - :type uri: str - :param tree: - :type tree: Tree | None - :param reset: - :type reset: bool - :rtype: UNI | None - """ - cursor = self.prepare(uri, tree, reset) - _uri = self.move_cursor(uri, cursor, False) - if _uri is not None: - return UNI(_uri, cursor.node) - else: - return None - - def find_all( - self, uri: str, tree: Tree | None = None, reset: bool = True - ) -> list[UNI]: - r"""Find all. - - :param uri: - :type uri: str - :param tree: - :type tree: Tree | None - :param reset: - :type reset: bool - :rtype: list[UNI] - """ - cursor = self.prepare(uri, tree, reset) - while True: - _uri = self.move_cursor(uri, cursor, True) - if _uri is not None: - self.unis += [UNI(_uri, cursor.node)] - while cursor.node.next_sibling is None: - cursor.goto_parent() - # when cannot find new nodes, return - if cursor.node.parent is None: - return self.unis - cursor.goto_next_sibling() - - def uni2diagnostic(self, uni: UNI) -> Diagnostic: - r"""Uni2diagnostic. - - :param uni: - :type uni: UNI - :rtype: Diagnostic - """ - return uni.get_diagnostic(self.message, self.severity) - - def unis2diagnostics(self, unis: list[UNI]) -> list[Diagnostic]: - r"""Unis2diagnostics. - - :param unis: - :type unis: list[UNI] - :rtype: list[Diagnostic] - """ - return [self.uni2diagnostic(uni) for uni in unis] - - def get_diagnostics(self, uri: str, tree: Tree) -> list[Diagnostic]: - r"""Get diagnostics. - - :param self: - :param uri: - :type uri: str - :param tree: - :type tree: Tree - :rtype: list[Diagnostic] - """ - return self.unis2diagnostics(self.find_all(uri, tree)) - - def get_text_edits(self, uri: str, tree: Tree) -> list[TextEdit]: - r"""Get text edits. - - :param self: - :param uri: - :type uri: str - :param tree: - :type tree: Tree - :rtype: list[TextEdit] - """ - self.find_all(uri, tree) - return [] - - def get_document_links( - self, uri: str, tree: Tree, template: str - ) -> list[DocumentLink]: - r"""Get document links. 
- - :param uri: - :type uri: str - :param tree: - :type tree: Tree - :param template: - :type template: str - :rtype: list[DocumentLink] - """ - self.find_all(uri, tree) - return [ - uni.get_document_link(template) for uni in self.find_all(uri, tree) - ] diff --git a/src/termux_language_server/tree_sitter_lsp/complete.py b/src/termux_language_server/tree_sitter_lsp/complete.py deleted file mode 100644 index f7731a9..0000000 --- a/src/termux_language_server/tree_sitter_lsp/complete.py +++ /dev/null @@ -1,48 +0,0 @@ -r"""Complete -============ -""" -import os -from glob import glob -from pathlib import Path - -from lsprotocol.types import CompletionItem, CompletionItemKind, CompletionList - -from . import UNI - - -def get_completion_list_by_uri( - uri: str, text: str = "", expr: str = "*" -) -> CompletionList: - r"""Get completion list by uri. - - :param uri: - :type uri: str - :param text: - :type text: str - :param expr: - :type expr: str - :rtype: CompletionList - """ - dirname = os.path.dirname(UNI.uri2path(uri)) - return CompletionList( - False, - [ - CompletionItem( - x.rpartition(dirname + os.path.sep)[-1], - kind=CompletionItemKind.File - if os.path.isfile(x) - else CompletionItemKind.Folder, - documentation=Path(x).read_text() - if os.path.isfile(x) - else "\n".join(os.listdir(x)), - insert_text=x.rpartition(dirname + os.path.sep)[-1], - ) - for x in [ - file + ("" if os.path.isfile(file) else os.path.sep) - for file in glob( - os.path.join(dirname, text + f"**{os.path.sep}" + expr), - recursive=True, - ) - ] - ], - ) diff --git a/src/termux_language_server/tree_sitter_lsp/diagnose.py b/src/termux_language_server/tree_sitter_lsp/diagnose.py deleted file mode 100644 index 464b004..0000000 --- a/src/termux_language_server/tree_sitter_lsp/diagnose.py +++ /dev/null @@ -1,196 +0,0 @@ -r"""Diagnose -============ - -Wrap -``Diagnostic ``_ -to a linter. -""" -import sys -from typing import Callable, Literal - -from lsprotocol.types import Diagnostic, DiagnosticSeverity -from tree_sitter import Tree - -from . import Finder -from .utils import get_finders, get_paths - - -def get_diagnostics_by_finders( - uri: str, tree: Tree, finders: list[Finder] -) -> list[Diagnostic]: - r"""Get diagnostics by finders. - - :param uri: - :type uri: str - :param tree: - :type tree: Tree - :param finders: - :type finders: list[Finder] - :rtype: list[Diagnostic] - """ - return [ - diagnostic - for finder in finders - for diagnostic in finder.get_diagnostics(uri, tree) - ] - - -def get_diagnostics( - uri: str, - tree: Tree, - classes: list[type[Finder]] | None = None, - filetype: str | None = None, -) -> list[Diagnostic]: - r"""Get diagnostics. - - :param uri: - :type uri: str - :param tree: - :type tree: Tree - :param classes: - :type classes: list[type[Finder]] | None - :param filetype: - :type filetype: str | None - :rtype: list[Diagnostic] - """ - finders, finder_classes = get_finders(classes) - if filetype is None: - return get_diagnostics_by_finders(uri, tree, finders) - return [ - diagnostic - for diagnostic in get_diagnostics_by_finders( - uri, tree, finders + [cls(filetype) for cls in finder_classes] - ) - ] - - -def count_level( - diagnostics: list[Diagnostic], - level: DiagnosticSeverity = DiagnosticSeverity.Warning, -) -> int: - r"""Count level. 
- - :param diagnostics: - :type diagnostics: list[Diagnostic] - :param level: - :type level: DiagnosticSeverity - :rtype: int - """ - return len( - [ - diagnostic - for diagnostic in diagnostics - if diagnostic.severity and diagnostic.severity <= level - ] - ) - - -class _Colorama: - """Colorama.""" - - def __getattribute__(self, _: str) -> str: - """Getattribute. - - :param _: - :type _: str - :rtype: str - """ - return "" - - -def diagnostics2linter_messages( - path: str, - diagnostics: list[Diagnostic], - color: Literal["auto", "always", "never"] = "auto", - colors: list[str] | None = None, -) -> list[str]: - r"""Diagnostics2linter messages. - - :param path: - :type path: str - :param diagnostics: - :type diagnostics: list[Diagnostic] - :param color: - :type color: Literal["auto", "always", "never"] - :param colors: - :type colors: list[str] | None - :rtype: list[str] - """ - from colorama import Fore, init - - init() - if not sys.stdout.isatty() and color == "auto" or color == "never": - Fore = _Colorama() - if colors is None: - colors = [Fore.RESET, Fore.RED, Fore.YELLOW, Fore.BLUE, Fore.GREEN] - return [ - f"{Fore.MAGENTA}{path}{Fore.RESET}:{Fore.CYAN}{diagnostic.range.start.line + 1}:{diagnostic.range.start.character + 1}{Fore.RESET}-{Fore.CYAN}{diagnostic.range.end.line + 1}:{diagnostic.range.end.character + 1}{Fore.RESET}:{colors[diagnostic.severity if diagnostic.severity else 0]}{str(diagnostic.severity).split('.')[-1].lower()}{Fore.RESET}: {diagnostic.message}" - for diagnostic in diagnostics - ] - - -def check_by_finders( - paths: list[str], - parse: Callable[[bytes], Tree], - finders: list[Finder], - color: Literal["auto", "always", "never"] = "auto", -) -> int: - r"""Check by finders. - - :param paths: - :type paths: list[str] - :param parse: - :type parse: Callable[[bytes], Tree] - :param finders: - :type finders: list[Finder] - :param color: - :type color: Literal["auto", "always", "never"] - :rtype: int - """ - count = 0 - lines = [] - for path in paths: - with open(path, "rb") as f: - src = f.read() - tree = parse(src) - diagnostics = get_diagnostics_by_finders(path, tree, finders) - count += count_level(diagnostics) - lines += diagnostics2linter_messages(path, diagnostics, color) - if text := "\n".join(lines): - print(text) - return count - - -def check( - paths: list[str], - parse: Callable[[bytes], Tree], - classes: list[type[Finder]] | None = None, - get_filetype: Callable[[str], str] | None = None, - color: Literal["auto", "always", "never"] = "auto", -) -> int: - r"""Check. 
- - :param paths: - :type paths: list[str] - :param parse: - :type parse: Callable[[bytes], Tree] - :param classes: - :type classes: list[type[Finder]] | None - :param get_filetype: - :type get_filetype: Callable[[str], str] | None - :param color: - :type color: Literal["auto", "always", "never"] - :rtype: int - """ - finders, finder_classes = get_finders(classes) - if get_filetype is None: - return check_by_finders(paths, parse, finders, color) - return sum( - check_by_finders( - filepaths, - parse, - finders + [cls(filetype) for cls in finder_classes], - color, - ) - for filetype, filepaths in get_paths(paths, get_filetype).items() - ) diff --git a/src/termux_language_server/tree_sitter_lsp/finders.py b/src/termux_language_server/tree_sitter_lsp/finders.py deleted file mode 100644 index 6257d34..0000000 --- a/src/termux_language_server/tree_sitter_lsp/finders.py +++ /dev/null @@ -1,530 +0,0 @@ -r"""Finders -=========== -""" -import os -from copy import deepcopy -from dataclasses import dataclass -from typing import Any - -from jinja2 import Template -from jsonschema import Validator -from jsonschema.validators import validator_for -from lsprotocol.types import ( - Diagnostic, - DiagnosticSeverity, - Location, - Position, - Range, - TextEdit, -) -from tree_sitter import Node, Tree - -from . import UNI, Finder -from .schema import Trie - - -@dataclass -class MissingFinder(Finder): - r"""Missingfinder.""" - - message: str = "{{uni.get_text()}}: missing" - severity: DiagnosticSeverity = DiagnosticSeverity.Error - - def __call__(self, uni: UNI) -> bool: - r"""Call. - - :param uni: - :type uni: UNI - :rtype: bool - """ - node = uni.node - return node.is_missing and not ( - any(child.is_missing for child in node.children) - ) - - -@dataclass -class ErrorFinder(Finder): - r"""Errorfinder.""" - - message: str = "{{uni.get_text()}}: error" - severity: DiagnosticSeverity = DiagnosticSeverity.Error - - def __call__(self, uni: UNI) -> bool: - r"""Call. - - :param uni: - :type uni: UNI - :rtype: bool - """ - node = uni.node - return node.has_error and not ( - any(child.has_error for child in node.children) - ) - - -@dataclass -class NotFileFinder(Finder): - r"""NotFilefinder.""" - - message: str = "{{uni.get_text()}}: no such file or directory" - severity: DiagnosticSeverity = DiagnosticSeverity.Error - - def __call__(self, uni: UNI) -> bool: - r"""Call. - - :param uni: - :type uni: UNI - :rtype: bool - """ - path = self.uni2path(uni) - return not (os.path.isfile(path) or os.path.isdir(path)) - - -@dataclass -class RepeatedFinder(Finder): - r"""Repeatedfinder.""" - - message: str = "{{uni.get_text()}}: is repeated on {{_uni}}" - severity: DiagnosticSeverity = DiagnosticSeverity.Warning - - def reset(self) -> None: - r"""Reset. - - :rtype: None - """ - self.level = 0 - self.unis = [] - self._unis = [] - self.uni_pairs = [] - - def filter(self, uni: UNI) -> bool: - r"""Filter. - - :param uni: - :type uni: UNI - :rtype: bool - """ - return True - - def compare(self, uni: UNI, _uni: UNI) -> bool: - r"""Compare. - - :param uni: - :type uni: UNI - :param _uni: - :type _uni: UNI - :rtype: bool - """ - return uni.node.text == _uni.node.text - - def __call__(self, uni: UNI) -> bool: - r"""Call. 
- - :param uni: - :type uni: UNI - :rtype: bool - """ - if self.filter(uni) is False: - return False - for _uni in self._unis: - if self.compare(uni, _uni): - self.uni_pairs += [[uni, _uni]] - return True - self._unis += [uni] - return False - - def get_definitions(self, uni: UNI) -> list[Location]: - r"""Get definitions. - - :param uni: - :type uni: UNI - :rtype: list[Location] - """ - for uni_, _uni in self.uni_pairs: - # cache hit - if uni == uni_: - return [_uni.get_location()] - return [] - - def get_references(self, uni: UNI) -> list[Location]: - r"""Get references. - - :param uni: - :type uni: UNI - :rtype: list[Location] - """ - locations = [] - for uni_, _uni in self.uni_pairs: - # cache hit - if uni == _uni: - locations += [uni_.get_location()] - return locations - - def get_text_edits(self, uri: str, tree: Tree) -> list[TextEdit]: - r"""Get text edits. Only return two to avoid `Overlapping edit` - - :param self: - :param uri: - :type uri: str - :param tree: - :type tree: Tree - :rtype: list[TextEdit] - """ - self.find_all(uri, tree) - for uni, _uni in self.uni_pairs: - # swap 2 unis - return [ - uni.get_text_edit(_uni.get_text()), - _uni.get_text_edit(uni.get_text()), - ] - return [] - - def uni2diagnostic(self, uni: UNI) -> Diagnostic: - r"""Uni2diagnostic. - - :param uni: - :type uni: UNI - :rtype: Diagnostic - """ - for uni_, _uni in self.uni_pairs: - if uni == uni_: - return uni.get_diagnostic( - self.message, self.severity, _uni=_uni - ) - return uni.get_diagnostic(self.message, self.severity) - - -@dataclass -class UnsortedFinder(RepeatedFinder): - r"""Unsortedfinder.""" - - message: str = "{{uni.get_text()}}: is unsorted due to {{_uni}}" - severity: DiagnosticSeverity = DiagnosticSeverity.Warning - - def compare(self, uni: UNI, _uni: UNI) -> bool: - r"""Compare. - - :param uni: - :type uni: UNI - :param _uni: - :type _uni: UNI - :rtype: bool - """ - return uni.node.text < _uni.node.text - - -@dataclass(init=False) -class UnFixedOrderFinder(RepeatedFinder): - r"""Unfixedorderfinder.""" - - def __init__( - self, - order: list[Any], - message: str = "{{uni.get_text()}}: is unsorted due to {{_uni}}", - severity: DiagnosticSeverity = DiagnosticSeverity.Warning, - ) -> None: - r"""Init. - - :param order: - :type order: list[Any] - :param message: - :type message: str - :param severity: - :type severity: DiagnosticSeverity - :rtype: None - """ - super().__init__(message, severity) - self.order = order - - def filter(self, uni: UNI) -> bool: - r"""Filter. - - :param uni: - :type uni: UNI - :rtype: bool - """ - return uni.get_text() in self.order - - def compare(self, uni: UNI, _uni: UNI) -> bool: - r"""Compare. - - :param uni: - :type uni: UNI - :param _uni: - :type _uni: UNI - :rtype: bool - """ - return self.order.index(uni.get_text()) < self.order.index( - _uni.get_text() - ) - - -@dataclass(init=False) -class TypeFinder(Finder): - r"""Typefinder.""" - - def __init__( - self, - type: str, - message: str = "", - severity: DiagnosticSeverity = DiagnosticSeverity.Information, - ) -> None: - r"""Init. - - :param type: - :type type: str - :param message: - :type message: str - :param severity: - :type severity: DiagnosticSeverity - :rtype: None - """ - super().__init__(message, severity) - self.type = type - - def __call__(self, uni: UNI) -> bool: - r"""Call. 
- - :param uni: - :type uni: UNI - :rtype: bool - """ - node = uni.node - return node.type == self.type - - -@dataclass(init=False) -class PositionFinder(Finder): - r"""Positionfinder.""" - - def __init__( - self, - position: Position, - message: str = "", - severity: DiagnosticSeverity = DiagnosticSeverity.Information, - ) -> None: - r"""Init. - - :param position: - :type position: Position - :param message: - :type message: str - :param severity: - :type severity: DiagnosticSeverity - :rtype: None - """ - super().__init__(message, severity) - self.position = position - - @staticmethod - def belong(position: Position, node: Node) -> bool: - r"""Belong. - - :param position: - :type position: Position - :param node: - :type node: Node - :rtype: bool - """ - return ( - Position(*node.start_point) <= position < Position(*node.end_point) - ) - - def __call__(self, uni: UNI) -> bool: - r"""Call. - - :param uni: - :type uni: UNI - :rtype: bool - """ - node = uni.node - return node.child_count == 0 and self.belong(self.position, node) - - -@dataclass(init=False) -class RangeFinder(Finder): - r"""Rangefinder.""" - - def __init__( - self, - range: Range, - message: str = "", - severity: DiagnosticSeverity = DiagnosticSeverity.Information, - ) -> None: - r"""Init. - - :param range: - :type range: Range - :param message: - :type message: str - :param severity: - :type severity: DiagnosticSeverity - :rtype: None - """ - super().__init__(message, severity) - self.range = range - - @staticmethod - def equal(_range: Range, node: Node) -> bool: - r"""Equal. - - :param _range: - :type _range: Range - :param node: - :type node: Node - :rtype: bool - """ - return _range.start == Position( - *node.start_point - ) and _range.end == Position(*node.end_point) - - def __call__(self, uni: UNI) -> bool: - r"""Call. - - :param uni: - :type uni: UNI - :rtype: bool - """ - node = uni.node - return self.equal(self.range, node) - - -@dataclass(init=False) -class RequiresFinder(Finder): - r"""Requiresfinder.""" - - def __init__( - self, - requires: set[Any], - message: str = "{{require}}: required", - severity: DiagnosticSeverity = DiagnosticSeverity.Error, - ) -> None: - r"""Init. - - :param requires: - :type requires: set[Any] - :param message: - :type message: str - :param severity: - :type severity: DiagnosticSeverity - :rtype: None - """ - self.requires = requires - # will call reset() which will call self.requires - super().__init__(message, severity) - - def reset(self) -> None: - r"""Reset. - - :rtype: None - """ - self.level = 0 - self.unis = [] - self._requires = deepcopy(self.requires) - - def filter(self, uni: UNI, require: Any) -> bool: - r"""Filter. - - :param uni: - :type uni: UNI - :param require: - :type require: Any - :rtype: bool - """ - return False - - def __call__(self, uni: UNI) -> bool: - r"""Call. - - :param uni: - :type uni: UNI - :rtype: bool - """ - found = set() - for require in self._requires: - if self.filter(uni, require): - found |= {require} - self._requires -= found - return False - - def require2message(self, require: Any, **kwargs: Any) -> str: - r"""Require2message. - - :param require: - :type require: Any - :param kwargs: - :type kwargs: Any - :rtype: str - """ - return Template(self.message).render( - uni=self, require=require, **kwargs - ) - - def get_diagnostics(self, uri: str, tree: Tree) -> list[Diagnostic]: - r"""Get diagnostics. 
- - :param uri: - :type uri: str - :param tree: - :type tree: Tree - :rtype: list[Diagnostic] - """ - self.find_all(uri, tree) - return [ - Diagnostic( - # If you want to specify a range that contains a line including - # the line ending character(s) then use an end position - # denoting the start of the next line - Range(Position(0, 0), Position(1, 0)), - self.require2message(i), - self.severity, - ) - for i in self._requires - ] - - -@dataclass(init=False) -class SchemaFinder(Finder): - r"""Schemafinder.""" - - def __init__(self, schema: dict[str, Any], cls: type[Trie]) -> None: - r"""Init. - - :param schema: - :type schema: dict[str, Any] - :param cls: - :type cls: type[Trie] - :rtype: None - """ - self.validator = self.schema2validator(schema) - self.cls = cls - - @staticmethod - def schema2validator(schema: dict[str, Any]) -> Validator: - r"""Schema2validator. - - :param schema: - :type schema: dict[str, Any] - :rtype: Validator - """ - return validator_for(schema)(schema) - - def get_diagnostics(self, _: str, tree: Tree) -> list[Diagnostic]: - r"""Get diagnostics. - - :param _: - :type _: str - :param tree: - :type tree: Tree - :rtype: list[Diagnostic] - """ - trie = self.cls.from_tree(tree) - return [ - Diagnostic( - trie.from_path(error.json_path).range, - error.message, - DiagnosticSeverity.Error, - ) - for error in self.validator.iter_errors(trie.to_json()) - ] diff --git a/src/termux_language_server/tree_sitter_lsp/format.py b/src/termux_language_server/tree_sitter_lsp/format.py deleted file mode 100644 index 832159d..0000000 --- a/src/termux_language_server/tree_sitter_lsp/format.py +++ /dev/null @@ -1,166 +0,0 @@ -r"""Format -========== - -Wrap -``Document Formatting ``_ -to a formatter. -""" -from typing import Callable - -from lsprotocol.types import Position, Range, TextEdit -from tree_sitter import Tree - -from . import Finder -from .utils import get_finders, get_paths - - -def position_2d_to_1d(source: str, position: Position) -> int: - r"""Position 2d to 1d. - - :param source: - :type source: str - :param position: - :type position: Position - :rtype: int - """ - return ( - sum(len(line) + 1 for line in source.splitlines()[: position.line]) - + position.character - ) - - -def range_2d_to_1d(source: str, region: Range) -> range: - r"""Range 2d to 1d. - - :param source: - :type source: str - :param region: - :type region: Range - :rtype: range - """ - return range( - position_2d_to_1d(source, region.start), - position_2d_to_1d(source, region.end), - ) - - -def apply_text_edits(text_edits: list[TextEdit], source: str) -> str: - r"""Apply text edits. - - :param text_edits: - :type text_edits: list[TextEdit] - :param source: - :type source: str - :rtype: str - """ - for text_edit in text_edits: - region = range_2d_to_1d(source, text_edit.range) - source = ( - source[: region.start] + text_edit.new_text + source[region.stop :] - ) - return source - - -def format_by_finders( - paths: list[str], parse: Callable[[bytes], Tree], finders: list[Finder] -) -> None: - r"""Format by finders. 
- - :param paths: - :type paths: list[str] - :param parse: - :type parse: Callable[[bytes], Tree] - :param finders: - :type finders: list[Finder] - :rtype: None - """ - for path in paths: - with open(path, "rb") as f: - src = f.read() - tree = parse(src) - text_edits = [ - text_edit - for finder in finders - for text_edit in finder.get_text_edits(path, tree) - ] - src = apply_text_edits(text_edits, src.decode()) - with open(path, "w") as f: - f.write(src) - - -def get_text_edits_by_finders( - uri: str, tree: Tree, finders: list[Finder] -) -> list[TextEdit]: - r"""Get text edits by finders. - - :param uri: - :type uri: str - :param tree: - :type tree: Tree - :param finders: - :type finders: list[Finder] - :rtype: list[TextEdit] - """ - return [ - text_edit - for finder in finders - for text_edit in finder.get_text_edits(uri, tree) - ] - - -def get_text_edits( - uri: str, - tree: Tree, - classes: list[type[Finder]] | None = None, - filetype: str | None = None, -) -> list[TextEdit]: - r"""Get text edits. - - :param uri: - :type uri: str - :param tree: - :type tree: Tree - :param classes: - :type classes: list[type[Finder]] | None - :param filetype: - :type filetype: str | None - :rtype: list[TextEdit] - """ - finders, finder_classes = get_finders(classes) - if filetype is None: - return get_text_edits_by_finders(uri, tree, finders) - return [ - text_edit - for text_edit in get_text_edits_by_finders( - uri, tree, finders + [cls(filetype) for cls in finder_classes] - ) - ] - - -def format( - paths: list[str], - parse: Callable[[bytes], Tree], - classes: list[type[Finder]] | None = None, - get_filetype: Callable[[str], str] | None = None, -) -> None: - r"""Format. - - :param paths: - :type paths: list[str] - :param parse: - :type parse: Callable[[bytes], Tree] - :param classes: - :type classes: list[type[Finder]] | None - :param get_filetype: - :type get_filetype: Callable[[str], str] | None - :rtype: None - """ - finders, finder_classes = get_finders(classes) - if get_filetype is None: - return format_by_finders(paths, parse, finders) - for filetype, filepaths in get_paths(paths, get_filetype).items(): - format_by_finders( - filepaths, - parse, - finders + [cls(filetype) for cls in finder_classes], - ) diff --git a/src/termux_language_server/tree_sitter_lsp/schema.py b/src/termux_language_server/tree_sitter_lsp/schema.py deleted file mode 100644 index a7420f4..0000000 --- a/src/termux_language_server/tree_sitter_lsp/schema.py +++ /dev/null @@ -1,133 +0,0 @@ -r"""Schema -========== -""" -from dataclasses import dataclass -from typing import Any - -from lsprotocol.types import Position, Range -from tree_sitter import Node, Tree - -from . import UNI - - -@dataclass -class Trie: - r"""Trie.""" - - range: Range - parent: "Trie | None" = None - # can be serialized to a json - value: dict[str, "Trie"] | list["Trie"] | str | int | float | None = None - - def get_root(self) -> "Trie": - r"""Get root. - - :rtype: "Trie" - """ - node = self - while node.parent is not None: - node = node.parent - return node - - def to_path(self) -> str: - r"""To path. - - :rtype: str - """ - if self.parent is None: - return "$" - path = self.parent.to_path() - if isinstance(self.parent.value, dict): - for k, v in self.parent.value.items(): - if v is self: - return f"{path}.{k}" - raise TypeError - if isinstance(self.parent.value, list): - for k, v in enumerate(self.parent.value): - if v is self: - return f"{path}[{k}]" - raise TypeError - return path - - def from_path(self, path: str) -> "Trie": - r"""From path. 
- - :param path: - :type path: str - :rtype: "Trie" - """ - node = self - if path.startswith("$"): - path = path.lstrip("$") - node = self.get_root() - return node.from_relative_path(path) - - def from_relative_path(self, path: str) -> "Trie": - r"""From relative path. - - :param path: - :type path: str - :rtype: "Trie" - """ - if path == "": - return self - if path.startswith("."): - if not isinstance(self.value, dict): - raise TypeError - path = path.lstrip(".") - index, mid, path = path.partition(".") - if mid == ".": - path = mid + path - index, mid, suffix = index.partition("[") - if mid == "[": - path = mid + suffix + path - return self.value[index].from_relative_path(path) - if path.startswith("["): - if not isinstance(self.value, list): - raise TypeError - path = path.lstrip("[") - index, _, path = path.partition("]") - return self.value[int(index)].from_relative_path(path) - raise TypeError - - def to_json(self) -> dict[str, Any] | list[Any] | str | int | float | None: - r"""To json. - - :rtype: dict[str, Any] | list[Any] | str | int | float | None - """ - if isinstance(self.value, dict): - return {k: v.to_json() for k, v in self.value.items()} - if isinstance(self.value, list): - return [v.to_json() for v in self.value] - return self.value - - @classmethod - def from_tree(cls, tree: Tree) -> "Trie": - r"""From tree. - - :param tree: - :type tree: Tree - :rtype: "Trie" - """ - return cls.from_node(tree.root_node, None) - - @classmethod - def from_node(cls, node: Node, parent: "Trie | None") -> "Trie": - r"""From node. - - :param node: - :type node: Node - :param parent: - :type parent: Trie | None - :rtype: "Trie" - """ - if parent is None: - _range = Range(Position(0, 0), Position(1, 0)) - else: - _range = UNI.node2range(node) - trie = cls(_range, parent, {}) - trie.value = { - UNI.node2text(child.children[0]): cls.from_node(child, trie) - for child in node.children - } - return trie diff --git a/src/termux_language_server/tree_sitter_lsp/utils.py b/src/termux_language_server/tree_sitter_lsp/utils.py deleted file mode 100644 index 9f60fab..0000000 --- a/src/termux_language_server/tree_sitter_lsp/utils.py +++ /dev/null @@ -1,99 +0,0 @@ -r"""Utils -========= - -Some common functions used by formatters and linters. -""" -import json -import os -import sys -from typing import Any, Callable - -from . import Finder - - -def get_paths( - paths: list[str], get_filetype: Callable[[str], str] -) -> dict[str, list[str]]: - r"""Get paths. - - :param paths: - :type paths: list[str] - :param get_filetype: A function returning ``Literal["filetype1", "filetype2", ...] | Literal[""]`` - :type get_filetype: Callable[[str], str] - :rtype: dict[str, list[str]] - """ - filetype_paths = { - k: [] - for k in get_filetype.__annotations__["return"].__args__[0].__args__ - if k != "" - } - for path in paths: - filetype = get_filetype(path) - for _filetype, filepaths in filetype_paths.items(): - if filetype == _filetype: - filepaths += [path] - return filetype_paths - - -def get_finders( - classes: list[type[Finder]] | None = None, -) -> tuple[list[Finder], list[type[Finder]]]: - r"""Get finders. 
- - :param classes: - :type classes: list[type[Finder]] | None - :rtype: tuple[list[Finder], list[type[Finder]]] - """ - if classes is None: - from .finders import ErrorFinder, MissingFinder - - classes = [ErrorFinder, MissingFinder] - - finders = [] - finder_classes = [] - for cls in classes: - if cls.__init__.__annotations__.get("filetype"): - finder_classes += [cls] - else: - finders += [cls()] - return finders, finder_classes - - -def pprint( - obj: object, filetype: str = "json", *args: Any, **kwargs: Any -) -> None: - r"""Pprint. - - :param obj: - :type obj: object - :param filetype: - :type filetype: str - :param args: - :type args: Any - :param kwargs: - :type kwargs: Any - :rtype: None - """ - text = json.dumps(obj, *args, **kwargs) - TERM = os.getenv("TERM", "xterm") - if not sys.stdout.isatty(): - TERM = "dumb" - try: - from pygments import highlight - from pygments.formatters import get_formatter_by_name - from pygments.lexers import get_lexer_by_name - - if TERM.split("-")[-1] == "256color": - formatter_name = "terminal256" - elif TERM != "dumb": - formatter_name = "terminal" - else: - formatter_name = None - if formatter_name: - formatter = get_formatter_by_name(formatter_name) - lexer = get_lexer_by_name(filetype) - print(highlight(text, lexer, formatter), end="") - except ImportError: - TERM = "dumb" - if TERM == "dumb": - print(text) diff --git a/src/termux_language_server/utils.py b/src/termux_language_server/utils.py index 34f2be4..00f9b01 100644 --- a/src/termux_language_server/utils.py +++ b/src/termux_language_server/utils.py @@ -1,13 +1,46 @@ -r"""Documents -============= +r"""Utils +========= """ + import json import os from typing import Any, Literal +from tree_sitter import Language, Parser, Query +from tree_sitter_bash import language as get_language_ptr + from . import FILETYPE SCHEMAS = {} +QUERIES = {} +parser = Parser() +parser.language = Language(get_language_ptr()) + + +def get_query(name: str, filetype: str = "bash") -> Query: + r"""Get query. 
+
+    :param name:
+    :type name: str
+    :param filetype:
+    :type filetype: str
+    :rtype: Query
+    """
+    if name not in QUERIES:
+        with open(
+            os.path.join(
+                os.path.dirname(__file__),
+                "assets",
+                "queries",
+                f"{name}{os.path.extsep}scm",
+            )
+        ) as f:
+            text = f.read()
+        if parser.language:
+            QUERIES[name] = Query(parser.language, text)
+        else:
+            raise NotImplementedError
+    return QUERIES[name]
 
 
 def get_schema(filetype: FILETYPE) -> dict[str, Any]:
@@ -19,13 +52,12 @@ def get_schema(filetype: FILETYPE) -> dict[str, Any]:
     """
     if filetype not in SCHEMAS:
         file = os.path.join(
-            os.path.join(
-                os.path.join(os.path.dirname(__file__), "assets"),
-                "json",
-            ),
+            os.path.dirname(__file__),
+            "assets",
+            "json",
             f"{filetype}.json",
         )
-        with open(file, "r") as f:
+        with open(file) as f:
             SCHEMAS[filetype] = json.load(f)
     return SCHEMAS[filetype]
 
@@ -37,9 +69,10 @@ def get_filetype(uri: str) -> FILETYPE | Literal[""]:
     :type uri: str
     :rtype: FILETYPE | Literal[""]
     """
+    dirname = os.path.basename(os.path.dirname(uri))
     basename = os.path.basename(uri)
     ext = uri.split(os.path.extsep)[-1]
-    if basename == "build.sh":
+    if basename == "build.sh" and dirname != "scripts" and dirname != "recipe":
         return "build.sh"
     if basename.endswith(".subpackage.sh"):
         return "subpackage.sh"
@@ -55,4 +88,8 @@
         return "make.conf"
     if basename == "color.map":
         return "color.map"
+    if ext == "mdd":
+        return "mdd"
+    if basename in {"devscripts.conf", ".devscripts"}:
+        return "devscripts.conf"
     return ""
diff --git a/templates/metainfo.py b/templates/metainfo.py
deleted file mode 100644
index a9a8444..0000000
--- a/templates/metainfo.py
+++ /dev/null
@@ -1,20 +0,0 @@
-"""This file is generated by scikit-build.generate.
-The information comes from pyproject.toml.
-It provide some metainfo for docs/conf.py to build documents and
-help2man to build man pages.
-"""
-from datetime import datetime
-
-# For docs/conf.py
-project = "$name"
-author = "\n".join(f"{a[0]} <{a[1]}>" for a in $authors)
-copyright = datetime.now().year
-
-# For help2man
-DESCRIPTION = "$description"
-EPILOG = "Report bugs to " + $urls["Bug Report"]
-VERSION = f"""$name $version
-Copyright (C) {copyright}
-Written by {author}"""
-
-SOURCE = $urls["Source"]
diff --git a/templates/metainfo.py.j2 b/templates/metainfo.py.j2
new file mode 100644
index 0000000..bfe7fd5
--- /dev/null
+++ b/templates/metainfo.py.j2
@@ -0,0 +1,22 @@
+"""This file is generated by setuptools-generate.
+The information comes from pyproject.toml.
+It provide some metainfo for docs/conf.py to build documents and
+help2man to build man pages.
+""" + +# For docs/conf.py +project = "{{ data['project']['name'] }}" +author = """{% for author in data['project']['authors'] -%} +{{ author['name'] }} <{{ author['email'] }}> {% endfor -%} +""" +copyright = "{{ year }}" + +# For help2man +DESCRIPTION = "{{ data['project']['description'] }}" +EPILOG = "Report bugs to {{ data['project']['urls']['Bug Report'] }}" +# format __version__ by yourself +VERSION = """{{ data['project']['name'] }} {__version__} +Copyright (C) {{ year }} +Written by {% for author in data['project']['authors'] -%} +{{ author['name'] }} <{{ author['email'] }}> {% endfor %}""" +SOURCE = "{{ data['project']['urls']['Source'] }}" diff --git a/tests/PKGBUILD b/tests/PKGBUILD index d2a6638..db17068 100644 --- a/tests/PKGBUILD +++ b/tests/PKGBUILD @@ -1,16 +1,15 @@ -# shellcheck shell=bash disable=SC2034,SC2154 -# Maintainer: Wu Zhenyu # Updated by https://github.com/Freed-Wu/pkgbuild-language-server/blob/main/.github/workflows/main.yml pkgname=pkgbuild-language-server pkgver=0.0.6 pkgrel=1 pkgdesc="Archlinux and Windows Msys2's PKGBUILD language server" -arch=(any) +arch=("any") +mingw_arch=(wrong_arch) url=https://github.com/Freed-Wu/pkgbuild-language-server depends=(python-colorama python-jinja python-platformdirs python-pygls python-tree-sitter) optdepends=(python-pypandoc python-markdown-it-py pacman pyalpm namcap) makedepends=python-installer -license=(GPL3) +license=('GPL3') _py=py3 source=("https://files.pythonhosted.org/packages/$_py/${pkgname::1}/${pkgname//-/_}/${pkgname//-/_}-$pkgver-$_py-none-any.whl") sha256sums=('db062b5028e93aa9304d2783cd73017320587ac64fc4d8c01f514ae1015a4bf0') diff --git a/tests/build.sh b/tests/build.sh index c9be790..1f8c1e4 100644 --- a/tests/build.sh +++ b/tests/build.sh @@ -1,4 +1,3 @@ -# shellcheck disable=SC2034 # https://github.com/termux/termux-packages/pull/17457/files TERMUX_PKG_LICENSE=GPL-3.0 TERMUX_PKG_HOMEPAGE=https://github.com/ggerganov/llama.cpp @@ -18,7 +17,7 @@ TERMUX_PKG_AUTO_UPDATE=true # XXX: llama.cpp uses `int64_t`, but on 32-bit Android `size_t` is `int32_t`. # XXX: I don't think it will work if we simply casting it. 
-TERMUX_PKG_BLACKLISTED_ARCHES=(arm i686)
+TERMUX_PKG_EXCLUDED_ARCHES=(arm i686)
 
 termux_pkg_auto_update() {
 	local latest_tag
@@ -26,7 +25,7 @@ termux_pkg_auto_update() {
 		termux_github_api_get_tag "${TERMUX_PKG_SRCURL}" "${TERMUX_PKG_UPDATE_TAG_TYPE}"
 	)"
 
-	if [[ -z "${latest_tag}" ]]; then
+	if [[ -z ${latest_tag} ]]; then
 		termux_error_exit "ERROR: Unable to get tag from ${TERMUX_PKG_SRCURL}"
 	fi
 	termux_pkg_upgrade_version "0.0.0-${latest_tag}"
diff --git a/tests/neovim-0.9.4.ebuild b/tests/neovim-0.9.4.ebuild
index bd5f320..ec45d50 100644
--- a/tests/neovim-0.9.4.ebuild
+++ b/tests/neovim-0.9.4.ebuild
@@ -1,4 +1,3 @@
-# shellcheck disable=SC2034
 # Copyright 1999-2023 Gentoo Authors
 # Distributed under the terms of the GNU General Public License v2
 
diff --git a/tests/test_schema.py b/tests/test_schema.py
index 2a501da..4298d9e 100644
--- a/tests/test_schema.py
+++ b/tests/test_schema.py
@@ -1,10 +1,10 @@
 r"""Test schema."""
+
 import os
 
-from termux_language_server.parser import parse
+from lsp_tree_sitter.finders import SchemaFinder
 from termux_language_server.schema import BashTrie
-from termux_language_server.tree_sitter_lsp.finders import SchemaFinder
-from termux_language_server.utils import get_filetype, get_schema
+from termux_language_server.utils import get_filetype, get_schema, parser
 
 PATH = os.path.dirname(__file__)
 
@@ -23,7 +23,7 @@ def test_SchemaFinder() -> None:
     assert filetype == "build.sh"
     with open(path, "rb") as f:
         text = f.read()
-    tree = parse(text)
+    tree = parser.parse(text)
     finder = SchemaFinder(get_schema(filetype), BashTrie)
     diagnostics = finder.get_diagnostics(path, tree)
     assert len(diagnostics) > 0
diff --git a/tests/test_utils.py b/tests/test_utils.py
index 1830cd9..fd88b8c 100644
--- a/tests/test_utils.py
+++ b/tests/test_utils.py
@@ -1,4 +1,5 @@
 r"""Test utils."""
+
 from termux_language_server.utils import get_schema
 
 
@@ -6,8 +7,8 @@ class Test:
     r"""Test."""
 
     @staticmethod
-    def test_get_document() -> None:
-        r"""Test get document.
+    def test_get_schema() -> None:
+        r"""Test get schema.
 
         :rtype: None
         """