diff --git a/.github/workflows/docs-ci.yml b/.github/workflows/docs-ci.yml index 18a44aa..8d8aa55 100644 --- a/.github/workflows/docs-ci.yml +++ b/.github/workflows/docs-ci.yml @@ -4,34 +4,29 @@ on: [push, pull_request] jobs: build: - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 strategy: max-parallel: 4 matrix: - python-version: [3.9] + python-version: [3.13] steps: - name: Checkout code - uses: actions/checkout@v2 + uses: actions/checkout@v4 - name: Set up Python ${{ matrix.python-version }} - uses: actions/setup-python@v2 + uses: actions/setup-python@v5 with: python-version: ${{ matrix.python-version }} - - name: Give permission to run scripts - run: chmod +x ./docs/scripts/doc8_style_check.sh - - name: Install Dependencies - run: pip install -e .[docs] + run: ./configure --dev - - name: Check Sphinx Documentation build minimally - working-directory: ./docs - run: sphinx-build -E -W source build + - name: Check documentation and HTML for errors and dead links + run: make docs-check - - name: Check for documentation style errors - working-directory: ./docs - run: ./scripts/doc8_style_check.sh + - name: Check documentation for style errors + run: make doc8 diff --git a/.github/workflows/pypi-release.yml b/.github/workflows/pypi-release.yml index 22315ff..7da0a40 100644 --- a/.github/workflows/pypi-release.yml +++ b/.github/workflows/pypi-release.yml @@ -21,23 +21,26 @@ on: jobs: build-pypi-distribs: name: Build and publish library to PyPI - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@master + - uses: actions/checkout@v4 - name: Set up Python - uses: actions/setup-python@v1 + uses: actions/setup-python@v5 with: - python-version: 3.9 + python-version: 3.12 - - name: Install pypa/build - run: python -m pip install build --user + - name: Install pypa/build and twine + run: python -m pip install --user --upgrade build twine pkginfo - name: Build a binary wheel and a source tarball - run: python -m build --sdist --wheel --outdir dist/ + run: python -m build --wheel --sdist --outdir dist/ + + - name: Validate wheel and sdist for PyPI + run: python -m twine check dist/* - name: Upload built archives - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@v4 with: name: pypi_archives path: dist/* @@ -47,17 +50,17 @@ jobs: name: Create GH release needs: - build-pypi-distribs - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 steps: - name: Download built archives - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: pypi_archives path: dist - name: Create GH release - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@v2 with: draft: true files: dist/* @@ -67,17 +70,18 @@ jobs: name: Create PyPI release needs: - create-gh-release - runs-on: ubuntu-20.04 + runs-on: ubuntu-24.04 + environment: pypi-publish + permissions: + id-token: write steps: - name: Download built archives - uses: actions/download-artifact@v3 + uses: actions/download-artifact@v4 with: name: pypi_archives path: dist - name: Publish to PyPI if: startsWith(github.ref, 'refs/tags') - uses: pypa/gh-action-pypi-publish@master - with: - password: ${{ secrets.PYPI_API_TOKEN }} + uses: pypa/gh-action-pypi-publish@release/v1 \ No newline at end of file diff --git a/.gitignore b/.gitignore index eecd265..c90f40c 100644 --- a/.gitignore +++ b/.gitignore @@ -81,3 +81,5 @@ tcl !/tests/typecode/data/contenttype/compiled/linux/* !/tests/typecode/data/contenttype/compiled/win/* !/tests/typecode/data/contenttype/package/* +/.ruff_cache/ +.env diff
--git a/.readthedocs.yml b/.readthedocs.yml index 1b71cd9..27c1595 100644 --- a/.readthedocs.yml +++ b/.readthedocs.yml @@ -5,6 +5,17 @@ # Required version: 2 +# Build in latest ubuntu/python +build: + os: ubuntu-22.04 + tools: + python: "3.13" + +# Build PDF & ePub +formats: + - epub + - pdf + # Where the Sphinx conf.py file is located sphinx: configuration: docs/source/conf.py @@ -15,4 +26,4 @@ python: - method: pip path: . extra_requirements: - - docs + - dev diff --git a/MANIFEST.in b/MANIFEST.in index 8424cbe..4490009 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,4 +1,6 @@ graft src +graft docs +graft etc include *.LICENSE include NOTICE @@ -6,10 +8,19 @@ include *.ABOUT include *.toml include *.yml include *.rst +include *.png include setup.* include configure* include requirements* include .giti* +include .dockerignore +include .gitignore +include .readthedocs.yml +include manage.py +include Dockerfile* +include Makefile +include MANIFEST.in -global-exclude *.py[co] __pycache__ *.*~ +include .VERSION +global-exclude *.py[co] __pycache__ *.*~ diff --git a/Makefile b/Makefile index cc36c35..3041547 100644 --- a/Makefile +++ b/Makefile @@ -4,7 +4,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -13,31 +13,33 @@ PYTHON_EXE?=python3 VENV=venv ACTIVATE?=. ${VENV}/bin/activate; -dev: - @echo "-> Configure the development envt." - ./configure --dev -isort: - @echo "-> Apply isort changes to ensure proper imports ordering" - ${VENV}/bin/isort --sl -l 100 src tests setup.py +conf: + @echo "-> Install dependencies" + ./configure -black: - @echo "-> Apply black code formatter" - ${VENV}/bin/black -l 100 src tests setup.py +dev: + @echo "-> Configure and install development dependencies" + ./configure --dev doc8: @echo "-> Run doc8 validation" - @${ACTIVATE} doc8 --max-line-length 100 --ignore-path docs/_build/ --quiet docs/ + @${ACTIVATE} doc8 --quiet docs/ *.rst -valid: isort black +valid: + @echo "-> Run Ruff format" + @${ACTIVATE} ruff format + @echo "-> Run Ruff linter" + @${ACTIVATE} ruff check --fix check: - @echo "-> Run pycodestyle (PEP8) validation" - @${ACTIVATE} pycodestyle --max-line-length=100 --exclude=.eggs,venv,lib,thirdparty,docs,migrations,settings.py,.cache . - @echo "-> Run isort imports ordering validation" - @${ACTIVATE} isort --sl --check-only -l 100 setup.py src tests . 
- @echo "-> Run black validation" - @${ACTIVATE} black --check --check -l 100 src tests setup.py + @echo "-> Run Ruff linter validation (pycodestyle, bandit, isort, and more)" + @${ACTIVATE} ruff check + @echo "-> Run Ruff format validation" + @${ACTIVATE} ruff format --check + @$(MAKE) doc8 + @echo "-> Run ABOUT files validation" + @${ACTIVATE} about check etc/ clean: @echo "-> Clean the Python env" @@ -49,6 +51,10 @@ test: docs: rm -rf docs/_build/ - @${ACTIVATE} sphinx-build docs/ docs/_build/ + @${ACTIVATE} sphinx-build docs/source docs/_build/ + +docs-check: + @${ACTIVATE} sphinx-build -E -W -b html docs/source docs/_build/ + @${ACTIVATE} sphinx-build -E -W -b linkcheck docs/source docs/_build/ -.PHONY: conf dev check valid black isort clean test docs +.PHONY: conf dev check valid clean test docs docs-check diff --git a/NOTICE b/NOTICE index fc8995f..fd5ac4d 100644 --- a/NOTICE +++ b/NOTICE @@ -1,5 +1,8 @@ # -# Copyright (c) nexB Inc. and others. All rights reserved. +# Copyright (c) nexB Inc. and others. +# SPDX-License-Identifier: Apache-2.0 +# +# Visit https://aboutcode.org and https://github.com/aboutcode-org/ for support and download. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. diff --git a/README.rst b/README.rst index ec3b5fb..e9ec7a7 100644 --- a/README.rst +++ b/README.rst @@ -31,7 +31,7 @@ system, use the `minimal` option:: In this case, you will need to provide a working libmagic and its database available in one of these ways: -- **a typecode-libmagic plugin**: See the standard ones at +- **a typecode-libmagic plugin**: See the standard ones at https://github.com/nexB/scancode-plugins/tree/main/builtins These can either bundle a libmagic library and its magic database or expose a system-installed libmagic. 
diff --git a/azure-pipelines.yml b/azure-pipelines.yml index a318e49..2bd523b 100644 --- a/azure-pipelines.yml +++ b/azure-pipelines.yml @@ -5,42 +5,58 @@ ################################################################################ jobs: - - template: etc/ci/azure-posix.yml - parameters: - job_name: ubuntu20_cpython - image_name: ubuntu-20.04 - python_versions: ["3.7", "3.8", "3.9", "3.10", "3.11"] - test_suites: - all: venv/bin/pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml + parameters: + job_name: run_code_checks + image_name: ubuntu-24.04 + python_versions: ['3.13'] + test_suites: + all: make check - - template: etc/ci/azure-posix.yml - parameters: - job_name: ubuntu22_cpython - image_name: ubuntu-22.04 - python_versions: ["3.7", "3.8", "3.9", "3.10", "3.11"] - test_suites: - all: venv/bin/pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml + parameters: + job_name: ubuntu22_cpython + image_name: ubuntu-22.04 + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] + test_suites: + all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-posix.yml - parameters: - job_name: macos12_cpython - image_name: macos-12 - python_versions: ["3.7", "3.8", "3.9", "3.10", "3.11"] - test_suites: - all: venv/bin/pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml + parameters: + job_name: ubuntu24_cpython + image_name: ubuntu-24.04 + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] + test_suites: + all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-win.yml - parameters: - job_name: win2019_cpython - image_name: windows-2019 - python_versions: ["3.7", "3.8", "3.9", "3.10", "3.11"] - test_suites: - all: venv\Scripts\pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos14_cpython + image_name: macOS-14 + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] + test_suites: + all: venv/bin/pytest -n 2 -vvs - - template: etc/ci/azure-win.yml - parameters: - job_name: win2022_cpython - image_name: windows-2022 - python_versions: ["3.7", "3.8", "3.9", "3.10", "3.11"] - test_suites: - all: venv\Scripts\pytest -n 2 -vvs + - template: etc/ci/azure-posix.yml + parameters: + job_name: macos15_cpython + image_name: macOS-15 + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] + test_suites: + all: venv/bin/pytest -n 2 -vvs + + - template: etc/ci/azure-win.yml + parameters: + job_name: win2022_cpython + image_name: windows-2022 + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] + test_suites: + all: venv\Scripts\pytest -n 2 -vvs + + - template: etc/ci/azure-win.yml + parameters: + job_name: win2025_cpython + image_name: windows-2025 + python_versions: ['3.10', '3.11', '3.12', '3.13', '3.14'] + test_suites: + all: venv\Scripts\pytest -n 2 -vvs diff --git a/configure b/configure index dede3a6..6d317d4 100755 --- a/configure +++ b/configure @@ -3,7 +3,7 @@ # Copyright (c) nexB Inc. and others. All rights reserved. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/ for support or download. +# See https://github.com/aboutcode-org/ for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -28,15 +28,14 @@ CLI_ARGS=$1 ################################ # Requirement arguments passed to pip and used by default or with --dev. 
-REQUIREMENTS="--editable .[full] --constraint requirements.txt" -DEV_REQUIREMENTS="--editable .[full,testing] --constraint requirements.txt --constraint requirements-dev.txt" -DOCS_REQUIREMENTS="--editable .[docs] --constraint requirements.txt" +REQUIREMENTS="--editable . --constraint requirements.txt" +DEV_REQUIREMENTS="--editable .[dev] --constraint requirements.txt --constraint requirements-dev.txt" # where we create a virtualenv VIRTUALENV_DIR=venv # Cleanable files and directories to delete with the --clean option -CLEANABLE="build dist venv .cache .eggs" +CLEANABLE="build dist venv .cache .eggs *.egg-info docs/_build/ pip-selfcheck.json" # extra arguments passed to pip PIP_EXTRA_ARGS=" " @@ -111,7 +110,7 @@ create_virtualenv() { fi $PYTHON_EXECUTABLE "$VIRTUALENV_PYZ" \ - --wheel embed --pip embed --setuptools embed \ + --pip embed --setuptools embed \ --seeder pip \ --never-download \ --no-periodic-update \ @@ -168,6 +167,7 @@ clean() { for cln in $CLEANABLE; do rm -rf "${CFG_ROOT_DIR:?}/${cln:?}"; done + find . -type f -name '*.py[co]' -delete -o -type d -name __pycache__ -delete set +e exit } @@ -185,7 +185,6 @@ while getopts :-: optchar; do help ) cli_help;; clean ) find_python && clean;; dev ) CFG_REQUIREMENTS="$DEV_REQUIREMENTS";; - docs ) CFG_REQUIREMENTS="$DOCS_REQUIREMENTS";; esac;; esac done diff --git a/configure.bat b/configure.bat index fbff828..15ab701 100644 --- a/configure.bat +++ b/configure.bat @@ -4,7 +4,7 @@ @rem Copyright (c) nexB Inc. and others. All rights reserved. @rem SPDX-License-Identifier: Apache-2.0 @rem See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -@rem See https://github.com/nexB/ for support or download. +@rem See https://github.com/aboutcode-org/ for support or download. @rem See https://aboutcode.org for more information about nexB OSS projects. @@ -26,9 +26,8 @@ @rem ################################ @rem # Requirement arguments passed to pip and used by default or with --dev. -set "REQUIREMENTS=--editable .[full] --constraint requirements.txt" -set "DEV_REQUIREMENTS=--editable .[full,testing] --constraint requirements.txt --constraint requirements-dev.txt" -set "DOCS_REQUIREMENTS=--editable .[docs] --constraint requirements.txt" +set "REQUIREMENTS=--editable . --constraint requirements.txt" +set "DEV_REQUIREMENTS=--editable .[dev] --constraint requirements.txt --constraint requirements-dev.txt" @rem # where we create a virtualenv set "VIRTUALENV_DIR=venv" @@ -76,9 +75,6 @@ if not "%1" == "" ( if "%1" EQU "--dev" ( set "CFG_REQUIREMENTS=%DEV_REQUIREMENTS%" ) - if "%1" EQU "--docs" ( - set "CFG_REQUIREMENTS=%DOCS_REQUIREMENTS%" - ) shift goto again ) @@ -114,7 +110,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( if exist "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ( %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\etc\thirdparty\virtualenv.pyz" ^ - --wheel embed --pip embed --setuptools embed ^ + --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ @@ -130,7 +126,7 @@ if not exist "%CFG_BIN_DIR%\python.exe" ( ) ) %PYTHON_EXECUTABLE% "%CFG_ROOT_DIR%\%VIRTUALENV_DIR%\virtualenv.pyz" ^ - --wheel embed --pip embed --setuptools embed ^ + --pip embed --setuptools embed ^ --seeder pip ^ --never-download ^ --no-periodic-update ^ diff --git a/docs/Makefile b/docs/Makefile index d0c3cbf..94f686b 100644 --- a/docs/Makefile +++ b/docs/Makefile @@ -5,8 +5,9 @@ # from the environment for the first two. 
SPHINXOPTS ?= SPHINXBUILD ?= sphinx-build +SPHINXAUTOBUILD = sphinx-autobuild SOURCEDIR = source -BUILDDIR = build +BUILDDIR = _build # Put it first so that "make" without argument is like "make help". help: @@ -14,6 +15,13 @@ help: .PHONY: help Makefile +# Run the development server using sphinx-autobuild +docs: + @echo + @echo "Starting up the docs server..." + @echo + $(SPHINXAUTOBUILD) --port 8000 --watch ${SOURCEDIR} $(SOURCEDIR) "$(BUILDDIR)/html" $(SPHINXOPTS) $(O) + # Catch-all target: route all unknown targets to Sphinx using the new # "make mode" option. $(O) is meant as a shortcut for $(SPHINXOPTS). %: Makefile diff --git a/docs/make.bat b/docs/make.bat index 6247f7e..4a3c1a4 100644 --- a/docs/make.bat +++ b/docs/make.bat @@ -7,11 +7,16 @@ REM Command file for Sphinx documentation if "%SPHINXBUILD%" == "" ( set SPHINXBUILD=sphinx-build ) +if "%SPHINXAUTOBUILD%" == "" ( + set SPHINXAUTOBUILD=sphinx-autobuild +) set SOURCEDIR=source set BUILDDIR=build if "%1" == "" goto help +if "%1" == "docs" goto docs + %SPHINXBUILD% >NUL 2>NUL if errorlevel 9009 ( echo. @@ -28,6 +33,13 @@ if errorlevel 9009 ( %SPHINXBUILD% -M %1 %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% goto end +:docs +@echo +@echo Starting up the docs server... +@echo +%SPHINXAUTOBUILD% --port 8000 --watch %SOURCEDIR% %SOURCEDIR% %BUILDDIR%\html %SPHINXOPTS% %O% +goto end + :help %SPHINXBUILD% -M help %SOURCEDIR% %BUILDDIR% %SPHINXOPTS% %O% diff --git a/docs/scripts/doc8_style_check.sh b/docs/scripts/doc8_style_check.sh deleted file mode 100644 index 9416323..0000000 --- a/docs/scripts/doc8_style_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Check for Style Code Violations -doc8 --max-line-length 100 source --ignore D000 --quiet \ No newline at end of file diff --git a/docs/scripts/sphinx_build_link_check.sh b/docs/scripts/sphinx_build_link_check.sh deleted file mode 100644 index c542686..0000000 --- a/docs/scripts/sphinx_build_link_check.sh +++ /dev/null @@ -1,5 +0,0 @@ -#!/bin/bash -# halt script on error -set -e -# Build locally, and then check links -sphinx-build -E -W -b linkcheck source build \ No newline at end of file diff --git a/docs/source/_static/theme_overrides.css b/docs/source/_static/theme_overrides.css index 9662d63..5863ccf 100644 --- a/docs/source/_static/theme_overrides.css +++ b/docs/source/_static/theme_overrides.css @@ -1,353 +1,26 @@ -body { - color: #000000; -} - -p { - margin-bottom: 10px; -} - -.wy-plain-list-disc, .rst-content .section ul, .rst-content .toctree-wrapper ul, article ul { - margin-bottom: 10px; -} - -.custom_header_01 { - color: #cc0000; - font-size: 22px; - font-weight: bold; - line-height: 50px; -} - -h1, h2, h3, h4, h5, h6 { - margin-bottom: 20px; - margin-top: 20px; -} - -h5 { - font-size: 18px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -h6 { - font-size: 15px; - color: #000000; - font-style: italic; - margin-bottom: 10px; -} - -/* custom admonitions */ -/* success */ -.custom-admonition-success .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; -} -div.custom-admonition-success.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* important */ -.custom-admonition-important .admonition-title { - color: #000000; - background: #ccffcc; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #000000; -} -div.custom-admonition-important.admonition 
{ - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* caution */ -.custom-admonition-caution .admonition-title { - color: #000000; - background: #ffff99; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #e8e8e8; -} -div.custom-admonition-caution.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* note */ -.custom-admonition-note .admonition-title { - color: #ffffff; - background: #006bb3; - border-radius: 5px 5px 0px 0px; -} -div.custom-admonition-note.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* todo */ -.custom-admonition-todo .admonition-title { - color: #000000; - background: #cce6ff; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #99ccff; -} -div.custom-admonition-todo.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #99ccff; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - -/* examples */ -.custom-admonition-examples .admonition-title { - color: #000000; - background: #ffe6cc; - border-radius: 5px 5px 0px 0px; - border-bottom: solid 1px #d8d8d8; -} -div.custom-admonition-examples.admonition { - color: #000000; - background: #ffffff; - border: solid 1px #cccccc; - border-radius: 5px; - box-shadow: 1px 1px 5px 3px #d8d8d8; - margin: 20px 0px 30px 0px; -} - +/* this is the container for the pages */ .wy-nav-content { max-width: 100%; - padding-right: 100px; - padding-left: 100px; - background-color: #f2f2f2; -} - -div.rst-content { - background-color: #ffffff; - border: solid 1px #e5e5e5; - padding: 20px 40px 20px 40px; -} - -.rst-content .guilabel { - border: 1px solid #ffff99; - background: #ffff99; - font-size: 100%; - font-weight: normal; - border-radius: 4px; - padding: 2px 0px; - margin: auto 2px; - vertical-align: middle; -} - -.rst-content kbd { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - border: solid 1px #d8d8d8; - background-color: #f5f5f5; - padding: 0px 3px; - border-radius: 3px; -} - -.wy-nav-content-wrap a { - color: #0066cc; - text-decoration: none; -} -.wy-nav-content-wrap a:hover { - color: #0099cc; - text-decoration: underline; -} - -.wy-nav-top a { - color: #ffffff; -} - -/* Based on numerous similar approaches e.g., https://github.com/readthedocs/sphinx_rtd_theme/issues/117 and https://rackerlabs.github.io/docs-rackspace/tools/rtd-tables.html -- but remove form-factor limits to enable table wrap on full-size and smallest-size form factors */ -.wy-table-responsive table td { - white-space: normal !important; -} - -.rst-content table.docutils td, -.rst-content table.docutils th { - padding: 5px 10px 5px 10px; -} -.rst-content table.docutils td p, -.rst-content table.docutils th p { - font-size: 14px; - margin-bottom: 0px; -} -.rst-content table.docutils td p cite, -.rst-content table.docutils th p cite { - font-size: 14px; - background-color: transparent; -} - -.colwidths-given th { - border: solid 1px #d8d8d8 !important; -} -.colwidths-given td { - border: solid 1px #d8d8d8 !important; -} - -/*handles single-tick inline code*/ -.wy-body-for-nav cite { - color: #000000; - background-color: transparent; - font-style: normal; - 
font-family: "Courier New"; - font-size: 13px; - padding: 3px 3px 3px 3px; -} - -.rst-content pre.literal-block, .rst-content div[class^="highlight"] pre, .rst-content .linenodiv pre { - font-family: SFMono-Regular,Menlo,Monaco,Consolas,"Liberation Mono","Courier New",Courier,monospace; - font-size: 13px; - overflow: visible; - white-space: pre-wrap; - color: #000000; -} - -.rst-content pre.literal-block, .rst-content div[class^='highlight'] { - background-color: #f8f8f8; - border: solid 1px #e8e8e8; -} - -/* This enables inline code to wrap. */ -code, .rst-content tt, .rst-content code { - white-space: pre-wrap; - padding: 2px 3px 1px; - border-radius: 3px; - font-size: 13px; - background-color: #ffffff; -} - -/* use this added class for code blocks attached to bulleted list items */ -.highlight-top-margin { - margin-top: 20px !important; -} - -/* change color of inline code block */ -span.pre { - color: #e01e5a; -} - -.wy-body-for-nav blockquote { - margin: 1em 0; - padding-left: 1em; - border-left: 4px solid #ddd; - color: #000000; -} - -/* Fix the unwanted top and bottom padding inside a nested bulleted/numbered list */ -.rst-content .section ol p, .rst-content .section ul p { - margin-bottom: 0px; -} - -/* add spacing between bullets for legibility */ -.rst-content .section ol li, .rst-content .section ul li { - margin-bottom: 5px; -} - -.rst-content .section ol li:first-child, .rst-content .section ul li:first-child { - margin-top: 5px; -} - -/* but exclude the toctree bullets */ -.rst-content .toctree-wrapper ul li, .rst-content .toctree-wrapper ul li:first-child { + padding: 0px 40px 0px 0px; margin-top: 0px; - margin-bottom: 0px; } -/* remove extra space at bottom of multine list-table cell */ -.rst-content .line-block { - margin-left: 0px; - margin-bottom: 0px; - line-height: 24px; +.wy-nav-content-wrap { + border-right: solid 1px; } -/* fix extra vertical spacing in page toctree */ -.rst-content .toctree-wrapper ul li ul, article ul li ul { - margin-top: 0; - margin-bottom: 0; -} - -/* this is used by the genindex added via layout.html (see source/_templates/) to sidebar toc */ -.reference.internal.toc-index { - color: #d9d9d9; -} - -.reference.internal.toc-index.current { - background-color: #ffffff; - color: #000000; - font-weight: bold; -} - -.toc-index-div { - border-top: solid 1px #000000; - margin-top: 10px; - padding-top: 5px; -} - -.indextable ul li { - font-size: 14px; - margin-bottom: 5px; -} - -/* The next 2 fix the poor vertical spacing in genindex.html (the alphabetized index) */ -.indextable.genindextable { - margin-bottom: 20px; -} - -div.genindex-jumpbox { - margin-bottom: 10px; -} - -/* rst image classes */ - -.clear-both { - clear: both; - } - -.float-left { - float: left; - margin-right: 20px; -} - -img { - border: solid 1px #e8e8e8; -} - -/* These are custom and need to be defined in conf.py to access in all pages, e.g., '.. 
role:: red' */ -.img-title { - color: #000000; - /* neither padding nor margin works for vertical spacing bc it's a span -- line-height does, sort of */ - line-height: 3.0; - font-style: italic; - font-weight: 600; -} - -.img-title-para { - color: #000000; - margin-top: 20px; - margin-bottom: 0px; - font-style: italic; - font-weight: 500; -} - -.red { - color: red; +div.rst-content { + max-width: 1300px; + border: 0; + padding: 10px 80px 10px 80px; + margin-left: 50px; +} + +@media (max-width: 768px) { + div.rst-content { + max-width: 1300px; + border: 0; + padding: 0px 10px 10px 10px; + margin-left: 0px; + } } diff --git a/docs/source/conf.py b/docs/source/conf.py index d5435e7..056ca6e 100644 --- a/docs/source/conf.py +++ b/docs/source/conf.py @@ -18,7 +18,7 @@ # -- Project information ----------------------------------------------------- project = "nexb-skeleton" -copyright = "nexB Inc. and others." +copyright = "nexB Inc., AboutCode and others." author = "AboutCode.org authors and contributors" @@ -29,15 +29,28 @@ # ones. extensions = [ "sphinx.ext.intersphinx", + "sphinx_reredirects", + "sphinx_rtd_theme", + "sphinx_rtd_dark_mode", + "sphinx.ext.extlinks", + "sphinx_copybutton", ] + +# Redirects for old pages +# See https://documatt.gitlab.io/sphinx-reredirects/usage.html +redirects = {} + # This points to aboutcode.readthedocs.io # In case of "undefined label" ERRORS check docs on intersphinx to troubleshoot -# Link was created at commit - https://github.com/nexB/aboutcode/commit/faea9fcf3248f8f198844fe34d43833224ac4a83 +# Link was created at commit - https://github.com/aboutcode-org/aboutcode/commit/faea9fcf3248f8f198844fe34d43833224ac4a83 intersphinx_mapping = { "aboutcode": ("https://aboutcode.readthedocs.io/en/latest/", None), - "scancode-workbench": ("https://scancode-workbench.readthedocs.io/en/develop/", None), + "scancode-workbench": ( + "https://scancode-workbench.readthedocs.io/en/develop/", + None, + ), } @@ -72,14 +85,17 @@ "conf_py_path": "/docs/source/", # path in the checkout to the docs root } -html_css_files = ["_static/theme_overrides.css"] +html_css_files = [ + "theme_overrides.css", +] # If true, "Created using Sphinx" is shown in the HTML footer. Default is True. html_show_sphinx = True # Define CSS and HTML abbreviations used in .rst files. These are examples. -# .. role:: is used to refer to styles defined in _static/theme_overrides.css and is used like this: :red:`text` +# .. role:: is used to refer to styles defined in _static/theme_overrides.css +# and is used like this: :red:`text` rst_prolog = """ .. |psf| replace:: Python Software Foundation @@ -95,3 +111,7 @@ .. role:: img-title-para """ + +# -- Options for LaTeX output ------------------------------------------------- + +latex_elements = {"classoptions": ",openany,oneside"} diff --git a/docs/source/contribute/contrib_doc.rst b/docs/source/contribute/contrib_doc.rst index 13882e1..2a719a5 100644 --- a/docs/source/contribute/contrib_doc.rst +++ b/docs/source/contribute/contrib_doc.rst @@ -8,109 +8,59 @@ Contributing to the Documentation Setup Local Build ----------------- -To get started, create or identify a working directory on your local machine. +To get started, check out and configure the repository for development:: -Open that directory and execute the following command in a terminal session:: + git clone https://github.com/aboutcode-org/your-repo.git - git clone https://github.com/nexB/skeleton.git + cd your-repo + ./configure --dev -That will create an ``/skeleton`` directory in your working directory.
-Now you can install the dependencies in a virtualenv:: - - cd skeleton - ./configure --docs +(Or use ``make dev``) .. note:: - In case of windows, run ``configure --docs`` instead of this. - -Now, this will install the following prerequisites: - -- Sphinx -- sphinx_rtd_theme (the format theme used by ReadTheDocs) -- docs8 (style linter) + On Windows, run ``configure --dev``. -These requirements are already present in setup.cfg and `./configure --docs` installs them. +This will install and configure all requirements for development, including documentation development. -Now you can build the HTML documents locally:: +Now you can build the HTML documentation locally:: source venv/bin/activate - cd docs - make html - -Assuming that your Sphinx installation was successful, Sphinx should build a local instance of the -documentation .html files:: - - open build/html/index.html - -.. note:: - - In case this command did not work, for example on Ubuntu 18.04 you may get a message like “Couldn’t - get a file descriptor referring to the console”, try: - - :: - - see build/html/index.html + make docs -You now have a local build of the AboutCode documents. +This will build the HTML documentation in the ``docs/_build`` directory:: -.. _contrib_doc_share_improvements: + open docs/_build/index.html -Share Document Improvements ---------------------------- - -Ensure that you have the latest files:: - - git pull - git status -Before commiting changes run Continious Integration Scripts locally to run tests. Refer -:ref:`doc_ci` for instructions on the same. +To validate the documentation style and content, use:: -Follow standard git procedures to upload your new and modified files. The following commands are -examples:: - - git status - git add source/index.rst - git add source/how-to-scan.rst - git status - git commit -m "New how-to document that explains how to scan" - git status - git push - git status - -The Scancode-Toolkit webhook with ReadTheDocs should rebuild the documentation after your -Pull Request is Merged. + source venv/bin/activate + make doc8 + make docs-check -Refer the `Pro Git Book `_ available online for Git tutorials -covering more complex topics on Branching, Merging, Rebasing etc. .. _doc_ci: Continuous Integration ---------------------- -The documentations are checked on every new commit through Travis-CI, so that common errors are -avoided and documentation standards are enforced. Travis-CI presently checks for these 3 aspects -of the documentation : +The documentation is checked on every new commit so that common errors are avoided and +documentation standards are enforced. We check for these aspects of the documentation: 1. Successful Builds (By using ``sphinx-build``) -2. No Broken Links (By Using ``link-check``) -3. Linting Errors (By Using ``Doc8``) +2. No Broken Links (By using ``linkcheck``) +3. Linting Errors (By using ``doc8``) -So run these scripts at your local system before creating a Pull Request:: +You must run these checks locally before creating a pull request:: - cd docs - ./scripts/sphinx_build_link_check.sh - ./scripts/doc8_style_check.sh + make doc8 + make docs-check -If you don't have permission to run the scripts, run:: - - chmod u+x ./scripts/doc8_style_check.sh ..
_doc_style_docs8: -Style Checks Using ``Doc8`` +Style Checks Using ``doc8`` --------------------------- How To Run Style Tests @@ -118,8 +68,7 @@ How To Run Style Tests In the project root, run the following commands:: - $ cd docs - $ ./scripts/doc8_style_check.sh + make doc8 A sample output is:: @@ -143,11 +92,13 @@ A sample output is:: Now fix the errors and run again till there isn't any style error in the documentation. + What is Checked? ^^^^^^^^^^^^^^^^ PyCQA is an Organization for code quality tools (and plugins) for the Python programming language. -Doc8 is a sub-project of the same Organization. Refer this `README `_ for more details. +Doc8 is a sub-project of the same Organization. Refer this +`README `_ for more details. What is checked: @@ -164,16 +115,19 @@ What is checked: - no carriage returns (use UNIX newlines) - D004 - no newline at end of file - D005 + .. _doc_interspinx: Interspinx ---------- -ScanCode toolkit documentation uses `Intersphinx `_ +AboutCode documentation uses +`Intersphinx `_ to link to other Sphinx Documentations, to maintain links to other Aboutcode Projects. To link sections in the same documentation, standart reST labels are used. Refer -`Cross-Referencing `_ for more information. +`Cross-Referencing `_ +for more information. For example:: @@ -223,6 +177,7 @@ Intersphinx, and you link to that label, it will create a link to the local labe For more information, refer this tutorial named `Using Intersphinx `_. + .. _doc_style_conv: Style Conventions for the Documentaion @@ -230,7 +185,7 @@ Style Conventions for the Documentaion 1. Headings - (`Refer `_) + (`Refer `_) Normally, there are no heading levels assigned to certain characters as the structure is determined from the succession of headings. However, this convention is used in Python’s Style Guide for documenting which you may follow: @@ -303,12 +258,14 @@ Style Conventions for the Documentaion ``rst_snippets/warning_snippets/`` and then included to eliminate redundancy, as these are frequently used in multiple files. + Converting from Markdown ------------------------ -If you want to convert a ``.md`` file to a ``.rst`` file, this `tool `_ -does it pretty well. You'd still have to clean up and check for errors as this contains a lot of -bugs. But this is definitely better than converting everything by yourself. +If you want to convert a ``.md`` file to a ``.rst`` file, this +`tool `_ does it pretty well. +You will still have to clean up and check for errors as this contains a lot of bugs. But this is +definitely better than converting everything by yourself. This will be helpful in converting GitHub wiki's (Markdown Files) to reStructuredtext files for Sphinx/ReadTheDocs hosting. diff --git a/docs/source/skeleton-usage.rst b/docs/source/skeleton-usage.rst index cde23dc..6cb4cc5 100644 --- a/docs/source/skeleton-usage.rst +++ b/docs/source/skeleton-usage.rst @@ -118,7 +118,7 @@ corrected. 
You can check to see if your corrections are valid by running: Once the wheels are collected and the ABOUT files are generated and correct, upload them to thirdparty.aboutcode.org/pypi by placing the wheels and ABOUT files from the thirdparty directory to the pypi directory at -https://github.com/nexB/thirdparty-packages +https://github.com/aboutcode-org/thirdparty-packages Usage after project initialization diff --git a/etc/ci/azure-container-deb.yml b/etc/ci/azure-container-deb.yml index 85b611d..d80e8df 100644 --- a/etc/ci/azure-container-deb.yml +++ b/etc/ci/azure-container-deb.yml @@ -21,7 +21,7 @@ jobs: - job: ${{ parameters.job_name }} pool: - vmImage: 'ubuntu-16.04' + vmImage: 'ubuntu-22.04' container: image: ${{ parameters.container }} diff --git a/etc/ci/azure-container-rpm.yml b/etc/ci/azure-container-rpm.yml index 1e6657d..a64138c 100644 --- a/etc/ci/azure-container-rpm.yml +++ b/etc/ci/azure-container-rpm.yml @@ -1,6 +1,6 @@ parameters: job_name: '' - image_name: 'ubuntu-16.04' + image_name: 'ubuntu-22.04' container: '' python_path: '' python_version: '' diff --git a/etc/scripts/check_thirdparty.py b/etc/scripts/check_thirdparty.py index b052f25..65ae595 100644 --- a/etc/scripts/check_thirdparty.py +++ b/etc/scripts/check_thirdparty.py @@ -1,11 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import click @@ -42,8 +41,7 @@ def check_thirdparty_dir( """ Check a thirdparty directory for problems and print these on screen. """ - # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest, report_missing_sources=sdists, diff --git a/etc/scripts/fetch_thirdparty.py b/etc/scripts/fetch_thirdparty.py index eedf05c..76a19a6 100644 --- a/etc/scripts/fetch_thirdparty.py +++ b/etc/scripts/fetch_thirdparty.py @@ -1,23 +1,21 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import itertools -import os import sys from collections import defaultdict import click -import utils_thirdparty import utils_requirements +import utils_thirdparty TRACE = False TRACE_DEEP = False @@ -109,7 +107,8 @@ @click.option( "--use-cached-index", is_flag=True, - help="Use on disk cached PyPI indexes list of packages and versions and do not refetch if present.", + help="Use on disk cached PyPI indexes list of packages and versions and " + "do not refetch if present.", ) @click.option( "--sdist-only", @@ -120,7 +119,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in sdist format (no wheels). 
" - "The command will not fail and exit if no wheel exists for these names", + "The command will not fail and exit if no wheel exists for these names", ) @click.option( "--wheel-only", @@ -131,7 +130,7 @@ show_default=False, multiple=True, help="Package name(s) that come only in wheel format (no sdist). " - "The command will not fail and exit if no sdist exists for these names", + "The command will not fail and exit if no sdist exists for these names", ) @click.option( "--no-dist", @@ -142,7 +141,7 @@ show_default=False, multiple=True, help="Package name(s) that do not come either in wheel or sdist format. " - "The command will not fail and exit if no distribution exists for these names", + "The command will not fail and exit if no distribution exists for these names", ) @click.help_option("-h", "--help") def fetch_thirdparty( @@ -248,7 +247,6 @@ def fetch_thirdparty( print(f"Processing: {name} @ {version}") if wheels: for environment in environments: - if TRACE: print(f" ==> Fetching wheel for envt: {environment}") @@ -262,11 +260,9 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append(environment) if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") - if (sdists or - (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only) - ): + if sdists or (f"{name}=={version}" in wheels_or_sdist_not_found and name in sdist_only): if TRACE: print(f" ==> Fetching sdist: {name}=={version}") @@ -279,17 +275,17 @@ def fetch_thirdparty( if not fetched: wheels_or_sdist_not_found[f"{name}=={version}"].append("sdist") if TRACE: - print(f" NOT FOUND") + print(" NOT FOUND") mia = [] for nv, dists in wheels_or_sdist_not_found.items(): name, _, version = nv.partition("==") if name in no_dist: continue - sdist_missing = sdists and "sdist" in dists and not name in wheel_only + sdist_missing = sdists and "sdist" in dists and name not in wheel_only if sdist_missing: mia.append(f"SDist missing: {nv} {dists}") - wheels_missing = wheels and any(d for d in dists if d != "sdist") and not name in sdist_only + wheels_missing = wheels and any(d for d in dists if d != "sdist") and name not in sdist_only if wheels_missing: mia.append(f"Wheels missing: {nv} {dists}") @@ -298,12 +294,12 @@ def fetch_thirdparty( print(m) raise Exception(mia) - print(f"==> FETCHING OR CREATING ABOUT AND LICENSE FILES") + print("==> FETCHING OR CREATING ABOUT AND LICENSE FILES") utils_thirdparty.fetch_abouts_and_licenses(dest_dir=dest_dir, use_cached_index=use_cached_index) utils_thirdparty.clean_about_files(dest_dir=dest_dir) # check for problems - print(f"==> CHECK FOR PROBLEMS") + print("==> CHECK FOR PROBLEMS") utils_thirdparty.find_problems( dest_dir=dest_dir, report_missing_sources=sdists, diff --git a/etc/scripts/gen_pypi_simple.py b/etc/scripts/gen_pypi_simple.py index 214d90d..89d0626 100644 --- a/etc/scripts/gen_pypi_simple.py +++ b/etc/scripts/gen_pypi_simple.py @@ -1,5 +1,4 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # SPDX-License-Identifier: BSD-2-Clause-Views AND MIT # Copyright (c) 2010 David Wolever . All rights reserved. 
@@ -69,7 +68,6 @@ def get_package_name_from_filename(filename): raise InvalidDistributionFilename(filename) elif filename.endswith(wheel_ext): - wheel_info = get_wheel_from_filename(filename) if not wheel_info: @@ -133,7 +131,7 @@ def build_links_package_index(packages_by_package_name, base_url): Return an HTML document as string which is a links index of all packages """ document = [] - header = f""" + header = """ Links for all packages @@ -178,13 +176,13 @@ def simple_index_entry(self, base_url): def build_pypi_index(directory, base_url="https://thirdparty.aboutcode.org/pypi"): """ - Using a ``directory`` directory of wheels and sdists, create the a PyPI - simple directory index at ``directory``/simple/ populated with the proper - PyPI simple index directory structure crafted using symlinks. + Create a PyPI simple directory index using a ``directory`` directory of wheels and sdists in + the directory at ``directory``/simple/ populated with the proper PyPI simple index directory + structure crafted using symlinks. - WARNING: The ``directory``/simple/ directory is removed if it exists. - NOTE: in addition to the a PyPI simple index.html there is also a links.html - index file generated which is suitable to use with pip's --find-links + WARNING: The ``directory``/simple/ directory is removed if it exists. NOTE: in addition to the + PyPI simple index.html there is also a links.html index file generated which is suitable to use + with pip's --find-links """ directory = Path(directory) @@ -200,11 +198,10 @@ def build_pypi_index(directory, base_url="https://thirdparty.aboutcode.org/pypi" simple_html_index = [ "", "PyPI Simple Index", - '' '', + '', ] for pkg_file in directory.iterdir(): - pkg_filename = pkg_file.name if ( diff --git a/etc/scripts/gen_requirements.py b/etc/scripts/gen_requirements.py index 07e26f7..1b87944 100644 --- a/etc/scripts/gen_requirements.py +++ b/etc/scripts/gen_requirements.py @@ -1,11 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import argparse @@ -34,7 +33,8 @@ def gen_requirements(): type=pathlib.Path, required=True, metavar="DIR", - help="Path to the 'site-packages' directory where wheels are installed such as lib/python3.6/site-packages", + help="Path to the 'site-packages' directory where wheels are installed " + "such as lib/python3.12/site-packages", ) parser.add_argument( "-r", diff --git a/etc/scripts/gen_requirements_dev.py b/etc/scripts/gen_requirements_dev.py index 12cc06d..8548205 100644 --- a/etc/scripts/gen_requirements_dev.py +++ b/etc/scripts/gen_requirements_dev.py @@ -1,11 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects.
# import argparse @@ -36,7 +35,8 @@ def gen_dev_requirements(): type=pathlib.Path, required=True, metavar="DIR", - help='Path to the "site-packages" directory where wheels are installed such as lib/python3.6/site-packages', + help="Path to the 'site-packages' directory where wheels are installed " + "such as lib/python3.12/site-packages", ) parser.add_argument( "-d", diff --git a/etc/scripts/test_utils_pip_compatibility_tags.py b/etc/scripts/test_utils_pip_compatibility_tags.py index 98187c5..0e9c360 100644 --- a/etc/scripts/test_utils_pip_compatibility_tags.py +++ b/etc/scripts/test_utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. copied from pip-20.3.1 pip/tests/unit/test_utils_compatibility_tags.py download_url: https://raw.githubusercontent.com/pypa/pip/20.3.1/tests/unit/test_utils_compatibility_tags.py @@ -25,8 +26,8 @@ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. """ -from unittest.mock import patch import sysconfig +from unittest.mock import patch import pytest @@ -51,7 +52,7 @@ def test_version_info_to_nodot(version_info, expected): assert actual == expected -class Testcompatibility_tags(object): +class Testcompatibility_tags: def mock_get_config_var(self, **kwd): """ Patch sysconfig.get_config_var for arbitrary keys. @@ -82,7 +83,7 @@ def test_no_hyphen_tag(self): assert "-" not in tag.platform -class TestManylinux2010Tags(object): +class TestManylinux2010Tags: @pytest.mark.parametrize( "manylinux2010,manylinux1", [ @@ -105,7 +106,7 @@ def test_manylinux2010_implies_manylinux1(self, manylinux2010, manylinux1): assert arches[:2] == [manylinux2010, manylinux1] -class TestManylinux2014Tags(object): +class TestManylinux2014Tags: @pytest.mark.parametrize( "manylinuxA,manylinuxB", [ diff --git a/etc/scripts/update_skeleton.py b/etc/scripts/update_skeleton.py new file mode 100644 index 0000000..374c06f --- /dev/null +++ b/etc/scripts/update_skeleton.py @@ -0,0 +1,105 @@ +#!/usr/bin/env python +# +# Copyright (c) nexB Inc. AboutCode, and others. All rights reserved. +# ScanCode is a trademark of nexB Inc. +# SPDX-License-Identifier: Apache-2.0 +# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. +# See https://github.com/aboutcode-org/skeleton for support or download. +# See https://aboutcode.org for more information about nexB OSS projects. 
+# + +from pathlib import Path +import os +import subprocess + +import click + + +ABOUTCODE_PUBLIC_REPO_NAMES = [ + "aboutcode-toolkit", + "ahocode", + "bitcode", + "clearcode-toolkit", + "commoncode", + "container-inspector", + "debian-inspector", + "deltacode", + "elf-inspector", + "extractcode", + "fetchcode", + "gemfileparser2", + "gh-issue-sandbox", + "go-inspector", + "heritedcode", + "license-expression", + "license_copyright_pipeline", + "nuget-inspector", + "pip-requirements-parser", + "plugincode", + "purldb", + "pygmars", + "python-inspector", + "sanexml", + "saneyaml", + "scancode-analyzer", + "scancode-toolkit-contrib", + "scancode-toolkit-reference-scans", + "thirdparty-toolkit", + "tracecode-toolkit", + "tracecode-toolkit-strace", + "turbo-spdx", + "typecode", + "univers", +] + + +@click.command() +@click.help_option("-h", "--help") +def update_skeleton_files(repo_names=ABOUTCODE_PUBLIC_REPO_NAMES): + """ + Update project files of AboutCode projects that use the skeleton + + This script will: + - Clone the repo + - Add the skeleton repo as a new origin + - Create a new branch named "update-skeleton-files" + - Merge in the new skeleton files into the "update-skeleton-files" branch + + The user will need to save merge commit messages that pop up when running + this script in addition to resolving the merge conflicts on repos that have + them. + """ + + # Create working directory + work_dir_path = Path("/tmp/update_skeleton/") + if not os.path.exists(work_dir_path): + os.makedirs(work_dir_path, exist_ok=True) + + for repo_name in repo_names: + # Move to work directory + os.chdir(work_dir_path) + + # Clone repo + repo_git = f"git@github.com:aboutcode-org/{repo_name}.git" + subprocess.run(["git", "clone", repo_git]) + + # Go into cloned repo + os.chdir(work_dir_path / repo_name) + + # Add skeleton as an origin + subprocess.run( + ["git", "remote", "add", "skeleton", "git@github.com:aboutcode-org/skeleton.git"] + ) + + # Fetch skeleton files + subprocess.run(["git", "fetch", "skeleton"]) + + # Create and checkout new branch + subprocess.run(["git", "checkout", "-b", "update-skeleton-files"]) + + # Merge skeleton files into the repo + subprocess.run(["git", "merge", "skeleton/main", "--allow-unrelated-histories"]) + + +if __name__ == "__main__": + update_skeleton_files() diff --git a/etc/scripts/utils_dejacode.py b/etc/scripts/utils_dejacode.py index c42e6c9..b6bff51 100644 --- a/etc/scripts/utils_dejacode.py +++ b/etc/scripts/utils_dejacode.py @@ -1,11 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. 
# import io @@ -14,7 +13,6 @@ import requests import saneyaml - from packvers import version as packaging_version """ @@ -26,7 +24,7 @@ DEJACODE_API_URL_PACKAGES = f"{DEJACODE_API_URL}packages/" DEJACODE_API_HEADERS = { - "Authorization": "Token {}".format(DEJACODE_API_KEY), + "Authorization": f"Token {DEJACODE_API_KEY}", "Accept": "application/json; indent=4", } @@ -51,6 +49,7 @@ def fetch_dejacode_packages(params): DEJACODE_API_URL_PACKAGES, params=params, headers=DEJACODE_API_HEADERS, + timeout=10, ) return response.json()["results"] @@ -94,7 +93,7 @@ def update_with_dejacode_about_data(distribution): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) # note that this is YAML-formatted about_text = response.json()["about_data"] about_data = saneyaml.load(about_text) @@ -114,7 +113,7 @@ def fetch_and_save_about_files(distribution, dest_dir="thirdparty"): if package_data: package_api_url = package_data["api_url"] about_url = f"{package_api_url}about_files" - response = requests.get(about_url, headers=DEJACODE_API_HEADERS) + response = requests.get(about_url, headers=DEJACODE_API_HEADERS, timeout=10) about_zip = response.content with io.BytesIO(about_zip) as zf: with zipfile.ZipFile(zf) as zi: @@ -153,7 +152,7 @@ def find_latest_dejacode_package(distribution): with_versions = sorted(with_versions) latest_version, latest_package_version = sorted(with_versions)[-1] print( - f"Found DejaCode latest version: {latest_version} " f"for dist: {distribution.package_url}", + f"Found DejaCode latest version: {latest_version} for dist: {distribution.package_url}", ) return latest_package_version @@ -179,7 +178,7 @@ def create_dejacode_package(distribution): } fields_to_carry_over = [ - "download_url" "type", + "download_urltype", "namespace", "name", "version", @@ -202,10 +201,11 @@ def create_dejacode_package(distribution): DEJACODE_API_URL_PACKAGES, data=new_package_payload, headers=DEJACODE_API_HEADERS, + timeout=10, ) new_package_data = response.json() if response.status_code != 201: raise Exception(f"Error, cannot create package for: {distribution}") - print(f'New Package created at: {new_package_data["absolute_url"]}') + print(f"New Package created at: {new_package_data['absolute_url']}") return new_package_data diff --git a/etc/scripts/utils_pip_compatibility_tags.py b/etc/scripts/utils_pip_compatibility_tags.py index af42a0c..dd954bc 100644 --- a/etc/scripts/utils_pip_compatibility_tags.py +++ b/etc/scripts/utils_pip_compatibility_tags.py @@ -1,4 +1,5 @@ -"""Generate and work with PEP 425 Compatibility Tags. +""" +Generate and work with PEP 425 Compatibility Tags. 
copied from pip-20.3.1 pip/_internal/utils/compatibility_tags.py download_url: https://github.com/pypa/pip/blob/20.3.1/src/pip/_internal/utils/compatibility_tags.py @@ -27,14 +28,12 @@ import re -from packvers.tags import ( - compatible_tags, - cpython_tags, - generic_tags, - interpreter_name, - interpreter_version, - mac_platforms, -) +from packvers.tags import compatible_tags +from packvers.tags import cpython_tags +from packvers.tags import generic_tags +from packvers.tags import interpreter_name +from packvers.tags import interpreter_version +from packvers.tags import mac_platforms _osx_arch_pat = re.compile(r"(.+)_(\d+)_(\d+)_(.+)") @@ -132,7 +131,7 @@ def _get_custom_interpreter(implementation=None, version=None): implementation = interpreter_name() if version is None: version = interpreter_version() - return "{}{}".format(implementation, version) + return f"{implementation}{version}" def get_supported( @@ -142,7 +141,8 @@ def get_supported( abis=None, # type: Optional[List[str]] ): # type: (...) -> List[Tag] - """Return a list of supported tags for each version specified in + """ + Return a list of supported tags for each version specified in `versions`. :param version: a string version, of the form "33" or "32", diff --git a/etc/scripts/utils_requirements.py b/etc/scripts/utils_requirements.py index 0fc25a3..b9b2c0e 100644 --- a/etc/scripts/utils_requirements.py +++ b/etc/scripts/utils_requirements.py @@ -1,11 +1,10 @@ #!/usr/bin/env python -# -*- coding: utf-8 -*- # # Copyright (c) nexB Inc. and others. All rights reserved. # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # @@ -40,7 +39,7 @@ def get_required_name_versions(requirement_lines, with_unpinned=False): req_line = req_line.strip() if not req_line or req_line.startswith("#"): continue - if req_line.startswith("-") or (not with_unpinned and not "==" in req_line): + if req_line.startswith("-") or (not with_unpinned and "==" not in req_line): print(f"Requirement line is not supported: ignored: {req_line}") continue yield get_required_name_version(requirement=req_line, with_unpinned=with_unpinned) @@ -57,21 +56,25 @@ def get_required_name_version(requirement, with_unpinned=False): >>> assert get_required_name_version("fooA==1.2.3.DEV1") == ("fooa", "1.2.3.dev1") >>> assert get_required_name_version("foo==1.2.3", with_unpinned=False) == ("foo", "1.2.3") >>> assert get_required_name_version("foo", with_unpinned=True) == ("foo", "") - >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == ("foo", ""), get_required_name_version("foo>=1.2") + >>> expected = ("foo", "") + >>> assert get_required_name_version("foo>=1.2", with_unpinned=True) == expected >>> try: ... assert not get_required_name_version("foo", with_unpinned=False) ... except Exception as e: ...
assert "Requirement version must be pinned" in str(e) """ requirement = requirement and "".join(requirement.lower().split()) - assert requirement, f"specifier is required is empty:{requirement!r}" + if not requirement: + raise ValueError(f"specifier is required is empty:{requirement!r}") name, operator, version = split_req(requirement) - assert name, f"Name is required: {requirement}" + if not name: + raise ValueError(f"Name is required: {requirement}") is_pinned = operator == "==" if with_unpinned: version = "" else: - assert is_pinned and version, f"Requirement version must be pinned: {requirement}" + if not is_pinned and version: + raise ValueError(f"Requirement version must be pinned: {requirement}") return name, version @@ -117,7 +120,7 @@ def get_installed_reqs(site_packages_dir): # Also include these packages in the output with --all: wheel, distribute, # setuptools, pip args = ["pip", "freeze", "--exclude-editable", "--all", "--path", site_packages_dir] - return subprocess.check_output(args, encoding="utf-8") + return subprocess.check_output(args, encoding="utf-8") # noqa: S603 comparators = ( @@ -147,9 +150,11 @@ def split_req(req): >>> assert split_req("foo >= 1.2.3 ") == ("foo", ">=", "1.2.3"), split_req("foo >= 1.2.3 ") >>> assert split_req("foo>=1.2") == ("foo", ">=", "1.2"), split_req("foo>=1.2") """ - assert req + if not req: + raise ValueError("req is required") # do not allow multiple constraints and tags - assert not any(c in req for c in ",;") + if not any(c in req for c in ",;"): + raise Exception(f"complex requirements with : or ; not supported: {req}") req = "".join(req.split()) if not any(c in req for c in comparators): return req, "", "" diff --git a/etc/scripts/utils_thirdparty.py b/etc/scripts/utils_thirdparty.py index addf8e5..bc68ac7 100644 --- a/etc/scripts/utils_thirdparty.py +++ b/etc/scripts/utils_thirdparty.py @@ -5,7 +5,7 @@ # ScanCode is a trademark of nexB Inc. # SPDX-License-Identifier: Apache-2.0 # See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. +# See https://github.com/aboutcode-org/skeleton for support or download. # See https://aboutcode.org for more information about nexB OSS projects. # import email @@ -115,13 +115,14 @@ TRACE_ULTRA_DEEP = False # Supported environments -PYTHON_VERSIONS = "37", "38", "39", "310" +PYTHON_VERSIONS = "310", "311", "312", "313", "314" PYTHON_DOT_VERSIONS_BY_VER = { - "37": "3.7", - "38": "3.8", - "39": "3.9", "310": "3.10", + "311": "3.11", + "312": "3.12", + "313": "3.13", + "314": "3.14", } @@ -133,10 +134,11 @@ def get_python_dot_version(version): ABIS_BY_PYTHON_VERSION = { - "37": ["cp37", "cp37m", "abi3"], - "38": ["cp38", "cp38m", "abi3"], - "39": ["cp39", "cp39m", "abi3"], "310": ["cp310", "cp310m", "abi3"], + "311": ["cp311", "cp311m", "abi3"], + "312": ["cp312", "cp312m", "abi3"], + "313": ["cp313", "cp313m", "abi3"], + "314": ["cp314", "cp314m", "abi3"], } PLATFORMS_BY_OS = { @@ -355,7 +357,6 @@ def sorted(cls, namevers): @attr.attributes class Distribution(NameVer): - # field names that can be updated from another Distribution or mapping updatable_fields = [ "license_expression", @@ -1091,7 +1092,6 @@ def get_sdist_name_ver_ext(filename): @attr.attributes class Sdist(Distribution): - extension = attr.ib( repr=False, type=str, @@ -1129,7 +1129,6 @@ def to_filename(self): @attr.attributes class Wheel(Distribution): - """ Represents a wheel file. 
@@ -2137,7 +2136,6 @@ def call(args, verbose=TRACE): with subprocess.Popen( args, stdout=subprocess.PIPE, stderr=subprocess.PIPE, encoding="utf-8" ) as process: - stdouts = [] while True: line = process.stdout.readline() diff --git a/pyproject.toml b/pyproject.toml index 9233238..c61fb9b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools >= 50", "wheel", "setuptools_scm[toml] >= 6"] +requires = ["setuptools >= 50", "wheel"] build-backend = "setuptools.build_meta" [tool.setuptools_scm] @@ -14,7 +14,6 @@ norecursedirs = [ "dist", "build", "_build", - "dist", "etc", "local", "ci", @@ -34,7 +33,9 @@ norecursedirs = [ "thirdparty", "tmp", "venv", + ".venv", "tests/data", + "*/tests/test_data", ".eggs", "src/*/data", "tests/*/data", @@ -58,3 +59,80 @@ target = 'typecode' packages = [ "pygments==2.9.0", ] +[tool.ruff] +line-length = 100 +extend-exclude = [] +target-version = "py310" +include = [ + "pyproject.toml", + "src/**/*.py", + "etc/**/*.py", + "test/**/*.py", + "tests/**/*.py", + "doc/**/*.py", + "docs/**/*.py", + "*.py", + "." + +] +# ignore test data and testfiles: they should never be linted nor formatted +exclude = [ +# main style + "**/tests/data/**/*", +# scancode-toolkit + "**/tests/*/data/**/*", +# dejacode, purldb + "**/tests/testfiles/**/*", +# vulnerablecode, fetchcode + "**/tests/*/test_data/**/*", + "**/tests/test_data/**/*", +# django migrations + "**/migrations/**/*", +# vendored code + "**/_vendor/**" +] + +[tool.ruff.lint] +# Rules: https://docs.astral.sh/ruff/rules/ +select = [ +# "E", # pycodestyle +# "W", # pycodestyle warnings + "D", # pydocstyle +# "F", # Pyflakes +# "UP", # pyupgrade +# "S", # flake8-bandit + "I", # isort +# "C9", # McCabe complexity +] +ignore = ["D1", "D200", "D202", "D203", "D205", "D212", "D400", "D415", "I001"] + + +[tool.ruff.lint.isort] +force-single-line = true +lines-after-imports = 1 +default-section = "first-party" +known-first-party = ["src", "tests", "etc/scripts/**/*.py"] +known-third-party = ["click", "pytest"] + +sections = { django = ["django"] } +section-order = [ + "future", + "standard-library", + "django", + "third-party", + "first-party", + "local-folder", +] + +[tool.ruff.lint.mccabe] +max-complexity = 10 + +[tool.ruff.lint.per-file-ignores] +# Place paths of files to be ignored by ruff here +"tests/*" = ["S101"] +"test_*.py" = ["S101"] + + +[tool.doc8] +ignore-path = ["docs/build", "doc/build", "docs/_build", "doc/_build"] +max-line-length=100 diff --git a/requirements-dev.txt b/requirements-dev.txt index f3ad56b..2cea954 100644 --- a/requirements-dev.txt +++ b/requirements-dev.txt @@ -1,24 +1,33 @@ -aboutcode-toolkit==8.0.0 -bleach==4.1.0 -build==0.7.0 +aboutcode-toolkit==11.1.1 +black==22.6.0 +bleach==5.0.1 +build==1.2.2.post1 commonmark==0.9.1 -docutils==0.18.1 +docutils==0.19 et-xmlfile==1.1.0 execnet==1.9.0 iniconfig==1.1.1 -jeepney==0.7.1 -keyring==23.4.1 -openpyxl==3.0.9 +isort==5.10.1 +jeepney==0.8.0 +keyring==23.7.0 +mypy-extensions==0.4.3 +openpyxl==3.0.10 +pathspec==0.9.0 pep517==0.12.0 -pkginfo==1.8.2 +pkginfo==1.12.1.2 +platformdirs==2.5.2 py==1.11.0 -pytest==7.0.1 +pytest==8.4.2 pytest-forked==1.4.0 pytest-xdist==2.5.0 -readme-renderer==34.0 +readme-renderer==35.0 requests-toolbelt==0.9.1 -rfc3986==1.5.0 -rich==12.3.0 +rfc3986==2.0.0 +rich==12.5.1 secretstorage==3.3.2 -tomli==1.2.3 -twine==3.8.0 +tomli==2.3.0 +tqdm==4.64.0 +twine==6.1.0 +typing_extensions==4.14.0 +vendorize==0.3.0 + diff --git a/requirements.txt b/requirements.txt index b578202..ba40b2b 100644 
--- a/requirements.txt +++ b/requirements.txt @@ -1,79 +1,80 @@ -attrs==21.4.0 +attrs==25.3.0 banal==1.0.6 -beautifulsoup4==4.11.1 +beautifulsoup4==4.13.4 binaryornot==0.4.4 -boolean.py==3.8 -certifi==2021.10.8 -cffi==1.15.0 -chardet==4.0.0 -charset-normalizer==2.0.12 -click==8.0.4 -colorama==0.4.4 -commoncode==30.2.0 -construct==2.10.68 -container-inspector==31.0.0 -cryptography==36.0.2 -debian-inspector==30.0.0 -dockerfile-parse==1.2.0 -dparse2==0.6.1 +beartype==0.22.4 +boolean.py==5.0 +certifi==2025.6.15 +cffi==2.0.0 +chardet==5.2.0 +charset-normalizer==3.4.2 +click==8.3.0 +colorama==0.4.6 +commoncode==32.4.0 +construct==2.10.70 +container-inspector==33.0.0 +cryptography==45.0.4 +debian-inspector==31.1.0 +dockerfile-parse==2.0.1 +dparse2==0.7.0 extractcode==31.0.0 extractcode-7z==16.5.210531 extractcode-libarchive==3.5.1.210531 -fasteners==0.17.3 -fingerprints==1.0.3 -ftfy==6.0.3 -future==0.18.2 -gemfileparser==0.8.0 +fasteners==0.19 +fingerprints==1.2.3 +ftfy==6.3.1 +future==1.0.0 +gemfileparser2==0.9.4 html5lib==1.1 -idna==3.3 -importlib-metadata==4.8.3 +idna==3.10 +importlib-metadata==6.2.1 inflection==0.5.1 -intbitset==3.0.2 -isodate==0.6.1 -jaraco.functools==3.4.0 -javaproperties==0.8.1 -Jinja2==3.0.3 +intbitset==4.1.0 +isodate==0.7.2 +jaraco.functools==4.2.1 +javaproperties==0.8.2 +Jinja2==3.1.6 jsonstreams==0.6.0 -license-expression==21.6.14 -lxml==4.8.0 -MarkupSafe==2.0.1 -more-itertools==8.13.0 -normality==2.3.3 -packagedcode-msitools==0.101.210706 -packageurl-python==0.9.9 -packaging==21.3 +license-expression==30.4.4 +lxml==6.0.2 +MarkupSafe==3.0.3 +more-itertools==10.7.0 +normality==2.6.1 +packageurl-python==0.17.1 +packaging==25.0 +packvers==21.5 parameter-expansion-patched==0.3.1 -patch==1.16 -pdfminer-six==20220506 -pefile==2021.9.3 -pip-requirements-parser==31.2.0 +pdfminer.six==20250506 +pefile==2024.8.26 +pip-requirements-parser==32.0.1 pkginfo2==30.0.0 -pluggy==1.0.0 -plugincode==30.0.0 +pluggy==1.6.0 +plugincode==32.0.0 ply==3.11 publicsuffix2==2.20191221 -pyahocorasick==2.0.0b1 -pycparser==2.21 -pygmars==0.7.0 -Pygments==2.12.0 -pymaven-patch==0.3.0 -pyparsing==3.0.8 +pyahocorasick==2.3.0 +pycparser==2.22 +pygmars==1.0.0 +Pygments==2.13.0 +pymaven-patch==0.3.2 +pyparsing==3.2.3 pytz==2022.1 -PyYAML==6.0 -rdflib==5.0.0 -regipy==2.3.1 -requests==2.27.1 -rpm-inspector-rpm==4.16.1.3.210404 -saneyaml==0.5.2 -six==1.16.0 -soupsieve==2.3.1 -spdx-tools==0.7.0a3 +PyYAML==6.0.3 +rdflib==7.1.4 +requests==2.32.4 +saneyaml==0.6.1 +semantic-version==2.10.0 +six==1.17.0 +soupsieve==2.7 +spdx-tools==0.8.2 text-unidecode==1.3 -toml==0.10.2 +tomli==2.3.0 typecode-libmagic==5.39.210531 -urllib3==1.26.9 +typing-extensions==4.14.0 +uritools==5.0.0 +urllib3==2.5.0 urlpy==0.5 -wcwidth==0.2.5 +wcwidth==0.2.13 webencodings==0.5.1 -xmltodict==0.12.0 -zipp==3.6.0 +xmltodict==0.14.2 +zipp==3.23.0 diff --git a/setup.cfg b/setup.cfg index e38d83b..48bf7b6 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,13 +1,13 @@ [metadata] name = typecode -version = 30.0.1 +version = 30.0.2 license = Apache-2.0 # description must be on ONE line https://github.com/pypa/setuptools/issues/1390 description = Comprehensive filetype and mimetype detection using libmagic and Pygments. long_description = file:README.rst long_description_content_type = text/x-rst -url = https://github.com/nexB/typecode +url = https://github.com/aboutcode-org/typecode author = nexB. Inc. 
and others author_email = info@aboutcode.org @@ -40,18 +40,17 @@ license_files = AUTHORS.rst CHANGELOG.rst CODE_OF_CONDUCT.rst + README.rst [options] +python_requires = >=3.10 + package_dir = =src packages = find: include_package_data = true zip_safe = false -setup_requires = setuptools_scm[toml] >= 4 - -python_requires = >=3.7 - install_requires = attrs >= 18.1, !=20.1.0 binaryornot @@ -68,19 +67,19 @@ where = src full = typecode_libmagic >= 5.39.210223 -testing = - pytest >= 6, != 7.0.0 +dev = + pytest >= 7.0.1 pytest-xdist >= 2 - aboutcode-toolkit >= 8.0.0 + aboutcode-toolkit >= 7.0.2 pycodestyle >= 2.8.0 twine - black - twine - saneyaml + ruff + Sphinx>=5.0.2 + sphinx-rtd-theme>=1.0.0 + sphinx-reredirects >= 0.1.2 + doc8>=0.11.2 + sphinx-autobuild + sphinx-rtd-dark-mode>=1.3.0 + sphinx-copybutton vendy - isort - -docs = - Sphinx == 6.2.1 - sphinx-rtd-theme >= 0.5.0 - doc8 >= 0.8.1 + saneyaml diff --git a/src/typecode/contenttype.py b/src/typecode/contenttype.py index d7c0e43..242ec8b 100644 --- a/src/typecode/contenttype.py +++ b/src/typecode/contenttype.py @@ -69,31 +69,38 @@ def logger_debug(*args): logger.setLevel(logging.DEBUG) def logger_debug(*args): - return logger.debug(' '.join(isinstance(a, str) and a or repr(a) for a in args)) + return logger.debug(" ".join(isinstance(a, str) and a or repr(a) for a in args)) -# Ensure that all dates are UTC, especially for fine free file. -os.environ['TZ'] = 'UTC' -ELF_EXE = 'executable' -ELF_SHARED = 'shared object' -ELF_RELOC = 'relocatable' -ELF_UNKNOWN = 'unknown' -elf_types = (ELF_EXE, ELF_SHARED, ELF_RELOC,) +# Ensure that all dates are UTC, especially for fine free file. +os.environ["TZ"] = "UTC" + +ELF_EXE = "executable" +ELF_SHARED = "shared object" +ELF_RELOC = "relocatable" +ELF_UNKNOWN = "unknown" +elf_types = ( + ELF_EXE, + ELF_SHARED, + ELF_RELOC, +) PLAIN_TEXT_EXTENSIONS = ( # docs - '.rst', '.rest', '.md', - '.txt', + ".rst", + ".rest", + ".md", + ".txt", # This one is actually not handled by Pygments. There are probably more. - '.log', + ".log", # various data - '.json', - '.xml', + ".json", + ".xml", ) MAKEFILE_EXTENSIONS = ( - 'Makefile', - 'Makefile.inc', + "Makefile", + "Makefile.inc", ) # Global registry of Type objects, keyed by location @@ -113,6 +120,7 @@ def get_type(location): _registry[abs_loc] = t return t + # TODO: simplify code using a cached property decorator @@ -127,91 +135,94 @@ class Type(object): Raise an IOError if the location does not exists. 
""" + __slots__ = ( - 'location', - 'is_file', - 'is_dir', - 'is_regular', - 'is_special', - 'date', - 'is_link', - 'is_broken_link', - '_size', - '_link_target', - '_mimetype_python', - '_filetype_file', - '_mimetype_file', - '_filetype_pygment', - '_is_pdf_with_text', - '_is_text', - '_is_text_with_long_lines', - '_is_compact_js', - '_is_js_map', - '_is_binary', - '_is_data', - '_is_archive', - '_contains_text', + "location", + "is_file", + "is_dir", + "is_regular", + "is_special", + "date", + "is_link", + "is_broken_link", + "_size", + "_link_target", + "_mimetype_python", + "_filetype_file", + "_mimetype_file", + "_filetype_pygment", + "_is_pdf_with_text", + "_is_text", + "_is_text_with_long_lines", + "_is_compact_js", + "_is_js_map", + "_is_binary", + "_is_data", + "_is_archive", + "_contains_text", ) # FIXME: we should use an introspectable attrs class instead # ATTENTION: keep this in sync with sloats and properties text_attributes = [ - 'filetype_file', - 'mimetype_file', - 'mimetype_python', - 'filetype_pygment', - 'elf_type', - 'programming_language', - 'link_target', + "filetype_file", + "mimetype_file", + "mimetype_python", + "filetype_pygment", + "elf_type", + "programming_language", + "link_target", ] - numeric_attributes = ['size', ] - date_attributes = ['date', ] + numeric_attributes = [ + "size", + ] + date_attributes = [ + "date", + ] boolean_attributes = [ - 'is_file', - 'is_dir', - 'is_regular', - 'is_special', - 'is_link', - 'is_broken_link', - 'is_pdf_with_text', - 'is_text', - 'is_text_with_long_lines', - 'is_compact_js', - 'is_js_map', - 'is_binary', - 'is_data', - 'is_archive', - 'contains_text', - 'is_compressed', - 'is_c_source', - 'is_c_source', - 'is_elf', - 'is_elf', - 'is_filesystem', - 'is_java_class', - 'is_java_source', - 'is_media', - 'is_media_with_meta', - 'is_office_doc', - 'is_package', - 'is_pdf', - 'is_script', - 'is_source', - 'is_stripped_elf', - 'is_winexe', - 'is_makefile', + "is_file", + "is_dir", + "is_regular", + "is_special", + "is_link", + "is_broken_link", + "is_pdf_with_text", + "is_text", + "is_text_with_long_lines", + "is_compact_js", + "is_js_map", + "is_binary", + "is_data", + "is_archive", + "contains_text", + "is_compressed", + "is_c_source", + "is_c_source", + "is_elf", + "is_elf", + "is_filesystem", + "is_java_class", + "is_java_source", + "is_media", + "is_media_with_meta", + "is_office_doc", + "is_package", + "is_pdf", + "is_script", + "is_source", + "is_stripped_elf", + "is_winexe", + "is_makefile", ] exportable_attributes = ( - text_attributes + numeric_attributes + date_attributes + boolean_attributes) + text_attributes + numeric_attributes + date_attributes + boolean_attributes + ) def __init__(self, location): - if (not location - or (not os.path.exists(location) - and not filetype.is_broken_link(location))): - raise IOError("[Errno 2] No such file or directory: " - "'%(location)r'" % locals()) + if not location or (not os.path.exists(location) and not filetype.is_broken_link(location)): + raise IOError("[Errno 2] No such file or directory: '%(location)r'" % locals()) self.location = location # flags and values self.is_file = filetype.is_file(location) @@ -245,13 +256,12 @@ def __init__(self, location): self._contains_text = None def __repr__(self): - return ('Type(ftf=%r, mtf=%r, ftpyg=%r, mtpy=%r)' - % ( - self.filetype_file, - self.mimetype_file, - self.filetype_pygment, - self.mimetype_python - )) + return "Type(ftf=%r, mtf=%r, ftpyg=%r, mtpy=%r)" % ( + self.filetype_file, + self.mimetype_file, + self.filetype_pygment, 
+ self.mimetype_python, + ) def to_dict(self, include_date=True): """ @@ -280,7 +290,7 @@ def link_target(self): Return a link target for symlinks or an empty string otherwise. """ if self._link_target is None: - self._link_target = '' + self._link_target = "" if self.is_link or self.is_broken_link: self._link_target = filetype.get_link_target(self.location) return self._link_target @@ -291,9 +301,9 @@ def mimetype_python(self): Return the mimetype using the a map of mimetypes by file extension. """ if self._mimetype_python is None: - self._mimetype_python = '' + self._mimetype_python = "" if self.is_file is True: - self._mimetype_python = mimetypes.guess_type(self.location) or '' + self._mimetype_python = mimetypes.guess_type(self.location) or "" return self._mimetype_python @property @@ -302,7 +312,7 @@ def filetype_file(self): Return the filetype using the fine free file library. """ if self._filetype_file is None: - self._filetype_file = '' + self._filetype_file = "" if self.is_file is True: self._filetype_file = magic2.file_type(self.location) return self._filetype_file @@ -313,7 +323,7 @@ def mimetype_file(self): Return the mimetype using the fine free file library. """ if self._mimetype_file is None: - self._mimetype_file = '' + self._mimetype_file = "" if self.is_file is True: self._mimetype_file = magic2.mime_type(self.location) return self._mimetype_file @@ -324,13 +334,13 @@ def filetype_pygment(self): Return the filetype guessed using Pygments lexer, mostly for source code. """ if self._filetype_pygment is None: - self._filetype_pygment = '' + self._filetype_pygment = "" if self.is_text and not self.is_media: lexer = get_pygments_lexer(self.location) - if lexer and not lexer.name.startswith('JSON'): - self._filetype_pygment = lexer.name or '' + if lexer and not lexer.name.startswith("JSON"): + self._filetype_pygment = lexer.name or "" else: - self._filetype_pygment = '' + self._filetype_pygment = "" return self._filetype_pygment @property @@ -370,8 +380,7 @@ def is_text_with_long_lines(self): """ if self._is_text_with_long_lines is None: self._is_text_with_long_lines = ( - self.is_text is True - and 'long lines' in self.filetype_file.lower() + self.is_text is True and "long lines" in self.filetype_file.lower() ) return self._is_text_with_long_lines @@ -383,17 +392,22 @@ def is_compact_js(self): """ if self._is_compact_js is None: # FIXME: when moving to Python 3 - extensions = ('.min.js', '.typeface.json',) - json_ext = '.json' + extensions = ( + ".min.js", + ".typeface.json", + ) + json_ext = ".json" self._is_compact_js = ( self.is_js_map or (self.is_text is True and self.location.endswith(extensions)) - or (self.filetype_file.lower() == 'data' - and (self.programming_language == 'JavaScript' - or self.location.endswith(json_ext) - ) + or ( + self.filetype_file.lower() == "data" + and ( + self.programming_language == "JavaScript" + or self.location.endswith(json_ext) ) + ) ) return self._is_compact_js @@ -405,11 +419,11 @@ def is_js_map(self): """ if self._is_js_map is None: # FIXME: when moving to Python 3 - extensions = '.js.map', '.css.map', - self._is_js_map = ( - self.is_text is True - and self.location.endswith(extensions) + extensions = ( + ".js.map", + ".css.map", ) + self._is_js_map = self.is_text is True and self.location.endswith(extensions) return self._is_js_map @property @@ -421,25 +435,25 @@ def is_archive(self): return self._is_archive self._is_archive = False - docx_type_end = '2007+' + docx_type_end = "2007+" ft = self.filetype_file.lower() if self.is_text: 
self._is_archive = False - elif ft.startswith('gem image data'): + elif ft.startswith("gem image data"): self._is_archive = False elif self.is_compressed: self._is_archive = True - elif 'archive' in ft: + elif "archive" in ft: self._is_archive = True - elif self.is_package: + elif self.is_package: self._is_archive = True elif self.is_filesystem: self._is_archive = True elif self.is_office_doc and ft.endswith(docx_type_end): self._is_archive = True - elif '(zip)' in ft: + elif "(zip)" in ft: # FIXME: is this really correct??? self._is_archive = True elif extractible.can_extract(self.location): @@ -452,16 +466,19 @@ def is_office_doc(self): loc = self.location.lower() # FIXME: add open office extensions and other extensions for other docs msoffice_exts = ( - '.doc', '.docx', - '.xlsx', '.xlsx', - '.ppt', '.pptx', + ".doc", + ".docx", + ".xlsx", + ".xlsx", + ".ppt", + ".pptx", ) if loc.endswith(msoffice_exts): return True else: ft = self.filetype_file.lower() - if ft.startswith('microsoft') and ft.endswith('2007+'): + if ft.startswith("microsoft") and ft.endswith("2007+"): return True return False @@ -473,15 +490,19 @@ def is_package(self): # FIXME: this should beased on proper package recognition, not this simplistic check ft = self.filetype_file.lower() loc = self.location.lower() - package_archive_extensions = '.jar', '.war', '.ear', '.zip', '.whl', '.egg' - gem_extension = '.gem' + package_archive_extensions = ".jar", ".war", ".ear", ".zip", ".whl", ".egg" + gem_extension = ".gem" # FIXME: this is grossly under specified and is missing many packages - if ('debian binary package' in ft - or ft.startswith('rpm ') - or (ft == 'posix tar archive' and loc.endswith(gem_extension)) - or (ft.startswith(('zip archive', 'java archive')) - and loc.endswith(package_archive_extensions))): + if ( + "debian binary package" in ft + or ft.startswith("rpm ") + or (ft == "posix tar archive" and loc.endswith(gem_extension)) + or ( + ft.startswith(("zip archive", "java archive")) + and loc.endswith(package_archive_extensions) + ) + ): return True else: return False @@ -493,16 +514,15 @@ def is_compressed(self): """ ft = self.filetype_file.lower() - docx_ext = 'x' + docx_ext = "x" - if (not self.is_text - and ( - '(zip)' in ft - or ft.startswith(('zip archive', 'java archive')) + if not self.is_text and ( + "(zip)" in ft + or ft.startswith(("zip archive", "java archive")) or self.is_package - or any(x in ft for x in ('squashfs filesystem', 'compressed')) + or any(x in ft for x in ("squashfs filesystem", "compressed")) or (self.is_office_doc and self.location.endswith(docx_ext)) - )): + ): return True else: return False @@ -513,7 +533,7 @@ def is_filesystem(self): Return True if the file is some kind of file system or disk image. """ ft = self.filetype_file.lower() - if ('squashfs filesystem' in ft): + if "squashfs filesystem" in ft: return True else: return False @@ -525,24 +545,52 @@ def is_media(self): """ # TODO: fonts? 
mt = self.mimetype_file - mimes = ('image', 'picture', 'audio', 'video', 'graphic', 'sound',) + mimes = ( + "image", + "picture", + "audio", + "video", + "graphic", + "sound", + ) ft = self.filetype_file.lower() types = ( - 'image data', 'graphics image', 'ms-windows metafont .wmf', - 'windows enhanced metafile', - 'png image', 'interleaved image', 'microsoft asf', 'image text', - 'photoshop image', 'shop pro image', 'ogg data', 'vorbis', 'mpeg', - 'theora', 'bitmap', 'audio', 'video', 'sound', 'riff', 'icon', - 'pc bitmap', 'image data', 'netpbm' + "image data", + "graphics image", + "ms-windows metafont .wmf", + "windows enhanced metafile", + "png image", + "interleaved image", + "microsoft asf", + "image text", + "photoshop image", + "shop pro image", + "ogg data", + "vorbis", + "mpeg", + "theora", + "bitmap", + "audio", + "video", + "sound", + "riff", + "icon", + "pc bitmap", + "image data", + "netpbm", ) if any(m in mt for m in mimes) or any(t in ft for t in types): return True - tga_ext = '.tga' + tga_ext = ".tga" - if ft == 'data' and mt == 'application/octet-stream' and self.location.lower().endswith(tga_ext): + if ( + ft == "data" + and mt == "application/octet-stream" + and self.location.lower().endswith(tga_ext) + ): # there is a regression in libmagic 5.38 https://bugs.astron.com/view.php?id=161 # this is a targe image return True @@ -562,7 +610,7 @@ def is_media_with_meta(self): if not self.is_media: return False if self.filetype_file.lower().startswith( - ('gif image', 'png image', 'jpeg image', 'netpbm', 'mpeg') + ("gif image", "png image", "jpeg image", "netpbm", "mpeg") ): return False else: @@ -573,7 +621,7 @@ def is_pdf(self): """ Return True if the file is highly likely to be a pdf file. """ - if 'pdf' in self.mimetype_file: + if "pdf" in self.mimetype_file: return True else: return False @@ -588,7 +636,7 @@ def is_pdf_with_text(self): if not self.is_file is True and not self.is_pdf is True: self._is_pdf_with_text = False else: - with open(self.location, 'rb') as pf: + with open(self.location, "rb") as pf: try: with contextlib.closing(PDFParser(pf)) as parser: doc = PDFDocument(parser) @@ -603,7 +651,7 @@ def contains_text(self): Return True if a file possibly contains some text. """ if self._contains_text is None: - svg_ext = '.svg' + svg_ext = ".svg" if not self.is_file: self._contains_text = False @@ -649,13 +697,14 @@ def is_data(self): size = self.size max_entropy = 1.3 - if (ft == 'data' - or is_data(self.location) - or ('data' in ft and size > large_file) - or (self.is_text and size > large_text_file) - or (self.is_text and size > large_text_file) - or (entropy.entropy(self.location, length=5000) < max_entropy)): - + if ( + ft == "data" + or is_data(self.location) + or ("data" in ft and size > large_file) + or (self.is_text and size > large_text_file) + or (self.is_text and size > large_text_file) + or (entropy.entropy(self.location, length=5000) < max_entropy) + ): self._is_data = True else: self._is_data = False @@ -667,7 +716,7 @@ def is_script(self): Return True if the file is script-like. """ ft = self.filetype_file.lower() - if self.is_text is True and 'script' in ft and not 'makefile' in ft: + if self.is_text is True and "script" in ft and not "makefile" in ft: return True else: return False @@ -709,14 +758,31 @@ def programming_language(self): string. 
""" if self.is_source: - return self.filetype_pygment or '' - return '' + return self.filetype_pygment or "" + return "" @property def is_c_source(self): C_EXTENSIONS = set( - ['.c', '.cc', '.cp', '.cpp', '.cxx', '.c++', '.h', '.hh', - '.s', '.asm', '.hpp', '.hxx', '.h++', '.i', '.ii', '.m']) + [ + ".c", + ".cc", + ".cp", + ".cpp", + ".cxx", + ".c++", + ".h", + ".hh", + ".s", + ".asm", + ".hpp", + ".hxx", + ".h++", + ".i", + ".ii", + ".m", + ] + ) ext = fileutils.file_extension(self.location) return self.is_text is True and ext.lower() in C_EXTENSIONS @@ -727,16 +793,12 @@ def is_winexe(self): Return True if a the file is a windows executable. """ ft = self.filetype_file.lower() - return 'for ms windows' in ft or ft.startswith('pe32') + return "for ms windows" in ft or ft.startswith("pe32") @property def is_elf(self): ft = self.filetype_file.lower() - if (ft.startswith('elf') - and (ELF_EXE in ft - or ELF_SHARED in ft - or ELF_RELOC in ft) - ): + if ft.startswith("elf") and (ELF_EXE in ft or ELF_SHARED in ft or ELF_RELOC in ft): return True else: return False @@ -750,12 +812,12 @@ def elf_type(self): return t return ELF_UNKNOWN else: - return '' + return "" @property def is_stripped_elf(self): if self.is_elf is True: - return 'not stripped' not in self.filetype_file.lower() + return "not stripped" not in self.filetype_file.lower() else: return False @@ -764,14 +826,14 @@ def is_java_source(self): """ FIXME: Check the filetype. """ - return self.is_file and self.file_name.lower().endswith(('.java', '.aj', '.jad', '.ajt')) + return self.is_file and self.file_name.lower().endswith((".java", ".aj", ".jad", ".ajt")) @property def is_java_class(self): """ FIXME: Check the filetype. """ - return self.is_file and self.file_name.lower().endswith('.class') + return self.is_file and self.file_name.lower().endswith(".class") @attr.attributes @@ -780,17 +842,24 @@ class TypeDefinition(object): filetypes = List(repr=True) mimetypes = List(repr=True) extensions = List(repr=True) - strict = Boolean(repr=True, - help=' if True, all criteria must be matched to select this detector.') + strict = Boolean( + repr=True, help=" if True, all criteria must be matched to select this detector." 
+ ) -DATA_TYPE_DEFINITIONS = tuple([ - TypeDefinition( - name='MySQL ARCHIVE Storage Engine data files', - filetypes=('mysql table definition file',), - extensions=('.arm', '.arz', '.arn',), - ), -]) +DATA_TYPE_DEFINITIONS = tuple( + [ + TypeDefinition( + name="MySQL ARCHIVE Storage Engine data files", + filetypes=("mysql table definition file",), + extensions=( + ".arm", + ".arz", + ".arn", + ), + ), + ] +) def is_data(location, definitions=DATA_TYPE_DEFINITIONS): @@ -813,15 +882,18 @@ def is_data(location, definitions=DATA_TYPE_DEFINITIONS): extension_matched = exts and location.lower().endswith(exts) if TRACE: - logger_debug('is_data: considering def: %(ddef)r for %(location)s' % locals()) - logger_debug('matched type: %(type_matched)s, mime: %(mime_matched)s, ext: %(extension_matched)s' % locals()) + logger_debug("is_data: considering def: %(ddef)r for %(location)s" % locals()) + logger_debug( + "matched type: %(type_matched)s, mime: %(mime_matched)s, ext: %(extension_matched)s" + % locals() + ) if ddef.strict and not all([type_matched, mime_matched, extension_matched]): continue if type_matched or mime_matched or extension_matched: if TRACE: - logger_debug('is_data: True: %(location)s: ' % locals()) + logger_debug("is_data: True: %(location)s: " % locals()) return True return False @@ -871,11 +943,11 @@ def get_text_file_start(location, length=4096): content = None # read the first 4K of the file try: - with io.open(location, 'r') as f: + with io.open(location, "r") as f: content = f.read(length) except: # try again as bytes and force unicode - with open(location, 'rb') as f: + with open(location, "rb") as f: content = text.as_unicode(f.read(length)) finally: return content @@ -895,11 +967,14 @@ def is_standard_include(location): a standard C/C++ include. """ STD_INCLUDES = ( - '/usr/lib/gcc', '/usr/lib', '/usr/include', - '', '/tmp/glibc-', + "/usr/lib/gcc", + "/usr/lib", + "/usr/include", + "", + "/tmp/glibc-", ) - if (location.startswith(STD_INCLUDES) or location.endswith(STD_INCLUDES)): + if location.startswith(STD_INCLUDES) or location.endswith(STD_INCLUDES): return True else: return False @@ -910,8 +985,17 @@ def is_binary(location): Retrun True if the file at `location` is a binary file. """ known_extensions = ( - '.pyc', '.pgm', '.mp3', '.mp4', '.mpeg', '.mpg', '.emf', - '.pgm', '.pbm', '.ppm') + ".pyc", + ".pgm", + ".mp3", + ".mp4", + ".mpeg", + ".mpg", + ".emf", + ".pgm", + ".pbm", + ".ppm", + ) if location.endswith(known_extensions): return True return is_binary_string(get_starting_chunk(location)) diff --git a/src/typecode/entropy.py b/src/typecode/entropy.py index fd3b69a..f6a522f 100644 --- a/src/typecode/entropy.py +++ b/src/typecode/entropy.py @@ -17,7 +17,7 @@ def entropy(location, length=5000): Return the Shannon entropy of up to `length` bytes from the file at location. 
""" - with open(location, 'rb') as locf: + with open(location, "rb") as locf: data = locf.read(length) return shannon_entropy(data) @@ -52,7 +52,7 @@ def gzip_entropy(s): return 0 if isinstance(s, str): - s = s.encode('utf-8') + s = s.encode("utf-8") length = len(s) if not length: diff --git a/src/typecode/extractible.py b/src/typecode/extractible.py index 4439006..6c550fc 100644 --- a/src/typecode/extractible.py +++ b/src/typecode/extractible.py @@ -37,6 +37,7 @@ def _is_compressed(location, opener): try: import lzma + is_lzmafile = partial(_is_compressed, opener=lzma.open) except ImportError: is_lzmafile = lambda _: False @@ -44,13 +45,7 @@ def _is_compressed(location, opener): # Each function accept a single location argument and return True if this is # an archive -archive_handlers = [ - zipfile.is_zipfile, - tarfile.is_tarfile, - is_gzipfile, - is_bz2file, - is_lzmafile -] +archive_handlers = [zipfile.is_zipfile, tarfile.is_tarfile, is_gzipfile, is_bz2file, is_lzmafile] def _can_extract(location): diff --git a/src/typecode/magic2.py b/src/typecode/magic2.py index b501df7..6f33a58 100644 --- a/src/typecode/magic2.py +++ b/src/typecode/magic2.py @@ -59,7 +59,8 @@ def logger_debug(*args): logger.setLevel(logging.DEBUG) def logger_debug(*args): - return logger.debug(' '.join(isinstance(a, str) and a or repr(a) for a in args)) + return logger.debug(" ".join(isinstance(a, str) and a or repr(a) for a in args)) + # # Cached detectors @@ -79,11 +80,11 @@ def logger_debug(*args): DETECT_ENC = MAGIC_NONE | MAGIC_MIME | MAGIC_MIME_ENCODING # keys for plugin-provided locations -TYPECODE_LIBMAGIC_DLL = 'typecode.libmagic.dll' -TYPECODE_LIBMAGIC_DB = 'typecode.libmagic.db' +TYPECODE_LIBMAGIC_DLL = "typecode.libmagic.dll" +TYPECODE_LIBMAGIC_DB = "typecode.libmagic.db" -TYPECODE_LIBMAGIC_PATH_ENVVAR = 'TYPECODE_LIBMAGIC_PATH' -TYPECODE_LIBMAGIC_DB_PATH_ENVVAR = 'TYPECODE_LIBMAGIC_DB_PATH' +TYPECODE_LIBMAGIC_PATH_ENVVAR = "TYPECODE_LIBMAGIC_PATH" +TYPECODE_LIBMAGIC_DB_PATH_ENVVAR = "TYPECODE_LIBMAGIC_DB_PATH" if TRACE: @@ -93,7 +94,7 @@ def file_type(location): else: def file_type(location): - """" + """ " Return the detected filetype for file at `location` or an empty string if nothing found or an error occurred. 
""" @@ -101,7 +102,7 @@ def file_type(location): return _detect(location, DETECT_TYPE) except: # TODO: log errors - return '' + return "" class NoMagicLibError(Exception): @@ -118,12 +119,12 @@ def load_lib_failover(): libmagic = None # Let's try to find magic or magic1 dll = ( - ctypes.util.find_library('magic') - or ctypes.util.find_library('magic1') - or ctypes.util.find_library('cygmagic-1') - or ctypes.util.find_library('libmagic-1') + ctypes.util.find_library("magic") + or ctypes.util.find_library("magic1") + or ctypes.util.find_library("cygmagic-1") + or ctypes.util.find_library("libmagic-1") # for MSYS2 - or ctypes.util.find_library('msys-magic-1') + or ctypes.util.find_library("msys-magic-1") ) # necessary because find_library returns None if it doesn't find the library if dll: @@ -131,28 +132,29 @@ def load_lib_failover(): if not (libmagic and libmagic._name): windows_dlls = [ - 'magic1.dll', - 'cygmagic-1.dll', - 'libmagic-1.dll', - 'msys-magic-1.dll', + "magic1.dll", + "cygmagic-1.dll", + "libmagic-1.dll", + "msys-magic-1.dll", ] platform_to_lib = { - 'darwin': ( + "darwin": ( [ - '/opt/local/lib/libmagic.dylib', - '/usr/local/lib/libmagic.dylib', - ] + + "/opt/local/lib/libmagic.dylib", + "/usr/local/lib/libmagic.dylib", + ] + + # Assumes there will only be one version installed when using brew - glob.glob('/usr/local/Cellar/libmagic/*/lib/libmagic.dylib') + - glob.glob('/opt/homebrew/Cellar/libmagic/*/lib/libmagic.dylib') + glob.glob("/usr/local/Cellar/libmagic/*/lib/libmagic.dylib") + + glob.glob("/opt/homebrew/Cellar/libmagic/*/lib/libmagic.dylib") ), - 'win32': windows_dlls, - 'cygwin': windows_dlls, - 'linux': ['libmagic.so.1'], + "win32": windows_dlls, + "cygwin": windows_dlls, + "linux": ["libmagic.so.1"], } # fallback for some Linuxes (e.g. Alpine) where library search does not # work # flake8:noqa - platform = 'linux' if sys.platform.startswith('linux') else sys.platform + platform = "linux" if sys.platform.startswith("linux") else sys.platform for dll in platform_to_lib.get(platform, []): try: libmagic = ctypes.CDLL(dll) @@ -179,49 +181,49 @@ def load_lib(): dll_loc = os.environ.get(TYPECODE_LIBMAGIC_PATH_ENVVAR) if TRACE and dll_loc: - logger_debug('load_lib:', 'got environ magic location:', dll_loc) + logger_debug("load_lib:", "got environ magic location:", dll_loc) # try a plugin-provided path second if not dll_loc: dll_loc = get_location(TYPECODE_LIBMAGIC_DLL) if TRACE and dll_loc: - logger_debug('load_lib:', 'got plugin magic location:', dll_loc) + logger_debug("load_lib:", "got plugin magic location:", dll_loc) # try well known locations if not dll_loc: failover_lib = load_lib_failover() if failover_lib: warnings.warn( - 'System libmagic found in typical location is used. ' - 'Install instead a typecode-libmagic plugin for best support.' + "System libmagic found in typical location is used. " + "Install instead a typecode-libmagic plugin for best support." ) return failover_lib # try the PATH if not dll_loc: - dll = 'libmagic.dll' if on_windows else 'libmagic.so' + dll = "libmagic.dll" if on_windows else "libmagic.so" dll_loc = command.find_in_path(dll) if dll_loc: warnings.warn( - 'libmagic found in the PATH. ' - 'Install instead a typecode-libmagic plugin for best support.' + "libmagic found in the PATH. " + "Install instead a typecode-libmagic plugin for best support." 
) if TRACE and dll_loc: - logger_debug('load_lib:', 'got path magic location:', dll_loc) + logger_debug("load_lib:", "got path magic location:", dll_loc) if not dll_loc or not os.path.isfile(dll_loc): raise NoMagicLibError( - 'CRITICAL: libmagic DLL and its magic database are not installed. ' - 'Unable to continue: you need to install a valid typecode-libmagic ' - 'plugin with a valid and proper libmagic and magic DB available.\n' - f'OR set the {TYPECODE_LIBMAGIC_PATH_ENVVAR} and ' - f'{TYPECODE_LIBMAGIC_DB_PATH_ENVVAR} environment variables.\n' - f'OR install libmagic in typical common locations.\n' - f'OR have a libmagic in the system PATH.\n' - ) + "CRITICAL: libmagic DLL and its magic database are not installed. " + "Unable to continue: you need to install a valid typecode-libmagic " + "plugin with a valid and proper libmagic and magic DB available.\n" + f"OR set the {TYPECODE_LIBMAGIC_PATH_ENVVAR} and " + f"{TYPECODE_LIBMAGIC_DB_PATH_ENVVAR} environment variables.\n" + f"OR install libmagic in typical common locations.\n" + f"OR have a libmagic in the system PATH.\n" + ) return command.load_shared_library(dll_loc) @@ -242,36 +244,36 @@ def get_magicdb_location(_cache=[]): magicdb_loc = os.environ.get(TYPECODE_LIBMAGIC_DB_PATH_ENVVAR) if TRACE and magicdb_loc: - logger_debug('get_magicdb_location:', 'got environ magicdb location:', magicdb_loc) + logger_debug("get_magicdb_location:", "got environ magicdb location:", magicdb_loc) # try a plugin-provided path second if not magicdb_loc: magicdb_loc = get_location(TYPECODE_LIBMAGIC_DB) if TRACE and magicdb_loc: - logger_debug('get_magicdb_location:', 'got plugin magicdb location:', magicdb_loc) + logger_debug("get_magicdb_location:", "got plugin magicdb location:", magicdb_loc) # try the PATH if not magicdb_loc: - db = 'magic.mgc' + db = "magic.mgc" magicdb_loc = command.find_in_path(db) if magicdb_loc: warnings.warn( - 'magicdb found in the PATH. ' - 'Install instead a typecode-libmagic plugin for best support.\n' - f'OR set the {TYPECODE_LIBMAGIC_DB_PATH_ENVVAR} environment variable.' + "magicdb found in the PATH. " + "Install instead a typecode-libmagic plugin for best support.\n" + f"OR set the {TYPECODE_LIBMAGIC_DB_PATH_ENVVAR} environment variable." ) if TRACE and magicdb_loc: - logger_debug('get_magicdb_location:', 'got path magicdb location:', magicdb_loc) + logger_debug("get_magicdb_location:", "got path magicdb location:", magicdb_loc) if not magicdb_loc: warnings.warn( - 'Libmagic magic database not found. ' - 'A default will be used if possible. ' - 'Install instead a typecode-libmagic plugin for best support.\n' - f'OR set the {TYPECODE_LIBMAGIC_DB_PATH_ENVVAR} environment variable.' + "Libmagic magic database not found. " + "A default will be used if possible. " + "Install instead a typecode-libmagic plugin for best support.\n" + f"OR set the {TYPECODE_LIBMAGIC_DB_PATH_ENVVAR} environment variable." ) return @@ -280,7 +282,7 @@ def get_magicdb_location(_cache=[]): def mime_type(location): - """" + """ " Return the detected mimetype for file at `location` or an empty string if nothing found or an error occurred. """ @@ -288,11 +290,11 @@ def mime_type(location): return _detect(location, DETECT_MIME) except: # TODO: log errors - return '' + return "" def encoding(location): - """" + """ " Return the detected encoding for file at `location` or an empty string. Raise an exception on errors. 
""" @@ -300,7 +302,7 @@ def encoding(location): def _detect(location, flags): - """" + """ " Return the detected type using `flags` of file at `location` or an empty string. Raise an exception on errors. """ @@ -310,9 +312,9 @@ def _detect(location, flags): detector = Detector(flags=flags) detectors[flags] = detector val = detector.get(location) - val = val or '' - val = val.decode('ascii', 'ignore').strip() - return ' '.join(val.split()) + val = val or "" + val = val.decode("ascii", "ignore").strip() + return " ".join(val.split()) class MagicException(Exception): @@ -320,7 +322,6 @@ class MagicException(Exception): class Detector(object): - def __init__(self, flags, magic_db_location=None): """ Create a new libmagic detector. @@ -349,7 +350,7 @@ def get(self, location): assert location try: # first use the path as is - return _magic_file(self.cookie, location) + return _magic_file(self.cookie, location) except: # then try to get a utf-8 encoded path: Rationale: # https://docs.python.org/2/library/ctypes.html#ctypes.set_conversion_mode ctypes @@ -358,10 +359,10 @@ def get(self, location): # anymore by libmagic in some cases. try: uloc = os.fsencode(location) - return _magic_file(self.cookie, uloc) + return _magic_file(self.cookie, uloc) except: # if all fails, read the start of the file instead - with open(location, 'rb') as fd: + with open(location, "rb") as fd: buf = fd.read(16384) return _magic_buffer(self.cookie, buf, len(buf)) @@ -393,10 +394,8 @@ def check_error(result, func, args): # NOQA if ( result is None or (is_int and result < 0) - or ( - is_bytes - and str(result, encoding='utf-8', errors='ignore').startswith('cannot open')) - or (is_text and result.startswith('cannot open')) + or (is_bytes and str(result, encoding="utf-8", errors="ignore").startswith("cannot open")) + or (is_text and result.startswith("cannot open")) ): err = _magic_error(args[0]) raise MagicException(err) diff --git a/src/typecode/mimetypes.py b/src/typecode/mimetypes.py index afb9739..87bb4ba 100644 --- a/src/typecode/mimetypes.py +++ b/src/typecode/mimetypes.py @@ -38,20 +38,20 @@ def guess_type(path): types_map = get_mimedb() suffix_map = { - '.svgz': '.svg.gz', - '.tgz': '.tar.gz', - '.taz': '.tar.gz', - '.tz': '.tar.gz', - '.tbz2': '.tar.bz2', - '.txz': '.tar.xz', + ".svgz": ".svg.gz", + ".tgz": ".tar.gz", + ".taz": ".tar.gz", + ".tz": ".tar.gz", + ".tbz2": ".tar.bz2", + ".txz": ".tar.xz", } encodings_map = { - '.gz': 'gzip', - '.Z': 'compress', - '.bz2': 'bzip2', - '.xz': 'xz', - '.br': 'br', + ".gz": "gzip", + ".Z": "compress", + ".bz2": "bzip2", + ".xz": "xz", + ".br": "br", } path = os.fspath(path) @@ -83,770 +83,768 @@ def get_types_map(): # file extension for a particular mime type appears before any others of the # same mimetype. 
return { - '.atom': 'application/atom+xml', - '.cu': 'application/cu-seeme', - '.ecma': 'application/ecmascript', - '.epub': 'application/epub+zip', - '.jar': 'application/java-archive', - '.ser': 'application/java-serialized-object', - '.class': 'application/java-vm', - '.js': 'application/javascript', - '.mjs': 'application/javascript', - '.json': 'application/json', - '.hqx': 'application/mac-binhex40', - '.webmanifest': 'application/manifest+json', - '.mrc': 'application/marc', - '.ma': 'application/mathematica', - '.mb': 'application/mathematica', - '.nb': 'application/mathematica', - '.mathml': 'application/mathml+xml', - '.mbox': 'application/mbox', - '.mscml': 'application/mediaservercontrol+xml', - '.doc': 'application/msword', - '.dot': 'application/msword', - '.mxf': 'application/mxf', - '.a': 'application/octet-stream', - '.bin': 'application/octet-stream', - '.bpk': 'application/octet-stream', - '.deploy': 'application/octet-stream', - '.dist': 'application/octet-stream', - '.distz': 'application/octet-stream', - '.dmg': 'application/octet-stream', - '.dms': 'application/octet-stream', - '.dump': 'application/octet-stream', - '.elc': 'application/octet-stream', - '.iso': 'application/octet-stream', - '.lha': 'application/octet-stream', - '.lrf': 'application/octet-stream', - '.lzh': 'application/octet-stream', - '.o': 'application/octet-stream', - '.obj': 'application/octet-stream', - '.pkg': 'application/octet-stream', - '.so': 'application/octet-stream', - '.oda': 'application/oda', - '.ogx': 'application/ogg', - '.onepkg': 'application/onenote', - '.onetmp': 'application/onenote', - '.onetoc': 'application/onenote', - '.onetoc2': 'application/onenote', - '.pdf': 'application/pdf', - '.pgp': 'application/pgp-encrypted', - '.asc': 'application/pgp-signature', - '.sig': 'application/pgp-signature', - '.prf': 'application/pics-rules', - '.p10': 'application/pkcs10', - '.p7c': 'application/pkcs7-mime', - '.p7m': 'application/pkcs7-mime', - '.p7s': 'application/pkcs7-signature', - '.cer': 'application/pkix-cert', - '.crl': 'application/pkix-crl', - '.pkipath': 'application/pkix-pkipath', - '.pki': 'application/pkixcmp', - '.pls': 'application/pls+xml', - '.ai': 'application/postscript', - '.eps': 'application/postscript', - '.ps': 'application/postscript', - '.cww': 'application/prs.cww', - '.rdf': 'application/rdf+xml', - '.rif': 'application/reginfo+xml', - '.rnc': 'application/relax-ng-compact-syntax', - '.rss': 'application/rss+xml', - '.rtf': 'application/rtf', - '.sbml': 'application/sbml+xml', - '.sdp': 'application/sdp', - '.smi': 'application/smil+xml', - '.smil': 'application/smil+xml', - '.rq': 'application/sparql-query', - '.srx': 'application/sparql-results+xml', - '.plb': 'application/vnd.3gpp.pic-bw-large', - '.psb': 'application/vnd.3gpp.pic-bw-small', - '.pvb': 'application/vnd.3gpp.pic-bw-var', - '.tcap': 'application/vnd.3gpp2.tcap', - '.pwn': 'application/vnd.3m.post-it-notes', - '.aso': 'application/vnd.accpac.simply.aso', - '.imp': 'application/vnd.accpac.simply.imp', - '.acu': 'application/vnd.acucobol', - '.air': 'application/vnd.adobe.air-application-installer-package+zip', - '.xdp': 'application/vnd.adobe.xdp+xml', - '.xfdf': 'application/vnd.adobe.xfdf', - '.azw': 'application/vnd.amazon.ebook', - - '.apk': 'application/vnd.android.package-archive', - '.cii': 'application/vnd.anser-web-certificate-issue-initiation', - '.fti': 'application/vnd.anser-web-funds-transfer-initiation', - '.atx': 'application/vnd.antix.game-component', - '.mpkg': 
'application/vnd.apple.installer+xml', - '.m3u8': 'application/vnd.apple.mpegurl', - '.bmi': 'application/vnd.bmi', - '.rep': 'application/vnd.businessobjects', - '.cdxml': 'application/vnd.chemdraw+xml', - '.mmd': 'application/vnd.chipnuts.karaoke-mmd', - '.cdy': 'application/vnd.cinderella', - '.ppd': 'application/vnd.cups-ppd', - '.car': 'application/vnd.curl.car', - '.pcurl': 'application/vnd.curl.pcurl', - '.mlp': 'application/vnd.dolby.mlp', - '.dpg': 'application/vnd.dpgraph', - '.dfac': 'application/vnd.dreamfactory', - '.geo': 'application/vnd.dynageo', - '.mag': 'application/vnd.ecowin.chart', - '.nml': 'application/vnd.enliven', - '.esf': 'application/vnd.epson.esf', - '.msf': 'application/vnd.epson.msf', - '.qam': 'application/vnd.epson.quickanime', - '.slt': 'application/vnd.epson.salt', - '.ssf': 'application/vnd.epson.ssf', - '.es3': 'application/vnd.eszigno3+xml', - '.et3': 'application/vnd.eszigno3+xml', - '.ez2': 'application/vnd.ezpix-album', - '.ez3': 'application/vnd.ezpix-package', - '.fdf': 'application/vnd.fdf', - '.mseed': 'application/vnd.fdsn.mseed', - '.dataless': 'application/vnd.fdsn.seed', - '.seed': 'application/vnd.fdsn.seed', - '.gph': 'application/vnd.flographit', - '.ftc': 'application/vnd.fluxtime.clip', - '.book': 'application/vnd.framemaker', - '.fm': 'application/vnd.framemaker', - '.frame': 'application/vnd.framemaker', - '.maker': 'application/vnd.framemaker', - '.fnc': 'application/vnd.frogans.fnc', - '.ltf': 'application/vnd.frogans.ltf', - '.fsc': 'application/vnd.fsc.weblaunch', - '.oas': 'application/vnd.fujitsu.oasys', - '.oa2': 'application/vnd.fujitsu.oasys2', - '.oa3': 'application/vnd.fujitsu.oasys3', - '.fg5': 'application/vnd.fujitsu.oasysgp', - '.bh2': 'application/vnd.fujitsu.oasysprs', - '.ddd': 'application/vnd.fujixerox.ddd', - '.xdw': 'application/vnd.fujixerox.docuworks', - '.xbd': 'application/vnd.fujixerox.docuworks.binder', - '.fzs': 'application/vnd.fuzzysheet', - '.txd': 'application/vnd.genomatix.tuxedo', - '.ggb': 'application/vnd.geogebra.file', - '.ggt': 'application/vnd.geogebra.tool', - '.gex': 'application/vnd.geometry-explorer', - '.gre': 'application/vnd.geometry-explorer', - '.gxt': 'application/vnd.geonext', - '.g2w': 'application/vnd.geoplan', - '.g3w': 'application/vnd.geospace', - '.gmx': 'application/vnd.gmx', - '.kml': 'application/vnd.google-earth.kml+xml', - '.kmz': 'application/vnd.google-earth.kmz', - '.gqf': 'application/vnd.grafeq', - '.gqs': 'application/vnd.grafeq', - '.gac': 'application/vnd.groove-account', - '.ghf': 'application/vnd.groove-help', - '.gim': 'application/vnd.groove-identity-message', - '.grv': 'application/vnd.groove-injector', - '.gtm': 'application/vnd.groove-tool-message', - '.tpl': 'application/vnd.groove-tool-template', - '.vcg': 'application/vnd.groove-vcard', - '.zmm': 'application/vnd.handheld-entertainment+xml', - '.hbci': 'application/vnd.hbci', - '.les': 'application/vnd.hhe.lesson-player', - '.hpgl': 'application/vnd.hp-hpgl', - '.hpid': 'application/vnd.hp-hpid', - '.hps': 'application/vnd.hp-hps', - '.jlt': 'application/vnd.hp-jlyt', - '.pcl': 'application/vnd.hp-pcl', - '.pclxl': 'application/vnd.hp-pclxl', - '.sfd-hdstx': 'application/vnd.hydrostatix.sof-data', - '.x3d': 'application/vnd.hzn-3d-crossword', - '.mpy': 'application/vnd.ibm.minipay', - '.afp': 'application/vnd.ibm.modcap', - '.list3820': 'application/vnd.ibm.modcap', - '.listafp': 'application/vnd.ibm.modcap', - '.irm': 'application/vnd.ibm.rights-management', - '.sc': 
'application/vnd.ibm.secure-container', - '.icc': 'application/vnd.iccprofile', - '.icm': 'application/vnd.iccprofile', - '.igl': 'application/vnd.igloader', - '.ivp': 'application/vnd.immervision-ivp', - '.ivu': 'application/vnd.immervision-ivu', - '.xpw': 'application/vnd.intercon.formnet', - '.xpx': 'application/vnd.intercon.formnet', - '.qbo': 'application/vnd.intu.qbo', - '.qfx': 'application/vnd.intu.qfx', - '.rcprofile': 'application/vnd.ipunplugged.rcprofile', - '.irp': 'application/vnd.irepository.package+xml', - '.xpr': 'application/vnd.is-xpr', - '.jam': 'application/vnd.jam', - '.rms': 'application/vnd.jcp.javame.midlet-rms', - '.jisp': 'application/vnd.jisp', - '.joda': 'application/vnd.joost.joda-archive', - '.ktr': 'application/vnd.kahootz', - '.ktz': 'application/vnd.kahootz', - '.karbon': 'application/vnd.kde.karbon', - '.chrt': 'application/vnd.kde.kchart', - '.kfo': 'application/vnd.kde.kformula', - '.flw': 'application/vnd.kde.kivio', - '.kon': 'application/vnd.kde.kontour', - '.kpr': 'application/vnd.kde.kpresenter', - '.kpt': 'application/vnd.kde.kpresenter', - '.ksp': 'application/vnd.kde.kspread', - '.kwd': 'application/vnd.kde.kword', - '.kwt': 'application/vnd.kde.kword', - '.htke': 'application/vnd.kenameaapp', - '.kia': 'application/vnd.kidspiration', - '.kne': 'application/vnd.kinar', - '.knp': 'application/vnd.kinar', - '.skd': 'application/vnd.koan', - '.skm': 'application/vnd.koan', - '.skp': 'application/vnd.koan', - '.skt': 'application/vnd.koan', - '.sse': 'application/vnd.kodak-descriptor', - '.lbd': 'application/vnd.llamagraphics.life-balance.desktop', - '.lbe': 'application/vnd.llamagraphics.life-balance.exchange+xml', - '.123': 'application/vnd.lotus-1-2-3', - '.apr': 'application/vnd.lotus-approach', - '.pre': 'application/vnd.lotus-freelance', - '.nsf': 'application/vnd.lotus-notes', - '.org': 'application/vnd.lotus-organizer', - '.scm': 'application/vnd.lotus-screencam', - '.lwp': 'application/vnd.lotus-wordpro', - '.portpkg': 'application/vnd.macports.portpkg', - '.mcd': 'application/vnd.mcd', - '.mc1': 'application/vnd.medcalcdata', - '.cdkey': 'application/vnd.mediastation.cdkey', - '.mwf': 'application/vnd.mfer', - '.mfm': 'application/vnd.mfmp', - '.flo': 'application/vnd.micrografx.flo', - '.igx': 'application/vnd.micrografx.igx', - '.mif': 'application/vnd.mif', - '.daf': 'application/vnd.mobius.daf', - '.dis': 'application/vnd.mobius.dis', - '.mbk': 'application/vnd.mobius.mbk', - '.mqy': 'application/vnd.mobius.mqy', - '.msl': 'application/vnd.mobius.msl', - '.plc': 'application/vnd.mobius.plc', - '.txf': 'application/vnd.mobius.txf', - '.mpn': 'application/vnd.mophun.application', - '.mpc': 'application/vnd.mophun.certificate', - '.xul': 'application/vnd.mozilla.xul+xml', - '.cil': 'application/vnd.ms-artgalry', - '.cab': 'application/vnd.ms-cab-compressed', - '.xla': 'application/vnd.ms-excel', - '.xlb': 'application/vnd.ms-excel', - '.xlc': 'application/vnd.ms-excel', - '.xlm': 'application/vnd.ms-excel', - '.xls': 'application/vnd.ms-excel', - '.xlt': 'application/vnd.ms-excel', - '.xlw': 'application/vnd.ms-excel', - '.xlam': 'application/vnd.ms-excel.addin.macroenabled.12', - '.xlsb': 'application/vnd.ms-excel.sheet.binary.macroenabled.12', - '.xlsm': 'application/vnd.ms-excel.sheet.macroenabled.12', - '.xltm': 'application/vnd.ms-excel.template.macroenabled.12', - '.eot': 'application/vnd.ms-fontobject', - '.chm': 'application/vnd.ms-htmlhelp', - '.ims': 'application/vnd.ms-ims', - '.lrm': 'application/vnd.ms-lrm', - '.cat': 
'application/vnd.ms-pki.seccat', - '.stl': 'application/vnd.ms-pki.stl', - '.pot': 'application/vnd.ms-powerpoint', - '.ppa': 'application/vnd.ms-powerpoint', - '.pps': 'application/vnd.ms-powerpoint', - '.ppt': 'application/vnd.ms-powerpoint', - '.pwz': 'application/vnd.ms-powerpoint', - '.ppam': 'application/vnd.ms-powerpoint.addin.macroenabled.12', - '.pptm': 'application/vnd.ms-powerpoint.presentation.macroenabled.12', - '.sldm': 'application/vnd.ms-powerpoint.slide.macroenabled.12', - '.ppsm': 'application/vnd.ms-powerpoint.slideshow.macroenabled.12', - '.potm': 'application/vnd.ms-powerpoint.template.macroenabled.12', - '.mpp': 'application/vnd.ms-project', - '.mpt': 'application/vnd.ms-project', - '.docm': 'application/vnd.ms-word.document.macroenabled.12', - '.dotm': 'application/vnd.ms-word.template.macroenabled.12', - '.wcm': 'application/vnd.ms-works', - '.wdb': 'application/vnd.ms-works', - '.wks': 'application/vnd.ms-works', - '.wps': 'application/vnd.ms-works', - '.wpl': 'application/vnd.ms-wpl', - '.xps': 'application/vnd.ms-xpsdocument', - '.mseq': 'application/vnd.mseq', - '.mus': 'application/vnd.musician', - '.msty': 'application/vnd.muvee.style', - '.nlu': 'application/vnd.neurolanguage.nlu', - '.nnd': 'application/vnd.noblenet-directory', - '.nns': 'application/vnd.noblenet-sealer', - '.nnw': 'application/vnd.noblenet-web', - '.ngdat': 'application/vnd.nokia.n-gage.data', - '.n-gage': 'application/vnd.nokia.n-gage.symbian.install', - '.rpst': 'application/vnd.nokia.radio-preset', - '.rpss': 'application/vnd.nokia.radio-presets', - '.edm': 'application/vnd.novadigm.edm', - '.edx': 'application/vnd.novadigm.edx', - '.ext': 'application/vnd.novadigm.ext', - '.odc': 'application/vnd.oasis.opendocument.chart', - '.otc': 'application/vnd.oasis.opendocument.chart-template', - '.odb': 'application/vnd.oasis.opendocument.database', - '.odf': 'application/vnd.oasis.opendocument.formula', - '.odft': 'application/vnd.oasis.opendocument.formula-template', - '.odg': 'application/vnd.oasis.opendocument.graphics', - '.otg': 'application/vnd.oasis.opendocument.graphics-template', - '.odi': 'application/vnd.oasis.opendocument.image', - '.oti': 'application/vnd.oasis.opendocument.image-template', - '.odp': 'application/vnd.oasis.opendocument.presentation', - '.otp': 'application/vnd.oasis.opendocument.presentation-template', - '.ods': 'application/vnd.oasis.opendocument.spreadsheet', - '.ots': 'application/vnd.oasis.opendocument.spreadsheet-template', - '.odt': 'application/vnd.oasis.opendocument.text', - '.otm': 'application/vnd.oasis.opendocument.text-master', - '.ott': 'application/vnd.oasis.opendocument.text-template', - '.oth': 'application/vnd.oasis.opendocument.text-web', - '.xo': 'application/vnd.olpc-sugar', - '.dd2': 'application/vnd.oma.dd2+xml', - '.oxt': 'application/vnd.openofficeorg.extension', - '.pptx': 'application/vnd.openxmlformats-officedocument.presentationml.presentation', - '.sldx': 'application/vnd.openxmlformats-officedocument.presentationml.slide', - '.ppsx': 'application/vnd.openxmlformats-officedocument.presentationml.slideshow', - '.potx': 'application/vnd.openxmlformats-officedocument.presentationml.template', - '.xlsx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet', - '.xltx': 'application/vnd.openxmlformats-officedocument.spreadsheetml.template', - '.docx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.document', - '.dotx': 'application/vnd.openxmlformats-officedocument.wordprocessingml.template', - '.dp': 
'application/vnd.osgi.dp', - '.oprc': 'application/vnd.palm', - '.pdb': 'application/vnd.palm', - '.pqa': 'application/vnd.palm', - '.paw': 'application/vnd.pawaafile', - '.str': 'application/vnd.pg.format', - '.ei6': 'application/vnd.pg.osasli', - '.efif': 'application/vnd.picsel', - '.wg': 'application/vnd.pmi.widget', - '.plf': 'application/vnd.pocketlearn', - '.pbd': 'application/vnd.powerbuilder6', - '.box': 'application/vnd.previewsystems.box', - '.mgz': 'application/vnd.proteus.magazine', - '.qps': 'application/vnd.publishare-delta-tree', - '.ptid': 'application/vnd.pvi.ptid1', - '.qwd': 'application/vnd.quark.quarkxpress', - '.qwt': 'application/vnd.quark.quarkxpress', - '.qxb': 'application/vnd.quark.quarkxpress', - '.qxd': 'application/vnd.quark.quarkxpress', - '.qxl': 'application/vnd.quark.quarkxpress', - '.qxt': 'application/vnd.quark.quarkxpress', - '.bed': 'application/vnd.realvnc.bed', - '.mxl': 'application/vnd.recordare.musicxml', - '.musicxml': 'application/vnd.recordare.musicxml+xml', - '.cod': 'application/vnd.rim.cod', - '.rm': 'application/vnd.rn-realmedia', - '.link66': 'application/vnd.route66.link66+xml', - '.st': 'application/vnd.sailingtracker.track', - '.see': 'application/vnd.seemail', - '.sema': 'application/vnd.sema', - '.semd': 'application/vnd.semd', - '.semf': 'application/vnd.semf', - '.ifm': 'application/vnd.shana.informed.formdata', - '.itp': 'application/vnd.shana.informed.formtemplate', - '.iif': 'application/vnd.shana.informed.interchange', - '.ipk': 'application/vnd.shana.informed.package', - '.twd': 'application/vnd.simtech-mindmapper', - '.twds': 'application/vnd.simtech-mindmapper', - '.mmf': 'application/vnd.smaf', - '.teacher': 'application/vnd.smart.teacher', - '.sdkd': 'application/vnd.solent.sdkm+xml', - '.sdkm': 'application/vnd.solent.sdkm+xml', - '.dxp': 'application/vnd.spotfire.dxp', - '.sfs': 'application/vnd.spotfire.sfs', - '.sdc': 'application/vnd.stardivision.calc', - '.sda': 'application/vnd.stardivision.draw', - '.sdd': 'application/vnd.stardivision.impress', - '.smf': 'application/vnd.stardivision.math', - '.sdw': 'application/vnd.stardivision.writer', - '.vor': 'application/vnd.stardivision.writer', - '.sgl': 'application/vnd.stardivision.writer-global', - '.sxc': 'application/vnd.sun.xml.calc', - '.stc': 'application/vnd.sun.xml.calc.template', - '.sxd': 'application/vnd.sun.xml.draw', - '.std': 'application/vnd.sun.xml.draw.template', - '.sxi': 'application/vnd.sun.xml.impress', - '.sti': 'application/vnd.sun.xml.impress.template', - '.sxm': 'application/vnd.sun.xml.math', - '.sxw': 'application/vnd.sun.xml.writer', - '.sxg': 'application/vnd.sun.xml.writer.global', - '.stw': 'application/vnd.sun.xml.writer.template', - '.sus': 'application/vnd.sus-calendar', - '.susp': 'application/vnd.sus-calendar', - '.svd': 'application/vnd.svd', - '.sis': 'application/vnd.symbian.install', - '.sisx': 'application/vnd.symbian.install', - '.xsm': 'application/vnd.syncml+xml', - '.bdm': 'application/vnd.syncml.dm+wbxml', - '.xdm': 'application/vnd.syncml.dm+xml', - '.tao': 'application/vnd.tao.intent-module-archive', - '.tmo': 'application/vnd.tmobile-livetv', - '.tpt': 'application/vnd.trid.tpt', - '.mxs': 'application/vnd.triscape.mxs', - '.tra': 'application/vnd.trueapp', - '.ufd': 'application/vnd.ufdl', - '.ufdl': 'application/vnd.ufdl', - '.utz': 'application/vnd.uiq.theme', - '.umj': 'application/vnd.umajin', - '.unityweb': 'application/vnd.unity', - '.uoml': 'application/vnd.uoml+xml', - '.vcx': 'application/vnd.vcx', - '.vsd': 
'application/vnd.visio', - '.vss': 'application/vnd.visio', - '.vst': 'application/vnd.visio', - '.vsw': 'application/vnd.visio', - '.vis': 'application/vnd.visionary', - '.vsf': 'application/vnd.vsf', - '.wbxml': 'application/vnd.wap.wbxml', - '.wmlc': 'application/vnd.wap.wmlc', - '.wmlsc': 'application/vnd.wap.wmlscriptc', - '.wtb': 'application/vnd.webturbo', - '.nbp': 'application/vnd.wolfram.player', - '.wpd': 'application/vnd.wordperfect', - '.wqd': 'application/vnd.wqd', - '.stf': 'application/vnd.wt.stf', - '.xar': 'application/vnd.xara', - '.xfdl': 'application/vnd.xfdl', - '.hvd': 'application/vnd.yamaha.hv-dic', - '.hvs': 'application/vnd.yamaha.hv-script', - '.hvp': 'application/vnd.yamaha.hv-voice', - '.osf': 'application/vnd.yamaha.openscoreformat', - '.osfpvg': 'application/vnd.yamaha.openscoreformat.osfpvg+xml', - '.saf': 'application/vnd.yamaha.smaf-audio', - '.spf': 'application/vnd.yamaha.smaf-phrase', - '.cmp': 'application/vnd.yellowriver-custom-menu', - '.zir': 'application/vnd.zul', - '.zirz': 'application/vnd.zul', - '.zaz': 'application/vnd.zzazz.deck+xml', - '.vxml': 'application/voicexml+xml', - '.wasm': 'application/wasm', - '.hlp': 'application/winhlp', - '.wsdl': 'application/wsdl+xml', - '.wspolicy': 'application/wspolicy+xml', - '.abw': 'application/x-abiword', - '.ace': 'application/x-ace-compressed', - '.aab': 'application/x-authorware-bin', - '.u32': 'application/x-authorware-bin', - '.vox': 'application/x-authorware-bin', - '.x32': 'application/x-authorware-bin', - '.aam': 'application/x-authorware-map', - '.aas': 'application/x-authorware-seg', - '.bcpio': 'application/x-bcpio', - '.torrent': 'application/x-bittorrent', - '.bz': 'application/x-bzip', - '.boz': 'application/x-bzip2', - '.bz2': 'application/x-bzip2', - '.vcd': 'application/x-cdlink', - '.chat': 'application/x-chat', - '.pgn': 'application/x-chess-pgn', - '.cpio': 'application/x-cpio', - '.csh': 'application/x-csh', - '.deb': 'application/x-debian-package', - '.udeb': 'application/x-debian-package', - '.cct': 'application/x-director', - '.cst': 'application/x-director', - '.cxt': 'application/x-director', - '.dcr': 'application/x-director', - '.dir': 'application/x-director', - '.dxr': 'application/x-director', - '.fgd': 'application/x-director', - '.swa': 'application/x-director', - '.w3d': 'application/x-director', - '.wad': 'application/x-doom', - '.ncx': 'application/x-dtbncx+xml', - '.dtb': 'application/x-dtbook+xml', - '.res': 'application/x-dtbresource+xml', - '.dvi': 'application/x-dvi', - '.bdf': 'application/x-font-bdf', - '.gsf': 'application/x-font-ghostscript', - '.psf': 'application/x-font-linux-psf', - '.otf': 'application/x-font-otf', - '.pcf': 'application/x-font-pcf', - '.snf': 'application/x-font-snf', - '.ttc': 'application/x-font-ttf', - '.ttf': 'application/x-font-ttf', - '.afm': 'application/x-font-type1', - '.pfa': 'application/x-font-type1', - '.pfb': 'application/x-font-type1', - '.pfm': 'application/x-font-type1', - '.spl': 'application/x-futuresplash', - '.gnumeric': 'application/x-gnumeric', - '.gtar': 'application/x-gtar', - '.hdf': 'application/x-hdf', - '.jnlp': 'application/x-java-jnlp-file', - '.latex': 'application/x-latex', - '.mobi': 'application/x-mobipocket-ebook', - '.prc': 'application/x-mobipocket-ebook', - '.wmd': 'application/x-ms-wmd', - '.wmz': 'application/x-ms-wmz', - '.xbap': 'application/x-ms-xbap', - '.mdb': 'application/x-msaccess', - '.obd': 'application/x-msbinder', - '.crd': 'application/x-mscardfile', - '.clp': 
'application/x-msclip', - '.bat': 'application/x-msdownload', - '.com': 'application/x-msdownload', - '.dll': 'application/x-msdownload', - '.exe': 'application/x-msdownload', - '.msi': 'application/x-msdownload', - '.m13': 'application/x-msmediaview', - '.m14': 'application/x-msmediaview', - '.mvb': 'application/x-msmediaview', - '.wmf': 'application/x-msmetafile', - '.mny': 'application/x-msmoney', - '.pub': 'application/x-mspublisher', - '.scd': 'application/x-msschedule', - '.trm': 'application/x-msterminal', - '.wri': 'application/x-mswrite', - '.cdf': 'application/x-netcdf', - '.nc': 'application/x-netcdf', - '.p12': 'application/x-pkcs12', - '.pfx': 'application/x-pkcs12', - '.p7b': 'application/x-pkcs7-certificates', - '.spc': 'application/x-pkcs7-certificates', - '.p7r': 'application/x-pkcs7-certreqresp', - '.pyc': 'application/x-python-code', - '.pyo': 'application/x-python-code', - '.rar': 'application/x-rar-compressed', - '.sh': 'application/x-sh', - '.shar': 'application/x-shar', - '.swf': 'application/x-shockwave-flash', - '.xap': 'application/x-silverlight-app', - '.sit': 'application/x-stuffit', - '.sitx': 'application/x-stuffitx', - '.sv4cpio': 'application/x-sv4cpio', - '.sv4crc': 'application/x-sv4crc', - '.tar': 'application/x-tar', - '.tcl': 'application/x-tcl', - '.tex': 'application/x-tex', - '.tfm': 'application/x-tex-tfm', - '.texi': 'application/x-texinfo', - '.texinfo': 'application/x-texinfo', - '.ustar': 'application/x-ustar', - '.src': 'application/x-wais-source', - '.crt': 'application/x-x509-ca-cert', - '.der': 'application/x-x509-ca-cert', - '.fig': 'application/x-xfig', - '.xpi': 'application/x-xpinstall', - '.xenc': 'application/xenc+xml', - '.xht': 'application/xhtml+xml', - '.xhtml': 'application/xhtml+xml', - '.xml': 'application/xml', - '.xpdl': 'application/xml', - '.xsl': 'application/xml', - '.dtd': 'application/xml-dtd', - '.xop': 'application/xop+xml', - '.xslt': 'application/xslt+xml', - '.xspf': 'application/xspf+xml', - '.mxml': 'application/xv+xml', - '.xhvml': 'application/xv+xml', - '.xvm': 'application/xv+xml', - '.xvml': 'application/xv+xml', - '.zip': 'application/zip', - '.adp': 'audio/adpcm', - '.au': 'audio/basic', - '.snd': 'audio/basic', - '.kar': 'audio/midi', - '.mid': 'audio/midi', - '.midi': 'audio/midi', - '.rmi': 'audio/midi', - '.mp4a': 'audio/mp4', - '.m2a': 'audio/mpeg', - '.m3a': 'audio/mpeg', - '.mp2': 'audio/mpeg', - '.mp2a': 'audio/mpeg', - '.mp3': 'audio/mpeg', - '.mpga': 'audio/mpeg', - '.oga': 'audio/ogg', - '.ogg': 'audio/ogg', - '.spx': 'audio/ogg', - '.eol': 'audio/vnd.digital-winds', - '.dra': 'audio/vnd.dra', - '.dts': 'audio/vnd.dts', - '.dtshd': 'audio/vnd.dts.hd', - '.lvp': 'audio/vnd.lucent.voice', - '.pya': 'audio/vnd.ms-playready.media.pya', - '.ecelp4800': 'audio/vnd.nuera.ecelp4800', - '.ecelp7470': 'audio/vnd.nuera.ecelp7470', - '.ecelp9600': 'audio/vnd.nuera.ecelp9600', - '.aac': 'audio/x-aac', - '.aif': 'audio/x-aiff', - '.aifc': 'audio/x-aiff', - '.aiff': 'audio/x-aiff', - '.m3u': 'audio/x-mpegurl', - '.wax': 'audio/x-ms-wax', - '.wma': 'audio/x-ms-wma', - '.ra': 'audio/x-pn-realaudio', - '.ram': 'audio/x-pn-realaudio', - '.rmp': 'audio/x-pn-realaudio-plugin', - '.wav': 'audio/x-wav', - '.cdx': 'chemical/x-cdx', - '.cif': 'chemical/x-cif', - '.cmdf': 'chemical/x-cmdf', - '.cml': 'chemical/x-cml', - '.csml': 'chemical/x-csml', - '.xyz': 'chemical/x-xyz', - '.bmp': 'image/bmp', - '.cgm': 'image/cgm', - '.g3': 'image/g3fax', - '.gif': 'image/gif', - '.ief': 'image/ief', - '.jpe': 'image/jpeg', - 
'.jpeg': 'image/jpeg', - '.jpg': 'image/jpeg', - '.png': 'image/png', - '.btif': 'image/prs.btif', - '.svg': 'image/svg+xml', - '.svgz': 'image/svg+xml', - '.tif': 'image/tiff', - '.tiff': 'image/tiff', - '.psd': 'image/vnd.adobe.photoshop', - '.djv': 'image/vnd.djvu', - '.djvu': 'image/vnd.djvu', - '.dwg': 'image/vnd.dwg', - '.dxf': 'image/vnd.dxf', - '.fbs': 'image/vnd.fastbidsheet', - '.fpx': 'image/vnd.fpx', - '.fst': 'image/vnd.fst', - '.mmr': 'image/vnd.fujixerox.edmics-mmr', - '.rlc': 'image/vnd.fujixerox.edmics-rlc', - '.mdi': 'image/vnd.ms-modi', - '.npx': 'image/vnd.net-fpx', - '.wbmp': 'image/vnd.wap.wbmp', - '.xif': 'image/vnd.xiff', - '.ras': 'image/x-cmu-raster', - '.cmx': 'image/x-cmx', - '.fh': 'image/x-freehand', - '.fh4': 'image/x-freehand', - '.fh5': 'image/x-freehand', - '.fh7': 'image/x-freehand', - '.fhc': 'image/x-freehand', - '.ico': 'image/x-icon', - '.pcx': 'image/x-pcx', - '.pct': 'image/x-pict', - '.pic': 'image/x-pict', - '.pnm': 'image/x-portable-anymap', - '.pbm': 'image/x-portable-bitmap', - '.pgm': 'image/x-portable-graymap', - '.ppm': 'image/x-portable-pixmap', - '.rgb': 'image/x-rgb', - '.xbm': 'image/x-xbitmap', - '.xpm': 'image/x-xpixmap', - '.xwd': 'image/x-xwindowdump', - '.eml': 'message/rfc822', - '.mht': 'message/rfc822', - '.mhtml': 'message/rfc822', - '.mime': 'message/rfc822', - '.nws': 'message/rfc822', - '.iges': 'model/iges', - '.igs': 'model/iges', - '.mesh': 'model/mesh', - '.msh': 'model/mesh', - '.silo': 'model/mesh', - '.dwf': 'model/vnd.dwf', - '.gdl': 'model/vnd.gdl', - '.gtw': 'model/vnd.gtw', - '.mts': 'model/vnd.mts', - '.vtu': 'model/vnd.vtu', - '.vrml': 'model/vrml', - '.wrl': 'model/vrml', - '.ics': 'text/calendar', - '.ifb': 'text/calendar', - '.css': 'text/css', - '.csv': 'text/csv', - '.htm': 'text/html', - '.html': 'text/html', - '.conf': 'text/plain', - '.def': 'text/plain', - '.in': 'text/plain', - '.ksh': 'text/plain', - '.list': 'text/plain', - '.log': 'text/plain', - '.pl': 'text/plain', - '.text': 'text/plain', - '.txt': 'text/plain', - '.dsc': 'text/prs.lines.tag', - '.rtx': 'text/richtext', - '.sgm': 'text/sgml', - '.sgml': 'text/sgml', - '.tsv': 'text/tab-separated-values', - '.man': 'text/troff', - '.me': 'text/troff', - '.ms': 'text/troff', - '.roff': 'text/troff', - '.t': 'text/troff', - '.tr': 'text/troff', - '.uri': 'text/uri-list', - '.uris': 'text/uri-list', - '.urls': 'text/uri-list', - '.curl': 'text/vnd.curl', - '.dcurl': 'text/vnd.curl.dcurl', - '.mcurl': 'text/vnd.curl.mcurl', - '.scurl': 'text/vnd.curl.scurl', - '.fly': 'text/vnd.fly', - '.flx': 'text/vnd.fmi.flexstor', - '.gv': 'text/vnd.graphviz', - '.3dml': 'text/vnd.in3d.3dml', - '.spot': 'text/vnd.in3d.spot', - '.jad': 'text/vnd.sun.j2me.app-descriptor', - '.wml': 'text/vnd.wap.wml', - '.wmls': 'text/vnd.wap.wmlscript', - '.asm': 'text/x-asm', - '.s': 'text/x-asm', - '.c': 'text/x-c', - '.cc': 'text/x-c', - '.cpp': 'text/x-c', - '.cxx': 'text/x-c', - '.dic': 'text/x-c', - '.h': 'text/x-c', - '.hh': 'text/x-c', - '.hpp': 'text/x-c', - '.f': 'text/x-fortran', - '.f77': 'text/x-fortran', - '.f90': 'text/x-fortran', - '.for': 'text/x-fortran', - '.java': 'text/x-java-source', - '.p': 'text/x-pascal', - '.pas': 'text/x-pascal', - '.py': 'text/x-python', - '.etx': 'text/x-setext', - '.uu': 'text/x-uuencode', - '.vcs': 'text/x-vcalendar', - '.vcf': 'text/x-vcard', - '.3gp': 'video/3gpp', - '.3g2': 'video/3gpp2', - '.h261': 'video/h261', - '.h263': 'video/h263', - '.h264': 'video/h264', - '.jpgv': 'video/jpeg', - '.jpgm': 'video/jpm', - '.jpm': 'video/jpm', 
- '.mj2': 'video/mj2', - '.mjp2': 'video/mj2', - '.mp4': 'video/mp4', - '.mp4v': 'video/mp4', - '.mpg4': 'video/mp4', - '.m1v': 'video/mpeg', - '.m2v': 'video/mpeg', - '.mpa': 'video/mpeg', - '.mpe': 'video/mpeg', - '.mpeg': 'video/mpeg', - '.mpg': 'video/mpeg', - '.ogv': 'video/ogg', - '.mov': 'video/quicktime', - '.qt': 'video/quicktime', - '.fvt': 'video/vnd.fvt', - '.m4u': 'video/vnd.mpegurl', - '.mxu': 'video/vnd.mpegurl', - '.pyv': 'video/vnd.ms-playready.media.pyv', - '.viv': 'video/vnd.vivo', - '.webm': 'video/webm', - '.f4v': 'video/x-f4v', - '.fli': 'video/x-fli', - '.flv': 'video/x-flv', - '.m4v': 'video/x-m4v', - '.asf': 'video/x-ms-asf', - '.asx': 'video/x-ms-asf', - '.wm': 'video/x-ms-wm', - '.wmv': 'video/x-ms-wmv', - '.wmx': 'video/x-ms-wmx', - '.wvx': 'video/x-ms-wvx', - '.avi': 'video/x-msvideo', - '.movie': 'video/x-sgi-movie', + ".atom": "application/atom+xml", + ".cu": "application/cu-seeme", + ".ecma": "application/ecmascript", + ".epub": "application/epub+zip", + ".jar": "application/java-archive", + ".ser": "application/java-serialized-object", + ".class": "application/java-vm", + ".js": "application/javascript", + ".mjs": "application/javascript", + ".json": "application/json", + ".hqx": "application/mac-binhex40", + ".webmanifest": "application/manifest+json", + ".mrc": "application/marc", + ".ma": "application/mathematica", + ".mb": "application/mathematica", + ".nb": "application/mathematica", + ".mathml": "application/mathml+xml", + ".mbox": "application/mbox", + ".mscml": "application/mediaservercontrol+xml", + ".doc": "application/msword", + ".dot": "application/msword", + ".mxf": "application/mxf", + ".a": "application/octet-stream", + ".bin": "application/octet-stream", + ".bpk": "application/octet-stream", + ".deploy": "application/octet-stream", + ".dist": "application/octet-stream", + ".distz": "application/octet-stream", + ".dmg": "application/octet-stream", + ".dms": "application/octet-stream", + ".dump": "application/octet-stream", + ".elc": "application/octet-stream", + ".iso": "application/octet-stream", + ".lha": "application/octet-stream", + ".lrf": "application/octet-stream", + ".lzh": "application/octet-stream", + ".o": "application/octet-stream", + ".obj": "application/octet-stream", + ".pkg": "application/octet-stream", + ".so": "application/octet-stream", + ".oda": "application/oda", + ".ogx": "application/ogg", + ".onepkg": "application/onenote", + ".onetmp": "application/onenote", + ".onetoc": "application/onenote", + ".onetoc2": "application/onenote", + ".pdf": "application/pdf", + ".pgp": "application/pgp-encrypted", + ".asc": "application/pgp-signature", + ".sig": "application/pgp-signature", + ".prf": "application/pics-rules", + ".p10": "application/pkcs10", + ".p7c": "application/pkcs7-mime", + ".p7m": "application/pkcs7-mime", + ".p7s": "application/pkcs7-signature", + ".cer": "application/pkix-cert", + ".crl": "application/pkix-crl", + ".pkipath": "application/pkix-pkipath", + ".pki": "application/pkixcmp", + ".pls": "application/pls+xml", + ".ai": "application/postscript", + ".eps": "application/postscript", + ".ps": "application/postscript", + ".cww": "application/prs.cww", + ".rdf": "application/rdf+xml", + ".rif": "application/reginfo+xml", + ".rnc": "application/relax-ng-compact-syntax", + ".rss": "application/rss+xml", + ".rtf": "application/rtf", + ".sbml": "application/sbml+xml", + ".sdp": "application/sdp", + ".smi": "application/smil+xml", + ".smil": "application/smil+xml", + ".rq": "application/sparql-query", + ".srx": 
"application/sparql-results+xml", + ".plb": "application/vnd.3gpp.pic-bw-large", + ".psb": "application/vnd.3gpp.pic-bw-small", + ".pvb": "application/vnd.3gpp.pic-bw-var", + ".tcap": "application/vnd.3gpp2.tcap", + ".pwn": "application/vnd.3m.post-it-notes", + ".aso": "application/vnd.accpac.simply.aso", + ".imp": "application/vnd.accpac.simply.imp", + ".acu": "application/vnd.acucobol", + ".air": "application/vnd.adobe.air-application-installer-package+zip", + ".xdp": "application/vnd.adobe.xdp+xml", + ".xfdf": "application/vnd.adobe.xfdf", + ".azw": "application/vnd.amazon.ebook", + ".apk": "application/vnd.android.package-archive", + ".cii": "application/vnd.anser-web-certificate-issue-initiation", + ".fti": "application/vnd.anser-web-funds-transfer-initiation", + ".atx": "application/vnd.antix.game-component", + ".mpkg": "application/vnd.apple.installer+xml", + ".m3u8": "application/vnd.apple.mpegurl", + ".bmi": "application/vnd.bmi", + ".rep": "application/vnd.businessobjects", + ".cdxml": "application/vnd.chemdraw+xml", + ".mmd": "application/vnd.chipnuts.karaoke-mmd", + ".cdy": "application/vnd.cinderella", + ".ppd": "application/vnd.cups-ppd", + ".car": "application/vnd.curl.car", + ".pcurl": "application/vnd.curl.pcurl", + ".mlp": "application/vnd.dolby.mlp", + ".dpg": "application/vnd.dpgraph", + ".dfac": "application/vnd.dreamfactory", + ".geo": "application/vnd.dynageo", + ".mag": "application/vnd.ecowin.chart", + ".nml": "application/vnd.enliven", + ".esf": "application/vnd.epson.esf", + ".msf": "application/vnd.epson.msf", + ".qam": "application/vnd.epson.quickanime", + ".slt": "application/vnd.epson.salt", + ".ssf": "application/vnd.epson.ssf", + ".es3": "application/vnd.eszigno3+xml", + ".et3": "application/vnd.eszigno3+xml", + ".ez2": "application/vnd.ezpix-album", + ".ez3": "application/vnd.ezpix-package", + ".fdf": "application/vnd.fdf", + ".mseed": "application/vnd.fdsn.mseed", + ".dataless": "application/vnd.fdsn.seed", + ".seed": "application/vnd.fdsn.seed", + ".gph": "application/vnd.flographit", + ".ftc": "application/vnd.fluxtime.clip", + ".book": "application/vnd.framemaker", + ".fm": "application/vnd.framemaker", + ".frame": "application/vnd.framemaker", + ".maker": "application/vnd.framemaker", + ".fnc": "application/vnd.frogans.fnc", + ".ltf": "application/vnd.frogans.ltf", + ".fsc": "application/vnd.fsc.weblaunch", + ".oas": "application/vnd.fujitsu.oasys", + ".oa2": "application/vnd.fujitsu.oasys2", + ".oa3": "application/vnd.fujitsu.oasys3", + ".fg5": "application/vnd.fujitsu.oasysgp", + ".bh2": "application/vnd.fujitsu.oasysprs", + ".ddd": "application/vnd.fujixerox.ddd", + ".xdw": "application/vnd.fujixerox.docuworks", + ".xbd": "application/vnd.fujixerox.docuworks.binder", + ".fzs": "application/vnd.fuzzysheet", + ".txd": "application/vnd.genomatix.tuxedo", + ".ggb": "application/vnd.geogebra.file", + ".ggt": "application/vnd.geogebra.tool", + ".gex": "application/vnd.geometry-explorer", + ".gre": "application/vnd.geometry-explorer", + ".gxt": "application/vnd.geonext", + ".g2w": "application/vnd.geoplan", + ".g3w": "application/vnd.geospace", + ".gmx": "application/vnd.gmx", + ".kml": "application/vnd.google-earth.kml+xml", + ".kmz": "application/vnd.google-earth.kmz", + ".gqf": "application/vnd.grafeq", + ".gqs": "application/vnd.grafeq", + ".gac": "application/vnd.groove-account", + ".ghf": "application/vnd.groove-help", + ".gim": "application/vnd.groove-identity-message", + ".grv": "application/vnd.groove-injector", + ".gtm": 
"application/vnd.groove-tool-message", + ".tpl": "application/vnd.groove-tool-template", + ".vcg": "application/vnd.groove-vcard", + ".zmm": "application/vnd.handheld-entertainment+xml", + ".hbci": "application/vnd.hbci", + ".les": "application/vnd.hhe.lesson-player", + ".hpgl": "application/vnd.hp-hpgl", + ".hpid": "application/vnd.hp-hpid", + ".hps": "application/vnd.hp-hps", + ".jlt": "application/vnd.hp-jlyt", + ".pcl": "application/vnd.hp-pcl", + ".pclxl": "application/vnd.hp-pclxl", + ".sfd-hdstx": "application/vnd.hydrostatix.sof-data", + ".x3d": "application/vnd.hzn-3d-crossword", + ".mpy": "application/vnd.ibm.minipay", + ".afp": "application/vnd.ibm.modcap", + ".list3820": "application/vnd.ibm.modcap", + ".listafp": "application/vnd.ibm.modcap", + ".irm": "application/vnd.ibm.rights-management", + ".sc": "application/vnd.ibm.secure-container", + ".icc": "application/vnd.iccprofile", + ".icm": "application/vnd.iccprofile", + ".igl": "application/vnd.igloader", + ".ivp": "application/vnd.immervision-ivp", + ".ivu": "application/vnd.immervision-ivu", + ".xpw": "application/vnd.intercon.formnet", + ".xpx": "application/vnd.intercon.formnet", + ".qbo": "application/vnd.intu.qbo", + ".qfx": "application/vnd.intu.qfx", + ".rcprofile": "application/vnd.ipunplugged.rcprofile", + ".irp": "application/vnd.irepository.package+xml", + ".xpr": "application/vnd.is-xpr", + ".jam": "application/vnd.jam", + ".rms": "application/vnd.jcp.javame.midlet-rms", + ".jisp": "application/vnd.jisp", + ".joda": "application/vnd.joost.joda-archive", + ".ktr": "application/vnd.kahootz", + ".ktz": "application/vnd.kahootz", + ".karbon": "application/vnd.kde.karbon", + ".chrt": "application/vnd.kde.kchart", + ".kfo": "application/vnd.kde.kformula", + ".flw": "application/vnd.kde.kivio", + ".kon": "application/vnd.kde.kontour", + ".kpr": "application/vnd.kde.kpresenter", + ".kpt": "application/vnd.kde.kpresenter", + ".ksp": "application/vnd.kde.kspread", + ".kwd": "application/vnd.kde.kword", + ".kwt": "application/vnd.kde.kword", + ".htke": "application/vnd.kenameaapp", + ".kia": "application/vnd.kidspiration", + ".kne": "application/vnd.kinar", + ".knp": "application/vnd.kinar", + ".skd": "application/vnd.koan", + ".skm": "application/vnd.koan", + ".skp": "application/vnd.koan", + ".skt": "application/vnd.koan", + ".sse": "application/vnd.kodak-descriptor", + ".lbd": "application/vnd.llamagraphics.life-balance.desktop", + ".lbe": "application/vnd.llamagraphics.life-balance.exchange+xml", + ".123": "application/vnd.lotus-1-2-3", + ".apr": "application/vnd.lotus-approach", + ".pre": "application/vnd.lotus-freelance", + ".nsf": "application/vnd.lotus-notes", + ".org": "application/vnd.lotus-organizer", + ".scm": "application/vnd.lotus-screencam", + ".lwp": "application/vnd.lotus-wordpro", + ".portpkg": "application/vnd.macports.portpkg", + ".mcd": "application/vnd.mcd", + ".mc1": "application/vnd.medcalcdata", + ".cdkey": "application/vnd.mediastation.cdkey", + ".mwf": "application/vnd.mfer", + ".mfm": "application/vnd.mfmp", + ".flo": "application/vnd.micrografx.flo", + ".igx": "application/vnd.micrografx.igx", + ".mif": "application/vnd.mif", + ".daf": "application/vnd.mobius.daf", + ".dis": "application/vnd.mobius.dis", + ".mbk": "application/vnd.mobius.mbk", + ".mqy": "application/vnd.mobius.mqy", + ".msl": "application/vnd.mobius.msl", + ".plc": "application/vnd.mobius.plc", + ".txf": "application/vnd.mobius.txf", + ".mpn": "application/vnd.mophun.application", + ".mpc": "application/vnd.mophun.certificate", 
+ ".xul": "application/vnd.mozilla.xul+xml", + ".cil": "application/vnd.ms-artgalry", + ".cab": "application/vnd.ms-cab-compressed", + ".xla": "application/vnd.ms-excel", + ".xlb": "application/vnd.ms-excel", + ".xlc": "application/vnd.ms-excel", + ".xlm": "application/vnd.ms-excel", + ".xls": "application/vnd.ms-excel", + ".xlt": "application/vnd.ms-excel", + ".xlw": "application/vnd.ms-excel", + ".xlam": "application/vnd.ms-excel.addin.macroenabled.12", + ".xlsb": "application/vnd.ms-excel.sheet.binary.macroenabled.12", + ".xlsm": "application/vnd.ms-excel.sheet.macroenabled.12", + ".xltm": "application/vnd.ms-excel.template.macroenabled.12", + ".eot": "application/vnd.ms-fontobject", + ".chm": "application/vnd.ms-htmlhelp", + ".ims": "application/vnd.ms-ims", + ".lrm": "application/vnd.ms-lrm", + ".cat": "application/vnd.ms-pki.seccat", + ".stl": "application/vnd.ms-pki.stl", + ".pot": "application/vnd.ms-powerpoint", + ".ppa": "application/vnd.ms-powerpoint", + ".pps": "application/vnd.ms-powerpoint", + ".ppt": "application/vnd.ms-powerpoint", + ".pwz": "application/vnd.ms-powerpoint", + ".ppam": "application/vnd.ms-powerpoint.addin.macroenabled.12", + ".pptm": "application/vnd.ms-powerpoint.presentation.macroenabled.12", + ".sldm": "application/vnd.ms-powerpoint.slide.macroenabled.12", + ".ppsm": "application/vnd.ms-powerpoint.slideshow.macroenabled.12", + ".potm": "application/vnd.ms-powerpoint.template.macroenabled.12", + ".mpp": "application/vnd.ms-project", + ".mpt": "application/vnd.ms-project", + ".docm": "application/vnd.ms-word.document.macroenabled.12", + ".dotm": "application/vnd.ms-word.template.macroenabled.12", + ".wcm": "application/vnd.ms-works", + ".wdb": "application/vnd.ms-works", + ".wks": "application/vnd.ms-works", + ".wps": "application/vnd.ms-works", + ".wpl": "application/vnd.ms-wpl", + ".xps": "application/vnd.ms-xpsdocument", + ".mseq": "application/vnd.mseq", + ".mus": "application/vnd.musician", + ".msty": "application/vnd.muvee.style", + ".nlu": "application/vnd.neurolanguage.nlu", + ".nnd": "application/vnd.noblenet-directory", + ".nns": "application/vnd.noblenet-sealer", + ".nnw": "application/vnd.noblenet-web", + ".ngdat": "application/vnd.nokia.n-gage.data", + ".n-gage": "application/vnd.nokia.n-gage.symbian.install", + ".rpst": "application/vnd.nokia.radio-preset", + ".rpss": "application/vnd.nokia.radio-presets", + ".edm": "application/vnd.novadigm.edm", + ".edx": "application/vnd.novadigm.edx", + ".ext": "application/vnd.novadigm.ext", + ".odc": "application/vnd.oasis.opendocument.chart", + ".otc": "application/vnd.oasis.opendocument.chart-template", + ".odb": "application/vnd.oasis.opendocument.database", + ".odf": "application/vnd.oasis.opendocument.formula", + ".odft": "application/vnd.oasis.opendocument.formula-template", + ".odg": "application/vnd.oasis.opendocument.graphics", + ".otg": "application/vnd.oasis.opendocument.graphics-template", + ".odi": "application/vnd.oasis.opendocument.image", + ".oti": "application/vnd.oasis.opendocument.image-template", + ".odp": "application/vnd.oasis.opendocument.presentation", + ".otp": "application/vnd.oasis.opendocument.presentation-template", + ".ods": "application/vnd.oasis.opendocument.spreadsheet", + ".ots": "application/vnd.oasis.opendocument.spreadsheet-template", + ".odt": "application/vnd.oasis.opendocument.text", + ".otm": "application/vnd.oasis.opendocument.text-master", + ".ott": "application/vnd.oasis.opendocument.text-template", + ".oth": "application/vnd.oasis.opendocument.text-web", + 
".xo": "application/vnd.olpc-sugar", + ".dd2": "application/vnd.oma.dd2+xml", + ".oxt": "application/vnd.openofficeorg.extension", + ".pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation", + ".sldx": "application/vnd.openxmlformats-officedocument.presentationml.slide", + ".ppsx": "application/vnd.openxmlformats-officedocument.presentationml.slideshow", + ".potx": "application/vnd.openxmlformats-officedocument.presentationml.template", + ".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet", + ".xltx": "application/vnd.openxmlformats-officedocument.spreadsheetml.template", + ".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document", + ".dotx": "application/vnd.openxmlformats-officedocument.wordprocessingml.template", + ".dp": "application/vnd.osgi.dp", + ".oprc": "application/vnd.palm", + ".pdb": "application/vnd.palm", + ".pqa": "application/vnd.palm", + ".paw": "application/vnd.pawaafile", + ".str": "application/vnd.pg.format", + ".ei6": "application/vnd.pg.osasli", + ".efif": "application/vnd.picsel", + ".wg": "application/vnd.pmi.widget", + ".plf": "application/vnd.pocketlearn", + ".pbd": "application/vnd.powerbuilder6", + ".box": "application/vnd.previewsystems.box", + ".mgz": "application/vnd.proteus.magazine", + ".qps": "application/vnd.publishare-delta-tree", + ".ptid": "application/vnd.pvi.ptid1", + ".qwd": "application/vnd.quark.quarkxpress", + ".qwt": "application/vnd.quark.quarkxpress", + ".qxb": "application/vnd.quark.quarkxpress", + ".qxd": "application/vnd.quark.quarkxpress", + ".qxl": "application/vnd.quark.quarkxpress", + ".qxt": "application/vnd.quark.quarkxpress", + ".bed": "application/vnd.realvnc.bed", + ".mxl": "application/vnd.recordare.musicxml", + ".musicxml": "application/vnd.recordare.musicxml+xml", + ".cod": "application/vnd.rim.cod", + ".rm": "application/vnd.rn-realmedia", + ".link66": "application/vnd.route66.link66+xml", + ".st": "application/vnd.sailingtracker.track", + ".see": "application/vnd.seemail", + ".sema": "application/vnd.sema", + ".semd": "application/vnd.semd", + ".semf": "application/vnd.semf", + ".ifm": "application/vnd.shana.informed.formdata", + ".itp": "application/vnd.shana.informed.formtemplate", + ".iif": "application/vnd.shana.informed.interchange", + ".ipk": "application/vnd.shana.informed.package", + ".twd": "application/vnd.simtech-mindmapper", + ".twds": "application/vnd.simtech-mindmapper", + ".mmf": "application/vnd.smaf", + ".teacher": "application/vnd.smart.teacher", + ".sdkd": "application/vnd.solent.sdkm+xml", + ".sdkm": "application/vnd.solent.sdkm+xml", + ".dxp": "application/vnd.spotfire.dxp", + ".sfs": "application/vnd.spotfire.sfs", + ".sdc": "application/vnd.stardivision.calc", + ".sda": "application/vnd.stardivision.draw", + ".sdd": "application/vnd.stardivision.impress", + ".smf": "application/vnd.stardivision.math", + ".sdw": "application/vnd.stardivision.writer", + ".vor": "application/vnd.stardivision.writer", + ".sgl": "application/vnd.stardivision.writer-global", + ".sxc": "application/vnd.sun.xml.calc", + ".stc": "application/vnd.sun.xml.calc.template", + ".sxd": "application/vnd.sun.xml.draw", + ".std": "application/vnd.sun.xml.draw.template", + ".sxi": "application/vnd.sun.xml.impress", + ".sti": "application/vnd.sun.xml.impress.template", + ".sxm": "application/vnd.sun.xml.math", + ".sxw": "application/vnd.sun.xml.writer", + ".sxg": "application/vnd.sun.xml.writer.global", + ".stw": "application/vnd.sun.xml.writer.template", + ".sus": 
"application/vnd.sus-calendar", + ".susp": "application/vnd.sus-calendar", + ".svd": "application/vnd.svd", + ".sis": "application/vnd.symbian.install", + ".sisx": "application/vnd.symbian.install", + ".xsm": "application/vnd.syncml+xml", + ".bdm": "application/vnd.syncml.dm+wbxml", + ".xdm": "application/vnd.syncml.dm+xml", + ".tao": "application/vnd.tao.intent-module-archive", + ".tmo": "application/vnd.tmobile-livetv", + ".tpt": "application/vnd.trid.tpt", + ".mxs": "application/vnd.triscape.mxs", + ".tra": "application/vnd.trueapp", + ".ufd": "application/vnd.ufdl", + ".ufdl": "application/vnd.ufdl", + ".utz": "application/vnd.uiq.theme", + ".umj": "application/vnd.umajin", + ".unityweb": "application/vnd.unity", + ".uoml": "application/vnd.uoml+xml", + ".vcx": "application/vnd.vcx", + ".vsd": "application/vnd.visio", + ".vss": "application/vnd.visio", + ".vst": "application/vnd.visio", + ".vsw": "application/vnd.visio", + ".vis": "application/vnd.visionary", + ".vsf": "application/vnd.vsf", + ".wbxml": "application/vnd.wap.wbxml", + ".wmlc": "application/vnd.wap.wmlc", + ".wmlsc": "application/vnd.wap.wmlscriptc", + ".wtb": "application/vnd.webturbo", + ".nbp": "application/vnd.wolfram.player", + ".wpd": "application/vnd.wordperfect", + ".wqd": "application/vnd.wqd", + ".stf": "application/vnd.wt.stf", + ".xar": "application/vnd.xara", + ".xfdl": "application/vnd.xfdl", + ".hvd": "application/vnd.yamaha.hv-dic", + ".hvs": "application/vnd.yamaha.hv-script", + ".hvp": "application/vnd.yamaha.hv-voice", + ".osf": "application/vnd.yamaha.openscoreformat", + ".osfpvg": "application/vnd.yamaha.openscoreformat.osfpvg+xml", + ".saf": "application/vnd.yamaha.smaf-audio", + ".spf": "application/vnd.yamaha.smaf-phrase", + ".cmp": "application/vnd.yellowriver-custom-menu", + ".zir": "application/vnd.zul", + ".zirz": "application/vnd.zul", + ".zaz": "application/vnd.zzazz.deck+xml", + ".vxml": "application/voicexml+xml", + ".wasm": "application/wasm", + ".hlp": "application/winhlp", + ".wsdl": "application/wsdl+xml", + ".wspolicy": "application/wspolicy+xml", + ".abw": "application/x-abiword", + ".ace": "application/x-ace-compressed", + ".aab": "application/x-authorware-bin", + ".u32": "application/x-authorware-bin", + ".vox": "application/x-authorware-bin", + ".x32": "application/x-authorware-bin", + ".aam": "application/x-authorware-map", + ".aas": "application/x-authorware-seg", + ".bcpio": "application/x-bcpio", + ".torrent": "application/x-bittorrent", + ".bz": "application/x-bzip", + ".boz": "application/x-bzip2", + ".bz2": "application/x-bzip2", + ".vcd": "application/x-cdlink", + ".chat": "application/x-chat", + ".pgn": "application/x-chess-pgn", + ".cpio": "application/x-cpio", + ".csh": "application/x-csh", + ".deb": "application/x-debian-package", + ".udeb": "application/x-debian-package", + ".cct": "application/x-director", + ".cst": "application/x-director", + ".cxt": "application/x-director", + ".dcr": "application/x-director", + ".dir": "application/x-director", + ".dxr": "application/x-director", + ".fgd": "application/x-director", + ".swa": "application/x-director", + ".w3d": "application/x-director", + ".wad": "application/x-doom", + ".ncx": "application/x-dtbncx+xml", + ".dtb": "application/x-dtbook+xml", + ".res": "application/x-dtbresource+xml", + ".dvi": "application/x-dvi", + ".bdf": "application/x-font-bdf", + ".gsf": "application/x-font-ghostscript", + ".psf": "application/x-font-linux-psf", + ".otf": "application/x-font-otf", + ".pcf": "application/x-font-pcf", + ".snf": 
"application/x-font-snf", + ".ttc": "application/x-font-ttf", + ".ttf": "application/x-font-ttf", + ".afm": "application/x-font-type1", + ".pfa": "application/x-font-type1", + ".pfb": "application/x-font-type1", + ".pfm": "application/x-font-type1", + ".spl": "application/x-futuresplash", + ".gnumeric": "application/x-gnumeric", + ".gtar": "application/x-gtar", + ".hdf": "application/x-hdf", + ".jnlp": "application/x-java-jnlp-file", + ".latex": "application/x-latex", + ".mobi": "application/x-mobipocket-ebook", + ".prc": "application/x-mobipocket-ebook", + ".wmd": "application/x-ms-wmd", + ".wmz": "application/x-ms-wmz", + ".xbap": "application/x-ms-xbap", + ".mdb": "application/x-msaccess", + ".obd": "application/x-msbinder", + ".crd": "application/x-mscardfile", + ".clp": "application/x-msclip", + ".bat": "application/x-msdownload", + ".com": "application/x-msdownload", + ".dll": "application/x-msdownload", + ".exe": "application/x-msdownload", + ".msi": "application/x-msdownload", + ".m13": "application/x-msmediaview", + ".m14": "application/x-msmediaview", + ".mvb": "application/x-msmediaview", + ".wmf": "application/x-msmetafile", + ".mny": "application/x-msmoney", + ".pub": "application/x-mspublisher", + ".scd": "application/x-msschedule", + ".trm": "application/x-msterminal", + ".wri": "application/x-mswrite", + ".cdf": "application/x-netcdf", + ".nc": "application/x-netcdf", + ".p12": "application/x-pkcs12", + ".pfx": "application/x-pkcs12", + ".p7b": "application/x-pkcs7-certificates", + ".spc": "application/x-pkcs7-certificates", + ".p7r": "application/x-pkcs7-certreqresp", + ".pyc": "application/x-python-code", + ".pyo": "application/x-python-code", + ".rar": "application/x-rar-compressed", + ".sh": "application/x-sh", + ".shar": "application/x-shar", + ".swf": "application/x-shockwave-flash", + ".xap": "application/x-silverlight-app", + ".sit": "application/x-stuffit", + ".sitx": "application/x-stuffitx", + ".sv4cpio": "application/x-sv4cpio", + ".sv4crc": "application/x-sv4crc", + ".tar": "application/x-tar", + ".tcl": "application/x-tcl", + ".tex": "application/x-tex", + ".tfm": "application/x-tex-tfm", + ".texi": "application/x-texinfo", + ".texinfo": "application/x-texinfo", + ".ustar": "application/x-ustar", + ".src": "application/x-wais-source", + ".crt": "application/x-x509-ca-cert", + ".der": "application/x-x509-ca-cert", + ".fig": "application/x-xfig", + ".xpi": "application/x-xpinstall", + ".xenc": "application/xenc+xml", + ".xht": "application/xhtml+xml", + ".xhtml": "application/xhtml+xml", + ".xml": "application/xml", + ".xpdl": "application/xml", + ".xsl": "application/xml", + ".dtd": "application/xml-dtd", + ".xop": "application/xop+xml", + ".xslt": "application/xslt+xml", + ".xspf": "application/xspf+xml", + ".mxml": "application/xv+xml", + ".xhvml": "application/xv+xml", + ".xvm": "application/xv+xml", + ".xvml": "application/xv+xml", + ".zip": "application/zip", + ".adp": "audio/adpcm", + ".au": "audio/basic", + ".snd": "audio/basic", + ".kar": "audio/midi", + ".mid": "audio/midi", + ".midi": "audio/midi", + ".rmi": "audio/midi", + ".mp4a": "audio/mp4", + ".m2a": "audio/mpeg", + ".m3a": "audio/mpeg", + ".mp2": "audio/mpeg", + ".mp2a": "audio/mpeg", + ".mp3": "audio/mpeg", + ".mpga": "audio/mpeg", + ".oga": "audio/ogg", + ".ogg": "audio/ogg", + ".spx": "audio/ogg", + ".eol": "audio/vnd.digital-winds", + ".dra": "audio/vnd.dra", + ".dts": "audio/vnd.dts", + ".dtshd": "audio/vnd.dts.hd", + ".lvp": "audio/vnd.lucent.voice", + ".pya": 
"audio/vnd.ms-playready.media.pya", + ".ecelp4800": "audio/vnd.nuera.ecelp4800", + ".ecelp7470": "audio/vnd.nuera.ecelp7470", + ".ecelp9600": "audio/vnd.nuera.ecelp9600", + ".aac": "audio/x-aac", + ".aif": "audio/x-aiff", + ".aifc": "audio/x-aiff", + ".aiff": "audio/x-aiff", + ".m3u": "audio/x-mpegurl", + ".wax": "audio/x-ms-wax", + ".wma": "audio/x-ms-wma", + ".ra": "audio/x-pn-realaudio", + ".ram": "audio/x-pn-realaudio", + ".rmp": "audio/x-pn-realaudio-plugin", + ".wav": "audio/x-wav", + ".cdx": "chemical/x-cdx", + ".cif": "chemical/x-cif", + ".cmdf": "chemical/x-cmdf", + ".cml": "chemical/x-cml", + ".csml": "chemical/x-csml", + ".xyz": "chemical/x-xyz", + ".bmp": "image/bmp", + ".cgm": "image/cgm", + ".g3": "image/g3fax", + ".gif": "image/gif", + ".ief": "image/ief", + ".jpe": "image/jpeg", + ".jpeg": "image/jpeg", + ".jpg": "image/jpeg", + ".png": "image/png", + ".btif": "image/prs.btif", + ".svg": "image/svg+xml", + ".svgz": "image/svg+xml", + ".tif": "image/tiff", + ".tiff": "image/tiff", + ".psd": "image/vnd.adobe.photoshop", + ".djv": "image/vnd.djvu", + ".djvu": "image/vnd.djvu", + ".dwg": "image/vnd.dwg", + ".dxf": "image/vnd.dxf", + ".fbs": "image/vnd.fastbidsheet", + ".fpx": "image/vnd.fpx", + ".fst": "image/vnd.fst", + ".mmr": "image/vnd.fujixerox.edmics-mmr", + ".rlc": "image/vnd.fujixerox.edmics-rlc", + ".mdi": "image/vnd.ms-modi", + ".npx": "image/vnd.net-fpx", + ".wbmp": "image/vnd.wap.wbmp", + ".xif": "image/vnd.xiff", + ".ras": "image/x-cmu-raster", + ".cmx": "image/x-cmx", + ".fh": "image/x-freehand", + ".fh4": "image/x-freehand", + ".fh5": "image/x-freehand", + ".fh7": "image/x-freehand", + ".fhc": "image/x-freehand", + ".ico": "image/x-icon", + ".pcx": "image/x-pcx", + ".pct": "image/x-pict", + ".pic": "image/x-pict", + ".pnm": "image/x-portable-anymap", + ".pbm": "image/x-portable-bitmap", + ".pgm": "image/x-portable-graymap", + ".ppm": "image/x-portable-pixmap", + ".rgb": "image/x-rgb", + ".xbm": "image/x-xbitmap", + ".xpm": "image/x-xpixmap", + ".xwd": "image/x-xwindowdump", + ".eml": "message/rfc822", + ".mht": "message/rfc822", + ".mhtml": "message/rfc822", + ".mime": "message/rfc822", + ".nws": "message/rfc822", + ".iges": "model/iges", + ".igs": "model/iges", + ".mesh": "model/mesh", + ".msh": "model/mesh", + ".silo": "model/mesh", + ".dwf": "model/vnd.dwf", + ".gdl": "model/vnd.gdl", + ".gtw": "model/vnd.gtw", + ".mts": "model/vnd.mts", + ".vtu": "model/vnd.vtu", + ".vrml": "model/vrml", + ".wrl": "model/vrml", + ".ics": "text/calendar", + ".ifb": "text/calendar", + ".css": "text/css", + ".csv": "text/csv", + ".htm": "text/html", + ".html": "text/html", + ".conf": "text/plain", + ".def": "text/plain", + ".in": "text/plain", + ".ksh": "text/plain", + ".list": "text/plain", + ".log": "text/plain", + ".pl": "text/plain", + ".text": "text/plain", + ".txt": "text/plain", + ".dsc": "text/prs.lines.tag", + ".rtx": "text/richtext", + ".sgm": "text/sgml", + ".sgml": "text/sgml", + ".tsv": "text/tab-separated-values", + ".man": "text/troff", + ".me": "text/troff", + ".ms": "text/troff", + ".roff": "text/troff", + ".t": "text/troff", + ".tr": "text/troff", + ".uri": "text/uri-list", + ".uris": "text/uri-list", + ".urls": "text/uri-list", + ".curl": "text/vnd.curl", + ".dcurl": "text/vnd.curl.dcurl", + ".mcurl": "text/vnd.curl.mcurl", + ".scurl": "text/vnd.curl.scurl", + ".fly": "text/vnd.fly", + ".flx": "text/vnd.fmi.flexstor", + ".gv": "text/vnd.graphviz", + ".3dml": "text/vnd.in3d.3dml", + ".spot": "text/vnd.in3d.spot", + ".jad": "text/vnd.sun.j2me.app-descriptor", + 
".wml": "text/vnd.wap.wml", + ".wmls": "text/vnd.wap.wmlscript", + ".asm": "text/x-asm", + ".s": "text/x-asm", + ".c": "text/x-c", + ".cc": "text/x-c", + ".cpp": "text/x-c", + ".cxx": "text/x-c", + ".dic": "text/x-c", + ".h": "text/x-c", + ".hh": "text/x-c", + ".hpp": "text/x-c", + ".f": "text/x-fortran", + ".f77": "text/x-fortran", + ".f90": "text/x-fortran", + ".for": "text/x-fortran", + ".java": "text/x-java-source", + ".p": "text/x-pascal", + ".pas": "text/x-pascal", + ".py": "text/x-python", + ".etx": "text/x-setext", + ".uu": "text/x-uuencode", + ".vcs": "text/x-vcalendar", + ".vcf": "text/x-vcard", + ".3gp": "video/3gpp", + ".3g2": "video/3gpp2", + ".h261": "video/h261", + ".h263": "video/h263", + ".h264": "video/h264", + ".jpgv": "video/jpeg", + ".jpgm": "video/jpm", + ".jpm": "video/jpm", + ".mj2": "video/mj2", + ".mjp2": "video/mj2", + ".mp4": "video/mp4", + ".mp4v": "video/mp4", + ".mpg4": "video/mp4", + ".m1v": "video/mpeg", + ".m2v": "video/mpeg", + ".mpa": "video/mpeg", + ".mpe": "video/mpeg", + ".mpeg": "video/mpeg", + ".mpg": "video/mpeg", + ".ogv": "video/ogg", + ".mov": "video/quicktime", + ".qt": "video/quicktime", + ".fvt": "video/vnd.fvt", + ".m4u": "video/vnd.mpegurl", + ".mxu": "video/vnd.mpegurl", + ".pyv": "video/vnd.ms-playready.media.pyv", + ".viv": "video/vnd.vivo", + ".webm": "video/webm", + ".f4v": "video/x-f4v", + ".fli": "video/x-fli", + ".flv": "video/x-flv", + ".m4v": "video/x-m4v", + ".asf": "video/x-ms-asf", + ".asx": "video/x-ms-asf", + ".wm": "video/x-ms-wm", + ".wmv": "video/x-ms-wmv", + ".wmx": "video/x-ms-wmx", + ".wvx": "video/x-ms-wvx", + ".avi": "video/x-msvideo", + ".movie": "video/x-sgi-movie", } - diff --git a/src/typecode/pygments_lexers.py b/src/typecode/pygments_lexers.py index d225d62..0392ec3 100644 --- a/src/typecode/pygments_lexers.py +++ b/src/typecode/pygments_lexers.py @@ -4,13 +4,13 @@ # Copyright (c) Pygments authors """ - pygments.lexers - ~~~~~~~~~~~~~~~ +pygments.lexers +~~~~~~~~~~~~~~~ - Pygments lexers. +Pygments lexers. - :copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. +:copyright: Copyright 2006-2021 by the Pygments team, see AUTHORS. +:license: BSD, see LICENSE for details. """ import re @@ -25,12 +25,21 @@ from typecode._vendor.pygments.util import ClassNotFound, guess_decode COMPAT = { - 'Python3Lexer': 'PythonLexer', - 'Python3TracebackLexer': 'PythonTracebackLexer', + "Python3Lexer": "PythonLexer", + "Python3TracebackLexer": "PythonTracebackLexer", } -__all__ = ['get_lexer_by_name', 'get_lexer_for_filename', 'find_lexer_class', - 'guess_lexer', 'load_lexer_from_file'] + list(LEXERS) + list(COMPAT) +__all__ = ( + [ + "get_lexer_by_name", + "get_lexer_for_filename", + "find_lexer_class", + "guess_lexer", + "load_lexer_from_file", + ] + + list(LEXERS) + + list(COMPAT) +) _lexer_cache = {} _pattern_cache = {} @@ -46,14 +55,15 @@ def _fn_matches(fn, glob): def _load_lexers(module_name): """Load a lexer (and all others in the module too).""" - mod = __import__(module_name, None, None, ['__all__']) + mod = __import__(module_name, None, None, ["__all__"]) for lexer_name in mod.__all__: cls = getattr(mod, lexer_name) _lexer_cache[cls.name] = cls def get_all_lexers(): - """Return a generator of tuples in the form ``(name, aliases, + """ + Return a generator of tuples in the form ``(name, aliases, filenames, mimetypes)`` of all know lexers. 
""" for item in LEXERS.values(): @@ -63,7 +73,8 @@ def get_all_lexers(): def find_lexer_class(name): - """Lookup a lexer class by name. + """ + Lookup a lexer class by name. Return None if not found. """ @@ -81,14 +92,15 @@ def find_lexer_class(name): def find_lexer_class_by_name(_alias): - """Lookup a lexer class by alias. + """ + Lookup a lexer class by alias. Like `get_lexer_by_name`, but does not instantiate the class. .. versionadded:: 2.2 """ if not _alias: - raise ClassNotFound('no lexer for alias %r found' % _alias) + raise ClassNotFound("no lexer for alias %r found" % _alias) # lookup builtin lexers for module_name, name, aliases, _, _ in LEXERS.values(): if _alias.lower() in aliases: @@ -99,16 +111,17 @@ def find_lexer_class_by_name(_alias): for cls in find_plugin_lexers(): if _alias.lower() in cls.aliases: return cls - raise ClassNotFound('no lexer for alias %r found' % _alias) + raise ClassNotFound("no lexer for alias %r found" % _alias) def get_lexer_by_name(_alias, **options): - """Get a lexer by an alias. + """ + Get a lexer by an alias. Raises ClassNotFound if not found. """ if not _alias: - raise ClassNotFound('no lexer for alias %r found' % _alias) + raise ClassNotFound("no lexer for alias %r found" % _alias) # lookup builtin lexers for module_name, name, aliases, _, _ in LEXERS.values(): @@ -120,11 +133,12 @@ def get_lexer_by_name(_alias, **options): for cls in find_plugin_lexers(): if _alias.lower() in cls.aliases: return cls(**options) - raise ClassNotFound('no lexer for alias %r found' % _alias) + raise ClassNotFound("no lexer for alias %r found" % _alias) def load_lexer_from_file(filename, lexername="CustomLexer", **options): - """Load a lexer from a file. + """ + Load a lexer from a file. This method expects a file located relative to the current working directory, which contains a Lexer class. By default, it expects the @@ -141,25 +155,25 @@ def load_lexer_from_file(filename, lexername="CustomLexer", **options): try: # This empty dict will contain the namespace for the exec'd file custom_namespace = {} - with open(filename, 'rb') as f: + with open(filename, "rb") as f: exec(f.read(), custom_namespace) # Retrieve the class `lexername` from that namespace if lexername not in custom_namespace: - raise ClassNotFound('no valid %s class found in %s' % - (lexername, filename)) + raise ClassNotFound("no valid %s class found in %s" % (lexername, filename)) lexer_class = custom_namespace[lexername] # And finally instantiate it with the options return lexer_class(**options) except IOError as err: - raise ClassNotFound('cannot read %s: %s' % (filename, err)) + raise ClassNotFound("cannot read %s: %s" % (filename, err)) except ClassNotFound: raise except Exception as err: - raise ClassNotFound('error when loading custom lexer: %s' % err) + raise ClassNotFound("error when loading custom lexer: %s" % err) def find_lexer_class_for_filename(_fn, code=None): - """Get a lexer for a filename. + """ + Get a lexer for a filename. If multiple lexers match the filename pattern, use ``analyse_text()`` to figure out which one is more appropriate. @@ -186,7 +200,7 @@ def find_lexer_class_for_filename(_fn, code=None): def get_rating(info): cls, filename = info # explicit patterns get a bonus - bonus = '*' not in filename and 0.5 or 0 + bonus = "*" not in filename and 0.5 or 0 # The class _always_ defines analyse_text because it's included in # the Lexer class. The default implementation returns None which # gets turned into 0.0. 
Run scripts/detect_missing_analyse_text.py @@ -202,7 +216,8 @@ def get_rating(info): def get_lexer_for_filename(_fn, code=None, **options): - """Get a lexer for a filename. + """ + Get a lexer for a filename. If multiple lexers match the filename pattern, use ``analyse_text()`` to figure out which one is more appropriate. @@ -211,12 +226,13 @@ def get_lexer_for_filename(_fn, code=None, **options): """ res = find_lexer_class_for_filename(_fn, code) if not res: - raise ClassNotFound('no lexer for filename %r found' % _fn) + raise ClassNotFound("no lexer for filename %r found" % _fn) return res(**options) def get_lexer_for_mimetype(_mime, **options): - """Get a lexer for a mimetype. + """ + Get a lexer for a mimetype. Raises ClassNotFound if not found. """ @@ -228,7 +244,7 @@ def get_lexer_for_mimetype(_mime, **options): for cls in find_plugin_lexers(): if _mime in cls.mimetypes: return cls(**options) - raise ClassNotFound('no lexer for mimetype %r found' % _mime) + raise ClassNotFound("no lexer for mimetype %r found" % _mime) def _iter_lexerclasses(plugins=True): @@ -270,7 +286,7 @@ def guess_lexer_for_filename(_fn, _text, **options): matching_lexers.add(lexer) primary[lexer] = False if not matching_lexers: - raise ClassNotFound('no lexer for filename %r found' % fn) + raise ClassNotFound("no lexer for filename %r found" % fn) if len(matching_lexers) == 1: return matching_lexers.pop()(**options) result = [] @@ -297,9 +313,9 @@ def guess_lexer(_text, **options): """Guess a lexer by strong distinctions in the text (eg, shebang).""" if not isinstance(_text, str): - inencoding = options.get('inencoding', options.get('encoding')) + inencoding = options.get("inencoding", options.get("encoding")) if inencoding: - _text = _text.decode(inencoding or 'utf8') + _text = _text.decode(inencoding or "utf8") else: _text, _ = guess_decode(_text) @@ -320,7 +336,7 @@ def guess_lexer(_text, **options): if rv > best_lexer[0]: best_lexer[:] = (rv, lexer) if not best_lexer[0] or best_lexer[1] is None: - raise ClassNotFound('no lexer matching the text found') + raise ClassNotFound("no lexer matching the text found") return best_lexer[1](**options) diff --git a/src/typecode/pygments_lexers_mapping.py b/src/typecode/pygments_lexers_mapping.py index 1875ccd..624909a 100644 --- a/src/typecode/pygments_lexers_mapping.py +++ b/src/typecode/pygments_lexers_mapping.py @@ -4,27 +4,33 @@ # Copyright (c) Pygments authors """ - This file is a list of the subset of lexers we care for as actual programming - languages from the larger whole generated list at pygments.lexers._mapping +LEXERS is a list of the subset of lexers we care for as actual programming +languages from the larger whole generated list at pygments.lexers._mapping - This is based on Pygments 2.9.0 pygments.lexers._mapping and the modification is - to comment out certain/most lexers. +This is based on Pygments 2.9.0 pygments.lexers._mapping and the modification is +to comment out certain/most lexers. - pygments.lexers._mapping - ~~~~~~~~~~~~~~~~~~~~~~~~ +pygments.lexers._mapping +~~~~~~~~~~~~~~~~~~~~~~~~ - Lexer mapping definitions. This file is generated by itself. Everytime - you change something on a builtin lexer definition, run this script from - the lexers folder to update it. +Lexer mapping definitions. This file is generated by itself. Everytime +you change something on a builtin lexer definition, run this script from +the lexers folder to update it. - Do not alter the LEXERS dictionary by hand. +Do not alter the LEXERS dictionary by hand. 
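The lookup helpers reworked above (get_lexer_by_name, get_lexer_for_filename, guess_lexer) all resolve against LEXERS entries of the form (module path, name, aliases, filename globs, mimetypes), with aliases compared case-insensitively and ClassNotFound raised when nothing matches. A rough usage sketch, assuming the package exposes the vendored module as typecode.pygments_lexers; illustrative only, not part of the patch:

    from typecode.pygments_lexers import (
        get_lexer_by_name,
        get_lexer_for_filename,
        guess_lexer,
    )
    from typecode._vendor.pygments.util import ClassNotFound

    # Resolve by alias: matched against the third field of a LEXERS entry.
    bash = get_lexer_by_name("bash")

    # Resolve by filename: matched against the glob patterns in the
    # fourth field, e.g. "*.sh" or "PKGBUILD" for the Bash lexer.
    lexer = get_lexer_for_filename("build.sh")

    # Content-based guessing; ClassNotFound is raised when no lexer
    # matches the text strongly enough.
    try:
        lexer = guess_lexer("#!/bin/sh\necho hello\n")
    except ClassNotFound:
        lexer = None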
- :copyright: Copyright 2006-2014, 2016 by the Pygments team, see AUTHORS. - :license: BSD, see LICENSE for details. +:copyright: Copyright 2006-2014, 2016 by the Pygments team, see AUTHORS. +:license: BSD, see LICENSE for details. """ LEXERS = { - 'ABAPLexer': ('typecode._vendor.pygments.lexers.business', 'ABAP', ('abap',), ('*.abap', '*.ABAP'), ('text/x-abap',)), + "ABAPLexer": ( + "typecode._vendor.pygments.lexers.business", + "ABAP", + ("abap",), + ("*.abap", "*.ABAP"), + ("text/x-abap",), + ), # 'APLLexer': ('typecode._vendor.pygments.lexers.apl', 'APL', ('apl',), ('*.apl',), ()), # 'AbnfLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'ABNF', ('abnf',), ('*.abnf',), ('text/x-abnf',)), # 'ActionScript3Lexer': ('typecode._vendor.pygments.lexers.actionscript', 'ActionScript 3', ('as3', 'actionscript3'), ('*.as',), ('application/x-actionscript3', 'text/x-actionscript3', 'text/actionscript3')), @@ -42,7 +48,7 @@ # 'AntlrCSharpLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With C# Target', ('antlr-csharp', 'antlr-c#'), ('*.G', '*.g'), ()), # 'AntlrCppLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With CPP Target', ('antlr-cpp',), ('*.G', '*.g'), ()), # 'AntlrJavaLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Java Target', ('antlr-java',), ('*.G', '*.g'), ()), - 'AntlrLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR', ('antlr',), (), ()), + "AntlrLexer": ("typecode._vendor.pygments.lexers.parsers", "ANTLR", ("antlr",), (), ()), # 'AntlrObjectiveCLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With ObjectiveC Target', ('antlr-objc',), ('*.G', '*.g'), ()), # 'AntlrPerlLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Perl Target', ('antlr-perl',), ('*.G', '*.g'), ()), # 'AntlrPythonLexer': ('typecode._vendor.pygments.lexers.parsers', 'ANTLR With Python Target', ('antlr-python',), ('*.G', '*.g'), ()), @@ -51,8 +57,13 @@ # 'AppleScriptLexer': ('typecode._vendor.pygments.lexers.scripting', 'AppleScript', ('applescript',), ('*.applescript',), ()), # 'ArduinoLexer': ('typecode._vendor.pygments.lexers.c_like', 'Arduino', ('arduino',), ('*.ino',), ('text/x-arduino',)), # 'ArrowLexer': ('typecode._vendor.pygments.arrow', 'Arrow', ('arrow',), ('*.arw',), ()), - - 'AspectJLexer': ('typecode._vendor.pygments.lexers.jvm', 'AspectJ', ('aspectj',), ('*.aj',), ('text/x-aspectj',)), + "AspectJLexer": ( + "typecode._vendor.pygments.lexers.jvm", + "AspectJ", + ("aspectj",), + ("*.aj",), + ("text/x-aspectj",), + ), # 'AsymptoteLexer': ('typecode._vendor.pygments.lexers.graphics', 'Asymptote', ('asy', 'asymptote'), ('*.asy',), ('text/x-asymptote',)), # 'AugeasLexer': ('typecode._vendor.pygments.lexers.configs', 'Augeas', ('augeas',), ('*.aug',), ()), # 'AutoItLexer': ('typecode._vendor.pygments.lexers.automation', 'AutoIt', ('autoit',), ('*.au3',), ('text/x-autoit',)), @@ -64,9 +75,37 @@ # 'BSTLexer': ('typecode._vendor.pygments.lexers.bibtex', 'BST', ('bst', 'bst-pybtex'), ('*.bst',), ()), # 'BareLexer': ('typecode._vendor.pygments.lexers.bare', 'BARE', ('bare',), ('*.bare',), ()), # 'BaseMakefileLexer': ('typecode._vendor.pygments.lexers.make', 'Base Makefile', ('basemake',), (), ()), - 'BashLexer': ('typecode._vendor.pygments.lexers.shell', 'Bash', ('bash', 'sh', 'ksh', 'zsh', 'shell'), ('*.sh', '*.ksh', '*.bash', '*.ebuild', '*.eclass', '*.exheres-0', '*.exlib', '*.zsh', '.bashrc', 'bashrc', '.bash_*', 'bash_*', 'zshrc', '.zshrc', 'PKGBUILD'), ('application/x-sh', 'application/x-shellscript', 
'text/x-shellscript')), + "BashLexer": ( + "typecode._vendor.pygments.lexers.shell", + "Bash", + ("bash", "sh", "ksh", "zsh", "shell"), + ( + "*.sh", + "*.ksh", + "*.bash", + "*.ebuild", + "*.eclass", + "*.exheres-0", + "*.exlib", + "*.zsh", + ".bashrc", + "bashrc", + ".bash_*", + "bash_*", + "zshrc", + ".zshrc", + "PKGBUILD", + ), + ("application/x-sh", "application/x-shellscript", "text/x-shellscript"), + ), # 'BashSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'Bash Session', ('console', 'shell-session'), ('*.sh-session', '*.shell-session'), ('application/x-shell-session', 'application/x-sh-session')), - 'BatchLexer': ('typecode._vendor.pygments.lexers.shell', 'Batchfile', ('bat', 'batch', 'dosbatch', 'winbatch'), ('*.bat', '*.cmd'), ('application/x-dos-batch',)), + "BatchLexer": ( + "typecode._vendor.pygments.lexers.shell", + "Batchfile", + ("bat", "batch", "dosbatch", "winbatch"), + ("*.bat", "*.cmd"), + ("application/x-dos-batch",), + ), # 'BefungeLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Befunge', ('befunge',), ('*.befunge',), ('application/x-befunge',)), # 'BibTeXLexer': ('typecode._vendor.pygments.lexers.bibtex', 'BibTeX', ('bib', 'bibtex'), ('*.bib',), ('text/x-bibtex',)), # 'BlitzBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'BlitzBasic', ('blitzbasic', 'b3d', 'bplus'), ('*.bb', '*.decls'), ('text/x-bb',)), @@ -78,12 +117,36 @@ # 'BrainfuckLexer': ('typecode._vendor.pygments.lexers.esoteric', 'Brainfuck', ('brainfuck', 'bf'), ('*.bf', '*.b'), ('application/x-brainfuck',)), # 'BugsLexer': ('typecode._vendor.pygments.lexers.modeling', 'BUGS', ('bugs', 'winbugs', 'openbugs'), ('*.bug',), ()), # 'CAmkESLexer': ('typecode._vendor.pygments.lexers.esoteric', 'CAmkES', ('camkes', 'idl4'), ('*.camkes', '*.idl4'), ()), - 'CLexer': ('typecode._vendor.pygments.lexers.c_cpp', 'C', ('c',), ('*.c', '*.h', '*.idc'), ('text/x-chdr', 'text/x-csrc')), - 'CMakeLexer': ('typecode._vendor.pygments.lexers.make', 'CMake', ('cmake',), ('*.cmake', 'CMakeLists.txt'), ('text/x-cmake',)), + "CLexer": ( + "typecode._vendor.pygments.lexers.c_cpp", + "C", + ("c",), + ("*.c", "*.h", "*.idc"), + ("text/x-chdr", "text/x-csrc"), + ), + "CMakeLexer": ( + "typecode._vendor.pygments.lexers.make", + "CMake", + ("cmake",), + ("*.cmake", "CMakeLists.txt"), + ("text/x-cmake",), + ), # 'CObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'c-objdump', ('c-objdump',), ('*.c-objdump',), ('text/x-c-objdump',)), # 'CPSALexer': ('typecode._vendor.pygments.lexers.lisp', 'CPSA', ('cpsa',), ('*.cpsa',), ()), - 'CSharpAspxLexer': ('typecode._vendor.pygments.lexers.dotnet', 'aspx-cs', ('aspx-cs',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), - 'CSharpLexer': ('typecode._vendor.pygments.lexers.dotnet', 'C#', ('csharp', 'c#'), ('*.cs',), ('text/x-csharp',)), + "CSharpAspxLexer": ( + "typecode._vendor.pygments.lexers.dotnet", + "aspx-cs", + ("aspx-cs",), + ("*.aspx", "*.asax", "*.ascx", "*.ashx", "*.asmx", "*.axd"), + (), + ), + "CSharpLexer": ( + "typecode._vendor.pygments.lexers.dotnet", + "C#", + ("csharp", "c#"), + ("*.cs",), + ("text/x-csharp",), + ), # 'Ca65Lexer': ('typecode._vendor.pygments.lexers.asm', 'ca65 assembler', ('ca65',), ('*.s',), ()), # 'CadlLexer': ('typecode._vendor.pygments.lexers.archetype', 'cADL', ('cadl',), ('*.cadl',), ()), # 'CapDLLexer': ('typecode._vendor.pygments.lexers.esoteric', 'CapDL', ('capdl',), ('*.cdl',), ()), @@ -101,18 +164,67 @@ # 'CirruLexer': ('typecode._vendor.pygments.lexers.webmisc', 'Cirru', ('cirru',), ('*.cirru',), 
('text/x-cirru',)), # 'ClayLexer': ('typecode._vendor.pygments.lexers.c_like', 'Clay', ('clay',), ('*.clay',), ('text/x-clay',)), # 'CleanLexer': ('typecode._vendor.pygments.lexers.clean', 'Clean', ('clean',), ('*.icl', '*.dcl'), ()), - 'ClojureLexer': ('typecode._vendor.pygments.lexers.jvm', 'Clojure', ('clojure', 'clj'), ('*.clj',), ('text/x-clojure', 'application/x-clojure')), - 'ClojureScriptLexer': ('typecode._vendor.pygments.lexers.jvm', 'ClojureScript', ('clojurescript', 'cljs'), ('*.cljs',), ('text/x-clojurescript', 'application/x-clojurescript')), + "ClojureLexer": ( + "typecode._vendor.pygments.lexers.jvm", + "Clojure", + ("clojure", "clj"), + ("*.clj",), + ("text/x-clojure", "application/x-clojure"), + ), + "ClojureScriptLexer": ( + "typecode._vendor.pygments.lexers.jvm", + "ClojureScript", + ("clojurescript", "cljs"), + ("*.cljs",), + ("text/x-clojurescript", "application/x-clojurescript"), + ), # 'CobolFreeformatLexer': ('typecode._vendor.pygments.lexers.business', 'COBOLFree', ('cobolfree',), ('*.cbl', '*.CBL'), ()), - 'CobolLexer': ('typecode._vendor.pygments.lexers.business', 'COBOL', ('cobol',), ('*.cob', '*.COB', '*.cpy', '*.CPY'), ('text/x-cobol',)), - 'CoffeeScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'CoffeeScript', ('coffee-script', 'coffeescript', 'coffee'), ('*.coffee',), ('text/coffeescript',)), + "CobolLexer": ( + "typecode._vendor.pygments.lexers.business", + "COBOL", + ("cobol",), + ("*.cob", "*.COB", "*.cpy", "*.CPY"), + ("text/x-cobol",), + ), + "CoffeeScriptLexer": ( + "typecode._vendor.pygments.lexers.javascript", + "CoffeeScript", + ("coffee-script", "coffeescript", "coffee"), + ("*.coffee",), + ("text/coffeescript",), + ), # 'ColdfusionCFCLexer': ('typecode._vendor.pygments.lexers.templates', 'Coldfusion CFC', ('cfc',), ('*.cfc',), ()), # 'ColdfusionHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'Coldfusion HTML', ('cfm',), ('*.cfm', '*.cfml'), ('application/x-coldfusion',)), # 'ColdfusionLexer': ('typecode._vendor.pygments.lexers.templates', 'cfstatement', ('cfs',), (), ()), - 'CommonLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'Common Lisp', ('common-lisp', 'cl', 'lisp'), ('*.cl', '*.lisp'), ('text/x-common-lisp',)), + "CommonLispLexer": ( + "typecode._vendor.pygments.lexers.lisp", + "Common Lisp", + ("common-lisp", "cl", "lisp"), + ("*.cl", "*.lisp"), + ("text/x-common-lisp",), + ), # 'ComponentPascalLexer': ('typecode._vendor.pygments.lexers.oberon', 'Component Pascal', ('componentpascal', 'cp'), ('*.cp', '*.cps'), ('text/x-component-pascal',)), # 'CoqLexer': ('typecode._vendor.pygments.lexers.theorem', 'Coq', ('coq',), ('*.v',), ('text/x-coq',)), - 'CppLexer': ('typecode._vendor.pygments.lexers.c_cpp', 'C++', ('cpp', 'c++'), ('*.cpp', '*.hpp', '*.c++', '*.h++', '*.cc', '*.hh', '*.cxx', '*.hxx', '*.C', '*.H', '*.cp', '*.CPP'), ('text/x-c++hdr', 'text/x-c++src')), + "CppLexer": ( + "typecode._vendor.pygments.lexers.c_cpp", + "C++", + ("cpp", "c++"), + ( + "*.cpp", + "*.hpp", + "*.c++", + "*.h++", + "*.cc", + "*.hh", + "*.cxx", + "*.hxx", + "*.C", + "*.H", + "*.cp", + "*.CPP", + ), + ("text/x-c++hdr", "text/x-c++src"), + ), # 'CppObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'cpp-objdump', ('cpp-objdump', 'c++-objdumb', 'cxx-objdump'), ('*.cpp-objdump', '*.c++-objdump', '*.cxx-objdump'), ('text/x-cpp-objdump',)), # 'CrmshLexer': ('typecode._vendor.pygments.lexers.dsls', 'Crmsh', ('crmsh', 'pcmk'), ('*.crmsh', '*.pcmk'), ()), # 'CrocLexer': ('typecode._vendor.pygments.lexers.d', 'Croc', ('croc',), 
('*.croc',), ('text/x-crocsrc',)), @@ -124,19 +236,49 @@ # 'CssDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Django/Jinja', ('css+django', 'css+jinja'), (), ('text/css+django', 'text/css+jinja')), # 'CssErbLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Ruby', ('css+erb', 'css+ruby'), (), ('text/css+ruby',)), # 'CssGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Genshi Text', ('css+genshitext', 'css+genshi'), (), ('text/css+genshi',)), - 'CssLexer': ('typecode._vendor.pygments.lexers.css', 'CSS', ('css',), ('*.css',), ('text/css',)), + "CssLexer": ( + "typecode._vendor.pygments.lexers.css", + "CSS", + ("css",), + ("*.css",), + ("text/css",), + ), # 'CssPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+PHP', ('css+php',), (), ('text/css+php',)), # 'CssSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Smarty', ('css+smarty',), (), ('text/css+smarty',)), # 'CudaLexer': ('typecode._vendor.pygments.lexers.c_like', 'CUDA', ('cuda', 'cu'), ('*.cu', '*.cuh'), ('text/x-cuda',)), # 'CypherLexer': ('typecode._vendor.pygments.lexers.graph', 'Cypher', ('cypher',), ('*.cyp', '*.cypher'), ()), - 'CythonLexer': ('typecode._vendor.pygments.lexers.python', 'Cython', ('cython', 'pyx', 'pyrex'), ('*.pyx', '*.pxd', '*.pxi'), ('text/x-cython', 'application/x-cython')), + "CythonLexer": ( + "typecode._vendor.pygments.lexers.python", + "Cython", + ("cython", "pyx", "pyrex"), + ("*.pyx", "*.pxd", "*.pxi"), + ("text/x-cython", "application/x-cython"), + ), # 'DLexer': ('typecode._vendor.pygments.lexers.d', 'D', ('d',), ('*.d', '*.di'), ('text/x-dsrc',)), # 'DObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'd-objdump', ('d-objdump',), ('*.d-objdump',), ('text/x-d-objdump',)), # 'DarcsPatchLexer': ('typecode._vendor.pygments.lexers.diff', 'Darcs Patch', ('dpatch',), ('*.dpatch', '*.darcspatch'), ()), - 'DartLexer': ('typecode._vendor.pygments.lexers.javascript', 'Dart', ('dart',), ('*.dart',), ('text/x-dart',)), - 'Dasm16Lexer': ('typecode._vendor.pygments.lexers.asm', 'DASM16', ('dasm16',), ('*.dasm16', '*.dasm'), ('text/x-dasm16',)), + "DartLexer": ( + "typecode._vendor.pygments.lexers.javascript", + "Dart", + ("dart",), + ("*.dart",), + ("text/x-dart",), + ), + "Dasm16Lexer": ( + "typecode._vendor.pygments.lexers.asm", + "DASM16", + ("dasm16",), + ("*.dasm16", "*.dasm"), + ("text/x-dasm16",), + ), # 'DebianControlLexer': ('typecode._vendor.pygments.lexers.installers', 'Debian Control file', ('control', 'debcontrol'), ('control',), ()), - 'DelphiLexer': ('typecode._vendor.pygments.lexers.pascal', 'Delphi', ('delphi', 'pas', 'pascal', 'objectpascal'), ('*.pas', '*.dpr'), ('text/x-pascal',)), + "DelphiLexer": ( + "typecode._vendor.pygments.lexers.pascal", + "Delphi", + ("delphi", "pas", "pascal", "objectpascal"), + ("*.pas", "*.dpr"), + ("text/x-pascal",), + ), # 'DevicetreeLexer': ('typecode._vendor.pygments.lexers.devicetree', 'Devicetree', ('devicetree', 'dts'), ('*.dts', '*.dtsi'), ('text/x-c',)), # 'DgLexer': ('typecode._vendor.pygments.lexers.python', 'dg', ('dg',), ('*.dg',), ('text/x-dg',)), # 'DiffLexer': ('typecode._vendor.pygments.lexers.diff', 'Diff', ('diff', 'udiff'), ('*.diff', '*.patch'), ('text/x-diff', 'text/x-patch')), @@ -154,19 +296,49 @@ # 'EbnfLexer': ('typecode._vendor.pygments.lexers.parsers', 'EBNF', ('ebnf',), ('*.ebnf',), ('text/x-ebnf',)), # 'EiffelLexer': ('typecode._vendor.pygments.lexers.eiffel', 'Eiffel', ('eiffel',), ('*.e',), ('text/x-eiffel',)), # 'ElixirConsoleLexer': 
('typecode._vendor.pygments.lexers.erlang', 'Elixir iex session', ('iex',), (), ('text/x-elixir-shellsession',)), - 'ElixirLexer': ('typecode._vendor.pygments.lexers.erlang', 'Elixir', ('elixir', 'ex', 'exs'), ('*.ex', '*.eex', '*.exs'), ('text/x-elixir',)), - 'ElmLexer': ('typecode._vendor.pygments.lexers.elm', 'Elm', ('elm',), ('*.elm',), ('text/x-elm',)), + "ElixirLexer": ( + "typecode._vendor.pygments.lexers.erlang", + "Elixir", + ("elixir", "ex", "exs"), + ("*.ex", "*.eex", "*.exs"), + ("text/x-elixir",), + ), + "ElmLexer": ( + "typecode._vendor.pygments.lexers.elm", + "Elm", + ("elm",), + ("*.elm",), + ("text/x-elm",), + ), # 'EmacsLispLexer': ('typecode._vendor.pygments.lexers.lisp', 'EmacsLisp', ('emacs', 'elisp', 'emacs-lisp'), ('*.el',), ('text/x-elisp', 'application/x-elisp')), # 'EmailLexer': ('typecode._vendor.pygments.lexers.email', 'E-mail', ('email', 'eml'), ('*.eml',), ('message/rfc822',)), - 'ErbLexer': ('typecode._vendor.pygments.lexers.templates', 'ERB', ('erb',), (), ('application/x-ruby-templating',)), - 'ErlangLexer': ('typecode._vendor.pygments.lexers.erlang', 'Erlang', ('erlang',), ('*.erl', '*.hrl', '*.es', '*.escript'), ('text/x-erlang',)), + "ErbLexer": ( + "typecode._vendor.pygments.lexers.templates", + "ERB", + ("erb",), + (), + ("application/x-ruby-templating",), + ), + "ErlangLexer": ( + "typecode._vendor.pygments.lexers.erlang", + "Erlang", + ("erlang",), + ("*.erl", "*.hrl", "*.es", "*.escript"), + ("text/x-erlang",), + ), # 'ErlangShellLexer': ('typecode._vendor.pygments.lexers.erlang', 'Erlang erl session', ('erl',), ('*.erl-sh',), ('text/x-erl-shellsession',)), # 'EvoqueHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Evoque', ('html+evoque',), ('*.html',), ('text/html+evoque',)), # 'EvoqueLexer': ('typecode._vendor.pygments.lexers.templates', 'Evoque', ('evoque',), ('*.evoque',), ('application/x-evoque',)), # 'EvoqueXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Evoque', ('xml+evoque',), ('*.xml',), ('application/xml+evoque',)), # 'ExeclineLexer': ('typecode._vendor.pygments.lexers.shell', 'execline', ('execline',), ('*.exec',), ()), # 'EzhilLexer': ('typecode._vendor.pygments.lexers.ezhil', 'Ezhil', ('ezhil',), ('*.n',), ('text/x-ezhil',)), - 'FSharpLexer': ('typecode._vendor.pygments.lexers.dotnet', 'F#', ('fsharp', 'f#'), ('*.fs', '*.fsi'), ('text/x-fsharp',)), + "FSharpLexer": ( + "typecode._vendor.pygments.lexers.dotnet", + "F#", + ("fsharp", "f#"), + ("*.fs", "*.fsi"), + ("text/x-fsharp",), + ), # 'FStarLexer': ('typecode._vendor.pygments.lexers.ml', 'FStar', ('fstar',), ('*.fst', '*.fsti'), ('text/x-fstar',)), # 'FactorLexer': ('typecode._vendor.pygments.lexers.factor', 'Factor', ('factor',), ('*.factor',), ('text/x-factor',)), # 'FancyLexer': ('typecode._vendor.pygments.lexers.ruby', 'Fancy', ('fancy', 'fy'), ('*.fy', '*.fancypack'), ('text/x-fancysrc',)), @@ -176,39 +348,87 @@ # 'FishShellLexer': ('typecode._vendor.pygments.lexers.shell', 'Fish', ('fish', 'fishshell'), ('*.fish', '*.load'), ('application/x-fish',)), # 'FlatlineLexer': ('typecode._vendor.pygments.lexers.dsls', 'Flatline', ('flatline',), (), ('text/x-flatline',)), # 'FloScriptLexer': ('typecode._vendor.pygments.lexers.floscript', 'FloScript', ('floscript', 'flo'), ('*.flo',), ()), - 'ForthLexer': ('typecode._vendor.pygments.lexers.forth', 'Forth', ('forth',), ('*.frt', '*.fs'), ('application/x-forth',)), - 'FortranFixedLexer': ('typecode._vendor.pygments.lexers.fortran', 'FortranFixed', ('fortranfixed',), ('*.f', '*.F'), ()), - 'FortranLexer': 
('typecode._vendor.pygments.lexers.fortran', 'Fortran', ('fortran',), ('*.f03', '*.f90', '*.F03', '*.F90'), ('text/x-fortran',)), + "ForthLexer": ( + "typecode._vendor.pygments.lexers.forth", + "Forth", + ("forth",), + ("*.frt", "*.fs"), + ("application/x-forth",), + ), + "FortranFixedLexer": ( + "typecode._vendor.pygments.lexers.fortran", + "FortranFixed", + ("fortranfixed",), + ("*.f", "*.F"), + (), + ), + "FortranLexer": ( + "typecode._vendor.pygments.lexers.fortran", + "Fortran", + ("fortran",), + ("*.f03", "*.f90", "*.F03", "*.F90"), + ("text/x-fortran",), + ), # 'FoxProLexer': ('typecode._vendor.pygments.lexers.foxpro', 'FoxPro', ('foxpro', 'vfp', 'clipper', 'xbase'), ('*.PRG', '*.prg'), ()), # 'FreeFemLexer': ('typecode._vendor.pygments.lexers.freefem', 'Freefem', ('freefem',), ('*.edp',), ('text/x-freefem',)), # 'GAPLexer': ('typecode._vendor.pygments.lexers.algebra', 'GAP', ('gap',), ('*.g', '*.gd', '*.gi', '*.gap'), ()), # 'GDScriptLexer': ('typecode._vendor.pygments.lexers.gdscript', 'GDScript', ('gdscript', 'gd'), ('*.gd',), ('text/x-gdscript', 'application/x-gdscript')), # 'GLShaderLexer': ('typecode._vendor.pygments.lexers.graphics', 'GLSL', ('glsl',), ('*.vert', '*.frag', '*.geo'), ('text/x-glslsrc',)), - 'GasLexer': ('typecode._vendor.pygments.lexers.asm', 'GAS', ('gas', 'asm'), ('*.s', '*.S'), ('text/x-gas',)), + "GasLexer": ( + "typecode._vendor.pygments.lexers.asm", + "GAS", + ("gas", "asm"), + ("*.s", "*.S"), + ("text/x-gas",), + ), # 'GenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'Genshi', ('genshi', 'kid', 'xml+genshi', 'xml+kid'), ('*.kid',), ('application/x-genshi', 'application/x-kid')), # 'GenshiTextLexer': ('typecode._vendor.pygments.lexers.templates', 'Genshi Text', ('genshitext',), (), ('application/x-genshi-text', 'text/x-genshi')), # 'GettextLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Gettext Catalog', ('pot', 'po'), ('*.pot', '*.po'), ('application/x-gettext', 'text/x-gettext', 'text/gettext')), # 'GherkinLexer': ('typecode._vendor.pygments.lexers.testing', 'Gherkin', ('cucumber', 'gherkin'), ('*.feature',), ('text/x-gherkin',)), # 'GnuplotLexer': ('typecode._vendor.pygments.lexers.graphics', 'Gnuplot', ('gnuplot',), ('*.plot', '*.plt'), ('text/x-gnuplot',)), - 'GoLexer': ('typecode._vendor.pygments.lexers.go', 'Go', ('go',), ('*.go',), ('text/x-gosrc',)), + "GoLexer": ("typecode._vendor.pygments.lexers.go", "Go", ("go",), ("*.go",), ("text/x-gosrc",)), # 'GoloLexer': ('typecode._vendor.pygments.lexers.jvm', 'Golo', ('golo',), ('*.golo',), ()), # 'GoodDataCLLexer': ('typecode._vendor.pygments.lexers.business', 'GoodData-CL', ('gooddata-cl',), ('*.gdc',), ('text/x-gooddata-cl',)), # 'GosuLexer': ('typecode._vendor.pygments.lexers.jvm', 'Gosu', ('gosu',), ('*.gs', '*.gsx', '*.gsp', '*.vark'), ('text/x-gosu',)), # 'GosuTemplateLexer': ('typecode._vendor.pygments.lexers.jvm', 'Gosu Template', ('gst',), ('*.gst',), ('text/x-gosu-template',)), # 'GroffLexer': ('typecode._vendor.pygments.lexers.markup', 'Groff', ('groff', 'nroff', 'man'), ('*.[1234567]', '*.man'), ('application/x-troff', 'text/troff')), - 'GroovyLexer': ('typecode._vendor.pygments.lexers.jvm', 'Groovy', ('groovy',), ('*.groovy', '*.gradle'), ('text/x-groovy',)), + "GroovyLexer": ( + "typecode._vendor.pygments.lexers.jvm", + "Groovy", + ("groovy",), + ("*.groovy", "*.gradle"), + ("text/x-groovy",), + ), # 'HLSLShaderLexer': ('typecode._vendor.pygments.lexers.graphics', 'HLSL', ('hlsl',), ('*.hlsl', '*.hlsli'), ('text/x-hlsl',)), # 'HamlLexer': 
('typecode._vendor.pygments.lexers.html', 'Haml', ('haml',), ('*.haml',), ('text/x-haml',)), # 'HandlebarsHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Handlebars', ('html+handlebars',), ('*.handlebars', '*.hbs'), ('text/html+handlebars', 'text/x-handlebars-template')), # 'HandlebarsLexer': ('typecode._vendor.pygments.lexers.templates', 'Handlebars', ('handlebars',), (), ()), - 'HaskellLexer': ('typecode._vendor.pygments.lexers.haskell', 'Haskell', ('haskell', 'hs'), ('*.hs',), ('text/x-haskell',)), - 'HaxeLexer': ('typecode._vendor.pygments.lexers.haxe', 'Haxe', ('hx', 'haxe', 'hxsl'), ('*.hx', '*.hxsl'), ('text/haxe', 'text/x-haxe', 'text/x-hx')), + "HaskellLexer": ( + "typecode._vendor.pygments.lexers.haskell", + "Haskell", + ("haskell", "hs"), + ("*.hs",), + ("text/x-haskell",), + ), + "HaxeLexer": ( + "typecode._vendor.pygments.lexers.haxe", + "Haxe", + ("hx", "haxe", "hxsl"), + ("*.hx", "*.hxsl"), + ("text/haxe", "text/x-haxe", "text/x-hx"), + ), # 'HexdumpLexer': ('typecode._vendor.pygments.lexers.hexdump', 'Hexdump', ('hexdump',), (), ()), # 'HsailLexer': ('typecode._vendor.pygments.lexers.asm', 'HSAIL', ('hsail', 'hsa'), ('*.hsail',), ('text/x-hsail',)), # 'HspecLexer': ('typecode._vendor.pygments.lexers.haskell', 'Hspec', ('hspec',), (), ()), # 'HtmlDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Django/Jinja', ('html+django', 'html+jinja', 'htmldjango'), (), ('text/html+django', 'text/html+jinja')), # 'HtmlGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Genshi', ('html+genshi', 'html+kid'), (), ('text/html+genshi',)), - 'HtmlLexer': ('typecode._vendor.pygments.lexers.html', 'HTML', ('html',), ('*.html', '*.htm', '*.xhtml', '*.xslt'), ('text/html', 'application/xhtml+xml')), + "HtmlLexer": ( + "typecode._vendor.pygments.lexers.html", + "HTML", + ("html",), + ("*.html", "*.htm", "*.xhtml", "*.xslt"), + ("text/html", "application/xhtml+xml"), + ), # 'HtmlPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+PHP', ('html+php',), ('*.phtml',), ('application/x-php', 'application/x-httpd-php', 'application/x-httpd-php3', 'application/x-httpd-php4', 'application/x-httpd-php5')), # 'HtmlSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Smarty', ('html+smarty',), (), ('text/html+smarty',)), # 'HttpLexer': ('typecode._vendor.pygments.lexers.textfmts', 'HTTP', ('http',), (), ()), @@ -230,11 +450,28 @@ # 'JLexer': ('typecode._vendor.pygments.lexers.j', 'J', ('j',), ('*.ijs',), ('text/x-j',)), # 'JagsLexer': ('typecode._vendor.pygments.lexers.modeling', 'JAGS', ('jags',), ('*.jag', '*.bug'), ()), # 'JasminLexer': ('typecode._vendor.pygments.lexers.jvm', 'Jasmin', ('jasmin', 'jasminxt'), ('*.j',), ()), - 'JavaLexer': ('typecode._vendor.pygments.lexers.jvm', 'Java', ('java',), ('*.java',), ('text/x-java',)), + "JavaLexer": ( + "typecode._vendor.pygments.lexers.jvm", + "Java", + ("java",), + ("*.java",), + ("text/x-java",), + ), # 'JavascriptDjangoLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Django/Jinja', ('js+django', 'javascript+django', 'js+jinja', 'javascript+jinja'), (), ('application/x-javascript+django', 'application/x-javascript+jinja', 'text/x-javascript+django', 'text/x-javascript+jinja', 'text/javascript+django', 'text/javascript+jinja')), # 'JavascriptErbLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Ruby', ('js+erb', 'javascript+erb', 'js+ruby', 'javascript+ruby'), (), ('application/x-javascript+ruby', 'text/x-javascript+ruby', 'text/javascript+ruby')), # 
'JavascriptGenshiLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Genshi Text', ('js+genshitext', 'js+genshi', 'javascript+genshitext', 'javascript+genshi'), (), ('application/x-javascript+genshi', 'text/x-javascript+genshi', 'text/javascript+genshi')), - 'JavascriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'JavaScript', ('js', 'javascript'), ('*.js', '*.jsm', '*.mjs'), ('application/javascript', 'application/x-javascript', 'text/x-javascript', 'text/javascript')), + "JavascriptLexer": ( + "typecode._vendor.pygments.lexers.javascript", + "JavaScript", + ("js", "javascript"), + ("*.js", "*.jsm", "*.mjs"), + ( + "application/javascript", + "application/x-javascript", + "text/x-javascript", + "text/javascript", + ), + ), # 'JavascriptPhpLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+PHP', ('js+php', 'javascript+php'), (), ('application/x-javascript+php', 'text/x-javascript+php', 'text/javascript+php')), # 'JavascriptSmartyLexer': ('typecode._vendor.pygments.lexers.templates', 'JavaScript+Smarty', ('js+smarty', 'javascript+smarty'), (), ('application/x-javascript+smarty', 'text/x-javascript+smarty', 'text/javascript+smarty')), # 'JclLexer': ('typecode._vendor.pygments.lexers.scripting', 'JCL', ('jcl',), ('*.jcl',), ('text/x-jcl',)), @@ -242,7 +479,13 @@ # 'JsonBareObjectLexer': ('typecode._vendor.pygments.lexers.data', 'JSONBareObject', (), (), ()), # 'JsonLdLexer': ('typecode._vendor.pygments.lexers.data', 'JSON-LD', ('jsonld', 'json-ld'), ('*.jsonld',), ('application/ld+json',)), # 'JsonLexer': ('typecode._vendor.pygments.lexers.data', 'JSON', ('json', 'json-object'), ('*.json', 'Pipfile.lock'), ('application/json', 'application/json-object')), - 'JspLexer': ('typecode._vendor.pygments.lexers.templates', 'Java Server Page', ('jsp',), ('*.jsp',), ('application/x-jsp',)), + "JspLexer": ( + "typecode._vendor.pygments.lexers.templates", + "Java Server Page", + ("jsp",), + ("*.jsp",), + ("application/x-jsp",), + ), # 'JuliaConsoleLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia console', ('jlcon',), (), ()), # 'JuliaLexer': ('typecode._vendor.pygments.lexers.julia', 'Julia', ('julia', 'jl'), ('*.jl',), ('text/x-julia', 'application/x-julia')), # 'JuttleLexer': ('typecode._vendor.pygments.lexers.javascript', 'Juttle', ('juttle',), ('*.juttle',), ('application/juttle', 'application/x-juttle', 'text/x-juttle', 'text/juttle')), @@ -250,7 +493,13 @@ # 'KconfigLexer': ('typecode._vendor.pygments.lexers.configs', 'Kconfig', ('kconfig', 'menuconfig', 'linux-config', 'kernel-config'), ('Kconfig*', '*Config.in*', 'external.in*', 'standard-modules.in'), ('text/x-kconfig',)), # 'KernelLogLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Kernel log', ('kmsg', 'dmesg'), ('*.kmsg', '*.dmesg'), ()), # 'KokaLexer': ('typecode._vendor.pygments.lexers.haskell', 'Koka', ('koka',), ('*.kk', '*.kki'), ('text/x-koka',)), - 'KotlinLexer': ('typecode._vendor.pygments.lexers.jvm', 'Kotlin', ('kotlin',), ('*.kt', '*.kts'), ('text/x-kotlin',)), + "KotlinLexer": ( + "typecode._vendor.pygments.lexers.jvm", + "Kotlin", + ("kotlin",), + ("*.kt", "*.kts"), + ("text/x-kotlin",), + ), # 'LSLLexer': ('typecode._vendor.pygments.lexers.scripting', 'LSL', ('lsl',), ('*.lsl',), ('text/x-lsl',)), # 'LassoCssLexer': ('typecode._vendor.pygments.lexers.templates', 'CSS+Lasso', ('css+lasso',), (), ('text/css+lasso',)), # 'LassoHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Lasso', ('html+lasso',), (), ('text/html+lasso', 'application/x-httpd-lasso', 
'application/x-httpd-lasso[89]')), @@ -272,7 +521,13 @@ # 'LlvmMirLexer': ('typecode._vendor.pygments.lexers.asm', 'LLVM-MIR', ('llvm-mir',), ('*.mir',), ()), # 'LogosLexer': ('typecode._vendor.pygments.lexers.objective', 'Logos', ('logos',), ('*.x', '*.xi', '*.xm', '*.xmi'), ('text/x-logos',)), # 'LogtalkLexer': ('typecode._vendor.pygments.lexers.prolog', 'Logtalk', ('logtalk',), ('*.lgt', '*.logtalk'), ('text/x-logtalk',)), - 'LuaLexer': ('typecode._vendor.pygments.lexers.scripting', 'Lua', ('lua',), ('*.lua', '*.wlua'), ('text/x-lua', 'application/x-lua')), + "LuaLexer": ( + "typecode._vendor.pygments.lexers.scripting", + "Lua", + ("lua",), + ("*.lua", "*.wlua"), + ("text/x-lua", "application/x-lua"), + ), # 'MIMELexer': ('typecode._vendor.pygments.lexers.mime', 'MIME', ('mime',), (), ('multipart/mixed', 'multipart/related', 'multipart/alternative')), # 'MOOCodeLexer': ('typecode._vendor.pygments.lexers.scripting', 'MOOCode', ('moocode', 'moo'), ('*.moo',), ('text/x-moocode',)), # 'MSDOSSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'MSDOS Session', ('doscon',), (), ()), @@ -315,7 +570,13 @@ # 'MyghtyXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Myghty', ('xml+myghty',), (), ('application/xml+myghty',)), # 'NCLLexer': ('typecode._vendor.pygments.lexers.ncl', 'NCL', ('ncl',), ('*.ncl',), ('text/ncl',)), # 'NSISLexer': ('typecode._vendor.pygments.lexers.installers', 'NSIS', ('nsis', 'nsi', 'nsh'), ('*.nsi', '*.nsh'), ('text/x-nsis',)), - 'NasmLexer': ('typecode._vendor.pygments.lexers.asm', 'NASM', ('nasm',), ('*.asm', '*.ASM'), ('text/x-nasm',)), + "NasmLexer": ( + "typecode._vendor.pygments.lexers.asm", + "NASM", + ("nasm",), + ("*.asm", "*.ASM"), + ("text/x-nasm",), + ), # 'NasmObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'objdump-nasm', ('objdump-nasm',), ('*.objdump-intel',), ('text/x-nasm-objdump',)), # 'NemerleLexer': ('typecode._vendor.pygments.lexers.dotnet', 'Nemerle', ('nemerle',), ('*.n',), ('text/x-nemerle',)), # 'NesCLexer': ('typecode._vendor.pygments.lexers.c_like', 'nesC', ('nesc',), ('*.nc',), ('text/x-nescsrc',)), @@ -329,10 +590,28 @@ # 'NuSMVLexer': ('typecode._vendor.pygments.lexers.smv', 'NuSMV', ('nusmv',), ('*.smv',), ()), # 'NumPyLexer': ('typecode._vendor.pygments.lexers.python', 'NumPy', ('numpy',), (), ()), # 'ObjdumpLexer': ('typecode._vendor.pygments.lexers.asm', 'objdump', ('objdump',), ('*.objdump',), ('text/x-objdump',)), - 'ObjectiveCLexer': ('typecode._vendor.pygments.lexers.objective', 'Objective-C', ('objective-c', 'objectivec', 'obj-c', 'objc'), ('*.m', '*.h'), ('text/x-objective-c',)), - 'ObjectiveCppLexer': ('typecode._vendor.pygments.lexers.objective', 'Objective-C++', ('objective-c++', 'objectivec++', 'obj-c++', 'objc++'), ('*.mm', '*.hh'), ('text/x-objective-c++',)), + "ObjectiveCLexer": ( + "typecode._vendor.pygments.lexers.objective", + "Objective-C", + ("objective-c", "objectivec", "obj-c", "objc"), + ("*.m", "*.h"), + ("text/x-objective-c",), + ), + "ObjectiveCppLexer": ( + "typecode._vendor.pygments.lexers.objective", + "Objective-C++", + ("objective-c++", "objectivec++", "obj-c++", "objc++"), + ("*.mm", "*.hh"), + ("text/x-objective-c++",), + ), # 'ObjectiveJLexer': ('typecode._vendor.pygments.lexers.javascript', 'Objective-J', ('objective-j', 'objectivej', 'obj-j', 'objj'), ('*.j',), ('text/x-objective-j',)), - 'OcamlLexer': ('typecode._vendor.pygments.lexers.ml', 'OCaml', ('ocaml',), ('*.ml', '*.mli', '*.mll', '*.mly'), ('text/x-ocaml',)), + "OcamlLexer": ( + "typecode._vendor.pygments.lexers.ml", + 
"OCaml", + ("ocaml",), + ("*.ml", "*.mli", "*.mll", "*.mly"), + ("text/x-ocaml",), + ), # 'OctaveLexer': ('typecode._vendor.pygments.lexers.matlab', 'Octave', ('octave',), ('*.m',), ('text/octave',)), # 'OdinLexer': ('typecode._vendor.pygments.lexers.archetype', 'ODIN', ('odin',), ('*.odin',), ('text/odin',)), # 'OocLexer': ('typecode._vendor.pygments.lexers.ooc', 'Ooc', ('ooc',), ('*.ooc',), ('text/x-ooc',)), @@ -345,7 +624,13 @@ # 'PegLexer': ('typecode._vendor.pygments.lexers.grammar_notation', 'PEG', ('peg',), ('*.peg',), ('text/x-peg',)), # 'Perl6Lexer': ('typecode._vendor.pygments.lexers.perl', 'Perl6', ('perl6', 'pl6', 'raku'), ('*.pl', '*.pm', '*.nqp', '*.p6', '*.6pl', '*.p6l', '*.pl6', '*.6pm', '*.p6m', '*.pm6', '*.t', '*.raku', '*.rakumod', '*.rakutest', '*.rakudoc'), ('text/x-perl6', 'application/x-perl6')), # 'PerlLexer': ('typecode._vendor.pygments.lexers.perl', 'Perl', ('perl', 'pl'), ('*.pl', '*.pm', '*.t', '*.perl'), ('text/x-perl', 'application/x-perl')), - 'PhpLexer': ('typecode._vendor.pygments.lexers.php', 'PHP', ('php', 'php3', 'php4', 'php5'), ('*.php', '*.php[345]', '*.inc'), ('text/x-php',)), + "PhpLexer": ( + "typecode._vendor.pygments.lexers.php", + "PHP", + ("php", "php3", "php4", "php5"), + ("*.php", "*.php[345]", "*.inc"), + ("text/x-php",), + ), # 'PigLexer': ('typecode._vendor.pygments.lexers.jvm', 'Pig', ('pig',), ('*.pig',), ('text/x-pig',)), # 'PikeLexer': ('typecode._vendor.pygments.lexers.c_like', 'Pike', ('pike',), ('*.pike', '*.pmod'), ('text/x-pike',)), # 'PkgConfigLexer': ('typecode._vendor.pygments.lexers.configs', 'PkgConfig', ('pkgconfig',), ('*.pc',), ()), @@ -356,7 +641,13 @@ # 'PostgresConsoleLexer': ('typecode._vendor.pygments.lexers.sql', 'PostgreSQL console (psql)', ('psql', 'postgresql-console', 'postgres-console'), (), ('text/x-postgresql-psql',)), # 'PostgresLexer': ('typecode._vendor.pygments.lexers.sql', 'PostgreSQL SQL dialect', ('postgresql', 'postgres'), (), ('text/x-postgresql',)), # 'PovrayLexer': ('typecode._vendor.pygments.lexers.graphics', 'POVRay', ('pov',), ('*.pov', '*.inc'), ('text/x-povray',)), - 'PowerShellLexer': ('typecode._vendor.pygments.lexers.shell', 'PowerShell', ('powershell', 'posh', 'ps1', 'psm1'), ('*.ps1', '*.psm1'), ('text/x-powershell',)), + "PowerShellLexer": ( + "typecode._vendor.pygments.lexers.shell", + "PowerShell", + ("powershell", "posh", "ps1", "psm1"), + ("*.ps1", "*.psm1"), + ("text/x-powershell",), + ), # 'PowerShellSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'PowerShell Session', ('ps1con',), (), ()), # 'PraatLexer': ('typecode._vendor.pygments.lexers.praat', 'Praat', ('praat',), ('*.praat', '*.proc', '*.psc'), ()), # 'PrologLexer': ('typecode._vendor.pygments.lexers.prolog', 'Prolog', ('prolog',), ('*.ecl', '*.prolog', '*.pro', '*.pl'), ('text/x-prolog',)), @@ -370,7 +661,27 @@ # 'Python2Lexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x', ('python2', 'py2'), (), ('text/x-python2', 'application/x-python2')), # 'Python2TracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python 2.x Traceback', ('py2tb',), ('*.py2tb',), ('text/x-python2-traceback',)), # 'PythonConsoleLexer': ('typecode._vendor.pygments.lexers.python', 'Python console session', ('pycon',), (), ('text/x-python-doctest',)), - 'PythonLexer': ('typecode._vendor.pygments.lexers.python', 'Python', ('python', 'py', 'sage', 'python3', 'py3'), ('*.py', '*.pyw', '*.jy', '*.sage', '*.sc', 'SConstruct', 'SConscript', '*.bzl', 'BUCK', 'BUILD', 'BUILD.bazel', 'WORKSPACE', '*.tac'), ('text/x-python', 
'application/x-python', 'text/x-python3', 'application/x-python3')), + "PythonLexer": ( + "typecode._vendor.pygments.lexers.python", + "Python", + ("python", "py", "sage", "python3", "py3"), + ( + "*.py", + "*.pyw", + "*.jy", + "*.sage", + "*.sc", + "SConstruct", + "SConscript", + "*.bzl", + "BUCK", + "BUILD", + "BUILD.bazel", + "WORKSPACE", + "*.tac", + ), + ("text/x-python", "application/x-python", "text/x-python3", "application/x-python3"), + ), # 'PythonTracebackLexer': ('typecode._vendor.pygments.lexers.python', 'Python Traceback', ('pytb', 'py3tb'), ('*.pytb', '*.py3tb'), ('text/x-python-traceback', 'text/x-python3-traceback')), # 'QBasicLexer': ('typecode._vendor.pygments.lexers.basic', 'QBasic', ('qbasic', 'basic'), ('*.BAS', '*.bas'), ('text/basic',)), # 'QVToLexer': ('typecode._vendor.pygments.lexers.qvt', 'QVTO', ('qvto', 'qvt'), ('*.qvto',), ()), @@ -406,19 +717,49 @@ # 'RstLexer': ('typecode._vendor.pygments.lexers.markup', 'reStructuredText', ('rst', 'rest', 'restructuredtext'), ('*.rst', '*.rest'), ('text/x-rst', 'text/prs.fallenstein.rst')), # 'RtsLexer': ('typecode._vendor.pygments.lexers.trafficscript', 'TrafficScript', ('rts', 'trafficscript'), ('*.rts',), ()), # 'RubyConsoleLexer': ('typecode._vendor.pygments.lexers.ruby', 'Ruby irb session', ('rbcon', 'irb'), (), ('text/x-ruby-shellsession',)), - 'RubyLexer': ('typecode._vendor.pygments.lexers.ruby', 'Ruby', ('rb', 'ruby', 'duby'), ('*.rb', '*.rbw', 'Rakefile', '*.rake', '*.gemspec', '*.rbx', '*.duby', 'Gemfile'), ('text/x-ruby', 'application/x-ruby')), - 'RustLexer': ('typecode._vendor.pygments.lexers.rust', 'Rust', ('rust', 'rs'), ('*.rs', '*.rs.in'), ('text/rust', 'text/x-rust')), + "RubyLexer": ( + "typecode._vendor.pygments.lexers.ruby", + "Ruby", + ("rb", "ruby", "duby"), + ("*.rb", "*.rbw", "Rakefile", "*.rake", "*.gemspec", "*.rbx", "*.duby", "Gemfile"), + ("text/x-ruby", "application/x-ruby"), + ), + "RustLexer": ( + "typecode._vendor.pygments.lexers.rust", + "Rust", + ("rust", "rs"), + ("*.rs", "*.rs.in"), + ("text/rust", "text/x-rust"), + ), # 'SASLexer': ('typecode._vendor.pygments.lexers.sas', 'SAS', ('sas',), ('*.SAS', '*.sas'), ('text/x-sas', 'text/sas', 'application/x-sas')), # 'SLexer': ('typecode._vendor.pygments.lexers.r', 'S', ('splus', 's', 'r'), ('*.S', '*.R', '.Rhistory', '.Rprofile', '.Renviron'), ('text/S-plus', 'text/S', 'text/x-r-source', 'text/x-r', 'text/x-R', 'text/x-r-history', 'text/x-r-profile')), # 'SMLLexer': ('typecode._vendor.pygments.lexers.ml', 'Standard ML', ('sml',), ('*.sml', '*.sig', '*.fun'), ('text/x-standardml', 'application/x-standardml')), # 'SarlLexer': ('typecode._vendor.pygments.lexers.jvm', 'SARL', ('sarl',), ('*.sarl',), ('text/x-sarl',)), - 'SassLexer': ('typecode._vendor.pygments.lexers.css', 'Sass', ('sass',), ('*.sass',), ('text/x-sass',)), - 'ScalaLexer': ('typecode._vendor.pygments.lexers.jvm', 'Scala', ('scala',), ('*.scala',), ('text/x-scala',)), + "SassLexer": ( + "typecode._vendor.pygments.lexers.css", + "Sass", + ("sass",), + ("*.sass",), + ("text/x-sass",), + ), + "ScalaLexer": ( + "typecode._vendor.pygments.lexers.jvm", + "Scala", + ("scala",), + ("*.scala",), + ("text/x-scala",), + ), # 'ScamlLexer': ('typecode._vendor.pygments.lexers.html', 'Scaml', ('scaml',), ('*.scaml',), ('text/x-scaml',)), # 'ScdocLexer': ('typecode._vendor.pygments.lexers.scdoc', 'scdoc', ('scdoc', 'scd'), ('*.scd', '*.scdoc'), ()), # 'SchemeLexer': ('typecode._vendor.pygments.lexers.lisp', 'Scheme', ('scheme', 'scm'), ('*.scm', '*.ss'), ('text/x-scheme', 
'application/x-scheme')), # 'ScilabLexer': ('typecode._vendor.pygments.lexers.matlab', 'Scilab', ('scilab',), ('*.sci', '*.sce', '*.tst'), ('text/scilab',)), - 'ScssLexer': ('typecode._vendor.pygments.lexers.css', 'SCSS', ('scss',), ('*.scss',), ('text/x-scss',)), + "ScssLexer": ( + "typecode._vendor.pygments.lexers.css", + "SCSS", + ("scss",), + ("*.scss",), + ("text/x-scss",), + ), # 'ShExCLexer': ('typecode._vendor.pygments.lexers.rdf', 'ShExC', ('shexc', 'shex'), ('*.shex',), ('text/shex',)), # 'ShenLexer': ('typecode._vendor.pygments.lexers.lisp', 'Shen', ('shen',), ('*.shen',), ('text/x-shen', 'application/x-shen')), # 'SieveLexer': ('typecode._vendor.pygments.lexers.sieve', 'Sieve', ('sieve',), ('*.siv', '*.sieve'), ()), @@ -444,16 +785,40 @@ # 'StanLexer': ('typecode._vendor.pygments.lexers.modeling', 'Stan', ('stan',), ('*.stan',), ()), # 'StataLexer': ('typecode._vendor.pygments.lexers.stata', 'Stata', ('stata', 'do'), ('*.do', '*.ado'), ('text/x-stata', 'text/stata', 'application/x-stata')), # 'SuperColliderLexer': ('typecode._vendor.pygments.lexers.supercollider', 'SuperCollider', ('sc', 'supercollider'), ('*.sc', '*.scd'), ('application/supercollider', 'text/supercollider')), - 'SwiftLexer': ('typecode._vendor.pygments.lexers.objective', 'Swift', ('swift',), ('*.swift',), ('text/x-swift',)), + "SwiftLexer": ( + "typecode._vendor.pygments.lexers.objective", + "Swift", + ("swift",), + ("*.swift",), + ("text/x-swift",), + ), # 'SwigLexer': ('typecode._vendor.pygments.lexers.c_like', 'SWIG', ('swig',), ('*.swg', '*.i'), ('text/swig',)), # 'SystemVerilogLexer': ('typecode._vendor.pygments.lexers.hdl', 'systemverilog', ('systemverilog', 'sv'), ('*.sv', '*.svh'), ('text/x-systemverilog',)), # 'TAPLexer': ('typecode._vendor.pygments.lexers.testing', 'TAP', ('tap',), ('*.tap',), ()), # 'TNTLexer': ('typecode._vendor.pygments.lexers.tnt', 'Typographic Number Theory', ('tnt',), ('*.tnt',), ()), # 'TOMLLexer': ('typecode._vendor.pygments.lexers.configs', 'TOML', ('toml',), ('*.toml', 'Pipfile', 'poetry.lock'), ()), # 'Tads3Lexer': ('typecode._vendor.pygments.lexers.int_fiction', 'TADS 3', ('tads3',), ('*.t',), ()), - 'TasmLexer': ('typecode._vendor.pygments.lexers.asm', 'TASM', ('tasm',), ('*.asm', '*.ASM', '*.tasm'), ('text/x-tasm',)), - 'TclLexer': ('typecode._vendor.pygments.lexers.tcl', 'Tcl', ('tcl',), ('*.tcl', '*.rvt'), ('text/x-tcl', 'text/x-script.tcl', 'application/x-tcl')), - 'TcshLexer': ('typecode._vendor.pygments.lexers.shell', 'Tcsh', ('tcsh', 'csh'), ('*.tcsh', '*.csh'), ('application/x-csh',)), + "TasmLexer": ( + "typecode._vendor.pygments.lexers.asm", + "TASM", + ("tasm",), + ("*.asm", "*.ASM", "*.tasm"), + ("text/x-tasm",), + ), + "TclLexer": ( + "typecode._vendor.pygments.lexers.tcl", + "Tcl", + ("tcl",), + ("*.tcl", "*.rvt"), + ("text/x-tcl", "text/x-script.tcl", "application/x-tcl"), + ), + "TcshLexer": ( + "typecode._vendor.pygments.lexers.shell", + "Tcsh", + ("tcsh", "csh"), + ("*.tcsh", "*.csh"), + ("application/x-csh",), + ), # 'TcshSessionLexer': ('typecode._vendor.pygments.lexers.shell', 'Tcsh Session', ('tcshcon',), (), ()), # 'TeaTemplateLexer': ('typecode._vendor.pygments.lexers.templates', 'Tea', ('tea',), ('*.tea',), ('text/x-tea',)), # 'TeraTermLexer': ('typecode._vendor.pygments.lexers.teraterm', 'Tera Term macro', ('ttl', 'teraterm', 'teratermmacro'), ('*.ttl',), ('text/x-teratermmacro',)), @@ -462,7 +827,13 @@ # 'TerraformLexer': ('typecode._vendor.pygments.lexers.configs', 'Terraform', ('terraform', 'tf'), ('*.tf',), ('application/x-tf', 
'application/x-terraform')), # 'TexLexer': ('typecode._vendor.pygments.lexers.markup', 'TeX', ('tex', 'latex'), ('*.tex', '*.aux', '*.toc'), ('text/x-tex', 'text/x-latex')), # 'TextLexer': ('typecode._vendor.pygments.lexers.special', 'Text only', ('text',), ('*.txt',), ('text/plain',)), - 'ThriftLexer': ('typecode._vendor.pygments.lexers.dsls', 'Thrift', ('thrift',), ('*.thrift',), ('application/x-thrift',)), + "ThriftLexer": ( + "typecode._vendor.pygments.lexers.dsls", + "Thrift", + ("thrift",), + ("*.thrift",), + ("application/x-thrift",), + ), # 'TiddlyWiki5Lexer': ('typecode._vendor.pygments.lexers.markup', 'tiddler', ('tid',), ('*.tid',), ('text/vnd.tiddlywiki',)), # 'TodotxtLexer': ('typecode._vendor.pygments.lexers.textfmts', 'Todotxt', ('todotxt',), ('todo.txt', '*.todotxt'), ('text/x-todo',)), # 'TransactSqlLexer': ('typecode._vendor.pygments.lexers.sql', 'Transact-SQL', ('tsql', 't-sql'), ('*.sql',), ('text/x-tsql',)), @@ -470,7 +841,13 @@ # 'TurtleLexer': ('typecode._vendor.pygments.lexers.rdf', 'Turtle', ('turtle',), ('*.ttl',), ('text/turtle', 'application/x-turtle')), # 'TwigHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Twig', ('html+twig',), ('*.twig',), ('text/html+twig',)), # 'TwigLexer': ('typecode._vendor.pygments.lexers.templates', 'Twig', ('twig',), (), ('application/x-twig',)), - 'TypeScriptLexer': ('typecode._vendor.pygments.lexers.javascript', 'TypeScript', ('ts', 'typescript'), ('*.ts', '*.tsx'), ('text/x-typescript',)), + "TypeScriptLexer": ( + "typecode._vendor.pygments.lexers.javascript", + "TypeScript", + ("ts", "typescript"), + ("*.ts", "*.tsx"), + ("text/x-typescript",), + ), # 'TypoScriptCssDataLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScriptCssData', ('typoscriptcssdata',), (), ()), # 'TypoScriptHtmlDataLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScriptHtmlData', ('typoscripthtmldata',), (), ()), # 'TypoScriptLexer': ('typecode._vendor.pygments.lexers.typoscript', 'TypoScript', ('typoscript',), ('*.typoscript',), ('text/x-typoscript',)), @@ -478,19 +855,49 @@ # 'UniconLexer': ('typecode._vendor.pygments.lexers.unicon', 'Unicon', ('unicon',), ('*.icn',), ('text/unicon',)), # 'UrbiscriptLexer': ('typecode._vendor.pygments.lexers.urbi', 'UrbiScript', ('urbiscript',), ('*.u',), ('application/x-urbiscript',)), # 'UsdLexer': ('typecode._vendor.pygments.lexers.usd', 'USD', ('usd', 'usda'), ('*.usd', '*.usda'), ()), - 'VBScriptLexer': ('typecode._vendor.pygments.lexers.basic', 'VBScript', ('vbscript',), ('*.vbs', '*.VBS'), ()), + "VBScriptLexer": ( + "typecode._vendor.pygments.lexers.basic", + "VBScript", + ("vbscript",), + ("*.vbs", "*.VBS"), + (), + ), # 'VCLLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCL', ('vcl',), ('*.vcl',), ('text/x-vclsrc',)), # 'VCLSnippetLexer': ('typecode._vendor.pygments.lexers.varnish', 'VCLSnippets', ('vclsnippets', 'vclsnippet'), (), ('text/x-vclsnippet',)), # 'VCTreeStatusLexer': ('typecode._vendor.pygments.lexers.console', 'VCTreeStatus', ('vctreestatus',), (), ()), # 'VGLLexer': ('typecode._vendor.pygments.lexers.dsls', 'VGL', ('vgl',), ('*.rpf',), ()), # 'ValaLexer': ('typecode._vendor.pygments.lexers.c_like', 'Vala', ('vala', 'vapi'), ('*.vala', '*.vapi'), ('text/x-vala',)), - 'VbNetAspxLexer': ('typecode._vendor.pygments.lexers.dotnet', 'aspx-vb', ('aspx-vb',), ('*.aspx', '*.asax', '*.ascx', '*.ashx', '*.asmx', '*.axd'), ()), - 'VbNetLexer': ('typecode._vendor.pygments.lexers.dotnet', 'VB.net', ('vb.net', 'vbnet'), ('*.vb', '*.bas'), ('text/x-vbnet', 
'text/x-vba')), + "VbNetAspxLexer": ( + "typecode._vendor.pygments.lexers.dotnet", + "aspx-vb", + ("aspx-vb",), + ("*.aspx", "*.asax", "*.ascx", "*.ashx", "*.asmx", "*.axd"), + (), + ), + "VbNetLexer": ( + "typecode._vendor.pygments.lexers.dotnet", + "VB.net", + ("vb.net", "vbnet"), + ("*.vb", "*.bas"), + ("text/x-vbnet", "text/x-vba"), + ), # 'VelocityHtmlLexer': ('typecode._vendor.pygments.lexers.templates', 'HTML+Velocity', ('html+velocity',), (), ('text/html+velocity',)), # 'VelocityLexer': ('typecode._vendor.pygments.lexers.templates', 'Velocity', ('velocity',), ('*.vm', '*.fhtml'), ()), # 'VelocityXmlLexer': ('typecode._vendor.pygments.lexers.templates', 'XML+Velocity', ('xml+velocity',), (), ('application/xml+velocity',)), - 'VerilogLexer': ('typecode._vendor.pygments.lexers.hdl', 'verilog', ('verilog', 'v'), ('*.v',), ('text/x-verilog',)), - 'VhdlLexer': ('typecode._vendor.pygments.lexers.hdl', 'vhdl', ('vhdl',), ('*.vhdl', '*.vhd'), ('text/x-vhdl',)), + "VerilogLexer": ( + "typecode._vendor.pygments.lexers.hdl", + "verilog", + ("verilog", "v"), + ("*.v",), + ("text/x-verilog",), + ), + "VhdlLexer": ( + "typecode._vendor.pygments.lexers.hdl", + "vhdl", + ("vhdl",), + ("*.vhdl", "*.vhd"), + ("text/x-vhdl",), + ), # 'VimLexer': ('typecode._vendor.pygments.lexers.textedit', 'VimL', ('vim',), ('*.vim', '.vimrc', '.exrc', '.gvimrc', '_vimrc', '_exrc', '_gvimrc', 'vimrc', 'gvimrc'), ('text/x-vim',)), # 'WDiffLexer': ('typecode._vendor.pygments.lexers.diff', 'WDiff', ('wdiff',), ('*.wdiff',), ()), # 'WebIDLLexer': ('typecode._vendor.pygments.lexers.webidl', 'Web IDL', ('webidl',), ('*.webidl',), ()), @@ -514,29 +921,37 @@ # 'ZigLexer': ('typecode._vendor.pygments.lexers.zig', 'Zig', ('zig',), ('*.zig',), ('text/zig',)), } -if __name__ == '__main__': # pragma: no cover +if __name__ == "__main__": # pragma: no cover import sys import os # lookup lexers found_lexers = [] - sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..')) - for root, dirs, files in os.walk('.'): + sys.path.insert(0, os.path.join(os.path.dirname(__file__), "..", "..")) + for root, dirs, files in os.walk("."): for filename in files: - if filename.endswith('.py') and not filename.startswith('_'): - module_name = 'typecode._vendor.pygments.lexers%s.%s' % ( - root[1:].replace('/', '.'), filename[:-3]) + if filename.endswith(".py") and not filename.startswith("_"): + module_name = "typecode._vendor.pygments.lexers%s.%s" % ( + root[1:].replace("/", "."), + filename[:-3], + ) print(module_name) - module = __import__(module_name, None, None, ['']) + module = __import__(module_name, None, None, [""]) for lexer_name in module.__all__: lexer = getattr(module, lexer_name) found_lexers.append( - '%r: %r' % (lexer_name, - (module_name, - lexer.name, - tuple(lexer.aliases), - tuple(lexer.filenames), - tuple(lexer.mimetypes)))) + "%r: %r" + % ( + lexer_name, + ( + module_name, + lexer.name, + tuple(lexer.aliases), + tuple(lexer.filenames), + tuple(lexer.mimetypes), + ), + ) + ) # sort them to make the diff minimal found_lexers.sort() @@ -550,13 +965,13 @@ # management EOL, like `EolExtension # `. 
content = content.replace("\r\n", "\n") - header = content[:content.find('LEXERS = {')] - footer = content[content.find("if __name__ == '__main__':"):] + header = content[: content.find("LEXERS = {")] + footer = content[content.find("if __name__ == '__main__':") :] # write new file - with open(__file__, 'w') as fp: + with open(__file__, "w") as fp: fp.write(header) - fp.write('LEXERS = {\n %s,\n}\n\n' % ',\n '.join(found_lexers)) + fp.write("LEXERS = {\n %s,\n}\n\n" % ",\n ".join(found_lexers)) fp.write(footer) - print ('=== %d lexers processed.' % len(found_lexers)) + print("=== %d lexers processed." % len(found_lexers)) diff --git a/tests/data/filetest/code/scala/Applicative.scala.yml b/tests/data/filetest/code/scala/Applicative.scala.yml index 00951b7..4b88f7f 100644 --- a/tests/data/filetest/code/scala/Applicative.scala.yml +++ b/tests/data/filetest/code/scala/Applicative.scala.yml @@ -1,4 +1,4 @@ -filetype_file: UTF-8 Unicode text +filetype_file: Unicode text, UTF-8 text mimetype_file: text/plain filetype_pygment: Scala programming_language: Scala diff --git a/tests/data/filetest/data/mysql-arch.yml b/tests/data/filetest/data/mysql-arch.yml index 3edf27d..819131a 100644 --- a/tests/data/filetest/data/mysql-arch.yml +++ b/tests/data/filetest/data/mysql-arch.yml @@ -1,4 +1,4 @@ -filetype_file: MySQL table definition file Version 224, type DIAM_ISAM, MySQL version -1515870811 +filetype_file: MySQL table definition file Version -32, type DIAM_ISAM, MySQL version -1515870811 mimetype_file: application/octet-stream is_file: yes is_regular: yes diff --git a/tests/data/filetest/data/nulls.txt.yml b/tests/data/filetest/data/nulls.txt.yml index 68d8632..4fa7fa8 100644 --- a/tests/data/filetest/data/nulls.txt.yml +++ b/tests/data/filetest/data/nulls.txt.yml @@ -1,4 +1,4 @@ -filetype_file: ASCII text, with very long lines, with no line terminators +filetype_file: ASCII text, with very long lines (10000), with no line terminators mimetype_file: text/plain mimetype_python: text/plain is_file: yes diff --git a/tests/data/filetest/doc/pdf/notpdf.pdf.yml b/tests/data/filetest/doc/pdf/notpdf.pdf.yml index b1b78eb..4f3431a 100644 --- a/tests/data/filetest/doc/pdf/notpdf.pdf.yml +++ b/tests/data/filetest/doc/pdf/notpdf.pdf.yml @@ -1,5 +1,5 @@ filetype_file: CSV text -mimetype_file: application/csv +mimetype_file: text/csv mimetype_python: application/pdf is_file: yes is_regular: yes diff --git a/tests/data/filetest/media/Image-ascii.pgm.yml b/tests/data/filetest/media/Image-ascii.pgm.yml index e40165c..653634e 100644 --- a/tests/data/filetest/media/Image-ascii.pgm.yml +++ b/tests/data/filetest/media/Image-ascii.pgm.yml @@ -1,5 +1,5 @@ filetype_file: Netpbm image data, size = 62 x 23, greymap, ASCII text -mimetype_file: image/x-portable-greymap +mimetype_file: image/x-portable-graymap mimetype_python: image/x-portable-graymap is_file: yes is_regular: yes diff --git a/tests/data/filetest/media/Image1.pbm.yml b/tests/data/filetest/media/Image1.pbm.yml index 5af71b0..12a12ae 100644 --- a/tests/data/filetest/media/Image1.pbm.yml +++ b/tests/data/filetest/media/Image1.pbm.yml @@ -1,4 +1,4 @@ -filetype_file: Netpbm image data, size = 1 x 1, bitmap, ASCII text +filetype_file: Netpbm image data, size = 1 x 1, bitmap Created by Paint Shop Pro 5, ASCII text mimetype_file: image/x-portable-bitmap mimetype_python: image/x-portable-bitmap is_file: yes diff --git a/tests/data/filetest/media/Image1.tga.yml b/tests/data/filetest/media/Image1.tga.yml index 75af116..d6aa4c1 100644 --- 
a/tests/data/filetest/media/Image1.tga.yml +++ b/tests/data/filetest/media/Image1.tga.yml @@ -1,9 +1,9 @@ -filetype_file: data -mimetype_file: application/octet-stream +filetype_file: Targa image data - RGB - RLE 1 x 1 x 24 - author " " - comment " " 23-2-2010 + 10:24:54 - job " " - Paint Shop Pro 12.80 +mimetype_file: image/x-tga is_file: yes is_regular: yes size: 543 is_binary: yes -is_data: yes is_media: yes is_media_with_meta: yes diff --git a/tests/data/filetest/media/Movie.wmv.yml b/tests/data/filetest/media/Movie.wmv.yml index d4c6128..33ab5b4 100644 --- a/tests/data/filetest/media/Movie.wmv.yml +++ b/tests/data/filetest/media/Movie.wmv.yml @@ -1,8 +1,4 @@ -filetype_file: Microsoft ASF ASF_Extended_Content_Description_Object, Audio Media (Codec Id - 353, Number of channels 1, Samples Per Second 8000, Average Number of Bytes Per Second 1000, - Block Alignment 1048896, Bits Per Sample 10, Error correction type ASF_Audio_Spread), Video - Media (Encoded Image Width 160, Encoded Image Height 120, Image Width 160, Image Height 120, - Bits Per Pixel Count 24, Error correction type ASF_No_Error_Correction) ASF_Stream_Bitrate_Properties_Object +filetype_file: Microsoft ASF ASF_Extended_Content_Description_Object mimetype_file: video/x-ms-asf mimetype_python: video/x-ms-wmv is_file: yes diff --git a/tests/data/filetest/media/Movie_0001.wmv.yml b/tests/data/filetest/media/Movie_0001.wmv.yml index 4dd1eb5..58df249 100644 --- a/tests/data/filetest/media/Movie_0001.wmv.yml +++ b/tests/data/filetest/media/Movie_0001.wmv.yml @@ -1,9 +1,4 @@ -filetype_file: Microsoft ASF ASF_Extended_Content_Description_Object, Audio Media (Codec Id - 353, Number of channels 2, Samples Per Second 44100, Average Number of Bytes Per Second 11112, - Block Alignment 1053035, Bits Per Sample 10, Error correction type ASF_Audio_Spread), Video - Media (Encoded Image Width 320, Encoded Image Height 240, Image Width 320, Image Height 240, - Bits Per Pixel Count 24, Error correction type ASF_No_Error_Correction) ASF_Stream_Bitrate_Properties_Object - ASF_Index_Object ASF_Index_Object +filetype_file: Microsoft ASF ASF_Extended_Content_Description_Object mimetype_file: video/x-ms-asf mimetype_python: video/x-ms-wmv is_file: yes diff --git a/tests/data/filetest/media/Movie_0002.wmv.yml b/tests/data/filetest/media/Movie_0002.wmv.yml index b23d19a..3140d24 100644 --- a/tests/data/filetest/media/Movie_0002.wmv.yml +++ b/tests/data/filetest/media/Movie_0002.wmv.yml @@ -1,8 +1,4 @@ -filetype_file: Microsoft ASF ASF_Extended_Content_Description_Object, Audio Media (Codec Id - 353, Number of channels 2, Samples Per Second 22050, Average Number of Bytes Per Second 4006, - Block Alignment 1049320, Bits Per Sample 10, Error correction type ASF_Audio_Spread), Video - Media (Encoded Image Width 208, Encoded Image Height 160, Image Width 208, Image Height 160, - Bits Per Pixel Count 24, Error correction type ASF_No_Error_Correction) ASF_Stream_Bitrate_Properties_Object +filetype_file: Microsoft ASF ASF_Extended_Content_Description_Object mimetype_file: video/x-ms-asf mimetype_python: video/x-ms-wmv is_file: yes diff --git a/tests/data/filetest/media/a.avi.yml b/tests/data/filetest/media/a.avi.yml index 24fbfda..12b6ad5 100644 --- a/tests/data/filetest/media/a.avi.yml +++ b/tests/data/filetest/media/a.avi.yml @@ -1,5 +1,4 @@ -filetype_file: Microsoft ASF, Video Media (Encoded Image Width 1680, Encoded Image Height 1050, - Image Width 1680, Image Height 1050, Bits Per Pixel Count 24, Error correction type ASF_No_Error_Correction) 
+filetype_file: Microsoft ASF mimetype_file: video/x-ms-asf mimetype_python: video/x-msvideo is_file: yes diff --git a/tests/data/filetest/media/a4.mp4.yml b/tests/data/filetest/media/a4.mp4.yml index 6433f32..8b04b9d 100644 --- a/tests/data/filetest/media/a4.mp4.yml +++ b/tests/data/filetest/media/a4.mp4.yml @@ -1,4 +1,4 @@ -filetype_file: ISO Media, MP4 Base Media v1 [IS0 14496-12:2003] +filetype_file: ISO Media, MP4 Base Media v1 [ISO 14496-12:2003] mimetype_file: video/mp4 mimetype_python: video/mp4 is_file: yes diff --git a/tests/data/filetest/media/a4.mpg.yml b/tests/data/filetest/media/a4.mpg.yml index 06470ba..c0ad042 100644 --- a/tests/data/filetest/media/a4.mpg.yml +++ b/tests/data/filetest/media/a4.mpg.yml @@ -1,4 +1,4 @@ -filetype_file: ISO Media, MP4 Base Media v1 [IS0 14496-12:2003] +filetype_file: ISO Media, MP4 Base Media v1 [ISO 14496-12:2003] mimetype_file: video/mp4 mimetype_python: video/mpeg is_file: yes diff --git a/tests/data/filetest/media/mov.wvm.wmv.yml b/tests/data/filetest/media/mov.wvm.wmv.yml index aee0555..e119265 100644 --- a/tests/data/filetest/media/mov.wvm.wmv.yml +++ b/tests/data/filetest/media/mov.wvm.wmv.yml @@ -1,8 +1,4 @@ -filetype_file: Microsoft ASF ASF_Extended_Content_Description_Object, Audio Media (Codec Id - 353, Number of channels 1, Samples Per Second 8000, Average Number of Bytes Per Second 1000, - Block Alignment 1048896, Bits Per Sample 10, Error correction type ASF_Audio_Spread), Video - Media (Encoded Image Width 160, Encoded Image Height 120, Image Width 160, Image Height 120, - Bits Per Pixel Count 24, Error correction type ASF_No_Error_Correction) ASF_Stream_Bitrate_Properties_Object +filetype_file: Microsoft ASF ASF_Extended_Content_Description_Object mimetype_file: video/x-ms-asf mimetype_python: video/x-ms-wmv is_file: yes diff --git a/tests/filetype_test_utils.py b/tests/filetype_test_utils.py index 10d3778..4a6c497 100644 --- a/tests/filetype_test_utils.py +++ b/tests/filetype_test_utils.py @@ -28,7 +28,7 @@ """ test_env = FileDrivenTesting() -test_env.test_data_dir = path.join(path.dirname(__file__), 'data') +test_env.test_data_dir = path.join(path.dirname(__file__), "data") @attr.s(slots=True) @@ -51,12 +51,12 @@ class FileTypeTest(object): test_file = attr.ib(default=None) # ATTENTION: keep these attributes in sync with typecode.contenttype.Type - filetype_file = attr.ib(default='') - mimetype_file = attr.ib(default='') - mimetype_python = attr.ib(default='') - filetype_pygment = attr.ib(default='') - elf_type = attr.ib(default='') - programming_language = attr.ib(default='') + filetype_file = attr.ib(default="") + mimetype_file = attr.ib(default="") + mimetype_python = attr.ib(default="") + filetype_pygment = attr.ib(default="") + elf_type = attr.ib(default="") + programming_language = attr.ib(default="") is_file = attr.ib(default=False) is_dir = attr.ib(default=False) @@ -65,7 +65,7 @@ class FileTypeTest(object): is_link = attr.ib(default=False) is_broken_link = attr.ib(default=False) - link_target = attr.ib(default='') + link_target = attr.ib(default="") size = attr.ib(default=False) is_pdf_with_text = attr.ib(default=False) is_text = attr.ib(default=False) @@ -101,13 +101,14 @@ class FileTypeTest(object): def __attrs_post_init__(self, *args, **kwargs): if self.data_file: try: - with io.open(self.data_file, encoding='utf-8') as df: + with io.open(self.data_file, encoding="utf-8") as df: for key, value in saneyaml.load(df.read()).items(): if value: setattr(self, key, value) except: import traceback - msg = 
f'file://{self.data_file}\n{repr(self)}\n' + traceback.format_exc() + + msg = f"file://{self.data_file}\n{repr(self)}\n" + traceback.format_exc() raise Exception(msg) if isinstance(self.size, str): self.size = int(self.size) @@ -116,8 +117,9 @@ def to_dict(self, filter_empty=False, filter_extra=False): """ Serialize self to an ordered mapping. """ - filtered = [field for field in attr.fields(FileTypeTest) - if field.name in ('data_file', 'test_file')] + filtered = [ + field for field in attr.fields(FileTypeTest) if field.name in ("data_file", "test_file") + ] fields_filter = attr.filters.exclude(*filtered) data = attr.asdict(self, filter=fields_filter, dict_factory=OrderedDict) data = data.items() @@ -125,9 +127,9 @@ def to_dict(self, filter_empty=False, filter_extra=False): # skip empty fields data = ((k, v) for k, v in data if v) if filter_extra: - data = ((k, v) for k, v in data if k not in ('expected_failure', 'notes')) + data = ((k, v) for k, v in data if k not in ("expected_failure", "notes")) - return OrderedDict(data) + return dict(OrderedDict(data)) def dumps(self): """ @@ -141,7 +143,7 @@ def dump(self, check_exists=False): """ if check_exists and path.exists(self.data_file): raise Exception(self.data_file) - with io.open(self.data_file, 'w', encoding='utf-8') as df: + with io.open(self.data_file, "w", encoding="utf-8") as df: df.write(self.dumps()) @@ -181,7 +183,6 @@ def check_types_equal(expected, result): # we have either number, date, None or boolean value and # we want both values to be both trueish or falsish else: - if bool(result_value) != bool(expected_value): return False return True @@ -211,25 +212,24 @@ def closure_test_function(*args, **kwargs): # this is done to display slightly eaier to handle error traces if not passing: - expected['data file'] = 'file://' + data_file - expected['test_file'] = 'file://' + test_file + expected["data file"] = "file://" + data_file + expected["test_file"] = "file://" + test_file assert dict(results) == dict(expected) data_file = test.data_file test_file = test.test_file - tfn = test_file.replace(test_data_dir, '').strip('\\/\\') - test_name = 'test_%(tfn)s_%(index)s' % locals() + tfn = test_file.replace(test_data_dir, "").strip("\\/\\") + test_name = "test_%(tfn)s_%(index)s" % locals() test_name = python_safe_name(test_name) closure_test_function.__name__ = test_name - if (test.expected_failure is True - or (isinstance(test.expected_failure, str) - and ( - ('windows' in test.expected_failure and on_windows) - or ('macos' in test.expected_failure and on_mac) - ) + if test.expected_failure is True or ( + isinstance(test.expected_failure, str) + and ( + ("windows" in test.expected_failure and on_windows) + or ("macos" in test.expected_failure and on_mac) ) ): closure_test_function = pytest.mark.xfail(closure_test_function) @@ -247,13 +247,12 @@ def build_tests( Dynamically build test methods from a sequence of FileTypeTest and attach these method to the clazz test class. 
""" - for i, test in enumerate(sorted(filetype_tests, key=lambda x:x.test_file)): + for i, test in enumerate(sorted(filetype_tests, key=lambda x: x.test_file)): # closure on the test params if test.expected_failure: actual_regen = False else: actual_regen = regen - method, name = make_filetype_test_functions( - test, i, test_data_dir, actual_regen) + method, name = make_filetype_test_functions(test, i, test_data_dir, actual_regen) # attach that method to our test class setattr(clazz, name, method) diff --git a/tests/test_contenttype.py b/tests/test_contenttype.py index 252e85a..503110e 100644 --- a/tests/test_contenttype.py +++ b/tests/test_contenttype.py @@ -45,239 +45,245 @@ class TestContentTypeComplex(FileBasedTesting): -# test_data_dir = os.path.join(os.path.dirname(__file__), 'data') - test_data_dir = os.path.join(os.path.dirname(__file__), 'data') + # test_data_dir = os.path.join(os.path.dirname(__file__), 'data') + test_data_dir = os.path.join(os.path.dirname(__file__), "data") def test_filetype_file_on_unicode_file_name(self): - test_zip = self.extract_test_zip('contenttype/unicode/unicode.zip') - test_dir = os.path.join(test_zip, 'a') + test_zip = self.extract_test_zip("contenttype/unicode/unicode.zip") + test_dir = os.path.join(test_zip, "a") f = os.listdir(test_dir)[0] test_file = os.path.join(test_dir, f) assert os.path.exists(test_file) - expected = 'PNG image data, 16 x 12, 8-bit/color RGBA, interlaced' + expected = "PNG image data, 16 x 12, 8-bit/color RGBA, interlaced" assert get_filetype_file(test_file) == expected - expected = 'image/png' + expected = "image/png" assert get_mimetype_file(test_file) == expected - @skipIf(not on_linux, 'Windows and macOS have some issues with some non-unicode paths') + @skipIf(not on_linux, "Windows and macOS have some issues with some non-unicode paths") def test_filetype_file_on_unicode_file_name2(self): - zip_file_name = 'contenttype/unicode/unicode2.zip' + zip_file_name = "contenttype/unicode/unicode2.zip" test_zip = self.extract_test_zip(zip_file_name) - test_dir = os.path.join(test_zip, 'a') - f = [f for f in os.listdir(test_dir) if f.startswith('g')][0] + test_dir = os.path.join(test_zip, "a") + f = [f for f in os.listdir(test_dir) if f.startswith("g")][0] test_file = os.path.join(test_dir, f) assert os.path.exists(test_file) - expected = 'PNG image data, 16 x 12, 8-bit/color RGBA, interlaced' + expected = "PNG image data, 16 x 12, 8-bit/color RGBA, interlaced" if on_windows: # FIXME: this is a very short png file though - expected = 'Non-ISO extended-ASCII text' + expected = "Non-ISO extended-ASCII text" assert get_filetype_file(test_file) == expected - expected = 'image/png' + expected = "image/png" if on_windows: # FIXME: this is a very short png file though - expected = 'text/plain' + expected = "text/plain" assert get_mimetype_file(test_file) == expected - @skipIf(on_windows, 'Windows does not have (well supported) links.') + @skipIf(on_windows, "Windows does not have (well supported) links.") def test_symbolink_links(self): - test_dir = self.extract_test_tar('contenttype/links/links.tar.gz', verbatim=True) + test_dir = self.extract_test_tar("contenttype/links/links.tar.gz", verbatim=True) - test_file1 = os.path.join(test_dir, 'prunedirs/targets/simlink_to_dir') + test_file1 = os.path.join(test_dir, "prunedirs/targets/simlink_to_dir") assert is_link(test_file1) assert not is_broken_link(test_file1) - assert get_link_target(test_file1) == '../sources/subdir' + assert get_link_target(test_file1) == "../sources/subdir" - 
test_file2 = os.path.join(test_dir, 'prunedirs/targets/simlink_to_file') + test_file2 = os.path.join(test_dir, "prunedirs/targets/simlink_to_file") assert is_link(test_file2) assert not is_broken_link(test_file2) - assert get_link_target(test_file2) == '../sources/a.txt' + assert get_link_target(test_file2) == "../sources/a.txt" - test_file3 = os.path.join(test_dir, 'prunedirs/targets/simlink_to_missing_file') + test_file3 = os.path.join(test_dir, "prunedirs/targets/simlink_to_missing_file") assert is_link(test_file3) assert is_broken_link(test_file3) - assert get_link_target(test_file3) == '../sources/temp.txt' + assert get_link_target(test_file3) == "../sources/temp.txt" - test_file4 = os.path.join(test_dir, 'prunedirs/targets/simlink_to_missing_dir') + test_file4 = os.path.join(test_dir, "prunedirs/targets/simlink_to_missing_dir") assert is_link(test_file4) assert is_broken_link(test_file4) - assert get_link_target(test_file4) == '../sources/tempdir' + assert get_link_target(test_file4) == "../sources/tempdir" - @skipIf(not on_windows, 'Hangs for now, for lack of proper sudo access on some test servers.') - @skipIf(on_windows, 'Windows does not have fifos.') + @skipIf(not on_windows, "Hangs for now, for lack of proper sudo access on some test servers.") + @skipIf(on_windows, "Windows does not have fifos.") def test_contenttype_fifo(self): test_dir = self.get_temp_dir() - myfifo = os.path.join(test_dir, 'myfifo') + myfifo = os.path.join(test_dir, "myfifo") import subprocess - if subprocess.call(['mkfifo', myfifo]) != 0: - self.fail('Unable to create fifo') + + if subprocess.call(["mkfifo", myfifo]) != 0: + self.fail("Unable to create fifo") assert os.path.exists(myfifo) assert is_special(myfifo) - assert get_filetype(myfifo) == 'FIFO pipe' + assert get_filetype(myfifo) == "FIFO pipe" def test_debian_package(self): - test_file = self.get_test_loc('contenttype/package/libjama-dev_1.2.4-2_all.deb') + test_file = self.get_test_loc("contenttype/package/libjama-dev_1.2.4-2_all.deb") expected = ( # libmagic 5.38 - 'debian binary package (format 2.0), with control.tar.gz, data compression gz', + "debian binary package (format 2.0), with control.tar.gz, data compression gz", # libmagic 5.2x - 'debian binary package (format 2.0)', + "debian binary package (format 2.0)", ) assert get_filetype(test_file).startswith(expected) assert is_binary(test_file) assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" def test_package_json(self): - test_file = self.get_test_loc('contenttype/package/package.json') + test_file = self.get_test_loc("contenttype/package/package.json") expected = ( - 'ascii text, with very long lines', + "ascii text, with very long lines", # libmagic 5.39+ - 'json data', + "json data", ) assert get_filetype(test_file) in expected assert is_text(test_file) assert not is_binary(test_file) - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" assert not is_source(test_file) def test_certificate(self): - test_file = self.get_test_loc('contenttype/certificate/CERTIFICATE') + test_file = self.get_test_loc("contenttype/certificate/CERTIFICATE") assert is_binary(test_file) # assert not is_archive(test_file) expected = ( # libmagic 5.38 - 'apple diskcopy 4.2 image', + "apple diskcopy 4.2 image", # libmagic 5.25 - 'data', + "data", ) assert get_filetype(test_file).startswith(expected) - assert 
get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" def test_code_c_1(self): - test_file = self.get_test_loc('contenttype/code/c/c_code.c') + test_file = self.get_test_loc("contenttype/code/c/c_code.c") expected = ( # incorrect p to libmagic 5.38 - 'ti-xx graphing calculator (flash)', + "ti-xx graphing calculator (flash)", # correct in libmagic 5.39+ - 'c source, ascii text', + "c source, ascii text", ) assert get_filetype(test_file) in expected - assert get_filetype_pygment(test_file) == 'C' + assert get_filetype_pygment(test_file) == "C" assert is_source(test_file) assert is_text(test_file) def test_code_c_7(self): - test_file = self.get_test_loc('contenttype/code/c/some.c') + test_file = self.get_test_loc("contenttype/code/c/some.c") expected = ( # incorrect p to libmagic 5.38 - 'ti-xx graphing calculator (flash)', + "ti-xx graphing calculator (flash)", # correct in libmagic 5.39+ - 'c source, ascii text', + "c source, ascii text", ) assert get_filetype(test_file) in expected assert is_source(test_file) - assert get_filetype_pygment(test_file) == 'C' + assert get_filetype_pygment(test_file) == "C" def test_code_python_2(self): - test_file = self.get_test_loc('contenttype/code/python/extract.py') + test_file = self.get_test_loc("contenttype/code/python/extract.py") assert is_source(test_file) assert is_text(test_file) - assert get_filetype_pygment(test_file) == 'Python' - assert get_filetype(test_file) == 'python script, ascii text executable' + assert get_filetype_pygment(test_file) == "Python" + assert get_filetype(test_file) == "python script, ascii text executable" expected = ( - 'text/x-python', + "text/x-python", # new in libmagic 5.39 - 'text/x-script.python', + "text/x-script.python", ) assert get_mimetype_file(test_file) in expected - assert get_filetype_file(test_file).startswith('Python script') + assert get_filetype_file(test_file).startswith("Python script") def test_compiled_elf_so(self): - test_file = self.get_test_loc(u'contenttype/compiled/linux/libssl.so.0.9.7') + test_file = self.get_test_loc("contenttype/compiled/linux/libssl.so.0.9.7") assert not is_special(test_file) assert not is_text(test_file) - assert get_filetype_pygment(test_file) == '' - assert get_mimetype_file(test_file) == 'application/x-sharedlib' + assert get_filetype_pygment(test_file) == "" + assert get_mimetype_file(test_file) == "application/x-sharedlib" expected = ( # correct with libmagic 5.38 and 5.39 - 'ELF 32-bit LSB shared object, Intel 80386, version 1 (SYSV), statically linked, stripped', + "ELF 32-bit LSB shared object, Intel 80386, version 1 (SYSV), statically linked, stripped", # incorrect with libmagic 5.2x - 'ELF 32-bit LSB shared object, Intel 80386, version 1 (SYSV), dynamically linked, stripped', + "ELF 32-bit LSB shared object, Intel 80386, version 1 (SYSV), dynamically linked, stripped", ) assert get_filetype_file(test_file) in expected assert get_filetype(test_file) in [t.lower() for t in expected] - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" def test_compiled_elf_so_2(self): - test_file = self.get_test_loc('contenttype/compiled/linux/libnetsnmpagent.so.5') + test_file = self.get_test_loc("contenttype/compiled/linux/libnetsnmpagent.so.5") assert not is_source(test_file) expected = ( # correct with libmagic 5.38 and 5.39 - 'elf 32-bit lsb shared object, intel 80386, version 1 (sysv), statically linked, with debug_info, not stripped', + "elf 32-bit lsb shared object, intel 80386, version 1 (sysv), 
statically linked, with debug_info, not stripped", # incorrect with libmagic 5.2x - 'elf 32-bit lsb shared object, intel 80386, version 1 (sysv), dynamically linked, with debug_info, not stripped', + "elf 32-bit lsb shared object, intel 80386, version 1 (sysv), dynamically linked, with debug_info, not stripped", ) assert get_filetype(test_file) in expected - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" @pytest.mark.xfail( - on_mac or on_windows, reason='Somehow we get really weird results on macOS with libmagic 5.38 and mac, win32 on libmagic 5.39: ' - '[64-bit architecture=6893422] [64-bit architecture=6649701] [architecture=1075809] [architecture=3959150] [architecture=768]') + on_mac or on_windows, + reason="Somehow we get really weird results on macOS with libmagic 5.38 and mac, win32 on libmagic 5.39: " + "[64-bit architecture=6893422] [64-bit architecture=6649701] [architecture=1075809] [architecture=3959150] [architecture=768]", + ) def test_compiled_java_classfile_1(self): - test_file = self.get_test_loc('contenttype/compiled/java/CommonViewerSiteFactory.class') - assert get_filetype(test_file) == 'compiled java class data, version 46.0 (java 1.2)' - assert get_filetype_pygment(test_file) == '' + test_file = self.get_test_loc("contenttype/compiled/java/CommonViewerSiteFactory.class") + assert get_filetype(test_file) == "compiled java class data, version 46.0 (java 1.2)" + assert get_filetype_pygment(test_file) == "" - @pytest.mark.xfail(on_mac or on_windows, reason='Somehow we get really weird results on macOS with libmagic 5.38 and mac, win32 on libmagic 5.39: ' - '[64-bit architecture=6893422] [64-bit architecture=6649701] [architecture=1075809] [architecture=3959150] [architecture=768]') + @pytest.mark.xfail( + on_mac or on_windows, + reason="Somehow we get really weird results on macOS with libmagic 5.38 and mac, win32 on libmagic 5.39: " + "[64-bit architecture=6893422] [64-bit architecture=6649701] [architecture=1075809] [architecture=3959150] [architecture=768]", + ) def test_compiled_java_classfile_2(self): - test_file = self.get_test_loc('contenttype/compiled/java/old.class') + test_file = self.get_test_loc("contenttype/compiled/java/old.class") assert is_binary(test_file) - assert get_filetype(test_file) == 'compiled java class data, version 46.0 (java 1.2)' - assert get_filetype_pygment(test_file) == '' + assert get_filetype(test_file) == "compiled java class data, version 46.0 (java 1.2)" + assert get_filetype_pygment(test_file) == "" def test_compiled_python_1(self): - test_dir = self.extract_test_zip('contenttype/compiled/python/compiled.zip') - test_file = os.path.join(test_dir, 'command.pyc') - assert get_filetype(test_file) == 'python 2.5 byte-compiled' + test_dir = self.extract_test_zip("contenttype/compiled/python/compiled.zip") + test_file = os.path.join(test_dir, "command.pyc") + assert get_filetype(test_file) == "python 2.5 byte-compiled" assert not is_source(test_file) assert not is_text(test_file) expected_mime = ( - 'application/octet-stream', + "application/octet-stream", # libmagic 5.39 - 'text/x-bytecode.python', + "application/x-bytecode.python", ) assert get_mimetype_file(test_file) in expected_mime - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" - test_file2 = os.path.join(test_dir, 'contenttype.pyc') + test_file2 = os.path.join(test_dir, "contenttype.pyc") assert is_binary(test_file2) assert get_pygments_lexer(test_file2) is None - test_file3 = 
os.path.join(test_dir, 'contenttype.pyo') + test_file3 = os.path.join(test_dir, "contenttype.pyo") assert is_binary(test_file3) assert get_pygments_lexer(test_file3) is None - test_file4 = os.path.join(test_dir, 'extract.pyc') - assert get_filetype(test_file4) == 'python 2.5 byte-compiled' + test_file4 = os.path.join(test_dir, "extract.pyc") + assert get_filetype(test_file4) == "python 2.5 byte-compiled" assert not is_source(test_file4) assert not is_text(test_file4) assert get_mimetype_file(test_file4) in expected_mime - assert get_filetype_pygment(test_file4) == '' + assert get_filetype_pygment(test_file4) == "" # @pytest.mark.xfail(on_windows or on_mac, reason='Somehow we have incorrect results on win63 with libmagic 5.38: ' # 'application/octet-stream instead of EPS') def test_doc_postscript_eps(self): - test_file = self.get_test_loc('contenttype/doc/postscript/Image1.eps') + test_file = self.get_test_loc("contenttype/doc/postscript/Image1.eps") assert is_binary(test_file) results = dict( @@ -286,25 +292,25 @@ def test_doc_postscript_eps(self): ) if on_windows: expected = dict( - get_filetype_file='DOS EPS Binary File Postscript starts at byte 32 length 466 TIFF starts at byte 498 length 11890', - get_mimetype_file='application/octet-stream', + get_filetype_file="DOS EPS Binary File Postscript starts at byte 32 length 466 TIFF starts at byte 498 length 11890", + get_mimetype_file="application/octet-stream", ) else: expected = dict( - get_filetype_file='DOS EPS Binary File Postscript starts at byte 32 length 466 TIFF starts at byte 498 length 11890', - get_mimetype_file='image/x-eps', + get_filetype_file="DOS EPS Binary File Postscript starts at byte 32 length 466 TIFF starts at byte 498 length 11890", + get_mimetype_file="image/x-eps", ) assert results == expected def test_media_image_img(self): - test_file = self.get_test_loc('contenttype/media/Image1.img') + test_file = self.get_test_loc("contenttype/media/Image1.img") assert is_binary(test_file) - assert get_filetype_file(test_file).startswith('GEM Image data') + assert get_filetype_file(test_file).startswith("GEM Image data") expected = ( # libmagic 5.3.8 - 'image/x-gem', + "image/x-gem", # libmagic 5.2x - 'application/octet-stream', + "application/octet-stream", ) assert get_mimetype_file(test_file) in expected assert not get_mimetype_python(test_file) @@ -314,12 +320,12 @@ def test_media_image_img(self): assert not contains_text(test_file) def test_package_debian(self): - test_file = self.get_test_loc('contenttype/package/wget-el_0.5.0-8_all.deb') + test_file = self.get_test_loc("contenttype/package/wget-el_0.5.0-8_all.deb") expected = ( # libmagic 5.38 - 'debian binary package (format 2.0), with control.tar.gz, data compression gz', + "debian binary package (format 2.0), with control.tar.gz, data compression gz", # libmagic 5.2x - 'debian binary package (format 2.0)', + "debian binary package (format 2.0)", ) assert get_filetype(test_file) in expected assert is_binary(test_file) @@ -329,54 +335,62 @@ def test_package_debian(self): @expectedFailure def test_text_rsync_file_is_not_octet_stream(self): # this is a libmagic bug: http://bugs.gw.com/view.php?id=473 - test_file = self.get_test_loc('contenttype/text/wildtest.txt') - assert 'data' != get_filetype_file(test_file) - assert 'octet' not in get_mimetype_file(test_file) + test_file = self.get_test_loc("contenttype/text/wildtest.txt") + assert "data" != get_filetype_file(test_file) + assert "octet" not in get_mimetype_file(test_file) - @skipIf(on_windows, 'fails because of 
libmagic bug on windows.') + @skipIf(on_windows, "fails because of libmagic bug on windows.") def test_archive_squashfs_crashing(self): - test_file = self.get_test_loc('contenttype/archive/crashing-squashfs') - assert get_filetype_file(test_file).startswith('Squashfs filesystem, little endian, version') + test_file = self.get_test_loc("contenttype/archive/crashing-squashfs") + assert get_filetype_file(test_file).startswith( + "Squashfs filesystem, little endian, version" + ) assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" - @skipIf(on_windows, 'fails because of libmagic bug on windows.') + @skipIf(on_windows, "fails because of libmagic bug on windows.") def test_archive_squashfs_gz(self): - test_file = self.get_test_loc('contenttype/archive/sqfs-gz.sqs') - assert get_filetype_file(test_file).startswith('Squashfs filesystem, little endian, version') + test_file = self.get_test_loc("contenttype/archive/sqfs-gz.sqs") + assert get_filetype_file(test_file).startswith( + "Squashfs filesystem, little endian, version" + ) assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" - @skipIf(on_windows, 'fails because of libmagic bug on windows.') + @skipIf(on_windows, "fails because of libmagic bug on windows.") def test_archive_squashfs_lzo(self): - test_file = self.get_test_loc('contenttype/archive/sqfs-lzo.sqs') - assert get_filetype_file(test_file).startswith('Squashfs filesystem, little endian, version') + test_file = self.get_test_loc("contenttype/archive/sqfs-lzo.sqs") + assert get_filetype_file(test_file).startswith( + "Squashfs filesystem, little endian, version" + ) assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" - @skipIf(on_windows, 'fails because of libmagic bug on windows.') + @skipIf(on_windows, "fails because of libmagic bug on windows.") def test_archive_squashfs_xz(self): - test_file = self.get_test_loc('contenttype/archive/sqfs-xz.sqs') - assert get_filetype_file(test_file).startswith('Squashfs filesystem, little endian, version') + test_file = self.get_test_loc("contenttype/archive/sqfs-xz.sqs") + assert get_filetype_file(test_file).startswith( + "Squashfs filesystem, little endian, version" + ) assert is_archive(test_file) assert is_compressed(test_file) assert not contains_text(test_file) - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" def test_directory(self): - test_file = self.get_test_loc('contenttype') + test_file = self.get_test_loc("contenttype") assert not is_binary(test_file) assert not is_compressed(test_file) assert not contains_text(test_file) - assert get_filetype_pygment(test_file) == '' + assert get_filetype_pygment(test_file) == "" def test_size(self): - test_dir = self.get_test_loc('contenttype/size') + test_dir = self.get_test_loc("contenttype/size") result = size(test_dir) assert result == 18 diff --git a/tests/test_entropy.py b/tests/test_entropy.py index 01fac18..ac7e6ef 100644 --- a/tests/test_entropy.py +++ b/tests/test_entropy.py @@ -21,7 +21,6 @@ def check_entropy(data, expected, func=shannon_entropy): class TestEntropy(unittest.TestCase): - def test_shannon_entropy(self): # some tests values 
collected from various places for sanity # https://www.reddit.com/r/dailyprogrammer/comments/4fc896/20160418_challenge_263_easy_calculating_shannon/ @@ -32,60 +31,66 @@ def test_shannon_entropy(self): check(bytes(list(range(256))), 8.0) - check('', 0.0) - check('0', 0.0) - check(b'\x00' * 1024, 0.0) - check(b'\xff' * 1024, 0.0) - check(b'\x00\xff' * 512, 1.0) - check(b'\xff\x00' * 512, 1.0) - check(b'\x00\xcc\xff' * 512, 1.58) - check('122333444455555666666777777788888888', 2.79) - check('563881467447538846567288767728553786', 2.79) - check(sorted('563881467447538846567288767728553786'), 2.79) - check('https://www.reddit.com/r/dailyprogrammer', 4.06) - check('int main(int argc, char *argv[])', 3.87) - check(('0' * 1000) + ('1' * 1000), 1.0) - check('1223334444', 1.846439) - check('1227774444', 1.846439) - check('Rosetta Code is the best site in the world!', 3.646513) - check('Rosetta Code', 3.08496) - check('1223334444555555555', 1.96981) - check('122333', 1.45914) - check('aaBBcccDDDD', 1.936260) - check('1234567890abcdefghijklmnopqrstuvwxyz', 5.1699250) - check('01010101010101010102020202020202', 1.49) - data = ('Lorem ipsum dolor sit amet, consectetur adipisicing ''elit, ' - 'sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.') + check("", 0.0) + check("0", 0.0) + check(b"\x00" * 1024, 0.0) + check(b"\xff" * 1024, 0.0) + check(b"\x00\xff" * 512, 1.0) + check(b"\xff\x00" * 512, 1.0) + check(b"\x00\xcc\xff" * 512, 1.58) + check("122333444455555666666777777788888888", 2.79) + check("563881467447538846567288767728553786", 2.79) + check(sorted("563881467447538846567288767728553786"), 2.79) + check("https://www.reddit.com/r/dailyprogrammer", 4.06) + check("int main(int argc, char *argv[])", 3.87) + check(("0" * 1000) + ("1" * 1000), 1.0) + check("1223334444", 1.846439) + check("1227774444", 1.846439) + check("Rosetta Code is the best site in the world!", 3.646513) + check("Rosetta Code", 3.08496) + check("1223334444555555555", 1.96981) + check("122333", 1.45914) + check("aaBBcccDDDD", 1.936260) + check("1234567890abcdefghijklmnopqrstuvwxyz", 5.1699250) + check("01010101010101010102020202020202", 1.49) + data = ( + "Lorem ipsum dolor sit amet, consectetur adipisicing " + "elit, " + "sed do eiusmod tempor incididunt ut labore et dolore magna aliqua." 
+ ) check(data, 4.02) - check('hnpshnhahphshnp', 2.11) + check("hnpshnhahphshnp", 2.11) def test_gz_entropy(self): check = partial(check_entropy, func=gzip_entropy) check(bytes(list(range(256))), 1.04) - check('', 0.0) - check('0', 9.0) - check(b'\x00' * 1024, 0.02) - check(b'\xff' * 1024, 0.02) - check(b'\x00\xff' * 512, 0.02) - check(b'\xff\x00' * 512, 0.02) - check(b'\x00\xcc\xff' * 512, 0.01) - check('122333444455555666666777777788888888', 0.72) - check('563881467447538846567288767728553786', 1.03) - check(''.join(sorted('563881467447538846567288767728553786')), 0.72) - check('https://www.reddit.com/r/dailyprogrammer', 1.2) - check('int main(int argc, char *argv[])', 1.16) - check(('0' * 1000) + ('1' * 1000), 0.01) - check('1223334444', 1.6) - check('1227774444', 1.6) - check('Rosetta Code is the best site in the world!', 1.07) - check('Rosetta Code', 1.67) - check('1223334444555555555', 1.0) - check('122333', 2.33) - check('aaBBcccDDDD', 1.55) - check('1234567890abcdefghijklmnopqrstuvwxyz', 1.22) - check('01010101010101010102020202020202', 0.47) - data = ('Lorem ipsum dolor sit amet, consectetur adipisicing ''elit, ' - 'sed do eiusmod tempor incididunt ut labore et dolore magna aliqua.') + check("", 0.0) + check("0", 9.0) + check(b"\x00" * 1024, 0.02) + check(b"\xff" * 1024, 0.02) + check(b"\x00\xff" * 512, 0.02) + check(b"\xff\x00" * 512, 0.02) + check(b"\x00\xcc\xff" * 512, 0.01) + check("122333444455555666666777777788888888", 0.72) + check("563881467447538846567288767728553786", 1.03) + check("".join(sorted("563881467447538846567288767728553786")), 0.72) + check("https://www.reddit.com/r/dailyprogrammer", 1.2) + check("int main(int argc, char *argv[])", 1.16) + check(("0" * 1000) + ("1" * 1000), 0.01) + check("1223334444", 1.6) + check("1227774444", 1.6) + check("Rosetta Code is the best site in the world!", 1.07) + check("Rosetta Code", 1.67) + check("1223334444555555555", 1.0) + check("122333", 2.33) + check("aaBBcccDDDD", 1.55) + check("1234567890abcdefghijklmnopqrstuvwxyz", 1.22) + check("01010101010101010102020202020202", 0.47) + data = ( + "Lorem ipsum dolor sit amet, consectetur adipisicing " + "elit, " + "sed do eiusmod tempor incididunt ut labore et dolore magna aliqua." 
+ ) check(data, 0.8) - check('hnpshnhahphshnp', 1.4) + check("hnpshnhahphshnp", 1.4) diff --git a/tests/test_extractible.py b/tests/test_extractible.py index 2797f39..b2595f0 100644 --- a/tests/test_extractible.py +++ b/tests/test_extractible.py @@ -15,27 +15,27 @@ class TestExtractible(FileBasedTesting): - test_data_dir = os.path.join(os.path.dirname(__file__), 'data') + test_data_dir = os.path.join(os.path.dirname(__file__), "data") def test__can_extract(self): tests = ( - ('extractible/a.tar.gz', True), - ('extractible/crashing-squashfs', False), - ('extractible/dbase.fdt', False), - ('extractible/e.tar.bz2', True), - ('extractible/e.tar.gz', True), - ('extractible/e.tar', True), - ('extractible/file_4.26-1.diff.gz', True), - ('extractible/posixnotgnu.tar', True), - ('extractible/sqfs-gz.sqs', False), - ('extractible/sqfs-lzo.sqs', False), - ('extractible/sqfs-xz.sqs', False), - ('extractible/test.tar.lzma', True), - ('extractible/test.tar.xz', True), - ('extractible/test.zip', True), - ('extractible/win-archive.lib', False), + ("extractible/a.tar.gz", True), + ("extractible/crashing-squashfs", False), + ("extractible/dbase.fdt", False), + ("extractible/e.tar.bz2", True), + ("extractible/e.tar.gz", True), + ("extractible/e.tar", True), + ("extractible/file_4.26-1.diff.gz", True), + ("extractible/posixnotgnu.tar", True), + ("extractible/sqfs-gz.sqs", False), + ("extractible/sqfs-lzo.sqs", False), + ("extractible/sqfs-xz.sqs", False), + ("extractible/test.tar.lzma", True), + ("extractible/test.tar.xz", True), + ("extractible/test.zip", True), + ("extractible/win-archive.lib", False), ) for location, expected in tests: test_file = self.get_test_loc(location) result = extractible._can_extract(test_file) - assert result == expected, '{} should extractible: {}'.format(location, expected) + assert result == expected, "{} should extractible: {}".format(location, expected) diff --git a/tests/test_skeleton_codestyle.py b/tests/test_skeleton_codestyle.py deleted file mode 100644 index 2eb6e55..0000000 --- a/tests/test_skeleton_codestyle.py +++ /dev/null @@ -1,36 +0,0 @@ -# -# Copyright (c) nexB Inc. and others. All rights reserved. -# ScanCode is a trademark of nexB Inc. -# SPDX-License-Identifier: Apache-2.0 -# See http://www.apache.org/licenses/LICENSE-2.0 for the license text. -# See https://github.com/nexB/skeleton for support or download. -# See https://aboutcode.org for more information about nexB OSS projects. -# - -import subprocess -import unittest -import configparser - - -class BaseTests(unittest.TestCase): - def test_skeleton_codestyle(self): - """ - This test shouldn't run in proliferated repositories. 
- """ - setup_cfg = configparser.ConfigParser() - setup_cfg.read("setup.cfg") - if setup_cfg["metadata"]["name"] != "skeleton": - return - - args = "venv/bin/black --check -l 100 setup.py etc tests" - try: - subprocess.check_output(args.split()) - except subprocess.CalledProcessError as e: - print("===========================================================") - print(e.output) - print("===========================================================") - raise Exception( - "Black style check failed; please format the code using:\n" - " python -m black -l 100 setup.py etc tests", - e.output, - ) from e diff --git a/tests/test_types.py b/tests/test_types.py index 6274b7e..4dc71f3 100644 --- a/tests/test_types.py +++ b/tests/test_types.py @@ -15,7 +15,7 @@ from filetype_test_utils import load_filetype_tests test_env = FileDrivenTesting() -test_env.test_data_dir = os.path.join(os.path.dirname(__file__), 'data') +test_env.test_data_dir = os.path.join(os.path.dirname(__file__), "data") class TestFileTypesDataDriven(FileDrivenTesting): @@ -24,8 +24,7 @@ class TestFileTypesDataDriven(FileDrivenTesting): build_tests( - filetype_tests=load_filetype_tests( - os.path.join(test_env.test_data_dir, 'filetest')), + filetype_tests=load_filetype_tests(os.path.join(test_env.test_data_dir, "filetest")), clazz=TestFileTypesDataDriven, test_data_dir=test_env.test_data_dir, regen=False,