From e893bb5ee77f970d5ec112de3c3a804f28fae573 Mon Sep 17 00:00:00 2001 From: Steve Arnold Date: Sat, 13 Feb 2021 19:42:08 -0800 Subject: [PATCH 01/14] new: switch to cmake build setup and pep517 packaging (#1) * decouple from submodule and use system libdatrie/dev-package instead * chg: add test from installed wheel (ci only on ubuntu for now) --- .github/workflows/ci.yml | 57 +++++++++ .github/workflows/vs_env.bat | 10 ++ .gitignore | 3 +- .pep8speaks.yml | 16 +++ CMakeLists.txt | 42 +++++++ MANIFEST.in | 12 +- README.rst | 11 +- appveyor.yml => appveyor_yml.disabled | 0 cmake/FindCython.cmake | 44 +++++++ libdatrie | 2 +- pyproject.toml | 9 +- setup.cfg | 54 ++++++++ setup.py | 175 +++++++++++++++++--------- src/CMakeLists.txt | 74 +++++++++++ src/cdatrie.pxd | 24 ++-- src/datrie.pyx | 40 +++--- tox-bench.ini | 3 +- tox.ini | 96 +++++++++++++- .travis.yml => travis_yml.disabled | 0 19 files changed, 558 insertions(+), 114 deletions(-) create mode 100644 .github/workflows/ci.yml create mode 100644 .github/workflows/vs_env.bat create mode 100644 .pep8speaks.yml create mode 100644 CMakeLists.txt rename appveyor.yml => appveyor_yml.disabled (100%) create mode 100644 cmake/FindCython.cmake create mode 100644 src/CMakeLists.txt rename .travis.yml => travis_yml.disabled (100%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml new file mode 100644 index 0000000..5f21835 --- /dev/null +++ b/.github/workflows/ci.yml @@ -0,0 +1,57 @@ +name: CI + +on: + push: + branches: [ master ] + pull_request: + branches: [ master ] + +jobs: + build: + runs-on: ${{ matrix.os }} + defaults: + run: + shell: bash + env: + OS: ${{ matrix.os }} + PYTHON: ${{ matrix.python-version }} + PIP_DOWNLOAD_CACHE: ${{ github.workspace }}/../.pip_download_cache + strategy: + fail-fast: true + matrix: + os: [ubuntu-20.04] + python-version: [3.6, 3.7, 3.8, 3.9] + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Add requirements + run: | + python -m pip install --upgrade pip + pip install tox tox-gh-actions + + - name: apt helper action + if: runner.os == 'Linux' + uses: ryankurte/action-apt@v0.2.0 + with: + # architectures to pass to dpkg --add-architecture + #architectures: # optional + packages: libdatrie-dev pybind11-dev ninja-build + + - name: Test in place + run: | + tox -e py + + - name: Build dist pkgs + run: | + tox -e deploy + + - name: Check wheel + run: | + tox -e check diff --git a/.github/workflows/vs_env.bat b/.github/workflows/vs_env.bat new file mode 100644 index 0000000..81bf72f --- /dev/null +++ b/.github/workflows/vs_env.bat @@ -0,0 +1,10 @@ +@echo off + +SET VSWHERE="C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere" + +:: See https://github.com/microsoft/vswhere/wiki/Find-VC +for /f "usebackq delims=*" %%i in (`%VSWHERE% -latest -property installationPath`) do ( + call "%%i\VC\Auxiliary\Build\vcvarsall.bat" %* +) + +bash -c "export -p > env.sh" diff --git a/.gitignore b/.gitignore index cc12d99..4825ce1 100644 --- a/.gitignore +++ b/.gitignore @@ -13,4 +13,5 @@ build/ src/datrie.c src/cdatrie.c src/stdio_ext.c -wheelhouse \ No newline at end of file +wheelhouse +*.patch diff --git a/.pep8speaks.yml b/.pep8speaks.yml new file mode 100644 index 0000000..a1a7901 --- /dev/null +++ b/.pep8speaks.yml @@ -0,0 +1,16 @@ +scanner: + linter: flake8 # Other option is pycodestyle + +no_blank_comment: False # If True, no comment is made on PR without any errors. 
+descending_issues_order: True # If True, PEP 8 issues in message will be displayed in descending order of line numbers in the file + +[flake8] +exclude = + .git, + __pycache__, + tests, + bench, + build, + dist + +max-line-length = 85 diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100644 index 0000000..ced2ce4 --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,42 @@ +cmake_minimum_required(VERSION 3.15...3.18) + +project(datrie LANGUAGES C CXX) + +option(PY_DEBUG "Set if python being linked is a Py_DEBUG build" OFF) + +if(NOT CMAKE_BUILD_TYPE) + set(CMAKE_BUILD_TYPE "RelWithDebInfo" CACHE STRING + "Default build type: RelWithDebInfo" FORCE) +endif() + +include(GNUInstallDirs) + +find_package(pybind11 CONFIG) + +if(pybind11_FOUND) + message(STATUS "System pybind11 found") +else() + message(STATUS "Fetching pybind11 from github") + # Fetch pybind11 + include(FetchContent) + + FetchContent_Declare( + pybind11 + GIT_REPOSITORY https://github.com/pybind/pybind11 + GIT_TAG v2.6.1 + ) + FetchContent_MakeAvailable(pybind11) +endif() + +find_package(Threads REQUIRED) + +if (${PYTHON_IS_DEBUG}) + set(PY_DEBUG ON) +endif() + +set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} + ${PROJECT_SOURCE_DIR}/cmake/) + +include_directories(${PROJECT_SOURCE_DIR}/src) + +add_subdirectory(src) diff --git a/MANIFEST.in b/MANIFEST.in index 6d6c78b..23ef592 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,15 +1,13 @@ -include README.rst -include CHANGES.rst -include COPYING +global-include CMakeLists.txt *.cmake +include README.rst CHANGES.rst COPYING include tox.ini include tox-bench.ini include update_c.sh -recursive-include libdatrie *.h -recursive-include libdatrie *.c -include tests/words100k.txt.zip +include bench/words100k.txt.zip recursive-include tests *.py include src/datrie.pyx include src/cdatrie.pxd include src/stdio_ext.pxd -exclude src/datrie.c \ No newline at end of file +exclude src/datrie.c +global-exclude *.py[cod] __pycache__ diff --git a/README.rst b/README.rst index 274cdff..f29671d 100644 --- a/README.rst +++ b/README.rst @@ -1,11 +1,10 @@ -datrie |travis| |appveyor| -========================== +datrie |github| +=============== -.. |travis| image:: https://travis-ci.org/pytries/datrie.svg - :target: https://travis-ci.org/pytries/datrie +.. |github| image:: https://img.shields.io/github/workflow/status/freepn/datrie/ci + :target: https://github.com/freepn/datrie/actions?query=workflow:ci + :alt: GitHub CI Build Status -.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/6bpvhllpjhlau7x0?svg=true - :target: https://ci.appveyor.com/project/superbobry/datrie Super-fast, efficiently stored Trie for Python (2.x and 3.x). Uses `libdatrie`_. diff --git a/appveyor.yml b/appveyor_yml.disabled similarity index 100% rename from appveyor.yml rename to appveyor_yml.disabled diff --git a/cmake/FindCython.cmake b/cmake/FindCython.cmake new file mode 100644 index 0000000..04aed1f --- /dev/null +++ b/cmake/FindCython.cmake @@ -0,0 +1,44 @@ +# Find the Cython compiler. +# +# This code sets the following variables: +# +# CYTHON_EXECUTABLE +# +# See also UseCython.cmake + +#============================================================================= +# Copyright 2011 Kitware, Inc. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +#============================================================================= + +# Use the Cython executable that lives next to the Python executable +# if it is a local installation. +find_package( PythonInterp ) +if( PYTHONINTERP_FOUND ) + get_filename_component( _python_path ${PYTHON_EXECUTABLE} PATH ) + find_program( CYTHON_EXECUTABLE + NAMES cython cython.bat cython3 + HINTS ${_python_path} + ) +else() + find_program( CYTHON_EXECUTABLE + NAMES cython cython.bat cython3 + ) +endif() + + +include( FindPackageHandleStandardArgs ) +FIND_PACKAGE_HANDLE_STANDARD_ARGS( Cython REQUIRED_VARS CYTHON_EXECUTABLE ) + +mark_as_advanced( CYTHON_EXECUTABLE ) diff --git a/libdatrie b/libdatrie index d1dfdb8..d1db08a 160000 --- a/libdatrie +++ b/libdatrie @@ -1 +1 @@ -Subproject commit d1dfdb831093892541cae46eba82c46aec94f726 +Subproject commit d1db08ac1c76f54ba23d63665437473788c999f3 diff --git a/pyproject.toml b/pyproject.toml index 6d1a7c2..780763a 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,6 +1,11 @@ [build-system] requires = [ - "setuptools>=40.8.0", + "setuptools>=42", "wheel", - "Cython" + "Cython>=0.20", + "pybind11>=2.6.0", + "ninja; sys_platform != 'Windows'", + "cmake>=3.15", ] + +build-backend = "setuptools.build_meta" diff --git a/setup.cfg b/setup.cfg index b7e4789..dadbc5e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,2 +1,56 @@ +[metadata] +name = datrie +author = Mikhail Korobov +author_email = kmike84@gmail.com +maintainer = Steve Arnold +maintainer_email = nerdboy@gentoo.org +description = Super-fast, efficiently stored Trie for Python +long_description = file: README.rst +long_description_content_type = text/x-rst; charset=UTF-8 +url = https://github.com/sarnold/datrie +license = LGPLv2+ +license_files = COPYING +classifiers = + Development Status :: 4 - Beta + License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+) + Programming Language :: Cython + Programming Language :: Python :: 3.6 + Programming Language :: Python :: Implementation :: CPython + Intended Audience :: Developers + Intended Audience :: Science/Research + Topic :: Software Development :: Libraries :: Python Modules + Topic :: Scientific/Engineering :: Information Analysis + Topic :: Text Processing :: Linguistic + +[options] +python_requires = >=3.6 + +zip_safe = False + +[options.extras_require] +test = + pytest + hypothesis + pytest-flake8 + [aliases] test=pytest + +[tool:pytest] +minversion = 6.0 +testpaths = + tests + +[flake8] +exclude = + .git, + __pycache__, + tests, + bench, + build, + dist +filename = + *.pyx + *.pxd +max-line-length = 95 +select=E501,E302,E203,E111,E114,E221,E303,E128,E231,E126,E265,E305,E301,E127,E261,E271,E129,W291,E222,E241,E123,F403,C400,C401,C402,C403,C404,C405,C406,C407,C408,C409,C410,C411 diff --git a/setup.py b/setup.py index ac1c943..b3d8dd4 100755 --- a/setup.py +++ b/setup.py @@ -1,64 +1,121 @@ -#! 
/usr/bin/env python -"""Super-fast, efficiently stored Trie for Python.""" +# -*- coding: utf-8 -*- +# -import glob import os +import sys +import subprocess from setuptools import setup, Extension +from setuptools.command.build_ext import build_ext -from Cython.Build import cythonize - -LIBDATRIE_DIR = 'libdatrie' -LIBDATRIE_FILES = sorted(glob.glob(os.path.join(LIBDATRIE_DIR, "datrie", "*.c"))) - -DESCRIPTION = __doc__ -LONG_DESCRIPTION = open('README.rst').read() + open('CHANGES.rst').read() -LICENSE = 'LGPLv2+' - -CLASSIFIERS = [ - 'Development Status :: 4 - Beta', - 'Intended Audience :: Developers', - 'Intended Audience :: Science/Research', - 'License :: OSI Approved :: GNU Lesser General Public License v2 or later (LGPLv2+)', - 'Programming Language :: Cython', - 'Programming Language :: Python', - 'Programming Language :: Python :: 2', - 'Programming Language :: Python :: 2.7', - 'Programming Language :: Python :: 3', - 'Programming Language :: Python :: 3.4', - 'Programming Language :: Python :: 3.5', - 'Programming Language :: Python :: 3.6', - 'Programming Language :: Python :: 3.7', - 'Programming Language :: Python :: 3.8', - 'Programming Language :: Python :: Implementation :: CPython', - 'Topic :: Software Development :: Libraries :: Python Modules', - 'Topic :: Scientific/Engineering :: Information Analysis', - 'Topic :: Text Processing :: Linguistic' -] - -ext_modules = cythonize( - 'src/datrie.pyx', 'src/cdatrie.pxd', 'src/stdio_ext.pxd', - annotate=True, - include_path=[os.path.join(os.path.dirname(os.path.abspath(__file__)), "src")], - language_level=2 - ) - -for m in ext_modules: - m.include_dirs=[LIBDATRIE_DIR] - -setup(name="datrie", - version="0.8.2", - description=DESCRIPTION, - long_description=LONG_DESCRIPTION, - author='Mikhail Korobov', - author_email='kmike84@gmail.com', - license=LICENSE, - url='https://github.com/kmike/datrie', - classifiers=CLASSIFIERS, - libraries=[('datrie', { - "sources": LIBDATRIE_FILES, - "include_dirs": [LIBDATRIE_DIR]})], - ext_modules=ext_modules, - python_requires=">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*", - setup_requires=["pytest-runner", 'Cython>=0.28'], - tests_require=["pytest", "hypothesis"]) + +# update the version both here and in conda.recipe/meta.yaml +__version__ = '0.8.3.dev0' + +# Convert distutils Windows platform specifiers to CMake -A arguments +PLAT_TO_CMAKE = { + "win32": "Win32", + "win-amd64": "x64", + "win-arm32": "ARM", + "win-arm64": "ARM64", +} + +# A CMakeExtension needs a sourcedir instead of a file list. +class CMakeExtension(Extension): + def __init__(self, name, sourcedir=""): + Extension.__init__(self, name, sources=[], libraries=['datrie']) + self.sourcedir = os.path.abspath(sourcedir) + + +class CMakeBuild(build_ext): + + def build_extension(self, ext): + extdir = os.path.abspath(os.path.dirname(self.get_ext_fullpath(ext.name))) + + # required for auto-detection of auxiliary "native" libs + if not extdir.endswith(os.path.sep): + extdir += os.path.sep + + # Set a sensible default build type for packaging + if "CMAKE_BUILD_OVERRIDE" not in os.environ: + cfg = "Debug" if self.debug else "RelWithDebInfo" + else: + cfg = os.environ.get("CMAKE_BUILD_OVERRIDE", "") + + # CMake lets you override the generator - we need to check this. + # Can be set with Conda-Build, for example. + cmake_generator = os.environ.get("CMAKE_GENERATOR", "") + + # Set Python_EXECUTABLE instead if you use PYBIND11_FINDPYTHON + # SCM_VERSION_INFO shows you how to pass a value into the C++ code + # from Python. 
+ cmake_args = [ + "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={}".format(extdir), + "-DPYTHON_EXECUTABLE={}".format(sys.executable), + "-DSCM_VERSION_INFO={}".format(__version__), + "-DCMAKE_BUILD_TYPE={}".format(cfg), # not used on MSVC, but no harm + ] + build_args = ["--verbose"] + + # CMake also lets you provide a toolchain file. + # Can be set in CI build environments for example. + cmake_toolchain_file = os.environ.get("CMAKE_TOOLCHAIN_FILE", "") + if cmake_toolchain_file: + cmake_args += ["-DCMAKE_TOOLCHAIN_FILE={}".format(cmake_toolchain_file)] + + if self.compiler.compiler_type != "msvc": + # Using Ninja-build since it a) is available as a wheel and b) + # multithreads automatically. MSVC would require all variables be + # exported for Ninja to pick it up, which is a little tricky to do. + # Users can override the generator with CMAKE_GENERATOR in CMake + # 3.15+. + if not cmake_generator: + cmake_args += ["-GNinja"] + + else: + + # Single config generators are handled "normally" + single_config = any(x in cmake_generator for x in {"NMake", "Ninja"}) + + # CMake allows an arch-in-generator style for backward compatibility + contains_arch = any(x in cmake_generator for x in {"ARM", "Win64"}) + + # Specify the arch if using MSVC generator, but only if it doesn't + # contain a backward-compatibility arch spec already in the + # generator name. + if not single_config and not contains_arch: + cmake_args += ["-A", PLAT_TO_CMAKE[self.plat_name]] + + # Multi-config generators have a different way to specify configs + if not single_config: + cmake_args += [ + "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY_{}={}".format(cfg.upper(), extdir) + ] + build_args += ["--config", cfg] + + # Set CMAKE_BUILD_PARALLEL_LEVEL to control the parallel build level + # across all generators. + if "CMAKE_BUILD_PARALLEL_LEVEL" not in os.environ: + # self.parallel is a Python 3 only way to set parallel jobs by hand + # using -j in the build_ext call, not supported by pip or PyPA-build. + if hasattr(self, "parallel") and self.parallel: + # CMake 3.12+ only. 
+ build_args += ["-j{}".format(self.parallel)] + + if not os.path.exists(self.build_temp): + os.makedirs(self.build_temp) + + subprocess.check_call( + ["cmake", ext.sourcedir] + cmake_args, cwd=self.build_temp + ) + subprocess.check_call( + ["cmake", "--build", "."] + build_args, cwd=self.build_temp + ) + + +setup( + version=__version__, + ext_modules=[CMakeExtension('datrie')], + cmdclass={'build_ext': CMakeBuild}, + zip_safe=False, +) diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt new file mode 100644 index 0000000..8b044b8 --- /dev/null +++ b/src/CMakeLists.txt @@ -0,0 +1,74 @@ +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +find_package(Cython REQUIRED) + +set(cython_module datrie) + +set(datrie_include_dir "${PROJECT_SOURCE_DIR}/src") +set(cython_output "${CMAKE_CURRENT_SOURCE_DIR}/${cython_module}.c") +set(cython_src ${cython_module}.pyx) +# Track cython sources +file(GLOB cy_srcs *.pyx *.pxd) + +# .pyx -> .cpp +add_custom_command(OUTPUT ${cython_output} + COMMAND ${CYTHON_EXECUTABLE} + -a -2 + --fast-fail + -I ${datrie_include_dir} + --output-file ${cython_output} ${cython_src} + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + DEPENDS ${cy_srcs} + COMMENT "Cythonizing extension ${cython_src}") + +add_library(${cython_module} MODULE ${cython_output}) + +set_target_properties(${cython_module} + PROPERTIES PREFIX "${PYTHON_MODULE_PREFIX}" + SUFFIX "${PYTHON_MODULE_EXTENSION}") + +target_include_directories(${cython_module} PUBLIC + ${PYTHON_INCLUDE_DIRS}) + +target_compile_definitions(${cython_module} PRIVATE VERSION_INFO=${SCM_VERSION_INFO}) + +# here we get to jump through some hoops to find libdatrie on the manylinux +# docker CI images, etc +find_package(datrie CONFIG NAMES datrie) + +if(datrie_FOUND) + message(STATUS "System datrie found") + target_link_libraries(${cython_module} PRIVATE datrie) +elseif(NOT MSVC) + message(STATUS "Trying PkgConfig") + find_package(PkgConfig REQUIRED) + pkg_check_modules(DATRIE datrie-0.2 REQUIRED IMPORTED_TARGET) + + if(DATRIE_FOUND) + include_directories(${DATRIE_INCLUDE_DIRS}) + target_link_libraries(${cython_module} PRIVATE PkgConfig::DATRIE) + else() + # last resort for manylinux: just try it + message(STATUS "Blindly groping instead") + link_directories("/usr/lib64" "/usr/lib") + target_link_libraries(${cython_module} PRIVATE "libdatrie.so") + endif() +else() + # even though we used vcpkg, we get to do the manual dance with windows + find_path(DATRIE_INCLUDE_DIRS datrie/triedefs.h) + find_library(DATRIE_LIBS NAMES datrie libdatrie) + target_include_directories(${cython_module} PUBLIC ${DATRIE_INCLUDE_DIRS}) + target_link_libraries(${cython_module} PRIVATE ${DATRIE_LIBS}) +endif() + +if(APPLE) + # macos/appleclang needs this + target_link_libraries(${cython_module} PRIVATE pybind11::module) + target_link_libraries(${cython_module} PRIVATE pybind11::python_link_helper) +endif() + +if(MSVC) + target_compile_options(${cython_module} PRIVATE /utf-8) + target_link_libraries(${cython_module} PRIVATE ${PYTHON_LIBRARIES}) + target_link_libraries(${cython_module} PRIVATE pybind11::windows_extras) +endif() diff --git a/src/cdatrie.pxd b/src/cdatrie.pxd index b80ce99..f752054 100644 --- a/src/cdatrie.pxd +++ b/src/cdatrie.pxd @@ -1,13 +1,13 @@ # cython: profile=False from libc cimport stdio -cdef extern from "../libdatrie/datrie/triedefs.h": - ctypedef int AlphaChar # it should be utf32 letter +cdef extern from "datrie/triedefs.h": + ctypedef int AlphaChar # it should be utf32 letter ctypedef unsigned char TrieChar # 1 byte ctypedef int TrieIndex 
ctypedef int TrieData # int -cdef extern from "../libdatrie/datrie/alpha-map.h": +cdef extern from "datrie/alpha-map.h": struct AlphaMap: pass @@ -19,8 +19,7 @@ cdef extern from "../libdatrie/datrie/alpha-map.h": int alpha_map_add_range (AlphaMap *alpha_map, AlphaChar begin, AlphaChar end) int alpha_char_strlen (AlphaChar *str) - -cdef extern from "../libdatrie/datrie/trie.h": +cdef extern from "datrie/trie.h": ctypedef struct Trie: pass @@ -31,8 +30,6 @@ cdef extern from "../libdatrie/datrie/trie.h": ctypedef struct TrieIterator: pass - ctypedef int TrieData - ctypedef bint (*TrieEnumFunc) (AlphaChar *key, TrieData key_data, void *user_data) @@ -56,7 +53,6 @@ cdef extern from "../libdatrie/datrie/trie.h": bint trie_is_dirty (Trie *trie) - # =========== GENERAL QUERY OPERATIONS ========= bint trie_retrieve (Trie *trie, AlphaChar *key, TrieData *o_data) @@ -73,7 +69,6 @@ cdef extern from "../libdatrie/datrie/trie.h": TrieState * trie_root (Trie *trie) - # ========= TRIE STATE =============== TrieState * trie_state_clone (TrieState *s) @@ -98,15 +93,14 @@ cdef extern from "../libdatrie/datrie/trie.h": TrieData trie_state_get_data (TrieState *s) - # ============== ITERATION =================== - TrieIterator* trie_iterator_new (TrieState *s) + TrieIterator * trie_iterator_new (TrieState *s) - void trie_iterator_free (TrieIterator *iter) + void trie_iterator_free (TrieIterator *iter) - bint trie_iterator_next (TrieIterator *iter) + bint trie_iterator_next (TrieIterator *iter) - AlphaChar * trie_iterator_get_key (TrieIterator *iter) + AlphaChar* trie_iterator_get_key (TrieIterator *iter) - TrieData trie_iterator_get_data (TrieIterator *iter) + TrieData trie_iterator_get_data (TrieIterator *iter) diff --git a/src/datrie.pyx b/src/datrie.pyx index 0f91a41..cb941b5 100644 --- a/src/datrie.pyx +++ b/src/datrie.pyx @@ -26,14 +26,15 @@ try: except ImportError: import pickle + class DatrieError(Exception): pass + RAISE_KEY_ERROR = object() RERAISE_KEY_ERROR = object() DELETED_OBJECT = object() - cdef class BaseTrie: """ Wrapper for libdatrie's trie. @@ -312,7 +313,7 @@ cdef class BaseTrie: for char in key: if not cdatrie.trie_state_walk(state, char): return - if cdatrie.trie_state_is_terminal(state): # word is found + if cdatrie.trie_state_is_terminal(state): # word is found yield key[:index], cdatrie.trie_state_get_data(state) index += 1 finally: @@ -398,7 +399,7 @@ cdef class BaseTrie: for char in key: if not cdatrie.trie_state_walk(state, char): break - if cdatrie.trie_state_is_terminal(state): # word is found + if cdatrie.trie_state_is_terminal(state): # word is found result.append( (key[:index], cdatrie.trie_state_get_data(state)) @@ -426,7 +427,7 @@ cdef class BaseTrie: for char in key: if not cdatrie.trie_state_walk(state, char): break - if cdatrie.trie_state_is_terminal(state): # word is found + if cdatrie.trie_state_is_terminal(state): # word is found result.append(cdatrie.trie_state_get_data(state)) return result finally: @@ -804,7 +805,7 @@ cdef class Trie(BaseTrie): - otherwise raises ``KeyError``. 
""" cdef res = self._longest_prefix_item(key, RERAISE_KEY_ERROR) - if res is RERAISE_KEY_ERROR: # error + if res is RERAISE_KEY_ERROR: # error if default is RAISE_KEY_ERROR: raise KeyError(key) return default @@ -821,7 +822,7 @@ cdef class Trie(BaseTrie): - otherwise raise ``KeyError`` """ cdef res = self._longest_prefix_value(key, RERAISE_KEY_ERROR) - if res is RERAISE_KEY_ERROR: # error + if res is RERAISE_KEY_ERROR: # error if default is RAISE_KEY_ERROR: raise KeyError(key) return default @@ -923,7 +924,7 @@ cdef class BaseState(_TrieState): cdef class State(_TrieState): - def __cinit__(self, Trie trie): # this is overriden for extra type check + def __cinit__(self, Trie trie): # this is overriden for extra type check self._state = cdatrie.trie_root(trie._c_trie) if self._state is NULL: raise MemoryError() @@ -939,7 +940,7 @@ cdef class _TrieIterator: cdef _TrieState _root def __cinit__(self, _TrieState state): - self._root = state # prevent garbage collection of state + self._root = state # prevent garbage collection of state self._iter = cdatrie.trie_iterator_new(state._state) if self._iter is NULL: raise MemoryError() @@ -973,8 +974,8 @@ cdef class Iterator(_TrieIterator): cdatrie.TrieIterator wrapper. It can be used for custom datrie.Trie traversal. """ - def __cinit__(self, State state): # this is overriden for extra type check - self._root = state # prevent garbage collection of state + def __cinit__(self, State state): # this is overriden for extra type check + self._root = state # prevent garbage collection of state self._iter = cdatrie.trie_iterator_new(state._state) if self._iter is NULL: raise MemoryError() @@ -998,14 +999,14 @@ cdef (cdatrie.Trie* ) _load_from_file(f) except NULL: return trie -#cdef (cdatrie.Trie*) _load_from_file(path) except NULL: -# str_path = path.encode(sys.getfilesystemencoding()) -# cdef char* c_path = str_path -# cdef cdatrie.Trie* trie = cdatrie.trie_new_from_file(c_path) -# if trie is NULL: -# raise DatrieError("Can't load trie from file") +# cdef (cdatrie.Trie*) _load_from_file(path) except NULL: +# str_path = path.encode(sys.getfilesystemencoding()) +# cdef char* c_path = str_path +# cdef cdatrie.Trie* trie = cdatrie.trie_new_from_file(c_path) +# if trie is NULL: +# raise DatrieError("Can't load trie from file") # -# return trie +# return trie # ============================ AlphaMap & utils ================================ @@ -1120,7 +1121,10 @@ cdef unicode unicode_from_alpha_char(cdatrie.AlphaChar* key, int len=0): if length == 0: length = cdatrie.alpha_char_strlen(key)*sizeof(cdatrie.AlphaChar) cdef char* c_str = key - return c_str[:length].decode('utf_32_le') + if sys.byteorder == 'little': + return c_str[:length].decode('utf_32_le') + else: + return c_str[:length].decode('utf_32_be') def to_ranges(lst): diff --git a/tox-bench.ini b/tox-bench.ini index 426881f..c62ef03 100644 --- a/tox-bench.ini +++ b/tox-bench.ini @@ -1,5 +1,6 @@ [tox] -envlist = py27,py34,py35,py36,py37 +envlist = py3{5,6,7,8,9} +skip_missing_interpreters = true [testenv] commands= diff --git a/tox.ini b/tox.ini index 0f5f5f0..7230669 100644 --- a/tox.ini +++ b/tox.ini @@ -1,10 +1,98 @@ [tox] -envlist = py27,py34,py35,py36,py37,py38 +envlist = py3{6,7,8,9} +skip_missing_interpreters = true +isolated_build = true +skipsdist=True + +[gh-actions] +3.6 = py36 +3.7 = py37 +3.8 = py38 +3.9 = py39 [testenv] +passenv = + CI + CC + CXX + CMAKE_BUILD_OVERRIDE + CMAKE_TOOLCHAIN_FILE + CMAKE_GENERATOR + PIP_DOWNLOAD_CACHE + +setenv = + PYTHONPATH=. 
+ deps = - hypothesis + pip>=20.0.1 + cython>=0.20 + path pytest - cython + hypothesis + +commands = + python -c "import path; path.Path('build').rmtree_p()" + python setup.py build_ext --inplace + python -m pytest [] + +[testenv:dev] +skip_install = true + +passenv = + CI + CC + CXX + CMAKE_BUILD_OVERRIDE + CMAKE_TOOLCHAIN_FILE + CMAKE_GENERATOR + PIP_DOWNLOAD_CACHE + +deps = + pip>=20.0.1 + path + commands= - py.test [] + python -c "import path; path.Path('build').rmtree_p()" + pip install -e .[test] + flake8 src/ + pytest -v + +[testenv:deploy] +passenv = + pythonLocation + CI + CC + CXX + CMAKE_BUILD_OVERRIDE + CMAKE_TOOLCHAIN_FILE + CMAKE_GENERATOR + PIP_DOWNLOAD_CACHE + +allowlist_externals = bash + +deps = + pip>=20.0.1 + pep517 + twine + path + +commands = + python -c "import path; path.Path('build').rmtree_p()" + python -m pep517.build . + twine check dist/* + +[testenv:check] +skip_install = true +passenv = + CI + +deps = + pip>=20.0.1 + pytest + hypothesis + +commands_pre = + pip install datrie --force-reinstall --pre --prefer-binary -f dist/ + +commands = + pytest -v diff --git a/.travis.yml b/travis_yml.disabled similarity index 100% rename from .travis.yml rename to travis_yml.disabled From 09a3818e78fdac28f59cfa946d867c874c946da7 Mon Sep 17 00:00:00 2001 From: Steve Arnold Date: Tue, 16 Feb 2021 09:41:05 -0800 Subject: [PATCH 02/14] new: switch to cmake build with fetchcontent fallback for library deps (#2) * add cmake module to find libdatrie, remove git submodule * remove vcpkg install from macos, only use lib pkg on linux * add more workflows for manylinux wheels and release --- .gitattributes | 11 +++ .github/workflows/ci.yml | 32 +++++- .github/workflows/release.yml | 143 +++++++++++++++++++++++++++ .github/workflows/vs_env.bat | 10 -- .github/workflows/wheel-check.sh | 17 ++++ .github/workflows/wheels.yml | 92 +++++++++++++++++ .gitmodules | 3 - CMakeLists.txt | 42 ++++++-- MANIFEST.in | 12 +-- cmake/{ => modules}/FindCython.cmake | 0 cmake/modules/FindDatrie.cmake | 53 ++++++++++ libdatrie | 1 - src/CMakeLists.txt | 48 ++++----- tox.ini | 14 ++- 14 files changed, 410 insertions(+), 68 deletions(-) create mode 100644 .gitattributes create mode 100644 .github/workflows/release.yml delete mode 100644 .github/workflows/vs_env.bat create mode 100755 .github/workflows/wheel-check.sh create mode 100644 .github/workflows/wheels.yml delete mode 100644 .gitmodules rename cmake/{ => modules}/FindCython.cmake (100%) create mode 100644 cmake/modules/FindDatrie.cmake delete mode 160000 libdatrie diff --git a/.gitattributes b/.gitattributes new file mode 100644 index 0000000..bb3f296 --- /dev/null +++ b/.gitattributes @@ -0,0 +1,11 @@ +# Set default behaviour to automatically normalize line endings. +* text=auto + +# Force batch scripts to always use CRLF line endings so that if a repo is +# accessed in Windows via a file share from Linux, the scripts will work. +*.{cmd,[cC][mM][dD]} text eol=crlf +*.{bat,[bB][aA][tT]} text eol=crlf + +# Force bash scripts to always use LF line endings so that if a repo is +# accessed in Unix via a file share from Windows, the scripts will work. 
+*.sh text eol=lf diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 5f21835..76a4e1a 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -15,14 +15,20 @@ jobs: env: OS: ${{ matrix.os }} PYTHON: ${{ matrix.python-version }} + PYTHONIOENCODING: utf-8 PIP_DOWNLOAD_CACHE: ${{ github.workspace }}/../.pip_download_cache strategy: - fail-fast: true + fail-fast: false matrix: - os: [ubuntu-20.04] + os: [ubuntu-20.04, macos-latest, windows-latest] python-version: [3.6, 3.7, 3.8, 3.9] steps: + - name: Set git crlf/eol + run: | + git config --global core.autocrlf false + git config --global core.eol lf + - uses: actions/checkout@v2 with: fetch-depth: 0 @@ -31,12 +37,28 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Add requirements + - name: Add pip requirements run: | python -m pip install --upgrade pip pip install tox tox-gh-actions - - name: apt helper action + - name: Install macos deps with brew + if: runner.os == 'macOS' + run: | + brew install ninja + + - name: Prepare compiler environment for ${{ matrix.os }} + if: runner.os == 'Windows' + uses: ilammy/msvc-dev-cmd@v1 + with: + arch: x64 + + - name: Set have package true for ${{ matrix.os }} + if: runner.os == 'Linux' + run: | + echo "HAVE_LIBDATRIE_PKG=TRUE" >> $GITHUB_ENV + + - name: Install deps with apt helper action if: runner.os == 'Linux' uses: ryankurte/action-apt@v0.2.0 with: @@ -45,6 +67,8 @@ jobs: packages: libdatrie-dev pybind11-dev ninja-build - name: Test in place + # windows does not like build_ext -i or removing previous build + if: runner.os != 'Windows' run: | tox -e py diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml new file mode 100644 index 0000000..5892b9f --- /dev/null +++ b/.github/workflows/release.yml @@ -0,0 +1,143 @@ +name: Release + +on: + push: + # release on tag push + tags: + - '*' + +jobs: + cibw_wheels: + name: Build wheels on ${{ matrix.os }} for Python + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04, macos-latest, windows-latest] + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v2 + name: Install Python + with: + python-version: '3.8' + + - name: Prepare compiler environment for Windows + if: runner.os == 'Windows' + uses: ilammy/msvc-dev-cmd@v1 + with: + arch: amd64 + + - name: Install cibuildwheel + run: | + python -m pip install --upgrade pip + python -m pip install cibuildwheel==1.7.1 + + - name: Build wheels + env: + CIBW_MANYLINUX_X86_64_IMAGE: quay.io/pypa/manylinux2010_x86_64:latest + CIBW_MANYLINUX_I686_IMAGE: quay.io/pypa/manylinux2010_i686:latest + CIBW_BUILD: cp36-* cp37-* cp38-* cp39-* + CIBW_SKIP: "*-win32" + CIBW_BEFORE_ALL_LINUX: > + yum -y -q --enablerepo=extras install epel-release + && yum install -y ninja-build + CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel show {wheel} && auditwheel repair -w {dest_dir} {wheel}" + CIBW_BEFORE_ALL_MACOS: > + brew install pybind11 ninja + CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=10.09 + CIBW_REPAIR_WHEEL_COMMAND_MACOS: "pip uninstall -y delocate && pip install git+https://github.com/Chia-Network/delocate.git && delocate-listdeps {wheel} && delocate-wheel -w {dest_dir} -v {wheel}" + CIBW_TEST_COMMAND: python -c "import datrie" + run: | + python -m cibuildwheel --output-dir wheelhouse + + - uses: actions/upload-artifact@v2 + with: + path: ./wheelhouse/*.whl + + sdist: + name: Build source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + + - uses: 
actions/setup-python@v2 + name: Install Python + with: + python-version: '3.7' + + - name: Build sdist + run: | + pip install pep517 + python -m pep517.build -s . + + - uses: actions/upload-artifact@v2 + with: + path: dist/*.tar.gz + + create_release: + needs: [cibw_wheels, sdist] + runs-on: ubuntu-20.04 + + steps: + - name: Get version + id: get_version + run: | + echo "VERSION=${GITHUB_REF/refs\/tags\//}" >> $GITHUB_ENV + echo ${{ env.VERSION }} + + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v2 + name: Install Python + with: + python-version: 3.7 + + # download all artifacts to project dir + - uses: actions/download-artifact@v2 + + - name: Install gitchangelog + run: | + python -m pip install https://github.com/freepn/gitchangelog/archive/3.0.5.tar.gz + + - name: Generate changes file + run: | + export GITCHANGELOG_CONFIG_FILENAME=$(get-rcpath) + bash -c 'gitchangelog $(git tag --sort=taggerdate | tail -n2 | head -n1)..${{ env.VERSION }} > CHANGES.md' + + - name: Create draft release + id: create_release + uses: softprops/action-gh-release@v1 + env: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + with: + tag_name: ${{ env.VERSION }} + name: Release v${{ env.VERSION }} + body_path: CHANGES.md + draft: true + prerelease: true + # uncomment below to upload wheels to github releases + files: wheels/datrie*.whl + + #upload_pypi: + #needs: [cibw_wheels, sdist] + #runs-on: ubuntu-latest + ## upload to PyPI on every tag starting with 'v' + #if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') + ## alternatively, to publish when a GitHub Release is created, use the following rule: + ## if: github.event_name == 'release' && github.event.action == 'published' + #steps: + #- uses: actions/download-artifact@v2 + #with: + #name: artifact + #path: dist + + #- uses: pypa/gh-action-pypi-publish@master + #with: + #user: __token__ + #password: ${{ secrets.pypi_password }} + diff --git a/.github/workflows/vs_env.bat b/.github/workflows/vs_env.bat deleted file mode 100644 index 81bf72f..0000000 --- a/.github/workflows/vs_env.bat +++ /dev/null @@ -1,10 +0,0 @@ -@echo off - -SET VSWHERE="C:\Program Files (x86)\Microsoft Visual Studio\Installer\vswhere" - -:: See https://github.com/microsoft/vswhere/wiki/Find-VC -for /f "usebackq delims=*" %%i in (`%VSWHERE% -latest -property installationPath`) do ( - call "%%i\VC\Auxiliary\Build\vcvarsall.bat" %* -) - -bash -c "export -p > env.sh" diff --git a/.github/workflows/wheel-check.sh b/.github/workflows/wheel-check.sh new file mode 100755 index 0000000..dfe15b1 --- /dev/null +++ b/.github/workflows/wheel-check.sh @@ -0,0 +1,17 @@ +#!/usr/bin/env bash + +set -euxo pipefail + +EXPECTED_WHEEL_COUNT=$1 + +if ! [ -n $EXPECTED_WHEEL_COUNT ]; then + exit 0 +fi + +WHEELS=$(find . 
-maxdepth 3 -name \*.whl) +if [ $(echo $WHEELS | wc -w) -ne $EXPECTED_WHEEL_COUNT ]; then + echo "Error: Expected $EXPECTED_WHEEL_COUNT wheels" + exit 1 +else + exit 0 +fi diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml new file mode 100644 index 0000000..fe7d296 --- /dev/null +++ b/.github/workflows/wheels.yml @@ -0,0 +1,92 @@ +name: Wheels + +on: + workflow_dispatch: + pull_request: + push: + branches: + - master + +jobs: + cibw_wheels: + name: Build wheels on ${{ matrix.os }} for Python + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-20.04, macos-latest, windows-latest] + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + + - uses: actions/setup-python@v2 + name: Install Python + with: + python-version: '3.8' + + - name: Prepare compiler environment for Windows + if: runner.os == 'Windows' + uses: ilammy/msvc-dev-cmd@v1 + with: + arch: amd64 + + - name: Install cibuildwheel + run: | + python -m pip install --upgrade pip + python -m pip install cibuildwheel==1.7.1 + + - name: Build wheels + env: + CIBW_MANYLINUX_X86_64_IMAGE: quay.io/pypa/manylinux2010_x86_64:latest + CIBW_MANYLINUX_I686_IMAGE: quay.io/pypa/manylinux2010_i686:latest + CIBW_BUILD: cp36-* cp37-* cp38-* cp39-* + CIBW_SKIP: "*-win32" + CIBW_BEFORE_ALL_LINUX: > + yum -y -q --enablerepo=extras install epel-release + && yum install -y ninja-build + CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel show {wheel} && auditwheel repair -w {dest_dir} {wheel}" + CIBW_BEFORE_ALL_MACOS: > + brew install pybind11 ninja + CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=10.09 + CIBW_REPAIR_WHEEL_COMMAND_MACOS: "pip uninstall -y delocate && pip install git+https://github.com/Chia-Network/delocate.git && delocate-listdeps {wheel} && delocate-wheel -w {dest_dir} -v {wheel}" + CIBW_TEST_COMMAND: python -c "import datrie" + run: | + python -m cibuildwheel --output-dir wheelhouse + + - uses: actions/upload-artifact@v2 + with: + name: wheels + path: ./wheelhouse/*.whl + + check_artifacts: + name: Check artifacts are correct + needs: [cibw_wheels] + runs-on: ubuntu-20.04 + steps: + - uses: actions/checkout@v2 + - uses: actions/download-artifact@v2 + with: + name: wheels + + - name: Check number of downloaded artifacts + run: .github/workflows/wheel-check.sh 24 + + #upload_pypi: + #needs: [cibw_wheels, sdist] + #runs-on: ubuntu-latest + ## upload to PyPI on every tag starting with 'v' + #if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') + ## alternatively, to publish when a GitHub Release is created, use the following rule: + ## if: github.event_name == 'release' && github.event.action == 'published' + #steps: + #- uses: actions/download-artifact@v2 + #with: + #name: artifact + #path: dist + + #- uses: pypa/gh-action-pypi-publish@master + #with: + #user: __token__ + #password: ${{ secrets.pypi_password }} + diff --git a/.gitmodules b/.gitmodules deleted file mode 100644 index 3986f13..0000000 --- a/.gitmodules +++ /dev/null @@ -1,3 +0,0 @@ -[submodule "libdatrie"] - path = libdatrie - url = https://github.com/tlwg/libdatrie.git diff --git a/CMakeLists.txt b/CMakeLists.txt index ced2ce4..18e2530 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,15 +1,47 @@ cmake_minimum_required(VERSION 3.15...3.18) -project(datrie LANGUAGES C CXX) - option(PY_DEBUG "Set if python being linked is a Py_DEBUG build" OFF) +option(USE_LIBDATRIE_PKG "Use OS-provided libdatrie package") +if(DEFINED ENV{HAVE_LIBDATRIE_PKG}) + set(USE_LIBDATRIE_PKG "$ENV{HAVE_LIBDATRIE_PKG}") +endif() 
if(NOT CMAKE_BUILD_TYPE) set(CMAKE_BUILD_TYPE "RelWithDebInfo" CACHE STRING "Default build type: RelWithDebInfo" FORCE) endif() +project(datrie LANGUAGES C CXX) + include(GNUInstallDirs) +include(CheckIncludeFile) +include(CheckIncludeFileCXX) +include(CheckIncludeFiles) + +list(APPEND CMAKE_MODULE_PATH ${CMAKE_CURRENT_SOURCE_DIR}/cmake/modules) + +if(USE_LIBDATRIE_PKG) + find_package(Datrie) +endif() + +if(USE_LIBDATRIE_PKG AND NOT Datrie_FOUND) + find_package(PkgConfig) + pkg_check_modules(DATRIE datrie-0.2 IMPORTED_TARGET) +endif() + +if(NOT USE_LIBDATRIE_PKG) + message(STATUS "Fetching libdatrie from github") + # Fetch libdatrie + include(FetchContent) + + FetchContent_Declare( + libdatrie + GIT_REPOSITORY https://github.com/tlwg/libdatrie + GIT_TAG v0.2.13 + ) + FetchContent_MakeAvailable(libdatrie) + # this gets us the package source directory +endif() find_package(pybind11 CONFIG) @@ -30,13 +62,9 @@ endif() find_package(Threads REQUIRED) -if (${PYTHON_IS_DEBUG}) +if(${PYTHON_IS_DEBUG}) set(PY_DEBUG ON) endif() -set(CMAKE_MODULE_PATH ${CMAKE_MODULE_PATH} - ${PROJECT_SOURCE_DIR}/cmake/) - include_directories(${PROJECT_SOURCE_DIR}/src) - add_subdirectory(src) diff --git a/MANIFEST.in b/MANIFEST.in index 23ef592..4a82ba2 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -1,13 +1,9 @@ global-include CMakeLists.txt *.cmake include README.rst CHANGES.rst COPYING -include tox.ini -include tox-bench.ini -include update_c.sh +graft src +graft tests +include tox.ini tox-bench.ini update_c.sh include bench/words100k.txt.zip -recursive-include tests *.py - -include src/datrie.pyx -include src/cdatrie.pxd -include src/stdio_ext.pxd exclude src/datrie.c +exclude src/*.html global-exclude *.py[cod] __pycache__ diff --git a/cmake/FindCython.cmake b/cmake/modules/FindCython.cmake similarity index 100% rename from cmake/FindCython.cmake rename to cmake/modules/FindCython.cmake diff --git a/cmake/modules/FindDatrie.cmake b/cmake/modules/FindDatrie.cmake new file mode 100644 index 0000000..23167c9 --- /dev/null +++ b/cmake/modules/FindDatrie.cmake @@ -0,0 +1,53 @@ +# This module finds headers and libdatrie library. 
+# Results are reported in variables: +# Datrie_FOUND - True if headers and library were found +# Datrie_INCLUDE_DIRS - libdatrie include directories +# Datrie_LIBRARIES - libdatrie library to be linked + +find_path(Datrie_INCLUDE_DIR + NAMES datrie/triedefs.h + HINTS + ENV VCPKG_ROOT + PATH_SUFFIXES include include/datrie + PATHS + ~/Library/Frameworks + /Library/Frameworks + /opt/local + /opt + /usr + /usr/local/ +) + +find_library(Datrie_LIBRARY + NAMES datrie libdatrie + HINTS + ENV VCPKG_ROOT + PATH_SUFFIXES lib lib64 lib32 + PATHS + ~/Library/Frameworks + /Library/Frameworks + /opt/local + /opt + /usr + /usr/local/ +) + +mark_as_advanced(Datrie_INCLUDE_DIR Datrie_LIBRARY) + +include(FindPackageHandleStandardArgs) +find_package_handle_standard_args(Datrie + REQUIRED_VARS Datrie_LIBRARY Datrie_INCLUDE_DIR) + +if(Datrie_FOUND) + # need if _FOUND guard to allow project to autobuild; can't overwrite imported target even if bad + set(Datrie_INCLUDE_DIRS ${Datrie_INCLUDE_DIR}) + set(Datrie_LIBRARIES ${Datrie_LIBRARY}) + + if(NOT TARGET Datrie::Datrie) + add_library(Datrie::Datrie INTERFACE IMPORTED) + set_target_properties(Datrie::Datrie PROPERTIES + INTERFACE_LINK_LIBRARIES "${Datrie_LIBRARIES}" + INTERFACE_INCLUDE_DIRECTORIES "${Datrie_INCLUDE_DIR}" + ) + endif() +endif(Datrie_FOUND) diff --git a/libdatrie b/libdatrie deleted file mode 160000 index d1db08a..0000000 --- a/libdatrie +++ /dev/null @@ -1 +0,0 @@ -Subproject commit d1db08ac1c76f54ba23d63665437473788c999f3 diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index 8b044b8..eeed7eb 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1,16 +1,15 @@ set(CMAKE_EXPORT_COMPILE_COMMANDS ON) +set(cython_module datrie) find_package(Cython REQUIRED) -set(cython_module datrie) - set(datrie_include_dir "${PROJECT_SOURCE_DIR}/src") set(cython_output "${CMAKE_CURRENT_SOURCE_DIR}/${cython_module}.c") set(cython_src ${cython_module}.pyx) # Track cython sources file(GLOB cy_srcs *.pyx *.pxd) -# .pyx -> .cpp +# .pyx -> .c add_custom_command(OUTPUT ${cython_output} COMMAND ${CYTHON_EXECUTABLE} -a -2 @@ -21,6 +20,16 @@ add_custom_command(OUTPUT ${cython_output} DEPENDS ${cy_srcs} COMMENT "Cythonizing extension ${cython_src}") +if(NOT USE_LIBDATRIE_PKG) + # use the locally cloned source from FetchContent + set(DATRIE_INCLUDE_DIR "${libdatrie_SOURCE_DIR}") + file(GLOB_RECURSE DATRIE_SOURCES + LIST_DIRECTORIES true + "${libdatrie_SOURCE_DIR}/datrie/*.c") + list(APPEND cython_output ${DATRIE_SOURCES}) + include_directories(${DATRIE_INCLUDE_DIR}) +endif() + add_library(${cython_module} MODULE ${cython_output}) set_target_properties(${cython_module} @@ -32,33 +41,12 @@ target_include_directories(${cython_module} PUBLIC target_compile_definitions(${cython_module} PRIVATE VERSION_INFO=${SCM_VERSION_INFO}) -# here we get to jump through some hoops to find libdatrie on the manylinux -# docker CI images, etc -find_package(datrie CONFIG NAMES datrie) - -if(datrie_FOUND) - message(STATUS "System datrie found") - target_link_libraries(${cython_module} PRIVATE datrie) -elseif(NOT MSVC) - message(STATUS "Trying PkgConfig") - find_package(PkgConfig REQUIRED) - pkg_check_modules(DATRIE datrie-0.2 REQUIRED IMPORTED_TARGET) - - if(DATRIE_FOUND) - include_directories(${DATRIE_INCLUDE_DIRS}) - target_link_libraries(${cython_module} PRIVATE PkgConfig::DATRIE) - else() - # last resort for manylinux: just try it - message(STATUS "Blindly groping instead") - link_directories("/usr/lib64" "/usr/lib") - target_link_libraries(${cython_module} PRIVATE "libdatrie.so") - 
endif() -else() - # even though we used vcpkg, we get to do the manual dance with windows - find_path(DATRIE_INCLUDE_DIRS datrie/triedefs.h) - find_library(DATRIE_LIBS NAMES datrie libdatrie) - target_include_directories(${cython_module} PUBLIC ${DATRIE_INCLUDE_DIRS}) - target_link_libraries(${cython_module} PRIVATE ${DATRIE_LIBS}) +if(Datrie_FOUND) + include_directories(${DATRIE_INCLUDE_DIRS}) + target_link_libraries(${cython_module} PRIVATE Datrie::Datrie) +elseif(DATRIE_FOUND) + include_directories(${DATRIE_INCLUDE_DIRS}) + target_link_libraries(${cython_module} PRIVATE PkgConfig::DATRIE) endif() if(APPLE) diff --git a/tox.ini b/tox.ini index 7230669..c0e0424 100644 --- a/tox.ini +++ b/tox.ini @@ -18,6 +18,8 @@ passenv = CMAKE_BUILD_OVERRIDE CMAKE_TOOLCHAIN_FILE CMAKE_GENERATOR + HAVE_LIBDATRIE_PKG + PYTHONIOENCODING PIP_DOWNLOAD_CACHE setenv = @@ -31,9 +33,9 @@ deps = hypothesis commands = - python -c "import path; path.Path('build').rmtree_p()" python setup.py build_ext --inplace python -m pytest [] + python -c "import path; path.Path('build').rmtree_p()" [testenv:dev] skip_install = true @@ -45,6 +47,8 @@ passenv = CMAKE_BUILD_OVERRIDE CMAKE_TOOLCHAIN_FILE CMAKE_GENERATOR + HAVE_LIBDATRIE_PKG + PYTHONIOENCODING PIP_DOWNLOAD_CACHE deps = @@ -66,18 +70,16 @@ passenv = CMAKE_BUILD_OVERRIDE CMAKE_TOOLCHAIN_FILE CMAKE_GENERATOR + HAVE_LIBDATRIE_PKG + PYTHONIOENCODING PIP_DOWNLOAD_CACHE -allowlist_externals = bash - deps = pip>=20.0.1 pep517 twine - path commands = - python -c "import path; path.Path('build').rmtree_p()" python -m pep517.build . twine check dist/* @@ -85,6 +87,8 @@ commands = skip_install = true passenv = CI + PYTHONIOENCODING + PIP_DOWNLOAD_CACHE deps = pip>=20.0.1 From 7f8a57d5342081227ece464cf096d68390c5df69 Mon Sep 17 00:00:00 2001 From: Steve Arnold Date: Tue, 16 Feb 2021 21:12:38 -0800 Subject: [PATCH 03/14] chg: put submodule back in, add conda recipe/workflow (#3) * restore git submodule, set commit d1dfdb8, make sure path uses https url * switch cmake bits to use submodule hdrs/srcs, let cmake init the submodule if empty * add updated conda recipe and corresponding ci workflow --- .github/workflows/ci.yml | 7 ++--- .github/workflows/conda.yml | 56 +++++++++++++++++++++++++++++++++++ .github/workflows/release.yml | 1 + .github/workflows/wheels.yml | 1 + .gitmodules | 3 ++ CMakeLists.txt | 30 ++++++++++++------- conda.recipe/meta.yaml | 52 ++++++++++++++++++++++++++++++++ libdatrie | 1 + src/CMakeLists.txt | 6 ++-- tox.ini | 6 ++-- 10 files changed, 143 insertions(+), 20 deletions(-) create mode 100644 .github/workflows/conda.yml create mode 100644 .gitmodules create mode 100644 conda.recipe/meta.yaml create mode 160000 libdatrie diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index 76a4e1a..e36b37c 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -32,6 +32,7 @@ jobs: - uses: actions/checkout@v2 with: fetch-depth: 0 + submodules: true - uses: actions/setup-python@v2 with: @@ -47,13 +48,13 @@ jobs: run: | brew install ninja - - name: Prepare compiler environment for ${{ matrix.os }} + - name: Prepare build environment for ${{ matrix.os }} if: runner.os == 'Windows' uses: ilammy/msvc-dev-cmd@v1 with: arch: x64 - - name: Set have package true for ${{ matrix.os }} + - name: Set GITHUB_ENV vars for ${{ matrix.os }} if: runner.os == 'Linux' run: | echo "HAVE_LIBDATRIE_PKG=TRUE" >> $GITHUB_ENV @@ -67,8 +68,6 @@ jobs: packages: libdatrie-dev pybind11-dev ninja-build - name: Test in place - # windows does not like build_ext -i or 
removing previous build - if: runner.os != 'Windows' run: | tox -e py diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml new file mode 100644 index 0000000..2cfc7d1 --- /dev/null +++ b/.github/workflows/conda.yml @@ -0,0 +1,56 @@ +name: Conda + +on: + workflow_dispatch: + #pull_request: + #push: + # branches: + # - master + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: [ubuntu-latest, windows-2016, macos-latest] + python-version: [3.6, 3.7, 3.8, 3.9] + + runs-on: ${{ matrix.platform }} + + # The setup-miniconda action needs this to activate miniconda + defaults: + run: + shell: "bash -l {0}" + + steps: + - uses: actions/checkout@v2 + with: + fetch-depth: 0 + submodules: true + + - name: Cache conda + uses: actions/cache@v1 + with: + path: ~/conda_pkgs_dir + key: ${{matrix.os}}-conda-pkgs-${{hashFiles('**/conda.recipe/meta.yaml')}} + + - name: Get conda + uses: conda-incubator/setup-miniconda@v2 + with: + python-version: ${{ matrix.python-version }} + channels: conda-forge + channel-priority: strict + use-only-tar-bz2: true + auto-activate-base: true + + - name: Prepare + run: conda install conda-build conda-verify pytest hypothesis + + - name: Build + run: conda build conda.recipe + + - name: Install + run: conda install -c ${CONDA_PREFIX}/conda-bld/ datrie + + - name: Test + run: pytest -v diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 5892b9f..3921b46 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -18,6 +18,7 @@ jobs: - uses: actions/checkout@v2 with: fetch-depth: 0 + submodules: true - uses: actions/setup-python@v2 name: Install Python diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index fe7d296..ee9bc97 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -19,6 +19,7 @@ jobs: - uses: actions/checkout@v2 with: fetch-depth: 0 + submodules: true - uses: actions/setup-python@v2 name: Install Python diff --git a/.gitmodules b/.gitmodules new file mode 100644 index 0000000..3986f13 --- /dev/null +++ b/.gitmodules @@ -0,0 +1,3 @@ +[submodule "libdatrie"] + path = libdatrie + url = https://github.com/tlwg/libdatrie.git diff --git a/CMakeLists.txt b/CMakeLists.txt index 18e2530..1050543 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,6 +1,7 @@ cmake_minimum_required(VERSION 3.15...3.18) option(PY_DEBUG "Set if python being linked is a Py_DEBUG build" OFF) +option(GIT_SUBMODULE "Check submodules during build" ON) option(USE_LIBDATRIE_PKG "Use OS-provided libdatrie package") if(DEFINED ENV{HAVE_LIBDATRIE_PKG}) set(USE_LIBDATRIE_PKG "$ENV{HAVE_LIBDATRIE_PKG}") @@ -30,17 +31,26 @@ if(USE_LIBDATRIE_PKG AND NOT Datrie_FOUND) endif() if(NOT USE_LIBDATRIE_PKG) - message(STATUS "Fetching libdatrie from github") - # Fetch libdatrie - include(FetchContent) + message(STATUS "Trying libdatrie submodule") + find_package(Git QUIET) + if(GIT_FOUND AND EXISTS "${PROJECT_SOURCE_DIR}/.git") + # Update submodules as needed + if(GIT_SUBMODULE) + if(NOT EXISTS "${PROJECT_SOURCE_DIR}/libdatrie/datrie-0.2.pc.in") + message(STATUS "Submodule update") + execute_process(COMMAND ${GIT_EXECUTABLE} submodule update --init --recursive + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + RESULT_VARIABLE GIT_SUBMOD_RESULT) + if(NOT GIT_SUBMOD_RESULT EQUAL "0") + message(FATAL_ERROR "git submodule update --init failed with ${GIT_SUBMOD_RESULT}, please checkout submodules") + endif() + endif() + endif() + endif() - FetchContent_Declare( - libdatrie - 
GIT_REPOSITORY https://github.com/tlwg/libdatrie - GIT_TAG v0.2.13 - ) - FetchContent_MakeAvailable(libdatrie) - # this gets us the package source directory + if(NOT EXISTS "${PROJECT_SOURCE_DIR}/libdatrie/datrie-0.2.pc.in") + message(FATAL_ERROR "The submodules were not downloaded! GIT_SUBMODULE was turned off or failed. Please update submodules and try again.") + endif() endif() find_package(pybind11 CONFIG) diff --git a/conda.recipe/meta.yaml b/conda.recipe/meta.yaml new file mode 100644 index 0000000..181539c --- /dev/null +++ b/conda.recipe/meta.yaml @@ -0,0 +1,52 @@ +{% set name = "datrie" %} +{% set version = "0.8.3.dev0" %} + +package: + name: {{ name|lower }} + version: {{ version }} + +source: + path: .. + +build: + number: 0 + script: {{ PYTHON }} -m pip install . -vv + +requirements: + build: + - {{ compiler('c') }} + - {{ compiler('cxx') }} + host: + - python + - cmake >=3.15 + - pybind11 + - ninja + - cython + - pip + + run: + - python + +test: + requires: + - pytest + - hypothesis + imports: + - datrie + source_files: + - tests + commands: + - python -m pytest + +about: + home: "https://github.com/pytries" + license: LGPL-2.0-or-later + license_family: LGPL + license_file: COPYING + summary: "Fast, efficiently stored Trie for Python. Uses libdatrie." + doc_url: "https://github.com/pytries/datrie/blob/master/README.rst" + dev_url: "https://github.com/pytries/datrie" + +extra: + recipe-maintainers: + - sarnold diff --git a/libdatrie b/libdatrie new file mode 160000 index 0000000..d1dfdb8 --- /dev/null +++ b/libdatrie @@ -0,0 +1 @@ +Subproject commit d1dfdb831093892541cae46eba82c46aec94f726 diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index eeed7eb..fe3864c 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -21,11 +21,11 @@ add_custom_command(OUTPUT ${cython_output} COMMENT "Cythonizing extension ${cython_src}") if(NOT USE_LIBDATRIE_PKG) - # use the locally cloned source from FetchContent - set(DATRIE_INCLUDE_DIR "${libdatrie_SOURCE_DIR}") + # use the local git submodule + set(DATRIE_INCLUDE_DIR "${PROJECT_SOURCE_DIR}/libdatrie") file(GLOB_RECURSE DATRIE_SOURCES LIST_DIRECTORIES true - "${libdatrie_SOURCE_DIR}/datrie/*.c") + "${DATRIE_INCLUDE_DIR}/datrie/*.c") list(APPEND cython_output ${DATRIE_SOURCES}) include_directories(${DATRIE_INCLUDE_DIR}) endif() diff --git a/tox.ini b/tox.ini index c0e0424..71c6424 100644 --- a/tox.ini +++ b/tox.ini @@ -23,19 +23,18 @@ passenv = PIP_DOWNLOAD_CACHE setenv = - PYTHONPATH=. + PYTHONPATH = {toxinidir} + #PYTHONPATH = {env:PYTHONPATH}{:}{toxinidir} deps = pip>=20.0.1 cython>=0.20 - path pytest hypothesis commands = python setup.py build_ext --inplace python -m pytest [] - python -c "import path; path.Path('build').rmtree_p()" [testenv:dev] skip_install = true @@ -56,6 +55,7 @@ deps = path commands= + # this command breaks on windows (permission error) python -c "import path; path.Path('build').rmtree_p()" pip install -e .[test] flake8 src/ From f7614e728a5fabf0a514cceb9e697aa9f793fa7f Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Tue, 16 Feb 2021 22:02:14 -0800 Subject: [PATCH 04/14] fix: readme status urls Signed-off-by: Stephen L Arnold --- README.rst | 6 +++--- 1 file changed, 3 insertions(+), 3 deletions(-) diff --git a/README.rst b/README.rst index f29671d..d2b4260 100644 --- a/README.rst +++ b/README.rst @@ -1,9 +1,9 @@ datrie |github| =============== -.. 
|github| image:: https://img.shields.io/github/workflow/status/freepn/datrie/ci - :target: https://github.com/freepn/datrie/actions?query=workflow:ci - :alt: GitHub CI Build Status +.. |github| image:: https://img.shields.io/github/workflow/status/pytries/datrie/CI + :target: https://github.com/pytries/datrie/actions?query=workflow:CI + :alt: CI Status Super-fast, efficiently stored Trie for Python (2.x and 3.x). From 5246129fe5ac470450f2485e34fd4785269f7a1b Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Tue, 16 Feb 2021 22:05:52 -0800 Subject: [PATCH 05/14] fix: aand cleanup more maintainer cruft Signed-off-by: Stephen L Arnold --- conda.recipe/meta.yaml | 2 +- setup.cfg | 2 -- 2 files changed, 1 insertion(+), 3 deletions(-) diff --git a/conda.recipe/meta.yaml b/conda.recipe/meta.yaml index 181539c..66269ef 100644 --- a/conda.recipe/meta.yaml +++ b/conda.recipe/meta.yaml @@ -49,4 +49,4 @@ about: extra: recipe-maintainers: - - sarnold + - KOLANICH diff --git a/setup.cfg b/setup.cfg index dadbc5e..6472606 100644 --- a/setup.cfg +++ b/setup.cfg @@ -2,8 +2,6 @@ name = datrie author = Mikhail Korobov author_email = kmike84@gmail.com -maintainer = Steve Arnold -maintainer_email = nerdboy@gentoo.org description = Super-fast, efficiently stored Trie for Python long_description = file: README.rst long_description_content_type = text/x-rst; charset=UTF-8 From 1b491454b77aade4f7c2a68e91ef30e167d50463 Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Wed, 17 Feb 2021 17:16:31 -0800 Subject: [PATCH 06/14] fix: restore other ci and fix readme (again, sorry about that...) Signed-off-by: Stephen L Arnold --- travis_yml.disabled => .travis.yml | 0 README.rst | 11 ++++++++--- appveyor_yml.disabled => appveyor.yml | 0 3 files changed, 8 insertions(+), 3 deletions(-) rename travis_yml.disabled => .travis.yml (100%) rename appveyor_yml.disabled => appveyor.yml (100%) diff --git a/travis_yml.disabled b/.travis.yml similarity index 100% rename from travis_yml.disabled rename to .travis.yml diff --git a/README.rst b/README.rst index d2b4260..352b144 100644 --- a/README.rst +++ b/README.rst @@ -1,11 +1,16 @@ -datrie |github| -=============== +datrie |travis| |appveyor| |github| +=================================== + +.. |travis| image:: https://travis-ci.org/pytries/datrie.svg + :target: https://travis-ci.org/pytries/datrie + +.. |appveyor| image:: https://ci.appveyor.com/api/projects/status/6bpvhllpjhlau7x0?svg=true + :target: https://ci.appveyor.com/project/superbobry/datrie .. |github| image:: https://img.shields.io/github/workflow/status/pytries/datrie/CI :target: https://github.com/pytries/datrie/actions?query=workflow:CI :alt: CI Status - Super-fast, efficiently stored Trie for Python (2.x and 3.x). Uses `libdatrie`_. 
diff --git a/appveyor_yml.disabled b/appveyor.yml similarity index 100% rename from appveyor_yml.disabled rename to appveyor.yml From aa60a119c7087f9415c343e8f52745a97962c9dd Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Wed, 17 Feb 2021 17:43:10 -0800 Subject: [PATCH 07/14] chg: bump travis to bionic and update python versions Signed-off-by: Stephen L Arnold --- .travis.yml | 6 ++---- 1 file changed, 2 insertions(+), 4 deletions(-) diff --git a/.travis.yml b/.travis.yml index 4dddba1..7999489 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,14 +1,12 @@ -dist: xenial +dist: bionic cache: pip language: python python: - - "2.7" - - "3.4" - - "3.5" - "3.6" - "3.7" - "3.8" + - "3.9" install: - pip install tox-travis cython From c735ec8390206bf8ed692cfc744b8ef6bc224686 Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Wed, 17 Feb 2021 17:57:18 -0800 Subject: [PATCH 08/14] fix: set cmake min version to 3.12 and revert distro upgrade Signed-off-by: Stephen L Arnold --- .travis.yml | 2 +- CMakeLists.txt | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/.travis.yml b/.travis.yml index 7999489..c0ca4c7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,4 @@ -dist: bionic +dist: xenial cache: pip language: python diff --git a/CMakeLists.txt b/CMakeLists.txt index 1050543..0b45d6d 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,4 +1,4 @@ -cmake_minimum_required(VERSION 3.15...3.18) +cmake_minimum_required(VERSION 3.12...3.18) option(PY_DEBUG "Set if python being linked is a Py_DEBUG build" OFF) option(GIT_SUBMODULE "Check submodules during build" ON) From 5c8f5824b32c4826a5552989f0863a6859116e94 Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Wed, 17 Feb 2021 19:11:36 -0800 Subject: [PATCH 09/14] chg: update build deps for really old ubuntu Signed-off-by: Stephen L Arnold --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index c0ca4c7..4c914f4 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,6 +9,6 @@ python: - "3.9" install: - - pip install tox-travis cython + - pip install tox-travis ninja-build script: tox From 4956e665cf864a6c6bd054fc721ecb21cfefc385 Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Wed, 17 Feb 2021 19:14:10 -0800 Subject: [PATCH 10/14] turns out it really does need bionic instead of eol Signed-off-by: Stephen L Arnold --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 4c914f4..a2cb171 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,4 +1,4 @@ -dist: xenial +dist: bionic cache: pip language: python From 1cb6b88143ad44a74c7365b3237f293b7e026bbd Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Wed, 17 Feb 2021 19:21:00 -0800 Subject: [PATCH 11/14] fix brain-fart and install ninja from the right place Signed-off-by: Stephen L Arnold --- .travis.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index a2cb171..6d88813 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,6 +9,7 @@ python: - "3.9" install: - - pip install tox-travis ninja-build + - sudo apt-get install -y ninja-build + - pip install tox-travis script: tox From fcde144b4b727d844ce89f7285891116e36cadf9 Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Wed, 17 Feb 2021 19:56:36 -0800 Subject: [PATCH 12/14] still more deps to use older cmake on travis bionic Signed-off-by: Stephen L Arnold --- .travis.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.travis.yml b/.travis.yml index 
6d88813..fc7fce8 100644 --- a/.travis.yml +++ b/.travis.yml @@ -9,7 +9,7 @@ python: - "3.9" install: - - sudo apt-get install -y ninja-build + - sudo apt-get install -y ninja-build pybind11-dev libdatrie-dev - pip install tox-travis script: tox From 7132db9d225a763ee5a25c72f02f4d8716ddd4d1 Mon Sep 17 00:00:00 2001 From: Stephen L Arnold Date: Wed, 17 Feb 2021 22:02:03 -0800 Subject: [PATCH 13/14] remove --verbose arg to really make it 3.12 compliant Signed-off-by: Stephen L Arnold --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index b3d8dd4..73e0fa9 100755 --- a/setup.py +++ b/setup.py @@ -55,7 +55,7 @@ def build_extension(self, ext): "-DSCM_VERSION_INFO={}".format(__version__), "-DCMAKE_BUILD_TYPE={}".format(cfg), # not used on MSVC, but no harm ] - build_args = ["--verbose"] + build_args = [] # CMake also lets you provide a toolchain file. # Can be set in CI build environments for example. From 752282ac6fe0d576035a9d35fc15e16dc8867259 Mon Sep 17 00:00:00 2001 From: Steve Arnold Date: Sun, 17 Oct 2021 13:43:10 -0700 Subject: [PATCH 14/14] Conda plus coverage (#4) * new: add updated conda recipe and corresponding ci workflow * chg: restore git submodule, set commit d1dfdb8 (uses github relative path) * new: dev: add support for generating test coverage data * update cmake build flags for cython if WITH_COVERAGE * ci uses platform host pybind11, libdatrie (linux-only) * update cmake option handling, set version info * add cmake cmd to copy inplace extension to src/ (coverage only) * fix: dev: add test decorator for macos taking longer on a test * fix: dev: add missing vcpkg action param (not in the readme) * setupOnly *requires* vcpkgGitCommitId (only in hosted examples) * use environment.devenv.yml with condadev * fix: dev: "flaky" test failed again, extend deadline to 2500 ms * macos has occasional lag issues with disk I/O ? 
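[Editor's sketch, not part of the patch: the coverage path listed above can be exercised locally with something like the following, assuming the packages from requirements-dev.txt plus cmake/ninja are installed and libdatrie headers are available either via HAVE_LIBDATRIE_PKG or the git submodule. The exact shell invocation is an illustration; the settings mirror the new tox [testenv] section.]

    # build the Cython extension with line tracing, then run pytest with coverage;
    # with WITH_COVERAGE=ON the cmake rules copy the in-place module into src/ so
    # Cython.Coverage can resolve the sources listed in pyproject.toml
    export WITH_COVERAGE=ON
    export PYTHONPATH="$PWD/src"
    python setup.py build_ext --inplace
    python -m pytest -v --cov --cov-report term-missing
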
--- .github/workflows/ci.yml | 49 +++++---- .github/workflows/conda.yml | 8 +- .github/workflows/release.yml | 130 ++++++++++------------- .github/workflows/smoke.yml | 38 +++++++ .github/workflows/wheels.yml | 130 ++++++++++------------- .gitignore | 4 + CMakeLists.txt | 35 ++++-- MANIFEST.in | 7 +- bench/speed.py | 2 +- cmake/modules/FindDatrie.cmake | 4 +- conda.recipe/meta.yaml | 4 +- environment.devenv.yml | 12 +++ pyproject.toml | 23 +++- requirements-dev.txt | 6 ++ dev-requirements.txt => requirements.txt | 1 - setup.cfg | 15 +-- setup.py | 28 ++++- src/CMakeLists.txt | 38 ++++++- tests/test_random.py | 3 + tox-bench.ini | 4 +- tox.ini | 125 ++++++++++++++++++---- 21 files changed, 442 insertions(+), 224 deletions(-) create mode 100644 .github/workflows/smoke.yml create mode 100644 environment.devenv.yml create mode 100644 requirements-dev.txt rename dev-requirements.txt => requirements.txt (72%) diff --git a/.github/workflows/ci.yml b/.github/workflows/ci.yml index e36b37c..b305350 100644 --- a/.github/workflows/ci.yml +++ b/.github/workflows/ci.yml @@ -38,43 +38,54 @@ jobs: with: python-version: ${{ matrix.python-version }} - - name: Add pip requirements + - name: Set HAVE_LIBDATRIE_PKG var + if: runner.os == 'Linux' + shell: bash + env: + HAVE_LIBDATRIE_PKG: true run: | - python -m pip install --upgrade pip - pip install tox tox-gh-actions + echo "HAVE_LIBDATRIE_PKG is ${HAVE_LIBDATRIE_PKG}" + echo "HAVE_LIBDATRIE_PKG=${HAVE_LIBDATRIE_PKG}" >> $GITHUB_ENV + + - name: Install deps with apt helper action + if: runner.os == 'Linux' + uses: ryankurte/action-apt@v0.2.0 + with: + # architectures to pass to dpkg --add-architecture + #architectures: # optional + packages: pybind11-dev libdatrie-dev - name: Install macos deps with brew if: runner.os == 'macOS' run: | - brew install ninja + brew install pybind11 - - name: Prepare build environment for ${{ matrix.os }} + - name: Prepare build environment for windows if: runner.os == 'Windows' uses: ilammy/msvc-dev-cmd@v1 with: arch: x64 - - name: Set GITHUB_ENV vars for ${{ matrix.os }} - if: runner.os == 'Linux' - run: | - echo "HAVE_LIBDATRIE_PKG=TRUE" >> $GITHUB_ENV + #- name: Install deps with vcpkg + #if: runner.os == 'Windows' + #run: | + #echo $VCPKG_ROOT + #vcpkg install pybind11:x64-windows + #vcpkg integrate install - - name: Install deps with apt helper action - if: runner.os == 'Linux' - uses: ryankurte/action-apt@v0.2.0 - with: - # architectures to pass to dpkg --add-architecture - #architectures: # optional - packages: libdatrie-dev pybind11-dev ninja-build + - name: Add pip requirements + run: | + python -m pip install --upgrade pip + pip install tox tox-gh-actions - - name: Test in place + - name: Test via pip develop run: | - tox -e py + tox -e dev - name: Build dist pkgs run: | tox -e deploy - - name: Check wheel + - name: Test with built wheel run: | tox -e check diff --git a/.github/workflows/conda.yml b/.github/workflows/conda.yml index 2cfc7d1..a4d4d42 100644 --- a/.github/workflows/conda.yml +++ b/.github/workflows/conda.yml @@ -2,10 +2,10 @@ name: Conda on: workflow_dispatch: - #pull_request: - #push: - # branches: - # - master + # pull_request: + push: + branches: + - master jobs: build: diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 3921b46..7721223 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -6,77 +6,61 @@ on: tags: - '*' -jobs: - cibw_wheels: - name: Build wheels on ${{ matrix.os }} for Python - runs-on: ${{ matrix.os }} - strategy: - matrix: 
- os: [ubuntu-20.04, macos-latest, windows-latest] +env: + CIBW_TEST_COMMAND: pytest {project}/tests + CIBW_TEST_EXTRAS: test +jobs: + sdist: + name: Build sdist + runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - with: - fetch-depth: 0 - submodules: true + - uses: actions/checkout@v2 + with: + submodules: true - - uses: actions/setup-python@v2 - name: Install Python - with: - python-version: '3.8' + - name: Build SDist + run: pipx run build --sdist - - name: Prepare compiler environment for Windows - if: runner.os == 'Windows' - uses: ilammy/msvc-dev-cmd@v1 - with: - arch: amd64 + - name: Check metadata + run: pipx run twine check dist/* - - name: Install cibuildwheel - run: | - python -m pip install --upgrade pip - python -m pip install cibuildwheel==1.7.1 + - uses: actions/upload-artifact@v2 + with: + path: dist/*.tar.gz - - name: Build wheels - env: - CIBW_MANYLINUX_X86_64_IMAGE: quay.io/pypa/manylinux2010_x86_64:latest - CIBW_MANYLINUX_I686_IMAGE: quay.io/pypa/manylinux2010_i686:latest - CIBW_BUILD: cp36-* cp37-* cp38-* cp39-* - CIBW_SKIP: "*-win32" - CIBW_BEFORE_ALL_LINUX: > - yum -y -q --enablerepo=extras install epel-release - && yum install -y ninja-build - CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel show {wheel} && auditwheel repair -w {dest_dir} {wheel}" - CIBW_BEFORE_ALL_MACOS: > - brew install pybind11 ninja - CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=10.09 - CIBW_REPAIR_WHEEL_COMMAND_MACOS: "pip uninstall -y delocate && pip install git+https://github.com/Chia-Network/delocate.git && delocate-listdeps {wheel} && delocate-wheel -w {dest_dir} -v {wheel}" - CIBW_TEST_COMMAND: python -c "import datrie" - run: | - python -m cibuildwheel --output-dir wheelhouse - - - uses: actions/upload-artifact@v2 - with: - path: ./wheelhouse/*.whl + cibw_wheels: + name: Wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + fail-fast: false + matrix: + os: [ubuntu-latest, windows-latest, macos-latest] - sdist: - name: Build source distribution - runs-on: ubuntu-latest steps: - - uses: actions/checkout@v2 - - - uses: actions/setup-python@v2 - name: Install Python - with: - python-version: '3.7' - - - name: Build sdist - run: | - pip install pep517 - python -m pep517.build -s . 
- - - uses: actions/upload-artifact@v2 - with: - path: dist/*.tar.gz + - uses: actions/checkout@v2 + with: + submodules: true + + - uses: pypa/cibuildwheel@v1.12.0 + env: + CMAKE_BUILD_OVERRIDE: Release + CIBW_SKIP: cp27-* + CIBW_ARCHS_MACOS: auto universal2 + CIBW_TEST_SKIP: "*universal2:arm64" + CIBW_TEST_EXTRAS: test + CIBW_TEST_COMMAND: pytest {project}/tests + CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=10.09 + CIBW_ENVIRONMENT_WINDOWS: 'CMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake' + + - name: Verify clean directory + run: git diff --exit-code + shell: bash + + - name: Upload wheels + uses: actions/upload-artifact@v2 + with: + path: wheelhouse/*.whl create_release: needs: [cibw_wheels, sdist] @@ -93,22 +77,20 @@ jobs: with: fetch-depth: 0 - - uses: actions/setup-python@v2 - name: Install Python - with: - python-version: 3.7 - # download all artifacts to project dir - uses: actions/download-artifact@v2 + with: + name: artifact + path: dist - - name: Install gitchangelog + - name: Display artifacts run: | - python -m pip install https://github.com/freepn/gitchangelog/archive/3.0.5.tar.gz + ls -l dist/ - name: Generate changes file - run: | - export GITCHANGELOG_CONFIG_FILENAME=$(get-rcpath) - bash -c 'gitchangelog $(git tag --sort=taggerdate | tail -n2 | head -n1)..${{ env.VERSION }} > CHANGES.md' + uses: sarnold/gitchangelog-action@master + with: + github_token: ${{ secrets.GITHUB_TOKEN}} - name: Create draft release id: create_release @@ -122,7 +104,7 @@ jobs: draft: true prerelease: true # uncomment below to upload wheels to github releases - files: wheels/datrie*.whl + files: dist/*.whl #upload_pypi: #needs: [cibw_wheels, sdist] diff --git a/.github/workflows/smoke.yml b/.github/workflows/smoke.yml new file mode 100644 index 0000000..43a43ad --- /dev/null +++ b/.github/workflows/smoke.yml @@ -0,0 +1,38 @@ +name: Smoke + +on: + workflow_dispatch: + pull_request: + push: + branches: + - master + +jobs: + build: + strategy: + fail-fast: false + matrix: + platform: [windows-latest, macos-latest, ubuntu-20.04] + python-version: ["3.6", "3.7", "3.8", "3.9"] + + runs-on: ${{ matrix.platform }} + env: + CMAKE_BUILD_OVERRIDE: Release + + steps: + - uses: actions/checkout@v2 + with: + submodules: true + + - uses: actions/setup-python@v2 + with: + python-version: ${{ matrix.python-version }} + + - name: Add requirements + run: python -m pip install --upgrade wheel setuptools + + - name: Build and install + run: pip install --verbose .[test] + + - name: Test + run: python -m pytest diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml index ee9bc97..89407fb 100644 --- a/.github/workflows/wheels.yml +++ b/.github/workflows/wheels.yml @@ -1,93 +1,79 @@ -name: Wheels +name: Platform Wheels on: workflow_dispatch: - pull_request: + #pull_request: push: branches: - master +env: + CIBW_TEST_COMMAND: pytest {project}/tests + CIBW_TEST_EXTRAS: test + jobs: - cibw_wheels: - name: Build wheels on ${{ matrix.os }} for Python + sdist: + name: Build sdist + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v2 + # disabled here so sdist is clean + #with: + #submodules: true + + - name: Build sdist + run: pipx run build --sdist + + - name: Check metadata + run: pipx run twine check dist/* + + - uses: actions/upload-artifact@v2 + with: + name: dist + path: dist/*.tar.gz + + build: + name: ${{ matrix.os }} platform wheels runs-on: ${{ matrix.os }} strategy: + fail-fast: false matrix: - os: [ubuntu-20.04, macos-latest, windows-latest] + os: [ubuntu-latest, 
windows-latest, macos-latest] steps: - - uses: actions/checkout@v2 - with: - fetch-depth: 0 - submodules: true + - uses: actions/checkout@v2 + with: + submodules: true - - uses: actions/setup-python@v2 - name: Install Python - with: - python-version: '3.8' + - uses: pypa/cibuildwheel@v1.12.0 + env: + CMAKE_BUILD_OVERRIDE: Release + CIBW_SKIP: cp27-* + CIBW_ARCHS_MACOS: auto universal2 + CIBW_TEST_SKIP: "*universal2:arm64" + CIBW_TEST_EXTRAS: test + CIBW_TEST_COMMAND: pytest {project}/tests + CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=10.09 + #CIBW_ENVIRONMENT_WINDOWS: 'CMAKE_TOOLCHAIN_FILE=C:/vcpkg/scripts/buildsystems/vcpkg.cmake' - - name: Prepare compiler environment for Windows - if: runner.os == 'Windows' - uses: ilammy/msvc-dev-cmd@v1 - with: - arch: amd64 + - name: Verify clean directory + run: git diff --exit-code + shell: bash - - name: Install cibuildwheel - run: | - python -m pip install --upgrade pip - python -m pip install cibuildwheel==1.7.1 - - - name: Build wheels - env: - CIBW_MANYLINUX_X86_64_IMAGE: quay.io/pypa/manylinux2010_x86_64:latest - CIBW_MANYLINUX_I686_IMAGE: quay.io/pypa/manylinux2010_i686:latest - CIBW_BUILD: cp36-* cp37-* cp38-* cp39-* - CIBW_SKIP: "*-win32" - CIBW_BEFORE_ALL_LINUX: > - yum -y -q --enablerepo=extras install epel-release - && yum install -y ninja-build - CIBW_REPAIR_WHEEL_COMMAND_LINUX: "auditwheel show {wheel} && auditwheel repair -w {dest_dir} {wheel}" - CIBW_BEFORE_ALL_MACOS: > - brew install pybind11 ninja - CIBW_ENVIRONMENT_MACOS: MACOSX_DEPLOYMENT_TARGET=10.09 - CIBW_REPAIR_WHEEL_COMMAND_MACOS: "pip uninstall -y delocate && pip install git+https://github.com/Chia-Network/delocate.git && delocate-listdeps {wheel} && delocate-wheel -w {dest_dir} -v {wheel}" - CIBW_TEST_COMMAND: python -c "import datrie" - run: | - python -m cibuildwheel --output-dir wheelhouse - - - uses: actions/upload-artifact@v2 - with: - name: wheels - path: ./wheelhouse/*.whl + - name: Upload wheels + uses: actions/upload-artifact@v2 + with: + name: wheels + path: wheelhouse/*.whl check_artifacts: name: Check artifacts are correct - needs: [cibw_wheels] + needs: [build] runs-on: ubuntu-20.04 steps: - - uses: actions/checkout@v2 - - uses: actions/download-artifact@v2 - with: - name: wheels - - - name: Check number of downloaded artifacts - run: .github/workflows/wheel-check.sh 24 - - #upload_pypi: - #needs: [cibw_wheels, sdist] - #runs-on: ubuntu-latest - ## upload to PyPI on every tag starting with 'v' - #if: github.event_name == 'push' && startsWith(github.event.ref, 'refs/tags/v') - ## alternatively, to publish when a GitHub Release is created, use the following rule: - ## if: github.event_name == 'release' && github.event.action == 'published' - #steps: - #- uses: actions/download-artifact@v2 - #with: - #name: artifact - #path: dist - - #- uses: pypa/gh-action-pypi-publish@master - #with: - #user: __token__ - #password: ${{ secrets.pypi_password }} - + - uses: actions/checkout@v2 + - uses: actions/download-artifact@v2 + + - name: Check/display number of downloaded artifacts + run: .github/workflows/wheel-check.sh 38 + shell: bash diff --git a/.gitignore b/.gitignore index 4825ce1..e01e148 100644 --- a/.gitignore +++ b/.gitignore @@ -6,8 +6,12 @@ src/*.html .cache *.egg-info/* dist/ +environment.yml +.coverage +coverage.xml *.so +src/*.so build/ src/datrie.c diff --git a/CMakeLists.txt b/CMakeLists.txt index 0b45d6d..01b8c44 100644 --- a/CMakeLists.txt +++ b/CMakeLists.txt @@ -1,10 +1,14 @@ -cmake_minimum_required(VERSION 3.12...3.18) 
+cmake_minimum_required(VERSION 3.18) option(PY_DEBUG "Set if python being linked is a Py_DEBUG build" OFF) -option(GIT_SUBMODULE "Check submodules during build" ON) -option(USE_LIBDATRIE_PKG "Use OS-provided libdatrie package") +option(USE_GIT_SUBMODULE "Use submodules during build" ON) +option(USE_LIBDATRIE_PKG "Use OS-provided libdatrie package" OFF) + if(DEFINED ENV{HAVE_LIBDATRIE_PKG}) set(USE_LIBDATRIE_PKG "$ENV{HAVE_LIBDATRIE_PKG}") + if(USE_LIBDATRIE_PKG) + set(USE_GIT_SUBMODULE OFF) + endif() endif() if(NOT CMAKE_BUILD_TYPE) @@ -12,7 +16,21 @@ if(NOT CMAKE_BUILD_TYPE) "Default build type: RelWithDebInfo" FORCE) endif() -project(datrie LANGUAGES C CXX) +if(NOT SCM_VERSION_INFO) + # use git describe for scm_version + EXECUTE_PROCESS(COMMAND git describe + WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} + OUTPUT_VARIABLE SCM_VERSION_INFO + OUTPUT_STRIP_TRAILING_WHITESPACE) +endif() + +project(datrie LANGUAGES C CXX VERSION ${SCM_VERSION_INFO}) + +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) +set(CMAKE_VERBOSE_MAKEFILE ON) # this does nothing with ninja :/ +#set(CMAKE_VERBOSE_BUILD ON) # this also does nothing with ninja :/ + +message(STATUS "Trying ${CMAKE_BUILD_TYPE} build type for datrie ${SCM_VERSION_INFO}") include(GNUInstallDirs) include(CheckIncludeFile) @@ -30,7 +48,7 @@ if(USE_LIBDATRIE_PKG AND NOT Datrie_FOUND) pkg_check_modules(DATRIE datrie-0.2 IMPORTED_TARGET) endif() -if(NOT USE_LIBDATRIE_PKG) +if(USE_GIT_SUBMODULE) message(STATUS "Trying libdatrie submodule") find_package(Git QUIET) if(GIT_FOUND AND EXISTS "${PROJECT_SOURCE_DIR}/.git") @@ -47,12 +65,9 @@ if(NOT USE_LIBDATRIE_PKG) endif() endif() endif() - - if(NOT EXISTS "${PROJECT_SOURCE_DIR}/libdatrie/datrie-0.2.pc.in") - message(FATAL_ERROR "The submodules were not downloaded! GIT_SUBMODULE was turned off or failed. 
Please update submodules and try again.") - endif() endif() +# needs pybind11 in build host without some help (not found in python venv) find_package(pybind11 CONFIG) if(pybind11_FOUND) @@ -65,7 +80,7 @@ else() FetchContent_Declare( pybind11 GIT_REPOSITORY https://github.com/pybind/pybind11 - GIT_TAG v2.6.1 + GIT_TAG v2.7.1 ) FetchContent_MakeAvailable(pybind11) endif() diff --git a/MANIFEST.in b/MANIFEST.in index 4a82ba2..8217b77 100644 --- a/MANIFEST.in +++ b/MANIFEST.in @@ -2,8 +2,11 @@ global-include CMakeLists.txt *.cmake include README.rst CHANGES.rst COPYING graft src graft tests +graft bench +recursive-include libdatrie *.h +recursive-include libdatrie *.c include tox.ini tox-bench.ini update_c.sh -include bench/words100k.txt.zip exclude src/datrie.c exclude src/*.html -global-exclude *.py[cod] __pycache__ +global-exclude tests/*.c tests/*.h tools/*.c +global-exclude *.py[cod] *.tox __pycache__ diff --git a/bench/speed.py b/bench/speed.py index 8dd1da0..2616c70 100644 --- a/bench/speed.py +++ b/bench/speed.py @@ -327,4 +327,4 @@ def check_trie(trie, words): benchmark() #profiling() #memory() - print('\n~~~~~~~~~~~~~~\n') \ No newline at end of file + print('\n~~~~~~~~~~~~~~\n') diff --git a/cmake/modules/FindDatrie.cmake b/cmake/modules/FindDatrie.cmake index 23167c9..2767629 100644 --- a/cmake/modules/FindDatrie.cmake +++ b/cmake/modules/FindDatrie.cmake @@ -8,6 +8,7 @@ find_path(Datrie_INCLUDE_DIR NAMES datrie/triedefs.h HINTS ENV VCPKG_ROOT + ENV CONDA_PREFIX PATH_SUFFIXES include include/datrie PATHS ~/Library/Frameworks @@ -22,6 +23,7 @@ find_library(Datrie_LIBRARY NAMES datrie libdatrie HINTS ENV VCPKG_ROOT + ENV CONDA_PREFIX PATH_SUFFIXES lib lib64 lib32 PATHS ~/Library/Frameworks @@ -47,7 +49,7 @@ if(Datrie_FOUND) add_library(Datrie::Datrie INTERFACE IMPORTED) set_target_properties(Datrie::Datrie PROPERTIES INTERFACE_LINK_LIBRARIES "${Datrie_LIBRARIES}" - INTERFACE_INCLUDE_DIRECTORIES "${Datrie_INCLUDE_DIR}" + INTERFACE_INCLUDE_DIRECTORIES "${Datrie_INCLUDE_DIRS}" ) endif() endif(Datrie_FOUND) diff --git a/conda.recipe/meta.yaml b/conda.recipe/meta.yaml index 66269ef..ba935a6 100644 --- a/conda.recipe/meta.yaml +++ b/conda.recipe/meta.yaml @@ -1,5 +1,5 @@ {% set name = "datrie" %} -{% set version = "0.8.3.dev0" %} +{% set version = "0.8.3" %} package: name: {{ name|lower }} @@ -49,4 +49,4 @@ about: extra: recipe-maintainers: - - KOLANICH + - sarnold diff --git a/environment.devenv.yml b/environment.devenv.yml new file mode 100644 index 0000000..f6e95b9 --- /dev/null +++ b/environment.devenv.yml @@ -0,0 +1,12 @@ +name: datrie + +dependencies: + - cython>=0.20 + - cmake>=3.18 + - ninja + - c-compiler + - cxx-compiler + - make # [linux] + - hypothesis + - pytest + - regex diff --git a/pyproject.toml b/pyproject.toml index 780763a..0cd5efe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,9 +3,28 @@ requires = [ "setuptools>=42", "wheel", "Cython>=0.20", - "pybind11>=2.6.0", - "ninja; sys_platform != 'Windows'", + "pybind11>=2.6.1", + "ninja; sys_platform != 'win32' and platform_machine != 'arm64'", "cmake>=3.15", ] build-backend = "setuptools.build_meta" + +[tool.pytest.ini_options] +minversion = "6.0" +testpaths = [ + "tests", +] + +[tool.coverage.run] +plugins = ["Cython.Coverage"] +branch = true +source = ["src"] +omit = ["tests", ".tox"] + +[tool.coverage.paths] +source = ["src"] + +[tool.coverage.report] +fail_under = 80 +show_missing = true diff --git a/requirements-dev.txt b/requirements-dev.txt new file mode 100644 index 0000000..b6dfea6 --- /dev/null +++ 
b/requirements-dev.txt @@ -0,0 +1,6 @@ +cython>=0.20 +hypothesis +pytest +pytest-cov +coverage +coverage_python_version diff --git a/dev-requirements.txt b/requirements.txt similarity index 72% rename from dev-requirements.txt rename to requirements.txt index 905506f..9a54149 100644 --- a/dev-requirements.txt +++ b/requirements.txt @@ -1,3 +1,2 @@ -cython hypothesis pytest diff --git a/setup.cfg b/setup.cfg index 6472606..ebc7b74 100644 --- a/setup.cfg +++ b/setup.cfg @@ -23,24 +23,27 @@ classifiers = [options] python_requires = >=3.6 +# needed for python setup.py blah +#setup_requires = ninja + zip_safe = False [options.extras_require] test = pytest hypothesis - pytest-flake8 + +cov = + cython + coverage + coverage_python_version [aliases] test=pytest -[tool:pytest] -minversion = 6.0 -testpaths = - tests - [flake8] exclude = + .tox, .git, __pycache__, tests, diff --git a/setup.py b/setup.py index 73e0fa9..fe2ae3f 100755 --- a/setup.py +++ b/setup.py @@ -2,6 +2,7 @@ # import os +import re import sys import subprocess @@ -10,7 +11,8 @@ # update the version both here and in conda.recipe/meta.yaml -__version__ = '0.8.3.dev0' +# use semver versioning (not python extended versioning) +__version__ = '0.8.3' # Convert distutils Windows platform specifiers to CMake -A arguments PLAT_TO_CMAKE = { @@ -42,6 +44,12 @@ def build_extension(self, ext): else: cfg = os.environ.get("CMAKE_BUILD_OVERRIDE", "") + # Set a coverage flag if provided + if "WITH_COVERAGE" not in os.environ: + coverage = "OFF" + else: + coverage = os.environ.get("WITH_COVERAGE", "") + # CMake lets you override the generator - we need to check this. # Can be set with Conda-Build, for example. cmake_generator = os.environ.get("CMAKE_GENERATOR", "") @@ -53,10 +61,15 @@ def build_extension(self, ext): "-DCMAKE_LIBRARY_OUTPUT_DIRECTORY={}".format(extdir), "-DPYTHON_EXECUTABLE={}".format(sys.executable), "-DSCM_VERSION_INFO={}".format(__version__), + "-DWITH_COVERAGE={}".format(coverage), "-DCMAKE_BUILD_TYPE={}".format(cfg), # not used on MSVC, but no harm ] build_args = [] + # Add CMake arguments set as environment variable + if "CMAKE_ARGS" in os.environ: + cmake_args += [item for item in os.environ["CMAKE_ARGS"].split(" ") if item] + # CMake also lets you provide a toolchain file. # Can be set in CI build environments for example. cmake_toolchain_file = os.environ.get("CMAKE_TOOLCHAIN_FILE", "") @@ -70,7 +83,12 @@ def build_extension(self, ext): # Users can override the generator with CMAKE_GENERATOR in CMake # 3.15+. if not cmake_generator: - cmake_args += ["-GNinja"] + try: + import ninja # noqa: F401 + + cmake_args += ["-GNinja"] + except ImportError: + pass else: @@ -93,6 +111,12 @@ def build_extension(self, ext): ] build_args += ["--config", cfg] + if sys.platform.startswith("darwin"): + # Cross-compile support for macOS - respect ARCHFLAGS if set + archs = re.findall(r"-arch (\S+)", os.environ.get("ARCHFLAGS", "")) + if archs: + cmake_args += ["-DCMAKE_OSX_ARCHITECTURES={}".format(";".join(archs))] + # Set CMAKE_BUILD_PARALLEL_LEVEL to control the parallel build level # across all generators. if "CMAKE_BUILD_PARALLEL_LEVEL" not in os.environ: diff --git a/src/CMakeLists.txt b/src/CMakeLists.txt index fe3864c..6516f62 100644 --- a/src/CMakeLists.txt +++ b/src/CMakeLists.txt @@ -1,6 +1,20 @@ -set(CMAKE_EXPORT_COMPILE_COMMANDS ON) set(cython_module datrie) +option(WITH_COVERAGE "Enable coverage" OFF) + +# Cython flags must be set before we run find_package for Cython since the +# compiler command is created immediately. 
+set(CYTHON_FLAGS + "-X binding=True,boundscheck=False,embedsignature=True" + CACHE STRING "The directives for Cython compilation.") + +if(WITH_COVERAGE) + message(STATUS "Coverage enabled") + set(CYTHON_FLAGS + "${CYTHON_FLAGS},linetrace=True" + CACHE STRING "The directives for Cython compilation." FORCE) +endif() + find_package(Cython REQUIRED) set(datrie_include_dir "${PROJECT_SOURCE_DIR}/src") @@ -15,12 +29,13 @@ add_custom_command(OUTPUT ${cython_output} -a -2 --fast-fail -I ${datrie_include_dir} + ${CYTHON_FLAGS} --output-file ${cython_output} ${cython_src} WORKING_DIRECTORY ${CMAKE_CURRENT_SOURCE_DIR} DEPENDS ${cy_srcs} COMMENT "Cythonizing extension ${cython_src}") -if(NOT USE_LIBDATRIE_PKG) +if(USE_GIT_SUBMODULE) # use the local git submodule set(DATRIE_INCLUDE_DIR "${PROJECT_SOURCE_DIR}/libdatrie") file(GLOB_RECURSE DATRIE_SOURCES @@ -39,13 +54,26 @@ set_target_properties(${cython_module} target_include_directories(${cython_module} PUBLIC ${PYTHON_INCLUDE_DIRS}) -target_compile_definitions(${cython_module} PRIVATE VERSION_INFO=${SCM_VERSION_INFO}) +target_compile_definitions(${cython_module} PRIVATE + VERSION_INFO=${SCM_VERSION_INFO}) + +if(WITH_COVERAGE) + target_compile_definitions( + ${cython_module} # Enable line tracing for coverage if requested. + PRIVATE CYTHON_TRACE=1 CYTHON_TRACE_NOGIL=1) + + # Copy build target for coverage + add_custom_command( + DEPENDS ${cython_module} + COMMAND "${CMAKE_COMMAND}" -E copy "${PROJECT_BINARY_DIR}/${cython_module}" "${CMAKE_CURRENT_SOURCE_DIR}" + OUTPUT "${CMAKE_CURRENT_SOURCE_DIR}/${cython_module}") +endif() if(Datrie_FOUND) - include_directories(${DATRIE_INCLUDE_DIRS}) + target_include_directories(${cython_module} PUBLIC ${Datrie_INCLUDE_DIRS}) target_link_libraries(${cython_module} PRIVATE Datrie::Datrie) elseif(DATRIE_FOUND) - include_directories(${DATRIE_INCLUDE_DIRS}) + target_include_directories(${cython_module} PUBLIC ${DATRIE_INCLUDE_DIRS}) target_link_libraries(${cython_module} PRIVATE PkgConfig::DATRIE) endif() diff --git a/tests/test_random.py b/tests/test_random.py index 8d53d2e..1377bda 100644 --- a/tests/test_random.py +++ b/tests/test_random.py @@ -9,6 +9,7 @@ import hypothesis.strategies as st from hypothesis import given +from hypothesis import settings printable_strings = st.lists(st.text(string.printable)) @@ -30,6 +31,8 @@ def test_len(words): assert len(trie) == len(set(words)) +# error on macos only (more than default 200 ms) +@settings(deadline=2500) @given(printable_strings) def test_pickle_unpickle(words): trie = datrie.Trie(string.printable) diff --git a/tox-bench.ini b/tox-bench.ini index c62ef03..f4b2b0d 100644 --- a/tox-bench.ini +++ b/tox-bench.ini @@ -1,7 +1,9 @@ [tox] -envlist = py3{5,6,7,8,9} +envlist = py3{6,7,8,9} skip_missing_interpreters = true +# run this if datrie is already installed in your local environment, +# otherwise just run tox -e perf [testenv] commands= python bench/speed.py diff --git a/tox.ini b/tox.ini index 71c6424..6b19362 100644 --- a/tox.ini +++ b/tox.ini @@ -1,16 +1,42 @@ [tox] -envlist = py3{6,7,8,9} +envlist = py3{6,7,8,9}-{linux,macos,windows} skip_missing_interpreters = true -isolated_build = true -skipsdist=True +#isolated_build = true +skipsdist = true [gh-actions] -3.6 = py36 -3.7 = py37 -3.8 = py38 -3.9 = py39 +python = + 3.6 = py36 + 3.7 = py37 + 3.8 = py38 + 3.9 = py39 + +[gh-actions:env] +PLATFORM = + ubuntu-20.04: linux + macos-latest: macos + windows-latest: windows [testenv] +# Using build_ext for coverage needs pybind11 installed in the build host +# --or-- 
vendoring, eg, adding pybind11 as a git submodule. Otherwise +# normal builds work using the build deps (specified in pyproject.toml) +# installed in the virtual env. +# +# Order is also important as some cmds leave build cruft that may cause +# subsequent cmds to fail without running clean (mainly running +# --inplace for coverage *after* running install with develop, ie, +# pip install -e). The following works without clean called as a +# tox -e arg in between the build cmds: +# tox -e py -> dev -> deploy -> check +# All at once: +# tox -e lint,py,dev,deploy,check +# The following order may fail: +# tox -e dev -> py +# This is a side-effect of calling setup.py subcommands directly, +# eg build_ext, instead of pip install or python -m build or some +# other PEP517/518 compliant builder. +skip_install = true passenv = CI CC @@ -22,25 +48,30 @@ passenv = PYTHONIOENCODING PIP_DOWNLOAD_CACHE +# Note: To generate coverage data requires the built extension to be +# in the src/ directory when pytest --cov is run. Cmake copies +# the --inplace extension to src/ if WITH_COVERAGE is enabled setenv = - PYTHONPATH = {toxinidir} - #PYTHONPATH = {env:PYTHONPATH}{:}{toxinidir} + PYTHONPATH = {toxinidir}/src + WITH_COVERAGE = ON deps = pip>=20.0.1 - cython>=0.20 - pytest - hypothesis + cmake + ninja + #pybind11 # this doesn't work :/ + -rrequirements-dev.txt commands = python setup.py build_ext --inplace - python -m pytest [] + python -m pytest -v --cov --cov-report term-missing [] [testenv:dev] skip_install = true passenv = CI + PYTHON CC CXX CMAKE_BUILD_OVERRIDE @@ -52,14 +83,11 @@ passenv = deps = pip>=20.0.1 - path commands= - # this command breaks on windows (permission error) - python -c "import path; path.Path('build').rmtree_p()" - pip install -e .[test] - flake8 src/ - pytest -v + #python -m pip install -e .[test] + python -m pip --use-feature=in-tree-build install --verbose -e .[test] + python -m pytest -v [] [testenv:deploy] passenv = @@ -67,20 +95,22 @@ passenv = CI CC CXX - CMAKE_BUILD_OVERRIDE CMAKE_TOOLCHAIN_FILE CMAKE_GENERATOR HAVE_LIBDATRIE_PKG PYTHONIOENCODING PIP_DOWNLOAD_CACHE +setenv = + CMAKE_BUILD_OVERRIDE = Release + deps = pip>=20.0.1 - pep517 + build twine commands = - python -m pep517.build . + python -m build . twine check dist/* [testenv:check] @@ -100,3 +130,54 @@ commands_pre = commands = pytest -v + +[testenv:perf] +skip_install = true +passenv = + pythonLocation + CI + CC + CXX + HAVE_LIBDATRIE_PKG + CMAKE_TOOLCHAIN_FILE + CMAKE_GENERATOR + PYTHONIOENCODING + PIP_DOWNLOAD_CACHE + +setenv = + CMAKE_BUILD_OVERRIDE = Release + +deps = + pip>=20.0.1 + +commands_pre = + python setup.py install + +commands = + python bench/speed.py + +[testenv:lint] +skip_install = true +passenv = + CI + PYTHONIOENCODING + PIP_DOWNLOAD_CACHE + +deps = + pip>=20.0.1 + flake8 + +commands = + flake8 + +[testenv:clean] +skip_install = true + +whitelist_externals = + bash + +deps = + +commands = + bash -c "echo Removing build by-products and tox envs" + bash -c "rm -rf .coverage .eggs .hypothesis .tox build dist datrie.egg-info *.so src/*.so src/*.c"
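
[Editor's usage note, not part of the patches: the comment block at the top of the new tox.ini recommends an ordering of environments that avoids stale build cruft between in-place and installed builds. A rough sketch of that workflow, using only the envs defined above:]

    # lint, in-place coverage run, editable install test, sdist/wheel build, installed-wheel test
    tox -e lint,py,dev,deploy,check
    # remove build by-products, coverage data and tox envs (shells out to bash)
    tox -e clean
    # optional benchmark: installs the package, then runs bench/speed.py
    tox -e perf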