From 07c719e518a76334fa34676864439c69a24796af Mon Sep 17 00:00:00 2001 From: haavee Date: Fri, 29 Jun 2018 13:44:08 +0200 Subject: [PATCH 01/62] pgqwin(), Python2+3 Linux/OSX compat, C.Bassa fork This version is a fork of: https://github.com/npat-efault/ppgplot merging in useful edits from: https://github.com/cbassa/ppgplot (C. Bassa fork) merged with edits to make ppgplot compileable and loadable on MacOSX and Linux Python2 and 3 setup.py + src/__init__.py modified to support both Python2, Python3: print function, no '.has_key()' in Py3k import _ppgplot.* does not work under Py3k so rewritten to work in both 2 and 3 X11 library paths: on (modern) Linux X11 is not in /usr/X11R6/... so try a few paths and add if they exist O/S support: C. Bassa setup.py did not compile out of the box on MacOSX so only useful edits migrated into this one src/_ppgplot.c modified to support both Python2, Python3: C. Bassa fork had most of the Py3k work done but was not backward compat with 2.7 so fixed that; now compiles fine under 2.7 and 3.6 PGPLOT functions: - C. Bassa fork added pgerr[xy] - C. Bassa fork changed pgcurs/pgband behaviour - added pgqwin to query current window --- setup.py | 89 +++++++++++++------------- src/__init__.py | 2 +- src/_ppgplot.c | 167 +++++++++++++++++++++++++++++++++++++++++++----- 3 files changed, 197 insertions(+), 61 deletions(-) diff --git a/setup.py b/setup.py index b4264a6..fe46803 100644 --- a/setup.py +++ b/setup.py @@ -1,3 +1,4 @@ +from __future__ import print_function from distutils.sysconfig import get_python_inc, get_python_lib import os import sys @@ -26,7 +27,7 @@ include_dirs.extend(get_numpy_include_dirs()) define_macros.append(('USE_NUMPY', None)) undef_macros.append('USE_NUMARRAY') - print >>sys.stderr, "using numpy..." + print("using numpy...", file=sys.stderr) found_module = True # uncommenting the following line retains any previous ppgplot # package and installs this numpy-compatible version as @@ -37,43 +38,43 @@ if not found_module: try: - # Try to use the "numarray" module (2nd option) - # uncomment the following line to disable usage of numarray - #raise ImportError - from distutils.core import setup - from numarray.numarrayext import NumarrayExtension - make_extension = NumarrayExtension - define_macros.append(('USE_NUMARRAY', None)) - print >>sys.stderr, "using numarray..." - found_module = True - # uncommenting the following line retains any previous ppgplot - # package and installs this numpy-compatible version as - # the package ppgplot_numpy - #name = "ppgplot_numarray" + # Try to use the "numarray" module (2nd option) + # uncomment the following line to disable usage of numarray + #raise ImportError + from distutils.core import setup + from numarray.numarrayext import NumarrayExtension + make_extension = NumarrayExtension + define_macros.append(('USE_NUMARRAY', None)) + print("using numarray...", file=sys.stderr) + found_module = True + # uncommenting the following line retains any previous ppgplot + # package and installs this numpy-compatible version as + # the package ppgplot_numpy + #name = "ppgplot_numarray" except ImportError: - pass + pass if not found_module: try: - # Try to use the "Numeric" module (3rd option) - # uncomment the following line to disable usage of Numeric - #raise ImportError - from distutils.core import setup, Extension - make_extension = Extension - include_dirs.append( - os.path.join(get_python_inc(plat_specific=1), "Numeric")) - undef_macros.append('USE_NUMARRAY') - print >>sys.stderr, "using Numeric..." 
- found_module = True - # uncommenting the following line retains any previous ppgplot - # package and installs this numpy-compatible version as - # the package ppgplot_numpy - #name = "ppgplot_Numeric" + # Try to use the "Numeric" module (3rd option) + # uncomment the following line to disable usage of Numeric + #raise ImportError + from distutils.core import setup, Extension + make_extension = Extension + include_dirs.append( + os.path.join(get_python_inc(plat_specific=1), "Numeric")) + undef_macros.append('USE_NUMARRAY') + print("using Numeric...", file=sys.stderr) + found_module = True + # uncommenting the following line retains any previous ppgplot + # package and installs this numpy-compatible version as + # the package ppgplot_numpy + #name = "ppgplot_Numeric" except ImportError: - pass + pass if not found_module: - raise Exception, "None of numpy, numarray or Numeric found" + raise Exception("None of numpy, numarray or Numeric found") if os.name == "posix": #libraries.append("png") @@ -82,27 +83,29 @@ # comment out g2c if compiling with gfortran (typical nowadays) # you may still need this if using an earlier fortran compiler # libraries.append("g2c") - library_dirs.append("/usr/X11R6/lib/") - if os.environ.has_key("PGPLOT_DIR"): + for ld in filter(os.path.isdir, ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/"]): + library_dirs.append(ld) + # + if 'PGPLOT_DIR' in os.environ: library_dirs.append(os.environ["PGPLOT_DIR"]) include_dirs.append(os.environ["PGPLOT_DIR"]) # locate Aquaterm dynamic library if running Mac OS X SCISOFT # (www.stecf.org/macosxscisoft/) - elif os.environ.has_key("SCIDIR"): - libraries.append("aquaterm") + elif 'SCIDIR' in os.environ: + libraries.append("aquaterm") library_dirs.append(os.path.join(os.environ["SCIDIR"], 'lib')) else: - print >>sys.stderr, "PGPLOT_DIR env var not defined!" + print("PGPLOT_DIR env var not defined!", file=sys.stderr) else: - raise Exception, "os not supported" + raise Exception("os not supported") ext_ppgplot = make_extension(name+'._ppgplot', - [os.path.join('src', '_ppgplot.c')], - include_dirs=include_dirs, - libraries=libraries, - library_dirs=library_dirs, - define_macros=define_macros, - extra_compile_args=extra_compile_args) + [os.path.join('src', '_ppgplot.c')], + include_dirs=include_dirs, + libraries=libraries, + library_dirs=library_dirs, + define_macros=define_macros, + extra_compile_args=extra_compile_args) diff --git a/src/__init__.py b/src/__init__.py index 544f39d..a3b59dd 100644 --- a/src/__init__.py +++ b/src/__init__.py @@ -1 +1 @@ -from _ppgplot import * +from . _ppgplot import * diff --git a/src/_ppgplot.c b/src/_ppgplot.c index f9ece92..938b1e8 100644 --- a/src/_ppgplot.c +++ b/src/_ppgplot.c @@ -66,6 +66,8 @@ static PyObject *PpgIOErr; static PyObject *PpgTYPEErr; static PyObject *PpgMEMErr; +float xcurs=0.0, ycurs=0.0; + /**************************************************************************/ /* support functions */ /**************************************************************************/ @@ -74,6 +76,8 @@ static PyObject * tofloatvector (PyObject *o, float **v, int *vsz) { PyArrayObject *a1, *af1, *af2; + PyArray_Descr *descr; + npy_intp dims; int ownedaf1=0; /* Check if args are arrays. 
*/ @@ -100,7 +104,7 @@ tofloatvector (PyObject *o, float **v, int *vsz) #ifndef USE_NUMARRAY case PyArray_UBYTE: #endif -#ifndef USE_NUMPY +#ifdef PyArray_SBYTE case PyArray_SBYTE: #endif case PyArray_SHORT: @@ -126,11 +130,13 @@ tofloatvector (PyObject *o, float **v, int *vsz) #endif af2 = af1; - if (PyArray_As1D((PyObject **)&af2, (char **)v, vsz, - PyArray_FLOAT) == -1) { + descr = PyArray_DescrFromType(PyArray_FLOAT); + if (PyArray_AsCArray((PyObject **)&af2, (void *)v, &dims, 1, + descr) == -1) { af2 = NULL; } - + *vsz = dims; + if (ownedaf1) { Py_DECREF(af1); } return((PyObject *)af2); @@ -142,6 +148,8 @@ static PyObject * tofloatmat(PyObject *o, float **m, int *nr, int *nc) { PyArrayObject *a1, *af1, *af2; + PyArray_Descr *descr; + npy_intp dims[2]; int ownedaf1=0; char **tmpdat; @@ -169,7 +177,7 @@ tofloatmat(PyObject *o, float **m, int *nr, int *nc) #ifndef USE_NUMARRAY case PyArray_UBYTE: #endif -#ifndef USE_NUMPY +#ifdef PyArray_SBYTE case PyArray_SBYTE: #endif case PyArray_SHORT: @@ -195,11 +203,14 @@ tofloatmat(PyObject *o, float **m, int *nr, int *nc) #endif af2 = af1; - if (PyArray_As2D((PyObject **)&af2, (char ***)&tmpdat, nr, nc, - PyArray_FLOAT) == -1) { + descr = PyArray_DescrFromType(PyArray_FLOAT); + if (PyArray_AsCArray((PyObject **)&af2, (void *)&tmpdat, dims, 2, + descr) == -1) { af2 = NULL; goto bailout; } + *nr = dims[0]; + *nc = dims[1]; /* WARNING: What follows is a little tricky and I dunno if I'm really allowed to do this. On the other hand it really conserves @@ -395,6 +406,19 @@ PYF(pgqvp) return Py_BuildValue("ffff", x1, x2, y1, y2); } +/* + pgqwin() + return x1, x2, y1, y2 +*/ +PYF(pgqwin) +{ + float x1, x2, y1, y2; + + cpgqwin(&x1, &x2, &y1, &y2); + + return Py_BuildValue("ffff", x1, x2, y1, y2); +} + /* pgqvsz(units : int) return x1, x2, y1, y2 @@ -1004,22 +1028,21 @@ PYF(pgunsa) PYF(pgcurs) { - float x=0.0, y=0.0; char ch = '\0'; - if (!PyArg_ParseTuple(args,"|ff:pgcurs",&x, &y)) + if (!PyArg_ParseTuple(args,"|ff:pgcurs",&xcurs, &ycurs)) return(NULL); - cpgcurs(&x,&y,&ch); + cpgcurs(&xcurs,&ycurs,&ch); - return(Py_BuildValue("ffc",x,y,ch)); + return(Py_BuildValue("ffc",xcurs,ycurs,ch)); } PYF(pgband) { int mode=7, i=0; - float xref = 0.0, yref = 0.0, x=0.0, y=0.0; + float xref = 0.0, yref = 0.0; char ch = '\0'; if (!PyArg_ParseTuple(args,"i|iff:pgband", @@ -1027,9 +1050,9 @@ PYF(pgband) return(NULL); - cpgband(mode,i,xref,yref,&x,&y,&ch); + cpgband(mode,i,xref,yref,&xcurs,&ycurs,&ch); - return(Py_BuildValue("ffc",x,y,ch)); + return(Py_BuildValue("ffc",xcurs,ycurs,ch)); } PYF(pgqcol) @@ -1641,6 +1664,72 @@ PYF(pgerrb) return(NULL); } +PYF(pgerrx) +{ + PyObject *oy=NULL, *ox1=NULL, *ox2=NULL; + PyArrayObject *ay=NULL, *ax1=NULL, *ax2=NULL; + float *y=NULL, *x1=NULL, *x2=NULL, t=1.0; + int szy=0, szx1=0, szx2 =0, n1; + + if (!PyArg_ParseTuple(args,"OOO|f:pgerrx", &ox1, &ox2, &oy, &t)) + return(NULL); + + if (!(ay = (PyArrayObject *)tofloatvector(oy, &y, &szy))) goto fail; + if (!(ax1 = (PyArrayObject *)tofloatvector(ox1, &x1, &szx1))) goto fail; + if (!(ax2 = (PyArrayObject *)tofloatvector(ox2, &x2, &szx2))) goto fail; + + /* this is n1 = min(szx, szx1, szx2) */ + n1=(szy= 3 + static struct PyModuleDef ppgplotdef = { + PyModuleDef_HEAD_INIT, + "_ppgplot", /* m_name */ + "PPGPLOT Module", /* m_doc */ + -1, /* m_size */ + PpgMethods, /* m_methods */ + NULL, /* m_reload */ + NULL, /* m_traverse */ + NULL, /* m_clear */ + NULL, /* m_free */ + }; +#endif + /************************************************************************/ -void -init_ppgplot (void) + + 
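+/*
+ * Minimal Python-side sketch of what the 2/3-compatible module init below
+ * enables, using the pgqwin() query added in this fork.  pgopen, pgenv and
+ * pgend are assumed to be wrapped as in the upstream ppgplot binding, and
+ * "/null" is assumed to be an available (headless) PGPLOT/giza device:
+ *
+ *     import ppgplot                       # same code on Python 2.7 and 3.x
+ *     ppgplot.pgopen("/null")
+ *     ppgplot.pgenv(0., 10., 0., 5., 0, 0)
+ *     x1, x2, y1, y2 = ppgplot.pgqwin()    # -> (0.0, 10.0, 0.0, 5.0)
+ *     ppgplot.pgend()
+ */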
+static PyObject * +moduleinit(void) { PyObject *m, *d; +#if PY_MAJOR_VERSION <= 2 m = Py_InitModule("_ppgplot", PpgMethods); +#else + m = PyModule_Create(&ppgplotdef); +#endif d = PyModule_GetDict(m); - import_array(); +#if PY_MAJOR_VERSION <= 2 PpgIOErr = PyString_FromString("_ppgplot.ioerror"); PpgTYPEErr = PyString_FromString("_ppgplot.typeerror"); PpgMEMErr = PyString_FromString("_ppgplot.memerror"); +#else + PpgIOErr = PyBytes_FromString("_ppgplot.ioerror"); + PpgTYPEErr = PyBytes_FromString("_ppgplot.typeerror"); + PpgMEMErr = PyBytes_FromString("_ppgplot.memerror"); +#endif PyDict_SetItemString(d, "ioerror", PpgIOErr); PyDict_SetItemString(d, "typeerror", PpgTYPEErr); PyDict_SetItemString(d, "memerror", PpgMEMErr); + return m; } +#if PY_MAJOR_VERSION < 3 + void + init_ppgplot(void) + { + import_array(); + moduleinit(); + } +#else + PyMODINIT_FUNC + PyInit__ppgplot(void) + { + import_array(); + return moduleinit(); + } +#endif /************************************************************************/ /* End of _ppgplot.c */ /************************************************************************/ From 784bb32e331d6b5d00ec575edf0d3d444d308ea2 Mon Sep 17 00:00:00 2001 From: eee software boss Date: Tue, 17 Jul 2018 14:26:39 +0200 Subject: [PATCH 02/62] towards linking to specific cpgplot/pgplot The ppgplot extension may need to be linked to a different PGPLOT library than the system one - e.g. against a locally compiled Giza library(*) and both may need to co-exist. When $PGPLOT_DIR environment variable is set, 'setup.py' now looks for lib(c)pgplot.so under that directory and instruments distutils' setup(...) to link to that library. The linker is then also instructed to include the shared library's path(s) in the shared object such that import'ing it in Python should work irrespective of the user's LD_LIBRARY_PATH. Also added support for '--no-numarray', '--no-Numeric' and '--no-numpy' command line options to disable these as potential candidates for linking against. (*) http://giza.sourceforge.net/ "Giza also provides a drop-in, modern replacement for the PGPLOT graphics library (libpgplot and libcpgplot)." --- setup.py | 107 ++++++++++++++++++++++++++++++++++++++++++------------- 1 file changed, 82 insertions(+), 25 deletions(-) diff --git a/setup.py b/setup.py index fe46803..c20b2d2 100644 --- a/setup.py +++ b/setup.py @@ -1,26 +1,40 @@ -from __future__ import print_function -from distutils.sysconfig import get_python_inc, get_python_lib -import os -import sys +from __future__ import print_function +from distutils.sysconfig import get_python_inc, get_python_lib +import os, re, sys, functools + +# distutils does not like unrecognized options but we'd like the user to be +# able to indicate which numarray/numpy/Numeric *not* to try. +# So we just strip all these options from sys.argv (and remember them). +# I did look at +# https://stackoverflow.com/questions/677577/distutils-how-to-pass-a-user-defined-parameter-to-setup-py +# but that is not what is needed; we need to look at the command line /before/ setup(...) +# actually executed. 
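+#
+# For illustration, a hypothetical invocation that skips the numpy probe and
+# falls through to numarray/Numeric detection would look like
+#
+#     python setup.py build --no-numpy
+#
+# the flag is remembered in 'ours' below and stripped from sys.argv before
+# distutils' setup(...) ever parses the command line.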
+partition = lambda pred, lst: functools.reduce(lambda acc, elem: (acc[0]+[elem], acc[1]) if pred(elem) else (acc[0], acc[1]+[elem]), sys.argv, (list(), list())) +(ours, argv) = partition(re.compile(r'^--no-(numpy|numarray|numeric)$', re.I).match, sys.argv) +sys.argv = argv + ################################################################### # build the extension # -define_macros = [] -undef_macros = [] -include_dirs = [] -extra_compile_args = [] -libraries = ["cpgplot", "pgplot"] -library_dirs = [] -name = "ppgplot" - -found_module = False +# The options and paths will be built dynamically below +define_macros = [] +undef_macros = [] +include_dirs = [] +library_dirs = [] +extra_compile_args = [] +extra_link_args = [] +libraries = [] +runtime_library_dirs = [] +name = "ppgplot" +found_module = False try: # Try to use the "numpy" module (1st option) - # uncomment the following line to disable usage of numpy - #raise ImportError + # Pass '--no-numpy' to disable using numpy + if '--no-numpy' in ours: + raise ImportError from numpy.distutils.core import setup, Extension from numpy.distutils.misc_util import get_numpy_include_dirs make_extension = Extension @@ -39,8 +53,9 @@ if not found_module: try: # Try to use the "numarray" module (2nd option) - # uncomment the following line to disable usage of numarray - #raise ImportError + # '--no-numarray' to disable detection of this candidate + if '--no-numarray' in ours: + raise ImportError from distutils.core import setup from numarray.numarrayext import NumarrayExtension make_extension = NumarrayExtension @@ -57,8 +72,9 @@ if not found_module: try: # Try to use the "Numeric" module (3rd option) - # uncomment the following line to disable usage of Numeric - #raise ImportError + # the pattern should be obvious by now + if '--no-Numeric' in ours: + raise ImportError from distutils.core import setup, Extension make_extension = Extension include_dirs.append( @@ -85,17 +101,56 @@ # libraries.append("g2c") for ld in filter(os.path.isdir, ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/"]): library_dirs.append(ld) + + # attempt to find libcpgplot under $PGPLOT_DIR - such that $PGPLOT_DIR can + # be pointed at e.g. giza (http://giza.sourceforge.net/) On some systems + # the original PGPLOT can be installed as a package, so the libs &cet end + # up under "/usr/lib/", but some users may prefer to bind the ppgplot + # extension to the giza implementation. + # + # In such a case, convincing the compiler, linker and runtime to choose using + # the libraries &cet from a non-standard path takes a bit more effort but can easily be done. # - if 'PGPLOT_DIR' in os.environ: - library_dirs.append(os.environ["PGPLOT_DIR"]) - include_dirs.append(os.environ["PGPLOT_DIR"]) + # Note: the alternative solution is to de-install the system PGPLOT but there are other + # (mainly astronomical) utilities who depend on PGPLOT and as such there is a need + # for both giza and PGPLOT to co-exist on the same system + # + # + # The code below instruments the compiling+linking of the ppgplot extension + # such that *IFF* libcpgplot.so is found somewhere under $PGPLOT_DIR it + # will be 'hard linked' against it and instruct the linker to add the + # dynamic library path into the shared library(ies). 
This means the user + # will not have to set their LD_LIBRRARY_PATH and loading of the + # _ppgplot.so module will 'just work' (famous last words) + pgplotlibs = None + pgplotdir = os.environ["PGPLOT_DIR"] if "PGPLOT_DIR" in os.environ else None + if pgplotdir is not None: + if not os.path.isdir(pgplotdir): + raise RuntimeError("$PGPLOT_DIR [{0}] is not a directory".format(pgplotdir)) + for (path, _, files) in os.walk( pgplotdir ): + if 'libcpgplot.so' in files: + pgplotlibs = path + break # locate Aquaterm dynamic library if running Mac OS X SCISOFT # (www.stecf.org/macosxscisoft/) elif 'SCIDIR' in os.environ: libraries.append("aquaterm") library_dirs.append(os.path.join(os.environ["SCIDIR"], 'lib')) else: - print("PGPLOT_DIR env var not defined!", file=sys.stderr) + print("PGPLOT_DIR env var not defined, hoping libcpgplot is in system path(s)", file=sys.stderr) + + # if we found pgplotlibs, make sure the extension is linked against /them/ + if pgplotlibs is not None: + # add the libraries by path and tell compiler/linker to include rpath + extra_link_args.append( "-Wl,-rpath={0}".format(pgplotlibs) ) + extra_link_args += map(functools.partial(os.path.join, pgplotlibs), ["libcpgplot.so", "libpgplot.so"]) + runtime_library_dirs.append( pgplotlibs ) + # pgplotlibs can not be None if pgplotdir is not a directory so the following can be + # executed unconditionally + include_dirs += [os.path.join(pgplotdir, "include")] + else: + # add "-lcpgplot -lpgplot" and hope for the best + libraries += ["cpgplot", "pgplot"] else: raise Exception("os not supported") @@ -105,7 +160,9 @@ libraries=libraries, library_dirs=library_dirs, define_macros=define_macros, - extra_compile_args=extra_compile_args) + extra_compile_args=extra_compile_args, + extra_link_args=extra_link_args, + runtime_library_dirs=runtime_library_dirs) @@ -118,7 +175,7 @@ description="Python / Numeric-Python bindings for PGPLOT", author="Nick Patavalis", author_email="npat@efault.net", - url="http://code.google.com/p/ppgplot/", + url="http://code.google.com/p/ppgplot/ https://github.com/haavee/ppgplot", packages=[name], package_dir={name:'src'}, ext_modules=[ext_ppgplot]) From edbf44d93b880de7381fdb916e0cd96d210f82e6 Mon Sep 17 00:00:00 2001 From: haavee Date: Tue, 17 Jul 2018 15:40:15 +0200 Subject: [PATCH 03/62] fixes for Python3/MacOSX Can now use python3 on MacOSX to link to giza as well using: # when building giza: # ./configure --prefix=/path/to/giza-root # then do: $> cd /path/to/ppgplot $> export PGPLOT_DIR=/path/to/giza-root $> /path/to/python[3] setup.py [...] --- setup.py | 23 ++++++++++++++--------- 1 file changed, 14 insertions(+), 9 deletions(-) diff --git a/setup.py b/setup.py index c20b2d2..3e05b3e 100644 --- a/setup.py +++ b/setup.py @@ -1,6 +1,6 @@ from __future__ import print_function from distutils.sysconfig import get_python_inc, get_python_lib -import os, re, sys, functools +import os, re, sys, platform, functools # distutils does not like unrecognized options but we'd like the user to be # able to indicate which numarray/numpy/Numeric *not* to try. 
@@ -99,7 +99,7 @@ # comment out g2c if compiling with gfortran (typical nowadays) # you may still need this if using an earlier fortran compiler # libraries.append("g2c") - for ld in filter(os.path.isdir, ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/"]): + for ld in filter(os.path.isdir, ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/", "/opt/X11/lib"]): library_dirs.append(ld) # attempt to find libcpgplot under $PGPLOT_DIR - such that $PGPLOT_DIR can @@ -122,14 +122,15 @@ # dynamic library path into the shared library(ies). This means the user # will not have to set their LD_LIBRRARY_PATH and loading of the # _ppgplot.so module will 'just work' (famous last words) - pgplotlibs = None + soext = 'dylib' if platform.system() == 'Darwin' else 'so' pgplotdir = os.environ["PGPLOT_DIR"] if "PGPLOT_DIR" in os.environ else None + pgplotlibd = None if pgplotdir is not None: if not os.path.isdir(pgplotdir): raise RuntimeError("$PGPLOT_DIR [{0}] is not a directory".format(pgplotdir)) for (path, _, files) in os.walk( pgplotdir ): - if 'libcpgplot.so' in files: - pgplotlibs = path + if 'libcpgplot.'+soext in files: + pgplotlibd = path break # locate Aquaterm dynamic library if running Mac OS X SCISOFT # (www.stecf.org/macosxscisoft/) @@ -140,11 +141,15 @@ print("PGPLOT_DIR env var not defined, hoping libcpgplot is in system path(s)", file=sys.stderr) # if we found pgplotlibs, make sure the extension is linked against /them/ - if pgplotlibs is not None: + if pgplotlibd is not None: # add the libraries by path and tell compiler/linker to include rpath - extra_link_args.append( "-Wl,-rpath={0}".format(pgplotlibs) ) - extra_link_args += map(functools.partial(os.path.join, pgplotlibs), ["libcpgplot.so", "libpgplot.so"]) - runtime_library_dirs.append( pgplotlibs ) + # Only on linux we need to do the rpath song & dance at /this/ stage; on MacOSX + # this can be fixed at the libgiza level (install_name option whilst linking libcpgplot) + # http://log.zyxar.com/blog/2012/03/10/install-name-on-os-x/ + if platform.system() != 'Darwin': + extra_link_args.append( "-Wl,-rpath={0}".format(pgplotlibd) ) + extra_link_args.extend( map(functools.partial(os.path.join, pgplotlibd), map("{{0}}.{0}".format(soext).format, ["libcpgplot", "libpgplot"])) ) + runtime_library_dirs.append( pgplotlibd ) # pgplotlibs can not be None if pgplotdir is not a directory so the following can be # executed unconditionally include_dirs += [os.path.join(pgplotdir, "include")] From d0dfdbe5eeec74c63f1780925e0261b519f4c7b2 Mon Sep 17 00:00:00 2001 From: haavee Date: Tue, 17 Jul 2018 15:57:58 +0200 Subject: [PATCH 04/62] Update INSTALL with some documentation --- INSTALL | 22 ++++++++++++++++++++++ 1 file changed, 22 insertions(+) diff --git a/INSTALL b/INSTALL index 946128b..2628509 100644 --- a/INSTALL +++ b/INSTALL @@ -6,6 +6,9 @@ possible to install ppgplot with one command (issued as user "root"): # python setup.py install +New in 2018: + Now also support python3 + If the pgplot libraries are in some other directory, or you don't feel like setting the PGPLOT_DIR, try this (again as user "root"): @@ -15,6 +18,20 @@ like setting the PGPLOT_DIR, try this (again as user "root"): Assuming "/usr/local/pgplot" is the directory where PGPLOT is installed. +New in 2018: + The "-L/.../" trick does not work if > 1 'libcpgplot.{so|dylib}' are + installed on the system. + + If linkage to an alternative, co-existing, PGPLOT library is required + (e.g. 
'giza' - http://giza.sourceforge.net/): + export PGPLOT_DIR=/path/to/giza-root + + The linker will be instructed to choose the library(ies) from + /path/to/giza-root/lib/ over those found in the system paths + in such a way the user will not have to tinker with their + LD_LIBRARY_PATH variable to make ld.so find and load + he correct shared libraries. + Depending on how you compiled PGPLOT, you may need to link ppgplot with additional runtime libraries. If compilation (linking) of the extension fails due to unresolved symbols, then this is probably the @@ -34,4 +51,9 @@ Since 1.4 ppgplot is configured to prefer "numpy" over "numarray" over "Numeric", then uncomment the appropriate "raise ImportError" lines in setup.py +New in 2018: + In stead of having to edit the setup.py script it is now possible to + pass '--no-Numeric', '--no-numpy' and/or '--no-numarray' on the + commandline to prevent checking for a specific num* implementation + Have fun ! :) From f5f69287f1a9e3fe2f7941fb315c091ded596389 Mon Sep 17 00:00:00 2001 From: Cees Bassa Date: Tue, 21 Aug 2018 23:16:06 +0200 Subject: [PATCH 05/62] Added pgpt1 functionality --- src/_ppgplot.c | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/_ppgplot.c b/src/_ppgplot.c index 938b1e8..ca3a013 100644 --- a/src/_ppgplot.c +++ b/src/_ppgplot.c @@ -1386,6 +1386,19 @@ PYF(pgline) return(NULL); } +PYF(pgpt1) +{ + float x, y; + int symbol=0; + + if (!PyArg_ParseTuple(args,"ffi:pgrect",&x, &y, &symbol)) + return(NULL); + + cpgpt1(x,y,symbol); + + PYRN; +} + PYF(pgpt) { int xsz, ysz; @@ -2269,6 +2282,7 @@ static PyMethodDef PpgMethods[] = { {"pgpanl", pgpanl, 1}, {"pgpap", pgpap, 1}, {"pgpt", pgpt, 1}, + {"pgpt1", pgpt1, 1}, {"pgptxt", pgptxt, 1}, {"pgqah", pgqah, 1}, {"pgqcf", pgqcf, 1}, From 42f9fe928f55b571879afa4138d7785c68145876 Mon Sep 17 00:00:00 2001 From: Cees Bassa Date: Tue, 21 Aug 2018 23:38:35 +0200 Subject: [PATCH 06/62] Added pgerr1 functionality --- src/_ppgplot.c | 14 ++++++++++++++ 1 file changed, 14 insertions(+) diff --git a/src/_ppgplot.c b/src/_ppgplot.c index 938b1e8..6b1ef86 100644 --- a/src/_ppgplot.c +++ b/src/_ppgplot.c @@ -1386,6 +1386,19 @@ PYF(pgline) return(NULL); } +PYF(pgerr1) +{ + float x, y, e, t; + int dir = 5; + + if (!PyArg_ParseTuple(args,"iffff:pgerr1",&dir, &x, &y, &e, &t)) + return(NULL); + + cpgerr1(dir,x,y,e,t); + + PYRN; +} + PYF(pgpt) { int xsz, ysz; @@ -2269,6 +2282,7 @@ static PyMethodDef PpgMethods[] = { {"pgpanl", pgpanl, 1}, {"pgpap", pgpap, 1}, {"pgpt", pgpt, 1}, + {"pgerr1", pgerr1, 1}, {"pgptxt", pgptxt, 1}, {"pgqah", pgqah, 1}, {"pgqcf", pgqcf, 1}, From 0fde157efb8ef8bd3e36cdb613b4c93ed4072be8 Mon Sep 17 00:00:00 2001 From: Cees Bassa Date: Wed, 22 Aug 2018 11:05:41 +0200 Subject: [PATCH 07/62] Fix function name --- src/_ppgplot.c | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/src/_ppgplot.c b/src/_ppgplot.c index ca3a013..116b183 100644 --- a/src/_ppgplot.c +++ b/src/_ppgplot.c @@ -1391,7 +1391,7 @@ PYF(pgpt1) float x, y; int symbol=0; - if (!PyArg_ParseTuple(args,"ffi:pgrect",&x, &y, &symbol)) + if (!PyArg_ParseTuple(args,"ffi:pgpt1",&x, &y, &symbol)) return(NULL); cpgpt1(x,y,symbol); From 8d4b8b813ddc9630271033db90657f830d567a51 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Thu, 6 Feb 2025 12:06:20 +0100 Subject: [PATCH 08/62] It's time to do modern building --- pyproject.toml | 23 ++++ setup.cfg | 17 +++ setup.py | 358 +++++++++++++++++++++++++------------------------ 3 files changed, 222 insertions(+), 176 deletions(-) create mode 100644 
pyproject.toml create mode 100644 setup.cfg diff --git a/pyproject.toml b/pyproject.toml new file mode 100644 index 0000000..fc52b9f --- /dev/null +++ b/pyproject.toml @@ -0,0 +1,23 @@ +[build-system] +requires = ["setuptools>=45", "numpy>=1.21.0"] +build-backend = "setuptools.build_meta" + +[project] +name = "ppgplot" +version = "1.4" +description = "Python bindings for PGPLOT" +authors = [ + {name = "Nick Patavalis", email = "npat@efault.net"}, +] +readme = "README.md" +requires-python = ">=3.7" +dependencies = [ + "numpy>=1.21.0", +] + +[project.urls] +Homepage = "https://github.com/haavee/ppgplot" + +[tool.setuptools] +packages = ["ppgplot"] +package-dir = {"ppgplot" = "src"} diff --git a/setup.cfg b/setup.cfg new file mode 100644 index 0000000..7ab8d8f --- /dev/null +++ b/setup.cfg @@ -0,0 +1,17 @@ +[options] +package_dir = + ppgplot = src + +[options.packages.find] +where = src + +#[options.ext_modules.ppgplot] +##ppgplot._ppgplot = +##sources = src/_ppgplot.c +#define_macros = USE_NUMPY=1 +#undef_macros = USE_NUMARRAY + +[build_ext] +# This section can be overridden by environment variables: +# PGPLOT_DIR - Path to PGPLOT/Giza installation +# SCIDIR - For MacOS X SCISOFT support diff --git a/setup.py b/setup.py index 3e05b3e..fcce300 100644 --- a/setup.py +++ b/setup.py @@ -1,186 +1,192 @@ -from __future__ import print_function -from distutils.sysconfig import get_python_inc, get_python_lib -import os, re, sys, platform, functools - -# distutils does not like unrecognized options but we'd like the user to be -# able to indicate which numarray/numpy/Numeric *not* to try. -# So we just strip all these options from sys.argv (and remember them). -# I did look at -# https://stackoverflow.com/questions/677577/distutils-how-to-pass-a-user-defined-parameter-to-setup-py -# but that is not what is needed; we need to look at the command line /before/ setup(...) -# actually executed. 
-partition = lambda pred, lst: functools.reduce(lambda acc, elem: (acc[0]+[elem], acc[1]) if pred(elem) else (acc[0], acc[1]+[elem]), sys.argv, (list(), list())) -(ours, argv) = partition(re.compile(r'^--no-(numpy|numarray|numeric)$', re.I).match, sys.argv) -sys.argv = argv - - -################################################################### -# build the extension -# - -# The options and paths will be built dynamically below -define_macros = [] -undef_macros = [] -include_dirs = [] -library_dirs = [] -extra_compile_args = [] -extra_link_args = [] -libraries = [] -runtime_library_dirs = [] -name = "ppgplot" -found_module = False - -try: - # Try to use the "numpy" module (1st option) - # Pass '--no-numpy' to disable using numpy - if '--no-numpy' in ours: - raise ImportError - from numpy.distutils.core import setup, Extension - from numpy.distutils.misc_util import get_numpy_include_dirs - make_extension = Extension - include_dirs.extend(get_numpy_include_dirs()) - define_macros.append(('USE_NUMPY', None)) - undef_macros.append('USE_NUMARRAY') - print("using numpy...", file=sys.stderr) - found_module = True - # uncommenting the following line retains any previous ppgplot - # package and installs this numpy-compatible version as - # the package ppgplot_numpy - #name = "ppgplot_numpy" -except ImportError: - pass - -if not found_module: - try: - # Try to use the "numarray" module (2nd option) - # '--no-numarray' to disable detection of this candidate - if '--no-numarray' in ours: - raise ImportError - from distutils.core import setup - from numarray.numarrayext import NumarrayExtension - make_extension = NumarrayExtension - define_macros.append(('USE_NUMARRAY', None)) - print("using numarray...", file=sys.stderr) - found_module = True - # uncommenting the following line retains any previous ppgplot - # package and installs this numpy-compatible version as - # the package ppgplot_numpy - #name = "ppgplot_numarray" - except ImportError: - pass - -if not found_module: - try: - # Try to use the "Numeric" module (3rd option) - # the pattern should be obvious by now - if '--no-Numeric' in ours: - raise ImportError - from distutils.core import setup, Extension - make_extension = Extension - include_dirs.append( - os.path.join(get_python_inc(plat_specific=1), "Numeric")) - undef_macros.append('USE_NUMARRAY') - print("using Numeric...", file=sys.stderr) - found_module = True - # uncommenting the following line retains any previous ppgplot - # package and installs this numpy-compatible version as - # the package ppgplot_numpy - #name = "ppgplot_Numeric" - except ImportError: - pass - -if not found_module: - raise Exception("None of numpy, numarray or Numeric found") - -if os.name == "posix": - #libraries.append("png") - libraries.append("X11") - libraries.append("m") - # comment out g2c if compiling with gfortran (typical nowadays) - # you may still need this if using an earlier fortran compiler - # libraries.append("g2c") +from setuptools import setup, Extension +import os +import sys +import platform + +def get_pgplot_library_config(): + libraries = [] + library_dirs = [] + runtime_library_dirs = [] + extra_link_args = [] + include_dirs = [] + + if os.name != "posix": + raise Exception("OS not supported") + + # Base libraries needed on POSIX systems + libraries.extend(["X11", "m"]) + + # Standard X11 library locations for ld in filter(os.path.isdir, ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/", "/opt/X11/lib"]): library_dirs.append(ld) - - # attempt to find libcpgplot under $PGPLOT_DIR - such 
that $PGPLOT_DIR can - # be pointed at e.g. giza (http://giza.sourceforge.net/) On some systems - # the original PGPLOT can be installed as a package, so the libs &cet end - # up under "/usr/lib/", but some users may prefer to bind the ppgplot - # extension to the giza implementation. - # - # In such a case, convincing the compiler, linker and runtime to choose using - # the libraries &cet from a non-standard path takes a bit more effort but can easily be done. - # - # Note: the alternative solution is to de-install the system PGPLOT but there are other - # (mainly astronomical) utilities who depend on PGPLOT and as such there is a need - # for both giza and PGPLOT to co-exist on the same system - # - # - # The code below instruments the compiling+linking of the ppgplot extension - # such that *IFF* libcpgplot.so is found somewhere under $PGPLOT_DIR it - # will be 'hard linked' against it and instruct the linker to add the - # dynamic library path into the shared library(ies). This means the user - # will not have to set their LD_LIBRRARY_PATH and loading of the - # _ppgplot.so module will 'just work' (famous last words) - soext = 'dylib' if platform.system() == 'Darwin' else 'so' - pgplotdir = os.environ["PGPLOT_DIR"] if "PGPLOT_DIR" in os.environ else None - pgplotlibd = None - if pgplotdir is not None: + + # Handle PGPLOT/Giza configuration + soext = 'dylib' if platform.system() == 'Darwin' else 'so' + pgplotdir = os.environ.get("PGPLOT_DIR") + + if pgplotdir: if not os.path.isdir(pgplotdir): - raise RuntimeError("$PGPLOT_DIR [{0}] is not a directory".format(pgplotdir)) - for (path, _, files) in os.walk( pgplotdir ): - if 'libcpgplot.'+soext in files: - pgplotlibd = path + raise RuntimeError(f"$PGPLOT_DIR [{pgplotdir}] is not a directory") + + # Find libcpgplot + for path, _, files in os.walk(pgplotdir): + if f'libcpgplot.{soext}' in files: + # Configure library paths and linking + if platform.system() != 'Darwin': + extra_link_args.append(f"-Wl,-rpath={path}") + extra_link_args.extend([ + os.path.join(path, f"libcpgplot.{soext}"), + os.path.join(path, f"libpgplot.{soext}") + ]) + runtime_library_dirs.append(path) + include_dirs.append(os.path.join(pgplotdir, "include")) break - # locate Aquaterm dynamic library if running Mac OS X SCISOFT - # (www.stecf.org/macosxscisoft/) + else: + raise RuntimeError(f"Could not find libcpgplot in $PGPLOT_DIR [{pgplotdir}]") + # MacOS X SCISOFT support elif 'SCIDIR' in os.environ: libraries.append("aquaterm") library_dirs.append(os.path.join(os.environ["SCIDIR"], 'lib')) else: print("PGPLOT_DIR env var not defined, hoping libcpgplot is in system path(s)", file=sys.stderr) - - # if we found pgplotlibs, make sure the extension is linked against /them/ - if pgplotlibd is not None: - # add the libraries by path and tell compiler/linker to include rpath - # Only on linux we need to do the rpath song & dance at /this/ stage; on MacOSX - # this can be fixed at the libgiza level (install_name option whilst linking libcpgplot) - # http://log.zyxar.com/blog/2012/03/10/install-name-on-os-x/ - if platform.system() != 'Darwin': - extra_link_args.append( "-Wl,-rpath={0}".format(pgplotlibd) ) - extra_link_args.extend( map(functools.partial(os.path.join, pgplotlibd), map("{{0}}.{0}".format(soext).format, ["libcpgplot", "libpgplot"])) ) - runtime_library_dirs.append( pgplotlibd ) - # pgplotlibs can not be None if pgplotdir is not a directory so the following can be - # executed unconditionally - include_dirs += [os.path.join(pgplotdir, "include")] - else: - # add 
"-lcpgplot -lpgplot" and hope for the best - libraries += ["cpgplot", "pgplot"] -else: - raise Exception("os not supported") - -ext_ppgplot = make_extension(name+'._ppgplot', - [os.path.join('src', '_ppgplot.c')], - include_dirs=include_dirs, - libraries=libraries, - library_dirs=library_dirs, - define_macros=define_macros, - extra_compile_args=extra_compile_args, - extra_link_args=extra_link_args, - runtime_library_dirs=runtime_library_dirs) - - - -################################################################### -# the package + libraries.extend(["cpgplot", "pgplot"]) + + return { + 'libraries': libraries, + 'library_dirs': library_dirs, + 'runtime_library_dirs': runtime_library_dirs, + 'extra_link_args': extra_link_args, + 'include_dirs': include_dirs + } + +def get_extension_config(): + try: + import numpy + include_dirs = [numpy.get_include()] + define_macros = [('USE_NUMPY', None), ('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')] + undef_macros = ['USE_NUMARRAY'] + except ImportError: + raise Exception("numpy is required for building ppgplot") + + pgplot_config = get_pgplot_library_config() + include_dirs.extend(pgplot_config['include_dirs']) + + return Extension('ppgplot._ppgplot', + sources=[os.path.join('src', '_ppgplot.c')], + include_dirs=include_dirs, + libraries=pgplot_config['libraries'], + library_dirs=pgplot_config['library_dirs'], + runtime_library_dirs=pgplot_config['runtime_library_dirs'], + extra_link_args=pgplot_config['extra_link_args'], + define_macros=define_macros, + undef_macros=undef_macros) +setup( + ext_modules=[ + get_extension_config(), + ] +) + +#setup( +# ext_modules=[ +# Extension( +# name="ppgplot", +# sources=["src/_ppgplot.c"], +# ), +# ] +#) # - -setup(name=name, - version="1.4", - description="Python / Numeric-Python bindings for PGPLOT", - author="Nick Patavalis", - author_email="npat@efault.net", - url="http://code.google.com/p/ppgplot/ https://github.com/haavee/ppgplot", - packages=[name], - package_dir={name:'src'}, - ext_modules=[ext_ppgplot]) +#import os +#import sys +#import platform +# +#def get_pgplot_library_config(): +# libraries = [] +# library_dirs = [] +# runtime_library_dirs = [] +# extra_link_args = [] +# include_dirs = [] +# +# if os.name != "posix": +# raise Exception("OS not supported") +# +# # Base libraries needed on POSIX systems +# libraries.extend(["X11", "m"]) +# +# # Standard X11 library locations +# for ld in filter(os.path.isdir, ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/", "/opt/X11/lib"]): +# library_dirs.append(ld) +# +# # Handle PGPLOT/Giza configuration +# soext = 'dylib' if platform.system() == 'Darwin' else 'so' +# pgplotdir = os.environ.get("PGPLOT_DIR") +# +# if pgplotdir: +# if not os.path.isdir(pgplotdir): +# raise RuntimeError(f"$PGPLOT_DIR [{pgplotdir}] is not a directory") +# +# # Find libcpgplot +# for path, _, files in os.walk(pgplotdir): +# if f'libcpgplot.{soext}' in files: +# # Configure library paths and linking +# if platform.system() != 'Darwin': +# extra_link_args.append(f"-Wl,-rpath={path}") +# extra_link_args.extend([ +# os.path.join(path, f"libcpgplot.{soext}"), +# os.path.join(path, f"libpgplot.{soext}") +# ]) +# runtime_library_dirs.append(path) +# include_dirs.append(os.path.join(pgplotdir, "include")) +# break +# else: +# raise RuntimeError(f"Could not find libcpgplot in $PGPLOT_DIR [{pgplotdir}]") +# +# # MacOS X SCISOFT support +# elif 'SCIDIR' in os.environ: +# libraries.append("aquaterm") +# library_dirs.append(os.path.join(os.environ["SCIDIR"], 'lib')) +# else: +# print("PGPLOT_DIR 
env var not defined, hoping libcpgplot is in system path(s)", file=sys.stderr) +# libraries.extend(["cpgplot", "pgplot"]) +# +# return { +# 'libraries': libraries, +# 'library_dirs': library_dirs, +# 'runtime_library_dirs': runtime_library_dirs, +# 'extra_link_args': extra_link_args, +# 'include_dirs': include_dirs +# } +# +#def get_extension_config(): +# try: +# import numpy +# include_dirs = [numpy.get_include()] +# define_macros = [('USE_NUMPY', None)] +# undef_macros = ['USE_NUMARRAY'] +# except ImportError: +# raise Exception("numpy is required for building ppgplot") +# +# pgplot_config = get_pgplot_library_config() +# include_dirs.extend(pgplot_config['include_dirs']) +# +# return Extension('ppgplot._ppgplot', +# sources=[os.path.join('src', '_ppgplot.c')], +# include_dirs=include_dirs, +# libraries=pgplot_config['libraries'], +# library_dirs=pgplot_config['library_dirs'], +# runtime_library_dirs=pgplot_config['runtime_library_dirs'], +# extra_link_args=pgplot_config['extra_link_args'], +# define_macros=define_macros, +# undef_macros=undef_macros) +# +#if __name__ == '__main__': +# setup( +# name="ppgplot", +# version="1.4", +# description="Python / Numeric-Python bindings for PGPLOT", +# author="Nick Patavalis", +# author_email="npat@efault.net", +# url="http://code.google.com/p/ppgplot/ https://github.com/haavee/ppgplot", +# packages=["ppgplot"], +# package_dir={"ppgplot": "src"}, +# ext_modules=[get_extension_config()] +# ) From 8bcbf5dd97cf93788ac3e2df8af65a781b1932e6 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Thu, 6 Feb 2025 17:43:27 +0100 Subject: [PATCH 09/62] First step to building with pip install/numpy --- pyproject.toml | 1 + setup.cfg | 34 ++++++++++++------------ setup.py | 14 ++++++++++ src/_ppgplot.c | 72 +++++++++++++++++++++++++++++++++++--------------- 4 files changed, 83 insertions(+), 38 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index fc52b9f..b50a268 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -8,6 +8,7 @@ version = "1.4" description = "Python bindings for PGPLOT" authors = [ {name = "Nick Patavalis", email = "npat@efault.net"}, + {name = "Marjolein Verkouter", email = "verkouter@jive.eu"}, ] readme = "README.md" requires-python = ">=3.7" diff --git a/setup.cfg b/setup.cfg index 7ab8d8f..f85973e 100644 --- a/setup.cfg +++ b/setup.cfg @@ -1,17 +1,17 @@ -[options] -package_dir = - ppgplot = src - -[options.packages.find] -where = src - -#[options.ext_modules.ppgplot] -##ppgplot._ppgplot = -##sources = src/_ppgplot.c -#define_macros = USE_NUMPY=1 -#undef_macros = USE_NUMARRAY - -[build_ext] -# This section can be overridden by environment variables: -# PGPLOT_DIR - Path to PGPLOT/Giza installation -# SCIDIR - For MacOS X SCISOFT support +#[options] +#package_dir = +# ppgplot = src +# +#[options.packages.find] +#where = src +# +##[options.ext_modules.ppgplot] +###ppgplot._ppgplot = +###sources = src/_ppgplot.c +##define_macros = USE_NUMPY=1 +##undef_macros = USE_NUMARRAY +# +#[build_ext] +## This section can be overridden by environment variables: +## PGPLOT_DIR - Path to PGPLOT/Giza installation +## SCIDIR - For MacOS X SCISOFT support diff --git a/setup.py b/setup.py index fcce300..124b07c 100644 --- a/setup.py +++ b/setup.py @@ -3,6 +3,11 @@ import sys import platform +#dbf = open('/tmp/build.log', 'w') +#def mprint(s): +# print(s, file=dbf) +mprint=lambda *_: None + def get_pgplot_library_config(): libraries = [] library_dirs = [] @@ -30,10 +35,17 @@ def get_pgplot_library_config(): # Find libcpgplot for path, _, files in 
os.walk(pgplotdir): + mprint(f"Inspecting files {files}") if f'libcpgplot.{soext}' in files: + mprint(f" => found libcpgplot.{soext}") # Configure library paths and linking if platform.system() != 'Darwin': extra_link_args.append(f"-Wl,-rpath={path}") + mprint(" adding '-Wl,-rpath={path}'") + else: + extra_link_args.append(f"-Wl,-rpath,{path}") + mprint(" adding '-Wl,-rpath,{path}'") + extra_link_args.extend([ os.path.join(path, f"libcpgplot.{soext}"), os.path.join(path, f"libpgplot.{soext}") @@ -50,6 +62,8 @@ def get_pgplot_library_config(): else: print("PGPLOT_DIR env var not defined, hoping libcpgplot is in system path(s)", file=sys.stderr) libraries.extend(["cpgplot", "pgplot"]) + + mprint(f"DONE:\n\tlibraries={libraries}\n\tlibrary_dirs={library_dirs}\n\textra_link_args={extra_link_args}\n\tinclude_dirs={include_dirs}") return { 'libraries': libraries, diff --git a/src/_ppgplot.c b/src/_ppgplot.c index ca1d435..cc2a5f2 100644 --- a/src/_ppgplot.c +++ b/src/_ppgplot.c @@ -8,6 +8,7 @@ * Linux (v2.4.x). * AUTHOR(S): * Nick Patavalis (npat@efault.net) + * Marjolein Verkouter (verkouter@jive.eu) - keep it alive * NOTES: * - A few ppgplot functions have not been interfaced yet. * - The pythonic calling conventions of some functions are *not* @@ -20,11 +21,9 @@ #include -#ifndef USE_NUMPY -#include -#else +/* It's 2025, we only support numpy anymore */ #include -#endif +#include /************************************************************************/ @@ -75,32 +74,59 @@ float xcurs=0.0, ycurs=0.0; static PyObject * tofloatvector (PyObject *o, float **v, int *vsz) { - PyArrayObject *a1, *af1, *af2; - PyArray_Descr *descr; - npy_intp dims; - int ownedaf1=0; + /* Set up for transforming to array of floats */ + int const requirements = NPY_ARRAY_FORCECAST|NPY_ARRAY_C_CONTIGUOUS|NPY_ARRAY_ALIGNED; + npy_intp dims; + PyArray_Descr *descr = PyArray_DescrFromType(NPY_FLOAT); + PyArrayObject *af=NULL; - /* Check if args are arrays. */ + /* Check if args is array */ if (!PyArray_Check(o)) { - PyErr_SetString(PpgTYPEErr,"object is not an array"); - return(NULL); - } - a1 = (PyArrayObject *)o; - /* Check if args are vectors. */ - if (a1->nd != 1) { - PyErr_SetString(PpgTYPEErr,"object is not a vector"); - return(NULL); - } - + /* Nope, but maybe it can be converted to an array - note, 1D only! */ + if( (af=(PyArrayObject*)PyArray_FromAny(o, descr, 1 /*min_depth*/, 1/*max_depth*/, requirements, NULL/*context*/))==NULL ) { + PyErr_SetString(PpgTYPEErr,"cannot cast input to vector of floats"); + return NULL; + } + } else { + /* Yes, already an array, check dims and try to convert */ + PyArrayObject *a1 = (PyArrayObject *)o; + #ifdef DEBUG_TOARRAY - fprintf(stderr,"(tofloatvector): array type = %d\n",a1->descr->type_num); + fprintf(stderr,"(tofloatvector): array type = %d\n",a1->descr->type_num); #endif + /* Check if args are vectors. */ + if( PyArray_NDIM(a1)!=1) { + PyErr_SetString(PpgTYPEErr, "object is not a vector"); + return NULL; + } + + /* Get a FLOAT array out of the current array */ + if( (af=(PyArrayObject*)PyArray_FromArray(a1, descr, requirements))==NULL ) { + PyErr_SetString(PpgTYPEErr, "cannot cast vector to floats"); + return NULL; + } + } + + /* af1 now points at a new array object. 
+ * Ask the library to transform it into a C-Array */ + if( PyArray_AsCArray((PyObject **)&af, (void *)v, &dims, 1, descr) == -1) { + PyErr_SetString(PpgTYPEErr, "cannot cast array to C-array of floats"); + return NULL; + } + *vsz = dims; + /* Tell the system we have this object */ + Py_INCREF(af); + return (PyObject *)af; + +#if 0 switch (a1->descr->type_num) { case PyArray_FLOAT: af1 = a1; break; +#ifdef PyArray_CHAR case PyArray_CHAR: +#endif #ifndef USE_NUMARRAY case PyArray_UBYTE: #endif @@ -124,7 +150,6 @@ tofloatvector (PyObject *o, float **v, int *vsz) return(NULL); break; } - #ifdef DEBUG_TOARRAY fprintf(stderr,"(tofloatvector): array type = %d\n",a1->descr->type_num); #endif @@ -140,6 +165,7 @@ tofloatvector (PyObject *o, float **v, int *vsz) if (ownedaf1) { Py_DECREF(af1); } return((PyObject *)af2); +#endif } /*************************************************************************/ @@ -147,6 +173,9 @@ tofloatvector (PyObject *o, float **v, int *vsz) static PyObject * tofloatmat(PyObject *o, float **m, int *nr, int *nc) { + PyErr_SetString(PpgTYPEErr, "tofloatmat not implemented yet"); + return NULL; +#if 0 PyArrayObject *a1, *af1, *af2; PyArray_Descr *descr; npy_intp dims[2]; @@ -228,6 +257,7 @@ tofloatmat(PyObject *o, float **m, int *nr, int *nc) bailout: if (ownedaf1) { Py_DECREF(af1); } return((PyObject *)af2); +#endif } /**************************************************************************/ From e3c6c94ae6d86b3f2cb971da6706b07080ab54ed Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Thu, 6 Feb 2025 17:44:31 +0100 Subject: [PATCH 10/62] setup.cfg was unnecessary --- setup.cfg | 17 ----------------- 1 file changed, 17 deletions(-) delete mode 100644 setup.cfg diff --git a/setup.cfg b/setup.cfg deleted file mode 100644 index f85973e..0000000 --- a/setup.cfg +++ /dev/null @@ -1,17 +0,0 @@ -#[options] -#package_dir = -# ppgplot = src -# -#[options.packages.find] -#where = src -# -##[options.ext_modules.ppgplot] -###ppgplot._ppgplot = -###sources = src/_ppgplot.c -##define_macros = USE_NUMPY=1 -##undef_macros = USE_NUMARRAY -# -#[build_ext] -## This section can be overridden by environment variables: -## PGPLOT_DIR - Path to PGPLOT/Giza installation -## SCIDIR - For MacOS X SCISOFT support From 793f9f3402ccd6fcf4bb453aac9836098a6b100e Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 7 Feb 2025 10:33:49 +0100 Subject: [PATCH 11/62] =?UTF-8?q?The=20extension=20buils=20&=20works=C2=B1?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit --- src/_ppgplot.c | 226 ++++++++++++++++++------------------------------- 1 file changed, 81 insertions(+), 145 deletions(-) diff --git a/src/_ppgplot.c b/src/_ppgplot.c index cc2a5f2..6b826dd 100644 --- a/src/_ppgplot.c +++ b/src/_ppgplot.c @@ -80,7 +80,7 @@ tofloatvector (PyObject *o, float **v, int *vsz) PyArray_Descr *descr = PyArray_DescrFromType(NPY_FLOAT); PyArrayObject *af=NULL; - /* Check if args is array */ + /* Check if arg is array */ if (!PyArray_Check(o)) { /* Nope, but maybe it can be converted to an array - note, 1D only! */ if( (af=(PyArrayObject*)PyArray_FromAny(o, descr, 1 /*min_depth*/, 1/*max_depth*/, requirements, NULL/*context*/))==NULL ) { @@ -91,10 +91,6 @@ tofloatvector (PyObject *o, float **v, int *vsz) /* Yes, already an array, check dims and try to convert */ PyArrayObject *a1 = (PyArrayObject *)o; -#ifdef DEBUG_TOARRAY - fprintf(stderr,"(tofloatvector): array type = %d\n",a1->descr->type_num); -#endif - /* Check if args are vectors. 
*/ if( PyArray_NDIM(a1)!=1) { PyErr_SetString(PpgTYPEErr, "object is not a vector"); @@ -118,157 +114,92 @@ tofloatvector (PyObject *o, float **v, int *vsz) /* Tell the system we have this object */ Py_INCREF(af); return (PyObject *)af; - -#if 0 - switch (a1->descr->type_num) { - case PyArray_FLOAT: - af1 = a1; - break; -#ifdef PyArray_CHAR - case PyArray_CHAR: -#endif -#ifndef USE_NUMARRAY - case PyArray_UBYTE: -#endif -#ifdef PyArray_SBYTE - case PyArray_SBYTE: -#endif - case PyArray_SHORT: - case PyArray_INT: -#ifndef USE_NUMARRAY - case PyArray_LONG: -#endif - case PyArray_DOUBLE: - if (!(af1 = (PyArrayObject *)PyArray_Cast(a1,PyArray_FLOAT))) { - PyErr_SetString(PpgTYPEErr,"cannot cast vector to floats"); - return(NULL); - } - ownedaf1 = 1; - break; - default: - PyErr_SetString(PpgTYPEErr,"cannot cast vector to floats"); - return(NULL); - break; - } -#ifdef DEBUG_TOARRAY - fprintf(stderr,"(tofloatvector): array type = %d\n",a1->descr->type_num); -#endif - - af2 = af1; - descr = PyArray_DescrFromType(PyArray_FLOAT); - if (PyArray_AsCArray((PyObject **)&af2, (void *)v, &dims, 1, - descr) == -1) { - af2 = NULL; - } - *vsz = dims; - - if (ownedaf1) { Py_DECREF(af1); } - - return((PyObject *)af2); -#endif } /*************************************************************************/ static PyObject * -tofloatmat(PyObject *o, float **m, int *nr, int *nc) -{ - PyErr_SetString(PpgTYPEErr, "tofloatmat not implemented yet"); - return NULL; -#if 0 - PyArrayObject *a1, *af1, *af2; - PyArray_Descr *descr; - npy_intp dims[2]; - int ownedaf1=0; - char **tmpdat; - - /* Check if args are arrays. */ +tofloatmat(PyObject *o, float ***m, int *nr, int* nc) +{ + /* Set up for transforming to array of floats */ + int const requirements = NPY_ARRAY_FORCECAST|NPY_ARRAY_C_CONTIGUOUS|NPY_ARRAY_ALIGNED; + npy_intp dims[2]; + PyArray_Descr *descr = PyArray_DescrFromType(NPY_FLOAT); + PyArrayObject *af=NULL; + + /* Check if arg is array */ if (!PyArray_Check(o)) { - PyErr_SetString(PpgTYPEErr,"object is not and array"); - return(NULL); - } - a1 = (PyArrayObject *)o; - /* Check if args are matrices. */ - if (a1->nd != 2) { - PyErr_SetString(PpgTYPEErr,"object is not a matrix"); - return(NULL); - } - -#ifdef DEBUG_TOARRAY - fprintf(stderr,"(tofloatmat): array type = %d\n",a1->descr->type_num); -#endif - - switch (a1->descr->type_num) { - case PyArray_FLOAT: - af1 = a1; - break; - case PyArray_CHAR: -#ifndef USE_NUMARRAY - case PyArray_UBYTE: -#endif -#ifdef PyArray_SBYTE - case PyArray_SBYTE: -#endif - case PyArray_SHORT: - case PyArray_INT: -#ifndef USE_NUMARRAY - case PyArray_LONG: -#endif - case PyArray_DOUBLE: - if (!(af1 = (PyArrayObject *)PyArray_Cast(a1,PyArray_FLOAT))) { - PyErr_SetString(PpgTYPEErr,"cannot cast matrix to floats"); - return(NULL); - } - ownedaf1 = 1; - break; - default: - PyErr_SetString(PpgTYPEErr,"cannot cast matrix to floats"); - return(NULL); - break; + /* Nope, but maybe it can be converted to an array - note, 2D only! */ + if( (af=(PyArrayObject*)PyArray_FromAny(o, descr, 2 /*min_depth*/, 2/*max_depth*/, requirements, NULL/*context*/))==NULL ) { + PyErr_SetString(PpgTYPEErr,"cannot cast input to matrix of floats"); + return NULL; + } + } else { + /* Yes, already an array, check dims and try to convert */ + PyArrayObject *a1 = (PyArrayObject *)o; + + /* Check if arg is matrix. 
*/ + if( PyArray_NDIM(a1)!=2) { + PyErr_SetString(PpgTYPEErr, "object is not a matrix"); + return NULL; + } + + /* Get a FLOAT array out of the current array */ + if( (af=(PyArrayObject*)PyArray_FromArray(a1, descr, requirements))==NULL ) { + PyErr_SetString(PpgTYPEErr, "cannot cast matrix to floats"); + return NULL; + } } - -#ifdef DEBUG_TOARRAY - fprintf(stderr,"(tofloatmat): array type = %d\n",a1->descr->type_num); -#endif - - af2 = af1; - descr = PyArray_DescrFromType(PyArray_FLOAT); - if (PyArray_AsCArray((PyObject **)&af2, (void *)&tmpdat, dims, 2, - descr) == -1) { - af2 = NULL; - goto bailout; + + /* af1 now points at a new array object. + * Ask the library to transform it into a C-Array */ + if( PyArray_AsCArray((PyObject **)&af, (void *)m, &dims, 2, descr) == -1) { + PyErr_SetString(PpgTYPEErr, "cannot cast array to C-array of floats"); + return NULL; } *nr = dims[0]; *nc = dims[1]; - - /* WARNING: What follows is a little tricky and I dunno if I'm - really allowed to do this. On the other hand it really conserves - time and memory! So this assert statement will make sure that - the program *will* blow in your face if what I'm doing here - turns-out be bogus. */ - assert((af2->dimensions[1] * af2->descr->elsize) == af2->strides[0]); - - /* Phew! we 're clear! */ - *m = (float *)(*tmpdat); - /* tmpdat was malloc'ed inside PyArray_As2D. We must free it. - Look at the code of PyArray_As2D for details... */ - free(tmpdat); - -bailout: - if (ownedaf1) { Py_DECREF(af1); } - return((PyObject *)af2); -#endif + /* Tell the system we have this object */ + Py_INCREF(af); + return (PyObject *)af; } + /**************************************************************************/ #ifdef DEBUG_TOARRAY -PYF(tstmat) +PYF(tstvec) { PyObject *o=NULL; PyArrayObject *af=NULL; float *v; + int i=0,j=0, n=0; + + if(!PyArg_ParseTuple(args,"O",&o)) return(NULL); + + if (!(af =(PyArrayObject *)tofloatvector(o,&v,&n))) goto fail; + + for (i=0; i0 && (i%10)==0 ) + fprintf(stderr, "\n"); + } + fprintf(stderr, "\n"); + + Py_DECREF(af); + PYRN; + +fail: + if (af) Py_DECREF(af); + return(NULL); +} + +PYF(tstmat) +{ + PyObject *o=NULL; + PyArrayObject *af=NULL; + float **v; int i=0,j=0, nc=0, nr=0; if(!PyArg_ParseTuple(args,"O",&o)) return(NULL); @@ -276,9 +207,13 @@ PYF(tstmat) if (!(af =(PyArrayObject *)tofloatmat(o,&v,&nr,&nc))) goto fail; for (i=0; i Date: Fri, 7 Feb 2025 13:46:34 +0100 Subject: [PATCH 12/62] Cleaner config, still supporting PGPLOT_DIR --- pyproject.toml | 16 ++- setup.py | 264 ++++++++++++++----------------------------------- 2 files changed, 89 insertions(+), 191 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index b50a268..5c7ff49 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,5 @@ [build-system] -requires = ["setuptools>=45", "numpy>=1.21.0"] +requires = ["setuptools>=45", "numpy>=1.21.0", "pkgconfig"] build-backend = "setuptools.build_meta" [project] @@ -14,6 +14,20 @@ readme = "README.md" requires-python = ">=3.7" dependencies = [ "numpy>=1.21.0", +# "pkgconfig" +] + +[external] +build-requires = [ + "virtual:compiler/c", + "pkg:generic/pkg-config", + "pkg:generic/giza", +] +host-requires = [ + "pkg:generic/giza", +] +dependencies = [ + "pkg:generic/giza", ] [project.urls] diff --git a/setup.py b/setup.py index 124b07c..6059d8a 100644 --- a/setup.py +++ b/setup.py @@ -2,205 +2,89 @@ import os import sys import platform +# these deps are listed in pyproject.toml so should be able to import w/o probs +import numpy +import pkgconfig -#dbf = open('/tmp/build.log', 'w') 
-#def mprint(s): -# print(s, file=dbf) -mprint=lambda *_: None -def get_pgplot_library_config(): - libraries = [] - library_dirs = [] - runtime_library_dirs = [] - extra_link_args = [] - include_dirs = [] - +def add_pgplot_from_giza(ext): + # Very convenient + pkgconfig.configure_extension(ext, 'giza', static=True) + # But not sufficient ... + ext.libraries.extend( ['cpgplot', 'pgplot'] ) + return ext + +# Configure the Extension based on stuff found in PGPLOT_DIR +def add_pgplot_from_pgplot_dir(ext, pgplotdir): + if not os.path.isdir(pgplotdir): + raise RuntimeError(f"$PGPLOT_DIR [{pgplotdir}] is not a directory") + darwin = 'darwin' in platform.system().lower() + soext = 'dylib' if darwin else 'so' + mk_rpath = ("-Wl,-rpath,{0}" if darwin else "-Wl,-rpath={0}").format + mk_lib = f"lib{{0}}.{soext}".format + # Find libcpgplot + lib = mk_lib("cpgplot") + for path, _, files in os.walk(pgplotdir): + if lib not in files: + continue + # OK found it! + # Configure runtime library paths + ext.extra_link_args.append( mk_rpath(path) ) + + # Because we're overriding system settings, add + # the libraries with absolute path + ext.extra_link_args.extend( map(lambda l: os.path.join(path, l), + map(mk_lib, ['cpgplot', 'pgplot'])) ) + ext.runtime_library_dirs.append( path ) + ext.include_dirs.append( os.path.join(pgplotdir, "include") ) + break + else: + raise RuntimeError(f"Could not find libcpgplot in $PGPLOT_DIR [{pgplotdir}]") + return ext + +# Extract useful info from the numpy module +def add_numpy(ext): + ext.include_dirs.append( numpy.get_include() ) + return ext + +# Set up X11 libraries, searching standard (Linux...) paths +def add_X11(ext): + ext.libraries.extend(['X11', 'm']) + # Standard X11 library locations + ext.library_dirs.extend( + filter(os.path.isdir, + ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/", "/opt/X11/lib"]) + ) + return ext + + +# This is the main Extension configuration step +# We go over the dependencies, each of which +# can modify the build env as needed +def set_extension_config(ext): + # yah ... 
maybe later if we grow up widen this if os.name != "posix": raise Exception("OS not supported") - - # Base libraries needed on POSIX systems - libraries.extend(["X11", "m"]) - - # Standard X11 library locations - for ld in filter(os.path.isdir, ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/", "/opt/X11/lib"]): - library_dirs.append(ld) - - # Handle PGPLOT/Giza configuration - soext = 'dylib' if platform.system() == 'Darwin' else 'so' - pgplotdir = os.environ.get("PGPLOT_DIR") - - if pgplotdir: - if not os.path.isdir(pgplotdir): - raise RuntimeError(f"$PGPLOT_DIR [{pgplotdir}] is not a directory") - - # Find libcpgplot - for path, _, files in os.walk(pgplotdir): - mprint(f"Inspecting files {files}") - if f'libcpgplot.{soext}' in files: - mprint(f" => found libcpgplot.{soext}") - # Configure library paths and linking - if platform.system() != 'Darwin': - extra_link_args.append(f"-Wl,-rpath={path}") - mprint(" adding '-Wl,-rpath={path}'") - else: - extra_link_args.append(f"-Wl,-rpath,{path}") - mprint(" adding '-Wl,-rpath,{path}'") - extra_link_args.extend([ - os.path.join(path, f"libcpgplot.{soext}"), - os.path.join(path, f"libpgplot.{soext}") - ]) - runtime_library_dirs.append(path) - include_dirs.append(os.path.join(pgplotdir, "include")) - break - else: - raise RuntimeError(f"Could not find libcpgplot in $PGPLOT_DIR [{pgplotdir}]") - # MacOS X SCISOFT support - elif 'SCIDIR' in os.environ: - libraries.append("aquaterm") - library_dirs.append(os.path.join(os.environ["SCIDIR"], 'lib')) - else: - print("PGPLOT_DIR env var not defined, hoping libcpgplot is in system path(s)", file=sys.stderr) - libraries.extend(["cpgplot", "pgplot"]) + # modify the extension to taste + add_X11(ext) + add_numpy(ext) - mprint(f"DONE:\n\tlibraries={libraries}\n\tlibrary_dirs={library_dirs}\n\textra_link_args={extra_link_args}\n\tinclude_dirs={include_dirs}") - - return { - 'libraries': libraries, - 'library_dirs': library_dirs, - 'runtime_library_dirs': runtime_library_dirs, - 'extra_link_args': extra_link_args, - 'include_dirs': include_dirs - } + # Where to source pgplot from + pgplot_dir = os.environ.get('PGPLOT_DIR', None) + if pgplot_dir is not None: + add_pgplot_from_pgplot_dir(ext, pgplot_dir) + else: + add_pgplot_from_giza(ext) + return ext -def get_extension_config(): - try: - import numpy - include_dirs = [numpy.get_include()] - define_macros = [('USE_NUMPY', None), ('NPY_NO_DEPRECATED_API', 'NPY_1_7_API_VERSION')] - undef_macros = ['USE_NUMARRAY'] - except ImportError: - raise Exception("numpy is required for building ppgplot") - - pgplot_config = get_pgplot_library_config() - include_dirs.extend(pgplot_config['include_dirs']) - - return Extension('ppgplot._ppgplot', - sources=[os.path.join('src', '_ppgplot.c')], - include_dirs=include_dirs, - libraries=pgplot_config['libraries'], - library_dirs=pgplot_config['library_dirs'], - runtime_library_dirs=pgplot_config['runtime_library_dirs'], - extra_link_args=pgplot_config['extra_link_args'], - define_macros=define_macros, - undef_macros=undef_macros) +########################################################### +# This triggers the whole build # +########################################################### setup( ext_modules=[ - get_extension_config(), + set_extension_config( Extension('ppgplot._ppgplot', + sources=[os.path.join('src', '_ppgplot.c')]) ), ] ) -#setup( -# ext_modules=[ -# Extension( -# name="ppgplot", -# sources=["src/_ppgplot.c"], -# ), -# ] -#) -# -#import os -#import sys -#import platform -# -#def get_pgplot_library_config(): -# libraries 
= [] -# library_dirs = [] -# runtime_library_dirs = [] -# extra_link_args = [] -# include_dirs = [] -# -# if os.name != "posix": -# raise Exception("OS not supported") -# -# # Base libraries needed on POSIX systems -# libraries.extend(["X11", "m"]) -# -# # Standard X11 library locations -# for ld in filter(os.path.isdir, ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/", "/opt/X11/lib"]): -# library_dirs.append(ld) -# -# # Handle PGPLOT/Giza configuration -# soext = 'dylib' if platform.system() == 'Darwin' else 'so' -# pgplotdir = os.environ.get("PGPLOT_DIR") -# -# if pgplotdir: -# if not os.path.isdir(pgplotdir): -# raise RuntimeError(f"$PGPLOT_DIR [{pgplotdir}] is not a directory") -# -# # Find libcpgplot -# for path, _, files in os.walk(pgplotdir): -# if f'libcpgplot.{soext}' in files: -# # Configure library paths and linking -# if platform.system() != 'Darwin': -# extra_link_args.append(f"-Wl,-rpath={path}") -# extra_link_args.extend([ -# os.path.join(path, f"libcpgplot.{soext}"), -# os.path.join(path, f"libpgplot.{soext}") -# ]) -# runtime_library_dirs.append(path) -# include_dirs.append(os.path.join(pgplotdir, "include")) -# break -# else: -# raise RuntimeError(f"Could not find libcpgplot in $PGPLOT_DIR [{pgplotdir}]") -# -# # MacOS X SCISOFT support -# elif 'SCIDIR' in os.environ: -# libraries.append("aquaterm") -# library_dirs.append(os.path.join(os.environ["SCIDIR"], 'lib')) -# else: -# print("PGPLOT_DIR env var not defined, hoping libcpgplot is in system path(s)", file=sys.stderr) -# libraries.extend(["cpgplot", "pgplot"]) -# -# return { -# 'libraries': libraries, -# 'library_dirs': library_dirs, -# 'runtime_library_dirs': runtime_library_dirs, -# 'extra_link_args': extra_link_args, -# 'include_dirs': include_dirs -# } -# -#def get_extension_config(): -# try: -# import numpy -# include_dirs = [numpy.get_include()] -# define_macros = [('USE_NUMPY', None)] -# undef_macros = ['USE_NUMARRAY'] -# except ImportError: -# raise Exception("numpy is required for building ppgplot") -# -# pgplot_config = get_pgplot_library_config() -# include_dirs.extend(pgplot_config['include_dirs']) -# -# return Extension('ppgplot._ppgplot', -# sources=[os.path.join('src', '_ppgplot.c')], -# include_dirs=include_dirs, -# libraries=pgplot_config['libraries'], -# library_dirs=pgplot_config['library_dirs'], -# runtime_library_dirs=pgplot_config['runtime_library_dirs'], -# extra_link_args=pgplot_config['extra_link_args'], -# define_macros=define_macros, -# undef_macros=undef_macros) -# -#if __name__ == '__main__': -# setup( -# name="ppgplot", -# version="1.4", -# description="Python / Numeric-Python bindings for PGPLOT", -# author="Nick Patavalis", -# author_email="npat@efault.net", -# url="http://code.google.com/p/ppgplot/ https://github.com/haavee/ppgplot", -# packages=["ppgplot"], -# package_dir={"ppgplot": "src"}, -# ext_modules=[get_extension_config()] -# ) From 2aef5d32a0e21d41d797369c00c36898b8663182 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 7 Feb 2025 14:33:31 +0100 Subject: [PATCH 13/62] Fix up documentation Adding README.md and updating it for current use --- README | 17 ----------------- README.md | 55 +++++++++++++++++++++++++++++++++++++++++++++++++++++++ 2 files changed, 55 insertions(+), 17 deletions(-) delete mode 100644 README create mode 100644 README.md diff --git a/README b/README deleted file mode 100644 index 96f6b93..0000000 --- a/README +++ /dev/null @@ -1,17 +0,0 @@ -ppgplot - The Pythonic interface to PGPLOT - -ppgplot is a python module (extension) providing 
bindings to the -PGPLOT graphics library. PGPLOT is a scientific visualization -(graphics) library written in Fortran by T. J. Pearson. C bindings -for PGPLOT are also available. ppgplot makes the library usable by -Python programs. It uses the numeric / numarray modules (nowadays -replaced by Numpy), to efficiently represent and manipulate vectors -and matrices. - -You can download the latest ppgplot release from - - https://github.com/npat-efault/ppgplot/releases - -see file "INSTALL" for installation instructions and directory -"examples" for usage examples - diff --git a/README.md b/README.md new file mode 100644 index 0000000..84dc765 --- /dev/null +++ b/README.md @@ -0,0 +1,55 @@ +# ppgplot + +ppgplot - The Pythonic interface to PGPLOT, with support for both PGPLOT and Giza backends. + +`ppgplot` is a python module (extension) providing bindings to the PGPLOT +graphics library. PGPLOT is a scientific visualization (graphics) library +written in Fortran by T. J. Pearson. C bindings for PGPLOT are also available. +`ppgplot` makes the library usable by Python programs. It had support for the Numeric / +numarray modules, but nowadays (>= Feb 2025) replaced by Numpy, to efficiently represent and +manipulate vectors and matrices. + +Currently, as the extension is not in PyPI, you're installing it into an "externally managed environment". You may need to create a Python [`venv`](https://docs.python.org/3/library/venv.html) first in order to install the extension manually on your system. + + +## Requirements + +- Python 3.7+ +- numpy >= 1.21.0 +- PGPLOT or Giza libraries installed +- X11 development libraries + +## Installation + +In principle, this extension should build out-of-the-box in a Python `venv`. +The [`pyproject.toml`](pyproject.toml) file lists all dependencies and should (...) pull them into the `venv` as required for building/deploying: + +```bash +$> cd /path/to/checkout/of/this/repo +$> pip install [-e] . +``` + +Without `-e` installs the extension in the `venv`, with the `-e` keeps the module in the current directory. + + +## Using a bespoke PGPLOT or Giza backend + +The extension configuration allows compiling + linking to a locally compiled [PGPLOT](https://sites.astro.caltech.edu/~tjp/pgplot/) or [Giza](https://github.com/danieljprice/giza) library. + + +Obviously, first install or build PGPLOT and/or Giza on your system (should you want to compare them). +Then build the extension, pointing the `PGPLOT_DIR` environment variable to the installation directory of the backend of choice: + +```bash +$> PGPLOT_DIR=/path/to/pgplot pip install [-e] . +``` + +## Notes + +FORTRAN? Srsly? Actually, for plotting large numbers of points or simple, yet precise control of the graphics, the FORTRAN based PGPLOT backend is still a lot faster than most tools like `matplotlib` or `Giza` + +The `Giza` backend is an amazing job done, but it is [not 100% compatible with the original PGPLOT](https://danieljprice.github.io/giza/documentation/pgplot.html), so it is not guaranteed your plots will come out identical. + +This fork owes a lot of thanks to the original author of `ppgplot`: + https://github.com/npat-efault/ppgplot + From 2a4dead60ad5a08bb61cd14033c6f35416e04c83 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 7 Feb 2025 17:04:05 +0100 Subject: [PATCH 14/62] Fix Numpy API warnings, compiler warnings The Numpy API stuff kept on warning, hopefully now defaults to using API that's installed on the current system w/o warning. 
Also gcc-12.2.0 (Debian 12.2.0-14) didn't like some of the pointer
conversions in "tofloatmat()" so they were fixed
---
 src/_ppgplot.c | 10 +++++++---
 1 file changed, 7 insertions(+), 3 deletions(-)

diff --git a/src/_ppgplot.c b/src/_ppgplot.c
index 6b826dd..6797687 100644
--- a/src/_ppgplot.c
+++ b/src/_ppgplot.c
@@ -22,7 +22,11 @@
 #include 

 /* It's 2025, we only support numpy anymore */
-#include 
+#ifndef NPY_TARGET_VERSION
+    #define NPY_TARGET_VERSION NPY_API_VERSION
+#endif
+#define NPY_NO_DEPRECATED_API NPY_TARGET_VERSION/*NPY_1_7_API_VERSION*/
+#include 
 #include 

 /************************************************************************/
@@ -119,7 +123,7 @@ tofloatvector (PyObject *o, float **v, int *vsz)

 /*************************************************************************/
 static PyObject *
-tofloatmat(PyObject *o, float ***m, int *nr, int* nc)
+tofloatmat(PyObject *o, float **m, int *nr, int* nc)
 {
     /* Set up for transforming to array of floats */
     int const requirements = NPY_ARRAY_FORCECAST|NPY_ARRAY_C_CONTIGUOUS|NPY_ARRAY_ALIGNED;
@@ -153,7 +157,7 @@ tofloatmat(PyObject *o, float ***m, int *nr, int* nc)

     /* af1 now points at a new array object.
      * Ask the library to transform it into a C-Array */
-    if( PyArray_AsCArray((PyObject **)&af, (void *)m, &dims, 2, descr) == -1) {
+    if( PyArray_AsCArray((PyObject **)&af, (void *)m, &dims[0], 2, descr) == -1) {
         PyErr_SetString(PpgTYPEErr, "cannot cast array to C-array of floats");
         return NULL;
     }

From 139948217e18b7d75f9eb071bd95168bdc2149a6 Mon Sep 17 00:00:00 2001
From: Marjolein Verkouter 
Date: Fri, 7 Feb 2025 17:06:03 +0100
Subject: [PATCH 15/62] Fix pkgconfig.configure_extension() breakage

The [pkgconfig.configure_extension()](https://pypi.org/project/pkgconfig/)
is quite convenient except when it isn't. On Deb12.2.0-14 it was adding
an empty string to the extra_compile_args:

    extension.extra_compile_args = [ '' ]

Which tricked the system into a command that looked like (irrelevant
info stripped):

    /path/to/C-compiler [options] -o build/.../_ppgplot.o ""

Note the literal '""' at the end of the command; it resulted in the GCC
compiler deciding "Not performing linking because linker command file
cannot be found" (or words to that effect). Stripping the empty string
fixed it
---
 setup.py | 27 ++++++++++++++++++++++++++-
 1 file changed, 26 insertions(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 6059d8a..14527bf 100644
--- a/setup.py
+++ b/setup.py
@@ -2,14 +2,17 @@
 import os
 import sys
 import platform
+import operator
 # these deps are listed in pyproject.toml so should be able to import w/o probs
 import numpy
 import pkgconfig
 
 
 def add_pgplot_from_giza(ext):
-    # Very convenient
+    # Very convenient - but also breaks the build on Linux (Deb12) *sigh*
+    # adds an empty string [''] to ext.extra_compile_args
     pkgconfig.configure_extension(ext, 'giza', static=True)
+    ext.extra_compile_args = list( filter(operator.truth, ext.extra_compile_args) )
     # But not sufficient ...
     ext.libraries.extend( ['cpgplot', 'pgplot'] )
     return ext
@@ -57,6 +60,26 @@ def add_X11(ext):
     )
     return ext
 
+def print_config(ext):
+    print("===> Extension contents")
+    print(f"\tname = {ext.name}")
+    print(f"\tsources = {ext.sources}")
+    print(f"\tlibraries = {ext.libraries}")
+    print(f"\tdefine_macros = {ext.define_macros}")
+    print(f"\tundef_macros = {ext.undef_macros}")
+    print(f"\tlibrary_dirs = {ext.library_dirs}")
+    print(f"\tinclude_dirs = {ext.include_dirs}")
+    print(f"\textra_link_args = {ext.extra_link_args}")
+    print(f"\truntime_library_dirs = {ext.runtime_library_dirs}")
+    print(f"\textra_objects = {ext.extra_objects}")
+    print(f"\textra_compile_args = {ext.extra_compile_args}")
+    print(f"\texport_symbols = {ext.export_symbols}")
+    print(f"\tswig_opts = {ext.swig_opts}")
+    print(f"\tdepends = {ext.depends}")
+    print(f"\tlanguage = {ext.language}")
+    print(f"\toptional = {ext.optional}")
+    print(f"\tpy_limited_api = {ext.py_limited_api}")
+    return ext
 
 # This is the main Extension configuration step
 # We go over the dependencies, each of which
@@ -76,6 +99,8 @@ def set_extension_config(ext):
         add_pgplot_from_pgplot_dir(ext, pgplot_dir)
     else:
         add_pgplot_from_giza(ext)
+    # uncomment and run "pip -v install [-e] ." to see output
+    #print_config(ext)
     return ext
 
 ###########################################################

From ebb66e95e6f490bc9ac4550638787a187bc64d79 Mon Sep 17 00:00:00 2001
From: Marjolein Verkouter 
Date: Fri, 7 Feb 2025 18:00:29 +0100
Subject: [PATCH 16/62] Update README

Document the external dependencies the user has to install manually
---
 README.md | 24 +++++++++++++++++++-----
 1 file changed, 19 insertions(+), 5 deletions(-)

diff --git a/README.md b/README.md
index 84dc765..67e6eed 100644
--- a/README.md
+++ b/README.md
@@ -18,10 +18,23 @@ Currently, as the extension is not in PyPI, you're installing it into an "extern
 - numpy >= 1.21.0
 - PGPLOT or Giza libraries installed
 - X11 development libraries
+- pkg-config
+
+### Installing the dependencies
+
+On Linux use your favourite package manager, e.g.:
+```bash
+$> sudo apt-get install giza-dev libx11-dev pkg-config
+```
+
+Successful installation using [Homebrew](https://brew.sh) on Mac OSX with:
+```bash
+$> brew install libx11 giza pkgconf
+```
 
 ## Installation
 
-In principle, this extension should build out-of-the-box in a Python `venv`.
+In principle, this extension should build out-of-the-box in a Python `venv`, or, if you have it, a `conda` virtual environment (untested at the moment).
 The [`pyproject.toml`](pyproject.toml) file lists all dependencies and should (...) pull them into the `venv` as required for building/deploying:
 ```bash
 $> cd /path/to/checkout/of/this/repo
@@ -46,10 +59,11 @@ $> PGPLOT_DIR=/path/to/pgplot pip install [-e] .
 
 ## Notes
 
-FORTRAN? Srsly? Actually, for plotting large numbers of points or simple, yet precise control of the graphics, the FORTRAN based PGPLOT backend is still a lot faster than most tools like `matplotlib` or `Giza`
+FORTRAN? Srsly? Actually, for plotting large numbers of points or simple, yet precise control of the graphics, the FORTRAN based PGPLOT backend is convenient and _fast_ (a _lot_ faster than `matplotlib`, and still noticeably faster than `Giza`). However, the upside of investing those compute cycles is that the (anti-aliased!) fonts and graphics produced by the [`cairo`](https://www.cairographics.org) library (the _actual_ graphics backend used by `Giza`) are of an amazing quality.
-The `Giza` backend is an amazing job done, but it is [not 100% compatible with the original PGPLOT](https://danieljprice.github.io/giza/documentation/pgplot.html), so it is not guaranteed your plots will come out identical. +If `ppgplot` is linked against the `Giza` library, it can produce output in `.png` and `.pdf`, also not something to be sneezed at. -This fork owes a lot of thanks to the original author of `ppgplot`: - https://github.com/npat-efault/ppgplot +All in all, the `Giza` backend is an amazing job done, but it is [not 100% compatible with the original PGPLOT](https://danieljprice.github.io/giza/documentation/pgplot.html), so it is not guaranteed your plots will come out identical. +This fork of the Python-extension owes a lot of thanks to the original author of `ppgplot`: + https://github.com/npat-efault/ppgplot From dbecf7e64936bc44d2d63bfc9c7563dc57242667 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 7 Feb 2025 18:20:00 +0100 Subject: [PATCH 17/62] Modernized numpy examples --- examples/numpy_ex_cont.py | 4 ++-- examples/numpy_ex_graph.py | 4 ++-- examples/numpy_ex_panel.py | 18 +++++++++--------- examples/numpy_ex_sierp.py | 4 ++-- 4 files changed, 15 insertions(+), 15 deletions(-) diff --git a/examples/numpy_ex_cont.py b/examples/numpy_ex_cont.py index e5dc38b..6a78aef 100644 --- a/examples/numpy_ex_cont.py +++ b/examples/numpy_ex_cont.py @@ -12,10 +12,10 @@ pgiden() # put user-name and date on plot. # calculate a suitable function. -surf = zeros([40,40],Float32) +surf = zeros([40,40], dtype=float32) for i in range(1,41): for j in range(1,41): - surf[i-1,j-1] = cos(.3*sqrt(2*i) - .4*j/3)*cos(.4*i/3) + (i-j)/40.0 + surf[i-1,j-1] = cos(.3*sqrt(2*i) - .4*j/3)*cos(.4*i/3) + (i-j)/40.0 mns, mxs = min(ravel(surf)), max(ravel(surf)) diff --git a/examples/numpy_ex_graph.py b/examples/numpy_ex_graph.py index 01a739b..c8f48fb 100644 --- a/examples/numpy_ex_graph.py +++ b/examples/numpy_ex_graph.py @@ -6,7 +6,7 @@ import sys # create an array -xs=numpy.array([1.,2.,3.,4.,5.]) +xs=[1.,2.,3.,4.,5.] ys=numpy.array([1.,4.,9.,16.,25.]) # creat another array @@ -20,7 +20,7 @@ else: ppgplot.pgopen('?') ppgplot.pgenv(0.,10.,0.,20.,0,1) -ppgplot.pglab('(x)', '(y)', 'PGPLOT Example 1: y = x\u2') +ppgplot.pglab('(x)', '(y)', r'PGPLOT Example 1: y = x\u2') ppgplot.pgpt(xs,ys,9) ppgplot.pgline(xr,yr) ppgplot.pgclos() diff --git a/examples/numpy_ex_panel.py b/examples/numpy_ex_panel.py index 255600e..16d2666 100644 --- a/examples/numpy_ex_panel.py +++ b/examples/numpy_ex_panel.py @@ -19,8 +19,8 @@ def fixenv (xrange=[0,1], yrange=[0,1], fname="none", ci = 2): # calculate some suitable functions. x = arange(0.01,6*pi,0.1) -y = zeros([2,2,x.shape[0]],Float64) -label = zeros([2,2],PyObject) +y = zeros([2,2,x.shape[0]], dtype=float64) +label = zeros([2,2], dtype=str) y[0,0] = sin(2*x)/x label[0,0] = "sin(2*x)/x" y[1,0] = sin(2*x) @@ -33,13 +33,13 @@ def fixenv (xrange=[0,1], yrange=[0,1], fname="none", ci = 2): # do the plotting for i in range(2): for j in range(2): - pgpanl(i+1,j+1) - fixenv([0.0,6*pi],[min(y[i,j]),max(y[i,j])],label[i,j], i*2+j+2) - pgslw(6); # set line-width to 6/201. - pgsls(i*2+j+1) # set the line style. - pgline(x,y[i,j]) # plot the line. - pgsls(1); # recall line-style - pgslw(1); # recall line-width + pgpanl(i+1,j+1) + fixenv([0.0,6*pi],[min(y[i,j]),max(y[i,j])],label[i,j], i*2+j+2) + pgslw(6); # set line-width to 6/201. + pgsls(i*2+j+1) # set the line style. + pgline(x,y[i,j]) # plot the line. 
+ pgsls(1); # recall line-style + pgslw(1); # recall line-width #close the plot. pgend() diff --git a/examples/numpy_ex_sierp.py b/examples/numpy_ex_sierp.py index 9dabef7..d51add4 100644 --- a/examples/numpy_ex_sierp.py +++ b/examples/numpy_ex_sierp.py @@ -8,7 +8,7 @@ def drawtriangle (p1, p2, p3, i): if (i > 5) : - return + return l = sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2) pgmove(p1[0], p1[1]) pgdraw(p2[0], p2[1]) @@ -31,7 +31,7 @@ def drawtriangle (p1, p2, p3, i): p2 = [l,0] p3 = [cos(pi/3), sin(pi/3)] -print p3 +print(p3) pgbeg('?') pgask(1) From cd6feac1432538a9aa19b63dcebec3f3f09a832c Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 7 Feb 2025 18:36:35 +0100 Subject: [PATCH 18/62] Need to incref the array descriptor too --- src/_ppgplot.c | 6 ++++-- 1 file changed, 4 insertions(+), 2 deletions(-) diff --git a/src/_ppgplot.c b/src/_ppgplot.c index 6797687..00be2a9 100644 --- a/src/_ppgplot.c +++ b/src/_ppgplot.c @@ -115,7 +115,8 @@ tofloatvector (PyObject *o, float **v, int *vsz) return NULL; } *vsz = dims; - /* Tell the system we have this object */ + /* Tell the system we have this object and the data descriptor */ + Py_INCREF(descr); Py_INCREF(af); return (PyObject *)af; } @@ -163,7 +164,8 @@ tofloatmat(PyObject *o, float **m, int *nr, int* nc) } *nr = dims[0]; *nc = dims[1]; - /* Tell the system we have this object */ + /* Tell the system we have this object and the data type descriptor */ + Py_INCREF(descr); Py_INCREF(af); return (PyObject *)af; } From 22976ed7a528cd16720b3e8cd5b8fe9245738ea3 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 7 Feb 2025 18:44:51 +0100 Subject: [PATCH 19/62] Fixed up numpy examples, deleted others Those were examples for "numarray" and "Numeric" - very very very old stuff --- examples/ex_arro.py | 26 --------------------- examples/ex_cont.py | 36 ----------------------------- examples/ex_graph.py | 26 --------------------- examples/ex_panel.py | 46 ------------------------------------- examples/ex_sierp.py | 47 -------------------------------------- examples/na_ex_arro.py | 26 --------------------- examples/na_ex_cont.py | 36 ----------------------------- examples/na_ex_graph.py | 26 --------------------- examples/na_ex_panel.py | 46 ------------------------------------- examples/na_ex_sierp.py | 47 -------------------------------------- examples/numpy_ex_arro.py | 8 +++---- examples/numpy_ex_cont.py | 11 ++++----- examples/numpy_ex_graph.py | 7 +++--- examples/numpy_ex_panel.py | 18 +++++++-------- examples/numpy_ex_sierp.py | 10 ++++---- 15 files changed, 27 insertions(+), 389 deletions(-) delete mode 100644 examples/ex_arro.py delete mode 100644 examples/ex_cont.py delete mode 100644 examples/ex_graph.py delete mode 100644 examples/ex_panel.py delete mode 100644 examples/ex_sierp.py delete mode 100644 examples/na_ex_arro.py delete mode 100644 examples/na_ex_cont.py delete mode 100644 examples/na_ex_graph.py delete mode 100644 examples/na_ex_panel.py delete mode 100644 examples/na_ex_sierp.py diff --git a/examples/ex_arro.py b/examples/ex_arro.py deleted file mode 100644 index 0ea88bf..0000000 --- a/examples/ex_arro.py +++ /dev/null @@ -1,26 +0,0 @@ -#/usr/bin/env python - -from Numeric import * -from ppgplot import * - -# initialize ploting. -pgbeg("?",1,1) # open ploting device -pgask(1) # wait for user to press a key before erasing. -pgswin(-10,10,-10,10) # set axis ranges. - # label the plot. -pgiden() # put user-name and date on plot. - -# calculate a suitable function. 
-f = arange(0,2*pi,0.25) -fx = cos(f); -fy = sin(f); - -for i in range(f.shape[0]): - pgslw(i%10+1) # set line-width - pgsls(i%5+1) # set line-style - pgsci(i%15+1) # set color-index - pgarro(fx[i],fy[i],10*fx[i],10*fy[i]) - -#close the plot. -pgend() - diff --git a/examples/ex_cont.py b/examples/ex_cont.py deleted file mode 100644 index d83777f..0000000 --- a/examples/ex_cont.py +++ /dev/null @@ -1,36 +0,0 @@ -#/usr/bin/env python - -from Numeric import * -from ppgplot import * - -# initialize ploting. -pgbeg("?",1,1) # open ploting device -pgask(1) # wait for user to press a key before erasing. -pgenv(1,40,1,40) # set axis ranges, and draw axes. - # label the plot. -pglab("x","y","z = cos(.3*sqrt(2*x) - .4*y/3)*cos(.4*x/3) + (x-y)/40.0") -pgiden() # put user-name and date on plot. - -# calculate a suitable function. -surf = zeros([40,40],Float32) -for i in range(1,41): - for j in range(1,41): - surf[i-1,j-1] = cos(.3*sqrt(2*i) - .4*j/3)*cos(.4*i/3) + (i-j)/40.0 -mns, mxs = min(ravel(surf)), max(ravel(surf)) - - -# do the ploting. -pggray_s(surf) # image map of the array surf. -pgsci(2) # change color index to 2 (red). -pgcont_s(surf,10) # trace 10 contours on array surf. -pgsci(3) # set color index to 3 (green). -for i in range(10): # label the contours. - c = mns + i*((mxs - mns) / (10-1)) - pgconl_s(surf,c,str(i)) -pgsci(1) # set colndx back to 1 (white) - # plot a wedge to the right of the image. -pgwedg_s(max(ravel(surf)),min(ravel(surf)), "RG") - -#close the plot. -pgend() - diff --git a/examples/ex_graph.py b/examples/ex_graph.py deleted file mode 100644 index 63ef184..0000000 --- a/examples/ex_graph.py +++ /dev/null @@ -1,26 +0,0 @@ -#/usr/bin/env python -# -# pgex1: freely taken after PGDEMO1.F -# -import ppgplot, Numeric -import sys - -# create an array -xs=Numeric.array([1.,2.,3.,4.,5.]) -ys=Numeric.array([1.,4.,9.,16.,25.]) - -# creat another array -yr = 0.1*Numeric.array(range(0,60)) -xr = yr*yr - - -# pgplotting -if len(sys.argv) > 1: # if we got an argument use the argument as devicename - ppgplot.pgopen(sys.argv[1]) -else: - ppgplot.pgopen('?') -ppgplot.pgenv(0.,10.,0.,20.,0,1) -ppgplot.pglab('(x)', '(y)', 'PGPLOT Example 1: y = x\u2') -ppgplot.pgpt(xs,ys,9) -ppgplot.pgline(xr,yr) -ppgplot.pgclos() diff --git a/examples/ex_panel.py b/examples/ex_panel.py deleted file mode 100644 index 3f36e5f..0000000 --- a/examples/ex_panel.py +++ /dev/null @@ -1,46 +0,0 @@ -#/usr/bin/env python - -from Numeric import * -from ppgplot import * - -def fixenv (xrange=[0,1], yrange=[0,1], fname="none", ci = 2): - # set axis ranges. - pgswin(xrange[0],xrange[1],yrange[0],yrange[1]) - pgsci(ci) # set color index. - pgbox() # draw axes. - pgsci(1) # back to color index 1 (white) - pglab("x","y",fname) # label the plot - - -# initialize ploting. -pgbeg("?",2,2) # open ploting device (2x2 pannels) -pgiden() # put user-name and date on plot. -pgask(1) # wait for user to press a key before erasing. - -# calculate some suitable functions. -x = arange(0.01,6*pi,0.1) -y = zeros([2,2,x.shape[0]],Float64) -label = zeros([2,2],PyObject) -y[0,0] = sin(2*x)/x -label[0,0] = "sin(2*x)/x" -y[1,0] = sin(2*x) -label[1,0] = "sin(2*x)" -y[0,1] = x*sin(2*x) -label[0,1] = "x*sin(2*x)" -y[1,1] = sin(x) + sin(2*x) + sin(3*x) -label[1,1] = "sin(x) + sin(2*x) + sin(3*x)" - -# do the plotting -for i in range(2): - for j in range(2): - pgpanl(i+1,j+1) - fixenv([0.0,6*pi],[min(y[i,j]),max(y[i,j])],label[i,j], i*2+j+2) - pgslw(6); # set line-width to 6/201. - pgsls(i*2+j+1) # set the line style. 
- pgline(x,y[i,j]) # plot the line. - pgsls(1); # recall line-style - pgslw(1); # recall line-width - -#close the plot. -pgend() - diff --git a/examples/ex_sierp.py b/examples/ex_sierp.py deleted file mode 100644 index 589dcc7..0000000 --- a/examples/ex_sierp.py +++ /dev/null @@ -1,47 +0,0 @@ -#/usr/bin/env python - -from Numeric import * -from ppgplot import * - -s602 = sin(pi/3) / 2 -c602 = cos(pi/3) / 2 - -def drawtriangle (p1, p2, p3, i): - if (i > 5) : - return - l = sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2) - pgmove(p1[0], p1[1]) - pgdraw(p2[0], p2[1]) - pgdraw(p3[0], p3[1]) - pgdraw(p1[0], p1[1]) - - pgsci(1) - drawtriangle(p1, [p1[0] + l/2, p1[1]], \ - [p1[0] + l*c602, p1[1] + l*s602], i+1) - pgsci(2) - drawtriangle([p2[0] - l/2, p2[1]], p2, \ - [p2[0] - l*c602, p2[1] + l*s602], i+1) - pgsci(3) - drawtriangle([p3[0] - l*c602, p3[1] - l*s602], \ - [p3[0] + l*c602, p3[1] - l*s602], p3, i+1) - - -l = 1 -p1 = [0,0] -p2 = [l,0] -p3 = [cos(pi/3), sin(pi/3)] - -print p3 - -pgbeg('?') -pgask(1) -pgenv(0,1,0,1) -pgslw(5) -pgsci(1) - - -drawtriangle(p1,p2,p3,0) -pgend() - - - diff --git a/examples/na_ex_arro.py b/examples/na_ex_arro.py deleted file mode 100644 index f18feee..0000000 --- a/examples/na_ex_arro.py +++ /dev/null @@ -1,26 +0,0 @@ -#/usr/bin/env python - -from numarray import * -from ppgplot import * - -# initialize ploting. -pgbeg("?",1,1) # open ploting device -pgask(1) # wait for user to press a key before erasing. -pgswin(-10,10,-10,10) # set axis ranges. - # label the plot. -pgiden() # put user-name and date on plot. - -# calculate a suitable function. -f = arange(0,2*pi,0.25) -fx = cos(f); -fy = sin(f); - -for i in range(f.shape[0]): - pgslw(i%10+1) # set line-width - pgsls(i%5+1) # set line-style - pgsci(i%15+1) # set color-index - pgarro(fx[i],fy[i],10*fx[i],10*fy[i]) - -#close the plot. -pgend() - diff --git a/examples/na_ex_cont.py b/examples/na_ex_cont.py deleted file mode 100644 index 819fc12..0000000 --- a/examples/na_ex_cont.py +++ /dev/null @@ -1,36 +0,0 @@ -#/usr/bin/env python - -from numarray import * -from ppgplot import * - -# initialize ploting. -pgbeg("?",1,1) # open ploting device -pgask(1) # wait for user to press a key before erasing. -pgenv(1,40,1,40) # set axis ranges, and draw axes. - # label the plot. -pglab("x","y","z = cos(.3*sqrt(2*x) - .4*y/3)*cos(.4*x/3) + (x-y)/40.0") -pgiden() # put user-name and date on plot. - -# calculate a suitable function. -surf = zeros([40,40],Float32) -for i in range(1,41): - for j in range(1,41): - surf[i-1,j-1] = cos(.3*sqrt(2*i) - .4*j/3)*cos(.4*i/3) + (i-j)/40.0 -mns, mxs = min(ravel(surf)), max(ravel(surf)) - - -# do the ploting. -pggray_s(surf) # image map of the array surf. -pgsci(2) # change color index to 2 (red). -pgcont_s(surf,10) # trace 10 contours on array surf. -pgsci(3) # set color index to 3 (green). -for i in range(10): # label the contours. - c = mns + i*((mxs - mns) / (10-1)) - pgconl_s(surf,c,str(i)) -pgsci(1) # set colndx back to 1 (white) - # plot a wedge to the right of the image. -pgwedg_s(max(ravel(surf)),min(ravel(surf)), "RG") - -#close the plot. 
-pgend() - diff --git a/examples/na_ex_graph.py b/examples/na_ex_graph.py deleted file mode 100644 index 462bd01..0000000 --- a/examples/na_ex_graph.py +++ /dev/null @@ -1,26 +0,0 @@ -#/usr/bin/env python -# -# pgex1: freely taken after PGDEMO1.F -# -import ppgplot, numarray -import sys - -# create an array -xs=numarray.array([1.,2.,3.,4.,5.]) -ys=numarray.array([1.,4.,9.,16.,25.]) - -# creat another array -yr = 0.1*numarray.array(range(0,60)) -xr = yr*yr - - -# pgplotting -if len(sys.argv) > 1: # if we got an argument use the argument as devicename - ppgplot.pgopen(sys.argv[1]) -else: - ppgplot.pgopen('?') -ppgplot.pgenv(0.,10.,0.,20.,0,1) -ppgplot.pglab('(x)', '(y)', 'PGPLOT Example 1: y = x\u2') -ppgplot.pgpt(xs,ys,9) -ppgplot.pgline(xr,yr) -ppgplot.pgclos() diff --git a/examples/na_ex_panel.py b/examples/na_ex_panel.py deleted file mode 100644 index 8208e90..0000000 --- a/examples/na_ex_panel.py +++ /dev/null @@ -1,46 +0,0 @@ -#/usr/bin/env python - -from numarray import * -from ppgplot import * - -def fixenv (xrange=[0,1], yrange=[0,1], fname="none", ci = 2): - # set axis ranges. - pgswin(xrange[0],xrange[1],yrange[0],yrange[1]) - pgsci(ci) # set color index. - pgbox() # draw axes. - pgsci(1) # back to color index 1 (white) - pglab("x","y",fname) # label the plot - - -# initialize ploting. -pgbeg("?",2,2) # open ploting device (2x2 pannels) -pgiden() # put user-name and date on plot. -pgask(1) # wait for user to press a key before erasing. - -# calculate some suitable functions. -x = arange(0.01,6*pi,0.1) -y = zeros([2,2,x.shape[0]],Float64) -label = zeros([2,2],PyObject) -y[0,0] = sin(2*x)/x -label[0,0] = "sin(2*x)/x" -y[1,0] = sin(2*x) -label[1,0] = "sin(2*x)" -y[0,1] = x*sin(2*x) -label[0,1] = "x*sin(2*x)" -y[1,1] = sin(x) + sin(2*x) + sin(3*x) -label[1,1] = "sin(x) + sin(2*x) + sin(3*x)" - -# do the plotting -for i in range(2): - for j in range(2): - pgpanl(i+1,j+1) - fixenv([0.0,6*pi],[min(y[i,j]),max(y[i,j])],label[i,j], i*2+j+2) - pgslw(6); # set line-width to 6/201. - pgsls(i*2+j+1) # set the line style. - pgline(x,y[i,j]) # plot the line. - pgsls(1); # recall line-style - pgslw(1); # recall line-width - -#close the plot. -pgend() - diff --git a/examples/na_ex_sierp.py b/examples/na_ex_sierp.py deleted file mode 100644 index 5282ac5..0000000 --- a/examples/na_ex_sierp.py +++ /dev/null @@ -1,47 +0,0 @@ -#/usr/bin/env python - -from numarray import * -from ppgplot import * - -s602 = sin(pi/3) / 2 -c602 = cos(pi/3) / 2 - -def drawtriangle (p1, p2, p3, i): - if (i > 5) : - return - l = sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2) - pgmove(p1[0], p1[1]) - pgdraw(p2[0], p2[1]) - pgdraw(p3[0], p3[1]) - pgdraw(p1[0], p1[1]) - - pgsci(1) - drawtriangle(p1, [p1[0] + l/2, p1[1]], \ - [p1[0] + l*c602, p1[1] + l*s602], i+1) - pgsci(2) - drawtriangle([p2[0] - l/2, p2[1]], p2, \ - [p2[0] - l*c602, p2[1] + l*s602], i+1) - pgsci(3) - drawtriangle([p3[0] - l*c602, p3[1] - l*s602], \ - [p3[0] + l*c602, p3[1] - l*s602], p3, i+1) - - -l = 1 -p1 = [0,0] -p2 = [l,0] -p3 = [cos(pi/3), sin(pi/3)] - -print p3 - -pgbeg('?') -pgask(1) -pgenv(0,1,0,1) -pgslw(5) -pgsci(1) - - -drawtriangle(p1,p2,p3,0) -pgend() - - - diff --git a/examples/numpy_ex_arro.py b/examples/numpy_ex_arro.py index e2e446c..7a27365 100644 --- a/examples/numpy_ex_arro.py +++ b/examples/numpy_ex_arro.py @@ -1,6 +1,6 @@ #/usr/bin/env python -from numpy import * +import numpy as np from ppgplot import * # initialize ploting. @@ -11,9 +11,9 @@ pgiden() # put user-name and date on plot. # calculate a suitable function. 
-f = arange(0,2*pi,0.25) -fx = cos(f); -fy = sin(f); +f = np.arange(0,2*np.pi,0.25) +fx = np.cos(f); +fy = np.sin(f); for i in range(f.shape[0]): pgslw(i%10+1) # set line-width diff --git a/examples/numpy_ex_cont.py b/examples/numpy_ex_cont.py index 6a78aef..99f5fac 100644 --- a/examples/numpy_ex_cont.py +++ b/examples/numpy_ex_cont.py @@ -1,6 +1,6 @@ #/usr/bin/env python -from numpy import * +import numpy as np from ppgplot import * # initialize ploting. @@ -12,11 +12,11 @@ pgiden() # put user-name and date on plot. # calculate a suitable function. -surf = zeros([40,40], dtype=float32) +surf = np.zeros([40,40], dtype=np.float32) for i in range(1,41): for j in range(1,41): - surf[i-1,j-1] = cos(.3*sqrt(2*i) - .4*j/3)*cos(.4*i/3) + (i-j)/40.0 -mns, mxs = min(ravel(surf)), max(ravel(surf)) + surf[i-1,j-1] = np.cos(.3*np.sqrt(2*i) - .4*j/3)*np.cos(.4*i/3) + (i-j)/40.0 +mns, mxs = min(np.ravel(surf)), max(np.ravel(surf)) # do the ploting. @@ -29,8 +29,7 @@ pgconl_s(surf,c,str(i)) pgsci(1) # set colndx back to 1 (white) # plot a wedge to the right of the image. -pgwedg_s(max(ravel(surf)),min(ravel(surf)), "RG") +pgwedg_s(max(np.ravel(surf)),min(np.ravel(surf)), "RG") #close the plot. pgend() - diff --git a/examples/numpy_ex_graph.py b/examples/numpy_ex_graph.py index c8f48fb..7502676 100644 --- a/examples/numpy_ex_graph.py +++ b/examples/numpy_ex_graph.py @@ -2,15 +2,16 @@ # # pgex1: freely taken after PGDEMO1.F # -import ppgplot, numpy +import ppgplot +import numpy as np import sys # create an array xs=[1.,2.,3.,4.,5.] -ys=numpy.array([1.,4.,9.,16.,25.]) +ys=np.array([1.,4.,9.,16.,25.]) # creat another array -yr = 0.1*numpy.array(range(0,60)) +yr = 0.1*np.array(range(0,60)) xr = yr*yr diff --git a/examples/numpy_ex_panel.py b/examples/numpy_ex_panel.py index 16d2666..7fb7384 100644 --- a/examples/numpy_ex_panel.py +++ b/examples/numpy_ex_panel.py @@ -1,6 +1,6 @@ #/usr/bin/env python -from numpy import * +import numpy as np from ppgplot import * def fixenv (xrange=[0,1], yrange=[0,1], fname="none", ci = 2): @@ -18,23 +18,23 @@ def fixenv (xrange=[0,1], yrange=[0,1], fname="none", ci = 2): pgask(1) # wait for user to press a key before erasing. # calculate some suitable functions. -x = arange(0.01,6*pi,0.1) -y = zeros([2,2,x.shape[0]], dtype=float64) -label = zeros([2,2], dtype=str) -y[0,0] = sin(2*x)/x +x = np.arange(0.01,6*np.pi,0.1) +y = np.zeros([2,2,x.shape[0]], dtype=np.float64) +label = np.zeros([2,2], dtype=str) +y[0,0] = np.sin(2*x)/x label[0,0] = "sin(2*x)/x" -y[1,0] = sin(2*x) +y[1,0] = np.sin(2*x) label[1,0] = "sin(2*x)" -y[0,1] = x*sin(2*x) +y[0,1] = x*np.sin(2*x) label[0,1] = "x*sin(2*x)" -y[1,1] = sin(x) + sin(2*x) + sin(3*x) +y[1,1] = np.sin(x) + np.sin(2*x) + np.sin(3*x) label[1,1] = "sin(x) + sin(2*x) + sin(3*x)" # do the plotting for i in range(2): for j in range(2): pgpanl(i+1,j+1) - fixenv([0.0,6*pi],[min(y[i,j]),max(y[i,j])],label[i,j], i*2+j+2) + fixenv([0.0,6*np.pi],[min(y[i,j]),max(y[i,j])],label[i,j], i*2+j+2) pgslw(6); # set line-width to 6/201. pgsls(i*2+j+1) # set the line style. pgline(x,y[i,j]) # plot the line. 
diff --git a/examples/numpy_ex_sierp.py b/examples/numpy_ex_sierp.py index d51add4..a0226ed 100644 --- a/examples/numpy_ex_sierp.py +++ b/examples/numpy_ex_sierp.py @@ -1,15 +1,15 @@ #/usr/bin/env python -from numpy import * +import math from ppgplot import * -s602 = sin(pi/3) / 2 -c602 = cos(pi/3) / 2 +s602 = math.sin(math.pi/3) / 2 +c602 = math.cos(math.pi/3) / 2 def drawtriangle (p1, p2, p3, i): if (i > 5) : return - l = sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2) + l = math.sqrt((p1[0] - p2[0])**2 + (p1[1] - p2[1])**2) pgmove(p1[0], p1[1]) pgdraw(p2[0], p2[1]) pgdraw(p3[0], p3[1]) @@ -29,7 +29,7 @@ def drawtriangle (p1, p2, p3, i): l = 1 p1 = [0,0] p2 = [l,0] -p3 = [cos(pi/3), sin(pi/3)] +p3 = [math.cos(math.pi/3), math.sin(math.pi/3)] print(p3) From ea4ddfd7d3f2c19deeea903de173cb77337d67ac Mon Sep 17 00:00:00 2001 From: haavee Date: Wed, 19 Feb 2025 15:01:04 +0100 Subject: [PATCH 20/62] Mention fixes + how-to branch for python 3.6 Trying to backport the new pip install -e . stuff onto a Py3.6 host, just for checking. Wasn't easy at all, so decided to write up the howto and at least mention it on the master branch. --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 67e6eed..6064572 100644 --- a/README.md +++ b/README.md @@ -11,6 +11,7 @@ manipulate vectors and matrices. Currently, as the extension is not in PyPI, you're installing it into an "externally managed environment". You may need to create a Python [`venv`](https://docs.python.org/3/library/venv.html) first in order to install the extension manually on your system. +Note: there is a [separate old-python-3.6 branch](https://github.com/haavee/ppgplot/tree/old-python-3.6) based off master, with a how-to in the commit log(s). Of course nothing works out of the box on that system - only succeeded using an (old) Anaconda3.6 base package. YMMV. ## Requirements From 70ae6a2e712bb4bb02647db494c6ae9185ef1838 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 15 Apr 2025 16:02:41 +0200 Subject: [PATCH 21/62] Version needs to be in pyproject, not just tag --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 5c7ff49..6b9d1f0 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "ppgplot" -version = "1.4" +version = "1.5" description = "Python bindings for PGPLOT" authors = [ {name = "Nick Patavalis", email = "npat@efault.net"}, From 6dae5f08123a2747082309a9df22d684be8496f9 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 22 Apr 2025 13:52:11 +0200 Subject: [PATCH 22/62] Initial mods to support PyPI - chose package name python-pgplot - updated README.me to reflect this --- README.md | 21 +++++++++++++++++++-- pyproject.toml | 2 +- 2 files changed, 20 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index 6064572..91214d1 100644 --- a/README.md +++ b/README.md @@ -9,9 +9,26 @@ written in Fortran by T. J. Pearson. C bindings for PGPLOT are also available. numarray modules, but nowadays (>= Feb 2025) replaced by Numpy, to efficiently represent and manipulate vectors and matrices. -Currently, as the extension is not in PyPI, you're installing it into an "externally managed environment". You may need to create a Python [`venv`](https://docs.python.org/3/library/venv.html) first in order to install the extension manually on your system. 
+## Installing -Note: there is a [separate old-python-3.6 branch](https://github.com/haavee/ppgplot/tree/old-python-3.6) based off master, with a how-to in the commit log(s). Of course nothing works out of the box on that system - only succeeded using an (old) Anaconda3.6 base package. YMMV. +Since `v1.5` (Apr 2025) the package should be `pip`-installable; it's a package on the [PyPI](https://pypi.org/project/python-pgplot/): + +```bash + $> pip install python-pgplot + $> python3 + >>> import ppgplot + >>> +``` +**NOTE: Due to a package name collision, the PyPI project name is `python-pgplot`; the obvious package name was already claimed by something completely different** + + +It is also possible to build the package from this `git`-repository. You may need to create a Python [`venv`](https://docs.python.org/3/library/venv.html) first. See below for detailed instructions. + +```bash + $> pip install [-e] . +``` + +**Note:** there is a [separate old-python-3.6 branch](https://github.com/haavee/ppgplot/tree/old-python-3.6) based off master, with a how-to in the commit log(s). Of course nothing works out of the box on that system - only succeeded using an (old) Anaconda3.6 base package. YMMV. ## Requirements diff --git a/pyproject.toml b/pyproject.toml index 6b9d1f0..2129e03 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -3,7 +3,7 @@ requires = ["setuptools>=45", "numpy>=1.21.0", "pkgconfig"] build-backend = "setuptools.build_meta" [project] -name = "ppgplot" +name = "python-pgplot" version = "1.5" description = "Python bindings for PGPLOT" authors = [ From 0a9ed3bb45beaaa0497d78b41fa3d704bb244533 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 22 Apr 2025 14:30:38 +0200 Subject: [PATCH 23/62] More preparations for PyPIing - Removed Nick Patavalis from author list; would be listed as contact author of project. I woulnd't have minded, but he doesn't seem to be contactable (when I proposed a PR years ago didn't get a response) - Updated README.md to list NickP as original author explicitly - Updated setup.py to build the extension with the explicit 'name="python-pgplot"' argument --- README.md | 2 +- pyproject.toml | 6 +++--- setup.py | 1 + 3 files changed, 5 insertions(+), 4 deletions(-) diff --git a/README.md b/README.md index 91214d1..66c5b66 100644 --- a/README.md +++ b/README.md @@ -83,5 +83,5 @@ If `ppgplot` is linked against the `Giza` library, it can produce output in `.pn All in all, the `Giza` backend is an amazing job done, but it is [not 100% compatible with the original PGPLOT](https://danieljprice.github.io/giza/documentation/pgplot.html), so it is not guaranteed your plots will come out identical. 
-This fork of the Python-extension owes a lot of thanks to the original author of `ppgplot`: +This fork of the Python-extension owes a lot of thanks to the original author, Nick Patavlis, of `ppgplot`: https://github.com/npat-efault/ppgplot diff --git a/pyproject.toml b/pyproject.toml index 2129e03..57fbb0b 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,14 +1,14 @@ [build-system] -requires = ["setuptools>=45", "numpy>=1.21.0", "pkgconfig"] +requires = ["setuptools>=45", "numpy>=1.21.0", "pkgconfig", "wheel", "cibuildwheel"] build-backend = "setuptools.build_meta" [project] name = "python-pgplot" -version = "1.5" +version = "1.5.1" description = "Python bindings for PGPLOT" authors = [ - {name = "Nick Patavalis", email = "npat@efault.net"}, {name = "Marjolein Verkouter", email = "verkouter@jive.eu"}, + #{name = "Nick Patavalis", email = "npat@efault.net"}, ] readme = "README.md" requires-python = ">=3.7" diff --git a/setup.py b/setup.py index 14527bf..2255368 100644 --- a/setup.py +++ b/setup.py @@ -107,6 +107,7 @@ def set_extension_config(ext): # This triggers the whole build # ########################################################### setup( + name="python-pgplot", ext_modules=[ set_extension_config( Extension('ppgplot._ppgplot', sources=[os.path.join('src', '_ppgplot.c')]) ), From 4df6415ced2a7b454966cce44299fe5a1d8fcca8 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 22 Apr 2025 15:17:19 +0200 Subject: [PATCH 24/62] pyproject vsn to 1.5.0-beta Because 1.5.0 is now already usurped by PyPI and cannot be reused, but we needed to fix some (meta)data settings. So 1.5.0 was deleted b/c it was 'wrong', but can't be reused anymore. --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 57fbb0b..796ba17 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "python-pgplot" -version = "1.5.1" +version = "1.5.0-beta" description = "Python bindings for PGPLOT" authors = [ {name = "Marjolein Verkouter", email = "verkouter@jive.eu"}, From 9119118ea76892338195addf3afe293649034098 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 22 Apr 2025 15:19:41 +0200 Subject: [PATCH 25/62] Add build_wheels & push-to-pypi workflows --- .github/workflows/pypi.yml | 27 +++++++++++++++++++++++++++ .github/workflows/wheels.yml | 36 ++++++++++++++++++++++++++++++++++++ 2 files changed, 63 insertions(+) create mode 100644 .github/workflows/pypi.yml create mode 100644 .github/workflows/wheels.yml diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml new file mode 100644 index 0000000..305e880 --- /dev/null +++ b/.github/workflows/pypi.yml @@ -0,0 +1,27 @@ + publish_pypi: + name: Publish to PyPI + needs: build_wheels + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Download built wheels + uses: actions/download-artifact@v4 + with: + name: built-wheels + path: dist/ + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install Twine + run: pip install twine + + - name: Publish to PyPI + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + run: twine upload dist/* diff --git a/.github/workflows/wheels.yml b/.github/workflows/wheels.yml new file mode 100644 index 0000000..3d6d33a --- /dev/null +++ b/.github/workflows/wheels.yml @@ -0,0 +1,36 @@ +name: Build and Publish Wheels + +on: + push: + tags: + - "v*.*.*" + +jobs: + build_wheels: + 
name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macos-latest, windows-latest] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install cibuildwheel + + - name: Build wheels + run: cibuildwheel --output-dir dist + + - name: Upload wheels + uses: actions/upload-artifact@v4 + with: + name: python-pgplot-wheels + path: dist/ From c7e395cd4504f11dbca00c87ba269b14e227161b Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 22 Apr 2025 15:25:22 +0200 Subject: [PATCH 26/62] Complaint "No event triggers defined in 'on'" --- .github/workflows/pypi.yml | 5 +++++ 1 file changed, 5 insertions(+) diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml index 305e880..6edfdcc 100644 --- a/.github/workflows/pypi.yml +++ b/.github/workflows/pypi.yml @@ -3,6 +3,11 @@ needs: build_wheels runs-on: ubuntu-latest + on: + push: + tags: + - "v*.*.*" + steps: - uses: actions/checkout@v4 From 7454013f3e6c9b652af7ecc198619f3940d62b61 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 22 Apr 2025 15:27:53 +0200 Subject: [PATCH 27/62] Maybe this helps? Does not seem to be recognized as Action by github? --- .github/workflows/{wheels.yml => build_wheels.yml} | 0 1 file changed, 0 insertions(+), 0 deletions(-) rename .github/workflows/{wheels.yml => build_wheels.yml} (100%) diff --git a/.github/workflows/wheels.yml b/.github/workflows/build_wheels.yml similarity index 100% rename from .github/workflows/wheels.yml rename to .github/workflows/build_wheels.yml From de96f871363ff4118a0a5779af71a09978789013 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 22 Apr 2025 15:31:19 +0200 Subject: [PATCH 28/62] Ah. Maybe upload_pypi is a step in build_wheels .. 
--- .github/workflows/build_wheels.yml | 28 ++++++++++++++++++++++++++ .github/workflows/pypi.yml | 32 ------------------------------ 2 files changed, 28 insertions(+), 32 deletions(-) delete mode 100644 .github/workflows/pypi.yml diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 3d6d33a..05bd39a 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -34,3 +34,31 @@ jobs: with: name: python-pgplot-wheels path: dist/ + + publish_pypi: + name: Publish to PyPI + needs: build_wheels + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Download built wheels + uses: actions/download-artifact@v4 + with: + name: built-wheels + path: dist/ + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install Twine + run: pip install twine + + - name: Publish to PyPI + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + run: twine upload dist/* diff --git a/.github/workflows/pypi.yml b/.github/workflows/pypi.yml deleted file mode 100644 index 6edfdcc..0000000 --- a/.github/workflows/pypi.yml +++ /dev/null @@ -1,32 +0,0 @@ - publish_pypi: - name: Publish to PyPI - needs: build_wheels - runs-on: ubuntu-latest - - on: - push: - tags: - - "v*.*.*" - - steps: - - uses: actions/checkout@v4 - - - name: Download built wheels - uses: actions/download-artifact@v4 - with: - name: built-wheels - path: dist/ - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install Twine - run: pip install twine - - - name: Publish to PyPI - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} - run: twine upload dist/* From 595b08dbf4badd36e984c0f9652f5be716b60016 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Wed, 6 Aug 2025 10:09:12 +0200 Subject: [PATCH 29/62] Separate workflows for build + publish on PyPI Previously there was only one (1) workflow defined, that matched pushing a tag "v*.*.*" and then builds + publishes to PyPI. But when debugging the build & publish process this make you run through a lot of tags (and possibly publishing them) before you get it right, because the PyPI versions tags have to be unique - you can't re-publish the same version again on PyPI, so each 'attempt' had to be a unique 'v*.*.*' version number ... Now two workflows: - one on each push, just builds the wheels - one that triggers on pushing a "v*.*.*" tag, which then builds and publishes on PyPI Now we can test the building process w/o creating unnecessary PyPI releases. 
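For reference, the practical difference between the two workflow files boils down to their trigger blocks; a minimal sketch of just the `on:` sections, extracted from the full workflow definitions in the diff below and shown here as two separate YAML documents:

```yaml
# .github/workflows/build_wheels.yml -- build-only, runs on every push
on:
  push
---
# .github/workflows/publish_to_pypi.yml -- build and publish, runs only on version tags
on:
  push:
    tags:
      - "v*.*.*"
```

The publish workflow additionally carries the publish_pypi job that uploads the built wheels to PyPI with Twine.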
--- .github/workflows/build_wheels.yml | 34 +------------- .github/workflows/publish_to_pypi.yml | 64 +++++++++++++++++++++++++++ 2 files changed, 66 insertions(+), 32 deletions(-) create mode 100644 .github/workflows/publish_to_pypi.yml diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 05bd39a..a4ab1d4 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -1,9 +1,7 @@ name: Build and Publish Wheels on: - push: - tags: - - "v*.*.*" + push jobs: build_wheels: @@ -11,7 +9,7 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest, macos-latest, windows-latest] + os: [ubuntu-latest , macos-latest, windows-latest] steps: - uses: actions/checkout@v4 @@ -34,31 +32,3 @@ jobs: with: name: python-pgplot-wheels path: dist/ - - publish_pypi: - name: Publish to PyPI - needs: build_wheels - runs-on: ubuntu-latest - - steps: - - uses: actions/checkout@v4 - - - name: Download built wheels - uses: actions/download-artifact@v4 - with: - name: built-wheels - path: dist/ - - - name: Set up Python - uses: actions/setup-python@v5 - with: - python-version: "3.11" - - - name: Install Twine - run: pip install twine - - - name: Publish to PyPI - env: - TWINE_USERNAME: __token__ - TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} - run: twine upload dist/* diff --git a/.github/workflows/publish_to_pypi.yml b/.github/workflows/publish_to_pypi.yml new file mode 100644 index 0000000..49cd597 --- /dev/null +++ b/.github/workflows/publish_to_pypi.yml @@ -0,0 +1,64 @@ +name: Build and Publish Wheels + +on: + push: + tags: + - "v*.*.*" + +jobs: + build_wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest ] #, macos-latest, windows-latest] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install cibuildwheel + + - name: Build wheels + run: cibuildwheel --output-dir dist + + - name: Upload wheels + uses: actions/upload-artifact@v4 + with: + name: python-pgplot-wheels + path: dist/ + + publish_pypi: + name: Publish to PyPI + needs: build_wheels + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Download built wheels + uses: actions/download-artifact@v4 + with: + name: built-wheels + path: dist/ + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install Twine + run: pip install twine + + - name: Publish to PyPI + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} + run: twine upload dist/* From 75d7a3b6051e9bd3de31c996d4879b08f71a27f2 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Wed, 6 Aug 2025 10:25:48 +0200 Subject: [PATCH 30/62] Fix duplicate workflow name --- .github/workflows/build_wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index a4ab1d4..f43062a 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -1,4 +1,4 @@ -name: Build and Publish Wheels +name: Build Wheels on: push From eb6bca160a14320d6260c8b6f342a89755f88b05 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Wed, 6 Aug 2025 11:40:52 +0200 Subject: [PATCH 31/62] Add installation of OS-level deps step The runner(s) were complaining that some (O/S level) deps were not 
available, so now added those --- .github/workflows/build_wheels.yml | 10 ++++++++++ 1 file changed, 10 insertions(+) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index f43062a..3bb4c63 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -14,6 +14,16 @@ jobs: steps: - uses: actions/checkout@v4 + - name: Install O/S level dependencies + run: | + if [ "$RUNNER_OS" == "Linux" ]; then + sudo apt-get install giza-dev libx11-dev pkg-config + elif [ "$RUNNER_OS" == "Windows" ]; then + echo "Really should find out how to install those here" + else + # must be MacOS then ... + brew install libx11 giza pkgconf + - name: Set up Python uses: actions/setup-python@v5 with: From d1c7dd21671689619b27049bc7065403c68f4a89 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Wed, 6 Aug 2025 11:44:32 +0200 Subject: [PATCH 32/62] Fixed "Syntax Error" - forgot "endif" --- .github/workflows/build_wheels.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 3bb4c63..a4f3dba 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -23,6 +23,7 @@ jobs: else # must be MacOS then ... brew install libx11 giza pkgconf + endif - name: Set up Python uses: actions/setup-python@v5 From dd2c0d16ac2d7a95273caacb9add227e95382135 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Wed, 6 Aug 2025 11:47:38 +0200 Subject: [PATCH 33/62] *sigh* I'm getting auld -it's "fi" in shell speak --- .github/workflows/build_wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index a4f3dba..70323ea 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -23,7 +23,7 @@ jobs: else # must be MacOS then ... brew install libx11 giza pkgconf - endif + fi - name: Set up Python uses: actions/setup-python@v5 From 968162a8aab5ceccf6d60e2eee20ce43e25b1c57 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Wed, 6 Aug 2025 12:21:37 +0200 Subject: [PATCH 34/62] Force use of "bash" as shell ... ... orelse the if ... ; then ... elseif ... fi don't work on windows-latest --- .github/workflows/build_wheels.yml | 1 + 1 file changed, 1 insertion(+) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 70323ea..e7af313 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -15,6 +15,7 @@ jobs: - uses: actions/checkout@v4 - name: Install O/S level dependencies + shell: bash run: | if [ "$RUNNER_OS" == "Linux" ]; then sudo apt-get install giza-dev libx11-dev pkg-config From 6de8190321796731b681c93f91f4acb1e8876eb1 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 11:16:22 +0200 Subject: [PATCH 35/62] Fix cibuildwheel step This took a lot of time to figure out; several issues had to be overcome. giza-dev dependency not found when trying to cibuildwheel --------------------------------------------------------- Linux: Installing the giza-dev dependency on the runner's host-O/S does not make it visible in the builder; cibuildwheel runs its builds into yet-other-containers (multiple) images; these days based on Alma Linux. That distro does not have giza-dev or giza-devel pkgs. Solution: add a manual build step for Linux to pull giza tarball release and build *inside* the build container (sheesh). 
Support using yum or apt-get to install system dependencies. See CIBW_BEFORE_ALL_LINUX. MacOS: the MacOS image has homebrew so can install deps there easily, no need to build giza ourselves; see CIBW_BEFORE_ALL_MACOS Follow advice from cibuild:repairwheel step: packages/delocate/delocating.py", line 925, in _check_and_update_wheel_name raise DelocationError( << many output skipped >> /private/var/folders/y6/nj790rtn62lfktb1sh__79hc0000gn/T/tmp7vjagw4l/wheel/ppgplot/.dylibs/libXext.6.dylib has a minimum target of 14.0 Set the environment variable 'MACOSX_DEPLOYMENT_TARGET=14.0' to update minimum supported macOS for this wheel. See CIBW_ENVIRONMENT_MACOS. Upload wheels step fails ------------------------ At the end of a macos-latest buid got this error in the "upload wheels" step: Error: Failed to CreateArtifact: Received non-retryable error: Failed request: (409) Conflict: an artifact with this name already exists on the workflow run Need to get separate artifacts per OS without name conflicts. Fixed by adding a build matrix variable to the artefact name: name: python-pgplot-wheels-${{ matrix.os }} Hardened build, add smoke-test ------------------------------ - restricted to "native" builds on MacOS, iso "universal" (homebrew don't do universal) - make PKG_CONFIG path persist so it will be retained in wheel - add smoke-test (CIBW_TEST_COMMAND) --- .github/workflows/build_wheels.yml | 46 +++++++++++++++++++++--------- 1 file changed, 33 insertions(+), 13 deletions(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index e7af313..50e4a61 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -9,22 +9,13 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest , macos-latest, windows-latest] + os: [ubuntu-latest, macos-latest] # add windows-latest later steps: - uses: actions/checkout@v4 - - name: Install O/S level dependencies - shell: bash - run: | - if [ "$RUNNER_OS" == "Linux" ]; then - sudo apt-get install giza-dev libx11-dev pkg-config - elif [ "$RUNNER_OS" == "Windows" ]; then - echo "Really should find out how to install those here" - else - # must be MacOS then ... 
- brew install libx11 giza pkgconf - fi + # Dependencies are now handled by cibuildwheel's before-all hooks + # No need to install them on the runner - name: Set up Python uses: actions/setup-python@v5 @@ -38,9 +29,38 @@ jobs: - name: Build wheels run: cibuildwheel --output-dir dist + env: + # Install system dependencies and build giza from source + # giza-devel is not available in AlmaLinux 8 EPEL, so we build from source + # Install build dependencies for RHEL/CentOS/AlmaLinux + # Fallback for Debian/Ubuntu systems + # Download and build giza from source + CIBW_BEFORE_ALL_LINUX: | + (which apt || which yum || which dnf) && + ((yum install -y gcc make cairo-devel libX11-devel pkgconfig wget tar gzip) || + (apt-get update && apt-get install -y gcc make libcairo2-dev libx11-dev pkg-config wget tar gzip)) && + cd /tmp && + wget https://github.com/danieljprice/giza/archive/refs/tags/v1.4.2.tar.gz && + tar -xzf v1.4.2.tar.gz && + cd giza-1.4.2 && + export CFLAGS=-fPIC && + ./configure --prefix=/usr/local && + make && + make install && + ldconfig + CIBW_BEFORE_ALL_MACOS: "brew install giza libx11 pkg-config" + CIBW_BEFORE_ALL_WINDOWS: "echo 'Windows support not implemented yet'" + # Ensure pkg-config and runtime linker can find giza + CIBW_ENVIRONMENT_LINUX: "PKG_CONFIG_PATH=/usr/local/lib/pkgconfig LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH" + CIBW_ENVIRONMENT_MACOS: "MACOSX_DEPLOYMENT_TARGET=14.0 PKG_CONFIG_PATH=$(brew --prefix)/lib/pkgconfig DYLD_FALLBACK_LIBRARY_PATH=$(brew --prefix)/lib:$DYLD_FALLBACK_LIBRARY_PATH" + # Avoid universal2 since Homebrew giza isn’t universal + CIBW_ARCHS_MACOS: "native" + # Smoke test to verify import/linking works inside each wheel env + CIBW_TEST_COMMAND: > + python -c 'import ppgplot; print("ok")' - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: python-pgplot-wheels + name: python-pgplot-wheels-${{ matrix.os }} path: dist/ From 27b7355393d2abd9e9935d5f3cc9d6343b151409 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 11:35:56 +0200 Subject: [PATCH 36/62] Find libX11 on macos The setup.py extension configurator would look for libX11 (fine) but didn't add the path to the library to the library search path for the linker. Fixed. --- setup.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/setup.py b/setup.py index 2255368..00c9ee2 100644 --- a/setup.py +++ b/setup.py @@ -56,7 +56,7 @@ def add_X11(ext): # Standard X11 library locations ext.library_dirs.extend( filter(os.path.isdir, - ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/", "/opt/X11/lib"]) + ["/usr/lib/x86_64-linux-gnu/", "/usr/X11R6/lib/", "/opt/X11/lib", "/opt/homebrew/lib"]) ) return ext From d1568bf284219a4533db3e4b48291b02a7a580b8 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 11:37:16 +0200 Subject: [PATCH 37/62] Disable some builds Don't even try to build on windows. Found out that there are cp314t? cibuildwheel builds that are based on "musllinux" (wtf?) That distro doesn't seem to have either of apt-get or yum, so would have to figure out how to install (system)dependencies there and then build giza again. 
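For the record: the musllinux images are Alpine-based, so a future before-all hook for them would presumably go through apk rather than yum/apt-get. A rough, untested sketch (the Alpine package names are assumptions and would need checking):

```bash
# Hypothetical CIBW_BEFORE_ALL_LINUX branch for musllinux (Alpine) images.
# Not used yet -- musllinux builds are simply skipped below instead.
apk add --no-cache gcc make musl-dev cairo-dev libx11-dev pkgconf wget tar gzip
# giza would then still have to be built from the release tarball, as above.
```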
Disable building on *musllinux* images for now --- pyproject.toml | 6 ++++++ 1 file changed, 6 insertions(+) diff --git a/pyproject.toml b/pyproject.toml index 796ba17..968a6fe 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -36,3 +36,9 @@ Homepage = "https://github.com/haavee/ppgplot" [tool.setuptools] packages = ["ppgplot"] package-dir = {"ppgplot" = "src"} + +[tool.cibuildwheel] +# Test that giza is available +before-build = "pkg-config --exists giza && echo 'giza found' || echo 'giza NOT found'" +# Not on Windhoos, nor on musllinux (wtf) +skip = ["*-win*", "*musllinux*" ] From 3df44f622352b9701c81ecd0fcf3011eca5fbf11 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 12:21:44 +0200 Subject: [PATCH 38/62] Replace build_wheels step in publish workflow In the build_wheels workflow we now have a working config; transplant it into the publish_to_pypi workflow --- .github/workflows/publish_to_pypi.yml | 36 +++++++++++++++++++++++++-- 1 file changed, 34 insertions(+), 2 deletions(-) diff --git a/.github/workflows/publish_to_pypi.yml b/.github/workflows/publish_to_pypi.yml index 49cd597..263978c 100644 --- a/.github/workflows/publish_to_pypi.yml +++ b/.github/workflows/publish_to_pypi.yml @@ -11,11 +11,14 @@ jobs: runs-on: ${{ matrix.os }} strategy: matrix: - os: [ubuntu-latest ] #, macos-latest, windows-latest] + os: [ubuntu-latest, macos-latest] # add windows-latest later steps: - uses: actions/checkout@v4 + # Dependencies are now handled by cibuildwheel's before-all hooks + # No need to install them on the runner + - name: Set up Python uses: actions/setup-python@v5 with: @@ -28,11 +31,40 @@ jobs: - name: Build wheels run: cibuildwheel --output-dir dist + env: + # Install system dependencies and build giza from source + # giza-devel is not available in AlmaLinux 8 EPEL, so we build from source + # Install build dependencies for RHEL/CentOS/AlmaLinux + # Fallback for Debian/Ubuntu systems + # Download and build giza from source + CIBW_BEFORE_ALL_LINUX: | + (which apt || which yum || which dnf) && + ((yum install -y gcc make cairo-devel libX11-devel pkgconfig wget tar gzip) || + (apt-get update && apt-get install -y gcc make libcairo2-dev libx11-dev pkg-config wget tar gzip)) && + cd /tmp && + wget https://github.com/danieljprice/giza/archive/refs/tags/v1.4.2.tar.gz && + tar -xzf v1.4.2.tar.gz && + cd giza-1.4.2 && + export CFLAGS=-fPIC && + ./configure --prefix=/usr/local && + make && + make install && + ldconfig + CIBW_BEFORE_ALL_MACOS: "brew install giza libx11 pkg-config" + CIBW_BEFORE_ALL_WINDOWS: "echo 'Windows support not implemented yet'" + # Ensure pkg-config and runtime linker can find giza + CIBW_ENVIRONMENT_LINUX: "PKG_CONFIG_PATH=/usr/local/lib/pkgconfig LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH" + CIBW_ENVIRONMENT_MACOS: "MACOSX_DEPLOYMENT_TARGET=14.0 PKG_CONFIG_PATH=$(brew --prefix)/lib/pkgconfig DYLD_FALLBACK_LIBRARY_PATH=$(brew --prefix)/lib:$DYLD_FALLBACK_LIBRARY_PATH" + # Avoid universal2 since Homebrew giza isn’t universal + CIBW_ARCHS_MACOS: "native" + # Smoke test to verify import/linking works inside each wheel env + CIBW_TEST_COMMAND: > + python -c 'import ppgplot; print("ok")' - name: Upload wheels uses: actions/upload-artifact@v4 with: - name: python-pgplot-wheels + name: python-pgplot-wheels-${{ matrix.os }} path: dist/ publish_pypi: From ec30b916c87ae0e9d4a6817b5a1cd3d0236d295a Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 13:06:16 +0200 Subject: [PATCH 39/62] Add sdist, collect+flatten artefacts, check The 
workflow was enhanced to: - download all artefacts ("dist/*") - flatten the files into one dir ("dist/") - clean up - adds a job that builds a source distribution - adds "twine check" to verify - publishes to pypi --- .github/workflows/publish_to_pypi.yml | 41 +++++++++++++++++++++++++-- 1 file changed, 38 insertions(+), 3 deletions(-) diff --git a/.github/workflows/publish_to_pypi.yml b/.github/workflows/publish_to_pypi.yml index 263978c..9ccb84e 100644 --- a/.github/workflows/publish_to_pypi.yml +++ b/.github/workflows/publish_to_pypi.yml @@ -67,20 +67,50 @@ jobs: name: python-pgplot-wheels-${{ matrix.os }} path: dist/ + build_sdist: + name: Build source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + pip install build + + - name: Build sdist + run: python -m build --sdist + + - name: Upload sdist + uses: actions/upload-artifact@v4 + with: + name: python-pgplot-sdist + path: dist/*.tar.gz + publish_pypi: name: Publish to PyPI - needs: build_wheels + needs: [build_wheels, build_sdist] runs-on: ubuntu-latest steps: - uses: actions/checkout@v4 - - name: Download built wheels + - name: Download all artifacts uses: actions/download-artifact@v4 with: - name: built-wheels path: dist/ + - name: Flatten artifacts + run: | + find dist/ -name "*.whl" -exec mv {} dist/ \; + find dist/ -name "*.tar.gz" -exec mv {} dist/ \; + find dist/ -mindepth 1 -type d -exec rm -rf {} + || true + - name: Set up Python uses: actions/setup-python@v5 with: @@ -89,6 +119,11 @@ jobs: - name: Install Twine run: pip install twine + - name: Verify distributions + run: | + ls -la dist/ + twine check dist/* + - name: Publish to PyPI env: TWINE_USERNAME: __token__ From dc8916619666a03f0e7ded3fffff1c5c3e1e6ba9 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 13:33:15 +0200 Subject: [PATCH 40/62] Only run manually, add building source dist --- .github/workflows/build_wheels.yml | 27 ++++++++++++++++++++++++++- 1 file changed, 26 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 50e4a61..9859803 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -1,7 +1,7 @@ name: Build Wheels on: - push + workflow_dispatch: jobs: build_wheels: @@ -64,3 +64,28 @@ jobs: with: name: python-pgplot-wheels-${{ matrix.os }} path: dist/ + + build_sdist: + name: Build source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + pip install build + + - name: Build sdist + run: python -m build --sdist + + - name: Upload sdist + uses: actions/upload-artifact@v4 + with: + name: python-pgplot-sdist + path: dist/*.tar.gz From fca657ff36e59249f325bc786a2e1e019511aa67 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 13:41:24 +0200 Subject: [PATCH 41/62] Gah Manual dispatch only works if the workflow is also on the default branch (which in our case it isn't (yet)) --- .github/workflows/build_wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 9859803..6dfec71 100644 --- 
a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -1,7 +1,7 @@ name: Build Wheels on: - workflow_dispatch: + push: jobs: build_wheels: From 826a245761cc681c006d6280105fae862bc19a6c Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 14:24:37 +0200 Subject: [PATCH 42/62] Fix setup.py to allow source dist building It was trying to configure Giza even when it was just expected to build a source distribution --- setup.py | 12 ++++++++++++ 1 file changed, 12 insertions(+) diff --git a/setup.py b/setup.py index 00c9ee2..b58e7d6 100644 --- a/setup.py +++ b/setup.py @@ -8,6 +8,13 @@ import pkgconfig +def is_building_sdist(): + """Detect if we're building a source distribution (sdist)""" + # Check for sdist-related commands in sys.argv + sdist_commands = ['sdist', 'egg_info', 'dist_info'] + return any(cmd in sys.argv for cmd in sdist_commands) + + def add_pgplot_from_giza(ext): # Very convenient - but also breaks the build on Linux (Deb12) *sigh* # adds an empty string [''] to ext.extra_compile_args @@ -89,6 +96,11 @@ def set_extension_config(ext): if os.name != "posix": raise Exception("OS not supported") + # Skip extension configuration during sdist creation + if is_building_sdist(): + print("Building sdist - skipping extension configuration") + return ext + # modify the extension to taste add_X11(ext) add_numpy(ext) From 4d4baebf0255d56703e4595e3d81a12b448252bb Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 14:36:36 +0200 Subject: [PATCH 43/62] Allow manual run + skip publish to PyPI If this file ends up on the main/default branch we can manually trigger it and have the option of skipping publishing to PyPI --- .github/workflows/publish_to_pypi.yml | 15 +++++++++++++++ 1 file changed, 15 insertions(+) diff --git a/.github/workflows/publish_to_pypi.yml b/.github/workflows/publish_to_pypi.yml index 9ccb84e..cfdf9ca 100644 --- a/.github/workflows/publish_to_pypi.yml +++ b/.github/workflows/publish_to_pypi.yml @@ -4,6 +4,13 @@ on: push: tags: - "v*.*.*" + workflow_dispatch: # Manual trigger for testing + inputs: + dry_run: + description: 'Dry run (skip actual PyPI upload)' + required: false + default: 'true' + type: boolean jobs: build_wheels: @@ -125,7 +132,15 @@ jobs: twine check dist/* - name: Publish to PyPI + if: ${{ !inputs.dry_run }} env: TWINE_USERNAME: __token__ TWINE_PASSWORD: ${{ secrets.PYPI_API_TOKEN }} run: twine upload dist/* + + - name: Dry run - show what would be uploaded + if: ${{ inputs.dry_run }} + run: | + echo "DRY RUN: Would upload the following files to PyPI:" + ls -la dist/ + echo "Files passed twine check - ready for upload!" From e990de0b94e26d2a025f8005f3808c54d7aec3c7 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 14:38:28 +0200 Subject: [PATCH 44/62] Fix typo in original author's name --- README.md | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/README.md b/README.md index 66c5b66..7f82e68 100644 --- a/README.md +++ b/README.md @@ -83,5 +83,5 @@ If `ppgplot` is linked against the `Giza` library, it can produce output in `.pn All in all, the `Giza` backend is an amazing job done, but it is [not 100% compatible with the original PGPLOT](https://danieljprice.github.io/giza/documentation/pgplot.html), so it is not guaranteed your plots will come out identical. 
-This fork of the Python-extension owes a lot of thanks to the original author, Nick Patavlis, of `ppgplot`: +This fork of the Python-extension owes a lot of thanks to the original author, Nick Patavalis, of `ppgplot`: https://github.com/npat-efault/ppgplot From fe110020db714747a07a5d44dd802132e2143852 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 14:40:58 +0200 Subject: [PATCH 45/62] Add a workflow that published to test.pypi.org - can be triggered manually if this ends up in the main/default branch - can be triggered by any push onto a branch named test-cicd* --- .github/workflows/test_publish.yml | 120 +++++++++++++++++++++++++++++ 1 file changed, 120 insertions(+) create mode 100644 .github/workflows/test_publish.yml diff --git a/.github/workflows/test_publish.yml b/.github/workflows/test_publish.yml new file mode 100644 index 0000000..6fd9913 --- /dev/null +++ b/.github/workflows/test_publish.yml @@ -0,0 +1,120 @@ +name: Test Build and Publish + +on: + workflow_dispatch: # Manual trigger + push: + branches: + - test-pypi* # + +jobs: + build_wheels: + name: Build wheels on ${{ matrix.os }} + runs-on: ${{ matrix.os }} + strategy: + matrix: + os: [ubuntu-latest, macos-latest] + + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install dependencies + run: | + python -m pip install --upgrade pip + pip install cibuildwheel + + - name: Build wheels + run: cibuildwheel --output-dir dist + env: + CIBW_BEFORE_ALL_LINUX: | + (which apt || which yum || which dnf) && + ((yum install -y gcc make cairo-devel libX11-devel pkgconfig wget tar gzip) || + (apt-get update && apt-get install -y gcc make libcairo2-dev libx11-dev pkg-config wget tar gzip)) && + cd /tmp && + wget https://github.com/danieljprice/giza/archive/refs/tags/v1.4.2.tar.gz && + tar -xzf v1.4.2.tar.gz && + cd giza-1.4.2 && + export CFLAGS=-fPIC && + ./configure --prefix=/usr/local && + make && + make install && + ldconfig + CIBW_BEFORE_ALL_MACOS: "brew install giza libx11 pkg-config" + CIBW_ENVIRONMENT_LINUX: "PKG_CONFIG_PATH=/usr/local/lib/pkgconfig LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH" + CIBW_ENVIRONMENT_MACOS: "MACOSX_DEPLOYMENT_TARGET=14.0 PKG_CONFIG_PATH=$(brew --prefix)/lib/pkgconfig DYLD_FALLBACK_LIBRARY_PATH=$(brew --prefix)/lib:$DYLD_FALLBACK_LIBRARY_PATH" + CIBW_ARCHS_MACOS: "native" + CIBW_TEST_COMMAND: > + python -c 'import ppgplot; print("ok")' + + - name: Upload wheels + uses: actions/upload-artifact@v4 + with: + name: python-pgplot-wheels-${{ matrix.os }} + path: dist/ + + build_sdist: + name: Build source distribution + runs-on: ubuntu-latest + steps: + - uses: actions/checkout@v4 + + - name: Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install build dependencies + run: | + python -m pip install --upgrade pip + pip install build + + - name: Build sdist + run: python -m build --sdist + + - name: Upload sdist + uses: actions/upload-artifact@v4 + with: + name: python-pgplot-sdist + path: dist/*.tar.gz + + test_publish: + name: Test Publish to Test PyPI + needs: [build_wheels, build_sdist] + runs-on: ubuntu-latest + + steps: + - uses: actions/checkout@v4 + + - name: Download all artifacts + uses: actions/download-artifact@v4 + with: + path: dist/ + + - name: Flatten artifacts + run: | + find dist/ -name "*.whl" -exec mv {} dist/ \; + find dist/ -name "*.tar.gz" -exec mv {} dist/ \; + find dist/ -mindepth 1 -type d -exec rm -rf {} + || true + + - name: 
Set up Python + uses: actions/setup-python@v5 + with: + python-version: "3.11" + + - name: Install Twine + run: pip install twine + + - name: Verify distributions + run: | + ls -la dist/ + twine check dist/* + + - name: Publish to Test PyPI + env: + TWINE_USERNAME: __token__ + TWINE_PASSWORD: ${{ secrets.TEST_PYPI_API_TOKEN }} + run: twine upload --repository testpypi dist/* From ceda47ba1c081e78c64b9e94b0d8dd29eab5165b Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 14:43:51 +0200 Subject: [PATCH 46/62] Build wheels now works, back to on demand run This would trigger on each push, that's unnecessary now that building works. Next steps are to build-and-publish to (test)PyPI, and there's other workflows for that that trigger on tagging (official PyPI) and on pushes onto branches named test-cicd* Expect the workflows to end up onto the main/default branch soon, after which we can trigger this workflow manually --- .github/workflows/build_wheels.yml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 6dfec71..9859803 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -1,7 +1,7 @@ name: Build Wheels on: - push: + workflow_dispatch: jobs: build_wheels: From 2993f9294b3c5a99311a5b6cbb7851038f6d895e Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 15:17:15 +0200 Subject: [PATCH 47/62] Improve some of the metadata --- CHANGELOG | 6 ++++++ CONTRIBUTORS | 5 +++++ 2 files changed, 11 insertions(+) diff --git a/CHANGELOG b/CHANGELOG index ac84878..92f3efb 100644 --- a/CHANGELOG +++ b/CHANGELOG @@ -1,3 +1,9 @@ +version 1.6 + - Python2/3 compatibility, numpy 1.x/2.x compatibility + - github actions to build wheels and publish to (test)PyPI + - setup.py can build source dists +version 1.5* + - Fake version for experimenting w/ PyPI by n00b version 1.4 - Now ppgplot uses the "numpy" module by default, reverting to "numarray" and then "Numeric", respectively, if the preferred module is not found. diff --git a/CONTRIBUTORS b/CONTRIBUTORS index fc74981..71c5901 100644 --- a/CONTRIBUTORS +++ b/CONTRIBUTORS @@ -1,6 +1,11 @@ PPGPLOT CONTRIBUTORS: +Marjolein Verkouter continued supporting a fork of NickP's repo, making it +Py2/Py3, numpy1.x/2.x compatible, and make the extension pip-installable. +Added some functionality and incorporated patches from C. Bassa on his fork +of the original repo. 
+ Steven Bamford adapted ppgplot in 2007 to use numpy, in favour of the depreciated numarray and Numeric modules, and included these minor changes (mostly to setup.py) in the Google Code version (1.4) in April From bf9dda30ab0a951e9e0dd0a92cba79f1b878a7e0 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Fri, 8 Aug 2025 15:24:20 +0200 Subject: [PATCH 48/62] Bump to v1.6.0 --- pyproject.toml | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/pyproject.toml b/pyproject.toml index 968a6fe..58654ec 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -4,7 +4,7 @@ build-backend = "setuptools.build_meta" [project] name = "python-pgplot" -version = "1.5.0-beta" +version = "1.6.0" description = "Python bindings for PGPLOT" authors = [ {name = "Marjolein Verkouter", email = "verkouter@jive.eu"}, From 10626b67ce1fe8185a5a4f93b70a7b5a44e0d02d Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Mon, 11 Aug 2025 09:33:32 +0200 Subject: [PATCH 49/62] Initial go at numpy1.x/2.x compat at the same time --- pyproject.toml | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 58654ec..695b449 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -1,5 +1,6 @@ +# https://numpy.org/devdocs/dev/depending_on_numpy.html#numpy-2-abi-handling [build-system] -requires = ["setuptools>=45", "numpy>=1.21.0", "pkgconfig", "wheel", "cibuildwheel"] +requires = ["setuptools>=45", "numpy>=2.0.0rc1", "pkgconfig", "wheel", "cibuildwheel"] build-backend = "setuptools.build_meta" [project] @@ -11,9 +12,9 @@ authors = [ #{name = "Nick Patavalis", email = "npat@efault.net"}, ] readme = "README.md" -requires-python = ">=3.7" +requires-python = ">=3.8" dependencies = [ - "numpy>=1.21.0", + "numpy>=1.19.0", # "pkgconfig" ] From b30cd6127adb20c4b2e2fbe39069445fa5ed67ee Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Mon, 11 Aug 2025 11:39:24 +0200 Subject: [PATCH 50/62] No Numpy2 on any cp38-* --- pyproject.toml | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/pyproject.toml b/pyproject.toml index 695b449..edafa08 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,7 +5,7 @@ build-backend = "setuptools.build_meta" [project] name = "python-pgplot" -version = "1.6.0" +version = "1.6.1" description = "Python bindings for PGPLOT" authors = [ {name = "Marjolein Verkouter", email = "verkouter@jive.eu"}, @@ -42,4 +42,5 @@ package-dir = {"ppgplot" = "src"} # Test that giza is available before-build = "pkg-config --exists giza && echo 'giza found' || echo 'giza NOT found'" # Not on Windhoos, nor on musllinux (wtf) -skip = ["*-win*", "*musllinux*" ] +# No numpy2 on Py3.8 +skip = ["*-win*", "*musllinux*", "cp38-*" ] From 3d3b1a73056749682e05d08e9c0c7acae4724e15 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 12 Aug 2025 10:21:15 +0200 Subject: [PATCH 51/62] Found root cause of numpy ABI misbehaving User on Fedora42 reporting issue: >>> import ppgplot RuntimeError: module was compiled against NumPy C-API version 0x14 (NumPy 2.3) but the running NumPy has C-API version 0x13. Check the section C-API incompatibility at the Troubleshooting ImportError section at https://numpy.org/devdocs/user/troubleshooting-importerror.html#c-api-incompatibility for indications on how to solve this problem. Traceback (most recent call last): File "", line 1, in import ppgplot File "/home/bram/.local/lib/python3.13/site-packages/ppgplot/__init__.py", line 1, in from . 
_ppgplot import * ImportError: numpy._core.multiarray failed to import The Problem The extension was compiled against NumPy 2.3 (API version 0x14) but the user's system has NumPy with API version 0x13. This is a forward compatibility issue - the extension expects a newer NumPy API than what's available. Root Cause The issue is NPY_TARGET_VERSION NPY_API_VERSION which sets the target to whatever NumPy version was available at build time. When you built the wheels, you had NumPy 2.3, so it locked the extension to require NumPy 2.3+. Solution We need to set a lower, stable NumPy API version that's compatible with a wider range of NumPy versions. --- src/_ppgplot.c | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/src/_ppgplot.c b/src/_ppgplot.c index 00be2a9..1e45222 100644 --- a/src/_ppgplot.c +++ b/src/_ppgplot.c @@ -22,10 +22,11 @@ #include /* It's 2025, we only support numpy anymore */ +/* Target NumPy 1.19 API for maximum compatibility while avoiding deprecated APIs */ #ifndef NPY_TARGET_VERSION - #define NPY_TARGET_VERSION NPY_API_VERSION + #define NPY_TARGET_VERSION NPY_1_19_API_VERSION #endif -#define NPY_NO_DEPRECATED_API NPY_TARGET_VERSION/*NPY_1_7_API_VERSION*/ +#define NPY_NO_DEPRECATED_API NPY_TARGET_VERSION #include #include From f1026edf5c1cd83adb469a99269ee568d1a760b8 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 12 Aug 2025 11:57:08 +0200 Subject: [PATCH 52/62] doc + requirements now consistent on Py3.9+ Because there is no numpy2 on Python3.8, we drop support for that (although the extension would work Just Fine there) In order to be numpy v1.x and v2.x compatible one _has_ to build with numpy 2.x, see https://numpy.org/devdocs/dev/depending_on_numpy.html#numpy-2-0-specific-advice --- README.md | 2 +- pyproject.toml | 2 +- 2 files changed, 2 insertions(+), 2 deletions(-) diff --git a/README.md b/README.md index 7f82e68..38498c7 100644 --- a/README.md +++ b/README.md @@ -32,7 +32,7 @@ It is also possible to build the package from this `git`-repository. 
You may nee ## Requirements -- Python 3.7+ +- Python 3.9+ - numpy >= 1.21.0 - PGPLOT or Giza libraries installed - X11 development libraries diff --git a/pyproject.toml b/pyproject.toml index edafa08..d0823e7 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -12,7 +12,7 @@ authors = [ #{name = "Nick Patavalis", email = "npat@efault.net"}, ] readme = "README.md" -requires-python = ">=3.8" +requires-python = ">=3.9" dependencies = [ "numpy>=1.19.0", # "pkgconfig" From 9eaa626a83a8b4373bf180a79bda34de85e395fb Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Mon, 11 Aug 2025 09:36:25 +0200 Subject: [PATCH 53/62] Run build_wheels on push to fix-* branches The workflow is now also automatically triggered on branches that are called fix-* Having it only manually executed from the main/default branch is only so useful --- .github/workflows/build_wheels.yml | 2 ++ 1 file changed, 2 insertions(+) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 9859803..24b57c2 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -2,6 +2,8 @@ name: Build Wheels on: workflow_dispatch: + push: + branch: fix-* jobs: build_wheels: From 9978d5d29d0009475c4f95436d11b3dc4d39d808 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 12 Aug 2025 08:36:13 +0200 Subject: [PATCH 54/62] Start working on conda recipe --- conda-recipe/README.md | 53 ++++++++++++++++++++++++++++++++++++ conda-recipe/bld.bat | 5 ++++ conda-recipe/build.sh | 28 +++++++++++++++++++ conda-recipe/meta.yaml | 62 ++++++++++++++++++++++++++++++++++++++++++ 4 files changed, 148 insertions(+) create mode 100644 conda-recipe/README.md create mode 100644 conda-recipe/bld.bat create mode 100644 conda-recipe/build.sh create mode 100644 conda-recipe/meta.yaml diff --git a/conda-recipe/README.md b/conda-recipe/README.md new file mode 100644 index 0000000..1126761 --- /dev/null +++ b/conda-recipe/README.md @@ -0,0 +1,53 @@ +# Conda-Forge Recipe for python-pgplot + +This directory contains the conda-forge recipe for `python-pgplot`, a Python extension providing bindings for the PGPLOT graphics library via the giza backend. + +## Files + +- `meta.yaml` - Main conda recipe specification +- `build.sh` - Unix build script (Linux/macOS) +- `bld.bat` - Windows build script (currently disabled) + +## Key Features + +- **System dependency handling**: Automatically installs and links against giza from conda-forge +- **Cross-platform**: Supports Linux and macOS (Windows not supported due to giza availability) +- **Binary extension**: Builds C extension module with proper numpy integration +- **Comprehensive testing**: Verifies both Python import and C extension loading + +## Dependencies + +### Build Requirements +- C compiler +- pkg-config +- giza >=1.3.2 (from conda-forge) + +### Runtime Requirements +- Python +- NumPy (version-pinned for ABI compatibility) +- giza >=1.3.2 + +## Submission to conda-forge + +To submit this recipe to conda-forge: + +1. Fork the [conda-forge/staged-recipes](https://github.com/conda-forge/staged-recipes) repository +2. Create a new directory `recipes/python-pgplot/` +3. Copy `meta.yaml`, `build.sh`, and `bld.bat` to that directory +4. 
Submit a pull request + +## Local Testing + +To test this recipe locally with conda-build: + +```bash +conda install conda-build +conda build conda-recipe/ +``` + +## Notes + +- Windows builds are disabled due to giza not being available on Windows in conda-forge +- The recipe uses the PyPI source distribution as the source +- pkg-config is used to locate giza headers and libraries +- The build includes verification that the C extension loads correctly diff --git a/conda-recipe/bld.bat b/conda-recipe/bld.bat new file mode 100644 index 0000000..216e11d --- /dev/null +++ b/conda-recipe/bld.bat @@ -0,0 +1,5 @@ +@echo off +REM Windows build script - currently not supported due to giza dependency +echo "Windows builds are not currently supported for python-pgplot" +echo "This is due to the giza dependency not being available on Windows" +exit /b 1 diff --git a/conda-recipe/build.sh b/conda-recipe/build.sh new file mode 100644 index 0000000..ac5a766 --- /dev/null +++ b/conda-recipe/build.sh @@ -0,0 +1,28 @@ +#!/bin/bash + +set -euxo pipefail + +# Build and install giza from source (like CIBW_BEFORE_ALL) +cd /tmp +wget https://github.com/danieljprice/giza/archive/refs/tags/v1.4.2.tar.gz +tar -xzf v1.4.2.tar.gz +cd giza-1.4.2 + +# Configure and build giza +export CFLAGS=-fPIC +./configure --prefix=${PREFIX} +make -j${CPU_COUNT} +make install + +# Ensure pkg-config can find giza +export PKG_CONFIG_PATH="${PREFIX}/lib/pkgconfig:${PKG_CONFIG_PATH:-}" + +# Return to source directory and build python-pgplot +cd ${SRC_DIR} + +# Build and install the package +${PYTHON} -m pip install . -vv --no-deps --no-build-isolation + +# Test that the extension was built correctly +${PYTHON} -c "import ppgplot; print('python-pgplot extension imported successfully')" +${PYTHON} -c "import ppgplot._ppgplot; print('C extension module loaded successfully')" diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml new file mode 100644 index 0000000..4fd5bee --- /dev/null +++ b/conda-recipe/meta.yaml @@ -0,0 +1,62 @@ +{% set name = "python-pgplot" %} +{% set version = "1.6.0" %} + +package: + name: {{ name|lower }} + version: {{ version }} + +source: + url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/python_pgplot-{{ version }}.tar.gz + sha256: 3af95c1aafd78994c6b46b4c478584c033eef32affb82c3c1d4e7fe07f6df48d + +build: + number: 0 + skip: true # [win] # Windows not supported + +requirements: + build: + - {{ compiler('c') }} + - {{ compiler('cxx') }} + - pkg-config + - make + - wget # [unix] + - tar # [unix] + - gzip # [unix] + host: + - python + - pip + - setuptools >=45 + - wheel + - numpy >=2.0.0rc1 + - pkgconfig + - cairo + - libx11 # [linux] + run: + - python + - {{ pin_compatible('numpy') }} + - cairo + - libx11 # [linux] + +test: + imports: + - ppgplot + - ppgplot._ppgplot + commands: + - python -c "import ppgplot; print('python-pgplot imported successfully')" + +about: + home: https://github.com/haavee/ppgplot + license: GPL-3.0-or-later + license_family: GPL + license_file: LICENSE + summary: Python bindings for PGPLOT + description: | + Python bindings for PGPLOT graphics library. PGPLOT is a Fortran- or + C-callable, device-independent graphics package for making simple scientific + graphs. This package provides Python bindings through the giza library. 
+ doc_url: https://github.com/haavee/ppgplot + dev_url: https://github.com/haavee/ppgplot + +extra: + recipe-maintainers: + - haavee From 88af67cfedc4891063d5488273f86fe46da7a19b Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 12 Aug 2025 09:09:48 +0200 Subject: [PATCH 55/62] Fixup some beginner/n00b mistakes --- conda-recipe/build.sh | 8 ++++++++ conda-recipe/conda_build_config.yaml | 3 +++ conda-recipe/meta.yaml | 1 - 3 files changed, 11 insertions(+), 1 deletion(-) create mode 100644 conda-recipe/conda_build_config.yaml diff --git a/conda-recipe/build.sh b/conda-recipe/build.sh index ac5a766..1577ca0 100644 --- a/conda-recipe/build.sh +++ b/conda-recipe/build.sh @@ -10,6 +10,14 @@ cd giza-1.4.2 # Configure and build giza export CFLAGS=-fPIC + +# Update config.sub for ARM64 support +if [[ "$OSTYPE" == "darwin"* ]]; then + # Download updated config.sub that recognizes arm64-apple-darwin + wget -O build/config.sub 'https://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD' + chmod +x build/config.sub +fi + ./configure --prefix=${PREFIX} make -j${CPU_COUNT} make install diff --git a/conda-recipe/conda_build_config.yaml b/conda-recipe/conda_build_config.yaml new file mode 100644 index 0000000..2d62619 --- /dev/null +++ b/conda-recipe/conda_build_config.yaml @@ -0,0 +1,3 @@ +numpy: + - 1.26 + - 2.0 diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml index 4fd5bee..68920d9 100644 --- a/conda-recipe/meta.yaml +++ b/conda-recipe/meta.yaml @@ -48,7 +48,6 @@ about: home: https://github.com/haavee/ppgplot license: GPL-3.0-or-later license_family: GPL - license_file: LICENSE summary: Python bindings for PGPLOT description: | Python bindings for PGPLOT graphics library. PGPLOT is a Fortran- or From 0e676303528fe9bc511df249d67378aab807aee4 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 12 Aug 2025 09:39:16 +0200 Subject: [PATCH 56/62] Robustify recipe, now also works after purge The recipe would fail to build after a "conda build purge". Now it doesn't --- conda-recipe/build.sh | 7 +++++-- 1 file changed, 5 insertions(+), 2 deletions(-) diff --git a/conda-recipe/build.sh b/conda-recipe/build.sh index 1577ca0..e2036e4 100644 --- a/conda-recipe/build.sh +++ b/conda-recipe/build.sh @@ -9,7 +9,10 @@ tar -xzf v1.4.2.tar.gz cd giza-1.4.2 # Configure and build giza -export CFLAGS=-fPIC +export CFLAGS="-fPIC" +export CXXFLAGS="-fPIC" +export LDFLAGS="-L${PREFIX}/lib" +export CPPFLAGS="-I${PREFIX}/include" # Update config.sub for ARM64 support if [[ "$OSTYPE" == "darwin"* ]]; then @@ -18,7 +21,7 @@ if [[ "$OSTYPE" == "darwin"* ]]; then chmod +x build/config.sub fi -./configure --prefix=${PREFIX} +./configure --prefix=${PREFIX} --enable-shared make -j${CPU_COUNT} make install From 8151f45745be6dedd25bd53983757d07a36a80b0 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 12 Aug 2025 09:50:00 +0200 Subject: [PATCH 57/62] Add conda documentation --- README.md | 24 +++++++++++++++++++++++- 1 file changed, 23 insertions(+), 1 deletion(-) diff --git a/README.md b/README.md index 38498c7..2dcddd6 100644 --- a/README.md +++ b/README.md @@ -11,7 +11,26 @@ manipulate vectors and matrices. 
## Installing -Since `v1.5` (Apr 2025) the package should be `pip`-installable; it's a package on the [PyPI](https://pypi.org/project/python-pgplot/): +### Option 1: Conda (Recommended) + +The easiest way to install `python-pgplot` is via conda-forge, which automatically handles all system dependencies: + +```bash + $> conda install -c conda-forge python-pgplot + $> python3 + >>> import ppgplot + >>> +``` + +This method automatically installs and configures: +- Giza graphics library +- Cairo graphics backend +- X11 libraries (Linux) +- All required development headers + +### Option 2: PyPI + +Since `v1.5` (Apr 2025) the package is also available on [PyPI](https://pypi.org/project/python-pgplot/): ```bash $> pip install python-pgplot @@ -21,6 +40,9 @@ Since `v1.5` (Apr 2025) the package should be `pip`-installable; it's a package ``` **NOTE: Due to a package name collision, the PyPI project name is `python-pgplot`; the obvious package name was already claimed by something completely different** +**Important:** PyPI installation requires system dependencies (see Requirements section below) to be manually installed first. + +### Option 3: From Source It is also possible to build the package from this `git`-repository. You may need to create a Python [`venv`](https://docs.python.org/3/library/venv.html) first. See below for detailed instructions. From a84efe4746f827f4888fc45c8f05957749dea0f6 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 12 Aug 2025 14:10:32 +0200 Subject: [PATCH 58/62] Loonix need gfortran and xorg-x11-proto-devel --- conda-recipe/meta.yaml | 8 +++++--- 1 file changed, 5 insertions(+), 3 deletions(-) diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml index 68920d9..1f09668 100644 --- a/conda-recipe/meta.yaml +++ b/conda-recipe/meta.yaml @@ -16,12 +16,14 @@ build: requirements: build: - {{ compiler('c') }} - - {{ compiler('cxx') }} + - {{ compiler('fortran') }} + - {{ cdt('xorg-x11-proto-devel') }} # [linux] - pkg-config - make - wget # [unix] - tar # [unix] - gzip # [unix] + - xorg-libx11 host: - python - pip @@ -30,12 +32,12 @@ requirements: - numpy >=2.0.0rc1 - pkgconfig - cairo - - libx11 # [linux] + - xorg-libx11 run: - python - {{ pin_compatible('numpy') }} - cairo - - libx11 # [linux] + - xorg-libx11 test: imports: From 3f21862c0eca25a18e0b90545d2118f38861315c Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 12 Aug 2025 17:00:12 +0200 Subject: [PATCH 59/62] Indicate LGPL, update contributors in pyproject Time to update some of the meta information about who contributed and make explicit what the license of the code is (LGPL). The LGPL is mentioned in PKG-INFO but no-one (recent) will look into that. --- LICENSE | 502 +++++++++++++++++++++++++++++++++++++++++++++++++ pyproject.toml | 12 +- src/_ppgplot.c | 17 ++ 3 files changed, 530 insertions(+), 1 deletion(-) create mode 100644 LICENSE diff --git a/LICENSE b/LICENSE new file mode 100644 index 0000000..4362b49 --- /dev/null +++ b/LICENSE @@ -0,0 +1,502 @@ + GNU LESSER GENERAL PUBLIC LICENSE + Version 2.1, February 1999 + + Copyright (C) 1991, 1999 Free Software Foundation, Inc. + 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA + Everyone is permitted to copy and distribute verbatim copies + of this license document, but changing it is not allowed. + +[This is the first released version of the Lesser GPL. It also counts + as the successor of the GNU Library Public License, version 2, hence + the version number 2.1.] 
+ + Preamble + + The licenses for most software are designed to take away your +freedom to share and change it. By contrast, the GNU General Public +Licenses are intended to guarantee your freedom to share and change +free software--to make sure the software is free for all its users. + + This license, the Lesser General Public License, applies to some +specially designated software packages--typically libraries--of the +Free Software Foundation and other authors who decide to use it. You +can use it too, but we suggest you first think carefully about whether +this license or the ordinary General Public License is the better +strategy to use in any particular case, based on the explanations below. + + When we speak of free software, we are referring to freedom of use, +not price. Our General Public Licenses are designed to make sure that +you have the freedom to distribute copies of free software (and charge +for this service if you wish); that you receive source code or can get +it if you want it; that you can change the software and use pieces of +it in new free programs; and that you are informed that you can do +these things. + + To protect your rights, we need to make restrictions that forbid +distributors to deny you these rights or to ask you to surrender these +rights. These restrictions translate to certain responsibilities for +you if you distribute copies of the library or if you modify it. + + For example, if you distribute copies of the library, whether gratis +or for a fee, you must give the recipients all the rights that we gave +you. You must make sure that they, too, receive or can get the source +code. If you link other code with the library, you must provide +complete object files to the recipients, so that they can relink them +with the library after making changes to the library and recompiling +it. And you must show them these terms so they know their rights. + + We protect your rights with a two-step method: (1) we copyright the +library, and (2) we offer you this license, which gives you legal +permission to copy, distribute and/or modify the library. + + To protect each distributor, we want to make it very clear that +there is no warranty for the free library. Also, if the library is +modified by someone else and passed on, the recipients should know +that what they have is not the original version, so that the original +author's reputation will not be affected by problems that might be +introduced by others. + + Finally, software patents pose a constant threat to the existence of +any free program. We wish to make sure that a company cannot +effectively restrict the users of a free program by obtaining a +restrictive license from a patent holder. Therefore, we insist that +any patent license obtained for a version of the library must be +consistent with the full freedom of use specified in this license. + + Most GNU software, including some libraries, is covered by the +ordinary GNU General Public License. This license, the GNU Lesser +General Public License, applies to certain designated libraries, and +is quite different from the ordinary General Public License. We use +this license for certain libraries in order to permit linking those +libraries into non-free programs. + + When a program is linked with a library, whether statically or using +a shared library, the combination of the two is legally speaking a +combined work, a derivative of the original library. 
The ordinary +General Public License therefore permits such linking only if the +entire combination fits its criteria of freedom. The Lesser General +Public License permits more lax criteria for linking other code with +the library. + + We call this license the "Lesser" General Public License because it +does Less to protect the user's freedom than the ordinary General +Public License. It also provides other free software developers Less +of an advantage over competing non-free programs. These disadvantages +are the reason we use the ordinary General Public License for many +libraries. However, the Lesser license provides advantages in certain +special circumstances. + + For example, on rare occasions, there may be a special need to +encourage the widest possible use of a certain library, so that it becomes +a de-facto standard. To achieve this, non-free programs must be +allowed to use the library. A more frequent case is that a free +library does the same job as widely used non-free libraries. In this +case, there is little to gain by limiting the free library to free +software only, so we use the Lesser General Public License. + + In other cases, permission to use a particular library in non-free +programs enables a greater number of people to use a large body of +free software. For example, permission to use the GNU C Library in +non-free programs enables many more people to use the whole GNU +operating system, as well as its variant, the GNU/Linux operating +system. + + Although the Lesser General Public License is Less protective of the +users' freedom, it does ensure that the user of a program that is +linked with the Library has the freedom and the wherewithal to run +that program using a modified version of the Library. + + The precise terms and conditions for copying, distribution and +modification follow. Pay close attention to the difference between a +"work based on the library" and a "work that uses the library". The +former contains code derived from the library, whereas the latter must +be combined with the library in order to run. + + GNU LESSER GENERAL PUBLIC LICENSE + TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION + + 0. This License Agreement applies to any software library or other +program which contains a notice placed by the copyright holder or +other authorized party saying it may be distributed under the terms of +this Lesser General Public License (also called "this License"). +Each licensee is addressed as "you". + + A "library" means a collection of software functions and/or data +prepared so as to be conveniently linked with application programs +(which use some of those functions and data) to form executables. + + The "Library", below, refers to any such software library or work +which has been distributed under these terms. A "work based on the +Library" means either the Library or any derivative work under +copyright law: that is to say, a work containing the Library or a +portion of it, either verbatim or with modifications and/or translated +straightforwardly into another language. (Hereinafter, translation is +included without limitation in the term "modification".) + + "Source code" for a work means the preferred form of the work for +making modifications to it. For a library, complete source code means +all the source code for all modules it contains, plus any associated +interface definition files, plus the scripts used to control compilation +and installation of the library. 
+ + Activities other than copying, distribution and modification are not +covered by this License; they are outside its scope. The act of +running a program using the Library is not restricted, and output from +such a program is covered only if its contents constitute a work based +on the Library (independent of the use of the Library in a tool for +writing it). Whether that is true depends on what the Library does +and what the program that uses the Library does. + + 1. You may copy and distribute verbatim copies of the Library's +complete source code as you receive it, in any medium, provided that +you conspicuously and appropriately publish on each copy an +appropriate copyright notice and disclaimer of warranty; keep intact +all the notices that refer to this License and to the absence of any +warranty; and distribute a copy of this License along with the +Library. + + You may charge a fee for the physical act of transferring a copy, +and you may at your option offer warranty protection in exchange for a +fee. + + 2. You may modify your copy or copies of the Library or any portion +of it, thus forming a work based on the Library, and copy and +distribute such modifications or work under the terms of Section 1 +above, provided that you also meet all of these conditions: + + a) The modified work must itself be a software library. + + b) You must cause the files modified to carry prominent notices + stating that you changed the files and the date of any change. + + c) You must cause the whole of the work to be licensed at no + charge to all third parties under the terms of this License. + + d) If a facility in the modified Library refers to a function or a + table of data to be supplied by an application program that uses + the facility, other than as an argument passed when the facility + is invoked, then you must make a good faith effort to ensure that, + in the event an application does not supply such function or + table, the facility still operates, and performs whatever part of + its purpose remains meaningful. + + (For example, a function in a library to compute square roots has + a purpose that is entirely well-defined independent of the + application. Therefore, Subsection 2d requires that any + application-supplied function or table used by this function must + be optional: if the application does not supply it, the square + root function must still compute square roots.) + +These requirements apply to the modified work as a whole. If +identifiable sections of that work are not derived from the Library, +and can be reasonably considered independent and separate works in +themselves, then this License, and its terms, do not apply to those +sections when you distribute them as separate works. But when you +distribute the same sections as part of a whole which is a work based +on the Library, the distribution of the whole must be on the terms of +this License, whose permissions for other licensees extend to the +entire whole, and thus to each and every part regardless of who wrote +it. + +Thus, it is not the intent of this section to claim rights or contest +your rights to work written entirely by you; rather, the intent is to +exercise the right to control the distribution of derivative or +collective works based on the Library. + +In addition, mere aggregation of another work not based on the Library +with the Library (or with a work based on the Library) on a volume of +a storage or distribution medium does not bring the other work under +the scope of this License. + + 3. 
You may opt to apply the terms of the ordinary GNU General Public +License instead of this License to a given copy of the Library. To do +this, you must alter all the notices that refer to this License, so +that they refer to the ordinary GNU General Public License, version 2, +instead of to this License. (If a newer version than version 2 of the +ordinary GNU General Public License has appeared, then you can specify +that version instead if you wish.) Do not make any other change in +these notices. + + Once this change is made in a given copy, it is irreversible for +that copy, so the ordinary GNU General Public License applies to all +subsequent copies and derivative works made from that copy. + + This option is useful when you wish to copy part of the code of +the Library into a program that is not a library. + + 4. You may copy and distribute the Library (or a portion or +derivative of it, under Section 2) in object code or executable form +under the terms of Sections 1 and 2 above provided that you accompany +it with the complete corresponding machine-readable source code, which +must be distributed under the terms of Sections 1 and 2 above on a +medium customarily used for software interchange. + + If distribution of object code is made by offering access to copy +from a designated place, then offering equivalent access to copy the +source code from the same place satisfies the requirement to +distribute the source code, even though third parties are not +compelled to copy the source along with the object code. + + 5. A program that contains no derivative of any portion of the +Library, but is designed to work with the Library by being compiled or +linked with it, is called a "work that uses the Library". Such a +work, in isolation, is not a derivative work of the Library, and +therefore falls outside the scope of this License. + + However, linking a "work that uses the Library" with the Library +creates an executable that is a derivative of the Library (because it +contains portions of the Library), rather than a "work that uses the +library". The executable is therefore covered by this License. +Section 6 states terms for distribution of such executables. + + When a "work that uses the Library" uses material from a header file +that is part of the Library, the object code for the work may be a +derivative work of the Library even though the source code is not. +Whether this is true is especially significant if the work can be +linked without the Library, or if the work is itself a library. The +threshold for this to be true is not precisely defined by law. + + If such an object file uses only numerical parameters, data +structure layouts and accessors, and small macros and small inline +functions (ten lines or less in length), then the use of the object +file is unrestricted, regardless of whether it is legally a derivative +work. (Executables containing this object code plus portions of the +Library will still fall under Section 6.) + + Otherwise, if the work is a derivative of the Library, you may +distribute the object code for the work under the terms of Section 6. +Any executables containing that work also fall under Section 6, +whether or not they are linked directly with the Library itself. + + 6. 
As an exception to the Sections above, you may also combine or +link a "work that uses the Library" with the Library to produce a +work containing portions of the Library, and distribute that work +under terms of your choice, provided that the terms permit +modification of the work for the customer's own use and reverse +engineering for debugging such modifications. + + You must give prominent notice with each copy of the work that the +Library is used in it and that the Library and its use are covered by +this License. You must supply a copy of this License. If the work +during execution displays copyright notices, you must include the +copyright notice for the Library among them, as well as a reference +directing the user to the copy of this License. Also, you must do one +of these things: + + a) Accompany the work with the complete corresponding + machine-readable source code for the Library including whatever + changes were used in the work (which must be distributed under + Sections 1 and 2 above); and, if the work is an executable linked + with the Library, with the complete machine-readable "work that + uses the Library", as object code and/or source code, so that the + user can modify the Library and then relink to produce a modified + executable containing the modified Library. (It is understood + that the user who changes the contents of definitions files in the + Library will not necessarily be able to recompile the application + to use the modified definitions.) + + b) Use a suitable shared library mechanism for linking with the + Library. A suitable mechanism is one that (1) uses at run time a + copy of the library already present on the user's computer system, + rather than copying library functions into the executable, and (2) + will operate properly with a modified version of the library, if + the user installs one, as long as the modified version is + interface-compatible with the version that the work was made with. + + c) Accompany the work with a written offer, valid for at + least three years, to give the same user the materials + specified in Subsection 6a, above, for a charge no more + than the cost of performing this distribution. + + d) If distribution of the work is made by offering access to copy + from a designated place, offer equivalent access to copy the above + specified materials from the same place. + + e) Verify that the user has already received a copy of these + materials or that you have already sent this user a copy. + + For an executable, the required form of the "work that uses the +Library" must include any data and utility programs needed for +reproducing the executable from it. However, as a special exception, +the materials to be distributed need not include anything that is +normally distributed (in either source or binary form) with the major +components (compiler, kernel, and so on) of the operating system on +which the executable runs, unless that component itself accompanies +the executable. + + It may happen that this requirement contradicts the license +restrictions of other proprietary libraries that do not normally +accompany the operating system. Such a contradiction means you cannot +use both them and the Library together in an executable that you +distribute. + + 7. 
You may place library facilities that are a work based on the +Library side-by-side in a single library together with other library +facilities not covered by this License, and distribute such a combined +library, provided that the separate distribution of the work based on +the Library and of the other library facilities is otherwise +permitted, and provided that you do these two things: + + a) Accompany the combined library with a copy of the same work + based on the Library, uncombined with any other library + facilities. This must be distributed under the terms of the + Sections above. + + b) Give prominent notice with the combined library of the fact + that part of it is a work based on the Library, and explaining + where to find the accompanying uncombined form of the same work. + + 8. You may not copy, modify, sublicense, link with, or distribute +the Library except as expressly provided under this License. Any +attempt otherwise to copy, modify, sublicense, link with, or +distribute the Library is void, and will automatically terminate your +rights under this License. However, parties who have received copies, +or rights, from you under this License will not have their licenses +terminated so long as such parties remain in full compliance. + + 9. You are not required to accept this License, since you have not +signed it. However, nothing else grants you permission to modify or +distribute the Library or its derivative works. These actions are +prohibited by law if you do not accept this License. Therefore, by +modifying or distributing the Library (or any work based on the +Library), you indicate your acceptance of this License to do so, and +all its terms and conditions for copying, distributing or modifying +the Library or works based on it. + + 10. Each time you redistribute the Library (or any work based on the +Library), the recipient automatically receives a license from the +original licensor to copy, distribute, link with or modify the Library +subject to these terms and conditions. You may not impose any further +restrictions on the recipients' exercise of the rights granted herein. +You are not responsible for enforcing compliance by third parties with +this License. + + 11. If, as a consequence of a court judgment or allegation of patent +infringement or for any other reason (not limited to patent issues), +conditions are imposed on you (whether by court order, agreement or +otherwise) that contradict the conditions of this License, they do not +excuse you from the conditions of this License. If you cannot +distribute so as to satisfy simultaneously your obligations under this +License and any other pertinent obligations, then as a consequence you +may not distribute the Library at all. For example, if a patent +license would not permit royalty-free redistribution of the Library by +all those who receive copies directly or indirectly through you, then +the only way you could satisfy both it and this License would be to +refrain entirely from distribution of the Library. + +If any portion of this section is held invalid or unenforceable under any +particular circumstance, the balance of the section is intended to apply, +and the section as a whole is intended to apply in other circumstances. 
+ +It is not the purpose of this section to induce you to infringe any +patents or other property right claims or to contest validity of any +such claims; this section has the sole purpose of protecting the +integrity of the free software distribution system which is +implemented by public license practices. Many people have made +generous contributions to the wide range of software distributed +through that system in reliance on consistent application of that +system; it is up to the author/donor to decide if he or she is willing +to distribute software through any other system and a licensee cannot +impose that choice. + +This section is intended to make thoroughly clear what is believed to +be a consequence of the rest of this License. + + 12. If the distribution and/or use of the Library is restricted in +certain countries either by patents or by copyrighted interfaces, the +original copyright holder who places the Library under this License may add +an explicit geographical distribution limitation excluding those countries, +so that distribution is permitted only in or among countries not thus +excluded. In such case, this License incorporates the limitation as if +written in the body of this License. + + 13. The Free Software Foundation may publish revised and/or new +versions of the Lesser General Public License from time to time. +Such new versions will be similar in spirit to the present version, +but may differ in detail to address new problems or concerns. + +Each version is given a distinguishing version number. If the Library +specifies a version number of this License which applies to it and +"any later version", you have the option of following the terms and +conditions either of that version or of any later version published by +the Free Software Foundation. If the Library does not specify a +license version number, you may choose any version ever published by +the Free Software Foundation. + + 14. If you wish to incorporate parts of the Library into other free +programs whose distribution conditions are incompatible with these, +write to the author to ask for permission. For software which is +copyrighted by the Free Software Foundation, write to the Free +Software Foundation; we sometimes make exceptions for this. Our +decision will be guided by the two goals of preserving the free status +of all derivatives of our free software and of promoting the sharing +and reuse of software generally. + + NO WARRANTY + + 15. BECAUSE THE LIBRARY IS LICENSED FREE OF CHARGE, THERE IS NO +WARRANTY FOR THE LIBRARY, TO THE EXTENT PERMITTED BY APPLICABLE LAW. +EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR +OTHER PARTIES PROVIDE THE LIBRARY "AS IS" WITHOUT WARRANTY OF ANY +KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR +PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE +LIBRARY IS WITH YOU. SHOULD THE LIBRARY PROVE DEFECTIVE, YOU ASSUME +THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. + + 16. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN
+WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY
+AND/OR REDISTRIBUTE THE LIBRARY AS PERMITTED ABOVE, BE LIABLE TO YOU
+FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR
+CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE
+LIBRARY (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING
+RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A
+FAILURE OF THE LIBRARY TO OPERATE WITH ANY OTHER SOFTWARE), EVEN IF
+SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH
+DAMAGES.
+
+                     END OF TERMS AND CONDITIONS
+
+           How to Apply These Terms to Your New Libraries
+
+  If you develop a new library, and you want it to be of the greatest
+possible use to the public, we recommend making it free software that
+everyone can redistribute and change. You can do so by permitting
+redistribution under these terms (or, alternatively, under the terms of the
+ordinary General Public License).
+
+  To apply these terms, attach the following notices to the library. It is
+safest to attach them to the start of each source file to most effectively
+convey the exclusion of warranty; and each file should have at least the
+"copyright" line and a pointer to where the full notice is found.
+
+    <one line to give the library's name and a brief idea of what it does.>
+    Copyright (C) <year>  <name of author>
+
+    This library is free software; you can redistribute it and/or
+    modify it under the terms of the GNU Lesser General Public
+    License as published by the Free Software Foundation; either
+    version 2.1 of the License, or (at your option) any later version.
+
+    This library is distributed in the hope that it will be useful,
+    but WITHOUT ANY WARRANTY; without even the implied warranty of
+    MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
+    Lesser General Public License for more details.
+
+    You should have received a copy of the GNU Lesser General Public
+    License along with this library; if not, write to the Free Software
+    Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
+
+Also add information on how to contact you by electronic and paper mail.
+
+You should also get your employer (if you work as a programmer) or your
+school, if any, to sign a "copyright disclaimer" for the library, if
+necessary. Here is a sample; alter the names:
+
+  Yoyodyne, Inc., hereby disclaims all copyright interest in the
+  library `Frob' (a library for tweaking knobs) written by James Random Hacker.
+
+  <signature of Ty Coon>, 1 April 1990
+  Ty Coon, President of Vice
+
+That's all there is to it!
diff --git a/pyproject.toml b/pyproject.toml
index d0823e7..2f8e790 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -9,7 +9,12 @@ version = "1.6.1"
 description = "Python bindings for PGPLOT"
 authors = [
     {name = "Marjolein Verkouter", email = "verkouter@jive.eu"},
-    #{name = "Nick Patavalis", email = "npat@efault.net"},
+    {name = "Nick Patavalis" },
+    {name = "MA Breddels" },
+    {name = "C Bassa" }
+]
+maintainers = [
+    {name = "Marjolein Verkouter", email = "verkouter@jive.eu"}
 ]
 readme = "README.md"
 requires-python = ">=3.9"
@@ -17,6 +22,11 @@ dependencies = [
     "numpy>=1.19.0",
 #    "pkgconfig"
 ]
+license = "LGPL-2.0-only"
+license-files = [
+    "LICENSE",
+    "AUTHORS"
+]
 
 [external]
 build-requires = [
diff --git a/src/_ppgplot.c b/src/_ppgplot.c
index 1e45222..b24c805 100644
--- a/src/_ppgplot.c
+++ b/src/_ppgplot.c
@@ -1,3 +1,19 @@
+/* Copyright (c) 1999-2025 N Patavalis, MA Breddels, C Bassa, M Verkouter
+ * This library is free software; you can redistribute it and/or
+ * modify it under the terms of the GNU Lesser General Public
+ * License as published by the Free Software Foundation; either
+ * version 2.1 of the License, or (at your option) any later version.
+ *
+
+ * This library is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser
+ * General Public License for more details.
+
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this library; if not, see
+ * <https://www.gnu.org/licenses/>.
+*/
 /*
  * FILE:
  *   _ppgplot.c
@@ -15,6 +31,7 @@
  * identical to the original PGPLOT ones.
  */
 
+
 #include 
 #include 

From 2e196f4f6a636d49c8734309d948de7e7a2c28ce Mon Sep 17 00:00:00 2001
From: Marjolein Verkouter
Date: Tue, 12 Aug 2025 17:16:35 +0200
Subject: [PATCH 60/62] Bump recipe to use v1.6.1

---
 conda-recipe/meta.yaml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml
index 1f09668..f2328d2 100644
--- a/conda-recipe/meta.yaml
+++ b/conda-recipe/meta.yaml
@@ -1,5 +1,5 @@
 {% set name = "python-pgplot" %}
-{% set version = "1.6.0" %}
+{% set version = "1.6.1" %}
 
 package:
   name: {{ name|lower }}
 
 source:
   url: https://pypi.io/packages/source/{{ name[0] }}/{{ name }}/python_pgplot-{{ version }}.tar.gz
-  sha256: 3af95c1aafd78994c6b46b4c478584c033eef32affb82c3c1d4e7fe07f6df48d
+  sha256: 0ac1cd4808a5b80a6e79dea4562ac4201028818ca890abf8cb06186585858919
 
 build:
   number: 0

From 640acd8141f93578fc4404a99481f00b5e346b5d Mon Sep 17 00:00:00 2001
From: Marjolein Verkouter
Date: Tue, 12 Aug 2025 17:28:37 +0200
Subject: [PATCH 61/62] Fix workflow trigger on every push

The intent was to trigger the "build_wheels" workflow only on branches
called "fix-*". The YAML file had the wrong syntax for it, but on account
of it being an effing stupid format (YAML) w/o proper grammar, no-one
complained. It did explain why I got many builds triggered on branches
where I didn't expect them.
*sigh* --- .github/workflows/build_wheels.yml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/.github/workflows/build_wheels.yml b/.github/workflows/build_wheels.yml index 24b57c2..e4a560d 100644 --- a/.github/workflows/build_wheels.yml +++ b/.github/workflows/build_wheels.yml @@ -3,7 +3,8 @@ name: Build Wheels on: workflow_dispatch: push: - branch: fix-* + branches: + - fix-* jobs: build_wheels: From 437956e07b4b6818b1eabb6e60c48293a760d6c8 Mon Sep 17 00:00:00 2001 From: Marjolein Verkouter Date: Tue, 12 Aug 2025 17:35:18 +0200 Subject: [PATCH 62/62] Add proper license stuff (fixup) --- conda-recipe/meta.yaml | 3 ++- 1 file changed, 2 insertions(+), 1 deletion(-) diff --git a/conda-recipe/meta.yaml b/conda-recipe/meta.yaml index f2328d2..47afabf 100644 --- a/conda-recipe/meta.yaml +++ b/conda-recipe/meta.yaml @@ -48,7 +48,8 @@ test: about: home: https://github.com/haavee/ppgplot - license: GPL-3.0-or-later + license: LGPL-2.0-only + license_file: LICENSE license_family: GPL summary: Python bindings for PGPLOT description: |
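
For reference, after the PATCH 61/62 change the trigger section of
.github/workflows/build_wheels.yml ends up as in the sketch below, reconstructed
from the hunk above (the jobs: section is unchanged and elided here). The push
filter key is "branches" and takes a list of glob patterns; the earlier scalar
"branch: fix-*" key is not a recognised filter, which, per the commit message,
left the workflow triggering on every push.

    # Sketch of the corrected trigger block (names taken from the diff context):
    name: Build Wheels

    on:
      workflow_dispatch:        # allow manual runs
      push:
        branches:               # list of glob patterns, not a scalar "branch:" key
          - fix-*

    # jobs: ... (build_wheels job as before, elided)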