From 5d037bcc93242b5b8ee1ed9d2ad1383f4bc5a3b5 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 28 Nov 2025 21:54:30 +0000 Subject: [PATCH 1/4] Initial plan From d414290f0c252e24bb00cf9319dcc953fedc7391 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 28 Nov 2025 22:10:08 +0000 Subject: [PATCH 2/4] Remove Python 2 compatibility code for Python 3.8+ support Co-authored-by: amol- <601423+amol-@users.noreply.github.com> --- beaker/_compat.py | 193 +++++++++++--------------------------- beaker/cache.py | 5 +- beaker/container.py | 10 +- beaker/cookie.py | 58 ++---------- beaker/crypto/__init__.py | 7 +- beaker/crypto/pbkdf2.py | 101 ++++---------------- beaker/ext/memcached.py | 5 +- beaker/ext/mongodb.py | 5 +- beaker/ext/redisnm.py | 5 +- beaker/session.py | 2 +- beaker/util.py | 2 +- setup.py | 45 +-------- 12 files changed, 96 insertions(+), 342 deletions(-) diff --git a/beaker/_compat.py b/beaker/_compat.py index a73f1fdd..bdcff72a 100644 --- a/beaker/_compat.py +++ b/beaker/_compat.py @@ -1,169 +1,82 @@ -from __future__ import absolute_import -import sys +"""Compatibility module for Python 3.8+. + +This module provides compatibility aliases and utility functions +that were previously used to support both Python 2 and Python 3. +It is now simplified for Python 3.8+ only. +""" +import pickle +import http.cookies as http_cookies +from base64 import b64decode as _b64decode, b64encode as _b64encode +from urllib.parse import urlencode as url_encode +from urllib.parse import quote as url_quote +from urllib.parse import unquote as url_unquote +from urllib.parse import urlparse as url_parse +from urllib.request import url2pathname +from inspect import signature as func_signature -# True if we are running on Python 2. 
-PY2 = sys.version_info[0] == 2 -PYVER = sys.version_info[:2] -JYTHON = sys.platform.startswith('java') - -if PY2 and not JYTHON: # pragma: no cover - import cPickle as pickle -else: # pragma: no cover - import pickle - - -if not PY2: # pragma: no cover - xrange_ = range - NoneType = type(None) - - string_type = str - unicode_text = str - byte_string = bytes - - from urllib.parse import urlencode as url_encode - from urllib.parse import quote as url_quote - from urllib.parse import unquote as url_unquote - from urllib.parse import urlparse as url_parse - from urllib.request import url2pathname - import http.cookies as http_cookies - from base64 import b64decode as _b64decode, b64encode as _b64encode - - try: - import dbm.gnu as anydbm - except ImportError: - import dbm.dumb as anydbm - - def b64decode(b): - return _b64decode(b.encode('ascii')) +try: + import dbm.gnu as anydbm +except ImportError: + import dbm.dumb as anydbm - def b64encode(s): - return _b64encode(s).decode('ascii') +# Type aliases for backwards compatibility +NoneType = type(None) +string_type = str +unicode_text = str +byte_string = bytes - def u_(s): - return str(s) - def bytes_(s): - if isinstance(s, byte_string): - return s - return str(s).encode('ascii', 'strict') +def b64decode(b): + """Base64 decode a string, returning bytes.""" + return _b64decode(b.encode('ascii')) - def dictkeyslist(d): - return list(d.keys()) -else: - xrange_ = xrange - from types import NoneType +def b64encode(s): + """Base64 encode bytes, returning a string.""" + return _b64encode(s).decode('ascii') - string_type = basestring - unicode_text = unicode - byte_string = str - from urllib import urlencode as url_encode - from urllib import quote as url_quote - from urllib import unquote as url_unquote - from urlparse import urlparse as url_parse - from urllib import url2pathname - import Cookie as http_cookies - from base64 import b64decode, b64encode - import anydbm +def u_(s): + """Convert to string (unicode).""" + return str(s) - def u_(s): - if isinstance(s, unicode_text): - return s - if not isinstance(s, byte_string): - s = str(s) - return unicode(s, 'utf-8') +def bytes_(s): + """Convert to bytes.""" + if isinstance(s, bytes): + return s + return str(s).encode('ascii', 'strict') - def bytes_(s): - if isinstance(s, byte_string): - return s - return str(s) - def dictkeyslist(d): - return d.keys() +def dictkeyslist(d): + """Return dictionary keys as a list.""" + return list(d.keys()) def im_func(f): - if not PY2: # pragma: no cover - return getattr(f, '__func__', None) - else: - return getattr(f, 'im_func', None) + """Get the function from a bound method.""" + return getattr(f, '__func__', None) def default_im_func(f): - if not PY2: # pragma: no cover - return getattr(f, '__func__', f) - else: - return getattr(f, 'im_func', f) + """Get the function from a bound method, or return the function itself.""" + return getattr(f, '__func__', f) def im_self(f): - if not PY2: # pragma: no cover - return getattr(f, '__self__', None) - else: - return getattr(f, 'im_self', None) + """Get the instance from a bound method.""" + return getattr(f, '__self__', None) def im_class(f): - if not PY2: # pragma: no cover - self = im_self(f) - if self is not None: - return self.__class__ - else: - return None - else: - return getattr(f, 'im_class', None) - - -def add_metaclass(metaclass): - """Class decorator for creating a class with a metaclass.""" - def wrapper(cls): - orig_vars = cls.__dict__.copy() - slots = orig_vars.get('__slots__') - if slots is not None: - if 
isinstance(slots, str): - slots = [slots] - for slots_var in slots: - orig_vars.pop(slots_var) - orig_vars.pop('__dict__', None) - orig_vars.pop('__weakref__', None) - return metaclass(cls.__name__, cls.__bases__, orig_vars) - return wrapper - - -if not PY2: # pragma: no cover - import builtins - exec_ = getattr(builtins, "exec") - - def reraise(tp, value, tb=None): - if value.__traceback__ is not tb: - raise value.with_traceback(tb) - raise value -else: # pragma: no cover - def exec_(code, globs=None, locs=None): - """Execute code in a namespace.""" - if globs is None: - frame = sys._getframe(1) - globs = frame.f_globals - if locs is None: - locs = frame.f_locals - del frame - elif locs is None: - locs = globs - exec("""exec code in globs, locs""") - - exec_("""def reraise(tp, value, tb=None): - raise tp, value, tb -""") - - -try: - from inspect import signature as func_signature -except ImportError: - from funcsigs import signature as func_signature + """Get the class from a bound method.""" + self = im_self(f) + if self is not None: + return self.__class__ + return None def bindfuncargs(arginfo, args, kwargs): + """Bind function arguments to their parameters.""" boundargs = arginfo.bind(*args, **kwargs) return boundargs.args, boundargs.kwargs diff --git a/beaker/cache.py b/beaker/cache.py index 61b8e4e9..6fa0d567 100644 --- a/beaker/cache.py +++ b/beaker/cache.py @@ -101,10 +101,7 @@ def _init(self): # Warn when there's a problem loading a NamespaceManager if not isinstance(sys.exc_info()[1], DistributionNotFound): import traceback - try: - from StringIO import StringIO # Python2 - except ImportError: - from io import StringIO # Python3 + from io import StringIO tb = StringIO() traceback.print_exc(file=tb) diff --git a/beaker/container.py b/beaker/container.py index f3f5b4f8..c0861e63 100644 --- a/beaker/container.py +++ b/beaker/container.py @@ -1,7 +1,7 @@ """Container and Namespace classes""" import errno -from ._compat import pickle, anydbm, add_metaclass, PYVER, unicode_text +from ._compat import pickle, anydbm, unicode_text import beaker.util as util import logging @@ -597,11 +597,6 @@ def __getitem__(self, key): return pickle.loads(self.dbm[key]) def __contains__(self, key): - if PYVER == (3, 2): - # Looks like this is a bug that got solved in PY3.3 and PY3.4 - # http://bugs.python.org/issue19288 - if isinstance(key, unicode_text): - key = key.encode('UTF-8') return key in self.dbm def __setitem__(self, key, value): @@ -734,8 +729,7 @@ def __call__(self, key, context, namespace, createfunc=None, return Value(key, ns, createfunc=createfunc, expiretime=expiretime, starttime=starttime) -@add_metaclass(ContainerMeta) -class Container(object): +class Container(metaclass=ContainerMeta): """Implements synchronization and value-creation logic for a 'value' stored in a :class:`.NamespaceManager`. diff --git a/beaker/cookie.py b/beaker/cookie.py index 729fbe3c..43224e9b 100644 --- a/beaker/cookie.py +++ b/beaker/cookie.py @@ -1,62 +1,18 @@ -import sys -from ._compat import http_cookies +"""Cookie handling utilities for Beaker. 
-# Some versions of Python 2.7 and later won't need this encoding bug fix: -_cookie_encodes_correctly = http_cookies.SimpleCookie().value_encode(';') == (';', '"\\073"') - -# Cookie pickling bug is fixed in Python 2.7.9 and Python 3.4.3+ -# http://bugs.python.org/issue22775 -cookie_pickles_properly = ( - (sys.version_info[:2] == (2, 7) and sys.version_info >= (2, 7, 9)) or - sys.version_info >= (3, 4, 3) -) - -# Add support for the SameSite attribute (obsolete when PY37 is unsupported). -http_cookies.Morsel._reserved.setdefault('samesite', 'SameSite') +This module provides a SimpleCookie class that gracefully handles +invalid cookie keys while keeping around the session. +""" +import http.cookies as http_cookies # Adapted from Django.http.cookies and always enabled the bad_cookies # behaviour to cope with any invalid cookie key while keeping around # the session. class SimpleCookie(http_cookies.SimpleCookie): - if not cookie_pickles_properly: - def __setitem__(self, key, value): - # Apply the fix from http://bugs.python.org/issue22775 where - # it's not fixed in Python itself - if isinstance(value, http_cookies.Morsel): - # allow assignment of constructed Morsels (e.g. for pickling) - dict.__setitem__(self, key, value) - else: - super(SimpleCookie, self).__setitem__(key, value) - - if not _cookie_encodes_correctly: - def value_encode(self, val): - # Some browsers do not support quoted-string from RFC 2109, - # including some versions of Safari and Internet Explorer. - # These browsers split on ';', and some versions of Safari - # are known to split on ', '. Therefore, we encode ';' and ',' - - # SimpleCookie already does the hard work of encoding and decoding. - # It uses octal sequences like '\\012' for newline etc. - # and non-ASCII chars. We just make use of this mechanism, to - # avoid introducing two encoding schemes which would be confusing - # and especially awkward for javascript. - - # NB, contrary to Python docs, value_encode returns a tuple containing - # (real val, encoded_val) - val, encoded = super(SimpleCookie, self).value_encode(val) - - encoded = encoded.replace(";", "\\073").replace(",", "\\054") - # If encoded now contains any quoted chars, we need double quotes - # around the whole string. - if "\\" in encoded and not encoded.startswith('"'): - encoded = '"' + encoded + '"' - - return val, encoded - def load(self, rawdata): self.bad_cookies = set() - super(SimpleCookie, self).load(rawdata) + super().load(rawdata) for key in self.bad_cookies: del self[key] @@ -64,7 +20,7 @@ def load(self, rawdata): # (needed for using our Morsel, and for laxness with CookieError def _BaseCookie__set(self, key, real_value, coded_value): try: - super(SimpleCookie, self)._BaseCookie__set(key, real_value, coded_value) + super()._BaseCookie__set(key, real_value, coded_value) except http_cookies.CookieError: if not hasattr(self, 'bad_cookies'): self.bad_cookies = set() diff --git a/beaker/crypto/__init__.py b/beaker/crypto/__init__.py index c7085a8e..2f27685b 100644 --- a/beaker/crypto/__init__.py +++ b/beaker/crypto/__init__.py @@ -15,9 +15,7 @@ Return the maximum size for keys for this crypto object, in bytes. """ - -from .._compat import JYTHON - +import sys from beaker.crypto.pbkdf2 import pbkdf2 from beaker.crypto.util import hmac, sha1, hmac_sha1, md5 @@ -29,6 +27,9 @@ CRYPTO_MODULES = {} +# Check if we're running on Jython +JYTHON = sys.platform.startswith('java') + def load_default_module(): """Load the default crypto module and return it. 
diff --git a/beaker/crypto/pbkdf2.py b/beaker/crypto/pbkdf2.py index 3dca7376..39d689d0 100644 --- a/beaker/crypto/pbkdf2.py +++ b/beaker/crypto/pbkdf2.py @@ -1,94 +1,29 @@ """ -PBKDF2 Implementation adapted from django.utils.crypto. +PBKDF2 Implementation using the stdlib. This is used to generate the encryption key for enciphered sessions. """ -from beaker._compat import bytes_, xrange_ +from beaker._compat import bytes_ -import hmac -import struct import hashlib -import binascii -def _bin_to_long(x): - """Convert a binary string into a long integer""" - return int(binascii.hexlify(x), 16) - - -def _long_to_bin(x, hex_format_string): - """ - Convert a long integer into a binary string. - hex_format_string is like "%020x" for padding 10 characters. +def pbkdf2(password, salt, iterations, dklen=0, digest=None): """ - return binascii.unhexlify((hex_format_string % x).encode('ascii')) - - -if hasattr(hashlib, "pbkdf2_hmac"): - def pbkdf2(password, salt, iterations, dklen=0, digest=None): - """ - Implements PBKDF2 using the stdlib. This is used in Python 2.7.8+ and 3.4+. - - HMAC+SHA256 is used as the default pseudo random function. + Implements PBKDF2 using the stdlib. - As of 2014, 100,000 iterations was the recommended default which took - 100ms on a 2.7Ghz Intel i7 with an optimized implementation. This is - probably the bare minimum for security given 1000 iterations was - recommended in 2001. - """ - if digest is None: - digest = hashlib.sha1 - if not dklen: - dklen = None - password = bytes_(password) - salt = bytes_(salt) - return hashlib.pbkdf2_hmac( - digest().name, password, salt, iterations, dklen) -else: - def pbkdf2(password, salt, iterations, dklen=0, digest=None): - """ - Implements PBKDF2 as defined in RFC 2898, section 5.2 + HMAC+SHA256 is used as the default pseudo random function. - HMAC+SHA256 is used as the default pseudo random function. - - As of 2014, 100,000 iterations was the recommended default which took - 100ms on a 2.7Ghz Intel i7 with an optimized implementation. This is - probably the bare minimum for security given 1000 iterations was - recommended in 2001. This code is very well optimized for CPython and - is about five times slower than OpenSSL's implementation. - """ - assert iterations > 0 - if not digest: - digest = hashlib.sha1 - password = bytes_(password) - salt = bytes_(salt) - hlen = digest().digest_size - if not dklen: - dklen = hlen - if dklen > (2 ** 32 - 1) * hlen: - raise OverflowError('dklen too big') - l = -(-dklen // hlen) - r = dklen - (l - 1) * hlen - - hex_format_string = "%%0%ix" % (hlen * 2) - - inner, outer = digest(), digest() - if len(password) > inner.block_size: - password = digest(password).digest() - password += b'\x00' * (inner.block_size - len(password)) - inner.update(password.translate(hmac.trans_36)) - outer.update(password.translate(hmac.trans_5C)) - - def F(i): - u = salt + struct.pack(b'>I', i) - result = 0 - for j in xrange_(int(iterations)): - dig1, dig2 = inner.copy(), outer.copy() - dig1.update(u) - dig2.update(dig1.digest()) - u = dig2.digest() - result ^= _bin_to_long(u) - return _long_to_bin(result, hex_format_string) - - T = [F(x) for x in xrange_(1, l)] - return b''.join(T) + F(l)[:r] + As of 2014, 100,000 iterations was the recommended default which took + 100ms on a 2.7Ghz Intel i7 with an optimized implementation. This is + probably the bare minimum for security given 1000 iterations was + recommended in 2001. 
+ """ + if digest is None: + digest = hashlib.sha1 + if not dklen: + dklen = None + password = bytes_(password) + salt = bytes_(salt) + return hashlib.pbkdf2_hmac( + digest().name, password, salt, iterations, dklen) diff --git a/beaker/ext/memcached.py b/beaker/ext/memcached.py index 1b8a881f..ed55febb 100644 --- a/beaker/ext/memcached.py +++ b/beaker/ext/memcached.py @@ -1,5 +1,3 @@ -from .._compat import PY2 - from beaker.container import NamespaceManager, Container from beaker.crypto.util import sha1 from beaker.exceptions import InvalidCacheBackendError, MissingCacheParameter @@ -94,8 +92,7 @@ def _format_key(self, key): key = key.decode('ascii') formated_key = (self.namespace + '_' + key).replace(' ', '\302\267') if len(formated_key) > MAX_KEY_LENGTH: - if not PY2: - formated_key = formated_key.encode('utf-8') + formated_key = formated_key.encode('utf-8') formated_key = sha1(formated_key).hexdigest() return formated_key diff --git a/beaker/ext/mongodb.py b/beaker/ext/mongodb.py index 95ee74a3..9f60ef37 100644 --- a/beaker/ext/mongodb.py +++ b/beaker/ext/mongodb.py @@ -16,7 +16,7 @@ from beaker.synchronization import SynchronizerImpl from beaker.util import SyncDict, machine_identifier from beaker.crypto.util import sha1 -from beaker._compat import string_type, PY2 +from beaker._compat import string_type class MongoNamespaceManager(NamespaceManager): @@ -50,8 +50,7 @@ def _format_key(self, key): if not isinstance(key, str): key = key.decode('ascii') if len(key) > (self.MAX_KEY_LENGTH - len(self.namespace) - 1): - if not PY2: - key = key.encode('utf-8') + key = key.encode('utf-8') key = sha1(key).hexdigest() return '%s:%s' % (self.namespace, key) diff --git a/beaker/ext/redisnm.py b/beaker/ext/redisnm.py index fe958866..956f47e1 100644 --- a/beaker/ext/redisnm.py +++ b/beaker/ext/redisnm.py @@ -12,7 +12,7 @@ from beaker.synchronization import SynchronizerImpl from beaker.util import SyncDict, machine_identifier from beaker.crypto.util import sha1 -from beaker._compat import string_type, PY2 +from beaker._compat import string_type class RedisNamespaceManager(NamespaceManager): @@ -47,8 +47,7 @@ def _format_key(self, key): if not isinstance(key, str): key = key.decode('ascii') if len(key) > (self.MAX_KEY_LENGTH - len(self.namespace) - len('beaker_cache:') - 1): - if not PY2: - key = key.encode('utf-8') + key = key.encode('utf-8') key = sha1(key).hexdigest() return 'beaker_cache:%s:%s' % (self.namespace, key) diff --git a/beaker/session.py b/beaker/session.py index ab3a0752..8d17bda4 100644 --- a/beaker/session.py +++ b/beaker/session.py @@ -1,4 +1,4 @@ -from ._compat import PY2, pickle, http_cookies, unicode_text, b64encode, b64decode, string_type +from ._compat import pickle, http_cookies, unicode_text, b64encode, b64decode, string_type import os import time diff --git a/beaker/util.py b/beaker/util.py index 64b61f3c..e4287d22 100644 --- a/beaker/util.py +++ b/beaker/util.py @@ -4,7 +4,7 @@ import binascii -from ._compat import PY2, string_type, unicode_text, NoneType, dictkeyslist, im_class, im_func, pickle, func_signature, \ +from ._compat import string_type, unicode_text, NoneType, dictkeyslist, im_class, im_func, pickle, func_signature, \ default_im_func try: diff --git a/setup.py b/setup.py index 9bbf141c..f404b79f 100644 --- a/setup.py +++ b/setup.py @@ -1,11 +1,8 @@ import os -import sys import re -import inspect from setuptools import setup, find_packages -py_version = sys.version_info[:2] here = os.path.abspath(os.path.dirname(__file__)) v = open(os.path.join(here, 'beaker', 
'__init__.py')) VERSION = re.compile(r".*__version__ = '(.*?)'", re.S).match(v.read()).group(1) @@ -17,41 +14,8 @@ README = '' -INSTALL_REQUIRES = [] -if not hasattr(inspect, 'signature'): - # On Python 2.6, 2.7 and 3.2 we need funcsigs dependency - INSTALL_REQUIRES.append('funcsigs') - - -TESTS_REQUIRE = ['pytest', 'pycryptodome'] - -if py_version == (2, 6): - TESTS_REQUIRE.append('WebTest<2.0.24') - TESTS_REQUIRE.append('pycparser==2.18') -else: - TESTS_REQUIRE.append('webtest') - -if py_version == (3, 2): - TESTS_REQUIRE.append('coverage < 4.0') -else: - TESTS_REQUIRE.append('coverage') - -if py_version == (3, 3): - TESTS_REQUIRE.append('cryptography < 2.1.0') -else: - TESTS_REQUIRE.append('cryptography') - -if not sys.platform.startswith('java') and not sys.platform == 'cli': - if py_version == (2, 6): - TESTS_REQUIRE.append('sqlalchemy < 1.2') - else: - TESTS_REQUIRE.append('sqlalchemy') - TESTS_REQUIRE.extend(['pymongo', 'redis']) - try: - import sqlite3 - except ImportError: - TESTS_REQUIRE.append('pysqlite') - TESTS_REQUIRE.extend(['python-memcached']) +TESTS_REQUIRE = ['pytest', 'pycryptodome', 'webtest', 'coverage', 'cryptography', + 'sqlalchemy', 'pymongo', 'redis', 'python-memcached'] setup(name='Beaker', @@ -64,6 +28,7 @@ 'Intended Audience :: Developers', 'License :: OSI Approved :: BSD License', 'Programming Language :: Python', + 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.8', 'Programming Language :: Python :: 3.9', 'Programming Language :: Python :: 3.10', @@ -71,8 +36,6 @@ 'Programming Language :: Python :: 3.12', 'Programming Language :: Python :: 3.13', 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', - 'Topic :: Internet :: WWW/HTTP :: Dynamic Content', 'Topic :: Internet :: WWW/HTTP :: WSGI', 'Topic :: Internet :: WWW/HTTP :: WSGI :: Middleware', ], @@ -83,8 +46,8 @@ license='BSD', license_files=['LICENSE'], packages=find_packages(exclude=['ez_setup', 'examples', 'tests', 'tests.*']), + python_requires='>=3.8', zip_safe=False, - install_requires=INSTALL_REQUIRES, extras_require={ 'crypto': ['pycryptopp>=0.5.12'], 'pycrypto': ['pycrypto'], From a90704a87e647f33add88b6a97fb20a3e3f87eb6 Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Fri, 28 Nov 2025 22:39:42 +0000 Subject: [PATCH 3/4] Remove type aliases and u_() function per review feedback Co-authored-by: amol- <601423+amol-@users.noreply.github.com> --- beaker/_compat.py | 11 --- beaker/cache.py | 10 +-- beaker/container.py | 2 +- beaker/converters.py | 6 +- beaker/ext/mongodb.py | 5 +- beaker/ext/redisclusternm.py | 5 +- beaker/ext/redisnm.py | 5 +- beaker/session.py | 4 +- beaker/util.py | 50 +++++------ tests/test_cache.py | 12 +-- tests/test_converters.py | 3 +- tests/test_database.py | 12 +-- tests/test_managers/base.py | 22 ++--- tests/test_memcached.py | 24 +++--- tests/test_session.py | 140 +++++++++++++++---------------- tests/test_sqla.py | 12 +-- tests/test_unicode_cache_keys.py | 18 ++-- 17 files changed, 162 insertions(+), 179 deletions(-) diff --git a/beaker/_compat.py b/beaker/_compat.py index bdcff72a..c4ff3a89 100644 --- a/beaker/_compat.py +++ b/beaker/_compat.py @@ -19,12 +19,6 @@ except ImportError: import dbm.dumb as anydbm -# Type aliases for backwards compatibility -NoneType = type(None) -string_type = str -unicode_text = str -byte_string = bytes - def b64decode(b): """Base64 decode a string, returning bytes.""" @@ -36,11 +30,6 @@ def b64encode(s): 
return _b64encode(s).decode('ascii') -def u_(s): - """Convert to string (unicode).""" - return str(s) - - def bytes_(s): """Convert to bytes.""" if isinstance(s, bytes): diff --git a/beaker/cache.py b/beaker/cache.py index 6fa0d567..21f367e0 100644 --- a/beaker/cache.py +++ b/beaker/cache.py @@ -9,7 +9,7 @@ import warnings from itertools import chain -from beaker._compat import u_, unicode_text, func_signature, bindfuncargs +from beaker._compat import func_signature, bindfuncargs import beaker.container as container import beaker.util as util from beaker.crypto.util import sha1 @@ -327,7 +327,7 @@ def remove_value(self, key, **kw): remove = remove_value def _get_value(self, key, **kw): - if isinstance(key, unicode_text): + if isinstance(key, str): key = key.encode('ascii', 'backslashreplace') if 'type' in kw: @@ -573,13 +573,13 @@ def cached(*args, **kwargs): # kwargs provided, merge them in positional args # to avoid having different cache keys. args, kwargs = bindfuncargs(signature, args, kwargs) - cache_key_kwargs = [u_(':').join((u_(key), u_(value))) for key, value in kwargs.items()] + cache_key_kwargs = [':'.join((str(key), str(value))) for key, value in kwargs.items()] cache_key_args = args if skip_self: cache_key_args = args[1:] - cache_key = u_(" ").join(map(u_, chain(deco_args, cache_key_args, cache_key_kwargs))) + cache_key = " ".join(map(str, chain(deco_args, cache_key_args, cache_key_kwargs))) if region: cachereg = cache_regions[region] @@ -608,7 +608,7 @@ def go(): def _cache_decorator_invalidate(cache, key_length, args): """Invalidate a cache key based on function arguments.""" - cache_key = u_(" ").join(map(u_, args)) + cache_key = " ".join(map(str, args)) if len(cache_key) + len(cache.namespace_name) > key_length: cache_key = sha1(cache_key.encode('utf-8')).hexdigest() cache.remove_value(cache_key) diff --git a/beaker/container.py b/beaker/container.py index c0861e63..626a93d0 100644 --- a/beaker/container.py +++ b/beaker/container.py @@ -1,7 +1,7 @@ """Container and Namespace classes""" import errno -from ._compat import pickle, anydbm, unicode_text +from ._compat import pickle, anydbm import beaker.util as util import logging diff --git a/beaker/converters.py b/beaker/converters.py index a8fb3c93..468c7ec1 100644 --- a/beaker/converters.py +++ b/beaker/converters.py @@ -1,9 +1,7 @@ -from beaker._compat import string_type - # (c) 2005 Ian Bicking and contributors; written for Paste (http://pythonpaste.org) # Licensed under the MIT license: http://www.opensource.org/licenses/mit-license.php def asbool(obj): - if isinstance(obj, string_type): + if isinstance(obj, str): obj = obj.strip().lower() if obj in ['true', 'yes', 'on', 'y', 't', '1']: return True @@ -16,7 +14,7 @@ def asbool(obj): def aslist(obj, sep=None, strip=True): - if isinstance(obj, string_type): + if isinstance(obj, str): lst = obj.split(sep) if strip: lst = [v.strip() for v in lst] diff --git a/beaker/ext/mongodb.py b/beaker/ext/mongodb.py index 9f60ef37..59b2b7ae 100644 --- a/beaker/ext/mongodb.py +++ b/beaker/ext/mongodb.py @@ -16,7 +16,6 @@ from beaker.synchronization import SynchronizerImpl from beaker.util import SyncDict, machine_identifier from beaker.crypto.util import sha1 -from beaker._compat import string_type class MongoNamespaceManager(NamespaceManager): @@ -40,7 +39,7 @@ def __init__(self, namespace, url, **kw): if pymongo is None: raise RuntimeError('pymongo3 is not available') - if isinstance(url, string_type): + if isinstance(url, str): self.client = MongoNamespaceManager.clients.get(url, 
pymongo.MongoClient, url) else: self.client = url @@ -127,7 +126,7 @@ class MongoSynchronizer(SynchronizerImpl): def __init__(self, identifier, url): super(MongoSynchronizer, self).__init__() self.identifier = identifier - if isinstance(url, string_type): + if isinstance(url, str): self.client = MongoNamespaceManager.clients.get(url, pymongo.MongoClient, url) else: self.client = url diff --git a/beaker/ext/redisclusternm.py b/beaker/ext/redisclusternm.py index 96a43d58..002cf2ec 100644 --- a/beaker/ext/redisclusternm.py +++ b/beaker/ext/redisclusternm.py @@ -11,7 +11,6 @@ redis = None from beaker.ext.redisnm import RedisNamespaceManager, RedisSynchronizer -from beaker._compat import string_type class RedisClusterNamespaceManager(RedisNamespaceManager): @@ -41,7 +40,7 @@ def __init__(self, namespace, urls, timeout=None, **kwargs): if redis is None: raise RuntimeError('redis is not available') - if isinstance(urls, string_type): + if isinstance(urls, str): for url in urls.split(','): url_options = redis.connection.parse_url(url) if 'db' in url_options: @@ -86,7 +85,7 @@ class RedisClusterSynchronizer(RedisSynchronizer): def __init__(self, identifier, urls, nodes=None, **kwargs): super(RedisSynchronizer, self).__init__() self.identifier = 'beaker_lock:%s' % identifier - if isinstance(urls, string_type): + if isinstance(urls, str): self.client = RedisClusterNamespaceManager.clients.get( urls, redis.cluster.RedisCluster, startup_nodes=nodes, **kwargs ) diff --git a/beaker/ext/redisnm.py b/beaker/ext/redisnm.py index 956f47e1..f577343b 100644 --- a/beaker/ext/redisnm.py +++ b/beaker/ext/redisnm.py @@ -12,7 +12,6 @@ from beaker.synchronization import SynchronizerImpl from beaker.util import SyncDict, machine_identifier from beaker.crypto.util import sha1 -from beaker._compat import string_type class RedisNamespaceManager(NamespaceManager): @@ -38,7 +37,7 @@ def __init__(self, namespace, url, timeout=None, **kw): if redis is None: raise RuntimeError('redis is not available') - if isinstance(url, string_type): + if isinstance(url, str): self.client = RedisNamespaceManager.clients.get(url, redis.StrictRedis.from_url, url) else: self.client = url @@ -105,7 +104,7 @@ class RedisSynchronizer(SynchronizerImpl): def __init__(self, identifier, url): super(RedisSynchronizer, self).__init__() self.identifier = 'beaker_lock:%s' % identifier - if isinstance(url, string_type): + if isinstance(url, str): self.client = RedisNamespaceManager.clients.get(url, redis.StrictRedis.from_url, url) else: self.client = url diff --git a/beaker/session.py b/beaker/session.py index 8d17bda4..726fe879 100644 --- a/beaker/session.py +++ b/beaker/session.py @@ -1,4 +1,4 @@ -from ._compat import pickle, http_cookies, unicode_text, b64encode, b64decode, string_type +from ._compat import pickle, http_cookies, b64encode, b64decode import os import time @@ -241,7 +241,7 @@ def _set_serializer(self, data_serializer): self.serializer = util.JsonSerializer() elif self.data_serializer == 'pickle': self.serializer = util.PickleSerializer() - elif isinstance(self.data_serializer, string_type): + elif isinstance(self.data_serializer, str): raise BeakerException('Invalid value for data_serializer: %s' % data_serializer) else: self.serializer = data_serializer diff --git a/beaker/util.py b/beaker/util.py index e4287d22..4d802eb8 100644 --- a/beaker/util.py +++ b/beaker/util.py @@ -4,7 +4,7 @@ import binascii -from ._compat import string_type, unicode_text, NoneType, dictkeyslist, im_class, im_func, pickle, func_signature, \ +from ._compat 
import dictkeyslist, im_class, im_func, pickle, func_signature, \ default_im_func try: @@ -104,7 +104,7 @@ def has_self_arg(func): def warn(msg, stacklevel=3): """Issue a warning.""" - if isinstance(msg, string_type): + if isinstance(msg, str): warnings.warn(msg, exceptions.BeakerWarning, stacklevel=stacklevel) else: warnings.warn(msg, stacklevel=stacklevel) @@ -228,7 +228,7 @@ def encoded_path(root, identifiers, extension=".enc", depth=3, from beaker.crypto import sha1 if digest_filenames: - if isinstance(ident, unicode_text): + if isinstance(ident, str): ident = sha1(ident.encode('utf-8')).hexdigest() else: ident = sha1(ident).hexdigest() @@ -248,7 +248,7 @@ def encoded_path(root, identifiers, extension=".enc", depth=3, def asint(obj): if isinstance(obj, int): return obj - elif isinstance(obj, string_type) and re.match(r'^\d+$', obj): + elif isinstance(obj, str) and re.match(r'^\d+$', obj): return int(obj) else: raise Exception("This is not a proper int") @@ -293,26 +293,26 @@ def verify_rules(params, ruleset): def coerce_session_params(params): rules = [ - ('data_dir', (str, NoneType), "data_dir must be a string referring to a directory."), - ('lock_dir', (str, NoneType), "lock_dir must be a string referring to a directory."), - ('type', (str, NoneType), "Session type must be a string."), + ('data_dir', (str, type(None)), "data_dir must be a string referring to a directory."), + ('lock_dir', (str, type(None)), "lock_dir must be a string referring to a directory."), + ('type', (str, type(None)), "Session type must be a string."), ('cookie_expires', (bool, datetime, timedelta, int), "Cookie expires was not a boolean, datetime, int, or timedelta instance."), - ('cookie_domain', (str, NoneType), "Cookie domain must be a string."), - ('cookie_path', (str, NoneType), "Cookie path must be a string."), + ('cookie_domain', (str, type(None)), "Cookie domain must be a string."), + ('cookie_path', (str, type(None)), "Cookie path must be a string."), ('id', (str,), "Session id must be a string."), ('key', (str,), "Session key must be a string."), - ('secret', (str, NoneType), "Session secret must be a string."), - ('validate_key', (str, NoneType), "Session validate_key must be a string."), - ('encrypt_key', (str, NoneType), "Session encrypt_key must be a string."), - ('encrypt_nonce_bits', (int, NoneType), "Session encrypt_nonce_bits must be a number"), - ('secure', (bool, NoneType), "Session secure must be a boolean."), - ('httponly', (bool, NoneType), "Session httponly must be a boolean."), - ('timeout', (int, NoneType), "Session timeout must be an integer."), - ('save_accessed_time', (bool, NoneType), + ('secret', (str, type(None)), "Session secret must be a string."), + ('validate_key', (str, type(None)), "Session validate_key must be a string."), + ('encrypt_key', (str, type(None)), "Session encrypt_key must be a string."), + ('encrypt_nonce_bits', (int, type(None)), "Session encrypt_nonce_bits must be a number"), + ('secure', (bool, type(None)), "Session secure must be a boolean."), + ('httponly', (bool, type(None)), "Session httponly must be a boolean."), + ('timeout', (int, type(None)), "Session timeout must be an integer."), + ('save_accessed_time', (bool, type(None)), "Session save_accessed_time must be a boolean (defaults to true)."), - ('auto', (bool, NoneType), "Session is created if accessed."), - ('webtest_varname', (str, NoneType), "Session varname must be a string."), + ('auto', (bool, type(None)), "Session is created if accessed."), + ('webtest_varname', (str, type(None)), "Session 
varname must be a string."), ('data_serializer', (str,), "data_serializer must be a string.") ] opts = verify_rules(params, rules) @@ -329,15 +329,15 @@ def coerce_session_params(params): def coerce_cache_params(params): rules = [ - ('data_dir', (str, NoneType), "data_dir must be a string referring to a directory."), - ('lock_dir', (str, NoneType), "lock_dir must be a string referring to a directory."), + ('data_dir', (str, type(None)), "data_dir must be a string referring to a directory."), + ('lock_dir', (str, type(None)), "lock_dir must be a string referring to a directory."), ('type', (str,), "Cache type must be a string."), - ('enabled', (bool, NoneType), "enabled must be true/false if present."), - ('expire', (int, NoneType), + ('enabled', (bool, type(None)), "enabled must be true/false if present."), + ('expire', (int, type(None)), "expire must be an integer representing how many seconds the cache is valid for"), - ('regions', (list, tuple, NoneType), + ('regions', (list, tuple, type(None)), "Regions must be a comma separated list of valid regions"), - ('key_length', (int, NoneType), + ('key_length', (int, type(None)), "key_length must be an integer which indicates the longest a key can be before hashing"), ] return verify_rules(params, rules) diff --git a/tests/test_cache.py b/tests/test_cache.py index cbdb92c0..8b6709c3 100644 --- a/tests/test_cache.py +++ b/tests/test_cache.py @@ -1,5 +1,5 @@ # coding: utf-8 -from beaker._compat import u_, bytes_ +from beaker._compat import bytes_ import os import platform @@ -146,11 +146,11 @@ def test_has_key_multicache(): def test_unicode_keys(): cache = Cache('test', data_dir='./cache', type='dbm') o = object() - cache.set_value(u_('hiŏ'), o) - assert u_('hiŏ') in cache - assert u_('hŏa') not in cache - cache.remove_value(u_('hiŏ')) - assert u_('hiŏ') not in cache + cache.set_value('hiŏ', o) + assert 'hiŏ' in cache + assert 'hŏa' not in cache + cache.remove_value('hiŏ') + assert 'hiŏ' not in cache def test_remove_stale(): """test that remove_value() removes even if the value is expired.""" diff --git a/tests/test_converters.py b/tests/test_converters.py index ffb02da1..380c4c06 100644 --- a/tests/test_converters.py +++ b/tests/test_converters.py @@ -1,4 +1,3 @@ -from beaker._compat import u_ import unittest from beaker.converters import asbool, aslist @@ -27,7 +26,7 @@ def test_coerce(self): def test_bad_values(self): self.assertRaises(ValueError, asbool, ('mommy!')) - self.assertRaises(ValueError, asbool, (u_('Blargl?'))) + self.assertRaises(ValueError, asbool, ('Blargl?')) class AsList(unittest.TestCase): diff --git a/tests/test_database.py b/tests/test_database.py index 202cac52..4849cbb4 100644 --- a/tests/test_database.py +++ b/tests/test_database.py @@ -1,5 +1,5 @@ # coding: utf-8 -from beaker._compat import u_ +# Removed u_ import from beaker.cache import clsmap, Cache, util from beaker.exceptions import InvalidCacheBackendError @@ -85,11 +85,11 @@ def test_clear(): def test_unicode_keys(): cache = Cache('test', data_dir='./cache', url=db_url, type='ext:database') o = object() - cache.set_value(u_('hiŏ'), o) - assert u_('hiŏ') in cache - assert u_('hŏa') not in cache - cache.remove_value(u_('hiŏ')) - assert u_('hiŏ') not in cache + cache.set_value('hiŏ', o) + assert 'hiŏ' in cache + assert 'hŏa' not in cache + cache.remove_value('hiŏ') + assert 'hiŏ' not in cache @util.skip_if(lambda: WebTestApp is None, "webtest not installed") def test_increment(): diff --git a/tests/test_managers/base.py b/tests/test_managers/base.py index 
3f1fd9ed..b4729f7d 100644 --- a/tests/test_managers/base.py +++ b/tests/test_managers/base.py @@ -6,7 +6,7 @@ import datetime -from beaker._compat import u_ +# Removed u_ import from beaker.cache import Cache from beaker.middleware import SessionMiddleware, CacheMiddleware from webtest import TestApp as WebTestApp @@ -161,11 +161,11 @@ def test_has_key_multicache(self): def test_unicode_keys(self): cache = Cache('test', **self.CACHE_ARGS) o = object() - cache.set_value(u_('hiŏ'), o) - assert u_('hiŏ') in cache - assert u_('hŏa') not in cache - cache.remove_value(u_('hiŏ')) - assert u_('hiŏ') not in cache + cache.set_value('hiŏ', o) + assert 'hiŏ' in cache + assert 'hŏa' not in cache + cache.remove_value('hiŏ') + assert 'hiŏ' not in cache def test_long_unicode_keys(self): cache = Cache('test', **self.CACHE_ARGS) @@ -181,11 +181,11 @@ def test_long_unicode_keys(self): def test_spaces_in_unicode_keys(self): cache = Cache('test', **self.CACHE_ARGS) o = object() - cache.set_value(u_('hi ŏ'), o) - assert u_('hi ŏ') in cache - assert u_('hŏa') not in cache - cache.remove_value(u_('hi ŏ')) - assert u_('hi ŏ') not in cache + cache.set_value('hi ŏ', o) + assert 'hi ŏ' in cache + assert 'hŏa' not in cache + cache.remove_value('hi ŏ') + assert 'hi ŏ' not in cache def test_spaces_in_keys(self): cache = Cache('test', **self.CACHE_ARGS) diff --git a/tests/test_memcached.py b/tests/test_memcached.py index 043227ca..6500063f 100644 --- a/tests/test_memcached.py +++ b/tests/test_memcached.py @@ -1,5 +1,5 @@ # coding: utf-8 -from beaker._compat import u_ +# Removed u_ import import unittest.mock @@ -195,16 +195,16 @@ def test_has_key_multicache(): def test_unicode_keys(): cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') o = object() - cache.set_value(u_('hiŏ'), o) - assert u_('hiŏ') in cache - assert u_('hŏa') not in cache - cache.remove_value(u_('hiŏ')) - assert u_('hiŏ') not in cache + cache.set_value('hiŏ', o) + assert 'hiŏ' in cache + assert 'hŏa' not in cache + cache.remove_value('hiŏ') + assert 'hiŏ' not in cache def test_long_unicode_keys(): cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') o = object() - long_str = u_('Очень длинная строка, которая не влезает в сто двадцать восемь байт и поэтому не проходит ограничение в check_key, что очень прискорбно, не правда ли, друзья? Давайте же скорее исправим это досадное недоразумение!') + long_str = 'Очень длинная строка, которая не влезает в сто двадцать восемь байт и поэтому не проходит ограничение в check_key, что очень прискорбно, не правда ли, друзья? Давайте же скорее исправим это досадное недоразумение!' 
cache.set_value(long_str, o) assert long_str in cache cache.remove_value(long_str) @@ -213,11 +213,11 @@ def test_long_unicode_keys(): def test_spaces_in_unicode_keys(): cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') o = object() - cache.set_value(u_('hi ŏ'), o) - assert u_('hi ŏ') in cache - assert u_('hŏa') not in cache - cache.remove_value(u_('hi ŏ')) - assert u_('hi ŏ') not in cache + cache.set_value('hi ŏ', o) + assert 'hi ŏ' in cache + assert 'hŏa' not in cache + cache.remove_value('hi ŏ') + assert 'hi ŏ' not in cache def test_spaces_in_keys(): cache = Cache('test', data_dir='./cache', url=mc_url, type='ext:memcached') diff --git a/tests/test_session.py b/tests/test_session.py index 77c4c745..a400f4e8 100644 --- a/tests/test_session.py +++ b/tests/test_session.py @@ -1,5 +1,5 @@ # -*- coding: utf-8 -*- -from beaker._compat import u_, pickle, b64decode +from beaker._compat import pickle, b64decode import binascii import shutil @@ -60,19 +60,19 @@ def test_session(): def check_save_load(session_getter): """Test if the data is actually persistent across requests""" session = session_getter() - session[u_('Suomi')] = u_('Kimi Räikkönen') - session[u_('Great Britain')] = u_('Jenson Button') - session[u_('Deutchland')] = u_('Sebastian Vettel') + session['Suomi'] = 'Kimi Räikkönen' + session['Great Britain'] = 'Jenson Button' + session['Deutchland'] = 'Sebastian Vettel' session.save() session = session_getter(id=session.id) - assert u_('Suomi') in session - assert u_('Great Britain') in session - assert u_('Deutchland') in session + assert 'Suomi' in session + assert 'Great Britain' in session + assert 'Deutchland' in session - assert session[u_('Suomi')] == u_('Kimi Räikkönen') - assert session[u_('Great Britain')] == u_('Jenson Button') - assert session[u_('Deutchland')] == u_('Sebastian Vettel') + assert session['Suomi'] == 'Kimi Räikkönen' + assert session['Great Britain'] == 'Jenson Button' + assert session['Deutchland'] == 'Sebastian Vettel' @pytest.mark.skipif(not get_crypto_module('default').has_aes) @@ -80,20 +80,20 @@ def check_save_load_encryption(session_getter): """Test if the data is actually persistent across requests""" session = session_getter(encrypt_key='666a19cf7f61c64c', validate_key='hoobermas') - session[u_('Suomi')] = u_('Kimi Räikkönen') - session[u_('Great Britain')] = u_('Jenson Button') - session[u_('Deutchland')] = u_('Sebastian Vettel') + session['Suomi'] = 'Kimi Räikkönen' + session['Great Britain'] = 'Jenson Button' + session['Deutchland'] = 'Sebastian Vettel' session.save() session = session_getter(id=session.id, encrypt_key='666a19cf7f61c64c', validate_key='hoobermas') - assert u_('Suomi') in session - assert u_('Great Britain') in session - assert u_('Deutchland') in session + assert 'Suomi' in session + assert 'Great Britain' in session + assert 'Deutchland' in session - assert session[u_('Suomi')] == u_('Kimi Räikkönen') - assert session[u_('Great Britain')] == u_('Jenson Button') - assert session[u_('Deutchland')] == u_('Sebastian Vettel') + assert session['Suomi'] == 'Kimi Räikkönen' + assert session['Great Britain'] == 'Jenson Button' + assert session['Deutchland'] == 'Sebastian Vettel' # cryptography only works for py3.3+, so skip for python 3.2 @@ -109,22 +109,22 @@ def check_save_load_encryption_cryptography(session_getter): encrypt_key='666a19cf7f61c64c', validate_key='hoobermas', crypto_type='cryptography') - session[u_('Suomi')] = u_('Kimi Räikkönen') - session[u_('Great Britain')] = u_('Jenson Button') - 
session[u_('Deutchland')] = u_('Sebastian Vettel') + session['Suomi'] = 'Kimi Räikkönen' + session['Great Britain'] = 'Jenson Button' + session['Deutchland'] = 'Sebastian Vettel' session.save() session = session_getter( id=session.id, encrypt_key='666a19cf7f61c64c', validate_key='hoobermas', crypto_type='cryptography') - assert u_('Suomi') in session - assert u_('Great Britain') in session - assert u_('Deutchland') in session + assert 'Suomi' in session + assert 'Great Britain' in session + assert 'Deutchland' in session - assert session[u_('Suomi')] == u_('Kimi Räikkönen') - assert session[u_('Great Britain')] == u_('Jenson Button') - assert session[u_('Deutchland')] == u_('Sebastian Vettel') + assert session['Suomi'] == 'Kimi Räikkönen' + assert session['Great Britain'] == 'Jenson Button' + assert session['Deutchland'] == 'Sebastian Vettel' @pytest.mark.skipif(not get_crypto_module('default').has_aes) @@ -132,49 +132,49 @@ def check_decryption_failure(session_getter): """Test if the data fails without the right keys""" session = session_getter(encrypt_key='666a19cf7f61c64c', validate_key='hoobermas') - session[u_('Suomi')] = u_('Kimi Räikkönen') - session[u_('Great Britain')] = u_('Jenson Button') - session[u_('Deutchland')] = u_('Sebastian Vettel') + session['Suomi'] = 'Kimi Räikkönen' + session['Great Britain'] = 'Jenson Button' + session['Deutchland'] = 'Sebastian Vettel' session.save() session = session_getter(id=session.id, encrypt_key='asfdasdfadsfsadf', validate_key='hoobermas', invalidate_corrupt=True) - assert u_('Suomi') not in session - assert u_('Great Britain') not in session + assert 'Suomi' not in session + assert 'Great Britain' not in session def check_delete(session_getter): """Test :meth:`Session.delete`""" session = session_getter() - session[u_('Suomi')] = u_('Kimi Räikkönen') - session[u_('Great Britain')] = u_('Jenson Button') - session[u_('Deutchland')] = u_('Sebastian Vettel') + session['Suomi'] = 'Kimi Räikkönen' + session['Great Britain'] = 'Jenson Button' + session['Deutchland'] = 'Sebastian Vettel' session.delete() - assert u_('Suomi') not in session - assert u_('Great Britain') not in session - assert u_('Deutchland') not in session + assert 'Suomi' not in session + assert 'Great Britain' not in session + assert 'Deutchland' not in session def check_revert(session_getter): """Test :meth:`Session.revert`""" session = session_getter() - session[u_('Suomi')] = u_('Kimi Räikkönen') - session[u_('Great Britain')] = u_('Jenson Button') - session[u_('Deutchland')] = u_('Sebastian Vettel') + session['Suomi'] = 'Kimi Räikkönen' + session['Great Britain'] = 'Jenson Button' + session['Deutchland'] = 'Sebastian Vettel' session.save() session = session_getter(id=session.id) - del session[u_('Suomi')] - session[u_('Great Britain')] = u_('Lewis Hamilton') - session[u_('Deutchland')] = u_('Michael Schumacher') - session[u_('España')] = u_('Fernando Alonso') + del session['Suomi'] + session['Great Britain'] = 'Lewis Hamilton' + session['Deutchland'] = 'Michael Schumacher' + session['España'] = 'Fernando Alonso' session.revert() - assert session[u_('Suomi')] == u_('Kimi Räikkönen') - assert session[u_('Great Britain')] == u_('Jenson Button') - assert session[u_('Deutchland')] == u_('Sebastian Vettel') - assert u_('España') not in session + assert session['Suomi'] == 'Kimi Räikkönen' + assert session['Great Britain'] == 'Jenson Button' + assert session['Deutchland'] == 'Sebastian Vettel' + assert 'España' not in session def check_invalidate(session_getter): @@ -183,17 +183,17 
@@ def check_invalidate(session_getter): session.save() id = session.id created = session.created - session[u_('Suomi')] = u_('Kimi Räikkönen') - session[u_('Great Britain')] = u_('Jenson Button') - session[u_('Deutchland')] = u_('Sebastian Vettel') + session['Suomi'] = 'Kimi Räikkönen' + session['Great Britain'] = 'Jenson Button' + session['Deutchland'] = 'Sebastian Vettel' session.invalidate() session.save() assert session.id != id assert session.created != created - assert u_('Suomi') not in session - assert u_('Great Britain') not in session - assert u_('Deutchland') not in session + assert 'Suomi' not in session + assert 'Great Britain' not in session + assert 'Deutchland' not in session def test_regenerate_id(): @@ -202,13 +202,13 @@ def test_regenerate_id(): setup_cookie_request() session = get_session() orig_id = session.id - session[u_('foo')] = u_('bar') + session['foo'] = 'bar' session.save() # load session session = get_session(id=session.id) # data should still be there - assert session[u_('foo')] == u_('bar') + assert session['foo'] == 'bar' # regenerate the id session.regenerate_id() @@ -216,21 +216,21 @@ def test_regenerate_id(): assert session.id != orig_id # data is still there - assert session[u_('foo')] == u_('bar') + assert session['foo'] == 'bar' # should be the new id assert 'beaker.session.id=%s' % session.id in session.request['cookie_out'] # get a new session before calling save bunk_sess = get_session(id=session.id) - assert u_('foo') not in bunk_sess + assert 'foo' not in bunk_sess # save it session.save() # make sure we get the data back session = get_session(id=session.id) - assert session[u_('foo')] == u_('bar') + assert session['foo'] == 'bar' def check_timeout(session_getter): @@ -239,25 +239,25 @@ def check_timeout(session_getter): session.save() id = session.id created = session.created - session[u_('Suomi')] = u_('Kimi Räikkönen') - session[u_('Great Britain')] = u_('Jenson Button') - session[u_('Deutchland')] = u_('Sebastian Vettel') + session['Suomi'] = 'Kimi Räikkönen' + session['Great Britain'] = 'Jenson Button' + session['Deutchland'] = 'Sebastian Vettel' session.save() session = session_getter(id=session.id, timeout=2) assert session.id == id assert session.created == created - assert session[u_('Suomi')] == u_('Kimi Räikkönen') - assert session[u_('Great Britain')] == u_('Jenson Button') - assert session[u_('Deutchland')] == u_('Sebastian Vettel') + assert session['Suomi'] == 'Kimi Räikkönen' + assert session['Great Britain'] == 'Jenson Button' + assert session['Deutchland'] == 'Sebastian Vettel' time.sleep(2) session = session_getter(id=session.id, timeout=2) assert session.id != id assert session.created != created - assert u_('Suomi') not in session - assert u_('Great Britain') not in session - assert u_('Deutchland') not in session + assert 'Suomi' not in session + assert 'Great Britain' not in session + assert 'Deutchland' not in session def test_timeout_requires_accessed_time(): diff --git a/tests/test_sqla.py b/tests/test_sqla.py index d3ae5aaf..f05a83c8 100644 --- a/tests/test_sqla.py +++ b/tests/test_sqla.py @@ -1,5 +1,5 @@ # coding: utf-8 -from beaker._compat import u_ +# Removed u_ import from beaker.cache import clsmap, Cache, util from beaker.exceptions import InvalidCacheBackendError from beaker.middleware import CacheMiddleware @@ -96,11 +96,11 @@ def test_clear(): def test_unicode_keys(): cache = make_cache() o = object() - cache.set_value(u_('hiŏ'), o) - assert u_('hiŏ') in cache - assert u_('hŏa') not in cache - 
cache.remove_value(u_('hiŏ')) - assert u_('hiŏ') not in cache + cache.set_value('hiŏ', o) + assert 'hiŏ' in cache + assert 'hŏa' not in cache + cache.remove_value('hiŏ') + assert 'hiŏ' not in cache @util.skip_if(lambda: WebTestApp is None, "webtest not installed") def test_increment(): diff --git a/tests/test_unicode_cache_keys.py b/tests/test_unicode_cache_keys.py index 9f81672d..8d0e3128 100644 --- a/tests/test_unicode_cache_keys.py +++ b/tests/test_unicode_cache_keys.py @@ -5,7 +5,7 @@ for more on this """ -from beaker._compat import u_ +# Removed u_ import from beaker.cache import CacheManager def eq_(a, b, msg=''): @@ -31,17 +31,17 @@ def quux(cls, garply): def test_A_unicode_encode_key_str(): eq_(foo('Espanol'), 'Espanol') eq_(foo(12334), 12334) - eq_(foo(u_('Espanol')), u_('Espanol')) - eq_(foo(u_('Español')), u_('Español')) + eq_(foo('Espanol'), 'Espanol') + eq_(foo('Español'), 'Español') b = bar() eq_(b.baz('Espanol'), 'Espanol') eq_(b.baz(12334), 12334) - eq_(b.baz(u_('Espanol')), u_('Espanol')) - eq_(b.baz(u_('Español')), u_('Español')) + eq_(b.baz('Espanol'), 'Espanol') + eq_(b.baz('Español'), 'Español') eq_(b.quux('Espanol'), 'Espanol') eq_(b.quux(12334), 12334) - eq_(b.quux(u_('Espanol')), u_('Espanol')) - eq_(b.quux(u_('Español')), u_('Español')) + eq_(b.quux('Espanol'), 'Espanol') + eq_(b.quux('Español'), 'Español') def test_B_replacing_non_ascii(): @@ -49,8 +49,8 @@ def test_B_replacing_non_ascii(): the function distinguishes between the two it should not return the past value """ - assert foo(u_('Espaáol')) != u_('Español') - eq_(foo(u_('Espaáol')), u_('Espaáol')) + assert foo('Espaáol') != 'Español' + eq_(foo('Espaáol'), 'Espaáol') def test_C_more_unicode(): """We again test the same stuff but this time we use From 5922cb89d8f4e5aa20bfc49bb8bde9753b35cdcb Mon Sep 17 00:00:00 2001 From: "copilot-swe-agent[bot]" <198982749+Copilot@users.noreply.github.com> Date: Sun, 30 Nov 2025 20:42:23 +0000 Subject: [PATCH 4/4] Fix remaining u_() usages in test files Co-authored-by: amol- <601423+amol-@users.noreply.github.com> --- tests/test_managers/base.py | 3 +-- tests/test_unicode_cache_keys.py | 41 ++++++++++++++++---------------- 2 files changed, 21 insertions(+), 23 deletions(-) diff --git a/tests/test_managers/base.py b/tests/test_managers/base.py index b4729f7d..fe25bbee 100644 --- a/tests/test_managers/base.py +++ b/tests/test_managers/base.py @@ -6,7 +6,6 @@ import datetime -# Removed u_ import from beaker.cache import Cache from beaker.middleware import SessionMiddleware, CacheMiddleware from webtest import TestApp as WebTestApp @@ -170,7 +169,7 @@ def test_unicode_keys(self): def test_long_unicode_keys(self): cache = Cache('test', **self.CACHE_ARGS) o = object() - long_str = u_( + long_str = ( 'Очень длинная строка, которая не влезает в сто двадцать восемь байт и поэтому не проходит ограничение в check_key, что очень прискорбно, не правда ли, друзья? Давайте же скорее исправим это досадное недоразумение!' 
) cache.set_value(long_str, o) diff --git a/tests/test_unicode_cache_keys.py b/tests/test_unicode_cache_keys.py index 8d0e3128..8187fde0 100644 --- a/tests/test_unicode_cache_keys.py +++ b/tests/test_unicode_cache_keys.py @@ -5,7 +5,6 @@ for more on this """ -# Removed u_ import from beaker.cache import CacheManager def eq_(a, b, msg=''): @@ -58,45 +57,45 @@ def test_C_more_unicode(): as keys""" keys = [ # arabic (egyptian) - u_("\u0644\u064a\u0647\u0645\u0627\u0628\u062a\u0643\u0644\u0645\u0648\u0634\u0639\u0631\u0628\u064a\u061f"), + "\u0644\u064a\u0647\u0645\u0627\u0628\u062a\u0643\u0644\u0645\u0648\u0634\u0639\u0631\u0628\u064a\u061f", # Chinese (simplified) - u_("\u4ed6\u4eec\u4e3a\u4ec0\u4e48\u4e0d\u8bf4\u4e2d\u6587"), + "\u4ed6\u4eec\u4e3a\u4ec0\u4e48\u4e0d\u8bf4\u4e2d\u6587", # Chinese (traditional) - u_("\u4ed6\u5011\u7232\u4ec0\u9ebd\u4e0d\u8aaa\u4e2d\u6587"), + "\u4ed6\u5011\u7232\u4ec0\u9ebd\u4e0d\u8aaa\u4e2d\u6587", # czech - u_("\u0050\u0072\u006f\u010d\u0070\u0072\u006f\u0073\u0074\u011b\u006e\u0065\u006d\u006c\u0075\u0076\u00ed\u010d\u0065\u0073\u006b\u0079"), + "\u0050\u0072\u006f\u010d\u0070\u0072\u006f\u0073\u0074\u011b\u006e\u0065\u006d\u006c\u0075\u0076\u00ed\u010d\u0065\u0073\u006b\u0079", # hebrew - u_("\u05dc\u05de\u05d4\u05d4\u05dd\u05e4\u05e9\u05d5\u05d8\u05dc\u05d0\u05de\u05d3\u05d1\u05e8\u05d9\u05dd\u05e2\u05d1\u05e8\u05d9\u05ea"), + "\u05dc\u05de\u05d4\u05d4\u05dd\u05e4\u05e9\u05d5\u05d8\u05dc\u05d0\u05de\u05d3\u05d1\u05e8\u05d9\u05dd\u05e2\u05d1\u05e8\u05d9\u05ea", # Hindi (Devanagari) - u_("\u092f\u0939\u0932\u094b\u0917\u0939\u093f\u0928\u094d\u0926\u0940\u0915\u094d\u092f\u094b\u0902\u0928\u0939\u0940\u0902\u092c\u094b\u0932\u0938\u0915\u0924\u0947\u0939\u0948\u0902"), + "\u092f\u0939\u0932\u094b\u0917\u0939\u093f\u0928\u094d\u0926\u0940\u0915\u094d\u092f\u094b\u0902\u0928\u0939\u0940\u0902\u092c\u094b\u0932\u0938\u0915\u0924\u0947\u0939\u0948\u0902", # Japanese (kanji and hiragana) - u_("\u306a\u305c\u307f\u3093\u306a\u65e5\u672c\u8a9e\u3092\u8a71\u3057\u3066\u304f\u308c\u306a\u3044\u306e\u304b"), + "\u306a\u305c\u307f\u3093\u306a\u65e5\u672c\u8a9e\u3092\u8a71\u3057\u3066\u304f\u308c\u306a\u3044\u306e\u304b", # Russian (Cyrillic) - u_("\u043f\u043e\u0447\u0435\u043c\u0443\u0436\u0435\u043e\u043d\u0438\u043d\u0435\u0433\u043e\u0432\u043e\u0440\u044f\u0442\u043f\u043e\u0440\u0443\u0441\u0441\u043a\u0438"), + "\u043f\u043e\u0447\u0435\u043c\u0443\u0436\u0435\u043e\u043d\u0438\u043d\u0435\u0433\u043e\u0432\u043e\u0440\u044f\u0442\u043f\u043e\u0440\u0443\u0441\u0441\u043a\u0438", # Spanish - u_("\u0050\u006f\u0072\u0071\u0075\u00e9\u006e\u006f\u0070\u0075\u0065\u0064\u0065\u006e\u0073\u0069\u006d\u0070\u006c\u0065\u006d\u0065\u006e\u0074\u0065\u0068\u0061\u0062\u006c\u0061\u0072\u0065\u006e\u0045\u0073\u0070\u0061\u00f1\u006f\u006c"), + "\u0050\u006f\u0072\u0071\u0075\u00e9\u006e\u006f\u0070\u0075\u0065\u0064\u0065\u006e\u0073\u0069\u006d\u0070\u006c\u0065\u006d\u0065\u006e\u0074\u0065\u0068\u0061\u0062\u006c\u0061\u0072\u0065\u006e\u0045\u0073\u0070\u0061\u00f1\u006f\u006c", # Vietnamese - u_("\u0054\u1ea1\u0069\u0073\u0061\u006f\u0068\u1ecd\u006b\u0068\u00f4\u006e\u0067\u0074\u0068\u1ec3\u0063\u0068\u1ec9\u006e\u00f3\u0069\u0074\u0069\u1ebf\u006e\u0067\u0056\u0069\u1ec7\u0074"), + "\u0054\u1ea1\u0069\u0073\u0061\u006f\u0068\u1ecd\u006b\u0068\u00f4\u006e\u0067\u0074\u0068\u1ec3\u0063\u0068\u1ec9\u006e\u00f3\u0069\u0074\u0069\u1ebf\u006e\u0067\u0056\u0069\u1ec7\u0074", # Japanese - u_("\u0033\u5e74\u0042\u7d44\u91d1\u516b\u5148\u751f"), + 
"\u0033\u5e74\u0042\u7d44\u91d1\u516b\u5148\u751f", # Japanese - u_("\u5b89\u5ba4\u5948\u7f8e\u6075\u002d\u0077\u0069\u0074\u0068\u002d\u0053\u0055\u0050\u0045\u0052\u002d\u004d\u004f\u004e\u004b\u0045\u0059\u0053"), + "\u5b89\u5ba4\u5948\u7f8e\u6075\u002d\u0077\u0069\u0074\u0068\u002d\u0053\u0055\u0050\u0045\u0052\u002d\u004d\u004f\u004e\u004b\u0045\u0059\u0053", # Japanese - u_("\u0048\u0065\u006c\u006c\u006f\u002d\u0041\u006e\u006f\u0074\u0068\u0065\u0072\u002d\u0057\u0061\u0079\u002d\u305d\u308c\u305e\u308c\u306e\u5834\u6240"), + "\u0048\u0065\u006c\u006c\u006f\u002d\u0041\u006e\u006f\u0074\u0068\u0065\u0072\u002d\u0057\u0061\u0079\u002d\u305d\u308c\u305e\u308c\u306e\u5834\u6240", # Japanese - u_("\u3072\u3068\u3064\u5c4b\u6839\u306e\u4e0b\u0032"), + "\u3072\u3068\u3064\u5c4b\u6839\u306e\u4e0b\u0032", # Japanese - u_("\u004d\u0061\u006a\u0069\u3067\u004b\u006f\u0069\u3059\u308b\u0035\u79d2\u524d"), + "\u004d\u0061\u006a\u0069\u3067\u004b\u006f\u0069\u3059\u308b\u0035\u79d2\u524d", # Japanese - u_("\u30d1\u30d5\u30a3\u30fc\u0064\u0065\u30eb\u30f3\u30d0"), + "\u30d1\u30d5\u30a3\u30fc\u0064\u0065\u30eb\u30f3\u30d0", # Japanese - u_("\u305d\u306e\u30b9\u30d4\u30fc\u30c9\u3067"), + "\u305d\u306e\u30b9\u30d4\u30fc\u30c9\u3067", # greek - u_("\u03b5\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac"), + "\u03b5\u03bb\u03bb\u03b7\u03bd\u03b9\u03ba\u03ac", # Maltese (Malti) - u_("\u0062\u006f\u006e\u0121\u0075\u0073\u0061\u0127\u0127\u0061"), + "\u0062\u006f\u006e\u0121\u0075\u0073\u0061\u0127\u0127\u0061", # Russian (Cyrillic) - u_("\u043f\u043e\u0447\u0435\u043c\u0443\u0436\u0435\u043e\u043d\u0438\u043d\u0435\u0433\u043e\u0432\u043e\u0440\u044f\u0442\u043f\u043e\u0440\u0443\u0441\u0441\u043a\u0438") + "\u043f\u043e\u0447\u0435\u043c\u0443\u0436\u0435\u043e\u043d\u0438\u043d\u0435\u0433\u043e\u0432\u043e\u0440\u044f\u0442\u043f\u043e\u0440\u0443\u0441\u0441\u043a\u0438" ] for i in keys: eq_(foo(i),i)