diff --git a/.gitignore b/.gitignore
index 7bbc71c..4d84a8e 100644
--- a/.gitignore
+++ b/.gitignore
@@ -99,3 +99,6 @@ ENV/
 
 # mypy
 .mypy_cache/
+
+
+*.pyc
\ No newline at end of file
diff --git a/app.yaml b/app.yaml
new file mode 100644
index 0000000..6bac33f
--- /dev/null
+++ b/app.yaml
@@ -0,0 +1,28 @@
+runtime: python27
+api_version: 1
+threadsafe: true
+
+libraries:
+- name: jinja2
+  version: "2.6"
+- name: webapp2
+  version: "2.5.1"
+- name: pycrypto
+  version: "2.6"
+
+handlers:
+- url: /static/
+  static_dir: static
+  application_readable: true
+  secure: always
+- url: /.*
+  script: server.app
+  secure: always
+
+skip_files:
+- ^(.*/)?#.*#$
+- ^(.*/)?.*~$
+- ^(.*/)?.*\.py[co]$
+- ^(.*/)?.*/RCS/.*$
+- ^(.*/)?\..*$
+- Crypto
diff --git a/build.sh b/build.sh
new file mode 100644
index 0000000..de82ed7
--- /dev/null
+++ b/build.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+
+# This Bash script builds Python dependencies needed to run and deploy
+# the Trendy Lights application
+
+# Builds the specified dependency if it hasn't been built. Takes 3 parameters:
+#   For PyPI packages:
+#     1. The name of the PyPI package.
+#     2. The version of the package.
+#     3. The path within the package of the library folder.
+#   For Git repositories:
+#     1. The URL of the git repository.
+#     2. The tag name or commit SHA at which to checkout the repo.
+#     3. The path within the repo of the library folder.
+BuildDep () {
+  DST_FOLDER=$(basename "$3")
+  echo "Building $DST_FOLDER ($2)..."
+  if [ ! -d "$DST_FOLDER" ]; then
+    if [ ! -f "$DST_FOLDER" ]; then
+      # See: http://unix.stackexchange.com/a/84980
+      TEMP_DIR=$(mktemp -d 2>/dev/null || mktemp -d -t 'mytmpdir')
+      cd "$TEMP_DIR"
+      if [[ $1 == *git ]]; then
+        echo "Git: Cloning $1..."
+        git clone "$1" .
+        echo "Git: Checking out $2..."
+        git checkout "$2" .
+      else
+        echo "Pip: Installing $1..."
+        pip install -t "$TEMP_DIR" "$1"=="$2"
+      fi
+      cd -
+      mv "$TEMP_DIR/$3" ./
+      rm -rf "$TEMP_DIR"
+    fi
+  fi
+}
+
+# Build oauth2client v2.2.0 dependencies.
+BuildDep six 1.10.0 six.py +BuildDep pyasn1 0.1.9 pyasn1 +BuildDep pyasn1-modules 0.0.8 pyasn1_modules +BuildDep rsa 3.4.2 rsa + +# Build oauth2client. +BuildDep https://github.com/google/oauth2client.git tags/v2.2.0 oauth2client + +# Build the Earth Engine Python client library. +BuildDep https://github.com/google/earthengine-api.git v0.1.114 python/ee + +# Build httplib2. +BuildDep https://github.com/jcgregorio/httplib2.git tags/v0.9.1 python2/httplib2 diff --git a/config.py b/config.py new file mode 100644 index 0000000..73cf969 --- /dev/null +++ b/config.py @@ -0,0 +1,10 @@ +#!/usr/bin/env python +"""Required credentials configuration.""" + + +# The service account email address authorized by your Google contact. +# The process to set up a service account is described in the README. +EE_ACCOUNT = 'earthengine@earthenginetestbed.iam.gserviceaccount.com' + +# The private key associated with your service account in JSON format. +EE_PRIVATE_KEY_FILE = 'privatekey.json' diff --git a/ee/__init__.py b/ee/__init__.py new file mode 100644 index 0000000..faa030a --- /dev/null +++ b/ee/__init__.py @@ -0,0 +1,374 @@ +#!/usr/bin/env python +"""The EE Python library.""" + + +__version__ = '0.1.114' + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +# pylint: disable=g-bad-import-order +import collections +import datetime +import inspect +import numbers +import os +import six + +from . import batch +from . import data +from . import deserializer +from . import ee_types as types +from ._helpers import _GetPersistentCredentials + +# Public re-exports. 
+from ._helpers import ServiceAccountCredentials +from ._helpers import apply # pylint: disable=redefined-builtin +from ._helpers import call +from ._helpers import profilePrinting +from .apifunction import ApiFunction +from .collection import Collection +from .computedobject import ComputedObject +from .customfunction import CustomFunction +from .dictionary import Dictionary +from .ee_date import Date +from .ee_exception import EEException +from .ee_list import List +from .ee_number import Number +from .ee_string import String +from .element import Element +from .encodable import Encodable +from .feature import Feature +from .featurecollection import FeatureCollection +from .filter import Filter +from .function import Function +from .geometry import Geometry +from .image import Image +from .imagecollection import ImageCollection +from .serializer import Serializer +from .terrain import Terrain + +# A list of autogenerated class names added by _InitializeGenerateClasses. +_generatedClasses = [] + + +class _AlgorithmsContainer(dict): + """A lightweight class that is used as a dictionary with dot notation. + """ + + def __getattr__(self, name): + try: + return self[name] + except KeyError: + raise AttributeError + + def __setattr__(self, name, value): + self[name] = value + + def __delattr__(self, name): + del self[name] + +# A dictionary of algorithms that are not bound to a specific class. +Algorithms = _AlgorithmsContainer() + + +def Initialize(credentials='persistent', opt_url=None): + """Initialize the EE library. + + If this hasn't been called by the time any object constructor is used, + it will be called then. If this is called a second time with a different + URL, this doesn't do an un-initialization of e.g.: the previously loaded + Algorithms, but will overwrite them and let point at alternate servers. + + Args: + credentials: OAuth2 credentials. 
'persistent' (default) means use + credentials already stored in the filesystem, or raise an explanatory + exception guiding the user to create those credentials. + opt_url: The base url for the EarthEngine REST API to connect to. + """ + if credentials == 'persistent': + credentials = _GetPersistentCredentials() + data.initialize(credentials, (opt_url + '/api' if opt_url else None), opt_url) + # Initialize the dynamically loaded functions on the objects that want them. + ApiFunction.initialize() + Element.initialize() + Image.initialize() + Feature.initialize() + Collection.initialize() + ImageCollection.initialize() + FeatureCollection.initialize() + Filter.initialize() + Geometry.initialize() + List.initialize() + Number.initialize() + String.initialize() + Date.initialize() + Dictionary.initialize() + Terrain.initialize() + _InitializeGeneratedClasses() + _InitializeUnboundMethods() + + +def Reset(): + """Reset the library. Useful for re-initializing to a different server.""" + data.reset() + ApiFunction.reset() + Element.reset() + Image.reset() + Feature.reset() + Collection.reset() + ImageCollection.reset() + FeatureCollection.reset() + Filter.reset() + Geometry.reset() + List.reset() + Number.reset() + String.reset() + Date.reset() + Dictionary.reset() + Terrain.reset() + _ResetGeneratedClasses() + global Algorithms + Algorithms = _AlgorithmsContainer() + + +def _ResetGeneratedClasses(): + """Remove the dynamic classes.""" + global _generatedClasses + + for name in _generatedClasses: + ApiFunction.clearApi(globals()[name]) + del globals()[name] + _generatedClasses = [] + # Warning: we're passing all of globals() into registerClasses. + # This is a) pass by reference, and b) a lot more stuff. + types._registerClasses(globals()) # pylint: disable=protected-access + + +def _Promote(arg, klass): + """Wrap an argument in an object of the specified class. + + This is used to e.g.: promote numbers or strings to Images and arrays + to Collections. 
+ + Args: + arg: The object to promote. + klass: The expected type. + + Returns: + The argument promoted if the class is recognized, otherwise the + original argument. + """ + if arg is None: + return arg + + if klass == 'Image': + return Image(arg) + elif klass == 'Feature': + if isinstance(arg, Collection): + # TODO(user): Decide whether we want to leave this in. It can be + # quite dangerous on large collections. + return ApiFunction.call_( + 'Feature', ApiFunction.call_('Collection.geometry', arg)) + else: + return Feature(arg) + elif klass == 'Element': + if isinstance(arg, Element): + # Already an Element. + return arg + elif isinstance(arg, Geometry): + # Geometries get promoted to Features. + return Feature(arg) + elif isinstance(arg, ComputedObject): + # Try a cast. + return Element(arg.func, arg.args, arg.varName) + else: + # No way to convert. + raise EEException('Cannot convert %s to Element.' % arg) + elif klass == 'Geometry': + if isinstance(arg, Collection): + return ApiFunction.call_('Collection.geometry', arg) + else: + return Geometry(arg) + elif klass in ('FeatureCollection', 'Collection'): + # For now Collection is synonymous with FeatureCollection. + if isinstance(arg, Collection): + return arg + else: + return FeatureCollection(arg) + elif klass == 'ImageCollection': + return ImageCollection(arg) + elif klass == 'Filter': + return Filter(arg) + elif klass == 'Algorithm': + if isinstance(arg, six.string_types): + # An API function name. + return ApiFunction.lookup(arg) + elif callable(arg): + # A native function that needs to be wrapped. + args_count = len(inspect.getargspec(arg).args) + return CustomFunction.create(arg, 'Object', ['Object'] * args_count) + elif isinstance(arg, Encodable): + # An ee.Function or a computed function like the return value of + # Image.parseExpression(). 
+ return arg + else: + raise EEException('Argument is not a function: %s' % arg) + elif klass == 'Dictionary': + if isinstance(arg, dict): + return arg + else: + return Dictionary(arg) + elif klass == 'String': + if (types.isString(arg) or + isinstance(arg, ComputedObject) or + isinstance(arg, String)): + return String(arg) + else: + return arg + elif klass == 'List': + return List(arg) + elif klass in ('Number', 'Float', 'Long', 'Integer', 'Short', 'Byte'): + return Number(arg) + elif klass in globals(): + cls = globals()[klass] + ctor = ApiFunction.lookupInternal(klass) + # Handle dynamically created classes. + if isinstance(arg, cls): + # Return unchanged. + return arg + elif ctor: + # The client-side constructor will call the server-side constructor. + return cls(arg) + elif isinstance(arg, six.string_types): + if hasattr(cls, arg): + # arg is the name of a method in klass. + return getattr(cls, arg)() + else: + raise EEException('Unknown algorithm: %s.%s' % (klass, arg)) + else: + # Client-side cast. + return cls(arg) + else: + return arg + + +def _InitializeUnboundMethods(): + # Sort the items by length, so parents get created before children. + items = sorted( + ApiFunction.unboundFunctions().items(), key=lambda x: len(x[0])) + + for name, func in items: + signature = func.getSignature() + if signature.get('hidden', False): + continue + + # Create nested objects as needed. + name_parts = name.split('.') + target = Algorithms + while len(name_parts) > 1: + first = name_parts[0] + # Set the attribute if it doesn't already exist. The try/except block + # works in both Python 2 & 3. + try: + getattr(target, first) + except AttributeError: + setattr(target, first, _AlgorithmsContainer()) + + target = getattr(target, first) + name_parts = name_parts[1:] + + # Attach the function. + # We need a copy of the function to attach properties. 
+ def GenerateFunction(f): + return lambda *args, **kwargs: f.call(*args, **kwargs) # pylint: disable=unnecessary-lambda + bound = GenerateFunction(func) + bound.signature = signature + bound.__doc__ = str(func) + setattr(target, name_parts[0], bound) + + +def _InitializeGeneratedClasses(): + """Generate classes for extra types that appear in the web API.""" + signatures = ApiFunction.allSignatures() + # Collect the first part of all function names. + names = set([name.split('.')[0] for name in signatures]) + # Collect the return types of all functions. + returns = set([signatures[sig]['returns'] for sig in signatures]) + + want = [name for name in names.intersection(returns) if name not in globals()] + + for name in want: + globals()[name] = _MakeClass(name) + _generatedClasses.append(name) + ApiFunction._bound_signatures.add(name) # pylint: disable=protected-access + + # Warning: we're passing all of globals() into registerClasses. + # This is a) pass by reference, and b) a lot more stuff. + types._registerClasses(globals()) # pylint: disable=protected-access + + +def _MakeClass(name): + """Generates a dynamic API class for a given name.""" + + def init(self, *args): + """Initializer for dynamically created classes. + + Args: + self: The instance of this class. Listed to make the linter hush. + *args: Either a ComputedObject to be promoted to this type, or + arguments to an algorithm with the same name as this class. + + Returns: + The new class. + """ + klass = globals()[name] + onlyOneArg = (len(args) == 1) + # Are we trying to cast something that's already of the right class? + if onlyOneArg and isinstance(args[0], klass): + result = args[0] + else: + # Decide whether to call a server-side constructor or just do a + # client-side cast. + ctor = ApiFunction.lookupInternal(name) + firstArgIsPrimitive = not isinstance(args[0], ComputedObject) + shouldUseConstructor = False + if ctor: + if not onlyOneArg: + # Can't client-cast multiple arguments. 
+ shouldUseConstructor = True + elif firstArgIsPrimitive: + # Can't cast a primitive. + shouldUseConstructor = True + elif args[0].func != ctor: + # We haven't already called the constructor on this object. + shouldUseConstructor = True + + # Apply our decision. + if shouldUseConstructor: + # Call ctor manually to avoid having promote() called on the output. + ComputedObject.__init__( + self, ctor, ctor.promoteArgs(ctor.nameArgs(args))) + else: + # Just cast and hope for the best. + if not onlyOneArg: + # We don't know what to do with multiple args. + raise EEException( + 'Too many arguments for ee.%s(): %s' % (name, args)) + elif firstArgIsPrimitive: + # Can't cast a primitive. + raise EEException( + 'Invalid argument for ee.%s(): %s. Must be a ComputedObject.' % + (name, args)) + else: + result = args[0] + ComputedObject.__init__(self, result.func, result.args, result.varName) + + properties = {'__init__': init, 'name': lambda self: name} + new_class = type(str(name), (ComputedObject,), properties) + ApiFunction.importApi(new_class, name, name) + return new_class + + +# Set up type promotion rules as soon the package is loaded. +Function._registerPromoter(_Promote) # pylint: disable=protected-access diff --git a/ee/_helpers.py b/ee/_helpers.py new file mode 100644 index 0000000..8329f6c --- /dev/null +++ b/ee/_helpers.py @@ -0,0 +1,147 @@ +#!/usr/bin/env python +"""Convenience functions and code used by ee/__init__.py. + +These functions are in general re-exported from the "ee" module and should be +referenced from there (e.g. "ee.profilePrinting"). +""" + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +import contextlib +import json +import sys +# pylint: disable=g-importing-member +from . import data +from . 
import oauth +from .apifunction import ApiFunction +from .ee_exception import EEException +# pylint: enable=g-importing-member +import oauth2client.client +import oauth2client.service_account +import six + + +def _GetPersistentCredentials(): + """Read persistent credentials from ~/.config/earthengine. + + Raises EEException with helpful explanation if credentials don't exist. + + Returns: + OAuth2Credentials built from persistently stored refresh_token + """ + try: + tokens = json.load(open(oauth.get_credentials_path())) + refresh_token = tokens['refresh_token'] + return oauth2client.client.OAuth2Credentials( + None, oauth.CLIENT_ID, oauth.CLIENT_SECRET, refresh_token, + None, 'https://accounts.google.com/o/oauth2/token', None) + except IOError: + raise EEException('Please authorize access to your Earth Engine account ' + 'by running\n\nearthengine authenticate\n\nin your ' + 'command line, and then retry.') + + +def ServiceAccountCredentials(email, key_file=None, key_data=None): + """Configure OAuth2 credentials for a Google Service Account. + + Args: + email: The email address of the account for which to configure credentials. + key_file: The path to a file containing the private key associated with + the service account. PEM files are supported for oauth2client v1 and + JSON files are supported for oauth2client v2+. + key_data: Raw key data to use, if key_file is not specified. + + Returns: + An OAuth2 credentials object. + + Raises: + NotImplementedError: Occurs if using oauth2client v2+ and a PEM formatted + credentials key file. + """ + try: + # oauth2client v2+ and JSON key + sa_creds = oauth2client.service_account.ServiceAccountCredentials + credentials = sa_creds.from_json_keyfile_name(key_file, oauth.SCOPE) + except ValueError: + # oauth2client v2+ and PEM key + raise NotImplementedError( + 'When using oauth2client version 2 or later, you must use a JSON ' + 'formatted key file (instead of a p12 or PEM formatted file). 
See the ' + 'following page for information on creating a JSON formatted file:\n' + 'https://developers.google.com/api-client-library/python/auth/web-app') + except AttributeError: + # oauth2client v1 (i.e. does not have a ServiceAccountCredentials) + if key_file: + with open(key_file, 'rb') as key_file: + key_data = key_file.read() + credentials = oauth2client.client.SignedJwtAssertionCredentials( + email, key_data, oauth.SCOPE) + return credentials + + +def call(func, *args, **kwargs): + """Invoke the given algorithm with the specified args. + + Args: + func: The function to call. Either an ee.Function object or the name of + an API function. + *args: The positional arguments to pass to the function. + **kwargs: The named arguments to pass to the function. + + Returns: + A ComputedObject representing the called function. If the signature + specifies a recognized return type, the returned value will be cast + to that type. + """ + if isinstance(func, six.string_types): + func = ApiFunction.lookup(func) + return func.call(*args, **kwargs) + + +def apply(func, named_args): # pylint: disable=redefined-builtin + """Call a function with a dictionary of named arguments. + + Args: + func: The function to call. Either an ee.Function object or the name of + an API function. + named_args: A dictionary of arguments to the function. + + Returns: + A ComputedObject representing the called function. If the signature + specifies a recognized return type, the returned value will be cast + to that type. + """ + if isinstance(func, six.string_types): + func = ApiFunction.lookup(func) + return func.apply(named_args) + + +@contextlib.contextmanager +def profilePrinting(destination=sys.stderr): + # pylint: disable=g-doc-return-or-yield + """Returns a context manager that prints a profile of enclosed API calls. + + The profile will be printed when the context ends, whether or not any error + occurred within the context. 
+ + # Simple example: + with ee.profilePrinting(): + print ee.Number(1).add(1).getInfo() + + Args: + destination: A file-like object to which the profile text is written. + Defaults to sys.stderr. + + """ + # TODO(user): Figure out why ee.Profile.getProfiles isn't generated and fix + # that. + getProfiles = ApiFunction.lookup('Profile.getProfiles') + + profile_ids = [] + try: + with data.profiling(profile_ids.append): + yield + finally: + profile_text = getProfiles.call(ids=profile_ids).getInfo() + destination.write(profile_text) diff --git a/ee/apifunction.py b/ee/apifunction.py new file mode 100644 index 0000000..b67596e --- /dev/null +++ b/ee/apifunction.py @@ -0,0 +1,243 @@ +#!/usr/bin/env python +"""A class for representing built-in EE API Function. + +Earth Engine can dynamically produce a JSON array listing the +algorithms available to the user. Each item in the dictionary identifies +the name and return type of the algorithm, the name and type of its +arguments, whether they're required or optional, default values and docs +for each argument and the algorithms as a whole. + +This class manages the algorithm dictionary and creates JavaScript functions +to apply each EE algorithm. +""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +import copy +import keyword +import re + +from . import computedobject +from . import data +from . import deprecation +from . import ee_exception +from . import ee_types +from . import function + + +class ApiFunction(function.Function): + """An object representing an EE API Function.""" + + # A dictionary of functions defined by the API server. + _api = None + + # A set of algorithm names containing all algorithms that have been bound to + # a function so far using importApi(). + _bound_signatures = set() + + def __init__(self, name, opt_signature=None): + """Creates a function defined by the EE API. + + Args: + name: The name of the function. 
+ opt_signature: The signature of the function. If unspecified, + looked up dynamically. + """ + if opt_signature is None: + opt_signature = ApiFunction.lookup(name).getSignature() + + # The signature of this API function. + self._signature = copy.deepcopy(opt_signature) + self._signature['name'] = name + + def __eq__(self, other): + return (isinstance(other, ApiFunction) and + self.getSignature() == other.getSignature()) + + # For Python 3, __hash__ is needed because __eq__ is defined. + # See https://docs.python.org/3/reference/datamodel.html#object.__hash__ + def __hash__(self): + return hash(computedobject.ComputedObject.freeze(self.getSignature())) + + def __ne__(self, other): + return not self.__eq__(other) + + @classmethod + def call_(cls, name, *args, **kwargs): + """Call a named API function with positional and keyword arguments. + + Args: + name: The name of the API function to call. + *args: Positional arguments to pass to the function. + **kwargs: Keyword arguments to pass to the function. + + Returns: + An object representing the called function. If the signature specifies + a recognized return type, the returned value will be cast to that type. + """ + return cls.lookup(name).call(*args, **kwargs) + + @classmethod + def apply_(cls, name, named_args): + """Call a named API function with a dictionary of named arguments. + + Args: + name: The name of the API function to call. + named_args: A dictionary of arguments to the function. + + Returns: + An object representing the called function. If the signature specifies + a recognized return type, the returned value will be cast to that type. 
+ """ + return cls.lookup(name).apply(named_args) + + def encode(self, unused_encoder): + return self._signature['name'] + + def getSignature(self): + """Returns a description of the interface provided by this function.""" + return self._signature + + @classmethod + def allSignatures(cls): + """Returns a map from the name to signature for all API functions.""" + cls.initialize() + return dict([(name, func.getSignature()) + for name, func in cls._api.items()]) + + @classmethod + def unboundFunctions(cls): + """Returns the functions that have not been bound using importApi() yet.""" + cls.initialize() + return dict([(name, func) for name, func in cls._api.items() + if name not in cls._bound_signatures]) + + @classmethod + def lookup(cls, name): + """Looks up an API function by name. + + Args: + name: The name of the function to get. + + Returns: + The requested ApiFunction. + """ + result = cls.lookupInternal(name) + if not name: + raise ee_exception.EEException( + 'Unknown built-in function name: %s' % name) + return result + + @classmethod + def lookupInternal(cls, name): + """Looks up an API function by name. + + Args: + name: The name of the function to get. + + Returns: + The requested ApiFunction or None if not found. + """ + cls.initialize() + return cls._api.get(name, None) + + @classmethod + def initialize(cls): + """Initializes the list of signatures from the Earth Engine front-end.""" + if not cls._api: + signatures = data.getAlgorithms() + api = {} + for name, sig in signatures.items(): + # Strip type parameters. 
+ sig['returns'] = re.sub('<.*>', '', sig['returns']) + for arg in sig['args']: + arg['type'] = re.sub('<.*>', '', arg['type']) + api[name] = cls(name, sig) + cls._api = api + + @classmethod + def reset(cls): + """Clears the API functions list so it will be reloaded from the server.""" + cls._api = None + cls._bound_signatures = set() + + @classmethod + def importApi(cls, target, prefix, type_name, opt_prepend=None): + """Adds all API functions that begin with a given prefix to a target class. + + Args: + target: The class to add to. + prefix: The prefix to search for in the signatures. + type_name: The name of the object's type. Functions whose + first argument matches this type are bound as instance methods, and + those whose first argument doesn't match are bound as static methods. + opt_prepend: An optional string to prepend to the names of the + added functions. + """ + cls.initialize() + prepend = opt_prepend or '' + for name, api_func in cls._api.items(): + parts = name.split('.') + if len(parts) == 2 and parts[0] == prefix: + fname = prepend + parts[1] + signature = api_func.getSignature() + + cls._bound_signatures.add(name) + + # Specifically handle the function names that are illegal in python. + if keyword.iskeyword(fname): + fname = fname.title() + + # Don't overwrite existing versions of this function. + if (hasattr(target, fname) and + not hasattr(getattr(target, fname), 'signature')): + continue + + # Create a new function so we can attach properties to it. + def MakeBoundFunction(func): + # We need the lambda to capture "func" from the enclosing scope. + return lambda *args, **kwargs: func.call(*args, **kwargs) # pylint: disable=unnecessary-lambda + bound_function = MakeBoundFunction(api_func) + + # Add docs. 
+ try: + setattr(bound_function, '__name__', str(name)) + except TypeError: + setattr(bound_function, '__name__', name.encode('utf8')) + try: + bound_function.__doc__ = str(api_func) + except UnicodeEncodeError: + bound_function.__doc__ = api_func.__str__().encode('utf8') + + # Attach the signature object for documentation generators. + bound_function.signature = signature + + # Mark as deprecated if needed. + if signature.get('deprecated'): + deprecated_decorator = deprecation.Deprecated(signature['deprecated']) + bound_function = deprecated_decorator(bound_function) + + # Decide whether this is a static or an instance function. + is_instance = (signature['args'] and + ee_types.isSubtype(signature['args'][0]['type'], + type_name)) + if not is_instance: + bound_function = staticmethod(bound_function) + + # Attach the function as a method. + setattr(target, fname, bound_function) + + @staticmethod + def clearApi(target): + """Removes all methods added by importApi() from a target class. + + Args: + target: The class to remove from. + """ + for attr_name in dir(target): + attr_value = getattr(target, attr_name) + if callable(attr_value) and hasattr(attr_value, 'signature'): + delattr(target, attr_name) diff --git a/ee/apitestcase.py b/ee/apitestcase.py new file mode 100644 index 0000000..ca3e732 --- /dev/null +++ b/ee/apitestcase.py @@ -0,0 +1,1477 @@ +#!/usr/bin/env python +"""A TestCase that initializes the library with standard API methods.""" + + + +import unittest + +import ee + + +class ApiTestCase(unittest.TestCase): + + def setUp(self): + self.InitializeApi() + + def InitializeApi(self): + """Initializes the library with standard API methods. + + This is normally invoked during setUp(), but subclasses may invoke + it manually instead if they prefer. 
+ """ + self.last_download_call = None + self.last_thumb_call = None + self.last_table_call = None + + ee.data.send_ = self.MockSend + + ee.Reset() + ee.Initialize(None, '') + + def MockSend(self, path, params, unused_method=None, unused_raw=None): + if path == '/algorithms': + return BUILTIN_FUNCTIONS + elif path == '/value': + return {'value': 'fakeValue'} + elif path == '/mapid': + return {'mapid': 'fakeMapId'} + elif path == '/download': + # Hang on to the call arguments. + self.last_download_call = {'url': path, 'data': params} + return {'docid': '1', 'token': '2'} + elif path == '/thumb': + # Hang on to the call arguments. + self.last_thumb_call = {'url': path, 'data': params} + return {'thumbid': '3', 'token': '4'} + elif path == '/table': + # Hang on to the call arguments. + self.last_table_call = {'url': path, 'data': params} + return {'docid': '5', 'token': '6'} + else: + raise Exception('Unexpected API call to %s with %s' % (path, params)) + +BUILTIN_FUNCTIONS = { + 'Image.constant': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'value', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Image' + }, + 'Image.load': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'id', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'version', + 'type': 'Long' + } + ], + 'description': '', + 'returns': 'Image' + }, + 'Image.addBands': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'dstImg', + 'type': 'Image' + }, + { + 'description': '', + 'name': 'srcImg', + 'type': 'Image' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'names', + 'type': 'List' + }, + { + 'default': False, + 'description': '', + 'optional': True, + 'name': 'overwrite', + 'type': 'boolean' + } + ], + 'description': '', + 'returns': 'Image' + }, + 'Image.clip': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 
'input', + 'type': 'Image' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'geometry', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Image' + }, + 'Image.select': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'input', + 'type': 'Image' + }, + { + 'description': '', + 'name': 'bandSelectors', + 'type': 'List' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'newNames', + 'type': 'List' + } + ], + 'description': '', + 'returns': 'Image' + }, + 'Image.parseExpression': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'expression', + 'type': 'String' + }, + { + 'default': 'image', + 'description': '', + 'optional': True, + 'name': 'argName', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'vars', + 'type': 'List' + } + ], + 'description': '', + 'returns': 'Algorithm' + }, + 'Feature': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'geometry', + 'type': 'Geometry' + }, + { + 'default': {}, + 'description': '', + 'optional': True, + 'name': 'metadata', + 'type': 'Dictionary' + } + ], + 'description': '', + 'returns': 'Feature' + }, + 'Feature.get': { + 'type': 'Algorithm', + 'returns': '', + 'hidden': False, + 'args': [ + { + 'type': 'Element', + 'description': '', + 'name': 'object' + }, + { + 'type': 'String', + 'description': '', + 'name': 'property' + } + ], + 'description': '' + }, + 'Collection': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'features', + 'type': 'List' + } + ], + 'description': '', + 'returns': 'FeatureCollection' + }, + 'Collection.loadTable': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'tableId', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'geometryColumn', + 
'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'version', + 'type': 'Long' + } + ], + 'description': '', + 'returns': 'FeatureCollection' + }, + 'Collection.filter': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'collection', + 'type': 'FeatureCollection' + }, + { + 'description': '', + 'name': 'filter', + 'type': 'Filter' + } + ], + 'description': '', + 'returns': 'FeatureCollection' + }, + 'Collection.limit': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'collection', + 'type': 'FeatureCollection' + }, + { + 'default': -1, + 'description': '', + 'optional': True, + 'name': 'limit', + 'type': 'int' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'key', + 'type': 'String' + }, + { + 'default': True, + 'description': '', + 'optional': True, + 'name': 'ascending', + 'type': 'boolean' + } + ], + 'description': '', + 'returns': 'FeatureCollection' + }, + 'Collection.map': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'collection', + 'type': 'FeatureCollection' + }, + { + 'description': '', + 'name': 'baseAlgorithm', + 'type': 'Algorithm' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'dynamicArgs', + 'type': 'Dictionary' + }, + { + 'default': {}, + 'description': '', + 'optional': True, + 'name': 'constantArgs', + 'type': 'Dictionary' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'destination', + 'type': 'String' + } + ], + 'description': '', + 'returns': 'FeatureCollection' + }, + 'Collection.iterate': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'collection', + 'type': 'FeatureCollection' + }, + { + 'description': '', + 'name': 'function', + 'type': 'Algorithm' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'first', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Object', + 
}, + 'ImageCollection.load': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'id', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'version', + 'type': 'Long' + } + ], + 'description': '', + 'returns': 'ImageCollection' + }, + 'ImageCollection.fromImages': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'images', + 'type': 'List' + } + ], + 'description': '', + 'returns': 'ImageCollection' + }, + 'ImageCollection.mosaic': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'collection', + 'type': 'ImageCollection' + } + ], + 'description': '', + 'returns': 'Image' + }, + 'Collection.geometry': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'collection', + 'type': 'FeatureCollection' + }, + { + 'default': { + 'type': 'ErrorMargin', + 'unit': 'meters', + 'value': 0 + }, + 'description': '', + 'optional': True, + 'name': 'maxError', + 'type': 'ErrorMargin' + } + ], + 'description': '', + 'returns': 'Geometry' + }, + 'Collection.draw': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'collection', + 'type': 'FeatureCollection' + }, + { + 'description': '', + 'name': 'color', + 'type': 'String' + }, + { + 'default': 3, + 'description': '', + 'optional': True, + 'name': 'pointRadius', + 'type': 'int' + }, + { + 'default': 2, + 'description': '', + 'optional': True, + 'name': 'strokeWidth', + 'type': 'int' + } + ], + 'description': '', + 'returns': 'Image' + }, + 'DateRange': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'start', + 'type': 'Date' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'end', + 'type': 'Date' + } + ], + 'description': '', + 'returns': 'DateRange' + }, + 'Date': { + 'returns': 'Date', + 'hidden': False, + 'args': [ + { + 'type': 'Object', + 'description': '', + 'name': 'value' + }, + { + 'type': 'String', + 'default': None, + 
'description': '', + 'optional': True, + 'name': 'timeZone' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + 'ErrorMargin': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'value', + 'type': 'Double' + }, + { + 'default': 'meters', + 'description': '', + 'optional': True, + 'name': 'unit', + 'type': 'String' + } + ], + 'description': '', + 'returns': 'ErrorMargin' + }, + 'Filter.intersects': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightValue', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftValue', + 'type': 'Object' + }, + { + 'default': { + 'type': 'ErrorMargin', + 'unit': 'meters', + 'value': 0.1 + }, + 'description': '', + 'optional': True, + 'name': 'maxError', + 'type': 'ErrorMargin' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.dateRangeContains': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightValue', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftValue', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.or': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'filters', + 'type': 'List' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.and': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'filters', + 'type': 'List' + } + 
], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.not': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'filter', + 'type': 'Filter' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.equals': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightValue', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftValue', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.lessThan': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightValue', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftValue', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.greaterThan': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightValue', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftValue', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.stringContains': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': 
'', + 'optional': True, + 'name': 'leftField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightValue', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftValue', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.stringStartsWith': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightValue', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftValue', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.stringEndsWith': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightValue', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftValue', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Filter.listContains': { + 'type': 'Algorithm', + 'args': [ + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'leftField', + 'type': 'String' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightValue', + 'type': 'Object' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'rightField', + 'type': 'String' + }, + { + 'default': 
None, + 'description': '', + 'optional': True, + 'name': 'leftValue', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Filter' + }, + 'Image.mask': { + 'type': 'Algorithm', + 'args': [ + { + 'name': 'image', + 'type': 'Image', + 'description': '' + }, + { + 'name': 'mask', + 'type': 'Image', + 'description': '', + 'optional': True, + 'default': None + } + ], + 'description': '', + 'returns': 'Image' + }, + # These two functions (Dictionary.get and Image.reduceRegion) are here + # to force the creation of the Dictionary class. + 'Dictionary.get': { + 'returns': 'Object', + 'args': [ + { + 'type': 'Dictionary', + 'description': '', + 'name': 'map' + }, + { + 'type': 'String', + 'description': '', + 'name': 'property' + } + ], + 'type': 'Algorithm', + 'description': '', + }, + 'Image.reduceRegion': { + 'returns': 'Dictionary', + 'hidden': False, + 'args': [ + { + 'type': 'Image', + 'description': '', + 'name': 'image' + }, + { + 'type': 'ReducerOld', + 'description': '', + 'name': 'reducer' + }, + { + 'default': None, + 'type': 'Geometry', + 'optional': True, + 'description': '', + 'name': 'geometry' + }, + { + 'default': None, + 'type': 'Double', + 'optional': True, + 'description': '', + 'name': 'scale' + }, + { + 'default': 'EPSG:4326', + 'type': 'String', + 'optional': True, + 'description': '', + 'name': 'crs' + }, + { + 'default': None, + 'type': 'double[]', + 'optional': True, + 'description': '', + 'name': 'crsTransform' + }, + { + 'default': False, + 'type': 'boolean', + 'optional': True, + 'description': '', + 'name': 'bestEffort' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + # Algorithms for testing ee.String. 
+ 'String': { + 'returns': 'String', + 'hidden': False, + 'args': [ + { + 'type': 'Object', + 'description': '', + 'name': 'input' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + 'String.cat': { + 'returns': 'String', + 'hidden': False, + 'args': [ + { + 'type': 'String', + 'description': '', + 'name': 'string1' + }, + { + 'type': 'String', + 'description': '', + 'name': 'string2' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + # An algorithm for testing computed Geometries. + 'Geometry.bounds': { + 'returns': 'Geometry', + 'hidden': False, + 'args': [ + { + 'type': 'Geometry', + 'description': '', + 'name': 'geometry' + }, + { + 'default': None, + 'type': 'ErrorMargin', + 'optional': True, + 'description': '', + 'name': 'maxError' + }, + { + 'default': None, + 'type': 'Projection', + 'optional': True, + 'description': '', + 'name': 'proj' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + 'Geometry.centroid': { + 'returns': 'Geometry', + 'args': [ + { + 'description': '', + 'name': 'geometry', + 'type': 'Geometry' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'maxError', + 'type': 'ErrorMargin' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'proj', + 'type': 'Projection' + } + ], + 'description': '', + 'type': 'Algorithm', + }, + 'GeometryConstructors.Point': { + 'returns': 'Geometry', + 'args': [ + { + 'name': 'coordinates', + 'type': 'List', + 'description': '' + }, + { + 'name': 'crs', + 'type': 'Projection', + 'description': '', + 'optional': True, + 'default': 'epsg:4326' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + 'GeometryConstructors.LineString': { + 'returns': 'Geometry', + 'args': [ + { + 'name': 'coordinates', + 'type': 'List', + 'description': '' + }, + { + 'name': 'crs', + 'type': 'Projection', + 'description': '', + 'optional': True, + 'default': 'epsg:4326' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 
'geodesic', + 'type': 'Boolean' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'maxError', + 'type': 'ErrorMargin' + }, + ], + 'type': 'Algorithm', + 'description': '' + }, + # Element property setting, used by the client-side override. + 'Element.set': { + 'returns': 'Element', + 'hidden': False, + 'args': [ + { + 'type': 'Element', + 'description': '', + 'name': 'object' + }, + { + 'type': 'String', + 'description': '', + 'name': 'key' + }, + { + 'type': 'Object', + 'description': '', + 'name': 'value' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + 'Element.setMulti': { + 'returns': 'Element', + 'hidden': False, + 'args': [ + { + 'type': 'Element', + 'description': '', + 'name': 'object' + }, + { + 'type': 'Dictionary', + 'description': '', + 'name': 'properties' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + 'Image.geometry': { + 'returns': 'Geometry', + 'args': [ + { + 'description': '', + 'name': 'feature', + 'type': 'Element' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'maxError', + 'type': 'ErrorMargin' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'proj', + 'type': 'Projection' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'geodesics', + 'type': 'Boolean' + } + ], + 'type': 'Algorithm', + 'description': '', + }, + 'Number.add': { + 'returns': 'Number', + 'hidden': False, + 'args': [ + { + 'type': 'Number', + 'description': '', + 'name': 'left' + }, + { + 'type': 'Number', + 'description': '', + 'name': 'right' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + 'Array': { + 'returns': 'Array', + 'hidden': False, + 'args': [ + { + 'name': 'values', + 'type': 'Object' + }, + { + 'name': 'pixelType', + 'type': 'PixelType', + 'optional': True, + 'default': None + } + ], + 'type': 'Algorithm', + 'description': '' + }, + 'List.slice': { + 'returns': 'List', + 'args': [ + { + 'type': 'List', + 'name': 
'list' + }, + { + 'type': 'Integer', + 'name': 'start' + }, + { + 'default': None, + 'type': 'Integer', + 'optional': True, + 'name': 'end' + } + ], + 'type': 'Algorithm', + 'description': '', + }, + 'List.map': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'list', + 'type': 'List' + }, + { + 'description': '', + 'name': 'baseAlgorithm', + 'type': 'Algorithm' + }, + ], + 'description': '', + 'returns': 'List' + }, + 'Profile.getProfiles': { + 'args': [ + { + 'description': '', + 'name': 'ids', + 'type': 'List' + }, + { + 'default': 'text', + 'description': '', + 'name': 'format', + 'optional': True, + 'type': 'String' + } + ], + 'description': '', + 'returns': 'Object', + 'type': 'Algorithm', + 'hidden': True + }, + 'Profile.getProfilesInternal': { + 'args': [ + { + 'description': '', + 'name': 'ids', + 'type': 'List' + }, + { + 'default': 'text', + 'description': '', + 'name': 'format', + 'optional': True, + 'type': 'String' + } + ], + 'description': '', + 'returns': 'Object', + 'type': 'Algorithm', + 'hidden': True + }, + 'Projection': { + 'returns': 'Projection', + 'type': 'Algorithm', + 'description': '', + 'args': [ + { + 'name': 'crs', + 'type': 'Object', + 'description': '' + }, + { + 'name': 'transform', + 'default': None, + 'type': 'List', + 'optional': True, + 'description': '' + }, + { + 'name': 'transformWkt', + 'default': None, + 'type': 'String', + 'optional': True, + 'description': '', + } + ] + }, + 'Image.cast': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'image', + 'type': 'Image' + }, + { + 'description': '', + 'name': 'bandTypes', + 'type': 'Dictionary' + }, + { + 'default': None, + 'description': '', + 'optional': True, + 'name': 'bandOrder', + 'type': 'List' + } + ], + 'description': '', + 'returns': 'Image' + }, + 'Describe': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'input', + 'type': 'Object' + } + ], + 'description': '', + 'returns': 'Object', + }, + 
'Image.rename': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'input', + 'type': 'Image' + }, + { + 'description': '', + 'name': 'names', + 'type': 'List' + } + ], + 'description': '', + 'returns': 'Image' + }, + 'Dictionary': { + 'type': 'Algorithm', + 'args': [ + { + 'description': '', + 'name': 'input', + 'optional': 'true', + 'type': 'Object' + } + ], + 'returns': 'Dictionary' + }, +} + + +# A sample of encoded EE API JSON, used by SerializerTest and DeserializerTest. +ENCODED_JSON_SAMPLE = { + 'type': 'CompoundValue', + 'scope': [ + ['0', { + 'type': 'Invocation', + 'functionName': 'Date', + 'arguments': { + 'value': 1234567890000 + } + }], + ['1', { + 'type': 'LineString', + 'coordinates': [[1, 2], [3, 4]], + 'crs': { + 'type': 'name', + 'properties': { + 'name': 'SR-ORG:6974' + } + } + }], + ['2', { + 'evenOdd': True, + 'type': 'Polygon', + 'coordinates': [ + [[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]], + [[5, 6], [7, 6], [7, 8], [5, 8]], + [[1, 1], [2, 1], [2, 2], [1, 2]] + ] + }], + ['3', { + 'type': 'Bytes', + 'value': 'aGVsbG8=' + }], + ['4', { + 'type': 'Invocation', + 'functionName': 'String.cat', + 'arguments': { + 'string1': 'x', + 'string2': 'y' + } + }], + ['5', { + 'type': 'Dictionary', + 'value': { + 'foo': 'bar', + 'baz': {'type': 'ValueRef', 'value': '4'} + } + }], + ['6', { + 'type': 'Function', + 'argumentNames': ['x', 'y'], + 'body': {'type': 'ArgumentRef', 'value': 'y'} + }], + ['7', [ + None, + True, + 5, + 7, + 3.4, + 2.5, + 'hello', + {'type': 'ValueRef', 'value': '0'}, + {'type': 'ValueRef', 'value': '1'}, + {'type': 'ValueRef', 'value': '2'}, + {'type': 'ValueRef', 'value': '3'}, + {'type': 'ValueRef', 'value': '5'}, + {'type': 'ValueRef', 'value': '4'}, + {'type': 'ValueRef', 'value': '6'} + ]] + ], + 'value': {'type': 'ValueRef', 'value': '7'} +} diff --git a/ee/batch.py b/ee/batch.py new file mode 100644 index 0000000..d3b0b96 --- /dev/null +++ b/ee/batch.py @@ -0,0 +1,860 @@ +#!/usr/bin/env python +"""An 
interface to the Earth Engine batch processing system. + +Use the static methods on the Export class to create export tasks, call start() +on them to launch them, then poll status() to find out when they are finished. +The function styling uses camelCase to match the JavaScript names. +""" + +# pylint: disable=g-bad-name + +# pylint: disable=g-bad-import-order +import json +import six + +from . import data +from . import ee_exception +from . import geometry + + +class Task(object): + """A batch task that can be run on the EE batch processing system.""" + + def __init__(self, taskId, config=None): + """Creates a Task with the given ID and configuration. + + The constructor is not for public use. Instances can be obtained by: + - Calling the static method Task.list(). + - Calling any of the methods on the Export static class. + - Unpickling a previously pickled Task object. + + If you're looking for a task's status but don't need a full task object, + ee.data.getTaskStatus() may be appropriate. + + Args: + taskId: The task ID, originally obtained through ee.data.newTaskId(). + config: The task configuration dictionary. Only necessary if start() + will be called. Fields shared by all tasks are: + - type: The type of the task. One of entries in Task.Type. + - state: The state of the task. One of entries in Task.State. + - description: The name of the task, a freeform string. + - sourceURL: An optional URL for the script that generated the task. + Specific task types have other custom config fields. + """ + self.id = taskId + self.config = config and config.copy() + + class Type(object): + EXPORT_IMAGE = 'EXPORT_IMAGE' + EXPORT_MAP = 'EXPORT_TILES' + EXPORT_TABLE = 'EXPORT_FEATURES' + EXPORT_VIDEO = 'EXPORT_VIDEO' + + class State(object): + UNSUBMITTED = 'UNSUBMITTED' + READY = 'READY' + RUNNING = 'RUNNING' + COMPLETED = 'COMPLETED' + FAILED = 'FAILED' + CANCEL_REQUESTED = 'CANCEL_REQUESTED' + CANCELLED = 'CANCELLED' + + # Export destinations. 
  class ExportDestination(object):
    """Symbolic names for the supported export destinations."""
    DRIVE = 'DRIVE'  # A folder in the user's Google Drive.
    GCS = 'GOOGLE_CLOUD_STORAGE'  # A Google Cloud Storage bucket.
    ASSET = 'ASSET'  # An Earth Engine asset.

  def start(self):
    """Starts the task. No-op for started tasks."""
    if not self.config:
      # config may be None: the constructor only requires it when start()
      # will be called (e.g. Tasks built by the Export helpers carry one).
      raise ee_exception.EEException(
          'Task config must be specified for tasks to be started.')
    data.startProcessing(self.id, self.config)

  def status(self):
    """Fetches the current status of the task.

    Returns:
      A dictionary describing the current status of the task as it appears on
      the EE server. Includes the following fields:
      - state: One of the values in Task.State.
      - creation_timestamp_ms: The Unix timestamp of when the task was created.
      - update_timestamp_ms: The Unix timestamp of when the task last changed.
      - output_url: URL of the output. Appears only if state is COMPLETED.
      - error_message: Failure reason. Appears only if state is FAILED.
      May also include other fields.
    """
    # getTaskStatus takes a single ID and returns a one-element list.
    result = data.getTaskStatus(self.id)[0]
    # The server reports 'UNKNOWN' for task IDs it has never seen; surface
    # that to callers as the client-side UNSUBMITTED state.
    if result['state'] == 'UNKNOWN': result['state'] = Task.State.UNSUBMITTED
    return result

  def active(self):
    """Returns whether the task is still running.

    A task whose cancellation has been requested but not yet processed
    (CANCEL_REQUESTED) still counts as active.
    """
    return self.status()['state'] in (Task.State.READY,
                                      Task.State.RUNNING,
                                      Task.State.CANCEL_REQUESTED)

  def cancel(self):
    """Cancels the task."""
    data.cancelTask(self.id)

  @staticmethod
  def list():
    """Returns the tasks submitted to EE by the current user.

    These include all currently running tasks as well as recently canceled or
    failed tasks.

    Returns:
      A list of Tasks.
+ """ + statuses = data.getTaskList() + tasks = [] + for status in statuses: + tasks.append(Task(status['id'], { + 'type': status['task_type'], + 'description': status['description'], + 'state': status['state'], + })) + return tasks + + def __repr__(self): + """Returns a string representation of the task.""" + if self.config: + return '' % self.config + else: + return '' % self.id + + +class Export(object): + """A class with static methods to start export tasks.""" + + def __init__(self): + """Forbids class instantiation.""" + raise AssertionError('This class cannot be instantiated.') + + class image(object): + """A static class with methods to start image export tasks.""" + + def __init__(self): + """Forbids class instantiation.""" + raise AssertionError('This class cannot be instantiated.') + + def __new__(cls, image, description='myExportImageTask', config=None): + """Creates a task to export an EE Image to Google Drive or Cloud Storage. + + Args: + image: The image to be exported. + description: Human-readable name of the task. + config: A dictionary that will be copied and used as parameters + for the task: + - region: The lon,lat coordinates for a LinearRing or Polygon + specifying the region to export. Can be specified as a nested + lists of numbers or a serialized string. Defaults to the image's + region. + - scale: The resolution in meters per pixel. + Defaults to the native resolution of the image assset unless + a crs_transform is specified. + - maxPixels: The maximum allowed number of pixels in the exported + image. The task will fail if the exported region covers + more pixels in the specified projection. Defaults to 100,000,000. + - crs: The coordinate reference system of the exported image's + projection. Defaults to the image's default projection. 
+ - crs_transform: A comma-separated string of 6 numbers describing + the affine transform of the coordinate reference system of the + exported image's projection, in the order: xScale, yShearing, + xShearing, yScale, xTranslation and yTranslation. Defaults to + the image's native CRS transform. + - dimensions: The dimensions of the exported image. Takes either a + single positive integer as the maximum dimension or + "WIDTHxHEIGHT" where WIDTH and HEIGHT are each positive integers. + - skipEmptyTiles: If true, skip writing empty (i.e. fully-masked) + image tiles. + If exporting to Google Drive (default): + - driveFolder: The name of a unique folder in your Drive account to + export into. Defaults to the root of the drive. + - driveFileNamePrefix: The Google Drive filename for the export. + Defaults to the name of the task. + If exporting to Google Cloud Storage: + - outputBucket: The name of a Cloud Storage bucket for the export. + - outputPrefix: Cloud Storage object name prefix for the export. + + Returns: + An unstarted Task that exports the image. + """ + config = (config or {}).copy() + if 'driveFileNamePrefix' not in config and 'outputBucket' not in config: + config['driveFileNamePrefix'] = description + + if 'region' in config: + # Convert the region to a serialized form, if necessary. + config['region'] = _GetSerializedRegion(config.get('region')) + + return _CreateTask( + Task.Type.EXPORT_IMAGE, image, description, config) + + # Disable argument usage check; arguments are accessed using locals(). + # pylint: disable=unused-argument + @staticmethod + def toAsset(image, description='myExportImageTask', assetId=None, + pyramidingPolicy=None, dimensions=None, region=None, + scale=None, crs=None, crsTransform=None, maxPixels=None, + **kwargs): + """Creates a task to export an EE Image to an EE Asset. + + Args: + image: The image to be exported. + description: Human-readable name of the task. + assetId: The destination asset ID. 
+ pyramidingPolicy: The pyramiding policy to apply to each band in the + image, a dictionary keyed by band name. Values must be + one of: "mean", "sample", "min", "max", or "mode". + Defaults to "mean". A special key, ".default", may be used to + change the default for all bands. + dimensions: The dimensions of the exported image. Takes either a + single positive integer as the maximum dimension or "WIDTHxHEIGHT" + where WIDTH and HEIGHT are each positive integers. + region: The lon,lat coordinates for a LinearRing or Polygon + specifying the region to export. Can be specified as a nested + lists of numbers or a serialized string. Defaults to the image's + region. + scale: The resolution in meters per pixel. Defaults to the + native resolution of the image assset unless a crsTransform + is specified. + crs: The coordinate reference system of the exported image's + projection. Defaults to the image's default projection. + crsTransform: A comma-separated string of 6 numbers describing + the affine transform of the coordinate reference system of the + exported image's projection, in the order: xScale, yShearing, + xShearing, yScale, xTranslation and yTranslation. Defaults to + the image's native CRS transform. + maxPixels: The maximum allowed number of pixels in the exported + image. The task will fail if the exported region covers more + pixels in the specified projection. Defaults to 100,000,000. + **kwargs: Holds other keyword arguments that may have been deprecated + such as 'crs_transform'. + + Returns: + An unstarted Task that exports the image to Drive. + """ + # _CopyDictFilterNone must be called first because it copies locals to + # support deprecated arguments. + config = _CopyDictFilterNone(locals()) + + _ConvertToServerParams(config, 'image', Task.ExportDestination.ASSET) + + if 'region' in config: + # Convert the region to a serialized form, if necessary. 
+ config['region'] = _GetSerializedRegion(config.get('region')) + + return _CreateTask( + Task.Type.EXPORT_IMAGE, image, description, config) + + # Disable argument usage check; arguments are accessed using locals(). + # pylint: disable=unused-argument + @staticmethod + def toCloudStorage(image, description='myExportImageTask', + bucket=None, fileNamePrefix=None, + dimensions=None, region=None, scale=None, + crs=None, crsTransform=None, maxPixels=None, + shardSize=None, fileDimensions=None, + skipEmptyTiles=None, **kwargs): + """Creates a task to export an EE Image to Google Cloud Storage. + + Args: + image: The image to be exported. + description: Human-readable name of the task. + bucket: The name of a Cloud Storage bucket for the export. + fileNamePrefix: Cloud Storage object name prefix for the export. + Defaults to the name of the task. + dimensions: The dimensions of the exported image. Takes either a + single positive integer as the maximum dimension or "WIDTHxHEIGHT" + where WIDTH and HEIGHT are each positive integers. + region: The lon,lat coordinates for a LinearRing or Polygon + specifying the region to export. Can be specified as a nested + lists of numbers or a serialized string. Defaults to the image's + region. + scale: The resolution in meters per pixel. Defaults to the + native resolution of the image assset unless a crsTransform + is specified. + crs: The coordinate reference system of the exported image's + projection. Defaults to the image's default projection. + crsTransform: A comma-separated string of 6 numbers describing + the affine transform of the coordinate reference system of the + exported image's projection, in the order: xScale, yShearing, + xShearing, yScale, xTranslation and yTranslation. Defaults to + the image's native CRS transform. + maxPixels: The maximum allowed number of pixels in the exported + image. The task will fail if the exported region covers more + pixels in the specified projection. Defaults to 100,000,000. 
+ shardSize: Size in pixels of the shards in which this image will be + computed. Defaults to 256. + fileDimensions: The dimensions in pixels of each image file, if the + image is too large to fit in a single file. May specify a + single number to indicate a square shape, or a tuple of two + dimensions to indicate (width,height). Note that the image will + still be clipped to the overall image dimensions. Must be a + multiple of shardSize. + skipEmptyTiles: If true, skip writing empty (i.e. fully-masked) + image tiles. + **kwargs: Holds other keyword arguments that may have been deprecated + such as 'crs_transform'. + + Returns: + An unstarted Task that exports the image to Google Cloud Storage. + """ + # _CopyDictFilterNone must be called first because it copies locals to + # support deprecated arguments. + config = _CopyDictFilterNone(locals()) + + _ConvertToServerParams(config, 'image', Task.ExportDestination.GCS) + + if 'region' in config: + # Convert the region to a serialized form, if necessary. + config['region'] = _GetSerializedRegion(config.get('region')) + + return _CreateTask( + Task.Type.EXPORT_IMAGE, image, description, config) + + @staticmethod + def toDrive(image, description='myExportImageTask', folder=None, + fileNamePrefix=None, dimensions=None, region=None, + scale=None, crs=None, crsTransform=None, + maxPixels=None, shardSize=None, fileDimensions=None, + skipEmptyTiles=None, **kwargs): + """Creates a task to export an EE Image to Drive. + + Args: + image: The image to be exported. + description: Human-readable name of the task. + folder: The name of a unique folder in your Drive account to + export into. Defaults to the root of the drive. + fileNamePrefix: The Google Drive filename for the export. + Defaults to the name of the task. + dimensions: The dimensions of the exported image. Takes either a + single positive integer as the maximum dimension or "WIDTHxHEIGHT" + where WIDTH and HEIGHT are each positive integers. 
+ region: The lon,lat coordinates for a LinearRing or Polygon + specifying the region to export. Can be specified as a nested + lists of numbers or a serialized string. Defaults to the image's + region. + scale: The resolution in meters per pixel. Defaults to the + native resolution of the image assset unless a crsTransform + is specified. + crs: The coordinate reference system of the exported image's + projection. Defaults to the image's default projection. + crsTransform: A comma-separated string of 6 numbers describing + the affine transform of the coordinate reference system of the + exported image's projection, in the order: xScale, yShearing, + xShearing, yScale, xTranslation and yTranslation. Defaults to + the image's native CRS transform. + maxPixels: The maximum allowed number of pixels in the exported + image. The task will fail if the exported region covers more + pixels in the specified projection. Defaults to 100,000,000. + shardSize: Size in pixels of the shards in which this image will be + computed. Defaults to 256. + fileDimensions: The dimensions in pixels of each image file, if the + image is too large to fit in a single file. May specify a + single number to indicate a square shape, or a tuple of two + dimensions to indicate (width,height). Note that the image will + still be clipped to the overall image dimensions. Must be a + multiple of shardSize. + skipEmptyTiles: If true, skip writing empty (i.e. fully-masked) + image tiles. + **kwargs: Holds other keyword arguments that may have been deprecated + such as 'crs_transform', 'driveFolder', and 'driveFileNamePrefix'. + + Returns: + An unstarted Task that exports the image to Drive. + """ + # _CopyDictFilterNone must be called first because it copies locals to + # support deprecated arguments. + config = _CopyDictFilterNone(locals()) + + # fileNamePrefix should be defaulted before converting to server params. 
+ if 'fileNamePrefix' not in config: + config['fileNamePrefix'] = description + + _ConvertToServerParams(config, 'image', Task.ExportDestination.DRIVE) + + if 'region' in config: + # Convert the region to a serialized form, if necessary. + config['region'] = _GetSerializedRegion(config.get('region')) + + return _CreateTask( + Task.Type.EXPORT_IMAGE, image, description, config) + # pylint: enable=unused-argument + + class map(object): + """A class with a static method to start map export tasks.""" + + def __init__(self): + """Forbids class instantiation.""" + raise AssertionError('This class cannot be instantiated.') + + # Disable argument usage check; arguments are accessed using locals(). + # pylint: disable=unused-argument + @staticmethod + def toCloudStorage(image, description='myExportMapTask', bucket=None, + fileFormat=None, path=None, writePublicTiles=None, + maxZoom=None, scale=None, minZoom=None, + region=None, skipEmptyTiles=None, **kwargs): + """Creates a task to export an Image as a pyramid of map tiles. + + Exports a rectangular pyramid of map tiles for use with web map + viewers. The map tiles will be accompanied by a reference + index.html file that displays them using the Google Maps API. + + Args: + image: The image to export as tiles. + description: Human-readable name of the task. + bucket: The destination bucket to write to. + fileFormat: The map tiles' file format, one of 'auto', 'png', + or 'jpeg'. Defaults to 'auto', which means that opaque tiles + will be encoded as 'jpg' and tiles with transparency will be + encoded as 'png'. + path: The string used as the output's path. A trailing '/' + is optional. Defaults to the task's description. + writePublicTiles: Whether to write public tiles instead of using the + bucket's default object ACL. Defaults to true and requires the + invoker to be an OWNER of bucket. + maxZoom: The maximum zoom level of the map tiles to export. 
+ scale: The max image resolution in meters per pixel, as an alternative + to 'maxZoom'. The scale will be converted to the most appropriate + maximum zoom level at the equator. + minZoom: The optional minimum zoom level of the map tiles to export. + region: The lon,lat coordinates for a LinearRing or Polygon + specifying the region to export. Can be specified as a nested + lists of numbers or a serialized string. Map tiles will be + produced in the rectangular region containing this geometry. + Defaults to the image's region. + skipEmptyTiles: If true, skip writing empty (i.e. fully-transparent) + map tiles. + **kwargs: Holds other keyword arguments that may have been deprecated + such as 'crs_transform'. + + Returns: + An unstarted Task that exports the image to Google Cloud Storage. + + """ + # _CopyDictFilterNone must be called first because it copies locals to + # support deprecated arguments. + config = _CopyDictFilterNone(locals()) + + # The path is defaulted before converting to server params so that it + # is properly converted into the server parameter 'outputPrefix'. + if 'path' not in config: + config['path'] = description + + _ConvertToServerParams(config, 'image', Task.ExportDestination.GCS) + + if 'fileFormat' not in config: + config['fileFormat'] = 'auto' + if 'writePublicTiles' not in config: + config['writePublicTiles'] = True + if 'region' in config: + # Convert the region to a serialized form, if necessary. + config['region'] = _GetSerializedRegion(config.get('region')) + + return _CreateTask( + Task.Type.EXPORT_MAP, image, description, config) + # pylint: enable=unused-argument + + class table(object): + """A class with static methods to start table export tasks.""" + + def __init__(self): + """Forbids class instantiation.""" + raise AssertionError('This class cannot be instantiated.') + + def __new__(cls, collection, description='myExportTableTask', config=None): + """Export an EE FeatureCollection as a table. 
+ + The exported table will reside in Google Drive or Cloud Storage. + + Args: + collection: The feature collection to be exported. + description: Human-readable name of the task. + config: A dictionary that will be copied and used as parameters + for the task: + - fileFormat: The output format: "CSV" (default), "GeoJSON", "KML", + or "KMZ". + If exporting to Google Drive (default): + - driveFolder: The name of a unique folder in your Drive + account to export into. Defaults to the root of the drive. + - driveFileNamePrefix: The Google Drive filename for the export. + Defaults to the name of the task. + If exporting to Google Cloud Storage: + - outputBucket: The name of a Cloud Storage bucket for the export. + - outputPrefix: Cloud Storage object name prefix for the export. + + Returns: + An unstarted Task that exports the table. + """ + config = (config or {}).copy() + if 'driveFileNamePrefix' not in config and 'outputBucket' not in config: + config['driveFileNamePrefix'] = description + if 'fileFormat' not in config: + config['fileFormat'] = 'CSV' + return _CreateTask( + Task.Type.EXPORT_TABLE, collection, description, config) + + # Disable argument usage check; arguments are accessed using locals(). + # pylint: disable=unused-argument + @staticmethod + def toCloudStorage(collection, description='myExportTableTask', + bucket=None, fileNamePrefix=None, + fileFormat=None, **kwargs): + """Creates a task to export a FeatureCollection to Google Cloud Storage. + + Args: + collection: The feature collection to be exported. + description: Human-readable name of the task. + bucket: The name of a Cloud Storage bucket for the export. + fileNamePrefix: Cloud Storage object name prefix for the export. + Defaults to the name of the task. + fileFormat: The output format: "CSV" (default), "GeoJSON", "KML", + or "KMZ". + **kwargs: Holds other keyword arguments that may have been deprecated + such as 'outputBucket'. + + Returns: + An unstarted Task that exports the table. 
+ """ + # _CopyDictFilterNone must be called first because it copies locals to + # support deprecated arguments. + config = _CopyDictFilterNone(locals()) + + if 'fileFormat' not in config: + config['fileFormat'] = 'CSV' + + _ConvertToServerParams( + config, 'collection', Task.ExportDestination.GCS) + + return _CreateTask( + Task.Type.EXPORT_TABLE, collection, description, config) + + @staticmethod + def toDrive(collection, description='myExportTableTask', + folder=None, fileNamePrefix=None, fileFormat=None, **kwargs): + """Creates a task to export a FeatureCollection to Google Cloud Storage. + + Args: + collection: The feature collection to be exported. + description: Human-readable name of the task. + folder: The name of a unique folder in your Drive account to + export into. Defaults to the root of the drive. + fileNamePrefix: The Google Drive filename for the export. + Defaults to the name of the task. + fileFormat: The output format: "CSV" (default), "GeoJSON", "KML", + or "KMZ". + **kwargs: Holds other keyword arguments that may have been deprecated + such as 'driveFolder' and 'driveFileNamePrefix'. + + Returns: + An unstarted Task that exports the table. + """ + # _CopyDictFilterNone must be called first because it copies locals to + # support deprecated arguments. + config = _CopyDictFilterNone(locals()) + + # fileNamePrefix should be defaulted before converting to server params. 
+ if 'fileNamePrefix' not in config: + config['fileNamePrefix'] = description + if 'fileFormat' not in config: + config['fileFormat'] = 'CSV' + + _ConvertToServerParams( + config, 'collection', Task.ExportDestination.DRIVE) + + return _CreateTask( + Task.Type.EXPORT_TABLE, collection, description, config) + + class video(object): + """A class with static methods to start video export task.""" + + def __init__(self): + """Forbids class instantiation.""" + raise AssertionError('This class cannot be instantiated.') + + def __new__(cls, collection, description='myExportVideoTask', config=None): + """Exports an EE ImageCollection as a video. + + The exported video will reside in Google Drive or Cloud Storage. + + Args: + collection: The image collection to be exported. The collection must + only contain RGB images. + description: Human-readable name of the task. + config: A dictionary of configuration parameters for the task: + - region: The lon,lat coordinates for a LinearRing or Polygon + specifying the region to export. Can be specified as a nested + lists of numbers or a serialized string. Defaults to the first + image's region. + - scale: The resolution in meters per pixel. + - crs: The coordinate reference system of the exported video's + projection. Defaults to SR-ORG:6627. + - crs_transform: A comma-separated string of 6 numbers describing + the affine transform of the coordinate reference system of the + exported video's projection, in the order: xScale, yShearing, + xShearing, yScale, xTranslation and yTranslation. Defaults to + the image collection's native CRS transform. + - dimensions: The dimensions of the exported video. Takes either a + single positive integer as the maximum dimension or "WIDTHxHEIGHT" + where WIDTH and HEIGHT are each positive integers. + - framesPerSecond: A number between .1 and 100 describing the + framerate of the exported video. + - maxPixels: The maximum number of pixels per frame. + Defaults to 1e8 pixels per frame. 
    @staticmethod
    def toCloudStorage(collection, description='myExportVideoTask',
                       bucket=None, fileNamePrefix=None, framesPerSecond=None,
                       dimensions=None, region=None, scale=None, crs=None,
                       crsTransform=None, maxPixels=None,
                       maxFrames=None, **kwargs):
      """Creates a task to export an ImageCollection video to Cloud Storage.

      Args:
        collection: The image collection to be exported. The collection must
            only contain RGB images.
        description: Human-readable name of the task.
        bucket: The name of a Cloud Storage bucket for the export.
        fileNamePrefix: Cloud Storage object name prefix for the export.
            Defaults to the task's description.
        framesPerSecond: A number between .1 and 100 describing the
            framerate of the exported video.
        dimensions: The dimensions of the exported video. Takes either a
            single positive integer as the maximum dimension or "WIDTHxHEIGHT"
            where WIDTH and HEIGHT are each positive integers.
        region: The lon,lat coordinates for a LinearRing or Polygon
            specifying the region to export. Can be specified as a nested
            lists of numbers or a serialized string. Defaults to the first
            image's region.
        scale: The resolution in meters per pixel.
        crs: The coordinate reference system of the exported video's
            projection. Defaults to SR-ORG:6627.
        crsTransform: A comma-separated string of 6 numbers describing
            the affine transform of the coordinate reference system of the
            exported video's projection, in the order: xScale, yShearing,
            xShearing, yScale, xTranslation and yTranslation. Defaults to
            the image collection's native CRS transform.
        maxPixels: The maximum number of pixels per frame.
            Defaults to 1e8 pixels per frame. By setting this explicitly,
            you may raise or lower the limit.
        maxFrames: The maximum number of frames to export.
            Defaults to 1000 frames. By setting this explicitly, you may
            raise or lower the limit.
        **kwargs: Holds other keyword arguments that may have been deprecated
            such as 'crs_transform'.

      Returns:
        An unstarted Task that exports the image collection
        to Google Cloud Storage.
      """
      # _CopyDictFilterNone must be called first because it copies locals to
      # support deprecated arguments.
      # NOTE: every local variable in scope at this point is captured as a
      # task configuration key, so no locals may be introduced above this
      # call and no parameter may be renamed without changing the config.
      config = _CopyDictFilterNone(locals())
      if 'crs' not in config:
        config['crs'] = 'SR-ORG:6627'
      # fileNamePrefix must be defaulted before server-param conversion so
      # it is translated into the server-side 'outputPrefix' parameter.
      if 'fileNamePrefix' not in config:
        config['fileNamePrefix'] = description

      _ConvertToServerParams(config, 'collection', Task.ExportDestination.GCS)

      if 'region' in config:
        # Convert the region to a serialized form, if necessary.
        config['region'] = _GetSerializedRegion(config.get('region'))

      return _CreateTask(
          Task.Type.EXPORT_VIDEO, collection, description, config)
+ Defaults to 1000 frames. By setting this explicitly, you may + raise or lower the limit. + **kwargs: Holds other keyword arguments that may have been deprecated + such as 'crs_transform'. + + Returns: + An unstarted Task that exports the image collection to Drive. + """ + # _CopyDictFilterNone must be called first because it copies locals to + # support deprecated arguments. + config = _CopyDictFilterNone(locals()) + if 'crs' not in config: + config['crs'] = 'SR-ORG:6627' + if 'fileNamePrefix' not in config: + config['fileNamePrefix'] = description + + _ConvertToServerParams(config, 'collection', Task.ExportDestination.DRIVE) + + if 'region' in config: + # Convert the region to a serialized form, if necessary. + config['region'] = _GetSerializedRegion(config.get('region')) + + return _CreateTask( + Task.Type.EXPORT_VIDEO, collection, description, config) + # pylint: enable=unused-argument + + +def _CreateTask(task_type, ee_object, description, config): + """Creates an export task. + + Args: + task_type: The type of the task to create. One of Task.Type. + ee_object: The object to export. + description: Human-readable name of the task. + config: Custom config fields for the task. + + Returns: + An unstarted export Task. + """ + full_config = { + 'type': task_type, + 'json': ee_object.serialize(), + 'description': description, + 'state': Task.State.UNSUBMITTED, + } + if config: full_config.update(config) + return Task(data.newTaskId()[0], full_config) + + +def _GetSerializedRegion(region): + """Converts a region parameter to serialized form, if it isn't already.""" + region_error = ee_exception.EEException( + 'Invalid format for region property. 
' + 'See Export.image() documentation for more details.') + if isinstance(region, six.string_types): + try: + region = json.loads(region) + except: + raise region_error + try: + geometry.Geometry.LineString(region) + except: # pylint: disable=bare-except + try: + geometry.Geometry.Polygon(region) + except: + raise region_error + return json.dumps(region) + + +def _CopyDictFilterNone(originalDict): + """Copies a dictionary and filters out None values.""" + return dict((k, v) for k, v in originalDict.items() if v is not None) + + +def _ConvertToServerParams(configDict, eeElementKey, destination): + """Converts an export configuration to server friendly parameters. + + Note that configDict is changed in place and not returned. + + Args: + configDict: The configuration dictionary to be converted. + eeElementKey: The key used to access the EE element. + destination: The destination to export to. + """ + del configDict[eeElementKey] + if 'kwargs' in configDict: + configDict.update(configDict['kwargs']) + del configDict['kwargs'] + + if 'crsTransform' in configDict: + configDict['crs_transform'] = configDict.pop('crsTransform') + + # Convert iterable fileDimensions to a comma-separated string. + if 'fileDimensions' in configDict: + dimensions = configDict['fileDimensions'] + try: + configDict['fileDimensions'] = ','.join('%d' % dim for dim in dimensions) + except TypeError: + # We pass numbers straight through. 
#!/bin/sh

# Builds a self-contained archive from the open source release of EE CLI tool.

EE_CLI_DIR="earthengine-cli"

# Vendor the Earth Engine client library so the archive is self-contained.
mkdir -p "$EE_CLI_DIR/third_party"
pip install -t "$EE_CLI_DIR/third_party" earthengine-api

cp eecli.py commands.py utils.py "$EE_CLI_DIR/"

# The wrapper becomes the user-facing 'earthengine' executable.
cp eecli_wrapper.py "$EE_CLI_DIR/earthengine"
chmod +x "$EE_CLI_DIR/earthengine"

# Bug fix: 'tar cvf foo.tar.gz' creates an UNcompressed tar that merely has
# a .tar.gz name; 'z' makes the archive actually gzip-compressed.
tar czvf earthengine-cli.tar.gz "$EE_CLI_DIR"
rm -rf "$EE_CLI_DIR"
+ +Each command is implemented by extending the Command class. Each class +defines the supported positional and optional arguments, as well as +the actions to be taken when the command is executed. +""" + +from __future__ import print_function + +# pylint: disable=g-bad-import-order +from six.moves import input # pylint: disable=redefined-builtin +import argparse +import calendar +from collections import Counter +import datetime +import json +import os +import re +import sys +import webbrowser + +# pylint: disable=g-import-not-at-top +try: + # Python 2.x + import urlparse +except ImportError: + # Python 3.x + from urllib.parse import urlparse + +import ee +from ee.cli import utils + +# Constants used in ACLs. +ALL_USERS = 'AllUsers' +ALL_USERS_CAN_READ = 'all_users_can_read' +READERS = 'readers' +WRITERS = 'writers' + +# Constants used in setting metadata properties. +TYPE_DATE = 'date' +TYPE_NUMBER = 'number' +TYPE_STRING = 'string' +SYSTEM_TIME_START = 'system:time_start' +SYSTEM_TIME_END = 'system:time_end' + +# A regex that parses properties of the form "[(type)]name=value". The +# second, third, and fourth group are type, name, and number, respectively. +PROPERTY_RE = re.compile(r'(\(([^\)]*)\))?([^=]+)=(.*)') + +# Translate internal task type identifiers to user-friendly strings that +# are consistent with the language in the API and docs. +TASK_TYPES = { + 'EXPORT_FEATURES': 'Export.table', + 'EXPORT_IMAGE': 'Export.image', + 'EXPORT_TILES': 'Export.map', + 'EXPORT_VIDEO': 'Export.video', + 'INGEST': 'Upload', +} + + +def _add_wait_arg(parser): + parser.add_argument( + '--wait', '-w', nargs='?', default=-1, type=int, const=sys.maxsize, + help=('Wait for the task to finish,' + ' or timeout after the specified number of seconds.' 
+ ' Without this flag, the command just starts an export' + ' task in the background, and returns immediately.')) + + +def _upload(args, request, ingestion_function): + if 0 <= args.wait < 10: + raise ee.EEException('Wait time should be at least 10 seconds.') + task_id = ee.data.newTaskId()[0] + ingestion_function(task_id, request) + print('Started upload task with ID: %s' % task_id) + if args.wait >= 0: + print('Waiting for the upload task to complete...') + utils.wait_for_task(task_id, args.wait) + + +# Argument types +def _comma_separated_strings(string): + """Parses an input consisting of comma-separated strings.""" + error_msg = 'Argument should be a comma-separated list of strings: {}' + values = string.split(',') + if not values: + raise argparse.ArgumentTypeError(error_msg.format(string)) + return values + + +def _comma_separated_numbers(string): + """Parses an input consisting of comma-separated numbers.""" + error_msg = 'Argument should be a comma-separated list of numbers: {}' + values = string.split(',') + if not values: + raise argparse.ArgumentTypeError(error_msg.format(string)) + numbervalues = [] + for value in values: + try: + numbervalues.append(int(value)) + except ValueError: + try: + numbervalues.append(float(value)) + except ValueError: + raise argparse.ArgumentTypeError(error_msg.format(string)) + return numbervalues + + +def _comma_separated_pyramiding_policies(string): + """Parses an input consisting of comma-separated pyramiding policies.""" + error_msg = ('Argument should be a comma-separated list of: ' + '{{"mean", "sample", "min", "max", "mode"}}: {}') + values = string.split(',') + if not values: + raise argparse.ArgumentTypeError(error_msg.format(string)) + redvalues = [] + for value in values: + if value.lower() not in {'mean', 'sample', 'min', 'max', 'mode'}: + raise argparse.ArgumentTypeError(error_msg.format(string)) + redvalues.append(value.lower()) + return redvalues + + +def _decode_number(string): + """Decodes a number from a 
command line argument.""" + try: + return float(string) + except ValueError: + raise argparse.ArgumentTypeError( + 'Invalid value for property of type "number": "%s".' % string) + + +def _timestamp_ms_for_datetime(datetime_obj): + """Returns time since the epoch in ms for the given UTC datetime object.""" + return ( + int(calendar.timegm(datetime_obj.timetuple()) * 1000) + + datetime_obj.microsecond / 1000) + + +def _decode_date(string): + """Decodes a date from a command line argument, as msec since the epoch.""" + try: + return int(string) + except ValueError: + date_formats = ['%Y-%m-%d', + '%Y-%m-%dT%H:%M:%S', + '%Y-%m-%dT%H:%M:%S.%f'] + for date_format in date_formats: + try: + dt = datetime.datetime.strptime(string, date_format) + return _timestamp_ms_for_datetime(dt) + except ValueError: + continue + raise argparse.ArgumentTypeError( + 'Invalid value for property of type "date": "%s".' % string) + + +def _decode_property(string): + """Decodes a general key-value property from a command line argument.""" + m = PROPERTY_RE.match(string) + if not m: + raise argparse.ArgumentTypeError( + 'Invalid property: "%s". Must have the form "name=value" or ' + '"(type)name=value".', string) + _, type_str, name, value_str = m.groups() + if type_str is None: + # Guess numeric types automatically. + try: + value = _decode_number(value_str) + except argparse.ArgumentTypeError: + value = value_str + elif type_str == TYPE_DATE: + value = _decode_date(value_str) + elif type_str == TYPE_NUMBER: + value = _decode_number(value_str) + elif type_str == TYPE_STRING: + value = value_str + else: + raise argparse.ArgumentTypeError( + 'Unrecognized property type name: "%s". Expected one of "string", ' + '"number", "date", or a prefix.' % type_str) + return (name, value) + + +def _add_property_flags(parser): + """Adds command line flags related to metadata properties to a parser.""" + parser.add_argument( + '--property', '-p', + help='A property to set, in the form [(type)]name=value. 
If no type ' + 'is specified the type will be "number" if the value is numeric and ' + '"string" otherwise. May be provided multiple times.', + action='append', + type=_decode_property) + parser.add_argument( + '--time_start', '-ts', + help='Sets the start time property to a number or date.', + type=_decode_date) + parser.add_argument( + '--time_end', '-te', + help='Sets the end time property to a number or date.', + type=_decode_date) + + +def _decode_property_flags(args): + """Decodes metadata properties from args as a list of (name,value) pairs.""" + property_list = list(args.property or []) + if args.time_start: + property_list.append((SYSTEM_TIME_START, args.time_start)) + if args.time_end: + property_list.append((SYSTEM_TIME_END, args.time_end)) + names = [name for name, _ in property_list] + duplicates = [name for name, count in Counter(names).items() if count > 1] + if duplicates: + raise ee.EEException('Duplicate property name(s): %s.' % duplicates) + return dict(property_list) + + +def _check_valid_files(filenames): + """Returns true if the given filenames are valid upload file URIs.""" + for filename in filenames: + if not filename.startswith('gs://'): + raise ee.EEException('Invalid Cloud Storage URL: ' + filename) + + +def _pretty_print_json(json_obj): + """Pretty-prints a JSON object to stdandard output.""" + print(json.dumps(json_obj, sort_keys=True, indent=2, separators=(',', ': '))) + + +class Dispatcher(object): + """Dispatches to a set of commands implemented as command classes.""" + + def __init__(self, parser): + self.command_dict = {} + self.dest = self.name + '_cmd' + subparsers = parser.add_subparsers(title='Commands', dest=self.dest) + subparsers.required = True # Needed for proper missing arg handling in 3.x + for command in self.COMMANDS: + subparser = subparsers.add_parser( + command.name, description=command.__doc__, + help=command.__doc__.splitlines()[0]) + self.command_dict[command.name] = command(subparser) + + def run(self, args, 
class AuthenticateCommand(object):
  """Prompts the user to authorize access to Earth Engine via OAuth2."""

  # Subcommand name registered with the CLI dispatcher.
  name = 'authenticate'

  def __init__(self, unused_parser):
    # Takes no flags; the parser is accepted only to match the common
    # command-class constructor signature used by Dispatcher.
    pass

  def run(self, unused_args, unused_config):
    """Generates and opens a URL to get auth code, then retrieve a token."""

    # Build the OAuth2 authorization URL and try to launch a browser on it;
    # the URL is also printed below for users without a usable browser.
    auth_url = ee.oauth.get_authorization_url()
    webbrowser.open_new(auth_url)

    print("""
    Opening web browser to address %s
    Please authorize access to your Earth Engine account, and paste
    the resulting code below.
    If the web browser does not start, please manually browse the URL above.
    """ % auth_url)

    # Blocks on interactive input: the user pastes the one-time code shown
    # after granting access.
    auth_code = input('Please enter authorization code: ').strip()

    # Exchange the one-time authorization code for a long-lived token and
    # persist it for later ee.Initialize() calls.
    token = ee.oauth.request_token(auth_code)
    ee.oauth.write_token(token)
    print('\nSuccessfully saved authorization token.')
+ del acl['owners'] + ee.data.setAssetAcl(args.asset_id, json.dumps(acl)) + + def _parse_permissions(self, args): + """Decodes and sanity-checks the permissions in the arguments.""" + # A dictionary mapping from user ids to one of 'R', 'W', or 'D'. + permissions = {} + if args.u: + for grant in args.u: + parts = grant.split(':') + if len(parts) != 2 or parts[1] not in ['R', 'W']: + raise ee.EEException('Invalid permission "%s".' % grant) + user, role = parts + if user in permissions: + raise ee.EEException('Multiple permission settings for "%s".' % user) + if user == ALL_USERS and role == 'W': + raise ee.EEException('Cannot grant write permissions to AllUsers.') + permissions[user] = role + if args.d: + for user in args.d: + if user in permissions: + raise ee.EEException('Multiple permission settings for "%s".' % user) + permissions[user] = 'D' + return permissions + + def _apply_permissions(self, acl, permissions): + """Applies the given permission edits to the given acl.""" + for user, role in permissions.iteritems(): + if user == ALL_USERS: + acl[ALL_USERS_CAN_READ] = (role == 'R') + elif role == 'R': + if user not in acl[READERS]: + acl[READERS].append(user) + if user in acl[WRITERS]: + acl[WRITERS].remove(user) + elif role == 'W': + if user in acl[READERS]: + acl[READERS].remove(user) + if user not in acl[WRITERS]: + acl[WRITERS].append(user) + elif role == 'D': + if user in acl[READERS]: + acl[READERS].remove(user) + if user in acl[WRITERS]: + acl[WRITERS].remove(user) + + +class AclGetCommand(object): + """Prints the access control list for an asset.""" + + name = 'get' + + def __init__(self, parser): + parser.add_argument('asset_id', help='ID of the asset.') + + def run(self, args, config): + config.ee_init() + acl = ee.data.getAssetAcl(args.asset_id) + _pretty_print_json(acl) + + +class AclSetCommand(object): + """Sets the access control list for an asset. 
+ + The ACL may be the name of a canned ACL, or it may be the path to a + file containing the output from "acl get". The recognized canned ACL + names are "private", indicating that no users other than the owner + have access, and "public", indicating that all users have read + access. It is currently not possible to modify the owner ACL using + this tool. + """ + + name = 'set' + + CANNED_ACLS = { + 'private': { + READERS: [], + WRITERS: [], + ALL_USERS_CAN_READ: False, + }, + 'public': { + READERS: [], + WRITERS: [], + ALL_USERS_CAN_READ: True, + }, + } + + def __init__(self, parser): + parser.add_argument('file_or_acl_name', + help='File path or canned ACL name.') + parser.add_argument('asset_id', help='ID of the asset.') + + def run(self, args, config): + """Sets asset ACL to a canned ACL or one provided in a JSON file.""" + config.ee_init() + if args.file_or_acl_name in self.CANNED_ACLS.keys(): + acl = self.CANNED_ACLS[args.file_or_acl_name] + else: + acl = json.load(open(args.file_or_acl_name)) + # In the expected usage the ACL file will have come from a previous + # invocation of 'acl get', which means it will include an 'owners' + # stanza, but EE does not currently allow setting the owner ACL, + # so we have to remove it. 
+ if 'owners' in acl: + print('Warning: Not updating the owner ACL.') + del acl['owners'] + ee.data.setAssetAcl(args.asset_id, json.dumps(acl)) + + +class AclCommand(Dispatcher): + """Prints or updates the access control list of the specified asset.""" + + name = 'acl' + + COMMANDS = [ + AclChCommand, + AclGetCommand, + AclSetCommand, + ] + + +class AssetInfoCommand(object): + """Prints metadata and other information about an Earth Engine asset.""" + + name = 'info' + + def __init__(self, parser): + parser.add_argument('asset_id', help='ID of the asset to print.') + + def run(self, args, config): + config.ee_init() + info = ee.data.getInfo(args.asset_id) + if info: + _pretty_print_json(info) + else: + raise ee.EEException( + 'Asset does not exist or is not accessible: %s' % args.asset_id) + + +class AssetSetCommand(object): + """Sets metadata properties of an Earth Engine asset. + + Properties may be of type "string", "number", or "date". Dates must + be specified in the form YYYY-MM-DD[Thh:mm:ss[.ff]] in UTC and are + stored as numbers representing the number of milliseconds since the + Unix epoch (00:00:00 UTC on 1 January 1970). 
+ """ + + name = 'set' + + def __init__(self, parser): + parser.add_argument('asset_id', help='ID of the asset to update.') + _add_property_flags(parser) + + def run(self, args, config): + properties = _decode_property_flags(args) + config.ee_init() + if not properties: + raise ee.EEException('No properties specified.') + ee.data.setAssetProperties(args.asset_id, properties) + + +class AssetCommand(Dispatcher): + """Prints or updates metadata associated with an Earth Engine asset.""" + + name = 'asset' + + COMMANDS = [ + AssetInfoCommand, + AssetSetCommand, + ] + + + + +class CopyCommand(object): + """Creates a new Earth Engine asset as a copy of another asset.""" + + name = 'cp' + + def __init__(self, parser): + parser.add_argument( + 'source', help='Full path of the source asset.') + parser.add_argument( + 'destination', help='Full path of the destination asset.') + + def run(self, args, config): + """Runs the asset copy.""" + config.ee_init() + ee.data.copyAsset(args.source, args.destination) + + +class CreateCommandBase(object): + """Base class for implementing Create subcommands.""" + + def __init__(self, parser, fragment, asset_type): + parser.add_argument( + 'asset_id', nargs='+', + help='Full path of %s to create.' 
% fragment) + parser.add_argument( + '--parents', '-p', action='store_true', + help='Make parent folders as needed.') + self.asset_type = asset_type + + def run(self, args, config): + config.ee_init() + ee.data.create_assets(args.asset_id, self.asset_type, args.parents) + + +class CreateCollectionCommand(CreateCommandBase): + """Creates one or more image collections.""" + + name = 'collection' + + def __init__(self, parser): + super(CreateCollectionCommand, self).__init__( + parser, 'an image collection', ee.data.ASSET_TYPE_IMAGE_COLL) + + +class CreateFolderCommand(CreateCommandBase): + """Creates one or more folders.""" + + name = 'folder' + + def __init__(self, parser): + super(CreateFolderCommand, self).__init__( + parser, 'a folder', ee.data.ASSET_TYPE_FOLDER) + + +class CreateCommand(Dispatcher): + """Creates assets and folders.""" + + name = 'create' + + COMMANDS = [ + CreateCollectionCommand, + CreateFolderCommand, + ] + + + + +class ListCommand(object): + """Prints the contents of a folder or collection.""" + + name = 'ls' + + def __init__(self, parser): + parser.add_argument( + 'asset_id', nargs='*', + help='A folder or image collection to be inspected.') + parser.add_argument( + '-l', action='store_true', + help='Print output in long format.') + parser.add_argument( + '--max_items', '-m', default=-1, type=int, + help='Maximum number of items to list for each collection.') + + def run(self, args, config): + config.ee_init() + if not args.asset_id: + roots = ee.data.getAssetRoots() + self._print_assets(roots, '', args.l) + return + assets = args.asset_id + count = 0 + for asset in assets: + if count > 0: + print() + self._list_asset_content( + asset, args.max_items, len(assets), args.l) + count += 1 + + def _print_assets(self, assets, indent, long_format): + if not assets: + return + max_type_length = max([len(asset['type']) for asset in assets]) + format_str = '%s{:%ds}{:s}' % (indent, max_type_length + 4) + for asset in assets: + if long_format: + # 
Example output: + # [Image] user/test/my_img + # [ImageCollection] user/test/my_coll + print(format_str.format('['+asset['type']+']', asset['id'])) + else: + print(asset['id']) + + def _list_asset_content(self, asset, max_items, total_assets, long_format): + try: + list_req = {'id': asset} + if max_items >= 0: + list_req['num'] = max_items + children = ee.data.getList(list_req) + indent = '' + if total_assets > 1: + print('%s:' % asset) + indent = ' ' + self._print_assets(children, indent, long_format) + except ee.EEException as e: + print(e) + + +class MoveCommand(object): + """Moves or renames an Earth Engine asset.""" + + name = 'mv' + + def __init__(self, parser): + parser.add_argument( + 'source', help='Full path of the source asset.') + parser.add_argument( + 'destination', help='Full path of the destination asset.') + + def run(self, args, config): + config.ee_init() + ee.data.renameAsset(args.source, args.destination) + + +class RmCommand(object): + """Deletes the specified assets.""" + + name = 'rm' + + def __init__(self, parser): + parser.add_argument( + 'asset_id', nargs='+', help='Full path of an asset to delete.') + parser.add_argument( + '--recursive', '-r', action='store_true', + help='Recursively delete child assets.') + parser.add_argument( + '--dry_run', action='store_true', + help=('Perform a dry run of the delete operation. 
Does not ' + 'delete any assets.')) + parser.add_argument( + '--verbose', '-v', action='store_true', + help='Print the progress of the operation to the console.') + + def run(self, args, config): + config.ee_init() + for asset in args.asset_id: + self._delete_asset(asset, args.recursive, args.verbose, args.dry_run) + + def _delete_asset(self, asset_id, recursive, verbose, dry_run): + """Attempts to delete the specified asset or asset collection.""" + info = ee.data.getInfo(asset_id) + if info is None: + print('Asset does not exist or is not accessible: %s' % asset_id) + return + if recursive: + if info['type'] in (ee.data.ASSET_TYPE_FOLDER, + ee.data.ASSET_TYPE_IMAGE_COLL): + children = ee.data.getList({'id': asset_id}) + for child in children: + self._delete_asset(child['id'], True, verbose, dry_run) + if dry_run: + print('[dry-run] Deleting asset: %s' % asset_id) + else: + if verbose: + print('Deleting asset: %s' % asset_id) + try: + ee.data.deleteAsset(asset_id) + except ee.EEException as e: + print('Failed to delete %s. %s' % (asset_id, e)) + + +class TaskCancelCommand(object): + """Cancels a running task.""" + + name = 'cancel' + + def __init__(self, parser): + parser.add_argument( + 'task_ids', nargs='+', + help='IDs of one or more tasks to cancel,' + ' or `all` to cancel all tasks.') + + def run(self, args, config): + config.ee_init() + cancel_all = args.task_ids == ['all'] + if cancel_all: + statuses = ee.data.getTaskList() + else: + statuses = ee.data.getTaskStatus(args.task_ids) + for status in statuses: + state = status['state'] + task_id = status['id'] + if state == 'UNKNOWN': + raise ee.EEException('Unknown task id "%s"' % task_id) + elif state == 'READY' or state == 'RUNNING': + print('Canceling task "%s"' % task_id) + ee.data.cancelTask(task_id) + elif not cancel_all: + print('Task "%s" already in state "%s".' 
% (status['id'], state)) + + +class TaskInfoCommand(object): + """Prints information about a task.""" + + name = 'info' + + def __init__(self, parser): + parser.add_argument('task_id', nargs='*', help='ID of a task to get.') + + def run(self, args, config): + config.ee_init() + for i, status in enumerate(ee.data.getTaskStatus(args.task_id)): + if i: + print() + print('%s:' % status['id']) + print(' State: %s' % status['state']) + if status['state'] == 'UNKNOWN': + continue + print(' Type: %s' % TASK_TYPES.get(status.get('task_type'), 'Unknown')) + print(' Description: %s' % status.get('description')) + print(' Created: %s' + % self._format_time(status['creation_timestamp_ms'])) + if 'start_timestamp_ms' in status: + print(' Started: %s' % self._format_time(status['start_timestamp_ms'])) + if 'update_timestamp_ms' in status: + print(' Updated: %s' + % self._format_time(status['update_timestamp_ms'])) + if 'error_message' in status: + print(' Error: %s' % status['error_message']) + + def _format_time(self, millis): + return datetime.datetime.fromtimestamp(millis / 1000) + + +class TaskListCommand(object): + """Lists the tasks submitted recently.""" + + name = 'list' + + def __init__(self, unused_parser): + pass + + def run(self, unused_args, config): + config.ee_init() + tasks = ee.data.getTaskList() + descs = [utils.truncate(task.get('description', ''), 40) for task in tasks] + desc_length = max(len(word) for word in descs) + format_str = '{:25s} {:13s} {:%ds} {:10s} {:s}' % (desc_length + 1) + for task in tasks: + truncated_desc = utils.truncate(task.get('description', ''), 40) + task_type = TASK_TYPES.get(task['task_type'], 'Unknown') + print(format_str.format( + task['id'], task_type, truncated_desc, + task['state'], task.get('error_message', '---'))) + + +class TaskWaitCommand(object): + """Waits for the specified task or tasks to complete.""" + + name = 'wait' + + def __init__(self, parser): + parser.add_argument( + '--timeout', '-t', default=sys.maxsize, 
type=int, + help=('Stop waiting for the task(s) to finish after the specified,' + ' number of seconds. Without this flag, the command will wait' + ' indefinitely.')) + parser.add_argument('--verbose', '-v', action='store_true', + help=('Print periodic status messages for each' + ' incomplete task.')) + parser.add_argument('task_ids', nargs='+', + help=('Either a list of one or more currently-running' + ' task ids to wait on; or \'all\' to wait on all' + ' running tasks.')) + + def run(self, args, config): + """Waits on the given tasks to complete or for a timeout to pass.""" + config.ee_init() + task_ids = [] + if args.task_ids == ['all']: + tasks = ee.data.getTaskList() + for task in tasks: + if task['state'] not in utils.TASK_FINISHED_STATES: + task_ids.append(task['id']) + else: + statuses = ee.data.getTaskStatus(args.task_ids) + for status in statuses: + state = status['state'] + task_id = status['id'] + if state == 'UNKNOWN': + raise ee.EEException('Unknown task id "%s"' % task_id) + else: + task_ids.append(task_id) + + utils.wait_for_tasks(task_ids, args.timeout, log_progress=args.verbose) + + +class TaskCommand(Dispatcher): + """Prints information about or manages long-running tasks.""" + + name = 'task' + + COMMANDS = [ + TaskCancelCommand, + TaskInfoCommand, + TaskListCommand, + TaskWaitCommand, + ] + + +# TODO(user): in both upload tasks, check if the parent namespace +# exists and is writeable first. +class UploadImageCommand(object): + """Uploads an image from Cloud Storage to Earth Engine. + + See docs for "asset set" for additional details on how to specify asset + metadata properties. + """ + + name = 'image' + + def __init__(self, parser): + _add_wait_arg(parser) + parser.add_argument( + 'src_files', + help=('Cloud Storage URL(s) of the file(s) to upload. 
' + 'Must have the prefix \'gs://\'.'), + nargs='+') + parser.add_argument( + '--asset_id', + help='Destination asset ID for the uploaded file.') + parser.add_argument( + '--last_band_alpha', + help='Use the last band as a masking channel for all bands. ' + 'Mutually exclusive with nodata_value.', + action='store_true') + parser.add_argument( + '--nodata_value', + help='Value for missing data. ' + 'Mutually exclusive with last_band_alpha.', + type=_comma_separated_numbers) + parser.add_argument( + '--pyramiding_policy', + help='The pyramid reduction policy to use', + type=_comma_separated_pyramiding_policies) + parser.add_argument( + '--bands', + help='Comma-separated list of names to use for the image bands.', + type=_comma_separated_strings) + parser.add_argument( + '--crs', + help='The coordinate reference system, to override the map projection ' + 'of the image. May be either a well-known authority code (e.g. ' + 'EPSG:4326) or a WKT string.') + _add_property_flags(parser) + + def _check_num_bands(self, request, num_bands, flag_name): + """Checks the number of bands, creating them if there are none yet.""" + if 'bands' in request: + if len(request['bands']) != num_bands: + raise ValueError( + 'Inconsistent number of bands in --{}: expected {} but found {}.' 
+ .format(flag_name, len(request['bands']), num_bands)) + else: + request['bands'] = [{'id': 'b%d' % (i + 1)} for i in xrange(num_bands)] + + def run(self, args, config): + """Starts the upload task, and waits for completion if requested.""" + _check_valid_files(args.src_files) + config.ee_init() + + if args.last_band_alpha and args.nodata_value: + raise ValueError( + 'last_band_alpha and nodata_value are mutually exclusive.') + + properties = _decode_property_flags(args) + + request = { + 'id': args.asset_id, + 'properties': properties + } + + source_files = utils.expand_gcs_wildcards(args.src_files) + sources = [{'primaryPath': source} for source in source_files] + tileset = {'sources': sources} + if args.last_band_alpha: + tileset['fileBands'] = [{'fileBandIndex': -1, 'maskForAllBands': True}] + request['tilesets'] = [tileset] + + if args.bands: + request['bands'] = [{'id': name} for name in args.bands] + + if args.pyramiding_policy: + if len(args.pyramiding_policy) == 1: + request['pyramidingPolicy'] = args.pyramiding_policy[0].upper() + else: + self._check_num_bands(request, len(args.pyramiding_policy), + 'pyramiding_policy') + for index, policy in enumerate(args.pyramiding_policy): + request['bands'][index]['pyramidingPolicy'] = policy.upper() + + if args.nodata_value: + if len(args.nodata_value) == 1: + request['missingData'] = {'value': args.nodata_value[0]} + else: + self._check_num_bands(request, len(args.nodata_value), 'nodata_value') + for index, nodata in enumerate(args.nodata_value): + request['bands'][index]['missingData'] = {'value': nodata} + + if args.crs: + request['crs'] = args.crs + + _upload(args, request, ee.data.startIngestion) + + +class UploadCommand(Dispatcher): + """Uploads assets to Earth Engine.""" + + name = 'upload' + + COMMANDS = [ + UploadImageCommand, + ] + + diff --git a/ee/cli/eecli.py b/ee/cli/eecli.py new file mode 100644 index 0000000..54fcb83 --- /dev/null +++ b/ee/cli/eecli.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python 
+"""Executable for the Earth Engine command line interface. + +This executable starts a Python Cmd instance to receive and process command +line input entered by the user. If the executable is invoked with some +command line arguments, the Cmd is launched in the one-off mode, where +the provided arguments are processed as a single command after which the +program is terminated. Otherwise, this executable will launch the Cmd in the +interactive (looping) mode, where the user will be able to run multiple +commands as in a typical terminal program. +""" + +from __future__ import print_function + +import argparse +import sys + +import ee +from ee.cli import commands +from ee.cli import utils + + +class CommandDispatcher(commands.Dispatcher): + name = 'main' + + COMMANDS = [ + commands.AuthenticateCommand, + commands.AclCommand, + commands.AssetCommand, + commands.CopyCommand, + commands.CreateCommand, + commands.ListCommand, + commands.MoveCommand, + commands.RmCommand, + commands.TaskCommand, + commands.UploadCommand, + ] + + +def main(): + # Set the program name to 'earthengine' for proper help text display. + parser = argparse.ArgumentParser( + prog='earthengine', description='Earth Engine Command Line Interface.') + parser.add_argument( + '--ee_config', help='Path to the earthengine configuration file. ' + 'Defaults to "~/%s".' % utils.DEFAULT_EE_CONFIG_FILE_RELATIVE) + + dispatcher = CommandDispatcher(parser) + + # Print the list of commands if the user supplied no arguments at all. + if len(sys.argv) == 1: + parser.print_help() + return + + args = parser.parse_args() + config = utils.CommandLineConfig(args.ee_config) + + # Catch EEException errors, which wrap server-side Earth Engine + # errors, and print the error message without the irrelevant local + # stack trace. (Individual commands may also catch EEException if + # they want to be able to continue despite errors.) 
+ try: + dispatcher.run(args, config) + except ee.EEException as e: + print(e) + sys.exit(1) + +if __name__ == '__main__': + main() diff --git a/ee/cli/eecli_wrapper.py b/ee/cli/eecli_wrapper.py new file mode 100644 index 0000000..36f3905 --- /dev/null +++ b/ee/cli/eecli_wrapper.py @@ -0,0 +1,33 @@ +#!/usr/bin/env python + +"""Wrapper module for running eecli.main() from the command line.""" + +import os +import sys + +if not (2, 6) <= sys.version_info[:3] < (3,): + sys.exit('earthengine requires python 2.6 or 2.7.') + + +def OutputAndExit(message): + sys.stderr.write('%s\n' % message) + sys.exit(1) + + +EECLI_DIR = os.path.dirname(os.path.abspath(os.path.realpath(__file__))) +if not EECLI_DIR: + OutputAndExit('Unable to determine where earthengine CLI is installed. Sorry,' + ' cannot run correctly without this.\n') + +# The wrapper script adds all third_party libraries to the Python path, since +# we don't assume any third party libraries are installed system-wide. +THIRD_PARTY_DIR = os.path.join(EECLI_DIR, 'third_party') +sys.path.insert(0, THIRD_PARTY_DIR) + + +def RunMain(): + import eecli # pylint: disable=g-import-not-at-top + sys.exit(eecli.main()) + +if __name__ == '__main__': + RunMain() diff --git a/ee/cli/utils.py b/ee/cli/utils.py new file mode 100644 index 0000000..1025ad1 --- /dev/null +++ b/ee/cli/utils.py @@ -0,0 +1,261 @@ +#!/usr/bin/env python +"""Support utilities used by the Earth Engine command line interface. + +This module defines the Command class which is the base class of all +the commands supported by the EE command line tool. It also defines +the classes for configuration and runtime context management. 
+""" +from __future__ import print_function +import collections +from datetime import datetime +import json +import os +import re +import threading +import time + +import urllib +import httplib2 + +import oauth2client.client + +import ee + +HOMEDIR = os.path.expanduser('~') +EE_CONFIG_FILE = 'EE_CONFIG_FILE' +DEFAULT_EE_CONFIG_FILE_RELATIVE = os.path.join( + '.config', 'earthengine', 'credentials') +DEFAULT_EE_CONFIG_FILE = os.path.join( + HOMEDIR, DEFAULT_EE_CONFIG_FILE_RELATIVE) + +CONFIG_PARAMS = { + 'url': 'https://earthengine.googleapis.com', + 'account': None, + 'private_key': None, + 'refresh_token': None, +} + +TASK_FINISHED_STATES = (ee.batch.Task.State.COMPLETED, + ee.batch.Task.State.FAILED, + ee.batch.Task.State.CANCELLED) + + +class CommandLineConfig(object): + """Holds the configuration parameters used by the EE command line interface. + + This class attempts to load the configuration parameters from a file + specified as a constructor argument. If not provided, it attempts to load + the configuration from a file specified via the EE_CONFIG_FILE environment + variable. If the variable is not set, it looks for a JSON file at the + path ~/.config/earthengine/credentials. If all fails, it fallsback to using + some predefined defaults for each configuration parameter. 
+ """ + + def __init__(self, config_file=None): + if not config_file: + config_file = os.environ.get(EE_CONFIG_FILE, DEFAULT_EE_CONFIG_FILE) + self.config_file = config_file + config = {} + if os.path.exists(config_file): + with open(config_file) as config_file_json: + config = json.load(config_file_json) + for key, default_value in CONFIG_PARAMS.items(): + setattr(self, key, config.get(key, default_value)) + + def ee_init(self): + """Load the EE credentils and initialize the EE client.""" + if self.account and self.private_key: + credentials = ee.ServiceAccountCredentials(self.account, self.private_key) + elif self.refresh_token: + credentials = oauth2client.client.OAuth2Credentials( + None, ee.oauth.CLIENT_ID, ee.oauth.CLIENT_SECRET, + self.refresh_token, None, + 'https://accounts.google.com/o/oauth2/token', None) + else: + credentials = 'persistent' + + ee.Initialize(credentials=credentials, opt_url=self.url) + + def save(self): + config = {} + for key in CONFIG_PARAMS: + value = getattr(self, key) + if value is not None: + config[key] = value + with open(self.config_file, 'w') as output_file: + json.dump(config, output_file) + + +def query_yes_no(msg): + print('%s (y/n)' % msg) + while True: + confirm = raw_input().lower() + if confirm == 'y': + return True + elif confirm == 'n': + return False + else: + print('Please respond with \'y\' or \'n\'.') + + +def truncate(string, length): + return (string[:length] + '..') if len(string) > length else string + + +def wait_for_task(task_id, timeout, log_progress=True): + """Waits for the specified task to finish, or a timeout to occur.""" + start = time.time() + elapsed = 0 + last_check = 0 + while True: + elapsed = time.time() - start + status = ee.data.getTaskStatus(task_id)[0] + state = status['state'] + if state in TASK_FINISHED_STATES: + error_message = status.get('error_message', None) + print('Task %s ended at state: %s after %.2f seconds' + % (task_id, state, elapsed)) + if error_message: + print('Error: %s' % 
error_message) + return + if log_progress and elapsed - last_check >= 30: + print('[{:%H:%M:%S}] Current state for task {}: {}' + .format(datetime.now(), task_id, state)) + last_check = elapsed + remaining = timeout - elapsed + if remaining > 0: + time.sleep(min(10, remaining)) + else: + break + print('Wait for task %s timed out after %.2f seconds' % (task_id, elapsed)) + + +def wait_for_tasks(task_id_list, timeout, log_progress=False): + """For each task specified in task_id_list, wait for that task or timeout.""" + + if len(task_id_list) == 1: + wait_for_task(task_id_list[0], timeout, log_progress) + return + + threads = [] + for task_id in task_id_list: + t = threading.Thread(target=wait_for_task, + args=(task_id, timeout, log_progress)) + threads.append(t) + t.start() + + for thread in threads: + thread.join() + + status_list = ee.data.getTaskStatus(task_id_list) + status_counts = collections.defaultdict(int) + for status in status_list: + status_counts[status['state']] += 1 + num_incomplete = (len(status_list) - status_counts['COMPLETED'] + - status_counts['FAILED'] - status_counts['CANCELLED']) + print('Finished waiting for tasks.\n Status summary:') + print(' %d tasks completed successfully.' % status_counts['COMPLETED']) + print(' %d tasks failed.' % status_counts['FAILED']) + print(' %d tasks cancelled.' % status_counts['CANCELLED']) + print(' %d tasks are still incomplete (timed-out)' % num_incomplete) + + +def expand_gcs_wildcards(source_files): + """Implements glob-like '*' wildcard completion for cloud storage objects. + + Args: + source_files: A list of one or more cloud storage paths of the format + gs://[bucket]/[path-maybe-with-wildcards] + + Yields: + cloud storage paths of the above format with '*' wildcards expanded. 
+ Raises: + EEException: If badly formatted source_files + (e.g., missing gs://) are specified + """ + for source in source_files: + if '*' not in source: + yield source + continue + + # We extract the bucket and prefix from the input path to match + # the parameters for calling GCS list objects and reduce the number + # of items returned by that API call + + # Capture the part of the path after gs:// and before the first / + bucket_regex = 'gs://([a-z0-9_.-]+)(/.*)' + bucket_match = re.match(bucket_regex, source) + if bucket_match: + bucket, rest = bucket_match.group(1, 2) + else: + raise ee.ee_exception.EEException( + 'Badly formatted source file or bucket: %s' % source) + prefix = rest[:rest.find('*')] # Everything before the first wildcard + + bucket_files = _gcs_ls(bucket, prefix) + + # Regex to match the source path with wildcards expanded + regex = re.escape(source).replace(r'\*', '[^/]*') + '$' + for gcs_path in bucket_files: + if re.match(regex, gcs_path): + yield gcs_path + + +def _gcs_ls(bucket, prefix=''): + """Retrieve a list of cloud storage filepaths from the given bucket. + + Args: + bucket: The cloud storage bucket to be queried + prefix: Optional, a prefix used to select the objects to return + Yields: + Cloud storage filepaths matching the given bucket and prefix + Raises: + EEException: + If there is an error in accessing the specified bucket + """ + + base_url = 'https://www.googleapis.com/storage/v1/b/%s/o'%bucket + method = 'GET' + http = ee.data.authorizeHttp(httplib2.Http(0)) + next_page_token = None + + # Loop to handle paginated responses from GCS; + # Exits once no 'next page token' is returned + while True: + params = {'fields': 'items/name,nextPageToken'} + if next_page_token: + params['pageToken'] = next_page_token + if prefix: + params['prefix'] = prefix + payload = urllib.urlencode(params) + + url = base_url + '?' 
+ payload + try: + response, content = http.request(url, method=method) + except httplib2.HttpLib2Error as e: + raise ee.ee_exception.EEException( + 'Unexpected HTTP error: %s' % e.message) + + if response.status < 100 or response.status >= 300: + raise ee.ee_exception.EEException(('Error retreiving bucket %s;' + ' Server returned HTTP code: %d' % + (bucket, response.status))) + + json_content = json.loads(content) + if 'error' in json_content: + json_error = json_content['error']['message'] + raise ee.ee_exception.EEException('Error retreiving bucket %s: %s' % + (bucket, json_error)) + + objects = json_content['items'] + object_names = [str(gc_object['name']) for gc_object in objects] + + for name in object_names: + yield 'gs://%s/%s' % (bucket, name) + + # GCS indicates no more results + if 'nextPageToken' not in json_content: + return + + # Load next page, continue at beginning of while True: + next_page_token = json_content['nextPageToken'] diff --git a/ee/collection.py b/ee/collection.py new file mode 100644 index 0000000..1a5dbe6 --- /dev/null +++ b/ee/collection.py @@ -0,0 +1,225 @@ +#!/usr/bin/env python +"""Common representation for ImageCollection and FeatureCollection. + +This class is never intended to be instantiated by the user. +""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +from . import apifunction +from . import deprecation +from . import ee_exception +from . import element +from . 
class Collection(element.Element):
  """Base class for ImageCollection and FeatureCollection."""

  # Whether the server-side API functions have been imported onto this class.
  _initialized = False

  def __init__(self, func, args, opt_varName=None):
    """Constructs a collection by initializing its ComputedObject."""
    super(Collection, self).__init__(func, args, opt_varName)

  @classmethod
  def initialize(cls):
    """Imports API functions to this class."""
    if cls._initialized:
      return
    apifunction.ApiFunction.importApi(cls, 'Collection', 'Collection')
    apifunction.ApiFunction.importApi(
        cls, 'AggregateFeatureCollection', 'Collection', 'aggregate_')
    cls._initialized = True

  @classmethod
  def reset(cls):
    """Removes imported API functions from this class.

    Also resets the serial ID used for mapping Python functions to 0.
    """
    apifunction.ApiFunction.clearApi(cls)
    cls._initialized = False

  def filter(self, new_filter):
    """Apply a filter to this collection.

    Args:
      new_filter: Filter to add to this collection.

    Returns:
      The filtered collection object.
    """
    if not new_filter:
      raise ee_exception.EEException('Empty filters.')
    filtered = apifunction.ApiFunction.call_(
        'Collection.filter', self, new_filter)
    return self._cast(filtered)

  @deprecation.CanUseDeprecated
  def filterMetadata(self, name, operator, value):
    """Shortcut to add a metadata filter to a collection.

    This is equivalent to self.filter(Filter().metadata(...)).

    Args:
      name: Name of a property to filter.
      operator: Name of a comparison operator as defined
          by FilterCollection. Possible values are: "equals", "less_than",
          "greater_than", "not_equals", "not_less_than", "not_greater_than",
          "starts_with", "ends_with", "not_starts_with", "not_ends_with",
          "contains", "not_contains".
      value: The value to compare against.

    Returns:
      The filtered collection.
    """
    metadata_filter = filter.Filter.metadata_(name, operator, value)
    return self.filter(metadata_filter)

  def filterBounds(self, geometry):
    """Shortcut to add a geometry filter to a collection.

    Items in the collection with a footprint that fails to intersect
    the given geometry will be excluded when the collection is evaluated.
    This is equivalent to self.filter(Filter().geometry(...)).

    Args:
      geometry: The boundary to filter to either as a GeoJSON geometry,
          or a FeatureCollection, from which a geometry will be extracted.

    Returns:
      The filter object.
    """
    return self.filter(filter.Filter.geometry(geometry))

  def filterDate(self, start, opt_end=None):
    """Shortcut to filter a collection with a date range.

    Items in the collection with a time_start property that doesn't
    fall between the start and end dates will be excluded.
    This is equivalent to self.filter(Filter().date(...)).

    Args:
      start: The start date as a Date object, a string representation of
          a date, or milliseconds since epoch.
      opt_end: The end date as a Date object, a string representation of
          a date, or milliseconds since epoch.

    Returns:
      The filter object.
    """
    return self.filter(filter.Filter.date(start, opt_end))

  def getInfo(self):
    """Returns all the known information about this collection.

    This function makes a REST call to retrieve all the known information
    about this collection.

    Returns:
      The return contents vary but will include at least:
       features: an array containing metadata about the items in the
           collection that passed all filters.
       properties: a dictionary containing the collection's metadata
           properties.
    """
    return super(Collection, self).getInfo()

  def limit(self, maximum, opt_property=None, opt_ascending=None):
    """Limit a collection to the specified number of elements.

    This limits a collection to the specified number of elements, optionally
    sorting them by a specified property first.

    Args:
      maximum: The number to limit the collection to.
      opt_property: The property to sort by, if sorting.
      opt_ascending: Whether to sort in ascending or descending order.
          The default is true (ascending).

    Returns:
      The collection.
    """
    request = {'collection': self, 'limit': maximum}
    if opt_property is not None:
      request['key'] = opt_property
    if opt_ascending is not None:
      request['ascending'] = opt_ascending
    return self._cast(
        apifunction.ApiFunction.apply_('Collection.limit', request))

  def sort(self, prop, opt_ascending=None):
    """Sort a collection by the specified property.

    Args:
      prop: The property to sort by.
      opt_ascending: Whether to sort in ascending or descending
          order. The default is true (ascending).

    Returns:
      The collection.
    """
    # Sorting is deliberately implemented on the server via Collection.limit,
    # which accepts a sort key and order without a count.
    request = {'collection': self, 'key': prop}
    if opt_ascending is not None:
      request['ascending'] = opt_ascending
    return self._cast(
        apifunction.ApiFunction.apply_('Collection.limit', request))

  @staticmethod
  def name():
    return 'Collection'

  @staticmethod
  def elementType():
    """Returns the type of the collection's elements."""
    return element.Element

  def map(self, algorithm, opt_dropNulls=None):
    """Maps an algorithm over a collection.

    Args:
      algorithm: The operation to map over the images or features of the
          collection, a Python function that receives an image or features and
          returns one. The function is called only once and the result is
          captured as a description, so it cannot perform imperative operations
          or rely on external state.
      opt_dropNulls: If true, the mapped algorithm is allowed to return nulls,
          and the elements for which it returns nulls will be dropped.

    Returns:
      The mapped collection.

    Raises:
      ee_exception.EEException: if algorithm is not a function.
    """
    element_type = self.elementType()

    # Cast each raw element to the collection's element type before handing
    # it to the user's function.
    def casting_algorithm(e):
      return algorithm(element_type(e))

    return self._cast(apifunction.ApiFunction.call_(
        'Collection.map', self, casting_algorithm, opt_dropNulls))

  def iterate(self, algorithm, first=None):
    """Iterates over a collection with an algorithm.

    Applies a user-supplied function to each element of a collection. The
    user-supplied function is given two arguments: the current element, and
    the value returned by the previous call to iterate() or the first argument,
    for the first iteration. The result is the value returned by the final
    call to the user-supplied function.

    Args:
      algorithm: The function to apply to each element. Must take two
          arguments - an element of the collection and the value from the
          previous iteration.
      first: The initial state.

    Returns:
      The result of the Collection.iterate() call.

    Raises:
      ee_exception.EEException: if algorithm is not a function.
    """
    element_type = self.elementType()

    # Cast each raw element before passing it, along with the accumulated
    # state, to the user's function.
    def casting_algorithm(e, prev):
      return algorithm(element_type(e), prev)

    return apifunction.ApiFunction.call_(
        'Collection.iterate', self, casting_algorithm, first)


# ---------------------------------------------------------------------------
# (diff boundary) New file: ee/computedobject.py
# ---------------------------------------------------------------------------
#!/usr/bin/env python
"""A representation of an Earth Engine computed object."""

# Using lowercase function naming to match the JavaScript names.
# pylint: disable=g-bad-name

# pylint: disable=g-bad-import-order
import six

from . import data
from . import ee_exception
from . import encodable
from . import serializer
+ """ + + def __call__(cls, *args, **kwargs): + """Creates a computed object, catching self-casts.""" + if len(args) == 1 and not kwargs and isinstance(args[0], cls): + # Self-casting returns the argument unchanged. + return args[0] + else: + return type.__call__(cls, *args, **kwargs) + + +class ComputedObject(six.with_metaclass( + ComputedObjectMetaclass, encodable.Encodable)): + """A representation of an Earth Engine computed object. + + This is a base class for most API objects. + + The class itself is not abstract as it is used to wrap the return values of + algorithms that produce unrecognized types with the minimal functionality + necessary to interact well with the rest of the API. + + ComputedObjects come in two flavors: + 1. If func != null and args != null, the ComputedObject is encoded as an + invocation of func with args. + 2. If func == null and agrs == null, the ComputedObject is a variable + reference. The variable name is stored in its varName member. Note that + in this case, varName may still be null; this allows the name to be + deterministically generated at a later time. This is used to generate + deterministic variable names for mapped functions, ensuring that nested + mapping calls do not use the same variable name. + """ + + def __init__(self, func, args, opt_varName=None): + """Creates a computed object. + + Args: + func: The ee.Function called to compute this object, either as an + Algorithm name or an ee.Function object. + args: A dictionary of arguments to pass to the specified function. + Note that the caller is responsible for promoting the arguments + to the correct types. + opt_varName: A variable name. If not None, the object will be encoded + as a reference to a CustomFunction variable of this name, and both + 'func' and 'args' must be None. If all arguments are None, the + object is considered an unnamed variable, and a name will be + generated when it is included in an ee.CustomFunction. 
+ """ + if opt_varName and (func or args): + raise ee_exception.EEException( + 'When "opt_varName" is specified, "func" and "args" must be null.') + self.func = func + self.args = args + self.varName = opt_varName + + def __eq__(self, other): + # pylint: disable=unidiomatic-typecheck + return (type(self) == type(other) and + self.__dict__ == other.__dict__) + + def __ne__(self, other): + return not self.__eq__(other) + + def __hash__(self): + return hash(ComputedObject.freeze(self.__dict__)) + + def getInfo(self): + """Fetch and return information about this object. + + Returns: + The object can evaluate to anything. + """ + return data.getValue({'json': self.serialize()}) + + def encode(self, encoder): + """Encodes the object in a format compatible with Serializer.""" + if self.isVariable(): + return { + 'type': 'ArgumentRef', + 'value': self.varName + } + else: + # Encode the function that we're calling. + func = encoder(self.func) + # Built-in functions are encoded as strings under a different key. + key = 'functionName' if isinstance(func, six.string_types) else 'function' + + # Encode all arguments recursively. + encoded_args = {} + for name, value in self.args.items(): + if value is not None: + encoded_args[name] = encoder(value) + + return { + 'type': 'Invocation', + 'arguments': encoded_args, + key: func + } + + def serialize(self, opt_pretty=False): + """Serialize this object into a JSON string. + + Args: + opt_pretty: A flag indicating whether to pretty-print the JSON. + + Returns: + The serialized representation of this object. + """ + return serializer.toJSON(self, opt_pretty) + + def __str__(self): + """Writes out the object in a human-readable form.""" + return 'ee.%s(%s)' % (self.name(), serializer.toReadableJSON(self)) + + def isVariable(self): + """Returns whether this computed object is a variable reference.""" + # We can't just check for varName != null, since we allow that + # to remain null until for CustomFunction.resolveNamelessArgs_(). 
+ return self.func is None and self.args is None + + def aside(self, func, *var_args): + """Calls a function passing this object as the first argument. + + Returns the object itself for chaining. Convenient e.g. when debugging: + + c = (ee.ImageCollection('foo').aside(logging.info) + .filterDate('2001-01-01', '2002-01-01').aside(logging.info) + .filterBounds(geom).aside(logging.info) + .aside(addToMap, {'min': 0, 'max': 142}) + .select('a', 'b')) + + Args: + func: The function to call. + *var_args: Any extra arguments to pass to the function. + + Returns: + The same object, for chaining. + """ + func(self, *var_args) + return self + + @classmethod + def name(cls): + """Returns the name of the object, used in __str__().""" + return 'ComputedObject' + + @classmethod + def _cast(cls, obj): + """Cast a ComputedObject to a new instance of the same class as this. + + Args: + obj: The object to cast. + + Returns: + The cast object, and instance of the class on which this method is called. + """ + if isinstance(obj, cls): + return obj + else: + result = cls.__new__(cls) + result.func = obj.func + result.args = obj.args + result.varName = obj.varName + return result + + @staticmethod + def freeze(obj): + """Freeze a list or dict so it can be hashed.""" + if isinstance(obj, dict): + return frozenset( + (key, ComputedObject.freeze(val)) for key, val in obj.items()) + elif isinstance(obj, list): + return tuple(map(ComputedObject.freeze, obj)) + else: + return obj diff --git a/ee/customfunction.py b/ee/customfunction.py new file mode 100644 index 0000000..fdbbaee --- /dev/null +++ b/ee/customfunction.py @@ -0,0 +1,154 @@ +#!/usr/bin/env python +"""An object representing a custom EE Function.""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +# pylint: disable=g-bad-import-order +import six + +from . import computedobject +from . import ee_types +from . import function +from . 
import serializer + + +class CustomFunction(function.Function): + """An object representing a custom EE Function.""" + + def __init__(self, signature, body): + """Creates a function defined by a given expression with unbound variables. + + The expression is created by evaluating the given function + using variables as placeholders. + + Args: + signature: The function signature. If any of the argument names are + null, their names will be generated deterministically, based on + the body. + body: The Python function to evaluate. + """ + variables = [CustomFunction.variable(arg['type'], arg['name']) + for arg in signature['args']] + + # The signature of the function. + self._signature = CustomFunction._resolveNamelessArgs( + signature, variables, body) + + # The expression to evaluate. + self._body = body(*variables) + + def encode(self, encoder): + return { + 'type': 'Function', + 'argumentNames': [x['name'] for x in self._signature['args']], + 'body': encoder(self._body) + } + + def getSignature(self): + """Returns a description of the interface provided by this function.""" + return self._signature + + @staticmethod + def variable(type_name, name): + """Returns a placeholder variable with a given name and EE type. + + Args: + type_name: A class to mimic. + name: The name of the variable as it will appear in the + arguments of the custom functions that use this variable. If null, + a name will be auto-generated in _resolveNamelessArgs(). + + Returns: + A variable with the given name implementing the given type. + """ + var_type = ee_types.nameToClass(type_name) or computedobject.ComputedObject + result = var_type.__new__(var_type) + result.func = None + result.args = None + result.varName = name + return result + + @staticmethod + def create(func, return_type, arg_types): + """Creates a CustomFunction. + + The result calls a given native function with the specified return type and + argument types and auto-generated argument names. 
+ + Args: + func: The native function to wrap. + return_type: The type of the return value, either as a string or a + class reference. + arg_types: The types of the arguments, either as strings or class + references. + + Returns: + The constructed CustomFunction. + """ + + def StringifyType(t): + return t if isinstance(t, six.string_types) else ee_types.classToName(t) + + args = [{'name': None, 'type': StringifyType(i)} for i in arg_types] + signature = { + 'name': '', + 'returns': StringifyType(return_type), + 'args': args + } + return CustomFunction(signature, func) + + @staticmethod + def _resolveNamelessArgs(signature, variables, body): + """Deterministically generates names for unnamed variables. + + The names are based on the body of the function. + + Args: + signature: The signature which may contain null argument names. + variables: A list of variables, some of which may be nameless. + These will be updated to include names when this method returns. + body: The Python function to evaluate. + + Returns: + The signature with null arg names resolved. + """ + nameless_arg_indices = [] + for i, variable in enumerate(variables): + if variable.varName is None: + nameless_arg_indices.append(i) + + # Do we have any nameless arguments at all? + if not nameless_arg_indices: + return signature + + # Generate the name base by counting the number of custom functions + # within the body. + def CountFunctions(expression): + """Counts the number of custom functions in a serialized expression.""" + count = 0 + if isinstance(expression, dict): + if expression.get('type') == 'Function': + # Technically this allows false positives if one of the user + # dictionaries contains type=Function, but that does not matter + # for this use case, as we only care about determinism. 
+ count += 1 + else: + for sub_expression in expression.values(): + count += CountFunctions(sub_expression) + elif isinstance(expression, (list, tuple)): + for sub_expression in expression: + count += CountFunctions(sub_expression) + return count + serialized_body = serializer.encode(body(*variables)) + base_name = '_MAPPING_VAR_%d_' % CountFunctions(serialized_body) + + # Update the vars and signature by the name. + for (i, index) in enumerate(nameless_arg_indices): + name = base_name + str(i) + variables[index].varName = name + signature['args'][index]['name'] = name + + return signature diff --git a/ee/data.py b/ee/data.py new file mode 100644 index 0000000..79ccea1 --- /dev/null +++ b/ee/data.py @@ -0,0 +1,798 @@ +#!/usr/bin/env python +"""Singleton for the library's communication with the Earth Engine API.""" + +from __future__ import print_function + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +# pylint: disable=g-bad-import-order +import contextlib +import json +import time + +import httplib2 +import six + +# pylint: disable=g-import-not-at-top +try: + # Python 3.x + import urllib.error + import urllib.parse + import urllib.request +except ImportError: + # Python 2.x + import urllib + +from . import ee_exception + +# OAuth2 credentials object. This may be set by ee.Initialize(). +_credentials = None + +# The base URL for all data calls. This is set by ee.initialize(). +_api_base_url = None + +# The base URL for map tiles. This is set by ee.initialize(). +_tile_base_url = None + +# Whether the module has been initialized. +_initialized = False + +# Sets the number of milliseconds to wait for a request before considering +# it timed out. 0 means no limit. +_deadline_ms = 0 + +# A function called when profile results are received from the server. Takes the +# profile ID as an argument. None if profiling is disabled. 
+# +# This is a global variable because the alternative is to add a parameter to +# ee.data.send_, which would then have to be propagated from the assorted API +# call functions (ee.data.getInfo, ee.data.getMapId, etc.), and the user would +# have to modify each call to profile, rather than enabling profiling as a +# wrapper around the entire program (with ee.data.profiling, defined below). +_profile_hook = None + + +# The HTTP header through which profile results are returned. +# Lowercase because that's how httplib2 does things. +_PROFILE_HEADER_LOWERCASE = 'x-earth-engine-computation-profile' + +# Maximum number of times to retry a rate-limited request. +MAX_RETRIES = 5 + +# Maximum time to wait before retrying a rate-limited request (in milliseconds). +MAX_RETRY_WAIT = 120000 + +# Base time (in ms) to wait when performing exponential backoff in request +# retries. +BASE_RETRY_WAIT = 1000 + +# The default base URL for API calls. +DEFAULT_API_BASE_URL = 'https://earthengine.googleapis.com/api' + +# The default base URL for media/tile calls. +DEFAULT_TILE_BASE_URL = 'https://earthengine.googleapis.com' + +# Asset types recognized by create_assets(). +ASSET_TYPE_FOLDER = 'Folder' +ASSET_TYPE_IMAGE_COLL = 'ImageCollection' + + +def initialize(credentials=None, api_base_url=None, tile_base_url=None): + """Initializes the data module, setting credentials and base URLs. + + If any of the arguments are unspecified, they will keep their old values; + the defaults if initialize() has never been called before. + + Args: + credentials: The OAuth2 credentials. + api_base_url: The EarthEngine REST API endpoint. + tile_base_url: The EarthEngine REST tile endpoint. + """ + global _api_base_url, _tile_base_url, _credentials, _initialized + + # If already initialized, only replace the explicitly specified parts. 
+ + if credentials is not None: + _credentials = credentials + + if api_base_url is not None: + _api_base_url = api_base_url + elif not _initialized: + _api_base_url = DEFAULT_API_BASE_URL + + if tile_base_url is not None: + _tile_base_url = tile_base_url + elif not _initialized: + _tile_base_url = DEFAULT_TILE_BASE_URL + + _initialized = True + + +def reset(): + """Resets the data module, clearing credentials and custom base URLs.""" + global _api_base_url, _tile_base_url, _credentials, _initialized + _credentials = None + _api_base_url = None + _tile_base_url = None + _initialized = False + + +def setDeadline(milliseconds): + """Sets the timeout length for API requests. + + Args: + milliseconds: The number of milliseconds to wait for a request + before considering it timed out. 0 means no limit. + """ + global _deadline_ms + _deadline_ms = milliseconds + + +@contextlib.contextmanager +def profiling(hook): + # pylint: disable=g-doc-return-or-yield + """Returns a context manager which enables or disables profiling. + + If hook is not None, enables profiling for all API calls in its scope and + calls the hook function with all resulting profile IDs. If hook is null, + disables profiling (or leaves it disabled). + + Args: + hook: A function of one argument which is called with each profile + ID obtained from API calls, just before the API call returns. + """ + global _profile_hook + saved_hook = _profile_hook + _profile_hook = hook + try: + yield + finally: + _profile_hook = saved_hook + + + + +def getInfo(asset_id): + """Load info for an asset, given an asset id. + + Args: + asset_id: The asset to be retrieved. + + Returns: + The value call results. + """ + return send_('/info', {'id': asset_id}) + + +def getList(params): + """Get a list of contents for a collection asset. + + Args: + params: An object containing request parameters with the + following possible values: + id (string) The asset id of the collection to list. 
+ starttime (number) Start time, in msec since the epoch. + endtime (number) End time, in msec since the epoch. + fields (comma-separated strings) Field names to return. + + Returns: + The list call results. + """ + return send_('/list', params) + + +def getMapId(params): + """Get a Map ID for a given asset. + + Args: + params: An object containing visualization options with the + following possible values: + image - (JSON string) The image to render. + version - (number) Version number of image (or latest). + bands - (comma-seprated strings) Comma-delimited list of + band names to be mapped to RGB. + min - (comma-separated numbers) Value (or one per band) + to map onto 00. + max - (comma-separated numbers) Value (or one per band) + to map onto FF. + gain - (comma-separated numbers) Gain (or one per band) + to map onto 00-FF. + bias - (comma-separated numbers) Offset (or one per band) + to map onto 00-FF. + gamma - (comma-separated numbers) Gamma correction + factor (or one per band) + palette - (comma-separated strings) A string of comma-separated + CSS-style color strings (single-band previews only). For example, + 'FF0000,000000'. + format (string) Either 'jpg' (does not support transparency) or + 'png' (supports transparency). + + Returns: + A dictionary containing "mapid" and "token" strings, which can + be combined to retrieve tiles from the /map service. + """ + params['json_format'] = 'v2' + return send_('/mapid', params) + + +def getTileUrl(mapid, x, y, z): + """Generate a URL for map tiles from a Map ID and coordinates. + + Args: + mapid: The Map ID to generate tiles for, a dictionary containing "mapid" + and "token" strings. + x: The tile x coordinate. + y: The tile y coordinate. + z: The tile zoom level. + + Returns: + The tile URL. 
+ """ + width = 2 ** z + x %= width + if x < 0: + x += width + return '%s/map/%s/%d/%d/%d?token=%s' % ( + _tile_base_url, mapid['mapid'], z, x, y, mapid['token']) + + +def getValue(params): + """Retrieve a processed value from the front end. + + Args: + params: A dictionary containing: + json - (String) A JSON object to be evaluated. + + Returns: + The value call results. + """ + params['json_format'] = 'v2' + return send_('/value', params) + + +def getThumbnail(params): + """Get a Thumbnail for a given asset. + + Args: + params: Parameters identical to getMapId, plus: + size - (a number or pair of numbers in format WIDTHxHEIGHT) Maximum + dimensions of the thumbnail to render, in pixels. If only one number + is passed, it is used as the maximum, and the other dimension is + computed by proportional scaling. + region - (E,S,W,N or GeoJSON) Geospatial region of the image + to render. By default, the whole image. + format - (string) Either 'png' (default) or 'jpg'. + + Returns: + A thumbnail image as raw PNG data. + """ + return send_('/thumb', params, opt_method='GET', opt_raw=True) + + +def getThumbId(params): + """Get a Thumbnail ID for a given asset. + + Args: + params: Parameters identical to getMapId, plus: + size - (a number or pair of numbers in format WIDTHxHEIGHT) Maximum + dimensions of the thumbnail to render, in pixels. If only one number + is passed, it is used as the maximum, and the other dimension is + computed by proportional scaling. + region - (E,S,W,N or GeoJSON) Geospatial region of the image + to render. By default, the whole image. + format - (string) Either 'png' (default) or 'jpg'. + + Returns: + A thumbnail ID. 
+ """ + request = params.copy() + request['getid'] = '1' + request['json_format'] = 'v2' + if 'size' in request and isinstance(request['size'], (list, tuple)): + request['size'] = 'x'.join(map(str, request['size'])) + return send_('/thumb', request) + + +def makeThumbUrl(thumbId): + """Create a thumbnail URL from the given thumbid and token. + + Args: + thumbId: An object containing a thumbnail thumbid and token. + + Returns: + A URL from which the thumbnail can be obtained. + """ + return '%s/api/thumb?thumbid=%s&token=%s' % ( + _tile_base_url, thumbId['thumbid'], thumbId['token']) + + +def getDownloadId(params): + """Get a Download ID. + + Args: + params: An object containing visualization options with the following + possible values: + name - a base name to use when constructing filenames. + bands - a description of the bands to download. Must be an array of + dictionaries, each with the following keys: + id - the name of the band, a string, required. + crs - an optional CRS string defining the band projection. + crs_transform - an optional array of 6 numbers specifying an affine + transform from the specified CRS, in the order: xScale, + yShearing, xShearing, yScale, xTranslation and yTranslation. + dimensions - an optional array of two integers defining the width and + height to which the band is cropped. + scale - an optional number, specifying the scale in meters of the + band; ignored if crs and crs_transform is specified. + crs - a default CRS string to use for any bands that do not explicitly + specify one. + crs_transform - a default affine transform to use for any bands that do + not specify one, of the same format as the crs_transform of bands. + dimensions - default image cropping dimensions to use for any bands + that do not specify them. + scale - a default scale to use for any bands that do not specify one; + ignored if crs and crs_transform is specified. 
+ region - a polygon specifying a region to download; ignored if crs + and crs_transform is specified. + + Returns: + A dict containing a docid and token. + """ + params['json_format'] = 'v2' + if 'bands' in params and not isinstance(params['bands'], six.string_types): + params['bands'] = json.dumps(params['bands']) + return send_('/download', params) + + +def makeDownloadUrl(downloadId): + """Create a download URL from the given docid and token. + + Args: + downloadId: An object containing a download docid and token. + + Returns: + A URL from which the download can be obtained. + """ + return '%s/api/download?docid=%s&token=%s' % ( + _tile_base_url, downloadId['docid'], downloadId['token']) + + +def getTableDownloadId(params): + """Get a Download ID. + + Args: + params: An object containing table download options with the following + possible values: + format - The download format, CSV or JSON. + selectors - Comma separated string of selectors that can be used to + determine which attributes will be downloaded. + filename - The name of the file that will be downloaded. + + Returns: + A dict containing a docid and token. + """ + params['json_format'] = 'v2' + return send_('/table', params) + + +def makeTableDownloadUrl(downloadId): + """Create a table download URL from a docid and token. + + Args: + downloadId: A table download id and token. + + Returns: + A Url from which the download can be obtained. + """ + return '%s/api/table?docid=%s&token=%s' % ( + _tile_base_url, downloadId['docid'], downloadId['token']) + + +def getAlgorithms(): + """Get the list of algorithms. + + Returns: + The dictionary of algorithms. Each algorithm is a dictionary containing + the following fields: + "description" - (string) A text description of the algorithm. + "returns" - (string) The return type of the algorithm. + "args" - An array of arguments. Each argument specifies the following: + "name" - (string) The name of the argument. 
+ "description" - (string) A text description of the argument. + "type" - (string) The type of the argument. + "optional" - (boolean) Whether the argument is optional or not. + "default" - A representation of the default value if the argument + is not specified. + """ + return send_('/algorithms', {}, 'GET') + + +def createAsset(value, opt_path=None): + """Creates an asset from a JSON value. + + To create an empty image collection or folder, pass in a "value" object + with a "type" key whose value is "ImageCollection" or "Folder". + + Args: + value: An object describing the asset to create or a JSON string + with the already-serialized value for the new asset. + opt_path: An optional desired ID, including full path. + + Returns: + A description of the saved asset, including a generated ID. + """ + if not isinstance(value, six.string_types): + value = json.dumps(value) + args = {'value': value, 'json_format': 'v2'} + if opt_path is not None: + args['id'] = opt_path + return send_('/create', args) + + +def copyAsset(sourceId, destinationId): + """Copies the asset from sourceId into destinationId. + + Args: + sourceId: The ID of the asset to copy. + destinationId: The ID of the new asset created by copying. + """ + send_('/copy', { + 'sourceId': sourceId, + 'destinationId': destinationId, + }) + + +def renameAsset(sourceId, destinationId): + """Renames the asset from sourceId to destinationId. + + Args: + sourceId: The ID of the asset to rename. + destinationId: The new ID of the asset. + """ + send_('/rename', { + 'sourceId': sourceId, + 'destinationId': destinationId, + }) + + +def deleteAsset(assetId): + """Deletes the asset with the given id. + + Args: + assetId: The ID of the asset to delete. + """ + send_('/delete', {'id': assetId}) + + +def newTaskId(count=1): + """Generate an ID for a long-running task. + + Args: + count: Optional count of IDs to generate, one by default. + + Returns: + A list containing generated ID strings. 
+ """ + args = {'count': count} + return send_('/newtaskid', args) + + +def getTaskList(): + """Retrieves a list of the user's tasks. + + Returns: + A list of task status dictionaries, one for each task submitted to EE by + the current user. These include currently running tasks as well as recently + canceled or failed tasks. + """ + return send_('/tasklist', {}, 'GET')['tasks'] + + +def getTaskStatus(taskId): + """Retrieve status of one or more long-running tasks. + + Args: + taskId: ID of the task or a list of multiple IDs. + + Returns: + List containing one object for each queried task, in the same order as + the input array, each object containing the following values: + id (string) ID of the task. + state (string) State of the task, one of READY, RUNNING, COMPLETED, + FAILED, CANCELLED; or UNKNOWN if the task with the specified ID + doesn't exist. + error_message (string) For a FAILED task, a description of the error. + """ + if isinstance(taskId, six.string_types): + taskId = [taskId] + args = {'q': ','.join(taskId)} + return send_('/taskstatus', args, 'GET') + + +def cancelTask(taskId): + """Cancels a batch task.""" + send_('/updatetask', {'id': taskId, 'action': 'CANCEL'}) + + +def startProcessing(taskId, params): + """Create processing task that exports or pre-renders an image. + + Args: + taskId: ID for the task (obtained using newTaskId). + params: The object that describes the processing task; only fields + that are common for all processing types are documented below. + type (string) Either 'EXPORT_IMAGE', 'EXPORT_FEATURES', + 'EXPORT_VIDEO', or 'EXPORT_TILES'. + json (string) JSON description of the image. + + Returns: + A dict with optional notes about the created task. + """ + args = params.copy() + args['id'] = taskId + return send_('/processingrequest', args) + + +def startIngestion(taskId, params): + """Creates an asset import task. + + Args: + taskId: ID for the task (obtained using newTaskId). 
+    params: The object that describes the import task, which can
+        have these fields:
+          id (string) The destination asset id (e.g. users/foo/bar).
+          tilesets (array) A list of Google Cloud Storage source file paths
+            formatted like:
+              [{'sources': [
+                  {'primaryPath': 'foo.tif', 'additionalPaths': ['foo.prj']},
+                  {'primaryPath': 'bar.tif', 'additionalPaths': ['bar.prj']},
+              ]}]
+            Where path values correspond to source files' Google Cloud Storage
+            object names, e.g. 'gs://bucketname/filename.tif'
+          bands (array) An optional list of band names formatted like:
+            [{'id': 'R'}, {'id': 'G'}, {'id': 'B'}]
+
+  Returns:
+    A dict with optional notes about the created task.
+  """
+  args = {'id': taskId, 'request': json.dumps(params)}
+  return send_('/ingestionrequest', args)
+
+
+
+
+def getAssetRoots():
+  """Returns the list of the root folders the user owns.
+
+  Note: The "id" values for roots are two levels deep, e.g. "users/johndoe"
+  not "users/johndoe/notaroot".
+
+  Returns:
+    A list of folder descriptions formatted like:
+      [
+          {"type": "Folder", "id": "users/foo"},
+          {"type": "Folder", "id": "projects/bar"},
+      ]
+  """
+  return send_('/buckets', {}, 'GET')
+
+
+def getAssetRootQuota(rootId):
+  """Returns quota usage details for the asset root with the given ID.
+
+  Usage notes:
+
+    - The id *must* be a root folder like "users/foo" (not "users/foo/bar").
+    - The authenticated user must own the asset root to see its quota usage.
+
+  Args:
+    rootId: The ID of the asset to check.
+
+  Returns:
+    A dict describing the asset's quota usage. Looks like, with size in bytes:
+      {
+          asset_count: {usage: number, limit: number},
+          asset_size: {usage: number, limit: number},
+      }
+  """
+  return send_('/quota', {'id': rootId}, 'GET')
+
+
+def getAssetAcl(assetId):
+  """Returns the access control list of the asset with the given ID.
+
+  Args:
+    assetId: The ID of the asset to check.
+
+  Returns:
+    A dict describing the asset's ACL.
Looks like: + { + "owners" : ["user@domain1.com"], + "writers": ["user2@domain1.com", "user3@domain1.com"], + "readers": ["some_group@domain2.com"], + "all_users_can_read" : True + } + """ + return send_('/getacl', {'id': assetId}, 'GET') + + +def setAssetAcl(assetId, aclUpdate): + """Sets the access control list of the asset with the given ID. + + The owner ACL cannot be changed, and the final ACL of the asset + is constructed by merging the OWNER entries of the old ACL with + the incoming ACL record. + + Args: + assetId: The ID of the asset to set the ACL on. + aclUpdate: The updated ACL for the asset. Must be formatted like the + value returned by getAssetAcl but without "owners". + """ + send_('/setacl', {'id': assetId, 'value': aclUpdate}) + + +def setAssetProperties(assetId, properties): + """Sets metadata properties of the asset with the given ID. + + To delete a property, set its value to null. + The authenticated user must be a writer or owner of the asset. + + Args: + assetId: The ID of the asset to set the ACL on. + properties: A dictionary of keys and values for the properties to update. + """ + send_('/setproperties', {'id': assetId, 'properties': json.dumps(properties)}) + + +def createAssetHome(requestedId): + """Attempts to create a home root folder for the current user ("users/joe"). + + Results in an error if the user already has a home root folder or the + requested ID is unavailable. + + Args: + requestedId: The requested ID of the home folder (e.g. "users/joe"). + """ + send_('/createbucket', {'id': requestedId}) + + +def authorizeHttp(http): + if _credentials: + return _credentials.authorize(http) + else: + return http + + +def send_(path, params, opt_method='POST', opt_raw=False): + """Send an API call. + + Args: + path: The API endpoint to call. + params: The call parameters. + opt_method: The HTTPRequest method (GET or POST). + opt_raw: Whether the data should be returned raw, without attempting + to decode it as JSON. 
+ + Returns: + The data object returned by the API call. + + Raises: + EEException: For malformed requests or errors from the server. + """ + # Make sure we never perform API calls before initialization. + initialize() + + if _profile_hook: + params = params.copy() + params['profiling'] = '1' + + url = _api_base_url + path + headers = {} + + + try: + payload = urllib.parse.urlencode(params) # Python 3.x + except AttributeError: + payload = urllib.urlencode(params) # Python 2.x + http = httplib2.Http(timeout=(_deadline_ms / 1000.0) or None) + http = authorizeHttp(http) + + if opt_method == 'GET': + url = url + ('&' if '?' in url else '?') + payload + payload = None + elif opt_method == 'POST': + headers['Content-type'] = 'application/x-www-form-urlencoded' + else: + raise ee_exception.EEException('Unexpected request method: ' + opt_method) + + def send_with_backoff(retries=0): + """Send an API call with backoff. + + Attempts an API call. If the server's response has a 429 status, retry the + request using an incremental backoff strategy. + + Args: + retries: The number of retries that have already occurred. + + Returns: + A tuple of response, content returned by the API call. + + Raises: + EEException: For errors from the server. + """ + try: + response, content = http.request(url, method=opt_method, body=payload, + headers=headers) + if response.status == 429: + if retries < MAX_RETRIES: + time.sleep(min(2 ** retries * BASE_RETRY_WAIT, MAX_RETRY_WAIT) / 1000) + response, content = send_with_backoff(retries + 1) + except httplib2.HttpLib2Error as e: + raise ee_exception.EEException( + 'Unexpected HTTP error: %s' % e.message) + return response, content + + response, content = send_with_backoff() + + # Call the profile hook if present. Note that this is done before we handle + # the content, so that profiles are reported even if the response is an error. 
+ if _profile_hook and _PROFILE_HEADER_LOWERCASE in response: + _profile_hook(response[_PROFILE_HEADER_LOWERCASE]) + + # Whether or not the response is an error, it may be JSON. + content_type = (response['content-type'] or 'application/json').split(';')[0] + if content_type in ('application/json', 'text/json') and not opt_raw: + try: + try: + # Python 3.x + try: + content = content.decode() + except AttributeError: + pass + except UnicodeDecodeError: + # Python 2.x + content = content + json_content = json.loads(content) + except Exception: + raise ee_exception.EEException('Invalid JSON: %s' % content) + if 'error' in json_content: + raise ee_exception.EEException(json_content['error']['message']) + if 'data' not in content: + raise ee_exception.EEException('Malformed response: ' + str(content)) + else: + json_content = None + + if response.status < 100 or response.status >= 300: + # Note if the response is JSON and contains an error value, we raise that + # error above rather than this generic one. + raise ee_exception.EEException('Server returned HTTP code: %d' % + response.status) + + # Now known not to be an error response... 
+ if opt_raw: + return content + elif json_content is None: + raise ee_exception.EEException( + 'Response was unexpectedly not JSON, but %s' % response['content-type']) + else: + return json_content['data'] + + +def create_assets(asset_ids, asset_type, mk_parents): + """Creates the specified assets if they do not exist.""" + for asset_id in asset_ids: + if getInfo(asset_id): + print('Asset %s already exists' % asset_id) + continue + if mk_parents: + parts = asset_id.split('/') + path = '' + for part in parts[:-1]: + path += part + if getInfo(path) is None: + createAsset({'type': ASSET_TYPE_FOLDER}, path) + path += '/' + createAsset({'type': asset_type}, asset_id) diff --git a/ee/deprecation.py b/ee/deprecation.py new file mode 100644 index 0000000..8269ebb --- /dev/null +++ b/ee/deprecation.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +"""Decorators to mark function deprecated.""" + + + +import functools +import warnings + + +def Deprecated(message): + """Returns a decorator with a given warning message.""" + + def Decorator(func): + """Emits a deprecation warning when the decorated function is called. + + Also adds the deprecation message to the function's docstring. + + Args: + func: The function to deprecate. + + Returns: + func: The wrapped function. 
+ """ + + @functools.wraps(func) + def Wrapper(*args, **kwargs): + warnings.warn_explicit( + '%s() is deprecated: %s' % (func.__name__, message), + category=DeprecationWarning, + filename=func.__code__.co_filename, + lineno=func.__code__.co_firstlineno + 1) + return func(*args, **kwargs) + Wrapper.__doc__ += '\nDEPRECATED: ' + message + return Wrapper + return Decorator + + +def CanUseDeprecated(func): + """Ignores deprecation warnings emitted while the decorated function runs.""" + + @functools.wraps(func) + def Wrapper(*args, **kwargs): + with warnings.catch_warnings(): + warnings.filterwarnings('ignore', category=DeprecationWarning) + return func(*args, **kwargs) + return Wrapper diff --git a/ee/deserializer.py b/ee/deserializer.py new file mode 100644 index 0000000..431bec0 --- /dev/null +++ b/ee/deserializer.py @@ -0,0 +1,143 @@ +#!/usr/bin/env python +"""A deserializer that decodes EE object trees from JSON DAGs.""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +# pylint: disable=g-bad-import-order +import json +import numbers +import six + +from . import apifunction +from . import computedobject +from . import customfunction +from . import ee_date +from . import ee_exception +from . import encodable +from . import function +from . import geometry + + +def fromJSON(json_obj): + """Deserialize an object from a JSON string appropriate for API calls. + + Args: + json_obj: The JSON represenation of the input. + + Returns: + The deserialized object. + """ + return decode(json.loads(json_obj)) + + +def decode(json_obj): + """Decodes an object previously encoded using the EE API v2 (DAG) format. + + Args: + json_obj: The serialied object to decode. + + Returns: + The decoded object. + """ + named_values = {} + + # Incrementally decode scope entries if there are any. 
+  if isinstance(json_obj, dict) and json_obj['type'] == 'CompoundValue':
+    for i, (key, value) in enumerate(json_obj['scope']):
+      if key in named_values:
+        raise ee_exception.EEException(
+            'Duplicate scope key "%s" in scope #%d.' % (key, i))
+      named_values[key] = _decodeValue(value, named_values)
+    json_obj = json_obj['value']
+
+  # Decode the final value.
+  return _decodeValue(json_obj, named_values)
+
+
+def _decodeValue(json_obj, named_values):
+  """Decodes an object previously encoded using the EE API v2 (DAG) format.
+
+  This uses a provided scope for ValueRef lookup and does not allow the
+  input to be a CompoundValue.
+
+  Args:
+    json_obj: The serialized object to decode.
+    named_values: The objects that can be referenced by ValueRefs.
+
+  Returns:
+    The decoded object.
+  """
+
+  # Check for primitive values.
+  if (json_obj is None or
+      isinstance(json_obj, (bool, numbers.Number, six.string_types))):
+    return json_obj
+
+  # Check for array values.
+  if isinstance(json_obj, (list, tuple)):
+    return [_decodeValue(element, named_values) for element in json_obj]
+
+  # Ensure that we've got a proper object at this point.
+  if not isinstance(json_obj, dict):
+    raise ee_exception.EEException('Cannot decode object: ' + json_obj)
+
+  # Check for explicitly typed values.
+ type_name = json_obj['type'] + if type_name == 'ValueRef': + if json_obj['value'] in named_values: + return named_values[json_obj['value']] + else: + raise ee_exception.EEException('Unknown ValueRef: ' + json_obj) + elif type_name == 'ArgumentRef': + var_name = json_obj['value'] + if not isinstance(var_name, six.string_types): + raise ee_exception.EEException('Invalid variable name: ' + var_name) + return customfunction.CustomFunction.variable(None, var_name) # pylint: disable=protected-access + elif type_name == 'Date': + microseconds = json_obj['value'] + if not isinstance(microseconds, numbers.Number): + raise ee_exception.EEException('Invalid date value: ' + microseconds) + return ee_date.Date(microseconds / 1e3) + elif type_name == 'Bytes': + result = encodable.Encodable() + result.encode = lambda encoder: json_obj + return result + elif type_name == 'Invocation': + if 'functionName' in json_obj: + func = apifunction.ApiFunction.lookup(json_obj['functionName']) + else: + func = _decodeValue(json_obj['function'], named_values) + args = dict((key, _decodeValue(value, named_values)) + for (key, value) in json_obj['arguments'].items()) + if isinstance(func, function.Function): + return func.apply(args) + elif isinstance(func, computedobject.ComputedObject): + # We have to allow ComputedObjects for cases where invocations + # return a function, e.g. Image.parseExpression(). 
+ return computedobject.ComputedObject(func, args) + else: + raise ee_exception.EEException( + 'Invalid function value: ' + json_obj['function']) + elif type_name == 'Dictionary': + return dict((key, _decodeValue(value, named_values)) + for (key, value) in json_obj['value'].items()) + elif type_name == 'Function': + body = _decodeValue(json_obj['body'], named_values) + signature = { + 'name': '', + 'args': [{'name': arg_name, 'type': 'Object', 'optional': False} + for arg_name in json_obj['argumentNames']], + 'returns': 'Object' + } + return customfunction.CustomFunction(signature, lambda *args: body) + elif type_name in ('Point', 'MultiPoint', 'LineString', 'MultiLineString', + 'Polygon', 'MultiPolygon', 'LinearRing', + 'GeometryCollection'): + return geometry.Geometry(json_obj) + elif type_name == 'CompoundValue': + raise ee_exception.EEException('Nested CompoundValues are disallowed.') + else: + raise ee_exception.EEException('Unknown encoded object type: ' + type_name) diff --git a/ee/dictionary.py b/ee/dictionary.py new file mode 100644 index 0000000..78c1fd9 --- /dev/null +++ b/ee/dictionary.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +"""A wrapper for dictionaries.""" + + + +from . import apifunction +from . import computedobject + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + + +class Dictionary(computedobject.ComputedObject): + """An object to represent dictionaries.""" + + _initialized = False + + def __init__(self, arg=None): + """Construct a dictionary. + + Args: + arg: This constructor accepts the following args: + 1) Another dictionary. + 2) A list of key/value pairs. 
+ 3) A null or no argument (producing an empty dictionary) + """ + self.initialize() + + if isinstance(arg, dict): + super(Dictionary, self).__init__(None, None) + self._dictionary = arg + else: + self._dictionary = None + if (isinstance(arg, computedobject.ComputedObject) + and arg.func + and arg.func.getSignature()['returns'] == 'Dictionary'): + # If it's a call that's already returning a Dictionary, just cast. + super(Dictionary, self).__init__(arg.func, arg.args, arg.varName) + else: + # Delegate everything else to the server-side constructor. + super(Dictionary, self).__init__( + apifunction.ApiFunction('Dictionary'), {'input': arg}) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'Dictionary', 'Dictionary') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + @staticmethod + def name(): + return 'Dictionary' + + def encode(self, opt_encoder=None): + if self._dictionary is not None: + return opt_encoder(self._dictionary) + else: + return super(Dictionary, self).encode(opt_encoder) diff --git a/ee/ee_date.py b/ee/ee_date.py new file mode 100644 index 0000000..fab3177 --- /dev/null +++ b/ee/ee_date.py @@ -0,0 +1,88 @@ +#!/usr/bin/env python +"""A wrapper for dates.""" + + + +# pylint: disable=g-bad-import-order +import datetime +import math +import six + +from . import apifunction +from . import computedobject +from . import ee_exception +from . import ee_types as types +from . import serializer + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + + +class Date(computedobject.ComputedObject): + """An object to represent dates.""" + + _initialized = False + + def __init__(self, date, opt_tz=None): + """Construct a date. 
+
+    This sends all inputs (except another Date) through the Date function.
+
+    This constructor accepts the following args:
+      1) A bare date.
+      2) An ISO string.
+      3) An integer number of milliseconds since the epoch.
+      4) A ComputedObject.
+
+    Args:
+      date: The date to wrap.
+      opt_tz: An optional timezone, only usable with a string date.
+    """
+    self.initialize()
+
+    func = apifunction.ApiFunction('Date')
+    args = None
+    varName = None
+    if isinstance(date, datetime.datetime):
+      args = {'value':
+              math.floor(serializer.DatetimeToMicroseconds(date) / 1000)}
+    elif types.isNumber(date):
+      args = {'value': date}
+    elif isinstance(date, six.string_types):
+      args = {'value': date}
+      if opt_tz:
+        if isinstance(opt_tz, six.string_types):
+          args['timeZone'] = opt_tz
+        else:
+          raise ee_exception.EEException(
+              'Invalid argument specified for ee.Date(..., opt_tz): %s' % date)
+    elif isinstance(date, computedobject.ComputedObject):
+      if date.func and date.func.getSignature()['returns'] == 'Date':
+        # If it's a call that's already returning a Date, just cast.
+ func = date.func + args = date.args + varName = date.varName + else: + args = {'value': date} + else: + raise ee_exception.EEException( + 'Invalid argument specified for ee.Date(): %s' % date) + + super(Date, self).__init__(func, args, varName) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'Date', 'Date') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + @staticmethod + def name(): + return 'Date' diff --git a/ee/ee_exception.py b/ee/ee_exception.py new file mode 100644 index 0000000..33469f2 --- /dev/null +++ b/ee/ee_exception.py @@ -0,0 +1,9 @@ +#!/usr/bin/env python +"""A simple exception for the EE library.""" + + + + +class EEException(Exception): + """A simple exception for the EE library.""" + pass diff --git a/ee/ee_list.py b/ee/ee_list.py new file mode 100644 index 0000000..eb203a1 --- /dev/null +++ b/ee/ee_list.py @@ -0,0 +1,65 @@ +#!/usr/bin/env python +"""A wrapper for lists.""" + + + +from . import apifunction +from . import computedobject +from . import ee_exception + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + + +class List(computedobject.ComputedObject): + """An object to represent lists.""" + + _initialized = False + + def __init__(self, arg): + """Construct a list wrapper. + + This constructor accepts the following args: + 1) A bare list. + 2) A ComputedObject returning a list. + + Args: + arg: The list to wrap. + + Raises: + ee_exception.EEException: On bad input. 
+ """ + self.initialize() + + if isinstance(arg, (list, tuple)): + super(List, self).__init__(None, None) + self._list = arg + elif isinstance(arg, computedobject.ComputedObject): + super(List, self).__init__(arg.func, arg.args, arg.varName) + self._list = None + else: + raise ee_exception.EEException( + 'Invalid argument specified for ee.List(): %s' % arg) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'List', 'List') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + @staticmethod + def name(): + return 'List' + + def encode(self, opt_encoder=None): + if isinstance(self._list, (list, tuple)): + return [opt_encoder(elem) for elem in self._list] + else: + return super(List, self).encode(opt_encoder) diff --git a/ee/ee_number.py b/ee/ee_number.py new file mode 100644 index 0000000..01edf0a --- /dev/null +++ b/ee/ee_number.py @@ -0,0 +1,64 @@ +#!/usr/bin/env python +"""A wrapper for numbers.""" + + + +import numbers + +from . import apifunction +from . import computedobject +from . import ee_exception + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + + +class Number(computedobject.ComputedObject): + """An object to represent numbers.""" + + _initialized = False + + def __init__(self, number): + """Construct a number wrapper. + + This constructor accepts the following args: + 1) A bare number. + 2) A ComputedObject returning a number. + + Args: + number: The number to wrap. 
+ """ + self.initialize() + + if isinstance(number, numbers.Number): + super(Number, self).__init__(None, None) + self._number = number + elif isinstance(number, computedobject.ComputedObject): + super(Number, self).__init__(number.func, number.args, number.varName) + self._number = None + else: + raise ee_exception.EEException( + 'Invalid argument specified for ee.Number(): %s' % number) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'Number', 'Number') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + @staticmethod + def name(): + return 'Number' + + def encode(self, opt_encoder=None): + if isinstance(self._number, numbers.Number): + return self._number + else: + return super(Number, self).encode(opt_encoder) diff --git a/ee/ee_string.py b/ee/ee_string.py new file mode 100644 index 0000000..99e4cc5 --- /dev/null +++ b/ee/ee_string.py @@ -0,0 +1,70 @@ +#!/usr/bin/env python +"""A wrapper for strings.""" + + + +# pylint: disable=g-bad-import-order +import six # For Python 2/3 compatibility + +from . import apifunction +from . import computedobject +from . import ee_exception + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + + +class String(computedobject.ComputedObject): + """An object to represent strings.""" + + _initialized = False + + def __init__(self, string): + """Construct a string wrapper. + + This constructor accepts the following args: + 1) A bare string. + 2) A ComputedObject returning a string. + + Args: + string: The string to wrap. 
+ """ + self.initialize() + + if isinstance(string, six.string_types): + super(String, self).__init__(None, None) + elif isinstance(string, computedobject.ComputedObject): + if string.func and string.func.getSignature()['returns'] == 'String': + # If it's a call that's already returning a String, just cast. + super(String, self).__init__(string.func, string.args, string.varName) + else: + super(String, self).__init__(apifunction.ApiFunction('String'), { + 'input': string + }) + else: + raise ee_exception.EEException( + 'Invalid argument specified for ee.String(): %s' % string) + self._string = string + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'String', 'String') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + @staticmethod + def name(): + return 'String' + + def encode(self, opt_encoder=None): + if isinstance(self._string, six.string_types): + return self._string + else: + return self._string.encode(opt_encoder) diff --git a/ee/ee_types.py b/ee/ee_types.py new file mode 100644 index 0000000..c5c28db --- /dev/null +++ b/ee/ee_types.py @@ -0,0 +1,129 @@ +#!/usr/bin/env python +"""A set of utilities to work with EE types.""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +# pylint: disable=g-bad-import-order +import datetime +import numbers +import six + +from . import computedobject + + +# A dictionary of the classes in the ee module. Set by registerClasses. +_registered_classes = {} + + +def _registerClasses(classes): + """Registers the known classes. + + Args: + classes: A dictionary of the classes available in the ee module. 
+ """ + global _registered_classes + _registered_classes = classes + + +def classToName(klass): + """Converts a class to the API-friendly type name. + + Args: + klass: The class. + + Returns: + The name of the class, or "Object" if not recognized. + """ + if issubclass(klass, computedobject.ComputedObject): + return klass.name() + elif issubclass(klass, numbers.Number): + return 'Number' + elif issubclass(klass, six.string_types): + return 'String' + elif issubclass(klass, (list, tuple)): + return 'Array' + elif issubclass(klass, datetime.datetime): + return 'Date' + else: + return 'Object' + + +def nameToClass(name): + """Converts a class name to a class. Returns None if not an ee class. + + Args: + name: The class name. + + Returns: + The named class. + """ + return _registered_classes.get(name) + + +def isSubtype(firstType, secondType): + """Checks whether a type is a subtype of another. + + Args: + firstType: The first type name. + secondType: The second type name. + + Returns: + Whether secondType is a subtype of firstType. + """ + if secondType == firstType: + return True + + if firstType == 'Element': + return secondType in ('Element', 'Image', 'Feature', + 'Collection', 'ImageCollection', 'FeatureCollection') + elif firstType in ('FeatureCollection', 'Collection'): + return secondType in ('Collection', 'ImageCollection', 'FeatureCollection') + elif firstType == object: + return True + else: + return False + + +def isNumber(obj): + """Returns true if this object is a number or number variable. + + Args: + obj: The object to check. + + Returns: + Whether the object is a number or number variable. + """ + return (isinstance(obj, numbers.Number) or + (isinstance(obj, computedobject.ComputedObject) and + obj.name() == 'Number')) + + +def isString(obj): + """Returns true if this object is a string or string variable. + + Args: + obj: The object to check. + + Returns: + Whether the object is a string or string variable. 
+ """ + return (isinstance(obj, six.string_types) or + (isinstance(obj, computedobject.ComputedObject) and + obj.name() == 'String')) + + +def isArray(obj): + """Returns true if this object is an array or array variable. + + Args: + obj: The object to check. + + Returns: + Whether the object is an array or array variable. + """ + return (isinstance(obj, (list, tuple)) or + (isinstance(obj, computedobject.ComputedObject) and + obj.name() == 'List')) diff --git a/ee/element.py b/ee/element.py new file mode 100644 index 0000000..64f3adb --- /dev/null +++ b/ee/element.py @@ -0,0 +1,94 @@ +#!/usr/bin/env python +"""Base class for Image, Feature and Collection. + +This class is never intended to be instantiated by the user. +""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +from . import apifunction +from . import computedobject +from . import ee_exception + + +class Element(computedobject.ComputedObject): + """Base class for ImageCollection and FeatureCollection.""" + + _initialized = False + + def __init__(self, func, args, opt_varName=None): + """Constructs a collection by initializing its ComputedObject.""" + super(Element, self).__init__(func, args, opt_varName) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'Element', 'Element') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + @staticmethod + def name(): + return 'Element' + + def set(self, *args): + """Overrides one or more metadata properties of an Element. + + Args: + *args: Either a dictionary of properties, or a vararg sequence of + properties, e.g. key1, value1, key2, value2, ... + + Returns: + The element with the specified properties overridden. 
+ """ + if len(args) == 1: + properties = args[0] + + # If this is a keyword call, unwrap it. + if (isinstance(properties, dict) and + (len(properties) == 1 and 'properties' in properties) and + isinstance(properties['properties'], + (dict, computedobject.ComputedObject))): + # Looks like a call with keyword parameters. Extract them. + properties = properties['properties'] + + if isinstance(properties, dict): + # Still a plain object. Extract its keys. Setting the keys separately + # allows filter propagation. + result = self + for key, value in properties.items(): + result = apifunction.ApiFunction.call_( + 'Element.set', result, key, value) + elif (isinstance(properties, computedobject.ComputedObject) and + apifunction.ApiFunction.lookupInternal('Element.setMulti')): + # A computed dictionary. Can't set each key separately. + result = apifunction.ApiFunction.call_( + 'Element.setMulti', self, properties) + else: + raise ee_exception.EEException( + 'When Element.set() is passed one argument, ' + 'it must be a dictionary.') + else: + # Interpret as key1, value1, key2, value2, ... + if len(args) % 2 != 0: + raise ee_exception.EEException( + 'When Element.set() is passed multiple arguments, there ' + 'must be an even number of them.') + result = self + for i in range(0, len(args), 2): + key = args[i] + value = args[i + 1] + result = apifunction.ApiFunction.call_( + 'Element.set', result, key, value) + + # Manually cast the result to an image. + return self._cast(result) diff --git a/ee/encodable.py b/ee/encodable.py new file mode 100644 index 0000000..21444fa --- /dev/null +++ b/ee/encodable.py @@ -0,0 +1,23 @@ +#!/usr/bin/env python +"""An interface implemented by serializable objects.""" + + + +# Using lowercase function naming to match the JavaScript names. 
+# pylint: disable-msg=g-bad-name + + +class Encodable(object): + """An interface implemented by objects that can serialize themselves.""" + + def encode(self, encoder): + """Encodes the object in a format compatible with Serializer. + + Args: + encoder: A function that can be called to encode the components of + an object. + + Returns: + The encoded form of the object. + """ + raise NotImplementedError('Encodable classes must implement encode().') diff --git a/ee/feature.py b/ee/feature.py new file mode 100644 index 0000000..838e42c --- /dev/null +++ b/ee/feature.py @@ -0,0 +1,112 @@ +#!/usr/bin/env python +"""An object representing EE Features.""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +from . import apifunction +from . import computedobject +from . import ee_exception +from . import element +from . import geometry + + +class Feature(element.Element): + """An object representing EE Features.""" + + _initialized = False + + def __init__(self, geom, opt_properties=None): + """Creates a feature a geometry or computed object. + + Features can be constructed from one of the following arguments plus an + optional dictionary of properties: + 1) An ee.Geometry. + 2) A GeoJSON Geometry. + 3) A GeoJSON Feature. + 4) A computed object - reinterpreted as a geometry if properties + are specified, and as a feature if they aren't. + + Args: + geom: A geometry or feature. + opt_properties: A dictionary of metadata properties. If the first + parameter is a Feature (instead of a geometry), this is unused. + + Raises: + EEException: if the given geometry isn't valid. + """ + if isinstance(geom, Feature): + if opt_properties is not None: + raise ee_exception.EEException( + 'Can\'t create Feature out of a Feature and properties.') + # A pre-constructed Feature. Copy. 
+ super(Feature, self).__init__(geom.func, geom.args) + return + + self.initialize() + + feature_constructor = apifunction.ApiFunction.lookup('Feature') + if geom is None or isinstance(geom, geometry.Geometry): + # A geometry object. + super(Feature, self).__init__(feature_constructor, { + 'geometry': geom, + 'metadata': opt_properties or None + }) + elif isinstance(geom, computedobject.ComputedObject): + # A custom object to reinterpret as a Feature. + super(Feature, self).__init__(geom.func, geom.args, geom.varName) + elif isinstance(geom, dict) and geom.get('type') == 'Feature': + properties = geom.get('properties', {}) + if 'id' in geom: + if 'system:index' in properties: + raise ee_exception.EEException( + 'Can\'t specify both "id" and "system:index".') + properties = properties.copy() + properties['system:index'] = geom['id'] + # Try to convert a GeoJSON Feature. + super(Feature, self).__init__(feature_constructor, { + 'geometry': geometry.Geometry(geom.get('geometry', None)), + 'metadata': properties + }) + else: + # Try to convert the geometry arg to a Geometry, in the hopes of it + # turning out to be GeoJSON. + super(Feature, self).__init__(feature_constructor, { + 'geometry': geometry.Geometry(geom), + 'metadata': opt_properties or None + }) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'Feature', 'Feature') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + def getMapId(self, vis_params=None): + """Fetch and return a map id and token, suitable for use in a Map overlay. + + Args: + vis_params: The visualization parameters. Currently only one parameter, + 'color', containing a hex RGB color string is allowed. 
+ + Returns: + An object containing a mapid string, an access token, plus a + Collection.draw image wrapping a FeatureCollection containing + this feature. + """ + # Create a collection containing this one feature and render it. + collection = apifunction.ApiFunction.call_('Collection', [self]) + return collection.getMapId(vis_params) + + @staticmethod + def name(): + return 'Feature' diff --git a/ee/featurecollection.py b/ee/featurecollection.py new file mode 100644 index 0000000..06ca9c9 --- /dev/null +++ b/ee/featurecollection.py @@ -0,0 +1,163 @@ +#!/usr/bin/env python +"""Representation of an Earth Engine FeatureCollection.""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +from . import apifunction +from . import collection +from . import computedobject +from . import data +from . import deprecation +from . import ee_exception +from . import ee_list +from . import ee_types +from . import feature +from . import geometry + + +class FeatureCollection(collection.Collection): + """A representation of a FeatureCollection.""" + + _initialized = False + + def __init__(self, args, opt_column=None): + """Constructs a collection features. + + Args: + args: constructor argument. One of: + 1) A string - assumed to be the name of a collection. + 2) A number - assumed to be the ID of a Fusion Table. + 3) A geometry. + 4) A feature. + 5) An array of features. + 6) A computed object - reinterpreted as a collection. + opt_column: The name of the geometry column to use. Only useful with the + string or number constructor arguments. + + Raises: + EEException: if passed something other than the above. + """ + self.initialize() + + # Wrap geometries with features. + if isinstance(args, geometry.Geometry): + args = feature.Feature(args) + + # Wrap single features in an array. + if isinstance(args, feature.Feature): + args = [args] + + if ee_types.isNumber(args) or ee_types.isString(args): + # An ID. 
+ actual_args = {'tableId': args} + if opt_column: + actual_args['geometryColumn'] = opt_column + super(FeatureCollection, self).__init__( + apifunction.ApiFunction.lookup('Collection.loadTable'), actual_args) + elif isinstance(args, (list, tuple)): + # A list of features. + super(FeatureCollection, self).__init__( + apifunction.ApiFunction.lookup('Collection'), { + 'features': [feature.Feature(i) for i in args] + }) + elif isinstance(args, ee_list.List): + # A computed list of features. + super(FeatureCollection, self).__init__( + apifunction.ApiFunction.lookup('Collection'), { + 'features': args + }) + elif isinstance(args, computedobject.ComputedObject): + # A custom object to reinterpret as a FeatureCollection. + super(FeatureCollection, self).__init__( + args.func, args.args, args.varName) + else: + raise ee_exception.EEException( + 'Unrecognized argument type to convert to a FeatureCollection: %s' % + args) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + super(FeatureCollection, cls).initialize() + apifunction.ApiFunction.importApi( + cls, 'FeatureCollection', 'FeatureCollection') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + def getMapId(self, vis_params=None): + """Fetch and return a map id and token, suitable for use in a Map overlay. + + Args: + vis_params: The visualization parameters. Currently only one parameter, + 'color', containing a hex RGB color string is allowed. + + Returns: + An object containing a mapid string, an access token, plus a + Collection.draw image wrapping this collection. 
+ """ + painted = apifunction.ApiFunction.apply_('Collection.draw', { + 'collection': self, + 'color': (vis_params or {}).get('color', '000000') + }) + return painted.getMapId({}) + + def getDownloadURL(self, filetype=None, selectors=None, filename=None): + """Get a download URL for this feature collection. + + Args: + filetype: The filetype of download, either CSV or JSON. Defaults to CSV. + selectors: The selectors that should be used to determine which attributes + will be downloaded. + filename: The name of the file to be downloaded. + + Returns: + A URL to download the specified feature collection. + """ + request = {} + request['table'] = self.serialize() + if filetype is not None: + request['format'] = filetype.upper() + if filename is not None: + request['filename'] = filename + if selectors is not None: + if isinstance(selectors, (list, tuple)): + selectors = ','.join(selectors) + request['selectors'] = selectors + return data.makeTableDownloadUrl(data.getTableDownloadId(request)) + + # Deprecated spelling to match the JS library. + getDownloadUrl = deprecation.Deprecated('Use getDownloadURL().')( + getDownloadURL) + + def select(self, selectors, opt_names=None, *args): + """Select properties from each feature in a collection. + + Args: + selectors: An array of names or regexes specifying the properties + to select. + opt_names: An array of strings specifying the new names for the + selected properties. If supplied, the length must match the number + of properties selected. + *args: Selector elements as varargs. + + Returns: + The feature collection with selected properties. + """ + return self.map(lambda feat: feat.select(selectors, opt_names, *args)) + + @staticmethod + def name(): + return 'FeatureCollection' + + @staticmethod + def elementType(): + return feature.Feature diff --git a/ee/filter.py b/ee/filter.py new file mode 100644 index 0000000..3849336 --- /dev/null +++ b/ee/filter.py @@ -0,0 +1,288 @@ +#!/usr/bin/env python +"""Collection filters. 
+ +Example usage: + Filter('time', low, high) + .bounds(ring) + .eq('time', value) + .lt('time', value) +""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +from . import apifunction +from . import computedobject +from . import ee_exception + + +# A map from the deprecated old-style comparison operator names to API +# function names, implicitly prefixed with "Filter.". Negative operators +# (those starting with "not_") are not included. +_FUNCTION_NAMES = { + 'equals': 'equals', + 'less_than': 'lessThan', + 'greater_than': 'greaterThan', + 'contains': 'stringContains', + 'starts_with': 'stringStartsWith', + 'ends_with': 'stringEndsWith', +} + + +class Filter(computedobject.ComputedObject): + """An object to represent collection filters.""" + + _initialized = False + + def __init__(self, opt_filter=None): + """Construct a filter. + + This constructor accepts the following args: + 1) Another filter. + 2) An array of filters (which are implicitly ANDed together). + 3) A ComputedObject returning a filter. Users shouldn't be making these; + they're produced by the generator functions below. + + Args: + opt_filter: Optional filter to add. + """ + self.initialize() + + if isinstance(opt_filter, (list, tuple)): + if not opt_filter: + raise ee_exception.EEException('Empty list specified for ee.Filter().') + elif len(opt_filter) == 1: + opt_filter = opt_filter[0] + else: + self._filter = tuple(opt_filter) + super(Filter, self).__init__( + apifunction.ApiFunction.lookup('Filter.and'), + {'filters': self._filter}) + return + + if isinstance(opt_filter, computedobject.ComputedObject): + super(Filter, self).__init__( + opt_filter.func, opt_filter.args, opt_filter.varName) + self._filter = (opt_filter,) + elif opt_filter is None: + # A silly call with no arguments left for backward-compatibility. 
+ # Encoding such a filter is expected to fail, but it can be composed + # by calling the various methods that end up in _append(). + super(Filter, self).__init__(None, None) + self._filter = () + else: + raise ee_exception.EEException( + 'Invalid argument specified for ee.Filter(): %s' % opt_filter) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'Filter', 'Filter') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + def predicateCount(self): + """Return the number of predicates that have been added to this filter. + + Returns: + The number of predicates that have been added to this filter. + This does not count nested predicates. + """ + return len(self._filter) + + def _append(self, new_filter): + """Append a predicate to this filter. + + These are implicitly ANDed. + + Args: + new_filter: The filter to append to this one. Possible types are: + 1) another fully constructed Filter, + 2) a JSON representation of a filter, + 3) an array of 1 or 2. + + Returns: + A new filter that is the combination of both. + """ + if new_filter is not None: + prev = list(self._filter) + if isinstance(new_filter, Filter): + prev.extend(new_filter._filter) # pylint: disable=protected-access + elif isinstance(new_filter, list): + prev.extend(new_filter) + else: + prev.append(new_filter) + return Filter(prev) + + def Not(self): + """Returns the opposite of this filter. + + Returns: + The negated filter, which will match iff this filter doesn't. + """ + return apifunction.ApiFunction.call_('Filter.not', self) + + @staticmethod + def metadata_(name, operator, value): + """Filter on metadata. This is deprecated. + + Args: + name: The property name to filter on. + operator: The type of comparison. 
One of: + "equals", "less_than", "greater_than", "contains", "begins_with", + "ends_with", or any of these prefixed with "not_". + value: The value to compare against. + + Returns: + The new filter. + + Deprecated. Use ee.Filter.eq(), ee.Filter.gte(), etc.' + """ + operator = operator.lower() + + # Check for negated filters. + negated = False + if operator.startswith('not_'): + negated = True + operator = operator[4:] + + # Convert the operator to a function. + if operator not in _FUNCTION_NAMES: + raise ee_exception.EEException( + 'Unknown filtering operator: %s' % operator) + func_name = 'Filter.' + _FUNCTION_NAMES[operator] + new_filter = apifunction.ApiFunction.call_(func_name, name, value) + + return new_filter.Not() if negated else new_filter + + @staticmethod + def eq(name, value): + """Filter to metadata equal to the given value.""" + return apifunction.ApiFunction.call_('Filter.equals', name, value) + + @staticmethod + def neq(name, value): + """Filter to metadata not equal to the given value.""" + return Filter.eq(name, value).Not() + + @staticmethod + def lt(name, value): + """Filter to metadata less than the given value.""" + return apifunction.ApiFunction.call_('Filter.lessThan', name, value) + + @staticmethod + def gte(name, value): + """Filter on metadata greater than or equal to the given value.""" + return Filter.lt(name, value).Not() + + @staticmethod + def gt(name, value): + """Filter on metadata greater than the given value.""" + return apifunction.ApiFunction.call_('Filter.greaterThan', name, value) + + @staticmethod + def lte(name, value): + """Filter on metadata less than or equal to the given value.""" + return Filter.gt(name, value).Not() + + @staticmethod + def And(*args): + """Combine two or more filters using boolean AND.""" + if len(args) == 1 and isinstance(args[0], (list, tuple)): + args = args[0] + return apifunction.ApiFunction.call_('Filter.and', args) + + @staticmethod + def Or(*args): + """Combine two or more filters using 
boolean OR.""" + if len(args) == 1 and isinstance(args[0], (list, tuple)): + args = args[0] + return apifunction.ApiFunction.call_('Filter.or', args) + + @staticmethod + def date(start, opt_end=None): + """Filter images by date. + + The start and end may be a Date, numbers (interpreted as milliseconds since + 1970-01-01T00:00:00Z), or strings (such as '1996-01-01T08:00'). + + Args: + start: The inclusive start date. + opt_end: The optional exclusive end date, If not specified, a + 1-millisecond range starting at 'start' is created. + + Returns: + The modified filter. + """ + date_range = apifunction.ApiFunction.call_('DateRange', start, opt_end) + return apifunction.ApiFunction.apply_('Filter.dateRangeContains', { + 'leftValue': date_range, + 'rightField': 'system:time_start' + }) + + @staticmethod + def inList(opt_leftField=None, + opt_rightValue=None, + opt_rightField=None, + opt_leftValue=None): + """Filter on metadata contained in a list. + + Args: + opt_leftField: A selector for the left operand. + Should not be specified if leftValue is specified. + opt_rightValue: The value of the right operand. + Should not be specified if rightField is specified. + opt_rightField: A selector for the right operand. + Should not be specified if rightValue is specified. + opt_leftValue: The value of the left operand. + Should not be specified if leftField is specified. + + Returns: + The constructed filter. + """ + # Implement this in terms of listContains, with the arguments switched. + # In listContains the list is on the left side, while in inList it's on + # the right. + return apifunction.ApiFunction.apply_('Filter.listContains', { + 'leftField': opt_rightField, + 'rightValue': opt_leftValue, + 'rightField': opt_leftField, + 'leftValue': opt_rightValue + }) + + @staticmethod + def geometry(geometry, opt_errorMargin=None): + """Filter on bounds. 
  @staticmethod
  def name():
    """Returns the name of this type of object, 'Filter'."""
    return 'Filter'
+ """ + Function._promoter = staticmethod(promoter) + + def getSignature(self): + """Returns a description of the interface provided by this function. + + Returns: + The function's signature, a dictionary containing: + name: string + returns: type name string + args: list of argument dictionaries, each containing: + name: string + type: type name string + optional: boolean + default: an arbitrary primitive or encodable object + """ + raise NotImplementedError( + 'Function subclasses must implement getSignature().') + + def call(self, *args, **kwargs): + """Calls the function with the given positional and keyword arguments. + + Args: + *args: The positional arguments to pass to the function. + **kwargs: The named arguments to pass to the function. + + Returns: + A ComputedObject representing the called function. If the signature + specifies a recognized return type, the returned value will be cast + to that type. + """ + return self.apply(self.nameArgs(args, kwargs)) + + def apply(self, named_args): + """Calls the function with a dictionary of named arguments. + + Args: + named_args: A dictionary of named arguments to pass to the function. + + Returns: + A ComputedObject representing the called function. If the signature + specifies a recognized return type, the returned value will be cast + to that type. + """ + result = computedobject.ComputedObject(self, self.promoteArgs(named_args)) + return Function._promoter(result, self.getReturnType()) + + def promoteArgs(self, args): + """Promotes arguments to their types based on the function's signature. + + Verifies that all required arguments are provided and no unknown arguments + are present. + + Args: + args: A dictionary of keyword arguments to the function. + + Returns: + A dictionary of promoted arguments. + + Raises: + EEException: If unrecognized arguments are passed or required ones are + missing. + """ + specs = self.getSignature()['args'] + + # Promote all recognized args. 
+ promoted_args = {} + known = set() + for spec in specs: + name = spec['name'] + if name in args: + promoted_args[name] = Function._promoter(args[name], spec['type']) + elif not spec.get('optional'): + raise ee_exception.EEException( + 'Required argument (%s) missing to function: %s' % (name, self)) + known.add(name) + + # Check for unknown arguments. + unknown = set(args.keys()).difference(known) + if unknown: + raise ee_exception.EEException( + 'Unrecognized arguments %s to function: %s' % (unknown, self)) + + return promoted_args + + def nameArgs(self, args, extra_keyword_args=None): + """Converts a list of positional arguments to a map of keyword arguments. + + Uses the function's signature for argument names. Note that this does not + check whether the array contains enough arguments to satisfy the call. + + Args: + args: Positional arguments to the function. + extra_keyword_args: Optional named arguments to add. + + Returns: + Keyword arguments to the function. + + Raises: + EEException: If conflicting arguments or too many of them are supplied. + """ + specs = self.getSignature()['args'] + + # Handle positional arguments. + if len(specs) < len(args): + raise ee_exception.EEException( + 'Too many (%d) arguments to function: %s' % (len(args), self)) + named_args = dict([(spec['name'], value) + for spec, value in zip(specs, args)]) + + # Handle keyword arguments. + if extra_keyword_args: + for name in extra_keyword_args: + if name in named_args: + raise ee_exception.EEException( + 'Argument %s specified as both positional and ' + 'keyword to function: %s' % (name, self)) + named_args[name] = extra_keyword_args[name] + # Unrecognized arguments are checked in promoteArgs(). 
+ + return named_args + + def getReturnType(self): + return self.getSignature()['returns'] + + def serialize(self): + return serializer.toJSON(self) + + def __str__(self): + """Returns a user-readable docstring for this function.""" + DOCSTRING_WIDTH = 75 + signature = self.getSignature() + parts = [] + if 'description' in signature: + parts.append( + textwrap.fill(signature['description'], width=DOCSTRING_WIDTH)) + args = signature['args'] + if args: + parts.append('') + parts.append('Args:') + for arg in args: + name_part = ' ' + arg['name'] + if 'description' in arg: + name_part += ': ' + arg_header = name_part + arg['description'] + else: + arg_header = name_part + arg_doc = textwrap.fill(arg_header, + width=DOCSTRING_WIDTH - len(name_part), + subsequent_indent=' ' * 6) + parts.append(arg_doc) + return '\n'.join(parts) diff --git a/ee/geometry.py b/ee/geometry.py new file mode 100644 index 0000000..35108c5 --- /dev/null +++ b/ee/geometry.py @@ -0,0 +1,690 @@ +#!/usr/bin/env python +"""An object representing EE Geometries.""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +# pylint: disable=g-bad-import-order +import collections +import json +import numbers +import six + +from . import apifunction +from . import computedobject +from . import ee_exception +from . import ee_types +from . import serializer + + +# A sentinel value used to detect unspecified function parameters. +_UNSPECIFIED = object() + + +class Geometry(computedobject.ComputedObject): + """An Earth Engine geometry.""" + + _initialized = False + + def __init__(self, geo_json, opt_proj=None, opt_geodesic=None, + opt_evenOdd=None): + """Creates a geometry. + + Args: + geo_json: The GeoJSON object describing the geometry or a + computed object to be reinterpred as a Geometry. Supports + CRS specifications as per the GeoJSON spec, but only allows named + (rather than "linked" CRSs). 
If this includes a 'geodesic' field, + and opt_geodesic is not specified, it will be used as opt_geodesic. + opt_proj: An optional projection specification, either as an + ee.Projection, as a CRS ID code or as a WKT string. If specified, + overrides any CRS found in the geo_json parameter. If unspecified and + the geo_json does not declare a CRS, defaults to "EPSG:4326" + (x=longitude, y=latitude). + opt_geodesic: Whether line segments should be interpreted as spherical + geodesics. If false, indicates that line segments should be + interpreted as planar lines in the specified CRS. If absent, + defaults to true if the CRS is geographic (including the default + EPSG:4326), or to false if the CRS is projected. + opt_evenOdd: If true, polygon interiors will be determined by the even/odd + rule, where a point is inside if it crosses an odd number of edges to + reach a point at infinity. Otherwise polygons use the left-inside + rule, where interiors are on the left side of the shell's edges when + walking the vertices in the given order. If unspecified, defaults to + True. + Raises: + EEException: if the given geometry isn't valid. + """ + self.initialize() + + computed = (isinstance(geo_json, computedobject.ComputedObject) and + not (isinstance(geo_json, Geometry) and + geo_json._type is not None)) # pylint: disable=protected-access + options = opt_proj or opt_geodesic or opt_evenOdd + if computed: + if options: + raise ee_exception.EEException( + 'Setting the CRS or geodesic on a computed Geometry is not ' + 'supported. Use Geometry.transform().') + else: + super(Geometry, self).__init__( + geo_json.func, geo_json.args, geo_json.varName) + return + + # Below here we're working with a GeoJSON literal. + if isinstance(geo_json, Geometry): + geo_json = geo_json.encode() + + if not Geometry._isValidGeometry(geo_json): + raise ee_exception.EEException('Invalid GeoJSON geometry.') + + super(Geometry, self).__init__(None, None) + + # The type of the geometry. 
  def __getitem__(self, key):
    """Allows access to GeoJSON properties for backward-compatibility.

    Args:
      key: The name of the GeoJSON field to read, e.g. 'type' or
          'coordinates'.

    Returns:
      The corresponding value from this geometry's GeoJSON representation,
      as produced by toGeoJSON().
    """
    return self.toGeoJSON()[key]
+ proj: The projection of this geometry, or EPSG:4326 if unspecified. + *args: For convenience, varargs may be used when all arguments are + numbers. This allows creating EPSG:4326 points, e.g. + ee.Geometry.Point(lng, lat). + **kwargs: Keyword args that accept "lon" and "lat" for backward- + compatibility. + + Returns: + An ee.Geometry describing a point. + """ + init = Geometry._parseArgs('Point', 1, Geometry._GetSpecifiedArgs( + (coords, proj) + args, ('lon', 'lat'), **kwargs)) + if not isinstance(init, computedobject.ComputedObject): + xy = init['coordinates'] + if not isinstance(xy, (list, tuple)) or len(xy) != 2: + raise ee_exception.EEException( + 'The Geometry.Point constructor requires 2 coordinates.') + return Geometry(init) + + @staticmethod + def MultiPoint(coords=_UNSPECIFIED, proj=_UNSPECIFIED, *args): + """Constructs an ee.Geometry describing a MultiPoint. + + Args: + coords: A list of points, each in the GeoJSON 'coordinates' format of a + Point, or a list of the x,y coordinates in the given projection, or + an ee.Geometry describing a point. + proj: The projection of this geometry. If unspecified, the default is + the projection of the input ee.Geometry, or EPSG:4326 if there are + no ee.Geometry inputs. + *args: For convenience, varargs may be used when all arguments are + numbers. This allows creating EPSG:4326 MultiPoints given an even + number of arguments, e.g. + ee.Geometry.MultiPoint(aLng, aLat, bLng, bLat, ...). + + Returns: + An ee.Geometry describing a MultiPoint. + """ + all_args = Geometry._GetSpecifiedArgs((coords, proj) + args) + return Geometry(Geometry._parseArgs('MultiPoint', 2, all_args)) + + @staticmethod + def Rectangle(coords=_UNSPECIFIED, proj=_UNSPECIFIED, + geodesic=_UNSPECIFIED, maxError=_UNSPECIFIED, + evenOdd=_UNSPECIFIED, *args, **kwargs): + """Constructs an ee.Geometry describing a rectangular polygon. 
+ + Args: + coords: The minimum and maximum corners of the rectangle, as a list of + two points each in the format of GeoJSON 'Point' coordinates, or a + list of two ee.Geometry describing a point, or a list of four + numbers in the order xMin, yMin, xMax, yMax. + proj: The projection of this geometry. If unspecified, the default is the + projection of the input ee.Geometry, or EPSG:4326 if there are no + ee.Geometry inputs. + geodesic: If false, edges are straight in the projection. If true, edges + are curved to follow the shortest path on the surface of the Earth. + The default is the geodesic state of the inputs, or true if the + inputs are numbers. + maxError: Max error when input geometry must be reprojected to an + explicitly requested result projection or geodesic state. + evenOdd: If true, polygon interiors will be determined by the even/odd + rule, where a point is inside if it crosses an odd number of edges to + reach a point at infinity. Otherwise polygons use the left-inside + rule, where interiors are on the left side of the shell's edges when + walking the vertices in the given order. If unspecified, defaults to + True. + *args: For convenience, varargs may be used when all arguments are + numbers. This allows creating EPSG:4326 Polygons given exactly four + coordinates, e.g. + ee.Geometry.Rectangle(minLng, minLat, maxLng, maxLat). + **kwargs: Keyword args that accept "xlo", "ylo", "xhi" and "yhi" for + backward-compatibility. + + Returns: + An ee.Geometry describing a rectangular polygon. + """ + init = Geometry._parseArgs('Rectangle', 2, Geometry._GetSpecifiedArgs( + (coords, proj, geodesic, maxError, evenOdd) + args, + ('xlo', 'ylo', 'xhi', 'yhi'), **kwargs)) + if not isinstance(init, computedobject.ComputedObject): + # GeoJSON does not have a Rectangle type, so expand to a Polygon. 
+ xy = init['coordinates'] + if not isinstance(xy, (list, tuple)) or len(xy) != 2: + raise ee_exception.EEException( + 'The Geometry.Rectangle constructor requires 2 points or 4 ' + 'coordinates.') + x1 = xy[0][0] + y1 = xy[0][1] + x2 = xy[1][0] + y2 = xy[1][1] + init['coordinates'] = [[[x1, y2], [x1, y1], [x2, y1], [x2, y2]]] + init['type'] = 'Polygon' + return Geometry(init) + + @staticmethod + def LineString(coords=_UNSPECIFIED, proj=_UNSPECIFIED, + geodesic=_UNSPECIFIED, maxError=_UNSPECIFIED, + *args): + """Constructs an ee.Geometry describing a LineString. + + Args: + coords: A list of at least two points. May be a list of coordinates in + the GeoJSON 'LineString' format, a list of at least two ee.Geometry + describing a point, or a list of at least four numbers defining the + [x,y] coordinates of at least two points. + proj: The projection of this geometry. If unspecified, the default is the + projection of the input ee.Geometry, or EPSG:4326 if there are no + ee.Geometry inputs. + geodesic: If false, edges are straight in the projection. If true, edges + are curved to follow the shortest path on the surface of the Earth. + The default is the geodesic state of the inputs, or true if the + inputs are numbers. + maxError: Max error when input geometry must be reprojected to an + explicitly requested result projection or geodesic state. + *args: For convenience, varargs may be used when all arguments are + numbers. This allows creating geodesic EPSG:4326 LineStrings given + an even number of arguments, e.g. + ee.Geometry.LineString(aLng, aLat, bLng, bLat, ...). + + Returns: + An ee.Geometry describing a LineString. + """ + all_args = Geometry._GetSpecifiedArgs( + (coords, proj, geodesic, maxError) + args) + return Geometry(Geometry._parseArgs('LineString', 2, all_args)) + + @staticmethod + def LinearRing(coords=_UNSPECIFIED, proj=_UNSPECIFIED, + geodesic=_UNSPECIFIED, maxError=_UNSPECIFIED, + *args): + """Constructs an ee.Geometry describing a LinearRing. 
+
+    If the last point is not equal to the first, a duplicate of the first
+    point will be added at the end.
+
+    Args:
+      coords: A list of points in the ring. May be a list of coordinates in
+          the GeoJSON 'LinearRing' format, a list of at least three ee.Geometry
+          describing a point, or a list of at least six numbers defining the
+          [x,y] coordinates of at least three points.
+      proj: The projection of this geometry. If unspecified, the default is the
+          projection of the input ee.Geometry, or EPSG:4326 if there are no
+          ee.Geometry inputs.
+      geodesic: If false, edges are straight in the projection. If true, edges
+          are curved to follow the shortest path on the surface of the Earth.
+          The default is the geodesic state of the inputs, or true if the
+          inputs are numbers.
+      maxError: Max error when input geometry must be reprojected to an
+          explicitly requested result projection or geodesic state.
+      *args: For convenience, varargs may be used when all arguments are
+          numbers. This allows creating geodesic EPSG:4326 LinearRings given
+          an even number of arguments, e.g.
+          ee.Geometry.LinearRing(aLng, aLat, bLng, bLat, ...).
+
+    Returns:
+      An ee.Geometry describing a LinearRing.
+    """
+    all_args = Geometry._GetSpecifiedArgs(
+        (coords, proj, geodesic, maxError) + args)
+    return Geometry(Geometry._parseArgs('LinearRing', 2, all_args))
+
+  @staticmethod
+  def MultiLineString(coords=_UNSPECIFIED, proj=_UNSPECIFIED,
+                      geodesic=_UNSPECIFIED, maxError=_UNSPECIFIED,
+                      *args):
+    """Constructs an ee.Geometry describing a MultiLineString.
+
+    Create a GeoJSON MultiLineString from either a list of points, or an array
+    of lines (each an array of Points). If a list of points is specified,
+    only a single line is created.
+
+    Args:
+      coords: A list of linestrings. May be a list of coordinates in the
+          GeoJSON 'MultiLineString' format, a list of at least two ee.Geometry
+          describing a LineString, or a list of number defining a single
+          linestring.
+ proj: The projection of this geometry. If unspecified, the default is the + projection of the input ee.Geometry, or EPSG:4326 if there are no + ee.Geometry inputs. + geodesic: If false, edges are straight in the projection. If true, edges + are curved to follow the shortest path on the surface of the Earth. + The default is the geodesic state of the inputs, or true if the + inputs are numbers. + maxError: Max error when input geometry must be reprojected to an + explicitly requested result projection or geodesic state. + *args: For convenience, varargs may be used when all arguments are + numbers. This allows creating geodesic EPSG:4326 MultiLineStrings + with a single LineString, given an even number of arguments, e.g. + ee.Geometry.MultiLineString(aLng, aLat, bLng, bLat, ...). + + Returns: + An ee.Geometry describing a MultiLineString. + """ + all_args = Geometry._GetSpecifiedArgs( + (coords, proj, geodesic, maxError) + args) + return Geometry(Geometry._parseArgs('MultiLineString', 3, all_args)) + + @staticmethod + def Polygon(coords=_UNSPECIFIED, proj=_UNSPECIFIED, + geodesic=_UNSPECIFIED, maxError=_UNSPECIFIED, + evenOdd=_UNSPECIFIED, *args): + """Constructs an ee.Geometry describing a polygon. + + Args: + coords: A list of rings defining the boundaries of the polygon. May be a + list of coordinates in the GeoJSON 'Polygon' format, a list of + ee.Geometry describing a LinearRing, or a list of number defining a + single polygon boundary. + proj: The projection of this geometry. If unspecified, the default is the + projection of the input ee.Geometry, or EPSG:4326 if there are no + ee.Geometry inputs. + geodesic: If false, edges are straight in the projection. If true, edges + are curved to follow the shortest path on the surface of the Earth. + The default is the geodesic state of the inputs, or true if the + inputs are numbers. + maxError: Max error when input geometry must be reprojected to an + explicitly requested result projection or geodesic state. 
+ evenOdd: If true, polygon interiors will be determined by the even/odd + rule, where a point is inside if it crosses an odd number of edges to + reach a point at infinity. Otherwise polygons use the left-inside + rule, where interiors are on the left side of the shell's edges when + walking the vertices in the given order. If unspecified, defaults to + True. + *args: For convenience, varargs may be used when all arguments are + numbers. This allows creating geodesic EPSG:4326 Polygons with a + single LinearRing given an even number of arguments, e.g. + ee.Geometry.Polygon(aLng, aLat, bLng, bLat, ..., aLng, aLat). + + Returns: + An ee.Geometry describing a polygon. + """ + all_args = Geometry._GetSpecifiedArgs( + (coords, proj, geodesic, maxError, evenOdd) + args) + return Geometry(Geometry._parseArgs('Polygon', 3, all_args)) + + @staticmethod + def MultiPolygon(coords=_UNSPECIFIED, proj=_UNSPECIFIED, + geodesic=_UNSPECIFIED, maxError=_UNSPECIFIED, + evenOdd=_UNSPECIFIED, *args): + """Constructs an ee.Geometry describing a MultiPolygon. + + If created from points, only one polygon can be specified. + + Args: + coords: A list of polygons. May be a list of coordinates in the GeoJSON + 'MultiPolygon' format, a list of ee.Geometry describing a Polygon, + or a list of number defining a single polygon boundary. + proj: The projection of this geometry. If unspecified, the default is the + projection of the input ee.Geometry, or EPSG:4326 if there are no + ee.Geometry inputs. + geodesic: If false, edges are straight in the projection. If true, edges + are curved to follow the shortest path on the surface of the Earth. + The default is the geodesic state of the inputs, or true if the + inputs are numbers. + maxError: Max error when input geometry must be reprojected to an + explicitly requested result projection or geodesic state. 
+ evenOdd: If true, polygon interiors will be determined by the even/odd + rule, where a point is inside if it crosses an odd number of edges to + reach a point at infinity. Otherwise polygons use the left-inside + rule, where interiors are on the left side of the shell's edges when + walking the vertices in the given order. If unspecified, defaults to + True. + *args: For convenience, varargs may be used when all arguments are + numbers. This allows creating geodesic EPSG:4326 MultiPolygons with + a single Polygon with a single LinearRing given an even number of + arguments, e.g. + ee.Geometry.MultiPolygon(aLng, aLat, bLng, bLat, ..., aLng, aLat). + + Returns: + An ee.Geometry describing a MultiPolygon. + """ + all_args = Geometry._GetSpecifiedArgs( + (coords, proj, geodesic, maxError, evenOdd) + args) + return Geometry(Geometry._parseArgs('MultiPolygon', 4, all_args)) + + def encode(self, opt_encoder=None): # pylint: disable=unused-argument + """Returns a GeoJSON-compatible representation of the geometry.""" + if not getattr(self, '_type', None): + return super(Geometry, self).encode(opt_encoder) + + result = {'type': self._type} + if self._type == 'GeometryCollection': + result['geometries'] = self._geometries + else: + result['coordinates'] = self._coordinates + + if self._proj is not None: + result['crs'] = { + 'type': 'name', + 'properties': { + 'name': self._proj + } + } + + if self._geodesic is not None: + result['geodesic'] = self._geodesic + + if self._evenOdd is not None: + result['evenOdd'] = self._evenOdd + + return result + + def toGeoJSON(self): + """Returns a GeoJSON representation of the geometry.""" + if self.func: + raise ee_exception.EEException( + 'Can\'t convert a computed geometry to GeoJSON. ' + 'Use getInfo() instead.') + + return self.encode() + + def toGeoJSONString(self): + """Returns a GeoJSON string representation of the geometry.""" + if self.func: + raise ee_exception.EEException( + 'Can\'t convert a computed geometry to GeoJSON. 
' + 'Use getInfo() instead.') + return json.dumps(self.toGeoJSON()) + + def serialize(self): + """Returns the serialized representation of this object.""" + return serializer.toJSON(self) + + def __str__(self): + return 'ee.Geometry(%s)' % serializer.toReadableJSON(self) + + @staticmethod + def _isValidGeometry(geometry): + """Check if a geometry looks valid. + + Args: + geometry: The geometry to check. + + Returns: + True if the geometry looks valid. + """ + if not isinstance(geometry, dict): + return False + geometry_type = geometry.get('type') + if geometry_type == 'GeometryCollection': + geometries = geometry.get('geometries') + if not isinstance(geometries, (list, tuple)): + return False + for sub_geometry in geometries: + if not Geometry._isValidGeometry(sub_geometry): + return False + return True + else: + coords = geometry.get('coordinates') + nesting = Geometry._isValidCoordinates(coords) + return ((geometry_type == 'Point' and nesting == 1) or + (geometry_type == 'MultiPoint' and + (nesting == 2 or not coords)) or + (geometry_type == 'LineString' and nesting == 2) or + (geometry_type == 'LinearRing' and nesting == 2) or + (geometry_type == 'MultiLineString' and + (nesting == 3 or not coords)) or + (geometry_type == 'Polygon' and nesting == 3) or + (geometry_type == 'MultiPolygon' and + (nesting == 4 or not coords))) + + @staticmethod + def _isValidCoordinates(shape): + """Validate the coordinates of a geometry. + + Args: + shape: The coordinates to validate. + + Returns: + The number of nested arrays or -1 on error. + """ + if not isinstance(shape, collections.Iterable): + return -1 + + if shape and isinstance(shape[0], collections.Iterable): + count = Geometry._isValidCoordinates(shape[0]) + # If more than 1 ring or polygon, they should have the same nesting. + for i in range(1, len(shape)): + if Geometry._isValidCoordinates(shape[i]) != count: + return -1 + return count + 1 + else: + # Make sure the pts are all numbers. 
+ for i in shape: + if not isinstance(i, numbers.Number): + return -1 + + # Test that we have an even number of pts. + if len(shape) % 2 == 0: + return 1 + else: + return -1 + + @staticmethod + def _coordinatesToLine(coordinates): + """Create a line from a list of points. + + Args: + coordinates: The points to convert. Must be list of numbers of + even length, in the format [x1, y1, x2, y2, ...] + + Returns: + An array of pairs of points. + """ + if not (coordinates and isinstance(coordinates[0], numbers.Number)): + return coordinates + if len(coordinates) == 2: + return coordinates + if len(coordinates) % 2 != 0: + raise ee_exception.EEException('Invalid number of coordinates: %s' % + len(coordinates)) + + line = [] + for i in range(0, len(coordinates), 2): + pt = [coordinates[i], coordinates[i + 1]] + line.append(pt) + return line + + @staticmethod + def _parseArgs(ctor_name, depth, args): + """Parses arguments into a GeoJSON dictionary or a ComputedObject. + + Args: + ctor_name: The name of the constructor to use. + depth: The nesting depth at which points are found. + args: The array of values to test. + + Returns: + If the arguments are simple, a GeoJSON object describing the geometry. + Otherwise a ComputedObject calling the appropriate constructor. + """ + result = {} + keys = ['coordinates', 'crs', 'geodesic', 'maxError', 'evenOdd'] + + if all(ee_types.isNumber(i) for i in args): + # All numbers, so convert them to a true array. + result['coordinates'] = args + else: + # Parse parameters by position. + if len(args) > len(keys): + raise ee_exception.EEException( + 'Geometry constructor given extra arguments.') + for key, arg in zip(keys, args): + if arg is not None: + result[key] = arg + + # Standardize the coordinates and test if they are simple enough for + # client-side initialization. 
+ if (Geometry._hasServerValue(result['coordinates']) or + result.get('crs') is not None or + result.get('geodesic') is not None or + result.get('maxError') is not None): + # Some arguments cannot be handled in the client, so make a server call. + # Note we don't declare a default evenOdd value, so the server can infer + # a default based on the projection. + server_name = 'GeometryConstructors.' + ctor_name + return apifunction.ApiFunction.lookup(server_name).apply(result) + else: + # Everything can be handled here, so check the depth and init this object. + result['type'] = ctor_name + result['coordinates'] = Geometry._fixDepth(depth, result['coordinates']) + # Enable evenOdd by default for any kind of polygon. + if ('evenOdd' not in result and + ctor_name in ['Polygon', 'Rectangle', 'MultiPolygon']): + result['evenOdd'] = True + return result + + @staticmethod + def _hasServerValue(coordinates): + """Returns whether any of the coordinates are computed values or geometries. + + Computed items must be resolved by the server (evaluated in the case of + computed values, and processed to a single projection and geodesic state + in the case of geometries. + + Args: + coordinates: A nested list of ... of number coordinates. + + Returns: + Whether all coordinates are lists or numbers. + """ + if isinstance(coordinates, (list, tuple)): + return any(Geometry._hasServerValue(i) for i in coordinates) + else: + return isinstance(coordinates, computedobject.ComputedObject) + + @staticmethod + def _fixDepth(depth, coords): + """Fixes the depth of the given coordinates. + + Checks that each element has the expected depth as all other elements + at that depth. + + Args: + depth: The desired depth. + coords: The coordinates to fix. + + Returns: + The fixed coordinates, with the deepest elements at the requested depth. + + Raises: + EEException: if the depth is invalid and could not be fixed. 
+ """ + if depth < 1 or depth > 4: + raise ee_exception.EEException('Unexpected nesting level.') + + # Handle a list of numbers. + if all(isinstance(i, numbers.Number) for i in coords): + coords = Geometry._coordinatesToLine(coords) + + # Make sure the number of nesting levels is correct. + item = coords + count = 0 + while isinstance(item, (list, tuple)): + item = item[0] if item else None + count += 1 + while count < depth: + coords = [coords] + count += 1 + + if Geometry._isValidCoordinates(coords) != depth: + raise ee_exception.EEException('Invalid geometry.') + + # Empty arrays should not be wrapped. + item = coords + while isinstance(item, (list, tuple)) and len(item) == 1: + item = item[0] + if isinstance(item, (list, tuple)) and not item: + return [] + + return coords + + @staticmethod + def _GetSpecifiedArgs(args, keywords=(), **kwargs): + """Returns args, filtering out _UNSPECIFIED and checking for keywords.""" + if keywords: + args = list(args) + for i, keyword in enumerate(keywords): + if keyword in kwargs: + assert args[i] is _UNSPECIFIED + args[i] = kwargs[keyword] + return [i for i in args if i != _UNSPECIFIED] + + @staticmethod + def name(): + return 'Geometry' diff --git a/ee/image.py b/ee/image.py new file mode 100644 index 0000000..0a17b95 --- /dev/null +++ b/ee/image.py @@ -0,0 +1,409 @@ +#!/usr/bin/env python +"""A representation of an Earth Engine image. + +See: https://sites.google.com/site/earthengineapidocs for more details. +""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +# Disable lint messages caused by Python 3 compatibility changes. +# pylint: disable=superfluous-parens + +import json + +from . import apifunction +from . import computedobject +from . import data +from . import deprecation +from . import ee_exception +from . import ee_types +from . import element +from . import function +from . 
import geometry + + +class Image(element.Element): + """An object to represent an Earth Engine image.""" + + _initialized = False + + def __init__(self, args=None, version=None): + """Constructs an Earth Engine image. + + Args: + args: This constructor accepts a variety of arguments: + - A string - an EarthEngine asset id, + - A string and a number - an EarthEngine asset id and version, + - A number - creates a constant image, + - An EEArray - creates a constant array image, + - A list - creates an image out of each element of the array and + combines them into a single image, + - An ee.Image - returns the argument, + - Nothing - results in an empty transparent image. + version: An optional asset version. + + Raises: + EEException: if passed something other than the above. + """ + + self.initialize() + + if version is not None: + if ee_types.isString(args) and ee_types.isNumber(version): + # An ID and version. + super(Image, self).__init__( + apifunction.ApiFunction.lookup('Image.load'), + {'id': args, 'version': version}) + else: + raise ee_exception.EEException( + 'If version is specified, the arg to Image() must be a string. ' + 'Received: %s' % (args,)) + return + + if ee_types.isNumber(args): + # A constant image. + super(Image, self).__init__( + apifunction.ApiFunction.lookup('Image.constant'), {'value': args}) + elif ee_types.isString(args): + # An ID. + super(Image, self).__init__( + apifunction.ApiFunction.lookup('Image.load'), {'id': args}) + elif isinstance(args, (list, tuple)): + # Make an image out of each element. + image = Image.combine_([Image(i) for i in args]) + super(Image, self).__init__(image.func, image.args) + elif isinstance(args, computedobject.ComputedObject): + if args.name() == 'Array': + # A constant array image. + super(Image, self).__init__( + apifunction.ApiFunction.lookup('Image.constant'), {'value': args}) + else: + # A custom object to reinterpret as an Image. 
+ super(Image, self).__init__(args.func, args.args, args.varName) + elif args is None: + super(Image, self).__init__( + apifunction.ApiFunction.lookup('Image.mask'), + {'image': Image(0), 'mask': Image(0)}) + else: + raise ee_exception.EEException( + 'Unrecognized argument type to convert to an Image: %s' % args) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'Image', 'Image') + apifunction.ApiFunction.importApi(cls, 'Window', 'Image', 'focal_') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + def getInfo(self): + """Fetch and return information about this image. + + Returns: + The return contents vary but will include at least: + bands - Array containing metadata about the bands in the image, + properties - Dictionary containing the image's metadata properties. + """ + return super(Image, self).getInfo() + + def getMapId(self, vis_params=None): + """Fetch and return a map id and token, suitable for use in a Map overlay. + + Args: + vis_params: The visualization parameters. See ee.data.getMapId. + + Returns: + An object containing a mapid and access token, or an error message. + """ + request = (vis_params or {}).copy() + request['image'] = self.serialize() + response = data.getMapId(request) + response['image'] = self + return response + + def getDownloadURL(self, params=None): + """Get a download URL for this image. + + Args: + params: An object containing visualization options with the following + possible values: + name - a base name to use when constructing filenames. + bands - a description of the bands to download. Must be an array of + dictionaries, each with the following keys: + id - the name of the band, a string, required. + crs - an optional CRS string defining the band projection. 
+ crs_transform - an optional array of 6 numbers specifying an affine + transform from the specified CRS, in the order: xScale, yShearing, + xShearing, yScale, xTranslation and yTranslation. + dimensions - an optional array of two integers defining the width and + height to which the band is cropped. + scale - an optional number, specifying the scale in meters of the + band; ignored if crs and crs_transform is specified. + crs - a default CRS string to use for any bands that do not explicitly + specify one. + crs_transform - a default affine transform to use for any bands that do + not specify one, of the same format as the crs_transform of bands. + dimensions - default image cropping dimensions to use for any bands + that do not specify them. + scale - a default scale to use for any bands that do not specify one; + ignored if crs and crs_transform is specified. + region - a polygon specifying a region to download; ignored if crs + and crs_transform is specified. + + Returns: + A URL to download the specified image. + """ + request = params or {} + request['image'] = self.serialize() + return data.makeDownloadUrl(data.getDownloadId(request)) + + def getThumbURL(self, params=None): + """Get a thumbnail URL for this image. + + Args: + params: Parameters identical to getMapId, plus, optionally: + dimensions - (a number or pair of numbers in format WIDTHxHEIGHT) Max + dimensions of the thumbnail to render, in pixels. If only one number + is passed, it is used as the maximum, and the other dimension is + computed by proportional scaling. + region - (E,S,W,N or GeoJSON) Geospatial region of the image + to render. By default, the whole image. + format - (string) Either 'png' or 'jpg'. + + Returns: + A URL to download a thumbnail the specified image. + + Raises: + EEException: If the region parameter is not an array or GeoJSON object. 
+ """ + request = params or {} + request['image'] = self.serialize() + if 'region' in request: + if (isinstance(request['region'], dict) or + isinstance(request['region'], list)): + request['region'] = json.dumps(request['region']) + elif not isinstance(request['region'], str): + raise ee_exception.EEException( + 'The region parameter must be an array or a GeoJSON object.') + return data.makeThumbUrl(data.getThumbId(request)) + + # Deprecated spellings to match the JS library. + getDownloadUrl = deprecation.Deprecated('Use getDownloadURL().')( + getDownloadURL) + getThumbUrl = deprecation.Deprecated('Use getThumbURL().')(getThumbURL) + + ################################################### + # Static methods. + ################################################### + + @staticmethod + def rgb(r, g, b): + """Create a 3-band image. + + This creates a 3-band image specifically for visualization using + the first band in each image. + + Args: + r: The red image. + g: The green image. + b: The blue image. + + Returns: + The combined image. + """ + return Image.combine_([r, g, b], ['vis-red', 'vis-green', 'vis-blue']) + + @staticmethod + def cat(*args): + """Concatenate the given images together into a single image.""" + return Image.combine_(args) + + @staticmethod + def combine_(images, names=None): + """Combine all the bands from the given images into a single image. + + Args: + images: The images to be combined. + names: An array of names for the output bands. + + Returns: + The combined image. + """ + if not images: + raise ee_exception.EEException('Can\'t combine 0 images.') + + # Append all the bands. + result = Image(images[0]) + for image in images[1:]: + result = apifunction.ApiFunction.call_('Image.addBands', result, image) + + # Optionally, rename the bands of the result. + if names: + result = result.select(['.*'], names) + + return result + + def select(self, opt_selectors=None, opt_names=None, *args): + """Selects bands from an image. 
+
+    Can be called in one of two ways:
+      - Passed any number of non-list arguments. All of these will be
+        interpreted as band selectors. These can be band names, regexes, or
+        numeric indices. E.g.
+        selected = image.select('a', 'b', 3, 'd');
+      - Passed two lists. The first will be used as band selectors and the
+        second as new names for the selected bands. The number of new names
+        must match the number of selected bands. E.g.
+        selected = image.select(['a', 4], ['newA', 'newB']);
+
+    Args:
+      opt_selectors: An array of names, regexes or numeric indices specifying
+          the bands to select.
+      opt_names: An array of strings specifying the new names for the
+          selected bands.
+      *args: Selector elements as varargs.
+
+    Returns:
+      An image with the selected bands.
+    """
+    if opt_selectors is not None:
+      args = list(args)
+      if opt_names is not None:
+        args.insert(0, opt_names)
+      args.insert(0, opt_selectors)
+    algorithm_args = {
+        'input': self,
+        'bandSelectors': args[0] if args else [],
+    }
+    if args:
+      # If the user didn't pass an array as the first argument, assume
+      # that everything in the arguments array is actually a selector.
+      if (len(args) > 2 or
+          ee_types.isString(args[0]) or
+          ee_types.isNumber(args[0])):
+        # Varargs inputs.
+        selectors = args
+        # Verify we didn't get anything unexpected.
+        for selector in selectors:
+          if (not ee_types.isString(selector) and
+              not ee_types.isNumber(selector) and
+              not isinstance(selector, computedobject.ComputedObject)):
+            raise ee_exception.EEException(
+                'Illegal argument to select(): %s' % selector)
+        algorithm_args['bandSelectors'] = selectors
+      elif len(args) > 1:
+        algorithm_args['newNames'] = args[1]
+    return apifunction.ApiFunction.apply_('Image.select', algorithm_args)
+
+  def expression(self, expression, opt_map=None):
+    """Evaluates an arithmetic expression on an image or images.
+
+    The bands of the primary input image are available using the built-in
+    function b(), as b(0) or b('band_name').
+ + Variables in the expression are interpreted as additional image parameters + which must be supplied in opt_map. The bands of each such image can be + accessed like image.band_name or image[0]. + + Both b() and image[] allow multiple arguments, to specify multiple bands, + such as b(1, 'name', 3). Calling b() with no arguments, or using a variable + by itself, returns all bands of the image. + + Args: + expression: The expression to evaluate. + opt_map: An optional map of input images available by name. + + Returns: + The image computed by the provided expression. + """ + arg_name = 'DEFAULT_EXPRESSION_IMAGE' + all_vars = [arg_name] + args = {arg_name: self} + + # Add custom arguments, promoting them to Images manually. + if opt_map: + for name, value in opt_map.items(): + all_vars.append(name) + args[name] = Image(value) + + body = apifunction.ApiFunction.call_( + 'Image.parseExpression', expression, arg_name, all_vars) + + # Reinterpret the body call as an ee.Function by hand-generating the + # signature so the computed function knows its input and output types. + class ReinterpretedFunction(function.Function): + + def encode(self, encoder): + return body.encode(encoder) + + def getSignature(self): + return { + 'name': '', + 'args': [{'name': name, 'type': 'Image', 'optional': False} + for name in all_vars], + 'returns': 'Image' + } + + # Perform the call. + return ReinterpretedFunction().apply(args) + + def clip(self, clip_geometry): + """Clips an image to a Geometry or Feature. + + The output bands correspond exactly the input bands, except data not + covered by the geometry is masked. The output image retains the + metadata of the input image. + + Use clipToCollection to clip an image to a FeatureCollection. + + Args: + clip_geometry: The Geometry or Feature to clip to. + + Returns: + The clipped image. + """ + try: + # Need to manually promote GeoJSON, because the signature does not + # specify the type so auto promotion won't work. 
+      clip_geometry = geometry.Geometry(clip_geometry)
+    except ee_exception.EEException:
+      pass  # Not an ee.Geometry or GeoJSON. Just pass it along.
+    return apifunction.ApiFunction.call_('Image.clip', self, clip_geometry)
+
+  def rename(self, names, *args):
+    """Rename the bands of an image.
+
+    Can be called with either a list of strings or any number of strings.
+
+    Args:
+      names: An array of strings specifying the new names for the
+          bands. Must exactly match the number of bands in the image.
+      *args: Band names as varargs.
+
+    Returns:
+      An image with the renamed bands.
+    """
+    if args:
+      # Handle varargs; everything else we let the server handle.
+      args = list(args)
+      args.insert(0, names)
+      names = args
+
+    algorithm_args = {
+        'input': self,
+        'names': names
+    }
+    return apifunction.ApiFunction.apply_('Image.rename', algorithm_args)
+
+  @staticmethod
+  def name():
+    return 'Image'
diff --git a/ee/imagecollection.py b/ee/imagecollection.py
new file mode 100644
index 0000000..9fca6e9
--- /dev/null
+++ b/ee/imagecollection.py
@@ -0,0 +1,121 @@
+#!/usr/bin/env python
+"""Representation for an Earth Engine ImageCollection."""
+
+
+
+# Using lowercase function naming to match the JavaScript names.
+# pylint: disable=g-bad-name
+
+from . import apifunction
+from . import collection
+from . import computedobject
+from . import ee_exception
+from . import ee_list
+from . import ee_types
+from . import image
+
+
+class ImageCollection(collection.Collection):
+  """Representation for an Earth Engine ImageCollection."""
+
+  _initialized = False
+
+  def __init__(self, args):
+    """ImageCollection constructor.
+
+    Args:
+      args: ImageCollections can be constructed from the following arguments:
+        1) A string: assumed to be the name of a collection,
+        2) An array of images, or anything that can be used to construct an
+           image.
+        3) A single image.
+        4) A computed object - reinterpreted as a collection.
+
+    Raises:
+      EEException: if passed something other than the above.
+ """ + self.initialize() + + # Wrap single images in an array. + if isinstance(args, image.Image): + args = [args] + + if ee_types.isString(args): + # An ID. + super(ImageCollection, self).__init__( + apifunction.ApiFunction.lookup('ImageCollection.load'), {'id': args}) + elif isinstance(args, (list, tuple)): + # A list of images. + super(ImageCollection, self).__init__( + apifunction.ApiFunction.lookup('ImageCollection.fromImages'), { + 'images': [image.Image(i) for i in args] + }) + elif isinstance(args, ee_list.List): + # A computed list of images. + super(ImageCollection, self).__init__( + apifunction.ApiFunction.lookup('ImageCollection.fromImages'), { + 'images': args + }) + elif isinstance(args, computedobject.ComputedObject): + # A custom object to reinterpret as a ImageCollection. + super(ImageCollection, self).__init__(args.func, args.args, args.varName) + else: + raise ee_exception.EEException( + 'Unrecognized argument type to convert to a ImageCollection: %s' % + args) + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + super(ImageCollection, cls).initialize() + apifunction.ApiFunction.importApi( + cls, 'ImageCollection', 'ImageCollection') + apifunction.ApiFunction.importApi( + cls, 'reduce', 'ImageCollection') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False + + def getMapId(self, vis_params=None): + """Fetch and return a MapID. + + This mosaics the collection to a single image and return a mapid suitable + for building a Google Maps overlay. + + Args: + vis_params: The visualization parameters. + + Returns: + A mapid and token. + """ + mosaic = apifunction.ApiFunction.call_('ImageCollection.mosaic', self) + return mosaic.getMapId(vis_params) + + def select(self, selectors, opt_names=None, *args): + """Select bands from each image in a collection. 
+ + Args: + selectors: An array of names, regexes or numeric indices specifying + the bands to select. + opt_names: An array of strings specifying the new names for the + selected bands. If supplied, the length must match the number + of bands selected. + *args: Selector elements as varargs. + + Returns: + The image collection with selected bands. + """ + return self.map(lambda img: img.select(selectors, opt_names, *args)) + + @staticmethod + def name(): + return 'ImageCollection' + + @staticmethod + def elementType(): + return image.Image diff --git a/ee/mapclient.py b/ee/mapclient.py new file mode 100644 index 0000000..fc69965 --- /dev/null +++ b/ee/mapclient.py @@ -0,0 +1,504 @@ +#!/usr/bin/env python +"""A slippy map GUI. + +Implements a tiled slippy map using Tk canvas. Displays map tiles using +whatever projection the tiles are in and only knows about tile coordinates, +(as opposed to geospatial coordinates.) This assumes that the tile-space is +organized as a power-of-two pyramid, with the origin in the upper left corner. +This currently has several spots that are hard-coded for 256x256 tiles, even +though MapOverlay tries to track this. + +Supports mouse-based pan and zoom as well as tile upsampling while waiting +for new tiles to load. The map to display is specified by a MapOverlay, and +added to the GUI on creation or manually using addOverlay() + gui = MapClient(MakeOverlay(mapid)) + +Tiles are referenced using a key of (level, x, y) throughout. + +Several of the functions are named to match the Google Maps Javascript API, +and therefore violate style guidelines. +""" + + + +# TODO(user): +# 1) Add a zoom bar. +# 2) When the move() is happening inside the Drag function, it'd be +# a good idea to use a semaphore to keep new tiles from being added +# and subsequently moved. 
+ +import collections +import cStringIO +import functools +import math +import Queue +import sys +import threading +import urllib2 +import six + +# check if the Python imaging libraries used by the mapclient module are +# installed +try: + import ImageTk # pylint: disable=g-import-not-at-top + import Image # pylint: disable=g-import-not-at-top +except ImportError: + # pylint: disable=superfluous-parens + print(""" + ERROR: A Python library (PIL) used by the Earth Engine API mapclient module + was not found. Information on PIL can be found at: + http://pypi.python.org/pypi/PIL + """) + raise + +try: + import Tkinter # pylint: disable=g-import-not-at-top +except ImportError: + # pylint: disable=superfluous-parens + print(""" + ERROR: A Python library (Tkinter) used by the Earth Engine API mapclient + module was not found. Instructions for installing Tkinter can be found at: + http://tkinter.unpythonic.net/wiki/How_to_install_Tkinter + """) + raise + +# The default URL to fetch tiles from. We could pull this from the EE library, +# however this doesn't have any other dependencies on that yet, so let's not. +BASE_URL = 'https://earthengine.googleapis.com' + +# This is a URL pattern for creating an overlay from the google maps base map. +# The z, x and y arguments at the end correspond to level, x, y here. +DEFAULT_MAP_URL_PATTERN = ('http://mt1.google.com/vt/lyrs=m@176000000&hl=en&' + 'src=app&z=%d&x=%d&y=%d') + + +class MapClient(threading.Thread): + """A simple discrete zoom level map viewer.""" + + def __init__(self, opt_overlay=None, opt_width=1024, opt_height=768): + """Initialize the MapClient UI. + + Args: + opt_overlay: A mapoverlay to display. If not specified, the default + Google Maps basemap is used. + opt_width: The default width of the frame to construct. + opt_height: The default height of the frame to construct. + """ + threading.Thread.__init__(self) + self.ready = False # All initialization is done. 
+    self.tiles = {}  # The cached stack of images at each grid cell.
+    self.tktiles = {}  # The cached PhotoImage at each grid cell.
+    self.level = 2  # Starting zoom level
+    self.origin_x = None  # The map origin x offset at the current level.
+    self.origin_y = None  # The map origin y offset at the current level.
+    self.parent = None  # A handle to the top level Tk widget.
+    self.frame = None  # A handle to the Tk frame.
+    self.canvas = None  # A handle to the Tk canvas
+    self.width = opt_width
+    self.height = opt_height
+    self.anchor_x = None  # Drag anchor.
+    self.anchor_y = None  # Drag anchor.
+
+    # Map origin offsets; start at the center of the map.
+    self.origin_x = (-(2 ** self.level) * 128) + self.width / 2
+    self.origin_y = (-(2 ** self.level) * 128) + self.height / 2
+
+    if not opt_overlay:
+      # Default to a google maps basemap
+      opt_overlay = MapOverlay(DEFAULT_MAP_URL_PATTERN)
+
+    # The array of overlays are displayed as last on top.
+    self.overlays = [opt_overlay]
+    self.start()
+
+  def run(self):
+    """Set up the user interface.
+
+    Runs on the dedicated UI thread (this class extends threading.Thread);
+    builds the Tk widget tree, wires up mouse/keyboard handlers, and enters
+    the Tk main loop, which blocks until the window is destroyed.
+    """
+    width = self.width
+    height = self.height
+
+    # Build the UI
+    self.parent = Tkinter.Tk()
+    self.frame = frame = Tkinter.Frame(self.parent, width=width, height=height)
+    frame.pack(fill=Tkinter.BOTH, expand=Tkinter.YES)
+    self.canvas = canvas = Tkinter.Canvas(frame,
+                                          width=self.GetFrameSize()[0],
+                                          height=self.GetFrameSize()[1])
+
+    canvas.pack(side=Tkinter.TOP, fill=Tkinter.BOTH, expand=Tkinter.YES)
+    canvas.create_rectangle(0, 0, self.GetMapSize()[0], self.GetMapSize()[1],
+                            fill='#888888')
+
+    # NOTE(review): the Tk event patterns below were lost in transit (the
+    # angle-bracketed strings were stripped); restored to the standard Tk
+    # event names so the handlers actually fire.
+    canvas.bind('<ButtonPress-1>', self.ClickHandler)
+    canvas.bind('<ButtonRelease-1>', self.ReleaseHandler)
+    # Button-4 and Button-5 are scroll wheel up/down events.
+    canvas.bind('<Button-4>', functools.partial(self.Zoom, direction=1))
+    canvas.bind('<Button-5>', functools.partial(self.Zoom, direction=-1))
+    canvas.bind('<Double-Button-1>', functools.partial(self.Zoom, direction=1))
+    frame.bind('<Configure>', self.ResizeHandler)
+    frame.bind_all('<Key>', self.KeypressHandler)
+
+    def SetReady():
+      self.ready = True
+
+    # Defer the ready flag until Tk has finished processing the initial
+    # events, so LoadTiles() is a no-op before the canvas exists.
+    self.parent.after_idle(SetReady)
+    self.parent.mainloop()
+
+  def addOverlay(self, overlay):  # pylint: disable=g-bad-name
+    """Add an overlay to the map."""
+    self.overlays.append(overlay)
+    self.LoadTiles()
+
+  def GetFrameSize(self):
+    """Return the (width, height) of the Tk frame, or the defaults pre-UI."""
+    if self.frame:
+      return (int(self.frame.cget('width')), int(self.frame.cget('height')))
+    else:
+      return (self.width, self.height)
+
+  def GetMapSize(self):
+    """Return the (width, height) of the canvas, or the defaults pre-UI."""
+    if self.frame:
+      return (int(self.canvas.cget('width')), int(self.canvas.cget('height')))
+    else:
+      return (self.width, self.height)
+
+  def GetViewport(self):
+    """Return the visible portion of the map as [xlo, ylo, xhi, yhi]."""
+    width, height = self.GetMapSize()
+    # pylint: disable=invalid-unary-operand-type
+    return [-self.origin_x, -self.origin_y,
+            -self.origin_x + width, -self.origin_y + height]
+
+  def LoadTiles(self):
+    """Refresh the entire map."""
+    # Start with the overlay on top.
+    if not self.ready:
+      return
+    for i, overlay in reversed(list(enumerate(self.overlays))):
+      tile_list = overlay.CalcTiles(self.level, self.GetViewport())
+      for key in tile_list:
+        overlay.getTile(key, functools.partial(
+            self.AddTile, key=key, overlay=overlay, layer=i))
+
+  def Flush(self):
+    """Empty out all the image fetching queues."""
+    for overlay in self.overlays:
+      overlay.Flush()
+
+  def CompositeTiles(self, key):
+    """Composite together all the tiles in this cell into a single image."""
+    composite = None
+    for layer in sorted(self.tiles[key]):
+      image = self.tiles[key][layer]
+      if not composite:
+        composite = image.copy()
+      else:
+        # Use the image itself as the paste mask so its alpha is honored.
+        composite.paste(image, (0, 0), image)
+    return composite
+
+  def AddTile(self, image, key, overlay, layer):
+    """Add a tile to the map.
+ + This keeps track of the tiles for each overlay in each grid cell. + As new tiles come in, all the tiles in a grid cell are composited together + into a new tile and any old tile for that spot is replaced. + + Args: + image: The image tile to display. + key: A tuple containing the key of the image (level, x, y) + overlay: The overlay this tile belongs to. + layer: The layer number this overlay corresponds to. Only used + for caching purposes. + """ + # TODO(user): This function is called from multiple threads, and + # could use some synchronization, but it seems to work. + if self.level == key[0]: # Don't add late tiles from another level. + self.tiles[key] = self.tiles.get(key, {}) + self.tiles[key][layer] = image + + newtile = self.CompositeTiles(key) + if key not in self.tktiles: + newtile = ImageTk.PhotoImage(newtile) + xpos = key[1] * overlay.TILE_WIDTH + self.origin_x + ypos = key[2] * overlay.TILE_HEIGHT + self.origin_y + self.canvas.create_image( + xpos, ypos, anchor=Tkinter.NW, image=newtile, tags=['tile', key]) + self.tktiles[key] = newtile # Hang on to the new tile. + else: + self.tktiles[key].paste(newtile) + + def Zoom(self, event, direction): + """Zoom the map. + + Args: + event: The event that caused this zoom request. + direction: The direction to zoom. +1 for higher zoom, -1 for lower. + """ + if self.level + direction >= 0: + # Discard everything cached in the MapClient, and flush the fetch queues. 
+ self.Flush() + self.canvas.delete(Tkinter.ALL) + self.tiles = {} + self.tktiles = {} + + if direction > 0: + self.origin_x = self.origin_x * 2 - event.x + self.origin_y = self.origin_y * 2 - event.y + else: + self.origin_x = (self.origin_x + event.x) / 2 + self.origin_y = (self.origin_y + event.y) / 2 + + self.level += direction + self.LoadTiles() + + def ClickHandler(self, event): + """Records the anchor location and sets drag handler.""" + self.anchor_x = event.x + self.anchor_y = event.y + self.canvas.bind('', self.DragHandler) + + def DragHandler(self, event): + """Updates the map position and anchor position.""" + dx = event.x - self.anchor_x + dy = event.y - self.anchor_y + if dx or dy: + self.canvas.move(Tkinter.ALL, dx, dy) + self.origin_x += dx + self.origin_y += dy + self.anchor_x = event.x + self.anchor_y = event.y + + def ReleaseHandler(self, unused_event): + """Unbind drag handler and redraw.""" + self.canvas.unbind('') + self.LoadTiles() + + def ResizeHandler(self, event): + """Handle resize events.""" + # There's a 2 pixel border. + self.canvas.config(width=event.width - 2, height=event.height - 2) + self.LoadTiles() + + def CenterMap(self, lon, lat, opt_zoom=None): + """Center the map at the given lon, lat and zoom level.""" + if self.canvas: + self.Flush() + self.canvas.delete(Tkinter.ALL) + self.tiles = {} + self.tktiles = {} + width, height = self.GetMapSize() + if opt_zoom is not None: + self.level = opt_zoom + + # From maps/api/javascript/geometry/mercator_projection.js + mercator_range = 256.0 + scale = 2 ** self.level + origin_x = (mercator_range / 2.0) * scale + origin_y = (mercator_range / 2.0) * scale + pixels_per_lon_degree = (mercator_range / 360.0) * scale + pixels_per_lon_radian = (mercator_range / (2 * math.pi)) * scale + + x = origin_x + (lon * pixels_per_lon_degree) + siny = math.sin(lat * math.pi / 180.0) + # Prevent sin() overflow. 
+ e = 1 - 1e-15 + if siny > e: + siny = e + elif siny < -e: + siny = -e + y = origin_y + (0.5 * math.log((1 + siny) / (1 - siny)) * + -pixels_per_lon_radian) + + self.origin_x = -x + width / 2 + self.origin_y = -y + height / 2 + self.LoadTiles() + + def KeypressHandler(self, event): + """Handle keypress events.""" + if event.char == 'q' or event.char == 'Q': + self.parent.destroy() + + +class MapOverlay(object): + """A class representing a map overlay.""" + + TILE_WIDTH = 256 + TILE_HEIGHT = 256 + MAX_CACHE = 1000 # The maximum number of tiles to cache. + _images = {} # The tile cache, keyed by (url, level, x, y). + _lru_keys = [] # Keys to the cached tiles, for cache ejection. + + def __init__(self, url): + """Initialize the MapOverlay.""" + self.url = url + # Make 10 workers. + self.queue = Queue.Queue() + self.fetchers = [MapOverlay.TileFetcher(self) for unused_x in range(10)] + self.constant = None + + def getTile(self, key, callback): # pylint: disable=g-bad-name + """Get the requested tile. + + If the requested tile is already cached, it's returned (sent to the + callback) directly. If it's not cached, a check is made to see if + a lower-res version is cached, and if so that's interpolated up, before + a request for the actual tile is made. + + Args: + key: The key of the tile to fetch. + callback: The callback to call when the tile is available. The callback + may be called more than once if a low-res version is available. + """ + result = self.GetCachedTile(key) + if result: + callback(result) + else: + # Interpolate what we have and put the key on the fetch queue. + self.queue.put((key, callback)) + self.Interpolate(key, callback) + + def Flush(self): + """Empty the tile queue.""" + while not self.queue.empty(): + self.queue.get_nowait() + + def CalcTiles(self, level, bbox): + """Calculate which tiles to load based on the visible viewport. + + Args: + level: The level at which to calculate the required tiles. 
+ bbox: The viewport coordinates as a tuple (xlo, ylo, xhi, yhi]) + + Returns: + The list of tile keys to fill the given viewport. + """ + tile_list = [] + for y in xrange(int(bbox[1] / MapOverlay.TILE_HEIGHT), + int(bbox[3] / MapOverlay.TILE_HEIGHT + 1)): + for x in xrange(int(bbox[0] / MapOverlay.TILE_WIDTH), + int(bbox[2] / MapOverlay.TILE_WIDTH + 1)): + tile_list.append((level, x, y)) + return tile_list + + def Interpolate(self, key, callback): + """Upsample a lower res tile if one is available. + + Args: + key: The tile key to upsample. + callback: The callback to call when the tile is ready. + """ + level, x, y = key + delta = 1 + result = None + while level - delta > 0 and result is None: + prevkey = (level - delta, x / 2, y / 2) + result = self.GetCachedTile(prevkey) + if not result: + (_, x, y) = prevkey + delta += 1 + + if result: + px = (key[1] % 2 ** delta) * MapOverlay.TILE_WIDTH / 2 ** delta + py = (key[2] % 2 ** delta) * MapOverlay.TILE_HEIGHT / 2 ** delta + image = (result.crop([px, py, + px + MapOverlay.TILE_WIDTH / 2 ** delta, + py + MapOverlay.TILE_HEIGHT / 2 ** delta]) + .resize((MapOverlay.TILE_WIDTH, MapOverlay.TILE_HEIGHT))) + callback(image) + + def PutCacheTile(self, key, image): + """Insert a new tile in the cache and eject old ones if it's too big.""" + cache_key = (self.url,) + key + MapOverlay._images[cache_key] = image + MapOverlay._lru_keys.append(cache_key) + while len(MapOverlay._lru_keys) > MapOverlay.MAX_CACHE: + remove_key = MapOverlay._lru_keys.pop(0) + try: + MapOverlay._images.pop(remove_key) + except KeyError: + # Just in case someone removed this before we did. 
+ pass + + def GetCachedTile(self, key): + """Returns the specified tile if it's in the cache.""" + cache_key = (self.url,) + key + return MapOverlay._images.get(cache_key, None) + + class TileFetcher(threading.Thread): + """A threaded URL fetcher.""" + + def __init__(self, overlay): + threading.Thread.__init__(self) + self.overlay = overlay + self.setDaemon(True) + self.start() + + def run(self): + """Pull URLs off the ovelay's queue and call the callback when done.""" + while True: + (key, callback) = self.overlay.queue.get() + # Check one more time that we don't have this yet. + if not self.overlay.GetCachedTile(key): + (level, x, y) = key + if x >= 0 and y >= 0 and x <= 2 ** level-1 and y <= 2 ** level-1: + url = self.overlay.url % key + try: + data = urllib2.urlopen(url).read() + except urllib2.HTTPError as e: + print() >> sys.stderr, e + else: + # PhotoImage can't handle alpha on LA images. + image = Image.open(cStringIO.StringIO(data)).convert('RGBA') + callback(image) + self.overlay.PutCacheTile(key, image) + + +def MakeOverlay(mapid, baseurl=BASE_URL): + """Create an overlay from a mapid.""" + url = (baseurl + '/map/' + mapid['mapid'] + '/%d/%d/%d?token=' + + mapid['token']) + return MapOverlay(url) + + +# +# A global MapClient instance for addToMap convenience. +# +map_instance = None + + +# pylint: disable=g-bad-name +def addToMap(eeobject, vis_params=None, *unused_args): + """Adds a layer to the default map instance. + + Args: + eeobject: the object to add to the map. + vis_params: a dictionary of visualization parameters. See + ee.data.getMapId(). + *unused_args: unused arguments, left for compatibility with the JS API. + + This call exists to be an equivalent to the playground addToMap() call. + It uses a global MapInstance to hang on to "the map". If the MapInstance + isn't initializd, this creates a new one. + """ + # Flatten any lists to comma separated strings. 
+ if vis_params: + vis_params = dict(vis_params) + for key in vis_params: + item = vis_params.get(key) + if (isinstance(item, collections.Iterable) and + not isinstance(item, six.string_types)): + vis_params[key] = ','.join([str(x) for x in item]) + + overlay = MakeOverlay(eeobject.getMapId(vis_params)) + + global map_instance + if not map_instance: + map_instance = MapClient() + map_instance.addOverlay(overlay) + + +def centerMap(lng, lat, zoom): # pylint: disable=g-bad-name + """Center the default map instance at the given lat, lon and zoom values.""" + global map_instance + if not map_instance: + map_instance = MapClient() + + map_instance.CenterMap(lng, lat, zoom) diff --git a/ee/oauth.py b/ee/oauth.py new file mode 100644 index 0000000..ff03fa5 --- /dev/null +++ b/ee/oauth.py @@ -0,0 +1,97 @@ +#!/usr/bin/env python +"""Earth Engine OAuth2 helper functions for generating client tokens. + +Typical use-case consists of: +1. Calling 'get_authorization_url' +2. Using a browser to access the output URL and copy the generated OAuth2 code +3. Calling 'request_token' to request a token using that code and the OAuth API +4. Calling 'write_token' to save the token at the path given by + 'get_credentials_path' +""" + + +import errno +import json +import os + +# pylint: disable=g-import-not-at-top +try: + # Python 3.x + import urllib + from urllib.parse import urlencode + from urllib.error import HTTPError +except ImportError: + # Python 2.x + import urllib + from urllib import urlencode + import urllib2 + from urllib2 import HTTPError + +CLIENT_ID = ('517222506229-vsmmajv00ul0bs7p89v5m89qs8eb9359.' 
+ 'apps.googleusercontent.com') +CLIENT_SECRET = 'RUP0RZ6e0pPhDzsqIJ7KlNd1' +REDIRECT_URI = 'urn:ietf:wg:oauth:2.0:oob' # Prompts user to copy-paste code +SCOPE = ('https://www.googleapis.com/auth/earthengine' + ' https://www.googleapis.com/auth/devstorage.full_control') +TOKEN_REQ_URL = 'https://accounts.google.com/o/oauth2/token' + + +def get_credentials_path(): + return os.path.expanduser('~/.config/earthengine/credentials') + + +def get_authorization_url(): + """Returns a URL to generate an auth code.""" + + return 'https://accounts.google.com/o/oauth2/auth?' + urlencode({ + 'client_id': CLIENT_ID, + 'scope': SCOPE, + 'redirect_uri': REDIRECT_URI, + 'response_type': 'code', + }) + + +def request_token(auth_code): + """Uses authorization code to request tokens.""" + + request_args = { + 'code': auth_code, + 'client_id': CLIENT_ID, + 'client_secret': CLIENT_SECRET, + 'redirect_uri': REDIRECT_URI, + 'grant_type': 'authorization_code', + } + + refresh_token = None + + try: + try: + # Python 2.x + response = urllib2.urlopen(TOKEN_REQ_URL, + urllib.urlencode(request_args).encode() + ).read().decode() + except NameError: + # Python 3.x + response = urllib.request.urlopen(TOKEN_REQ_URL, + urlencode(request_args).encode() + ).read().decode() + refresh_token = json.loads(response)['refresh_token'] + except HTTPError as e: + raise Exception('Problem requesting tokens. Please try again. 
%s %s' % + (e, e.read())) + + return refresh_token + + +def write_token(refresh_token): + """Attempts to write the passed token to the given user directory.""" + + credentials_path = get_credentials_path() + dirname = os.path.dirname(credentials_path) + try: + os.makedirs(dirname) + except OSError as e: + if e.errno != errno.EEXIST: + raise Exception('Error creating directory %s: %s' % (dirname, e)) + + json.dump({'refresh_token': refresh_token}, open(credentials_path, 'w')) diff --git a/ee/serializer.py b/ee/serializer.py new file mode 100644 index 0000000..684370e --- /dev/null +++ b/ee/serializer.py @@ -0,0 +1,184 @@ +#!/usr/bin/env python +"""A serializer that encodes EE object trees as JSON DAGs.""" + + + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + +# pylint: disable=g-bad-import-order +import datetime +import hashlib +import json +import math +import numbers +import six + +from . import ee_exception +from . import encodable + + +# The datetime for the beginning of the Unix epoch. +_EPOCH_DATETIME = datetime.datetime.utcfromtimestamp(0) + + +def DatetimeToMicroseconds(date): + """Convert a datetime to a timestamp, microseconds since the epoch.""" + td = (date - _EPOCH_DATETIME) + microseconds = td.microseconds + (td.seconds + td.days * 24 * 3600) * 1e6 + return math.floor(microseconds) + + +class Serializer(object): + """A serializer for EE object trees.""" + + def __init__(self, is_compound=True): + """Constructs a serializer. + + Args: + is_compound: Whether the encoding should factor out shared subtrees. + """ + + # Whether the encoding should factor out shared subtrees. + self._is_compound = bool(is_compound) + + # A list of shared subtrees as [name, value] pairs. + self._scope = [] + + # A lookup table from object hash to subtree names as stored in self._scope + self._encoded = {} + + # A lookup table from object ID as retrieved by id() to md5 hash values. 
+ self._hashcache = {} + + def _encode(self, obj): + """Encodes a top level object in the EE API v2 (DAG) format. + + Args: + obj: The object to encode. + + Returns: + An encoded object ready for JSON serialization. + """ + value = self._encodeValue(obj) + if self._is_compound: + if (isinstance(value, dict) and + value['type'] == 'ValueRef' and + len(self._scope) == 1): + # Just one value. No need for complex structure. + value = self._scope[0][1] + else: + # Wrap the scopes and final value with a CompoundValue. + value = { + 'type': 'CompoundValue', + 'scope': self._scope, + 'value': value + } + # Clear state in case of future encoding. + self._scope = [] + self._encoded = {} + self._hashcache = {} + return value + + def _encodeValue(self, obj): + """Encodes a subtree as a Value in the EE API v2 (DAG) format. + + If _is_compound is True, this will fill the _scope and _encoded properties. + + Args: + obj: The object to encode. + + Returns: + An encoded object. + """ + obj_id = id(obj) + hashval = self._hashcache.get(obj_id) + encoded = self._encoded.get(hashval, None) + if self._is_compound and encoded: + # Already encoded objects are encoded as ValueRefs and returned directly. + return { + 'type': 'ValueRef', + 'value': encoded + } + elif obj is None or isinstance( + obj, (bool, numbers.Number, six.string_types)): + # Primitives are encoded as is and not saved in the scope. + return obj + elif isinstance(obj, datetime.datetime): + # A raw date slipped through. Wrap it. Calling ee.Date from here would + # cause a circular dependency, so we encode it manually. + return { + 'type': 'Invocation', + 'functionName': 'Date', + 'arguments': {'value': DatetimeToMicroseconds(obj) / 1e3} + } + elif isinstance(obj, encodable.Encodable): + # Some objects know how to encode themselves. 
+ result = obj.encode(self._encodeValue) + if (not isinstance(result, (list, tuple)) and + (not isinstance(result, (dict)) or result['type'] == 'ArgumentRef')): + # Optimization: simple enough that adding it to the scope is probably + # not worth it. + return result + elif isinstance(obj, (list, tuple)): + # Lists are encoded recursively. + result = [self._encodeValue(i) for i in obj] + elif isinstance(obj, dict): + # Dictionary are encoded recursively and wrapped in a type specifier. + result = { + 'type': 'Dictionary', + 'value': dict([(key, self._encodeValue(value)) + for key, value in obj.items()]) + } + else: + raise ee_exception.EEException('Can\'t encode object: %s' % obj) + + if self._is_compound: + # Save the new object and return a ValueRef. + hashval = hashlib.md5(json.dumps(result).encode()).digest() + self._hashcache[obj_id] = hashval + name = self._encoded.get(hashval, None) + if not name: + name = str(len(self._scope)) + self._scope.append((name, result)) + self._encoded[hashval] = name + return { + 'type': 'ValueRef', + 'value': name + } + else: + return result + + +def encode(obj, is_compound=True): + """Serialize an object to a JSON-compatible structure for API calls. + + Args: + obj: The object to serialize. + is_compound: Whether the encoding should factor out shared subtrees. + + Returns: + A JSON-compatible structure representing the input. + """ + serializer = Serializer(is_compound) + return serializer._encode(obj) # pylint: disable=protected-access + + +def toJSON(obj, opt_pretty=False): + """Serialize an object to a JSON string appropriate for API calls. + + Args: + obj: The object to serialize. + opt_pretty: True to pretty-print the object. + + Returns: + A JSON string representing the input. 
+ """ + serializer = Serializer(not opt_pretty) + encoded = serializer._encode(obj) # pylint: disable=protected-access + return json.dumps(encoded, indent=2 if opt_pretty else None) + + +def toReadableJSON(obj): + """Convert an object to readable JSON.""" + return toJSON(obj, True) diff --git a/ee/terrain.py b/ee/terrain.py new file mode 100644 index 0000000..63e1717 --- /dev/null +++ b/ee/terrain.py @@ -0,0 +1,28 @@ +#!/usr/bin/env python +"""A namespace for Terrain.""" + + + +from . import apifunction + +# Using lowercase function naming to match the JavaScript names. +# pylint: disable=g-bad-name + + +class Terrain(object): + """An namespace for Terrain Algorithms.""" + + _initialized = False + + @classmethod + def initialize(cls): + """Imports API functions to this class.""" + if not cls._initialized: + apifunction.ApiFunction.importApi(cls, 'Terrain', 'Terrain') + cls._initialized = True + + @classmethod + def reset(cls): + """Removes imported API functions from this class.""" + apifunction.ApiFunction.clearApi(cls) + cls._initialized = False diff --git a/ee/tests/_helpers_test.py b/ee/tests/_helpers_test.py new file mode 100644 index 0000000..1b50496 --- /dev/null +++ b/ee/tests/_helpers_test.py @@ -0,0 +1,41 @@ +#!/usr/bin/env python + + +import StringIO + +import unittest +import ee +from ee import apitestcase +from ee import deserializer +from ee.apifunction import ApiFunction +from ee.computedobject import ComputedObject + + +class ProfilingTest(apitestcase.ApiTestCase): + + def MockSend(self, path, params, *args): + """Overridden to check for profiling-related data.""" + if path == '/value': + value = deserializer.fromJSON(params['json']) + hooked = ee.data._profile_hook is not None + is_get_profiles = (isinstance(value, ComputedObject) and value.func == + ApiFunction.lookup('Profile.getProfiles')) + return 'hooked=%s getProfiles=%s' % (hooked, is_get_profiles) + else: + return super(ProfilingTest, self).MockSend(path, params, *args) + + def 
testProfilePrinting(self): + out = StringIO.StringIO() + with ee.profilePrinting(destination=out): + self.assertEquals('hooked=True getProfiles=False', ee.Number(1).getInfo()) + self.assertEquals('hooked=False getProfiles=True', out.getvalue()) + + def testProfilePrintingDefaultSmoke(self): + # This will print to sys.stderr, so we can't make any assertions about the + # output. But we can check that it doesn't fail. + with ee.profilePrinting(): + self.assertEquals('hooked=True getProfiles=False', ee.Number(1).getInfo()) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/apifunction_test.py b/ee/tests/apifunction_test.py new file mode 100644 index 0000000..fc5b63d --- /dev/null +++ b/ee/tests/apifunction_test.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +"""Tests for the ee.apifunction module.""" + + + +import types + +import unittest + +import ee + +from ee import apitestcase + + +class ApiFunctionTest(apitestcase.ApiTestCase): + + def testAddFunctions(self): + """Verifies that addition of static and instance API functions.""" + + # Check instance vs static functions, and trampling of + # existing functions. + class TestClass(object): + + def pre_addBands(self): # pylint: disable=g-bad-name + pass + + self.assertFalse(hasattr(TestClass, 'pre_load')) + self.assertFalse(hasattr(TestClass, 'select')) + self.assertFalse(hasattr(TestClass, 'pre_select')) + self.assertTrue(hasattr(TestClass, 'pre_addBands')) + self.assertFalse(hasattr(TestClass, '_pre_addBands')) + + ee.ApiFunction.importApi(TestClass, 'Image', 'Image', 'pre_') + self.assertFalse(isinstance(TestClass.pre_load, types.MethodType)) + self.assertFalse(hasattr(TestClass, 'select')) + # Unbound methods are just functions in Python 3. Check both to maintain + # backward compatibility. 
+ self.assertTrue(isinstance(TestClass.pre_select, + (types.FunctionType, types.MethodType))) + self.assertTrue(isinstance(TestClass.pre_addBands, + (types.FunctionType, types.MethodType))) + self.assertFalse(hasattr(TestClass, '_pre_addBands')) + + ee.ApiFunction.clearApi(TestClass) + self.assertFalse(hasattr(TestClass, 'pre_load')) + self.assertFalse(hasattr(TestClass, 'select')) + self.assertFalse(hasattr(TestClass, 'pre_select')) + self.assertTrue(hasattr(TestClass, 'pre_addBands')) + self.assertFalse(hasattr(TestClass, '_pre_addBands')) + + def testAddFunctions_Inherited(self): + """Verifies that inherited non-client functions can be overriden.""" + + class Base(object): + + def ClientOverride(self): + pass + + class Child(Base): + pass + + ee.ApiFunction.importApi(Base, 'Image', 'Image') + ee.ApiFunction.importApi(Child, 'Image', 'Image') + self.assertEquals(Base.ClientOverride, Child.ClientOverride) + self.assertNotEquals(Base.addBands, Child.addBands) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/batch_test.py b/ee/tests/batch_test.py new file mode 100644 index 0000000..4902003 --- /dev/null +++ b/ee/tests/batch_test.py @@ -0,0 +1,482 @@ +#!/usr/bin/env python +"""Test for the ee.batch module.""" + + + +import unittest + +import ee +from ee import apitestcase + + +TASK_STATUS_1 = { + 'description': 'FirstTestTask', + 'id': 'TEST1', + 'source_url': 'http://example.org/', + 'state': 'RUNNING', + 'task_type': 'EXPORT_IMAGE', + 'creation_timestamp_ms': 7, + 'start_timestamp_ms': 13, + 'update_timestamp_ms': 42, +} +TASK_STATUS_2 = { + 'description': 'SecondTestTask', + 'id': 'TEST2', + 'state': 'FAILED', + 'task_type': 'EXPORT_FEATURES', + 'creation_timestamp_ms': 17, + 'start_timestamp_ms': 113, + 'update_timestamp_ms': 142, + 'error_message': 'Explosions.', +} + + +class BatchTestCase(apitestcase.ApiTestCase): + """A test case for batch functionality.""" + + def setUp(self): + super(BatchTestCase, self).setUp() + + 
self.start_call_params = None + self.update_call_params = None + + def MockSend(path, params, unused_method=None, unused_raw=None): + if path == '/newtaskid': + return ['TESTTASKID'] + elif path == '/tasklist': + return {'tasks': [TASK_STATUS_1.copy(), TASK_STATUS_2.copy()]} + elif path == '/taskstatus': + if params['q'] == TASK_STATUS_1['id']: + return [TASK_STATUS_1.copy()] + elif params['q'] == TASK_STATUS_2['id']: + return [TASK_STATUS_2.copy()] + else: + return [{ + 'creation_timestamp_ms': 0, + 'id': params['q'], + 'state': 'UNKNOWN' + }] + elif path == '/processingrequest': + self.start_call_params = params + return {'started': 'OK'} + elif path == '/updatetask': + self.update_call_params = params + return {'updated': 'OK'} + else: + raise Exception('Unexpected API call to %s with %s' % (path, params)) + ee.data.send_ = MockSend + + def testTaskList(self): + """Verifies the return value of Task.list().""" + tasks = ee.batch.Task.list() + self.assertEquals(2, len(tasks)) + self.assertEquals(TASK_STATUS_1['id'], tasks[0].id) + self.assertEquals(TASK_STATUS_1['task_type'], tasks[0].config['type']) + self.assertEquals(TASK_STATUS_2['id'], tasks[1].id) + + def testTaskStatus(self): + """Verifies the return value of Task.status().""" + tasks = ee.batch.Task.list() + self.assertEquals( + { + 'state': 'RUNNING', + 'creation_timestamp_ms': 7, + 'update_timestamp_ms': 42, + 'description': 'FirstTestTask', + 'id': 'TEST1', + 'source_url': 'http://example.org/', + 'start_timestamp_ms': 13, + 'task_type': 'EXPORT_IMAGE', + }, tasks[0].status()) + self.assertTrue(tasks[0].active()) + self.assertEquals( + { + 'state': 'FAILED', + 'creation_timestamp_ms': 17, + 'update_timestamp_ms': 142, + 'error_message': 'Explosions.', + 'description': 'SecondTestTask', + 'id': 'TEST2', + 'start_timestamp_ms': 113, + 'task_type': 'EXPORT_FEATURES', + }, tasks[1].status()) + self.assertFalse(tasks[1].active()) + new_task = ee.batch.Export.table(ee.FeatureCollection('foo')) + 
self.assertEquals({ + 'state': 'UNSUBMITTED', + 'creation_timestamp_ms': 0, + 'id': 'TESTTASKID', + }, new_task.status()) + self.assertFalse(new_task.active()) + + def testTaskStart(self): + """Verifies that Task.start() calls the server appropriately.""" + task = ee.batch.Export.table(ee.FeatureCollection('foo'), 'bar') + task.start() + self.assertEquals('TESTTASKID', self.start_call_params['id']) + self.assertEquals('bar', self.start_call_params['description']) + + def testTaskCancel(self): + """Verifies that Task.cancel() calls the server appropriately.""" + task = ee.batch.Task.list()[0] + task.cancel() + self.assertEquals('TEST1', self.update_call_params['id']) + self.assertEquals('CANCEL', self.update_call_params['action']) + + def testStringRepresentation(self): + """Verifies the string representation of tasks.""" + tasks = ee.batch.Task.list() + self.assertEquals( + '', str(tasks[0])) + self.assertEquals( + '', str(tasks[1])) + new_task = ee.batch.Export.table(ee.FeatureCollection('foo'), 'bar') + self.assertEquals( + '', str(new_task)) + self.assertEquals( + '', str(ee.batch.Task('foo'))) + + def testExportImage(self): + """Verifies the task created by Export.image().""" + region = ee.Geometry.Rectangle(1, 2, 3, 4) + config = dict(region=region['coordinates'], maxPixels=10**10, + crs='foo', crs_transform='bar') + task = ee.batch.Export.image(ee.Image(1), 'TestDescription', config) + self.assertEquals('TESTTASKID', task.id) + self.assertEquals( + { + 'type': 'EXPORT_IMAGE', + 'state': 'UNSUBMITTED', + 'json': ee.Image(1).serialize(), + 'description': 'TestDescription', + 'region': '[[[1, 4], [1, 2], [3, 2], [3, 4]]]', + 'driveFileNamePrefix': 'TestDescription', + 'maxPixels': 10**10, + 'crs': 'foo', + 'crs_transform': 'bar', + }, + task.config) + + def testExportImageToAsset(self): + """Verifies the Asset export task created by Export.image.toAsset().""" + config = dict( + image=ee.Image(1), assetId='user/foo/bar', + pyramidingPolicy={'B1': 'min'}) + + # 
Test keyed parameters.
+ task_keyed = ee.batch.Export.image.toAsset(
+ image=config['image'], assetId=config['assetId'],
+ pyramidingPolicy=config['pyramidingPolicy'])
+ self.assertEquals('TESTTASKID', task_keyed.id)
+ self.assertEquals(
+ {
+ 'type': 'EXPORT_IMAGE',
+ 'state': 'UNSUBMITTED',
+ 'json': config['image'].serialize(),
+ 'description': 'myExportImageTask',
+ 'assetId': config['assetId'],
+ 'pyramidingPolicy': config['pyramidingPolicy']
+ }, task_keyed.config)
+
+ task_ordered = ee.batch.Export.image.toAsset(
+ config['image'], 'TestDescription', config['assetId'], maxPixels=1000)
+ self.assertEquals(
+ {
+ 'type': 'EXPORT_IMAGE',
+ 'state': 'UNSUBMITTED',
+ 'json': config['image'].serialize(),
+ 'description': 'TestDescription',
+ 'assetId': config['assetId'],
+ 'maxPixels': 1000
+ }, task_ordered.config)
+
+ def testExportImageToCloudStorage(self):
+ """Verifies the Cloud Storage export task created by Export.image()."""
+ region = ee.Geometry.Rectangle(1, 2, 3, 4)
+ config = dict(region=region['coordinates'], maxPixels=10**10,
+ outputBucket='test-bucket')
+ task = ee.batch.Export.image.toCloudStorage(
+ ee.Image(1), 'TestDescription',
+ config['outputBucket'], None, None,
+ config['region'], None, None, None, config['maxPixels'])
+ self.assertEquals('TESTTASKID', task.id)
+ self.assertEquals(
+ {
+ 'type': 'EXPORT_IMAGE',
+ 'state': 'UNSUBMITTED',
+ 'json': ee.Image(1).serialize(),
+ 'description': 'TestDescription',
+ 'region': '[[[1, 4], [1, 2], [3, 2], [3, 4]]]',
+ 'outputBucket': 'test-bucket',
+ 'maxPixels': 10**10,
+ },
+ task.config)
+
+ def testExportImageToGoogleDrive(self):
+ """Verifies the Drive destined task created by Export.image.toDrive()."""
+ region = ee.Geometry.Rectangle(1, 2, 3, 4)
+ drive_task_by_keys = ee.batch.Export.image.toDrive(
+ image=ee.Image(1), region=region['coordinates'], folder='foo',
+ maxPixels=10**10, crsTransform='bar')
+ self.assertEquals('TESTTASKID', drive_task_by_keys.id)
+ self.assertEquals(
+ {
+ 'type': 
'EXPORT_IMAGE', + 'state': 'UNSUBMITTED', + 'json': ee.Image(1).serialize(), + 'description': 'myExportImageTask', + 'region': '[[[1, 4], [1, 2], [3, 2], [3, 4]]]', + 'driveFileNamePrefix': 'myExportImageTask', + 'driveFolder': 'foo', + 'maxPixels': 10**10, + 'crs_transform': 'bar', # Transformed by _ConvertToServerParams. + }, + drive_task_by_keys.config) + + drive_task_with_old_keys = ee.batch.Export.image.toDrive( + image=ee.Image(1), region=region['coordinates'], driveFolder='foo', + driveFileNamePrefix='fooExport', maxPixels=10**10, crs_transform='bar') + self.assertEquals( + { + 'type': 'EXPORT_IMAGE', + 'state': 'UNSUBMITTED', + 'json': ee.Image(1).serialize(), + 'description': 'myExportImageTask', + 'region': '[[[1, 4], [1, 2], [3, 2], [3, 4]]]', + 'driveFileNamePrefix': 'fooExport', + 'driveFolder': 'foo', + 'maxPixels': 10**10, + 'crs_transform': 'bar', # Transformed by _ConvertToServerParams. + }, + drive_task_with_old_keys.config) + + def testExportImageFileDimensions(self): + """Verifies proper handling of the fileDimensions parameter.""" + number_task = ee.batch.Export.image.toDrive( + image=ee.Image(1), fileDimensions=100) + self.assertEquals(100, number_task.config['fileDimensions']) + + tuple_task = ee.batch.Export.image.toDrive( + image=ee.Image(1), fileDimensions=(100, 200)) + self.assertEquals('100,200', tuple_task.config['fileDimensions']) + + def testExportMapToCloudStorage(self): + """Verifies the task created by Export.map.toCloudStorage().""" + config = dict( + image=ee.Image(1), bucket='test-bucket', maxZoom=7, path='foo/gcs/path') + + # Test keyed parameters. 
+ task_keyed = ee.batch.Export.map.toCloudStorage( + image=config['image'], bucket=config['bucket'], + maxZoom=config['maxZoom'], path=config['path']) + self.assertEquals('TESTTASKID', task_keyed.id) + self.assertEquals( + { + 'type': 'EXPORT_TILES', + 'state': 'UNSUBMITTED', + 'json': config['image'].serialize(), + 'description': 'myExportMapTask', + 'outputBucket': config['bucket'], + 'maxZoom': config['maxZoom'], + 'outputPrefix': config['path'], + 'writePublicTiles': True, + 'fileFormat': 'auto' + }, task_keyed.config) + + # Test ordered parameters. + task_ordered = ee.batch.Export.map.toCloudStorage( + config['image'], 'TestDescription', config['bucket'], 'jpeg', None, + False, None, 30) + self.assertEquals( + { + 'type': 'EXPORT_TILES', + 'state': 'UNSUBMITTED', + 'json': config['image'].serialize(), + 'description': 'TestDescription', + 'outputBucket': config['bucket'], + 'outputPrefix': 'TestDescription', + 'scale': 30, + 'writePublicTiles': False, + 'fileFormat': 'jpeg' + }, task_ordered.config) + + def testExportTable(self): + """Verifies the task created by Export.table().""" + task = ee.batch.Export.table(ee.FeatureCollection('drive test FC')) + self.assertEquals('TESTTASKID', task.id) + self.assertEquals( + { + 'type': 'EXPORT_FEATURES', + 'state': 'UNSUBMITTED', + 'json': ee.FeatureCollection('drive test FC').serialize(), + 'description': 'myExportTableTask', + 'driveFileNamePrefix': 'myExportTableTask', + 'fileFormat': 'CSV', + }, + task.config) + + def testExportTableToCloudStorage(self): + """Verifies the Cloud Storage task created by Export.table().""" + task = ee.batch.Export.table.toCloudStorage( + collection=ee.FeatureCollection('foo'), outputBucket='test-bucket') + self.assertEquals('TESTTASKID', task.id) + self.assertEquals( + { + 'type': 'EXPORT_FEATURES', + 'state': 'UNSUBMITTED', + 'json': ee.FeatureCollection('foo').serialize(), + 'description': 'myExportTableTask', + 'outputBucket': 'test-bucket', + 'fileFormat': 'CSV', + }, + 
task.config) + + def testExportTableToGoogleDrive(self): + """Verifies the Drive destined task created by Export.table.toDrive().""" + test_collection = ee.FeatureCollection('foo') + test_description = 'TestDescription' + test_file_name_prefix = 'fooDriveFileNamePrefix' + test_format = 'KML' + expected_config = { + 'type': 'EXPORT_FEATURES', + 'state': 'UNSUBMITTED', + 'json': test_collection.serialize(), + 'description': test_description, + 'driveFileNamePrefix': test_file_name_prefix, + 'fileFormat': test_format, + } + + # Ordered parameters + task_ordered = ee.batch.Export.table.toDrive( + test_collection, test_description, + None, test_file_name_prefix, test_format) + self.assertEquals('TESTTASKID', task_ordered.id) + self.assertEquals(expected_config, task_ordered.config) + + # Updating expectations to test keyed parameters + expected_config.update({ + 'fileFormat': 'CSV', + 'description': 'myExportTableTask', + 'driveFolder': 'fooFolder' + }) + + # Test that deprecated parameters (driveFolder and driveFileNamePrefix) + # still work. + task_old_keys = ee.batch.Export.table.toDrive( + collection=test_collection, driveFolder='fooFolder', + driveFileNamePrefix='fooDriveFileNamePrefix') + self.assertEquals(expected_config, task_old_keys.config) + + # Test that new parameters work + task_new_keys = ee.batch.Export.table.toDrive( + collection=test_collection, folder='fooFolder', + fileNamePrefix='fooDriveFileNamePrefix') + self.assertEquals(expected_config, task_new_keys.config) + + def testExportVideo(self): + """Verifies the task created by Export.video().""" + region = ee.Geometry.Rectangle(1, 2, 3, 4) + config = dict(region=region['coordinates'], dimensions=16) + collection = ee.ImageCollection([ee.Image(1), ee.Image(2)]) + task = ee.batch.Export.video(collection, 'TestVideoName', config) + self.assertEquals('TESTTASKID', task.id) + # Defaults the destination to Drive. 
+ self.assertEquals(
+ {
+ 'type': 'EXPORT_VIDEO',
+ 'state': 'UNSUBMITTED',
+ 'json': collection.serialize(),
+ 'description': 'TestVideoName',
+ 'crs': 'SR-ORG:6627',
+ 'driveFileNamePrefix': 'TestVideoName',
+ 'region': '[[[1, 4], [1, 2], [3, 2], [3, 4]]]',
+ 'dimensions': 16
+ },
+ task.config)
+
+ config['outputBucket'] = 'test-bucket'
+ gcs_task = ee.batch.Export.video(collection, 'TestVideoName', config)
+ self.assertEquals(
+ {
+ 'type': 'EXPORT_VIDEO',
+ 'state': 'UNSUBMITTED',
+ 'json': collection.serialize(),
+ 'description': 'TestVideoName',
+ 'crs': 'SR-ORG:6627',
+ 'outputBucket': 'test-bucket',
+ 'region': '[[[1, 4], [1, 2], [3, 2], [3, 4]]]',
+ 'dimensions': 16
+ },
+ gcs_task.config)
+
+ def testExportVideoToCloudStorage(self):
+ """Verifies the task created by Export.video.toCloudStorage()."""
+ region = ee.Geometry.Rectangle(1, 2, 3, 4)
+ collection = ee.ImageCollection([ee.Image(1), ee.Image(2)])
+ expected_config = {
+ 'type': 'EXPORT_VIDEO',
+ 'state': 'UNSUBMITTED',
+ 'json': collection.serialize(),
+ 'description': 'TestVideoName',
+ 'outputBucket': 'test-bucket',
+ 'outputPrefix': 'TestVideoName',
+ 'region': '[[[1, 4], [1, 2], [3, 2], [3, 4]]]',
+ 'dimensions': 16,
+ 'crs_transform': 'bar', # Transformed by _ConvertToServerParams.
+ 'crs': 'foo'
+ }
+
+ # Test keyed parameters.
+ task_keyed = ee.batch.Export.video.toCloudStorage(
+ collection=collection,
+ description='TestVideoName',
+ bucket='test-bucket',
+ dimensions=16,
+ region=region['coordinates'],
+ crsTransform='bar',
+ crs='foo')
+ self.assertEquals('TESTTASKID', task_keyed.id)
+ self.assertEquals(expected_config, task_keyed.config)
+
+ # Test ordered parameters. 
+ task_ordered = ee.batch.Export.video.toCloudStorage(
+ collection, 'TestVideoName', 'test-bucket', None, None, 16,
+ region['coordinates'], None, 'foo', 'bar')
+ self.assertEquals(expected_config, task_ordered.config)
+
+ def testExportVideoToDrive(self):
+ """Verifies the task created by Export.video.toDrive()."""
+ region = ee.Geometry.Rectangle(1, 2, 3, 4)
+ collection = ee.ImageCollection([ee.Image(1), ee.Image(2)])
+ expected_config = {
+ 'type': 'EXPORT_VIDEO',
+ 'state': 'UNSUBMITTED',
+ 'json': collection.serialize(),
+ 'description': 'TestVideoName',
+ 'crs': 'SR-ORG:6627',
+ 'driveFolder': 'test-folder',
+ 'driveFileNamePrefix': 'TestVideoName',
+ 'region': '[[[1, 4], [1, 2], [3, 2], [3, 4]]]',
+ 'dimensions': 16,
+ 'crs_transform': 'bar'
+ }
+
+ # Test keyed parameters.
+ task_keyed = ee.batch.Export.video.toDrive(
+ collection=collection,
+ description='TestVideoName',
+ folder='test-folder',
+ dimensions=16,
+ crsTransform='bar',
+ region=region['coordinates'])
+ self.assertEquals('TESTTASKID', task_keyed.id)
+ self.assertEquals(expected_config, task_keyed.config)
+
+ # Test ordered parameters. 
+ task_ordered = ee.batch.Export.video.toDrive( + collection, 'TestVideoName', 'test-folder', None, None, 16, + region['coordinates'], None, 'SR-ORG:6627', 'bar') + self.assertEquals(expected_config, task_ordered.config) + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/collection_test.py b/ee/tests/collection_test.py new file mode 100644 index 0000000..e675335 --- /dev/null +++ b/ee/tests/collection_test.py @@ -0,0 +1,144 @@ +#!/usr/bin/env python +"""Test for the ee.collection module.""" + + + +import datetime + +import unittest + +import ee +from ee import apitestcase + + +class CollectionTestCase(apitestcase.ApiTestCase): + + def testSortAndLimit(self): + """Verifies the behavior of the sort() and limit() methods.""" + collection = ee.Collection(ee.Function(), {}) + + limited = collection.limit(10) + self.assertEquals(ee.ApiFunction.lookup('Collection.limit'), limited.func) + self.assertEquals( + {'collection': collection, 'limit': 10}, + limited.args) + + sorted_collection = collection.sort('bar', True) + self.assertEquals( + ee.ApiFunction.lookup('Collection.limit'), + sorted_collection.func) + self.assertEquals( + {'collection': collection, 'key': ee.String('bar'), 'ascending': True}, + sorted_collection.args) + + reverse_sorted_collection = collection.sort('bar', False) + self.assertEquals( + ee.ApiFunction.lookup('Collection.limit'), + reverse_sorted_collection.func) + self.assertEquals( + {'collection': collection, 'key': ee.String('bar'), 'ascending': False}, + reverse_sorted_collection.args) + + def testFilter(self): + """Verifies the behavior of filter() method.""" + collection = ee.Collection(ee.Function(), {}) + + # We don't allow empty filters. 
+ self.assertRaises(Exception, collection.filter) + + filtered = collection.filter(ee.Filter.eq('foo', 1)) + self.assertEquals( + ee.ApiFunction.lookup('Collection.filter'), + filtered.func) + self.assertEquals( + {'collection': collection, 'filter': ee.Filter.eq('foo', 1)}, + filtered.args) + self.assertTrue(isinstance(filtered, ee.Collection)) + + def testFilterShortcuts(self): + """Verifies the behavior of the various filtering shortcut methods.""" + collection = ee.Collection(ee.Function(), {}) + geom = {'type': 'Polygon', 'coordinates': [[[1, 2], [3, 4]]]} + d1 = datetime.datetime.strptime('1/1/2000', '%m/%d/%Y') + d2 = datetime.datetime.strptime('1/1/2001', '%m/%d/%Y') + + self.assertEquals(collection.filter(ee.Filter.geometry(geom)), + collection.filterBounds(geom)) + self.assertEquals(collection.filter(ee.Filter.date(d1)), + collection.filterDate(d1)) + self.assertEquals(collection.filter(ee.Filter.date(d1, d2)), + collection.filterDate(d1, d2)) + self.assertEquals(collection.filter(ee.Filter.eq('foo', 13)), + collection.filterMetadata('foo', 'equals', 13)) + + def testMapping(self): + """Verifies the behavior of the map() method.""" + collection = ee.ImageCollection('foo') + algorithm = lambda img: img.select('bar') + mapped = collection.map(algorithm) + + self.assertTrue(isinstance(mapped, ee.ImageCollection)) + self.assertEquals(ee.ApiFunction.lookup('Collection.map'), mapped.func) + self.assertEquals(collection, mapped.args['collection']) + + # Need to do a serialized comparison for the function body because + # variables returned from CustomFunction.variable() do not implement + # __eq__. 
+ sig = { + 'returns': 'Image', + 'args': [{'name': '_MAPPING_VAR_0_0', 'type': 'Image'}] + } + expected_function = ee.CustomFunction(sig, algorithm) + self.assertEquals(expected_function.serialize(), + mapped.args['baseAlgorithm'].serialize()) + + def testNestedMapping(self): + """Verifies that nested map() calls produce distinct variables.""" + collection = ee.FeatureCollection('foo') + result = collection.map(lambda x: collection.map(lambda y: [x, y])) + + # Verify the signatures. + self.assertEquals( + '_MAPPING_VAR_1_0', + result.args['baseAlgorithm']._signature['args'][0]['name']) + inner_result = result.args['baseAlgorithm']._body + self.assertEquals( + '_MAPPING_VAR_0_0', + inner_result.args['baseAlgorithm']._signature['args'][0]['name']) + + # Verify the references. + self.assertEquals( + '_MAPPING_VAR_1_0', + inner_result.args['baseAlgorithm']._body[0].varName) + self.assertEquals( + '_MAPPING_VAR_0_0', + inner_result.args['baseAlgorithm']._body[1].varName) + + def testIteration(self): + """Verifies the behavior of the iterate() method.""" + collection = ee.ImageCollection('foo') + first = ee.Image(0) + algorithm = lambda img, prev: img.addBands(ee.Image(prev)) + result = collection.iterate(algorithm, first) + + self.assertEquals(ee.ApiFunction.lookup('Collection.iterate'), result.func) + self.assertEquals(collection, result.args['collection']) + self.assertEquals(first, result.args['first']) + + # Need to do a serialized comparison for the function body because + # variables returned from CustomFunction.variable() do not implement + # __eq__. 
+ sig = { + 'returns': 'Object', + 'args': [ + {'name': '_MAPPING_VAR_0_0', 'type': 'Image'}, + {'name': '_MAPPING_VAR_0_1', 'type': 'Object'} + ] + } + expected_function = ee.CustomFunction(sig, algorithm) + self.assertEquals(expected_function.serialize(), + result.args['function'].serialize()) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/computedobject_test.py b/ee/tests/computedobject_test.py new file mode 100644 index 0000000..433740a --- /dev/null +++ b/ee/tests/computedobject_test.py @@ -0,0 +1,36 @@ +#!/usr/bin/env python +"""Tests for the ee.computedobject module.""" + + + +import six # For Python 2/3 compatibility + +import unittest +import ee +from ee import apitestcase + + +class ComputedObjectTest(apitestcase.ApiTestCase): + + def testComputedObject(self): + """Verifies that untyped calls wrap the result in a ComputedObject.""" + + result = ee.ApiFunction.call_('DateRange', 1, 2) + self.assertTrue(isinstance(result.serialize(), six.string_types)) + self.assertEquals({'value': 'fakeValue'}, result.getInfo()) + + def testInternals(self): + """Test eq(), ne() and hash().""" + a = ee.ApiFunction.call_('DateRange', 1, 2) + b = ee.ApiFunction.call_('DateRange', 2, 3) + c = ee.ApiFunction.call_('DateRange', 1, 2) + + self.assertEquals(a, a) + self.assertNotEquals(a, b) + self.assertEquals(a, c) + self.assertNotEquals(b, c) + self.assertNotEquals(hash(a), hash(b)) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/data_test.py b/ee/tests/data_test.py new file mode 100644 index 0000000..ba00374 --- /dev/null +++ b/ee/tests/data_test.py @@ -0,0 +1,157 @@ +#!/usr/bin/env python + + +import httplib2 +import mock +import unittest +import ee + + +class DataTest(unittest.TestCase): + + @mock.patch('time.sleep') + def testSuccess(self, mock_sleep): + with DoStubHttp(200, 'application/json', '{"data": "bar"}'): + self.assertEqual('bar', ee.data.send_('/foo', {})) + self.assertEqual(False, mock_sleep.called) + + 
@mock.patch('time.sleep') + def testRetry(self, mock_sleep): + with DoStubHttp(429, 'application/json', '{"data": "bar"}'): + with self.assertRaises(ee.ee_exception.EEException): + ee.data.send_('/foo', {}) + self.assertEqual(5, mock_sleep.call_count) + + def testNon200Success(self): + with DoStubHttp(202, 'application/json', '{"data": "bar"}'): + self.assertEqual('bar', ee.data.send_('/foo', {})) + + def testJsonSyntaxError(self): + with DoStubHttp(200, 'application/json', '{"data"}'): + with self.assertRaises(ee.ee_exception.EEException) as cm: + ee.data.send_('/foo', {}) + self.assertEqual('Invalid JSON: {"data"}', str(cm.exception)) + + def testJsonStructureError(self): + with DoStubHttp(200, 'application/json', '{}'): + with self.assertRaises(ee.ee_exception.EEException) as cm: + ee.data.send_('/foo', {}) + self.assertEqual('Malformed response: {}', str(cm.exception)) + + def testUnexpectedStatus(self): + with DoStubHttp(418, 'text/html', ''): + with self.assertRaises(ee.ee_exception.EEException) as cm: + ee.data.send_('/foo', {}) + self.assertEqual('Server returned HTTP code: 418', str(cm.exception)) + + def testJson200Error(self): + with DoStubHttp(200, 'application/json', + '{"error": {"code": 500, "message": "bar"}}'): + with self.assertRaises(ee.ee_exception.EEException) as cm: + ee.data.send_('/foo', {}) + self.assertEqual(u'bar', str(cm.exception)) + + def testJsonNon2xxError(self): + with DoStubHttp(400, 'application/json', + '{"error": {"code": 400, "message": "bar"}}'): + with self.assertRaises(ee.ee_exception.EEException) as cm: + ee.data.send_('/foo', {}) + self.assertEqual(u'bar', str(cm.exception)) + + def testWrongContentType(self): + with DoStubHttp(200, 'text/html', '{"data": "bar"}'): + with self.assertRaises(ee.ee_exception.EEException) as cm: + ee.data.send_('/foo', {}) + self.assertEqual(u'Response was unexpectedly not JSON, but text/html', + str(cm.exception)) + + def testNoContentType(self): + with DoStubHttp(200, None, '{"data": 
"bar"}'): + self.assertEqual('bar', ee.data.send_('/foo', {})) + + def testContentTypeParameterAllowed(self): + with DoStubHttp(200, 'application/json; charset=utf-8', '{"data": ""}'): + self.assertEqual('', ee.data.send_('/foo', {})) + + def testRawSuccess(self): + with DoStubHttp(200, 'image/png', 'FAKEDATA'): + self.assertEqual('FAKEDATA', ee.data.send_('/foo', {}, opt_raw=True)) + + def testRawError(self): + with DoStubHttp(400, 'application/json', + '{"error": {"code": 400, "message": "bar"}}'): + with self.assertRaises(ee.ee_exception.EEException) as cm: + ee.data.send_('/foo', {}, opt_raw=True) + self.assertEqual(u'Server returned HTTP code: 400', str(cm.exception)) + + def testRaw200Error(self): + """Raw shouldn't be parsed, so the error-in-200 shouldn't be noticed. + + (This is an edge case we do not expect to see.) + """ + with DoStubHttp(200, 'application/json', + '{"error": {"code": 400, "message": "bar"}}'): + self.assertEqual('{"error": {"code": 400, "message": "bar"}}', + ee.data.send_('/foo', {}, opt_raw=True)) + + def testNotProfiling(self): + # Test that we do not request profiling. + with DoProfileStubHttp(self, False): + ee.data.send_('/foo', {}) + + def testProfiling(self): + with DoProfileStubHttp(self, True): + seen = [] + def ProfileHook(profile_id): + seen.append(profile_id) + + with ee.data.profiling(ProfileHook): + ee.data.send_('/foo', {}) + self.assertEqual(['someProfileId'], seen) + + def testProfilingCleanup(self): + with DoProfileStubHttp(self, True): + try: + with ee.data.profiling(lambda _: None): + raise ExceptionForTest() + except ExceptionForTest: + pass + + # Should not have profiling enabled after exiting the context by raising. + with DoProfileStubHttp(self, False): + ee.data.send_('/foo', {}) + + +def DoStubHttp(status, mime, resp_body): + """Context manager for temporarily overriding Http.""" + def Request(unused_self, unused_url, method, body, headers): + _ = method, body, headers # Unused kwargs. 
+ response = httplib2.Response({ + 'status': status, + 'content-type': mime, + }) + return response, resp_body + return mock.patch('httplib2.Http.request', new=Request) + + +def DoProfileStubHttp(test, expect_profiling): + def Request(unused_self, unused_url, method, body, headers): + _ = method, headers # Unused kwargs. + test.assertEqual(expect_profiling, 'profiling=1' in body, msg=body) + response_dict = { + 'status': 200, + 'content-type': 'application/json' + } + if expect_profiling: + response_dict['x-earth-engine-computation-profile'] = 'someProfileId' + response = httplib2.Response(response_dict) + return response, '{"data": "dummy_data"}' + return mock.patch('httplib2.Http.request', new=Request) + + +class ExceptionForTest(Exception): + pass + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/date_test.py b/ee/tests/date_test.py new file mode 100644 index 0000000..edb441c --- /dev/null +++ b/ee/tests/date_test.py @@ -0,0 +1,56 @@ +#!/usr/bin/env python +"""Test for the ee.date module.""" + + + +import datetime + +import unittest + +import ee +from ee import apitestcase + + +class DateTest(apitestcase.ApiTestCase): + def testDate(self): + """Verifies date constructors.""" + + datefunc = ee.ApiFunction.lookup('Date') + + d1 = ee.Date('2000-01-01') + d2 = ee.Date(946684800000) + d3 = ee.Date(datetime.datetime(2000, 1, 1)) + d4 = ee.Date(d3) + dates = [d1, d2, d3, d4] + + for d in dates: + self.assertTrue(isinstance(d, ee.Date)) + self.assertEquals(datefunc, d.func) + + self.assertEquals(d1.args, {'value': '2000-01-01'}) + for d in dates[1:]: + self.assertEquals(d.args['value'], 946684800000) + + d5 = ee.Date(ee.CustomFunction.variable('Date', 'foo')) + self.assertTrue(isinstance(d5, ee.Date)) + self.assertTrue(d5.isVariable()) + self.assertEquals('foo', d5.varName) + + # A non-date variable. 
+ v = ee.CustomFunction.variable('Number', 'bar') + d6 = ee.Date(v) + self.assertTrue(isinstance(d6, ee.Date)) + self.assertFalse(d6.isVariable()) + self.assertEquals(datefunc, d6.func) + self.assertEquals({'value': v}, d6.args) + + # A non-date ComputedObject, promotion and casting. + obj = ee.ApiFunction.call_('DateRange', 1, 2) + d7 = ee.Date(obj) + self.assertTrue(isinstance(d7, ee.Date)) + self.assertEquals(datefunc, d7.func) + self.assertEquals({'value': obj}, d7.args) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/deserializer_test.py b/ee/tests/deserializer_test.py new file mode 100644 index 0000000..5736d70 --- /dev/null +++ b/ee/tests/deserializer_test.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +"""Tests for the ee.deserializer module.""" + + + +import json + +import unittest + +import ee +from ee import apitestcase +from ee import deserializer +from ee import serializer + + +class DeserializerTest(apitestcase.ApiTestCase): + + def testRoundTrip(self): + """Verifies a round trip of a comprehensive serialization case.""" + encoded = apitestcase.ENCODED_JSON_SAMPLE + decoded = deserializer.decode(encoded) + re_encoded = json.loads(serializer.toJSON(decoded)) + self.assertEquals(encoded, re_encoded) + + def testCast(self): + """Verifies that decoding casts the result to the right class.""" + input_image = ee.Image(13).addBands(42) + output = deserializer.fromJSON(serializer.toJSON(input_image)) + self.assertTrue(isinstance(output, ee.Image)) + + def testReuse(self): + """Verifies that decoding results can be used and re-encoded.""" + input_image = ee.Image(13) + output = deserializer.fromJSON(serializer.toJSON(input_image)) + self.assertEquals(output.addBands(42).serialize(), + input_image.addBands(42).serialize()) + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/dictionary_test.py b/ee/tests/dictionary_test.py new file mode 100644 index 0000000..fc86a14 --- /dev/null +++ b/ee/tests/dictionary_test.py @@ -0,0 
+1,48 @@ +#!/usr/bin/env python +"""Test for the ee.dictionary module.""" + + + +import unittest + +import ee +from ee import apitestcase + + +class DictionaryTest(apitestcase.ApiTestCase): + + def testDictionary(self): + """Verifies basic behavior of ee.Dictionary.""" + src = {'a': 1, 'b': 2, 'c': 'three'} + dictionary = ee.Dictionary(src) + self.assertEquals({'type': 'Dictionary', 'value': src}, + ee.Serializer(False)._encode(dictionary)) + + f = ee.Feature(None, {'properties': src}) + computed = ee.Dictionary(f.get('properties')) + self.assertTrue(isinstance(computed, ee.Dictionary)) + + # The 4 types of arguments we expect + cons = (ee.Dictionary(src), + ee.Dictionary(f.get('properties')), + ee.Dictionary(), + ee.Dictionary(('one', 1))) + + for d in cons: + self.assertTrue(isinstance(d, ee.ComputedObject)) + + def testInternals(self): + """Test eq(), ne() and hash().""" + a = ee.Dictionary({'one': 1}) + b = ee.Dictionary({'two': 2}) + c = ee.Dictionary({'one': 1}) + + self.assertEquals(a, a) + self.assertNotEquals(a, b) + self.assertEquals(a, c) + self.assertNotEquals(b, c) + self.assertNotEquals(hash(a), hash(b)) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/ee_test.py b/ee/tests/ee_test.py new file mode 100644 index 0000000..3e3ac95 --- /dev/null +++ b/ee/tests/ee_test.py @@ -0,0 +1,380 @@ +#!/usr/bin/env python +"""Test for the ee.__init__ file.""" + + + +import unittest + +import ee +from ee import apitestcase + + +class EETestCase(apitestcase.ApiTestCase): + + def setUp(self): + ee.Reset() + + def testInitialization(self): + """Verifies library initialization.""" + + def MockSend(path, params, unused_method=None, unused_raw=None): + if path == '/algorithms': + return {} + else: + raise Exception('Unexpected API call to %s with %s' % (path, params)) + ee.data.send_ = MockSend + + # Verify that the base state is uninitialized. 
+ self.assertFalse(ee.data._initialized)
+ self.assertEquals(ee.data._api_base_url, None)
+ self.assertEquals(ee.ApiFunction._api, None)
+ self.assertFalse(ee.Image._initialized)
+
+ # Verify that ee.Initialize() sets the URL and initializes classes.
+ ee.Initialize(None, 'foo')
+ self.assertTrue(ee.data._initialized)
+ self.assertEquals(ee.data._api_base_url, 'foo/api')
+ self.assertEquals(ee.ApiFunction._api, {})
+ self.assertTrue(ee.Image._initialized)
+
+ # Verify that ee.Initialize(None) does not override custom URLs.
+ ee.Initialize(None)
+ self.assertTrue(ee.data._initialized)
+ self.assertEquals(ee.data._api_base_url, 'foo/api')
+
+ # Verify that ee.Reset() reverts everything to the base state.
+ ee.Reset()
+ self.assertFalse(ee.data._initialized)
+ self.assertEquals(ee.data._api_base_url, None)
+ self.assertEquals(ee.ApiFunction._api, None)
+ self.assertFalse(ee.Image._initialized)
+
+ def testCallAndApply(self):
+ """Verifies the behavior of ee.call() and ee.apply()."""
+
+ # Use a custom set of known functions. 
+ def MockSend(path, params, unused_method=None, unused_raw=None): + if path == '/algorithms': + return { + 'fakeFunction': { + 'type': 'Algorithm', + 'args': [ + {'name': 'image1', 'type': 'Image'}, + {'name': 'image2', 'type': 'Image'} + ], + 'returns': 'Image' + }, + 'Image.constant': apitestcase.BUILTIN_FUNCTIONS['Image.constant'] + } + else: + raise Exception('Unexpected API call to %s with %s' % (path, params)) + ee.data.send_ = MockSend + + ee.Initialize(None) + image1 = ee.Image(1) + image2 = ee.Image(2) + expected = ee.Image(ee.ComputedObject( + ee.ApiFunction.lookup('fakeFunction'), + {'image1': image1, 'image2': image2})) + + applied_with_images = ee.apply( + 'fakeFunction', {'image1': image1, 'image2': image2}) + self.assertEquals(expected, applied_with_images) + + applied_with_numbers = ee.apply('fakeFunction', {'image1': 1, 'image2': 2}) + self.assertEquals(expected, applied_with_numbers) + + called_with_numbers = ee.call('fakeFunction', 1, 2) + self.assertEquals(expected, called_with_numbers) + + # Test call and apply() with a custom function. + sig = {'returns': 'Image', 'args': [{'name': 'foo', 'type': 'Image'}]} + func = ee.CustomFunction(sig, lambda foo: ee.call('fakeFunction', 42, foo)) + expected_custom_function_call = ee.Image( + ee.ComputedObject(func, {'foo': ee.Image(13)})) + self.assertEquals(expected_custom_function_call, ee.call(func, 13)) + self.assertEquals(expected_custom_function_call, + ee.apply(func, {'foo': 13})) + + # Test None promotion. + called_with_null = ee.call('fakeFunction', None, 1) + self.assertEquals(None, called_with_null.args['image1']) + + def testDynamicClasses(self): + """Verifies dynamic class initialization.""" + + # Use a custom set of known functions. 
+ def MockSend(path, unused_params, unused_method=None, unused_raw=None): + if path == '/algorithms': + return { + 'Array': { + 'type': 'Algorithm', + 'args': [ + { + 'name': 'values', + 'type': 'Serializable', + 'description': '' + } + ], + 'description': '', + 'returns': 'Array' + }, + 'Array.cos': { + 'type': 'Algorithm', + 'args': [ + { + 'type': 'Array', + 'description': '', + 'name': 'input' + } + ], + 'description': '', + 'returns': 'Array' + }, + 'Kernel.circle': { + 'returns': 'Kernel', + 'args': [ + { + 'type': 'float', + 'description': '', + 'name': 'radius', + }, + { + 'default': 1.0, + 'type': 'float', + 'optional': True, + 'description': '', + 'name': 'scale' + }, + { + 'default': True, + 'type': 'boolean', + 'optional': True, + 'description': '', + 'name': 'normalize' + } + ], + 'type': 'Algorithm', + 'description': '' + }, + 'Reducer.mean': { + 'returns': 'Reducer', + 'args': [] + }, + 'fakeFunction': { + 'returns': 'Array', + 'args': [ + { + 'type': 'Reducer', + 'description': '', + 'name': 'kernel', + } + ] + } + } + ee.data.send_ = MockSend + + ee.Initialize(None) + + # Verify that the expected classes got generated. + self.assertTrue(hasattr(ee, 'Array')) + self.assertTrue(hasattr(ee, 'Kernel')) + self.assertTrue(hasattr(ee.Array, 'cos')) + self.assertTrue(hasattr(ee.Kernel, 'circle')) + + # Try out the constructors. + kernel = ee.ApiFunction('Kernel.circle').call(1, 2) + self.assertEquals(kernel, ee.Kernel.circle(1, 2)) + + array = ee.ApiFunction('Array').call([1, 2]) + self.assertEquals(array, ee.Array([1, 2])) + self.assertEquals(array, ee.Array(ee.Array([1, 2]))) + + # Try out the member function. + self.assertEquals(ee.ApiFunction('Array.cos').call(array), + ee.Array([1, 2]).cos()) + + # Test argument promotion. 
+ f1 = ee.ApiFunction('Array.cos').call([1, 2]) + f2 = ee.ApiFunction('Array.cos').call(ee.Array([1, 2])) + self.assertEquals(f1, f2) + self.assertTrue(isinstance(f1, ee.Array)) + + f3 = ee.call('fakeFunction', 'mean') + f4 = ee.call('fakeFunction', ee.Reducer.mean()) + self.assertEquals(f3, f4) + + try: + ee.call('fakeFunction', 'moo') + self.fail() + except ee.EEException as e: + self.assertTrue('Unknown algorithm: Reducer.moo' in str(e)) + + def testDynamicConstructor(self): + # Test the behavior of the dynamic class constructor. + + # Use a custom set of known functions for classes Foo and Bar. + # Foo Foo(arg1, [arg2]) + # Bar Foo.makeBar() + # Bar Foo.takeBar(Bar bar) + # Baz Foo.baz() + def MockSend(path, unused_params, unused_method=None, unused_raw=None): + if path == '/algorithms': + return { + 'Foo': { + 'returns': 'Foo', + 'args': [ + {'name': 'arg1', 'type': 'Object'}, + {'name': 'arg2', 'type': 'Object', 'optional': True} + ] + }, + 'Foo.makeBar': { + 'returns': 'Bar', + 'args': [{'name': 'foo', 'type': 'Foo'}] + }, + 'Foo.takeBar': { + 'returns': 'Bar', + 'args': [ + {'name': 'foo', 'type': 'Foo'}, + {'name': 'bar', 'type': 'Bar'} + ] + }, + 'Bar.baz': { + 'returns': 'Baz', + 'args': [{'name': 'bar', 'type': 'Bar'}] + } + } + + ee.data.send_ = MockSend + ee.Initialize(None) + + # Try to cast something that's already of the right class. + x = ee.Foo('argument') + self.assertEquals(ee.Foo(x), x) + + # Tests for dynamic classes, where there is a constructor. + # + # If there's more than 1 arg, call the constructor. + x = ee.Foo('a') + y = ee.Foo(x, 'b') + ctor = ee.ApiFunction.lookup('Foo') + self.assertEquals(y.func, ctor) + self.assertEquals(y.args, {'arg1': x, 'arg2': 'b'}) + + # Can't cast a primitive; call the constructor. + self.assertEquals(ctor, ee.Foo(1).func) + + # A computed object, but not this class; call the constructor. 
+ self.assertEquals(ctor, ee.Foo(ee.List([1, 2, 3])).func) + + # Tests for dynamic classes, where there isn't a constructor. + # + # Foo.makeBar and Foo.takeBar should have caused Bar to be generated. + self.assertTrue(hasattr(ee, 'Bar')) + + # Make sure we can create a Bar. + bar = ee.Foo(1).makeBar() + self.assertTrue(isinstance(bar, ee.Bar)) + + # Now cast something else to a Bar and verify it was just a cast. + cast = ee.Bar(ee.Foo(1)) + self.assertTrue(isinstance(cast, ee.Bar)) + self.assertEquals(ctor, cast.func) + + # We shouldn't be able to cast with more than 1 arg. + try: + ee.Bar(x, 'foo') + self.fail('Expected an exception.') + except ee.EEException as e: + self.assertTrue('Too many arguments for ee.Bar' in str(e)) + + # We shouldn't be able to cast a primitive. + try: + ee.Bar(1) + self.fail('Expected an exception.') + except ee.EEException as e: + self.assertTrue('Must be a ComputedObject' in str(e)) + + def testDynamicConstructorCasting(self): + """Test the behavior of casting with dynamic classes.""" + self.InitializeApi() + result = ee.Geometry.Rectangle(1, 1, 2, 2).bounds(0, 'EPSG:4326') + expected = (ee.Geometry.Polygon([[1, 2], [1, 1], [2, 1], [2, 2]]) + .bounds(ee.ErrorMargin(0), ee.Projection('EPSG:4326'))) + self.assertEquals(expected, result) + + def testPromotion(self): + """Verifies object promotion rules.""" + self.InitializeApi() + + # Features and Images are both already Elements. + self.assertTrue(isinstance(ee._Promote(ee.Feature(None), 'Element'), + ee.Feature)) + self.assertTrue(isinstance(ee._Promote(ee.Image(0), 'Element'), ee.Image)) + + # Promote an untyped object to an Element. + untyped = ee.ComputedObject('foo', {}) + self.assertTrue(isinstance(ee._Promote(untyped, 'Element'), ee.Element)) + + # Promote an untyped variable to an Element. 
+ untyped = ee.ComputedObject(None, None, 'foo') + self.assertTrue(isinstance(ee._Promote(untyped, 'Element'), ee.Element)) + self.assertEquals('foo', ee._Promote(untyped, 'Element').varName) + + def testUnboundMethods(self): + """Verifies unbound method attachment to ee.Algorithms.""" + + # Use a custom set of known functions. + def MockSend(path, unused_params, unused_method=None, unused_raw=None): + if path == '/algorithms': + return { + 'Foo': { + 'type': 'Algorithm', + 'args': [], + 'description': '', + 'returns': 'Object' + }, + 'Foo.bar': { + 'type': 'Algorithm', + 'args': [], + 'description': '', + 'returns': 'Object' + }, + 'Quux.baz': { + 'type': 'Algorithm', + 'args': [], + 'description': '', + 'returns': 'Object' + }, + 'last': { + 'type': 'Algorithm', + 'args': [], + 'description': '', + 'returns': 'Object' + } + } + ee.data.send_ = MockSend + + ee.ApiFunction.importApi(lambda: None, 'Quux', 'Quux') + ee._InitializeUnboundMethods() + + self.assertTrue(callable(ee.Algorithms.Foo)) + self.assertTrue(callable(ee.Algorithms.Foo.bar)) + self.assertTrue('Quux' not in ee.Algorithms) + self.assertEquals(ee.call('Foo.bar'), ee.Algorithms.Foo.bar()) + self.assertNotEquals(ee.Algorithms.Foo.bar(), ee.Algorithms.last()) + + def testDatePromtion(self): + # Make a feature, put a time in it, and get it out as a date. + self.InitializeApi() + point = ee.Geometry.Point(1, 2) + feature = ee.Feature(point, {'x': 1, 'y': 2}) + date_range = ee.call('DateRange', feature.get('x'), feature.get('y')) + + # Check that the start and end args are wrapped in a call to Date. 
+ self.assertEquals(date_range.args['start'].func._signature['name'], 'Date') + self.assertEquals(date_range.args['end'].func._signature['name'], 'Date') + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/element_test.py b/ee/tests/element_test.py new file mode 100644 index 0000000..c45a0dd --- /dev/null +++ b/ee/tests/element_test.py @@ -0,0 +1,52 @@ +#!/usr/bin/env python +"""Test for the ee.element module.""" + + + +import six + +import unittest +import ee +from ee import apitestcase + + +class ElementTestCase(apitestcase.ApiTestCase): + + def testSet(self): + """Verifies Element.set() keyword argument interpretation.""" + image = ee.Image(1) + + # Constant dictionary. + def AssertProperties(expected, image): + properties = {} + while image.func == ee.ApiFunction.lookup('Element.set'): + key = image.args['key'] + if not isinstance(key, six.string_types): + key = key.encode() + properties[key] = image.args['value'] + image = image.args['object'] + self.assertEquals(ee.Image(1), image) + self.assertEquals(expected, properties) + + AssertProperties({'foo': 'bar'}, image.set({'foo': 'bar'})) + AssertProperties({'foo': 'bar'}, image.set({'properties': {'foo': 'bar'}})) + AssertProperties({'properties': 5}, image.set({'properties': 5})) + AssertProperties({'properties': {'foo': 'bar'}, 'baz': 'quux'}, + image.set({'properties': {'foo': 'bar'}, 'baz': 'quux'})) + AssertProperties({'foo': 'bar', 'baz': 'quux'}, + image.set('foo', 'bar', 'baz', 'quux')) + + # Computed dictionary. 
+ computed_arg = ee.ComputedObject(None, None, 'foo') + + def CheckMultiProperties(result): + self.assertEquals(ee.ApiFunction.lookup('Element.setMulti'), result.func) + self.assertEquals( + {'object': image, 'properties': ee.Dictionary(computed_arg)}, + result.args) + CheckMultiProperties(image.set(computed_arg)) + CheckMultiProperties(image.set({'properties': computed_arg})) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/feature_test.py b/ee/tests/feature_test.py new file mode 100644 index 0000000..6abd7d8 --- /dev/null +++ b/ee/tests/feature_test.py @@ -0,0 +1,68 @@ +#!/usr/bin/env python +"""Test for the ee.feature module.""" + + + +import unittest + +import ee +from ee import apitestcase + + +class FeatureTest(apitestcase.ApiTestCase): + + def testConstructors(self): + """Verifies that constructors understand valid parameters.""" + point = ee.Geometry.Point(1, 2) + from_geometry = ee.Feature(point) + self.assertEquals(ee.ApiFunction('Feature'), from_geometry.func) + self.assertEquals({'geometry': point, 'metadata': None}, from_geometry.args) + + from_null_geometry = ee.Feature(None, {'x': 2}) + self.assertEquals(ee.ApiFunction('Feature'), from_null_geometry.func) + self.assertEquals({'geometry': None, 'metadata': {'x': 2}}, + from_null_geometry.args) + + computed_geometry = ee.Geometry(ee.ComputedObject(ee.Function(), {'a': 1})) + computed_properties = ee.ComputedObject(ee.Function(), {'b': 2}) + from_computed_one = ee.Feature(computed_geometry) + from_computed_both = ee.Feature(computed_geometry, computed_properties) + self.assertEquals(ee.ApiFunction('Feature'), from_computed_one.func) + self.assertEquals({'geometry': computed_geometry, + 'metadata': None}, + from_computed_one.args) + self.assertEquals(ee.ApiFunction('Feature'), from_computed_both.func) + self.assertEquals({'geometry': computed_geometry, + 'metadata': computed_properties}, + from_computed_both.args) + + from_variable = ee.Feature(ee.CustomFunction.variable(None, 
'foo')) + self.assertTrue(isinstance(from_variable, ee.Feature)) + self.assertEquals({'type': 'ArgumentRef', 'value': 'foo'}, + from_variable.encode(None)) + + from_geo_json_feature = ee.Feature({ + 'type': 'Feature', + 'id': 'bar', + 'geometry': point.toGeoJSON(), + 'properties': {'foo': 42} + }) + self.assertEquals(ee.ApiFunction('Feature'), from_geo_json_feature.func) + self.assertEquals(point, from_geo_json_feature.args['geometry']) + self.assertEquals({'foo': 42, 'system:index': 'bar'}, + from_geo_json_feature.args['metadata']) + + def testGetMap(self): + """Verifies that getMap() uses Collection.draw to rasterize Features.""" + feature = ee.Feature(None) + mapid = feature.getMapId({'color': 'ABCDEF'}) + manual = ee.ApiFunction.apply_('Collection.draw', { + 'collection': ee.FeatureCollection([feature]), + 'color': 'ABCDEF'}) + + self.assertEquals('fakeMapId', mapid['mapid']) + self.assertEquals(manual.serialize(), mapid['image'].serialize()) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/featurecollection_test.py b/ee/tests/featurecollection_test.py new file mode 100644 index 0000000..e792ab0 --- /dev/null +++ b/ee/tests/featurecollection_test.py @@ -0,0 +1,100 @@ +#!/usr/bin/env python +"""Test for the ee.featurecollection module.""" + + + +import unittest + +import ee +from ee import apitestcase + + +class FeatureCollectionTestCase(apitestcase.ApiTestCase): + + def testConstructors(self): + """Verifies that constructors understand valid parameters.""" + from_id = ee.FeatureCollection('abcd') + self.assertEquals(ee.ApiFunction.lookup('Collection.loadTable'), + from_id.func) + self.assertEquals({'tableId': 'abcd'}, from_id.args) + + from_id_and_geom_column = ee.FeatureCollection('abcd', 'xyz') + self.assertEquals(ee.ApiFunction.lookup('Collection.loadTable'), + from_id_and_geom_column.func) + self.assertEquals({'tableId': 'abcd', 'geometryColumn': 'xyz'}, + from_id_and_geom_column.args) + + from_numeric_id = 
ee.FeatureCollection(123456) + self.assertEquals(ee.ApiFunction.lookup('Collection.loadTable'), + from_numeric_id.func) + self.assertEquals({'tableId': 123456}, from_numeric_id.args) + + from_numeric_id_and_geom_column = ee.FeatureCollection(123456, 'xyz') + self.assertEquals(ee.ApiFunction('Collection.loadTable'), + from_numeric_id_and_geom_column.func) + self.assertEquals({'tableId': 123456, 'geometryColumn': 'xyz'}, + from_numeric_id_and_geom_column.args) + + geometry = ee.Geometry.Point(1, 2) + feature = ee.Feature(geometry) + from_geometries = ee.FeatureCollection([geometry]) + from_single_geometry = ee.FeatureCollection(geometry) + from_features = ee.FeatureCollection([feature]) + from_single_feature = ee.FeatureCollection(feature) + self.assertEquals(from_geometries, from_single_geometry) + self.assertEquals(from_geometries, from_features) + self.assertEquals(from_geometries, from_single_feature) + self.assertEquals(ee.ApiFunction.lookup('Collection'), from_geometries.func) + self.assertEquals({'features': [feature]}, from_geometries.args) + + # Test a computed list object. 
+ l = ee.List([feature]).slice(0) + from_list = ee.FeatureCollection(l) + self.assertEquals({'features': l}, from_list.args) + + from_computed_object = ee.FeatureCollection( + ee.ComputedObject(None, {'x': 'y'})) + self.assertEquals({'x': 'y'}, from_computed_object.args) + + def testGetMapId(self): + """Verifies that getMap() uses Collection.draw to draw.""" + collection = ee.FeatureCollection(5) + mapid = collection.getMapId({'color': 'ABCDEF'}) + manual = ee.ApiFunction.call_('Collection.draw', collection, 'ABCDEF') + + self.assertEquals('fakeMapId', mapid['mapid']) + self.assertEquals(manual, mapid['image']) + + def testDownload(self): + """Verifies that Download ID and URL generation.""" + csv_url = ee.FeatureCollection(7).getDownloadURL('csv') + + self.assertEquals('/table', self.last_table_call['url']) + self.assertEquals( + { + 'table': ee.FeatureCollection(7).serialize(), + 'json_format': 'v2', + 'format': 'CSV' + }, + self.last_table_call['data']) + self.assertEquals('/api/table?docid=5&token=6', csv_url) + + everything_url = ee.FeatureCollection(8).getDownloadURL( + 'json', 'bar, baz', 'qux') + self.assertEquals( + { + 'table': ee.FeatureCollection(8).serialize(), + 'json_format': 'v2', + 'format': 'JSON', + 'selectors': 'bar, baz', + 'filename': 'qux' + }, + self.last_table_call['data']) + self.assertEquals('/api/table?docid=5&token=6', everything_url) + + self.assertEquals(ee.FeatureCollection(7).getDownloadUrl('csv'), + ee.FeatureCollection(7).getDownloadURL('csv')) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/filter_test.py b/ee/tests/filter_test.py new file mode 100644 index 0000000..9351d80 --- /dev/null +++ b/ee/tests/filter_test.py @@ -0,0 +1,126 @@ +#!/usr/bin/env python +"""Test for the ee.filter module.""" + + + +import datetime + +import unittest + +import ee +from ee import apitestcase + + +class FilterTest(apitestcase.ApiTestCase): + + def testConstructors(self): + """Verifies that constructors understand valid 
parameters.""" + from_static_method = ee.Filter.gt('foo', 1) + from_computed_object = ee.Filter( + ee.ApiFunction.call_('Filter.greaterThan', 'foo', 1)) + self.assertEquals(from_static_method, from_computed_object) + + copy = ee.Filter(from_static_method) + self.assertEquals(from_static_method, copy) + + def testMetadata(self): + """Verifies that the metadata_() method works.""" + self.assertEquals( + ee.ApiFunction.call_('Filter.equals', 'x', 1), + ee.Filter.metadata_('x', 'equals', 1)) + self.assertEquals( + ee.Filter.metadata_('x', 'equals', 1), + ee.Filter.eq('x', 1)) + self.assertEquals( + ee.Filter.metadata_('x', 'EQUALS', 1), + ee.Filter.eq('x', 1)) + self.assertEquals( + ee.Filter.metadata_('x', 'not_equals', 1), + ee.Filter.neq('x', 1)) + self.assertEquals( + ee.Filter.metadata_('x', 'less_than', 1), + ee.Filter.lt('x', 1)) + self.assertEquals( + ee.Filter.metadata_('x', 'not_greater_than', 1), + ee.Filter.lte('x', 1)) + self.assertEquals( + ee.Filter.metadata_('x', 'greater_than', 1), + ee.Filter.gt('x', 1)) + self.assertEquals( + ee.Filter.metadata_('x', 'not_less_than', 1), + ee.Filter.gte('x', 1)) + + def testLogicalCombinations(self): + """Verifies that the and() and or() methods work.""" + f1 = ee.Filter.eq('x', 1) + f2 = ee.Filter.eq('x', 2) + + or_filter = ee.Filter.Or(f1, f2) + self.assertEquals(ee.ApiFunction.call_('Filter.or', (f1, f2)), or_filter) + + and_filter = ee.Filter.And(f1, f2) + self.assertEquals(ee.ApiFunction.call_('Filter.and', (f1, f2)), and_filter) + + self.assertEquals( + ee.ApiFunction.call_('Filter.or', (or_filter, and_filter)), + ee.Filter.Or(or_filter, and_filter)) + + def testDate(self): + """Verifies that date filters work.""" + d1 = datetime.datetime.strptime('1/1/2000', '%m/%d/%Y') + d2 = datetime.datetime.strptime('1/1/2001', '%m/%d/%Y') + instant_range = ee.ApiFunction.call_('DateRange', d1, None) + long_range = ee.ApiFunction.call_('DateRange', d1, d2) + + instant_filter = ee.Filter.date(d1) + 
self.assertEquals(ee.ApiFunction.lookup('Filter.dateRangeContains'), + instant_filter.func) + self.assertEquals({'leftValue': instant_range, + 'rightField': ee.String('system:time_start')}, + instant_filter.args) + + long_filter = ee.Filter.date(d1, d2) + self.assertEquals(ee.ApiFunction.lookup('Filter.dateRangeContains'), + long_filter.func) + self.assertEquals({'leftValue': long_range, + 'rightField': ee.String('system:time_start')}, + long_filter.args) + + def testBounds(self): + """Verifies that geometry intersection filters work.""" + polygon = ee.Geometry.Polygon(1, 2, 3, 4, 5, 6) + self.assertEquals( + ee.ApiFunction.call_( + 'Filter.intersects', '.all', + ee.ApiFunction.call_('Feature', polygon)), + ee.Filter.geometry(polygon)) + + # Collection-to-geometry promotion. + collection = ee.FeatureCollection('foo') + feature = ee.ApiFunction.call_( + 'Feature', ee.ApiFunction.call_('Collection.geometry', collection)) + self.assertEquals( + ee.ApiFunction.call_('Filter.intersects', '.all', feature), + ee.Filter.geometry(collection)) + + def testInList(self): + """Verifies that list membership filters work.""" + self.assertEquals( + ee.Filter.listContains(None, None, 'foo', [1, 2]), + ee.Filter.inList('foo', [1, 2])) + + def testInternals(self): + """Test eq(), ne() and hash().""" + a = ee.Filter.eq('x', 1) + b = ee.Filter.eq('x', 2) + c = ee.Filter.eq('x', 1) + + self.assertEquals(a, a) + self.assertNotEquals(a, b) + self.assertEquals(a, c) + self.assertNotEquals(b, c) + self.assertNotEquals(hash(a), hash(b)) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/function_test.py b/ee/tests/function_test.py new file mode 100644 index 0000000..c1fc31b --- /dev/null +++ b/ee/tests/function_test.py @@ -0,0 +1,99 @@ +#!/usr/bin/env python +"""Tests for the ee.function module.""" + + + +import unittest + +import ee + +# A function to experiment on. 
+TEST_FUNC = ee.Function() +TEST_FUNC.getSignature = lambda: { # pylint: disable-msg=g-long-lambda + 'description': 'Method description.', + 'returns': 'Image', + 'args': [ + { + 'type': 'Image', + 'name': 'a', + 'description': 'Arg A doc.'}, + { + 'type': 'Image', + 'name': 'b', + 'description': 'Arg B doc.', + 'optional': True + } + ] +} + +EXPECTED_DOC = """Method description. + +Args: + a: Arg A doc. + b: Arg B doc.""" + + +class FunctionTest(unittest.TestCase): + + def testNameArgs(self): + """Verifies that Functions can convert positional to named arguments.""" + self.assertEquals({}, TEST_FUNC.nameArgs([])) + self.assertEquals({'a': 42}, TEST_FUNC.nameArgs([42])) + self.assertEquals({'a': 42, 'b': 13}, TEST_FUNC.nameArgs([42, 13])) + self.assertEquals({'a': 3, 'b': 5}, TEST_FUNC.nameArgs([3], {'b': 5})) + + self.assertRaisesWithRegexpMatch('Too many', TEST_FUNC.nameArgs, [1, 2, 3]) + + def testPromoteArgs(self): + """Verifies that Functions can promote and verify their arguments.""" + old_promoter = ee.Function._promoter + ee.Function._registerPromoter(lambda obj, type_name: [type_name, obj]) + + # Regular call. + self.assertEquals({'a': ['Image', 42], 'b': ['Image', 13]}, + TEST_FUNC.promoteArgs({'a': 42, 'b': 13})) + + # Allow missing optional argument. + self.assertEquals({'a': ['Image', 42]}, + TEST_FUNC.promoteArgs({'a': 42})) + + # Disallow unknown arguments. + self.assertRaisesWithRegexpMatch( + 'Required argument', TEST_FUNC.promoteArgs, {}) + + # Disallow unknown arguments. + self.assertRaisesWithRegexpMatch( + 'Unrecognized', TEST_FUNC.promoteArgs, {'a': 42, 'c': 13}) + + # Clean up. 
+ ee.Function._registerPromoter(old_promoter) + + def testCall(self): + """Verifies the full function invocation flow.""" + old_promoter = ee.Function._promoter + ee.Function._registerPromoter(lambda obj, type_name: [type_name, obj]) + + return_type, return_value = TEST_FUNC.call(42, 13) + self.assertEquals('Image', return_type) + self.assertEquals(TEST_FUNC, return_value.func) + self.assertEquals({'a': ['Image', 42], 'b': ['Image', 13]}, + return_value.args) + + # Clean up. + ee.Function._registerPromoter(old_promoter) + + def testToString(self): + """Verifies function docstring generation.""" + self.assertEquals(EXPECTED_DOC, str(TEST_FUNC)) + + def assertRaisesWithRegexpMatch(self, msg, func, *args): + try: + func(*args) + except ee.EEException as e: + self.assertTrue(msg in str(e)) + else: + self.fail('Expected an exception.') + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/geometry_test.py b/ee/tests/geometry_test.py new file mode 100644 index 0000000..f552b80 --- /dev/null +++ b/ee/tests/geometry_test.py @@ -0,0 +1,326 @@ +#!/usr/bin/env python +"""Test for the ee.geometry module.""" + + + +import unittest + +import ee +from ee import apitestcase + + +class GeometryTest(apitestcase.ApiTestCase): + + def testValid_Point(self): + """Verifies Point constructor behavior with valid arguments.""" + self.assertValid(1, ee.Geometry.Point, [1, 2]) + self.assertValid(1, ee.Geometry.Point, 1, 2) + + def testValid_MultiPoint(self): + """Verifies MultiPoint constructor behavior with valid arguments.""" + self.assertValid(2, ee.Geometry.MultiPoint, 1, 2, 3, 4, 5, 6) + self.assertValid(1, ee.Geometry.MultiPoint) + + def testValid_LineString(self): + """Verifies LineString constructor behavior with valid arguments.""" + self.assertValid(2, ee.Geometry.LineString, 1, 2, 3, 4, 5, 6) + + def testValid_LinearRing(self): + """Verifies LinearRing constructor behavior with valid arguments.""" + self.assertValid(2, ee.Geometry.LinearRing, 1, 2, 3, 4, 5, 6) + 
+ def testValid_MultiLineString(self): + """Verifies MultiLineString constructor behavior with valid arguments.""" + self.assertValid(3, ee.Geometry.MultiLineString, 1, 2, 3, 4, 5, 6) + self.assertValid(1, ee.Geometry.MultiLineString) + + def testValid_Polygon(self): + """Verifies Polygon constructor behavior with valid arguments.""" + self.assertValid(3, ee.Geometry.Polygon, 1, 2, 3, 4, 5, 6) + + def testValid_Rectangle(self): + """Verifies Rectangle constructor behavior with valid arguments.""" + self.assertValid(3, ee.Geometry.Rectangle, 1, 2, 5, 6) + + def testValid_MultiPolygon(self): + """Verifies MultiPolygon constructor behavior with valid arguments.""" + self.assertValid(4, ee.Geometry.MultiPolygon, 1, 2, 3, 4, 5, 6) + self.assertValid(1, ee.Geometry.MultiPolygon) + + def testInvalid_MultiPoint(self): + """Verifies MultiPoint constructor behavior with invalid arguments.""" + f = ee.Geometry.MultiPoint + self.assertInvalid( + f, 'Invalid number of coordinates: 5', 1, 2, 3, 4, 5) + self.assertInvalid(f, 'Invalid number of coordinates: 5', [1, 2, 3, 4, 5]) + self.assertInvalid(f, 'Invalid geometry', [[1, 2], [3, 4], 5]) + # Too many nesting levels. + self.assertInvalid(f, 'Invalid geometry', [[[1, 2], [3, 4]]]) + + def testInvalid_LineString(self): + """Verifies LineString constructor behavior with invalid arguments.""" + f = ee.Geometry.LineString + self.assertInvalid( + f, 'Invalid number of coordinates: 5', 1, 2, 3, 4, 5) + self.assertInvalid(f, 'Invalid number of coordinates: 5', [1, 2, 3, 4, 5]) + self.assertInvalid(f, 'Invalid geometry', [[1, 2], [3, 4], 5]) + # Too many nesting levels. 
+ self.assertInvalid(f, 'Invalid geometry', [[[1, 2], [3, 4]]]) + + def testInvalid_LinearRing(self): + """Verifies LinearRing constructor behavior with invalid arguments.""" + f = ee.Geometry.LinearRing + self.assertInvalid( + f, 'Invalid number of coordinates: 5', 1, 2, 3, 4, 5) + self.assertInvalid(f, 'Invalid number of coordinates: 5', [1, 2, 3, 4, 5]) + self.assertInvalid(f, 'Invalid geometry', [[1, 2], [3, 4], 5]) + # Too many nesting levels. + self.assertInvalid(f, 'Invalid geometry', [[[1, 2], [3, 4]]]) + + def testInvalid_MultiLineString(self): + """Verifies MultiLineString constructor behavior with invalid arguments.""" + f = ee.Geometry.MultiLineString + self.assertInvalid( + f, 'Invalid number of coordinates: 5', 1, 2, 3, 4, 5) + self.assertInvalid(f, 'Invalid number of coordinates: 5', [1, 2, 3, 4, 5]) + self.assertInvalid(f, 'Invalid geometry', [[1, 2], [3, 4], 5]) + # Too many nesting levels. + self.assertInvalid(f, 'Invalid geometry', [[[[1, 2], [3, 4]]]]) + # Bad nesting + self.assertInvalid(f, 'Invalid geometry', [[[1, 2], [3, 4]], [1, 2]]) + + def testInvalid_Polygon(self): + """Verifies Polygon constructor behavior with invalid arguments.""" + f = ee.Geometry.Polygon + self.assertInvalid( + f, 'Invalid number of coordinates: 5', 1, 2, 3, 4, 5) + self.assertInvalid(f, 'Invalid number of coordinates: 5', [1, 2, 3, 4, 5]) + self.assertInvalid(f, 'Invalid geometry', [[1, 2], [3, 4], 5]) + # Too many nesting levels. 
+ self.assertInvalid(f, 'Invalid geometry', [[[[1, 2], [3, 4], [5, 6]]]]) + # Bad nesting + self.assertInvalid(f, 'Invalid geometry', [[[1, 2], [3, 4]], [1, 2]]) + + def testInvalid_MultiPolygon(self): + """Verifies MultiPolygon constructor behavior with invalid arguments.""" + f = ee.Geometry.MultiPolygon + self.assertInvalid(f, 'Invalid number of coordinates: 5', 1, 2, 3, 4, 5) + self.assertInvalid(f, 'Invalid number of coordinates: 5', [1, 2, 3, 4, 5]) + self.assertInvalid(f, 'Invalid geometry', [[1, 2], [3, 4], 5]) + # Too many nesting levels. + self.assertInvalid(f, 'Invalid geometry', [[[[[1, 2], [3, 4], [5, 6]]]]]) + # Bad nesting + self.assertInvalid(f, 'Invalid geometry', [[[[1, 2], [3, 4]], [1, 2]]]) + + def testEvenOddPolygon(self): + poly1 = ee.Geometry.Polygon([0, 0, 0, 5, 5, 0]) + self.assertTrue(poly1.toGeoJSON()['evenOdd']) + poly2 = ee.Geometry.Polygon([0, 0, 0, 5, 5, 0], None, None, None, False) + self.assertFalse(poly2.toGeoJSON()['evenOdd']) + + def testArrayConstructors(self): + """Verifies that constructors that take arrays fix nesting.""" + get_coordinates_count = lambda g: len(g.toGeoJSON()['coordinates']) + + point = ee.Geometry.Point([1, 2]) + self.assertEquals(2, get_coordinates_count(point)) + + multipoint = ee.Geometry.MultiPoint([[1, 2], [3, 4], [5, 6]]) + self.assertEquals(3, get_coordinates_count(multipoint)) + + line = ee.Geometry.LineString([[1, 2], [3, 4], [5, 6]]) + self.assertEquals(3, get_coordinates_count(line)) + + ring = ee.Geometry.LinearRing([[1, 2], [3, 4], [5, 6]]) + self.assertEquals(3, get_coordinates_count(ring)) + + multiline = ee.Geometry.MultiLineString( + [[[1, 2], [3, 4]], + [[5, 6], [7, 8]]]) + self.assertEquals(2, get_coordinates_count(multiline)) + + polygon = ee.Geometry.Polygon([[[1, 2], [3, 4], [5, 6]]]) + self.assertEquals(1, get_coordinates_count(polygon)) + + mpolygon = ee.Geometry.MultiPolygon( + [[[[1, 2], [3, 4], [5, 6]]], + [[[1, 2], [3, 4], [5, 6]]]]) + self.assertEquals(2, 
get_coordinates_count(mpolygon)) + + def testGeodesicFlag(self): + """Verifies that JSON parsing and generation preserves the geodesic flag.""" + geodesic = ee.Geometry({ + 'type': 'LineString', + 'coordinates': [[1, 2], [3, 4]], + 'geodesic': True + }) + projected = ee.Geometry({ + 'type': 'LineString', + 'coordinates': [[1, 2], [3, 4]], + 'geodesic': False + }) + self.assertTrue(geodesic.toGeoJSON()['geodesic']) + self.assertFalse(projected.toGeoJSON()['geodesic']) + + def testConstructor(self): + """Check the behavior of the Geometry constructor. + + There are 5 options: + 1) A geoJSON object. + 2) A not-computed geometry. + 3) A not-computed geometry with overrides. + 4) A computed geometry. + 5) something to cast to geometry. + """ + line = ee.Geometry.LineString(1, 2, 3, 4) + + # GeoJSON. + from_json = ee.Geometry(line.toGeoJSON()) + self.assertEquals(from_json.func, None) + self.assertEquals(from_json._type, 'LineString') + self.assertEquals(from_json._coordinates, [[1, 2], [3, 4]]) + + # GeoJSON with a CRS specified. + json_with_crs = line.toGeoJSON() + json_with_crs['crs'] = { + 'type': 'name', + 'properties': { + 'name': 'SR-ORG:6974' + } + } + from_json_with_crs = ee.Geometry(json_with_crs) + self.assertEquals(from_json_with_crs.func, None) + self.assertEquals(from_json_with_crs._type, 'LineString') + self.assertEquals(from_json_with_crs._proj, 'SR-ORG:6974') + + # A not-computed geometry. + self.assertEquals(ee.Geometry(line), line) + + # A not-computed geometry with an override. + with_override = ee.Geometry(line, 'SR-ORG:6974') + self.assertEquals(with_override._proj, 'SR-ORG:6974') + + # A computed geometry. + self.assertEquals(ee.Geometry(line.bounds()), line.bounds()) + + # Something to cast to a geometry. 
+ computed = ee.ComputedObject(ee.Function(), {'a': 1}) + geom = ee.Geometry(computed) + self.assertEquals(computed.func, geom.func) + self.assertEquals(computed.args, geom.args) + + def testComputedGeometries(self): + """Verifies the computed object behavior of the Geometry constructor.""" + line = ee.Geometry.LineString(1, 2, 3, 4) + bounds = line.bounds() + + self.assertTrue(isinstance(bounds, ee.Geometry)) + self.assertEquals( + ee.ApiFunction.lookup('Geometry.bounds'), bounds.func) + self.assertEquals(line, bounds.args['geometry']) + self.assertTrue(hasattr(bounds, 'bounds')) + + def testComputedCoordinate(self): + """Verifies that a computed coordinate produces a computed geometry.""" + coords = [1, ee.Number(1).add(1)] + p = ee.Geometry.Point(coords) + + self.assertTrue(isinstance(p, ee.Geometry)) + self.assertEquals( + ee.ApiFunction.lookup('GeometryConstructors.Point'), p.func) + self.assertEquals({'coordinates': ee.List(coords)}, p.args) + + def testComputedList(self): + """Verifies that a computed coordinate produces a computed geometry.""" + lst = ee.List([1, 2, 3, 4]).slice(0, 2) + p = ee.Geometry.Point(lst) + + self.assertTrue(isinstance(p, ee.Geometry)) + self.assertEquals( + ee.ApiFunction.lookup('GeometryConstructors.Point'), p.func) + self.assertEquals({'coordinates': lst}, p.args) + + def testComputedProjection(self): + """Verifies that a geometry with a projection can be constructed.""" + p = ee.Geometry.Point([1, 2], 'epsg:4326') + + self.assertTrue(isinstance(p, ee.Geometry)) + self.assertEquals( + ee.ApiFunction.lookup('GeometryConstructors.Point'), p.func) + expected_args = { + 'coordinates': ee.List([1, 2]), + 'crs': ee.ApiFunction.lookup('Projection').call('epsg:4326') + } + self.assertEquals(expected_args, p.args) + + def testGeometryInputs(self): + """Verifies that a geometry with geometry inputs can be constructed.""" + p1 = ee.Geometry.Point([1, 2]) + p2 = ee.Geometry.Point([3, 4]) + line = ee.Geometry.LineString([p1, p2]) + + 
self.assertTrue(isinstance(line, ee.Geometry)) + self.assertEquals( + ee.ApiFunction.lookup('GeometryConstructors.LineString'), line.func) + self.assertEquals({'coordinates': ee.List([p1, p2])}, line.args) + + def testOldPointKeywordArgs(self): + """Verifies that Points still allow keyword lon/lat args.""" + self.assertEquals(ee.Geometry.Point(1, 2), ee.Geometry.Point(lon=1, lat=2)) + self.assertEquals(ee.Geometry.Point(1, 2), ee.Geometry.Point(1, lat=2)) + + def testOldRectangleKeywordArgs(self): + """Verifies that Rectangles still allow keyword xlo/ylo/xhi/yhi args.""" + self.assertEquals(ee.Geometry.Rectangle(1, 2, 3, 4), + ee.Geometry.Rectangle(xlo=1, ylo=2, xhi=3, yhi=4)) + self.assertEquals(ee.Geometry.Rectangle(1, 2, 3, 4), + ee.Geometry.Rectangle(1, 2, xhi=3, yhi=4)) + + def assertValid(self, nesting, ctor, *coords): + """Checks that geometry is valid and has the expected nesting level. + + Args: + nesting: The expected coordinate nesting level. + ctor: The geometry constructor function, e.g. ee.Geometry.MultiPoint. + *coords: The coordinates of the geometry. + """ + # The constructor already does a validity check. + geometry = ctor(*coords) + self.assertTrue(isinstance(geometry, ee.Geometry)) + self.assertTrue(isinstance(geometry.toGeoJSON(), dict)) + final_coords = geometry.toGeoJSON()['coordinates'] + self.assertEquals(nesting, ee.Geometry._isValidCoordinates(final_coords)) + + def assertInvalid(self, ctor, msg, *coords): + """Verifies that geometry is invalid. + + Calls the given constructor with whatever arguments have been passed, + and verifies that the given error message is thrown. + + Args: + ctor: The geometry constructor function, e.g. ee.Geometry.MultiPoint. + msg: The expected error message in the thrown exception. + *coords: The coordinates of the geometry. 
+ """ + try: + ctor(*coords) + except ee.EEException as e: + self.assertTrue(msg in str(e)) + else: + self.fail('Expected an exception.') + + def testInternals(self): + """Test eq(), ne() and hash().""" + a = ee.Geometry.Point(1, 2) + b = ee.Geometry.Point(2, 1) + c = ee.Geometry.Point(1, 2) + + self.assertEquals(a, a) + self.assertNotEquals(a, b) + self.assertEquals(a, c) + self.assertNotEquals(b, c) + self.assertNotEquals(hash(a), hash(b)) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/image_test.py b/ee/tests/image_test.py new file mode 100644 index 0000000..9617956 --- /dev/null +++ b/ee/tests/image_test.py @@ -0,0 +1,166 @@ +#!/usr/bin/env python +"""Test for the ee.image module.""" + + + +import json + +import unittest + +import ee +from ee import apitestcase + + +class ImageTestCase(apitestcase.ApiTestCase): + + def testConstructors(self): + """Verifies that constructors understand valid parameters.""" + from_constant = ee.Image(1) + self.assertEquals(ee.ApiFunction.lookup('Image.constant'), + from_constant.func) + self.assertEquals({'value': 1}, from_constant.args) + + array_constant = ee.Array([1, 2]) + from_array_constant = ee.Image(array_constant) + self.assertEquals(ee.ApiFunction.lookup('Image.constant'), + from_array_constant.func) + self.assertEquals({'value': array_constant}, from_array_constant.args) + + from_id = ee.Image('abcd') + self.assertEquals(ee.ApiFunction.lookup('Image.load'), from_id.func) + self.assertEquals({'id': 'abcd'}, from_id.args) + + from_array = ee.Image([1, 2]) + self.assertEquals(ee.ApiFunction.lookup('Image.addBands'), from_array.func) + self.assertEquals({'dstImg': ee.Image(1), 'srcImg': ee.Image(2)}, + from_array.args) + + from_computed_object = ee.Image(ee.ComputedObject(None, {'x': 'y'})) + self.assertEquals({'x': 'y'}, from_computed_object.args) + + original = ee.Image(1) + from_other_image = ee.Image(original) + self.assertEquals(from_other_image, original) + + from_nothing = ee.Image() + 
self.assertEquals(ee.ApiFunction.lookup('Image.mask'), from_nothing.func) + self.assertEquals({'image': ee.Image(0), 'mask': ee.Image(0)}, + from_nothing.args) + + from_id_and_version = ee.Image('abcd', 123) + self.assertEquals(ee.ApiFunction.lookup('Image.load'), + from_id_and_version.func) + self.assertEquals({'id': 'abcd', 'version': 123}, + from_id_and_version.args) + + from_variable = ee.Image(ee.CustomFunction.variable(None, 'foo')) + self.assertTrue(isinstance(from_variable, ee.Image)) + self.assertEquals({'type': 'ArgumentRef', 'value': 'foo'}, + from_variable.encode(None)) + + def testImageSignatures(self): + """Verifies that the API functions are added to ee.Image.""" + self.assertTrue(hasattr(ee.Image(1), 'addBands')) + + def testImperativeFunctions(self): + """Verifies that imperative functions return ready values.""" + image = ee.Image(1) + self.assertEquals({'value': 'fakeValue'}, image.getInfo()) + self.assertEquals({'mapid': 'fakeMapId', 'image': image}, image.getMapId()) + + def testCombine(self): + """Verifies the behavior of ee.Image.combine_().""" + image1 = ee.Image([1, 2]) + image2 = ee.Image([3, 4]) + combined = ee.Image.combine_([image1, image2], ['a', 'b', 'c', 'd']) + + self.assertEquals(ee.ApiFunction.lookup('Image.select'), combined.func) + self.assertEquals(ee.List(['.*']), combined.args['bandSelectors']) + self.assertEquals(ee.List(['a', 'b', 'c', 'd']), combined.args['newNames']) + self.assertEquals(ee.ApiFunction.lookup('Image.addBands'), + combined.args['input'].func) + self.assertEquals({'dstImg': image1, 'srcImg': image2}, + combined.args['input'].args) + + def testSelect(self): + """Verifies regression in the behavior of empty ee.Image.select().""" + image = ee.Image([1, 2]).select() + self.assertEquals(ee.ApiFunction.lookup('Image.select'), image.func) + self.assertEquals(ee.List([]), image.args['bandSelectors']) + + def testRename(self): + """Verifies image.rename varargs handling.""" + image = ee.Image([1, 2]).rename('a', 'b') 
+ self.assertEquals(ee.ApiFunction.lookup('Image.rename'), image.func) + self.assertEquals(ee.List(['a', 'b']), image.args['names']) + + image = ee.Image([1, 2]).rename(['a', 'b']) + self.assertEquals(ee.ApiFunction.lookup('Image.rename'), image.func) + self.assertEquals(ee.List(['a', 'b']), image.args['names']) + + def testExpression(self): + """Verifies the behavior of ee.Image.expression().""" + image = ee.Image([1, 2]).expression('a', {'b': 'c'}) + expression_func = image.func + + # The call is buried in a one-time override of .encode so we have to call + # it rather than comparing the object structure. + def dummy_encoder(x): + if isinstance(x, ee.encodable.Encodable): + return x.encode(dummy_encoder) + else: + return x + + self.assertEquals( + { + 'type': 'Invocation', + 'functionName': 'Image.parseExpression', + 'arguments': { + 'expression': 'a', + 'argName': 'DEFAULT_EXPRESSION_IMAGE', + 'vars': ['DEFAULT_EXPRESSION_IMAGE', 'b'] + } + }, + dummy_encoder(expression_func)) + + def testDownload(self): + """Verifies Download ID and URL generation.""" + url = ee.Image(1).getDownloadURL() + + self.assertEquals('/download', self.last_download_call['url']) + self.assertEquals( + { + 'image': ee.Image(1).serialize(), + 'json_format': 'v2' + }, + self.last_download_call['data']) + self.assertEquals('/api/download?docid=1&token=2', url) + + def testThumb(self): + """Verifies Thumbnail ID and URL generation.""" + geo_json = { + 'type': 'Polygon', + 'coordinates': [[ + [-112.587890625, 44.94924926661151], + [-114.873046875, 39.48708498168749], + [-103.623046875, 41.82045509614031], + ]], + } + url = ee.Image(1).getThumbURL({ + 'size': [13, 42], + 'region': geo_json, + }) + + self.assertEquals('/thumb', self.last_thumb_call['url']) + self.assertEquals({ + 'image': ee.Image(1).serialize(), + 'json_format': 'v2', + 'size': '13x42', + 'getid': '1', + 'region': json.dumps(geo_json), + }, self.last_thumb_call['data']) + self.assertEquals('/api/thumb?thumbid=3&token=4', url) 
+ + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/imagecollection_test.py b/ee/tests/imagecollection_test.py new file mode 100644 index 0000000..3cd8067 --- /dev/null +++ b/ee/tests/imagecollection_test.py @@ -0,0 +1,60 @@ +#!/usr/bin/env python +"""Test for the ee.imagecollection module.""" + + + +import unittest + +import ee +from ee import apitestcase + + +class ImageCollectionTestCase(apitestcase.ApiTestCase): + + def testImageCollectionConstructors(self): + """Verifies that constructors understand valid parameters.""" + from_id = ee.ImageCollection('abcd') + self.assertEquals(ee.ApiFunction.lookup('ImageCollection.load'), + from_id.func) + self.assertEquals({'id': 'abcd'}, from_id.args) + + from_images = ee.ImageCollection([ee.Image(1), ee.Image(2)]) + self.assertEquals(ee.ApiFunction.lookup('ImageCollection.fromImages'), + from_images.func) + self.assertEquals({'images': [ee.Image(1), ee.Image(2)]}, from_images.args) + + self.assertEquals(ee.ImageCollection([ee.Image(1)]), + ee.ImageCollection(ee.Image(1))) + + original = ee.ImageCollection('foo') + from_other_image_collection = ee.ImageCollection(original) + self.assertEquals(from_other_image_collection, original) + + l = ee.List([ee.Image(1)]).slice(0) + from_list = ee.ImageCollection(l) + self.assertEquals({'images': l}, from_list.args) + + from_computed_object = ee.ImageCollection( + ee.ComputedObject(None, {'x': 'y'})) + self.assertEquals({'x': 'y'}, from_computed_object.args) + + def testImperativeFunctions(self): + """Verifies that imperative functions return ready values.""" + image_collection = ee.ImageCollection(ee.Image(1)) + self.assertEquals({'value': 'fakeValue'}, image_collection.getInfo()) + self.assertEquals('fakeMapId', image_collection.getMapId()['mapid']) + + def testFilter(self): + """Verifies that filtering an ImageCollection wraps the result.""" + collection = ee.ImageCollection(ee.Image(1)) + noop_filter = ee.Filter() + filtered = collection.filter(noop_filter) + 
self.assertTrue(isinstance(filtered, ee.ImageCollection)) + self.assertEquals(ee.ApiFunction.lookup('Collection.filter'), + filtered.func) + self.assertEquals({'collection': collection, 'filter': noop_filter}, + filtered.args) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/list_test.py b/ee/tests/list_test.py new file mode 100644 index 0000000..0b3c978 --- /dev/null +++ b/ee/tests/list_test.py @@ -0,0 +1,59 @@ +#!/usr/bin/env python +"""Test for the ee.lber module.""" + + + +import unittest + +import ee +from ee import apitestcase + + +class ListTest(apitestcase.ApiTestCase): + + def testList(self): + """Verifies basic behavior of ee.List.""" + l = ee.List([1, 2, 3]) + self.assertEquals([1, 2, 3], ee.Serializer(False)._encode(l)) + + computed = ee.List([1, 2, 3]).slice(0) # pylint: disable=no-member + self.assertTrue(isinstance(computed, ee.List)) + self.assertEquals(ee.ApiFunction.lookup('List.slice'), computed.func) + self.assertEquals({'list': ee.List([1, 2, 3]), 'start': ee.Number(0)}, + computed.args) + + def testMapping(self): + lst = ee.List(['foo', 'bar']) + body = lambda s: ee.String(s).cat('bar') + mapped = lst.map(body) + + self.assertTrue(isinstance(mapped, ee.List)) + self.assertEquals(ee.ApiFunction.lookup('List.map'), mapped.func) + self.assertEquals(lst, mapped.args['list']) + + # Need to do a serialized comparison for the function body because + # variables returned from CustomFunction.variable() do not implement + # __eq__. 
+ sig = { + 'returns': 'Object', + 'args': [{'name': '_MAPPING_VAR_0_0', 'type': 'Object'}] + } + expected_function = ee.CustomFunction(sig, body) + self.assertEquals(expected_function.serialize(), + mapped.args['baseAlgorithm'].serialize()) + + def testInternals(self): + """Test eq(), ne() and hash().""" + a = ee.List([1, 2]) + b = ee.List([2, 1]) + c = ee.List([1, 2]) + + self.assertTrue(a.__eq__(a)) + self.assertFalse(a.__eq__(b)) + self.assertTrue(a.__eq__(c)) + self.assertTrue(b.__ne__(c)) + self.assertNotEquals(a.__hash__(), b.__hash__()) + self.assertEquals(a.__hash__(), c.__hash__()) + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/number_test.py b/ee/tests/number_test.py new file mode 100644 index 0000000..0799cbf --- /dev/null +++ b/ee/tests/number_test.py @@ -0,0 +1,39 @@ +#!/usr/bin/env python +"""Test for the ee.number module.""" + + + +import unittest + +import ee +from ee import apitestcase + + +class NumberTest(apitestcase.ApiTestCase): + + def testNumber(self): + """Verifies basic behavior of ee.Number.""" + num = ee.Number(1) + self.assertEquals(1, num.encode()) + + computed = ee.Number(1).add(2) + self.assertTrue(isinstance(computed, ee.Number)) + self.assertEquals(ee.ApiFunction.lookup('Number.add'), computed.func) + self.assertEquals({'left': ee.Number(1), 'right': ee.Number(2)}, + computed.args) + + def testInternals(self): + """Test eq(), ne() and hash().""" + a = ee.Number(1) + b = ee.Number(2.1) + c = ee.Number(1) + + self.assertEquals(a, a) + self.assertNotEquals(a, b) + self.assertEquals(a, c) + self.assertNotEquals(b, c) + self.assertNotEquals(hash(a), hash(b)) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/oauth_test.py b/ee/tests/oauth_test.py new file mode 100644 index 0000000..833dea1 --- /dev/null +++ b/ee/tests/oauth_test.py @@ -0,0 +1,69 @@ +#!/usr/bin/env python +"""Test class for oauth.""" + + +import json +import sys +import mock + +# pylint: disable=g-import-not-at-top +try: + # 
Python 2.x + import urlparse +except ImportError: + # Python 3.x + import urllib.parse as urlparse + +import tempfile +import unittest + +import ee + + +class OAuthTest(unittest.TestCase): + + def setUp(self): + self.test_tmpdir = tempfile.mkdtemp() + + def testRequestToken(self): + + class MockResponse(object): + + def __init__(self, code): + self.code = code.decode() + + def read(self): + return ('{"refresh_token": "' + self.code + '456"}').encode() + + def mock_urlopen(unused_url, param): + return MockResponse(urlparse.parse_qs(param)[b'code'][0]) + + # Choose urlopen function to mock based on Python version + if sys.version_info[0] < 3: + urlopen_lib = 'urllib2.urlopen' + else: + urlopen_lib = 'urllib.request.urlopen' + + with mock.patch(urlopen_lib, new=mock_urlopen): + auth_code = '123' + refresh_token = ee.oauth.request_token(auth_code) + self.assertEqual('123456', refresh_token) + + def testWriteToken(self): + + def mock_credentials_path(): + return self.test_tmpdir+'/tempfile' + + oauth_pkg = 'ee.oauth' + with mock.patch(oauth_pkg+'.get_credentials_path', + new=mock_credentials_path): + refresh_token = '123' + ee.oauth.write_token(refresh_token) + + with open(mock_credentials_path()) as f: + token = json.load(f) + self.assertEquals({'refresh_token': '123'}, token) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/serializer_test.py b/ee/tests/serializer_test.py new file mode 100644 index 0000000..b91781c --- /dev/null +++ b/ee/tests/serializer_test.py @@ -0,0 +1,107 @@ +#!/usr/bin/env python +"""Tests for the ee.serializer module.""" + + + +import json + +import unittest + +import ee +from ee import apitestcase +from ee import serializer + + +class SerializerTest(apitestcase.ApiTestCase): + + def testSerialization(self): + """Verifies a complex serialization case.""" + + class ByteString(ee.Encodable): + """A custom Encodable class that does not use invocations. + + This one is actually supported by the EE API encoding. 
+ """ + + def __init__(self, value): + """Creates a bytestring with a given string value.""" + self._value = value + + def encode(self, unused_encoder): # pylint: disable-msg=g-bad-name + return { + 'type': 'Bytes', + 'value': self._value + } + + call = ee.ComputedObject('String.cat', {'string1': 'x', 'string2': 'y'}) + body = lambda x, y: ee.CustomFunction.variable(None, 'y') + sig = {'returns': 'Object', + 'args': [ + {'name': 'x', 'type': 'Object'}, + {'name': 'y', 'type': 'Object'}]} + custom_function = ee.CustomFunction(sig, body) + to_encode = [ + None, + True, + 5, + 7, + 3.4, + 2.5, + 'hello', + ee.Date(1234567890000), + ee.Geometry(ee.Geometry.LineString(1, 2, 3, 4), 'SR-ORG:6974'), + ee.Geometry.Polygon([ + [[0, 0], [10, 0], [10, 10], [0, 10], [0, 0]], + [[5, 6], [7, 6], [7, 8], [5, 8]], + [[1, 1], [2, 1], [2, 2], [1, 2]] + ]), + ByteString('aGVsbG8='), + { + 'foo': 'bar', + 'baz': call + }, + call, + custom_function + ] + + self.assertEquals(apitestcase.ENCODED_JSON_SAMPLE, + json.loads(serializer.toJSON(to_encode))) + + def testRepeats(self): + """Verifies serialization finds and removes repeated values.""" + test1 = ee.Image(5).mask(ee.Image(5)) # pylint: disable-msg=no-member + expected1 = { + 'type': 'CompoundValue', + 'scope': [ + ['0', { + 'type': 'Invocation', + 'arguments': { + 'value': 5 + }, + 'functionName': 'Image.constant' + }], + ['1', { + 'type': 'Invocation', + 'arguments': { + 'image': { + 'type': 'ValueRef', + 'value': '0' + }, + 'mask': { + 'type': 'ValueRef', + 'value': '0' + } + }, + 'functionName': 'Image.mask' + }] + ], + 'value': { + 'type': 'ValueRef', + 'value': '1' + } + } + self.assertEquals(expected1, json.loads(serializer.toJSON(test1))) + + +if __name__ == '__main__': + unittest.main() diff --git a/ee/tests/string_test.py b/ee/tests/string_test.py new file mode 100644 index 0000000..07bfd49 --- /dev/null +++ b/ee/tests/string_test.py @@ -0,0 +1,46 @@ +#!/usr/bin/env python +"""Test for the ee.string module.""" + + + +import 
unittest + +import ee +from ee import apitestcase + + +class StringTest(apitestcase.ApiTestCase): + + def testString(self): + """Verifies basic behavior of ee.String.""" + bare_string = ee.String('foo') + self.assertEquals('foo', bare_string.encode()) + + computed = ee.String('foo').cat('bar') + self.assertTrue(isinstance(computed, ee.String)) + self.assertEquals(ee.ApiFunction.lookup('String.cat'), computed.func) + self.assertEquals({'string1': ee.String('foo'), + 'string2': ee.String('bar')}, computed.args) + + # Casting a non-string ComputedObject. + obj = ee.Number(1).add(1) + s = ee.String(obj) + self.assertTrue(isinstance(s, ee.String)) + self.assertEquals(ee.ApiFunction.lookup('String'), s.func) + self.assertEquals({'input': obj}, s.args) + + def testInternals(self): + """Test eq(), ne() and hash().""" + a = ee.String('one') + b = ee.String('two') + c = ee.String('one') + + self.assertEquals(a, a) + self.assertNotEquals(a, b) + self.assertEquals(a, c) + self.assertNotEquals(b, c) + self.assertNotEquals(hash(a), hash(b)) + + +if __name__ == '__main__': + unittest.main() diff --git a/favicon.ico b/favicon.ico new file mode 100644 index 0000000..1b61055 Binary files /dev/null and b/favicon.ico differ diff --git a/how_to_deploy.txt b/how_to_deploy.txt new file mode 100644 index 0000000..e63486e --- /dev/null +++ b/how_to_deploy.txt @@ -0,0 +1,32 @@ +Check out the repo: + +git clone https://github.com/MRSD2018/GroundsBot-Website.git + +Download and install the Google Cloud SDK (Python, standard environment) + +https://cloud.google.com/appengine/docs/standard/python/download + +Open up a terminal in the GroundsBot-Website folder. + +gcloud config set account info@groundsbot.com +gcloud auth login + +Sign in and authenticate with the info@groundsbot.com account. + +gcloud config set project mrsd2018groundsbot + +To run a local server at localhost:8080: + +dev_appserver.py . 
+ +To deploy to the cloud: + +gcloud app deploy + +Choose us-east1 (option 2) for the server region if it asks. + +When prompted, enter Y to deploy to mrsd2018groundsbot.appspot.com + +To navigate to the app address online: + +gcloud app browse \ No newline at end of file diff --git a/httplib2/__init__.py b/httplib2/__init__.py new file mode 100644 index 0000000..19e7cff --- /dev/null +++ b/httplib2/__init__.py @@ -0,0 +1,1695 @@ +from __future__ import generators +""" +httplib2 + +A caching http interface that supports ETags and gzip +to conserve bandwidth. + +Requires Python 2.3 or later + +Changelog: +2007-08-18, Rick: Modified so it's able to use a socks proxy if needed. + +""" + +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = ["Thomas Broyer (t.broyer@ltgt.net)", + "James Antill", + "Xavier Verges Farrero", + "Jonathan Feinberg", + "Blair Zajac", + "Sam Ruby", + "Louis Nyffenegger"] +__license__ = "MIT" +__version__ = "0.9.1" + +import re +import sys +import email +import email.Utils +import email.Message +import email.FeedParser +import StringIO +import gzip +import zlib +import httplib +import urlparse +import urllib +import base64 +import os +import copy +import calendar +import time +import random +import errno +try: + from hashlib import sha1 as _sha, md5 as _md5 +except ImportError: + # prior to Python 2.5, these were separate modules + import sha + import md5 + _sha = sha.new + _md5 = md5.new +import hmac +from gettext import gettext as _ +import socket + +try: + from httplib2 import socks +except ImportError: + try: + import socks + except (ImportError, AttributeError): + socks = None + +# Build the appropriate socket wrapper for ssl +try: + import ssl # python 2.6 + ssl_SSLError = ssl.SSLError + def _ssl_wrap_socket(sock, key_file, cert_file, + disable_validation, ca_certs): + if disable_validation: + cert_reqs = ssl.CERT_NONE + else: + cert_reqs = ssl.CERT_REQUIRED + # We should be 
specifying SSL version 3 or TLS v1, but the ssl module + # doesn't expose the necessary knobs. So we need to go with the default + # of SSLv23. + return ssl.wrap_socket(sock, keyfile=key_file, certfile=cert_file, + cert_reqs=cert_reqs, ca_certs=ca_certs) +except (AttributeError, ImportError): + ssl_SSLError = None + def _ssl_wrap_socket(sock, key_file, cert_file, + disable_validation, ca_certs): + if not disable_validation: + raise CertificateValidationUnsupported( + "SSL certificate validation is not supported without " + "the ssl module installed. To avoid this error, install " + "the ssl module, or explicity disable validation.") + ssl_sock = socket.ssl(sock, key_file, cert_file) + return httplib.FakeSocket(sock, ssl_sock) + + +if sys.version_info >= (2,3): + from iri2uri import iri2uri +else: + def iri2uri(uri): + return uri + +def has_timeout(timeout): # python 2.6 + if hasattr(socket, '_GLOBAL_DEFAULT_TIMEOUT'): + return (timeout is not None and timeout is not socket._GLOBAL_DEFAULT_TIMEOUT) + return (timeout is not None) + +__all__ = [ + 'Http', 'Response', 'ProxyInfo', 'HttpLib2Error', 'RedirectMissingLocation', + 'RedirectLimit', 'FailedToDecompressContent', + 'UnimplementedDigestAuthOptionError', + 'UnimplementedHmacDigestAuthOptionError', + 'debuglevel', 'ProxiesUnavailableError'] + + +# The httplib debug level, set to a non-zero value to get debug output +debuglevel = 0 + +# A request will be tried 'RETRIES' times if it fails at the socket/connection level. 
+RETRIES = 2 + +# Python 2.3 support +if sys.version_info < (2,4): + def sorted(seq): + seq.sort() + return seq + +# Python 2.3 support +def HTTPResponse__getheaders(self): + """Return list of (header, value) tuples.""" + if self.msg is None: + raise httplib.ResponseNotReady() + return self.msg.items() + +if not hasattr(httplib.HTTPResponse, 'getheaders'): + httplib.HTTPResponse.getheaders = HTTPResponse__getheaders + +# All exceptions raised here derive from HttpLib2Error +class HttpLib2Error(Exception): pass + +# Some exceptions can be caught and optionally +# be turned back into responses. +class HttpLib2ErrorWithResponse(HttpLib2Error): + def __init__(self, desc, response, content): + self.response = response + self.content = content + HttpLib2Error.__init__(self, desc) + +class RedirectMissingLocation(HttpLib2ErrorWithResponse): pass +class RedirectLimit(HttpLib2ErrorWithResponse): pass +class FailedToDecompressContent(HttpLib2ErrorWithResponse): pass +class UnimplementedDigestAuthOptionError(HttpLib2ErrorWithResponse): pass +class UnimplementedHmacDigestAuthOptionError(HttpLib2ErrorWithResponse): pass + +class MalformedHeader(HttpLib2Error): pass +class RelativeURIError(HttpLib2Error): pass +class ServerNotFoundError(HttpLib2Error): pass +class ProxiesUnavailableError(HttpLib2Error): pass +class CertificateValidationUnsupported(HttpLib2Error): pass +class SSLHandshakeError(HttpLib2Error): pass +class NotSupportedOnThisPlatform(HttpLib2Error): pass +class CertificateHostnameMismatch(SSLHandshakeError): + def __init__(self, desc, host, cert): + HttpLib2Error.__init__(self, desc) + self.host = host + self.cert = cert + +# Open Items: +# ----------- +# Proxy support + +# Are we removing the cached content too soon on PUT (only delete on 200 Maybe?) + +# Pluggable cache storage (supports storing the cache in +# flat files by default. 
We need a plug-in architecture +# that can support Berkeley DB and Squid) + +# == Known Issues == +# Does not handle a resource that uses conneg and Last-Modified but no ETag as a cache validator. +# Does not handle Cache-Control: max-stale +# Does not use Age: headers when calculating cache freshness. + + +# The number of redirections to follow before giving up. +# Note that only GET redirects are automatically followed. +# Will also honor 301 requests by saving that info and never +# requesting that URI again. +DEFAULT_MAX_REDIRECTS = 5 + +try: + # Users can optionally provide a module that tells us where the CA_CERTS + # are located. + import ca_certs_locater + CA_CERTS = ca_certs_locater.get() +except ImportError: + # Default CA certificates file bundled with httplib2. + CA_CERTS = os.path.join( + os.path.dirname(os.path.abspath(__file__ )), "cacerts.txt") + +# Which headers are hop-by-hop headers by default +HOP_BY_HOP = ['connection', 'keep-alive', 'proxy-authenticate', 'proxy-authorization', 'te', 'trailers', 'transfer-encoding', 'upgrade'] + +def _get_end2end_headers(response): + hopbyhop = list(HOP_BY_HOP) + hopbyhop.extend([x.strip() for x in response.get('connection', '').split(',')]) + return [header for header in response.keys() if header not in hopbyhop] + +URI = re.compile(r"^(([^:/?#]+):)?(//([^/?#]*))?([^?#]*)(\?([^#]*))?(#(.*))?") + +def parse_uri(uri): + """Parses a URI using the regex given in Appendix B of RFC 3986. + + (scheme, authority, path, query, fragment) = parse_uri(uri) + """ + groups = URI.match(uri).groups() + return (groups[1], groups[3], groups[4], groups[6], groups[8]) + +def urlnorm(uri): + (scheme, authority, path, query, fragment) = parse_uri(uri) + if not scheme or not authority: + raise RelativeURIError("Only absolute URIs are allowed. uri = %s" % uri) + authority = authority.lower() + scheme = scheme.lower() + if not path: + path = "/" + # Could do syntax based normalization of the URI before + # computing the digest. 
See Section 6.2.2 of Std 66. + request_uri = query and "?".join([path, query]) or path + scheme = scheme.lower() + defrag_uri = scheme + "://" + authority + request_uri + return scheme, authority, request_uri, defrag_uri + + +# Cache filename construction (original borrowed from Venus http://intertwingly.net/code/venus/) +re_url_scheme = re.compile(r'^\w+://') +re_slash = re.compile(r'[?/:|]+') + +def safename(filename): + """Return a filename suitable for the cache. + + Strips dangerous and common characters to create a filename we + can use to store the cache in. + """ + + try: + if re_url_scheme.match(filename): + if isinstance(filename,str): + filename = filename.decode('utf-8') + filename = filename.encode('idna') + else: + filename = filename.encode('idna') + except UnicodeError: + pass + if isinstance(filename,unicode): + filename=filename.encode('utf-8') + filemd5 = _md5(filename).hexdigest() + filename = re_url_scheme.sub("", filename) + filename = re_slash.sub(",", filename) + + # limit length of filename + if len(filename)>200: + filename=filename[:200] + return ",".join((filename, filemd5)) + +NORMALIZE_SPACE = re.compile(r'(?:\r\n)?[ \t]+') +def _normalize_headers(headers): + return dict([ (key.lower(), NORMALIZE_SPACE.sub(value, ' ').strip()) for (key, value) in headers.iteritems()]) + +def _parse_cache_control(headers): + retval = {} + if headers.has_key('cache-control'): + parts = headers['cache-control'].split(',') + parts_with_args = [tuple([x.strip().lower() for x in part.split("=", 1)]) for part in parts if -1 != part.find("=")] + parts_wo_args = [(name.strip().lower(), 1) for name in parts if -1 == name.find("=")] + retval = dict(parts_with_args + parts_wo_args) + return retval + +# Whether to use a strict mode to parse WWW-Authenticate headers +# Might lead to bad results in case of ill-formed header value, +# so disabled by default, falling back to relaxed parsing. +# Set to true to turn on, usefull for testing servers. 
+USE_WWW_AUTH_STRICT_PARSING = 0 + +# In regex below: +# [^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+ matches a "token" as defined by HTTP +# "(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?" matches a "quoted-string" as defined by HTTP, when LWS have already been replaced by a single space +# Actually, as an auth-param value can be either a token or a quoted-string, they are combined in a single pattern which matches both: +# \"?((?<=\")(?:[^\0-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"? +WWW_AUTH_STRICT = re.compile(r"^(?:\s*(?:,\s*)?([^\0-\x1f\x7f-\xff()<>@,;:\\\"/[\]?={} \t]+)\s*=\s*\"?((?<=\")(?:[^\0-\x08\x0A-\x1f\x7f-\xff\\\"]|\\[\0-\x7f])*?(?=\")|(?@,;:\\\"/[\]?={} \t]+(?!\"))\"?)(.*)$") +WWW_AUTH_RELAXED = re.compile(r"^(?:\s*(?:,\s*)?([^ \t\r\n=]+)\s*=\s*\"?((?<=\")(?:[^\\\"]|\\.)*?(?=\")|(? current_age: + retval = "FRESH" + return retval + +def _decompressContent(response, new_content): + content = new_content + try: + encoding = response.get('content-encoding', None) + if encoding in ['gzip', 'deflate']: + if encoding == 'gzip': + content = gzip.GzipFile(fileobj=StringIO.StringIO(new_content)).read() + if encoding == 'deflate': + content = zlib.decompress(content) + response['content-length'] = str(len(content)) + # Record the historical presence of the encoding in a way the won't interfere. 
+ response['-content-encoding'] = response['content-encoding'] + del response['content-encoding'] + except IOError: + content = "" + raise FailedToDecompressContent(_("Content purported to be compressed with %s but failed to decompress.") % response.get('content-encoding'), response, content) + return content + +def _updateCache(request_headers, response_headers, content, cache, cachekey): + if cachekey: + cc = _parse_cache_control(request_headers) + cc_response = _parse_cache_control(response_headers) + if cc.has_key('no-store') or cc_response.has_key('no-store'): + cache.delete(cachekey) + else: + info = email.Message.Message() + for key, value in response_headers.iteritems(): + if key not in ['status','content-encoding','transfer-encoding']: + info[key] = value + + # Add annotations to the cache to indicate what headers + # are variant for this request. + vary = response_headers.get('vary', None) + if vary: + vary_headers = vary.lower().replace(' ', '').split(',') + for header in vary_headers: + key = '-varied-%s' % header + try: + info[key] = request_headers[header] + except KeyError: + pass + + status = response_headers.status + if status == 304: + status = 200 + + status_header = 'status: %d\r\n' % status + + header_str = info.as_string() + + header_str = re.sub("\r(?!\n)|(? 
0: + service = "cl" + # No point in guessing Base or Spreadsheet + #elif request_uri.find("spreadsheets") > 0: + # service = "wise" + + auth = dict(Email=credentials[0], Passwd=credentials[1], service=service, source=headers['user-agent']) + resp, content = self.http.request("https://www.google.com/accounts/ClientLogin", method="POST", body=urlencode(auth), headers={'Content-Type': 'application/x-www-form-urlencoded'}) + lines = content.split('\n') + d = dict([tuple(line.split("=", 1)) for line in lines if line]) + if resp.status == 403: + self.Auth = "" + else: + self.Auth = d['Auth'] + + def request(self, method, request_uri, headers, content): + """Modify the request headers to add the appropriate + Authorization header.""" + headers['authorization'] = 'GoogleLogin Auth=' + self.Auth + + +AUTH_SCHEME_CLASSES = { + "basic": BasicAuthentication, + "wsse": WsseAuthentication, + "digest": DigestAuthentication, + "hmacdigest": HmacDigestAuthentication, + "googlelogin": GoogleLoginAuthentication +} + +AUTH_SCHEME_ORDER = ["hmacdigest", "googlelogin", "digest", "wsse", "basic"] + +class FileCache(object): + """Uses a local directory as a store for cached files. + Not really safe to use if multiple threads or processes are going to + be running on the same cache. 
+ """ + def __init__(self, cache, safe=safename): # use safe=lambda x: md5.new(x).hexdigest() for the old behavior + self.cache = cache + self.safe = safe + if not os.path.exists(cache): + os.makedirs(self.cache) + + def get(self, key): + retval = None + cacheFullPath = os.path.join(self.cache, self.safe(key)) + try: + f = file(cacheFullPath, "rb") + retval = f.read() + f.close() + except IOError: + pass + return retval + + def set(self, key, value): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + f = file(cacheFullPath, "wb") + f.write(value) + f.close() + + def delete(self, key): + cacheFullPath = os.path.join(self.cache, self.safe(key)) + if os.path.exists(cacheFullPath): + os.remove(cacheFullPath) + +class Credentials(object): + def __init__(self): + self.credentials = [] + + def add(self, name, password, domain=""): + self.credentials.append((domain.lower(), name, password)) + + def clear(self): + self.credentials = [] + + def iter(self, domain): + for (cdomain, name, password) in self.credentials: + if cdomain == "" or domain == cdomain: + yield (name, password) + +class KeyCerts(Credentials): + """Identical to Credentials except that + name/password are mapped to key/cert.""" + pass + +class AllHosts(object): + pass + +class ProxyInfo(object): + """Collect information required to use a proxy.""" + bypass_hosts = () + + def __init__(self, proxy_type, proxy_host, proxy_port, + proxy_rdns=True, proxy_user=None, proxy_pass=None): + """ + Args: + proxy_type: The type of proxy server. This must be set to one of + socks.PROXY_TYPE_XXX constants. For example: + + p = ProxyInfo(proxy_type=socks.PROXY_TYPE_HTTP, + proxy_host='localhost', proxy_port=8000) + + proxy_host: The hostname or IP address of the proxy server. + + proxy_port: The port that the proxy server is running on. + + proxy_rdns: If True (default), DNS queries will not be performed + locally, and instead, handed to the proxy to resolve. 
This is useful + if the network does not allow resolution of non-local names. In + httplib2 0.9 and earlier, this defaulted to False. + + proxy_user: The username used to authenticate with the proxy server. + + proxy_pass: The password used to authenticate with the proxy server. + """ + self.proxy_type = proxy_type + self.proxy_host = proxy_host + self.proxy_port = proxy_port + self.proxy_rdns = proxy_rdns + self.proxy_user = proxy_user + self.proxy_pass = proxy_pass + + def astuple(self): + return (self.proxy_type, self.proxy_host, self.proxy_port, + self.proxy_rdns, self.proxy_user, self.proxy_pass) + + def isgood(self): + return (self.proxy_host != None) and (self.proxy_port != None) + + def applies_to(self, hostname): + return not self.bypass_host(hostname) + + def bypass_host(self, hostname): + """Has this host been excluded from the proxy config""" + if self.bypass_hosts is AllHosts: + return True + + bypass = False + for domain in self.bypass_hosts: + if hostname.endswith(domain): + bypass = True + + return bypass + + +def proxy_info_from_environment(method='http'): + """ + Read proxy info from the environment variables. 
+ """ + if method not in ['http', 'https']: + return + + env_var = method + '_proxy' + url = os.environ.get(env_var, os.environ.get(env_var.upper())) + if not url: + return + pi = proxy_info_from_url(url, method) + + no_proxy = os.environ.get('no_proxy', os.environ.get('NO_PROXY', '')) + bypass_hosts = [] + if no_proxy: + bypass_hosts = no_proxy.split(',') + # special case, no_proxy=* means all hosts bypassed + if no_proxy == '*': + bypass_hosts = AllHosts + + pi.bypass_hosts = bypass_hosts + return pi + +def proxy_info_from_url(url, method='http'): + """ + Construct a ProxyInfo from a URL (such as http_proxy env var) + """ + url = urlparse.urlparse(url) + username = None + password = None + port = None + if '@' in url[1]: + ident, host_port = url[1].split('@', 1) + if ':' in ident: + username, password = ident.split(':', 1) + else: + password = ident + else: + host_port = url[1] + if ':' in host_port: + host, port = host_port.split(':', 1) + else: + host = host_port + + if port: + port = int(port) + else: + port = dict(https=443, http=80)[method] + + proxy_type = 3 # socks.PROXY_TYPE_HTTP + return ProxyInfo( + proxy_type = proxy_type, + proxy_host = host, + proxy_port = port, + proxy_user = username or None, + proxy_pass = password or None, + ) + + +class HTTPConnectionWithTimeout(httplib.HTTPConnection): + """ + HTTPConnection subclass that supports timeouts + + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ + + def __init__(self, host, port=None, strict=None, timeout=None, proxy_info=None): + httplib.HTTPConnection.__init__(self, host, port, strict) + self.timeout = timeout + self.proxy_info = proxy_info + + def connect(self): + """Connect to the host and port specified in __init__.""" + # Mostly verbatim from httplib.py. 
+ if self.proxy_info and socks is None: + raise ProxiesUnavailableError( + 'Proxy support missing but proxy use was requested!') + msg = "getaddrinfo returns an empty list" + if self.proxy_info and self.proxy_info.isgood(): + use_proxy = True + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() + + host = proxy_host + port = proxy_port + else: + use_proxy = False + + host = self.host + port = self.port + + for res in socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM): + af, socktype, proto, canonname, sa = res + try: + if use_proxy: + self.sock = socks.socksocket(af, socktype, proto) + self.sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) + else: + self.sock = socket.socket(af, socktype, proto) + self.sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + # Different from httplib: support timeouts. + if has_timeout(self.timeout): + self.sock.settimeout(self.timeout) + # End of difference from httplib. + if self.debuglevel > 0: + print "connect: (%s, %s) ************" % (self.host, self.port) + if use_proxy: + print "proxy: %s ************" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + + self.sock.connect((self.host, self.port) + sa[2:]) + except socket.error, msg: + if self.debuglevel > 0: + print "connect fail: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +class HTTPSConnectionWithTimeout(httplib.HTTPSConnection): + """ + This class allows communication via SSL. + + All timeouts are in seconds. If None is passed for timeout then + Python's default timeout for sockets will be used. 
See for example + the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + """ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, + ca_certs=None, disable_ssl_certificate_validation=False): + httplib.HTTPSConnection.__init__(self, host, port=port, + key_file=key_file, + cert_file=cert_file, strict=strict) + self.timeout = timeout + self.proxy_info = proxy_info + if ca_certs is None: + ca_certs = CA_CERTS + self.ca_certs = ca_certs + self.disable_ssl_certificate_validation = \ + disable_ssl_certificate_validation + + # The following two methods were adapted from https_wrapper.py, released + # with the Google Appengine SDK at + # http://googleappengine.googlecode.com/svn-history/r136/trunk/python/google/appengine/tools/https_wrapper.py + # under the following license: + # + # Copyright 2007 Google Inc. + # + # Licensed under the Apache License, Version 2.0 (the "License"); + # you may not use this file except in compliance with the License. + # You may obtain a copy of the License at + # + # http://www.apache.org/licenses/LICENSE-2.0 + # + # Unless required by applicable law or agreed to in writing, software + # distributed under the License is distributed on an "AS IS" BASIS, + # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + # See the License for the specific language governing permissions and + # limitations under the License. + # + + def _GetValidHostsForCert(self, cert): + """Returns a list of valid host globs for an SSL certificate. + + Args: + cert: A dictionary representing an SSL certificate. + Returns: + list: A list of valid host globs. 
+ """ + if 'subjectAltName' in cert: + return [x[1] for x in cert['subjectAltName'] + if x[0].lower() == 'dns'] + else: + return [x[0][1] for x in cert['subject'] + if x[0][0].lower() == 'commonname'] + + def _ValidateCertificateHostname(self, cert, hostname): + """Validates that a given hostname is valid for an SSL certificate. + + Args: + cert: A dictionary representing an SSL certificate. + hostname: The hostname to test. + Returns: + bool: Whether or not the hostname is valid for this certificate. + """ + hosts = self._GetValidHostsForCert(cert) + for host in hosts: + host_re = host.replace('.', '\.').replace('*', '[^.]*') + if re.search('^%s$' % (host_re,), hostname, re.I): + return True + return False + + def connect(self): + "Connect to a host on a given (SSL) port." + + msg = "getaddrinfo returns an empty list" + if self.proxy_info and self.proxy_info.isgood(): + use_proxy = True + proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass = self.proxy_info.astuple() + + host = proxy_host + port = proxy_port + else: + use_proxy = False + + host = self.host + port = self.port + + address_info = socket.getaddrinfo(host, port, 0, socket.SOCK_STREAM) + for family, socktype, proto, canonname, sockaddr in address_info: + try: + if use_proxy: + sock = socks.socksocket(family, socktype, proto) + + sock.setproxy(proxy_type, proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass) + else: + sock = socket.socket(family, socktype, proto) + sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1) + + if has_timeout(self.timeout): + sock.settimeout(self.timeout) + sock.connect((self.host, self.port)) + self.sock =_ssl_wrap_socket( + sock, self.key_file, self.cert_file, + self.disable_ssl_certificate_validation, self.ca_certs) + if self.debuglevel > 0: + print "connect: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if not self.disable_ssl_certificate_validation: 
+ cert = self.sock.getpeercert() + hostname = self.host.split(':', 0)[0] + if not self._ValidateCertificateHostname(cert, hostname): + raise CertificateHostnameMismatch( + 'Server presented certificate that does not match ' + 'host %s: %s' % (hostname, cert), hostname, cert) + except ssl_SSLError, e: + if sock: + sock.close() + if self.sock: + self.sock.close() + self.sock = None + # Unfortunately the ssl module doesn't seem to provide any way + # to get at more detailed error information, in particular + # whether the error is due to certificate validation or + # something else (such as SSL protocol mismatch). + if e.errno == ssl.SSL_ERROR_SSL: + raise SSLHandshakeError(e) + else: + raise + except (socket.timeout, socket.gaierror): + raise + except socket.error, msg: + if self.debuglevel > 0: + print "connect fail: (%s, %s)" % (self.host, self.port) + if use_proxy: + print "proxy: %s" % str((proxy_host, proxy_port, proxy_rdns, proxy_user, proxy_pass)) + if self.sock: + self.sock.close() + self.sock = None + continue + break + if not self.sock: + raise socket.error, msg + +SCHEME_TO_CONNECTION = { + 'http': HTTPConnectionWithTimeout, + 'https': HTTPSConnectionWithTimeout +} + +# Use a different connection object for Google App Engine +try: + try: + from google.appengine.api import apiproxy_stub_map + if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: + raise ImportError # Bail out; we're not actually running on App Engine. + from google.appengine.api.urlfetch import fetch + from google.appengine.api.urlfetch import InvalidURLError + except (ImportError, AttributeError): + from google3.apphosting.api import apiproxy_stub_map + if apiproxy_stub_map.apiproxy.GetStub('urlfetch') is None: + raise ImportError # Bail out; we're not actually running on App Engine. 
+ from google3.apphosting.api.urlfetch import fetch + from google3.apphosting.api.urlfetch import InvalidURLError + + def _new_fixed_fetch(validate_certificate): + def fixed_fetch(url, payload=None, method="GET", headers={}, + allow_truncated=False, follow_redirects=True, + deadline=None): + if deadline is None: + deadline = socket.getdefaulttimeout() or 5 + return fetch(url, payload=payload, method=method, headers=headers, + allow_truncated=allow_truncated, + follow_redirects=follow_redirects, deadline=deadline, + validate_certificate=validate_certificate) + return fixed_fetch + + class AppEngineHttpConnection(httplib.HTTPConnection): + """Use httplib on App Engine, but compensate for its weirdness. + + The parameters key_file, cert_file, proxy_info, ca_certs, and + disable_ssl_certificate_validation are all dropped on the ground. + """ + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, ca_certs=None, + disable_ssl_certificate_validation=False): + httplib.HTTPConnection.__init__(self, host, port=port, + strict=strict, timeout=timeout) + + class AppEngineHttpsConnection(httplib.HTTPSConnection): + """Same as AppEngineHttpConnection, but for HTTPS URIs.""" + def __init__(self, host, port=None, key_file=None, cert_file=None, + strict=None, timeout=None, proxy_info=None, ca_certs=None, + disable_ssl_certificate_validation=False): + httplib.HTTPSConnection.__init__(self, host, port=port, + key_file=key_file, + cert_file=cert_file, strict=strict, + timeout=timeout) + self._fetch = _new_fixed_fetch( + not disable_ssl_certificate_validation) + + # Update the connection classes to use the Google App Engine specific ones. 
+ SCHEME_TO_CONNECTION = { + 'http': AppEngineHttpConnection, + 'https': AppEngineHttpsConnection + } +except (ImportError, AttributeError): + pass + + +class Http(object): + """An HTTP client that handles: + + - all methods + - caching + - ETags + - compression, + - HTTPS + - Basic + - Digest + - WSSE + + and more. + """ + def __init__(self, cache=None, timeout=None, + proxy_info=proxy_info_from_environment, + ca_certs=None, disable_ssl_certificate_validation=False): + """If 'cache' is a string then it is used as a directory name for + a disk cache. Otherwise it must be an object that supports the + same interface as FileCache. + + All timeouts are in seconds. If None is passed for timeout + then Python's default timeout for sockets will be used. See + for example the docs of socket.setdefaulttimeout(): + http://docs.python.org/library/socket.html#socket.setdefaulttimeout + + `proxy_info` may be: + - a callable that takes the http scheme ('http' or 'https') and + returns a ProxyInfo instance per request. By default, uses + proxy_info_from_environment. + - a ProxyInfo instance (static proxy config). + - None (proxy disabled). + + ca_certs is the path of a file containing root CA certificates for SSL + server certificate validation. By default, a CA cert file bundled with + httplib2 is used. + + If disable_ssl_certificate_validation is true, SSL cert validation will + not be performed. + """ + self.proxy_info = proxy_info + self.ca_certs = ca_certs + self.disable_ssl_certificate_validation = \ + disable_ssl_certificate_validation + + # Map domain name to an httplib connection + self.connections = {} + # The location of the cache, for now a directory + # where cached responses are held. 
+ if cache and isinstance(cache, basestring): + self.cache = FileCache(cache) + else: + self.cache = cache + + # Name/password + self.credentials = Credentials() + + # Key/cert + self.certificates = KeyCerts() + + # authorization objects + self.authorizations = [] + + # If set to False then no redirects are followed, even safe ones. + self.follow_redirects = True + + # Which HTTP methods do we apply optimistic concurrency to, i.e. + # which methods get an "if-match:" etag header added to them. + self.optimistic_concurrency_methods = ["PUT", "PATCH"] + + # If 'follow_redirects' is True, and this is set to True then + # all redirects are followed, including unsafe ones. + self.follow_all_redirects = False + + self.ignore_etag = False + + self.force_exception_to_status_code = False + + self.timeout = timeout + + # Keep Authorization: headers on a redirect. + self.forward_authorization_headers = False + + def __getstate__(self): + state_dict = copy.copy(self.__dict__) + # In case request is augmented by some foreign object such as + # credentials which handle auth + if 'request' in state_dict: + del state_dict['request'] + if 'connections' in state_dict: + del state_dict['connections'] + return state_dict + + def __setstate__(self, state): + self.__dict__.update(state) + self.connections = {} + + def _auth_from_challenge(self, host, request_uri, headers, response, content): + """A generator that creates Authorization objects + that can be applied to requests. 
+ """ + challenges = _parse_www_authenticate(response, 'www-authenticate') + for cred in self.credentials.iter(host): + for scheme in AUTH_SCHEME_ORDER: + if challenges.has_key(scheme): + yield AUTH_SCHEME_CLASSES[scheme](cred, host, request_uri, headers, response, content, self) + + def add_credentials(self, name, password, domain=""): + """Add a name and password that will be used + any time a request requires authentication.""" + self.credentials.add(name, password, domain) + + def add_certificate(self, key, cert, domain): + """Add a key and cert that will be used + any time a request requires authentication.""" + self.certificates.add(key, cert, domain) + + def clear_credentials(self): + """Remove all the names and passwords + that are used for authentication""" + self.credentials.clear() + self.authorizations = [] + + def _conn_request(self, conn, request_uri, method, body, headers): + i = 0 + seen_bad_status_line = False + while i < RETRIES: + i += 1 + try: + if hasattr(conn, 'sock') and conn.sock is None: + conn.connect() + conn.request(method, request_uri, body, headers) + except socket.timeout: + raise + except socket.gaierror: + conn.close() + raise ServerNotFoundError("Unable to find the server at %s" % conn.host) + except ssl_SSLError: + conn.close() + raise + except socket.error, e: + err = 0 + if hasattr(e, 'args'): + err = getattr(e, 'args')[0] + else: + err = e.errno + if err == errno.ECONNREFUSED: # Connection refused + raise + except httplib.HTTPException: + # Just because the server closed the connection doesn't apparently mean + # that the server didn't send a response. 
+ if hasattr(conn, 'sock') and conn.sock is None: + if i < RETRIES-1: + conn.close() + conn.connect() + continue + else: + conn.close() + raise + if i < RETRIES-1: + conn.close() + conn.connect() + continue + try: + response = conn.getresponse() + except httplib.BadStatusLine: + # If we get a BadStatusLine on the first try then that means + # the connection just went stale, so retry regardless of the + # number of RETRIES set. + if not seen_bad_status_line and i == 1: + i = 0 + seen_bad_status_line = True + conn.close() + conn.connect() + continue + else: + conn.close() + raise + except (socket.error, httplib.HTTPException): + if i < RETRIES-1: + conn.close() + conn.connect() + continue + else: + conn.close() + raise + else: + content = "" + if method == "HEAD": + conn.close() + else: + content = response.read() + response = Response(response) + if method != "HEAD": + content = _decompressContent(response, content) + break + return (response, content) + + + def _request(self, conn, host, absolute_uri, request_uri, method, body, headers, redirections, cachekey): + """Do the actual request using the connection object + and also follow one level of redirects if necessary""" + + auths = [(auth.depth(request_uri), auth) for auth in self.authorizations if auth.inscope(host, request_uri)] + auth = auths and sorted(auths)[0][1] or None + if auth: + auth.request(method, request_uri, headers, body) + + (response, content) = self._conn_request(conn, request_uri, method, body, headers) + + if auth: + if auth.response(response, body): + auth.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers ) + response._stale_digest = 1 + + if response.status == 401: + for authorization in self._auth_from_challenge(host, request_uri, headers, response, content): + authorization.request(method, request_uri, headers, body) + (response, content) = self._conn_request(conn, request_uri, method, body, headers, ) + if 
response.status != 401: + self.authorizations.append(authorization) + authorization.response(response, body) + break + + if (self.follow_all_redirects or (method in ["GET", "HEAD"]) or response.status == 303): + if self.follow_redirects and response.status in [300, 301, 302, 303, 307]: + # Pick out the location header and basically start from the beginning + # remembering first to strip the ETag header and decrement our 'depth' + if redirections: + if not response.has_key('location') and response.status != 300: + raise RedirectMissingLocation( _("Redirected but the response is missing a Location: header."), response, content) + # Fix-up relative redirects (which violate an RFC 2616 MUST) + if response.has_key('location'): + location = response['location'] + (scheme, authority, path, query, fragment) = parse_uri(location) + if authority == None: + response['location'] = urlparse.urljoin(absolute_uri, location) + if response.status == 301 and method in ["GET", "HEAD"]: + response['-x-permanent-redirect-url'] = response['location'] + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + if headers.has_key('if-none-match'): + del headers['if-none-match'] + if headers.has_key('if-modified-since'): + del headers['if-modified-since'] + if 'authorization' in headers and not self.forward_authorization_headers: + del headers['authorization'] + if response.has_key('location'): + location = response['location'] + old_response = copy.deepcopy(response) + if not old_response.has_key('content-location'): + old_response['content-location'] = absolute_uri + redirect_method = method + if response.status in [302, 303]: + redirect_method = "GET" + body = None + (response, content) = self.request( + location, method=redirect_method, + body=body, headers=headers, + redirections=redirections - 1) + response.previous = old_response + else: + raise RedirectLimit("Redirected more times 
than rediection_limit allows.", response, content) + elif response.status in [200, 203] and method in ["GET", "HEAD"]: + # Don't cache 206's since we aren't going to handle byte range requests + if not response.has_key('content-location'): + response['content-location'] = absolute_uri + _updateCache(headers, response, content, self.cache, cachekey) + + return (response, content) + + def _normalize_headers(self, headers): + return _normalize_headers(headers) + +# Need to catch and rebrand some exceptions +# Then need to optionally turn all exceptions into status codes +# including all socket.* and httplib.* exceptions. + + + def request(self, uri, method="GET", body=None, headers=None, redirections=DEFAULT_MAX_REDIRECTS, connection_type=None): + """ Performs a single HTTP request. + + The 'uri' is the URI of the HTTP resource and can begin with either + 'http' or 'https'. The value of 'uri' must be an absolute URI. + + The 'method' is the HTTP method to perform, such as GET, POST, DELETE, + etc. There is no restriction on the methods allowed. + + The 'body' is the entity body to be sent with the request. It is a + string object. + + Any extra headers that are to be sent with the request should be + provided in the 'headers' dictionary. + + The maximum number of redirects to follow before raising an + exception is 'redirections'. The default is 5. + + The return value is a tuple of (response, content), the first + being an instance of the 'Response' class, the second being + a string that contains the response entity body. 
+ """ + try: + if headers is None: + headers = {} + else: + headers = self._normalize_headers(headers) + + if not headers.has_key('user-agent'): + headers['user-agent'] = "Python-httplib2/%s (gzip)" % __version__ + + uri = iri2uri(uri) + + (scheme, authority, request_uri, defrag_uri) = urlnorm(uri) + domain_port = authority.split(":")[0:2] + if len(domain_port) == 2 and domain_port[1] == '443' and scheme == 'http': + scheme = 'https' + authority = domain_port[0] + + proxy_info = self._get_proxy_info(scheme, authority) + + conn_key = scheme+":"+authority + if conn_key in self.connections: + conn = self.connections[conn_key] + else: + if not connection_type: + connection_type = SCHEME_TO_CONNECTION[scheme] + certs = list(self.certificates.iter(authority)) + if scheme == 'https': + if certs: + conn = self.connections[conn_key] = connection_type( + authority, key_file=certs[0][0], + cert_file=certs[0][1], timeout=self.timeout, + proxy_info=proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation= + self.disable_ssl_certificate_validation) + else: + conn = self.connections[conn_key] = connection_type( + authority, timeout=self.timeout, + proxy_info=proxy_info, + ca_certs=self.ca_certs, + disable_ssl_certificate_validation= + self.disable_ssl_certificate_validation) + else: + conn = self.connections[conn_key] = connection_type( + authority, timeout=self.timeout, + proxy_info=proxy_info) + conn.set_debuglevel(debuglevel) + + if 'range' not in headers and 'accept-encoding' not in headers: + headers['accept-encoding'] = 'gzip, deflate' + + info = email.Message.Message() + cached_value = None + if self.cache: + cachekey = defrag_uri.encode('utf-8') + cached_value = self.cache.get(cachekey) + if cached_value: + # info = email.message_from_string(cached_value) + # + # Need to replace the line above with the kludge below + # to fix the non-existent bug not fixed in this + # bug report: 
http://mail.python.org/pipermail/python-bugs-list/2005-September/030289.html + try: + info, content = cached_value.split('\r\n\r\n', 1) + feedparser = email.FeedParser.FeedParser() + feedparser.feed(info) + info = feedparser.close() + feedparser._parse = None + except (IndexError, ValueError): + self.cache.delete(cachekey) + cachekey = None + cached_value = None + else: + cachekey = None + + if method in self.optimistic_concurrency_methods and self.cache and info.has_key('etag') and not self.ignore_etag and 'if-match' not in headers: + # http://www.w3.org/1999/04/Editing/ + headers['if-match'] = info['etag'] + + if method not in ["GET", "HEAD"] and self.cache and cachekey: + # RFC 2616 Section 13.10 + self.cache.delete(cachekey) + + # Check the vary header in the cache to see if this request + # matches what varies in the cache. + if method in ['GET', 'HEAD'] and 'vary' in info: + vary = info['vary'] + vary_headers = vary.lower().replace(' ', '').split(',') + for header in vary_headers: + key = '-varied-%s' % header + value = info[key] + if headers.get(header, None) != value: + cached_value = None + break + + if cached_value and method in ["GET", "HEAD"] and self.cache and 'range' not in headers: + if info.has_key('-x-permanent-redirect-url'): + # Should cached permanent redirects be counted in our redirection count? For now, yes. + if redirections <= 0: + raise RedirectLimit("Redirected more times than rediection_limit allows.", {}, "") + (response, new_content) = self.request( + info['-x-permanent-redirect-url'], method='GET', + headers=headers, redirections=redirections - 1) + response.previous = Response(info) + response.previous.fromcache = True + else: + # Determine our course of action: + # Is the cached entry fresh or stale? + # Has the client requested a non-cached response? + # + # There seems to be three possible answers: + # 1. [FRESH] Return the cache entry w/o doing a GET + # 2. [STALE] Do the GET (but add in cache validators if available) + # 3. 
[TRANSPARENT] Do a GET w/o any cache validators (Cache-Control: no-cache) on the request + entry_disposition = _entry_disposition(info, headers) + + if entry_disposition == "FRESH": + if not cached_value: + info['status'] = '504' + content = "" + response = Response(info) + if cached_value: + response.fromcache = True + return (response, content) + + if entry_disposition == "STALE": + if info.has_key('etag') and not self.ignore_etag and not 'if-none-match' in headers: + headers['if-none-match'] = info['etag'] + if info.has_key('last-modified') and not 'last-modified' in headers: + headers['if-modified-since'] = info['last-modified'] + elif entry_disposition == "TRANSPARENT": + pass + + (response, new_content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + + if response.status == 304 and method == "GET": + # Rewrite the cache entry with the new end-to-end headers + # Take all headers that are in response + # and overwrite their values in info. + # unless they are hop-by-hop, or are listed in the connection header. 
+ + for key in _get_end2end_headers(response): + info[key] = response[key] + merged_response = Response(info) + if hasattr(response, "_stale_digest"): + merged_response._stale_digest = response._stale_digest + _updateCache(headers, merged_response, content, self.cache, cachekey) + response = merged_response + response.status = 200 + response.fromcache = True + + elif response.status == 200: + content = new_content + else: + self.cache.delete(cachekey) + content = new_content + else: + cc = _parse_cache_control(headers) + if cc.has_key('only-if-cached'): + info['status'] = '504' + response = Response(info) + content = "" + else: + (response, content) = self._request(conn, authority, uri, request_uri, method, body, headers, redirections, cachekey) + except Exception, e: + if self.force_exception_to_status_code: + if isinstance(e, HttpLib2ErrorWithResponse): + response = e.response + content = e.content + response.status = 500 + response.reason = str(e) + elif isinstance(e, socket.timeout): + content = "Request Timeout" + response = Response({ + "content-type": "text/plain", + "status": "408", + "content-length": len(content) + }) + response.reason = "Request Timeout" + else: + content = str(e) + response = Response({ + "content-type": "text/plain", + "status": "400", + "content-length": len(content) + }) + response.reason = "Bad Request" + else: + raise + + + return (response, content) + + def _get_proxy_info(self, scheme, authority): + """Return a ProxyInfo instance (or None) based on the scheme + and authority. 
+ """ + hostname, port = urllib.splitport(authority) + proxy_info = self.proxy_info + if callable(proxy_info): + proxy_info = proxy_info(scheme) + + if (hasattr(proxy_info, 'applies_to') + and not proxy_info.applies_to(hostname)): + proxy_info = None + return proxy_info + + +class Response(dict): + """An object more like email.Message than httplib.HTTPResponse.""" + + """Is this response from our local cache""" + fromcache = False + + """HTTP protocol version used by server. 10 for HTTP/1.0, 11 for HTTP/1.1. """ + version = 11 + + "Status code returned by server. " + status = 200 + + """Reason phrase returned by server.""" + reason = "Ok" + + previous = None + + def __init__(self, info): + # info is either an email.Message or + # an httplib.HTTPResponse object. + if isinstance(info, httplib.HTTPResponse): + for key, value in info.getheaders(): + self[key.lower()] = value + self.status = info.status + self['status'] = str(self.status) + self.reason = info.reason + self.version = info.version + elif isinstance(info, email.Message.Message): + for key, value in info.items(): + self[key.lower()] = value + self.status = int(self['status']) + else: + for key, value in info.iteritems(): + self[key.lower()] = value + self.status = int(self.get('status', self.status)) + self.reason = self.get('reason', self.reason) + + + def __getattr__(self, name): + if name == 'dict': + return self + else: + raise AttributeError, name diff --git a/httplib2/cacerts.txt b/httplib2/cacerts.txt new file mode 100644 index 0000000..70990f1 --- /dev/null +++ b/httplib2/cacerts.txt @@ -0,0 +1,2183 @@ +# Issuer: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. +# Subject: CN=GTE CyberTrust Global Root O=GTE Corporation OU=GTE CyberTrust Solutions, Inc. 
+# Label: "GTE CyberTrust Global Root" +# Serial: 421 +# MD5 Fingerprint: ca:3d:d3:68:f1:03:5c:d0:32:fa:b8:2b:59:e8:5a:db +# SHA1 Fingerprint: 97:81:79:50:d8:1c:96:70:cc:34:d8:09:cf:79:44:31:36:7e:f4:74 +# SHA256 Fingerprint: a5:31:25:18:8d:21:10:aa:96:4b:02:c7:b7:c6:da:32:03:17:08:94:e5:fb:71:ff:fb:66:67:d5:e6:81:0a:36 +-----BEGIN CERTIFICATE----- +MIICWjCCAcMCAgGlMA0GCSqGSIb3DQEBBAUAMHUxCzAJBgNVBAYTAlVTMRgwFgYD +VQQKEw9HVEUgQ29ycG9yYXRpb24xJzAlBgNVBAsTHkdURSBDeWJlclRydXN0IFNv +bHV0aW9ucywgSW5jLjEjMCEGA1UEAxMaR1RFIEN5YmVyVHJ1c3QgR2xvYmFsIFJv +b3QwHhcNOTgwODEzMDAyOTAwWhcNMTgwODEzMjM1OTAwWjB1MQswCQYDVQQGEwJV +UzEYMBYGA1UEChMPR1RFIENvcnBvcmF0aW9uMScwJQYDVQQLEx5HVEUgQ3liZXJU +cnVzdCBTb2x1dGlvbnMsIEluYy4xIzAhBgNVBAMTGkdURSBDeWJlclRydXN0IEds +b2JhbCBSb290MIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQCVD6C28FCc6HrH +iM3dFw4usJTQGz0O9pTAipTHBsiQl8i4ZBp6fmw8U+E3KHNgf7KXUwefU/ltWJTS +r41tiGeA5u2ylc9yMcqlHHK6XALnZELn+aks1joNrI1CqiQBOeacPwGFVw1Yh0X4 +04Wqk2kmhXBIgD8SFcd5tB8FLztimQIDAQABMA0GCSqGSIb3DQEBBAUAA4GBAG3r +GwnpXtlR22ciYaQqPEh346B8pt5zohQDhT37qw4wxYMWM4ETCJ57NE7fQMh017l9 +3PR2VX2bY1QY6fDq81yx2YtCHrnAlU66+tXifPVoYb+O7AWXX1uw16OFNMQkpw0P +lZPvy5TYnh+dXIVtx6quTx8itc2VrbqnzPmrC3p/ +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Server CA" +# Serial: 1 +# MD5 Fingerprint: c5:70:c4:a2:ed:53:78:0c:c8:10:53:81:64:cb:d0:1d +# SHA1 Fingerprint: 23:e5:94:94:51:95:f2:41:48:03:b4:d5:64:d2:a3:a3:f5:d8:8b:8c +# SHA256 Fingerprint: b4:41:0b:73:e2:e6:ea:ca:47:fb:c4:2f:8f:a4:01:8a:f4:38:1d:c5:4c:fa:a8:44:50:46:1e:ed:09:45:4d:e9 +-----BEGIN CERTIFICATE----- +MIIDEzCCAnygAwIBAgIBATANBgkqhkiG9w0BAQQFADCBxDELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEm 
+MCQGCSqGSIb3DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wHhcNOTYwODAx +MDAwMDAwWhcNMjAxMjMxMjM1OTU5WjCBxDELMAkGA1UEBhMCWkExFTATBgNVBAgT +DFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYDVQQKExRUaGF3 +dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNl +cyBEaXZpc2lvbjEZMBcGA1UEAxMQVGhhd3RlIFNlcnZlciBDQTEmMCQGCSqGSIb3 +DQEJARYXc2VydmVyLWNlcnRzQHRoYXd0ZS5jb20wgZ8wDQYJKoZIhvcNAQEBBQAD +gY0AMIGJAoGBANOkUG7I/1Zr5s9dtuoMaHVHoqrC2oQl/Kj0R1HahbUgdJSGHg91 +yekIYfUGbTBuFRkC6VLAYttNmZ7iagxEOM3+vuNkCXDF/rFrKbYvScg71CcEJRCX +L+eQbcAoQpnXTEPew/UhbVSfXcNY4cDk2VuwuNy0e982OsK1ZiIS1ocNAgMBAAGj +EzARMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZIhvcNAQEEBQADgYEAB/pMaVz7lcxG +7oWDTSEwjsrZqG9JGubaUeNgcGyEYRGhGshIPllDfU+VPaGLtwtimHp1it2ITk6e +QNuozDJ0uW8NxuOzRAvZim+aKZuZGCg70eNAKJpaPNW15yAbi8qkq43pUdniTCxZ +qdq5snUb9kLy78fyGPmJvKP/iiMucEc= +-----END CERTIFICATE----- + +# Issuer: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Subject: CN=Thawte Premium Server CA O=Thawte Consulting cc OU=Certification Services Division +# Label: "Thawte Premium Server CA" +# Serial: 1 +# MD5 Fingerprint: 06:9f:69:79:16:66:90:02:1b:8c:8c:a2:c3:07:6f:3a +# SHA1 Fingerprint: 62:7f:8d:78:27:65:63:99:d2:7d:7f:90:44:c9:fe:b3:f3:3e:fa:9a +# SHA256 Fingerprint: ab:70:36:36:5c:71:54:aa:29:c2:c2:9f:5d:41:91:16:3b:16:2a:22:25:01:13:57:d5:6d:07:ff:a7:bc:1f:72 +-----BEGIN CERTIFICATE----- +MIIDJzCCApCgAwIBAgIBATANBgkqhkiG9w0BAQQFADCBzjELMAkGA1UEBhMCWkEx +FTATBgNVBAgTDFdlc3Rlcm4gQ2FwZTESMBAGA1UEBxMJQ2FwZSBUb3duMR0wGwYD +VQQKExRUaGF3dGUgQ29uc3VsdGluZyBjYzEoMCYGA1UECxMfQ2VydGlmaWNhdGlv +biBTZXJ2aWNlcyBEaXZpc2lvbjEhMB8GA1UEAxMYVGhhd3RlIFByZW1pdW0gU2Vy +dmVyIENBMSgwJgYJKoZIhvcNAQkBFhlwcmVtaXVtLXNlcnZlckB0aGF3dGUuY29t +MB4XDTk2MDgwMTAwMDAwMFoXDTIwMTIzMTIzNTk1OVowgc4xCzAJBgNVBAYTAlpB +MRUwEwYDVQQIEwxXZXN0ZXJuIENhcGUxEjAQBgNVBAcTCUNhcGUgVG93bjEdMBsG +A1UEChMUVGhhd3RlIENvbnN1bHRpbmcgY2MxKDAmBgNVBAsTH0NlcnRpZmljYXRp +b24gU2VydmljZXMgRGl2aXNpb24xITAfBgNVBAMTGFRoYXd0ZSBQcmVtaXVtIFNl 
+cnZlciBDQTEoMCYGCSqGSIb3DQEJARYZcHJlbWl1bS1zZXJ2ZXJAdGhhd3RlLmNv +bTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEA0jY2aovXwlue2oFBYo847kkE +VdbQ7xwblRZH7xhINTpS9CtqBo87L+pW46+GjZ4X9560ZXUCTe/LCaIhUdib0GfQ +ug2SBhRz1JPLlyoAnFxODLz6FVL88kRu2hFKbgifLy3j+ao6hnO2RlNYyIkFvYMR +uHM/qgeN9EJN50CdHDcCAwEAAaMTMBEwDwYDVR0TAQH/BAUwAwEB/zANBgkqhkiG +9w0BAQQFAAOBgQAmSCwWwlj66BZ0DKqqX1Q/8tfJeGBeXm43YyJ3Nn6yF8Q0ufUI +hfzJATj/Tb7yFkJD57taRvvBxhEf8UqwKEbJw8RCfbz6q1lu1bdRiBHjpIUZa4JM +pAwSremkrj/xw0llmozFyD4lt5SZu5IycQfwhl7tUCemDaYj+bvLpgcUQg== +-----END CERTIFICATE----- + +# Issuer: O=Equifax OU=Equifax Secure Certificate Authority +# Subject: O=Equifax OU=Equifax Secure Certificate Authority +# Label: "Equifax Secure CA" +# Serial: 903804111 +# MD5 Fingerprint: 67:cb:9d:c0:13:24:8a:82:9b:b2:17:1e:d1:1b:ec:d4 +# SHA1 Fingerprint: d2:32:09:ad:23:d3:14:23:21:74:e4:0d:7f:9d:62:13:97:86:63:3a +# SHA256 Fingerprint: 08:29:7a:40:47:db:a2:36:80:c7:31:db:6e:31:76:53:ca:78:48:e1:be:bd:3a:0b:01:79:a7:07:f9:2c:f1:78 +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIENd70zzANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEQMA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2Vy +dGlmaWNhdGUgQXV0aG9yaXR5MB4XDTk4MDgyMjE2NDE1MVoXDTE4MDgyMjE2NDE1 +MVowTjELMAkGA1UEBhMCVVMxEDAOBgNVBAoTB0VxdWlmYXgxLTArBgNVBAsTJEVx +dWlmYXggU2VjdXJlIENlcnRpZmljYXRlIEF1dGhvcml0eTCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEAwV2xWGcIYu6gmi0fCG2RFGiYCh7+2gRvE4RiIcPRfM6f +BeC4AfBONOziipUEZKzxa1NfBbPLZ4C/QgKO/t0BCezhABRP/PvwDN1Dulsr4R+A +cJkVV5MW8Q+XarfCaCMczE1ZMKxRHjuvK9buY0V7xdlfUNLjUA86iOe/FP3gx7kC +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEQ +MA4GA1UEChMHRXF1aWZheDEtMCsGA1UECxMkRXF1aWZheCBTZWN1cmUgQ2VydGlm +aWNhdGUgQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTgw +ODIyMTY0MTUxWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUSOZo+SvSspXXR9gj +IBBPM5iQn9QwHQYDVR0OBBYEFEjmaPkr0rKV10fYIyAQTzOYkJ/UMAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA 
+A4GBAFjOKer89961zgK5F7WF0bnj4JXMJTENAKaSbn+2kmOeUJXRmm/kEd5jhW6Y +7qj/WsjTVbJmcVfewCHrPSqnI0kBBIZCe/zuf6IWUrVnZ9NA2zsmWLIodz2uFHdh +1voqZiegDfqnc1zqcPGUIWVEX/r87yloqaKHee9570+sB3c4 +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 149843929435818692848040365716851702463 +# MD5 Fingerprint: 10:fc:63:5d:f6:26:3e:0d:f3:25:be:5f:79:cd:67:67 +# SHA1 Fingerprint: 74:2c:31:92:e6:07:e4:24:eb:45:49:54:2b:e1:bb:c5:3e:61:74:e2 +# SHA256 Fingerprint: e7:68:56:34:ef:ac:f6:9a:ce:93:9a:6b:25:5b:7b:4f:ab:ef:42:93:5b:50:a2:65:ac:b5:cb:60:27:e4:4e:70 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEHC65B0Q2Sk0tjjKewPMur8wDQYJKoZIhvcNAQECBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMTIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBAgUAA4GBALtMEivPLCYATxQT3ab7/AoRhIzzKBxnki98tsX63/Do +lbwdj2wsqFHMc9ikwFPwTtYmwHYBV4GSXiHx0bH/59AhWM1pF+NEHJwZRDmJXNyc +AA9WjQKZ7aKQRUzkuxCkPfAyAw7xzvjoyVGM5mKf5p/AfbdynMk2OmufTqj/ZA1k +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. - For authorized use only/VeriSign Trust Network +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority - G2/(c) 1998 VeriSign, Inc. 
- For authorized use only/VeriSign Trust Network +# Label: "Verisign Class 3 Public Primary Certification Authority - G2" +# Serial: 167285380242319648451154478808036881606 +# MD5 Fingerprint: a2:33:9b:4c:74:78:73:d4:6c:e7:c1:f3:8d:cb:5c:e9 +# SHA1 Fingerprint: 85:37:1c:a6:e5:50:14:3d:ce:28:03:47:1b:de:3a:09:e8:f8:77:0f +# SHA256 Fingerprint: 83:ce:3c:12:29:68:8a:59:3d:48:5f:81:97:3c:0f:91:95:43:1e:da:37:cc:5e:36:43:0e:79:c7:a8:88:63:8b +-----BEGIN CERTIFICATE----- +MIIDAjCCAmsCEH3Z/gfPqB63EHln+6eJNMYwDQYJKoZIhvcNAQEFBQAwgcExCzAJ +BgNVBAYTAlVTMRcwFQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xh +c3MgMyBQdWJsaWMgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcy +MTowOAYDVQQLEzEoYykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3Jp +emVkIHVzZSBvbmx5MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMB4X +DTk4MDUxODAwMDAwMFoXDTI4MDgwMTIzNTk1OVowgcExCzAJBgNVBAYTAlVTMRcw +FQYDVQQKEw5WZXJpU2lnbiwgSW5jLjE8MDoGA1UECxMzQ2xhc3MgMyBQdWJsaWMg +UHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAtIEcyMTowOAYDVQQLEzEo +YykgMTk5OCBWZXJpU2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5 +MR8wHQYDVQQLExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMIGfMA0GCSqGSIb3DQEB +AQUAA4GNADCBiQKBgQDMXtERXVxp0KvTuWpMmR9ZmDCOFoUgRm1HP9SFIIThbbP4 +pO0M8RcPO/mn+SXXwc+EY/J8Y8+iR/LGWzOOZEAEaMGAuWQcRXfH2G71lSk8UOg0 +13gfqLptQ5GVj0VXXn7F+8qkBOvqlzdUMG+7AUcyM83cV5tkaWH4mx0ciU9cZwID +AQABMA0GCSqGSIb3DQEBBQUAA4GBAFFNzb5cy5gZnBWyATl4Lk0PZ3BwmcYQWpSk +U01UbSuvDV1Ai2TT1+7eVmGSX6bEHRBhNtMsJzzoKQm5EWR0zLVznxxIqbxhAe7i +F6YM40AIOw7n60RzKprxaZLvcRTDOaxxp5EJb+RxBrO6WVcmeQD2+A2iMzAo1KpY +oJ2daZH9 +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Subject: CN=GlobalSign Root CA O=GlobalSign nv-sa OU=Root CA +# Label: "GlobalSign Root CA" +# Serial: 4835703278459707669005204 +# MD5 Fingerprint: 3e:45:52:15:09:51:92:e1:b7:5d:37:9f:b1:87:29:8a +# SHA1 Fingerprint: b1:bc:96:8b:d4:f4:9d:62:2a:a8:9a:81:f2:15:01:52:a4:1d:82:9c +# SHA256 Fingerprint: 
eb:d4:10:40:e4:bb:3e:c7:42:c9:e3:81:d3:1e:f2:a4:1a:48:b6:68:5c:96:e7:ce:f3:c1:df:6c:d4:33:1c:99 +-----BEGIN CERTIFICATE----- +MIIDdTCCAl2gAwIBAgILBAAAAAABFUtaw5QwDQYJKoZIhvcNAQEFBQAwVzELMAkG +A1UEBhMCQkUxGTAXBgNVBAoTEEdsb2JhbFNpZ24gbnYtc2ExEDAOBgNVBAsTB1Jv +b3QgQ0ExGzAZBgNVBAMTEkdsb2JhbFNpZ24gUm9vdCBDQTAeFw05ODA5MDExMjAw +MDBaFw0yODAxMjgxMjAwMDBaMFcxCzAJBgNVBAYTAkJFMRkwFwYDVQQKExBHbG9i +YWxTaWduIG52LXNhMRAwDgYDVQQLEwdSb290IENBMRswGQYDVQQDExJHbG9iYWxT +aWduIFJvb3QgQ0EwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDaDuaZ +jc6j40+Kfvvxi4Mla+pIH/EqsLmVEQS98GPR4mdmzxzdzxtIK+6NiY6arymAZavp +xy0Sy6scTHAHoT0KMM0VjU/43dSMUBUc71DuxC73/OlS8pF94G3VNTCOXkNz8kHp +1Wrjsok6Vjk4bwY8iGlbKk3Fp1S4bInMm/k8yuX9ifUSPJJ4ltbcdG6TRGHRjcdG +snUOhugZitVtbNV4FpWi6cgKOOvyJBNPc1STE4U6G7weNLWLBYy5d4ux2x8gkasJ +U26Qzns3dLlwR5EiUWMWea6xrkEmCMgZK9FGqkjWZCrXgzT/LCrBbBlDSgeF59N8 +9iFo7+ryUp9/k5DPAgMBAAGjQjBAMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MB0GA1UdDgQWBBRge2YaRQ2XyolQL30EzTSo//z9SzANBgkqhkiG9w0B +AQUFAAOCAQEA1nPnfE920I2/7LqivjTFKDK1fPxsnCwrvQmeU79rXqoRSLblCKOz +yj1hTdNGCbM+w6DjY1Ub8rrvrTnhQ7k4o+YviiY776BQVvnGCv04zcQLcFGUl5gE +38NflNUVyRRBnMRddWQVDf9VMOyGj/8N7yy5Y0b2qvzfvGn9LhJIZJrglfCm7ymP +AbEVtQwdpf5pLGkkeB6zpxxxYu7KyJesF12KwvhHhm4qxFYxldBniYUr+WymXUad +DKqC5JlR3XC321Y9YeRq4VzW9v493kHMB65jUr9TU/Qr6cf9tveCX4XSQRjbgbME +HMUfpIBvFSDJ3gyICh3WZlXi/EjJKSZp4A== +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R2 +# Label: "GlobalSign Root CA - R2" +# Serial: 4835703278459682885658125 +# MD5 Fingerprint: 94:14:77:7e:3e:5e:fd:8f:30:bd:41:b0:cf:e7:d0:30 +# SHA1 Fingerprint: 75:e0:ab:b6:13:85:12:27:1c:04:f8:5f:dd:de:38:e4:b7:24:2e:fe +# SHA256 Fingerprint: ca:42:dd:41:74:5f:d0:b8:1e:b9:02:36:2c:f9:d8:bf:71:9d:a1:bd:1b:1e:fc:94:6f:5b:4c:99:f4:2c:1b:9e +-----BEGIN CERTIFICATE----- +MIIDujCCAqKgAwIBAgILBAAAAAABD4Ym5g0wDQYJKoZIhvcNAQEFBQAwTDEgMB4G 
+A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjIxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDYxMjE1MDgwMDAwWhcNMjExMjE1 +MDgwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMjETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAKbPJA6+Lm8omUVCxKs+IVSbC9N/hHD6ErPL +v4dfxn+G07IwXNb9rfF73OX4YJYJkhD10FPe+3t+c4isUoh7SqbKSaZeqKeMWhG8 +eoLrvozps6yWJQeXSpkqBy+0Hne/ig+1AnwblrjFuTosvNYSuetZfeLQBoZfXklq +tTleiDTsvHgMCJiEbKjNS7SgfQx5TfC4LcshytVsW33hoCmEofnTlEnLJGKRILzd +C9XZzPnqJworc5HGnRusyMvo4KD0L5CLTfuwNhv2GXqF4G3yYROIXJ/gkwpRl4pa +zq+r1feqCapgvdzZX99yqWATXgAByUr6P6TqBwMhAo6CygPCm48CAwEAAaOBnDCB +mTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUm+IH +V2ccHsBqBt5ZtJot39wZhi4wNgYDVR0fBC8wLTAroCmgJ4YlaHR0cDovL2NybC5n +bG9iYWxzaWduLm5ldC9yb290LXIyLmNybDAfBgNVHSMEGDAWgBSb4gdXZxwewGoG +3lm0mi3f3BmGLjANBgkqhkiG9w0BAQUFAAOCAQEAmYFThxxol4aR7OBKuEQLq4Gs +J0/WwbgcQ3izDJr86iw8bmEbTUsp9Z8FHSbBuOmDAGJFtqkIk7mpM0sYmsL4h4hO +291xNBrBVNpGP+DTKqttVCL1OmLNIG+6KYnX3ZHu01yiPqFbQfXf5WRDLenVOavS +ot+3i9DAgBkcRcAtjOj4LaR0VknFBbVPFd5uRHg5h6h+u/N5GJG79G+dwfCMNYxd +AfvDbbnvRG15RjF+Cv6pgsH/76tuIMRQyV+dTZsXjAzlAcmgQWpzU/qlULRuJQ/7 +TBj0/VLZjmmx6BEP3ojY+x1J96relc8geMJgEtslQIxq/H5COEBkEveegeGTLg== +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 1 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 1 Policy Validation Authority +# Label: "ValiCert Class 1 VA" +# Serial: 1 +# MD5 Fingerprint: 65:58:ab:15:ad:57:6c:1e:a8:a7:b5:69:ac:bf:ff:eb +# SHA1 Fingerprint: e5:df:74:3c:b6:01:c4:9b:98:43:dc:ab:8c:e8:6a:81:10:9f:e4:8e +# SHA256 Fingerprint: f4:c1:49:55:1a:30:13:a3:5b:c7:bf:fe:17:a7:f3:44:9b:c1:ab:5b:5a:0a:e7:4b:06:c2:3b:90:00:4c:01:04 +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNTIyMjM0OFoXDTE5MDYy +NTIyMjM0OFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDEgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDYWYJ6ibiWuqYvaG9Y +LqdUHAZu9OqNSLwxlBfw8068srg1knaw0KWlAdcAAxIiGQj4/xEjm84H9b9pGib+ +TunRf50sQB1ZaG6m+FiwnRqP0z/x3BkGgagO4DrdyFNFCQbmD3DD+kCmDuJWBQ8Y +TfwggtFzVXSNdnKgHZ0dwN0/cQIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFBoPUn0 +LBwGlN+VYH+Wexf+T3GtZMjdd9LvWVXoP+iOBSoh8gfStadS/pyxtuJbdxdA6nLW +I8sogTLDAHkY7FkXicnGah5xyf23dKUlRWnFSKsZ4UWKJWsZ7uW7EvV/96aNUcPw +nXS3qT6gpf+2SQMT2iLM7XGCK5nPOrf1LXLI +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 2 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 2 Policy Validation Authority +# Label: "ValiCert Class 2 VA" +# Serial: 1 +# MD5 Fingerprint: a9:23:75:9b:ba:49:36:6e:31:c2:db:f2:e7:66:ba:87 +# SHA1 Fingerprint: 31:7a:2a:d0:7f:2b:33:5e:f5:a1:c3:4e:4b:57:e8:b7:d8:f1:fc:a6 +# SHA256 Fingerprint: 58:d0:17:27:9c:d4:dc:63:ab:dd:b1:96:a6:c9:90:6c:30:c4:e0:87:83:ea:e8:c1:60:99:54:d6:93:55:59:6b +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMTk1NFoXDTE5MDYy +NjAwMTk1NFowgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDIgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDOOnHK5avIWZJV16vY +dA757tn2VUdZZUcOBVXc65g2PFxTXdMwzzjsvUGJ7SVCCSRrCl6zfN1SLUzm1NZ9 +WlmpZdRJEy0kTRxQb7XBhVQ7/nHk01xC+YDgkRoKWzk2Z/M/VXwbP7RfZHM047QS +v4dk+NoS/zcnwbNDu+97bi5p9wIDAQABMA0GCSqGSIb3DQEBBQUAA4GBADt/UG9v +UJSZSWI4OB9L+KXIPqeCgfYrx+jFzug6EILLGACOTb2oWH+heQC1u+mNr0HZDzTu +IYEZoDJJKPTEjlbVUjP9UNV+mWwD5MlM/Mtsq2azSiGM5bUMMj4QssxsodyamEwC +W/POuZ6lcg5Ktz885hZo+L7tdEy8W9ViH0Pd +-----END CERTIFICATE----- + +# Issuer: CN=http://www.valicert.com/ O=ValiCert, Inc. OU=ValiCert Class 3 Policy Validation Authority +# Subject: CN=http://www.valicert.com/ O=ValiCert, Inc. 
OU=ValiCert Class 3 Policy Validation Authority +# Label: "RSA Root Certificate 1" +# Serial: 1 +# MD5 Fingerprint: a2:6f:53:b7:ee:40:db:4a:68:e7:fa:18:d9:10:4b:72 +# SHA1 Fingerprint: 69:bd:8c:f4:9c:d3:00:fb:59:2e:17:93:ca:55:6a:f3:ec:aa:35:fb +# SHA256 Fingerprint: bc:23:f9:8a:31:3c:b9:2d:e3:bb:fc:3a:5a:9f:44:61:ac:39:49:4c:4a:e1:5a:9e:9d:f1:31:e9:9b:73:01:9a +-----BEGIN CERTIFICATE----- +MIIC5zCCAlACAQEwDQYJKoZIhvcNAQEFBQAwgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0 +IFZhbGlkYXRpb24gTmV0d29yazEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAz +BgNVBAsTLFZhbGlDZXJ0IENsYXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9y +aXR5MSEwHwYDVQQDExhodHRwOi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG +9w0BCQEWEWluZm9AdmFsaWNlcnQuY29tMB4XDTk5MDYyNjAwMjIzM1oXDTE5MDYy +NjAwMjIzM1owgbsxJDAiBgNVBAcTG1ZhbGlDZXJ0IFZhbGlkYXRpb24gTmV0d29y +azEXMBUGA1UEChMOVmFsaUNlcnQsIEluYy4xNTAzBgNVBAsTLFZhbGlDZXJ0IENs +YXNzIDMgUG9saWN5IFZhbGlkYXRpb24gQXV0aG9yaXR5MSEwHwYDVQQDExhodHRw +Oi8vd3d3LnZhbGljZXJ0LmNvbS8xIDAeBgkqhkiG9w0BCQEWEWluZm9AdmFsaWNl +cnQuY29tMIGfMA0GCSqGSIb3DQEBAQUAA4GNADCBiQKBgQDjmFGWHOjVsQaBalfD +cnWTq8+epvzzFlLWLU2fNUSoLgRNB0mKOCn1dzfnt6td3zZxFJmP3MKS8edgkpfs +2Ejcv8ECIMYkpChMMFp2bbFc893enhBxoYjHW5tBbcqwuI4V7q0zK89HBFx1cQqY +JJgpp0lZpd34t0NiYfPT4tBVPwIDAQABMA0GCSqGSIb3DQEBBQUAA4GBAFa7AliE +Zwgs3x/be0kz9dNnnfS0ChCzycUs4pJqcXgn8nCDQtM+z6lU9PHYkhaM0QTLS6vJ +n0WuPIqpsHEzXcjFV9+vqDWzf4mH6eglkrh/hXqu1rweN1gqZ8mRzyqBPu3GOd/A +PhmcGcwTTYJBtYze4D1gCCAPRX5ron+jjBXu +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. 
- For authorized use only +# Label: "Verisign Class 3 Public Primary Certification Authority - G3" +# Serial: 206684696279472310254277870180966723415 +# MD5 Fingerprint: cd:68:b6:a7:c7:c4:ce:75:e0:1d:4f:57:44:61:92:09 +# SHA1 Fingerprint: 13:2d:0d:45:53:4b:69:97:cd:b2:d5:c3:39:e2:55:76:60:9b:5c:c6 +# SHA256 Fingerprint: eb:04:cf:5e:b1:f3:9a:fa:76:2f:2b:b1:20:f2:96:cb:a5:20:c1:b9:7d:b1:58:95:65:b8:1c:b9:a1:7b:72:44 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQCbfgZJoz5iudXukEhxKe9XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDMgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMu6nFL8eB8aHm8b +N3O9+MlrlBIwT/A2R/XQkQr1F8ilYcEWQE37imGQ5XYgwREGfassbqb1EUGO+i2t +KmFZpGcmTNDovFJbcCAEWNF6yaRpvIMXZK0Fi7zQWM6NjPXr8EJJC52XJ2cybuGu +kxUccLwgTS8Y3pKI6GyFVxEa6X7jJhFUokWWVYPKMIno3Nij7SqAP395ZVc+FSBm +CC+Vk7+qRy+oRpfwEuL+wgorUeZ25rdGt+INpsyow0xZVYnm6FNcHOqd8GIWC6fJ +Xwzw3sJ2zq/3avL6QaaiMxTJ5Xpj055iN9WFZZ4O5lMkdBteHRJTW8cs54NJOxWu +imi5V5cCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAERSWwauSCPc/L8my/uRan2Te +2yFPhpk0djZX3dAVL8WtfxUfN2JzPtTnX84XA9s1+ivbrmAJXx5fj267Cz3qWhMe +DGBvtcC1IyIuBwvLqXTLR7sdwdela8wv0kL9Sd2nic9TutoAWii/gt/4uhMdUIaC +/Y4wjylGsB49Ndo4YhYYSq3mtlFs3q9i6wHQHiT+eo8SGhJouPtmmRQURVyu565p +F4ErWjfJXir0xuKhXFSbplQAz/DxwceYMBo7Nhbbo27q/a2ywtrvAkcTisDxszGt +TxzhT5yvDwyd93gN2PQ1VoDat20Xj50egWTh/sVFuq1ruQp6Tk9LhO5L8X3dEQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 4 Public Primary Certification Authority - 
G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 4 Public Primary Certification Authority - G3 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 1999 VeriSign, Inc. - For authorized use only +# Label: "Verisign Class 4 Public Primary Certification Authority - G3" +# Serial: 314531972711909413743075096039378935511 +# MD5 Fingerprint: db:c8:f2:27:2e:b1:ea:6a:29:23:5d:fe:56:3e:33:df +# SHA1 Fingerprint: c8:ec:8c:87:92:69:cb:4b:ab:39:e9:8d:7e:57:67:f3:14:95:73:9d +# SHA256 Fingerprint: e3:89:36:0d:0f:db:ae:b3:d2:50:58:4b:47:30:31:4e:22:2f:39:c1:56:a0:20:14:4e:8d:96:05:61:79:15:06 +-----BEGIN CERTIFICATE----- +MIIEGjCCAwICEQDsoKeLbnVqAc/EfMwvlF7XMA0GCSqGSIb3DQEBBQUAMIHKMQsw +CQYDVQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZl +cmlTaWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWdu +LCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlT +aWduIENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3Jp +dHkgLSBHMzAeFw05OTEwMDEwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMIHKMQswCQYD +VQQGEwJVUzEXMBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlT +aWduIFRydXN0IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAxOTk5IFZlcmlTaWduLCBJ +bmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxRTBDBgNVBAMTPFZlcmlTaWdu +IENsYXNzIDQgUHVibGljIFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK3LpRFpxlmr8Y+1 +GQ9Wzsy1HyDkniYlS+BzZYlZ3tCD5PUPtbut8XzoIfzk6AzufEUiGXaStBO3IFsJ ++mGuqPKljYXCKtbeZjbSmwL0qJJgfJxptI8kHtCGUvYynEFYHiK9zUVilQhu0Gbd +U6LM8BDcVHOLBKFGMzNcF0C5nk3T875Vg+ixiY5afJqWIpA7iCXy0lOIAgwLePLm +NxdLMEYH5IBtptiWLugs+BGzOA1mppvqySNb247i8xOOGlktqgLw7KSHZtzBP/XY +ufTsgsbSPZUd5cBPhMnZo0QoBmrXRazwa2rvTl/4EYIeOGM0ZlDUPpNz+jDDZq3/ +ky2X7wMCAwEAATANBgkqhkiG9w0BAQUFAAOCAQEAj/ola09b5KROJ1WrIhVZPMq1 +CtRK26vdoV9TxaBXOcLORyu+OshWv8LZJxA6sQU8wHcxuzrTBXttmhwwjIDLk5Mq +g6sFUYICABFna/OIYUdfA5PVWw3g8dShMjWFsjrbsIKr0csKvE+MW8VLADsfKoKm 
+fjaF3H48ZwC15DtS4KjrXRX5xm3wrR0OhbepmnMUWluPQSjA1egtTaRezarZ7c7c +2NU8Qh0XwRJdRTjDOPP8hS6DRkiy1yBfkjaP53kPmF6Z6PDQpLv1U70qzlmwr25/ +bLvSHgCwIe34QWKCudiyxLtGUPMxxY8BqHTr9Xgn2uf3ZkPznoM+IKrDNWCRzg== +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Secure Server Certification Authority O=Entrust.net OU=www.entrust.net/CPS incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Secure Server CA" +# Serial: 927650371 +# MD5 Fingerprint: df:f2:80:73:cc:f1:e6:61:73:fc:f5:42:e9:c5:7c:ee +# SHA1 Fingerprint: 99:a6:9b:e6:1a:fe:88:6b:4d:2b:82:00:7c:b8:54:fc:31:7e:15:39 +# SHA256 Fingerprint: 62:f2:40:27:8c:56:4c:4d:d8:bf:7d:9d:4f:6f:36:6e:a8:94:d2:2f:5f:34:d9:89:a9:83:ac:ec:2f:ff:ed:50 +-----BEGIN CERTIFICATE----- +MIIE2DCCBEGgAwIBAgIEN0rSQzANBgkqhkiG9w0BAQUFADCBwzELMAkGA1UEBhMC +VVMxFDASBgNVBAoTC0VudHJ1c3QubmV0MTswOQYDVQQLEzJ3d3cuZW50cnVzdC5u +ZXQvQ1BTIGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxpYWIuKTElMCMGA1UECxMc +KGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDE6MDgGA1UEAxMxRW50cnVzdC5u +ZXQgU2VjdXJlIFNlcnZlciBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw05OTA1 +MjUxNjA5NDBaFw0xOTA1MjUxNjM5NDBaMIHDMQswCQYDVQQGEwJVUzEUMBIGA1UE +ChMLRW50cnVzdC5uZXQxOzA5BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5j +b3JwLiBieSByZWYuIChsaW1pdHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBF +bnRydXN0Lm5ldCBMaW1pdGVkMTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUg +U2VydmVyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGdMA0GCSqGSIb3DQEBAQUA +A4GLADCBhwKBgQDNKIM0VBuJ8w+vN5Ex/68xYMmo6LIQaO2f55M28Qpku0f1BBc/ +I0dNxScZgSYMVHINiC3ZH5oSn7yzcdOAGT9HZnuMNSjSuQrfJNqc1lB5gXpa0zf3 +wkrYKZImZNHkmGw6AIr1NJtl+O3jEP/9uElY3KDegjlrgbEWGWG5VLbmQwIBA6OC +AdcwggHTMBEGCWCGSAGG+EIBAQQEAwIABzCCARkGA1UdHwSCARAwggEMMIHeoIHb +oIHYpIHVMIHSMQswCQYDVQQGEwJVUzEUMBIGA1UEChMLRW50cnVzdC5uZXQxOzA5 +BgNVBAsTMnd3dy5lbnRydXN0Lm5ldC9DUFMgaW5jb3JwLiBieSByZWYuIChsaW1p 
+dHMgbGlhYi4pMSUwIwYDVQQLExwoYykgMTk5OSBFbnRydXN0Lm5ldCBMaW1pdGVk +MTowOAYDVQQDEzFFbnRydXN0Lm5ldCBTZWN1cmUgU2VydmVyIENlcnRpZmljYXRp +b24gQXV0aG9yaXR5MQ0wCwYDVQQDEwRDUkwxMCmgJ6AlhiNodHRwOi8vd3d3LmVu +dHJ1c3QubmV0L0NSTC9uZXQxLmNybDArBgNVHRAEJDAigA8xOTk5MDUyNTE2MDk0 +MFqBDzIwMTkwNTI1MTYwOTQwWjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAU8Bdi +E1U9s/8KAGv7UISX8+1i0BowHQYDVR0OBBYEFPAXYhNVPbP/CgBr+1CEl/PtYtAa +MAwGA1UdEwQFMAMBAf8wGQYJKoZIhvZ9B0EABAwwChsEVjQuMAMCBJAwDQYJKoZI +hvcNAQEFBQADgYEAkNwwAvpkdMKnCqV8IY00F6j7Rw7/JXyNEwr75Ji174z4xRAN +95K+8cPV1ZVqBLssziY2ZcgxxufuP+NXdYR6Ee9GTxj005i7qIcyunL2POI9n9cd +2cNgQ4xYDiKWL2KjLB+6rQXvqzJ4h6BUcxm1XAX5Uj5tLUUL9wqT6u0G+bI= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Subject: CN=Entrust.net Certification Authority (2048) O=Entrust.net OU=www.entrust.net/CPS_2048 incorp. by ref. (limits liab.)/(c) 1999 Entrust.net Limited +# Label: "Entrust.net Premium 2048 Secure Server CA" +# Serial: 946059622 +# MD5 Fingerprint: ba:21:ea:20:d6:dd:db:8f:c1:57:8b:40:ad:a1:fc:fc +# SHA1 Fingerprint: 80:1d:62:d0:7b:44:9d:5c:5c:03:5c:98:ea:61:fa:44:3c:2a:58:fe +# SHA256 Fingerprint: d1:c3:39:ea:27:84:eb:87:0f:93:4f:c5:63:4e:4a:a9:ad:55:05:01:64:01:f2:64:65:d3:7a:57:46:63:35:9f +-----BEGIN CERTIFICATE----- +MIIEXDCCA0SgAwIBAgIEOGO5ZjANBgkqhkiG9w0BAQUFADCBtDEUMBIGA1UEChML +RW50cnVzdC5uZXQxQDA+BgNVBAsUN3d3dy5lbnRydXN0Lm5ldC9DUFNfMjA0OCBp +bmNvcnAuIGJ5IHJlZi4gKGxpbWl0cyBsaWFiLikxJTAjBgNVBAsTHChjKSAxOTk5 +IEVudHJ1c3QubmV0IExpbWl0ZWQxMzAxBgNVBAMTKkVudHJ1c3QubmV0IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5ICgyMDQ4KTAeFw05OTEyMjQxNzUwNTFaFw0xOTEy +MjQxODIwNTFaMIG0MRQwEgYDVQQKEwtFbnRydXN0Lm5ldDFAMD4GA1UECxQ3d3d3 +LmVudHJ1c3QubmV0L0NQU18yMDQ4IGluY29ycC4gYnkgcmVmLiAobGltaXRzIGxp +YWIuKTElMCMGA1UECxMcKGMpIDE5OTkgRW50cnVzdC5uZXQgTGltaXRlZDEzMDEG +A1UEAxMqRW50cnVzdC5uZXQgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgKDIwNDgp 
+MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEArU1LqRKGsuqjIAcVFmQq +K0vRvwtKTY7tgHalZ7d4QMBzQshowNtTK91euHaYNZOLGp18EzoOH1u3Hs/lJBQe +sYGpjX24zGtLA/ECDNyrpUAkAH90lKGdCCmziAv1h3edVc3kw37XamSrhRSGlVuX +MlBvPci6Zgzj/L24ScF2iUkZ/cCovYmjZy/Gn7xxGWC4LeksyZB2ZnuU4q941mVT +XTzWnLLPKQP5L6RQstRIzgUyVYr9smRMDuSYB3Xbf9+5CFVghTAp+XtIpGmG4zU/ +HoZdenoVve8AjhUiVBcAkCaTvA5JaJG/+EfTnZVCwQ5N328mz8MYIWJmQ3DW1cAH +4QIDAQABo3QwcjARBglghkgBhvhCAQEEBAMCAAcwHwYDVR0jBBgwFoAUVeSB0RGA +vtiJuQijMfmhJAkWuXAwHQYDVR0OBBYEFFXkgdERgL7YibkIozH5oSQJFrlwMB0G +CSqGSIb2fQdBAAQQMA4bCFY1LjA6NC4wAwIEkDANBgkqhkiG9w0BAQUFAAOCAQEA +WUesIYSKF8mciVMeuoCFGsY8Tj6xnLZ8xpJdGGQC49MGCBFhfGPjK50xA3B20qMo +oPS7mmNz7W3lKtvtFKkrxjYR0CvrB4ul2p5cGZ1WEvVUKcgF7bISKo30Axv/55IQ +h7A6tcOdBTcSo8f0FbnVpDkWm1M6I5HxqIKiaohowXkCIryqptau37AUX7iH0N18 +f3v/rxzP5tsHrV7bhZ3QKw0z2wTR5klAEyt2+z7pnIkPFc4YsIV4IU9rTw76NmfN +B/L/CNDi3tm/Kq+4h4YhPATKt5Rof8886ZjXOP/swNlQ8C5LWK5Gb9Auw2DaclVy +vUxFnmG6v4SBkgPR0ml8xQ== +-----END CERTIFICATE----- + +# Issuer: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Subject: CN=Baltimore CyberTrust Root O=Baltimore OU=CyberTrust +# Label: "Baltimore CyberTrust Root" +# Serial: 33554617 +# MD5 Fingerprint: ac:b6:94:a5:9c:17:e0:d7:91:52:9b:b1:97:06:a6:e4 +# SHA1 Fingerprint: d4:de:20:d0:5e:66:fc:53:fe:1a:50:88:2c:78:db:28:52:ca:e4:74 +# SHA256 Fingerprint: 16:af:57:a9:f6:76:b0:ab:12:60:95:aa:5e:ba:de:f2:2a:b3:11:19:d6:44:ac:95:cd:4b:93:db:f3:f2:6a:eb +-----BEGIN CERTIFICATE----- +MIIDdzCCAl+gAwIBAgIEAgAAuTANBgkqhkiG9w0BAQUFADBaMQswCQYDVQQGEwJJ +RTESMBAGA1UEChMJQmFsdGltb3JlMRMwEQYDVQQLEwpDeWJlclRydXN0MSIwIAYD +VQQDExlCYWx0aW1vcmUgQ3liZXJUcnVzdCBSb290MB4XDTAwMDUxMjE4NDYwMFoX +DTI1MDUxMjIzNTkwMFowWjELMAkGA1UEBhMCSUUxEjAQBgNVBAoTCUJhbHRpbW9y +ZTETMBEGA1UECxMKQ3liZXJUcnVzdDEiMCAGA1UEAxMZQmFsdGltb3JlIEN5YmVy +VHJ1c3QgUm9vdDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAKMEuyKr +mD1X6CZymrV51Cni4eiVgLGw41uOKymaZN+hXe2wCQVt2yguzmKiYv60iNoS6zjr +IZ3AQSsBUnuId9Mcj8e6uYi1agnnc+gRQKfRzMpijS3ljwumUNKoUMMo6vWrJYeK 
+mpYcqWe4PwzV9/lSEy/CG9VwcPCPwBLKBsua4dnKM3p31vjsufFoREJIE9LAwqSu +XmD+tqYF/LTdB1kC1FkYmGP1pWPgkAx9XbIGevOF6uvUA65ehD5f/xXtabz5OTZy +dc93Uk3zyZAsuT3lySNTPx8kmCFcB5kpvcY67Oduhjprl3RjM71oGDHweI12v/ye +jl0qhqdNkNwnGjkCAwEAAaNFMEMwHQYDVR0OBBYEFOWdWTCCR1jMrPoIVDaGezq1 +BE3wMBIGA1UdEwEB/wQIMAYBAf8CAQMwDgYDVR0PAQH/BAQDAgEGMA0GCSqGSIb3 +DQEBBQUAA4IBAQCFDF2O5G9RaEIFoN27TyclhAO992T9Ldcw46QQF+vaKSm2eT92 +9hkTI7gQCvlYpNRhcL0EYWoSihfVCr3FvDB81ukMJY2GQE/szKN+OMY3EU/t3Wgx +jkzSswF07r51XgdIGn9w/xZchMB5hbgF/X++ZRGjD8ACtPhSNzkE1akxehi/oCr0 +Epn3o0WC4zxe9Z2etciefC7IpJ5OCBRLbf1wbWsaY71k5h+3zvDyny67G7fyUIhz +ksLi4xaNmjICq44Y3ekQEe5+NauQrz4wlHrQMz2nZQ/1/I6eYs9HRCwBXbsdtTLS +R9I4LtD+gdwyah617jzV/OeBHRnDJELqYzmp +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure Global eBusiness CA-1 O=Equifax Secure Inc. +# Label: "Equifax Secure Global eBusiness CA" +# Serial: 1 +# MD5 Fingerprint: 8f:5d:77:06:27:c4:98:3c:5b:93:78:e7:d7:7d:9b:cc +# SHA1 Fingerprint: 7e:78:4a:10:1c:82:65:cc:2d:e1:f1:6d:47:b4:40:ca:d9:0a:19:45 +# SHA256 Fingerprint: 5f:0b:62:ea:b5:e3:53:ea:65:21:65:16:58:fb:b6:53:59:f4:43:28:0a:4a:fb:d1:04:d7:7d:10:f9:f0:4c:07 +-----BEGIN CERTIFICATE----- +MIICkDCCAfmgAwIBAgIBATANBgkqhkiG9w0BAQQFADBaMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEtMCsGA1UEAxMkRXF1aWZheCBT +ZWN1cmUgR2xvYmFsIGVCdXNpbmVzcyBDQS0xMB4XDTk5MDYyMTA0MDAwMFoXDTIw +MDYyMTA0MDAwMFowWjELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0VxdWlmYXggU2Vj +dXJlIEluYy4xLTArBgNVBAMTJEVxdWlmYXggU2VjdXJlIEdsb2JhbCBlQnVzaW5l +c3MgQ0EtMTCBnzANBgkqhkiG9w0BAQEFAAOBjQAwgYkCgYEAuucXkAJlsTRVPEnC +UdXfp9E3j9HngXNBUmCbnaEXJnitx7HoJpQytd4zjTov2/KaelpzmKNc6fuKcxtc +58O/gGzNqfTWK8D3+ZmqY6KxRwIP1ORROhI8bIpaVIRw28HFkM9yRcuoWcDNM50/ +o5brhTMhHD4ePmBudpxnhcXIw2ECAwEAAaNmMGQwEQYJYIZIAYb4QgEBBAQDAgAH +MA8GA1UdEwEB/wQFMAMBAf8wHwYDVR0jBBgwFoAUvqigdHJQa0S3ySPY+6j/s1dr +aGwwHQYDVR0OBBYEFL6ooHRyUGtEt8kj2Puo/7NXa2hsMA0GCSqGSIb3DQEBBAUA 
+A4GBADDiAVGqx+pf2rnQZQ8w1j7aDRRJbpGTJxQx78T3LUX47Me/okENI7SS+RkA +Z70Br83gcfxaz2TE4JaY0KNA4gGK7ycH8WUBikQtBmV1UsCGECAhX2xrD2yuCRyv +8qIYNMR1pHMc8Y3c7635s3a0kr/clRAevsvIO1qEYBlWlKlV +-----END CERTIFICATE----- + +# Issuer: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. +# Subject: CN=Equifax Secure eBusiness CA-1 O=Equifax Secure Inc. +# Label: "Equifax Secure eBusiness CA 1" +# Serial: 4 +# MD5 Fingerprint: 64:9c:ef:2e:44:fc:c6:8f:52:07:d0:51:73:8f:cb:3d +# SHA1 Fingerprint: da:40:18:8b:91:89:a3:ed:ee:ae:da:97:fe:2f:9d:f5:b7:d1:8a:41 +# SHA256 Fingerprint: cf:56:ff:46:a4:a1:86:10:9d:d9:65:84:b5:ee:b5:8a:51:0c:42:75:b0:e5:f9:4f:40:bb:ae:86:5e:19:f6:73 +-----BEGIN CERTIFICATE----- +MIICgjCCAeugAwIBAgIBBDANBgkqhkiG9w0BAQQFADBTMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5jLjEmMCQGA1UEAxMdRXF1aWZheCBT +ZWN1cmUgZUJ1c2luZXNzIENBLTEwHhcNOTkwNjIxMDQwMDAwWhcNMjAwNjIxMDQw +MDAwWjBTMQswCQYDVQQGEwJVUzEcMBoGA1UEChMTRXF1aWZheCBTZWN1cmUgSW5j +LjEmMCQGA1UEAxMdRXF1aWZheCBTZWN1cmUgZUJ1c2luZXNzIENBLTEwgZ8wDQYJ +KoZIhvcNAQEBBQADgY0AMIGJAoGBAM4vGbwXt3fek6lfWg0XTzQaDJj0ItlZ1MRo +RvC0NcWFAyDGr0WlIVFFQesWWDYyb+JQYmT5/VGcqiTZ9J2DKocKIdMSODRsjQBu +WqDZQu4aIZX5UkxVWsUPOE9G+m34LjXWHXzr4vCwdYDIqROsvojvOm6rXyo4YgKw +Env+j6YDAgMBAAGjZjBkMBEGCWCGSAGG+EIBAQQEAwIABzAPBgNVHRMBAf8EBTAD +AQH/MB8GA1UdIwQYMBaAFEp4MlIR21kWNl7fwRQ2QGpHfEyhMB0GA1UdDgQWBBRK +eDJSEdtZFjZe38EUNkBqR3xMoTANBgkqhkiG9w0BAQQFAAOBgQB1W6ibAxHm6VZM +zfmpTMANmvPMZWnmJXbMWbfWVMMdzZmsGd20hdXgPfxiIKeES1hl8eL5lSE/9dR+ +WB5Hh1Q+WKG1tfgq73HnvMP2sUlG4tega+VWeponmHxGYhTnyfxuAxJ5gDgdSIKN +/Bf+KpYrtWKmpj29f5JZzVoqgrI3eQ== +-----END CERTIFICATE----- + +# Issuer: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 +# Subject: O=Equifax Secure OU=Equifax Secure eBusiness CA-2 +# Label: "Equifax Secure eBusiness CA 2" +# Serial: 930140085 +# MD5 Fingerprint: aa:bf:bf:64:97:da:98:1d:6f:c6:08:3a:95:70:33:ca +# SHA1 Fingerprint: 39:4f:f6:85:0b:06:be:52:e5:18:56:cc:10:e1:80:e8:82:b3:85:cc +# SHA256 Fingerprint: 
2f:27:4e:48:ab:a4:ac:7b:76:59:33:10:17:75:50:6d:c3:0e:e3:8e:f6:ac:d5:c0:49:32:cf:e0:41:23:42:20 +-----BEGIN CERTIFICATE----- +MIIDIDCCAomgAwIBAgIEN3DPtTANBgkqhkiG9w0BAQUFADBOMQswCQYDVQQGEwJV +UzEXMBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2Vj +dXJlIGVCdXNpbmVzcyBDQS0yMB4XDTk5MDYyMzEyMTQ0NVoXDTE5MDYyMzEyMTQ0 +NVowTjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDkVxdWlmYXggU2VjdXJlMSYwJAYD +VQQLEx1FcXVpZmF4IFNlY3VyZSBlQnVzaW5lc3MgQ0EtMjCBnzANBgkqhkiG9w0B +AQEFAAOBjQAwgYkCgYEA5Dk5kx5SBhsoNviyoynF7Y6yEb3+6+e0dMKP/wXn2Z0G +vxLIPw7y1tEkshHe0XMJitSxLJgJDR5QRrKDpkWNYmi7hRsgcDKqQM2mll/EcTc/ +BPO3QSQ5BxoeLmFYoBIL5aXfxavqN3HMHMg3OrmXUqesxWoklE6ce8/AatbfIb0C +AwEAAaOCAQkwggEFMHAGA1UdHwRpMGcwZaBjoGGkXzBdMQswCQYDVQQGEwJVUzEX +MBUGA1UEChMORXF1aWZheCBTZWN1cmUxJjAkBgNVBAsTHUVxdWlmYXggU2VjdXJl +IGVCdXNpbmVzcyBDQS0yMQ0wCwYDVQQDEwRDUkwxMBoGA1UdEAQTMBGBDzIwMTkw +NjIzMTIxNDQ1WjALBgNVHQ8EBAMCAQYwHwYDVR0jBBgwFoAUUJ4L6q9euSBIplBq +y/3YIHqngnYwHQYDVR0OBBYEFFCeC+qvXrkgSKZQasv92CB6p4J2MAwGA1UdEwQF +MAMBAf8wGgYJKoZIhvZ9B0EABA0wCxsFVjMuMGMDAgbAMA0GCSqGSIb3DQEBBQUA +A4GBAAyGgq3oThr1jokn4jVYPSm0B482UJW/bsGe68SQsoWou7dC4A8HOd/7npCy +0cE+U58DRLB+S/Rv5Hwf5+Kx5Lia78O9zt4LMjTZ3ijtM2vE1Nc9ElirfQkty3D1 +E4qUoSek1nDFbZS1yX2doNLGCEnZZpum0/QL3MUmV+GRMOrN +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Class 1 CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Low-Value Services Root" +# Serial: 1 +# MD5 Fingerprint: 1e:42:95:02:33:92:6b:b9:5f:c0:7f:da:d6:b2:4b:fc +# SHA1 Fingerprint: cc:ab:0e:a0:4c:23:01:d6:69:7b:dd:37:9f:cd:12:eb:24:e3:94:9d +# SHA256 Fingerprint: 8c:72:09:27:9a:c0:4e:27:5e:16:d0:7f:d3:b7:75:e8:01:54:b5:96:80:46:e3:1f:52:dd:25:76:63:24:e9:a7 +-----BEGIN CERTIFICATE----- +MIIEGDCCAwCgAwIBAgIBATANBgkqhkiG9w0BAQUFADBlMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 +b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwHhcNMDAwNTMw 
+MTAzODMxWhcNMjAwNTMwMTAzODMxWjBlMQswCQYDVQQGEwJTRTEUMBIGA1UEChML +QWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYD +VQQDExhBZGRUcnVzdCBDbGFzcyAxIENBIFJvb3QwggEiMA0GCSqGSIb3DQEBAQUA +A4IBDwAwggEKAoIBAQCWltQhSWDia+hBBwzexODcEyPNwTXH+9ZOEQpnXvUGW2ul +CDtbKRY654eyNAbFvAWlA3yCyykQruGIgb3WntP+LVbBFc7jJp0VLhD7Bo8wBN6n +tGO0/7Gcrjyvd7ZWxbWroulpOj0OM3kyP3CCkplhbY0wCI9xP6ZIVxn4JdxLZlyl +dI+Yrsj5wAYi56xz36Uu+1LcsRVlIPo1Zmne3yzxbrww2ywkEtvrNTVokMsAsJch +PXQhI2U0K7t4WaPW4XY5mqRJjox0r26kmqPZm9I4XJuiGMx1I4S+6+JNM3GOGvDC ++Mcdoq0Dlyz4zyXG9rgkMbFjXZJ/Y/AlyVMuH79NAgMBAAGjgdIwgc8wHQYDVR0O +BBYEFJWxtPCUtr3H2tERCSG+wa9J/RB7MAsGA1UdDwQEAwIBBjAPBgNVHRMBAf8E +BTADAQH/MIGPBgNVHSMEgYcwgYSAFJWxtPCUtr3H2tERCSG+wa9J/RB7oWmkZzBl +MQswCQYDVQQGEwJTRTEUMBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFk +ZFRydXN0IFRUUCBOZXR3b3JrMSEwHwYDVQQDExhBZGRUcnVzdCBDbGFzcyAxIENB +IFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBACxtZBsfzQ3duQH6lmM0MkhHma6X +7f1yFqZzR1r0693p9db7RcwpiURdv0Y5PejuvE1Uhh4dbOMXJ0PhiVYrqW9yTkkz +43J8KiOavD7/KCrto/8cI7pDVwlnTUtiBi34/2ydYB7YHEt9tTEv2dB8Xfjea4MY +eDdXL+gzB2ffHsdrKpV2ro9Xo/D0UrSpUwjP4E/TelOL/bscVjby/rK25Xa71SJl +pz/+0WatC7xrmYbvP33zGDLKe8bjq2RGlfgmadlVg3sslgf/WSxEo8bl6ancoWOA +WiFeIc9TVPC6b4nbqKqVz4vjccweGyBECMB6tkD9xOQ14R0WHNC8K47Wcdk= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Subject: CN=AddTrust External CA Root O=AddTrust AB OU=AddTrust External TTP Network +# Label: "AddTrust External Root" +# Serial: 1 +# MD5 Fingerprint: 1d:35:54:04:85:78:b0:3f:42:42:4d:bf:20:73:0a:3f +# SHA1 Fingerprint: 02:fa:f3:e2:91:43:54:68:60:78:57:69:4d:f5:e4:5b:68:85:18:68 +# SHA256 Fingerprint: 68:7f:a4:51:38:22:78:ff:f0:c8:b1:1f:8d:43:d5:76:67:1c:6e:b2:bc:ea:b4:13:fb:83:d9:65:d0:6d:2f:f2 +-----BEGIN CERTIFICATE----- +MIIENjCCAx6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBvMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxJjAkBgNVBAsTHUFkZFRydXN0IEV4dGVybmFs +IFRUUCBOZXR3b3JrMSIwIAYDVQQDExlBZGRUcnVzdCBFeHRlcm5hbCBDQSBSb290 
+MB4XDTAwMDUzMDEwNDgzOFoXDTIwMDUzMDEwNDgzOFowbzELMAkGA1UEBhMCU0Ux +FDASBgNVBAoTC0FkZFRydXN0IEFCMSYwJAYDVQQLEx1BZGRUcnVzdCBFeHRlcm5h +bCBUVFAgTmV0d29yazEiMCAGA1UEAxMZQWRkVHJ1c3QgRXh0ZXJuYWwgQ0EgUm9v +dDCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBALf3GjPm8gAELTngTlvt +H7xsD821+iO2zt6bETOXpClMfZOfvUq8k+0DGuOPz+VtUFrWlymUWoCwSXrbLpX9 +uMq/NzgtHj6RQa1wVsfwTz/oMp50ysiQVOnGXw94nZpAPA6sYapeFI+eh6FqUNzX +mk6vBbOmcZSccbNQYArHE504B4YCqOmoaSYYkKtMsE8jqzpPhNjfzp/haW+710LX +a0Tkx63ubUFfclpxCDezeWWkWaCUN/cALw3CknLa0Dhy2xSoRcRdKn23tNbE7qzN +E0S3ySvdQwAl+mG5aWpYIxG3pzOPVnVZ9c0p10a3CitlttNCbxWyuHv77+ldU9U0 +WicCAwEAAaOB3DCB2TAdBgNVHQ4EFgQUrb2YejS0Jvf6xCZU7wO94CTLVBowCwYD +VR0PBAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wgZkGA1UdIwSBkTCBjoAUrb2YejS0 +Jvf6xCZU7wO94CTLVBqhc6RxMG8xCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtBZGRU +cnVzdCBBQjEmMCQGA1UECxMdQWRkVHJ1c3QgRXh0ZXJuYWwgVFRQIE5ldHdvcmsx +IjAgBgNVBAMTGUFkZFRydXN0IEV4dGVybmFsIENBIFJvb3SCAQEwDQYJKoZIhvcN +AQEFBQADggEBALCb4IUlwtYj4g+WBpKdQZic2YR5gdkeWxQHIzZlj7DYd7usQWxH +YINRsPkyPef89iYTx4AWpb9a/IfPeHmJIZriTAcKhjW88t5RxNKWt9x+Tu5w/Rw5 +6wwCURQtjr0W4MHfRnXnJK3s9EK0hZNwEGe6nQY1ShjTK3rMUUKhemPR5ruhxSvC +Nr4TDea9Y355e6cJDUCrat2PisP29owaQgVR1EX1n6diIWgVIEM8med8vSTYqZEX +c4g/VhsxOBi0cQ+azcgOno4uG+GMmIPLHzHxREzGBHNJdmAPx/i9F4BrLunMTA5a +mnkPIAou1Z5jJh5VkpTYghdae9C8x49OhgQ= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Public CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Public Services Root" +# Serial: 1 +# MD5 Fingerprint: c1:62:3e:23:c5:82:73:9c:03:59:4b:2b:e9:77:49:7f +# SHA1 Fingerprint: 2a:b6:28:48:5e:78:fb:f3:ad:9e:79:10:dd:6b:df:99:72:2c:96:e5 +# SHA256 Fingerprint: 07:91:ca:07:49:b2:07:82:aa:d3:c7:d7:bd:0c:df:c9:48:58:35:84:3e:b2:d7:99:60:09:ce:43:ab:6c:69:27 +-----BEGIN CERTIFICATE----- +MIIEFTCCAv2gAwIBAgIBATANBgkqhkiG9w0BAQUFADBkMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 
+b3JrMSAwHgYDVQQDExdBZGRUcnVzdCBQdWJsaWMgQ0EgUm9vdDAeFw0wMDA1MzAx +MDQxNTBaFw0yMDA1MzAxMDQxNTBaMGQxCzAJBgNVBAYTAlNFMRQwEgYDVQQKEwtB +ZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIDAeBgNV +BAMTF0FkZFRydXN0IFB1YmxpYyBDQSBSb290MIIBIjANBgkqhkiG9w0BAQEFAAOC +AQ8AMIIBCgKCAQEA6Rowj4OIFMEg2Dybjxt+A3S72mnTRqX4jsIMEZBRpS9mVEBV +6tsfSlbunyNu9DnLoblv8n75XYcmYZ4c+OLspoH4IcUkzBEMP9smcnrHAZcHF/nX +GCwwfQ56HmIexkvA/X1id9NEHif2P0tEs7c42TkfYNVRknMDtABp4/MUTu7R3AnP +dzRGULD4EfL+OHn3Bzn+UZKXC1sIXzSGAa2Il+tmzV7R/9x98oTaunet3IAIx6eH +1lWfl2royBFkuucZKT8Rs3iQhCBSWxHveNCD9tVIkNAwHM+A+WD+eeSI8t0A65RF +62WUaUC6wNW0uLp9BBGo6zEFlpROWCGOn9Bg/QIDAQABo4HRMIHOMB0GA1UdDgQW +BBSBPjfYkrAfd59ctKtzquf2NGAv+jALBgNVHQ8EBAMCAQYwDwYDVR0TAQH/BAUw +AwEB/zCBjgYDVR0jBIGGMIGDgBSBPjfYkrAfd59ctKtzquf2NGAv+qFopGYwZDEL +MAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQLExRBZGRU +cnVzdCBUVFAgTmV0d29yazEgMB4GA1UEAxMXQWRkVHJ1c3QgUHVibGljIENBIFJv +b3SCAQEwDQYJKoZIhvcNAQEFBQADggEBAAP3FUr4JNojVhaTdt02KLmuG7jD8WS6 +IBh4lSknVwW8fCr0uVFV2ocC3g8WFzH4qnkuCRO7r7IgGRLlk/lL+YPoRNWyQSW/ +iHVv/xD8SlTQX/D67zZzfRs2RcYhbbQVuE7PnFylPVoAjgbjPGsye/Kf8Lb93/Ao +GEjwxrzQvzSAlsJKsW2Ox5BF3i9nrEUEo3rcVZLJR2bYGozH7ZxOmuASu7VqTITh +4SINhwBk/ox9Yjllpu9CtoAlEmEBqCQTcAARJl/6NVDFSMwGR+gn2HCNX2TmoUQm +XiLsks3/QppEIW1cxeMiHV9HEufOX1362KqxMy3ZdvJOOjMMK7MtkAY= +-----END CERTIFICATE----- + +# Issuer: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Subject: CN=AddTrust Qualified CA Root O=AddTrust AB OU=AddTrust TTP Network +# Label: "AddTrust Qualified Certificates Root" +# Serial: 1 +# MD5 Fingerprint: 27:ec:39:47:cd:da:5a:af:e2:9a:01:65:21:a9:4c:bb +# SHA1 Fingerprint: 4d:23:78:ec:91:95:39:b5:00:7f:75:8f:03:3b:21:1e:c5:4d:8b:cf +# SHA256 Fingerprint: 80:95:21:08:05:db:4b:bc:35:5e:44:28:d8:fd:6e:c2:cd:e3:ab:5f:b9:7a:99:42:98:8e:b8:f4:dc:d0:60:16 +-----BEGIN CERTIFICATE----- +MIIEHjCCAwagAwIBAgIBATANBgkqhkiG9w0BAQUFADBnMQswCQYDVQQGEwJTRTEU +MBIGA1UEChMLQWRkVHJ1c3QgQUIxHTAbBgNVBAsTFEFkZFRydXN0IFRUUCBOZXR3 
+b3JrMSMwIQYDVQQDExpBZGRUcnVzdCBRdWFsaWZpZWQgQ0EgUm9vdDAeFw0wMDA1 +MzAxMDQ0NTBaFw0yMDA1MzAxMDQ0NTBaMGcxCzAJBgNVBAYTAlNFMRQwEgYDVQQK +EwtBZGRUcnVzdCBBQjEdMBsGA1UECxMUQWRkVHJ1c3QgVFRQIE5ldHdvcmsxIzAh +BgNVBAMTGkFkZFRydXN0IFF1YWxpZmllZCBDQSBSb290MIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA5B6a/twJWoekn0e+EV+vhDTbYjx5eLfpMLXsDBwq +xBb/4Oxx64r1EW7tTw2R0hIYLUkVAcKkIhPHEWT/IhKauY5cLwjPcWqzZwFZ8V1G +87B4pfYOQnrjfxvM0PC3KP0q6p6zsLkEqv32x7SxuCqg+1jxGaBvcCV+PmlKfw8i +2O+tCBGaKZnhqkRFmhJePp1tUvznoD1oL/BLcHwTOK28FSXx1s6rosAx1i+f4P8U +WfyEk9mHfExUE+uf0S0R+Bg6Ot4l2ffTQO2kBhLEO+GRwVY18BTcZTYJbqukB8c1 +0cIDMzZbdSZtQvESa0NvS3GU+jQd7RNuyoB/mC9suWXY6QIDAQABo4HUMIHRMB0G +A1UdDgQWBBQ5lYtii1zJ1IC6WA+XPxUIQ8yYpzALBgNVHQ8EBAMCAQYwDwYDVR0T +AQH/BAUwAwEB/zCBkQYDVR0jBIGJMIGGgBQ5lYtii1zJ1IC6WA+XPxUIQ8yYp6Fr +pGkwZzELMAkGA1UEBhMCU0UxFDASBgNVBAoTC0FkZFRydXN0IEFCMR0wGwYDVQQL +ExRBZGRUcnVzdCBUVFAgTmV0d29yazEjMCEGA1UEAxMaQWRkVHJ1c3QgUXVhbGlm +aWVkIENBIFJvb3SCAQEwDQYJKoZIhvcNAQEFBQADggEBABmrder4i2VhlRO6aQTv +hsoToMeqT2QbPxj2qC0sVY8FtzDqQmodwCVRLae/DLPt7wh/bDxGGuoYQ992zPlm +hpwsaPXpF/gxsxjE1kh9I0xowX67ARRvxdlu3rsEQmr49lx95dr6h+sNNVJn0J6X +dgWTP5XHAeZpVTh/EGGZyeNfpso+gmNIquIISD6q8rKFYqa0p9m9N5xotS1WfbC3 +P6CxB9bpT9zeRXEwMn8bLgn5v1Kh7sKAPgZcLlVAwRv1cEWw3F369nJad9Jjzc9Y +iQBCYz95OdBEsIJuQRno3eDBiFrRHnGTHyQwdOUeqN48Jzd/g66ed8/wMLH/S5no +xqE= +-----END CERTIFICATE----- + +# Issuer: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. +# Subject: CN=Entrust Root Certification Authority O=Entrust, Inc. OU=www.entrust.net/CPS is incorporated by reference/(c) 2006 Entrust, Inc. 
+# Label: "Entrust Root Certification Authority" +# Serial: 1164660820 +# MD5 Fingerprint: d6:a5:c3:ed:5d:dd:3e:00:c1:3d:87:92:1f:1d:3f:e4 +# SHA1 Fingerprint: b3:1e:b1:b7:40:e3:6c:84:02:da:dc:37:d4:4d:f5:d4:67:49:52:f9 +# SHA256 Fingerprint: 73:c1:76:43:4f:1b:c6:d5:ad:f4:5b:0e:76:e7:27:28:7c:8d:e5:76:16:c1:e6:e6:14:1a:2b:2c:bc:7d:8e:4c +-----BEGIN CERTIFICATE----- +MIIEkTCCA3mgAwIBAgIERWtQVDANBgkqhkiG9w0BAQUFADCBsDELMAkGA1UEBhMC +VVMxFjAUBgNVBAoTDUVudHJ1c3QsIEluYy4xOTA3BgNVBAsTMHd3dy5lbnRydXN0 +Lm5ldC9DUFMgaXMgaW5jb3Jwb3JhdGVkIGJ5IHJlZmVyZW5jZTEfMB0GA1UECxMW +KGMpIDIwMDYgRW50cnVzdCwgSW5jLjEtMCsGA1UEAxMkRW50cnVzdCBSb290IENl +cnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA2MTEyNzIwMjM0MloXDTI2MTEyNzIw +NTM0MlowgbAxCzAJBgNVBAYTAlVTMRYwFAYDVQQKEw1FbnRydXN0LCBJbmMuMTkw +NwYDVQQLEzB3d3cuZW50cnVzdC5uZXQvQ1BTIGlzIGluY29ycG9yYXRlZCBieSBy +ZWZlcmVuY2UxHzAdBgNVBAsTFihjKSAyMDA2IEVudHJ1c3QsIEluYy4xLTArBgNV +BAMTJEVudHJ1c3QgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASIwDQYJ +KoZIhvcNAQEBBQADggEPADCCAQoCggEBALaVtkNC+sZtKm9I35RMOVcF7sN5EUFo +Nu3s/poBj6E4KPz3EEZmLk0eGrEaTsbRwJWIsMn/MYszA9u3g3s+IIRe7bJWKKf4 +4LlAcTfFy0cOlypowCKVYhXbR9n10Cv/gkvJrT7eTNuQgFA/CYqEAOwwCj0Yzfv9 +KlmaI5UXLEWeH25DeW0MXJj+SKfFI0dcXv1u5x609mhF0YaDW6KKjbHjKYD+JXGI +rb68j6xSlkuqUY3kEzEZ6E5Nn9uss2rVvDlUccp6en+Q3X0dgNmBu1kmwhH+5pPi +94DkZfs0Nw4pgHBNrziGLp5/V6+eF67rHMsoIV+2HNjnogQi+dPa2MsCAwEAAaOB +sDCBrTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zArBgNVHRAEJDAi +gA8yMDA2MTEyNzIwMjM0MlqBDzIwMjYxMTI3MjA1MzQyWjAfBgNVHSMEGDAWgBRo +kORnpKZTgMeGZqTx90tD+4S9bTAdBgNVHQ4EFgQUaJDkZ6SmU4DHhmak8fdLQ/uE +vW0wHQYJKoZIhvZ9B0EABBAwDhsIVjcuMTo0LjADAgSQMA0GCSqGSIb3DQEBBQUA +A4IBAQCT1DCw1wMgKtD5Y+iRDAUgqV8ZyntyTtSx29CW+1RaGSwMCPeyvIWonX9t +O1KzKtvn1ISMY/YPyyYBkVBs9F8U4pN0wBOeMDpQ47RgxRzwIkSNcUesyBrJ6Zua +AGAT/3B+XxFNSRuzFVJ7yVTav52Vr2ua2J7p8eRDjeIRRDq/r72DQnNSi6q7pynP +9WQcCk3RvKqsnyrQ/39/2n3qse0wJcGE2jTSW3iDVuycNsMm4hH2Z0kdkquM++v/ +eu6FSqdQgPCnXEqULl8FmTxSQeDNtGPPAUO6nIPcj2A781q0tHuu2guQOHXvgR1m +0vdXcDazv/wor3ElhVsT/h5/WrQ8 +-----END 
CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA O=GeoTrust Inc. +# Label: "GeoTrust Global CA" +# Serial: 144470 +# MD5 Fingerprint: f7:75:ab:29:fb:51:4e:b7:77:5e:ff:05:3c:99:8e:f5 +# SHA1 Fingerprint: de:28:f4:a4:ff:e5:b9:2f:a3:c5:03:d1:a3:49:a7:f9:96:2a:82:12 +# SHA256 Fingerprint: ff:85:6a:2d:25:1d:cd:88:d3:66:56:f4:50:12:67:98:cf:ab:aa:de:40:79:9c:72:2d:e4:d2:b5:db:36:a7:3a +-----BEGIN CERTIFICATE----- +MIIDVDCCAjygAwIBAgIDAjRWMA0GCSqGSIb3DQEBBQUAMEIxCzAJBgNVBAYTAlVT +MRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMRswGQYDVQQDExJHZW9UcnVzdCBHbG9i +YWwgQ0EwHhcNMDIwNTIxMDQwMDAwWhcNMjIwNTIxMDQwMDAwWjBCMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEbMBkGA1UEAxMSR2VvVHJ1c3Qg +R2xvYmFsIENBMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA2swYYzD9 +9BcjGlZ+W988bDjkcbd4kdS8odhM+KhDtgPpTSEHCIjaWC9mOSm9BXiLnTjoBbdq +fnGk5sRgprDvgOSJKA+eJdbtg/OtppHHmMlCGDUUna2YRpIuT8rxh0PBFpVXLVDv +iS2Aelet8u5fa9IAjbkU+BQVNdnARqN7csiRv8lVK83Qlz6cJmTM386DGXHKTubU +1XupGc1V3sjs0l44U+VcT4wt/lAjNvxm5suOpDkZALeVAjmRCw7+OC7RHQWa9k0+ +bw8HHa8sHo9gOeL6NlMTOdReJivbPagUvTLrGAMoUgRx5aszPeE4uwc2hGKceeoW +MPRfwCvocWvk+QIDAQABo1MwUTAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBTA +ephojYn7qwVkDBF9qn1luMrMTjAfBgNVHSMEGDAWgBTAephojYn7qwVkDBF9qn1l +uMrMTjANBgkqhkiG9w0BAQUFAAOCAQEANeMpauUvXVSOKVCUn5kaFOSPeCpilKIn +Z57QzxpeR+nBsqTP3UEaBU6bS+5Kb1VSsyShNwrrZHYqLizz/Tt1kL/6cdjHPTfS +tQWVYrmm3ok9Nns4d0iXrKYgjy6myQzCsplFAMfOEVEiIuCl6rYVSAlk6l5PdPcF +PseKUgzbFbS9bZvlxrFUaKnjaZC2mqUPuLk/IH2uSrW4nOQdtqvmlKXBx4Ot2/Un +hw4EbNX/3aBd7YdStysVAq45pmp06drE57xNNB6pXE0zX5IJL4hmXXeXxx12E6nV +5fEWCRE11azbJHFwLJhWC9kXtNHjUStedejV0NxPNO3CBWaAocvmMw== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Global CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Global CA 2 O=GeoTrust Inc. 
+# Label: "GeoTrust Global CA 2" +# Serial: 1 +# MD5 Fingerprint: 0e:40:a7:6c:de:03:5d:8f:d1:0f:e4:d1:8d:f9:6c:a9 +# SHA1 Fingerprint: a9:e9:78:08:14:37:58:88:f2:05:19:b0:6d:2b:0d:2b:60:16:90:7d +# SHA256 Fingerprint: ca:2d:82:a0:86:77:07:2f:8a:b6:76:4f:f0:35:67:6c:fe:3e:5e:32:5e:01:21:72:df:3f:92:09:6d:b7:9b:85 +-----BEGIN CERTIFICATE----- +MIIDZjCCAk6gAwIBAgIBATANBgkqhkiG9w0BAQUFADBEMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3QgR2xvYmFs +IENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMTkwMzA0MDUwMDAwWjBEMQswCQYDVQQG +EwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEdMBsGA1UEAxMUR2VvVHJ1c3Qg +R2xvYmFsIENBIDIwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQDvPE1A +PRDfO1MA4Wf+lGAVPoWI8YkNkMgoI5kF6CsgncbzYEbYwbLVjDHZ3CB5JIG/NTL8 +Y2nbsSpr7iFY8gjpeMtvy/wWUsiRxP89c96xPqfCfWbB9X5SJBri1WeR0IIQ13hL +TytCOb1kLUCgsBDTOEhGiKEMuzozKmKY+wCdE1l/bztyqu6mD4b5BWHqZ38MN5aL +5mkWRxHCJ1kDs6ZgwiFAVvqgx306E+PsV8ez1q6diYD3Aecs9pYrEw15LNnA5IZ7 +S4wMcoKK+xfNAGw6EzywhIdLFnopsk/bHdQL82Y3vdj2V7teJHq4PIu5+pIaGoSe +2HSPqht/XvT+RSIhAgMBAAGjYzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYE +FHE4NvICMVNHK266ZUapEBVYIAUJMB8GA1UdIwQYMBaAFHE4NvICMVNHK266ZUap +EBVYIAUJMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG9w0BAQUFAAOCAQEAA/e1K6td +EPx7srJerJsOflN4WT5CBP51o62sgU7XAotexC3IUnbHLB/8gTKY0UvGkpMzNTEv +/NgdRN3ggX+d6YvhZJFiCzkIjKx0nVnZellSlxG5FntvRdOW2TF9AjYPnDtuzywN +A0ZF66D0f0hExghAzN4bcLUprbqLOzRldRtxIR0sFAqwlpW41uryZfspuk/qkZN0 +abby/+Ea0AzRdoXLiiW9l14sbxWZJue2Kf8i7MkCx1YAzUm5s2x7UwQa4qjJqhIF +I8LO57sEAszAR6LkxCkvW0VXiVHuPOtSCP8HNR6fNWpHSlaY0VqFH4z1Ir+rzoPz +4iIprn2DQKi6bA== +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA O=GeoTrust Inc. 
+# Label: "GeoTrust Universal CA" +# Serial: 1 +# MD5 Fingerprint: 92:65:58:8b:a2:1a:31:72:73:68:5c:b4:a5:7a:07:48 +# SHA1 Fingerprint: e6:21:f3:35:43:79:05:9a:4b:68:30:9d:8a:2f:74:22:15:87:ec:79 +# SHA256 Fingerprint: a0:45:9b:9f:63:b2:25:59:f5:fa:5d:4c:6d:b3:f9:f7:2f:f1:93:42:03:35:78:f0:73:bf:1d:1b:46:cb:b9:12 +-----BEGIN CERTIFICATE----- +MIIFaDCCA1CgAwIBAgIBATANBgkqhkiG9w0BAQUFADBFMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEeMBwGA1UEAxMVR2VvVHJ1c3QgVW5pdmVy +c2FsIENBMB4XDTA0MDMwNDA1MDAwMFoXDTI5MDMwNDA1MDAwMFowRTELMAkGA1UE +BhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xHjAcBgNVBAMTFUdlb1RydXN0 +IFVuaXZlcnNhbCBDQTCCAiIwDQYJKoZIhvcNAQEBBQADggIPADCCAgoCggIBAKYV +VaCjxuAfjJ0hUNfBvitbtaSeodlyWL0AG0y/YckUHUWCq8YdgNY96xCcOq9tJPi8 +cQGeBvV8Xx7BDlXKg5pZMK4ZyzBIle0iN430SppyZj6tlcDgFgDgEB8rMQ7XlFTT +QjOgNB0eRXbdT8oYN+yFFXoZCPzVx5zw8qkuEKmS5j1YPakWaDwvdSEYfyh3peFh +F7em6fgemdtzbvQKoiFs7tqqhZJmr/Z6a4LauiIINQ/PQvE1+mrufislzDoR5G2v +c7J2Ha3QsnhnGqQ5HFELZ1aD/ThdDc7d8Lsrlh/eezJS/R27tQahsiFepdaVaH/w +mZ7cRQg+59IJDTWU3YBOU5fXtQlEIGQWFwMCTFMNaN7VqnJNk22CDtucvc+081xd +VHppCZbW2xHBjXWotM85yM48vCR85mLK4b19p71XZQvk/iXttmkQ3CgaRr0BHdCX +teGYO8A3ZNY9lO4L4fUorgtWv3GLIylBjobFS1J72HGrH4oVpjuDWtdYAVHGTEHZ +f9hBZ3KiKN9gg6meyHv8U3NyWfWTehd2Ds735VzZC1U0oqpbtWpU5xPKV+yXbfRe +Bi9Fi1jUIxaS5BZuKGNZMN9QAZxjiRqf2xeUgnA3wySemkfWWspOqGmJch+RbNt+ +nhutxx9z3SxPGWX9f5NAEC7S8O08ni4oPmkmM8V7AgMBAAGjYzBhMA8GA1UdEwEB +/wQFMAMBAf8wHQYDVR0OBBYEFNq7LqqwDLiIJlF0XG0D08DYj3rWMB8GA1UdIwQY +MBaAFNq7LqqwDLiIJlF0XG0D08DYj3rWMA4GA1UdDwEB/wQEAwIBhjANBgkqhkiG +9w0BAQUFAAOCAgEAMXjmx7XfuJRAyXHEqDXsRh3ChfMoWIawC/yOsjmPRFWrZIRc +aanQmjg8+uUfNeVE44B5lGiku8SfPeE0zTBGi1QrlaXv9z+ZhP015s8xxtxqv6fX +IwjhmF7DWgh2qaavdy+3YL1ERmrvl/9zlcGO6JP7/TG37FcREUWbMPEaiDnBTzyn +ANXH/KttgCJwpQzgXQQpAvvLoJHRfNbDflDVnVi+QTjruXU8FdmbyUqDWcDaU/0z +uzYYm4UPFd3uLax2k7nZAY1IEKj79TiG8dsKxr2EoyNB3tZ3b4XUhRxQ4K5RirqN +Pnbiucon8l+f725ZDQbYKxek0nxru18UGkiPGkzns0ccjkxFKyDuSN/n3QmOGKja +QI2SJhFTYXNd673nxE0pN2HrrDktZy4W1vUAg4WhzH92xH3kt0tm7wNFYGm2DFKW 
+koRepqO1pD4r2czYG0eq8kTaT/kD6PAUyz/zg97QwVTjt+gKN02LIFkDMBmhLMi9 +ER/frslKxfMnZmaGrGiR/9nmUxwPi1xpZQomyB40w11Re9epnAahNt3ViZS82eQt +DF4JbAiXfKM9fJP/P6EUp8+1Xevb2xzEdt+Iub1FBZUbrvxGakyvSOPOrg/Sfuvm +bJxPgWp6ZKy7PtXny3YuxadIwVyQD8vIP/rmMuGNG2+k5o7Y+SlIis5z/iw= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Subject: CN=GeoTrust Universal CA 2 O=GeoTrust Inc. +# Label: "GeoTrust Universal CA 2" +# Serial: 1 +# MD5 Fingerprint: 34:fc:b8:d0:36:db:9e:14:b3:c2:f2:db:8f:e4:94:c7 +# SHA1 Fingerprint: 37:9a:19:7b:41:85:45:35:0c:a6:03:69:f3:3c:2e:af:47:4f:20:79 +# SHA256 Fingerprint: a0:23:4f:3b:c8:52:7c:a5:62:8e:ec:81:ad:5d:69:89:5d:a5:68:0d:c9:1d:1c:b8:47:7f:33:f8:78:b9:5b:0b +-----BEGIN CERTIFICATE----- +MIIFbDCCA1SgAwIBAgIBATANBgkqhkiG9w0BAQUFADBHMQswCQYDVQQGEwJVUzEW +MBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1c3QgVW5pdmVy +c2FsIENBIDIwHhcNMDQwMzA0MDUwMDAwWhcNMjkwMzA0MDUwMDAwWjBHMQswCQYD +VQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjEgMB4GA1UEAxMXR2VvVHJ1 +c3QgVW5pdmVyc2FsIENBIDIwggIiMA0GCSqGSIb3DQEBAQUAA4ICDwAwggIKAoIC +AQCzVFLByT7y2dyxUxpZKeexw0Uo5dfR7cXFS6GqdHtXr0om/Nj1XqduGdt0DE81 +WzILAePb63p3NeqqWuDW6KFXlPCQo3RWlEQwAx5cTiuFJnSCegx2oG9NzkEtoBUG +FF+3Qs17j1hhNNwqCPkuwwGmIkQcTAeC5lvO0Ep8BNMZcyfwqph/Lq9O64ceJHdq +XbboW0W63MOhBW9Wjo8QJqVJwy7XQYci4E+GymC16qFjwAGXEHm9ADwSbSsVsaxL +se4YuU6W3Nx2/zu+z18DwPw76L5GG//aQMJS9/7jOvdqdzXQ2o3rXhhqMcceujwb +KNZrVMaqW9eiLBsZzKIC9ptZvTdrhrVtgrrY6slWvKk2WP0+GfPtDCapkzj4T8Fd +IgbQl+rhrcZV4IErKIM6+vR7IVEAvlI4zs1meaj0gVbi0IMJR1FbUGrP20gaXT73 +y/Zl92zxlfgCOzJWgjl6W70viRu/obTo/3+NjN8D8WBOWBFM66M/ECuDmgFz2ZRt +hAAnZqzwcEAJQpKtT5MNYQlRJNiS1QuUYbKHsu3/mjX/hVTK7URDrBs8FmtISgoc +QIgfksILAAX/8sgCSqSqqcyZlpwvWOB94b67B9xfBHJcMTTD7F8t4D1kkCLm0ey4 +Lt1ZrtmhN79UNdxzMk+MBB4zsslG8dhcyFVQyWi9qLo2CQIDAQABo2MwYTAPBgNV +HRMBAf8EBTADAQH/MB0GA1UdDgQWBBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAfBgNV +HSMEGDAWgBR281Xh+qQ2+/CfXGJx7Tz0RzgQKzAOBgNVHQ8BAf8EBAMCAYYwDQYJ +KoZIhvcNAQEFBQADggIBAGbBxiPz2eAubl/oz66wsCVNK/g7WJtAJDday6sWSf+z 
+dXkzoS9tcBc0kf5nfo/sm+VegqlVHy/c1FEHEv6sFj4sNcZj/NwQ6w2jqtB8zNHQ +L1EuxBRa3ugZ4T7GzKQp5y6EqgYweHZUcyiYWTjgAA1i00J9IZ+uPTqM1fp3DRgr +Fg5fNuH8KrUwJM/gYwx7WBr+mbpCErGR9Hxo4sjoryzqyX6uuyo9DRXcNJW2GHSo +ag/HtPQTxORb7QrSpJdMKu0vbBKJPfEncKpqA1Ihn0CoZ1Dy81of398j9tx4TuaY +T1U6U+Pv8vSfx3zYWK8pIpe44L2RLrB27FcRz+8pRPPphXpgY+RdM4kX2TGq2tbz +GDVyz4crL2MjhF2EjD9XoIj8mZEoJmmZ1I+XRL6O1UixpCgp8RW04eWe3fiPpm8m +1wk8OhwRDqZsN/etRIcsKMfYdIKz0G9KV7s1KSegi+ghp4dkNl3M2Basx7InQJJV +OCiNUW7dFGdTbHFcJoRNdVq2fmBWqU2t+5sel/MN2dKXVHfaPRK34B7vCAas+YWH +6aLcr34YEoP9VhdBLtUpgn2Z9DH2canPLAEnpQW5qrJITirvn5NSUZU8UnOOVkwX +QMAJKOSLakhT2+zNVVXxxvjpoixMptEmX36vWkzaH6byHCx+rgIW0lbQL1dTR+iS +-----END CERTIFICATE----- + +# Issuer: CN=America Online Root Certification Authority 1 O=America Online Inc. +# Subject: CN=America Online Root Certification Authority 1 O=America Online Inc. +# Label: "America Online Root Certification Authority 1" +# Serial: 1 +# MD5 Fingerprint: 14:f1:08:ad:9d:fa:64:e2:89:e7:1c:cf:a8:ad:7d:5e +# SHA1 Fingerprint: 39:21:c1:15:c1:5d:0e:ca:5c:cb:5b:c4:f0:7d:21:d8:05:0b:56:6a +# SHA256 Fingerprint: 77:40:73:12:c6:3a:15:3d:5b:c0:0b:4e:51:75:9c:df:da:c2:37:dc:2a:33:b6:79:46:e9:8e:9b:fa:68:0a:e3 +-----BEGIN CERTIFICATE----- +MIIDpDCCAoygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP +bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAxMB4XDTAyMDUyODA2 +MDAwMFoXDTM3MTExOTIwNDMwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft +ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMTCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAKgv6KRpBgNHw+kqmP8ZonCaxlCyfqXfaE0bfA+2l2h9LaaLl+lk +hsmj76CGv2BlnEtUiMJIxUo5vxTjWVXlGbR0yLQFOVwWpeKVBeASrlmLojNoWBym +1BW32J/X3HGrfpq/m44zDyL9Hy7nBzbvYjnF3cu6JRQj3gzGPTzOggjmZj7aUTsW +OqMFf6Dch9Wc/HKpoH145LcxVR5lu9RhsCFg7RAycsWSJR74kEoYeEfffjA3PlAb +2xzTa5qGUwew76wGePiEmf4hjUyAtgyC9mZweRrTT6PP8c9GsEsPPt2IYriMqQko 
+O3rHl+Ee5fSfwMCuJKDIodkP1nsmgmkyPacCAwEAAaNjMGEwDwYDVR0TAQH/BAUw +AwEB/zAdBgNVHQ4EFgQUAK3Zo/Z59m50qX8zPYEX10zPM94wHwYDVR0jBBgwFoAU +AK3Zo/Z59m50qX8zPYEX10zPM94wDgYDVR0PAQH/BAQDAgGGMA0GCSqGSIb3DQEB +BQUAA4IBAQB8itEfGDeC4Liwo+1WlchiYZwFos3CYiZhzRAW18y0ZTTQEYqtqKkF +Zu90821fnZmv9ov761KyBZiibyrFVL0lvV+uyIbqRizBs73B6UlwGBaXCBOMIOAb +LjpHyx7kADCVW/RFo8AasAFOq73AI25jP4BKxQft3OJvx8Fi8eNy1gTIdGcL+oir +oQHIb/AUr9KZzVGTfu0uOMe9zkZQPXLjeSWdm4grECDdpbgyn43gKd8hdIaC2y+C +MMbHNYaz+ZZfRtsMRf3zUMNvxsNIrUam4SdHCh0Om7bCd39j8uB9Gr784N/Xx6ds +sPmuujz9dLQR6FgNgLzTqIA6me11zEZ7 +-----END CERTIFICATE----- + +# Issuer: CN=America Online Root Certification Authority 2 O=America Online Inc. +# Subject: CN=America Online Root Certification Authority 2 O=America Online Inc. +# Label: "America Online Root Certification Authority 2" +# Serial: 1 +# MD5 Fingerprint: d6:ed:3c:ca:e2:66:0f:af:10:43:0d:77:9b:04:09:bf +# SHA1 Fingerprint: 85:b5:ff:67:9b:0c:79:96:1f:c8:6e:44:22:00:46:13:db:17:92:84 +# SHA256 Fingerprint: 7d:3b:46:5a:60:14:e5:26:c0:af:fc:ee:21:27:d2:31:17:27:ad:81:1c:26:84:2d:00:6a:f3:73:06:cc:80:bd +-----BEGIN CERTIFICATE----- +MIIFpDCCA4ygAwIBAgIBATANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEc +MBoGA1UEChMTQW1lcmljYSBPbmxpbmUgSW5jLjE2MDQGA1UEAxMtQW1lcmljYSBP +bmxpbmUgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eSAyMB4XDTAyMDUyODA2 +MDAwMFoXDTM3MDkyOTE0MDgwMFowYzELMAkGA1UEBhMCVVMxHDAaBgNVBAoTE0Ft +ZXJpY2EgT25saW5lIEluYy4xNjA0BgNVBAMTLUFtZXJpY2EgT25saW5lIFJvb3Qg +Q2VydGlmaWNhdGlvbiBBdXRob3JpdHkgMjCCAiIwDQYJKoZIhvcNAQEBBQADggIP +ADCCAgoCggIBAMxBRR3pPU0Q9oyxQcngXssNt79Hc9PwVU3dxgz6sWYFas14tNwC +206B89enfHG8dWOgXeMHDEjsJcQDIPT/DjsS/5uN4cbVG7RtIuOx238hZK+GvFci +KtZHgVdEglZTvYYUAQv8f3SkWq7xuhG1m1hagLQ3eAkzfDJHA1zEpYNI9FdWboE2 +JxhP7JsowtS013wMPgwr38oE18aO6lhOqKSlGBxsRZijQdEt0sdtjRnxrXm3gT+9 +BoInLRBYBbV4Bbkv2wxrkJB+FFk4u5QkE+XRnRTf04JNRvCAOVIyD+OEsnpD8l7e +Xz8d3eOyG6ChKiMDbi4BFYdcpnV1x5dhvt6G3NRI270qv0pV2uh9UPu0gBe4lL8B +PeraunzgWGcXuVjgiIZGZ2ydEEdYMtA1fHkqkKJaEBEjNa0vzORKW6fIJ/KD3l67 
+Xnfn6KVuY8INXWHQjNJsWiEOyiijzirplcdIz5ZvHZIlyMbGwcEMBawmxNJ10uEq +Z8A9W6Wa6897GqidFEXlD6CaZd4vKL3Ob5Rmg0gp2OpljK+T2WSfVVcmv2/LNzGZ +o2C7HK2JNDJiuEMhBnIMoVxtRsX6Kc8w3onccVvdtjc+31D1uAclJuW8tf48ArO3 ++L5DwYcRlJ4jbBeKuIonDFRH8KmzwICMoCfrHRnjB453cMor9H124HhnAgMBAAGj +YzBhMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFE1FwWg4u3OpaaEg5+31IqEj +FNeeMB8GA1UdIwQYMBaAFE1FwWg4u3OpaaEg5+31IqEjFNeeMA4GA1UdDwEB/wQE +AwIBhjANBgkqhkiG9w0BAQUFAAOCAgEAZ2sGuV9FOypLM7PmG2tZTiLMubekJcmn +xPBUlgtk87FYT15R/LKXeydlwuXK5w0MJXti4/qftIe3RUavg6WXSIylvfEWK5t2 +LHo1YGwRgJfMqZJS5ivmae2p+DYtLHe/YUjRYwu5W1LtGLBDQiKmsXeu3mnFzccc +obGlHBD7GL4acN3Bkku+KVqdPzW+5X1R+FXgJXUjhx5c3LqdsKyzadsXg8n33gy8 +CNyRnqjQ1xU3c6U1uPx+xURABsPr+CKAXEfOAuMRn0T//ZoyzH1kUQ7rVyZ2OuMe +IjzCpjbdGe+n/BLzJsBZMYVMnNjP36TMzCmT/5RtdlwTCJfy7aULTd3oyWgOZtMA +DjMSW7yV5TKQqLPGbIOtd+6Lfn6xqavT4fG2wLHqiMDn05DpKJKUe2h7lyoKZy2F +AjgQ5ANh1NolNscIWC2hp1GvMApJ9aZphwctREZ2jirlmjvXGKL8nDgQzMY70rUX +Om/9riW99XJZZLF0KjhfGEzfz3EEWjbUvy+ZnOjZurGV5gJLIaFb1cFPj65pbVPb +AZO1XB4Y3WRayhgoPmMEEf0cjQAPuDffZ4qdZqkCapH/E8ovXYO8h5Ns3CRRFgQl +Zvqz2cK6Kb6aSDiCmfS/O0oxGfm/jiEzFMpPVF/7zvuPcX/9XhmgD0uRuMRUvAaw +RY8mkaKO/qk= +-----END CERTIFICATE----- + +# Issuer: CN=AAA Certificate Services O=Comodo CA Limited +# Subject: CN=AAA Certificate Services O=Comodo CA Limited +# Label: "Comodo AAA Services root" +# Serial: 1 +# MD5 Fingerprint: 49:79:04:b0:eb:87:19:ac:47:b0:bc:11:51:9b:74:d0 +# SHA1 Fingerprint: d1:eb:23:a4:6d:17:d6:8f:d9:25:64:c2:f1:f1:60:17:64:d8:e3:49 +# SHA256 Fingerprint: d7:a7:a0:fb:5d:7e:27:31:d7:71:e9:48:4e:bc:de:f7:1d:5f:0c:3e:0a:29:48:78:2b:c8:3e:e0:ea:69:9e:f4 +-----BEGIN CERTIFICATE----- +MIIEMjCCAxqgAwIBAgIBATANBgkqhkiG9w0BAQUFADB7MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEhMB8GA1UEAwwYQUFBIENlcnRpZmlj +YXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVowezEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE 
+BwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxITAfBgNVBAMM +GEFBQSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEBBQADggEP +ADCCAQoCggEBAL5AnfRu4ep2hxxNRUSOvkbIgwadwSr+GB+O5AL686tdUIoWMQua +BtDFcCLNSS1UY8y2bmhGC1Pqy0wkwLxyTurxFa70VJoSCsN6sjNg4tqJVfMiWPPe +3M/vg4aijJRPn2jymJBGhCfHdr/jzDUsi14HZGWCwEiwqJH5YZ92IFCokcdmtet4 +YgNW8IoaE+oxox6gmf049vYnMlhvB/VruPsUK6+3qszWY19zjNoFmag4qMsXeDZR +rOme9Hg6jc8P2ULimAyrL58OAd7vn5lJ8S3frHRNG5i1R8XlKdH5kBjHYpy+g8cm +ez6KJcfA3Z3mNWgQIJ2P2N7Sw4ScDV7oL8kCAwEAAaOBwDCBvTAdBgNVHQ4EFgQU +oBEKIz6W8Qfs4q8p74Klf9AwpLQwDgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQF +MAMBAf8wewYDVR0fBHQwcjA4oDagNIYyaHR0cDovL2NybC5jb21vZG9jYS5jb20v +QUFBQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmwwNqA0oDKGMGh0dHA6Ly9jcmwuY29t +b2RvLm5ldC9BQUFDZXJ0aWZpY2F0ZVNlcnZpY2VzLmNybDANBgkqhkiG9w0BAQUF +AAOCAQEACFb8AvCb6P+k+tZ7xkSAzk/ExfYAWMymtrwUSWgEdujm7l3sAg9g1o1Q +GE8mTgHj5rCl7r+8dFRBv/38ErjHT1r0iWAFf2C3BUrz9vHCv8S5dIa2LX1rzNLz +Rt0vxuBqw8M0Ayx9lt1awg6nCpnBBYurDC/zXDrPbDdVCYfeU0BsWO/8tqtlbgT2 +G9w84FoVxp7Z8VlIMCFlA2zs6SFz7JsDoeA3raAVGI/6ugLOpyypEBMs1OUIJqsi +l2D4kF501KKaU73yqWjgom7C12yxow+ev+to51byrvLjKzg6CYG1a4XXvi3tPxq3 +smPi9WIsgtRqAEFQ8TmDn5XpNpaYbg== +-----END CERTIFICATE----- + +# Issuer: CN=Secure Certificate Services O=Comodo CA Limited +# Subject: CN=Secure Certificate Services O=Comodo CA Limited +# Label: "Comodo Secure Services root" +# Serial: 1 +# MD5 Fingerprint: d3:d9:bd:ae:9f:ac:67:24:b3:c8:1b:52:e1:b9:a9:bd +# SHA1 Fingerprint: 4a:65:d5:f4:1d:ef:39:b8:b8:90:4a:4a:d3:64:81:33:cf:c7:a1:d1 +# SHA256 Fingerprint: bd:81:ce:3b:4f:65:91:d1:1a:67:b5:fc:7a:47:fd:ef:25:52:1b:f9:aa:4e:18:b9:e3:df:2e:34:a7:80:3b:e8 +-----BEGIN CERTIFICATE----- +MIIEPzCCAyegAwIBAgIBATANBgkqhkiG9w0BAQUFADB+MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDEkMCIGA1UEAwwbU2VjdXJlIENlcnRp +ZmljYXRlIFNlcnZpY2VzMB4XDTA0MDEwMTAwMDAwMFoXDTI4MTIzMTIzNTk1OVow +fjELMAkGA1UEBhMCR0IxGzAZBgNVBAgMEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G 
+A1UEBwwHU2FsZm9yZDEaMBgGA1UECgwRQ29tb2RvIENBIExpbWl0ZWQxJDAiBgNV +BAMMG1NlY3VyZSBDZXJ0aWZpY2F0ZSBTZXJ2aWNlczCCASIwDQYJKoZIhvcNAQEB +BQADggEPADCCAQoCggEBAMBxM4KK0HDrc4eCQNUd5MvJDkKQ+d40uaG6EfQlhfPM +cm3ye5drswfxdySRXyWP9nQ95IDC+DwN879A6vfIUtFyb+/Iq0G4bi4XKpVpDM3S +HpR7LZQdqnXXs5jLrLxkU0C8j6ysNstcrbvd4JQX7NFc0L/vpZXJkMWwrPsbQ996 +CF23uPJAGysnnlDOXmWCiIxe004MeuoIkbY2qitC++rCoznl2yY4rYsK7hljxxwk +3wN42ubqwUcaCwtGCd0C/N7Lh1/XMGNooa7cMqG6vv5Eq2i2pRcV/b3Vp6ea5EQz +6YiO/O1R65NxTq0B50SOqy3LqP4BSUjwwN3HaNiS/j0CAwEAAaOBxzCBxDAdBgNV +HQ4EFgQUPNiTiMLAggnMAZkGkyDpnnAJY08wDgYDVR0PAQH/BAQDAgEGMA8GA1Ud +EwEB/wQFMAMBAf8wgYEGA1UdHwR6MHgwO6A5oDeGNWh0dHA6Ly9jcmwuY29tb2Rv +Y2EuY29tL1NlY3VyZUNlcnRpZmljYXRlU2VydmljZXMuY3JsMDmgN6A1hjNodHRw +Oi8vY3JsLmNvbW9kby5uZXQvU2VjdXJlQ2VydGlmaWNhdGVTZXJ2aWNlcy5jcmww +DQYJKoZIhvcNAQEFBQADggEBAIcBbSMdflsXfcFhMs+P5/OKlFlm4J4oqF7Tt/Q0 +5qo5spcWxYJvMqTpjOev/e/C6LlLqqP05tqNZSH7uoDrJiiFGv45jN5bBAS0VPmj +Z55B+glSzAVIqMk/IQQezkhr/IXownuvf7fM+F86/TXGDe+X3EyrEeFryzHRbPtI +gKvcnDe4IRRLDXE97IMzbtFuMhbsmMcWi1mmNKsFVy2T96oTy9IT4rcuO81rUBcJ +aD61JlfutuC23bkpgHl9j6PwpCikFcSF9CfUa7/lXORlAnZUtOM3ZiTTGWHIUhDl +izeauan5Hb/qmZJhlv8BzaFfDbxxvA6sCx1HRR3B7Hzs/Sk= +-----END CERTIFICATE----- + +# Issuer: CN=Trusted Certificate Services O=Comodo CA Limited +# Subject: CN=Trusted Certificate Services O=Comodo CA Limited +# Label: "Comodo Trusted Services root" +# Serial: 1 +# MD5 Fingerprint: 91:1b:3f:6e:cd:9e:ab:ee:07:fe:1f:71:d2:b3:61:27 +# SHA1 Fingerprint: e1:9f:e3:0e:8b:84:60:9e:80:9b:17:0d:72:a8:c5:ba:6e:14:09:bd +# SHA256 Fingerprint: 3f:06:e5:56:81:d4:96:f5:be:16:9e:b5:38:9f:9f:2b:8f:f6:1e:17:08:df:68:81:72:48:49:cd:5d:27:cb:69 +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 +aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla 
+MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO +BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD +VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW +fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt +TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL +fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW +1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 +kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G +A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD +VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v +ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo +dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu +Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ +HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 +pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS +jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ +xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn +dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi +-----END CERTIFICATE----- + +# Issuer: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com +# Subject: CN=UTN - DATACorp SGC O=The USERTRUST Network OU=http://www.usertrust.com +# Label: "UTN DATACorp SGC Root CA" +# Serial: 91374294542884689855167577680241077609 +# MD5 Fingerprint: b3:a5:3e:77:21:6d:ac:4a:c0:c9:fb:d5:41:3d:ca:06 +# SHA1 Fingerprint: 58:11:9f:0e:12:82:87:ea:50:fd:d9:87:45:6f:4f:78:dc:fa:d6:d4 +# SHA256 Fingerprint: 85:fb:2f:91:dd:12:27:5a:01:45:b6:36:53:4f:84:02:4a:d6:8b:69:b8:ee:88:68:4f:f7:11:37:58:05:b3:48 +-----BEGIN CERTIFICATE----- +MIIEXjCCA0agAwIBAgIQRL4Mi1AAIbQR0ypoBqmtaTANBgkqhkiG9w0BAQUFADCB +kzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug 
+Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xGzAZBgNVBAMTElVUTiAtIERBVEFDb3Jw +IFNHQzAeFw05OTA2MjQxODU3MjFaFw0xOTA2MjQxOTA2MzBaMIGTMQswCQYDVQQG +EwJVUzELMAkGA1UECBMCVVQxFzAVBgNVBAcTDlNhbHQgTGFrZSBDaXR5MR4wHAYD +VQQKExVUaGUgVVNFUlRSVVNUIE5ldHdvcmsxITAfBgNVBAsTGGh0dHA6Ly93d3cu +dXNlcnRydXN0LmNvbTEbMBkGA1UEAxMSVVROIC0gREFUQUNvcnAgU0dDMIIBIjAN +BgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA3+5YEKIrblXEjr8uRgnn4AgPLit6 +E5Qbvfa2gI5lBZMAHryv4g+OGQ0SR+ysraP6LnD43m77VkIVni5c7yPeIbkFdicZ +D0/Ww5y0vpQZY/KmEQrrU0icvvIpOxboGqBMpsn0GFlowHDyUwDAXlCCpVZvNvlK +4ESGoE1O1kduSUrLZ9emxAW5jh70/P/N5zbgnAVssjMiFdC04MwXwLLA9P4yPykq +lXvY8qdOD1R8oQ2AswkDwf9c3V6aPryuvEeKaq5xyh+xKrhfQgUL7EYw0XILyulW +bfXv33i+Ybqypa4ETLyorGkVl73v67SMvzX41MPRKA5cOp9wGDMgd8SirwIDAQAB +o4GrMIGoMAsGA1UdDwQEAwIBxjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQWBBRT +MtGzz3/64PGgXYVOktKeRR20TzA9BgNVHR8ENjA0MDKgMKAuhixodHRwOi8vY3Js +LnVzZXJ0cnVzdC5jb20vVVROLURBVEFDb3JwU0dDLmNybDAqBgNVHSUEIzAhBggr +BgEFBQcDAQYKKwYBBAGCNwoDAwYJYIZIAYb4QgQBMA0GCSqGSIb3DQEBBQUAA4IB +AQAnNZcAiosovcYzMB4p/OL31ZjUQLtgyr+rFywJNn9Q+kHcrpY6CiM+iVnJowft +Gzet/Hy+UUla3joKVAgWRcKZsYfNjGjgaQPpxE6YsjuMFrMOoAyYUJuTqXAJyCyj +j98C5OBxOvG0I3KgqgHf35g+FFCgMSa9KOlaMCZ1+XtgHI3zzVAmbQQnmt/VDUVH +KWss5nbZqSl9Mt3JNjy9rjXxEZ4du5A/EkdOjtd+D2JzHVImOBwYSf0wdJrE5SIv +2MCN7ZF6TACPcn9d2t0bi0Vr591pl6jFVkwPDPafepE39peC4N1xaf92P2BNPM/3 +mfnGV/TJVTl4uix5yaaIK/QI +-----END CERTIFICATE----- + +# Issuer: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com +# Subject: CN=UTN-USERFirst-Hardware O=The USERTRUST Network OU=http://www.usertrust.com +# Label: "UTN USERFirst Hardware Root CA" +# Serial: 91374294542884704022267039221184531197 +# MD5 Fingerprint: 4c:56:41:e5:0d:bb:2b:e8:ca:a3:ed:18:08:ad:43:39 +# SHA1 Fingerprint: 04:83:ed:33:99:ac:36:08:05:87:22:ed:bc:5e:46:00:e3:be:f9:d7 +# SHA256 Fingerprint: 6e:a5:47:41:d0:04:66:7e:ed:1b:48:16:63:4a:a3:a7:9e:6e:4b:96:95:0f:82:79:da:fc:8d:9b:d8:81:21:37 +-----BEGIN 
CERTIFICATE----- +MIIEdDCCA1ygAwIBAgIQRL4Mi1AAJLQR0zYq/mUK/TANBgkqhkiG9w0BAQUFADCB +lzELMAkGA1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2Ug +Q2l0eTEeMBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExho +dHRwOi8vd3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3Qt +SGFyZHdhcmUwHhcNOTkwNzA5MTgxMDQyWhcNMTkwNzA5MTgxOTIyWjCBlzELMAkG +A1UEBhMCVVMxCzAJBgNVBAgTAlVUMRcwFQYDVQQHEw5TYWx0IExha2UgQ2l0eTEe +MBwGA1UEChMVVGhlIFVTRVJUUlVTVCBOZXR3b3JrMSEwHwYDVQQLExhodHRwOi8v +d3d3LnVzZXJ0cnVzdC5jb20xHzAdBgNVBAMTFlVUTi1VU0VSRmlyc3QtSGFyZHdh +cmUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCx98M4P7Sof885glFn +0G2f0v9Y8+efK+wNiVSZuTiZFvfgIXlIwrthdBKWHTxqctU8EGc6Oe0rE81m65UJ +M6Rsl7HoxuzBdXmcRl6Nq9Bq/bkqVRcQVLMZ8Jr28bFdtqdt++BxF2uiiPsA3/4a +MXcMmgF6sTLjKwEHOG7DpV4jvEWbe1DByTCP2+UretNb+zNAHqDVmBe8i4fDidNd +oI6yqqr2jmmIBsX6iSHzCJ1pLgkzmykNRg+MzEk0sGlRvfkGzWitZky8PqxhvQqI +DsjfPe58BEydCl5rkdbux+0ojatNh4lz0G6k0B4WixThdkQDf2Os5M1JnMWS9Ksy +oUhbAgMBAAGjgbkwgbYwCwYDVR0PBAQDAgHGMA8GA1UdEwEB/wQFMAMBAf8wHQYD +VR0OBBYEFKFyXyYbKJhDlV0HN9WFlp1L0sNFMEQGA1UdHwQ9MDswOaA3oDWGM2h0 +dHA6Ly9jcmwudXNlcnRydXN0LmNvbS9VVE4tVVNFUkZpcnN0LUhhcmR3YXJlLmNy +bDAxBgNVHSUEKjAoBggrBgEFBQcDAQYIKwYBBQUHAwUGCCsGAQUFBwMGBggrBgEF +BQcDBzANBgkqhkiG9w0BAQUFAAOCAQEARxkP3nTGmZev/K0oXnWO6y1n7k57K9cM +//bey1WiCuFMVGWTYGufEpytXoMs61quwOQt9ABjHbjAbPLPSbtNk28Gpgoiskli +CE7/yMgUsogWXecB5BKV5UU0s4tpvc+0hY91UZ59Ojg6FEgSxvunOxqNDYJAB+gE +CJChicsZUN/KHAG8HQQZexB2lzvukJDKxA4fFm517zP4029bHpbj4HR3dHuKom4t +3XbWOTCC8KucUvIqx69JXn7HaOWCgchqJ/kniCrVWFCVH/A7HFe7fRQ5YiuayZSS +KqMiDP+JJn1fIytH1xUdqWqeUQ0qUZ6B+dQ7XnASfxAynB67nfhmqA== +-----END CERTIFICATE----- + +# Issuer: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Subject: CN=XRamp Global Certification Authority O=XRamp Security Services Inc OU=www.xrampsecurity.com +# Label: "XRamp Global CA Root" +# Serial: 107108908803651509692980124233745014957 +# MD5 Fingerprint: a1:0b:44:b3:ca:10:d8:00:6e:9d:0f:d8:0f:92:0a:d1 +# SHA1 
Fingerprint: b8:01:86:d1:eb:9c:86:a5:41:04:cf:30:54:f3:4c:52:b7:e5:58:c6 +# SHA256 Fingerprint: ce:cd:dc:90:50:99:d8:da:df:c5:b1:d2:09:b7:37:cb:e2:c1:8c:fb:2c:10:c0:ff:0b:cf:0d:32:86:fc:1a:a2 +-----BEGIN CERTIFICATE----- +MIIEMDCCAxigAwIBAgIQUJRs7Bjq1ZxN1ZfvdY+grTANBgkqhkiG9w0BAQUFADCB +gjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3dy54cmFtcHNlY3VyaXR5LmNvbTEk +MCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2VydmljZXMgSW5jMS0wKwYDVQQDEyRY +UmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQxMTAxMTcx +NDA0WhcNMzUwMTAxMDUzNzE5WjCBgjELMAkGA1UEBhMCVVMxHjAcBgNVBAsTFXd3 +dy54cmFtcHNlY3VyaXR5LmNvbTEkMCIGA1UEChMbWFJhbXAgU2VjdXJpdHkgU2Vy +dmljZXMgSW5jMS0wKwYDVQQDEyRYUmFtcCBHbG9iYWwgQ2VydGlmaWNhdGlvbiBB +dXRob3JpdHkwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCYJB69FbS6 +38eMpSe2OAtp87ZOqCwuIR1cRN8hXX4jdP5efrRKt6atH67gBhbim1vZZ3RrXYCP +KZ2GG9mcDZhtdhAoWORlsH9KmHmf4MMxfoArtYzAQDsRhtDLooY2YKTVMIJt2W7Q +DxIEM5dfT2Fa8OT5kavnHTu86M/0ay00fOJIYRyO82FEzG+gSqmUsE3a56k0enI4 +qEHMPJQRfevIpoy3hsvKMzvZPTeL+3o+hiznc9cKV6xkmxnr9A8ECIqsAxcZZPRa +JSKNNCyy9mgdEm3Tih4U2sSPpuIjhdV6Db1q4Ons7Be7QhtnqiXtRYMh/MHJfNVi +PvryxS3T/dRlAgMBAAGjgZ8wgZwwEwYJKwYBBAGCNxQCBAYeBABDAEEwCwYDVR0P +BAQDAgGGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFMZPoj0GY4QJnM5i5ASs +jVy16bYbMDYGA1UdHwQvMC0wK6ApoCeGJWh0dHA6Ly9jcmwueHJhbXBzZWN1cml0 +eS5jb20vWEdDQS5jcmwwEAYJKwYBBAGCNxUBBAMCAQEwDQYJKoZIhvcNAQEFBQAD +ggEBAJEVOQMBG2f7Shz5CmBbodpNl2L5JFMn14JkTpAuw0kbK5rc/Kh4ZzXxHfAR +vbdI4xD2Dd8/0sm2qlWkSLoC295ZLhVbO50WfUfXN+pfTXYSNrsf16GBBEYgoyxt +qZ4Bfj8pzgCT3/3JknOJiWSe5yvkHJEs0rnOfc5vMZnT5r7SHpDwCRR5XCOrTdLa +IR9NmXmd4c8nnxCbHIgNsIpkQTG4DmyQJKSbXHGPurt+HBvbaoAPIbzp26a3QPSy +i6mx5O+aGtA9aZnuqCij4Tyz8LIRnM98QObd50N9otg6tamN8jSZxNQQ4Qb9CYQQ +O+7ETPTsJ3xCwnR8gooJybQDJbw= +-----END CERTIFICATE----- + +# Issuer: O=The Go Daddy Group, Inc. OU=Go Daddy Class 2 Certification Authority +# Subject: O=The Go Daddy Group, Inc. 
OU=Go Daddy Class 2 Certification Authority +# Label: "Go Daddy Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 91:de:06:25:ab:da:fd:32:17:0c:bb:25:17:2a:84:67 +# SHA1 Fingerprint: 27:96:ba:e6:3f:18:01:e2:77:26:1b:a0:d7:77:70:02:8f:20:ee:e4 +# SHA256 Fingerprint: c3:84:6b:f2:4b:9e:93:ca:64:27:4c:0e:c6:7c:1e:cc:5e:02:4f:fc:ac:d2:d7:40:19:35:0e:81:fe:54:6a:e4 +-----BEGIN CERTIFICATE----- +MIIEADCCAuigAwIBAgIBADANBgkqhkiG9w0BAQUFADBjMQswCQYDVQQGEwJVUzEh +MB8GA1UEChMYVGhlIEdvIERhZGR5IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBE +YWRkeSBDbGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTA0MDYyOTE3 +MDYyMFoXDTM0MDYyOTE3MDYyMFowYzELMAkGA1UEBhMCVVMxITAfBgNVBAoTGFRo +ZSBHbyBEYWRkeSBHcm91cCwgSW5jLjExMC8GA1UECxMoR28gRGFkZHkgQ2xhc3Mg +MiBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTCCASAwDQYJKoZIhvcNAQEBBQADggEN +ADCCAQgCggEBAN6d1+pXGEmhW+vXX0iG6r7d/+TvZxz0ZWizV3GgXne77ZtJ6XCA +PVYYYwhv2vLM0D9/AlQiVBDYsoHUwHU9S3/Hd8M+eKsaA7Ugay9qK7HFiH7Eux6w +wdhFJ2+qN1j3hybX2C32qRe3H3I2TqYXP2WYktsqbl2i/ojgC95/5Y0V4evLOtXi +EqITLdiOr18SPaAIBQi2XKVlOARFmR6jYGB0xUGlcmIbYsUfb18aQr4CUWWoriMY +avx4A6lNf4DD+qta/KFApMoZFv6yyO9ecw3ud72a9nmYvLEHZ6IVDd2gWMZEewo+ +YihfukEHU1jPEX44dMX4/7VpkI+EdOqXG68CAQOjgcAwgb0wHQYDVR0OBBYEFNLE +sNKR1EwRcbNhyz2h/t2oatTjMIGNBgNVHSMEgYUwgYKAFNLEsNKR1EwRcbNhyz2h +/t2oatTjoWekZTBjMQswCQYDVQQGEwJVUzEhMB8GA1UEChMYVGhlIEdvIERhZGR5 +IEdyb3VwLCBJbmMuMTEwLwYDVQQLEyhHbyBEYWRkeSBDbGFzcyAyIENlcnRpZmlj +YXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8wDQYJKoZIhvcNAQEFBQAD +ggEBADJL87LKPpH8EsahB4yOd6AzBhRckB4Y9wimPQoZ+YeAEW5p5JYXMP80kWNy +OO7MHAGjHZQopDH2esRU1/blMVgDoszOYtuURXO1v0XJJLXVggKtI3lpjbi2Tc7P +TMozI+gciKqdi0FuFskg5YmezTvacPd+mSYgFFQlq25zheabIZ0KbIIOqPjCDPoQ +HmyW74cNxA9hi63ugyuV+I6ShHI56yDqg+2DzZduCLzrTia2cyvk0/ZM/iZx4mER +dEr/VxqHD3VILs9RaRegAhJhldXRQLIQTO7ErBBDpqWeCtWVYpoNz4iCxTIM5Cuf +ReYNnyicsbkqWletNw+vHX/bvZ8= +-----END CERTIFICATE----- + +# Issuer: O=Starfield Technologies, Inc. OU=Starfield Class 2 Certification Authority +# Subject: O=Starfield Technologies, Inc. 
OU=Starfield Class 2 Certification Authority +# Label: "Starfield Class 2 CA" +# Serial: 0 +# MD5 Fingerprint: 32:4a:4b:bb:c8:63:69:9b:be:74:9a:c6:dd:1d:46:24 +# SHA1 Fingerprint: ad:7e:1c:28:b0:64:ef:8f:60:03:40:20:14:c3:d0:e3:37:0e:b5:8a +# SHA256 Fingerprint: 14:65:fa:20:53:97:b8:76:fa:a6:f0:a9:95:8e:55:90:e4:0f:cc:7f:aa:4f:b7:c2:c8:67:75:21:fb:5f:b6:58 +-----BEGIN CERTIFICATE----- +MIIEDzCCAvegAwIBAgIBADANBgkqhkiG9w0BAQUFADBoMQswCQYDVQQGEwJVUzEl +MCMGA1UEChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMp +U3RhcmZpZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDQw +NjI5MTczOTE2WhcNMzQwNjI5MTczOTE2WjBoMQswCQYDVQQGEwJVUzElMCMGA1UE +ChMcU3RhcmZpZWxkIFRlY2hub2xvZ2llcywgSW5jLjEyMDAGA1UECxMpU3RhcmZp +ZWxkIENsYXNzIDIgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggEgMA0GCSqGSIb3 +DQEBAQUAA4IBDQAwggEIAoIBAQC3Msj+6XGmBIWtDBFk385N78gDGIc/oav7PKaf +8MOh2tTYbitTkPskpD6E8J7oX+zlJ0T1KKY/e97gKvDIr1MvnsoFAZMej2YcOadN ++lq2cwQlZut3f+dZxkqZJRRU6ybH838Z1TBwj6+wRir/resp7defqgSHo9T5iaU0 +X9tDkYI22WY8sbi5gv2cOj4QyDvvBmVmepsZGD3/cVE8MC5fvj13c7JdBmzDI1aa +K4UmkhynArPkPw2vCHmCuDY96pzTNbO8acr1zJ3o/WSNF4Azbl5KXZnJHoe0nRrA +1W4TNSNe35tfPe/W93bC6j67eA0cQmdrBNj41tpvi/JEoAGrAgEDo4HFMIHCMB0G +A1UdDgQWBBS/X7fRzt0fhvRbVazc1xDCDqmI5zCBkgYDVR0jBIGKMIGHgBS/X7fR +zt0fhvRbVazc1xDCDqmI56FspGowaDELMAkGA1UEBhMCVVMxJTAjBgNVBAoTHFN0 +YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAsTKVN0YXJmaWVsZCBD +bGFzcyAyIENlcnRpZmljYXRpb24gQXV0aG9yaXR5ggEAMAwGA1UdEwQFMAMBAf8w +DQYJKoZIhvcNAQEFBQADggEBAAWdP4id0ckaVaGsafPzWdqbAYcaT1epoXkJKtv3 +L7IezMdeatiDh6GX70k1PncGQVhiv45YuApnP+yz3SFmH8lU+nLMPUxA2IGvd56D +eruix/U0F47ZEUD0/CwqTRV/p2JdLiXTAAsgGh1o+Re49L2L7ShZ3U0WixeDyLJl +xy16paq8U4Zt3VekyvggQQto8PT7dL5WXXp59fkdheMtlb71cZBDzI0fmgAKhynp +VSJYACPq4xJDKVtHCN2MQWplBqjlIapBtJUhlbl90TSrE9atvNziPTnNvT51cKEY +WQPJIrSPnNVeKtelttQKbfi3QBFGmh95DmK/D5fs4C8fF5Q= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 1 +# MD5 Fingerprint: 22:4d:8f:8a:fc:f7:35:c2:bb:57:34:90:7b:8b:22:16 +# SHA1 Fingerprint: 3e:2b:f7:f2:03:1b:96:f3:8c:e6:c4:d8:a8:5d:3e:2d:58:47:6a:0f +# SHA256 Fingerprint: c7:66:a9:be:f2:d4:07:1c:86:3a:31:aa:49:20:e8:13:b2:d1:98:60:8c:b7:b7:cf:e2:11:43:b8:36:df:09:ea +-----BEGIN CERTIFICATE----- +MIIHyTCCBbGgAwIBAgIBATANBgkqhkiG9w0BAQUFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM2WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICUjCCAk4wDAYDVR0TBAUwAwEB/zALBgNVHQ8EBAMCAa4wHQYDVR0OBBYE +FE4L7xqkQFulF2mHMMo0aEPQQa7yMGQGA1UdHwRdMFswLKAqoCiGJmh0dHA6Ly9j +ZXJ0LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMCugKaAnhiVodHRwOi8vY3Js +LnN0YXJ0Y29tLm9yZy9zZnNjYS1jcmwuY3JsMIIBXQYDVR0gBIIBVDCCAVAwggFM 
+BgsrBgEEAYG1NwEBATCCATswLwYIKwYBBQUHAgEWI2h0dHA6Ly9jZXJ0LnN0YXJ0 +Y29tLm9yZy9wb2xpY3kucGRmMDUGCCsGAQUFBwIBFilodHRwOi8vY2VydC5zdGFy +dGNvbS5vcmcvaW50ZXJtZWRpYXRlLnBkZjCB0AYIKwYBBQUHAgIwgcMwJxYgU3Rh +cnQgQ29tbWVyY2lhbCAoU3RhcnRDb20pIEx0ZC4wAwIBARqBl0xpbWl0ZWQgTGlh +YmlsaXR5LCByZWFkIHRoZSBzZWN0aW9uICpMZWdhbCBMaW1pdGF0aW9ucyogb2Yg +dGhlIFN0YXJ0Q29tIENlcnRpZmljYXRpb24gQXV0aG9yaXR5IFBvbGljeSBhdmFp +bGFibGUgYXQgaHR0cDovL2NlcnQuc3RhcnRjb20ub3JnL3BvbGljeS5wZGYwEQYJ +YIZIAYb4QgEBBAQDAgAHMDgGCWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNT +TCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTANBgkqhkiG9w0BAQUFAAOCAgEAFmyZ +9GYMNPXQhV59CuzaEE44HF7fpiUFS5Eyweg78T3dRAlbB0mKKctmArexmvclmAk8 +jhvh3TaHK0u7aNM5Zj2gJsfyOZEdUauCe37Vzlrk4gNXcGmXCPleWKYK34wGmkUW +FjgKXlf2Ysd6AgXmvB618p70qSmD+LIU424oh0TDkBreOKk8rENNZEXO3SipXPJz +ewT4F+irsfMuXGRuczE6Eri8sxHkfY+BUZo7jYn0TZNmezwD7dOaHZrzZVD1oNB1 +ny+v8OqCQ5j4aZyJecRDjkZy42Q2Eq/3JR44iZB3fsNrarnDy0RLrHiQi+fHLB5L +EUTINFInzQpdn4XBidUaePKVEFMy3YCEZnXZtWgo+2EuvoSoOMCZEoalHmdkrQYu +L6lwhceWD3yJZfWOQ1QOq92lgDmUYMA0yZZwLKMS9R9Ie70cfmu3nZD0Ijuu+Pwq +yvqCUqDvr0tVk+vBtfAii6w0TiYiBKGHLHVKt+V9E9e4DGTANtLJL4YSjCMJwRuC +O3NJo2pXh5Tl1njFmUNj403gdy3hZZlyaQQaRwnmDwFWJPsfvw55qVguucQJAX6V +um0ABj6y6koQOdjQK/W/7HW/lwLFCRsI3FU34oH7N4RDYiDK51ZLZer+bMEkkySh +NOsF/5oirpt9P/FlUQqmMGqz9IgcgA38corog14= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Assured ID Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Assured ID Root CA" +# Serial: 17154717934120587862167794914071425081 +# MD5 Fingerprint: 87:ce:0b:7b:2a:0e:49:00:e1:58:71:9b:37:a8:93:72 +# SHA1 Fingerprint: 05:63:b8:63:0d:62:d7:5a:bb:c8:ab:1e:4b:df:b5:a8:99:b2:4d:43 +# SHA256 Fingerprint: 3e:90:99:b5:01:5e:8f:48:6c:00:bc:ea:9d:11:1e:e7:21:fa:ba:35:5a:89:bc:f1:df:69:56:1e:3d:c6:32:5c +-----BEGIN CERTIFICATE----- +MIIDtzCCAp+gAwIBAgIQDOfg5RfYRv6P5WD8G/AwOTANBgkqhkiG9w0BAQUFADBl +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 
+d3cuZGlnaWNlcnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJv +b3QgQ0EwHhcNMDYxMTEwMDAwMDAwWhcNMzExMTEwMDAwMDAwWjBlMQswCQYDVQQG +EwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3d3cuZGlnaWNl +cnQuY29tMSQwIgYDVQQDExtEaWdpQ2VydCBBc3N1cmVkIElEIFJvb3QgQ0EwggEi +MA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCtDhXO5EOAXLGH87dg+XESpa7c +JpSIqvTO9SA5KFhgDPiA2qkVlTJhPLWxKISKityfCgyDF3qPkKyK53lTXDGEKvYP +mDI2dsze3Tyoou9q+yHyUmHfnyDXH+Kx2f4YZNISW1/5WBg1vEfNoTb5a3/UsDg+ +wRvDjDPZ2C8Y/igPs6eD1sNuRMBhNZYW/lmci3Zt1/GiSw0r/wty2p5g0I6QNcZ4 +VYcgoc/lbQrISXwxmDNsIumH0DJaoroTghHtORedmTpyoeb6pNnVFzF1roV9Iq4/ +AUaG9ih5yLHa5FcXxH4cDrC0kqZWs72yl+2qp/C3xag/lRbQ/6GW6whfGHdPAgMB +AAGjYzBhMA4GA1UdDwEB/wQEAwIBhjAPBgNVHRMBAf8EBTADAQH/MB0GA1UdDgQW +BBRF66Kv9JLLgjEtUYunpyGd823IDzAfBgNVHSMEGDAWgBRF66Kv9JLLgjEtUYun +pyGd823IDzANBgkqhkiG9w0BAQUFAAOCAQEAog683+Lt8ONyc3pklL/3cmbYMuRC +dWKuh+vy1dneVrOfzM4UKLkNl2BcEkxY5NM9g0lFWJc1aRqoR+pWxnmrEthngYTf +fwk8lOa4JiwgvT2zKIn3X/8i4peEH+ll74fg38FnSbNd67IJKusm7Xi+fT8r87cm +NW1fiQG2SVufAQWbqz0lwcy2f8Lxb4bG+mRo64EtlOtCt/qMHt1i8b5QZ7dsvfPx +H2sMNgcWfzd8qVttevESRmCD1ycEvkvOl77DZypoEd+A5wwzZr8TDRRu838fYxAe ++o0bJW1sj6W3YQGx0qMmoRBxna3iw/nDmVG3KwcIzi7mULKn+gpFL6Lw8g== +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert Global Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert Global Root CA" +# Serial: 10944719598952040374951832963794454346 +# MD5 Fingerprint: 79:e4:a9:84:0d:7d:3a:96:d7:c0:4f:e2:43:4c:89:2e +# SHA1 Fingerprint: a8:98:5d:3a:65:e5:e5:c4:b2:d7:d6:6d:40:c6:dd:2f:b1:9c:54:36 +# SHA256 Fingerprint: 43:48:a0:e9:44:4c:78:cb:26:5e:05:8d:5e:89:44:b4:d8:4f:96:62:bd:26:db:25:7f:89:34:a4:43:c7:01:61 +-----BEGIN CERTIFICATE----- +MIIDrzCCApegAwIBAgIQCDvgVpBCRrGhdWrJWZHHSjANBgkqhkiG9w0BAQUFADBh +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSAwHgYDVQQDExdEaWdpQ2VydCBHbG9iYWwgUm9vdCBD 
+QTAeFw0wNjExMTAwMDAwMDBaFw0zMTExMTAwMDAwMDBaMGExCzAJBgNVBAYTAlVT +MRUwEwYDVQQKEwxEaWdpQ2VydCBJbmMxGTAXBgNVBAsTEHd3dy5kaWdpY2VydC5j +b20xIDAeBgNVBAMTF0RpZ2lDZXJ0IEdsb2JhbCBSb290IENBMIIBIjANBgkqhkiG +9w0BAQEFAAOCAQ8AMIIBCgKCAQEA4jvhEXLeqKTTo1eqUKKPC3eQyaKl7hLOllsB +CSDMAZOnTjC3U/dDxGkAV53ijSLdhwZAAIEJzs4bg7/fzTtxRuLWZscFs3YnFo97 +nh6Vfe63SKMI2tavegw5BmV/Sl0fvBf4q77uKNd0f3p4mVmFaG5cIzJLv07A6Fpt +43C/dxC//AH2hdmoRBBYMql1GNXRor5H4idq9Joz+EkIYIvUX7Q6hL+hqkpMfT7P +T19sdl6gSzeRntwi5m3OFBqOasv+zbMUZBfHWymeMr/y7vrTC0LUq7dBMtoM1O/4 +gdW7jVg/tRvoSSiicNoxBN33shbyTApOB6jtSj1etX+jkMOvJwIDAQABo2MwYTAO +BgNVHQ8BAf8EBAMCAYYwDwYDVR0TAQH/BAUwAwEB/zAdBgNVHQ4EFgQUA95QNVbR +TLtm8KPiGxvDl7I90VUwHwYDVR0jBBgwFoAUA95QNVbRTLtm8KPiGxvDl7I90VUw +DQYJKoZIhvcNAQEFBQADggEBAMucN6pIExIK+t1EnE9SsPTfrgT1eXkIoyQY/Esr +hMAtudXH/vTBH1jLuG2cenTnmCmrEbXjcKChzUyImZOMkXDiqw8cvpOp/2PV5Adg +06O/nVsJ8dWO41P0jmP6P6fbtGbfYmbW0W5BjfIttep3Sp+dWOIrWcBAI+0tKIJF +PnlUkiaY4IBIqDfv8NZ5YBberOgOzW6sRBc4L0na4UU+Krk2U886UAb3LujEV0ls +YSEY1QSteDwsOoBrp+uvFRTp2InBuThs4pFsiv9kuXclVzDAGySj4dzp30d8tbQk +CAUw7C29C79Fv1C5qfPrmAESrciIxpg0X40KPMbp1ZWVbd4= +-----END CERTIFICATE----- + +# Issuer: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Subject: CN=DigiCert High Assurance EV Root CA O=DigiCert Inc OU=www.digicert.com +# Label: "DigiCert High Assurance EV Root CA" +# Serial: 3553400076410547919724730734378100087 +# MD5 Fingerprint: d4:74:de:57:5c:39:b2:d3:9c:85:83:c5:c0:65:49:8a +# SHA1 Fingerprint: 5f:b7:ee:06:33:e2:59:db:ad:0c:4c:9a:e6:d3:8f:1a:61:c7:dc:25 +# SHA256 Fingerprint: 74:31:e5:f4:c3:c1:ce:46:90:77:4f:0b:61:e0:54:40:88:3b:a9:a0:1e:d0:0b:a6:ab:d7:80:6e:d3:b1:18:cf +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIQAqxcJmoLQJuPC3nyrkYldzANBgkqhkiG9w0BAQUFADBs +MQswCQYDVQQGEwJVUzEVMBMGA1UEChMMRGlnaUNlcnQgSW5jMRkwFwYDVQQLExB3 +d3cuZGlnaWNlcnQuY29tMSswKQYDVQQDEyJEaWdpQ2VydCBIaWdoIEFzc3VyYW5j +ZSBFViBSb290IENBMB4XDTA2MTExMDAwMDAwMFoXDTMxMTExMDAwMDAwMFowbDEL 
+MAkGA1UEBhMCVVMxFTATBgNVBAoTDERpZ2lDZXJ0IEluYzEZMBcGA1UECxMQd3d3 +LmRpZ2ljZXJ0LmNvbTErMCkGA1UEAxMiRGlnaUNlcnQgSGlnaCBBc3N1cmFuY2Ug +RVYgUm9vdCBDQTCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAMbM5XPm ++9S75S0tMqbf5YE/yc0lSbZxKsPVlDRnogocsF9ppkCxxLeyj9CYpKlBWTrT3JTW +PNt0OKRKzE0lgvdKpVMSOO7zSW1xkX5jtqumX8OkhPhPYlG++MXs2ziS4wblCJEM +xChBVfvLWokVfnHoNb9Ncgk9vjo4UFt3MRuNs8ckRZqnrG0AFFoEt7oT61EKmEFB +Ik5lYYeBQVCmeVyJ3hlKV9Uu5l0cUyx+mM0aBhakaHPQNAQTXKFx01p8VdteZOE3 +hzBWBOURtCmAEvF5OYiiAhF8J2a3iLd48soKqDirCmTCv2ZdlYTBoSUeh10aUAsg +EsxBu24LUTi4S8sCAwEAAaNjMGEwDgYDVR0PAQH/BAQDAgGGMA8GA1UdEwEB/wQF +MAMBAf8wHQYDVR0OBBYEFLE+w2kD+L9HAdSYJhoIAu9jZCvDMB8GA1UdIwQYMBaA +FLE+w2kD+L9HAdSYJhoIAu9jZCvDMA0GCSqGSIb3DQEBBQUAA4IBAQAcGgaX3Nec +nzyIZgYIVyHbIUf4KmeqvxgydkAQV8GK83rZEWWONfqe/EW1ntlMMUu4kehDLI6z +eM7b41N5cdblIZQB2lWHmiRk9opmzN6cN82oNLFpmyPInngiK3BD41VHMWEZ71jF +hS9OMPagMRYjyOfiZRYzy78aG6A9+MpeizGLYAiJLQwGXFK3xPkKmNEVX58Svnw2 +Yzi9RKR/5CYrCsSXaQ3pjOLAEFe4yHYSkVXySGnYvCoCWw9E1CAx2/S6cCZdkGCe +vEsXCS+0yx5DaMkHJ8HSXPfqIbloEpw8nL+e/IBcm2PN7EeqJSdnoDfzAIJ9VNep ++OkuE6N36B9K +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. +# Subject: CN=GeoTrust Primary Certification Authority O=GeoTrust Inc. 
+# Label: "GeoTrust Primary Certification Authority" +# Serial: 32798226551256963324313806436981982369 +# MD5 Fingerprint: 02:26:c3:01:5e:08:30:37:43:a9:d0:7d:cf:37:e6:bf +# SHA1 Fingerprint: 32:3c:11:8e:1b:f7:b8:b6:52:54:e2:e2:10:0d:d6:02:90:37:f0:96 +# SHA256 Fingerprint: 37:d5:10:06:c5:12:ea:ab:62:64:21:f1:ec:8c:92:01:3f:c5:f8:2a:e9:8e:e5:33:eb:46:19:b8:de:b4:d0:6c +-----BEGIN CERTIFICATE----- +MIIDfDCCAmSgAwIBAgIQGKy1av1pthU6Y2yv2vrEoTANBgkqhkiG9w0BAQUFADBY +MQswCQYDVQQGEwJVUzEWMBQGA1UEChMNR2VvVHJ1c3QgSW5jLjExMC8GA1UEAxMo +R2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEx +MjcwMDAwMDBaFw0zNjA3MTYyMzU5NTlaMFgxCzAJBgNVBAYTAlVTMRYwFAYDVQQK +Ew1HZW9UcnVzdCBJbmMuMTEwLwYDVQQDEyhHZW9UcnVzdCBQcmltYXJ5IENlcnRp +ZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAvrgVe//UfH1nrYNke8hCUy3f9oQIIGHWAVlqnEQRr+92/ZV+zmEwu3qDXwK9 +AWbK7hWNb6EwnL2hhZ6UOvNWiAAxz9juapYC2e0DjPt1befquFUWBRaa9OBesYjA +ZIVcFU2Ix7e64HXprQU9nceJSOC7KMgD4TCTZF5SwFlwIjVXiIrxlQqD17wxcwE0 +7e9GceBrAqg1cmuXm2bgyxx5X9gaBGgeRwLmnWDiNpcB3841kt++Z8dtd1k7j53W +kBWUvEI0EME5+bEnPn7WinXFsq+W06Lem+SYvn3h6YGttm/81w7a4DSwDRp35+MI +mO9Y+pyEtzavwt+s0vQQBnBxNQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQULNVQQZcVi/CPNmFbSvtr2ZnJM5IwDQYJ +KoZIhvcNAQEFBQADggEBAFpwfyzdtzRP9YZRqSa+S7iq8XEN3GHHoOo0Hnp3DwQ1 +6CePbJC/kRYkRj5KTs4rFtULUh38H2eiAkUxT87z+gOneZ1TatnaYzr4gNfTmeGl +4b7UVXGYNTq+k+qurUKykG/g/CFNNWMziUnWm07Kx+dOCQD32sfvmWKZd7aVIl6K +oKv0uHiYyjgZmclynnjNS6yvGaBzEi38wkG6gZHaFloxt/m0cYASSJlyc1pZU8Fj +UjPtp8nSOQJw+uCxQmYpqptR7TBUIhRf2asdweSU8Pj1K/fqynhG1riR/aYNKxoU +AT6A8EKglQdebc3MS6RFjasS6LPeWuWgfOgPIh1a6Vk= +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA O=thawte, Inc. OU=Certification Services Division/(c) 2006 thawte, Inc. 
- For authorized use only +# Label: "thawte Primary Root CA" +# Serial: 69529181992039203566298953787712940909 +# MD5 Fingerprint: 8c:ca:dc:0b:22:ce:f5:be:72:ac:41:1a:11:a8:d8:12 +# SHA1 Fingerprint: 91:c6:d6:ee:3e:8a:c8:63:84:e5:48:c2:99:29:5c:75:6c:81:7b:81 +# SHA256 Fingerprint: 8d:72:2f:81:a9:c1:13:c0:79:1d:f1:36:a2:96:6d:b2:6c:95:0a:97:1d:b4:6b:41:99:f4:ea:54:b7:8b:fb:9f +-----BEGIN CERTIFICATE----- +MIIEIDCCAwigAwIBAgIQNE7VVyDV7exJ9C/ON9srbTANBgkqhkiG9w0BAQUFADCB +qTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxHzAdBgNV +BAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwHhcNMDYxMTE3MDAwMDAwWhcNMzYw +NzE2MjM1OTU5WjCBqTELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5j +LjEoMCYGA1UECxMfQ2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYG +A1UECxMvKGMpIDIwMDYgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNl +IG9ubHkxHzAdBgNVBAMTFnRoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQCsoPD7gFnUnMekz52hWXMJEEUMDSxuaPFs +W0hoSVk3/AszGcJ3f8wQLZU0HObrTQmnHNK4yZc2AreJ1CRfBsDMRJSUjQJib+ta +3RGNKJpchJAQeg29dGYvajig4tVUROsdB58Hum/u6f1OCyn1PoSgAfGcq/gcfomk +6KHYcWUNo1F77rzSImANuVud37r8UVsLr5iy6S7pBOhih94ryNdOwUxkHt3Ph1i6 +Sk/KaAcdHJ1KxtUvkcx8cXIcxcBn6zL9yZJclNqFwJu/U30rCfSMnZEfl2pSy94J +NqR32HuHUETVPm4pafs5SSYeCaWAe0At6+gnhcn+Yf1+5nyXHdWdAgMBAAGjQjBA +MA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBR7W0XP +r87Lev0xkhpqtvNG61dIUDANBgkqhkiG9w0BAQUFAAOCAQEAeRHAS7ORtvzw6WfU +DW5FvlXok9LOAz/t2iWwHVfLHjp2oEzsUHboZHIMpKnxuIvW1oeEuzLlQRHAd9mz +YJ3rG9XRbkREqaYB7FViHXe4XI5ISXycO1cRrK1zN44veFyQaEfZYGDm/Ac9IiAX +xPcW6cTYcvnIc3zfFi8VqT79aie2oetaupgf1eNNZAqdE8hhuvU5HIe6uL17In/2 +/qxAeeWsEG89jxt5dovEN7MhGITlNgDrYyCZuen+MwS7QcjBAvlEYyCegc5C09Y/ +LHbTY5xZ3Y+m4Q6gLkH3LpVHz7z9M/P2C2F+fpErgUfCJzDupxBdN49cOSvkBPB7 +jVaMaA== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. 
OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G5 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2006 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G5" +# Serial: 33037644167568058970164719475676101450 +# MD5 Fingerprint: cb:17:e4:31:67:3e:e2:09:fe:45:57:93:f3:0a:fa:1c +# SHA1 Fingerprint: 4e:b6:d5:78:49:9b:1c:cf:5f:58:1e:ad:56:be:3d:9b:67:44:a5:e5 +# SHA256 Fingerprint: 9a:cf:ab:7e:43:c8:d8:80:d0:6b:26:2a:94:de:ee:e4:b4:65:99:89:c3:d0:ca:f1:9b:af:64:05:e4:1a:b7:df +-----BEGIN CERTIFICATE----- +MIIE0zCCA7ugAwIBAgIQGNrRniZ96LtKIVjNzGs7SjANBgkqhkiG9w0BAQUFADCB +yjELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxW +ZXJpU2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5IC0gRzUwHhcNMDYxMTA4MDAwMDAwWhcNMzYwNzE2MjM1OTU5WjCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNiBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQCvJAgIKXo1 +nmAMqudLO07cfLw8RRy7K+D+KQL5VwijZIUVJ/XxrcgxiV0i6CqqpkKzj/i5Vbex +t0uz/o9+B1fs70PbZmIVYc9gDaTY3vjgw2IIPVQT60nKWVSFJuUrjxuf6/WhkcIz +SdhDY2pSS9KP6HBRTdGJaXvHcPaz3BJ023tdS1bTlr8Vd6Gw9KIl8q8ckmcY5fQG +BO+QueQA5N06tRn/Arr0PO7gi+s3i+z016zy9vA9r911kTMZHRxAy3QkGSGT2RT+ +rCpSx4/VBEnkjWNHiDxpg8v+R70rfk/Fla4OndTRQ8Bnc+MUCH7lP59zuDMKz10/ +NIeWiu5T6CUVAgMBAAGjgbIwga8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8E +BAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJaW1hZ2UvZ2lmMCEwHzAH +BgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYjaHR0cDovL2xvZ28udmVy +aXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFH/TZafC3ey78DAJ80M5+gKv 
+MzEzMA0GCSqGSIb3DQEBBQUAA4IBAQCTJEowX2LP2BqYLz3q3JktvXf2pXkiOOzE +p6B4Eq1iDkVwZMXnl2YtmAl+X6/WzChl8gGqCBpH3vn5fJJaCGkgDdk+bW48DW7Y +5gaRQBi5+MHt39tBquCWIMnNZBU4gcmU7qKEKQsTb47bDN0lAtukixlE0kF6BWlK +WE9gyn6CagsCqiUXObXbf+eEZSqVir2G3l6BFoMtEMze/aiCKm0oHw0LxOXnGiYZ +4fQRbxC1lfznQgUy286dUV4otp6F01vvpX1FQHKOtw5rDgb7MzVIcbidJ4vEZV8N +hnacRHr2lVz2XTIIM6RUthg/aFzyQkqFOFSDX9HoLPKsEdao7WNq +-----END CERTIFICATE----- + +# Issuer: CN=COMODO Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO Certification Authority O=COMODO CA Limited +# Label: "COMODO Certification Authority" +# Serial: 104350513648249232941998508985834464573 +# MD5 Fingerprint: 5c:48:dc:f7:42:72:ec:56:94:6d:1c:cc:71:35:80:75 +# SHA1 Fingerprint: 66:31:bf:9e:f7:4f:9e:b6:c9:d5:a6:0c:ba:6a:be:d1:f7:bd:ef:7b +# SHA256 Fingerprint: 0c:2c:d6:3d:f7:80:6f:a3:99:ed:e8:09:11:6b:57:5b:f8:79:89:f0:65:18:f9:80:8c:86:05:03:17:8b:af:66 +-----BEGIN CERTIFICATE----- +MIIEHTCCAwWgAwIBAgIQToEtioJl4AsC7j41AkblPTANBgkqhkiG9w0BAQUFADCB +gTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4G +A1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxJzAlBgNV +BAMTHkNPTU9ETyBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAeFw0wNjEyMDEwMDAw +MDBaFw0yOTEyMzEyMzU5NTlaMIGBMQswCQYDVQQGEwJHQjEbMBkGA1UECBMSR3Jl +YXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHEwdTYWxmb3JkMRowGAYDVQQKExFDT01P +RE8gQ0EgTGltaXRlZDEnMCUGA1UEAxMeQ09NT0RPIENlcnRpZmljYXRpb24gQXV0 +aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA0ECLi3LjkRv3 +UcEbVASY06m/weaKXTuH+7uIzg3jLz8GlvCiKVCZrts7oVewdFFxze1CkU1B/qnI +2GqGd0S7WWaXUF601CxwRM/aN5VCaTwwxHGzUvAhTaHYujl8HJ6jJJ3ygxaYqhZ8 +Q5sVW7euNJH+1GImGEaaP+vB+fGQV+useg2L23IwambV4EajcNxo2f8ESIl33rXp ++2dtQem8Ob0y2WIC8bGoPW43nOIv4tOiJovGuFVDiOEjPqXSJDlqR6sA1KGzqSX+ +DT+nHbrTUcELpNqsOO9VUCQFZUaTNE8tja3G1CEZ0o7KBWFxB3NH5YoZEr0ETc5O +nKVIrLsm9wIDAQABo4GOMIGLMB0GA1UdDgQWBBQLWOWLxkwVN6RAqTCpIb5HNlpW +/zAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/BAUwAwEB/zBJBgNVHR8EQjBAMD6g +PKA6hjhodHRwOi8vY3JsLmNvbW9kb2NhLmNvbS9DT01PRE9DZXJ0aWZpY2F0aW9u 
+QXV0aG9yaXR5LmNybDANBgkqhkiG9w0BAQUFAAOCAQEAPpiem/Yb6dc5t3iuHXIY +SdOH5EOC6z/JqvWote9VfCFSZfnVDeFs9D6Mk3ORLgLETgdxb8CPOGEIqB6BCsAv +IC9Bi5HcSEW88cbeunZrM8gALTFGTO3nnc+IlP8zwFboJIYmuNg4ON8qa90SzMc/ +RxdMosIGlgnW2/4/PEZB31jiVg88O8EckzXZOFKs7sjsLjBOlDW0JB9LeGna8gI4 +zJVSk/BwJVmcIGfE7vmLV2H0knZ9P4SNVbfo5azV8fUZVqZa+5Acr5Pr5RzUZ5dd +BA6+C4OmF4O5MBKgxTMVBbkN+8cFduPYSo38NBejxiEovjBFMR7HeL5YYTisO+IB +ZQ== +-----END CERTIFICATE----- + +# Issuer: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Subject: CN=Network Solutions Certificate Authority O=Network Solutions L.L.C. +# Label: "Network Solutions Certificate Authority" +# Serial: 116697915152937497490437556386812487904 +# MD5 Fingerprint: d3:f3:a6:16:c0:fa:6b:1d:59:b1:2d:96:4d:0e:11:2e +# SHA1 Fingerprint: 74:f8:a3:c3:ef:e7:b3:90:06:4b:83:90:3c:21:64:60:20:e5:df:ce +# SHA256 Fingerprint: 15:f0:ba:00:a3:ac:7a:f3:ac:88:4c:07:2b:10:11:a0:77:bd:77:c0:97:f4:01:64:b2:f8:59:8a:bd:83:86:0c +-----BEGIN CERTIFICATE----- +MIID5jCCAs6gAwIBAgIQV8szb8JcFuZHFhfjkDFo4DANBgkqhkiG9w0BAQUFADBi +MQswCQYDVQQGEwJVUzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMu +MTAwLgYDVQQDEydOZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3Jp +dHkwHhcNMDYxMjAxMDAwMDAwWhcNMjkxMjMxMjM1OTU5WjBiMQswCQYDVQQGEwJV +UzEhMB8GA1UEChMYTmV0d29yayBTb2x1dGlvbnMgTC5MLkMuMTAwLgYDVQQDEydO +ZXR3b3JrIFNvbHV0aW9ucyBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDkvH6SMG3G2I4rC7xGzuAnlt7e+foS0zwz +c7MEL7xxjOWftiJgPl9dzgn/ggwbmlFQGiaJ3dVhXRncEg8tCqJDXRfQNJIg6nPP +OCwGJgl6cvf6UDL4wpPTaaIjzkGxzOTVHzbRijr4jGPiFFlp7Q3Tf2vouAPlT2rl +mGNpSAW+Lv8ztumXWWn4Zxmuk2GWRBXTcrA/vGp97Eh/jcOrqnErU2lBUzS1sLnF +BgrEsEX1QV1uiUV7PTsmjHTC5dLRfbIR1PtYMiKagMnc/Qzpf14Dl847ABSHJ3A4 +qY5usyd2mFHgBeMhqxrVhSI8KbWaFsWAqPS7azCPL0YCorEMIuDTAgMBAAGjgZcw +gZQwHQYDVR0OBBYEFCEwyfsA106Y2oeqKtCnLrFAMadMMA4GA1UdDwEB/wQEAwIB +BjAPBgNVHRMBAf8EBTADAQH/MFIGA1UdHwRLMEkwR6BFoEOGQWh0dHA6Ly9jcmwu +bmV0c29sc3NsLmNvbS9OZXR3b3JrU29sdXRpb25zQ2VydGlmaWNhdGVBdXRob3Jp 
+dHkuY3JsMA0GCSqGSIb3DQEBBQUAA4IBAQC7rkvnt1frf6ott3NHhWrB5KUd5Oc8 +6fRZZXe1eltajSU24HqXLjjAV2CDmAaDn7l2em5Q4LqILPxFzBiwmZVRDuwduIj/ +h1AcgsLj4DKAv6ALR8jDMe+ZZzKATxcheQxpXN5eNK4CtSbqUN9/GGUsyfJj4akH +/nxxH2szJGoeBfcFaMBqEssuXmHLrijTfsK0ZpEmXzwuJF/LWA/rKOyvEZbz3Htv +wKeI8lN3s2Berq4o2jUsbzRF0ybh3uxbTydrFny9RAQYgrOJeRcQcT16ohZO9QHN +pGxlaKFJdlxDydi8NmdspZS11My5vWo1ViHe2MPr+8ukYEywVaCge1ey +-----END CERTIFICATE----- + +# Issuer: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Subject: CN=COMODO ECC Certification Authority O=COMODO CA Limited +# Label: "COMODO ECC Certification Authority" +# Serial: 41578283867086692638256921589707938090 +# MD5 Fingerprint: 7c:62:ff:74:9d:31:53:5e:68:4a:d5:78:aa:1e:bf:23 +# SHA1 Fingerprint: 9f:74:4e:9f:2b:4d:ba:ec:0f:31:2c:50:b6:56:3b:8e:2d:93:c3:11 +# SHA256 Fingerprint: 17:93:92:7a:06:14:54:97:89:ad:ce:2f:8f:34:f7:f0:b6:6d:0f:3a:e3:a3:b8:4d:21:ec:15:db:ba:4f:ad:c7 +-----BEGIN CERTIFICATE----- +MIICiTCCAg+gAwIBAgIQH0evqmIAcFBUTAGem2OZKjAKBggqhkjOPQQDAzCBhTEL +MAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdyZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UE +BxMHU2FsZm9yZDEaMBgGA1UEChMRQ09NT0RPIENBIExpbWl0ZWQxKzApBgNVBAMT +IkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwHhcNMDgwMzA2MDAw +MDAwWhcNMzgwMTE4MjM1OTU5WjCBhTELMAkGA1UEBhMCR0IxGzAZBgNVBAgTEkdy +ZWF0ZXIgTWFuY2hlc3RlcjEQMA4GA1UEBxMHU2FsZm9yZDEaMBgGA1UEChMRQ09N +T0RPIENBIExpbWl0ZWQxKzApBgNVBAMTIkNPTU9ETyBFQ0MgQ2VydGlmaWNhdGlv +biBBdXRob3JpdHkwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQDR3svdcmCFYX7deSR +FtSrYpn1PlILBs5BAH+X4QokPB0BBO490o0JlwzgdeT6+3eKKvUDYEs2ixYjFq0J +cfRK9ChQtP6IHG4/bC8vCVlbpVsLM5niwz2J+Wos77LTBumjQjBAMB0GA1UdDgQW +BBR1cacZSBm8nZ3qQUfflMRId5nTeTAOBgNVHQ8BAf8EBAMCAQYwDwYDVR0TAQH/ +BAUwAwEB/zAKBggqhkjOPQQDAwNoADBlAjEA7wNbeqy3eApyt4jf/7VGFAkK+qDm +fQjGGoe9GKhzvSbKYAydzpmfz1wPMOG+FDHqAjAU9JM8SaczepBGR7NjfRObTrdv +GDeAU/7dIOA1mjbRxwG55tzd8/8dLDoWV9mSOdY= +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA +# Subject: CN=TC 
TrustCenter Class 2 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 2 CA +# Label: "TC TrustCenter Class 2 CA II" +# Serial: 941389028203453866782103406992443 +# MD5 Fingerprint: ce:78:33:5c:59:78:01:6e:18:ea:b9:36:a0:b9:2e:23 +# SHA1 Fingerprint: ae:50:83:ed:7c:f4:5c:bc:8f:61:c6:21:fe:68:5d:79:42:21:15:6e +# SHA256 Fingerprint: e6:b8:f8:76:64:85:f8:07:ae:7f:8d:ac:16:70:46:1f:07:c0:a1:3e:ef:3a:1f:f7:17:53:8d:7a:ba:d3:91:b4 +-----BEGIN CERTIFICATE----- +MIIEqjCCA5KgAwIBAgIOLmoAAQACH9dSISwRXDswDQYJKoZIhvcNAQEFBQAwdjEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV +BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDIgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 +Q2VudGVyIENsYXNzIDIgQ0EgSUkwHhcNMDYwMTEyMTQzODQzWhcNMjUxMjMxMjI1 +OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i +SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQTElMCMGA1UEAxMc +VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMiBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBAKuAh5uO8MN8h9foJIIRszzdQ2Lu+MNF2ujhoF/RKrLqk2jf +tMjWQ+nEdVl//OEd+DFwIxuInie5e/060smp6RQvkL4DUsFJzfb95AhmC1eKokKg +uNV/aVyQMrKXDcpK3EY+AlWJU+MaWss2xgdW94zPEfRMuzBwBJWl9jmM/XOBCH2J +XjIeIqkiRUuwZi4wzJ9l/fzLganx4Duvo4bRierERXlQXa7pIXSSTYtZgo+U4+lK +8edJsBTj9WLL1XK9H7nSn6DNqPoByNkN39r8R52zyFTfSUrxIan+GE7uSNQZu+99 +5OKdy1u2bv/jzVrndIIFuoAlOMvkaZ6vQaoahPUCAwEAAaOCATQwggEwMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTjq1RMgKHbVkO3 +kUrL84J6E1wIqzCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy +dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18yX2NhX0lJLmNybIaBn2xkYXA6 +Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz +JTIwMiUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 +Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u +TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEAjNfffu4bgBCzg/XbEeprS6iS +GNn3Bzn1LL4GdXpoUxUc6krtXvwjshOg0wn/9vYua0Fxec3ibf2uWWuFHbhOIprt +ZjluS5TmVfwLG4t3wVMTZonZKNaL80VKY7f9ewthXbhtvsPcW3nS7Yblok2+XnR8 +au0WOB9/WIFaGusyiC2y8zl3gK9etmF1KdsjTYjKUCjLhdLTEKJZbtOTVAB6okaV 
+hgWcqRmY5TFyDADiZ9lA4CQze28suVyrZZ0srHbqNZn1l7kPJOzHdiEoZa5X6AeI +dUpWoNIFOqTmjZKILPPy4cHGYdtBxceb9w4aUUXCYWvcZCcXjFq32nQozZfkvQ== +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA +# Subject: CN=TC TrustCenter Class 3 CA II O=TC TrustCenter GmbH OU=TC TrustCenter Class 3 CA +# Label: "TC TrustCenter Class 3 CA II" +# Serial: 1506523511417715638772220530020799 +# MD5 Fingerprint: 56:5f:aa:80:61:12:17:f6:67:21:e6:2b:6d:61:56:8e +# SHA1 Fingerprint: 80:25:ef:f4:6e:70:c8:d4:72:24:65:84:fe:40:3b:8a:8d:6a:db:f5 +# SHA256 Fingerprint: 8d:a0:84:fc:f9:9c:e0:77:22:f8:9b:32:05:93:98:06:fa:5c:b8:11:e1:c8:13:f6:a1:08:c7:d3:36:b3:40:8e +-----BEGIN CERTIFICATE----- +MIIEqjCCA5KgAwIBAgIOSkcAAQAC5aBd1j8AUb8wDQYJKoZIhvcNAQEFBQAwdjEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxIjAgBgNV +BAsTGVRDIFRydXN0Q2VudGVyIENsYXNzIDMgQ0ExJTAjBgNVBAMTHFRDIFRydXN0 +Q2VudGVyIENsYXNzIDMgQ0EgSUkwHhcNMDYwMTEyMTQ0MTU3WhcNMjUxMjMxMjI1 +OTU5WjB2MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIgR21i +SDEiMCAGA1UECxMZVEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQTElMCMGA1UEAxMc +VEMgVHJ1c3RDZW50ZXIgQ2xhc3MgMyBDQSBJSTCCASIwDQYJKoZIhvcNAQEBBQAD +ggEPADCCAQoCggEBALTgu1G7OVyLBMVMeRwjhjEQY0NVJz/GRcekPewJDRoeIMJW +Ht4bNwcwIi9v8Qbxq63WyKthoy9DxLCyLfzDlml7forkzMA5EpBCYMnMNWju2l+Q +Vl/NHE1bWEnrDgFPZPosPIlY2C8u4rBo6SI7dYnWRBpl8huXJh0obazovVkdKyT2 +1oQDZogkAHhg8fir/gKya/si+zXmFtGt9i4S5Po1auUZuV3bOx4a+9P/FRQI2Alq +ukWdFHlgfa9Aigdzs5OW03Q0jTo3Kd5c7PXuLjHCINy+8U9/I1LZW+Jk2ZyqBwi1 +Rb3R0DHBq1SfqdLDYmAD8bs5SpJKPQq5ncWg/jcCAwEAAaOCATQwggEwMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBTUovyfs8PYA9NX +XAek0CSnwPIA1DCB7QYDVR0fBIHlMIHiMIHfoIHcoIHZhjVodHRwOi8vd3d3LnRy +dXN0Y2VudGVyLmRlL2NybC92Mi90Y19jbGFzc18zX2NhX0lJLmNybIaBn2xkYXA6 +Ly93d3cudHJ1c3RjZW50ZXIuZGUvQ049VEMlMjBUcnVzdENlbnRlciUyMENsYXNz +JTIwMyUyMENBJTIwSUksTz1UQyUyMFRydXN0Q2VudGVyJTIwR21iSCxPVT1yb290 +Y2VydHMsREM9dHJ1c3RjZW50ZXIsREM9ZGU/Y2VydGlmaWNhdGVSZXZvY2F0aW9u 
+TGlzdD9iYXNlPzANBgkqhkiG9w0BAQUFAAOCAQEANmDkcPcGIEPZIxpC8vijsrlN +irTzwppVMXzEO2eatN9NDoqTSheLG43KieHPOh6sHfGcMrSOWXaiQYUlN6AT0PV8 +TtXqluJucsG7Kv5sbviRmEb8yRtXW+rIGjs/sFGYPAfaLFkB2otE6OF0/ado3VS6 +g0bsyEa1+K+XwDsJHI/OcpY9M1ZwvJbL2NV9IJqDnxrcOfHFcqMRA/07QlIp2+gB +95tejNaNhk4Z+rwcvsUhpYeeeC422wlxo3I0+GzjBgnyXlal092Y+tTmBvTwtiBj +S+opvaqCZh77gaqnN60TGOaSw4HBM7uIHqHn4rS9MWwOUT1v+5ZWgOI2F9Hc5A== +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Subject: CN=TC TrustCenter Universal CA I O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Label: "TC TrustCenter Universal CA I" +# Serial: 601024842042189035295619584734726 +# MD5 Fingerprint: 45:e1:a5:72:c5:a9:36:64:40:9e:f5:e4:58:84:67:8c +# SHA1 Fingerprint: 6b:2f:34:ad:89:58:be:62:fd:b0:6b:5c:ce:bb:9d:d9:4f:4e:39:f3 +# SHA256 Fingerprint: eb:f3:c0:2a:87:89:b1:fb:7d:51:19:95:d6:63:b7:29:06:d9:13:ce:0d:5e:10:56:8a:8a:77:e2:58:61:67:e7 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIOHaIAAQAC7LdggHiNtgYwDQYJKoZIhvcNAQEFBQAweTEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV +BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEmMCQGA1UEAxMdVEMgVHJ1 +c3RDZW50ZXIgVW5pdmVyc2FsIENBIEkwHhcNMDYwMzIyMTU1NDI4WhcNMjUxMjMx +MjI1OTU5WjB5MQswCQYDVQQGEwJERTEcMBoGA1UEChMTVEMgVHJ1c3RDZW50ZXIg +R21iSDEkMCIGA1UECxMbVEMgVHJ1c3RDZW50ZXIgVW5pdmVyc2FsIENBMSYwJAYD +VQQDEx1UQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0EgSTCCASIwDQYJKoZIhvcN +AQEBBQADggEPADCCAQoCggEBAKR3I5ZEr5D0MacQ9CaHnPM42Q9e3s9B6DGtxnSR +JJZ4Hgmgm5qVSkr1YnwCqMqs+1oEdjneX/H5s7/zA1hV0qq34wQi0fiU2iIIAI3T +fCZdzHd55yx4Oagmcw6iXSVphU9VDprvxrlE4Vc93x9UIuVvZaozhDrzznq+VZeu +jRIPFDPiUHDDSYcTvFHe15gSWu86gzOSBnWLknwSaHtwag+1m7Z3W0hZneTvWq3z +wZ7U10VOylY0Ibw+F1tvdwxIAUMpsN0/lm7mlaoMwCC2/T42J5zjXM9OgdwZu5GQ +fezmlwQek8wiSdeXhrYTCjxDI3d+8NzmzSQfO4ObNDqDNOMCAwEAAaNjMGEwHwYD +VR0jBBgwFoAUkqR1LKSevoFE63n8isWVpesQdXMwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAYYwHQYDVR0OBBYEFJKkdSyknr6BROt5/IrFlaXrEHVzMA0G 
+CSqGSIb3DQEBBQUAA4IBAQAo0uCG1eb4e/CX3CJrO5UUVg8RMKWaTzqwOuAGy2X1 +7caXJ/4l8lfmXpWMPmRgFVp/Lw0BxbFg/UU1z/CyvwbZ71q+s2IhtNerNXxTPqYn +8aEt2hojnczd7Dwtnic0XQ/CNnm8yUpiLe1r2X1BQ3y2qsrtYbE3ghUJGooWMNjs +ydZHcnhLEEYUjl8Or+zHL6sQ17bxbuyGssLoDZJz3KL0Dzq/YSMQiZxIQG5wALPT +ujdEWBF6AmqI8Dc08BnprNRlc/ZpjGSUOnmFKbAWKwyCPwacx/0QK54PLLae4xW/ +2TYcuiUaUj0a7CIMHOCkoj3w6DnPgcB77V0fb8XQC9eY +-----END CERTIFICATE----- + +# Issuer: CN=Cybertrust Global Root O=Cybertrust, Inc +# Subject: CN=Cybertrust Global Root O=Cybertrust, Inc +# Label: "Cybertrust Global Root" +# Serial: 4835703278459682877484360 +# MD5 Fingerprint: 72:e4:4a:87:e3:69:40:80:77:ea:bc:e3:f4:ff:f0:e1 +# SHA1 Fingerprint: 5f:43:e5:b1:bf:f8:78:8c:ac:1c:c7:ca:4a:9a:c6:22:2b:cc:34:c6 +# SHA256 Fingerprint: 96:0a:df:00:63:e9:63:56:75:0c:29:65:dd:0a:08:67:da:0b:9c:bd:6e:77:71:4a:ea:fb:23:49:ab:39:3d:a3 +-----BEGIN CERTIFICATE----- +MIIDoTCCAomgAwIBAgILBAAAAAABD4WqLUgwDQYJKoZIhvcNAQEFBQAwOzEYMBYG +A1UEChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2Jh +bCBSb290MB4XDTA2MTIxNTA4MDAwMFoXDTIxMTIxNTA4MDAwMFowOzEYMBYGA1UE +ChMPQ3liZXJ0cnVzdCwgSW5jMR8wHQYDVQQDExZDeWJlcnRydXN0IEdsb2JhbCBS +b290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEA+Mi8vRRQZhP/8NN5 +7CPytxrHjoXxEnOmGaoQ25yiZXRadz5RfVb23CO21O1fWLE3TdVJDm71aofW0ozS +J8bi/zafmGWgE07GKmSb1ZASzxQG9Dvj1Ci+6A74q05IlG2OlTEQXO2iLb3VOm2y +HLtgwEZLAfVJrn5GitB0jaEMAs7u/OePuGtm839EAL9mJRQr3RAwHQeWP032a7iP +t3sMpTjr3kfb1V05/Iin89cqdPHoWqI7n1C6poxFNcJQZZXcY4Lv3b93TZxiyWNz +FtApD0mpSPCzqrdsxacwOUBdrsTiXSZT8M4cIwhhqJQZugRiQOwfOHB3EgZxpzAY +XSUnpQIDAQABo4GlMIGiMA4GA1UdDwEB/wQEAwIBBjAPBgNVHRMBAf8EBTADAQH/ +MB0GA1UdDgQWBBS2CHsNesysIEyGVjJez6tuhS1wVzA/BgNVHR8EODA2MDSgMqAw +hi5odHRwOi8vd3d3Mi5wdWJsaWMtdHJ1c3QuY29tL2NybC9jdC9jdHJvb3QuY3Js +MB8GA1UdIwQYMBaAFLYIew16zKwgTIZWMl7Pq26FLXBXMA0GCSqGSIb3DQEBBQUA +A4IBAQBW7wojoFROlZfJ+InaRcHUowAl9B8Tq7ejhVhpwjCt2BWKLePJzYFa+HMj +Wqd8BfP9IjsO0QbE2zZMcwSO5bAi5MXzLqXZI+O4Tkogp24CJJ8iYGd7ix1yCcUx 
+XOl5n4BHPa2hCwcUPUf/A2kaDAtE52Mlp3+yybh2hO0j9n0Hq0V+09+zv+mKts2o +omcrUtW3ZfA5TGOgkXmTUg9U3YO7n9GPp1Nzw8v/MOx8BLjYRB+TX3EJIrduPuoc +A06dGiBh+4E37F78CkWr1+cXVdCg6mCbpvbjjFspwgZgFJ0tl0ypkxWdYcQBX0jW +WL1WMRJOEcgh4LMRkWXbtKaIOM5V +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G3 O=GeoTrust Inc. OU=(c) 2008 GeoTrust Inc. - For authorized use only +# Label: "GeoTrust Primary Certification Authority - G3" +# Serial: 28809105769928564313984085209975885599 +# MD5 Fingerprint: b5:e8:34:36:c9:10:44:58:48:70:6d:2e:83:d4:b8:05 +# SHA1 Fingerprint: 03:9e:ed:b8:0b:e7:a0:3c:69:53:89:3b:20:d2:d9:32:3a:4c:2a:fd +# SHA256 Fingerprint: b4:78:b8:12:25:0d:f8:78:63:5c:2a:a7:ec:7d:15:5e:aa:62:5e:e8:29:16:e2:cd:29:43:61:88:6c:d1:fb:d4 +-----BEGIN CERTIFICATE----- +MIID/jCCAuagAwIBAgIQFaxulBmyeUtB9iepwxgPHzANBgkqhkiG9w0BAQsFADCB +mDELMAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsT +MChjKSAyMDA4IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25s +eTE2MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhv +cml0eSAtIEczMB4XDTA4MDQwMjAwMDAwMFoXDTM3MTIwMTIzNTk1OVowgZgxCzAJ +BgNVBAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykg +MjAwOCBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0 +BgNVBAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkg +LSBHMzCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBANziXmJYHTNXOTIz ++uvLh4yn1ErdBojqZI4xmKU4kB6Yzy5jK/BGvESyiaHAKAxJcCGVn2TAppMSAmUm +hsalifD614SgcK9PGpc/BkTVyetyEH3kMSj7HGHmKAdEc5IiaacDiGydY8hS2pgn +5whMcD60yRLBxWeDXTPzAxHsatBT4tG6NmCUgLthY2xbF37fQJQeqw3CIShwiP/W +JmxsYAQlTlV+fe+/lEjetx3dcI0FX4ilm/LC7urRQEFtYjgdVgbFA0dRIBn8exAL +DmKudlW/X3e+PkkBUz2YJQN2JFodtNuJ6nnltrM7P7pMKEF/BqxqjsHQ9gUdfeZC +huOl1UcCAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYw +HQYDVR0OBBYEFMR5yo6hTgMdHNxr2zFblD4/MH8tMA0GCSqGSIb3DQEBCwUAA4IB 
+AQAtxRPPVoB7eni9n64smefv2t+UXglpp+duaIy9cr5HqQ6XErhK8WTTOd8lNNTB +zU6B8A8ExCSzNJbGpqow32hhc9f5joWJ7w5elShKKiePEI4ufIbEAp7aDHdlDkQN +kv39sxY2+hENHYwOB4lqKVb3cvTdFZx3NWZXqxNT2I7BQMXXExZacse3aQHEerGD +AWh9jUGhlBjBJVz88P6DAod8DQ3PLghcSkANPuyBYeYk28rgDi0Hsj5W3I31QYUH +SJsMC8tJP33st/3LjWeJGqvtux6jAAgIFyqCXDFdRootD4abdNlF+9RAsXqqaC2G +spki4cErx5z481+oghLrGREt +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G2 O=thawte, Inc. OU=(c) 2007 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G2" +# Serial: 71758320672825410020661621085256472406 +# MD5 Fingerprint: 74:9d:ea:60:24:c4:fd:22:53:3e:cc:3a:72:d9:29:4f +# SHA1 Fingerprint: aa:db:bc:22:23:8f:c4:01:a1:27:bb:38:dd:f4:1d:db:08:9e:f0:12 +# SHA256 Fingerprint: a4:31:0d:50:af:18:a6:44:71:90:37:2a:86:af:af:8b:95:1f:fb:43:1d:83:7f:1e:56:88:b4:59:71:ed:15:57 +-----BEGIN CERTIFICATE----- +MIICiDCCAg2gAwIBAgIQNfwmXNmET8k9Jj1Xm67XVjAKBggqhkjOPQQDAzCBhDEL +MAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjE4MDYGA1UECxMvKGMp +IDIwMDcgdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAi +BgNVBAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMjAeFw0wNzExMDUwMDAw +MDBaFw0zODAxMTgyMzU5NTlaMIGEMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhh +d3RlLCBJbmMuMTgwNgYDVQQLEy8oYykgMjAwNyB0aGF3dGUsIEluYy4gLSBGb3Ig +YXV0aG9yaXplZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9v +dCBDQSAtIEcyMHYwEAYHKoZIzj0CAQYFK4EEACIDYgAEotWcgnuVnfFSeIf+iha/ +BebfowJPDQfGAFG6DAJSLSKkQjnE/o/qycG+1E3/n3qe4rF8mq2nhglzh9HnmuN6 +papu+7qzcMBniKI11KOasf2twu8x+qi58/sIxpHR+ymVo0IwQDAPBgNVHRMBAf8E +BTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNVHQ4EFgQUmtgAMADna3+FGO6Lts6K +DPgR4bswCgYIKoZIzj0EAwMDaQAwZgIxAN344FdHW6fmCsO99YCKlzUNG4k8VIZ3 +KMqh9HneteY4sPBlcIx/AlTCv//YoT7ZzwIxAMSNlPzcU9LcnXgWHxUzI1NS41ox +XZ3Krr0TKUQNJ1uo52icEvdYPy5yAlejj6EULg== +-----END CERTIFICATE----- + +# Issuer: CN=thawte Primary Root CA - G3 O=thawte, Inc. 
OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Subject: CN=thawte Primary Root CA - G3 O=thawte, Inc. OU=Certification Services Division/(c) 2008 thawte, Inc. - For authorized use only +# Label: "thawte Primary Root CA - G3" +# Serial: 127614157056681299805556476275995414779 +# MD5 Fingerprint: fb:1b:5d:43:8a:94:cd:44:c6:76:f2:43:4b:47:e7:31 +# SHA1 Fingerprint: f1:8b:53:8d:1b:e9:03:b6:a6:f0:56:43:5b:17:15:89:ca:f3:6b:f2 +# SHA256 Fingerprint: 4b:03:f4:58:07:ad:70:f2:1b:fc:2c:ae:71:c9:fd:e4:60:4c:06:4c:f5:ff:b6:86:ba:e5:db:aa:d7:fd:d3:4c +-----BEGIN CERTIFICATE----- +MIIEKjCCAxKgAwIBAgIQYAGXt0an6rS0mtZLL/eQ+zANBgkqhkiG9w0BAQsFADCB +rjELMAkGA1UEBhMCVVMxFTATBgNVBAoTDHRoYXd0ZSwgSW5jLjEoMCYGA1UECxMf +Q2VydGlmaWNhdGlvbiBTZXJ2aWNlcyBEaXZpc2lvbjE4MDYGA1UECxMvKGMpIDIw +MDggdGhhd3RlLCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxJDAiBgNV +BAMTG3RoYXd0ZSBQcmltYXJ5IFJvb3QgQ0EgLSBHMzAeFw0wODA0MDIwMDAwMDBa +Fw0zNzEyMDEyMzU5NTlaMIGuMQswCQYDVQQGEwJVUzEVMBMGA1UEChMMdGhhd3Rl +LCBJbmMuMSgwJgYDVQQLEx9DZXJ0aWZpY2F0aW9uIFNlcnZpY2VzIERpdmlzaW9u +MTgwNgYDVQQLEy8oYykgMjAwOCB0aGF3dGUsIEluYy4gLSBGb3IgYXV0aG9yaXpl +ZCB1c2Ugb25seTEkMCIGA1UEAxMbdGhhd3RlIFByaW1hcnkgUm9vdCBDQSAtIEcz +MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsr8nLPvb2FvdeHsbnndm +gcs+vHyu86YnmjSjaDFxODNi5PNxZnmxqWWjpYvVj2AtP0LMqmsywCPLLEHd5N/8 +YZzic7IilRFDGF/Eth9XbAoFWCLINkw6fKXRz4aviKdEAhN0cXMKQlkC+BsUa0Lf +b1+6a4KinVvnSr0eAXLbS3ToO39/fR8EtCab4LRarEc9VbjXsCZSKAExQGbY2SS9 +9irY7CFJXJv2eul/VTV+lmuNk5Mny5K76qxAwJ/C+IDPXfRa3M50hqY+bAtTyr2S +zhkGcuYMXDhpxwTWvGzOW/b3aJzcJRVIiKHpqfiYnODz1TEoYRFsZ5aNOZnLwkUk +OQIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQEAwIBBjAdBgNV +HQ4EFgQUrWyqlGCc7eT/+j4KdCtjA/e2Wb8wDQYJKoZIhvcNAQELBQADggEBABpA +2JVlrAmSicY59BDlqQ5mU1143vokkbvnRFHfxhY0Cu9qRFHqKweKA3rD6z8KLFIW +oCtDuSWQP3CpMyVtRRooOyfPqsMpQhvfO0zAMzRbQYi/aytlryjvsvXDqmbOe1bu +t8jLZ8HJnBoYuMTDSQPxYA5QzUbF83d597YV4Djbxy8ooAw/dyZ02SUS2jHaGh7c +KUGRIjxpp7sC8rZcJwOJ9Abqm+RyguOhCcHpABnTPtRwa7pxpqpYrvS76Wy274fM 
+m7v/OeZWYdMKp8RcTGB7BXcmer/YB1IsYvdwY9k5vG8cwnncdimvzsUsZAReiDZu +MdRAGmI0Nj81Aa6sY6A= +-----END CERTIFICATE----- + +# Issuer: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Subject: CN=GeoTrust Primary Certification Authority - G2 O=GeoTrust Inc. OU=(c) 2007 GeoTrust Inc. - For authorized use only +# Label: "GeoTrust Primary Certification Authority - G2" +# Serial: 80682863203381065782177908751794619243 +# MD5 Fingerprint: 01:5e:d8:6b:bd:6f:3d:8e:a1:31:f8:12:e0:98:73:6a +# SHA1 Fingerprint: 8d:17:84:d5:37:f3:03:7d:ec:70:fe:57:8b:51:9a:99:e6:10:d7:b0 +# SHA256 Fingerprint: 5e:db:7a:c4:3b:82:a0:6a:87:61:e8:d7:be:49:79:eb:f2:61:1f:7d:d7:9b:f9:1c:1c:6b:56:6a:21:9e:d7:66 +-----BEGIN CERTIFICATE----- +MIICrjCCAjWgAwIBAgIQPLL0SAoA4v7rJDteYD7DazAKBggqhkjOPQQDAzCBmDEL +MAkGA1UEBhMCVVMxFjAUBgNVBAoTDUdlb1RydXN0IEluYy4xOTA3BgNVBAsTMChj +KSAyMDA3IEdlb1RydXN0IEluYy4gLSBGb3IgYXV0aG9yaXplZCB1c2Ugb25seTE2 +MDQGA1UEAxMtR2VvVHJ1c3QgUHJpbWFyeSBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0 +eSAtIEcyMB4XDTA3MTEwNTAwMDAwMFoXDTM4MDExODIzNTk1OVowgZgxCzAJBgNV +BAYTAlVTMRYwFAYDVQQKEw1HZW9UcnVzdCBJbmMuMTkwNwYDVQQLEzAoYykgMjAw +NyBHZW9UcnVzdCBJbmMuIC0gRm9yIGF1dGhvcml6ZWQgdXNlIG9ubHkxNjA0BgNV +BAMTLUdlb1RydXN0IFByaW1hcnkgQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgLSBH +MjB2MBAGByqGSM49AgEGBSuBBAAiA2IABBWx6P0DFUPlrOuHNxFi79KDNlJ9RVcL +So17VDs6bl8VAsBQps8lL33KSLjHUGMcKiEIfJo22Av+0SbFWDEwKCXzXV2juLal +tJLtbCyf691DiaI8S0iRHVDsJt/WYC69IaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFBVfNVdRVfslsq0DafwBo/q+EVXVMAoG +CCqGSM49BAMDA2cAMGQCMGSWWaboCd6LuvpaiIjwH5HTRqjySkwCY/tsXzjbLkGT +qQ7mndwxHLKgpxgceeHHNgIwOlavmnRs9vuD4DPTCF+hnMJbn0bWtsuRBmOiBucz +rD6ogRLQy7rQkgu2npaqBA+K +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Universal Root Certification Authority O=VeriSign, Inc. 
OU=VeriSign Trust Network/(c) 2008 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Universal Root Certification Authority" +# Serial: 85209574734084581917763752644031726877 +# MD5 Fingerprint: 8e:ad:b5:01:aa:4d:81:e4:8c:1d:d1:e1:14:00:95:19 +# SHA1 Fingerprint: 36:79:ca:35:66:87:72:30:4d:30:a5:fb:87:3b:0f:a7:7b:b7:0d:54 +# SHA256 Fingerprint: 23:99:56:11:27:a5:71:25:de:8c:ef:ea:61:0d:df:2f:a0:78:b5:c8:06:7f:4e:82:82:90:bf:b8:60:e8:4b:3c +-----BEGIN CERTIFICATE----- +MIIEuTCCA6GgAwIBAgIQQBrEZCGzEyEDDrvkEhrFHTANBgkqhkiG9w0BAQsFADCB +vTELMAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQL +ExZWZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwOCBWZXJp +U2lnbiwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MTgwNgYDVQQDEy9W +ZXJpU2lnbiBVbml2ZXJzYWwgUm9vdCBDZXJ0aWZpY2F0aW9uIEF1dGhvcml0eTAe +Fw0wODA0MDIwMDAwMDBaFw0zNzEyMDEyMzU5NTlaMIG9MQswCQYDVQQGEwJVUzEX +MBUGA1UEChMOVmVyaVNpZ24sIEluYy4xHzAdBgNVBAsTFlZlcmlTaWduIFRydXN0 +IE5ldHdvcmsxOjA4BgNVBAsTMShjKSAyMDA4IFZlcmlTaWduLCBJbmMuIC0gRm9y +IGF1dGhvcml6ZWQgdXNlIG9ubHkxODA2BgNVBAMTL1ZlcmlTaWduIFVuaXZlcnNh +bCBSb290IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIIBIjANBgkqhkiG9w0BAQEF +AAOCAQ8AMIIBCgKCAQEAx2E3XrEBNNti1xWb/1hajCMj1mCOkdeQmIN65lgZOIzF +9uVkhbSicfvtvbnazU0AtMgtc6XHaXGVHzk8skQHnOgO+k1KxCHfKWGPMiJhgsWH +H26MfF8WIFFE0XBPV+rjHOPMee5Y2A7Cs0WTwCznmhcrewA3ekEzeOEz4vMQGn+H +LL729fdC4uW/h2KJXwBL38Xd5HVEMkE6HnFuacsLdUYI0crSK5XQz/u5QGtkjFdN +/BMReYTtXlT2NJ8IAfMQJQYXStrxHXpma5hgZqTZ79IugvHw7wnqRMkVauIDbjPT +rJ9VAMf2CGqUuV/c4DPxhGD5WycRtPwW8rtWaoAljQIDAQABo4GyMIGvMA8GA1Ud +EwEB/wQFMAMBAf8wDgYDVR0PAQH/BAQDAgEGMG0GCCsGAQUFBwEMBGEwX6FdoFsw +WTBXMFUWCWltYWdlL2dpZjAhMB8wBwYFKw4DAhoEFI/l0xqGrI2Oa8PPgGrUSBgs +exkuMCUWI2h0dHA6Ly9sb2dvLnZlcmlzaWduLmNvbS92c2xvZ28uZ2lmMB0GA1Ud +DgQWBBS2d/ppSEefUxLVwuoHMnYH0ZcHGTANBgkqhkiG9w0BAQsFAAOCAQEASvj4 +sAPmLGd75JR3Y8xuTPl9Dg3cyLk1uXBPY/ok+myDjEedO2Pzmvl2MpWRsXe8rJq+ +seQxIcaBlVZaDrHC1LGmWazxY8u4TB1ZkErvkBYoH1quEPuBUDgMbMzxPcP1Y+Oz 
+4yHJJDnp/RVmRvQbEdBNc6N9Rvk97ahfYtTxP/jgdFcrGJ2BtMQo2pSXpXDrrB2+ +BxHw1dvd5Yzw1TKwg+ZX4o+/vqGqvz0dtdQ46tewXDpPaj+PwGZsY6rp2aQW9IHR +lRQOfc2VNNnSj3BzgXucfr2YYdhFh5iQxeuGMMY1v/D/w1WIg0vvBZIGcfK4mJO3 +7M2CYfE45k+XmCpajQ== +-----END CERTIFICATE----- + +# Issuer: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Subject: CN=VeriSign Class 3 Public Primary Certification Authority - G4 O=VeriSign, Inc. OU=VeriSign Trust Network/(c) 2007 VeriSign, Inc. - For authorized use only +# Label: "VeriSign Class 3 Public Primary Certification Authority - G4" +# Serial: 63143484348153506665311985501458640051 +# MD5 Fingerprint: 3a:52:e1:e7:fd:6f:3a:e3:6f:f3:6f:99:1b:f9:22:41 +# SHA1 Fingerprint: 22:d5:d8:df:8f:02:31:d1:8d:f7:9d:b7:cf:8a:2d:64:c9:3f:6c:3a +# SHA256 Fingerprint: 69:dd:d7:ea:90:bb:57:c9:3e:13:5d:c8:5e:a6:fc:d5:48:0b:60:32:39:bd:c4:54:fc:75:8b:2a:26:cf:7f:79 +-----BEGIN CERTIFICATE----- +MIIDhDCCAwqgAwIBAgIQL4D+I4wOIg9IZxIokYesszAKBggqhkjOPQQDAzCByjEL +MAkGA1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZW +ZXJpU2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2ln +biwgSW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJp +U2lnbiBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9y +aXR5IC0gRzQwHhcNMDcxMTA1MDAwMDAwWhcNMzgwMTE4MjM1OTU5WjCByjELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMR8wHQYDVQQLExZWZXJp +U2lnbiBUcnVzdCBOZXR3b3JrMTowOAYDVQQLEzEoYykgMjAwNyBWZXJpU2lnbiwg +SW5jLiAtIEZvciBhdXRob3JpemVkIHVzZSBvbmx5MUUwQwYDVQQDEzxWZXJpU2ln +biBDbGFzcyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5 +IC0gRzQwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAASnVnp8Utpkmw4tXNherJI9/gHm +GUo9FANL+mAnINmDiWn6VMaaGF5VKmTeBvaNSjutEDxlPZCIBIngMGGzrl0Bp3ve +fLK+ymVhAIau2o970ImtTR1ZmkGxvEeA3J5iw/mjgbIwga8wDwYDVR0TAQH/BAUw +AwEB/zAOBgNVHQ8BAf8EBAMCAQYwbQYIKwYBBQUHAQwEYTBfoV2gWzBZMFcwVRYJ +aW1hZ2UvZ2lmMCEwHzAHBgUrDgMCGgQUj+XTGoasjY5rw8+AatRIGCx7GS4wJRYj 
+aHR0cDovL2xvZ28udmVyaXNpZ24uY29tL3ZzbG9nby5naWYwHQYDVR0OBBYEFLMW +kf3upm7ktS5Jj4d4gYDs5bG1MAoGCCqGSM49BAMDA2gAMGUCMGYhDBgmYFo4e1ZC +4Kf8NoRRkSAsdk1DPcQdhCPQrNZ8NQbOzWm9kA3bbEhCHQ6qQgIxAJw9SDkjOVga +FRJZap7v1VmyHVIsmXHNxynfGyphe3HR3vPA5Q06Sqotp9iGKt0uEA== +-----END CERTIFICATE----- + +# Issuer: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Subject: O=VeriSign, Inc. OU=Class 3 Public Primary Certification Authority +# Label: "Verisign Class 3 Public Primary Certification Authority" +# Serial: 80507572722862485515306429940691309246 +# MD5 Fingerprint: ef:5a:f1:33:ef:f1:cd:bb:51:02:ee:12:14:4b:96:c4 +# SHA1 Fingerprint: a1:db:63:93:91:6f:17:e4:18:55:09:40:04:15:c7:02:40:b0:ae:6b +# SHA256 Fingerprint: a4:b6:b3:99:6f:c2:f3:06:b3:fd:86:81:bd:63:41:3d:8c:50:09:cc:4f:a3:29:c2:cc:f0:e2:fa:1b:14:03:05 +-----BEGIN CERTIFICATE----- +MIICPDCCAaUCEDyRMcsf9tAbDpq40ES/Er4wDQYJKoZIhvcNAQEFBQAwXzELMAkG +A1UEBhMCVVMxFzAVBgNVBAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFz +cyAzIFB1YmxpYyBQcmltYXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MB4XDTk2 +MDEyOTAwMDAwMFoXDTI4MDgwMjIzNTk1OVowXzELMAkGA1UEBhMCVVMxFzAVBgNV +BAoTDlZlcmlTaWduLCBJbmMuMTcwNQYDVQQLEy5DbGFzcyAzIFB1YmxpYyBQcmlt +YXJ5IENlcnRpZmljYXRpb24gQXV0aG9yaXR5MIGfMA0GCSqGSIb3DQEBAQUAA4GN +ADCBiQKBgQDJXFme8huKARS0EN8EQNvjV69qRUCPhAwL0TPZ2RHP7gJYHyX3KqhE +BarsAx94f56TuZoAqiN91qyFomNFx3InzPRMxnVx0jnvT0Lwdd8KkMaOIG+YD/is +I19wKTakyYbnsZogy1Olhec9vn2a/iRFM9x2Fe0PonFkTGUugWhFpwIDAQABMA0G +CSqGSIb3DQEBBQUAA4GBABByUqkFFBkyCEHwxWsKzH4PIRnN5GfcX6kb5sroc50i +2JhucwNhkcV8sEVAbkSdjbCxlnRhLQ2pRdKkkirWmnWXbj9T/UWZYB2oK0z5XqcJ +2HUw19JlYD1n1khVdWk/kfVIC0dpImmClr7JyDiGSnoscxlIaU5rfGW/D/xwzoiQ +-----END CERTIFICATE----- + +# Issuer: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Subject: CN=GlobalSign O=GlobalSign OU=GlobalSign Root CA - R3 +# Label: "GlobalSign Root CA - R3" +# Serial: 4835703278459759426209954 +# MD5 Fingerprint: c5:df:b8:49:ca:05:13:55:ee:2d:ba:1a:c3:3e:b0:28 +# SHA1 Fingerprint: 
d6:9b:56:11:48:f0:1c:77:c5:45:78:c1:09:26:df:5b:85:69:76:ad +# SHA256 Fingerprint: cb:b5:22:d7:b7:f1:27:ad:6a:01:13:86:5b:df:1c:d4:10:2e:7d:07:59:af:63:5a:7c:f4:72:0d:c9:63:c5:3b +-----BEGIN CERTIFICATE----- +MIIDXzCCAkegAwIBAgILBAAAAAABIVhTCKIwDQYJKoZIhvcNAQELBQAwTDEgMB4G +A1UECxMXR2xvYmFsU2lnbiBSb290IENBIC0gUjMxEzARBgNVBAoTCkdsb2JhbFNp +Z24xEzARBgNVBAMTCkdsb2JhbFNpZ24wHhcNMDkwMzE4MTAwMDAwWhcNMjkwMzE4 +MTAwMDAwWjBMMSAwHgYDVQQLExdHbG9iYWxTaWduIFJvb3QgQ0EgLSBSMzETMBEG +A1UEChMKR2xvYmFsU2lnbjETMBEGA1UEAxMKR2xvYmFsU2lnbjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBAMwldpB5BngiFvXAg7aEyiie/QV2EcWtiHL8 +RgJDx7KKnQRfJMsuS+FggkbhUqsMgUdwbN1k0ev1LKMPgj0MK66X17YUhhB5uzsT +gHeMCOFJ0mpiLx9e+pZo34knlTifBtc+ycsmWQ1z3rDI6SYOgxXG71uL0gRgykmm +KPZpO/bLyCiR5Z2KYVc3rHQU3HTgOu5yLy6c+9C7v/U9AOEGM+iCK65TpjoWc4zd +QQ4gOsC0p6Hpsk+QLjJg6VfLuQSSaGjlOCZgdbKfd/+RFO+uIEn8rUAVSNECMWEZ +XriX7613t2Saer9fwRPvm2L7DWzgVGkWqQPabumDk3F2xmmFghcCAwEAAaNCMEAw +DgYDVR0PAQH/BAQDAgEGMA8GA1UdEwEB/wQFMAMBAf8wHQYDVR0OBBYEFI/wS3+o +LkUkrk1Q+mOai97i3Ru8MA0GCSqGSIb3DQEBCwUAA4IBAQBLQNvAUKr+yAzv95ZU +RUm7lgAJQayzE4aGKAczymvmdLm6AC2upArT9fHxD4q/c2dKg8dEe3jgr25sbwMp +jjM5RcOO5LlXbKr8EpbsU8Yt5CRsuZRj+9xTaGdWPoO4zzUhw8lo/s7awlOqzJCK +6fBdRoyV3XpYKBovHd7NADdBj+1EbddTKJd+82cEHhXXipa0095MJ6RMG3NzdvQX +mcIfeg7jLQitChws/zyrVQ4PkX4268NXSb7hLi18YIvDQVETI53O9zJrlAGomecs +Mx86OyXShkDOOyyGeMlhLxS67ttVb9+E7gUJTb0o2HLO02JQZR7rkpeDMdmztcpH +WD9f +-----END CERTIFICATE----- + +# Issuer: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Subject: CN=TC TrustCenter Universal CA III O=TC TrustCenter GmbH OU=TC TrustCenter Universal CA +# Label: "TC TrustCenter Universal CA III" +# Serial: 2010889993983507346460533407902964 +# MD5 Fingerprint: 9f:dd:db:ab:ff:8e:ff:45:21:5f:f0:6c:9d:8f:fe:2b +# SHA1 Fingerprint: 96:56:cd:7b:57:96:98:95:d0:e1:41:46:68:06:fb:b8:c6:11:06:87 +# SHA256 Fingerprint: 30:9b:4a:87:f6:ca:56:c9:31:69:aa:a9:9c:6d:98:88:54:d7:89:2b:d5:43:7e:2d:07:b2:9c:be:da:55:d3:5d +-----BEGIN 
CERTIFICATE----- +MIID4TCCAsmgAwIBAgIOYyUAAQACFI0zFQLkbPQwDQYJKoZIhvcNAQEFBQAwezEL +MAkGA1UEBhMCREUxHDAaBgNVBAoTE1RDIFRydXN0Q2VudGVyIEdtYkgxJDAiBgNV +BAsTG1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQTEoMCYGA1UEAxMfVEMgVHJ1 +c3RDZW50ZXIgVW5pdmVyc2FsIENBIElJSTAeFw0wOTA5MDkwODE1MjdaFw0yOTEy +MzEyMzU5NTlaMHsxCzAJBgNVBAYTAkRFMRwwGgYDVQQKExNUQyBUcnVzdENlbnRl +ciBHbWJIMSQwIgYDVQQLExtUQyBUcnVzdENlbnRlciBVbml2ZXJzYWwgQ0ExKDAm +BgNVBAMTH1RDIFRydXN0Q2VudGVyIFVuaXZlcnNhbCBDQSBJSUkwggEiMA0GCSqG +SIb3DQEBAQUAA4IBDwAwggEKAoIBAQDC2pxisLlxErALyBpXsq6DFJmzNEubkKLF +5+cvAqBNLaT6hdqbJYUtQCggbergvbFIgyIpRJ9Og+41URNzdNW88jBmlFPAQDYv +DIRlzg9uwliT6CwLOunBjvvya8o84pxOjuT5fdMnnxvVZ3iHLX8LR7PH6MlIfK8v +zArZQe+f/prhsq75U7Xl6UafYOPfjdN/+5Z+s7Vy+EutCHnNaYlAJ/Uqwa1D7KRT +yGG299J5KmcYdkhtWyUB0SbFt1dpIxVbYYqt8Bst2a9c8SaQaanVDED1M4BDj5yj +dipFtK+/fz6HP3bFzSreIMUWWMv5G/UPyw0RUmS40nZid4PxWJ//AgMBAAGjYzBh +MB8GA1UdIwQYMBaAFFbn4VslQ4Dg9ozhcbyO5YAvxEjiMA8GA1UdEwEB/wQFMAMB +Af8wDgYDVR0PAQH/BAQDAgEGMB0GA1UdDgQWBBRW5+FbJUOA4PaM4XG8juWAL8RI +4jANBgkqhkiG9w0BAQUFAAOCAQEAg8ev6n9NCjw5sWi+e22JLumzCecYV42Fmhfz +dkJQEw/HkG8zrcVJYCtsSVgZ1OK+t7+rSbyUyKu+KGwWaODIl0YgoGhnYIg5IFHY +aAERzqf2EQf27OysGh+yZm5WZ2B6dF7AbZc2rrUNXWZzwCUyRdhKBgePxLcHsU0G +DeGl6/R1yrqc0L2z0zIkTO5+4nYES0lT2PLpVDP85XEfPRRclkvxOvIAu2y0+pZV +CIgJwcyRGSmwIC3/yzikQOEXvnlhgP8HA4ZMTnsGnxGGjYnuJ8Tb4rwZjgvDwxPH +LQNjO9Po5KIqwoIIlBZU8O8fJ5AluA0OKBtHd0e9HKgl8ZS0Zg== +-----END CERTIFICATE----- + +# Issuer: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. +# Subject: CN=Go Daddy Root Certificate Authority - G2 O=GoDaddy.com, Inc. 
+# Label: "Go Daddy Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 80:3a:bc:22:c1:e6:fb:8d:9b:3b:27:4a:32:1b:9a:01 +# SHA1 Fingerprint: 47:be:ab:c9:22:ea:e8:0e:78:78:34:62:a7:9f:45:c2:54:fd:e6:8b +# SHA256 Fingerprint: 45:14:0b:32:47:eb:9c:c8:c5:b4:f0:d7:b5:30:91:f7:32:92:08:9e:6e:5a:63:e2:74:9d:d3:ac:a9:19:8e:da +-----BEGIN CERTIFICATE----- +MIIDxTCCAq2gAwIBAgIBADANBgkqhkiG9w0BAQsFADCBgzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxGjAYBgNVBAoT +EUdvRGFkZHkuY29tLCBJbmMuMTEwLwYDVQQDEyhHbyBEYWRkeSBSb290IENlcnRp +ZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAwMFoXDTM3MTIzMTIz +NTk1OVowgYMxCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6b25hMRMwEQYDVQQH +EwpTY290dHNkYWxlMRowGAYDVQQKExFHb0RhZGR5LmNvbSwgSW5jLjExMC8GA1UE +AxMoR28gRGFkZHkgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIw +DQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAL9xYgjx+lk09xvJGKP3gElY6SKD +E6bFIEMBO4Tx5oVJnyfq9oQbTqC023CYxzIBsQU+B07u9PpPL1kwIuerGVZr4oAH +/PMWdYA5UXvl+TW2dE6pjYIT5LY/qQOD+qK+ihVqf94Lw7YZFAXK6sOoBJQ7Rnwy +DfMAZiLIjWltNowRGLfTshxgtDj6AozO091GB94KPutdfMh8+7ArU6SSYmlRJQVh +GkSBjCypQ5Yj36w6gZoOKcUcqeldHraenjAKOc7xiID7S13MMuyFYkMlNAJWJwGR +tDtwKj9useiciAF9n9T521NtYJ2/LOdYq7hfRvzOxBsDPAnrSTFcaUaz4EcCAwEA +AaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYE +FDqahQcQZyi27/a9BUFuIMGU2g/eMA0GCSqGSIb3DQEBCwUAA4IBAQCZ21151fmX +WWcDYfF+OwYxdS2hII5PZYe096acvNjpL9DbWu7PdIxztDhC2gV7+AJ1uP2lsdeu +9tfeE8tTEH6KRtGX+rcuKxGrkLAngPnon1rpN5+r5N9ss4UXnT3ZJE95kTXWXwTr +gIOrmgIttRD02JDHBHNA7XIloKmf7J6raBKZV8aPEjoJpL1E/QYVN8Gb5DKj7Tjo +2GTzLH4U/ALqn83/B2gX2yKQOC16jdFU8WnjXzPKej17CuPKf1855eJ1usV2GDPO +LPAvTK33sefOT6jEm0pUBsV/fdUID+Ic/n4XuKxe9tQWskMJDE32p2u0mYRlynqI +4uJEvlz36hz1 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: d6:39:81:c6:52:7e:96:69:fc:fc:ca:66:ed:05:f2:96 +# SHA1 Fingerprint: b5:1c:06:7c:ee:2b:0c:3d:f8:55:ab:2d:92:f4:fe:39:d4:e7:0f:0e +# SHA256 Fingerprint: 2c:e1:cb:0b:f9:d2:f9:e1:02:99:3f:be:21:51:52:c3:b2:dd:0c:ab:de:1c:68:e5:31:9b:83:91:54:db:b7:f5 +-----BEGIN CERTIFICATE----- +MIID3TCCAsWgAwIBAgIBADANBgkqhkiG9w0BAQsFADCBjzELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xMjAwBgNVBAMTKVN0YXJmaWVs +ZCBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5MDkwMTAwMDAw +MFoXDTM3MTIzMTIzNTk1OVowgY8xCzAJBgNVBAYTAlVTMRAwDgYDVQQIEwdBcml6 +b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFyZmllbGQgVGVj +aG5vbG9naWVzLCBJbmMuMTIwMAYDVQQDEylTdGFyZmllbGQgUm9vdCBDZXJ0aWZp +Y2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoC +ggEBAL3twQP89o/8ArFvW59I2Z154qK3A2FWGMNHttfKPTUuiUP3oWmb3ooa/RMg +nLRJdzIpVv257IzdIvpy3Cdhl+72WoTsbhm5iSzchFvVdPtrX8WJpRBSiUZV9Lh1 +HOZ/5FSuS/hVclcCGfgXcVnrHigHdMWdSL5stPSksPNkN3mSwOxGXn/hbVNMYq/N +Hwtjuzqd+/x5AJhhdM8mgkBj87JyahkNmcrUDnXMN/uLicFZ8WJ/X7NfZTD4p7dN +dloedl40wOiWVpmKs/B/pM293DIxfJHP4F8R+GuqSVzRmZTRouNjWwl2tVZi4Ut0 +HZbUJtQIBFnQmA4O5t78w+wfkPECAwEAAaNCMEAwDwYDVR0TAQH/BAUwAwEB/zAO +BgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFHwMMh+n2TB/xH1oo2Kooc6rB1snMA0G +CSqGSIb3DQEBCwUAA4IBAQARWfolTwNvlJk7mh+ChTnUdgWUXuEok21iXQnCoKjU +sHU48TRqneSfioYmUeYs0cYtbpUgSpIB7LiKZ3sx4mcujJUDJi5DnUox9g61DLu3 +4jd/IroAow57UvtruzvE03lRTs2Q9GcHGcg8RnoNAX3FWOdt5oUwF5okxBDgBPfg +8n/Uqgr/Qh037ZTlZFkSIHc40zI+OIF1lnP6aI+xy84fxez6nH7PfrHxBy22/L/K +pL/QlwVKvOoYKAKQvVR4CSFx09F9HdkWsKlhPdAKACL8x3vLCWRFCztAgfd9fDL1 +mMpYjn0q7pBZc2T5NnReJaH1ZgUufzkVqSr7UIuOhWn0 +-----END CERTIFICATE----- + +# Issuer: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. +# Subject: CN=Starfield Services Root Certificate Authority - G2 O=Starfield Technologies, Inc. 
+# Label: "Starfield Services Root Certificate Authority - G2" +# Serial: 0 +# MD5 Fingerprint: 17:35:74:af:7b:61:1c:eb:f4:f9:3c:e2:ee:40:f9:a2 +# SHA1 Fingerprint: 92:5a:8f:8d:2c:6d:04:e0:66:5f:59:6a:ff:22:d8:63:e8:25:6f:3f +# SHA256 Fingerprint: 56:8d:69:05:a2:c8:87:08:a4:b3:02:51:90:ed:cf:ed:b1:97:4a:60:6a:13:c6:e5:29:0f:cb:2a:e6:3e:da:b5 +-----BEGIN CERTIFICATE----- +MIID7zCCAtegAwIBAgIBADANBgkqhkiG9w0BAQsFADCBmDELMAkGA1UEBhMCVVMx +EDAOBgNVBAgTB0FyaXpvbmExEzARBgNVBAcTClNjb3R0c2RhbGUxJTAjBgNVBAoT +HFN0YXJmaWVsZCBUZWNobm9sb2dpZXMsIEluYy4xOzA5BgNVBAMTMlN0YXJmaWVs +ZCBTZXJ2aWNlcyBSb290IENlcnRpZmljYXRlIEF1dGhvcml0eSAtIEcyMB4XDTA5 +MDkwMTAwMDAwMFoXDTM3MTIzMTIzNTk1OVowgZgxCzAJBgNVBAYTAlVTMRAwDgYD +VQQIEwdBcml6b25hMRMwEQYDVQQHEwpTY290dHNkYWxlMSUwIwYDVQQKExxTdGFy +ZmllbGQgVGVjaG5vbG9naWVzLCBJbmMuMTswOQYDVQQDEzJTdGFyZmllbGQgU2Vy +dmljZXMgUm9vdCBDZXJ0aWZpY2F0ZSBBdXRob3JpdHkgLSBHMjCCASIwDQYJKoZI +hvcNAQEBBQADggEPADCCAQoCggEBANUMOsQq+U7i9b4Zl1+OiFOxHz/Lz58gE20p +OsgPfTz3a3Y4Y9k2YKibXlwAgLIvWX/2h/klQ4bnaRtSmpDhcePYLQ1Ob/bISdm2 +8xpWriu2dBTrz/sm4xq6HZYuajtYlIlHVv8loJNwU4PahHQUw2eeBGg6345AWh1K +Ts9DkTvnVtYAcMtS7nt9rjrnvDH5RfbCYM8TWQIrgMw0R9+53pBlbQLPLJGmpufe +hRhJfGZOozptqbXuNC66DQO4M99H67FrjSXZm86B0UVGMpZwh94CDklDhbZsc7tk +6mFBrMnUVN+HL8cisibMn1lUaJ/8viovxFUcdUBgF4UCVTmLfwUCAwEAAaNCMEAw +DwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYDVR0OBBYEFJxfAN+q +AdcwKziIorhtSpzyEZGDMA0GCSqGSIb3DQEBCwUAA4IBAQBLNqaEd2ndOxmfZyMI +bw5hyf2E3F/YNoHN2BtBLZ9g3ccaaNnRbobhiCPPE95Dz+I0swSdHynVv/heyNXB +ve6SbzJ08pGCL72CQnqtKrcgfU28elUSwhXqvfdqlS5sdJ/PHLTyxQGjhdByPq1z +qwubdQxtRbeOlKyWN7Wg0I8VRw7j6IPdj/3vQQF3zCepYoUz8jcI73HPdwbeyBkd +iEDPfUYd/x7H4c7/I9vG+o1VTqkC50cRRj70/b17KSa7qWFiNyi2LSr2EIZkyXCn +0q23KXB56jzaYyWf/Wi3MOxw+3WKt21gZ7IeyLnp2KhvAotnDU0mV3HaIPzBSlCN +sSi6 +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Commercial O=AffirmTrust +# Subject: CN=AffirmTrust Commercial O=AffirmTrust +# Label: "AffirmTrust Commercial" +# Serial: 8608355977964138876 +# MD5 Fingerprint: 
82:92:ba:5b:ef:cd:8a:6f:a6:3d:55:f9:84:f6:d6:b7 +# SHA1 Fingerprint: f9:b5:b6:32:45:5f:9c:be:ec:57:5f:80:dc:e9:6e:2c:c7:b2:78:b7 +# SHA256 Fingerprint: 03:76:ab:1d:54:c5:f9:80:3c:e4:b2:e2:01:a0:ee:7e:ef:7b:57:b6:36:e8:a9:3c:9b:8d:48:60:c9:6f:5f:a7 +-----BEGIN CERTIFICATE----- +MIIDTDCCAjSgAwIBAgIId3cGJyapsXwwDQYJKoZIhvcNAQELBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBDb21tZXJjaWFsMB4XDTEwMDEyOTE0MDYwNloXDTMwMTIzMTE0MDYwNlowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBDb21tZXJjaWFsMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEA9htPZwcroRX1BiLLHwGy43NFBkRJLLtJJRTWzsO3qyxPxkEylFf6EqdbDuKP +Hx6GGaeqtS25Xw2Kwq+FNXkyLbscYjfysVtKPcrNcV/pQr6U6Mje+SJIZMblq8Yr +ba0F8PrVC8+a5fBQpIs7R6UjW3p6+DM/uO+Zl+MgwdYoic+U+7lF7eNAFxHUdPAL +MeIrJmqbTFeurCA+ukV6BfO9m2kVrn1OIGPENXY6BwLJN/3HR+7o8XYdcxXyl6S1 +yHp52UKqK39c/s4mT6NmgTWvRLpUHhwwMmWd5jyTXlBOeuM61G7MGvv50jeuJCqr +VwMiKA1JdX+3KNp1v47j3A55MQIDAQABo0IwQDAdBgNVHQ4EFgQUnZPGU4teyq8/ +nx4P5ZmVvCT2lI8wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQELBQADggEBAFis9AQOzcAN/wr91LoWXym9e2iZWEnStB03TX8nfUYG +XUPGhi4+c7ImfU+TqbbEKpqrIZcUsd6M06uJFdhrJNTxFq7YpFzUf1GO7RgBsZNj +vbz4YYCanrHOQnDiqX0GJX0nof5v7LMeJNrjS1UaADs1tDvZ110w/YETifLCBivt +Z8SOyUOyXGsViQK8YvxO8rUzqrJv0wqiUOP2O+guRMLbZjipM1ZI8W0bM40NjD9g +N53Tym1+NH4Nn3J2ixufcv1SNUFFApYvHLKac0khsUlHRUe072o0EclNmsxZt9YC +nlpOZbWUrhvfKbAW8b8Angc6F2S1BLUjIZkKlTuXfO8= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Networking O=AffirmTrust +# Subject: CN=AffirmTrust Networking O=AffirmTrust +# Label: "AffirmTrust Networking" +# Serial: 8957382827206547757 +# MD5 Fingerprint: 42:65:ca:be:01:9a:9a:4c:a9:8c:41:49:cd:c0:d5:7f +# SHA1 Fingerprint: 29:36:21:02:8b:20:ed:02:f5:66:c5:32:d1:d6:ed:90:9f:45:00:2f +# SHA256 Fingerprint: 0a:81:ec:5a:92:97:77:f1:45:90:4a:f3:8d:5d:50:9f:66:b5:e2:c5:8f:cd:b5:31:05:8b:0e:17:f3:f0:b4:1b +-----BEGIN CERTIFICATE----- 
+MIIDTDCCAjSgAwIBAgIIfE8EORzUmS0wDQYJKoZIhvcNAQEFBQAwRDELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZpcm1UcnVz +dCBOZXR3b3JraW5nMB4XDTEwMDEyOTE0MDgyNFoXDTMwMTIzMTE0MDgyNFowRDEL +MAkGA1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MR8wHQYDVQQDDBZBZmZp +cm1UcnVzdCBOZXR3b3JraW5nMIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKC +AQEAtITMMxcua5Rsa2FSoOujz3mUTOWUgJnLVWREZY9nZOIG41w3SfYvm4SEHi3y +YJ0wTsyEheIszx6e/jarM3c1RNg1lho9Nuh6DtjVR6FqaYvZ/Ls6rnla1fTWcbua +kCNrmreIdIcMHl+5ni36q1Mr3Lt2PpNMCAiMHqIjHNRqrSK6mQEubWXLviRmVSRL +QESxG9fhwoXA3hA/Pe24/PHxI1Pcv2WXb9n5QHGNfb2V1M6+oF4nI979ptAmDgAp +6zxG8D1gvz9Q0twmQVGeFDdCBKNwV6gbh+0t+nvujArjqWaJGctB+d1ENmHP4ndG +yH329JKBNv3bNPFyfvMMFr20FQIDAQABo0IwQDAdBgNVHQ4EFgQUBx/S55zawm6i +QLSwelAQUHTEyL0wDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwDQYJ +KoZIhvcNAQEFBQADggEBAIlXshZ6qML91tmbmzTCnLQyFE2npN/svqe++EPbkTfO +tDIuUFUaNU52Q3Eg75N3ThVwLofDwR1t3Mu1J9QsVtFSUzpE0nPIxBsFZVpikpzu +QY0x2+c06lkh1QF612S4ZDnNye2v7UsDSKegmQGA3GWjNq5lWUhPgkvIZfFXHeVZ +Lgo/bNjR9eUJtGxUAArgFU2HdW23WJZa3W3SAKD0m0i+wzekujbgfIeFlxoVot4u +olu9rxj5kFDNcFn4J2dHy8egBzp90SxdbBk6ZrV9/ZFvgrG+CJPbFEfxojfHRZ48 +x3evZKiT3/Zpg4Jg8klCNO1aAFSFHBY2kgxc+qatv9s= +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium O=AffirmTrust +# Subject: CN=AffirmTrust Premium O=AffirmTrust +# Label: "AffirmTrust Premium" +# Serial: 7893706540734352110 +# MD5 Fingerprint: c4:5d:0e:48:b6:ac:28:30:4e:0a:bc:f9:38:16:87:57 +# SHA1 Fingerprint: d8:a6:33:2c:e0:03:6f:b1:85:f6:63:4f:7d:6a:06:65:26:32:28:27 +# SHA256 Fingerprint: 70:a7:3f:7f:37:6b:60:07:42:48:90:45:34:b1:14:82:d5:bf:0e:69:8e:cc:49:8d:f5:25:77:eb:f2:e9:3b:9a +-----BEGIN CERTIFICATE----- +MIIFRjCCAy6gAwIBAgIIbYwURrGmCu4wDQYJKoZIhvcNAQEMBQAwQTELMAkGA1UE +BhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1UcnVz +dCBQcmVtaXVtMB4XDTEwMDEyOTE0MTAzNloXDTQwMTIzMTE0MTAzNlowQTELMAkG +A1UEBhMCVVMxFDASBgNVBAoMC0FmZmlybVRydXN0MRwwGgYDVQQDDBNBZmZpcm1U +cnVzdCBQcmVtaXVtMIICIjANBgkqhkiG9w0BAQEFAAOCAg8AMIICCgKCAgEAxBLf 
+qV/+Qd3d9Z+K4/as4Tx4mrzY8H96oDMq3I0gW64tb+eT2TZwamjPjlGjhVtnBKAQ +JG9dKILBl1fYSCkTtuG+kU3fhQxTGJoeJKJPj/CihQvL9Cl/0qRY7iZNyaqoe5rZ ++jjeRFcV5fiMyNlI4g0WJx0eyIOFJbe6qlVBzAMiSy2RjYvmia9mx+n/K+k8rNrS +s8PhaJyJ+HoAVt70VZVs+7pk3WKL3wt3MutizCaam7uqYoNMtAZ6MMgpv+0GTZe5 +HMQxK9VfvFMSF5yZVylmd2EhMQcuJUmdGPLu8ytxjLW6OQdJd/zvLpKQBY0tL3d7 +70O/Nbua2Plzpyzy0FfuKE4mX4+QaAkvuPjcBukumj5Rp9EixAqnOEhss/n/fauG +V+O61oV4d7pD6kh/9ti+I20ev9E2bFhc8e6kGVQa9QPSdubhjL08s9NIS+LI+H+S +qHZGnEJlPqQewQcDWkYtuJfzt9WyVSHvutxMAJf7FJUnM7/oQ0dG0giZFmA7mn7S +5u046uwBHjxIVkkJx0w3AJ6IDsBz4W9m6XJHMD4Q5QsDyZpCAGzFlH5hxIrff4Ia +C1nEWTJ3s7xgaVY5/bQGeyzWZDbZvUjthB9+pSKPKrhC9IK31FOQeE4tGv2Bb0TX +OwF0lkLgAOIua+rF7nKsu7/+6qqo+Nz2snmKtmcCAwEAAaNCMEAwHQYDVR0OBBYE +FJ3AZ6YMItkm9UWrpmVSESfYRaxjMA8GA1UdEwEB/wQFMAMBAf8wDgYDVR0PAQH/ +BAQDAgEGMA0GCSqGSIb3DQEBDAUAA4ICAQCzV00QYk465KzquByvMiPIs0laUZx2 +KI15qldGF9X1Uva3ROgIRL8YhNILgM3FEv0AVQVhh0HctSSePMTYyPtwni94loMg +Nt58D2kTiKV1NpgIpsbfrM7jWNa3Pt668+s0QNiigfV4Py/VpfzZotReBA4Xrf5B +8OWycvpEgjNC6C1Y91aMYj+6QrCcDFx+LmUmXFNPALJ4fqENmS2NuB2OosSw/WDQ +MKSOyARiqcTtNd56l+0OOF6SL5Nwpamcb6d9Ex1+xghIsV5n61EIJenmJWtSKZGc +0jlzCFfemQa0W50QBuHCAKi4HEoCChTQwUHK+4w1IX2COPKpVJEZNZOUbWo6xbLQ +u4mGk+ibyQ86p3q4ofB4Rvr8Ny/lioTz3/4E2aFooC8k4gmVBtWVyuEklut89pMF +u+1z6S3RdTnX5yTb2E5fQ4+e0BQ5v1VwSJlXMbSc7kqYA5YwH2AG7hsj/oFgIxpH +YoWlzBk0gG+zrBrjn/B7SK3VAdlntqlyk+otZrWyuOQ9PLLvTIzq6we/qzWaVYa8 +GKa1qF60g2xraUDTn9zxw2lrueFtCfTxqlB2Cnp9ehehVZZCmTEJ3WARjQUwfuaO +RtGdFNrHF+QFlozEJLUbzxQHskD4o55BhrwE0GuWyCqANP2/7waj3VjFhT0+j/6e +KeC2uAloGRwYQw== +-----END CERTIFICATE----- + +# Issuer: CN=AffirmTrust Premium ECC O=AffirmTrust +# Subject: CN=AffirmTrust Premium ECC O=AffirmTrust +# Label: "AffirmTrust Premium ECC" +# Serial: 8401224907861490260 +# MD5 Fingerprint: 64:b0:09:55:cf:b1:d5:99:e2:be:13:ab:a6:5d:ea:4d +# SHA1 Fingerprint: b8:23:6b:00:2f:1d:16:86:53:01:55:6c:11:a4:37:ca:eb:ff:c3:bb +# SHA256 Fingerprint: 
bd:71:fd:f6:da:97:e4:cf:62:d1:64:7a:dd:25:81:b0:7d:79:ad:f8:39:7e:b4:ec:ba:9c:5e:84:88:82:14:23 +-----BEGIN CERTIFICATE----- +MIIB/jCCAYWgAwIBAgIIdJclisc/elQwCgYIKoZIzj0EAwMwRTELMAkGA1UEBhMC +VVMxFDASBgNVBAoMC0FmZmlybVRydXN0MSAwHgYDVQQDDBdBZmZpcm1UcnVzdCBQ +cmVtaXVtIEVDQzAeFw0xMDAxMjkxNDIwMjRaFw00MDEyMzExNDIwMjRaMEUxCzAJ +BgNVBAYTAlVTMRQwEgYDVQQKDAtBZmZpcm1UcnVzdDEgMB4GA1UEAwwXQWZmaXJt +VHJ1c3QgUHJlbWl1bSBFQ0MwdjAQBgcqhkjOPQIBBgUrgQQAIgNiAAQNMF4bFZ0D +0KF5Nbc6PJJ6yhUczWLznCZcBz3lVPqj1swS6vQUX+iOGasvLkjmrBhDeKzQN8O9 +ss0s5kfiGuZjuD0uL3jET9v0D6RoTFVya5UdThhClXjMNzyR4ptlKymjQjBAMB0G +A1UdDgQWBBSaryl6wBE1NSZRMADDav5A1a7WPDAPBgNVHRMBAf8EBTADAQH/MA4G +A1UdDwEB/wQEAwIBBjAKBggqhkjOPQQDAwNnADBkAjAXCfOHiFBar8jAQr9HX/Vs +aobgxCd05DhT1wV/GzTjxi+zygk8N53X57hG8f2h4nECMEJZh0PUUd+60wkyWs6I +flc9nF9Ca/UHLbXwgpP5WW+uZPpY5Yse42O+tYHNbwKMeQ== +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority O=StartCom Ltd. OU=Secure Digital Certificate Signing +# Subject: CN=StartCom Certification Authority O=StartCom Ltd. 
OU=Secure Digital Certificate Signing +# Label: "StartCom Certification Authority" +# Serial: 45 +# MD5 Fingerprint: c9:3b:0d:84:41:fc:a4:76:79:23:08:57:de:10:19:16 +# SHA1 Fingerprint: a3:f1:33:3f:e2:42:bf:cf:c5:d1:4e:8f:39:42:98:40:68:10:d1:a0 +# SHA256 Fingerprint: e1:78:90:ee:09:a3:fb:f4:f4:8b:9c:41:4a:17:d6:37:b7:a5:06:47:e9:bc:75:23:22:72:7f:cc:17:42:a9:11 +-----BEGIN CERTIFICATE----- +MIIHhzCCBW+gAwIBAgIBLTANBgkqhkiG9w0BAQsFADB9MQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMiU2VjdXJlIERpZ2l0YWwg +Q2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3RhcnRDb20gQ2VydGlmaWNh +dGlvbiBBdXRob3JpdHkwHhcNMDYwOTE3MTk0NjM3WhcNMzYwOTE3MTk0NjM2WjB9 +MQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjErMCkGA1UECxMi +U2VjdXJlIERpZ2l0YWwgQ2VydGlmaWNhdGUgU2lnbmluZzEpMCcGA1UEAxMgU3Rh +cnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkwggIiMA0GCSqGSIb3DQEBAQUA +A4ICDwAwggIKAoICAQDBiNsJvGxGfHiflXu1M5DycmLWwTYgIiRezul38kMKogZk +pMyONvg45iPwbm2xPN1yo4UcodM9tDMr0y+v/uqwQVlntsQGfQqedIXWeUyAN3rf +OQVSWff0G0ZDpNKFhdLDcfN1YjS6LIp/Ho/u7TTQEceWzVI9ujPW3U3eCztKS5/C +Ji/6tRYccjV3yjxd5srhJosaNnZcAdt0FCX+7bWgiA/deMotHweXMAEtcnn6RtYT +Kqi5pquDSR3l8u/d5AGOGAqPY1MWhWKpDhk6zLVmpsJrdAfkK+F2PrRt2PZE4XNi +HzvEvqBTViVsUQn3qqvKv3b9bZvzndu/PWa8DFaqr5hIlTpL36dYUNk4dalb6kMM +Av+Z6+hsTXBbKWWc3apdzK8BMewM69KN6Oqce+Zu9ydmDBpI125C4z/eIT574Q1w ++2OqqGwaVLRcJXrJosmLFqa7LH4XXgVNWG4SHQHuEhANxjJ/GP/89PrNbpHoNkm+ +Gkhpi8KWTRoSsmkXwQqQ1vp5Iki/untp+HDH+no32NgN0nZPV/+Qt+OR0t3vwmC3 +Zzrd/qqc8NSLf3Iizsafl7b4r4qgEKjZ+xjGtrVcUjyJthkqcwEKDwOzEmDyei+B +26Nu/yYwl/WL3YlXtq09s68rxbd2AvCl1iuahhQqcvbjM4xdCUsT37uMdBNSSwID +AQABo4ICEDCCAgwwDwYDVR0TAQH/BAUwAwEB/zAOBgNVHQ8BAf8EBAMCAQYwHQYD +VR0OBBYEFE4L7xqkQFulF2mHMMo0aEPQQa7yMB8GA1UdIwQYMBaAFE4L7xqkQFul +F2mHMMo0aEPQQa7yMIIBWgYDVR0gBIIBUTCCAU0wggFJBgsrBgEEAYG1NwEBATCC +ATgwLgYIKwYBBQUHAgEWImh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL3BvbGljeS5w +ZGYwNAYIKwYBBQUHAgEWKGh0dHA6Ly93d3cuc3RhcnRzc2wuY29tL2ludGVybWVk +aWF0ZS5wZGYwgc8GCCsGAQUFBwICMIHCMCcWIFN0YXJ0IENvbW1lcmNpYWwgKFN0 
+YXJ0Q29tKSBMdGQuMAMCAQEagZZMaW1pdGVkIExpYWJpbGl0eSwgcmVhZCB0aGUg +c2VjdGlvbiAqTGVnYWwgTGltaXRhdGlvbnMqIG9mIHRoZSBTdGFydENvbSBDZXJ0 +aWZpY2F0aW9uIEF1dGhvcml0eSBQb2xpY3kgYXZhaWxhYmxlIGF0IGh0dHA6Ly93 +d3cuc3RhcnRzc2wuY29tL3BvbGljeS5wZGYwEQYJYIZIAYb4QgEBBAQDAgAHMDgG +CWCGSAGG+EIBDQQrFilTdGFydENvbSBGcmVlIFNTTCBDZXJ0aWZpY2F0aW9uIEF1 +dGhvcml0eTANBgkqhkiG9w0BAQsFAAOCAgEAjo/n3JR5fPGFf59Jb2vKXfuM/gTF +wWLRfUKKvFO3lANmMD+x5wqnUCBVJX92ehQN6wQOQOY+2IirByeDqXWmN3PH/UvS +Ta0XQMhGvjt/UfzDtgUx3M2FIk5xt/JxXrAaxrqTi3iSSoX4eA+D/i+tLPfkpLst +0OcNOrg+zvZ49q5HJMqjNTbOx8aHmNrs++myziebiMMEofYLWWivydsQD032ZGNc +pRJvkrKTlMeIFw6Ttn5ii5B/q06f/ON1FE8qMt9bDeD1e5MNq6HPh+GlBEXoPBKl +CcWw0bdT82AUuoVpaiF8H3VhFyAXe2w7QSlc4axa0c2Mm+tgHRns9+Ww2vl5GKVF +P0lDV9LdJNUso/2RjSe15esUBppMeyG7Oq0wBhjA2MFrLH9ZXF2RsXAiV+uKa0hK +1Q8p7MZAwC+ITGgBF3f0JBlPvfrhsiAhS90a2Cl9qrjeVOwhVYBsHvUwyKMQ5bLm +KhQxw4UtjJixhlpPiVktucf3HMiKf8CdBUrmQk9io20ppB+Fq9vlgcitKj1MXVuE +JnHEhV5xJMqlG2zYYdMa4FTbzrqpMrUi9nNBCV24F10OD5mQ1kfabwo6YigUZ4LZ +8dCAWZvLMdibD4x3TrVoivJs9iQOLWxwxXPR3hTQcY+203sC9uO41Alua551hDnm +fyWl8kgAwKQB2j8= +-----END CERTIFICATE----- + +# Issuer: CN=StartCom Certification Authority G2 O=StartCom Ltd. +# Subject: CN=StartCom Certification Authority G2 O=StartCom Ltd. 
+# Label: "StartCom Certification Authority G2" +# Serial: 59 +# MD5 Fingerprint: 78:4b:fb:9e:64:82:0a:d3:b8:4c:62:f3:64:f2:90:64 +# SHA1 Fingerprint: 31:f1:fd:68:22:63:20:ee:c6:3b:3f:9d:ea:4a:3e:53:7c:7c:39:17 +# SHA256 Fingerprint: c7:ba:65:67:de:93:a7:98:ae:1f:aa:79:1e:71:2d:37:8f:ae:1f:93:c4:39:7f:ea:44:1b:b7:cb:e6:fd:59:95 +-----BEGIN CERTIFICATE----- +MIIFYzCCA0ugAwIBAgIBOzANBgkqhkiG9w0BAQsFADBTMQswCQYDVQQGEwJJTDEW +MBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoGA1UEAxMjU3RhcnRDb20gQ2VydGlm +aWNhdGlvbiBBdXRob3JpdHkgRzIwHhcNMTAwMTAxMDEwMDAxWhcNMzkxMjMxMjM1 +OTAxWjBTMQswCQYDVQQGEwJJTDEWMBQGA1UEChMNU3RhcnRDb20gTHRkLjEsMCoG +A1UEAxMjU3RhcnRDb20gQ2VydGlmaWNhdGlvbiBBdXRob3JpdHkgRzIwggIiMA0G +CSqGSIb3DQEBAQUAA4ICDwAwggIKAoICAQC2iTZbB7cgNr2Cu+EWIAOVeq8Oo1XJ +JZlKxdBWQYeQTSFgpBSHO839sj60ZwNq7eEPS8CRhXBF4EKe3ikj1AENoBB5uNsD +vfOpL9HG4A/LnooUCri99lZi8cVytjIl2bLzvWXFDSxu1ZJvGIsAQRSCb0AgJnoo +D/Uefyf3lLE3PbfHkffiAez9lInhzG7TNtYKGXmu1zSCZf98Qru23QumNK9LYP5/ +Q0kGi4xDuFby2X8hQxfqp0iVAXV16iulQ5XqFYSdCI0mblWbq9zSOdIxHWDirMxW +RST1HFSr7obdljKF+ExP6JV2tgXdNiNnvP8V4so75qbsO+wmETRIjfaAKxojAuuK +HDp2KntWFhxyKrOq42ClAJ8Em+JvHhRYW6Vsi1g8w7pOOlz34ZYrPu8HvKTlXcxN +nw3h3Kq74W4a7I/htkxNeXJdFzULHdfBR9qWJODQcqhaX2YtENwvKhOuJv4KHBnM +0D4LnMgJLvlblnpHnOl68wVQdJVznjAJ85eCXuaPOQgeWeU1FEIT/wCc976qUM/i +UUjXuG+v+E5+M5iSFGI6dWPPe/regjupuznixL0sAA7IF6wT700ljtizkC+p2il9 +Ha90OrInwMEePnWjFqmveiJdnxMaz6eg6+OGCtP95paV1yPIN93EfKo2rJgaErHg +TuixO/XWb/Ew1wIDAQABo0IwQDAPBgNVHRMBAf8EBTADAQH/MA4GA1UdDwEB/wQE +AwIBBjAdBgNVHQ4EFgQUS8W0QGutHLOlHGVuRjaJhwUMDrYwDQYJKoZIhvcNAQEL +BQADggIBAHNXPyzVlTJ+N9uWkusZXn5T50HsEbZH77Xe7XRcxfGOSeD8bpkTzZ+K +2s06Ctg6Wgk/XzTQLwPSZh0avZyQN8gMjgdalEVGKua+etqhqaRpEpKwfTbURIfX +UfEpY9Z1zRbkJ4kd+MIySP3bmdCPX1R0zKxnNBFi2QwKN4fRoxdIjtIXHfbX/dtl +6/2o1PXWT6RbdejF0mCy2wl+JYt7ulKSnj7oxXehPOBKc2thz4bcQ///If4jXSRK +9dNtD2IEBVeC2m6kMyV5Sy5UGYvMLD0w6dEG/+gyRr61M3Z3qAFdlsHB1b6uJcDJ +HgoJIIihDsnzb02CVAAgp9KP5DlUFy6NHrgbuxu9mk47EDTcnIhT76IxW1hPkWLI 
+wpqazRVdOKnWvvgTtZ8SafJQYqz7Fzf07rh1Z2AQ+4NQ+US1dZxAF7L+/XldblhY +XzD8AK6vM8EOTmy6p6ahfzLbOOCxchcKK5HsamMm7YnUeMx0HgX4a/6ManY5Ka5l +IxKVCCIcl85bBu4M4ru8H0ST9tg4RQUh7eStqxK2A6RCLi3ECToDZ2mEmuFZkIoo +hdVddLHRDiBYmxOlsGOm7XtH/UVVMKTumtTm4ofvmMkyghEpIrwACjFeLQ/Ajulr +so8uBtjRkcfGEvRM/TAXw8HaOFvjqermobp573PYtlNXLfbQ4ddI +-----END CERTIFICATE----- diff --git a/httplib2/iri2uri.py b/httplib2/iri2uri.py new file mode 100644 index 0000000..d88c91f --- /dev/null +++ b/httplib2/iri2uri.py @@ -0,0 +1,110 @@ +""" +iri2uri + +Converts an IRI to a URI. + +""" +__author__ = "Joe Gregorio (joe@bitworking.org)" +__copyright__ = "Copyright 2006, Joe Gregorio" +__contributors__ = [] +__version__ = "1.0.0" +__license__ = "MIT" +__history__ = """ +""" + +import urlparse + + +# Convert an IRI to a URI following the rules in RFC 3987 +# +# The characters we need to enocde and escape are defined in the spec: +# +# iprivate = %xE000-F8FF / %xF0000-FFFFD / %x100000-10FFFD +# ucschar = %xA0-D7FF / %xF900-FDCF / %xFDF0-FFEF +# / %x10000-1FFFD / %x20000-2FFFD / %x30000-3FFFD +# / %x40000-4FFFD / %x50000-5FFFD / %x60000-6FFFD +# / %x70000-7FFFD / %x80000-8FFFD / %x90000-9FFFD +# / %xA0000-AFFFD / %xB0000-BFFFD / %xC0000-CFFFD +# / %xD0000-DFFFD / %xE1000-EFFFD + +escape_range = [ + (0xA0, 0xD7FF), + (0xE000, 0xF8FF), + (0xF900, 0xFDCF), + (0xFDF0, 0xFFEF), + (0x10000, 0x1FFFD), + (0x20000, 0x2FFFD), + (0x30000, 0x3FFFD), + (0x40000, 0x4FFFD), + (0x50000, 0x5FFFD), + (0x60000, 0x6FFFD), + (0x70000, 0x7FFFD), + (0x80000, 0x8FFFD), + (0x90000, 0x9FFFD), + (0xA0000, 0xAFFFD), + (0xB0000, 0xBFFFD), + (0xC0000, 0xCFFFD), + (0xD0000, 0xDFFFD), + (0xE1000, 0xEFFFD), + (0xF0000, 0xFFFFD), + (0x100000, 0x10FFFD), +] + +def encode(c): + retval = c + i = ord(c) + for low, high in escape_range: + if i < low: + break + if i >= low and i <= high: + retval = "".join(["%%%2X" % ord(o) for o in c.encode('utf-8')]) + break + return retval + + +def iri2uri(uri): + """Convert an IRI to a URI. 
Note that IRIs must be + passed in a unicode strings. That is, do not utf-8 encode + the IRI before passing it into the function.""" + if isinstance(uri ,unicode): + (scheme, authority, path, query, fragment) = urlparse.urlsplit(uri) + authority = authority.encode('idna') + # For each character in 'ucschar' or 'iprivate' + # 1. encode as utf-8 + # 2. then %-encode each octet of that utf-8 + uri = urlparse.urlunsplit((scheme, authority, path, query, fragment)) + uri = "".join([encode(c) for c in uri]) + return uri + +if __name__ == "__main__": + import unittest + + class Test(unittest.TestCase): + + def test_uris(self): + """Test that URIs are invariant under the transformation.""" + invariant = [ + u"ftp://ftp.is.co.za/rfc/rfc1808.txt", + u"http://www.ietf.org/rfc/rfc2396.txt", + u"ldap://[2001:db8::7]/c=GB?objectClass?one", + u"mailto:John.Doe@example.com", + u"news:comp.infosystems.www.servers.unix", + u"tel:+1-816-555-1212", + u"telnet://192.0.2.16:80/", + u"urn:oasis:names:specification:docbook:dtd:xml:4.1.2" ] + for uri in invariant: + self.assertEqual(uri, iri2uri(uri)) + + def test_iri(self): + """ Test that the right type of escaping is done for each part of the URI.""" + self.assertEqual("http://xn--o3h.com/%E2%98%84", iri2uri(u"http://\N{COMET}.com/\N{COMET}")) + self.assertEqual("http://bitworking.org/?fred=%E2%98%84", iri2uri(u"http://bitworking.org/?fred=\N{COMET}")) + self.assertEqual("http://bitworking.org/#%E2%98%84", iri2uri(u"http://bitworking.org/#\N{COMET}")) + self.assertEqual("#%E2%98%84", iri2uri(u"#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}")) + self.assertEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}"))) + self.assertNotEqual("/fred?bar=%E2%98%9A#%E2%98%84", iri2uri(u"/fred?bar=\N{BLACK LEFT POINTING INDEX}#\N{COMET}".encode('utf-8'))) + + unittest.main() + + diff --git a/httplib2/socks.py 
b/httplib2/socks.py new file mode 100644 index 0000000..0991f4c --- /dev/null +++ b/httplib2/socks.py @@ -0,0 +1,438 @@ +"""SocksiPy - Python SOCKS module. +Version 1.00 + +Copyright 2006 Dan-Haim. All rights reserved. + +Redistribution and use in source and binary forms, with or without modification, +are permitted provided that the following conditions are met: +1. Redistributions of source code must retain the above copyright notice, this + list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, + this list of conditions and the following disclaimer in the documentation + and/or other materials provided with the distribution. +3. Neither the name of Dan Haim nor the names of his contributors may be used + to endorse or promote products derived from this software without specific + prior written permission. + +THIS SOFTWARE IS PROVIDED BY DAN HAIM "AS IS" AND ANY EXPRESS OR IMPLIED +WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +EVENT SHALL DAN HAIM OR HIS CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, +INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA +OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF +LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT +OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMANGE. + + +This module provides a standard socket-like interface for Python +for tunneling connections through SOCKS proxies. 
+ +""" + +""" + +Minor modifications made by Christopher Gilbert (http://motomastyle.com/) +for use in PyLoris (http://pyloris.sourceforge.net/) + +Minor modifications made by Mario Vilas (http://breakingcode.wordpress.com/) +mainly to merge bug fixes found in Sourceforge + +""" + +import base64 +import socket +import struct +import sys + +if getattr(socket, 'socket', None) is None: + raise ImportError('socket.socket missing, proxy support unusable') + +PROXY_TYPE_SOCKS4 = 1 +PROXY_TYPE_SOCKS5 = 2 +PROXY_TYPE_HTTP = 3 +PROXY_TYPE_HTTP_NO_TUNNEL = 4 + +_defaultproxy = None +_orgsocket = socket.socket + +class ProxyError(Exception): pass +class GeneralProxyError(ProxyError): pass +class Socks5AuthError(ProxyError): pass +class Socks5Error(ProxyError): pass +class Socks4Error(ProxyError): pass +class HTTPError(ProxyError): pass + +_generalerrors = ("success", + "invalid data", + "not connected", + "not available", + "bad proxy type", + "bad input") + +_socks5errors = ("succeeded", + "general SOCKS server failure", + "connection not allowed by ruleset", + "Network unreachable", + "Host unreachable", + "Connection refused", + "TTL expired", + "Command not supported", + "Address type not supported", + "Unknown error") + +_socks5autherrors = ("succeeded", + "authentication is required", + "all offered authentication methods were rejected", + "unknown username or invalid password", + "unknown error") + +_socks4errors = ("request granted", + "request rejected or failed", + "request rejected because SOCKS server cannot connect to identd on the client", + "request rejected because the client program and identd report different user-ids", + "unknown error") + +def setdefaultproxy(proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): + """setdefaultproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) + Sets a default proxy which all further socksocket objects will use, + unless explicitly changed. 
+ """ + global _defaultproxy + _defaultproxy = (proxytype, addr, port, rdns, username, password) + +def wrapmodule(module): + """wrapmodule(module) + Attempts to replace a module's socket library with a SOCKS socket. Must set + a default proxy using setdefaultproxy(...) first. + This will only work on modules that import socket directly into the namespace; + most of the Python Standard Library falls into this category. + """ + if _defaultproxy != None: + module.socket.socket = socksocket + else: + raise GeneralProxyError((4, "no proxy specified")) + +class socksocket(socket.socket): + """socksocket([family[, type[, proto]]]) -> socket object + Open a SOCKS enabled socket. The parameters are the same as + those of the standard socket init. In order for SOCKS to work, + you must specify family=AF_INET, type=SOCK_STREAM and proto=0. + """ + + def __init__(self, family=socket.AF_INET, type=socket.SOCK_STREAM, proto=0, _sock=None): + _orgsocket.__init__(self, family, type, proto, _sock) + if _defaultproxy != None: + self.__proxy = _defaultproxy + else: + self.__proxy = (None, None, None, None, None, None) + self.__proxysockname = None + self.__proxypeername = None + self.__httptunnel = True + + def __recvall(self, count): + """__recvall(count) -> data + Receive EXACTLY the number of bytes requested from the socket. + Blocks until the required number of bytes have been received. + """ + data = self.recv(count) + while len(data) < count: + d = self.recv(count-len(data)) + if not d: raise GeneralProxyError((0, "connection closed unexpectedly")) + data = data + d + return data + + def sendall(self, content, *args): + """ override socket.socket.sendall method to rewrite the header + for non-tunneling proxies if needed + """ + if not self.__httptunnel: + content = self.__rewriteproxy(content) + return super(socksocket, self).sendall(content, *args) + + def __rewriteproxy(self, header): + """ rewrite HTTP request headers to support non-tunneling proxies + (i.e. 
those which do not support the CONNECT method). + This only works for HTTP (not HTTPS) since HTTPS requires tunneling. + """ + host, endpt = None, None + hdrs = header.split("\r\n") + for hdr in hdrs: + if hdr.lower().startswith("host:"): + host = hdr + elif hdr.lower().startswith("get") or hdr.lower().startswith("post"): + endpt = hdr + if host and endpt: + hdrs.remove(host) + hdrs.remove(endpt) + host = host.split(" ")[1] + endpt = endpt.split(" ") + if (self.__proxy[4] != None and self.__proxy[5] != None): + hdrs.insert(0, self.__getauthheader()) + hdrs.insert(0, "Host: %s" % host) + hdrs.insert(0, "%s http://%s%s %s" % (endpt[0], host, endpt[1], endpt[2])) + return "\r\n".join(hdrs) + + def __getauthheader(self): + auth = self.__proxy[4] + ":" + self.__proxy[5] + return "Proxy-Authorization: Basic " + base64.b64encode(auth) + + def setproxy(self, proxytype=None, addr=None, port=None, rdns=True, username=None, password=None): + """setproxy(proxytype, addr[, port[, rdns[, username[, password]]]]) + Sets the proxy to be used. + proxytype - The type of the proxy to be used. Three types + are supported: PROXY_TYPE_SOCKS4 (including socks4a), + PROXY_TYPE_SOCKS5 and PROXY_TYPE_HTTP + addr - The address of the server (IP or DNS). + port - The port of the server. Defaults to 1080 for SOCKS + servers and 8080 for HTTP proxy servers. + rdns - Should DNS queries be preformed on the remote side + (rather than the local side). The default is True. + Note: This has no effect with SOCKS4 servers. + username - Username to authenticate with to the server. + The default is no authentication. + password - Password to authenticate with to the server. + Only relevant when username is also provided. + """ + self.__proxy = (proxytype, addr, port, rdns, username, password) + + def __negotiatesocks5(self, destaddr, destport): + """__negotiatesocks5(self,destaddr,destport) + Negotiates a connection through a SOCKS5 server. 
+ """ + # First we'll send the authentication packages we support. + if (self.__proxy[4]!=None) and (self.__proxy[5]!=None): + # The username/password details were supplied to the + # setproxy method so we support the USERNAME/PASSWORD + # authentication (in addition to the standard none). + self.sendall(struct.pack('BBBB', 0x05, 0x02, 0x00, 0x02)) + else: + # No username/password were entered, therefore we + # only support connections with no authentication. + self.sendall(struct.pack('BBB', 0x05, 0x01, 0x00)) + # We'll receive the server's response to determine which + # method was selected + chosenauth = self.__recvall(2) + if chosenauth[0:1] != chr(0x05).encode(): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + # Check the chosen authentication method + if chosenauth[1:2] == chr(0x00).encode(): + # No authentication is required + pass + elif chosenauth[1:2] == chr(0x02).encode(): + # Okay, we need to perform a basic username/password + # authentication. + self.sendall(chr(0x01).encode() + chr(len(self.__proxy[4])) + self.__proxy[4] + chr(len(self.__proxy[5])) + self.__proxy[5]) + authstat = self.__recvall(2) + if authstat[0:1] != chr(0x01).encode(): + # Bad response + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + if authstat[1:2] != chr(0x00).encode(): + # Authentication failed + self.close() + raise Socks5AuthError((3, _socks5autherrors[3])) + # Authentication succeeded + else: + # Reaching here is always bad + self.close() + if chosenauth[1] == chr(0xFF).encode(): + raise Socks5AuthError((2, _socks5autherrors[2])) + else: + raise GeneralProxyError((1, _generalerrors[1])) + # Now we can request the actual connection + req = struct.pack('BBB', 0x05, 0x01, 0x00) + # If the given destination address is an IP address, we'll + # use the IPv4 address request even if remote resolving was specified. 
+ try: + ipaddr = socket.inet_aton(destaddr) + req = req + chr(0x01).encode() + ipaddr + except socket.error: + # Well it's not an IP number, so it's probably a DNS name. + if self.__proxy[3]: + # Resolve remotely + ipaddr = None + req = req + chr(0x03).encode() + chr(len(destaddr)).encode() + destaddr + else: + # Resolve locally + ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) + req = req + chr(0x01).encode() + ipaddr + req = req + struct.pack(">H", destport) + self.sendall(req) + # Get the response + resp = self.__recvall(4) + if resp[0:1] != chr(0x05).encode(): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + elif resp[1:2] != chr(0x00).encode(): + # Connection failed + self.close() + if ord(resp[1:2])<=8: + raise Socks5Error((ord(resp[1:2]), _socks5errors[ord(resp[1:2])])) + else: + raise Socks5Error((9, _socks5errors[9])) + # Get the bound address/port + elif resp[3:4] == chr(0x01).encode(): + boundaddr = self.__recvall(4) + elif resp[3:4] == chr(0x03).encode(): + resp = resp + self.recv(1) + boundaddr = self.__recvall(ord(resp[4:5])) + else: + self.close() + raise GeneralProxyError((1,_generalerrors[1])) + boundport = struct.unpack(">H", self.__recvall(2))[0] + self.__proxysockname = (boundaddr, boundport) + if ipaddr != None: + self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) + else: + self.__proxypeername = (destaddr, destport) + + def getproxysockname(self): + """getsockname() -> address info + Returns the bound IP address and port number at the proxy. + """ + return self.__proxysockname + + def getproxypeername(self): + """getproxypeername() -> address info + Returns the IP and port number of the proxy. 
+ """ + return _orgsocket.getpeername(self) + + def getpeername(self): + """getpeername() -> address info + Returns the IP address and port number of the destination + machine (note: getproxypeername returns the proxy) + """ + return self.__proxypeername + + def __negotiatesocks4(self,destaddr,destport): + """__negotiatesocks4(self,destaddr,destport) + Negotiates a connection through a SOCKS4 server. + """ + # Check if the destination address provided is an IP address + rmtrslv = False + try: + ipaddr = socket.inet_aton(destaddr) + except socket.error: + # It's a DNS name. Check where it should be resolved. + if self.__proxy[3]: + ipaddr = struct.pack("BBBB", 0x00, 0x00, 0x00, 0x01) + rmtrslv = True + else: + ipaddr = socket.inet_aton(socket.gethostbyname(destaddr)) + # Construct the request packet + req = struct.pack(">BBH", 0x04, 0x01, destport) + ipaddr + # The username parameter is considered userid for SOCKS4 + if self.__proxy[4] != None: + req = req + self.__proxy[4] + req = req + chr(0x00).encode() + # DNS name if remote resolving is required + # NOTE: This is actually an extension to the SOCKS4 protocol + # called SOCKS4A and may not be supported in all cases. 
+ if rmtrslv: + req = req + destaddr + chr(0x00).encode() + self.sendall(req) + # Get the response from the server + resp = self.__recvall(8) + if resp[0:1] != chr(0x00).encode(): + # Bad data + self.close() + raise GeneralProxyError((1,_generalerrors[1])) + if resp[1:2] != chr(0x5A).encode(): + # Server returned an error + self.close() + if ord(resp[1:2]) in (91, 92, 93): + self.close() + raise Socks4Error((ord(resp[1:2]), _socks4errors[ord(resp[1:2]) - 90])) + else: + raise Socks4Error((94, _socks4errors[4])) + # Get the bound address/port + self.__proxysockname = (socket.inet_ntoa(resp[4:]), struct.unpack(">H", resp[2:4])[0]) + if rmtrslv != None: + self.__proxypeername = (socket.inet_ntoa(ipaddr), destport) + else: + self.__proxypeername = (destaddr, destport) + + def __negotiatehttp(self, destaddr, destport): + """__negotiatehttp(self,destaddr,destport) + Negotiates a connection through an HTTP server. + """ + # If we need to resolve locally, we do this now + if not self.__proxy[3]: + addr = socket.gethostbyname(destaddr) + else: + addr = destaddr + headers = ["CONNECT ", addr, ":", str(destport), " HTTP/1.1\r\n"] + headers += ["Host: ", destaddr, "\r\n"] + if (self.__proxy[4] != None and self.__proxy[5] != None): + headers += [self.__getauthheader(), "\r\n"] + headers.append("\r\n") + self.sendall("".join(headers).encode()) + # We read the response until we get the string "\r\n\r\n" + resp = self.recv(1) + while resp.find("\r\n\r\n".encode()) == -1: + resp = resp + self.recv(1) + # We just need the first line to check if the connection + # was successful + statusline = resp.splitlines()[0].split(" ".encode(), 2) + if statusline[0] not in ("HTTP/1.0".encode(), "HTTP/1.1".encode()): + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + try: + statuscode = int(statusline[1]) + except ValueError: + self.close() + raise GeneralProxyError((1, _generalerrors[1])) + if statuscode != 200: + self.close() + raise HTTPError((statuscode, statusline[2])) + 
self.__proxysockname = ("0.0.0.0", 0) + self.__proxypeername = (addr, destport) + + def connect(self, destpair): + """connect(self, despair) + Connects to the specified destination through a proxy. + destpar - A tuple of the IP/DNS address and the port number. + (identical to socket's connect). + To select the proxy server use setproxy(). + """ + # Do a minimal input check first + if (not type(destpair) in (list,tuple)) or (len(destpair) < 2) or (not isinstance(destpair[0], basestring)) or (type(destpair[1]) != int): + raise GeneralProxyError((5, _generalerrors[5])) + if self.__proxy[0] == PROXY_TYPE_SOCKS5: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 1080 + _orgsocket.connect(self, (self.__proxy[1], portnum)) + self.__negotiatesocks5(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_SOCKS4: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 1080 + _orgsocket.connect(self,(self.__proxy[1], portnum)) + self.__negotiatesocks4(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_HTTP: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 8080 + _orgsocket.connect(self,(self.__proxy[1], portnum)) + self.__negotiatehttp(destpair[0], destpair[1]) + elif self.__proxy[0] == PROXY_TYPE_HTTP_NO_TUNNEL: + if self.__proxy[2] != None: + portnum = self.__proxy[2] + else: + portnum = 8080 + _orgsocket.connect(self,(self.__proxy[1],portnum)) + if destpair[1] == 443: + self.__negotiatehttp(destpair[0],destpair[1]) + else: + self.__httptunnel = False + elif self.__proxy[0] == None: + _orgsocket.connect(self, (destpair[0], destpair[1])) + else: + raise GeneralProxyError((4, _generalerrors[4])) diff --git a/httplib2/test/__init__.py b/httplib2/test/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/httplib2/test/brokensocket/socket.py b/httplib2/test/brokensocket/socket.py new file mode 100644 index 0000000..ff7c0b7 --- /dev/null +++ 
b/httplib2/test/brokensocket/socket.py @@ -0,0 +1 @@ +from realsocket import gaierror, error, getaddrinfo, SOCK_STREAM diff --git a/httplib2/test/functional/test_proxies.py b/httplib2/test/functional/test_proxies.py new file mode 100644 index 0000000..0b7880f --- /dev/null +++ b/httplib2/test/functional/test_proxies.py @@ -0,0 +1,88 @@ +import unittest +import errno +import os +import signal +import subprocess +import tempfile + +import nose + +import httplib2 +from httplib2 import socks +from httplib2.test import miniserver + +tinyproxy_cfg = """ +User "%(user)s" +Port %(port)s +Listen 127.0.0.1 +PidFile "%(pidfile)s" +LogFile "%(logfile)s" +MaxClients 2 +StartServers 1 +LogLevel Info +""" + + +class FunctionalProxyHttpTest(unittest.TestCase): + def setUp(self): + if not socks: + raise nose.SkipTest('socks module unavailable') + if not subprocess: + raise nose.SkipTest('subprocess module unavailable') + + # start a short-lived miniserver so we can get a likely port + # for the proxy + self.httpd, self.proxyport = miniserver.start_server( + miniserver.ThisDirHandler) + self.httpd.shutdown() + self.httpd, self.port = miniserver.start_server( + miniserver.ThisDirHandler) + + self.pidfile = tempfile.mktemp() + self.logfile = tempfile.mktemp() + fd, self.conffile = tempfile.mkstemp() + f = os.fdopen(fd, 'w') + our_cfg = tinyproxy_cfg % {'user': os.getlogin(), + 'pidfile': self.pidfile, + 'port': self.proxyport, + 'logfile': self.logfile} + f.write(our_cfg) + f.close() + try: + # TODO use subprocess.check_call when 2.4 is dropped + ret = subprocess.call(['tinyproxy', '-c', self.conffile]) + self.assertEqual(0, ret) + except OSError, e: + if e.errno == errno.ENOENT: + raise nose.SkipTest('tinyproxy not available') + raise + + def tearDown(self): + self.httpd.shutdown() + try: + pid = int(open(self.pidfile).read()) + os.kill(pid, signal.SIGTERM) + except OSError, e: + if e.errno == errno.ESRCH: + print '\n\n\nTinyProxy Failed to start, log follows:' + print 
open(self.logfile).read() + print 'end tinyproxy log\n\n\n' + raise + map(os.unlink, (self.pidfile, + self.logfile, + self.conffile)) + + def testSimpleProxy(self): + proxy_info = httplib2.ProxyInfo(socks.PROXY_TYPE_HTTP, + 'localhost', self.proxyport) + client = httplib2.Http(proxy_info=proxy_info) + src = 'miniserver.py' + response, body = client.request('http://localhost:%d/%s' % + (self.port, src)) + self.assertEqual(response.status, 200) + self.assertEqual(body, open(os.path.join(miniserver.HERE, src)).read()) + lf = open(self.logfile).read() + expect = ('Established connection to host "127.0.0.1" ' + 'using file descriptor') + self.assertTrue(expect in lf, + 'tinyproxy did not proxy a request for miniserver') diff --git a/httplib2/test/miniserver.py b/httplib2/test/miniserver.py new file mode 100644 index 0000000..e32bf5e --- /dev/null +++ b/httplib2/test/miniserver.py @@ -0,0 +1,100 @@ +import logging +import os +import select +import SimpleHTTPServer +import SocketServer +import threading + +HERE = os.path.dirname(__file__) +logger = logging.getLogger(__name__) + + +class ThisDirHandler(SimpleHTTPServer.SimpleHTTPRequestHandler): + def translate_path(self, path): + path = path.split('?', 1)[0].split('#', 1)[0] + return os.path.join(HERE, *filter(None, path.split('/'))) + + def log_message(self, s, *args): + # output via logging so nose can catch it + logger.info(s, *args) + + +class ShutdownServer(SocketServer.TCPServer): + """Mixin that allows serve_forever to be shut down. + + The methods in this mixin are backported from SocketServer.py in the Python + 2.6.4 standard library. The mixin is unnecessary in 2.6 and later, when + BaseServer supports the shutdown method directly. + """ + + def __init__(self, *args, **kwargs): + SocketServer.TCPServer.__init__(self, *args, **kwargs) + self.__is_shut_down = threading.Event() + self.__serving = False + + def serve_forever(self, poll_interval=0.1): + """Handle one request at a time until shutdown. 
+ + Polls for shutdown every poll_interval seconds. Ignores + self.timeout. If you need to do periodic tasks, do them in + another thread. + """ + self.__serving = True + self.__is_shut_down.clear() + while self.__serving: + r, w, e = select.select([self.socket], [], [], poll_interval) + if r: + self._handle_request_noblock() + self.__is_shut_down.set() + + def shutdown(self): + """Stops the serve_forever loop. + + Blocks until the loop has finished. This must be called while + serve_forever() is running in another thread, or it will deadlock. + """ + self.__serving = False + self.__is_shut_down.wait() + + def handle_request(self): + """Handle one request, possibly blocking. + + Respects self.timeout. + """ + # Support people who used socket.settimeout() to escape + # handle_request before self.timeout was available. + timeout = self.socket.gettimeout() + if timeout is None: + timeout = self.timeout + elif self.timeout is not None: + timeout = min(timeout, self.timeout) + fd_sets = select.select([self], [], [], timeout) + if not fd_sets[0]: + self.handle_timeout() + return + self._handle_request_noblock() + + def _handle_request_noblock(self): + """Handle one request, without blocking. + + I assume that select.select has returned that the socket is + readable before this function was called, so there should be + no risk of blocking in get_request(). 
+ """ + try: + request, client_address = self.get_request() + except socket.error: + return + if self.verify_request(request, client_address): + try: + self.process_request(request, client_address) + except: + self.handle_error(request, client_address) + self.close_request(request) + + +def start_server(handler): + httpd = ShutdownServer(("", 0), handler) + threading.Thread(target=httpd.serve_forever).start() + _, port = httpd.socket.getsockname() + return httpd, port diff --git a/httplib2/test/other_cacerts.txt b/httplib2/test/other_cacerts.txt new file mode 100644 index 0000000..360954a --- /dev/null +++ b/httplib2/test/other_cacerts.txt @@ -0,0 +1,70 @@ +# Certifcate Authority certificates for validating SSL connections. +# +# This file contains PEM format certificates generated from +# http://mxr.mozilla.org/seamonkey/source/security/nss/lib/ckfw/builtins/certdata.txt +# +# ***** BEGIN LICENSE BLOCK ***** +# Version: MPL 1.1/GPL 2.0/LGPL 2.1 +# +# The contents of this file are subject to the Mozilla Public License Version +# 1.1 (the "License"); you may not use this file except in compliance with +# the License. You may obtain a copy of the License at +# http://www.mozilla.org/MPL/ +# +# Software distributed under the License is distributed on an "AS IS" basis, +# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License +# for the specific language governing rights and limitations under the +# License. +# +# The Original Code is the Netscape security libraries. +# +# The Initial Developer of the Original Code is +# Netscape Communications Corporation. +# Portions created by the Initial Developer are Copyright (C) 1994-2000 +# the Initial Developer. All Rights Reserved. 
+# +# Contributor(s): +# +# Alternatively, the contents of this file may be used under the terms of +# either the GNU General Public License Version 2 or later (the "GPL"), or +# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"), +# in which case the provisions of the GPL or the LGPL are applicable instead +# of those above. If you wish to allow use of your version of this file only +# under the terms of either the GPL or the LGPL, and not to allow others to +# use your version of this file under the terms of the MPL, indicate your +# decision by deleting the provisions above and replace them with the notice +# and other provisions required by the GPL or the LGPL. If you do not delete +# the provisions above, a recipient may use your version of this file under +# the terms of any one of the MPL, the GPL or the LGPL. +# +# ***** END LICENSE BLOCK ***** + + +Comodo CA Limited, CN=Trusted Certificate Services +================================================== + +-----BEGIN CERTIFICATE----- +MIIEQzCCAyugAwIBAgIBATANBgkqhkiG9w0BAQUFADB/MQswCQYDVQQGEwJHQjEb +MBkGA1UECAwSR3JlYXRlciBNYW5jaGVzdGVyMRAwDgYDVQQHDAdTYWxmb3JkMRow +GAYDVQQKDBFDb21vZG8gQ0EgTGltaXRlZDElMCMGA1UEAwwcVHJ1c3RlZCBDZXJ0 +aWZpY2F0ZSBTZXJ2aWNlczAeFw0wNDAxMDEwMDAwMDBaFw0yODEyMzEyMzU5NTla +MH8xCzAJBgNVBAYTAkdCMRswGQYDVQQIDBJHcmVhdGVyIE1hbmNoZXN0ZXIxEDAO +BgNVBAcMB1NhbGZvcmQxGjAYBgNVBAoMEUNvbW9kbyBDQSBMaW1pdGVkMSUwIwYD +VQQDDBxUcnVzdGVkIENlcnRpZmljYXRlIFNlcnZpY2VzMIIBIjANBgkqhkiG9w0B +AQEFAAOCAQ8AMIIBCgKCAQEA33FvNlhTWvI2VFeAxHQIIO0Yfyod5jWaHiWsnOWW +fnJSoBVC21ndZHoa0Lh73TkVvFVIxO06AOoxEbrycXQaZ7jPM8yoMa+j49d/vzMt +TGo87IvDktJTdyR0nAducPy9C1t2ul/y/9c3S0pgePfw+spwtOpZqqPOSC+pw7IL +fhdyFgymBwwbOM/JYrc/oJOlh0Hyt3BAd9i+FHzjqMB6juljatEPmsbS9Is6FARW +1O24zG71++IsWL1/T2sr92AkWCTOJu80kTrV44HQsvAEAtdbtz6SrGsSivnkBbA7 +kUlcsutT6vifR4buv5XAwAaf0lteERv0xwQ1KdJVXOTt6wIDAQABo4HJMIHGMB0G +A1UdDgQWBBTFe1i97doladL3WRaoszLAeydb9DAOBgNVHQ8BAf8EBAMCAQYwDwYD 
+VR0TAQH/BAUwAwEB/zCBgwYDVR0fBHwwejA8oDqgOIY2aHR0cDovL2NybC5jb21v +ZG9jYS5jb20vVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMuY3JsMDqgOKA2hjRo +dHRwOi8vY3JsLmNvbW9kby5uZXQvVHJ1c3RlZENlcnRpZmljYXRlU2VydmljZXMu +Y3JsMA0GCSqGSIb3DQEBBQUAA4IBAQDIk4E7ibSvuIQSTI3S8NtwuleGFTQQuS9/ +HrCoiWChisJ3DFBKmwCL2Iv0QeLQg4pKHBQGsKNoBXAxMKdTmw7pSqBYaWcOrp32 +pSxBvzwGa+RZzG0Q8ZZvH9/0BAKkn0U+yNj6NkZEUD+Cl5EfKNsYEYwq5GWDVxIS +jBc/lDb+XbDABHcTuPQV1T84zJQ6VdCsmPW6AF/ghhmBeC8owH7TzEIK9a5QoNE+ +xqFx7D+gIIxmOom0jtTYsU0lR+4viMi14QVFwL4Ucd56/Y57fU0IlqUSc/Atyjcn +dBInTMu2l+nZrghtWjlA3QVHdWpaIbOjGM9O9y5Xt5hwXsjEeLBi +-----END CERTIFICATE----- diff --git a/httplib2/test/smoke_test.py b/httplib2/test/smoke_test.py new file mode 100644 index 0000000..9f1e6f0 --- /dev/null +++ b/httplib2/test/smoke_test.py @@ -0,0 +1,23 @@ +import os +import unittest + +import httplib2 + +from httplib2.test import miniserver + + +class HttpSmokeTest(unittest.TestCase): + def setUp(self): + self.httpd, self.port = miniserver.start_server( + miniserver.ThisDirHandler) + + def tearDown(self): + self.httpd.shutdown() + + def testGetFile(self): + client = httplib2.Http() + src = 'miniserver.py' + response, body = client.request('http://localhost:%d/%s' % + (self.port, src)) + self.assertEqual(response.status, 200) + self.assertEqual(body, open(os.path.join(miniserver.HERE, src)).read()) diff --git a/httplib2/test/test_no_socket.py b/httplib2/test/test_no_socket.py new file mode 100644 index 0000000..66ba056 --- /dev/null +++ b/httplib2/test/test_no_socket.py @@ -0,0 +1,24 @@ +"""Tests for httplib2 when the socket module is missing. + +This helps ensure compatibility with environments such as AppEngine. 
+""" +import os +import sys +import unittest + +import httplib2 + +class MissingSocketTest(unittest.TestCase): + def setUp(self): + self._oldsocks = httplib2.socks + httplib2.socks = None + + def tearDown(self): + httplib2.socks = self._oldsocks + + def testProxyDisabled(self): + proxy_info = httplib2.ProxyInfo('blah', + 'localhost', 0) + client = httplib2.Http(proxy_info=proxy_info) + self.assertRaises(httplib2.ProxiesUnavailableError, + client.request, 'http://localhost:-1/') diff --git a/index.html b/index.html new file mode 100644 index 0000000..1ae91a3 --- /dev/null +++ b/index.html @@ -0,0 +1,53 @@ + + + + GroundsBot + + + + + + + + + + + + + + + + + + + + + +
+ + +

GroundsBot Mowing Selection

+ + + + + + + + +
+ + + +
+ + + + + + + diff --git a/oauth2client/__init__.py b/oauth2client/__init__.py new file mode 100644 index 0000000..b35ee16 --- /dev/null +++ b/oauth2client/__init__.py @@ -0,0 +1,23 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Client library for using OAuth2, especially with Google APIs.""" + +__version__ = '2.2.0' + +GOOGLE_AUTH_URI = 'https://accounts.google.com/o/oauth2/v2/auth' +GOOGLE_DEVICE_URI = 'https://accounts.google.com/o/oauth2/device/code' +GOOGLE_REVOKE_URI = 'https://accounts.google.com/o/oauth2/revoke' +GOOGLE_TOKEN_URI = 'https://www.googleapis.com/oauth2/v4/token' +GOOGLE_TOKEN_INFO_URI = 'https://www.googleapis.com/oauth2/v3/tokeninfo' diff --git a/oauth2client/_helpers.py b/oauth2client/_helpers.py new file mode 100644 index 0000000..39bfeb6 --- /dev/null +++ b/oauth2client/_helpers.py @@ -0,0 +1,103 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +"""Helper functions for commonly used utilities.""" + +import base64 +import json +import six + + +def _parse_pem_key(raw_key_input): + """Identify and extract PEM keys. + + Determines whether the given key is in the format of PEM key, and extracts + the relevant part of the key if it is. + + Args: + raw_key_input: The contents of a private key file (either PEM or + PKCS12). + + Returns: + string, The actual key if the contents are from a PEM file, or + else None. + """ + offset = raw_key_input.find(b'-----BEGIN ') + if offset != -1: + return raw_key_input[offset:] + + +def _json_encode(data): + return json.dumps(data, separators=(',', ':')) + + +def _to_bytes(value, encoding='ascii'): + """Converts a string value to bytes, if necessary. + + Unfortunately, ``six.b`` is insufficient for this task since in + Python2 it does not modify ``unicode`` objects. + + Args: + value: The string/bytes value to be converted. + encoding: The encoding to use to convert unicode to bytes. Defaults + to "ascii", which will not allow any characters from ordinals + larger than 127. Other useful values are "latin-1", which + which will only allows byte ordinals (up to 255) and "utf-8", + which will encode any unicode that needs to be. + + Returns: + The original value converted to bytes (if unicode) or as passed in + if it started out as bytes. + + Raises: + ValueError if the value could not be converted to bytes. + """ + result = (value.encode(encoding) + if isinstance(value, six.text_type) else value) + if isinstance(result, six.binary_type): + return result + else: + raise ValueError('%r could not be converted to bytes' % (value,)) + + +def _from_bytes(value): + """Converts bytes to a string value, if necessary. + + Args: + value: The string/bytes value to be converted. 
+ + Returns: + The original value converted to unicode (if bytes) or as passed in + if it started out as unicode. + + Raises: + ValueError if the value could not be converted to unicode. + """ + result = (value.decode('utf-8') + if isinstance(value, six.binary_type) else value) + if isinstance(result, six.text_type): + return result + else: + raise ValueError('%r could not be converted to unicode' % (value,)) + + +def _urlsafe_b64encode(raw_bytes): + raw_bytes = _to_bytes(raw_bytes, encoding='utf-8') + return base64.urlsafe_b64encode(raw_bytes).rstrip(b'=') + + +def _urlsafe_b64decode(b64string): + # Guard against unicode strings, which base64 can't handle. + b64string = _to_bytes(b64string) + padded = b64string + b'=' * (4 - len(b64string) % 4) + return base64.urlsafe_b64decode(padded) diff --git a/oauth2client/_openssl_crypt.py b/oauth2client/_openssl_crypt.py new file mode 100644 index 0000000..7a76fb7 --- /dev/null +++ b/oauth2client/_openssl_crypt.py @@ -0,0 +1,139 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""OpenSSL Crypto-related routines for oauth2client.""" + +import base64 + +from OpenSSL import crypto + +from oauth2client._helpers import _parse_pem_key +from oauth2client._helpers import _to_bytes + + +class OpenSSLVerifier(object): + """Verifies the signature on a message.""" + + def __init__(self, pubkey): + """Constructor. + + Args: + pubkey: OpenSSL.crypto.PKey, The public key to verify with. 
+ """ + self._pubkey = pubkey + + def verify(self, message, signature): + """Verifies a message against a signature. + + Args: + message: string or bytes, The message to verify. If string, will be + encoded to bytes as utf-8. + signature: string or bytes, The signature on the message. If string, + will be encoded to bytes as utf-8. + + Returns: + True if message was signed by the private key associated with the + public key that this object was constructed with. + """ + message = _to_bytes(message, encoding='utf-8') + signature = _to_bytes(signature, encoding='utf-8') + try: + crypto.verify(self._pubkey, signature, message, 'sha256') + return True + except crypto.Error: + return False + + @staticmethod + def from_string(key_pem, is_x509_cert): + """Construct a Verified instance from a string. + + Args: + key_pem: string, public key in PEM format. + is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it + is expected to be an RSA key in PEM format. + + Returns: + Verifier instance. + + Raises: + OpenSSL.crypto.Error: if the key_pem can't be parsed. + """ + key_pem = _to_bytes(key_pem) + if is_x509_cert: + pubkey = crypto.load_certificate(crypto.FILETYPE_PEM, key_pem) + else: + pubkey = crypto.load_privatekey(crypto.FILETYPE_PEM, key_pem) + return OpenSSLVerifier(pubkey) + + +class OpenSSLSigner(object): + """Signs messages with a private key.""" + + def __init__(self, pkey): + """Constructor. + + Args: + pkey: OpenSSL.crypto.PKey (or equiv), The private key to sign with. + """ + self._key = pkey + + def sign(self, message): + """Signs a message. + + Args: + message: bytes, Message to be signed. + + Returns: + string, The signature of the message for the given key. + """ + message = _to_bytes(message, encoding='utf-8') + return crypto.sign(self._key, message, 'sha256') + + @staticmethod + def from_string(key, password=b'notasecret'): + """Construct a Signer instance from a string. + + Args: + key: string, private key in PKCS12 or PEM format. 
+ password: string, password for the private key file. + + Returns: + Signer instance. + + Raises: + OpenSSL.crypto.Error if the key can't be parsed. + """ + key = _to_bytes(key) + parsed_pem_key = _parse_pem_key(key) + if parsed_pem_key: + pkey = crypto.load_privatekey(crypto.FILETYPE_PEM, parsed_pem_key) + else: + password = _to_bytes(password, encoding='utf-8') + pkey = crypto.load_pkcs12(key, password).get_privatekey() + return OpenSSLSigner(pkey) + + +def pkcs12_key_as_pem(private_key_bytes, private_key_password): + """Convert the contents of a PKCS#12 key to PEM using pyOpenSSL. + + Args: + private_key_bytes: Bytes. PKCS#12 key in DER format. + private_key_password: String. Password for PKCS#12 key. + + Returns: + String. PEM contents of ``private_key_bytes``. + """ + private_key_password = _to_bytes(private_key_password) + pkcs12 = crypto.load_pkcs12(private_key_bytes, private_key_password) + return crypto.dump_privatekey(crypto.FILETYPE_PEM, + pkcs12.get_privatekey()) diff --git a/oauth2client/_pkce.py b/oauth2client/_pkce.py new file mode 100644 index 0000000..e4952d8 --- /dev/null +++ b/oauth2client/_pkce.py @@ -0,0 +1,67 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" +Utility functions for implementing Proof Key for Code Exchange (PKCE) by OAuth +Public Clients + +See RFC7636. 
+""" + +import base64 +import hashlib +import os + + +def code_verifier(n_bytes=64): + """ + Generates a 'code_verifier' as described in section 4.1 of RFC 7636. + + This is a 'high-entropy cryptographic random string' that will be + impractical for an attacker to guess. + + Args: + n_bytes: integer between 31 and 96, inclusive. default: 64 + number of bytes of entropy to include in verifier. + + Returns: + Bytestring, representing urlsafe base64-encoded random data. + """ + verifier = base64.urlsafe_b64encode(os.urandom(n_bytes)).rstrip(b'=') + # https://tools.ietf.org/html/rfc7636#section-4.1 + # minimum length of 43 characters and a maximum length of 128 characters. + if len(verifier) < 43: + raise ValueError("Verifier too short. n_bytes must be > 30.") + elif len(verifier) > 128: + raise ValueError("Verifier too long. n_bytes must be < 97.") + else: + return verifier + + +def code_challenge(verifier): + """ + Creates a 'code_challenge' as described in section 4.2 of RFC 7636 + by taking the sha256 hash of the verifier and then urlsafe + base64-encoding it. + + Args: + verifier: bytestring, representing a code_verifier as generated by + code_verifier(). + + Returns: + Bytestring, representing a urlsafe base64-encoded sha256 hash digest, + without '=' padding. + """ + digest = hashlib.sha256(verifier).digest() + return base64.urlsafe_b64encode(digest).rstrip(b'=') diff --git a/oauth2client/_pure_python_crypt.py b/oauth2client/_pure_python_crypt.py new file mode 100644 index 0000000..af98477 --- /dev/null +++ b/oauth2client/_pure_python_crypt.py @@ -0,0 +1,185 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Pure Python crypto-related routines for oauth2client. + +Uses the ``rsa``, ``pyasn1`` and ``pyasn1_modules`` packages +to parse PEM files storing PKCS#1 or PKCS#8 keys as well as +certificates. +""" + +from pyasn1.codec.der import decoder +from pyasn1_modules import pem +from pyasn1_modules.rfc2459 import Certificate +from pyasn1_modules.rfc5208 import PrivateKeyInfo +import rsa +import six + +from oauth2client._helpers import _from_bytes +from oauth2client._helpers import _to_bytes + + +_PKCS12_ERROR = r"""\ +PKCS12 format is not supported by the RSA library. +Either install PyOpenSSL, or please convert .p12 format +to .pem format: + $ cat key.p12 | \ + > openssl pkcs12 -nodes -nocerts -passin pass:notasecret | \ + > openssl rsa > key.pem +""" + +_POW2 = (128, 64, 32, 16, 8, 4, 2, 1) +_PKCS1_MARKER = ('-----BEGIN RSA PRIVATE KEY-----', + '-----END RSA PRIVATE KEY-----') +_PKCS8_MARKER = ('-----BEGIN PRIVATE KEY-----', + '-----END PRIVATE KEY-----') +_PKCS8_SPEC = PrivateKeyInfo() + + +def _bit_list_to_bytes(bit_list): + """Converts an iterable of 1's and 0's to bytes. + + Combines the list 8 at a time, treating each group of 8 bits + as a single byte. + """ + num_bits = len(bit_list) + byte_vals = bytearray() + for start in six.moves.xrange(0, num_bits, 8): + curr_bits = bit_list[start:start + 8] + char_val = sum(val * digit + for val, digit in zip(_POW2, curr_bits)) + byte_vals.append(char_val) + return bytes(byte_vals) + + +class RsaVerifier(object): + """Verifies the signature on a message. 
+ + Args: + pubkey: rsa.key.PublicKey (or equiv), The public key to verify with. + """ + + def __init__(self, pubkey): + self._pubkey = pubkey + + def verify(self, message, signature): + """Verifies a message against a signature. + + Args: + message: string or bytes, The message to verify. If string, will be + encoded to bytes as utf-8. + signature: string or bytes, The signature on the message. If + string, will be encoded to bytes as utf-8. + + Returns: + True if message was signed by the private key associated with the + public key that this object was constructed with. + """ + message = _to_bytes(message, encoding='utf-8') + try: + return rsa.pkcs1.verify(message, signature, self._pubkey) + except (ValueError, rsa.pkcs1.VerificationError): + return False + + @classmethod + def from_string(cls, key_pem, is_x509_cert): + """Construct an RsaVerifier instance from a string. + + Args: + key_pem: string, public key in PEM format. + is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it + is expected to be an RSA key in PEM format. + + Returns: + RsaVerifier instance. + + Raises: + ValueError: if the key_pem can't be parsed. In either case, error + will begin with 'No PEM start marker'. If + ``is_x509_cert`` is True, will fail to find the + "-----BEGIN CERTIFICATE-----" error, otherwise fails + to find "-----BEGIN RSA PUBLIC KEY-----". + """ + key_pem = _to_bytes(key_pem) + if is_x509_cert: + der = rsa.pem.load_pem(key_pem, 'CERTIFICATE') + asn1_cert, remaining = decoder.decode(der, asn1Spec=Certificate()) + if remaining != b'': + raise ValueError('Unused bytes', remaining) + + cert_info = asn1_cert['tbsCertificate']['subjectPublicKeyInfo'] + key_bytes = _bit_list_to_bytes(cert_info['subjectPublicKey']) + pubkey = rsa.PublicKey.load_pkcs1(key_bytes, 'DER') + else: + pubkey = rsa.PublicKey.load_pkcs1(key_pem, 'PEM') + return cls(pubkey) + + +class RsaSigner(object): + """Signs messages with a private key. 
+ + Args: + pkey: rsa.key.PrivateKey (or equiv), The private key to sign with. + """ + + def __init__(self, pkey): + self._key = pkey + + def sign(self, message): + """Signs a message. + + Args: + message: bytes, Message to be signed. + + Returns: + string, The signature of the message for the given key. + """ + message = _to_bytes(message, encoding='utf-8') + return rsa.pkcs1.sign(message, self._key, 'SHA-256') + + @classmethod + def from_string(cls, key, password='notasecret'): + """Construct an RsaSigner instance from a string. + + Args: + key: string, private key in PEM format. + password: string, password for private key file. Unused for PEM + files. + + Returns: + RsaSigner instance. + + Raises: + ValueError if the key cannot be parsed as PKCS#1 or PKCS#8 in + PEM format. + """ + key = _from_bytes(key) # pem expects str in Py3 + marker_id, key_bytes = pem.readPemBlocksFromFile( + six.StringIO(key), _PKCS1_MARKER, _PKCS8_MARKER) + + if marker_id == 0: + pkey = rsa.key.PrivateKey.load_pkcs1(key_bytes, + format='DER') + elif marker_id == 1: + key_info, remaining = decoder.decode( + key_bytes, asn1Spec=_PKCS8_SPEC) + if remaining != b'': + raise ValueError('Unused bytes', remaining) + pkey_info = key_info.getComponentByName('privateKey') + pkey = rsa.key.PrivateKey.load_pkcs1(pkey_info.asOctets(), + format='DER') + else: + raise ValueError('No key could be detected.') + + return cls(pkey) diff --git a/oauth2client/_pycrypto_crypt.py b/oauth2client/_pycrypto_crypt.py new file mode 100644 index 0000000..5ee7046 --- /dev/null +++ b/oauth2client/_pycrypto_crypt.py @@ -0,0 +1,126 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""pyCrypto Crypto-related routines for oauth2client.""" + +from Crypto.PublicKey import RSA +from Crypto.Hash import SHA256 +from Crypto.Signature import PKCS1_v1_5 +from Crypto.Util.asn1 import DerSequence + +from oauth2client._helpers import _parse_pem_key +from oauth2client._helpers import _to_bytes +from oauth2client._helpers import _urlsafe_b64decode + + +class PyCryptoVerifier(object): + """Verifies the signature on a message.""" + + def __init__(self, pubkey): + """Constructor. + + Args: + pubkey: OpenSSL.crypto.PKey (or equiv), The public key to verify + with. + """ + self._pubkey = pubkey + + def verify(self, message, signature): + """Verifies a message against a signature. + + Args: + message: string or bytes, The message to verify. If string, will be + encoded to bytes as utf-8. + signature: string or bytes, The signature on the message. + + Returns: + True if message was signed by the private key associated with the + public key that this object was constructed with. + """ + message = _to_bytes(message, encoding='utf-8') + return PKCS1_v1_5.new(self._pubkey).verify( + SHA256.new(message), signature) + + @staticmethod + def from_string(key_pem, is_x509_cert): + """Construct a Verified instance from a string. + + Args: + key_pem: string, public key in PEM format. + is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it + is expected to be an RSA key in PEM format. + + Returns: + Verifier instance. 
+ """ + if is_x509_cert: + key_pem = _to_bytes(key_pem) + pemLines = key_pem.replace(b' ', b'').split() + certDer = _urlsafe_b64decode(b''.join(pemLines[1:-1])) + certSeq = DerSequence() + certSeq.decode(certDer) + tbsSeq = DerSequence() + tbsSeq.decode(certSeq[0]) + pubkey = RSA.importKey(tbsSeq[6]) + else: + pubkey = RSA.importKey(key_pem) + return PyCryptoVerifier(pubkey) + + +class PyCryptoSigner(object): + """Signs messages with a private key.""" + + def __init__(self, pkey): + """Constructor. + + Args: + pkey, OpenSSL.crypto.PKey (or equiv), The private key to sign with. + """ + self._key = pkey + + def sign(self, message): + """Signs a message. + + Args: + message: string, Message to be signed. + + Returns: + string, The signature of the message for the given key. + """ + message = _to_bytes(message, encoding='utf-8') + return PKCS1_v1_5.new(self._key).sign(SHA256.new(message)) + + @staticmethod + def from_string(key, password='notasecret'): + """Construct a Signer instance from a string. + + Args: + key: string, private key in PEM format. + password: string, password for private key file. Unused for PEM + files. + + Returns: + Signer instance. + + Raises: + NotImplementedError if the key isn't in PEM format. + """ + parsed_pem_key = _parse_pem_key(_to_bytes(key)) + if parsed_pem_key: + pkey = RSA.importKey(parsed_pem_key) + else: + raise NotImplementedError( + 'No key in PEM format was detected. This implementation ' + 'can only use the PyCrypto library for keys in PEM ' + 'format.') + return PyCryptoSigner(pkey) diff --git a/oauth2client/client.py b/oauth2client/client.py new file mode 100644 index 0000000..a3e58ff --- /dev/null +++ b/oauth2client/client.py @@ -0,0 +1,2220 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""An OAuth 2.0 client. + +Tools for interacting with OAuth 2.0 protected resources. +""" + +import base64 +import collections +import copy +import datetime +import json +import logging +import os +import socket +import sys +import tempfile +import time +import shutil +import six +from six.moves import http_client +from six.moves import urllib + +import httplib2 +from oauth2client import GOOGLE_AUTH_URI +from oauth2client import GOOGLE_DEVICE_URI +from oauth2client import GOOGLE_REVOKE_URI +from oauth2client import GOOGLE_TOKEN_URI +from oauth2client import GOOGLE_TOKEN_INFO_URI +from oauth2client._helpers import _from_bytes +from oauth2client._helpers import _to_bytes +from oauth2client._helpers import _urlsafe_b64decode +from oauth2client import clientsecrets +from oauth2client import util + + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' + +HAS_OPENSSL = False +HAS_CRYPTO = False +try: + from oauth2client import crypt + HAS_CRYPTO = True + HAS_OPENSSL = crypt.OpenSSLVerifier is not None +except ImportError: # pragma: NO COVER + pass + + +logger = logging.getLogger(__name__) + +# Expiry is stored in RFC3339 UTC format +EXPIRY_FORMAT = '%Y-%m-%dT%H:%M:%SZ' + +# Which certs to use to validate id_tokens received. +ID_TOKEN_VERIFICATION_CERTS = 'https://www.googleapis.com/oauth2/v1/certs' +# This symbol previously had a typo in the name; we keep the old name +# around for now, but will remove it in the future. +ID_TOKEN_VERIFICATON_CERTS = ID_TOKEN_VERIFICATION_CERTS + +# Constant to use for the out of band OAuth 2.0 flow. 
+OOB_CALLBACK_URN = 'urn:ietf:wg:oauth:2.0:oob' + +# Google Data client libraries may need to set this to [401, 403]. +REFRESH_STATUS_CODES = (http_client.UNAUTHORIZED,) + +# The value representing user credentials. +AUTHORIZED_USER = 'authorized_user' + +# The value representing service account credentials. +SERVICE_ACCOUNT = 'service_account' + +# The environment variable pointing the file with local +# Application Default Credentials. +GOOGLE_APPLICATION_CREDENTIALS = 'GOOGLE_APPLICATION_CREDENTIALS' +# The ~/.config subdirectory containing gcloud credentials. Intended +# to be swapped out in tests. +_CLOUDSDK_CONFIG_DIRECTORY = 'gcloud' +# The environment variable name which can replace ~/.config if set. +_CLOUDSDK_CONFIG_ENV_VAR = 'CLOUDSDK_CONFIG' + +# The error message we show users when we can't find the Application +# Default Credentials. +ADC_HELP_MSG = ( + 'The Application Default Credentials are not available. They are ' + 'available if running in Google Compute Engine. Otherwise, the ' + 'environment variable ' + + GOOGLE_APPLICATION_CREDENTIALS + + ' must be defined pointing to a file defining the credentials. See ' + 'https://developers.google.com/accounts/docs/' + 'application-default-credentials for more information.') + +_WELL_KNOWN_CREDENTIALS_FILE = 'application_default_credentials.json' + +# The access token along with the seconds in which it expires. +AccessTokenInfo = collections.namedtuple( + 'AccessTokenInfo', ['access_token', 'expires_in']) + +DEFAULT_ENV_NAME = 'UNKNOWN' + +# If set to True _get_environment avoid GCE check (_detect_gce_environment) +NO_GCE_CHECK = os.environ.setdefault('NO_GCE_CHECK', 'False') + +_SERVER_SOFTWARE = 'SERVER_SOFTWARE' +_GCE_METADATA_HOST = '169.254.169.254' +_METADATA_FLAVOR_HEADER = 'Metadata-Flavor' +_DESIRED_METADATA_FLAVOR = 'Google' + +# Expose utcnow() at module level to allow for +# easier testing (by replacing with a stub). 
+_UTCNOW = datetime.datetime.utcnow + + +class SETTINGS(object): + """Settings namespace for globally defined values.""" + env_name = None + + +class Error(Exception): + """Base error for this module.""" + + +class FlowExchangeError(Error): + """Error trying to exchange an authorization grant for an access token.""" + + +class AccessTokenRefreshError(Error): + """Error trying to refresh an expired access token.""" + + +class HttpAccessTokenRefreshError(AccessTokenRefreshError): + """Error (with HTTP status) trying to refresh an expired access token.""" + def __init__(self, *args, **kwargs): + super(HttpAccessTokenRefreshError, self).__init__(*args) + self.status = kwargs.get('status') + + +class TokenRevokeError(Error): + """Error trying to revoke a token.""" + + +class UnknownClientSecretsFlowError(Error): + """The client secrets file called for an unknown type of OAuth 2.0 flow.""" + + +class AccessTokenCredentialsError(Error): + """Having only the access_token means no refresh is possible.""" + + +class VerifyJwtTokenError(Error): + """Could not retrieve certificates for validation.""" + + +class NonAsciiHeaderError(Error): + """Header names and values must be ASCII strings.""" + + +class ApplicationDefaultCredentialsError(Error): + """Error retrieving the Application Default Credentials.""" + + +class OAuth2DeviceCodeError(Error): + """Error trying to retrieve a device code.""" + + +class CryptoUnavailableError(Error, NotImplementedError): + """Raised when a crypto library is required, but none is available.""" + + +class MemoryCache(object): + """httplib2 Cache implementation which only caches locally.""" + + def __init__(self): + self.cache = {} + + def get(self, key): + return self.cache.get(key) + + def set(self, key, value): + self.cache[key] = value + + def delete(self, key): + self.cache.pop(key, None) + + +def _parse_expiry(expiry): + if expiry and isinstance(expiry, datetime.datetime): + return expiry.strftime(EXPIRY_FORMAT) + else: + return None + + 
+class Credentials(object): + """Base class for all Credentials objects. + + Subclasses must define an authorize() method that applies the credentials + to an HTTP transport. + + Subclasses must also specify a classmethod named 'from_json' that takes a + JSON string as input and returns an instantiated Credentials object. + """ + + NON_SERIALIZED_MEMBERS = frozenset(['store']) + + def authorize(self, http): + """Take an httplib2.Http instance (or equivalent) and authorizes it. + + Authorizes it for the set of credentials, usually by replacing + http.request() with a method that adds in the appropriate headers and + then delegates to the original Http.request() method. + + Args: + http: httplib2.Http, an http object to be used to make the refresh + request. + """ + raise NotImplementedError + + def refresh(self, http): + """Forces a refresh of the access_token. + + Args: + http: httplib2.Http, an http object to be used to make the refresh + request. + """ + raise NotImplementedError + + def revoke(self, http): + """Revokes a refresh_token and makes the credentials void. + + Args: + http: httplib2.Http, an http object to be used to make the revoke + request. + """ + raise NotImplementedError + + def apply(self, headers): + """Add the authorization to the headers. + + Args: + headers: dict, the headers to add the Authorization header to. + """ + raise NotImplementedError + + def _to_json(self, strip, to_serialize=None): + """Utility function that creates JSON repr. of a Credentials object. + + Args: + strip: array, An array of names of members to exclude from the + JSON. + to_serialize: dict, (Optional) The properties for this object + that will be serialized. This allows callers to modify + before serializing. + + Returns: + string, a JSON representation of this instance, suitable to pass to + from_json(). 
+ """ + curr_type = self.__class__ + if to_serialize is None: + to_serialize = copy.copy(self.__dict__) + else: + # Assumes it is a str->str dictionary, so we don't deep copy. + to_serialize = copy.copy(to_serialize) + for member in strip: + if member in to_serialize: + del to_serialize[member] + to_serialize['token_expiry'] = _parse_expiry( + to_serialize.get('token_expiry')) + # Add in information we will need later to reconstitute this instance. + to_serialize['_class'] = curr_type.__name__ + to_serialize['_module'] = curr_type.__module__ + for key, val in to_serialize.items(): + if isinstance(val, bytes): + to_serialize[key] = val.decode('utf-8') + if isinstance(val, set): + to_serialize[key] = list(val) + return json.dumps(to_serialize) + + def to_json(self): + """Creating a JSON representation of an instance of Credentials. + + Returns: + string, a JSON representation of this instance, suitable to pass to + from_json(). + """ + return self._to_json(self.NON_SERIALIZED_MEMBERS) + + @classmethod + def new_from_json(cls, json_data): + """Utility class method to instantiate a Credentials subclass from JSON. + + Expects the JSON string to have been produced by to_json(). + + Args: + json_data: string or bytes, JSON from to_json(). + + Returns: + An instance of the subclass of Credentials that was serialized with + to_json(). + """ + json_data_as_unicode = _from_bytes(json_data) + data = json.loads(json_data_as_unicode) + # Find and call the right classmethod from_json() to restore + # the object. 
+ module_name = data['_module'] + try: + module_obj = __import__(module_name) + except ImportError: + # In case there's an object from the old package structure, + # update it + module_name = module_name.replace('.googleapiclient', '') + module_obj = __import__(module_name) + + module_obj = __import__(module_name, + fromlist=module_name.split('.')[:-1]) + kls = getattr(module_obj, data['_class']) + return kls.from_json(json_data_as_unicode) + + @classmethod + def from_json(cls, unused_data): + """Instantiate a Credentials object from a JSON description of it. + + The JSON should have been produced by calling .to_json() on the object. + + Args: + unused_data: dict, A deserialized JSON object. + + Returns: + An instance of a Credentials subclass. + """ + return Credentials() + + +class Flow(object): + """Base class for all Flow objects.""" + pass + + +class Storage(object): + """Base class for all Storage objects. + + Store and retrieve a single credential. This class supports locking + such that multiple processes and threads can operate on a single + store. + """ + def __init__(self, lock=None): + """Create a Storage instance. + + Args: + lock: An optional threading.Lock-like object. Must implement at + least acquire() and release(). Does not need to be re-entrant. + """ + self._lock = lock + + def acquire_lock(self): + """Acquires any lock necessary to access this Storage. + + This lock is not reentrant. + """ + if self._lock is not None: + self._lock.acquire() + + def release_lock(self): + """Release the Storage lock. + + Trying to release a lock that isn't held will result in a + RuntimeError in the case of a threading.Lock or multiprocessing.Lock. + """ + if self._lock is not None: + self._lock.release() + + def locked_get(self): + """Retrieve credential. + + The Storage lock must be held when this is called. + + Returns: + oauth2client.client.Credentials + """ + raise NotImplementedError + + def locked_put(self, credentials): + """Write a credential. 
+ + The Storage lock must be held when this is called. + + Args: + credentials: Credentials, the credentials to store. + """ + raise NotImplementedError + + def locked_delete(self): + """Delete a credential. + + The Storage lock must be held when this is called. + """ + raise NotImplementedError + + def get(self): + """Retrieve credential. + + The Storage lock must *not* be held when this is called. + + Returns: + oauth2client.client.Credentials + """ + self.acquire_lock() + try: + return self.locked_get() + finally: + self.release_lock() + + def put(self, credentials): + """Write a credential. + + The Storage lock must be held when this is called. + + Args: + credentials: Credentials, the credentials to store. + """ + self.acquire_lock() + try: + self.locked_put(credentials) + finally: + self.release_lock() + + def delete(self): + """Delete credential. + + Frees any resources associated with storing the credential. + The Storage lock must *not* be held when this is called. + + Returns: + None + """ + self.acquire_lock() + try: + return self.locked_delete() + finally: + self.release_lock() + + +def clean_headers(headers): + """Forces header keys and values to be strings, i.e not unicode. + + The httplib module just concats the header keys and values in a way that + may make the message header a unicode string, which, if it then tries to + contatenate to a binary request body may result in a unicode decode error. + + Args: + headers: dict, A dictionary of headers. + + Returns: + The same dictionary but with all the keys converted to strings. + """ + clean = {} + try: + for k, v in six.iteritems(headers): + if not isinstance(k, six.binary_type): + k = str(k) + if not isinstance(v, six.binary_type): + v = str(v) + clean[_to_bytes(k)] = _to_bytes(v) + except UnicodeEncodeError: + raise NonAsciiHeaderError(k, ': ', v) + return clean + + +def _update_query_params(uri, params): + """Updates a URI with new query parameters. 
+ + Args: + uri: string, A valid URI, with potential existing query parameters. + params: dict, A dictionary of query parameters. + + Returns: + The same URI but with the new query parameters added. + """ + parts = urllib.parse.urlparse(uri) + query_params = dict(urllib.parse.parse_qsl(parts.query)) + query_params.update(params) + new_parts = parts._replace(query=urllib.parse.urlencode(query_params)) + return urllib.parse.urlunparse(new_parts) + + +def _initialize_headers(headers): + """Creates a copy of the headers.""" + if headers is None: + headers = {} + else: + headers = dict(headers) + return headers + + +def _apply_user_agent(headers, user_agent): + """Adds a user-agent to the headers.""" + if user_agent is not None: + if 'user-agent' in headers: + headers['user-agent'] = (user_agent + ' ' + headers['user-agent']) + else: + headers['user-agent'] = user_agent + + return headers + + +class OAuth2Credentials(Credentials): + """Credentials object for OAuth 2.0. + + Credentials can be applied to an httplib2.Http object using the authorize() + method, which then adds the OAuth 2.0 access token to each request. + + OAuth2Credentials objects may be safely pickled and unpickled. + """ + + @util.positional(8) + def __init__(self, access_token, client_id, client_secret, refresh_token, + token_expiry, token_uri, user_agent, revoke_uri=None, + id_token=None, token_response=None, scopes=None, + token_info_uri=None): + """Create an instance of OAuth2Credentials. + + This constructor is not usually called by the user, instead + OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow. + + Args: + access_token: string, access token. + client_id: string, client identifier. + client_secret: string, client secret. + refresh_token: string, refresh token. + token_expiry: datetime, when the access_token expires. + token_uri: string, URI of token endpoint. + user_agent: string, The HTTP User-Agent to provide for this + application. 
+ revoke_uri: string, URI for revoke endpoint. Defaults to None; a + token can't be revoked if this is None. + id_token: object, The identity of the resource owner. + token_response: dict, the decoded response to the token request. + None if a token hasn't been requested yet. Stored + because some providers (e.g. wordpress.com) include + extra fields that clients may want. + scopes: list, authorized scopes for these credentials. + token_info_uri: string, the URI for the token info endpoint. Defaults + to None; scopes can not be refreshed if this is None. + + Notes: + store: callable, A callable that when passed a Credential + will store the credential back to where it came from. + This is needed to store the latest access_token if it + has expired and been refreshed. + """ + self.access_token = access_token + self.client_id = client_id + self.client_secret = client_secret + self.refresh_token = refresh_token + self.store = None + self.token_expiry = token_expiry + self.token_uri = token_uri + self.user_agent = user_agent + self.revoke_uri = revoke_uri + self.id_token = id_token + self.token_response = token_response + self.scopes = set(util.string_to_scopes(scopes or [])) + self.token_info_uri = token_info_uri + + # True if the credentials have been revoked or expired and can't be + # refreshed. + self.invalid = False + + def authorize(self, http): + """Authorize an httplib2.Http instance with these credentials. + + The modified http.request method will add authentication headers to + each request and will refresh access_tokens when a 401 is received on a + request. In addition the http.request method has a credentials + property, http.request.credentials, which is the Credentials object + that authorized it. + + Args: + http: An instance of ``httplib2.Http`` or something that acts + like it. + + Returns: + A modified instance of http that was passed in. 
+ + Example:: + + h = httplib2.Http() + h = credentials.authorize(h) + + You can't create a new OAuth subclass of httplib2.Authentication + because it never gets passed the absolute URI, which is needed for + signing. So instead we have to overload 'request' with a closure + that adds in the Authorization header and then calls the original + version of 'request()'. + """ + request_orig = http.request + + # The closure that will replace 'httplib2.Http.request'. + def new_request(uri, method='GET', body=None, headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, + connection_type=None): + if not self.access_token: + logger.info('Attempting refresh to obtain ' + 'initial access_token') + self._refresh(request_orig) + + # Clone and modify the request headers to add the appropriate + # Authorization header. + headers = _initialize_headers(headers) + self.apply(headers) + _apply_user_agent(headers, self.user_agent) + + body_stream_position = None + if all(getattr(body, stream_prop, None) for stream_prop in + ('read', 'seek', 'tell')): + body_stream_position = body.tell() + + resp, content = request_orig(uri, method, body, + clean_headers(headers), + redirections, connection_type) + + # A stored token may expire between the time it is retrieved and + # the time the request is made, so we may need to try twice. + max_refresh_attempts = 2 + for refresh_attempt in range(max_refresh_attempts): + if resp.status not in REFRESH_STATUS_CODES: + break + logger.info('Refreshing due to a %s (attempt %s/%s)', + resp.status, refresh_attempt + 1, + max_refresh_attempts) + self._refresh(request_orig) + self.apply(headers) + if body_stream_position is not None: + body.seek(body_stream_position) + + resp, content = request_orig(uri, method, body, + clean_headers(headers), + redirections, connection_type) + + return (resp, content) + + # Replace the request method with our own closure. + http.request = new_request + + # Set credentials as a property of the request method. 
+ setattr(http.request, 'credentials', self) + + return http + + def refresh(self, http): + """Forces a refresh of the access_token. + + Args: + http: httplib2.Http, an http object to be used to make the refresh + request. + """ + self._refresh(http.request) + + def revoke(self, http): + """Revokes a refresh_token and makes the credentials void. + + Args: + http: httplib2.Http, an http object to be used to make the revoke + request. + """ + self._revoke(http.request) + + def apply(self, headers): + """Add the authorization to the headers. + + Args: + headers: dict, the headers to add the Authorization header to. + """ + headers['Authorization'] = 'Bearer ' + self.access_token + + def has_scopes(self, scopes): + """Verify that the credentials are authorized for the given scopes. + + Returns True if the credentials authorized scopes contain all of the + scopes given. + + Args: + scopes: list or string, the scopes to check. + + Notes: + There are cases where the credentials are unaware of which scopes + are authorized. Notably, credentials obtained and stored before + this code was added will not have scopes, AccessTokenCredentials do + not have scopes. In both cases, you can use refresh_scopes() to + obtain the canonical set of scopes. + """ + scopes = util.string_to_scopes(scopes) + return set(scopes).issubset(self.scopes) + + def retrieve_scopes(self, http): + """Retrieves the canonical list of scopes for this access token. + + Gets the scopes from the OAuth2 provider. + + Args: + http: httplib2.Http, an http object to be used to make the refresh + request. + + Returns: + A set of strings containing the canonical list of scopes. + """ + self._retrieve_scopes(http.request) + return self.scopes + + @classmethod + def from_json(cls, json_data): + """Instantiate a Credentials object from a JSON description of it. + + The JSON should have been produced by calling .to_json() on the object. + + Args: + json_data: string or bytes, JSON to deserialize. 
+ + Returns: + An instance of a Credentials subclass. + """ + data = json.loads(_from_bytes(json_data)) + if (data.get('token_expiry') and + not isinstance(data['token_expiry'], datetime.datetime)): + try: + data['token_expiry'] = datetime.datetime.strptime( + data['token_expiry'], EXPIRY_FORMAT) + except ValueError: + data['token_expiry'] = None + retval = cls( + data['access_token'], + data['client_id'], + data['client_secret'], + data['refresh_token'], + data['token_expiry'], + data['token_uri'], + data['user_agent'], + revoke_uri=data.get('revoke_uri', None), + id_token=data.get('id_token', None), + token_response=data.get('token_response', None), + scopes=data.get('scopes', None), + token_info_uri=data.get('token_info_uri', None)) + retval.invalid = data['invalid'] + return retval + + @property + def access_token_expired(self): + """True if the credential is expired or invalid. + + If the token_expiry isn't set, we assume the token doesn't expire. + """ + if self.invalid: + return True + + if not self.token_expiry: + return False + + now = _UTCNOW() + if now >= self.token_expiry: + logger.info('access_token is expired. Now: %s, token_expiry: %s', + now, self.token_expiry) + return True + return False + + def get_access_token(self, http=None): + """Return the access token and its expiration information. + + If the token does not exist, get one. + If the token expired, refresh it. + """ + if not self.access_token or self.access_token_expired: + if not http: + http = httplib2.Http() + self.refresh(http) + return AccessTokenInfo(access_token=self.access_token, + expires_in=self._expires_in()) + + def set_store(self, store): + """Set the Storage for the credential. + + Args: + store: Storage, an implementation of Storage object. + This is needed to store the latest access_token if it + has expired and been refreshed. This implementation uses + locking to check for updates before updating the + access_token. 
+ """ + self.store = store + + def _expires_in(self): + """Return the number of seconds until this token expires. + + If token_expiry is in the past, this method will return 0, meaning the + token has already expired. + + If token_expiry is None, this method will return None. Note that + returning 0 in such a case would not be fair: the token may still be + valid; we just don't know anything about it. + """ + if self.token_expiry: + now = _UTCNOW() + if self.token_expiry > now: + time_delta = self.token_expiry - now + # TODO(orestica): return time_delta.total_seconds() + # once dropping support for Python 2.6 + return time_delta.days * 86400 + time_delta.seconds + else: + return 0 + + def _updateFromCredential(self, other): + """Update this Credential from another instance.""" + self.__dict__.update(other.__getstate__()) + + def __getstate__(self): + """Trim the state down to something that can be pickled.""" + d = copy.copy(self.__dict__) + del d['store'] + return d + + def __setstate__(self, state): + """Reconstitute the state of the object from being pickled.""" + self.__dict__.update(state) + self.store = None + + def _generate_refresh_request_body(self): + """Generate the body that will be used in the refresh request.""" + body = urllib.parse.urlencode({ + 'grant_type': 'refresh_token', + 'client_id': self.client_id, + 'client_secret': self.client_secret, + 'refresh_token': self.refresh_token, + }) + return body + + def _generate_refresh_request_headers(self): + """Generate the headers that will be used in the refresh request.""" + headers = { + 'content-type': 'application/x-www-form-urlencoded', + } + + if self.user_agent is not None: + headers['user-agent'] = self.user_agent + + return headers + + def _refresh(self, http_request): + """Refreshes the access_token. + + This method first checks by reading the Storage object if available. + If a refresh is still needed, it holds the Storage lock until the + refresh is completed. 
+ + Args: + http_request: callable, a callable that matches the method + signature of httplib2.Http.request, used to make the + refresh request. + + Raises: + HttpAccessTokenRefreshError: When the refresh fails. + """ + if not self.store: + self._do_refresh_request(http_request) + else: + self.store.acquire_lock() + try: + new_cred = self.store.locked_get() + + if (new_cred and not new_cred.invalid and + new_cred.access_token != self.access_token and + not new_cred.access_token_expired): + logger.info('Updated access_token read from Storage') + self._updateFromCredential(new_cred) + else: + self._do_refresh_request(http_request) + finally: + self.store.release_lock() + + def _do_refresh_request(self, http_request): + """Refresh the access_token using the refresh_token. + + Args: + http_request: callable, a callable that matches the method + signature of httplib2.Http.request, used to make the + refresh request. + + Raises: + HttpAccessTokenRefreshError: When the refresh fails. + """ + body = self._generate_refresh_request_body() + headers = self._generate_refresh_request_headers() + + logger.info('Refreshing access_token') + resp, content = http_request( + self.token_uri, method='POST', body=body, headers=headers) + content = _from_bytes(content) + if resp.status == http_client.OK: + d = json.loads(content) + self.token_response = d + self.access_token = d['access_token'] + self.refresh_token = d.get('refresh_token', self.refresh_token) + if 'expires_in' in d: + delta = datetime.timedelta(seconds=int(d['expires_in'])) + self.token_expiry = delta + _UTCNOW() + else: + self.token_expiry = None + if 'id_token' in d: + self.id_token = _extract_id_token(d['id_token']) + else: + self.id_token = None + # On temporary refresh errors, the user does not actually have to + # re-authorize, so we unflag here. 
+ self.invalid = False + if self.store: + self.store.locked_put(self) + else: + # An {'error':...} response body means the token is expired or + # revoked, so we flag the credentials as such. + logger.info('Failed to retrieve access token: %s', content) + error_msg = 'Invalid response %s.' % (resp['status'],) + try: + d = json.loads(content) + if 'error' in d: + error_msg = d['error'] + if 'error_description' in d: + error_msg += ': ' + d['error_description'] + self.invalid = True + if self.store is not None: + self.store.locked_put(self) + except (TypeError, ValueError): + pass + raise HttpAccessTokenRefreshError(error_msg, status=resp.status) + + def _revoke(self, http_request): + """Revokes this credential and deletes the stored copy (if it exists). + + Args: + http_request: callable, a callable that matches the method + signature of httplib2.Http.request, used to make the + revoke request. + """ + self._do_revoke(http_request, self.refresh_token or self.access_token) + + def _do_revoke(self, http_request, token): + """Revokes this credential and deletes the stored copy (if it exists). + + Args: + http_request: callable, a callable that matches the method + signature of httplib2.Http.request, used to make the + refresh request. + token: A string used as the token to be revoked. Can be either an + access_token or refresh_token. + + Raises: + TokenRevokeError: If the revoke request does not return with a + 200 OK. + """ + logger.info('Revoking token') + query_params = {'token': token} + token_revoke_uri = _update_query_params(self.revoke_uri, query_params) + resp, content = http_request(token_revoke_uri) + if resp.status == http_client.OK: + self.invalid = True + else: + error_msg = 'Invalid response %s.' 
% resp.status
+            try:
+                d = json.loads(_from_bytes(content))
+                if 'error' in d:
+                    error_msg = d['error']
+            except (TypeError, ValueError):
+                pass
+            raise TokenRevokeError(error_msg)
+
+        if self.store:
+            self.store.delete()
+
+    def _retrieve_scopes(self, http_request):
+        """Retrieves the list of authorized scopes from the OAuth2 provider.
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          revoke request.
+        """
+        self._do_retrieve_scopes(http_request, self.access_token)
+
+    def _do_retrieve_scopes(self, http_request, token):
+        """Retrieves the list of authorized scopes from the OAuth2 provider.
+
+        Args:
+            http_request: callable, a callable that matches the method
+                          signature of httplib2.Http.request, used to make the
+                          refresh request.
+            token: A string used as the token to identify the credentials to
+                   the provider.
+
+        Raises:
+            Error: When refresh fails, indicating that the access token is
+                   invalid.
+        """
+        logger.info('Refreshing scopes')
+        query_params = {'access_token': token, 'fields': 'scope'}
+        token_info_uri = _update_query_params(self.token_info_uri,
+                                              query_params)
+        resp, content = http_request(token_info_uri)
+        content = _from_bytes(content)
+        if resp.status == http_client.OK:
+            d = json.loads(content)
+            self.scopes = set(util.string_to_scopes(d.get('scope', '')))
+        else:
+            error_msg = 'Invalid response %s.' % (resp.status,)
+            try:
+                d = json.loads(content)
+                if 'error_description' in d:
+                    error_msg = d['error_description']
+            except (TypeError, ValueError):
+                pass
+            raise Error(error_msg)
+
+
+class AccessTokenCredentials(OAuth2Credentials):
+    """Credentials object for OAuth 2.0.
+
+    Credentials can be applied to an httplib2.Http object using the
+    authorize() method, which then signs each request from that object
+    with the OAuth 2.0 access token. 
This set of credentials is for the + use case where you have acquired an OAuth 2.0 access_token from + another place such as a JavaScript client or another web + application, and wish to use it from Python. Because only the + access_token is present it can not be refreshed and will in time + expire. + + AccessTokenCredentials objects may be safely pickled and unpickled. + + Usage:: + + credentials = AccessTokenCredentials('', + 'my-user-agent/1.0') + http = httplib2.Http() + http = credentials.authorize(http) + + Raises: + AccessTokenCredentialsExpired: raised when the access_token expires or + is revoked. + """ + + def __init__(self, access_token, user_agent, revoke_uri=None): + """Create an instance of OAuth2Credentials + + This is one of the few types if Credentials that you should contrust, + Credentials objects are usually instantiated by a Flow. + + Args: + access_token: string, access token. + user_agent: string, The HTTP User-Agent to provide for this + application. + revoke_uri: string, URI for revoke endpoint. Defaults to None; a + token can't be revoked if this is None. + """ + super(AccessTokenCredentials, self).__init__( + access_token, + None, + None, + None, + None, + None, + user_agent, + revoke_uri=revoke_uri) + + @classmethod + def from_json(cls, json_data): + data = json.loads(_from_bytes(json_data)) + retval = AccessTokenCredentials( + data['access_token'], + data['user_agent']) + return retval + + def _refresh(self, http_request): + raise AccessTokenCredentialsError( + 'The access_token is expired or invalid and can\'t be refreshed.') + + def _revoke(self, http_request): + """Revokes the access_token and deletes the store if available. + + Args: + http_request: callable, a callable that matches the method + signature of httplib2.Http.request, used to make the + revoke request. + """ + self._do_revoke(http_request, self.access_token) + + +def _detect_gce_environment(): + """Determine if the current environment is Compute Engine. 
+ + Returns: + Boolean indicating whether or not the current environment is Google + Compute Engine. + """ + # NOTE: The explicit ``timeout`` is a workaround. The underlying + # issue is that resolving an unknown host on some networks will take + # 20-30 seconds; making this timeout short fixes the issue, but + # could lead to false negatives in the event that we are on GCE, but + # the metadata resolution was particularly slow. The latter case is + # "unlikely". + connection = six.moves.http_client.HTTPConnection( + _GCE_METADATA_HOST, timeout=1) + + try: + headers = {_METADATA_FLAVOR_HEADER: _DESIRED_METADATA_FLAVOR} + connection.request('GET', '/', headers=headers) + response = connection.getresponse() + if response.status == http_client.OK: + return (response.getheader(_METADATA_FLAVOR_HEADER) == + _DESIRED_METADATA_FLAVOR) + except socket.error: # socket.timeout or socket.error(64, 'Host is down') + logger.info('Timeout attempting to reach GCE metadata service.') + return False + finally: + connection.close() + + +def _in_gae_environment(): + """Detects if the code is running in the App Engine environment. + + Returns: + True if running in the GAE environment, False otherwise. + """ + if SETTINGS.env_name is not None: + return SETTINGS.env_name in ('GAE_PRODUCTION', 'GAE_LOCAL') + + try: + import google.appengine # noqa: unused import + except ImportError: + pass + else: + server_software = os.environ.get(_SERVER_SOFTWARE, '') + if server_software.startswith('Google App Engine/'): + SETTINGS.env_name = 'GAE_PRODUCTION' + return True + elif server_software.startswith('Development/'): + SETTINGS.env_name = 'GAE_LOCAL' + return True + + return False + + +def _in_gce_environment(): + """Detect if the code is running in the Compute Engine environment. + + Returns: + True if running in the GCE environment, False otherwise. 
+ """ + if SETTINGS.env_name is not None: + return SETTINGS.env_name == 'GCE_PRODUCTION' + + if NO_GCE_CHECK != 'True' and _detect_gce_environment(): + SETTINGS.env_name = 'GCE_PRODUCTION' + return True + return False + + +class GoogleCredentials(OAuth2Credentials): + """Application Default Credentials for use in calling Google APIs. + + The Application Default Credentials are being constructed as a function of + the environment where the code is being run. + More details can be found on this page: + https://developers.google.com/accounts/docs/application-default-credentials + + Here is an example of how to use the Application Default Credentials for a + service that requires authentication:: + + from googleapiclient.discovery import build + from oauth2client.client import GoogleCredentials + + credentials = GoogleCredentials.get_application_default() + service = build('compute', 'v1', credentials=credentials) + + PROJECT = 'bamboo-machine-422' + ZONE = 'us-central1-a' + request = service.instances().list(project=PROJECT, zone=ZONE) + response = request.execute() + + print(response) + """ + + NON_SERIALIZED_MEMBERS = ( + frozenset(['_private_key']) | + OAuth2Credentials.NON_SERIALIZED_MEMBERS) + """Members that aren't serialized when object is converted to JSON.""" + + def __init__(self, access_token, client_id, client_secret, refresh_token, + token_expiry, token_uri, user_agent, + revoke_uri=GOOGLE_REVOKE_URI): + """Create an instance of GoogleCredentials. + + This constructor is not usually called by the user, instead + GoogleCredentials objects are instantiated by + GoogleCredentials.from_stream() or + GoogleCredentials.get_application_default(). + + Args: + access_token: string, access token. + client_id: string, client identifier. + client_secret: string, client secret. + refresh_token: string, refresh token. + token_expiry: datetime, when the access_token expires. + token_uri: string, URI of token endpoint. 
+ user_agent: string, The HTTP User-Agent to provide for this + application. + revoke_uri: string, URI for revoke endpoint. Defaults to + GOOGLE_REVOKE_URI; a token can't be revoked if this + is None. + """ + super(GoogleCredentials, self).__init__( + access_token, client_id, client_secret, refresh_token, + token_expiry, token_uri, user_agent, revoke_uri=revoke_uri) + + def create_scoped_required(self): + """Whether this Credentials object is scopeless. + + create_scoped(scopes) method needs to be called in order to create + a Credentials object for API calls. + """ + return False + + def create_scoped(self, scopes): + """Create a Credentials object for the given scopes. + + The Credentials type is preserved. + """ + return self + + @classmethod + def from_json(cls, json_data): + # TODO(issue 388): eliminate the circularity that is the reason for + # this non-top-level import. + from oauth2client.service_account import ServiceAccountCredentials + from oauth2client.service_account import _JWTAccessCredentials + data = json.loads(_from_bytes(json_data)) + + # We handle service_account.ServiceAccountCredentials since it is a + # possible return type of GoogleCredentials.get_application_default() + if (data['_module'] == 'oauth2client.service_account' and + data['_class'] == 'ServiceAccountCredentials'): + return ServiceAccountCredentials.from_json(data) + elif (data['_module'] == 'oauth2client.service_account' and + data['_class'] == '_JWTAccessCredentials'): + return _JWTAccessCredentials.from_json(data) + + + token_expiry = _parse_expiry(data.get('token_expiry')) + google_credentials = cls( + data['access_token'], + data['client_id'], + data['client_secret'], + data['refresh_token'], + token_expiry, + data['token_uri'], + data['user_agent'], + revoke_uri=data.get('revoke_uri', None)) + google_credentials.invalid = data['invalid'] + return google_credentials + + @property + def serialization_data(self): + """Get the fields and values identifying the current 
credentials."""
+        return {
+            'type': 'authorized_user',
+            'client_id': self.client_id,
+            'client_secret': self.client_secret,
+            'refresh_token': self.refresh_token
+        }
+
+    @staticmethod
+    def _implicit_credentials_from_gae():
+        """Attempts to get implicit credentials in Google App Engine env.
+
+        If the current environment is not detected as App Engine, returns None,
+        indicating no Google App Engine credentials can be detected from the
+        current environment.
+
+        Returns:
+            None, if not in GAE, else an appengine.AppAssertionCredentials
+            object.
+        """
+        if not _in_gae_environment():
+            return None
+
+        return _get_application_default_credential_GAE()
+
+    @staticmethod
+    def _implicit_credentials_from_gce():
+        """Attempts to get implicit credentials in Google Compute Engine env.
+
+        If the current environment is not detected as Compute Engine, returns
+        None, indicating no Google Compute Engine credentials can be detected
+        from the current environment.
+
+        Returns:
+            None, if not in GCE, else a gce.AppAssertionCredentials object.
+        """
+        if not _in_gce_environment():
+            return None
+
+        return _get_application_default_credential_GCE()
+
+    @staticmethod
+    def _implicit_credentials_from_files():
+        """Attempts to get implicit credentials from local credential files.
+
+        First checks if the environment variable GOOGLE_APPLICATION_CREDENTIALS
+        is set with a filename and then falls back to a configuration file (the
+        "well known" file) associated with the 'gcloud' command line tool.
+
+        Returns:
+            Credentials object associated with the
+            GOOGLE_APPLICATION_CREDENTIALS file or the "well known" file if
+            either exist. If neither file is defined, returns None, indicating
+            no credentials from a file can be detected from the current
+            environment.
+ """ + credentials_filename = _get_environment_variable_file() + if not credentials_filename: + credentials_filename = _get_well_known_file() + if os.path.isfile(credentials_filename): + extra_help = (' (produced automatically when running' + ' "gcloud auth login" command)') + else: + credentials_filename = None + else: + extra_help = (' (pointed to by ' + GOOGLE_APPLICATION_CREDENTIALS + + ' environment variable)') + + if not credentials_filename: + return + + # If we can read the credentials from a file, we don't need to know + # what environment we are in. + SETTINGS.env_name = DEFAULT_ENV_NAME + + try: + return _get_application_default_credential_from_file( + credentials_filename) + except (ApplicationDefaultCredentialsError, ValueError) as error: + _raise_exception_for_reading_json(credentials_filename, + extra_help, error) + + @classmethod + def _get_implicit_credentials(cls): + """Gets credentials implicitly from the environment. + + Checks environment in order of precedence: + - Google App Engine (production and testing) + - Environment variable GOOGLE_APPLICATION_CREDENTIALS pointing to + a file with stored credentials information. + - Stored "well known" file associated with `gcloud` command line tool. + - Google Compute Engine production environment. + + Raises: + ApplicationDefaultCredentialsError: raised when the credentials + fail to be retrieved. + """ + # Environ checks (in order). + environ_checkers = [ + cls._implicit_credentials_from_gae, + cls._implicit_credentials_from_files, + cls._implicit_credentials_from_gce, + ] + + for checker in environ_checkers: + credentials = checker() + if credentials is not None: + return credentials + + # If no credentials, fail. + raise ApplicationDefaultCredentialsError(ADC_HELP_MSG) + + @staticmethod + def get_application_default(): + """Get the Application Default Credentials for the current environment. + + Raises: + ApplicationDefaultCredentialsError: raised when the credentials + fail to be retrieved. 
+ """ + return GoogleCredentials._get_implicit_credentials() + + @staticmethod + def from_stream(credential_filename): + """Create a Credentials object by reading information from a file. + + It returns an object of type GoogleCredentials. + + Args: + credential_filename: the path to the file from where the + credentials are to be read + + Raises: + ApplicationDefaultCredentialsError: raised when the credentials + fail to be retrieved. + """ + if credential_filename and os.path.isfile(credential_filename): + try: + return _get_application_default_credential_from_file( + credential_filename) + except (ApplicationDefaultCredentialsError, ValueError) as error: + extra_help = (' (provided as parameter to the ' + 'from_stream() method)') + _raise_exception_for_reading_json(credential_filename, + extra_help, + error) + else: + raise ApplicationDefaultCredentialsError( + 'The parameter passed to the from_stream() ' + 'method should point to a file.') + + +def _save_private_file(filename, json_contents): + """Saves a file with read-write permissions on for the owner. + + Args: + filename: String. Absolute path to file. + json_contents: JSON serializable object to be saved. + """ + temp_filename = tempfile.mktemp() + file_desc = os.open(temp_filename, os.O_WRONLY | os.O_CREAT, 0o600) + with os.fdopen(file_desc, 'w') as file_handle: + json.dump(json_contents, file_handle, sort_keys=True, + indent=2, separators=(',', ': ')) + shutil.move(temp_filename, filename) + + +def save_to_well_known_file(credentials, well_known_file=None): + """Save the provided GoogleCredentials to the well known file. 
+ + Args: + credentials: the credentials to be saved to the well known file; + it should be an instance of GoogleCredentials + well_known_file: the name of the file where the credentials are to be + saved; this parameter is supposed to be used for + testing only + """ + # TODO(orestica): move this method to tools.py + # once the argparse import gets fixed (it is not present in Python 2.6) + + if well_known_file is None: + well_known_file = _get_well_known_file() + + config_dir = os.path.dirname(well_known_file) + if not os.path.isdir(config_dir): + raise OSError('Config directory does not exist: %s' % config_dir) + + credentials_data = credentials.serialization_data + _save_private_file(well_known_file, credentials_data) + + +def _get_environment_variable_file(): + application_default_credential_filename = ( + os.environ.get(GOOGLE_APPLICATION_CREDENTIALS, + None)) + + if application_default_credential_filename: + if os.path.isfile(application_default_credential_filename): + return application_default_credential_filename + else: + raise ApplicationDefaultCredentialsError( + 'File ' + application_default_credential_filename + + ' (pointed by ' + + GOOGLE_APPLICATION_CREDENTIALS + + ' environment variable) does not exist!') + + +def _get_well_known_file(): + """Get the well known file produced by command 'gcloud auth login'.""" + # TODO(orestica): Revisit this method once gcloud provides a better way + # of pinpointing the exact location of the file. + default_config_dir = os.getenv(_CLOUDSDK_CONFIG_ENV_VAR) + if default_config_dir is None: + if os.name == 'nt': + try: + default_config_dir = os.path.join(os.environ['APPDATA'], + _CLOUDSDK_CONFIG_DIRECTORY) + except KeyError: + # This should never happen unless someone is really + # messing with things. 
+ drive = os.environ.get('SystemDrive', 'C:') + default_config_dir = os.path.join(drive, '\\', + _CLOUDSDK_CONFIG_DIRECTORY) + else: + default_config_dir = os.path.join(os.path.expanduser('~'), + '.config', + _CLOUDSDK_CONFIG_DIRECTORY) + + return os.path.join(default_config_dir, _WELL_KNOWN_CREDENTIALS_FILE) + + +def _get_application_default_credential_from_file(filename): + """Build the Application Default Credentials from file.""" + # read the credentials from the file + with open(filename) as file_obj: + client_credentials = json.load(file_obj) + + credentials_type = client_credentials.get('type') + if credentials_type == AUTHORIZED_USER: + required_fields = set(['client_id', 'client_secret', 'refresh_token']) + elif credentials_type == SERVICE_ACCOUNT: + required_fields = set(['client_id', 'client_email', 'private_key_id', + 'private_key']) + else: + raise ApplicationDefaultCredentialsError( + "'type' field should be defined (and have one of the '" + + AUTHORIZED_USER + "' or '" + SERVICE_ACCOUNT + "' values)") + + missing_fields = required_fields.difference(client_credentials.keys()) + + if missing_fields: + _raise_exception_for_missing_fields(missing_fields) + + if client_credentials['type'] == AUTHORIZED_USER: + return GoogleCredentials( + access_token=None, + client_id=client_credentials['client_id'], + client_secret=client_credentials['client_secret'], + refresh_token=client_credentials['refresh_token'], + token_expiry=None, + token_uri=GOOGLE_TOKEN_URI, + user_agent='Python client library') + else: # client_credentials['type'] == SERVICE_ACCOUNT + from oauth2client.service_account import _JWTAccessCredentials + return _JWTAccessCredentials.from_json_keyfile_dict( + client_credentials) + + +def _raise_exception_for_missing_fields(missing_fields): + raise ApplicationDefaultCredentialsError( + 'The following field(s) must be defined: ' + ', '.join(missing_fields)) + + +def _raise_exception_for_reading_json(credential_file, + extra_help, + error): + raise 
ApplicationDefaultCredentialsError( + 'An error was encountered while reading json file: ' + + credential_file + extra_help + ': ' + str(error)) + + +def _get_application_default_credential_GAE(): + from oauth2client.contrib.appengine import AppAssertionCredentials + + return AppAssertionCredentials([]) + + +def _get_application_default_credential_GCE(): + from oauth2client.contrib.gce import AppAssertionCredentials + + return AppAssertionCredentials() + + +class AssertionCredentials(GoogleCredentials): + """Abstract Credentials object used for OAuth 2.0 assertion grants. + + This credential does not require a flow to instantiate because it + represents a two legged flow, and therefore has all of the required + information to generate and refresh its own access tokens. It must + be subclassed to generate the appropriate assertion string. + + AssertionCredentials objects may be safely pickled and unpickled. + """ + + @util.positional(2) + def __init__(self, assertion_type, user_agent=None, + token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI, + **unused_kwargs): + """Constructor for AssertionFlowCredentials. + + Args: + assertion_type: string, assertion type that will be declared to the + auth server + user_agent: string, The HTTP User-Agent to provide for this + application. + token_uri: string, URI for token endpoint. For convenience defaults + to Google's endpoints but any OAuth 2.0 provider can be + used. + revoke_uri: string, URI for revoke endpoint. 
+ """ + super(AssertionCredentials, self).__init__( + None, + None, + None, + None, + None, + token_uri, + user_agent, + revoke_uri=revoke_uri) + self.assertion_type = assertion_type + + def _generate_refresh_request_body(self): + assertion = self._generate_assertion() + + body = urllib.parse.urlencode({ + 'assertion': assertion, + 'grant_type': 'urn:ietf:params:oauth:grant-type:jwt-bearer', + }) + + return body + + def _generate_assertion(self): + """Generate assertion string to be used in the access token request.""" + raise NotImplementedError + + def _revoke(self, http_request): + """Revokes the access_token and deletes the store if available. + + Args: + http_request: callable, a callable that matches the method + signature of httplib2.Http.request, used to make the + revoke request. + """ + self._do_revoke(http_request, self.access_token) + + def sign_blob(self, blob): + """Cryptographically sign a blob (of bytes). + + Args: + blob: bytes, Message to be signed. + + Returns: + tuple, A pair of the private key ID used to sign the blob and + the signed contents. + """ + raise NotImplementedError('This method is abstract.') + + +def _require_crypto_or_die(): + """Ensure we have a crypto library, or throw CryptoUnavailableError. + + The oauth2client.crypt module requires either PyCrypto or PyOpenSSL + to be available in order to function, but these are optional + dependencies. + """ + if not HAS_CRYPTO: + raise CryptoUnavailableError('No crypto library available') + + +# Only used in verify_id_token(), which is always calling to the same URI +# for the certs. +_cached_http = httplib2.Http(MemoryCache()) + + +@util.positional(2) +def verify_id_token(id_token, audience, http=None, + cert_uri=ID_TOKEN_VERIFICATION_CERTS): + """Verifies a signed JWT id_token. + + This function requires PyOpenSSL and because of that it does not work on + App Engine. + + Args: + id_token: string, A Signed JWT. + audience: string, The audience 'aud' that the token should be for. 
+ http: httplib2.Http, instance to use to make the HTTP request. Callers + should supply an instance that has caching enabled. + cert_uri: string, URI of the certificates in JSON format to + verify the JWT against. + + Returns: + The deserialized JSON in the JWT. + + Raises: + oauth2client.crypt.AppIdentityError: if the JWT fails to verify. + CryptoUnavailableError: if no crypto library is available. + """ + _require_crypto_or_die() + if http is None: + http = _cached_http + + resp, content = http.request(cert_uri) + if resp.status == http_client.OK: + certs = json.loads(_from_bytes(content)) + return crypt.verify_signed_jwt_with_certs(id_token, certs, audience) + else: + raise VerifyJwtTokenError('Status code: %d' % resp.status) + + +def _extract_id_token(id_token): + """Extract the JSON payload from a JWT. + + Does the extraction w/o checking the signature. + + Args: + id_token: string or bytestring, OAuth 2.0 id_token. + + Returns: + object, The deserialized JSON payload. + """ + if type(id_token) == bytes: + segments = id_token.split(b'.') + else: + segments = id_token.split(u'.') + + if len(segments) != 3: + raise VerifyJwtTokenError( + 'Wrong number of segments in token: %s' % id_token) + + return json.loads(_from_bytes(_urlsafe_b64decode(segments[1]))) + + +def _parse_exchange_token_response(content): + """Parses response of an exchange token request. + + Most providers return JSON but some (e.g. Facebook) return a + url-encoded string. + + Args: + content: The body of a response + + Returns: + Content as a dictionary object. Note that the dict could be empty, + i.e. {}. That basically indicates a failure. 
+ """ + resp = {} + content = _from_bytes(content) + try: + resp = json.loads(content) + except Exception: + # different JSON libs raise different exceptions, + # so we just do a catch-all here + resp = dict(urllib.parse.parse_qsl(content)) + + # some providers respond with 'expires', others with 'expires_in' + if resp and 'expires' in resp: + resp['expires_in'] = resp.pop('expires') + + return resp + + +@util.positional(4) +def credentials_from_code(client_id, client_secret, scope, code, + redirect_uri='postmessage', http=None, + user_agent=None, token_uri=GOOGLE_TOKEN_URI, + auth_uri=GOOGLE_AUTH_URI, + revoke_uri=GOOGLE_REVOKE_URI, + device_uri=GOOGLE_DEVICE_URI, + token_info_uri=GOOGLE_TOKEN_INFO_URI): + """Exchanges an authorization code for an OAuth2Credentials object. + + Args: + client_id: string, client identifier. + client_secret: string, client secret. + scope: string or iterable of strings, scope(s) to request. + code: string, An authorization code, most likely passed down from + the client + redirect_uri: string, this is generally set to 'postmessage' to match + the redirect_uri that the client specified + http: httplib2.Http, optional http instance to use to do the fetch + token_uri: string, URI for token endpoint. For convenience defaults + to Google's endpoints but any OAuth 2.0 provider can be + used. + auth_uri: string, URI for authorization endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 provider + can be used. + revoke_uri: string, URI for revoke endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 provider + can be used. + device_uri: string, URI for device authorization endpoint. For + convenience defaults to Google's endpoints but any OAuth + 2.0 provider can be used. + + Returns: + An OAuth2Credentials object. 
+
+ Raises:
+ FlowExchangeError if the authorization code cannot be exchanged for an
+ access token
+ """
+ flow = OAuth2WebServerFlow(client_id, client_secret, scope,
+ redirect_uri=redirect_uri,
+ user_agent=user_agent, auth_uri=auth_uri,
+ token_uri=token_uri, revoke_uri=revoke_uri,
+ device_uri=device_uri,
+ token_info_uri=token_info_uri)
+
+ credentials = flow.step2_exchange(code, http=http)
+ return credentials
+
+
+@util.positional(3)
+def credentials_from_clientsecrets_and_code(filename, scope, code,
+ message=None,
+ redirect_uri='postmessage',
+ http=None,
+ cache=None,
+ device_uri=None):
+ """Returns OAuth2Credentials from a clientsecrets file and an auth code.
+
+ Will create the right kind of Flow based on the contents of the
+ clientsecrets file or will raise InvalidClientSecretsError for unknown
+ types of Flows.
+
+ Args:
+ filename: string, File name of clientsecrets.
+ scope: string or iterable of strings, scope(s) to request.
+ code: string, An authorization code, most likely passed down from
+ the client
+ message: string, A friendly string to display to the user if the
+ clientsecrets file is missing or invalid. If message is
+ provided then sys.exit will be called in the case of an error.
+ If message is not provided then
+ clientsecrets.InvalidClientSecretsError will be raised.
+ redirect_uri: string, this is generally set to 'postmessage' to match
+ the redirect_uri that the client specified
+ http: httplib2.Http, optional http instance to use to do the fetch
+ cache: An optional cache service client that implements get() and set()
+ methods. See clientsecrets.loadfile() for details.
+ device_uri: string, OAuth 2.0 device authorization endpoint
+
+ Returns:
+ An OAuth2Credentials object.
+
+ Raises:
+ FlowExchangeError: if the authorization code cannot be exchanged for an
+ access token
+ UnknownClientSecretsFlowError: if the file describes an unknown kind
+ of Flow. 
+ clientsecrets.InvalidClientSecretsError: if the clientsecrets file is
+ invalid.
+ """
+ flow = flow_from_clientsecrets(filename, scope, message=message,
+ cache=cache, redirect_uri=redirect_uri,
+ device_uri=device_uri)
+ credentials = flow.step2_exchange(code, http=http)
+ return credentials
+
+
+class DeviceFlowInfo(collections.namedtuple('DeviceFlowInfo', (
+ 'device_code', 'user_code', 'interval', 'verification_url',
+ 'user_code_expiry'))):
+ """Intermediate information for the OAuth2 for devices flow."""
+
+ @classmethod
+ def FromResponse(cls, response):
+ """Create a DeviceFlowInfo from a server response.
+
+ The response should be a dict containing entries as described here:
+
+ http://tools.ietf.org/html/draft-ietf-oauth-v2-05#section-3.7.1
+ """
+ # device_code, user_code, and verification_url are required.
+ kwargs = {
+ 'device_code': response['device_code'],
+ 'user_code': response['user_code'],
+ }
+ # The response may list the verification address as either
+ # verification_url or verification_uri, so we check for both.
+ verification_url = response.get(
+ 'verification_url', response.get('verification_uri'))
+ if verification_url is None:
+ raise OAuth2DeviceCodeError(
+ 'No verification_url provided in server response')
+ kwargs['verification_url'] = verification_url
+ # expires_in and interval are optional.
+ kwargs.update({
+ 'interval': response.get('interval'),
+ 'user_code_expiry': None,
+ })
+ if 'expires_in' in response:
+ kwargs['user_code_expiry'] = (
+ _UTCNOW() +
+ datetime.timedelta(seconds=int(response['expires_in'])))
+ return cls(**kwargs)
+
+
+class OAuth2WebServerFlow(Flow):
+ """Does the Web Server Flow for OAuth 2.0.
+
+ OAuth2WebServerFlow objects may be safely pickled and unpickled. 
+ """ + + @util.positional(4) + def __init__(self, client_id, + client_secret=None, + scope=None, + redirect_uri=None, + user_agent=None, + auth_uri=GOOGLE_AUTH_URI, + token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI, + login_hint=None, + device_uri=GOOGLE_DEVICE_URI, + token_info_uri=GOOGLE_TOKEN_INFO_URI, + authorization_header=None, + **kwargs): + """Constructor for OAuth2WebServerFlow. + + The kwargs argument is used to set extra query parameters on the + auth_uri. For example, the access_type and approval_prompt + query parameters can be set via kwargs. + + Args: + client_id: string, client identifier. + client_secret: string client secret. + scope: string or iterable of strings, scope(s) of the credentials + being requested. + redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' + for a non-web-based application, or a URI that + handles the callback from the authorization server. + user_agent: string, HTTP User-Agent to provide for this + application. + auth_uri: string, URI for authorization endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 provider + can be used. + token_uri: string, URI for token endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 + provider can be used. + revoke_uri: string, URI for revoke endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 + provider can be used. + login_hint: string, Either an email address or domain. Passing this + hint will either pre-fill the email box on the sign-in + form or select the proper multi-login session, thereby + simplifying the login flow. + device_uri: string, URI for device authorization endpoint. For + convenience defaults to Google's endpoints but any + OAuth 2.0 provider can be used. + authorization_header: string, For use with OAuth 2.0 providers that + require a client to authenticate using a + header value instead of passing client_secret + in the POST body. 
+ **kwargs: dict, The keyword arguments are all optional and required + parameters for the OAuth calls. + """ + # scope is a required argument, but to preserve backwards-compatibility + # we don't want to rearrange the positional arguments + if scope is None: + raise TypeError("The value of scope must not be None") + self.client_id = client_id + self.client_secret = client_secret + self.scope = util.scopes_to_string(scope) + self.redirect_uri = redirect_uri + self.login_hint = login_hint + self.user_agent = user_agent + self.auth_uri = auth_uri + self.token_uri = token_uri + self.revoke_uri = revoke_uri + self.device_uri = device_uri + self.token_info_uri = token_info_uri + self.authorization_header = authorization_header + self.params = { + 'access_type': 'offline', + 'response_type': 'code', + } + self.params.update(kwargs) + + @util.positional(1) + def step1_get_authorize_url(self, redirect_uri=None, state=None): + """Returns a URI to redirect to the provider. + + Args: + redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' + for a non-web-based application, or a URI that + handles the callback from the authorization server. + This parameter is deprecated, please move to passing + the redirect_uri in via the constructor. + state: string, Opaque state string which is passed through the + OAuth2 flow and returned to the client as a query parameter + in the callback. + + Returns: + A URI as a string to redirect the user to begin the authorization + flow. + """ + if redirect_uri is not None: + logger.warning(( + 'The redirect_uri parameter for ' + 'OAuth2WebServerFlow.step1_get_authorize_url is deprecated. 
' + 'Please move to passing the redirect_uri in via the ' + 'constructor.')) + self.redirect_uri = redirect_uri + + if self.redirect_uri is None: + raise ValueError('The value of redirect_uri must not be None.') + + query_params = { + 'client_id': self.client_id, + 'redirect_uri': self.redirect_uri, + 'scope': self.scope, + } + if state is not None: + query_params['state'] = state + if self.login_hint is not None: + query_params['login_hint'] = self.login_hint + query_params.update(self.params) + return _update_query_params(self.auth_uri, query_params) + + @util.positional(1) + def step1_get_device_and_user_codes(self, http=None): + """Returns a user code and the verification URL where to enter it + + Returns: + A user code as a string for the user to authorize the application + An URL as a string where the user has to enter the code + """ + if self.device_uri is None: + raise ValueError('The value of device_uri must not be None.') + + body = urllib.parse.urlencode({ + 'client_id': self.client_id, + 'scope': self.scope, + }) + headers = { + 'content-type': 'application/x-www-form-urlencoded', + } + + if self.user_agent is not None: + headers['user-agent'] = self.user_agent + + if http is None: + http = httplib2.Http() + + resp, content = http.request(self.device_uri, method='POST', body=body, + headers=headers) + content = _from_bytes(content) + if resp.status == http_client.OK: + try: + flow_info = json.loads(content) + except ValueError as exc: + raise OAuth2DeviceCodeError( + 'Could not parse server response as JSON: "%s", ' + 'error: "%s"' % (content, exc)) + return DeviceFlowInfo.FromResponse(flow_info) + else: + error_msg = 'Invalid response %s.' % (resp.status,) + try: + error_dict = json.loads(content) + if 'error' in error_dict: + error_msg += ' Error: %s' % (error_dict['error'],) + except ValueError: + # Couldn't decode a JSON response, stick with the + # default message. 
+ pass + raise OAuth2DeviceCodeError(error_msg) + + @util.positional(2) + def step2_exchange(self, code=None, http=None, device_flow_info=None): + """Exchanges a code for OAuth2Credentials. + + Args: + code: string, a dict-like object, or None. For a non-device + flow, this is either the response code as a string, or a + dictionary of query parameters to the redirect_uri. For a + device flow, this should be None. + http: httplib2.Http, optional http instance to use when fetching + credentials. + device_flow_info: DeviceFlowInfo, return value from step1 in the + case of a device flow. + + Returns: + An OAuth2Credentials object that can be used to authorize requests. + + Raises: + FlowExchangeError: if a problem occurred exchanging the code for a + refresh_token. + ValueError: if code and device_flow_info are both provided or both + missing. + """ + if code is None and device_flow_info is None: + raise ValueError('No code or device_flow_info provided.') + if code is not None and device_flow_info is not None: + raise ValueError('Cannot provide both code and device_flow_info.') + + if code is None: + code = device_flow_info.device_code + elif not isinstance(code, (six.string_types, six.binary_type)): + if 'code' not in code: + raise FlowExchangeError(code.get( + 'error', 'No code was supplied in the query parameters.')) + code = code['code'] + + post_data = { + 'client_id': self.client_id, + 'code': code, + 'scope': self.scope, + } + if self.client_secret is not None: + post_data['client_secret'] = self.client_secret + if device_flow_info is not None: + post_data['grant_type'] = 'http://oauth.net/grant_type/device/1.0' + else: + post_data['grant_type'] = 'authorization_code' + post_data['redirect_uri'] = self.redirect_uri + body = urllib.parse.urlencode(post_data) + headers = { + 'content-type': 'application/x-www-form-urlencoded', + } + if self.authorization_header is not None: + headers['Authorization'] = self.authorization_header + if self.user_agent is not None: + 
headers['user-agent'] = self.user_agent + + if http is None: + http = httplib2.Http() + + resp, content = http.request(self.token_uri, method='POST', body=body, + headers=headers) + d = _parse_exchange_token_response(content) + if resp.status == http_client.OK and 'access_token' in d: + access_token = d['access_token'] + refresh_token = d.get('refresh_token', None) + if not refresh_token: + logger.info( + 'Received token response with no refresh_token. Consider ' + "reauthenticating with approval_prompt='force'.") + token_expiry = None + if 'expires_in' in d: + delta = datetime.timedelta(seconds=int(d['expires_in'])) + token_expiry = delta + _UTCNOW() + + extracted_id_token = None + if 'id_token' in d: + extracted_id_token = _extract_id_token(d['id_token']) + + logger.info('Successfully retrieved access token') + return OAuth2Credentials( + access_token, self.client_id, self.client_secret, + refresh_token, token_expiry, self.token_uri, self.user_agent, + revoke_uri=self.revoke_uri, id_token=extracted_id_token, + token_response=d, scopes=self.scope, + token_info_uri=self.token_info_uri) + else: + logger.info('Failed to retrieve access token: %s', content) + if 'error' in d: + # you never know what those providers got to say + error_msg = (str(d['error']) + + str(d.get('error_description', ''))) + else: + error_msg = 'Invalid response: %s.' % str(resp.status) + raise FlowExchangeError(error_msg) + + +@util.positional(2) +def flow_from_clientsecrets(filename, scope, redirect_uri=None, + message=None, cache=None, login_hint=None, + device_uri=None): + """Create a Flow from a clientsecrets file. + + Will create the right kind of Flow based on the contents of the + clientsecrets file or will raise InvalidClientSecretsError for unknown + types of Flows. + + Args: + filename: string, File name of client secrets. + scope: string or iterable of strings, scope(s) to request. 
+
+ redirect_uri: string, Either the string 'urn:ietf:wg:oauth:2.0:oob' for
+ a non-web-based application, or a URI that handles the
+ callback from the authorization server.
+ message: string, A friendly string to display to the user if the
+ clientsecrets file is missing or invalid. If message is
+ provided then sys.exit will be called in the case of an error.
+ If message is not provided then
+ clientsecrets.InvalidClientSecretsError will be raised.
+ cache: An optional cache service client that implements get() and set()
+ methods. See clientsecrets.loadfile() for details.
+ login_hint: string, Either an email address or domain. Passing this
+ hint will either pre-fill the email box on the sign-in form
+ or select the proper multi-login session, thereby
+ simplifying the login flow.
+ device_uri: string, URI for device authorization endpoint. For
+ convenience defaults to Google's endpoints but any
+ OAuth 2.0 provider can be used.
+
+ Returns:
+ A Flow object.
+
+ Raises:
+ UnknownClientSecretsFlowError: if the file describes an unknown kind of
+ Flow.
+ clientsecrets.InvalidClientSecretsError: if the clientsecrets file is
+ invalid. 
+ """ + try: + client_type, client_info = clientsecrets.loadfile(filename, + cache=cache) + if client_type in (clientsecrets.TYPE_WEB, + clientsecrets.TYPE_INSTALLED): + constructor_kwargs = { + 'redirect_uri': redirect_uri, + 'auth_uri': client_info['auth_uri'], + 'token_uri': client_info['token_uri'], + 'login_hint': login_hint, + } + revoke_uri = client_info.get('revoke_uri') + if revoke_uri is not None: + constructor_kwargs['revoke_uri'] = revoke_uri + if device_uri is not None: + constructor_kwargs['device_uri'] = device_uri + return OAuth2WebServerFlow( + client_info['client_id'], client_info['client_secret'], + scope, **constructor_kwargs) + + except clientsecrets.InvalidClientSecretsError: + if message: + sys.exit(message) + else: + raise + else: + raise UnknownClientSecretsFlowError( + 'This OAuth 2.0 flow is unsupported: %r' % (client_type,)) diff --git a/oauth2client/clientsecrets.py b/oauth2client/clientsecrets.py new file mode 100644 index 0000000..4a47d0d --- /dev/null +++ b/oauth2client/clientsecrets.py @@ -0,0 +1,174 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for reading OAuth 2.0 client secret files. + +A client_secrets.json file contains all the information needed to interact with +an OAuth 2.0 protected service. +""" + +import json +import six + + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' + +# Properties that make a client_secrets.json file valid. 
+TYPE_WEB = 'web' +TYPE_INSTALLED = 'installed' + +VALID_CLIENT = { + TYPE_WEB: { + 'required': [ + 'client_id', + 'client_secret', + 'redirect_uris', + 'auth_uri', + 'token_uri', + ], + 'string': [ + 'client_id', + 'client_secret', + ], + }, + TYPE_INSTALLED: { + 'required': [ + 'client_id', + 'client_secret', + 'redirect_uris', + 'auth_uri', + 'token_uri', + ], + 'string': [ + 'client_id', + 'client_secret', + ], + }, +} + + +class Error(Exception): + """Base error for this module.""" + + +class InvalidClientSecretsError(Error): + """Format of ClientSecrets file is invalid.""" + + +def _validate_clientsecrets(clientsecrets_dict): + """Validate parsed client secrets from a file. + + Args: + clientsecrets_dict: dict, a dictionary holding the client secrets. + + Returns: + tuple, a string of the client type and the information parsed + from the file. + """ + _INVALID_FILE_FORMAT_MSG = ( + 'Invalid file format. See ' + 'https://developers.google.com/api-client-library/' + 'python/guide/aaa_client_secrets') + + if clientsecrets_dict is None: + raise InvalidClientSecretsError(_INVALID_FILE_FORMAT_MSG) + try: + (client_type, client_info), = clientsecrets_dict.items() + except (ValueError, AttributeError): + raise InvalidClientSecretsError( + _INVALID_FILE_FORMAT_MSG + ' ' + 'Expected a JSON object with a single property for a "web" or ' + '"installed" application') + + if client_type not in VALID_CLIENT: + raise InvalidClientSecretsError( + 'Unknown client type: %s.' % (client_type,)) + + for prop_name in VALID_CLIENT[client_type]['required']: + if prop_name not in client_info: + raise InvalidClientSecretsError( + 'Missing property "%s" in a client type of "%s".' % + (prop_name, client_type)) + for prop_name in VALID_CLIENT[client_type]['string']: + if client_info[prop_name].startswith('[['): + raise InvalidClientSecretsError( + 'Property "%s" is not configured.' 
% prop_name) + return client_type, client_info + + +def load(fp): + obj = json.load(fp) + return _validate_clientsecrets(obj) + + +def loads(s): + obj = json.loads(s) + return _validate_clientsecrets(obj) + + +def _loadfile(filename): + try: + with open(filename, 'r') as fp: + obj = json.load(fp) + except IOError as exc: + raise InvalidClientSecretsError('Error opening file', exc.filename, + exc.strerror, exc.errno) + return _validate_clientsecrets(obj) + + +def loadfile(filename, cache=None): + """Loading of client_secrets JSON file, optionally backed by a cache. + + Typical cache storage would be App Engine memcache service, + but you can pass in any other cache client that implements + these methods: + + * ``get(key, namespace=ns)`` + * ``set(key, value, namespace=ns)`` + + Usage:: + + # without caching + client_type, client_info = loadfile('secrets.json') + # using App Engine memcache service + from google.appengine.api import memcache + client_type, client_info = loadfile('secrets.json', cache=memcache) + + Args: + filename: string, Path to a client_secrets.json file on a filesystem. + cache: An optional cache service client that implements get() and set() + methods. If not specified, the file is always being loaded from + a filesystem. + + Raises: + InvalidClientSecretsError: In case of a validation error or some + I/O failure. Can happen only on cache miss. + + Returns: + (client_type, client_info) tuple, as _loadfile() normally would. + JSON contents is validated only during first load. Cache hits are not + validated. 
+ """ + _SECRET_NAMESPACE = 'oauth2client:secrets#ns' + + if not cache: + return _loadfile(filename) + + obj = cache.get(filename, namespace=_SECRET_NAMESPACE) + if obj is None: + client_type, client_info = _loadfile(filename) + obj = {client_type: client_info} + cache.set(filename, obj, namespace=_SECRET_NAMESPACE) + + return next(six.iteritems(obj)) diff --git a/oauth2client/contrib/__init__.py b/oauth2client/contrib/__init__.py new file mode 100644 index 0000000..ecfd06c --- /dev/null +++ b/oauth2client/contrib/__init__.py @@ -0,0 +1,6 @@ +"""Contributed modules. + +Contrib contains modules that are not considered part of the core oauth2client +library but provide additional functionality. These modules are intended to +make it easier to use oauth2client. +""" diff --git a/oauth2client/contrib/_appengine_ndb.py b/oauth2client/contrib/_appengine_ndb.py new file mode 100644 index 0000000..44c0dac --- /dev/null +++ b/oauth2client/contrib/_appengine_ndb.py @@ -0,0 +1,163 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Google App Engine utilities helper. + +Classes that directly require App Engine's ndb library. Provided +as a separate module in case of failure to import ndb while +other App Engine libraries are present. 
+""" + +import logging + +from google.appengine.ext import ndb + +from oauth2client import client + + +NDB_KEY = ndb.Key +"""Key constant used by :mod:`oauth2client.contrib.appengine`.""" + +NDB_MODEL = ndb.Model +"""Model constant used by :mod:`oauth2client.contrib.appengine`.""" + +_LOGGER = logging.getLogger(__name__) + + +class SiteXsrfSecretKeyNDB(ndb.Model): + """NDB Model for storage for the sites XSRF secret key. + + Since this model uses the same kind as SiteXsrfSecretKey, it can be + used interchangeably. This simply provides an NDB model for interacting + with the same data the DB model interacts with. + + There should only be one instance stored of this model, the one used + for the site. + """ + secret = ndb.StringProperty() + + @classmethod + def _get_kind(cls): + """Return the kind name for this class.""" + return 'SiteXsrfSecretKey' + + +class FlowNDBProperty(ndb.PickleProperty): + """App Engine NDB datastore Property for Flow. + + Serves the same purpose as the DB FlowProperty, but for NDB models. + Since PickleProperty inherits from BlobProperty, the underlying + representation of the data in the datastore will be the same as in the + DB case. + + Utility property that allows easy storage and retrieval of an + oauth2client.Flow + """ + + def _validate(self, value): + """Validates a value as a proper Flow object. + + Args: + value: A value to be set on the property. + + Raises: + TypeError if the value is not an instance of Flow. + """ + _LOGGER.info('validate: Got type %s', type(value)) + if value is not None and not isinstance(value, client.Flow): + raise TypeError('Property %s must be convertible to a flow ' + 'instance; received: %s.' % (self._name, + value)) + + +class CredentialsNDBProperty(ndb.BlobProperty): + """App Engine NDB datastore Property for Credentials. + + Serves the same purpose as the DB CredentialsProperty, but for NDB + models. 
Since CredentialsProperty stores data as a blob and this + inherits from BlobProperty, the data in the datastore will be the same + as in the DB case. + + Utility property that allows easy storage and retrieval of Credentials + and subclasses. + """ + + def _validate(self, value): + """Validates a value as a proper credentials object. + + Args: + value: A value to be set on the property. + + Raises: + TypeError if the value is not an instance of Credentials. + """ + _LOGGER.info('validate: Got type %s', type(value)) + if value is not None and not isinstance(value, client.Credentials): + raise TypeError('Property %s must be convertible to a ' + 'credentials instance; received: %s.' % + (self._name, value)) + + def _to_base_type(self, value): + """Converts our validated value to a JSON serialized string. + + Args: + value: A value to be set in the datastore. + + Returns: + A JSON serialized version of the credential, else '' if value + is None. + """ + if value is None: + return '' + else: + return value.to_json() + + def _from_base_type(self, value): + """Converts our stored JSON string back to the desired type. + + Args: + value: A value from the datastore to be converted to the + desired type. + + Returns: + A deserialized Credentials (or subclass) object, else None if + the value can't be parsed. + """ + if not value: + return None + try: + # Uses the from_json method of the implied class of value + credentials = client.Credentials.new_from_json(value) + except ValueError: + credentials = None + return credentials + + +class CredentialsNDBModel(ndb.Model): + """NDB Model for storage of OAuth 2.0 Credentials + + Since this model uses the same kind as CredentialsModel and has a + property which can serialize and deserialize Credentials correctly, it + can be used interchangeably with a CredentialsModel to access, insert + and delete the same entities. This simply provides an NDB model for + interacting with the same data the DB model interacts with. 
+ + Storage of the model is keyed by the user.user_id(). + """ + credentials = CredentialsNDBProperty() + + @classmethod + def _get_kind(cls): + """Return the kind name for this class.""" + return 'CredentialsModel' diff --git a/oauth2client/contrib/_fcntl_opener.py b/oauth2client/contrib/_fcntl_opener.py new file mode 100644 index 0000000..4e758b9 --- /dev/null +++ b/oauth2client/contrib/_fcntl_opener.py @@ -0,0 +1,85 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import errno +import time + +import fcntl + +from oauth2client.contrib.locked_file import _Opener +from oauth2client.contrib.locked_file import AlreadyLockedException +from oauth2client.contrib.locked_file import logger +from oauth2client.contrib.locked_file import validate_file + + +class _FcntlOpener(_Opener): + """Open, lock, and unlock a file using fcntl.lockf.""" + + def open_and_lock(self, timeout, delay): + """Open the file and lock it. + + Args: + timeout: float, How long to try to lock for. + delay: float, How long to wait between retries + + Raises: + AlreadyLockedException: if the lock is already acquired. + IOError: if the open fails. + CredentialsFileSymbolicLinkError: if the file is a symbolic + link. 
+ """ + if self._locked: + raise AlreadyLockedException('File %s is already locked' % + self._filename) + start_time = time.time() + + validate_file(self._filename) + try: + self._fh = open(self._filename, self._mode) + except IOError as e: + # If we can't access with _mode, try _fallback_mode and + # don't lock. + if e.errno in (errno.EPERM, errno.EACCES): + self._fh = open(self._filename, self._fallback_mode) + return + + # We opened in _mode, try to lock the file. + while True: + try: + fcntl.lockf(self._fh.fileno(), fcntl.LOCK_EX) + self._locked = True + return + except IOError as e: + # If not retrying, then just pass on the error. + if timeout == 0: + raise + if e.errno != errno.EACCES: + raise + # We could not acquire the lock. Try again. + if (time.time() - start_time) >= timeout: + logger.warn('Could not lock %s in %s seconds', + self._filename, timeout) + if self._fh: + self._fh.close() + self._fh = open(self._filename, self._fallback_mode) + return + time.sleep(delay) + + def unlock_and_close(self): + """Close and unlock the file using the fcntl.lockf primitive.""" + if self._locked: + fcntl.lockf(self._fh.fileno(), fcntl.LOCK_UN) + self._locked = False + if self._fh: + self._fh.close() diff --git a/oauth2client/contrib/_metadata.py b/oauth2client/contrib/_metadata.py new file mode 100644 index 0000000..564cd39 --- /dev/null +++ b/oauth2client/contrib/_metadata.py @@ -0,0 +1,118 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+"""Provides helper methods for talking to the Compute Engine metadata server.
+
+See https://cloud.google.com/compute/docs/metadata
+"""
+
+import datetime
+import json
+import os
+
+from six.moves import http_client
+from six.moves.urllib import parse as urlparse
+
+from oauth2client import _helpers
+from oauth2client import client
+from oauth2client import transport
+
+
+METADATA_ROOT = 'http://{}/computeMetadata/v1/'.format(
+    os.getenv('GCE_METADATA_ROOT', 'metadata.google.internal'))
+METADATA_HEADERS = {'Metadata-Flavor': 'Google'}
+
+
+def get(http, path, root=METADATA_ROOT, recursive=None):
+    """Fetch a resource from the metadata server.
+
+    Args:
+        http: an object to be used to make HTTP requests.
+        path: A string indicating the resource to retrieve. For example,
+            'instance/service-accounts/default'
+        root: A string indicating the full path to the metadata server root.
+        recursive: A boolean indicating whether to do a recursive query of
+            metadata. See
+            https://cloud.google.com/compute/docs/metadata#aggcontents
+
+    Returns:
+        A dictionary if the metadata server returns JSON, otherwise a string.
+
+    Raises:
+        http_client.HTTPException if an error occurred while
+        retrieving metadata.
+    """
+    url = urlparse.urljoin(root, path)
+    url = _helpers._add_query_parameter(url, 'recursive', recursive)
+
+    response, content = transport.request(
+        http, url, headers=METADATA_HEADERS)
+
+    if response.status == http_client.OK:
+        decoded = _helpers._from_bytes(content)
+        if response['content-type'] == 'application/json':
+            return json.loads(decoded)
+        else:
+            return decoded
+    else:
+        raise http_client.HTTPException(
+            'Failed to retrieve {0} from the Google Compute Engine '
+            'metadata service. 
Response:\n{1}'.format(url, response)) + + +def get_service_account_info(http, service_account='default'): + """Get information about a service account from the metadata server. + + Args: + http: an object to be used to make HTTP requests. + service_account: An email specifying the service account for which to + look up information. Default will be information for the "default" + service account of the current compute engine instance. + + Returns: + A dictionary with information about the specified service account, + for example: + + { + 'email': '...', + 'scopes': ['scope', ...], + 'aliases': ['default', '...'] + } + """ + return get( + http, + 'instance/service-accounts/{0}/'.format(service_account), + recursive=True) + + +def get_token(http, service_account='default'): + """Fetch an oauth token for the + + Args: + http: an object to be used to make HTTP requests. + service_account: An email specifying the service account this token + should represent. Default will be a token for the "default" service + account of the current compute engine instance. + + Returns: + A tuple of (access token, token expiration), where access token is the + access token as a string and token expiration is a datetime object + that indicates when the access token will expire. + """ + token_json = get( + http, + 'instance/service-accounts/{0}/token'.format(service_account)) + token_expiry = client._UTCNOW() + datetime.timedelta( + seconds=token_json['expires_in']) + return token_json['access_token'], token_expiry diff --git a/oauth2client/contrib/_win32_opener.py b/oauth2client/contrib/_win32_opener.py new file mode 100644 index 0000000..4a0580e --- /dev/null +++ b/oauth2client/contrib/_win32_opener.py @@ -0,0 +1,109 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import errno +import time + +import pywintypes +import win32con +import win32file + +from oauth2client.contrib.locked_file import _Opener +from oauth2client.contrib.locked_file import AlreadyLockedException +from oauth2client.contrib.locked_file import logger +from oauth2client.contrib.locked_file import validate_file + + +class _Win32Opener(_Opener): + """Open, lock, and unlock a file using windows primitives.""" + + # Error #33: + # 'The process cannot access the file because another process' + FILE_IN_USE_ERROR = 33 + + # Error #158: + # 'The segment is already unlocked.' + FILE_ALREADY_UNLOCKED_ERROR = 158 + + def open_and_lock(self, timeout, delay): + """Open the file and lock it. + + Args: + timeout: float, How long to try to lock for. + delay: float, How long to wait between retries + + Raises: + AlreadyLockedException: if the lock is already acquired. + IOError: if the open fails. + CredentialsFileSymbolicLinkError: if the file is a symbolic + link. + """ + if self._locked: + raise AlreadyLockedException('File %s is already locked' % + self._filename) + start_time = time.time() + + validate_file(self._filename) + try: + self._fh = open(self._filename, self._mode) + except IOError as e: + # If we can't access with _mode, try _fallback_mode + # and don't lock. + if e.errno == errno.EACCES: + self._fh = open(self._filename, self._fallback_mode) + return + + # We opened in _mode, try to lock the file. 
+ while True: + try: + hfile = win32file._get_osfhandle(self._fh.fileno()) + win32file.LockFileEx( + hfile, + (win32con.LOCKFILE_FAIL_IMMEDIATELY | + win32con.LOCKFILE_EXCLUSIVE_LOCK), 0, -0x10000, + pywintypes.OVERLAPPED()) + self._locked = True + return + except pywintypes.error as e: + if timeout == 0: + raise + + # If the error is not that the file is already + # in use, raise. + if e[0] != _Win32Opener.FILE_IN_USE_ERROR: + raise + + # We could not acquire the lock. Try again. + if (time.time() - start_time) >= timeout: + logger.warn('Could not lock %s in %s seconds' % ( + self._filename, timeout)) + if self._fh: + self._fh.close() + self._fh = open(self._filename, self._fallback_mode) + return + time.sleep(delay) + + def unlock_and_close(self): + """Close and unlock the file using the win32 primitive.""" + if self._locked: + try: + hfile = win32file._get_osfhandle(self._fh.fileno()) + win32file.UnlockFileEx(hfile, 0, -0x10000, + pywintypes.OVERLAPPED()) + except pywintypes.error as e: + if e[0] != _Win32Opener.FILE_ALREADY_UNLOCKED_ERROR: + raise + self._locked = False + if self._fh: + self._fh.close() diff --git a/oauth2client/contrib/appengine.py b/oauth2client/contrib/appengine.py new file mode 100644 index 0000000..84c3a39 --- /dev/null +++ b/oauth2client/contrib/appengine.py @@ -0,0 +1,928 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Utilities for Google App Engine + +Utilities for making it easier to use OAuth 2.0 on Google App Engine. +""" + +import cgi +import json +import logging +import os +import pickle +import threading + +import httplib2 +import webapp2 as webapp + +from google.appengine.api import app_identity +from google.appengine.api import memcache +from google.appengine.api import users +from google.appengine.ext import db +from google.appengine.ext.webapp.util import login_required + +from oauth2client import GOOGLE_AUTH_URI +from oauth2client import GOOGLE_REVOKE_URI +from oauth2client import GOOGLE_TOKEN_URI +from oauth2client import clientsecrets +from oauth2client import util +from oauth2client.client import AccessTokenRefreshError +from oauth2client.client import AssertionCredentials +from oauth2client.client import Credentials +from oauth2client.client import Flow +from oauth2client.client import OAuth2WebServerFlow +from oauth2client.client import Storage +from oauth2client.contrib import xsrfutil + +# This is a temporary fix for a Google internal issue. +try: + from oauth2client.contrib import _appengine_ndb +except ImportError: # pragma: NO COVER + _appengine_ndb = None + + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' + +logger = logging.getLogger(__name__) + +OAUTH2CLIENT_NAMESPACE = 'oauth2client#ns' + +XSRF_MEMCACHE_ID = 'xsrf_secret_key' + +if _appengine_ndb is None: # pragma: NO COVER + CredentialsNDBModel = None + CredentialsNDBProperty = None + FlowNDBProperty = None + _NDB_KEY = None + _NDB_MODEL = None + SiteXsrfSecretKeyNDB = None +else: + CredentialsNDBModel = _appengine_ndb.CredentialsNDBModel + CredentialsNDBProperty = _appengine_ndb.CredentialsNDBProperty + FlowNDBProperty = _appengine_ndb.FlowNDBProperty + _NDB_KEY = _appengine_ndb.NDB_KEY + _NDB_MODEL = _appengine_ndb.NDB_MODEL + SiteXsrfSecretKeyNDB = _appengine_ndb.SiteXsrfSecretKeyNDB + + +def _safe_html(s): + """Escape text to make it safe to display. 
+ + Args: + s: string, The text to escape. + + Returns: + The escaped text as a string. + """ + return cgi.escape(s, quote=1).replace("'", ''') + + +class InvalidClientSecretsError(Exception): + """The client_secrets.json file is malformed or missing required fields.""" + + +class InvalidXsrfTokenError(Exception): + """The XSRF token is invalid or expired.""" + + +class SiteXsrfSecretKey(db.Model): + """Storage for the sites XSRF secret key. + + There will only be one instance stored of this model, the one used for the + site. + """ + secret = db.StringProperty() + + +def _generate_new_xsrf_secret_key(): + """Returns a random XSRF secret key.""" + return os.urandom(16).encode("hex") + + +def xsrf_secret_key(): + """Return the secret key for use for XSRF protection. + + If the Site entity does not have a secret key, this method will also create + one and persist it. + + Returns: + The secret key. + """ + secret = memcache.get(XSRF_MEMCACHE_ID, namespace=OAUTH2CLIENT_NAMESPACE) + if not secret: + # Load the one and only instance of SiteXsrfSecretKey. + model = SiteXsrfSecretKey.get_or_insert(key_name='site') + if not model.secret: + model.secret = _generate_new_xsrf_secret_key() + model.put() + secret = model.secret + memcache.add(XSRF_MEMCACHE_ID, secret, + namespace=OAUTH2CLIENT_NAMESPACE) + + return str(secret) + + +class AppAssertionCredentials(AssertionCredentials): + """Credentials object for App Engine Assertion Grants + + This object will allow an App Engine application to identify itself to + Google and other OAuth 2.0 servers that can verify assertions. It can be + used for the purpose of accessing data stored under an account assigned to + the App Engine application itself. + + This credential does not require a flow to instantiate because it + represents a two legged flow, and therefore has all of the required + information to generate and refresh its own access tokens. 
+ """ + + @util.positional(2) + def __init__(self, scope, **kwargs): + """Constructor for AppAssertionCredentials + + Args: + scope: string or iterable of strings, scope(s) of the credentials + being requested. + **kwargs: optional keyword args, including: + service_account_id: service account id of the application. If None + or unspecified, the default service account for + the app is used. + """ + self.scope = util.scopes_to_string(scope) + self._kwargs = kwargs + self.service_account_id = kwargs.get('service_account_id', None) + self._service_account_email = None + + # Assertion type is no longer used, but still in the + # parent class signature. + super(AppAssertionCredentials, self).__init__(None) + + @classmethod + def from_json(cls, json_data): + data = json.loads(json_data) + return AppAssertionCredentials(data['scope']) + + def _refresh(self, http_request): + """Refreshes the access_token. + + Since the underlying App Engine app_identity implementation does its + own caching we can skip all the storage hoops and just to a refresh + using the API. + + Args: + http_request: callable, a callable that matches the method + signature of httplib2.Http.request, used to make the + refresh request. + + Raises: + AccessTokenRefreshError: When the refresh fails. + """ + try: + scopes = self.scope.split() + (token, _) = app_identity.get_access_token( + scopes, service_account_id=self.service_account_id) + except app_identity.Error as e: + raise AccessTokenRefreshError(str(e)) + self.access_token = token + + @property + def serialization_data(self): + raise NotImplementedError('Cannot serialize credentials ' + 'for Google App Engine.') + + def create_scoped_required(self): + return not self.scope + + def create_scoped(self, scopes): + return AppAssertionCredentials(scopes, **self._kwargs) + + def sign_blob(self, blob): + """Cryptographically sign a blob (of bytes). + + Implements abstract method + :meth:`oauth2client.client.AssertionCredentials.sign_blob`. 
+ + Args: + blob: bytes, Message to be signed. + + Returns: + tuple, A pair of the private key ID used to sign the blob and + the signed contents. + """ + return app_identity.sign_blob(blob) + + @property + def service_account_email(self): + """Get the email for the current service account. + + Returns: + string, The email associated with the Google App Engine + service account. + """ + if self._service_account_email is None: + self._service_account_email = ( + app_identity.get_service_account_name()) + return self._service_account_email + + +class FlowProperty(db.Property): + """App Engine datastore Property for Flow. + + Utility property that allows easy storage and retrieval of an + oauth2client.Flow + """ + + # Tell what the user type is. + data_type = Flow + + # For writing to datastore. + def get_value_for_datastore(self, model_instance): + flow = super(FlowProperty, self).get_value_for_datastore( + model_instance) + return db.Blob(pickle.dumps(flow)) + + # For reading from datastore. + def make_value_from_datastore(self, value): + if value is None: + return None + return pickle.loads(value) + + def validate(self, value): + if value is not None and not isinstance(value, Flow): + raise db.BadValueError('Property %s must be convertible ' + 'to a FlowThreeLegged instance (%s)' % + (self.name, value)) + return super(FlowProperty, self).validate(value) + + def empty(self, value): + return not value + + +class CredentialsProperty(db.Property): + """App Engine datastore Property for Credentials. + + Utility property that allows easy storage and retrieval of + oauth2client.Credentials + """ + + # Tell what the user type is. + data_type = Credentials + + # For writing to datastore. 
+ def get_value_for_datastore(self, model_instance): + logger.info("get: Got type " + str(type(model_instance))) + cred = super(CredentialsProperty, self).get_value_for_datastore( + model_instance) + if cred is None: + cred = '' + else: + cred = cred.to_json() + return db.Blob(cred) + + # For reading from datastore. + def make_value_from_datastore(self, value): + logger.info("make: Got type " + str(type(value))) + if value is None: + return None + if len(value) == 0: + return None + try: + credentials = Credentials.new_from_json(value) + except ValueError: + credentials = None + return credentials + + def validate(self, value): + value = super(CredentialsProperty, self).validate(value) + logger.info("validate: Got type " + str(type(value))) + if value is not None and not isinstance(value, Credentials): + raise db.BadValueError('Property %s must be convertible ' + 'to a Credentials instance (%s)' % + (self.name, value)) + return value + + +class StorageByKeyName(Storage): + """Store and retrieve a credential to and from the App Engine datastore. + + This Storage helper presumes the Credentials have been stored as a + CredentialsProperty or CredentialsNDBProperty on a datastore model class, + and that entities are stored by key_name. + """ + + @util.positional(4) + def __init__(self, model, key_name, property_name, cache=None, user=None): + """Constructor for Storage. + + Args: + model: db.Model or ndb.Model, model class + key_name: string, key name for the entity that has the credentials + property_name: string, name of the property that is a + CredentialsProperty or CredentialsNDBProperty. + cache: memcache, a write-through cache to put in front of the + datastore. If the model you are using is an NDB model, using + a cache will be redundant since the model uses an instance + cache and memcache for you. + user: users.User object, optional. Can be used to grab user ID as a + key_name if no key name is specified. 
+ """ + super(StorageByKeyName, self).__init__() + + if key_name is None: + if user is None: + raise ValueError('StorageByKeyName called with no ' + 'key name or user.') + key_name = user.user_id() + + self._model = model + self._key_name = key_name + self._property_name = property_name + self._cache = cache + + def _is_ndb(self): + """Determine whether the model of the instance is an NDB model. + + Returns: + Boolean indicating whether or not the model is an NDB or DB model. + """ + # issubclass will fail if one of the arguments is not a class, only + # need worry about new-style classes since ndb and db models are + # new-style + if isinstance(self._model, type): + if _NDB_MODEL is not None and issubclass(self._model, _NDB_MODEL): + return True + elif issubclass(self._model, db.Model): + return False + + raise TypeError('Model class not an NDB or DB model: %s.' % + (self._model,)) + + def _get_entity(self): + """Retrieve entity from datastore. + + Uses a different model method for db or ndb models. + + Returns: + Instance of the model corresponding to the current storage object + and stored using the key name of the storage object. + """ + if self._is_ndb(): + return self._model.get_by_id(self._key_name) + else: + return self._model.get_by_key_name(self._key_name) + + def _delete_entity(self): + """Delete entity from datastore. + + Attempts to delete using the key_name stored on the object, whether or + not the given key is in the datastore. + """ + if self._is_ndb(): + _NDB_KEY(self._model, self._key_name).delete() + else: + entity_key = db.Key.from_path(self._model.kind(), self._key_name) + db.delete(entity_key) + + @db.non_transactional(allow_existing=True) + def locked_get(self): + """Retrieve Credential from datastore. 
+ + Returns: + oauth2client.Credentials + """ + credentials = None + if self._cache: + json = self._cache.get(self._key_name) + if json: + credentials = Credentials.new_from_json(json) + if credentials is None: + entity = self._get_entity() + if entity is not None: + credentials = getattr(entity, self._property_name) + if self._cache: + self._cache.set(self._key_name, credentials.to_json()) + + if credentials and hasattr(credentials, 'set_store'): + credentials.set_store(self) + return credentials + + @db.non_transactional(allow_existing=True) + def locked_put(self, credentials): + """Write a Credentials to the datastore. + + Args: + credentials: Credentials, the credentials to store. + """ + entity = self._model.get_or_insert(self._key_name) + setattr(entity, self._property_name, credentials) + entity.put() + if self._cache: + self._cache.set(self._key_name, credentials.to_json()) + + @db.non_transactional(allow_existing=True) + def locked_delete(self): + """Delete Credential from datastore.""" + + if self._cache: + self._cache.delete(self._key_name) + + self._delete_entity() + + +class CredentialsModel(db.Model): + """Storage for OAuth 2.0 Credentials + + Storage of the model is keyed by the user.user_id(). + """ + credentials = CredentialsProperty() + + +def _build_state_value(request_handler, user): + """Composes the value for the 'state' parameter. + + Packs the current request URI and an XSRF token into an opaque string that + can be passed to the authentication server via the 'state' parameter. + + Args: + request_handler: webapp.RequestHandler, The request. + user: google.appengine.api.users.User, The current user. + + Returns: + The state value as a string. + """ + uri = request_handler.request.url + token = xsrfutil.generate_token(xsrf_secret_key(), user.user_id(), + action_id=str(uri)) + return uri + ':' + token + + +def _parse_state_value(state, user): + """Parse the value of the 'state' parameter. 
+ + Parses the value and validates the XSRF token in the state parameter. + + Args: + state: string, The value of the state parameter. + user: google.appengine.api.users.User, The current user. + + Raises: + InvalidXsrfTokenError: if the XSRF token is invalid. + + Returns: + The redirect URI. + """ + uri, token = state.rsplit(':', 1) + if not xsrfutil.validate_token(xsrf_secret_key(), token, user.user_id(), + action_id=uri): + raise InvalidXsrfTokenError() + + return uri + + +class OAuth2Decorator(object): + """Utility for making OAuth 2.0 easier. + + Instantiate and then use with oauth_required or oauth_aware + as decorators on webapp.RequestHandler methods. + + :: + + decorator = OAuth2Decorator( + client_id='837...ent.com', + client_secret='Qh...wwI', + scope='https://www.googleapis.com/auth/plus') + + class MainHandler(webapp.RequestHandler): + @decorator.oauth_required + def get(self): + http = decorator.http() + # http is authorized with the user's Credentials and can be + # used in API calls + + """ + + def set_credentials(self, credentials): + self._tls.credentials = credentials + + def get_credentials(self): + """A thread local Credentials object. + + Returns: + A client.Credentials object, or None if credentials hasn't been set + in this thread yet, which may happen when calling has_credentials + inside oauth_aware. + """ + return getattr(self._tls, 'credentials', None) + + credentials = property(get_credentials, set_credentials) + + def set_flow(self, flow): + self._tls.flow = flow + + def get_flow(self): + """A thread local Flow object. + + Returns: + A credentials.Flow object, or None if the flow hasn't been set in + this thread yet, which happens in _create_flow() since Flows are + created lazily. 
+ """ + return getattr(self._tls, 'flow', None) + + flow = property(get_flow, set_flow) + + @util.positional(4) + def __init__(self, client_id, client_secret, scope, + auth_uri=GOOGLE_AUTH_URI, + token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI, + user_agent=None, + message=None, + callback_path='/oauth2callback', + token_response_param=None, + _storage_class=StorageByKeyName, + _credentials_class=CredentialsModel, + _credentials_property_name='credentials', + **kwargs): + """Constructor for OAuth2Decorator + + Args: + client_id: string, client identifier. + client_secret: string client secret. + scope: string or iterable of strings, scope(s) of the credentials + being requested. + auth_uri: string, URI for authorization endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 provider + can be used. + token_uri: string, URI for token endpoint. For convenience defaults + to Google's endpoints but any OAuth 2.0 provider can be + used. + revoke_uri: string, URI for revoke endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 + provider can be used. + user_agent: string, User agent of your application, default to + None. + message: Message to display if there are problems with the + OAuth 2.0 configuration. The message may contain HTML and + will be presented on the web interface for any method that + uses the decorator. + callback_path: string, The absolute path to use as the callback + URI. Note that this must match up with the URI given + when registering the application in the APIs + Console. + token_response_param: string. If provided, the full JSON response + to the access token request will be encoded + and included in this query parameter in the + callback URI. This is useful with providers + (e.g. wordpress.com) that include extra + fields that the client may want. + _storage_class: "Protected" keyword argument not typically provided + to this constructor. 
A storage class to aid in + storing a Credentials object for a user in the + datastore. Defaults to StorageByKeyName. + _credentials_class: "Protected" keyword argument not typically + provided to this constructor. A db or ndb Model + class to hold credentials. Defaults to + CredentialsModel. + _credentials_property_name: "Protected" keyword argument not + typically provided to this constructor. + A string indicating the name of the + field on the _credentials_class where a + Credentials object will be stored. + Defaults to 'credentials'. + **kwargs: dict, Keyword arguments are passed along as kwargs to + the OAuth2WebServerFlow constructor. + """ + self._tls = threading.local() + self.flow = None + self.credentials = None + self._client_id = client_id + self._client_secret = client_secret + self._scope = util.scopes_to_string(scope) + self._auth_uri = auth_uri + self._token_uri = token_uri + self._revoke_uri = revoke_uri + self._user_agent = user_agent + self._kwargs = kwargs + self._message = message + self._in_error = False + self._callback_path = callback_path + self._token_response_param = token_response_param + self._storage_class = _storage_class + self._credentials_class = _credentials_class + self._credentials_property_name = _credentials_property_name + + def _display_error_message(self, request_handler): + request_handler.response.out.write('') + request_handler.response.out.write(_safe_html(self._message)) + request_handler.response.out.write('') + + def oauth_required(self, method): + """Decorator that starts the OAuth 2.0 dance. + + Starts the OAuth dance for the logged in user if they haven't already + granted access for this application. + + Args: + method: callable, to be decorated method of a webapp.RequestHandler + instance. 
+ """ + + def check_oauth(request_handler, *args, **kwargs): + if self._in_error: + self._display_error_message(request_handler) + return + + user = users.get_current_user() + # Don't use @login_decorator as this could be used in a + # POST request. + if not user: + request_handler.redirect(users.create_login_url( + request_handler.request.uri)) + return + + self._create_flow(request_handler) + + # Store the request URI in 'state' so we can use it later + self.flow.params['state'] = _build_state_value( + request_handler, user) + self.credentials = self._storage_class( + self._credentials_class, None, + self._credentials_property_name, user=user).get() + + if not self.has_credentials(): + return request_handler.redirect(self.authorize_url()) + try: + resp = method(request_handler, *args, **kwargs) + except AccessTokenRefreshError: + return request_handler.redirect(self.authorize_url()) + finally: + self.credentials = None + return resp + + return check_oauth + + def _create_flow(self, request_handler): + """Create the Flow object. + + The Flow is calculated lazily since we don't know where this app is + running until it receives a request, at which point redirect_uri can be + calculated and then the Flow object can be constructed. + + Args: + request_handler: webapp.RequestHandler, the request handler. + """ + if self.flow is None: + redirect_uri = request_handler.request.relative_url( + self._callback_path) # Usually /oauth2callback + self.flow = OAuth2WebServerFlow( + self._client_id, self._client_secret, self._scope, + redirect_uri=redirect_uri, user_agent=self._user_agent, + auth_uri=self._auth_uri, token_uri=self._token_uri, + revoke_uri=self._revoke_uri, **self._kwargs) + + def oauth_aware(self, method): + """Decorator that sets up for OAuth 2.0 dance, but doesn't do it. + + Does all the setup for the OAuth dance, but doesn't initiate it. 
+ This decorator is useful if you want to create a page that knows + whether or not the user has granted access to this application. + From within a method decorated with @oauth_aware the has_credentials() + and authorize_url() methods can be called. + + Args: + method: callable, to be decorated method of a webapp.RequestHandler + instance. + """ + + def setup_oauth(request_handler, *args, **kwargs): + if self._in_error: + self._display_error_message(request_handler) + return + + user = users.get_current_user() + # Don't use @login_decorator as this could be used in a + # POST request. + if not user: + request_handler.redirect(users.create_login_url( + request_handler.request.uri)) + return + + self._create_flow(request_handler) + + self.flow.params['state'] = _build_state_value(request_handler, + user) + self.credentials = self._storage_class( + self._credentials_class, None, + self._credentials_property_name, user=user).get() + try: + resp = method(request_handler, *args, **kwargs) + finally: + self.credentials = None + return resp + return setup_oauth + + def has_credentials(self): + """True if for the logged in user there are valid access Credentials. + + Must only be called from with a webapp.RequestHandler subclassed method + that had been decorated with either @oauth_required or @oauth_aware. + """ + return self.credentials is not None and not self.credentials.invalid + + def authorize_url(self): + """Returns the URL to start the OAuth dance. + + Must only be called from with a webapp.RequestHandler subclassed method + that had been decorated with either @oauth_required or @oauth_aware. + """ + url = self.flow.step1_get_authorize_url() + return str(url) + + def http(self, *args, **kwargs): + """Returns an authorized http instance. + + Must only be called from within an @oauth_required decorated method, or + from within an @oauth_aware decorated method where has_credentials() + returns True. 
    def callback_handler(self):
        """RequestHandler for the OAuth 2.0 redirect callback.

        Usage::

            app = webapp.WSGIApplication([
                ('/index', MyIndexHandler),
                ...,
                (decorator.callback_path, decorator.callback_handler())
            ])

        Returns:
            A webapp.RequestHandler that handles the redirect back from the
            server during the OAuth 2.0 dance.
        """
        # Captured by the nested handler class below: inside OAuth2Handler,
        # ``self`` refers to the request handler, not to this decorator.
        decorator = self

        class OAuth2Handler(webapp.RequestHandler):
            """Handler for the redirect_uri of the OAuth 2.0 dance."""

            @login_required
            def get(self):
                error = self.request.get('error')
                if error:
                    # The authorization server reported failure; render the
                    # (HTML-escaped) description instead of continuing.
                    errormsg = self.request.get('error_description', error)
                    self.response.out.write(
                        'The authorization request failed: %s' %
                        _safe_html(errormsg))
                else:
                    user = users.get_current_user()
                    decorator._create_flow(self)
                    # Exchange the authorization code carried in the query
                    # parameters for credentials, then persist them keyed to
                    # the current user.
                    credentials = decorator.flow.step2_exchange(
                        self.request.params)
                    decorator._storage_class(
                        decorator._credentials_class, None,
                        decorator._credentials_property_name,
                        user=user).put(credentials)
                    # Recover the URI the user originally requested (stored
                    # in 'state' before the dance began) to send them back.
                    redirect_uri = _parse_state_value(
                        str(self.request.get('state')), user)

                    if (decorator._token_response_param and
                            credentials.token_response):
                        # Optionally forward the raw token response to the
                        # destination page as an extra query parameter.
                        resp_json = json.dumps(credentials.token_response)
                        redirect_uri = util._add_query_parameter(
                            redirect_uri, decorator._token_response_param,
                            resp_json)

                    self.redirect(redirect_uri)

        return OAuth2Handler
class OAuth2DecoratorFromClientSecrets(OAuth2Decorator):
    """An OAuth2Decorator that builds from a clientsecrets file.

    Uses a clientsecrets file as the source for all the information when
    constructing an OAuth2Decorator.

    ::

        decorator = OAuth2DecoratorFromClientSecrets(
            os.path.join(os.path.dirname(__file__), 'client_secrets.json')
            scope='https://www.googleapis.com/auth/plus')

        class MainHandler(webapp.RequestHandler):
            @decorator.oauth_required
            def get(self):
                http = decorator.http()
                # http is authorized with the user's Credentials and can be
                # used in API calls

    """

    @util.positional(3)
    def __init__(self, filename, scope, message=None, cache=None, **kwargs):
        """Constructor.

        Args:
            filename: string, File name of client secrets.
            scope: string or iterable of strings, scope(s) of the
                   credentials being requested.
            message: string, A friendly string to display to the user if
                     the clientsecrets file is missing or invalid. May
                     contain HTML; shown for any method using the decorator.
            cache: An optional cache service client that implements get()
                   and set() methods. See clientsecrets.loadfile().
            **kwargs: dict, Keyword arguments passed through to the
                      OAuth2WebServerFlow constructor.
        """
        client_type, client_info = clientsecrets.loadfile(filename,
                                                          cache=cache)
        # Only web-server and installed-app flows make sense here.
        if client_type not in (clientsecrets.TYPE_WEB,
                               clientsecrets.TYPE_INSTALLED):
            raise InvalidClientSecretsError(
                "OAuth2Decorator doesn't support this OAuth 2.0 flow.")

        # Merge caller kwargs with the endpoints read from the secrets file.
        constructor_kwargs = dict(
            kwargs,
            auth_uri=client_info['auth_uri'],
            token_uri=client_info['token_uri'],
            message=message,
        )
        if client_info.get('revoke_uri') is not None:
            constructor_kwargs['revoke_uri'] = client_info['revoke_uri']
        super(OAuth2DecoratorFromClientSecrets, self).__init__(
            client_info['client_id'], client_info['client_secret'],
            scope, **constructor_kwargs)
        self._message = (
            message if message is not None
            else 'Please configure your application for OAuth 2.0.')
# Expose utcnow() at module level to allow for
# easier testing (by replacing with a stub).
_UTCNOW = datetime.datetime.utcnow

# Environment variable that carries the port of the local Developer Shell
# credential server; unset or 0 means no server is available.
DEVSHELL_ENV = 'DEVSHELL_CLIENT_PORT'


class Error(Exception):
    """Errors for this module."""
    pass


class CommunicationError(Error):
    """Errors for communication with the Developer Shell server."""


class NoDevshellServer(Error):
    """Error when no Developer Shell server can be contacted."""


# The request for credential information to the Developer Shell client socket
# is always an empty PBLite-formatted JSON object, so just define it as a
# constant.
CREDENTIAL_INFO_REQUEST_JSON = '[]'
+ """ + + def __init__(self, json_string): + """Initialize the response data from JSON PBLite array.""" + pbl = json.loads(json_string) + if not isinstance(pbl, list): + raise ValueError('Not a list: ' + str(pbl)) + pbl_len = len(pbl) + self.user_email = pbl[0] if pbl_len > 0 else None + self.project_id = pbl[1] if pbl_len > 1 else None + self.access_token = pbl[2] if pbl_len > 2 else None + self.expires_in = pbl[3] if pbl_len > 3 else None + + +def _SendRecv(): + """Communicate with the Developer Shell server socket.""" + + port = int(os.getenv(DEVSHELL_ENV, 0)) + if port == 0: + raise NoDevshellServer() + + sock = socket.socket() + sock.connect(('localhost', port)) + + data = CREDENTIAL_INFO_REQUEST_JSON + msg = '%s\n%s' % (len(data), data) + sock.sendall(_to_bytes(msg, encoding='utf-8')) + + header = sock.recv(6).decode() + if '\n' not in header: + raise CommunicationError('saw no newline in the first 6 bytes') + len_str, json_str = header.split('\n', 1) + to_read = int(len_str) - len(json_str) + if to_read > 0: + json_str += sock.recv(to_read, socket.MSG_WAITALL).decode() + + return CredentialInfoResponse(json_str) + + +class DevshellCredentials(client.GoogleCredentials): + """Credentials object for Google Developer Shell environment. + + This object will allow a Google Developer Shell session to identify its + user to Google and other OAuth 2.0 servers that can verify assertions. It + can be used for the purpose of accessing data stored under the user + account. + + This credential does not require a flow to instantiate because it + represents a two legged flow, and therefore has all of the required + information to generate and refresh its own access tokens. 
+ """ + + def __init__(self, user_agent=None): + super(DevshellCredentials, self).__init__( + None, # access_token, initialized below + None, # client_id + None, # client_secret + None, # refresh_token + None, # token_expiry + None, # token_uri + user_agent) + self._refresh(None) + + def _refresh(self, http_request): + self.devshell_response = _SendRecv() + self.access_token = self.devshell_response.access_token + expires_in = self.devshell_response.expires_in + if expires_in is not None: + delta = datetime.timedelta(seconds=expires_in) + self.token_expiry = _UTCNOW() + delta + else: + self.token_expiry = None + + @property + def user_email(self): + return self.devshell_response.user_email + + @property + def project_id(self): + return self.devshell_response.project_id + + @classmethod + def from_json(cls, json_data): + raise NotImplementedError( + 'Cannot load Developer Shell credentials from JSON.') + + @property + def serialization_data(self): + raise NotImplementedError( + 'Cannot serialize Developer Shell credentials.') diff --git a/oauth2client/contrib/dictionary_storage.py b/oauth2client/contrib/dictionary_storage.py new file mode 100644 index 0000000..8d8e6cf --- /dev/null +++ b/oauth2client/contrib/dictionary_storage.py @@ -0,0 +1,66 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Dictionary storage for OAuth2 Credentials.""" + +from oauth2client.client import OAuth2Credentials +from oauth2client.client import Storage + + +class DictionaryStorage(Storage): + """Store and retrieve credentials to and from a dictionary-like object. + + Args: + dictionary: A dictionary or dictionary-like object. + key: A string or other hashable. The credentials will be stored in + ``dictionary[key]``. + lock: An optional threading.Lock-like object. The lock will be + acquired before anything is written or read from the + dictionary. + """ + + def __init__(self, dictionary, key, lock=None): + """Construct a DictionaryStorage instance.""" + super(DictionaryStorage, self).__init__(lock=lock) + self._dictionary = dictionary + self._key = key + + def locked_get(self): + """Retrieve the credentials from the dictionary, if they exist. + + Returns: A :class:`oauth2client.client.OAuth2Credentials` instance. + """ + serialized = self._dictionary.get(self._key) + + if serialized is None: + return None + + credentials = OAuth2Credentials.from_json(serialized) + credentials.set_store(self) + + return credentials + + def locked_put(self, credentials): + """Save the credentials to the dictionary. + + Args: + credentials: A :class:`oauth2client.client.OAuth2Credentials` + instance. + """ + serialized = credentials.to_json() + self._dictionary[self._key] = serialized + + def locked_delete(self): + """Remove the credentials from the dictionary, if they exist.""" + self._dictionary.pop(self._key, None) diff --git a/oauth2client/contrib/django_orm.py b/oauth2client/contrib/django_orm.py new file mode 100644 index 0000000..18e24f2 --- /dev/null +++ b/oauth2client/contrib/django_orm.py @@ -0,0 +1,182 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""OAuth 2.0 utilities for Django. + +Utilities for using OAuth 2.0 in conjunction with +the Django datastore. + + +Only Django versions 1.8+ are supported. +""" + +import oauth2client +import base64 +import pickle +import six + +from django.db import models +from django.utils.encoding import smart_bytes, smart_text +from oauth2client.client import Storage as BaseStorage + + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' + + +class CredentialsField(models.Field): + + def __init__(self, *args, **kwargs): + if 'null' not in kwargs: + kwargs['null'] = True + super(CredentialsField, self).__init__(*args, **kwargs) + + def get_internal_type(self): + return 'TextField' + + def from_db_value(self, value, expression, connection, context): + return self.to_python(value) + + def to_python(self, value): + if value is None: + return None + if isinstance(value, oauth2client.client.Credentials): + return value + return pickle.loads(base64.b64decode(smart_bytes(value))) + + def get_prep_value(self, value): + if value is None: + return None + return smart_text(base64.b64encode(pickle.dumps(value))) + + def value_to_string(self, obj): + """Convert the field value from the provided model to a string. + + Used during model serialization. 
class FlowField(models.Field):
    """Django ORM field that stores an OAuth2 Flow as a base64 pickle."""

    def __init__(self, *args, **kwargs):
        """Construct the field; columns default to NULLable."""
        if 'null' not in kwargs:
            kwargs['null'] = True
        super(FlowField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        """The serialized flow is stored in a text column."""
        return 'TextField'

    def from_db_value(self, value, expression, connection, context):
        """Convert the database value to a Flow instance."""
        return self.to_python(value)

    def to_python(self, value):
        """Deserialize ``value`` into a Flow.

        Accepts None, an existing Flow instance, or the base64-encoded
        pickle produced by get_prep_value().
        """
        if value is None:
            return None
        if isinstance(value, oauth2client.client.Flow):
            return value
        # Consistency fix: coerce to bytes first, exactly as the sibling
        # CredentialsField.to_python does, so text values loaded from the
        # database decode cleanly on both Python 2 and 3.
        return pickle.loads(base64.b64decode(smart_bytes(value)))

    def get_prep_value(self, value):
        """Serialize a Flow into a base64-encoded pickle string."""
        if value is None:
            return None
        return smart_text(base64.b64encode(pickle.dumps(value)))

    def value_to_string(self, obj):
        """Convert the field value from the provided model to a string.

        Used during model serialization.

        Args:
            obj: db.Model, model object

        Returns:
            string, the serialized field value
        """
        value = self._get_val_from_obj(obj)
        return self.get_prep_value(value)
+ + Returns: + oauth2client.Credentials + """ + credential = None + + query = {self.key_name: self.key_value} + entities = self.model_class.objects.filter(**query) + if len(entities) > 0: + credential = getattr(entities[0], self.property_name) + if credential and hasattr(credential, 'set_store'): + credential.set_store(self) + return credential + + def locked_put(self, credentials, overwrite=False): + """Write a Credentials to the Django datastore. + + Args: + credentials: Credentials, the credentials to store. + overwrite: Boolean, indicates whether you would like these + credentials to overwrite any existing stored + credentials. + """ + args = {self.key_name: self.key_value} + + if overwrite: + (entity, + unused_is_new) = self.model_class.objects.get_or_create(**args) + else: + entity = self.model_class(**args) + + setattr(entity, self.property_name, credentials) + entity.save() + + def locked_delete(self): + """Delete Credentials from the datastore.""" + + query = {self.key_name: self.key_value} + entities = self.model_class.objects.filter(**query).delete() diff --git a/oauth2client/contrib/django_util/__init__.py b/oauth2client/contrib/django_util/__init__.py new file mode 100644 index 0000000..5aa12ac --- /dev/null +++ b/oauth2client/contrib/django_util/__init__.py @@ -0,0 +1,307 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""Utilities for the Django web framework + +Provides Django views and helpers the make using the OAuth2 web server +flow easier. It includes an ``oauth_required`` decorator to automatically ensure +that user credentials are available, and an ``oauth_enabled`` decorator to check +if the user has authorized, and helper shortcuts to create the authorization +URL otherwise. + +Only Django versions 1.8+ are supported. + +Configuration +============= + +To configure, you'll need a set of OAuth2 web application credentials from +`Google Developer's Console `. + +Add the helper to your INSTALLED_APPS: + +.. code-block:: python + :caption: settings.py + :name: installed_apps + + INSTALLED_APPS = ( + # other apps + "oauth2client.contrib.django_util" + ) + +Add the client secrets created earlier to the settings. You can either +specify the path to the credentials file in JSON format + +.. code-block:: python + :caption: settings.py + :name: secrets_file + + GOOGLE_OAUTH2_CLIENT_SECRETS_JSON=/path/to/client-secret.json + +Or, directly configure the client Id and client secret. + + +.. code-block:: python + :caption: settings.py + :name: secrets_config + + GOOGLE_OAUTH2_CLIENT_ID=client-id-field + GOOGLE_OAUTH2_CLIENT_SECRET=client-secret-field + +By default, the default scopes for the required decorator only contains the +``email`` scopes. You can change that default in the settings. + +.. code-block:: python + :caption: settings.py + :name: scopes + + GOOGLE_OAUTH2_SCOPES = ('email', 'https://www.googleapis.com/auth/calendar',) + +By default, the decorators will add an `oauth` object to the Django request +object, and include all of its state and helpers inside that object. If the +`oauth` name conflicts with another usage, it can be changed + +.. 
code-block:: python + :caption: settings.py + :name: request_prefix + + # changes request.oauth to request.google_oauth + GOOGLE_OAUTH2_REQUEST_ATTRIBUTE = 'google_oauth' + +Add the oauth2 routes to your application's urls.py urlpatterns. + +.. code-block:: python + :caption: urls.py + :name: urls + + from oauth2client.contrib.django_util.site import urls as oauth2_urls + + urlpatterns += [url(r'^oauth2/', include(oauth2_urls))] + +To require OAuth2 credentials for a view, use the `oauth2_required` decorator. +This creates a credentials object with an id_token, and allows you to create an +`http` object to build service clients with. These are all attached to the +request.oauth + +.. code-block:: python + :caption: views.py + :name: views_required + + from oauth2client.contrib.django_util.decorators import oauth_required + + @oauth_required + def requires_default_scopes(request): + email = request.oauth.credentials.id_token['email'] + service = build(serviceName='calendar', version='v3', + http=request.oauth.http, + developerKey=API_KEY) + events = service.events().list(calendarId='primary').execute()['items'] + return HttpResponse("email: %s , calendar: %s" % (email, str(events))) + +To make OAuth2 optional and provide an authorization link in your own views. + +.. 
code-block:: python + :caption: views.py + :name: views_enabled2 + + from oauth2client.contrib.django_util.decorators import oauth_enabled + + @oauth_enabled + def optional_oauth2(request): + if request.oauth.has_credentials(): + # this could be passed into a view + # request.oauth.http is also initialized + return HttpResponse("User email: %s" + % request.oauth.credentials.id_token['email']) + else: + return HttpResponse('Here is an OAuth Authorize link: + Authorize' % request.oauth.get_authorize_redirect()) + +If a view needs a scope not included in the default scopes specified in +the settings, you can use [incremental auth](https://developers.google.com/identity/sign-in/web/incremental-auth) +and specify additional scopes in the decorator arguments. + +.. code-block:: python + :caption: views.py + :name: views_required_additional_scopes + + @oauth_enabled(scopes=['https://www.googleapis.com/auth/drive']) + def drive_required(request): + if request.oauth.has_credentials(): + service = build(serviceName='drive', version='v2', + http=request.oauth.http, + developerKey=API_KEY) + events = service.files().list().execute()['items'] + return HttpResponse(str(events)) + else: + return HttpResponse('Here is an OAuth Authorize link: + Authorize' % request.oauth.get_authorize_redirect()) + + +To provide a callback on authorization being completed, use the +oauth2_authorized signal: + +.. 
code-block:: python + :caption: views.py + :name: signals + + from oauth2client.contrib.django_util.signals import oauth2_authorized + + def test_callback(sender, request, credentials, **kwargs): + print "Authorization Signal Received %s" % credentials.id_token['email'] + + oauth2_authorized.connect(test_callback) + +""" + +import django.conf +from django.core import exceptions +from django.core import urlresolvers +import httplib2 +from oauth2client import clientsecrets +from oauth2client.contrib.django_util import storage +from six.moves.urllib import parse + +GOOGLE_OAUTH2_DEFAULT_SCOPES = ('email',) +GOOGLE_OAUTH2_REQUEST_ATTRIBUTE = 'oauth' + + +def _load_client_secrets(filename): + """Loads client secrets from the given filename.""" + client_type, client_info = clientsecrets.loadfile(filename) + + if client_type != clientsecrets.TYPE_WEB: + raise ValueError( + 'The flow specified in {} is not supported, only the WEB flow ' + 'type is supported.'.format(client_type)) + return client_info['client_id'], client_info['client_secret'] + + +def _get_oauth2_client_id_and_secret(settings_instance): + """Initializes client id and client secret based on the settings""" + secret_json = getattr(django.conf.settings, + 'GOOGLE_OAUTH2_CLIENT_SECRETS_JSON', None) + if secret_json is not None: + return _load_client_secrets(secret_json) + else: + client_id = getattr(settings_instance, "GOOGLE_OAUTH2_CLIENT_ID", + None) + client_secret = getattr(settings_instance, + "GOOGLE_OAUTH2_CLIENT_SECRET", None) + if client_id is not None and client_secret is not None: + return client_id, client_secret + else: + raise exceptions.ImproperlyConfigured( + "Must specify either GOOGLE_OAUTH2_CLIENT_SECRETS_JSON, or " + " both GOOGLE_OAUTH2_CLIENT_ID and GOOGLE_OAUTH2_CLIENT_SECRET " + "in settings.py") + + +class OAuth2Settings(object): + """Initializes Django OAuth2 Helper Settings + + This class loads the OAuth2 Settings from the Django settings, and then + provides those settings as 
def _redirect_with_params(url_name, *args, **kwargs):
    """Build a redirect URL carrying the keyword arguments as GET params.

    Args:
        url_name: Django URL pattern name to reverse.
        *args: positional arguments forwarded to ``reverse``.
        **kwargs: query-string parameters.

    Returns:
        str, the reversed URL with an encoded query string appended.
    """
    base_url = urlresolvers.reverse(url_name, args=args)
    # doseq=True (the positional True) expands list/set values into
    # repeated query parameters.
    query = parse.urlencode(kwargs, True)
    return "%s?%s" % (base_url, query)
+ """ + + def __init__(self, request, scopes=None, return_url=None): + """Initialize the Oauth2 Object + :param request: Django request object + :param scopes: Scopes desired for this OAuth2 flow + :param return_url: URL to return to after authorization is complete + :return: + """ + self.request = request + self.return_url = return_url or request.get_full_path() + self.scopes = set(oauth2_settings.scopes) + if scopes: + self.scopes |= set(scopes) + + # make sure previously requested custom scopes are maintained + # in future authorizations + credentials = storage.get_storage(self.request).get() + if credentials: + self.scopes |= credentials.scopes + + def get_authorize_redirect(self): + """Creates a URl to start the OAuth2 authorization flow""" + get_params = { + 'return_url': self.return_url, + 'scopes': self.scopes + } + + return _redirect_with_params('google_oauth:authorize', + **get_params) + + def has_credentials(self): + """Returns True if there are valid credentials for the current user + and required scopes.""" + return (self.credentials and not self.credentials.invalid + and self.credentials.has_scopes(self.scopes)) + + @property + def credentials(self): + """Gets the authorized credentials for this flow, if they exist""" + return storage.get_storage(self.request).get() + + @property + def http(self): + """Helper method to create an HTTP client authorized with OAuth2 + credentials""" + if self.has_credentials(): + return self.credentials.authorize(httplib2.Http()) + return None diff --git a/oauth2client/contrib/django_util/apps.py b/oauth2client/contrib/django_util/apps.py new file mode 100644 index 0000000..7fa20ab --- /dev/null +++ b/oauth2client/contrib/django_util/apps.py @@ -0,0 +1,32 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
import sys

# Django 1.7+ only supports Python 2.7+
if sys.hexversion >= 0x02070000:  # pragma: NO COVER
    from django.apps import AppConfig

    class GoogleOAuth2HelperConfig(AppConfig):
        """App Config for Django Helper."""
        # Bug fix: the app actually lives at oauth2client.contrib.django_util
        # (the INSTALLED_APPS example in that package's docstring uses this
        # path); the previous value 'oauth2client.django_util' named a module
        # that does not exist, so Django could not resolve this AppConfig.
        name = 'oauth2client.contrib.django_util'
        verbose_name = "Google OAuth2 Django Helper"
def oauth_required(decorated_function=None, scopes=None, **decorator_kwargs):
    """Decorator to require OAuth2 credentials for a view.

    .. code-block:: python
       :caption: views.py
       :name: views_required_2

       from oauth2client.django_util.decorators import oauth_required

       @oauth_required
       def requires_default_scopes(request):
           email = request.credentials.id_token['email']
           service = build(serviceName='calendar', version='v3',
                           http=request.oauth.http,
                           developerKey=API_KEY)
           events = service.events().list(
               calendarId='primary').execute()['items']
           return HttpResponse(
               "email: %s , calendar: %s" % (email, str(events)))

    :param decorated_function: View function to decorate, must have the Django
           request object as the first argument
    :param scopes: Scopes to require, will default
    :param decorator_kwargs: Can include ``return_url`` to specify the URL to
           return to after OAuth2 authorization is complete
    :return: An OAuth2 Authorize view if credentials are not found or if the
             credentials are missing the required scopes. Otherwise,
             the decorated view.
    """

    def curry_wrapper(wrapped_function):
        @wraps(wrapped_function)
        def required_wrapper(request, *args, **kwargs):
            # Bug fix: use get() rather than pop(). pop() mutated the shared
            # decorator_kwargs closure dict, so an explicit return_url only
            # took effect for the FIRST request served by the decorated
            # view; every later request silently fell back to the default.
            return_url = decorator_kwargs.get('return_url',
                                              request.get_full_path())
            user_oauth = django_util.UserOAuth2(request, scopes, return_url)
            if not user_oauth.has_credentials():
                # No usable credentials: bounce through the authorize flow.
                return shortcuts.redirect(user_oauth.get_authorize_redirect())
            setattr(request, django_util.oauth2_settings.request_prefix,
                    user_oauth)
            return wrapped_function(request, *args, **kwargs)

        return required_wrapper

    # Support both bare @oauth_required and @oauth_required(...) usage.
    if decorated_function:
        return curry_wrapper(decorated_function)
    else:
        return curry_wrapper
django_util.oauth2_settings.request_prefix, + user_oauth) + return wrapped_function(request, *args, **kwargs) + + return enabled_wrapper + + if decorated_function: + return curry_wrapper(decorated_function) + else: + return curry_wrapper diff --git a/oauth2client/contrib/django_util/models.py b/oauth2client/contrib/django_util/models.py new file mode 100644 index 0000000..37cc697 --- /dev/null +++ b/oauth2client/contrib/django_util/models.py @@ -0,0 +1,82 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Contains classes used for the Django ORM storage.""" + +import base64 +import pickle + +from django.db import models +from django.utils import encoding +import jsonpickle + +import oauth2client + + +class CredentialsField(models.Field): + """Django ORM field for storing OAuth2 Credentials.""" + + def __init__(self, *args, **kwargs): + if 'null' not in kwargs: + kwargs['null'] = True + super(CredentialsField, self).__init__(*args, **kwargs) + + def get_internal_type(self): + return 'BinaryField' + + def from_db_value(self, value, expression, connection, context): + """Overrides ``models.Field`` method. This converts the value + returned from the database to an instance of this class. + """ + return self.to_python(value) + + def to_python(self, value): + """Overrides ``models.Field`` method. 
This is used to convert + bytes (from serialization etc) to an instance of this class""" + if value is None: + return None + elif isinstance(value, oauth2client.client.Credentials): + return value + else: + try: + return jsonpickle.decode( + base64.b64decode(encoding.smart_bytes(value)).decode()) + except ValueError: + return pickle.loads( + base64.b64decode(encoding.smart_bytes(value))) + + def get_prep_value(self, value): + """Overrides ``models.Field`` method. This is used to convert + the value from an instances of this class to bytes that can be + inserted into the database. + """ + if value is None: + return None + else: + return encoding.smart_text( + base64.b64encode(jsonpickle.encode(value).encode())) + + def value_to_string(self, obj): + """Convert the field value from the provided model to a string. + + Used during model serialization. + + Args: + obj: db.Model, model object + + Returns: + string, the serialized field value + """ + value = self._get_val_from_obj(obj) + return self.get_prep_value(value) diff --git a/oauth2client/contrib/django_util/signals.py b/oauth2client/contrib/django_util/signals.py new file mode 100644 index 0000000..ccbe881 --- /dev/null +++ b/oauth2client/contrib/django_util/signals.py @@ -0,0 +1,28 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +""" Signals for Google OAuth2 Helper + +This module contains signals for Google OAuth2 Helper. 
Currently it only +contains one, which fires when an OAuth2 authorization flow has completed. +""" + +import django.dispatch + +"""Signal that fires when OAuth2 Flow has completed. +It passes the Django request object and the OAuth2 credentials object to the + receiver. +""" +oauth2_authorized = django.dispatch.Signal( + providing_args=["request", "credentials"]) diff --git a/oauth2client/contrib/django_util/site.py b/oauth2client/contrib/django_util/site.py new file mode 100644 index 0000000..c13ed6b --- /dev/null +++ b/oauth2client/contrib/django_util/site.py @@ -0,0 +1,23 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from django.conf import urls +from oauth2client.contrib.django_util import views + +urlpatterns = [ + urls.url(r'oauth2callback/', views.oauth2_callback, name="callback"), + urls.url(r'oauth2authorize/', views.oauth2_authorize, name="authorize") +] + +urls = (urlpatterns, "google_oauth", "google_oauth") diff --git a/oauth2client/contrib/django_util/storage.py b/oauth2client/contrib/django_util/storage.py new file mode 100644 index 0000000..d42a987 --- /dev/null +++ b/oauth2client/contrib/django_util/storage.py @@ -0,0 +1,27 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +from oauth2client.contrib.dictionary_storage import DictionaryStorage + +_CREDENTIALS_KEY = 'google_oauth2_credentials' + + +def get_storage(request): + # TODO(issue 319): Make this pluggable with different storage providers + # https://github.com/google/oauth2client/issues/319 + """ Gets a Credentials storage object for the Django OAuth2 Helper object + :param request: Reference to the current request object + :return: A OAuth2Client Storage implementation based on sessions + """ + return DictionaryStorage(request.session, key=_CREDENTIALS_KEY) diff --git a/oauth2client/contrib/django_util/views.py b/oauth2client/contrib/django_util/views.py new file mode 100644 index 0000000..0d5561c --- /dev/null +++ b/oauth2client/contrib/django_util/views.py @@ -0,0 +1,139 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +import hashlib +import json +import os +import pickle +from django import http +from django.core import urlresolvers +from django import shortcuts +from oauth2client import client +from oauth2client.contrib import django_util +from oauth2client.contrib.django_util import signals +from oauth2client.contrib.django_util import storage + +_CSRF_KEY = 'google_oauth2_csrf_token' +_FLOW_KEY = 'google_oauth2_flow_{0}' + + +def _make_flow(request, scopes, return_url=None): + """Creates a Web Server Flow""" + # Generate a CSRF token to prevent malicious requests. + csrf_token = hashlib.sha256(os.urandom(1024)).hexdigest() + + request.session[_CSRF_KEY] = csrf_token + + state = json.dumps({ + 'csrf_token': csrf_token, + 'return_url': return_url, + }) + + flow = client.OAuth2WebServerFlow( + client_id=django_util.oauth2_settings.client_id, + client_secret=django_util.oauth2_settings.client_secret, + scope=scopes, + state=state, + redirect_uri=request.build_absolute_uri( + urlresolvers.reverse("google_oauth:callback"))) + + flow_key = _FLOW_KEY.format(csrf_token) + request.session[flow_key] = pickle.dumps(flow) + return flow + + +def _get_flow_for_token(csrf_token, request): + """ Looks up the flow in session to recover information about requested + scopes.""" + flow_pickle = request.session.get(_FLOW_KEY.format(csrf_token), None) + return None if flow_pickle is None else pickle.loads(flow_pickle) + + +def oauth2_callback(request): + """ View that handles the user's return from OAuth2 provider. + + This view verifies the CSRF state and OAuth authorization code, and on + success stores the credentials obtained in the storage provider, + and redirects to the return_url specified in the authorize view and + stored in the session. 
+ + :param request: Django request + :return: A redirect response back to the return_url + """ + if 'error' in request.GET: + reason = request.GET.get( + 'error_description', request.GET.get('error', '')) + return http.HttpResponseBadRequest( + 'Authorization failed %s' % reason) + + try: + encoded_state = request.GET['state'] + code = request.GET['code'] + except KeyError: + return http.HttpResponseBadRequest( + "Request missing state or authorization code") + + try: + server_csrf = request.session[_CSRF_KEY] + except KeyError: + return http.HttpResponseBadRequest("No existing session for this flow.") + + try: + state = json.loads(encoded_state) + client_csrf = state['csrf_token'] + return_url = state['return_url'] + except (ValueError, KeyError): + return http.HttpResponseBadRequest('Invalid state parameter.') + + if client_csrf != server_csrf: + return http.HttpResponseBadRequest('Invalid CSRF token.') + + flow = _get_flow_for_token(client_csrf, request) + + if not flow: + return http.HttpResponseBadRequest("Missing Oauth2 flow.") + + try: + credentials = flow.step2_exchange(code) + except client.FlowExchangeError as exchange_error: + return http.HttpResponseBadRequest( + "An error has occurred: {0}".format(exchange_error)) + + storage.get_storage(request).put(credentials) + + signals.oauth2_authorized.send(sender=signals.oauth2_authorized, + request=request, credentials=credentials) + return shortcuts.redirect(return_url) + + +def oauth2_authorize(request): + """ View to start the OAuth2 Authorization flow + + This view starts the OAuth2 authorization flow. If scopes is passed in + as a GET URL parameter, it will authorize those scopes, otherwise the + default scopes specified in settings. The return_url can also be + specified as a GET parameter, otherwise the referer header will be + checked, and if that isn't found it will return to the root path. 
+ + :param request: The Django request object + :return: A redirect to Google OAuth2 Authorization + """ + scopes = request.GET.getlist('scopes', django_util.oauth2_settings.scopes) + return_url = request.GET.get('return_url', None) + + if not return_url: + return_url = request.META.get('HTTP_REFERER', '/') + flow = _make_flow(request=request, scopes=scopes, return_url=return_url) + auth_url = flow.step1_get_authorize_url() + return shortcuts.redirect(auth_url) diff --git a/oauth2client/contrib/flask_util.py b/oauth2client/contrib/flask_util.py new file mode 100644 index 0000000..fc6dd3d --- /dev/null +++ b/oauth2client/contrib/flask_util.py @@ -0,0 +1,556 @@ +# Copyright 2015 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for the Flask web framework + +Provides a Flask extension that makes using OAuth2 web server flow easier. +The extension includes views that handle the entire auth flow and a +``@required`` decorator to automatically ensure that user credentials are +available. + + +Configuration +============= + +To configure, you'll need a set of OAuth2 web application credentials from the +`Google Developer's Console `__. + +.. 
code-block:: python + + from oauth2client.contrib.flask_util import UserOAuth2 + + app = Flask(__name__) + + app.config['SECRET_KEY'] = 'your-secret-key' + + app.config['GOOGLE_OAUTH2_CLIENT_SECRETS_FILE'] = 'client_secrets.json' + + # or, specify the client id and secret separately + app.config['GOOGLE_OAUTH2_CLIENT_ID'] = 'your-client-id' + app.config['GOOGLE_OAUTH2_CLIENT_SECRET'] = 'your-client-secret' + + oauth2 = UserOAuth2(app) + + +Usage +===== + +Once configured, you can use the :meth:`UserOAuth2.required` decorator to +ensure that credentials are available within a view. + +.. code-block:: python + :emphasize-lines: 3,7,10 + + # Note that app.route should be the outermost decorator. + @app.route('/needs_credentials') + @oauth2.required + def example(): + # http is authorized with the user's credentials and can be used + # to make http calls. + http = oauth2.http() + + # Or, you can access the credentials directly + credentials = oauth2.credentials + +If you want credentials to be optional for a view, you can leave the decorator +off and use :meth:`UserOAuth2.has_credentials` to check. + +.. code-block:: python + :emphasize-lines: 3 + + @app.route('/optional') + def optional(): + if oauth2.has_credentials(): + return 'Credentials found!' + else: + return 'No credentials!' + + +When credentials are available, you can use :attr:`UserOAuth2.email` and +:attr:`UserOAuth2.user_id` to access information from the `ID Token +`__, if +available. + +.. 
code-block:: python
+   :emphasize-lines: 4
+
+   @app.route('/info')
+   @oauth2.required
+   def info():
+       return "Hello, {} ({})".format(oauth2.email, oauth2.user_id)
+
+
+URLs & Triggering Authorization
+===============================
+
+The extension will add two new routes to your application:
+
+    * ``"oauth2.authorize"`` -> ``/oauth2authorize``
+    * ``"oauth2.callback"`` -> ``/oauth2callback``
+
+When configuring your OAuth2 credentials on the Google Developer's Console, be
+sure to add ``http[s]://[your-app-url]/oauth2callback`` as an authorized
+callback url.
+
+Typically you don't need to use these routes directly, just be sure to
+decorate any views that require credentials with ``@oauth2.required``. If
+needed, you can trigger authorization at any time by redirecting the user
+to the URL returned by :meth:`UserOAuth2.authorize_url`.
+
+.. code-block:: python
+   :emphasize-lines: 3
+
+   @app.route('/login')
+   def login():
+       return oauth2.authorize_url("/")
+
+
+Incremental Auth
+================
+
+This extension also supports `Incremental Auth `__. To enable it,
+configure the extension with ``include_granted_scopes``.
+
+.. code-block:: python
+
+   oauth2 = UserOAuth2(app, include_granted_scopes=True)
+
+Then specify any additional scopes needed on the decorator, for example:
+
+.. code-block:: python
+   :emphasize-lines: 2,7
+
+   @app.route('/drive')
+   @oauth2.required(scopes=["https://www.googleapis.com/auth/drive"])
+   def requires_drive():
+       ...
+
+   @app.route('/calendar')
+   @oauth2.required(scopes=["https://www.googleapis.com/auth/calendar"])
+   def requires_calendar():
+       ...
+
+The decorator will ensure that the user has authorized all specified scopes
+before allowing them to access the view, and will also ensure that credentials
+do not lose any previously authorized scopes.
+
+
+Storage
+=======
+
+By default, the extension uses a Flask session-based storage solution. This
+means that credentials are only available for the duration of a session.
It +also means that with Flask's default configuration, the credentials will be +visible in the session cookie. It's highly recommended to use database-backed +session and to use https whenever handling user credentials. + +If you need the credentials to be available longer than a user session or +available outside of a request context, you will need to implement your own +:class:`oauth2client.Storage`. +""" + +import hashlib +import json +import os +import pickle +from functools import wraps + +import six.moves.http_client as httplib +import httplib2 + +try: + from flask import Blueprint + from flask import _app_ctx_stack + from flask import current_app + from flask import redirect + from flask import request + from flask import session + from flask import url_for +except ImportError: # pragma: NO COVER + raise ImportError('The flask utilities require flask 0.9 or newer.') + +from oauth2client.client import FlowExchangeError +from oauth2client.client import OAuth2WebServerFlow +from oauth2client.contrib.dictionary_storage import DictionaryStorage +from oauth2client import clientsecrets + + +__author__ = 'jonwayne@google.com (Jon Wayne Parrott)' + +_DEFAULT_SCOPES = ('email',) +_CREDENTIALS_KEY = 'google_oauth2_credentials' +_FLOW_KEY = 'google_oauth2_flow_{0}' +_CSRF_KEY = 'google_oauth2_csrf_token' + + +def _get_flow_for_token(csrf_token): + """Retrieves the flow instance associated with a given CSRF token from + the Flask session.""" + flow_pickle = session.pop( + _FLOW_KEY.format(csrf_token), None) + + if flow_pickle is None: + return None + else: + return pickle.loads(flow_pickle) + + +class UserOAuth2(object): + """Flask extension for making OAuth 2.0 easier. + + Configuration values: + + * ``GOOGLE_OAUTH2_CLIENT_SECRETS_FILE`` path to a client secrets json + file, obtained from the credentials screen in the Google Developers + console. + * ``GOOGLE_OAUTH2_CLIENT_ID`` the oauth2 credentials' client ID. 
This + is only needed if ``GOOGLE_OAUTH2_CLIENT_SECRETS_FILE`` is not + specified. + * ``GOOGLE_OAUTH2_CLIENT_SECRET`` the oauth2 credentials' client + secret. This is only needed if ``GOOGLE_OAUTH2_CLIENT_SECRETS_FILE`` + is not specified. + + If app is specified, all arguments will be passed along to init_app. + + If no app is specified, then you should call init_app in your application + factory to finish initialization. + """ + + def __init__(self, app=None, *args, **kwargs): + self.app = app + if app is not None: + self.init_app(app, *args, **kwargs) + + def init_app(self, app, scopes=None, client_secrets_file=None, + client_id=None, client_secret=None, authorize_callback=None, + storage=None, **kwargs): + """Initialize this extension for the given app. + + Arguments: + app: A Flask application. + scopes: Optional list of scopes to authorize. + client_secrets_file: Path to a file containing client secrets. You + can also specify the GOOGLE_OAUTH2_CLIENT_SECRETS_FILE config + value. + client_id: If not specifying a client secrets file, specify the + OAuth2 client id. You can also specify the + GOOGLE_OAUTH2_CLIENT_ID config value. You must also provide a + client secret. + client_secret: The OAuth2 client secret. You can also specify the + GOOGLE_OAUTH2_CLIENT_SECRET config value. + authorize_callback: A function that is executed after successful + user authorization. + storage: A oauth2client.client.Storage subclass for storing the + credentials. By default, this is a Flask session based storage. + kwargs: Any additional args are passed along to the Flow + constructor. 
+ """ + self.app = app + self.authorize_callback = authorize_callback + self.flow_kwargs = kwargs + + if storage is None: + storage = DictionaryStorage(session, key=_CREDENTIALS_KEY) + self.storage = storage + + if scopes is None: + scopes = app.config.get('GOOGLE_OAUTH2_SCOPES', _DEFAULT_SCOPES) + self.scopes = scopes + + self._load_config(client_secrets_file, client_id, client_secret) + + app.register_blueprint(self._create_blueprint()) + + def _load_config(self, client_secrets_file, client_id, client_secret): + """Loads oauth2 configuration in order of priority. + + Priority: + 1. Config passed to the constructor or init_app. + 2. Config passed via the GOOGLE_OAUTH2_CLIENT_SECRETS_FILE app + config. + 3. Config passed via the GOOGLE_OAUTH2_CLIENT_ID and + GOOGLE_OAUTH2_CLIENT_SECRET app config. + + Raises: + ValueError if no config could be found. + """ + if client_id and client_secret: + self.client_id, self.client_secret = client_id, client_secret + return + + if client_secrets_file: + self._load_client_secrets(client_secrets_file) + return + + if 'GOOGLE_OAUTH2_CLIENT_SECRETS_FILE' in self.app.config: + self._load_client_secrets( + self.app.config['GOOGLE_OAUTH2_CLIENT_SECRETS_FILE']) + return + + try: + self.client_id, self.client_secret = ( + self.app.config['GOOGLE_OAUTH2_CLIENT_ID'], + self.app.config['GOOGLE_OAUTH2_CLIENT_SECRET']) + except KeyError: + raise ValueError( + 'OAuth2 configuration could not be found. 
Either specify the ' + 'client_secrets_file or client_id and client_secret or set ' + 'the app configuration variables ' + 'GOOGLE_OAUTH2_CLIENT_SECRETS_FILE or ' + 'GOOGLE_OAUTH2_CLIENT_ID and GOOGLE_OAUTH2_CLIENT_SECRET.') + + def _load_client_secrets(self, filename): + """Loads client secrets from the given filename.""" + client_type, client_info = clientsecrets.loadfile(filename) + if client_type != clientsecrets.TYPE_WEB: + raise ValueError( + 'The flow specified in {0} is not supported.'.format( + client_type)) + + self.client_id = client_info['client_id'] + self.client_secret = client_info['client_secret'] + + def _make_flow(self, return_url=None, **kwargs): + """Creates a Web Server Flow""" + # Generate a CSRF token to prevent malicious requests. + csrf_token = hashlib.sha256(os.urandom(1024)).hexdigest() + + session[_CSRF_KEY] = csrf_token + + state = json.dumps({ + 'csrf_token': csrf_token, + 'return_url': return_url + }) + + kw = self.flow_kwargs.copy() + kw.update(kwargs) + + extra_scopes = kw.pop('scopes', []) + scopes = set(self.scopes).union(set(extra_scopes)) + + flow = OAuth2WebServerFlow( + client_id=self.client_id, + client_secret=self.client_secret, + scope=scopes, + state=state, + redirect_uri=url_for('oauth2.callback', _external=True), + **kw) + + flow_key = _FLOW_KEY.format(csrf_token) + session[flow_key] = pickle.dumps(flow) + + return flow + + def _create_blueprint(self): + bp = Blueprint('oauth2', __name__) + bp.add_url_rule('/oauth2authorize', 'authorize', self.authorize_view) + bp.add_url_rule('/oauth2callback', 'callback', self.callback_view) + + return bp + + def authorize_view(self): + """Flask view that starts the authorization flow. + + Starts flow by redirecting the user to the OAuth2 provider. + """ + args = request.args.to_dict() + + # Scopes will be passed as mutliple args, and to_dict() will only + # return one. So, we use getlist() to get all of the scopes. 
+ args['scopes'] = request.args.getlist('scopes') + + return_url = args.pop('return_url', None) + if return_url is None: + return_url = request.referrer or '/' + + flow = self._make_flow(return_url=return_url, **args) + auth_url = flow.step1_get_authorize_url() + + return redirect(auth_url) + + def callback_view(self): + """Flask view that handles the user's return from OAuth2 provider. + + On return, exchanges the authorization code for credentials and stores + the credentials. + """ + if 'error' in request.args: + reason = request.args.get( + 'error_description', request.args.get('error', '')) + return ('Authorization failed: {0}'.format(reason), + httplib.BAD_REQUEST) + + try: + encoded_state = request.args['state'] + server_csrf = session[_CSRF_KEY] + code = request.args['code'] + except KeyError: + return 'Invalid request', httplib.BAD_REQUEST + + try: + state = json.loads(encoded_state) + client_csrf = state['csrf_token'] + return_url = state['return_url'] + except (ValueError, KeyError): + return 'Invalid request state', httplib.BAD_REQUEST + + if client_csrf != server_csrf: + return 'Invalid request state', httplib.BAD_REQUEST + + flow = _get_flow_for_token(server_csrf) + + if flow is None: + return 'Invalid request state', httplib.BAD_REQUEST + + # Exchange the auth code for credentials. + try: + credentials = flow.step2_exchange(code) + except FlowExchangeError as exchange_error: + current_app.logger.exception(exchange_error) + content = 'An error occurred: {0}'.format(exchange_error) + return content, httplib.BAD_REQUEST + + # Save the credentials to the storage. 
+ self.storage.put(credentials) + + if self.authorize_callback: + self.authorize_callback(credentials) + + return redirect(return_url) + + @property + def credentials(self): + """The credentials for the current user or None if unavailable.""" + ctx = _app_ctx_stack.top + + if not hasattr(ctx, _CREDENTIALS_KEY): + ctx.google_oauth2_credentials = self.storage.get() + + return ctx.google_oauth2_credentials + + def has_credentials(self): + """Returns True if there are valid credentials for the current user.""" + if not self.credentials: + return False + # Is the access token expired? If so, do we have an refresh token? + elif (self.credentials.access_token_expired + and not self.credentials.refresh_token): + return False + else: + return True + + @property + def email(self): + """Returns the user's email address or None if there are no credentials. + + The email address is provided by the current credentials' id_token. + This should not be used as unique identifier as the user can change + their email. If you need a unique identifier, use user_id. + """ + if not self.credentials: + return None + try: + return self.credentials.id_token['email'] + except KeyError: + current_app.logger.error( + 'Invalid id_token {0}'.format(self.credentials.id_token)) + + @property + def user_id(self): + """Returns the a unique identifier for the user + + Returns None if there are no credentials. + + The id is provided by the current credentials' id_token. + """ + if not self.credentials: + return None + try: + return self.credentials.id_token['sub'] + except KeyError: + current_app.logger.error( + 'Invalid id_token {0}'.format(self.credentials.id_token)) + + def authorize_url(self, return_url, **kwargs): + """Creates a URL that can be used to start the authorization flow. + + When the user is directed to the URL, the authorization flow will + begin. Once complete, the user will be redirected to the specified + return URL. + + Any kwargs are passed into the flow constructor. 
+ """ + return url_for('oauth2.authorize', return_url=return_url, **kwargs) + + def required(self, decorated_function=None, scopes=None, + **decorator_kwargs): + """Decorator to require OAuth2 credentials for a view. + + If credentials are not available for the current user, then they will + be redirected to the authorization flow. Once complete, the user will + be redirected back to the original page. + """ + + def curry_wrapper(wrapped_function): + @wraps(wrapped_function) + def required_wrapper(*args, **kwargs): + return_url = decorator_kwargs.pop('return_url', request.url) + + requested_scopes = set(self.scopes) + if scopes is not None: + requested_scopes |= set(scopes) + if self.has_credentials(): + requested_scopes |= self.credentials.scopes + + requested_scopes = list(requested_scopes) + + # Does the user have credentials and does the credentials have + # all of the needed scopes? + if (self.has_credentials() and + self.credentials.has_scopes(requested_scopes)): + return wrapped_function(*args, **kwargs) + # Otherwise, redirect to authorization + else: + auth_url = self.authorize_url( + return_url, + scopes=requested_scopes, + **decorator_kwargs) + + return redirect(auth_url) + + return required_wrapper + + if decorated_function: + return curry_wrapper(decorated_function) + else: + return curry_wrapper + + def http(self, *args, **kwargs): + """Returns an authorized http instance. + + Can only be called if there are valid credentials for the user, such + as inside of a view that is decorated with @required. + + Args: + *args: Positional arguments passed to httplib2.Http constructor. + **kwargs: Positional arguments passed to httplib2.Http constructor. + + Raises: + ValueError if no credentials are available. 
+ """ + if not self.credentials: + raise ValueError('No credentials available.') + return self.credentials.authorize(httplib2.Http(*args, **kwargs)) diff --git a/oauth2client/contrib/gce.py b/oauth2client/contrib/gce.py new file mode 100644 index 0000000..6542008 --- /dev/null +++ b/oauth2client/contrib/gce.py @@ -0,0 +1,194 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for Google Compute Engine + +Utilities for making it easier to use OAuth 2.0 on Google Compute Engine. +""" + +import json +import logging +import warnings + +import httplib2 +from six.moves import http_client +from six.moves import urllib + +from oauth2client._helpers import _from_bytes +from oauth2client import util +from oauth2client.client import HttpAccessTokenRefreshError +from oauth2client.client import AssertionCredentials + + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' + +logger = logging.getLogger(__name__) + +# URI Template for the endpoint that returns access_tokens. +_METADATA_ROOT = ('http://metadata.google.internal/computeMetadata/v1/' + 'instance/service-accounts/default/') +META = _METADATA_ROOT + 'token' +_DEFAULT_EMAIL_METADATA = _METADATA_ROOT + 'email' +_SCOPES_WARNING = """\ +You have requested explicit scopes to be used with a GCE service account. +Using this argument will have no effect on the actual scopes for tokens +requested. 
_SCOPES_WARNING = """\
You have requested explicit scopes to be used with a GCE service account.
Using this argument will have no effect on the actual scopes for tokens
requested. These scopes are set at VM instance creation time and
can't be overridden in the request.
"""


def _get_service_account_email(http_request=None):
    """Get the GCE service account email from the current environment.

    Args:
        http_request: callable, (Optional) a callable that matches the method
                      signature of httplib2.Http.request, used to make
                      the request to the metadata service.

    Returns:
        tuple, A pair where the first entry is an optional response (from a
        failed request) and the second is service account email found (as
        a string), or the error content on failure.
    """
    if http_request is None:
        http_request = httplib2.Http().request
    # The metadata server rejects requests that lack this header.
    response, content = http_request(
        _DEFAULT_EMAIL_METADATA, headers={'Metadata-Flavor': 'Google'})
    if response.status == http_client.OK:
        content = _from_bytes(content)
        return None, content
    else:
        return response, content


class AppAssertionCredentials(AssertionCredentials):
    """Credentials object for Compute Engine Assertion Grants

    This object will allow a Compute Engine instance to identify itself to
    Google and other OAuth 2.0 servers that can verify assertions. It can be
    used for the purpose of accessing data stored under an account assigned to
    the Compute Engine instance itself.

    This credential does not require a flow to instantiate because it
    represents a two legged flow, and therefore has all of the required
    information to generate and refresh its own access tokens.
    """

    @util.positional(2)
    def __init__(self, scope='', **kwargs):
        """Constructor for AppAssertionCredentials

        Args:
            scope: string or iterable of strings, scope(s) of the credentials
                   being requested. Using this argument will have no effect on
                   the actual scopes for tokens requested. These scopes are
                   set at VM instance creation time and won't change.
        """
        if scope:
            warnings.warn(_SCOPES_WARNING)
        # This is just provided for backwards compatibility, but is not
        # used by this class.
        self.scope = util.scopes_to_string(scope)
        self.kwargs = kwargs

        # Assertion type is no longer used, but still in the
        # parent class signature.
        super(AppAssertionCredentials, self).__init__(None)
        # Lazily populated by the service_account_email property on first
        # access; None means "not fetched yet".
        self._service_account_email = None

    @classmethod
    def from_json(cls, json_data):
        # NOTE(review): only 'scope' survives a serialize/deserialize round
        # trip; any extra kwargs given to the original constructor are lost.
        data = json.loads(_from_bytes(json_data))
        return AppAssertionCredentials(data['scope'])

    def _refresh(self, http_request):
        """Refreshes the access_token.

        Skip all the storage hoops and just refresh using the API.

        Args:
            http_request: callable, a callable that matches the method
                          signature of httplib2.Http.request, used to make
                          the refresh request.

        Raises:
            HttpAccessTokenRefreshError: When the refresh fails.
        """
        response, content = http_request(
            META, headers={'Metadata-Flavor': 'Google'})
        content = _from_bytes(content)
        if response.status == http_client.OK:
            try:
                token_content = json.loads(content)
            except Exception as e:
                # A 200 with a malformed body is still a failed refresh;
                # surface it as a refresh error rather than a JSON error.
                raise HttpAccessTokenRefreshError(str(e),
                                                  status=response.status)
            self.access_token = token_content['access_token']
        else:
            if response.status == http_client.NOT_FOUND:
                content += (' This can occur if a VM was created'
                            ' with no service account or scopes.')
            raise HttpAccessTokenRefreshError(content, status=response.status)

    @property
    def serialization_data(self):
        # Metadata-server tokens carry no key material that could be
        # serialized and restored later.
        raise NotImplementedError(
            'Cannot serialize credentials for GCE service accounts.')

    def create_scoped_required(self):
        # Scopes are fixed at VM creation time, so re-scoping never applies.
        return False

    def create_scoped(self, scopes):
        return AppAssertionCredentials(scopes, **self.kwargs)

    def sign_blob(self, blob):
        """Cryptographically sign a blob (of bytes).

        This method is provided to support a common interface, but
        the actual key used for a Google Compute Engine service account
        is not available, so it can't be used to sign content.

        Args:
            blob: bytes, Message to be signed.

        Raises:
            NotImplementedError, always.
        """
        raise NotImplementedError(
            'Compute Engine service accounts cannot sign blobs')

    @property
    def service_account_email(self):
        """Get the email for the current service account.

        Uses the Google Compute Engine metadata service to retrieve the email
        of the default service account.

        Returns:
            string, The email associated with the Google Compute Engine
            service account.

        Raises:
            AttributeError, if the email can not be retrieved from the Google
            Compute Engine metadata service.
        """
        if self._service_account_email is None:
            # Cache the first successful lookup for the process lifetime;
            # failures are not cached, so later accesses retry.
            failure, email = _get_service_account_email()
            if failure is None:
                self._service_account_email = email
            else:
                raise AttributeError('Failed to retrieve the email from the '
                                     'Google Compute Engine metadata service',
                                     failure, email)
        return self._service_account_email
"""A keyring based Storage.

A Storage for Credentials that uses the keyring module.
"""

import threading

import keyring

from oauth2client.client import Credentials
from oauth2client.client import Storage as BaseStorage


__author__ = 'jcgregorio@google.com (Joe Gregorio)'


class Storage(BaseStorage):
    """Store and retrieve a single credential to and from the keyring.

    To use this module you must have the keyring module installed. See
    <http://pypi.python.org/pypi/keyring>. This is an optional module and is
    not installed with oauth2client by default because it does not work on all
    the platforms that oauth2client supports, such as Google App Engine.

    The keyring module <http://pypi.python.org/pypi/keyring> is a
    cross-platform library for access the keyring capabilities of the local
    system. The user will be prompted for their keyring password when this
    module is used, and the manner in which the user is prompted will vary per
    platform.

    Usage::

        from oauth2client.contrib.keyring_storage import Storage

        s = Storage('name_of_application', 'user1')
        credentials = s.get()

    """

    def __init__(self, service_name, user_name):
        """Constructor.

        Args:
            service_name: string, The name of the service under which the
                          credentials are stored.
            user_name: string, The name of the user to store credentials for.
        """
        super(Storage, self).__init__(lock=threading.Lock())
        self._service_name = service_name
        self._user_name = user_name

    def locked_get(self):
        """Retrieve Credential from the keyring.

        Returns:
            oauth2client.client.Credentials, or None if no (parsable)
            credential is stored under this service/user pair.
        """
        credentials = None
        content = keyring.get_password(self._service_name, self._user_name)

        if content is not None:
            try:
                credentials = Credentials.new_from_json(content)
                credentials.set_store(self)
            except ValueError:
                # Unparsable content (e.g. the empty string written by
                # locked_delete) is treated as "no credentials".
                pass

        return credentials

    def locked_put(self, credentials):
        """Write Credentials to the keyring.

        Args:
            credentials: Credentials, the credentials to store.
        """
        keyring.set_password(self._service_name, self._user_name,
                             credentials.to_json())

    def locked_delete(self):
        """Delete Credentials from the keyring.

        The entry is overwritten with an empty string rather than removed;
        locked_get treats the unparsable empty value as missing credentials.
        """
        keyring.set_password(self._service_name, self._user_name, '')
"""Locked file interface that should work on Unix and Windows pythons.

This module first tries to use fcntl locking to ensure serialized access
to a file, then falls back on a lock file if that is unavailable.

Usage::

    f = LockedFile('filename', 'r+b', 'rb')
    f.open_and_lock()
    if f.is_locked():
        print('Acquired filename with r+b mode')
        f.file_handle().write('locked data')
    else:
        print('Acquired filename with rb mode')
    f.unlock_and_close()

"""

from __future__ import print_function

import errno
import logging
import os
import time

from oauth2client import util


__author__ = 'cache@google.com (David T McWherter)'

logger = logging.getLogger(__name__)


class CredentialsFileSymbolicLinkError(Exception):
    """Credentials files must not be symbolic links."""


class AlreadyLockedException(Exception):
    """Trying to lock a file that has already been locked by the LockedFile."""
    pass


def validate_file(filename):
    # Refuse symlinked credential files: following a symlink could redirect
    # reads/writes to a path the caller did not intend.
    if os.path.islink(filename):
        raise CredentialsFileSymbolicLinkError(
            'File: %s is a symbolic link.' % filename)


class _Opener(object):
    """Base class for different locking primitives."""

    def __init__(self, filename, mode, fallback_mode):
        """Create an Opener.

        Args:
            filename: string, The pathname of the file.
            mode: string, The preferred mode to access the file with.
            fallback_mode: string, The mode to use if locking fails.
        """
        self._locked = False
        self._filename = filename
        self._mode = mode
        self._fallback_mode = fallback_mode
        self._fh = None
        self._lock_fd = None

    def is_locked(self):
        """Was the file locked."""
        return self._locked

    def file_handle(self):
        """The file handle to the file. Valid only after opened."""
        return self._fh

    def filename(self):
        """The filename that is being locked."""
        return self._filename

    def open_and_lock(self, timeout, delay):
        """Open the file and lock it.

        Subclasses implement the actual locking strategy; the base class is
        a no-op.

        Args:
            timeout: float, How long to try to lock for.
            delay: float, How long to wait between retries.
        """
        pass

    def unlock_and_close(self):
        """Unlock and close the file."""
        pass


class _PosixOpener(_Opener):
    """Lock files using Posix advisory lock files."""

    def open_and_lock(self, timeout, delay):
        """Open the file and lock it.

        Tries to create a .lock file next to the file we're trying to open.

        Args:
            timeout: float, How long to try to lock for.
            delay: float, How long to wait between retries.

        Raises:
            AlreadyLockedException: if the lock is already acquired.
            IOError: if the open fails.
            CredentialsFileSymbolicLinkError if the file is a symbolic link.
        """
        if self._locked:
            raise AlreadyLockedException('File %s is already locked' %
                                         self._filename)
        self._locked = False

        validate_file(self._filename)
        try:
            self._fh = open(self._filename, self._mode)
        except IOError as e:
            # If we can't access with _mode, try _fallback_mode and don't lock.
            if e.errno == errno.EACCES:
                self._fh = open(self._filename, self._fallback_mode)
                return

        lock_filename = self._posix_lockfile(self._filename)
        start_time = time.time()
        while True:
            try:
                # O_CREAT | O_EXCL makes the creation atomic: exactly one
                # process can create the .lock file at a time.
                self._lock_fd = os.open(lock_filename,
                                        os.O_CREAT | os.O_EXCL | os.O_RDWR)
                self._locked = True
                break

            except OSError as e:
                if e.errno != errno.EEXIST:
                    raise
                if (time.time() - start_time) >= timeout:
                    logger.warn('Could not acquire lock %s in %s seconds',
                                lock_filename, timeout)
                    # Close the file and open in fallback_mode.
                    if self._fh:
                        self._fh.close()
                    self._fh = open(self._filename, self._fallback_mode)
                    return
                time.sleep(delay)

    def unlock_and_close(self):
        """Unlock a file by removing the .lock file, and close the handle."""
        if self._locked:
            lock_filename = self._posix_lockfile(self._filename)
            os.close(self._lock_fd)
            os.unlink(lock_filename)
            self._locked = False
            self._lock_fd = None
        if self._fh:
            self._fh.close()

    def _posix_lockfile(self, filename):
        """The name of the lock file to use for posix locking."""
        return '%s.lock' % filename


class LockedFile(object):
    """Represent a file that has exclusive access."""

    @util.positional(4)
    def __init__(self, filename, mode, fallback_mode, use_native_locking=True):
        """Construct a LockedFile.

        Args:
            filename: string, The path of the file to open.
            mode: string, The mode to try to open the file with.
            fallback_mode: string, The mode to use if locking fails.
            use_native_locking: bool, Whether or not fcntl/win32 locking is
                                used.
        """
        opener = None
        # Prefer native locking (win32, then fcntl); fall back to the
        # portable .lock-file scheme if neither platform module imports.
        if not opener and use_native_locking:
            try:
                from oauth2client.contrib._win32_opener import _Win32Opener
                opener = _Win32Opener(filename, mode, fallback_mode)
            except ImportError:
                try:
                    from oauth2client.contrib._fcntl_opener import _FcntlOpener
                    opener = _FcntlOpener(filename, mode, fallback_mode)
                except ImportError:
                    pass

        if not opener:
            opener = _PosixOpener(filename, mode, fallback_mode)

        self._opener = opener

    def filename(self):
        """Return the filename we were constructed with."""
        return self._opener._filename

    def file_handle(self):
        """Return the file_handle to the opened file."""
        return self._opener.file_handle()

    def is_locked(self):
        """Return whether we successfully locked the file."""
        return self._opener.is_locked()

    def open_and_lock(self, timeout=0, delay=0.05):
        """Open the file, trying to lock it.

        Args:
            timeout: float, The number of seconds to try to acquire the lock.
            delay: float, The number of seconds to wait between retry attempts.

        Raises:
            AlreadyLockedException: if the lock is already acquired.
            IOError: if the open fails.
        """
        self._opener.open_and_lock(timeout, delay)

    def unlock_and_close(self):
        """Unlock and close a file."""
        self._opener.unlock_and_close()
 * If two processes or threads attempt to refresh concurrently, only one
   will be able to acquire the lock and refresh, with the deadlock caveat
   below.
 * The interprocess lock will not deadlock; instead, if a process cannot
   acquire the interprocess lock within ``INTERPROCESS_LOCK_DEADLINE``
   it will allow refreshing the credential but will not write the updated
   credential to disk. This logic happens during every lock cycle - if the
   credentials are refreshed again it will retry locking and writing as
   normal.

Usage
=====

Before using the storage, you need to decide how you want to key the
credentials. A few common strategies include:

    * If you're storing credentials for multiple users in a single file, use
      a unique identifier for each user as the key.
    * If you're storing credentials for multiple client IDs in a single file,
      use the client ID as the key.
    * If you're storing multiple credentials for one user, use the scopes as
      the key.
    * If you have a complicated setup, use a compound key. For example, you
      can use a combination of the client ID and scopes as the key.

Create an instance of :class:`MultiprocessFileStorage` for each credential you
want to store, for example::

    filename = 'credentials'
    key = '{}-{}'.format(client_id, user_id)
    storage = MultiprocessFileStorage(filename, key)

To store the credentials::

    storage.put(credentials)

If you're going to continue to use the credentials after storing them, be sure
to call :func:`set_store`::

    credentials.set_store(storage)

To retrieve the credentials::

    storage.get(credentials)

"""

import base64
import json
import logging
import os
import threading

import fasteners
from six import iteritems

from oauth2client import _helpers
from oauth2client import client


#: The maximum amount of time, in seconds, to wait when acquiring the
#: interprocess lock before falling back to read-only mode.
+INTERPROCESS_LOCK_DEADLINE = 1 + +logger = logging.getLogger(__name__) +_backends = {} +_backends_lock = threading.Lock() + + +def _create_file_if_needed(filename): + """Creates the an empty file if it does not already exist. + + Returns: + True if the file was created, False otherwise. + """ + if os.path.exists(filename): + return False + else: + # Equivalent to "touch". + open(filename, 'a+b').close() + logger.info('Credential file {0} created'.format(filename)) + return True + + +def _load_credentials_file(credentials_file): + """Load credentials from the given file handle. + + The file is expected to be in this format: + + { + "file_version": 2, + "credentials": { + "key": "base64 encoded json representation of credentials." + } + } + + This function will warn and return empty credentials instead of raising + exceptions. + + Args: + credentials_file: An open file handle. + + Returns: + A dictionary mapping user-defined keys to an instance of + :class:`oauth2client.client.Credentials`. + """ + try: + credentials_file.seek(0) + data = json.load(credentials_file) + except Exception: + logger.warning( + 'Credentials file could not be loaded, will ignore and ' + 'overwrite.') + return {} + + if data.get('file_version') != 2: + logger.warning( + 'Credentials file is not version 2, will ignore and ' + 'overwrite.') + return {} + + credentials = {} + + for key, encoded_credential in iteritems(data.get('credentials', {})): + try: + credential_json = base64.b64decode(encoded_credential) + credential = client.Credentials.new_from_json(credential_json) + credentials[key] = credential + except: + logger.warning( + 'Invalid credential {0} in file, ignoring.'.format(key)) + + return credentials + + +def _write_credentials_file(credentials_file, credentials): + """Writes credentials to a file. + + Refer to :func:`_load_credentials_file` for the format. + + Args: + credentials_file: An open file handle, must be read/write. 
+ credentials: A dictionary mapping user-defined keys to an instance of + :class:`oauth2client.client.Credentials`. + """ + data = {'file_version': 2, 'credentials': {}} + + for key, credential in iteritems(credentials): + credential_json = credential.to_json() + encoded_credential = _helpers._from_bytes(base64.b64encode( + _helpers._to_bytes(credential_json))) + data['credentials'][key] = encoded_credential + + credentials_file.seek(0) + json.dump(data, credentials_file) + credentials_file.truncate() + + +class _MultiprocessStorageBackend(object): + """Thread-local backend for multiprocess storage. + + Each process has only one instance of this backend per file. All threads + share a single instance of this backend. This ensures that all threads + use the same thread lock and process lock when accessing the file. + """ + + def __init__(self, filename): + self._file = None + self._filename = filename + self._process_lock = fasteners.InterProcessLock( + '{0}.lock'.format(filename)) + self._thread_lock = threading.Lock() + self._read_only = False + self._credentials = {} + + def _load_credentials(self): + """(Re-)loads the credentials from the file.""" + if not self._file: + return + + loaded_credentials = _load_credentials_file(self._file) + self._credentials.update(loaded_credentials) + + logger.debug('Read credential file') + + def _write_credentials(self): + if self._read_only: + logger.debug('In read-only mode, not writing credentials.') + return + + _write_credentials_file(self._file, self._credentials) + logger.debug('Wrote credential file {0}.'.format(self._filename)) + + def acquire_lock(self): + self._thread_lock.acquire() + locked = self._process_lock.acquire(timeout=INTERPROCESS_LOCK_DEADLINE) + + if locked: + _create_file_if_needed(self._filename) + self._file = open(self._filename, 'r+') + self._read_only = False + + else: + logger.warn( + 'Failed to obtain interprocess lock for credentials. 
' + 'If a credential is being refreshed, other processes may ' + 'not see the updated access token and refresh as well.') + if os.path.exists(self._filename): + self._file = open(self._filename, 'r') + else: + self._file = None + self._read_only = True + + self._load_credentials() + + def release_lock(self): + if self._file is not None: + self._file.close() + self._file = None + + if not self._read_only: + self._process_lock.release() + + self._thread_lock.release() + + def _refresh_predicate(self, credentials): + if credentials is None: + return True + elif credentials.invalid: + return True + elif credentials.access_token_expired: + return True + else: + return False + + def locked_get(self, key): + # Check if the credential is already in memory. + credentials = self._credentials.get(key, None) + + # Use the refresh predicate to determine if the entire store should be + # reloaded. This basically checks if the credentials are invalid + # or expired. This covers the situation where another process has + # refreshed the credentials and this process doesn't know about it yet. + # In that case, this process won't needlessly refresh the credentials. + if self._refresh_predicate(credentials): + self._load_credentials() + credentials = self._credentials.get(key, None) + + return credentials + + def locked_put(self, key, credentials): + self._load_credentials() + self._credentials[key] = credentials + self._write_credentials() + + def locked_delete(self, key): + self._load_credentials() + self._credentials.pop(key, None) + self._write_credentials() + + +def _get_backend(filename): + """A helper method to get or create a backend with thread locking. + + This ensures that only one backend is used per-file per-process, so that + thread and process locks are appropriately shared. + + Args: + filename: The full path to the credential storage file. + + Returns: + An instance of :class:`_MultiprocessStorageBackend`. 
+ """ + filename = os.path.abspath(filename) + + with _backends_lock: + if filename not in _backends: + _backends[filename] = _MultiprocessStorageBackend(filename) + return _backends[filename] + + +class MultiprocessFileStorage(client.Storage): + """Multiprocess file credential storage. + + Args: + filename: The path to the file where credentials will be stored. + key: An arbitrary string used to uniquely identify this set of + credentials. For example, you may use the user's ID as the key or + a combination of the client ID and user ID. + """ + def __init__(self, filename, key): + self._key = key + self._backend = _get_backend(filename) + + def acquire_lock(self): + self._backend.acquire_lock() + + def release_lock(self): + self._backend.release_lock() + + def locked_get(self): + """Retrieves the current credentials from the store. + + Returns: + An instance of :class:`oauth2client.client.Credentials` or `None`. + """ + credential = self._backend.locked_get(self._key) + + if credential is not None: + credential.set_store(self) + + return credential + + def locked_put(self, credentials): + """Writes the given credentials to the store. + + Args: + credentials: an instance of + :class:`oauth2client.client.Credentials`. + """ + return self._backend.locked_put(self._key, credentials) + + def locked_delete(self): + """Deletes the current credentials from the store.""" + return self._backend.locked_delete(self._key) diff --git a/oauth2client/contrib/multistore_file.py b/oauth2client/contrib/multistore_file.py new file mode 100644 index 0000000..879f3b6 --- /dev/null +++ b/oauth2client/contrib/multistore_file.py @@ -0,0 +1,502 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
"""Multi-credential file store with lock support.

This module implements a JSON credential store where multiple
credentials can be stored in one file. That file supports locking
both in a single process and across processes.

The credential themselves are keyed off of:

* client_id
* user_agent
* scope

The format of the stored data is like so::

    {
      'file_version': 1,
      'data': [
          {
              'key': {
                  'clientId': '<client id>',
                  'userAgent': '<user agent>',
                  'scope': '<scope>'
              },
              'credential': {
                  # JSON serialized Credentials.
              }
          }
      ]
    }

"""

import errno
import json
import logging
import os
import threading

from oauth2client.client import Credentials
from oauth2client.client import Storage as BaseStorage
from oauth2client import util
from oauth2client.contrib.locked_file import LockedFile


__author__ = 'jbeda@google.com (Joe Beda)'

logger = logging.getLogger(__name__)

# A dict from 'filename'->_MultiStore instances
_multistores = {}
_multistores_lock = threading.Lock()


class Error(Exception):
    """Base error for this module."""


class NewerCredentialStoreError(Error):
    """The credential store is a newer version than supported."""


def _dict_to_tuple_key(dictionary):
    """Converts a dictionary to a tuple that can be used as an immutable key.

    The resulting key is always sorted so that logically equivalent
    dictionaries always produce an identical tuple for a key.

    Args:
        dictionary: the dictionary to use as the key.

    Returns:
        A tuple representing the dictionary in its naturally sorted ordering.
    """
    return tuple(sorted(dictionary.items()))


@util.positional(4)
def get_credential_storage(filename, client_id, user_agent, scope,
                           warn_on_readonly=True):
    """Get a Storage instance for a credential.

    Args:
        filename: The JSON file storing a set of credentials
        client_id: The client_id for the credential
        user_agent: The user agent for the credential
        scope: string or iterable of strings, Scope(s) being requested
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        An object derived from client.Storage for getting/setting the
        credential.
    """
    # Recreate the legacy key with these specific parameters
    key = {'clientId': client_id, 'userAgent': user_agent,
           'scope': util.scopes_to_string(scope)}
    return get_credential_storage_custom_key(
        filename, key, warn_on_readonly=warn_on_readonly)


@util.positional(2)
def get_credential_storage_custom_string_key(filename, key_string,
                                             warn_on_readonly=True):
    """Get a Storage instance for a credential using a single string as a key.

    Allows you to provide a string as a custom key that will be used for
    credential storage and retrieval.

    Args:
        filename: The JSON file storing a set of credentials
        key_string: A string to use as the key for storing this credential.
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        An object derived from client.Storage for getting/setting the
        credential.
    """
    # Create a key dictionary that can be used
    key_dict = {'key': key_string}
    return get_credential_storage_custom_key(
        filename, key_dict, warn_on_readonly=warn_on_readonly)


@util.positional(2)
def get_credential_storage_custom_key(filename, key_dict,
                                      warn_on_readonly=True):
    """Get a Storage instance for a credential using a dictionary as a key.

    Allows you to provide a dictionary as a custom key that will be used for
    credential storage and retrieval.

    Args:
        filename: The JSON file storing a set of credentials
        key_dict: A dictionary to use as the key for storing this credential.
                  There is no ordering of the keys in the dictionary. Logically
                  equivalent dictionaries will produce equivalent storage keys.
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        An object derived from client.Storage for getting/setting the
        credential.
    """
    multistore = _get_multistore(filename, warn_on_readonly=warn_on_readonly)
    key = _dict_to_tuple_key(key_dict)
    return multistore._get_storage(key)


@util.positional(1)
def get_all_credential_keys(filename, warn_on_readonly=True):
    """Gets all the registered credential keys in the given Multistore.

    Args:
        filename: The JSON file storing a set of credentials
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        A list of the credential keys present in the file. They are returned
        as dictionaries that can be passed into
        get_credential_storage_custom_key to get the actual credentials.
    """
    multistore = _get_multistore(filename, warn_on_readonly=warn_on_readonly)
    # The multistore lock must be held while reading the key list.
    multistore._lock()
    try:
        return multistore._get_all_credential_keys()
    finally:
        multistore._unlock()


@util.positional(1)
def _get_multistore(filename, warn_on_readonly=True):
    """A helper method to initialize the multistore with proper locking.

    Args:
        filename: The JSON file storing a set of credentials
        warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
        A multistore object
    """
    filename = os.path.expanduser(filename)
    _multistores_lock.acquire()
    try:
        # setdefault keeps exactly one _MultiStore per filename per process.
        multistore = _multistores.setdefault(
            filename, _MultiStore(filename, warn_on_readonly=warn_on_readonly))
    finally:
        _multistores_lock.release()
    return multistore
+ + Args: + filename: The JSON file storing a set of credentials + warn_on_readonly: if True, log a warning if the store is readonly + + Returns: + A multistore object + """ + filename = os.path.expanduser(filename) + _multistores_lock.acquire() + try: + multistore = _multistores.setdefault( + filename, _MultiStore(filename, warn_on_readonly=warn_on_readonly)) + finally: + _multistores_lock.release() + return multistore + + +class _MultiStore(object): + """A file backed store for multiple credentials.""" + + @util.positional(2) + def __init__(self, filename, warn_on_readonly=True): + """Initialize the class. + + This will create the file if necessary. + """ + self._file = LockedFile(filename, 'r+', 'r') + self._thread_lock = threading.Lock() + self._read_only = False + self._warn_on_readonly = warn_on_readonly + + self._create_file_if_needed() + + # Cache of deserialized store. This is only valid after the + # _MultiStore is locked or _refresh_data_cache is called. This is + # of the form of: + # + # ((key, value), (key, value)...) -> OAuth2Credential + # + # If this is None, then the store hasn't been read yet. + self._data = None + + class _Storage(BaseStorage): + """A Storage object that can read/write a single credential.""" + + def __init__(self, multistore, key): + self._multistore = multistore + self._key = key + + def acquire_lock(self): + """Acquires any lock necessary to access this Storage. + + This lock is not reentrant. + """ + self._multistore._lock() + + def release_lock(self): + """Release the Storage lock. + + Trying to release a lock that isn't held will result in a + RuntimeError. + """ + self._multistore._unlock() + + def locked_get(self): + """Retrieve credential. + + The Storage lock must be held when this is called. 
+ + Returns: + oauth2client.client.Credentials + """ + credential = self._multistore._get_credential(self._key) + if credential: + credential.set_store(self) + return credential + + def locked_put(self, credentials): + """Write a credential. + + The Storage lock must be held when this is called. + + Args: + credentials: Credentials, the credentials to store. + """ + self._multistore._update_credential(self._key, credentials) + + def locked_delete(self): + """Delete a credential. + + The Storage lock must be held when this is called. + + Args: + credentials: Credentials, the credentials to store. + """ + self._multistore._delete_credential(self._key) + + def _create_file_if_needed(self): + """Create an empty file if necessary. + + This method will not initialize the file. Instead it implements a + simple version of "touch" to ensure the file has been created. + """ + if not os.path.exists(self._file.filename()): + old_umask = os.umask(0o177) + try: + open(self._file.filename(), 'a+b').close() + finally: + os.umask(old_umask) + + def _lock(self): + """Lock the entire multistore.""" + self._thread_lock.acquire() + try: + self._file.open_and_lock() + except (IOError, OSError) as e: + if e.errno == errno.ENOSYS: + logger.warn('File system does not support locking the ' + 'credentials file.') + elif e.errno == errno.ENOLCK: + logger.warn('File system is out of resources for writing the ' + 'credentials file (is your disk full?).') + elif e.errno == errno.EDEADLK: + logger.warn('Lock contention on multistore file, opening ' + 'in read-only mode.') + elif e.errno == errno.EACCES: + logger.warn('Cannot access credentials file.') + else: + raise + if not self._file.is_locked(): + self._read_only = True + if self._warn_on_readonly: + logger.warn('The credentials file (%s) is not writable. ' + 'Opening in read-only mode. 
Any refreshed ' + 'credentials will only be ' + 'valid for this run.', self._file.filename()) + + if os.path.getsize(self._file.filename()) == 0: + logger.debug('Initializing empty multistore file') + # The multistore is empty so write out an empty file. + self._data = {} + self._write() + elif not self._read_only or self._data is None: + # Only refresh the data if we are read/write or we haven't + # cached the data yet. If we are readonly, we assume is isn't + # changing out from under us and that we only have to read it + # once. This prevents us from whacking any new access keys that + # we have cached in memory but were unable to write out. + self._refresh_data_cache() + + def _unlock(self): + """Release the lock on the multistore.""" + self._file.unlock_and_close() + self._thread_lock.release() + + def _locked_json_read(self): + """Get the raw content of the multistore file. + + The multistore must be locked when this is called. + + Returns: + The contents of the multistore decoded as JSON. + """ + assert self._thread_lock.locked() + self._file.file_handle().seek(0) + return json.load(self._file.file_handle()) + + def _locked_json_write(self, data): + """Write a JSON serializable data structure to the multistore. + + The multistore must be locked when this is called. + + Args: + data: The data to be serialized and written. + """ + assert self._thread_lock.locked() + if self._read_only: + return + self._file.file_handle().seek(0) + json.dump(data, self._file.file_handle(), + sort_keys=True, indent=2, separators=(',', ': ')) + self._file.file_handle().truncate() + + def _refresh_data_cache(self): + """Refresh the contents of the multistore. + + The multistore must be locked when this is called. + + Raises: + NewerCredentialStoreError: Raised when a newer client has written + the store. + """ + self._data = {} + try: + raw_data = self._locked_json_read() + except Exception: + logger.warn('Credential data store could not be loaded. 
' + 'Will ignore and overwrite.') + return + + version = 0 + try: + version = raw_data['file_version'] + except Exception: + logger.warn('Missing version for credential data store. It may be ' + 'corrupt or an old version. Overwriting.') + if version > 1: + raise NewerCredentialStoreError( + 'Credential file has file_version of %d. ' + 'Only file_version of 1 is supported.' % version) + + credentials = [] + try: + credentials = raw_data['data'] + except (TypeError, KeyError): + pass + + for cred_entry in credentials: + try: + key, credential = self._decode_credential_from_json(cred_entry) + self._data[key] = credential + except: + # If something goes wrong loading a credential, just ignore it + logger.info('Error decoding credential, skipping', + exc_info=True) + + def _decode_credential_from_json(self, cred_entry): + """Load a credential from our JSON serialization. + + Args: + cred_entry: A dict entry from the data member of our format + + Returns: + (key, cred) where the key is the key tuple and the cred is the + OAuth2Credential object. + """ + raw_key = cred_entry['key'] + key = _dict_to_tuple_key(raw_key) + credential = None + credential = Credentials.new_from_json( + json.dumps(cred_entry['credential'])) + return (key, credential) + + def _write(self): + """Write the cached data back out. + + The multistore must be locked. + """ + raw_data = {'file_version': 1} + raw_creds = [] + raw_data['data'] = raw_creds + for (cred_key, cred) in self._data.items(): + raw_key = dict(cred_key) + raw_cred = json.loads(cred.to_json()) + raw_creds.append({'key': raw_key, 'credential': raw_cred}) + self._locked_json_write(raw_data) + + def _get_all_credential_keys(self): + """Gets all the registered credential keys in the multistore. + + Returns: + A list of dictionaries corresponding to all the keys currently + registered + """ + return [dict(key) for key in self._data.keys()] + + def _get_credential(self, key): + """Get a credential from the multistore. 
+ + The multistore must be locked. + + Args: + key: The key used to retrieve the credential + + Returns: + The credential specified or None if not present + """ + return self._data.get(key, None) + + def _update_credential(self, key, cred): + """Update a credential and write the multistore. + + This must be called when the multistore is locked. + + Args: + key: The key used to retrieve the credential + cred: The OAuth2Credential to update/set + """ + self._data[key] = cred + self._write() + + def _delete_credential(self, key): + """Delete a credential and write the multistore. + + This must be called when the multistore is locked. + + Args: + key: The key used to retrieve the credential + """ + try: + del self._data[key] + except KeyError: + pass + self._write() + + def _get_storage(self, key): + """Get a Storage object to get/set a credential. + + This Storage is a 'view' into the multistore. + + Args: + key: The key used to retrieve the credential + + Returns: + A Storage object that can be used to get/set this cred + """ + return self._Storage(self, key) diff --git a/oauth2client/contrib/sqlalchemy.py b/oauth2client/contrib/sqlalchemy.py new file mode 100644 index 0000000..7d9fd4b --- /dev/null +++ b/oauth2client/contrib/sqlalchemy.py @@ -0,0 +1,173 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""OAuth 2.0 utilities for SQLAlchemy. + +Utilities for using OAuth 2.0 in conjunction with a SQLAlchemy. 
+ +Configuration +============= + +In order to use this storage, you'll need to create table +with :class:`oauth2client.contrib.sqlalchemy.CredentialsType` column. +It's recommended to either put this column on some sort of user info +table or put the column in a table with a belongs-to relationship to +a user info table. + +Here's an example of a simple table with a :class:`CredentialsType` +column that's related to a user table by the `user_id` key. + +.. code-block:: python + + from sqlalchemy import Column, ForeignKey, Integer + from sqlalchemy.ext.declarative import declarative_base + from sqlalchemy.orm import relationship + + from oauth2client.contrib.sqlalchemy import CredentialsType + + + Base = declarative_base() + + + class Credentials(Base): + __tablename__ = 'credentials' + + user_id = Column(Integer, ForeignKey('user.id')) + credentials = Column(CredentialsType) + + + class User(Base): + id = Column(Integer, primary_key=True) + # bunch of other columns + credentials = relationship('Credentials') + + +Usage +===== + +With tables ready, you are now able to store credentials in database. +We will reuse tables defined above. + +.. code-block:: python + + from sqlalchemy.orm import Session + + from oauth2client.client import OAuth2Credentials + from oauth2client.contrib.sql_alchemy import Storage + + session = Session() + user = session.query(User).first() + storage = Storage( + session=session, + model_class=Credentials, + # This is the key column used to identify + # the row that stores the credentials. + key_name='user_id', + key_value=user.id, + property_name='credentials', + ) + + # Store + credentials = OAuth2Credentials(...) + storage.put(credentials) + + # Retrieve + credentials = storage.get() + + # Delete + storage.delete() + +""" + +from __future__ import absolute_import + +import sqlalchemy.types + +from oauth2client import client + + +class CredentialsType(sqlalchemy.types.PickleType): + """Type representing credentials. 
+ + Alias for :class:`sqlalchemy.types.PickleType`. + """ + + +class Storage(client.Storage): + """Store and retrieve a single credential to and from SQLAlchemy. + This helper presumes the Credentials + have been stored as a Credentials column + on a db model class. + """ + + def __init__(self, session, model_class, key_name, + key_value, property_name): + """Constructor for Storage. + + Args: + session: An instance of :class:`sqlalchemy.orm.Session`. + model_class: SQLAlchemy declarative mapping. + key_name: string, key name for the entity that has the credentials + key_value: key value for the entity that has the credentials + property_name: A string indicating which property on the + ``model_class`` to store the credentials. + This property must be a + :class:`CredentialsType` column. + """ + super(Storage, self).__init__() + + self.session = session + self.model_class = model_class + self.key_name = key_name + self.key_value = key_value + self.property_name = property_name + + def locked_get(self): + """Retrieve stored credential. + + Returns: + A :class:`oauth2client.Credentials` instance or `None`. + """ + filters = {self.key_name: self.key_value} + query = self.session.query(self.model_class).filter_by(**filters) + entity = query.first() + + if entity: + credential = getattr(entity, self.property_name) + if credential and hasattr(credential, 'set_store'): + credential.set_store(self) + return credential + else: + return None + + def locked_put(self, credentials): + """Write a credentials to the SQLAlchemy datastore. 
+ + Args: + credentials: :class:`oauth2client.Credentials` + """ + filters = {self.key_name: self.key_value} + query = self.session.query(self.model_class).filter_by(**filters) + entity = query.first() + + if not entity: + entity = self.model_class(**filters) + + setattr(entity, self.property_name, credentials) + self.session.add(entity) + + def locked_delete(self): + """Delete credentials from the SQLAlchemy datastore.""" + filters = {self.key_name: self.key_value} + self.session.query(self.model_class).filter_by(**filters).delete() diff --git a/oauth2client/contrib/xsrfutil.py b/oauth2client/contrib/xsrfutil.py new file mode 100644 index 0000000..20e728e --- /dev/null +++ b/oauth2client/contrib/xsrfutil.py @@ -0,0 +1,106 @@ +# Copyright 2014 the Melange authors. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Helper methods for creating & verifying XSRF tokens.""" + +import base64 +import binascii +import hmac +import time + +from oauth2client._helpers import _to_bytes +from oauth2client import util + +__authors__ = [ + '"Doug Coker" ', + '"Joe Gregorio" ', +] + +# Delimiter character +DELIMITER = b':' + +# 1 hour in seconds +DEFAULT_TIMEOUT_SECS = 60 * 60 + + +@util.positional(2) +def generate_token(key, user_id, action_id='', when=None): + """Generates a URL-safe token for the given user, action, time tuple. + + Args: + key: secret key to use. + user_id: the user ID of the authenticated user. 
+ action_id: a string identifier of the action they requested + authorization for. + when: the time in seconds since the epoch at which the user was + authorized for this action. If not set the current time is used. + + Returns: + A string XSRF protection token. + """ + digester = hmac.new(_to_bytes(key, encoding='utf-8')) + digester.update(_to_bytes(str(user_id), encoding='utf-8')) + digester.update(DELIMITER) + digester.update(_to_bytes(action_id, encoding='utf-8')) + digester.update(DELIMITER) + when = _to_bytes(str(when or int(time.time())), encoding='utf-8') + digester.update(when) + digest = digester.digest() + + token = base64.urlsafe_b64encode(digest + DELIMITER + when) + return token + + +@util.positional(3) +def validate_token(key, token, user_id, action_id="", current_time=None): + """Validates that the given token authorizes the user for the action. + + Tokens are invalid if the time of issue is too old or if the token + does not match what generateToken outputs (i.e. the token was forged). + + Args: + key: secret key to use. + token: a string of the token generated by generateToken. + user_id: the user ID of the authenticated user. + action_id: a string identifier of the action they requested + authorization for. + + Returns: + A boolean - True if the user is authorized for the action, False + otherwise. + """ + if not token: + return False + try: + decoded = base64.urlsafe_b64decode(token) + token_time = int(decoded.split(DELIMITER)[-1]) + except (TypeError, ValueError, binascii.Error): + return False + if current_time is None: + current_time = time.time() + # If the token is too old it's not valid. + if current_time - token_time > DEFAULT_TIMEOUT_SECS: + return False + + # The given token should match the generated one with the same time. 
+ expected_token = generate_token(key, user_id, action_id=action_id, + when=token_time) + if len(token) != len(expected_token): + return False + + # Perform constant time comparison to avoid timing attacks + different = 0 + for x, y in zip(bytearray(token), bytearray(expected_token)): + different |= x ^ y + return not different diff --git a/oauth2client/crypt.py b/oauth2client/crypt.py new file mode 100644 index 0000000..70bef8c --- /dev/null +++ b/oauth2client/crypt.py @@ -0,0 +1,248 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+"""Crypto-related routines for oauth2client.""" + +import json +import logging +import time + +from oauth2client._helpers import _from_bytes +from oauth2client._helpers import _json_encode +from oauth2client._helpers import _to_bytes +from oauth2client._helpers import _urlsafe_b64decode +from oauth2client._helpers import _urlsafe_b64encode +from oauth2client._pure_python_crypt import RsaSigner +from oauth2client._pure_python_crypt import RsaVerifier + + +CLOCK_SKEW_SECS = 300 # 5 minutes in seconds +AUTH_TOKEN_LIFETIME_SECS = 300 # 5 minutes in seconds +MAX_TOKEN_LIFETIME_SECS = 86400 # 1 day in seconds + +logger = logging.getLogger(__name__) + + +class AppIdentityError(Exception): + """Error to indicate crypto failure.""" + + +def _bad_pkcs12_key_as_pem(*args, **kwargs): + raise NotImplementedError('pkcs12_key_as_pem requires OpenSSL.') + + +try: + from oauth2client._openssl_crypt import OpenSSLVerifier + from oauth2client._openssl_crypt import OpenSSLSigner + from oauth2client._openssl_crypt import pkcs12_key_as_pem +except ImportError: # pragma: NO COVER + OpenSSLVerifier = None + OpenSSLSigner = None + pkcs12_key_as_pem = _bad_pkcs12_key_as_pem + +try: + from oauth2client._pycrypto_crypt import PyCryptoVerifier + from oauth2client._pycrypto_crypt import PyCryptoSigner +except ImportError: # pragma: NO COVER + PyCryptoVerifier = None + PyCryptoSigner = None + + +if OpenSSLSigner: + Signer = OpenSSLSigner + Verifier = OpenSSLVerifier +elif PyCryptoSigner: # pragma: NO COVER + Signer = PyCryptoSigner + Verifier = PyCryptoVerifier +else: # pragma: NO COVER + Signer = RsaSigner + Verifier = RsaVerifier + + +def make_signed_jwt(signer, payload, key_id=None): + """Make a signed JWT. + + See http://self-issued.info/docs/draft-jones-json-web-token.html. + + Args: + signer: crypt.Signer, Cryptographic signer. + payload: dict, Dictionary of data to convert to JSON and then sign. + key_id: string, (Optional) Key ID header. + + Returns: + string, The JWT for the payload. 
+ """ + header = {'typ': 'JWT', 'alg': 'RS256'} + if key_id is not None: + header['kid'] = key_id + + segments = [ + _urlsafe_b64encode(_json_encode(header)), + _urlsafe_b64encode(_json_encode(payload)), + ] + signing_input = b'.'.join(segments) + + signature = signer.sign(signing_input) + segments.append(_urlsafe_b64encode(signature)) + + logger.debug(str(segments)) + + return b'.'.join(segments) + + +def _verify_signature(message, signature, certs): + """Verifies signed content using a list of certificates. + + Args: + message: string or bytes, The message to verify. + signature: string or bytes, The signature on the message. + certs: iterable, certificates in PEM format. + + Raises: + AppIdentityError: If none of the certificates can verify the message + against the signature. + """ + for pem in certs: + verifier = Verifier.from_string(pem, is_x509_cert=True) + if verifier.verify(message, signature): + return + + # If we have not returned, no certificate confirms the signature. + raise AppIdentityError('Invalid token signature') + + +def _check_audience(payload_dict, audience): + """Checks audience field from a JWT payload. + + Does nothing if the passed in ``audience`` is null. + + Args: + payload_dict: dict, A dictionary containing a JWT payload. + audience: string or NoneType, an audience to check for in + the JWT payload. + + Raises: + AppIdentityError: If there is no ``'aud'`` field in the payload + dictionary but there is an ``audience`` to check. + AppIdentityError: If the ``'aud'`` field in the payload dictionary + does not match the ``audience``. 
+ """ + if audience is None: + return + + audience_in_payload = payload_dict.get('aud') + if audience_in_payload is None: + raise AppIdentityError('No aud field in token: %s' % + (payload_dict,)) + if audience_in_payload != audience: + raise AppIdentityError('Wrong recipient, %s != %s: %s' % + (audience_in_payload, audience, payload_dict)) + + +def _verify_time_range(payload_dict): + """Verifies the issued at and expiration from a JWT payload. + + Makes sure the current time (in UTC) falls between the issued at and + expiration for the JWT (with some skew allowed for via + ``CLOCK_SKEW_SECS``). + + Args: + payload_dict: dict, A dictionary containing a JWT payload. + + Raises: + AppIdentityError: If there is no ``'iat'`` field in the payload + dictionary. + AppIdentityError: If there is no ``'exp'`` field in the payload + dictionary. + AppIdentityError: If the JWT expiration is too far in the future (i.e. + if the expiration would imply a token lifetime + longer than what is allowed.) + AppIdentityError: If the token appears to have been issued in the + future (up to clock skew). + AppIdentityError: If the token appears to have expired in the past + (up to clock skew). + """ + # Get the current time to use throughout. + now = int(time.time()) + + # Make sure issued at and expiration are in the payload. + issued_at = payload_dict.get('iat') + if issued_at is None: + raise AppIdentityError('No iat field in token: %s' % (payload_dict,)) + expiration = payload_dict.get('exp') + if expiration is None: + raise AppIdentityError('No exp field in token: %s' % (payload_dict,)) + + # Make sure the expiration gives an acceptable token lifetime. + if expiration >= now + MAX_TOKEN_LIFETIME_SECS: + raise AppIdentityError('exp field too far in future: %s' % + (payload_dict,)) + + # Make sure (up to clock skew) that the token wasn't issued in the future. 
+ earliest = issued_at - CLOCK_SKEW_SECS + if now < earliest: + raise AppIdentityError('Token used too early, %d < %d: %s' % + (now, earliest, payload_dict)) + # Make sure (up to clock skew) that the token isn't already expired. + latest = expiration + CLOCK_SKEW_SECS + if now > latest: + raise AppIdentityError('Token used too late, %d > %d: %s' % + (now, latest, payload_dict)) + + +def verify_signed_jwt_with_certs(jwt, certs, audience=None): + """Verify a JWT against public certs. + + See http://self-issued.info/docs/draft-jones-json-web-token.html. + + Args: + jwt: string, A JWT. + certs: dict, Dictionary where values of public keys in PEM format. + audience: string, The audience, 'aud', that this JWT should contain. If + None then the JWT's 'aud' parameter is not verified. + + Returns: + dict, The deserialized JSON payload in the JWT. + + Raises: + AppIdentityError: if any checks are failed. + """ + jwt = _to_bytes(jwt) + + if jwt.count(b'.') != 2: + raise AppIdentityError( + 'Wrong number of segments in token: %s' % (jwt,)) + + header, payload, signature = jwt.split(b'.') + message_to_sign = header + b'.' + payload + signature = _urlsafe_b64decode(signature) + + # Parse token. + payload_bytes = _urlsafe_b64decode(payload) + try: + payload_dict = json.loads(_from_bytes(payload_bytes)) + except: + raise AppIdentityError('Can\'t parse token: %s' % (payload_bytes,)) + + # Verify that the signature matches the message. + _verify_signature(message_to_sign, signature, certs.values()) + + # Verify the issued at and created times in the payload. + _verify_time_range(payload_dict) + + # Check audience. + _check_audience(payload_dict, audience) + + return payload_dict diff --git a/oauth2client/file.py b/oauth2client/file.py new file mode 100644 index 0000000..d482359 --- /dev/null +++ b/oauth2client/file.py @@ -0,0 +1,107 @@ +# Copyright 2014 Google Inc. All rights reserved. 
+# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utilities for OAuth. + +Utilities for making it easier to work with OAuth 2.0 +credentials. +""" + +import os +import threading + +from oauth2client.client import Credentials +from oauth2client.client import Storage as BaseStorage + + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' + + +class CredentialsFileSymbolicLinkError(Exception): + """Credentials files must not be symbolic links.""" + + +class Storage(BaseStorage): + """Store and retrieve a single credential to and from a file.""" + + def __init__(self, filename): + super(Storage, self).__init__(lock=threading.Lock()) + self._filename = filename + + def _validate_file(self): + if os.path.islink(self._filename): + raise CredentialsFileSymbolicLinkError( + 'File: %s is a symbolic link.' % self._filename) + + def locked_get(self): + """Retrieve Credential from file. + + Returns: + oauth2client.client.Credentials + + Raises: + CredentialsFileSymbolicLinkError if the file is a symbolic link. + """ + credentials = None + self._validate_file() + try: + f = open(self._filename, 'rb') + content = f.read() + f.close() + except IOError: + return credentials + + try: + credentials = Credentials.new_from_json(content) + credentials.set_store(self) + except ValueError: + pass + + return credentials + + def _create_file_if_needed(self): + """Create an empty file if necessary. + + This method will not initialize the file. 
Instead it implements a + simple version of "touch" to ensure the file has been created. + """ + if not os.path.exists(self._filename): + old_umask = os.umask(0o177) + try: + open(self._filename, 'a+b').close() + finally: + os.umask(old_umask) + + def locked_put(self, credentials): + """Write Credentials to file. + + Args: + credentials: Credentials, the credentials to store. + + Raises: + CredentialsFileSymbolicLinkError if the file is a symbolic link. + """ + self._create_file_if_needed() + self._validate_file() + f = open(self._filename, 'w') + f.write(credentials.to_json()) + f.close() + + def locked_delete(self): + """Delete Credentials file. + + Args: + credentials: Credentials, the credentials to store. + """ + os.unlink(self._filename) diff --git a/oauth2client/service_account.py b/oauth2client/service_account.py new file mode 100644 index 0000000..ce7f78e --- /dev/null +++ b/oauth2client/service_account.py @@ -0,0 +1,710 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+ +"""oauth2client Service account credentials class.""" + +import base64 +import copy +import datetime +import httplib2 +import json +import time + +from oauth2client import GOOGLE_REVOKE_URI +from oauth2client import GOOGLE_TOKEN_URI +from oauth2client._helpers import _json_encode +from oauth2client._helpers import _from_bytes +from oauth2client._helpers import _urlsafe_b64encode +from oauth2client import util +from oauth2client.client import _apply_user_agent +from oauth2client.client import _initialize_headers +from oauth2client.client import AccessTokenInfo +from oauth2client.client import AssertionCredentials +from oauth2client.client import clean_headers +from oauth2client.client import EXPIRY_FORMAT +from oauth2client.client import GoogleCredentials +from oauth2client.client import SERVICE_ACCOUNT +from oauth2client.client import TokenRevokeError +from oauth2client.client import _UTCNOW +from oauth2client import crypt + + +_PASSWORD_DEFAULT = 'notasecret' +_PKCS12_KEY = '_private_key_pkcs12' +_PKCS12_ERROR = r""" +This library only implements PKCS#12 support via the pyOpenSSL library. +Either install pyOpenSSL, or please convert the .p12 file +to .pem format: + $ cat key.p12 | \ + > openssl pkcs12 -nodes -nocerts -passin pass:notasecret | \ + > openssl rsa > key.pem +""" + + +class ServiceAccountCredentials(AssertionCredentials): + """Service Account credential for OAuth 2.0 signed JWT grants. + + Supports + + * JSON keyfile (typically contains a PKCS8 key stored as + PEM text) + * ``.p12`` key (stores PKCS12 key and certificate) + + Makes an assertion to server using a signed JWT assertion in exchange + for an access token. + + This credential does not require a flow to instantiate because it + represents a two legged flow, and therefore has all of the required + information to generate and refresh its own access tokens. + + Args: + service_account_email: string, The email associated with the + service account. 
+ signer: ``crypt.Signer``, A signer which can be used to sign content. + scopes: List or string, (Optional) Scopes to use when acquiring + an access token. + private_key_id: string, (Optional) Private key identifier. Typically + only used with a JSON keyfile. Can be sent in the + header of a JWT token assertion. + client_id: string, (Optional) Client ID for the project that owns the + service account. + user_agent: string, (Optional) User agent to use when sending + request. + token_uri: string, URI for token endpoint. For convenience defaults + to Google's endpoints but any OAuth 2.0 provider can be + used. + revoke_uri: string, URI for revoke endpoint. For convenience defaults + to Google's endpoints but any OAuth 2.0 provider can be + used. + kwargs: dict, Extra key-value pairs (both strings) to send in the + payload body when making an assertion. + """ + + MAX_TOKEN_LIFETIME_SECS = 3600 + """Max lifetime of the token (one hour, in seconds).""" + + NON_SERIALIZED_MEMBERS = ( + frozenset(['_signer']) | + AssertionCredentials.NON_SERIALIZED_MEMBERS) + """Members that aren't serialized when object is converted to JSON.""" + + # Can be over-ridden by factory constructors. Used for + # serialization/deserialization purposes. 
+ _private_key_pkcs8_pem = None + _private_key_pkcs12 = None + _private_key_password = None + + def __init__(self, + service_account_email, + signer, + scopes='', + private_key_id=None, + client_id=None, + user_agent=None, + token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI, + **kwargs): + + super(ServiceAccountCredentials, self).__init__( + None, user_agent=user_agent, token_uri=token_uri, + revoke_uri=revoke_uri) + + self._service_account_email = service_account_email + self._signer = signer + self._scopes = util.scopes_to_string(scopes) + self._private_key_id = private_key_id + self.client_id = client_id + self._user_agent = user_agent + self._kwargs = kwargs + + def _to_json(self, strip, to_serialize=None): + """Utility function that creates JSON repr. of a credentials object. + + Over-ride is needed since PKCS#12 keys will not in general be JSON + serializable. + + Args: + strip: array, An array of names of members to exclude from the + JSON. + to_serialize: dict, (Optional) The properties for this object + that will be serialized. This allows callers to modify + before serializing. + + Returns: + string, a JSON representation of this instance, suitable to pass to + from_json(). + """ + if to_serialize is None: + to_serialize = copy.copy(self.__dict__) + pkcs12_val = to_serialize.get(_PKCS12_KEY) + if pkcs12_val is not None: + to_serialize[_PKCS12_KEY] = base64.b64encode(pkcs12_val) + return super(ServiceAccountCredentials, self)._to_json( + strip, to_serialize=to_serialize) + + @classmethod + def _from_parsed_json_keyfile(cls, keyfile_dict, scopes, + token_uri=None, revoke_uri=None): + """Helper for factory constructors from JSON keyfile. + + Args: + keyfile_dict: dict-like object, The parsed dictionary-like object + containing the contents of the JSON keyfile. + scopes: List or string, Scopes to use when acquiring an + access token. + token_uri: string, URI for OAuth 2.0 provider token endpoint. 
+ If unset and not present in keyfile_dict, defaults + to Google's endpoints. + revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint. + If unset and not present in keyfile_dict, defaults + to Google's endpoints. + + Returns: + ServiceAccountCredentials, a credentials object created from + the keyfile contents. + + Raises: + ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`. + KeyError, if one of the expected keys is not present in + the keyfile. + """ + creds_type = keyfile_dict.get('type') + if creds_type != SERVICE_ACCOUNT: + raise ValueError('Unexpected credentials type', creds_type, + 'Expected', SERVICE_ACCOUNT) + + service_account_email = keyfile_dict['client_email'] + private_key_pkcs8_pem = keyfile_dict['private_key'] + private_key_id = keyfile_dict['private_key_id'] + client_id = keyfile_dict['client_id'] + if not token_uri: + token_uri = keyfile_dict.get('token_uri', GOOGLE_TOKEN_URI) + if not revoke_uri: + revoke_uri = keyfile_dict.get('revoke_uri', GOOGLE_REVOKE_URI) + + signer = crypt.Signer.from_string(private_key_pkcs8_pem) + credentials = cls(service_account_email, signer, scopes=scopes, + private_key_id=private_key_id, + client_id=client_id, token_uri=token_uri, + revoke_uri=revoke_uri) + credentials._private_key_pkcs8_pem = private_key_pkcs8_pem + return credentials + + @classmethod + def from_json_keyfile_name(cls, filename, scopes='', + token_uri=None, revoke_uri=None): + + """Factory constructor from JSON keyfile by name. + + Args: + filename: string, The location of the keyfile. + scopes: List or string, (Optional) Scopes to use when acquiring an + access token. + token_uri: string, URI for OAuth 2.0 provider token endpoint. + If unset and not present in the key file, defaults + to Google's endpoints. + revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint. + If unset and not present in the key file, defaults + to Google's endpoints. 
+ + Returns: + ServiceAccountCredentials, a credentials object created from + the keyfile. + + Raises: + ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`. + KeyError, if one of the expected keys is not present in + the keyfile. + """ + with open(filename, 'r') as file_obj: + client_credentials = json.load(file_obj) + return cls._from_parsed_json_keyfile(client_credentials, scopes, + token_uri=token_uri, + revoke_uri=revoke_uri) + + @classmethod + def from_json_keyfile_dict(cls, keyfile_dict, scopes='', + token_uri=None, revoke_uri=None): + """Factory constructor from parsed JSON keyfile. + + Args: + keyfile_dict: dict-like object, The parsed dictionary-like object + containing the contents of the JSON keyfile. + scopes: List or string, (Optional) Scopes to use when acquiring an + access token. + token_uri: string, URI for OAuth 2.0 provider token endpoint. + If unset and not present in keyfile_dict, defaults + to Google's endpoints. + revoke_uri: string, URI for OAuth 2.0 provider revoke endpoint. + If unset and not present in keyfile_dict, defaults + to Google's endpoints. + + Returns: + ServiceAccountCredentials, a credentials object created from + the keyfile. + + Raises: + ValueError, if the credential type is not :data:`SERVICE_ACCOUNT`. + KeyError, if one of the expected keys is not present in + the keyfile. + """ + return cls._from_parsed_json_keyfile(keyfile_dict, scopes, + token_uri=token_uri, + revoke_uri=revoke_uri) + + @classmethod + def _from_p12_keyfile_contents(cls, service_account_email, + private_key_pkcs12, + private_key_password=None, scopes='', + token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI): + """Factory constructor from JSON keyfile. + + Args: + service_account_email: string, The email associated with the + service account. + private_key_pkcs12: string, The contents of a PKCS#12 keyfile. + private_key_password: string, (Optional) Password for PKCS#12 + private key. Defaults to ``notasecret``. 
+ scopes: List or string, (Optional) Scopes to use when acquiring an + access token. + token_uri: string, URI for token endpoint. For convenience defaults + to Google's endpoints but any OAuth 2.0 provider can be + used. + revoke_uri: string, URI for revoke endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 + provider can be used. + + Returns: + ServiceAccountCredentials, a credentials object created from + the keyfile. + + Raises: + NotImplementedError if pyOpenSSL is not installed / not the + active crypto library. + """ + if private_key_password is None: + private_key_password = _PASSWORD_DEFAULT + if crypt.Signer is not crypt.OpenSSLSigner: + raise NotImplementedError(_PKCS12_ERROR) + signer = crypt.Signer.from_string(private_key_pkcs12, + private_key_password) + credentials = cls(service_account_email, signer, scopes=scopes, + token_uri=token_uri, revoke_uri=revoke_uri) + credentials._private_key_pkcs12 = private_key_pkcs12 + credentials._private_key_password = private_key_password + return credentials + + @classmethod + def from_p12_keyfile(cls, service_account_email, filename, + private_key_password=None, scopes='', + token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI): + + """Factory constructor from JSON keyfile. + + Args: + service_account_email: string, The email associated with the + service account. + filename: string, The location of the PKCS#12 keyfile. + private_key_password: string, (Optional) Password for PKCS#12 + private key. Defaults to ``notasecret``. + scopes: List or string, (Optional) Scopes to use when acquiring an + access token. + token_uri: string, URI for token endpoint. For convenience defaults + to Google's endpoints but any OAuth 2.0 provider can be + used. + revoke_uri: string, URI for revoke endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 + provider can be used. + + Returns: + ServiceAccountCredentials, a credentials object created from + the keyfile. 
+ + Raises: + NotImplementedError if pyOpenSSL is not installed / not the + active crypto library. + """ + with open(filename, 'rb') as file_obj: + private_key_pkcs12 = file_obj.read() + return cls._from_p12_keyfile_contents( + service_account_email, private_key_pkcs12, + private_key_password=private_key_password, scopes=scopes, + token_uri=token_uri, revoke_uri=revoke_uri) + + @classmethod + def from_p12_keyfile_buffer(cls, service_account_email, file_buffer, + private_key_password=None, scopes='', + token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI): + """Factory constructor from JSON keyfile. + + Args: + service_account_email: string, The email associated with the + service account. + file_buffer: stream, A buffer that implements ``read()`` + and contains the PKCS#12 key contents. + private_key_password: string, (Optional) Password for PKCS#12 + private key. Defaults to ``notasecret``. + scopes: List or string, (Optional) Scopes to use when acquiring an + access token. + token_uri: string, URI for token endpoint. For convenience defaults + to Google's endpoints but any OAuth 2.0 provider can be + used. + revoke_uri: string, URI for revoke endpoint. For convenience + defaults to Google's endpoints but any OAuth 2.0 + provider can be used. + + Returns: + ServiceAccountCredentials, a credentials object created from + the keyfile. + + Raises: + NotImplementedError if pyOpenSSL is not installed / not the + active crypto library. 
+ """ + private_key_pkcs12 = file_buffer.read() + return cls._from_p12_keyfile_contents( + service_account_email, private_key_pkcs12, + private_key_password=private_key_password, scopes=scopes, + token_uri=token_uri, revoke_uri=revoke_uri) + + def _generate_assertion(self): + """Generate the assertion that will be used in the request.""" + now = int(time.time()) + payload = { + 'aud': self.token_uri, + 'scope': self._scopes, + 'iat': now, + 'exp': now + self.MAX_TOKEN_LIFETIME_SECS, + 'iss': self._service_account_email, + } + payload.update(self._kwargs) + return crypt.make_signed_jwt(self._signer, payload, + key_id=self._private_key_id) + + def sign_blob(self, blob): + """Cryptographically sign a blob (of bytes). + + Implements abstract method + :meth:`oauth2client.client.AssertionCredentials.sign_blob`. + + Args: + blob: bytes, Message to be signed. + + Returns: + tuple, A pair of the private key ID used to sign the blob and + the signed contents. + """ + return self._private_key_id, self._signer.sign(blob) + + @property + def service_account_email(self): + """Get the email for the current service account. + + Returns: + string, The email associated with the service account. + """ + return self._service_account_email + + @property + def serialization_data(self): + # NOTE: This is only useful for JSON keyfile. + return { + 'type': 'service_account', + 'client_email': self._service_account_email, + 'private_key_id': self._private_key_id, + 'private_key': self._private_key_pkcs8_pem, + 'client_id': self.client_id, + } + + @classmethod + def from_json(cls, json_data): + """Deserialize a JSON-serialized instance. + + Inverse to :meth:`to_json`. + + Args: + json_data: dict or string, Serialized JSON (as a string or an + already parsed dictionary) representing a credential. + + Returns: + ServiceAccountCredentials from the serialized data. 
+ """ + if not isinstance(json_data, dict): + json_data = json.loads(_from_bytes(json_data)) + + private_key_pkcs8_pem = None + pkcs12_val = json_data.get(_PKCS12_KEY) + password = None + if pkcs12_val is None: + private_key_pkcs8_pem = json_data['_private_key_pkcs8_pem'] + signer = crypt.Signer.from_string(private_key_pkcs8_pem) + else: + # NOTE: This assumes that private_key_pkcs8_pem is not also + # in the serialized data. This would be very incorrect + # state. + pkcs12_val = base64.b64decode(pkcs12_val) + password = json_data['_private_key_password'] + signer = crypt.Signer.from_string(pkcs12_val, password) + + credentials = cls( + json_data['_service_account_email'], + signer, + scopes=json_data['_scopes'], + private_key_id=json_data['_private_key_id'], + client_id=json_data['client_id'], + user_agent=json_data['_user_agent'], + **json_data['_kwargs'] + ) + if private_key_pkcs8_pem is not None: + credentials._private_key_pkcs8_pem = private_key_pkcs8_pem + if pkcs12_val is not None: + credentials._private_key_pkcs12 = pkcs12_val + if password is not None: + credentials._private_key_password = password + credentials.invalid = json_data['invalid'] + credentials.access_token = json_data['access_token'] + credentials.token_uri = json_data['token_uri'] + credentials.revoke_uri = json_data['revoke_uri'] + token_expiry = json_data.get('token_expiry', None) + if token_expiry is not None: + credentials.token_expiry = datetime.datetime.strptime( + token_expiry, EXPIRY_FORMAT) + return credentials + + def create_scoped_required(self): + return not self._scopes + + def create_scoped(self, scopes): + result = self.__class__(self._service_account_email, + self._signer, + scopes=scopes, + private_key_id=self._private_key_id, + client_id=self.client_id, + user_agent=self._user_agent, + **self._kwargs) + result.token_uri = self.token_uri + result.revoke_uri = self.revoke_uri + result._private_key_pkcs8_pem = self._private_key_pkcs8_pem + result._private_key_pkcs12 = 
self._private_key_pkcs12 + result._private_key_password = self._private_key_password + return result + + def create_with_claims(self, claims): + """Create credentials that specify additional claims. + + Args: + claims: dict, key-value pairs for claims. + + Returns: + ServiceAccountCredentials, a copy of the current service account + credentials with updated claims to use when obtaining access tokens. + """ + new_kwargs = dict(self._kwargs) + new_kwargs.update(claims) + result = self.__class__(self._service_account_email, + self._signer, + scopes=self._scopes, + private_key_id=self._private_key_id, + client_id=self.client_id, + user_agent=self._user_agent, + **new_kwargs) + result.token_uri = self.token_uri + result.revoke_uri = self.revoke_uri + result._private_key_pkcs8_pem = self._private_key_pkcs8_pem + result._private_key_pkcs12 = self._private_key_pkcs12 + result._private_key_password = self._private_key_password + return result + + def create_delegated(self, sub): + """Create credentials that act as domain-wide delegation of authority. + + Use the ``sub`` parameter as the subject to delegate on behalf of + that user. + + For example:: + + >>> account_sub = 'foo@email.com' + >>> delegate_creds = creds.create_delegated(account_sub) + + Args: + sub: string, An email address that this service account will + act on behalf of (via domain-wide delegation). + + Returns: + ServiceAccountCredentials, a copy of the current service account + updated to act on behalf of ``sub``. + """ + return self.create_with_claims({'sub': sub}) + + +def _datetime_to_secs(utc_time): + # TODO(issue 298): use time_delta.total_seconds() + # time_delta.total_seconds() not supported in Python 2.6 + epoch = datetime.datetime(1970, 1, 1) + time_delta = utc_time - epoch + return time_delta.days * 86400 + time_delta.seconds + + +class _JWTAccessCredentials(ServiceAccountCredentials): + """Self signed JWT credentials. 
+ + Makes an assertion to server using a self signed JWT from service account + credentials. These credentials do NOT use OAuth 2.0 and instead + authenticate directly. + """ + _MAX_TOKEN_LIFETIME_SECS = 3600 + """Max lifetime of the token (one hour, in seconds).""" + + def __init__(self, + service_account_email, + signer, + scopes=None, + private_key_id=None, + client_id=None, + user_agent=None, + token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI, + additional_claims=None): + if additional_claims is None: + additional_claims = {} + super(_JWTAccessCredentials, self).__init__( + service_account_email, + signer, + private_key_id=private_key_id, + client_id=client_id, + user_agent=user_agent, + token_uri=token_uri, + revoke_uri=revoke_uri, + **additional_claims) + + def authorize(self, http): + """Authorize an httplib2.Http instance with a JWT assertion. + + Unless specified, the 'aud' of the assertion will be the base + uri of the request. + + Args: + http: An instance of ``httplib2.Http`` or something that acts + like it. + Returns: + A modified instance of http that was passed in. + Example:: + h = httplib2.Http() + h = credentials.authorize(h) + """ + request_orig = http.request + request_auth = super(_JWTAccessCredentials, self).authorize(http).request + + # The closure that will replace 'httplib2.Http.request'. 
+ def new_request(uri, method='GET', body=None, headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, + connection_type=None): + if 'aud' in self._kwargs: + # Preemptively refresh token, this is not done for OAuth2 + if self.access_token is None or self.access_token_expired: + self.refresh(None) + return request_auth(uri, method, body, + headers, redirections, + connection_type) + else: + # If we don't have an 'aud' (audience) claim, + # create a 1-time token with the uri root as the audience + headers = _initialize_headers(headers) + _apply_user_agent(headers, self.user_agent) + uri_root = uri.split('?', 1)[0] + token, unused_expiry = self._create_token({'aud': uri_root}) + + headers['Authorization'] = 'Bearer ' + token + return request_orig(uri, method, body, + clean_headers(headers), + redirections, connection_type) + + # Replace the request method with our own closure. + http.request = new_request + + return http + + def get_access_token(self, http=None, additional_claims=None): + """Create a signed jwt. + + Args: + http: unused + additional_claims: dict, additional claims to add to + the payload of the JWT. 
+ Returns: + An AccessTokenInfo with the signed jwt + """ + if additional_claims is None: + if self.access_token is None or self.access_token_expired: + self.refresh(None) + return AccessTokenInfo(access_token=self.access_token, + expires_in=self._expires_in()) + else: + # Create a 1 time token + token, unused_expiry = self._create_token(additional_claims) + return AccessTokenInfo(access_token=token, + expires_in=self._MAX_TOKEN_LIFETIME_SECS) + + def revoke(self, http): + """Cannot revoke JWTAccessCredentials tokens.""" + pass + + def create_scoped_required(self): + # JWTAccessCredentials are unscoped by definition + return True + + def create_scoped(self, scopes, token_uri=GOOGLE_TOKEN_URI, + revoke_uri=GOOGLE_REVOKE_URI): + # Returns an OAuth2 credentials with the given scope + result = ServiceAccountCredentials(self._service_account_email, + self._signer, + scopes=scopes, + private_key_id=self._private_key_id, + client_id=self.client_id, + user_agent=self._user_agent, + token_uri=token_uri, + revoke_uri=revoke_uri, + **self._kwargs) + if self._private_key_pkcs8_pem is not None: + result._private_key_pkcs8_pem = self._private_key_pkcs8_pem + if self._private_key_pkcs12 is not None: + result._private_key_pkcs12 = self._private_key_pkcs12 + if self._private_key_password is not None: + result._private_key_password = self._private_key_password + return result + + def refresh(self, http): + self._refresh(None) + + def _refresh(self, http_request): + self.access_token, self.token_expiry = self._create_token() + + def _create_token(self, additional_claims=None): + now = _UTCNOW() + expiry = now + datetime.timedelta(seconds=self._MAX_TOKEN_LIFETIME_SECS) + payload = { + 'iat': _datetime_to_secs(now), + 'exp': _datetime_to_secs(expiry), + 'iss': self._service_account_email, + 'sub': self._service_account_email + } + payload.update(self._kwargs) + if additional_claims is not None: + payload.update(additional_claims) + jwt = crypt.make_signed_jwt(self._signer, payload, + 
key_id=self._private_key_id) + return jwt.decode('ascii'), expiry diff --git a/oauth2client/tools.py b/oauth2client/tools.py new file mode 100644 index 0000000..9e3e85d --- /dev/null +++ b/oauth2client/tools.py @@ -0,0 +1,248 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Command-line tools for authenticating via OAuth 2.0 + +Do the OAuth 2.0 Web Server dance for a command line application. Stores the +generated credentials in a common file that is used by other example apps in +the same directory. +""" + +from __future__ import print_function + +import logging +import socket +import sys + +from six.moves import BaseHTTPServer +from six.moves import http_client +from six.moves import urllib +from six.moves import input + +from oauth2client import client +from oauth2client import util + + +__author__ = 'jcgregorio@google.com (Joe Gregorio)' +__all__ = ['argparser', 'run_flow', 'message_if_missing'] + +_CLIENT_SECRETS_MESSAGE = """WARNING: Please configure OAuth 2.0 + +To make this sample run you will need to populate the client_secrets.json file +found at: + + %s + +with information from the APIs Console . 
+ +""" + + +def _CreateArgumentParser(): + try: + import argparse + except ImportError: # pragma: NO COVER + return None + parser = argparse.ArgumentParser(add_help=False) + parser.add_argument('--auth_host_name', default='localhost', + help='Hostname when running a local web server.') + parser.add_argument('--noauth_local_webserver', action='store_true', + default=False, help='Do not run a local web server.') + parser.add_argument('--auth_host_port', default=[8080, 8090], type=int, + nargs='*', help='Port web server should listen on.') + parser.add_argument( + '--logging_level', default='ERROR', + choices=['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'], + help='Set the logging level of detail.') + return parser + +# argparser is an ArgumentParser that contains command-line options expected +# by tools.run(). Pass it in as part of the 'parents' argument to your own +# ArgumentParser. +argparser = _CreateArgumentParser() + + +class ClientRedirectServer(BaseHTTPServer.HTTPServer): + """A server to handle OAuth 2.0 redirects back to localhost. + + Waits for a single request and parses the query parameters + into query_params and then stops serving. + """ + query_params = {} + + +class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler): + """A handler for OAuth 2.0 redirects back to localhost. + + Waits for a single request and parses the query parameters + into the servers query_params and then stops serving. + """ + + def do_GET(self): + """Handle a GET request. + + Parses the query parameters and prints a message + if the flow has completed. Note that we can't detect + if an error occurred. + """ + self.send_response(http_client.OK) + self.send_header("Content-type", "text/html") + self.end_headers() + query = self.path.split('?', 1)[-1] + query = dict(urllib.parse.parse_qsl(query)) + self.server.query_params = query + self.wfile.write( + b"Authentication Status") + self.wfile.write( + b"

The authentication flow has completed.

") + self.wfile.write(b"") + + def log_message(self, format, *args): + """Do not log messages to stdout while running as cmd. line program.""" + + +@util.positional(3) +def run_flow(flow, storage, flags=None, http=None): + """Core code for a command-line application. + + The ``run()`` function is called from your application and runs + through all the steps to obtain credentials. It takes a ``Flow`` + argument and attempts to open an authorization server page in the + user's default web browser. The server asks the user to grant your + application access to the user's data. If the user grants access, + the ``run()`` function returns new credentials. The new credentials + are also stored in the ``storage`` argument, which updates the file + associated with the ``Storage`` object. + + It presumes it is run from a command-line application and supports the + following flags: + + ``--auth_host_name`` (string, default: ``localhost``) + Host name to use when running a local web server to handle + redirects during OAuth authorization. + + ``--auth_host_port`` (integer, default: ``[8080, 8090]``) + Port to use when running a local web server to handle redirects + during OAuth authorization. Repeat this option to specify a list + of values. + + ``--[no]auth_local_webserver`` (boolean, default: ``True``) + Run a local web server to handle redirects during OAuth + authorization. + + The tools module defines an ``ArgumentParser`` the already contains the + flag definitions that ``run()`` requires. You can pass that + ``ArgumentParser`` to your ``ArgumentParser`` constructor:: + + parser = argparse.ArgumentParser( + description=__doc__, + formatter_class=argparse.RawDescriptionHelpFormatter, + parents=[tools.argparser]) + flags = parser.parse_args(argv) + + Args: + flow: Flow, an OAuth 2.0 Flow to step through. + storage: Storage, a ``Storage`` to store the credential in. + flags: ``argparse.Namespace``, (Optional) The command-line flags. 
This + is the object returned from calling ``parse_args()`` on + ``argparse.ArgumentParser`` as described above. Defaults + to ``argparser.parse_args()``. + http: An instance of ``httplib2.Http.request`` or something that + acts like it. + + Returns: + Credentials, the obtained credential. + """ + if flags is None: + flags = argparser.parse_args() + logging.getLogger().setLevel(getattr(logging, flags.logging_level)) + if not flags.noauth_local_webserver: + success = False + port_number = 0 + for port in flags.auth_host_port: + port_number = port + try: + httpd = ClientRedirectServer((flags.auth_host_name, port), + ClientRedirectHandler) + except socket.error: + pass + else: + success = True + break + flags.noauth_local_webserver = not success + if not success: + print('Failed to start a local webserver listening ' + 'on either port 8080') + print('or port 8090. Please check your firewall settings and locally') + print('running programs that may be blocking or using those ports.') + print() + print('Falling back to --noauth_local_webserver and continuing with') + print('authorization.') + print() + + if not flags.noauth_local_webserver: + oauth_callback = 'http://%s:%s/' % (flags.auth_host_name, port_number) + else: + oauth_callback = client.OOB_CALLBACK_URN + flow.redirect_uri = oauth_callback + authorize_url = flow.step1_get_authorize_url() + + if not flags.noauth_local_webserver: + import webbrowser + webbrowser.open(authorize_url, new=1, autoraise=True) + print('Your browser has been opened to visit:') + print() + print(' ' + authorize_url) + print() + print('If your browser is on a different machine then ' + 'exit and re-run this') + print('application with the command-line parameter ') + print() + print(' --noauth_local_webserver') + print() + else: + print('Go to the following link in your browser:') + print() + print(' ' + authorize_url) + print() + + code = None + if not flags.noauth_local_webserver: + httpd.handle_request() + if 'error' in 
httpd.query_params: + sys.exit('Authentication request was rejected.') + if 'code' in httpd.query_params: + code = httpd.query_params['code'] + else: + print('Failed to find "code" in the query parameters ' + 'of the redirect.') + sys.exit('Try running with --noauth_local_webserver.') + else: + code = input('Enter verification code: ').strip() + + try: + credential = flow.step2_exchange(code, http=http) + except client.FlowExchangeError as e: + sys.exit('Authentication has failed: %s' % e) + + storage.put(credential) + credential.set_store(storage) + print('Authentication successful.') + + return credential + + +def message_if_missing(filename): + """Helpful message to display if the CLIENT_SECRETS file is missing.""" + return _CLIENT_SECRETS_MESSAGE % filename diff --git a/oauth2client/transport.py b/oauth2client/transport.py new file mode 100644 index 0000000..79a61f1 --- /dev/null +++ b/oauth2client/transport.py @@ -0,0 +1,285 @@ +# Copyright 2016 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +import logging + +import httplib2 +import six +from six.moves import http_client + +from oauth2client import _helpers + + +_LOGGER = logging.getLogger(__name__) +# Properties present in file-like streams / buffers. +_STREAM_PROPERTIES = ('read', 'seek', 'tell') + +# Google Data client libraries may need to set this to [401, 403]. 
+REFRESH_STATUS_CODES = (http_client.UNAUTHORIZED,) + + +class MemoryCache(object): + """httplib2 Cache implementation which only caches locally.""" + + def __init__(self): + self.cache = {} + + def get(self, key): + return self.cache.get(key) + + def set(self, key, value): + self.cache[key] = value + + def delete(self, key): + self.cache.pop(key, None) + + +def get_cached_http(): + """Return an HTTP object which caches results returned. + + This is intended to be used in methods like + oauth2client.client.verify_id_token(), which calls to the same URI + to retrieve certs. + + Returns: + httplib2.Http, an HTTP object with a MemoryCache + """ + return _CACHED_HTTP + + +def get_http_object(*args, **kwargs): + """Return a new HTTP object. + + Args: + *args: tuple, The positional arguments to be passed when + contructing a new HTTP object. + **kwargs: dict, The keyword arguments to be passed when + contructing a new HTTP object. + + Returns: + httplib2.Http, an HTTP object. + """ + return httplib2.Http(*args, **kwargs) + + +def _initialize_headers(headers): + """Creates a copy of the headers. + + Args: + headers: dict, request headers to copy. + + Returns: + dict, the copied headers or a new dictionary if the headers + were None. + """ + return {} if headers is None else dict(headers) + + +def _apply_user_agent(headers, user_agent): + """Adds a user-agent to the headers. + + Args: + headers: dict, request headers to add / modify user + agent within. + user_agent: str, the user agent to add. + + Returns: + dict, the original headers passed in, but modified if the + user agent is not None. + """ + if user_agent is not None: + if 'user-agent' in headers: + headers['user-agent'] = (user_agent + ' ' + headers['user-agent']) + else: + headers['user-agent'] = user_agent + + return headers + + +def clean_headers(headers): + """Forces header keys and values to be strings, i.e not unicode. 
+ + The httplib module just concats the header keys and values in a way that + may make the message header a unicode string, which, if it then tries to + contatenate to a binary request body may result in a unicode decode error. + + Args: + headers: dict, A dictionary of headers. + + Returns: + The same dictionary but with all the keys converted to strings. + """ + clean = {} + try: + for k, v in six.iteritems(headers): + if not isinstance(k, six.binary_type): + k = str(k) + if not isinstance(v, six.binary_type): + v = str(v) + clean[_helpers._to_bytes(k)] = _helpers._to_bytes(v) + except UnicodeEncodeError: + from oauth2client.client import NonAsciiHeaderError + raise NonAsciiHeaderError(k, ': ', v) + return clean + + +def wrap_http_for_auth(credentials, http): + """Prepares an HTTP object's request method for auth. + + Wraps HTTP requests with logic to catch auth failures (typically + identified via a 401 status code). In the event of failure, tries + to refresh the token used and then retry the original request. + + Args: + credentials: Credentials, the credentials used to identify + the authenticated user. + http: httplib2.Http, an http object to be used to make + auth requests. + """ + orig_request_method = http.request + + # The closure that will replace 'httplib2.Http.request'. + def new_request(uri, method='GET', body=None, headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, + connection_type=None): + if not credentials.access_token: + _LOGGER.info('Attempting refresh to obtain ' + 'initial access_token') + credentials._refresh(orig_request_method) + + # Clone and modify the request headers to add the appropriate + # Authorization header. + headers = _initialize_headers(headers) + credentials.apply(headers) + _apply_user_agent(headers, credentials.user_agent) + + body_stream_position = None + # Check if the body is a file-like stream. 
+ if all(getattr(body, stream_prop, None) for stream_prop in + _STREAM_PROPERTIES): + body_stream_position = body.tell() + + resp, content = request(orig_request_method, uri, method, body, + clean_headers(headers), + redirections, connection_type) + + # A stored token may expire between the time it is retrieved and + # the time the request is made, so we may need to try twice. + max_refresh_attempts = 2 + for refresh_attempt in range(max_refresh_attempts): + if resp.status not in REFRESH_STATUS_CODES: + break + _LOGGER.info('Refreshing due to a %s (attempt %s/%s)', + resp.status, refresh_attempt + 1, + max_refresh_attempts) + credentials._refresh(orig_request_method) + credentials.apply(headers) + if body_stream_position is not None: + body.seek(body_stream_position) + + resp, content = request(orig_request_method, uri, method, body, + clean_headers(headers), + redirections, connection_type) + + return resp, content + + # Replace the request method with our own closure. + http.request = new_request + + # Set credentials as a property of the request method. + http.request.credentials = credentials + + +def wrap_http_for_jwt_access(credentials, http): + """Prepares an HTTP object's request method for JWT access. + + Wraps HTTP requests with logic to catch auth failures (typically + identified via a 401 status code). In the event of failure, tries + to refresh the token used and then retry the original request. + + Args: + credentials: _JWTAccessCredentials, the credentials used to identify + a service account that uses JWT access tokens. + http: httplib2.Http, an http object to be used to make + auth requests. + """ + orig_request_method = http.request + wrap_http_for_auth(credentials, http) + # The new value of ``http.request`` set by ``wrap_http_for_auth``. + authenticated_request_method = http.request + + # The closure that will replace 'httplib2.Http.request'. 
+ def new_request(uri, method='GET', body=None, headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, + connection_type=None): + if 'aud' in credentials._kwargs: + # Preemptively refresh token, this is not done for OAuth2 + if (credentials.access_token is None or + credentials.access_token_expired): + credentials.refresh(None) + return request(authenticated_request_method, uri, + method, body, headers, redirections, + connection_type) + else: + # If we don't have an 'aud' (audience) claim, + # create a 1-time token with the uri root as the audience + headers = _initialize_headers(headers) + _apply_user_agent(headers, credentials.user_agent) + uri_root = uri.split('?', 1)[0] + token, unused_expiry = credentials._create_token({'aud': uri_root}) + + headers['Authorization'] = 'Bearer ' + token + return request(orig_request_method, uri, method, body, + clean_headers(headers), + redirections, connection_type) + + # Replace the request method with our own closure. + http.request = new_request + + # Set credentials as a property of the request method. + http.request.credentials = credentials + + +def request(http, uri, method='GET', body=None, headers=None, + redirections=httplib2.DEFAULT_MAX_REDIRECTS, + connection_type=None): + """Make an HTTP request with an HTTP object and arguments. + + Args: + http: httplib2.Http, an http object to be used to make requests. + uri: string, The URI to be requested. + method: string, The HTTP method to use for the request. Defaults + to 'GET'. + body: string, The payload / body in HTTP request. By default + there is no payload. + headers: dict, Key-value pairs of request headers. By default + there are no headers. + redirections: int, The number of allowed 203 redirects for + the request. Defaults to 5. + connection_type: httplib.HTTPConnection, a subclass to be used for + establishing connection. If not set, the type + will be determined from the ``uri``. 
+
+    Returns:
+        tuple, a pair of a httplib2.Response with the status code and other
+        headers and the bytes of the content returned.
+    """
+    # NOTE: Allowing http or http.request is temporary (See Issue 601).
+    http_callable = getattr(http, 'request', http)
+    return http_callable(uri, method=method, body=body, headers=headers,
+                         redirections=redirections,
+                         connection_type=connection_type)
+
+
+_CACHED_HTTP = httplib2.Http(MemoryCache()) diff --git a/oauth2client/util.py b/oauth2client/util.py new file mode 100644 index 0000000..8c907d8 --- /dev/null +++ b/oauth2client/util.py @@ -0,0 +1,204 @@ +# Copyright 2014 Google Inc. All rights reserved. +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Common utility library.""" + +import functools +import inspect +import logging + +import six +from six.moves import urllib + + +__author__ = [ +    'rafek@google.com (Rafe Kaplan)', +    'guido@google.com (Guido van Rossum)', +] + +__all__ = [ +    'positional', +    'POSITIONAL_WARNING', +    'POSITIONAL_EXCEPTION', +    'POSITIONAL_IGNORE', +] + +logger = logging.getLogger(__name__) + +POSITIONAL_WARNING = 'WARNING' +POSITIONAL_EXCEPTION = 'EXCEPTION' +POSITIONAL_IGNORE = 'IGNORE' +POSITIONAL_SET = frozenset([POSITIONAL_WARNING, POSITIONAL_EXCEPTION, +                            POSITIONAL_IGNORE]) + +positional_parameters_enforcement = POSITIONAL_WARNING + + +def positional(max_positional_args): +    """A decorator to declare that only the first N arguments may be positional.
+
+    This decorator makes it easy to support Python 3 style keyword-only
+    parameters. For example, in Python 3 it is possible to write::
+
+        def fn(pos1, *, kwonly1=None, kwonly2=None):
+            ...
+
+    All named parameters after ``*`` must be a keyword::
+
+        fn(10, 'kw1', 'kw2')  # Raises exception.
+        fn(10, kwonly1='kw1')  # Ok.
+
+    Example
+    ^^^^^^^
+
+    To define a function like above, do::
+
+        @positional(1)
+        def fn(pos1, kwonly1=None, kwonly2=None):
+            ...
+
+    If no default value is provided to a keyword argument, it becomes a
+    required keyword argument::
+
+        @positional(0)
+        def fn(required_kw):
+            ...
+
+    This must be called with the keyword parameter::
+
+        fn()  # Raises exception.
+        fn(10)  # Raises exception.
+        fn(required_kw=10)  # Ok.
+
+    When defining instance or class methods always remember to account for
+    ``self`` and ``cls``::
+
+        class MyClass(object):
+
+            @positional(2)
+            def my_method(self, pos1, kwonly1=None):
+                ...
+
+            @classmethod
+            @positional(2)
+            def my_method(cls, pos1, kwonly1=None):
+                ...
+
+    The positional decorator behavior is controlled by
+    ``util.positional_parameters_enforcement``, which may be set to
+    ``POSITIONAL_EXCEPTION``, ``POSITIONAL_WARNING`` or
+    ``POSITIONAL_IGNORE`` to raise an exception, log a warning, or do
+    nothing, respectively, if a declaration is violated.
+
+    Args:
+        max_positional_args: Maximum number of positional arguments. All
+                             parameters after this index must be
+                             keyword only.
+
+    Returns:
+        A decorator that prevents using arguments after max_positional_args
+        from being used as positional parameters.
+
+    Raises:
+        TypeError: if a key-word only argument is provided as a positional
+                   parameter, but only if
+                   util.positional_parameters_enforcement is set to
+                   POSITIONAL_EXCEPTION.
+ """ + + def positional_decorator(wrapped): + @functools.wraps(wrapped) + def positional_wrapper(*args, **kwargs): + if len(args) > max_positional_args: + plural_s = '' + if max_positional_args != 1: + plural_s = 's' + message = ('%s() takes at most %d positional ' + 'argument%s (%d given)' % ( + wrapped.__name__, max_positional_args, + plural_s, len(args))) + if positional_parameters_enforcement == POSITIONAL_EXCEPTION: + raise TypeError(message) + elif positional_parameters_enforcement == POSITIONAL_WARNING: + logger.warning(message) + return wrapped(*args, **kwargs) + return positional_wrapper + + if isinstance(max_positional_args, six.integer_types): + return positional_decorator + else: + args, _, _, defaults = inspect.getargspec(max_positional_args) + return positional(len(args) - len(defaults))(max_positional_args) + + +def scopes_to_string(scopes): + """Converts scope value to a string. + + If scopes is a string then it is simply passed through. If scopes is an + iterable then a string is returned that is all the individual scopes + concatenated with spaces. + + Args: + scopes: string or iterable of strings, the scopes. + + Returns: + The scopes formatted as a single string. + """ + if isinstance(scopes, six.string_types): + return scopes + else: + return ' '.join(scopes) + + +def string_to_scopes(scopes): + """Converts stringifed scope value to a list. + + If scopes is a list then it is simply passed through. If scopes is an + string then a list of each individual scope is returned. + + Args: + scopes: a string or iterable of strings, the scopes. + + Returns: + The scopes in a list. + """ + if not scopes: + return [] + if isinstance(scopes, six.string_types): + return scopes.split(' ') + else: + return scopes + + +def _add_query_parameter(url, name, value): + """Adds a query parameter to a url. + + Replaces the current value if it already exists in the URL. + + Args: + url: string, url to add the query parameter to. + name: string, query parameter name. 
+ value: string, query parameter value. + + Returns: + Updated query parameter. Does not update the url if value is None. + """ + if value is None: + return url + else: + parsed = list(urllib.parse.urlparse(url)) + q = dict(urllib.parse.parse_qsl(parsed[4])) + q[name] = value + parsed[4] = urllib.parse.urlencode(q) + return urllib.parse.urlunparse(parsed) diff --git a/pyasn1/__init__.py b/pyasn1/__init__.py new file mode 100644 index 0000000..5f09300 --- /dev/null +++ b/pyasn1/__init__.py @@ -0,0 +1,8 @@ +import sys + +# http://www.python.org/dev/peps/pep-0396/ +__version__ = '0.1.9' + +if sys.version_info[:2] < (2, 4): + raise RuntimeError('PyASN1 requires Python 2.4 or later') + diff --git a/pyasn1/codec/__init__.py b/pyasn1/codec/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/pyasn1/codec/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/pyasn1/codec/ber/__init__.py b/pyasn1/codec/ber/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/pyasn1/codec/ber/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. 
diff --git a/pyasn1/codec/ber/decoder.py b/pyasn1/codec/ber/decoder.py new file mode 100644 index 0000000..61bfbce --- /dev/null +++ b/pyasn1/codec/ber/decoder.py @@ -0,0 +1,841 @@ +# BER decoder +from pyasn1.type import tag, univ, char, useful, tagmap +from pyasn1.codec.ber import eoo +from pyasn1.compat.octets import oct2int, isOctetsType +from pyasn1 import debug, error + +class AbstractDecoder: + protoComponent = None + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + raise error.PyAsn1Error('Decoder not implemented for %s' % (tagSet,)) + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + raise error.PyAsn1Error('Indefinite length mode decoder not implemented for %s' % (tagSet,)) + +class AbstractSimpleDecoder(AbstractDecoder): + tagFormats = (tag.tagFormatSimple,) + def _createComponent(self, asn1Spec, tagSet, value=None): + if tagSet[0][1] not in self.tagFormats: + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) + if asn1Spec is None: + return self.protoComponent.clone(value, tagSet) + elif value is None: + return asn1Spec + else: + return asn1Spec.clone(value) + +class AbstractConstructedDecoder(AbstractDecoder): + tagFormats = (tag.tagFormatConstructed,) + def _createComponent(self, asn1Spec, tagSet, value=None): + if tagSet[0][1] not in self.tagFormats: + raise error.PyAsn1Error('Invalid tag format %s for %s' % (tagSet[0], self.protoComponent.prettyPrintType())) + if asn1Spec is None: + return self.protoComponent.clone(tagSet) + else: + return asn1Spec.clone() + +class ExplicitTagDecoder(AbstractSimpleDecoder): + protoComponent = univ.Any('') + tagFormats = (tag.tagFormatConstructed,) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if substrateFun: + return substrateFun( + 
self._createComponent(asn1Spec, tagSet, ''), + substrate, length + ) + head, tail = substrate[:length], substrate[length:] + value, _ = decodeFun(head, asn1Spec, tagSet, length) + return value, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if substrateFun: + return substrateFun( + self._createComponent(asn1Spec, tagSet, ''), + substrate, length + ) + value, substrate = decodeFun(substrate, asn1Spec, tagSet, length) + terminator, substrate = decodeFun(substrate, allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(terminator) and \ + terminator == eoo.endOfOctets: + return value, substrate + else: + raise error.PyAsn1Error('Missing end-of-octets terminator') + +explicitTagDecoder = ExplicitTagDecoder() + +class IntegerDecoder(AbstractSimpleDecoder): + protoComponent = univ.Integer(0) + precomputedValues = { + '\x00': 0, + '\x01': 1, + '\x02': 2, + '\x03': 3, + '\x04': 4, + '\x05': 5, + '\x06': 6, + '\x07': 7, + '\x08': 8, + '\x09': 9, + '\xff': -1, + '\xfe': -2, + '\xfd': -3, + '\xfc': -4, + '\xfb': -5 + } + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head: + return self._createComponent(asn1Spec, tagSet, 0), tail + if head in self.precomputedValues: + value = self.precomputedValues[head] + else: + firstOctet = oct2int(head[0]) + if firstOctet & 0x80: + value = -1 + else: + value = 0 + for octet in head: + value = value << 8 | oct2int(octet) + return self._createComponent(asn1Spec, tagSet, value), tail + +class BooleanDecoder(IntegerDecoder): + protoComponent = univ.Boolean(0) + def _createComponent(self, asn1Spec, tagSet, value=None): + return IntegerDecoder._createComponent(self, asn1Spec, tagSet, value and 1 or 0) + +class BitStringDecoder(AbstractSimpleDecoder): + protoComponent = univ.BitString(()) + tagFormats = (tag.tagFormatSimple, 
tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check? + if not head: + raise error.PyAsn1Error('Empty substrate') + trailingBits = oct2int(head[0]) + if trailingBits > 7: + raise error.PyAsn1Error( + 'Trailing bits overflow %s' % trailingBits + ) + head = head[1:] + lsb = p = 0; l = len(head)-1; b = [] + while p <= l: + if p == l: + lsb = trailingBits + j = 7 + o = oct2int(head[p]) + while j >= lsb: + b.append((o>>j)&0x01) + j = j - 1 + p = p + 1 + return self._createComponent(asn1Spec, tagSet, b), tail + r = self._createComponent(asn1Spec, tagSet, ()) + if substrateFun: + return substrateFun(r, substrate, length) + while head: + component, head = decodeFun(head, self.protoComponent) + r = r + component + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet, '') + if substrateFun: + return substrateFun(r, substrate, length) + while substrate: + component, substrate = decodeFun(substrate, self.protoComponent, + allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + r = r + component + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + return r, substrate + +class OctetStringDecoder(AbstractSimpleDecoder): + protoComponent = univ.OctetString('') + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if tagSet[0][1] == tag.tagFormatSimple: # XXX what tag to check? 
+ return self._createComponent(asn1Spec, tagSet, head), tail + r = self._createComponent(asn1Spec, tagSet, '') + if substrateFun: + return substrateFun(r, substrate, length) + while head: + component, head = decodeFun(head, self.protoComponent) + r = r + component + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet, '') + if substrateFun: + return substrateFun(r, substrate, length) + while substrate: + component, substrate = decodeFun(substrate, self.protoComponent, + allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + r = r + component + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + return r, substrate + +class NullDecoder(AbstractSimpleDecoder): + protoComponent = univ.Null('') + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + r = self._createComponent(asn1Spec, tagSet) + if head: + raise error.PyAsn1Error('Unexpected %d-octet substrate for Null' % length) + return r, tail + +class ObjectIdentifierDecoder(AbstractSimpleDecoder): + protoComponent = univ.ObjectIdentifier(()) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head: + raise error.PyAsn1Error('Empty substrate') + + oid = () + index = 0 + substrateLen = len(head) + while index < substrateLen: + subId = oct2int(head[index]) + index += 1 + if subId < 128: + oid = oid + (subId,) + elif subId > 128: + # Construct subid from a number of octets + nextSubId = subId + subId = 0 + while nextSubId >= 128: + subId = (subId << 7) + (nextSubId & 0x7F) + if index >= substrateLen: + raise error.SubstrateUnderrunError( + 'Short substrate for sub-OID 
past %s' % (oid,) + ) + nextSubId = oct2int(head[index]) + index += 1 + oid = oid + ((subId << 7) + nextSubId,) + elif subId == 128: + # ASN.1 spec forbids leading zeros (0x80) in OID + # encoding, tolerating it opens a vulnerability. See + # http://www.cosic.esat.kuleuven.be/publications/article-1432.pdf + # page 7 + raise error.PyAsn1Error('Invalid octet 0x80 in OID encoding') + + # Decode two leading arcs + if 0 <= oid[0] <= 39: + oid = (0,) + oid + elif 40 <= oid[0] <= 79: + oid = (1, oid[0]-40) + oid[1:] + elif oid[0] >= 80: + oid = (2, oid[0]-80) + oid[1:] + else: + raise error.PyAsn1Error('Malformed first OID octet: %s' % head[0]) + + return self._createComponent(asn1Spec, tagSet, oid), tail + +class RealDecoder(AbstractSimpleDecoder): + protoComponent = univ.Real() + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head: + return self._createComponent(asn1Spec, tagSet, 0.0), tail + fo = oct2int(head[0]); head = head[1:] + if fo & 0x80: # binary encoding + if not head: + raise error.PyAsn1Error("Incomplete floating-point value") + n = (fo & 0x03) + 1 + if n == 4: + n = oct2int(head[0]) + head = head[1:] + eo, head = head[:n], head[n:] + if not eo or not head: + raise error.PyAsn1Error('Real exponent screwed') + e = oct2int(eo[0]) & 0x80 and -1 or 0 + while eo: # exponent + e <<= 8 + e |= oct2int(eo[0]) + eo = eo[1:] + b = fo >> 4 & 0x03 # base bits + if b > 2: + raise error.PyAsn1Error('Illegal Real base') + if b == 1: # encbase = 8 + e *= 3 + elif b == 2: # encbase = 16 + e *= 4 + p = 0 + while head: # value + p <<= 8 + p |= oct2int(head[0]) + head = head[1:] + if fo & 0x40: # sign bit + p = -p + sf = fo >> 2 & 0x03 # scale bits + p *= 2**sf + value = (p, 2, e) + elif fo & 0x40: # infinite value + value = fo & 0x01 and '-inf' or 'inf' + elif fo & 0xc0 == 0: # character encoding + if not head: + raise error.PyAsn1Error("Incomplete 
floating-point value") + try: + if fo & 0x3 == 0x1: # NR1 + value = (int(head), 10, 0) + elif fo & 0x3 == 0x2: # NR2 + value = float(head) + elif fo & 0x3 == 0x3: # NR3 + value = float(head) + else: + raise error.SubstrateUnderrunError( + 'Unknown NR (tag %s)' % fo + ) + except ValueError: + raise error.SubstrateUnderrunError( + 'Bad character Real syntax' + ) + else: + raise error.SubstrateUnderrunError( + 'Unknown encoding (tag %s)' % fo + ) + return self._createComponent(asn1Spec, tagSet, value), tail + +class SequenceDecoder(AbstractConstructedDecoder): + protoComponent = univ.Sequence() + def _getComponentTagMap(self, r, idx): + try: + return r.getComponentTagMapNearPosition(idx) + except error.PyAsn1Error: + return + + def _getComponentPositionByType(self, r, t, idx): + return r.getComponentPositionNearType(t, idx) + + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + r = self._createComponent(asn1Spec, tagSet) + idx = 0 + if substrateFun: + return substrateFun(r, substrate, length) + while head: + asn1Spec = self._getComponentTagMap(r, idx) + component, head = decodeFun(head, asn1Spec) + idx = self._getComponentPositionByType( + r, component.getEffectiveTagSet(), idx + ) + r.setComponentByPosition(idx, component, asn1Spec is None) + idx = idx + 1 + r.setDefaultComponents() + r.verifySizeSpec() + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + idx = 0 + while substrate: + asn1Spec = self._getComponentTagMap(r, idx) + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + idx = self._getComponentPositionByType( + r, 
component.getEffectiveTagSet(), idx + ) + r.setComponentByPosition(idx, component, asn1Spec is None) + idx = idx + 1 + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + r.setDefaultComponents() + r.verifySizeSpec() + return r, substrate + +class SequenceOfDecoder(AbstractConstructedDecoder): + protoComponent = univ.SequenceOf() + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + asn1Spec = r.getComponentType() + idx = 0 + while head: + component, head = decodeFun(head, asn1Spec) + r.setComponentByPosition(idx, component, asn1Spec is None) + idx = idx + 1 + r.verifySizeSpec() + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + asn1Spec = r.getComponentType() + idx = 0 + while substrate: + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + r.setComponentByPosition(idx, component, asn1Spec is None) + idx = idx + 1 + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + r.verifySizeSpec() + return r, substrate + +class SetDecoder(SequenceDecoder): + protoComponent = univ.Set() + def _getComponentTagMap(self, r, idx): + return r.getComponentTagMap() + + def _getComponentPositionByType(self, r, t, idx): + nextIdx = r.getComponentPositionByType(t) + if nextIdx is None: + return idx + else: + return nextIdx + +class SetOfDecoder(SequenceOfDecoder): + protoComponent = univ.SetOf() + +class ChoiceDecoder(AbstractConstructedDecoder): + protoComponent = univ.Choice() + tagFormats 
= (tag.tagFormatSimple, tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + if r.getTagSet() == tagSet: # explicitly tagged Choice + component, head = decodeFun( + head, r.getComponentTagMap() + ) + else: + component, head = decodeFun( + head, r.getComponentTagMap(), tagSet, length, state + ) + if isinstance(component, univ.Choice): + effectiveTagSet = component.getEffectiveTagSet() + else: + effectiveTagSet = component.getTagSet() + r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None) + return r, tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + r = self._createComponent(asn1Spec, tagSet) + if substrateFun: + return substrateFun(r, substrate, length) + if r.getTagSet() == tagSet: # explicitly tagged Choice + component, substrate = decodeFun(substrate, r.getComponentTagMap()) + # eat up EOO marker + eooMarker, substrate = decodeFun(substrate, allowEoo=True) + if not eoo.endOfOctets.isSameTypeWith(eooMarker) or \ + eooMarker != eoo.endOfOctets: + raise error.PyAsn1Error('No EOO seen before substrate ends') + else: + component, substrate= decodeFun( + substrate, r.getComponentTagMap(), tagSet, length, state + ) + if isinstance(component, univ.Choice): + effectiveTagSet = component.getEffectiveTagSet() + else: + effectiveTagSet = component.getTagSet() + r.setComponentByType(effectiveTagSet, component, 0, asn1Spec is None) + return r, substrate + +class AnyDecoder(AbstractSimpleDecoder): + protoComponent = univ.Any() + tagFormats = (tag.tagFormatSimple, tag.tagFormatConstructed) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if asn1Spec is None or \ + asn1Spec is 
not None and tagSet != asn1Spec.getTagSet(): + # untagged Any container, recover inner header substrate + length = length + len(fullSubstrate) - len(substrate) + substrate = fullSubstrate + if substrateFun: + return substrateFun(self._createComponent(asn1Spec, tagSet), + substrate, length) + head, tail = substrate[:length], substrate[length:] + return self._createComponent(asn1Spec, tagSet, value=head), tail + + def indefLenValueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, + length, state, decodeFun, substrateFun): + if asn1Spec is not None and tagSet == asn1Spec.getTagSet(): + # tagged Any type -- consume header substrate + header = '' + else: + # untagged Any, recover header substrate + header = fullSubstrate[:-len(substrate)] + + r = self._createComponent(asn1Spec, tagSet, header) + + # Any components do not inherit initial tag + asn1Spec = self.protoComponent + + if substrateFun: + return substrateFun(r, substrate, length) + while substrate: + component, substrate = decodeFun(substrate, asn1Spec, allowEoo=True) + if eoo.endOfOctets.isSameTypeWith(component) and \ + component == eoo.endOfOctets: + break + r = r + component + else: + raise error.SubstrateUnderrunError( + 'No EOO seen before substrate ends' + ) + return r, substrate + +# character string types +class UTF8StringDecoder(OctetStringDecoder): + protoComponent = char.UTF8String() +class NumericStringDecoder(OctetStringDecoder): + protoComponent = char.NumericString() +class PrintableStringDecoder(OctetStringDecoder): + protoComponent = char.PrintableString() +class TeletexStringDecoder(OctetStringDecoder): + protoComponent = char.TeletexString() +class VideotexStringDecoder(OctetStringDecoder): + protoComponent = char.VideotexString() +class IA5StringDecoder(OctetStringDecoder): + protoComponent = char.IA5String() +class GraphicStringDecoder(OctetStringDecoder): + protoComponent = char.GraphicString() +class VisibleStringDecoder(OctetStringDecoder): + protoComponent = char.VisibleString() 
+class GeneralStringDecoder(OctetStringDecoder): + protoComponent = char.GeneralString() +class UniversalStringDecoder(OctetStringDecoder): + protoComponent = char.UniversalString() +class BMPStringDecoder(OctetStringDecoder): + protoComponent = char.BMPString() + +# "useful" types +class ObjectDescriptorDecoder(OctetStringDecoder): + protoComponent = useful.ObjectDescriptor() +class GeneralizedTimeDecoder(OctetStringDecoder): + protoComponent = useful.GeneralizedTime() +class UTCTimeDecoder(OctetStringDecoder): + protoComponent = useful.UTCTime() + +tagMap = { + univ.Integer.tagSet: IntegerDecoder(), + univ.Boolean.tagSet: BooleanDecoder(), + univ.BitString.tagSet: BitStringDecoder(), + univ.OctetString.tagSet: OctetStringDecoder(), + univ.Null.tagSet: NullDecoder(), + univ.ObjectIdentifier.tagSet: ObjectIdentifierDecoder(), + univ.Enumerated.tagSet: IntegerDecoder(), + univ.Real.tagSet: RealDecoder(), + univ.Sequence.tagSet: SequenceDecoder(), # conflicts with SequenceOf + univ.Set.tagSet: SetDecoder(), # conflicts with SetOf + univ.Choice.tagSet: ChoiceDecoder(), # conflicts with Any + # character string types + char.UTF8String.tagSet: UTF8StringDecoder(), + char.NumericString.tagSet: NumericStringDecoder(), + char.PrintableString.tagSet: PrintableStringDecoder(), + char.TeletexString.tagSet: TeletexStringDecoder(), + char.VideotexString.tagSet: VideotexStringDecoder(), + char.IA5String.tagSet: IA5StringDecoder(), + char.GraphicString.tagSet: GraphicStringDecoder(), + char.VisibleString.tagSet: VisibleStringDecoder(), + char.GeneralString.tagSet: GeneralStringDecoder(), + char.UniversalString.tagSet: UniversalStringDecoder(), + char.BMPString.tagSet: BMPStringDecoder(), + # useful types + useful.ObjectDescriptor.tagSet: ObjectDescriptorDecoder(), + useful.GeneralizedTime.tagSet: GeneralizedTimeDecoder(), + useful.UTCTime.tagSet: UTCTimeDecoder() +} + +# Type-to-codec map for ambiguous ASN.1 types +typeMap = { + univ.Set.typeId: SetDecoder(), + univ.SetOf.typeId: 
SetOfDecoder(), + univ.Sequence.typeId: SequenceDecoder(), + univ.SequenceOf.typeId: SequenceOfDecoder(), + univ.Choice.typeId: ChoiceDecoder(), + univ.Any.typeId: AnyDecoder() +} + +( stDecodeTag, stDecodeLength, stGetValueDecoder, stGetValueDecoderByAsn1Spec, + stGetValueDecoderByTag, stTryAsExplicitTag, stDecodeValue, + stDumpRawValue, stErrorCondition, stStop ) = [x for x in range(10)] + +class Decoder: + defaultErrorState = stErrorCondition +# defaultErrorState = stDumpRawValue + defaultRawDecoder = AnyDecoder() + supportIndefLength = True + def __init__(self, tagMap, typeMap={}): + self.__tagMap = tagMap + self.__typeMap = typeMap + # Tag & TagSet objects caches + self.__tagCache = {} + self.__tagSetCache = {} + + def __call__(self, substrate, asn1Spec=None, tagSet=None, + length=None, state=stDecodeTag, recursiveFlag=1, + substrateFun=None, allowEoo=False): + if debug.logger & debug.flagDecoder: + debug.logger('decoder called at scope %s with state %d, working with up to %d octets of substrate: %s' % (debug.scope, state, len(substrate), debug.hexdump(substrate))) + fullSubstrate = substrate + while state != stStop: + if state == stDecodeTag: + if not substrate: + raise error.SubstrateUnderrunError( + 'Short octet stream on tag decoding' + ) + if not isOctetsType(substrate) and \ + not isinstance(substrate, univ.OctetString): + raise error.PyAsn1Error('Bad octet stream type') + # Decode tag + firstOctet = substrate[0] + substrate = substrate[1:] + if firstOctet in self.__tagCache: + lastTag = self.__tagCache[firstOctet] + else: + t = oct2int(firstOctet) + # Look for end-of-octets sentinel + if t == 0: + if substrate and oct2int(substrate[0]) == 0: + if allowEoo and self.supportIndefLength: + debug.logger and debug.logger & debug.flagDecoder and debug.logger('end-of-octets sentinel found') + value, substrate = eoo.endOfOctets, substrate[1:] + state = stStop + continue + else: + raise error.PyAsn1Error('Unexpected end-of-contents sentinel') + else: + raise 
error.PyAsn1Error('Zero tag encountered')
+                    tagClass = t&0xC0
+                    tagFormat = t&0x20
+                    tagId = t&0x1F
+                    if tagId == 0x1F:
+                        tagId = 0
+                        while 1:
+                            if not substrate:
+                                raise error.SubstrateUnderrunError(
+                                    'Short octet stream on long tag decoding'
+                                    )
+                            t = oct2int(substrate[0])
+                            tagId = tagId << 7 | (t&0x7F)
+                            substrate = substrate[1:]
+                            if not t&0x80:
+                                break
+                    lastTag = tag.Tag(
+                        tagClass=tagClass, tagFormat=tagFormat, tagId=tagId
+                        )
+                    if tagId < 31:
+                        # cache short tags
+                        self.__tagCache[firstOctet] = lastTag
+                if tagSet is None:
+                    if firstOctet in self.__tagSetCache:
+                        tagSet = self.__tagSetCache[firstOctet]
+                    else:
+                        # base tag not recovered
+                        tagSet = tag.TagSet((), lastTag)
+                        if firstOctet in self.__tagCache:
+                            self.__tagSetCache[firstOctet] = tagSet
+                else:
+                    tagSet = lastTag + tagSet
+                state = stDecodeLength
+                debug.logger and debug.logger & debug.flagDecoder and debug.logger('tag decoded into %s, decoding length' % tagSet)
+            if state == stDecodeLength:
+                # Decode length
+                if not substrate:
+                    raise error.SubstrateUnderrunError(
+                        'Short octet stream on length decoding'
+                        )
+                firstOctet = oct2int(substrate[0])
+                if firstOctet == 128:
+                    size = 1
+                    length = -1
+                elif firstOctet < 128:
+                    length, size = firstOctet, 1
+                else:
+                    size = firstOctet & 0x7F
+                    # encoded in size bytes
+                    length = 0
+                    lengthString = substrate[1:size+1]
+                    # missing check on maximum size, which shouldn't be a
+                    # problem, we can handle more than is possible
+                    if len(lengthString) != size:
+                        raise error.SubstrateUnderrunError(
+                            '%s<%s at %s' %
+                            (size, len(lengthString), tagSet)
+                            )
+                    for char in lengthString:
+                        length = (length << 8) | oct2int(char)
+                    size = size + 1
+                substrate = substrate[size:]
+                if length != -1 and len(substrate) < length:
+                    raise error.SubstrateUnderrunError(
+                        '%d-octet short' % (length - len(substrate))
+                        )
+                if length == -1 and not self.supportIndefLength:
+                    raise error.PyAsn1Error('Indefinite length encoding not supported by this codec')
+                state = stGetValueDecoder
+                
debug.logger and debug.logger & debug.flagDecoder and debug.logger('value length decoded into %d, payload substrate is: %s' % (length, debug.hexdump(length == -1 and substrate or substrate[:length]))) + if state == stGetValueDecoder: + if asn1Spec is None: + state = stGetValueDecoderByTag + else: + state = stGetValueDecoderByAsn1Spec + # + # There're two ways of creating subtypes in ASN.1 what influences + # decoder operation. These methods are: + # 1) Either base types used in or no IMPLICIT tagging has been + # applied on subtyping. + # 2) Subtype syntax drops base type information (by means of + # IMPLICIT tagging. + # The first case allows for complete tag recovery from substrate + # while the second one requires original ASN.1 type spec for + # decoding. + # + # In either case a set of tags (tagSet) is coming from substrate + # in an incremental, tag-by-tag fashion (this is the case of + # EXPLICIT tag which is most basic). Outermost tag comes first + # from the wire. + # + if state == stGetValueDecoderByTag: + if tagSet in self.__tagMap: + concreteDecoder = self.__tagMap[tagSet] + else: + concreteDecoder = None + if concreteDecoder: + state = stDecodeValue + else: + _k = tagSet[:1] + if _k in self.__tagMap: + concreteDecoder = self.__tagMap[_k] + else: + concreteDecoder = None + if concreteDecoder: + state = stDecodeValue + else: + state = stTryAsExplicitTag + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('codec %s chosen by a built-in type, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state == stDecodeValue and 'value' or 'as explicit tag')) + debug.scope.push(concreteDecoder is None and '?' 
or concreteDecoder.protoComponent.__class__.__name__) + if state == stGetValueDecoderByAsn1Spec: + if isinstance(asn1Spec, (dict, tagmap.TagMap)): + if tagSet in asn1Spec: + __chosenSpec = asn1Spec[tagSet] + else: + __chosenSpec = None + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('candidate ASN.1 spec is a map of:') + for t, v in asn1Spec.getPosMap().items(): + debug.logger(' %s -> %s' % (t, v.__class__.__name__)) + if asn1Spec.getNegMap(): + debug.logger('but neither of: ') + for t, v in asn1Spec.getNegMap().items(): + debug.logger(' %s -> %s' % (t, v.__class__.__name__)) + debug.logger('new candidate ASN.1 spec is %s, chosen by %s' % (__chosenSpec is None and '' or __chosenSpec.prettyPrintType(), tagSet)) + else: + __chosenSpec = asn1Spec + debug.logger and debug.logger & debug.flagDecoder and debug.logger('candidate ASN.1 spec is %s' % asn1Spec.__class__.__name__) + if __chosenSpec is not None and ( + tagSet == __chosenSpec.getTagSet() or \ + tagSet in __chosenSpec.getTagMap() + ): + # use base type for codec lookup to recover untagged types + baseTagSet = __chosenSpec.baseTagSet + if __chosenSpec.typeId is not None and \ + __chosenSpec.typeId in self.__typeMap: + # ambiguous type + concreteDecoder = self.__typeMap[__chosenSpec.typeId] + debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen for an ambiguous type by type ID %s' % (__chosenSpec.typeId,)) + elif baseTagSet in self.__tagMap: + # base type or tagged subtype + concreteDecoder = self.__tagMap[baseTagSet] + debug.logger and debug.logger & debug.flagDecoder and debug.logger('value decoder chosen by base %s' % (baseTagSet,)) + else: + concreteDecoder = None + if concreteDecoder: + asn1Spec = __chosenSpec + state = stDecodeValue + else: + state = stTryAsExplicitTag + else: + concreteDecoder = None + state = stTryAsExplicitTag + if debug.logger and debug.logger & debug.flagDecoder: + debug.logger('codec %s chosen by ASN.1 spec, decoding %s' % 
(state == stDecodeValue and concreteDecoder.__class__.__name__ or "", state == stDecodeValue and 'value' or 'as explicit tag')) + debug.scope.push(__chosenSpec is None and '?' or __chosenSpec.__class__.__name__) + if state == stTryAsExplicitTag: + if tagSet and \ + tagSet[0][1] == tag.tagFormatConstructed and \ + tagSet[0][0] != tag.tagClassUniversal: + # Assume explicit tagging + concreteDecoder = explicitTagDecoder + state = stDecodeValue + else: + concreteDecoder = None + state = self.defaultErrorState + debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding %s' % (concreteDecoder and concreteDecoder.__class__.__name__ or "", state == stDecodeValue and 'value' or 'as failure')) + if state == stDumpRawValue: + concreteDecoder = self.defaultRawDecoder + debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s chosen, decoding value' % concreteDecoder.__class__.__name__) + state = stDecodeValue + if state == stDecodeValue: + if recursiveFlag == 0 and not substrateFun: # legacy + substrateFun = lambda a,b,c: (a,b[:c]) + if length == -1: # indef length + value, substrate = concreteDecoder.indefLenValueDecoder( + fullSubstrate, substrate, asn1Spec, tagSet, length, + stGetValueDecoder, self, substrateFun + ) + else: + value, substrate = concreteDecoder.valueDecoder( + fullSubstrate, substrate, asn1Spec, tagSet, length, + stGetValueDecoder, self, substrateFun + ) + state = stStop + debug.logger and debug.logger & debug.flagDecoder and debug.logger('codec %s yields type %s, value:\n%s\n...remaining substrate is: %s' % (concreteDecoder.__class__.__name__, value.__class__.__name__, value.prettyPrint(), substrate and debug.hexdump(substrate) or '')) + if state == stErrorCondition: + raise error.PyAsn1Error( + '%s not in asn1Spec: %s' % (tagSet, asn1Spec) + ) + if debug.logger and debug.logger & debug.flagDecoder: + debug.scope.pop() + debug.logger('decoder left scope %s, call completed' % debug.scope) + return 
value, substrate + +decode = Decoder(tagMap, typeMap) + +# XXX +# non-recursive decoding; return position rather than substrate diff --git a/pyasn1/codec/ber/encoder.py b/pyasn1/codec/ber/encoder.py new file mode 100644 index 0000000..0fb4ae7 --- /dev/null +++ b/pyasn1/codec/ber/encoder.py @@ -0,0 +1,433 @@ +# BER encoder +from pyasn1.type import base, tag, univ, char, useful +from pyasn1.codec.ber import eoo +from pyasn1.compat.octets import int2oct, oct2int, ints2octs, null, str2octs +from pyasn1 import debug, error + +class Error(Exception): pass + +class AbstractItemEncoder: + supportIndefLenMode = 1 + def encodeTag(self, t, isConstructed): + tagClass, tagFormat, tagId = t.asTuple() # this is a hotspot + v = tagClass | tagFormat + if isConstructed: + v = v|tag.tagFormatConstructed + if tagId < 31: + return int2oct(v|tagId) + else: + s = int2oct(tagId&0x7f) + tagId = tagId >> 7 + while tagId: + s = int2oct(0x80|(tagId&0x7f)) + s + tagId = tagId >> 7 + return int2oct(v|0x1F) + s + + def encodeLength(self, length, defMode): + if not defMode and self.supportIndefLenMode: + return int2oct(0x80) + if length < 0x80: + return int2oct(length) + else: + substrate = null + while length: + substrate = int2oct(length&0xff) + substrate + length = length >> 8 + substrateLen = len(substrate) + if substrateLen > 126: + raise Error('Length octets overflow (%d)' % substrateLen) + return int2oct(0x80 | substrateLen) + substrate + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + raise Error('Not implemented') + + def _encodeEndOfOctets(self, encodeFun, defMode): + if defMode or not self.supportIndefLenMode: + return null + else: + return encodeFun(eoo.endOfOctets, defMode) + + def encode(self, encodeFun, value, defMode, maxChunkSize): + substrate, isConstructed = self.encodeValue( + encodeFun, value, defMode, maxChunkSize + ) + tagSet = value.getTagSet() + if tagSet: + if not isConstructed: # primitive form implies definite mode + defMode = 1 + return 
self.encodeTag( + tagSet[-1], isConstructed + ) + self.encodeLength( + len(substrate), defMode + ) + substrate + self._encodeEndOfOctets(encodeFun, defMode) + else: + return substrate # untagged value + +class EndOfOctetsEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return null, 0 + +class ExplicitlyTaggedItemEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if isinstance(value, base.AbstractConstructedAsn1Item): + value = value.clone(tagSet=value.getTagSet()[:-1], + cloneValueFlag=1) + else: + value = value.clone(tagSet=value.getTagSet()[:-1]) + return encodeFun(value, defMode, maxChunkSize), 1 + +explicitlyTaggedItemEncoder = ExplicitlyTaggedItemEncoder() + +class BooleanEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + _true = ints2octs((1,)) + _false = ints2octs((0,)) + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return value and self._true or self._false, 0 + +class IntegerEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + supportCompactZero = False + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if value == 0: # shortcut for zero value + if self.supportCompactZero: + # this seems to be a correct way for encoding zeros + return null, 0 + else: + # this seems to be a widespread way for encoding zeros + return ints2octs((0,)), 0 + octets = [] + value = int(value) # to save on ops on asn1 type + while 1: + octets.insert(0, value & 0xff) + if value == 0 or value == -1: + break + value = value >> 8 + if value == 0 and octets[0] & 0x80: + octets.insert(0, 0) + while len(octets) > 1 and \ + (octets[0] == 0 and octets[1] & 0x80 == 0 or \ + octets[0] == 0xff and octets[1] & 0x80 != 0): + del octets[0] + return ints2octs(octets), 0 + +class BitStringEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if not maxChunkSize or len(value) <= maxChunkSize*8: + out_len = 
(len(value) + 7) // 8 + out_list = out_len * [0] + j = 7 + i = -1 + for val in value: + j += 1 + if j == 8: + i += 1 + j = 0 + out_list[i] = out_list[i] | val << (7-j) + return int2oct(7-j) + ints2octs(out_list), 0 + else: + pos = 0; substrate = null + while 1: + # count in octets + v = value.clone(value[pos*8:pos*8+maxChunkSize*8]) + if not v: + break + substrate = substrate + encodeFun(v, defMode, maxChunkSize) + pos = pos + maxChunkSize + return substrate, 1 + +class OctetStringEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if not maxChunkSize or len(value) <= maxChunkSize: + return value.asOctets(), 0 + else: + pos = 0; substrate = null + while 1: + v = value.clone(value[pos:pos+maxChunkSize]) + if not v: + break + substrate = substrate + encodeFun(v, defMode, maxChunkSize) + pos = pos + maxChunkSize + return substrate, 1 + +class NullEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return null, 0 + +class ObjectIdentifierEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + precomputedValues = { + (1, 3, 6, 1, 2): (43, 6, 1, 2), + (1, 3, 6, 1, 4): (43, 6, 1, 4) + } + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + oid = value.asTuple() + if oid[:5] in self.precomputedValues: + octets = self.precomputedValues[oid[:5]] + oid = oid[5:] + else: + if len(oid) < 2: + raise error.PyAsn1Error('Short OID %s' % (value,)) + + octets = () + + # Build the first twos + if oid[0] == 0 and 0 <= oid[1] <= 39: + oid = (oid[1],) + oid[2:] + elif oid[0] == 1 and 0 <= oid[1] <= 39: + oid = (oid[1] + 40,) + oid[2:] + elif oid[0] == 2: + oid = (oid[1] + 80,) + oid[2:] + else: + raise error.PyAsn1Error( + 'Impossible initial arcs %s at %s' % (oid[:2], value) + ) + + # Cycle through subIds + for subId in oid: + if subId > -1 and subId < 128: + # Optimize for the common case + octets = octets + (subId & 0x7f,) + elif subId < 0: + raise 
error.PyAsn1Error( + 'Negative OID arc %s at %s' % (subId, value) + ) + else: + # Pack large Sub-Object IDs + res = (subId & 0x7f,) + subId = subId >> 7 + while subId > 0: + res = (0x80 | (subId & 0x7f),) + res + subId = subId >> 7 + # Add packed Sub-Object ID to resulted Object ID + octets += res + + return ints2octs(octets), 0 + +class RealEncoder(AbstractItemEncoder): + supportIndefLenMode = 0 + binEncBase = 2 # set to None to choose encoding base automatically + def _dropFloatingPoint(self, m, encbase, e): + ms, es = 1, 1 + if m < 0: + ms = -1 # mantissa sign + if e < 0: + es = -1 # exponenta sign + m *= ms + if encbase == 8: + m = m*2**(abs(e) % 3 * es) + e = abs(e) // 3 * es + elif encbase == 16: + m = m*2**(abs(e) % 4 * es) + e = abs(e) // 4 * es + + while 1: + if int(m) != m: + m *= encbase + e -= 1 + continue + break + return ms, int(m), encbase, e + + def _chooseEncBase(self, value): + m, b, e = value + base = [2, 8, 16] + if value.binEncBase in base: + return self._dropFloatingPoint(m, value.binEncBase, e) + elif self.binEncBase in base: + return self._dropFloatingPoint(m, self.binEncBase, e) + # auto choosing base 2/8/16 + mantissa = [m, m, m] + exponenta = [e, e, e] + encbase = 2 + e = float('inf') + for i in range(3): + sign, mantissa[i], base[i], exponenta[i] = \ + self._dropFloatingPoint(mantissa[i], base[i], exponenta[i]) + if abs(exponenta[i]) < abs(e) or \ + (abs(exponenta[i]) == abs(e) and mantissa[i] < m): + e = exponenta[i] + m = int(mantissa[i]) + encbase = base[i] + return sign, m, encbase, e + + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + if value.isPlusInfinity(): + return int2oct(0x40), 0 + if value.isMinusInfinity(): + return int2oct(0x41), 0 + m, b, e = value + if not m: + return null, 0 + if b == 10: + return str2octs('\x03%dE%s%d' % (m, e == 0 and '+' or '', e)), 0 + elif b == 2: + fo = 0x80 # binary encoding + ms, m, encbase, e = self._chooseEncBase(value) + if ms < 0: # mantissa sign + fo = fo | 0x40 # sign bit 
+ # exponenta & mantissa normalization + if encbase == 2: + while m & 0x1 == 0: + m >>= 1 + e += 1 + elif encbase == 8: + while m & 0x7 == 0: + m >>= 3 + e += 1 + fo |= 0x10 + else: # encbase = 16 + while m & 0xf == 0: + m >>= 4 + e += 1 + fo |= 0x20 + sf = 0 # scale factor + while m & 0x1 == 0: + m >>= 1 + sf += 1 + if sf > 3: + raise error.PyAsn1Error('Scale factor overflow') # bug if raised + fo |= sf << 2 + eo = null + if e == 0 or e == -1: + eo = int2oct(e&0xff) + else: + while e not in (0, -1): + eo = int2oct(e&0xff) + eo + e >>= 8 + if e == 0 and eo and oct2int(eo[0]) & 0x80: + eo = int2oct(0) + eo + if e == -1 and eo and not (oct2int(eo[0]) & 0x80): + eo = int2oct(0xff) + eo + n = len(eo) + if n > 0xff: + raise error.PyAsn1Error('Real exponent overflow') + if n == 1: + pass + elif n == 2: + fo |= 1 + elif n == 3: + fo |= 2 + else: + fo |= 3 + eo = int2oct(n&0xff) + eo + po = null + while m: + po = int2oct(m&0xff) + po + m >>= 8 + substrate = int2oct(fo) + eo + po + return substrate, 0 + else: + raise error.PyAsn1Error('Prohibited Real base %s' % b) + +class SequenceEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + value.setDefaultComponents() + value.verifySizeSpec() + substrate = null; idx = len(value) + while idx > 0: + idx = idx - 1 + if value[idx] is None: # Optional component + continue + component = value.getDefaultComponentByPosition(idx) + if component is not None and component == value[idx]: + continue + substrate = encodeFun( + value[idx], defMode, maxChunkSize + ) + substrate + return substrate, 1 + +class SequenceOfEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + value.verifySizeSpec() + substrate = null; idx = len(value) + while idx > 0: + idx = idx - 1 + substrate = encodeFun( + value[idx], defMode, maxChunkSize + ) + substrate + return substrate, 1 + +class ChoiceEncoder(AbstractItemEncoder): + def encodeValue(self, encodeFun, value, defMode, 
maxChunkSize): + return encodeFun(value.getComponent(), defMode, maxChunkSize), 1 + +class AnyEncoder(OctetStringEncoder): + def encodeValue(self, encodeFun, value, defMode, maxChunkSize): + return value.asOctets(), defMode == 0 + +tagMap = { + eoo.endOfOctets.tagSet: EndOfOctetsEncoder(), + univ.Boolean.tagSet: BooleanEncoder(), + univ.Integer.tagSet: IntegerEncoder(), + univ.BitString.tagSet: BitStringEncoder(), + univ.OctetString.tagSet: OctetStringEncoder(), + univ.Null.tagSet: NullEncoder(), + univ.ObjectIdentifier.tagSet: ObjectIdentifierEncoder(), + univ.Enumerated.tagSet: IntegerEncoder(), + univ.Real.tagSet: RealEncoder(), + # Sequence & Set have same tags as SequenceOf & SetOf + univ.SequenceOf.tagSet: SequenceOfEncoder(), + univ.SetOf.tagSet: SequenceOfEncoder(), + univ.Choice.tagSet: ChoiceEncoder(), + # character string types + char.UTF8String.tagSet: OctetStringEncoder(), + char.NumericString.tagSet: OctetStringEncoder(), + char.PrintableString.tagSet: OctetStringEncoder(), + char.TeletexString.tagSet: OctetStringEncoder(), + char.VideotexString.tagSet: OctetStringEncoder(), + char.IA5String.tagSet: OctetStringEncoder(), + char.GraphicString.tagSet: OctetStringEncoder(), + char.VisibleString.tagSet: OctetStringEncoder(), + char.GeneralString.tagSet: OctetStringEncoder(), + char.UniversalString.tagSet: OctetStringEncoder(), + char.BMPString.tagSet: OctetStringEncoder(), + # useful types + useful.ObjectDescriptor.tagSet: OctetStringEncoder(), + useful.GeneralizedTime.tagSet: OctetStringEncoder(), + useful.UTCTime.tagSet: OctetStringEncoder() + } + +# Type-to-codec map for ambiguous ASN.1 types +typeMap = { + univ.Set.typeId: SequenceEncoder(), + univ.SetOf.typeId: SequenceOfEncoder(), + univ.Sequence.typeId: SequenceEncoder(), + univ.SequenceOf.typeId: SequenceOfEncoder(), + univ.Choice.typeId: ChoiceEncoder(), + univ.Any.typeId: AnyEncoder() + } + +class Encoder: + supportIndefLength = True + def __init__(self, tagMap, typeMap={}): + self.__tagMap = 
tagMap + self.__typeMap = typeMap + + def __call__(self, value, defMode=True, maxChunkSize=0): + if not defMode and not self.supportIndefLength: + raise error.PyAsn1Error('Indefinite length encoding not supported by this codec') + debug.logger & debug.flagEncoder and debug.logger('encoder called in %sdef mode, chunk size %s for type %s, value:\n%s' % (not defMode and 'in' or '', maxChunkSize, value.prettyPrintType(), value.prettyPrint())) + tagSet = value.getTagSet() + if len(tagSet) > 1: + concreteEncoder = explicitlyTaggedItemEncoder + else: + if value.typeId is not None and value.typeId in self.__typeMap: + concreteEncoder = self.__typeMap[value.typeId] + elif tagSet in self.__tagMap: + concreteEncoder = self.__tagMap[tagSet] + else: + tagSet = value.baseTagSet + if tagSet in self.__tagMap: + concreteEncoder = self.__tagMap[tagSet] + else: + raise Error('No encoder for %s' % (value,)) + debug.logger & debug.flagEncoder and debug.logger('using value codec %s chosen by %s' % (concreteEncoder.__class__.__name__, tagSet)) + substrate = concreteEncoder.encode( + self, value, defMode, maxChunkSize + ) + debug.logger & debug.flagEncoder and debug.logger('built %s octets of substrate: %s\nencoder completed' % (len(substrate), debug.hexdump(substrate))) + return substrate + +encode = Encoder(tagMap, typeMap) diff --git a/pyasn1/codec/ber/eoo.py b/pyasn1/codec/ber/eoo.py new file mode 100644 index 0000000..379be19 --- /dev/null +++ b/pyasn1/codec/ber/eoo.py @@ -0,0 +1,8 @@ +from pyasn1.type import base, tag + +class EndOfOctets(base.AbstractSimpleAsn1Item): + defaultValue = 0 + tagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x00) + ) +endOfOctets = EndOfOctets() diff --git a/pyasn1/codec/cer/__init__.py b/pyasn1/codec/cer/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/pyasn1/codec/cer/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. 
diff --git a/pyasn1/codec/cer/decoder.py b/pyasn1/codec/cer/decoder.py new file mode 100644 index 0000000..1770cd8 --- /dev/null +++ b/pyasn1/codec/cer/decoder.py @@ -0,0 +1,35 @@ +# CER decoder +from pyasn1.type import univ +from pyasn1.codec.ber import decoder +from pyasn1.compat.octets import oct2int +from pyasn1 import error + +class BooleanDecoder(decoder.AbstractSimpleDecoder): + protoComponent = univ.Boolean(0) + def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length, + state, decodeFun, substrateFun): + head, tail = substrate[:length], substrate[length:] + if not head or length != 1: + raise error.PyAsn1Error('Not single-octet Boolean payload') + byte = oct2int(head[0]) + # CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while + # BER allows any non-zero value as TRUE; cf. sections 8.2.2. and 11.1 + # in http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf + if byte == 0xff: + value = 1 + elif byte == 0x00: + value = 0 + else: + raise error.PyAsn1Error('Unexpected Boolean payload: %s' % byte) + return self._createComponent(asn1Spec, tagSet, value), tail + +tagMap = decoder.tagMap.copy() +tagMap.update({ + univ.Boolean.tagSet: BooleanDecoder() + }) + +typeMap = decoder.typeMap + +class Decoder(decoder.Decoder): pass + +decode = Decoder(tagMap, decoder.typeMap) diff --git a/pyasn1/codec/cer/encoder.py b/pyasn1/codec/cer/encoder.py new file mode 100644 index 0000000..61ce8a1 --- /dev/null +++ b/pyasn1/codec/cer/encoder.py @@ -0,0 +1,130 @@ +# CER encoder +from pyasn1.type import univ +from pyasn1.type import useful +from pyasn1.codec.ber import encoder +from pyasn1.compat.octets import int2oct, str2octs, null +from pyasn1 import error + +class BooleanEncoder(encoder.IntegerEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + if client == 0: + substrate = int2oct(0) + else: + substrate = int2oct(255) + return substrate, 0 + +class BitStringEncoder(encoder.BitStringEncoder): + def 
encodeValue(self, encodeFun, client, defMode, maxChunkSize): + return encoder.BitStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + +class OctetStringEncoder(encoder.OctetStringEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + +class RealEncoder(encoder.RealEncoder): + def _chooseEncBase(self, value): + m, b, e = value + return self._dropFloatingPoint(m, b, e) + +# specialized GeneralStringEncoder here + +class GeneralizedTimeEncoder(OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + zero = str2octs('0') + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() +# This breaks too many existing data items +# if '.' not in octets: +# raise error.PyAsn1Error('Format must include fraction of second: %r' % octets) + if len(octets) < 15: + raise error.PyAsn1Error('Bad UTC time length: %r' % octets) + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets[-1] != self.zchar[0]: + raise error.PyAsn1Error('Missing timezone specifier: %r' % octets) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + +class UTCTimeEncoder(encoder.OctetStringEncoder): + zchar = str2octs('Z') + pluschar = str2octs('+') + minuschar = str2octs('-') + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + octets = client.asOctets() + if self.pluschar in octets or self.minuschar in octets: + raise error.PyAsn1Error('Must be UTC time: %r' % octets) + if octets and octets[-1] != self.zchar[0]: + client = client.clone(octets + self.zchar) + if len(client) != 13: + raise error.PyAsn1Error('Bad UTC time length: %r' % client) + return encoder.OctetStringEncoder.encodeValue( + self, encodeFun, client, defMode, 1000 + ) + +class 
SetOfEncoder(encoder.SequenceOfEncoder): + def encodeValue(self, encodeFun, client, defMode, maxChunkSize): + if isinstance(client, univ.SequenceAndSetBase): + client.setDefaultComponents() + client.verifySizeSpec() + substrate = null; idx = len(client) + # This is certainly a hack but how else do I distinguish SetOf + # from Set if they have the same tags&constraints? + if isinstance(client, univ.SequenceAndSetBase): + # Set + comps = [] + while idx > 0: + idx = idx - 1 + if client[idx] is None: # Optional component + continue + if client.getDefaultComponentByPosition(idx) == client[idx]: + continue + comps.append(client[idx]) + comps.sort(key=lambda x: isinstance(x, univ.Choice) and \ + x.getMinTagSet() or x.getTagSet()) + for c in comps: + substrate += encodeFun(c, defMode, maxChunkSize) + else: + # SetOf + compSubs = [] + while idx > 0: + idx = idx - 1 + compSubs.append( + encodeFun(client[idx], defMode, maxChunkSize) + ) + compSubs.sort() # perhaps padding's not needed + substrate = null + for compSub in compSubs: + substrate += compSub + return substrate, 1 + +tagMap = encoder.tagMap.copy() +tagMap.update({ + univ.Boolean.tagSet: BooleanEncoder(), + univ.BitString.tagSet: BitStringEncoder(), + univ.OctetString.tagSet: OctetStringEncoder(), + univ.Real.tagSet: RealEncoder(), + useful.GeneralizedTime.tagSet: GeneralizedTimeEncoder(), + useful.UTCTime.tagSet: UTCTimeEncoder(), + univ.SetOf().tagSet: SetOfEncoder() # conflcts with Set +}) + +typeMap = encoder.typeMap.copy() +typeMap.update({ + univ.Set.typeId: SetOfEncoder(), + univ.SetOf.typeId: SetOfEncoder() +}) + +class Encoder(encoder.Encoder): + def __call__(self, client, defMode=False, maxChunkSize=0): + return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) + +encode = Encoder(tagMap, typeMap) + +# EncoderFactory queries class instance and builds a map of tags -> encoders diff --git a/pyasn1/codec/der/__init__.py b/pyasn1/codec/der/__init__.py new file mode 100644 index 0000000..8c3066b --- 
/dev/null +++ b/pyasn1/codec/der/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/pyasn1/codec/der/decoder.py b/pyasn1/codec/der/decoder.py new file mode 100644 index 0000000..ea58d6d --- /dev/null +++ b/pyasn1/codec/der/decoder.py @@ -0,0 +1,9 @@ +# DER decoder +from pyasn1.codec.cer import decoder + +tagMap = decoder.tagMap +typeMap = decoder.typeMap +class Decoder(decoder.Decoder): + supportIndefLength = False + +decode = Decoder(tagMap, typeMap) diff --git a/pyasn1/codec/der/encoder.py b/pyasn1/codec/der/encoder.py new file mode 100644 index 0000000..7f55eeb --- /dev/null +++ b/pyasn1/codec/der/encoder.py @@ -0,0 +1,32 @@ +# DER encoder +from pyasn1.type import univ +from pyasn1.codec.cer import encoder +from pyasn1 import error + +class SetOfEncoder(encoder.SetOfEncoder): + def _cmpSetComponents(self, c1, c2): + tagSet1 = isinstance(c1, univ.Choice) and \ + c1.getEffectiveTagSet() or c1.getTagSet() + tagSet2 = isinstance(c2, univ.Choice) and \ + c2.getEffectiveTagSet() or c2.getTagSet() + return cmp(tagSet1, tagSet2) + +tagMap = encoder.tagMap.copy() +tagMap.update({ + # Overload CER encoders with BER ones (a bit hackerish XXX) + univ.BitString.tagSet: encoder.encoder.BitStringEncoder(), + univ.OctetString.tagSet: encoder.encoder.OctetStringEncoder(), + # Set & SetOf have same tags + univ.SetOf().tagSet: SetOfEncoder() +}) + +typeMap = encoder.typeMap + +class Encoder(encoder.Encoder): + supportIndefLength = False + def __call__(self, client, defMode=True, maxChunkSize=0): + if not defMode: + raise error.PyAsn1Error('DER forbids indefinite length mode') + return encoder.Encoder.__call__(self, client, defMode, maxChunkSize) + +encode = Encoder(tagMap, typeMap) diff --git a/pyasn1/compat/__init__.py b/pyasn1/compat/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/pyasn1/compat/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. 
diff --git a/pyasn1/compat/binary.py b/pyasn1/compat/binary.py new file mode 100644 index 0000000..b38932a --- /dev/null +++ b/pyasn1/compat/binary.py @@ -0,0 +1,10 @@ +from sys import version_info + +if version_info[0:2] < (2, 6): + def bin(x): + if x <= 1: + return '0b'+str(x) + else: + return bin(x>>1) + str(x&1) +else: + bin = bin diff --git a/pyasn1/compat/octets.py b/pyasn1/compat/octets.py new file mode 100644 index 0000000..e812737 --- /dev/null +++ b/pyasn1/compat/octets.py @@ -0,0 +1,22 @@ +from sys import version_info + +if version_info[0] <= 2: + int2oct = chr + ints2octs = lambda s: ''.join([ int2oct(x) for x in s ]) + null = '' + oct2int = ord + octs2ints = lambda s: [ oct2int(x) for x in s ] + str2octs = lambda x: x + octs2str = lambda x: x + isOctetsType = lambda s: isinstance(s, str) + isStringType = lambda s: isinstance(s, (str, unicode)) +else: + ints2octs = bytes + int2oct = lambda x: ints2octs((x,)) + null = ints2octs() + oct2int = lambda x: x + octs2ints = lambda s: [ x for x in s ] + str2octs = lambda x: x.encode() + octs2str = lambda x: x.decode() + isOctetsType = lambda s: isinstance(s, bytes) + isStringType = lambda s: isinstance(s, str) diff --git a/pyasn1/debug.py b/pyasn1/debug.py new file mode 100644 index 0000000..9b69886 --- /dev/null +++ b/pyasn1/debug.py @@ -0,0 +1,110 @@ +import time +import logging +from pyasn1.compat.octets import octs2ints +from pyasn1 import error +from pyasn1 import __version__ + +flagNone = 0x0000 +flagEncoder = 0x0001 +flagDecoder = 0x0002 +flagAll = 0xffff + +flagMap = { + 'encoder': flagEncoder, + 'decoder': flagDecoder, + 'all': flagAll + } + +class Printer: + def __init__(self, logger=None, handler=None, formatter=None): + if logger is None: + logger = logging.getLogger('pyasn1') + logger.setLevel(logging.DEBUG) + if handler is None: + handler = logging.StreamHandler() + if formatter is None: + formatter = logging.Formatter('%(asctime)s %(name)s: %(message)s') + handler.setFormatter(formatter) + 
handler.setLevel(logging.DEBUG) + logger.addHandler(handler) + self.__logger = logger + + def __call__(self, msg): self.__logger.debug(msg) + def __str__(self): return '' + +if hasattr(logging, 'NullHandler'): + NullHandler = logging.NullHandler +else: + # Python 2.6 and older + class NullHandler(logging.Handler): + def emit(self, record): + pass + +class Debug: + defaultPrinter = None + def __init__(self, *flags, **options): + self._flags = flagNone + if options.get('printer') is not None: + self._printer = options.get('printer') + elif self.defaultPrinter is not None: + self._printer = self.defaultPrinter + if 'loggerName' in options: + # route our logs to parent logger + self._printer = Printer( + logger=logging.getLogger(options['loggerName']), + handler=NullHandler() + ) + else: + self._printer = Printer() + self('running pyasn1 version %s' % __version__) + for f in flags: + inverse = f and f[0] in ('!', '~') + if inverse: + f = f[1:] + try: + if inverse: + self._flags &= ~flagMap[f] + else: + self._flags |= flagMap[f] + except KeyError: + raise error.PyAsn1Error('bad debug flag %s' % f) + + self('debug category \'%s\' %s' % (f, inverse and 'disabled' or 'enabled')) + + def __str__(self): + return 'logger %s, flags %x' % (self._printer, self._flags) + + def __call__(self, msg): + self._printer(msg) + + def __and__(self, flag): + return self._flags & flag + + def __rand__(self, flag): + return flag & self._flags + +logger = 0 + +def setLogger(l): + global logger + logger = l + +def hexdump(octets): + return ' '.join( + [ '%s%.2X' % (n%16 == 0 and ('\n%.5d: ' % n) or '', x) + for n,x in zip(range(len(octets)), octs2ints(octets)) ] + ) + +class Scope: + def __init__(self): + self._list = [] + + def __str__(self): return '.'.join(self._list) + + def push(self, token): + self._list.append(token) + + def pop(self): + return self._list.pop() + +scope = Scope() diff --git a/pyasn1/error.py b/pyasn1/error.py new file mode 100644 index 0000000..716406f --- /dev/null +++ 
b/pyasn1/error.py @@ -0,0 +1,3 @@ +class PyAsn1Error(Exception): pass +class ValueConstraintError(PyAsn1Error): pass +class SubstrateUnderrunError(PyAsn1Error): pass diff --git a/pyasn1/type/__init__.py b/pyasn1/type/__init__.py new file mode 100644 index 0000000..8c3066b --- /dev/null +++ b/pyasn1/type/__init__.py @@ -0,0 +1 @@ +# This file is necessary to make this directory a package. diff --git a/pyasn1/type/base.py b/pyasn1/type/base.py new file mode 100644 index 0000000..72920a9 --- /dev/null +++ b/pyasn1/type/base.py @@ -0,0 +1,278 @@ +# Base classes for ASN.1 types +import sys +from pyasn1.type import constraint, tagmap, tag +from pyasn1 import error + +class Asn1Item: pass + +class Asn1ItemBase(Asn1Item): + # Set of tags for this ASN.1 type + tagSet = tag.TagSet() + + # A list of constraint.Constraint instances for checking values + subtypeSpec = constraint.ConstraintsIntersection() + + # Used for ambiguous ASN.1 types identification + typeId = None + + def __init__(self, tagSet=None, subtypeSpec=None): + if tagSet is None: + self._tagSet = self.tagSet + else: + self._tagSet = tagSet + if subtypeSpec is None: + self._subtypeSpec = self.subtypeSpec + else: + self._subtypeSpec = subtypeSpec + + def _verifySubtypeSpec(self, value, idx=None): + try: + self._subtypeSpec(value, idx) + except error.PyAsn1Error: + c, i, t = sys.exc_info() + raise c('%s at %s' % (i, self.__class__.__name__)) + + def getSubtypeSpec(self): return self._subtypeSpec + + def getTagSet(self): return self._tagSet + def getEffectiveTagSet(self): return self._tagSet # used by untagged types + def getTagMap(self): return tagmap.TagMap({self._tagSet: self}) + + def isSameTypeWith(self, other, matchTags=True, matchConstraints=True): + return self is other or \ + (not matchTags or \ + self._tagSet == other.getTagSet()) and \ + (not matchConstraints or \ + self._subtypeSpec==other.getSubtypeSpec()) + + def isSuperTypeOf(self, other, matchTags=True, matchConstraints=True): + """Returns true if 
argument is a ASN1 subtype of ourselves""" + return (not matchTags or \ + self._tagSet.isSuperTagSetOf(other.getTagSet())) and \ + (not matchConstraints or \ + (self._subtypeSpec.isSuperTypeOf(other.getSubtypeSpec()))) + +class NoValue: + def __getattr__(self, attr): + raise error.PyAsn1Error('No value for %s()' % attr) + def __getitem__(self, i): + raise error.PyAsn1Error('No value') + def __repr__(self): return '%s()' % self.__class__.__name__ + +noValue = NoValue() + +# Base class for "simple" ASN.1 objects. These are immutable. +class AbstractSimpleAsn1Item(Asn1ItemBase): + defaultValue = noValue + def __init__(self, value=None, tagSet=None, subtypeSpec=None): + Asn1ItemBase.__init__(self, tagSet, subtypeSpec) + if value is None or value is noValue: + value = self.defaultValue + if value is None or value is noValue: + self.__hashedValue = value = noValue + else: + value = self.prettyIn(value) + self._verifySubtypeSpec(value) + self.__hashedValue = hash(value) + self._value = value + self._len = None + + def __repr__(self): + r = [] + if self._value is not self.defaultValue: + r.append(self.prettyOut(self._value)) + if self._tagSet is not self.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + return '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + + def __str__(self): return str(self._value) + def __eq__(self, other): + return self is other and True or self._value == other + def __ne__(self, other): return self._value != other + def __lt__(self, other): return self._value < other + def __le__(self, other): return self._value <= other + def __gt__(self, other): return self._value > other + def __ge__(self, other): return self._value >= other + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self._value) + else: + def __bool__(self): return bool(self._value) + def __hash__(self): + return self.__hashedValue is noValue and hash(noValue) or 
self.__hashedValue + + def hasValue(self): + return not isinstance(self._value, NoValue) + + def clone(self, value=None, tagSet=None, subtypeSpec=None): + if value is None and tagSet is None and subtypeSpec is None: + return self + if value is None: + value = self._value + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + return self.__class__(value, tagSet, subtypeSpec) + + def subtype(self, value=None, implicitTag=None, explicitTag=None, + subtypeSpec=None): + if value is None: + value = self._value + if implicitTag is not None: + tagSet = self._tagSet.tagImplicitly(implicitTag) + elif explicitTag is not None: + tagSet = self._tagSet.tagExplicitly(explicitTag) + else: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = subtypeSpec + self._subtypeSpec + return self.__class__(value, tagSet, subtypeSpec) + + def prettyIn(self, value): return value + def prettyOut(self, value): return str(value) + + def prettyPrint(self, scope=0): + if self.hasValue(): + return self.prettyOut(self._value) + else: + return '' + + # XXX Compatibility stub + def prettyPrinter(self, scope=0): return self.prettyPrint(scope) + + def prettyPrintType(self, scope=0): + return '%s -> %s' % (self.getTagSet(), self.__class__.__name__) + +# +# Constructed types: +# * There are five of them: Sequence, SequenceOf/SetOf, Set and Choice +# * ASN1 types and values are represened by Python class instances +# * Value initialization is made for defaulted components only +# * Primary method of component addressing is by-position. Data model for base +# type is Python sequence. Additional type-specific addressing methods +# may be implemented for particular types. 
+# * SequenceOf and SetOf types do not implement any additional methods +# * Sequence, Set and Choice types also implement by-identifier addressing +# * Sequence, Set and Choice types also implement by-asn1-type (tag) addressing +# * Sequence and Set types may include optional and defaulted +# components +# * Constructed types hold a reference to component types used for value +# verification and ordering. +# * Component type is a scalar type for SequenceOf/SetOf types and a list +# of types for Sequence/Set/Choice. +# + +class AbstractConstructedAsn1Item(Asn1ItemBase): + componentType = None + sizeSpec = constraint.ConstraintsIntersection() + def __init__(self, componentType=None, tagSet=None, + subtypeSpec=None, sizeSpec=None): + Asn1ItemBase.__init__(self, tagSet, subtypeSpec) + if componentType is None: + self._componentType = self.componentType + else: + self._componentType = componentType + if sizeSpec is None: + self._sizeSpec = self.sizeSpec + else: + self._sizeSpec = sizeSpec + self._componentValues = [] + self._componentValuesSet = 0 + + def __repr__(self): + r = [] + if self._componentType is not self.componentType: + r.append('componentType=%r' % (self._componentType,)) + if self._tagSet is not self.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + r = '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + if self._componentValues: + r += '.setComponents(%s)' % ', '.join([repr(x) for x in self._componentValues]) + return r + + def __eq__(self, other): + return self is other and True or self._componentValues == other + def __ne__(self, other): return self._componentValues != other + def __lt__(self, other): return self._componentValues < other + def __le__(self, other): return self._componentValues <= other + def __gt__(self, other): return self._componentValues > other + def __ge__(self, other): return self._componentValues >= other + if 
sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self._componentValues) + else: + def __bool__(self): return bool(self._componentValues) + + def getComponentTagMap(self): + raise error.PyAsn1Error('Method not implemented') + + def _cloneComponentValues(self, myClone, cloneValueFlag): pass + + def clone(self, tagSet=None, subtypeSpec=None, sizeSpec=None, + cloneValueFlag=None): + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + if sizeSpec is None: + sizeSpec = self._sizeSpec + r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) + if cloneValueFlag: + self._cloneComponentValues(r, cloneValueFlag) + return r + + def subtype(self, implicitTag=None, explicitTag=None, subtypeSpec=None, + sizeSpec=None, cloneValueFlag=None): + if implicitTag is not None: + tagSet = self._tagSet.tagImplicitly(implicitTag) + elif explicitTag is not None: + tagSet = self._tagSet.tagExplicitly(explicitTag) + else: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = subtypeSpec + self._subtypeSpec + if sizeSpec is None: + sizeSpec = self._sizeSpec + else: + sizeSpec = sizeSpec + self._sizeSpec + r = self.__class__(self._componentType, tagSet, subtypeSpec, sizeSpec) + if cloneValueFlag: + self._cloneComponentValues(r, cloneValueFlag) + return r + + def _verifyComponent(self, idx, value): pass + + def verifySizeSpec(self): self._sizeSpec(self) + + def getComponentByPosition(self, idx): + raise error.PyAsn1Error('Method not implemented') + def setComponentByPosition(self, idx, value, verifyConstraints=True): + raise error.PyAsn1Error('Method not implemented') + + def setComponents(self, *args, **kwargs): + for idx in range(len(args)): + self[idx] = args[idx] + for k in kwargs: + self[k] = kwargs[k] + return self + + def getComponentType(self): return self._componentType + + def setDefaultComponents(self): pass + + def __getitem__(self, idx): return 
self.getComponentByPosition(idx) + def __setitem__(self, idx, value): self.setComponentByPosition(idx, value) + + def __len__(self): return len(self._componentValues) + + def clear(self): + self._componentValues = [] + self._componentValuesSet = 0 + diff --git a/pyasn1/type/char.py b/pyasn1/type/char.py new file mode 100644 index 0000000..af49ab3 --- /dev/null +++ b/pyasn1/type/char.py @@ -0,0 +1,64 @@ +# ASN.1 "character string" types +from pyasn1.type import univ, tag + +class NumericString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 18) + ) + +class PrintableString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 19) + ) + +class TeletexString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 20) + ) + +class T61String(TeletexString): pass + +class VideotexString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 21) + ) + +class IA5String(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 22) + ) + +class GraphicString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 25) + ) + +class VisibleString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 26) + ) + +class ISO646String(VisibleString): pass + +class GeneralString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 27) + ) + +class UniversalString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 28) + ) + encoding = "utf-32-be" + +class 
BMPString(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 30) + ) + encoding = "utf-16-be" + +class UTF8String(univ.OctetString): + tagSet = univ.OctetString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 12) + ) + encoding = "utf-8" diff --git a/pyasn1/type/constraint.py b/pyasn1/type/constraint.py new file mode 100644 index 0000000..6687393 --- /dev/null +++ b/pyasn1/type/constraint.py @@ -0,0 +1,200 @@ +# +# ASN.1 subtype constraints classes. +# +# Constraints are relatively rare, but every ASN1 object +# is doing checks all the time for whether they have any +# constraints and whether they are applicable to the object. +# +# What we're going to do is define objects/functions that +# can be called unconditionally if they are present, and that +# are simply not present if there are no constraints. +# +# Original concept and code by Mike C. Fletcher. +# +import sys +from pyasn1.type import error + +class AbstractConstraint: + """Abstract base-class for constraint objects + + Constraints should be stored in a simple sequence in the + namespace of their client Asn1Item sub-classes. 
+ """ + def __init__(self, *values): + self._valueMap = {} + self._setValues(values) + self.__hashedValues = None + def __call__(self, value, idx=None): + try: + self._testValue(value, idx) + except error.ValueConstraintError: + raise error.ValueConstraintError( + '%s failed at: \"%s\"' % (self, sys.exc_info()[1]) + ) + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, + ', '.join([repr(x) for x in self._values]) + ) + def __eq__(self, other): + return self is other and True or self._values == other + def __ne__(self, other): return self._values != other + def __lt__(self, other): return self._values < other + def __le__(self, other): return self._values <= other + def __gt__(self, other): return self._values > other + def __ge__(self, other): return self._values >= other + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self._values) + else: + def __bool__(self): return bool(self._values) + + def __hash__(self): + if self.__hashedValues is None: + self.__hashedValues = hash((self.__class__.__name__, self._values)) + return self.__hashedValues + + def _setValues(self, values): self._values = values + def _testValue(self, value, idx): + raise error.ValueConstraintError(value) + + # Constraints derivation logic + def getValueMap(self): return self._valueMap + def isSuperTypeOf(self, otherConstraint): + return self in otherConstraint.getValueMap() or \ + otherConstraint is self or otherConstraint == self + def isSubTypeOf(self, otherConstraint): + return otherConstraint in self._valueMap or \ + otherConstraint is self or otherConstraint == self + +class SingleValueConstraint(AbstractConstraint): + """Value must be part of defined values constraint""" + def _testValue(self, value, idx): + # XXX index vals for performance? 
+ if value not in self._values: + raise error.ValueConstraintError(value) + +class ContainedSubtypeConstraint(AbstractConstraint): + """Value must satisfy all of defined set of constraints""" + def _testValue(self, value, idx): + for c in self._values: + c(value, idx) + +class ValueRangeConstraint(AbstractConstraint): + """Value must be within start and stop values (inclusive)""" + def _testValue(self, value, idx): + if value < self.start or value > self.stop: + raise error.ValueConstraintError(value) + + def _setValues(self, values): + if len(values) != 2: + raise error.PyAsn1Error( + '%s: bad constraint values' % (self.__class__.__name__,) + ) + self.start, self.stop = values + if self.start > self.stop: + raise error.PyAsn1Error( + '%s: screwed constraint values (start > stop): %s > %s' % ( + self.__class__.__name__, + self.start, self.stop + ) + ) + AbstractConstraint._setValues(self, values) + +class ValueSizeConstraint(ValueRangeConstraint): + """len(value) must be within start and stop values (inclusive)""" + def _testValue(self, value, idx): + l = len(value) + if l < self.start or l > self.stop: + raise error.ValueConstraintError(value) + +class PermittedAlphabetConstraint(SingleValueConstraint): + def _setValues(self, values): + self._values = () + for v in values: + self._values = self._values + tuple(v) + + def _testValue(self, value, idx): + for v in value: + if v not in self._values: + raise error.ValueConstraintError(value) + +# This is a bit kludgy, meaning two op modes within a single constraing +class InnerTypeConstraint(AbstractConstraint): + """Value must satisfy type and presense constraints""" + def _testValue(self, value, idx): + if self.__singleTypeConstraint: + self.__singleTypeConstraint(value) + elif self.__multipleTypeConstraint: + if idx not in self.__multipleTypeConstraint: + raise error.ValueConstraintError(value) + constraint, status = self.__multipleTypeConstraint[idx] + if status == 'ABSENT': # XXX presense is not checked! 
+ raise error.ValueConstraintError(value) + constraint(value) + + def _setValues(self, values): + self.__multipleTypeConstraint = {} + self.__singleTypeConstraint = None + for v in values: + if isinstance(v, tuple): + self.__multipleTypeConstraint[v[0]] = v[1], v[2] + else: + self.__singleTypeConstraint = v + AbstractConstraint._setValues(self, values) + +# Boolean ops on constraints + +class ConstraintsExclusion(AbstractConstraint): + """Value must not fit the single constraint""" + def _testValue(self, value, idx): + try: + self._values[0](value, idx) + except error.ValueConstraintError: + return + else: + raise error.ValueConstraintError(value) + + def _setValues(self, values): + if len(values) != 1: + raise error.PyAsn1Error('Single constraint expected') + AbstractConstraint._setValues(self, values) + +class AbstractConstraintSet(AbstractConstraint): + """Value must not satisfy the single constraint""" + def __getitem__(self, idx): return self._values[idx] + + def __add__(self, value): return self.__class__(self, value) + def __radd__(self, value): return self.__class__(self, value) + + def __len__(self): return len(self._values) + + # Constraints inclusion in sets + + def _setValues(self, values): + self._values = values + for v in values: + self._valueMap[v] = 1 + self._valueMap.update(v.getValueMap()) + +class ConstraintsIntersection(AbstractConstraintSet): + """Value must satisfy all constraints""" + def _testValue(self, value, idx): + for v in self._values: + v(value, idx) + +class ConstraintsUnion(AbstractConstraintSet): + """Value must satisfy at least one constraint""" + def _testValue(self, value, idx): + for v in self._values: + try: + v(value, idx) + except error.ValueConstraintError: + pass + else: + return + raise error.ValueConstraintError( + 'all of %s failed for \"%s\"' % (self._values, value) + ) + +# XXX +# add tests for type check diff --git a/pyasn1/type/error.py b/pyasn1/type/error.py new file mode 100644 index 0000000..3e68484 --- 
/dev/null +++ b/pyasn1/type/error.py @@ -0,0 +1,3 @@ +from pyasn1.error import PyAsn1Error + +class ValueConstraintError(PyAsn1Error): pass diff --git a/pyasn1/type/namedtype.py b/pyasn1/type/namedtype.py new file mode 100644 index 0000000..aca4282 --- /dev/null +++ b/pyasn1/type/namedtype.py @@ -0,0 +1,149 @@ +# NamedType specification for constructed types +import sys +from pyasn1.type import tagmap +from pyasn1 import error + +class NamedType: + isOptional = 0 + isDefaulted = 0 + def __init__(self, name, t): + self.__name = name; self.__type = t + def __repr__(self): return '%s(%r, %r)' % ( + self.__class__.__name__, self.__name, self.__type + ) + def __eq__(self, other): return tuple(self) == tuple(other) + def __ne__(self, other): return tuple(self) != tuple(other) + def __lt__(self, other): return tuple(self) < tuple(other) + def __le__(self, other): return tuple(self) <= tuple(other) + def __gt__(self, other): return tuple(self) > tuple(other) + def __ge__(self, other): return tuple(self) >= tuple(other) + def __hash__(self): return hash(tuple(self)) + + def getType(self): return self.__type + def getName(self): return self.__name + def __getitem__(self, idx): + if idx == 0: return self.__name + if idx == 1: return self.__type + raise IndexError() + +class OptionalNamedType(NamedType): + isOptional = 1 +class DefaultedNamedType(NamedType): + isDefaulted = 1 + +class NamedTypes: + def __init__(self, *namedTypes): + self.__namedTypes = namedTypes + self.__namedTypesLen = len(self.__namedTypes) + self.__minTagSet = None + self.__tagToPosIdx = {}; self.__nameToPosIdx = {} + self.__tagMap = { False: None, True: None } + self.__ambigiousTypes = {} + + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, + ', '.join([ repr(x) for x in self.__namedTypes ]) + ) + def __eq__(self, other): return tuple(self) == tuple(other) + def __ne__(self, other): return tuple(self) != tuple(other) + def __lt__(self, other): return tuple(self) < tuple(other) + def 
__le__(self, other): return tuple(self) <= tuple(other) + def __gt__(self, other): return tuple(self) > tuple(other) + def __ge__(self, other): return tuple(self) >= tuple(other) + def __hash__(self): return hash(tuple(self)) + + def __getitem__(self, idx): return self.__namedTypes[idx] + + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self.__namedTypesLen) + else: + def __bool__(self): return bool(self.__namedTypesLen) + def __len__(self): return self.__namedTypesLen + + def clone(self): return self.__class__(*self.__namedTypes) + + def getTypeByPosition(self, idx): + if idx < 0 or idx >= self.__namedTypesLen: + raise error.PyAsn1Error('Type position out of range') + else: + return self.__namedTypes[idx].getType() + + def getPositionByType(self, tagSet): + if not self.__tagToPosIdx: + idx = self.__namedTypesLen + while idx > 0: + idx = idx - 1 + tagMap = self.__namedTypes[idx].getType().getTagMap() + for t in tagMap.getPosMap(): + if t in self.__tagToPosIdx: + raise error.PyAsn1Error('Duplicate type %s' % (t,)) + self.__tagToPosIdx[t] = idx + try: + return self.__tagToPosIdx[tagSet] + except KeyError: + raise error.PyAsn1Error('Type %s not found' % (tagSet,)) + + def getNameByPosition(self, idx): + try: + return self.__namedTypes[idx].getName() + except IndexError: + raise error.PyAsn1Error('Type position out of range') + def getPositionByName(self, name): + if not self.__nameToPosIdx: + idx = self.__namedTypesLen + while idx > 0: + idx = idx - 1 + n = self.__namedTypes[idx].getName() + if n in self.__nameToPosIdx: + raise error.PyAsn1Error('Duplicate name %s' % (n,)) + self.__nameToPosIdx[n] = idx + try: + return self.__nameToPosIdx[name] + except KeyError: + raise error.PyAsn1Error('Name %s not found' % (name,)) + + def __buildAmbigiousTagMap(self): + ambigiousTypes = () + idx = self.__namedTypesLen + while idx > 0: + idx = idx - 1 + t = self.__namedTypes[idx] + if t.isOptional or t.isDefaulted: + ambigiousTypes = (t, ) + ambigiousTypes + 
else: + ambigiousTypes = (t, ) + self.__ambigiousTypes[idx] = NamedTypes(*ambigiousTypes) + + def getTagMapNearPosition(self, idx): + if not self.__ambigiousTypes: self.__buildAmbigiousTagMap() + try: + return self.__ambigiousTypes[idx].getTagMap() + except KeyError: + raise error.PyAsn1Error('Type position out of range') + + def getPositionNearType(self, tagSet, idx): + if not self.__ambigiousTypes: self.__buildAmbigiousTagMap() + try: + return idx+self.__ambigiousTypes[idx].getPositionByType(tagSet) + except KeyError: + raise error.PyAsn1Error('Type position out of range') + + def genMinTagSet(self): + if self.__minTagSet is None: + for t in self.__namedTypes: + __type = t.getType() + tagSet = getattr(__type,'getMinTagSet',__type.getTagSet)() + if self.__minTagSet is None or tagSet < self.__minTagSet: + self.__minTagSet = tagSet + return self.__minTagSet + + def getTagMap(self, uniq=False): + if self.__tagMap[uniq] is None: + tagMap = tagmap.TagMap() + for nt in self.__namedTypes: + tagMap = tagMap.clone( + nt.getType(), nt.getType().getTagMap(), uniq + ) + self.__tagMap[uniq] = tagMap + return self.__tagMap[uniq] diff --git a/pyasn1/type/namedval.py b/pyasn1/type/namedval.py new file mode 100644 index 0000000..676cb93 --- /dev/null +++ b/pyasn1/type/namedval.py @@ -0,0 +1,58 @@ +# ASN.1 named integers +from pyasn1 import error + +__all__ = [ 'NamedValues' ] + +class NamedValues: + def __init__(self, *namedValues): + self.nameToValIdx = {}; self.valToNameIdx = {} + self.namedValues = () + automaticVal = 1 + for namedValue in namedValues: + if isinstance(namedValue, tuple): + name, val = namedValue + else: + name = namedValue + val = automaticVal + if name in self.nameToValIdx: + raise error.PyAsn1Error('Duplicate name %s' % (name,)) + self.nameToValIdx[name] = val + if val in self.valToNameIdx: + raise error.PyAsn1Error('Duplicate value %s=%s' % (name, val)) + self.valToNameIdx[val] = name + self.namedValues = self.namedValues + ((name, val),) + automaticVal = 
automaticVal + 1 + + def __repr__(self): + return '%s(%s)' % (self.__class__.__name__, ', '.join([repr(x) for x in self.namedValues])) + + def __str__(self): return str(self.namedValues) + + def __eq__(self, other): return tuple(self) == tuple(other) + def __ne__(self, other): return tuple(self) != tuple(other) + def __lt__(self, other): return tuple(self) < tuple(other) + def __le__(self, other): return tuple(self) <= tuple(other) + def __gt__(self, other): return tuple(self) > tuple(other) + def __ge__(self, other): return tuple(self) >= tuple(other) + def __hash__(self): return hash(tuple(self)) + + def getName(self, value): + if value in self.valToNameIdx: + return self.valToNameIdx[value] + + def getValue(self, name): + if name in self.nameToValIdx: + return self.nameToValIdx[name] + + def __getitem__(self, i): return self.namedValues[i] + def __len__(self): return len(self.namedValues) + + def __add__(self, namedValues): + return self.__class__(*self.namedValues + namedValues) + def __radd__(self, namedValues): + return self.__class__(*namedValues + tuple(self)) + + def clone(self, *namedValues): + return self.__class__(*tuple(self) + namedValues) + +# XXX clone/subtype? 
diff --git a/pyasn1/type/tag.py b/pyasn1/type/tag.py new file mode 100644 index 0000000..7471a9b --- /dev/null +++ b/pyasn1/type/tag.py @@ -0,0 +1,128 @@ +# ASN.1 types tags +from operator import getitem +from pyasn1 import error + +tagClassUniversal = 0x00 +tagClassApplication = 0x40 +tagClassContext = 0x80 +tagClassPrivate = 0xC0 + +tagFormatSimple = 0x00 +tagFormatConstructed = 0x20 + +tagCategoryImplicit = 0x01 +tagCategoryExplicit = 0x02 +tagCategoryUntagged = 0x04 + +class Tag: + def __init__(self, tagClass, tagFormat, tagId): + if tagId < 0: + raise error.PyAsn1Error( + 'Negative tag ID (%s) not allowed' % (tagId,) + ) + self.__tag = (tagClass, tagFormat, tagId) + self.uniq = (tagClass, tagId) + self.__hashedUniqTag = hash(self.uniq) + + def __str__(self): + return '[%s:%s:%s]' % self.__tag + + def __repr__(self): + return '%s(tagClass=%s, tagFormat=%s, tagId=%s)' % ( + (self.__class__.__name__,) + self.__tag + ) + # These is really a hotspot -- expose public "uniq" attribute to save on + # function calls + def __eq__(self, other): return self.uniq == other.uniq + def __ne__(self, other): return self.uniq != other.uniq + def __lt__(self, other): return self.uniq < other.uniq + def __le__(self, other): return self.uniq <= other.uniq + def __gt__(self, other): return self.uniq > other.uniq + def __ge__(self, other): return self.uniq >= other.uniq + def __hash__(self): return self.__hashedUniqTag + def __getitem__(self, idx): return self.__tag[idx] + def __and__(self, otherTag): + (tagClass, tagFormat, tagId) = otherTag + return self.__class__( + self.__tag&tagClass, self.__tag&tagFormat, self.__tag&tagId + ) + def __or__(self, otherTag): + (tagClass, tagFormat, tagId) = otherTag + return self.__class__( + self.__tag[0]|tagClass, + self.__tag[1]|tagFormat, + self.__tag[2]|tagId + ) + def asTuple(self): return self.__tag # __getitem__() is slow + +class TagSet: + def __init__(self, baseTag=(), *superTags): + self.__baseTag = baseTag + self.__superTags = 
superTags + self.__hashedSuperTags = hash(superTags) + _uniq = () + for t in superTags: + _uniq = _uniq + t.uniq + self.uniq = _uniq + self.__lenOfSuperTags = len(superTags) + + def __str__(self): + return self.__superTags and '+'.join([str(x) for x in self.__superTags]) or '[untagged]' + + def __repr__(self): + return '%s(%s)' % ( + self.__class__.__name__, + '(), ' + ', '.join([repr(x) for x in self.__superTags]) + ) + + def __add__(self, superTag): + return self.__class__( + self.__baseTag, *self.__superTags + (superTag,) + ) + def __radd__(self, superTag): + return self.__class__( + self.__baseTag, *(superTag,) + self.__superTags + ) + + def tagExplicitly(self, superTag): + tagClass, tagFormat, tagId = superTag + if tagClass == tagClassUniversal: + raise error.PyAsn1Error( + 'Can\'t tag with UNIVERSAL-class tag' + ) + if tagFormat != tagFormatConstructed: + superTag = Tag(tagClass, tagFormatConstructed, tagId) + return self + superTag + + def tagImplicitly(self, superTag): + tagClass, tagFormat, tagId = superTag + if self.__superTags: + superTag = Tag(tagClass, self.__superTags[-1][1], tagId) + return self[:-1] + superTag + + def getBaseTag(self): return self.__baseTag + def __getitem__(self, idx): + if isinstance(idx, slice): + return self.__class__( + self.__baseTag, *getitem(self.__superTags, idx) + ) + return self.__superTags[idx] + def __eq__(self, other): return self.uniq == other.uniq + def __ne__(self, other): return self.uniq != other.uniq + def __lt__(self, other): return self.uniq < other.uniq + def __le__(self, other): return self.uniq <= other.uniq + def __gt__(self, other): return self.uniq > other.uniq + def __ge__(self, other): return self.uniq >= other.uniq + def __hash__(self): return self.__hashedSuperTags + def __len__(self): return self.__lenOfSuperTags + def isSuperTagSetOf(self, tagSet): + if len(tagSet) < self.__lenOfSuperTags: + return + idx = self.__lenOfSuperTags - 1 + while idx >= 0: + if self.__superTags[idx] != tagSet[idx]: + 
return + idx = idx - 1 + return 1 + +def initTagSet(tag): return TagSet(tag, tag) diff --git a/pyasn1/type/tagmap.py b/pyasn1/type/tagmap.py new file mode 100644 index 0000000..feb91ae --- /dev/null +++ b/pyasn1/type/tagmap.py @@ -0,0 +1,66 @@ +from pyasn1 import error + +class TagMap: + def __init__(self, posMap={}, negMap={}, defType=None): + self.__posMap = posMap.copy() + self.__negMap = negMap.copy() + self.__defType = defType + + def __contains__(self, tagSet): + return tagSet in self.__posMap or \ + self.__defType is not None and tagSet not in self.__negMap + + def __getitem__(self, tagSet): + if tagSet in self.__posMap: + return self.__posMap[tagSet] + elif tagSet in self.__negMap: + raise error.PyAsn1Error('Key in negative map') + elif self.__defType is not None: + return self.__defType + else: + raise KeyError() + + def __repr__(self): + s = self.__class__.__name__ + '(' + if self.__posMap: + s = s + 'posMap=%r, ' % (self.__posMap,) + if self.__negMap: + s = s + 'negMap=%r, ' % (self.__negMap,) + if self.__defType is not None: + s = s + 'defType=%r' % (self.__defType,) + return s + ')' + + def __str__(self): + s = self.__class__.__name__ + ':\n' + if self.__posMap: + s = s + 'posMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__posMap.values()]) + if self.__negMap: + s = s + 'negMap:\n%s, ' % ',\n '.join([ x.prettyPrintType() for x in self.__negMap.values()]) + if self.__defType is not None: + s = s + 'defType:\n%s, ' % self.__defType.prettyPrintType() + return s + + def clone(self, parentType, tagMap, uniq=False): + if self.__defType is not None and tagMap.getDef() is not None: + raise error.PyAsn1Error('Duplicate default value at %s' % (self,)) + if tagMap.getDef() is not None: + defType = tagMap.getDef() + else: + defType = self.__defType + + posMap = self.__posMap.copy() + for k in tagMap.getPosMap(): + if uniq and k in posMap: + raise error.PyAsn1Error('Duplicate positive key %s' % (k,)) + posMap[k] = parentType + + negMap = 
self.__negMap.copy() + negMap.update(tagMap.getNegMap()) + + return self.__class__( + posMap, negMap, defType, + ) + + def getPosMap(self): return self.__posMap.copy() + def getNegMap(self): return self.__negMap.copy() + def getDef(self): return self.__defType diff --git a/pyasn1/type/univ.py b/pyasn1/type/univ.py new file mode 100644 index 0000000..4ed640f --- /dev/null +++ b/pyasn1/type/univ.py @@ -0,0 +1,1156 @@ +# ASN.1 "universal" data types +import operator, sys, math +from pyasn1.type import base, tag, constraint, namedtype, namedval, tagmap +from pyasn1.codec.ber import eoo +from pyasn1.compat import octets +from pyasn1 import error + +# "Simple" ASN.1 types (yet incomplete) + +class Integer(base.AbstractSimpleAsn1Item): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x02) + ) + namedValues = namedval.NamedValues() + def __init__(self, value=None, tagSet=None, subtypeSpec=None, + namedValues=None): + if namedValues is None: + self.__namedValues = self.namedValues + else: + self.__namedValues = namedValues + base.AbstractSimpleAsn1Item.__init__( + self, value, tagSet, subtypeSpec + ) + + def __repr__(self): + if self.__namedValues is not self.namedValues: + return '%s, %r)' % (base.AbstractSimpleAsn1Item.__repr__(self)[:-1], self.__namedValues) + else: + return base.AbstractSimpleAsn1Item.__repr__(self) + + def __and__(self, value): return self.clone(self._value & value) + def __rand__(self, value): return self.clone(value & self._value) + def __or__(self, value): return self.clone(self._value | value) + def __ror__(self, value): return self.clone(value | self._value) + def __xor__(self, value): return self.clone(self._value ^ value) + def __rxor__(self, value): return self.clone(value ^ self._value) + def __lshift__(self, value): return self.clone(self._value << value) + def __rshift__(self, value): return self.clone(self._value >> value) + + def __add__(self, value): return self.clone(self._value + value) + 
def __radd__(self, value): return self.clone(value + self._value) + def __sub__(self, value): return self.clone(self._value - value) + def __rsub__(self, value): return self.clone(value - self._value) + def __mul__(self, value): return self.clone(self._value * value) + def __rmul__(self, value): return self.clone(value * self._value) + def __mod__(self, value): return self.clone(self._value % value) + def __rmod__(self, value): return self.clone(value % self._value) + def __pow__(self, value, modulo=None): return self.clone(pow(self._value, value, modulo)) + def __rpow__(self, value): return self.clone(pow(value, self._value)) + + if sys.version_info[0] <= 2: + def __div__(self, value): return self.clone(self._value // value) + def __rdiv__(self, value): return self.clone(value // self._value) + else: + def __truediv__(self, value): return self.clone(self._value / value) + def __rtruediv__(self, value): return self.clone(value / self._value) + def __divmod__(self, value): return self.clone(self._value // value) + def __rdivmod__(self, value): return self.clone(value // self._value) + + __hash__ = base.AbstractSimpleAsn1Item.__hash__ + + def __int__(self): return int(self._value) + if sys.version_info[0] <= 2: + def __long__(self): return long(self._value) + def __float__(self): return float(self._value) + def __abs__(self): return self.clone(abs(self._value)) + def __index__(self): return int(self._value) + def __pos__(self): return self.clone(+self._value) + def __neg__(self): return self.clone(-self._value) + def __invert__(self): return self.clone(~self._value) + def __round__(self, n=0): + r = round(self._value, n) + if n: + return self.clone(r) + else: + return r + def __floor__(self): return math.floor(self._value) + def __ceil__(self): return math.ceil(self._value) + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): return self.clone(math.trunc(self._value)) + + def __lt__(self, value): return self._value < value + def __le__(self, value): return 
self._value <= value + def __eq__(self, value): return self._value == value + def __ne__(self, value): return self._value != value + def __gt__(self, value): return self._value > value + def __ge__(self, value): return self._value >= value + + def prettyIn(self, value): + if not isinstance(value, str): + try: + return int(value) + except: + raise error.PyAsn1Error( + 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1]) + ) + r = self.__namedValues.getValue(value) + if r is not None: + return r + try: + return int(value) + except: + raise error.PyAsn1Error( + 'Can\'t coerce %r into integer: %s' % (value, sys.exc_info()[1]) + ) + + def prettyOut(self, value): + r = self.__namedValues.getName(value) + return r is None and str(value) or repr(r) + + def getNamedValues(self): return self.__namedValues + + def clone(self, value=None, tagSet=None, subtypeSpec=None, + namedValues=None): + if value is None and tagSet is None and subtypeSpec is None \ + and namedValues is None: + return self + if value is None: + value = self._value + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + if namedValues is None: + namedValues = self.__namedValues + return self.__class__(value, tagSet, subtypeSpec, namedValues) + + def subtype(self, value=None, implicitTag=None, explicitTag=None, + subtypeSpec=None, namedValues=None): + if value is None: + value = self._value + if implicitTag is not None: + tagSet = self._tagSet.tagImplicitly(implicitTag) + elif explicitTag is not None: + tagSet = self._tagSet.tagExplicitly(explicitTag) + else: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = subtypeSpec + self._subtypeSpec + if namedValues is None: + namedValues = self.__namedValues + else: + namedValues = namedValues + self.__namedValues + return self.__class__(value, tagSet, subtypeSpec, namedValues) + +class Boolean(Integer): + tagSet = baseTagSet = tag.initTagSet( + 
tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x01), + ) + subtypeSpec = Integer.subtypeSpec+constraint.SingleValueConstraint(0,1) + namedValues = Integer.namedValues.clone(('False', 0), ('True', 1)) + +class BitString(base.AbstractSimpleAsn1Item): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x03) + ) + namedValues = namedval.NamedValues() + def __init__(self, value=None, tagSet=None, subtypeSpec=None, + namedValues=None): + if namedValues is None: + self.__namedValues = self.namedValues + else: + self.__namedValues = namedValues + base.AbstractSimpleAsn1Item.__init__( + self, value, tagSet, subtypeSpec + ) + + def clone(self, value=None, tagSet=None, subtypeSpec=None, + namedValues=None): + if value is None and tagSet is None and subtypeSpec is None \ + and namedValues is None: + return self + if value is None: + value = self._value + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + if namedValues is None: + namedValues = self.__namedValues + return self.__class__(value, tagSet, subtypeSpec, namedValues) + + def subtype(self, value=None, implicitTag=None, explicitTag=None, + subtypeSpec=None, namedValues=None): + if value is None: + value = self._value + if implicitTag is not None: + tagSet = self._tagSet.tagImplicitly(implicitTag) + elif explicitTag is not None: + tagSet = self._tagSet.tagExplicitly(explicitTag) + else: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + else: + subtypeSpec = subtypeSpec + self._subtypeSpec + if namedValues is None: + namedValues = self.__namedValues + else: + namedValues = namedValues + self.__namedValues + return self.__class__(value, tagSet, subtypeSpec, namedValues) + + def __str__(self): return str(tuple(self)) + + # Immutable sequence object protocol + + def __len__(self): + if self._len is None: + self._len = len(self._value) + return self._len + def __getitem__(self, i): + if 
isinstance(i, slice): + return self.clone(operator.getitem(self._value, i)) + else: + return self._value[i] + + def __add__(self, value): return self.clone(self._value + value) + def __radd__(self, value): return self.clone(value + self._value) + def __mul__(self, value): return self.clone(self._value * value) + def __rmul__(self, value): return self * value + + def prettyIn(self, value): + r = [] + if not value: + return () + elif isinstance(value, str): + if value[0] == '\'': + if value[-2:] == '\'B': + for v in value[1:-2]: + if v == '0': + r.append(0) + elif v == '1': + r.append(1) + else: + raise error.PyAsn1Error( + 'Non-binary BIT STRING initializer %s' % (v,) + ) + return tuple(r) + elif value[-2:] == '\'H': + for v in value[1:-2]: + i = 4 + v = int(v, 16) + while i: + i = i - 1 + r.append((v>>i)&0x01) + return tuple(r) + else: + raise error.PyAsn1Error( + 'Bad BIT STRING value notation %s' % (value,) + ) + else: + for i in value.split(','): + j = self.__namedValues.getValue(i) + if j is None: + raise error.PyAsn1Error( + 'Unknown bit identifier \'%s\'' % (i,) + ) + if j >= len(r): + r.extend([0]*(j-len(r)+1)) + r[j] = 1 + return tuple(r) + elif isinstance(value, (tuple, list)): + r = tuple(value) + for b in r: + if b and b != 1: + raise error.PyAsn1Error( + 'Non-binary BitString initializer \'%s\'' % (r,) + ) + return r + elif isinstance(value, BitString): + return tuple(value) + else: + raise error.PyAsn1Error( + 'Bad BitString initializer type \'%s\'' % (value,) + ) + + def prettyOut(self, value): + return '\"\'%s\'B\"' % ''.join([str(x) for x in value]) + +try: + all +except NameError: # Python 2.4 + def all(iterable): + for element in iterable: + if not element: + return False + return True + +class OctetString(base.AbstractSimpleAsn1Item): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x04) + ) + defaultBinValue = defaultHexValue = base.noValue + encoding = 'us-ascii' + def __init__(self, value=None, 
tagSet=None, subtypeSpec=None, + encoding=None, binValue=None, hexValue=None): + if encoding is None: + self._encoding = self.encoding + else: + self._encoding = encoding + if binValue is not None: + value = self.fromBinaryString(binValue) + if hexValue is not None: + value = self.fromHexString(hexValue) + if value is None or value is base.noValue: + value = self.defaultHexValue + if value is None or value is base.noValue: + value = self.defaultBinValue + self.__asNumbersCache = None + base.AbstractSimpleAsn1Item.__init__(self, value, tagSet, subtypeSpec) + + def clone(self, value=None, tagSet=None, subtypeSpec=None, + encoding=None, binValue=None, hexValue=None): + if value is None and tagSet is None and subtypeSpec is None and \ + encoding is None and binValue is None and hexValue is None: + return self + if value is None and binValue is None and hexValue is None: + value = self._value + if tagSet is None: + tagSet = self._tagSet + if subtypeSpec is None: + subtypeSpec = self._subtypeSpec + if encoding is None: + encoding = self._encoding + return self.__class__( + value, tagSet, subtypeSpec, encoding, binValue, hexValue + ) + + if sys.version_info[0] <= 2: + def prettyIn(self, value): + if isinstance(value, str): + return value + elif isinstance(value, unicode): + try: + return value.encode(self._encoding) + except (LookupError, UnicodeEncodeError): + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + elif isinstance(value, (tuple, list)): + try: + return ''.join([ chr(x) for x in value ]) + except ValueError: + raise error.PyAsn1Error( + 'Bad OctetString initializer \'%s\'' % (value,) + ) + else: + return str(value) + else: + def prettyIn(self, value): + if isinstance(value, bytes): + return value + elif isinstance(value, str): + try: + return value.encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) 
+ elif isinstance(value, OctetString): + return value.asOctets() + elif isinstance(value, (tuple, list, map)): + try: + return bytes(value) + except ValueError: + raise error.PyAsn1Error( + 'Bad OctetString initializer \'%s\'' % (value,) + ) + else: + try: + return str(value).encode(self._encoding) + except UnicodeEncodeError: + raise error.PyAsn1Error( + 'Can\'t encode string \'%s\' with \'%s\' codec' % (value, self._encoding) + ) + + + def fromBinaryString(self, value): + bitNo = 8; byte = 0; r = () + for v in value: + if bitNo: + bitNo = bitNo - 1 + else: + bitNo = 7 + r = r + (byte,) + byte = 0 + if v == '0': + v = 0 + elif v == '1': + v = 1 + else: + raise error.PyAsn1Error( + 'Non-binary OCTET STRING initializer %s' % (v,) + ) + byte = byte | (v << bitNo) + return octets.ints2octs(r + (byte,)) + + def fromHexString(self, value): + r = p = () + for v in value: + if p: + r = r + (int(p+v, 16),) + p = () + else: + p = v + if p: + r = r + (int(p+'0', 16),) + return octets.ints2octs(r) + + def prettyOut(self, value): + if sys.version_info[0] <= 2: + numbers = tuple(( ord(x) for x in value )) + else: + numbers = tuple(value) + if all(x >= 32 and x <= 126 for x in numbers): + return str(value) + else: + return '0x' + ''.join(( '%.2x' % x for x in numbers )) + + def __repr__(self): + r = [] + doHex = False + if self._value is not self.defaultValue: + for x in self.asNumbers(): + if x < 32 or x > 126: + doHex = True + break + if not doHex: + r.append('%r' % (self._value,)) + if self._tagSet is not self.tagSet: + r.append('tagSet=%r' % (self._tagSet,)) + if self._subtypeSpec is not self.subtypeSpec: + r.append('subtypeSpec=%r' % (self._subtypeSpec,)) + if self.encoding is not self._encoding: + r.append('encoding=%r' % (self._encoding,)) + if doHex: + r.append('hexValue=%r' % ''.join([ '%.2x' % x for x in self.asNumbers() ])) + return '%s(%s)' % (self.__class__.__name__, ', '.join(r)) + + if sys.version_info[0] <= 2: + def __str__(self): return str(self._value) + def 
__unicode__(self): + return self._value.decode(self._encoding, 'ignore') + def asOctets(self): return self._value + def asNumbers(self): + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple([ ord(x) for x in self._value ]) + return self.__asNumbersCache + else: + def __str__(self): return self._value.decode(self._encoding, 'ignore') + def __bytes__(self): return self._value + def asOctets(self): return self._value + def asNumbers(self): + if self.__asNumbersCache is None: + self.__asNumbersCache = tuple(self._value) + return self.__asNumbersCache + + # Immutable sequence object protocol + + def __len__(self): + if self._len is None: + self._len = len(self._value) + return self._len + def __getitem__(self, i): + if isinstance(i, slice): + return self.clone(operator.getitem(self._value, i)) + else: + return self._value[i] + + def __add__(self, value): return self.clone(self._value + self.prettyIn(value)) + def __radd__(self, value): return self.clone(self.prettyIn(value) + self._value) + def __mul__(self, value): return self.clone(self._value * value) + def __rmul__(self, value): return self * value + def __int__(self): return int(self._value) + def __float__(self): return float(self._value) + +class Null(OctetString): + defaultValue = ''.encode() # This is tightly constrained + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x05) + ) + subtypeSpec = OctetString.subtypeSpec+constraint.SingleValueConstraint(''.encode()) + +if sys.version_info[0] <= 2: + intTypes = (int, long) +else: + intTypes = (int,) + +numericTypes = intTypes + (float,) + +class ObjectIdentifier(base.AbstractSimpleAsn1Item): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x06) + ) + def __add__(self, other): return self.clone(self._value + other) + def __radd__(self, other): return self.clone(other + self._value) + + def asTuple(self): return self._value + + # Sequence object protocol + + def 
__len__(self): + if self._len is None: + self._len = len(self._value) + return self._len + def __getitem__(self, i): + if isinstance(i, slice): + return self.clone( + operator.getitem(self._value, i) + ) + else: + return self._value[i] + + def __str__(self): return self.prettyPrint() + def __repr__(self): + return '%s(%r)' % (self.__class__.__name__, self.prettyPrint()) + + def index(self, suboid): return self._value.index(suboid) + + def isPrefixOf(self, value): + """Returns true if argument OID resides deeper in the OID tree""" + l = len(self) + if l <= len(value): + if self._value[:l] == value[:l]: + return 1 + return 0 + + def prettyIn(self, value): + """Dotted -> tuple of numerics OID converter""" + if isinstance(value, tuple): + pass + elif isinstance(value, ObjectIdentifier): + return tuple(value) + elif octets.isStringType(value): + r = [] + for element in [ x for x in value.split('.') if x != '' ]: + try: + r.append(int(element, 0)) + except ValueError: + raise error.PyAsn1Error( + 'Malformed Object ID %s at %s: %s' % + (str(value), self.__class__.__name__, sys.exc_info()[1]) + ) + value = tuple(r) + else: + try: + value = tuple(value) + except TypeError: + raise error.PyAsn1Error( + 'Malformed Object ID %s at %s: %s' % + (str(value), self.__class__.__name__,sys.exc_info()[1]) + ) + + for x in value: + if not isinstance(x, intTypes) or x < 0: + raise error.PyAsn1Error( + 'Invalid sub-ID in %s at %s' % (value, self.__class__.__name__) + ) + + return value + + def prettyOut(self, value): return '.'.join([ str(x) for x in value ]) + +class Real(base.AbstractSimpleAsn1Item): + binEncBase = None # binEncBase = 16 is recommended for large numbers + try: + _plusInf = float('inf') + _minusInf = float('-inf') + _inf = (_plusInf, _minusInf) + except ValueError: + # Infinity support is platform and Python dependent + _plusInf = _minusInf = None + _inf = () + + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x09) + ) + + 
def __normalizeBase10(self, value): + m, b, e = value + while m and m % 10 == 0: + m = m / 10 + e = e + 1 + return m, b, e + + def prettyIn(self, value): + if isinstance(value, tuple) and len(value) == 3: + if not isinstance(value[0], numericTypes) or \ + not isinstance(value[1], intTypes) or \ + not isinstance(value[2], intTypes): + raise error.PyAsn1Error('Lame Real value syntax: %s' % (value,)) + if isinstance(value[0], float) and \ + self._inf and value[0] in self._inf: + return value[0] + if value[1] not in (2, 10): + raise error.PyAsn1Error( + 'Prohibited base for Real value: %s' % (value[1],) + ) + if value[1] == 10: + value = self.__normalizeBase10(value) + return value + elif isinstance(value, intTypes): + return self.__normalizeBase10((value, 10, 0)) + elif isinstance(value, (str, float)): + if isinstance(value, str): + try: + value = float(value) + except ValueError: + raise error.PyAsn1Error( + 'Bad real value syntax: %s' % (value,) + ) + if self._inf and value in self._inf: + return value + else: + e = 0 + while int(value) != value: + value = value * 10 + e = e - 1 + return self.__normalizeBase10((int(value), 10, e)) + elif isinstance(value, Real): + return tuple(value) + raise error.PyAsn1Error( + 'Bad real value syntax: %s' % (value,) + ) + + def prettyOut(self, value): + if value in self._inf: + return '\'%s\'' % value + else: + return str(value) + + def prettyPrint(self, scope=0): + if self.isInfinity(): + return self.prettyOut(self._value) + else: + return str(float(self)) + + def isPlusInfinity(self): return self._value == self._plusInf + def isMinusInfinity(self): return self._value == self._minusInf + def isInfinity(self): return self._value in self._inf + + def __str__(self): return str(float(self)) + + def __add__(self, value): return self.clone(float(self) + value) + def __radd__(self, value): return self + value + def __mul__(self, value): return self.clone(float(self) * value) + def __rmul__(self, value): return self * value + def 
__sub__(self, value): return self.clone(float(self) - value) + def __rsub__(self, value): return self.clone(value - float(self)) + def __mod__(self, value): return self.clone(float(self) % value) + def __rmod__(self, value): return self.clone(value % float(self)) + def __pow__(self, value, modulo=None): return self.clone(pow(float(self), value, modulo)) + def __rpow__(self, value): return self.clone(pow(value, float(self))) + + if sys.version_info[0] <= 2: + def __div__(self, value): return self.clone(float(self) / value) + def __rdiv__(self, value): return self.clone(value / float(self)) + else: + def __truediv__(self, value): return self.clone(float(self) / value) + def __rtruediv__(self, value): return self.clone(value / float(self)) + def __divmod__(self, value): return self.clone(float(self) // value) + def __rdivmod__(self, value): return self.clone(value // float(self)) + + def __int__(self): return int(float(self)) + if sys.version_info[0] <= 2: + def __long__(self): return long(float(self)) + def __float__(self): + if self._value in self._inf: + return self._value + else: + return float( + self._value[0] * pow(self._value[1], self._value[2]) + ) + def __abs__(self): return self.clone(abs(float(self))) + def __pos__(self): return self.clone(+float(self)) + def __neg__(self): return self.clone(-float(self)) + def __round__(self, n=0): + r = round(float(self), n) + if n: + return self.clone(r) + else: + return r + def __floor__(self): return self.clone(math.floor(float(self))) + def __ceil__(self): return self.clone(math.ceil(float(self))) + if sys.version_info[0:2] > (2, 5): + def __trunc__(self): return self.clone(math.trunc(float(self))) + + def __lt__(self, value): return float(self) < value + def __le__(self, value): return float(self) <= value + def __eq__(self, value): return float(self) == value + def __ne__(self, value): return float(self) != value + def __gt__(self, value): return float(self) > value + def __ge__(self, value): return float(self) >= 
value + + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(float(self)) + else: + def __bool__(self): return bool(float(self)) + __hash__ = base.AbstractSimpleAsn1Item.__hash__ + + def __getitem__(self, idx): + if self._value in self._inf: + raise error.PyAsn1Error('Invalid infinite value operation') + else: + return self._value[idx] + +class Enumerated(Integer): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 0x0A) + ) + +# "Structured" ASN.1 types + +class SetOf(base.AbstractConstructedAsn1Item): + componentType = None + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) + ) + typeId = 1 + strictConstraints = False + + def _cloneComponentValues(self, myClone, cloneValueFlag): + idx = 0; l = len(self._componentValues) + while idx < l: + c = self._componentValues[idx] + if c is not None: + if isinstance(c, base.AbstractConstructedAsn1Item): + myClone.setComponentByPosition( + idx, c.clone(cloneValueFlag=cloneValueFlag) + ) + else: + myClone.setComponentByPosition(idx, c.clone()) + idx = idx + 1 + + def _verifyComponent(self, idx, value): + t = self._componentType + if t is None: + return + if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t)) + if self.strictConstraints and \ + not t.isSuperTypeOf(value, matchTags=False): + raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t)) + + def getComponentByPosition(self, idx): return self._componentValues[idx] + def setComponentByPosition(self, idx, value=None, verifyConstraints=True): + l = len(self._componentValues) + if idx >= l: + self._componentValues = self._componentValues + (idx-l+1)*[None] + if value is None: + if self._componentValues[idx] is None: + if self._componentType is None: + raise error.PyAsn1Error('Component type not defined') + 
self._componentValues[idx] = self._componentType.clone() + self._componentValuesSet = self._componentValuesSet + 1 + return self + elif not isinstance(value, base.Asn1Item): + if self._componentType is None: + raise error.PyAsn1Error('Component type not defined') + if isinstance(self._componentType, base.AbstractSimpleAsn1Item): + value = self._componentType.clone(value=value) + else: + raise error.PyAsn1Error('Instance value required') + if verifyConstraints: + if self._componentType is not None: + self._verifyComponent(idx, value) + self._verifySubtypeSpec(value, idx) + if self._componentValues[idx] is None: + self._componentValuesSet = self._componentValuesSet + 1 + self._componentValues[idx] = value + return self + + def getComponentTagMap(self): + if self._componentType is not None: + return self._componentType.getTagMap() + + def prettyPrint(self, scope=0): + scope = scope + 1 + r = self.__class__.__name__ + ':\n' + for idx in range(len(self._componentValues)): + r = r + ' '*scope + if self._componentValues[idx] is None: + r = r + '' + else: + r = r + self._componentValues[idx].prettyPrint(scope) + return r + + def prettyPrintType(self, scope=0): + scope = scope + 1 + r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__) + if self._componentType is not None: + r = r + ' '*scope + r = r + self._componentType.prettyPrintType(scope) + return r + '\n' + ' '*(scope-1) + '}' + +class SequenceOf(SetOf): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) + ) + typeId = 2 + +class SequenceAndSetBase(base.AbstractConstructedAsn1Item): + componentType = namedtype.NamedTypes() + strictConstraints = False + def __init__(self, componentType=None, tagSet=None, + subtypeSpec=None, sizeSpec=None): + if componentType is None: + componentType = self.componentType + base.AbstractConstructedAsn1Item.__init__( + self, componentType.clone(), tagSet, subtypeSpec, sizeSpec + ) + self._componentTypeLen = 
len(self._componentType) + + def __getitem__(self, idx): + if isinstance(idx, str): + return self.getComponentByName(idx) + else: + return base.AbstractConstructedAsn1Item.__getitem__(self, idx) + + def __setitem__(self, idx, value): + if isinstance(idx, str): + self.setComponentByName(idx, value) + else: + base.AbstractConstructedAsn1Item.__setitem__(self, idx, value) + + def _cloneComponentValues(self, myClone, cloneValueFlag): + idx = 0; l = len(self._componentValues) + while idx < l: + c = self._componentValues[idx] + if c is not None: + if isinstance(c, base.AbstractConstructedAsn1Item): + myClone.setComponentByPosition( + idx, c.clone(cloneValueFlag=cloneValueFlag) + ) + else: + myClone.setComponentByPosition(idx, c.clone()) + idx = idx + 1 + + def _verifyComponent(self, idx, value): + if idx >= self._componentTypeLen: + raise error.PyAsn1Error( + 'Component type error out of range' + ) + t = self._componentType[idx].getType() + if not t.isSameTypeWith(value,matchConstraints=self.strictConstraints): + raise error.PyAsn1Error('Component value is tag-incompatible: %r vs %r' % (value, t)) + if self.strictConstraints and \ + not t.isSuperTypeOf(value, matchTags=False): + raise error.PyAsn1Error('Component value is constraints-incompatible: %r vs %r' % (value, t)) + + def getComponentByName(self, name): + return self.getComponentByPosition( + self._componentType.getPositionByName(name) + ) + def setComponentByName(self, name, value=None, verifyConstraints=True): + return self.setComponentByPosition( + self._componentType.getPositionByName(name),value,verifyConstraints + ) + + def getComponentByPosition(self, idx): + try: + return self._componentValues[idx] + except IndexError: + if idx < self._componentTypeLen: + return + raise + def setComponentByPosition(self, idx, value=None, + verifyConstraints=True, + exactTypes=False, + matchTags=True, + matchConstraints=True): + l = len(self._componentValues) + if idx >= l: + self._componentValues = self._componentValues + 
(idx-l+1)*[None] + if value is None: + if self._componentValues[idx] is None: + self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone() + self._componentValuesSet = self._componentValuesSet + 1 + return self + elif not isinstance(value, base.Asn1Item): + t = self._componentType.getTypeByPosition(idx) + if isinstance(t, base.AbstractSimpleAsn1Item): + value = t.clone(value=value) + else: + raise error.PyAsn1Error('Instance value required') + if verifyConstraints: + if self._componentTypeLen: + self._verifyComponent(idx, value) + self._verifySubtypeSpec(value, idx) + if self._componentValues[idx] is None: + self._componentValuesSet = self._componentValuesSet + 1 + self._componentValues[idx] = value + return self + + def getNameByPosition(self, idx): + if self._componentTypeLen: + return self._componentType.getNameByPosition(idx) + + def getDefaultComponentByPosition(self, idx): + if self._componentTypeLen and self._componentType[idx].isDefaulted: + return self._componentType[idx].getType() + + def getComponentType(self): + if self._componentTypeLen: + return self._componentType + + def setDefaultComponents(self): + if self._componentTypeLen == self._componentValuesSet: + return + idx = self._componentTypeLen + while idx: + idx = idx - 1 + if self._componentType[idx].isDefaulted: + if self.getComponentByPosition(idx) is None: + self.setComponentByPosition(idx) + elif not self._componentType[idx].isOptional: + if self.getComponentByPosition(idx) is None: + raise error.PyAsn1Error( + 'Uninitialized component #%s at %r' % (idx, self) + ) + + def prettyPrint(self, scope=0): + scope = scope + 1 + r = self.__class__.__name__ + ':\n' + for idx in range(len(self._componentValues)): + if self._componentValues[idx] is not None: + r = r + ' '*scope + componentType = self.getComponentType() + if componentType is None: + r = r + '' + else: + r = r + componentType.getNameByPosition(idx) + r = '%s=%s\n' % ( + r, self._componentValues[idx].prettyPrint(scope) + 
) + return r + + def prettyPrintType(self, scope=0): + scope = scope + 1 + r = '%s -> %s {\n' % (self.getTagSet(), self.__class__.__name__) + for idx in range(len(self.componentType)): + r = r + ' '*scope + r = r + '"%s"' % self.componentType.getNameByPosition(idx) + r = '%s = %s\n' % ( + r, self._componentType.getTypeByPosition(idx).prettyPrintType(scope) + ) + return r + '\n' + ' '*(scope-1) + '}' + +class Sequence(SequenceAndSetBase): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x10) + ) + typeId = 3 + + def getComponentTagMapNearPosition(self, idx): + if self._componentType: + return self._componentType.getTagMapNearPosition(idx) + + def getComponentPositionNearType(self, tagSet, idx): + if self._componentType: + return self._componentType.getPositionNearType(tagSet, idx) + else: + return idx + +class Set(SequenceAndSetBase): + tagSet = baseTagSet = tag.initTagSet( + tag.Tag(tag.tagClassUniversal, tag.tagFormatConstructed, 0x11) + ) + typeId = 4 + + def getComponent(self, innerFlag=0): return self + + def getComponentByType(self, tagSet, innerFlag=0): + c = self.getComponentByPosition( + self._componentType.getPositionByType(tagSet) + ) + if innerFlag and isinstance(c, Set): + # get inner component by inner tagSet + return c.getComponent(1) + else: + # get outer component by inner tagSet + return c + + def setComponentByType(self, tagSet, value=None, innerFlag=0, + verifyConstraints=True): + idx = self._componentType.getPositionByType(tagSet) + t = self._componentType.getTypeByPosition(idx) + if innerFlag: # set inner component by inner tagSet + if t.getTagSet(): + return self.setComponentByPosition( + idx, value, verifyConstraints + ) + else: + t = self.setComponentByPosition(idx).getComponentByPosition(idx) + return t.setComponentByType( + tagSet, value, innerFlag, verifyConstraints + ) + else: # set outer component by inner tagSet + return self.setComponentByPosition( + idx, value, verifyConstraints + ) 
+ + def getComponentTagMap(self): + if self._componentType: + return self._componentType.getTagMap(True) + + def getComponentPositionByType(self, tagSet): + if self._componentType: + return self._componentType.getPositionByType(tagSet) + +class Choice(Set): + tagSet = baseTagSet = tag.TagSet() # untagged + sizeSpec = constraint.ConstraintsIntersection( + constraint.ValueSizeConstraint(1, 1) + ) + typeId = 5 + _currentIdx = None + + def __eq__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] == other + return NotImplemented + def __ne__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] != other + return NotImplemented + def __lt__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] < other + return NotImplemented + def __le__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] <= other + return NotImplemented + def __gt__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] > other + return NotImplemented + def __ge__(self, other): + if self._componentValues: + return self._componentValues[self._currentIdx] >= other + return NotImplemented + if sys.version_info[0] <= 2: + def __nonzero__(self): return bool(self._componentValues) + else: + def __bool__(self): return bool(self._componentValues) + + def __len__(self): return self._currentIdx is not None and 1 or 0 + + def verifySizeSpec(self): + if self._currentIdx is None: + raise error.PyAsn1Error('Component not chosen') + else: + self._sizeSpec(' ') + + def _cloneComponentValues(self, myClone, cloneValueFlag): + try: + c = self.getComponent() + except error.PyAsn1Error: + pass + else: + if isinstance(c, Choice): + tagSet = c.getEffectiveTagSet() + else: + tagSet = c.getTagSet() + if isinstance(c, base.AbstractConstructedAsn1Item): + myClone.setComponentByType( + tagSet, c.clone(cloneValueFlag=cloneValueFlag) + ) + else: 
+ myClone.setComponentByType(tagSet, c.clone()) + + def setComponentByPosition(self, idx, value=None, verifyConstraints=True): + l = len(self._componentValues) + if idx >= l: + self._componentValues = self._componentValues + (idx-l+1)*[None] + if self._currentIdx is not None: + self._componentValues[self._currentIdx] = None + if value is None: + if self._componentValues[idx] is None: + self._componentValues[idx] = self._componentType.getTypeByPosition(idx).clone() + self._componentValuesSet = 1 + self._currentIdx = idx + return self + elif not isinstance(value, base.Asn1Item): + value = self._componentType.getTypeByPosition(idx).clone( + value=value + ) + if verifyConstraints: + if self._componentTypeLen: + self._verifyComponent(idx, value) + self._verifySubtypeSpec(value, idx) + self._componentValues[idx] = value + self._currentIdx = idx + self._componentValuesSet = 1 + return self + + def getMinTagSet(self): + if self._tagSet: + return self._tagSet + else: + return self._componentType.genMinTagSet() + + def getEffectiveTagSet(self): + if self._tagSet: + return self._tagSet + else: + c = self.getComponent() + if isinstance(c, Choice): + return c.getEffectiveTagSet() + else: + return c.getTagSet() + + def getTagMap(self): + if self._tagSet: + return Set.getTagMap(self) + else: + return Set.getComponentTagMap(self) + + def getComponent(self, innerFlag=0): + if self._currentIdx is None: + raise error.PyAsn1Error('Component not chosen') + else: + c = self._componentValues[self._currentIdx] + if innerFlag and isinstance(c, Choice): + return c.getComponent(innerFlag) + else: + return c + + def getName(self, innerFlag=0): + if self._currentIdx is None: + raise error.PyAsn1Error('Component not chosen') + else: + if innerFlag: + c = self._componentValues[self._currentIdx] + if isinstance(c, Choice): + return c.getName(innerFlag) + return self._componentType.getNameByPosition(self._currentIdx) + + def setDefaultComponents(self): pass + +class Any(OctetString): + tagSet = 
baseTagSet = tag.TagSet() # untagged + typeId = 6 + + def getTagMap(self): + return tagmap.TagMap( + { self.getTagSet(): self }, + { eoo.endOfOctets.getTagSet(): eoo.endOfOctets }, + self + ) + +# XXX +# coercion rules? diff --git a/pyasn1/type/useful.py b/pyasn1/type/useful.py new file mode 100644 index 0000000..1766534 --- /dev/null +++ b/pyasn1/type/useful.py @@ -0,0 +1,17 @@ +# ASN.1 "useful" types +from pyasn1.type import char, tag + +class ObjectDescriptor(char.GraphicString): + tagSet = char.GraphicString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 7) + ) + +class GeneralizedTime(char.VisibleString): + tagSet = char.VisibleString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 24) + ) + +class UTCTime(char.VisibleString): + tagSet = char.VisibleString.tagSet.tagImplicitly( + tag.Tag(tag.tagClassUniversal, tag.tagFormatSimple, 23) + ) diff --git a/pyasn1_modules/__init__.py b/pyasn1_modules/__init__.py new file mode 100644 index 0000000..6598192 --- /dev/null +++ b/pyasn1_modules/__init__.py @@ -0,0 +1,2 @@ +# http://www.python.org/dev/peps/pep-0396/ +__version__ = '0.0.8' diff --git a/pyasn1_modules/pem.py b/pyasn1_modules/pem.py new file mode 100644 index 0000000..d8d8158 --- /dev/null +++ b/pyasn1_modules/pem.py @@ -0,0 +1,51 @@ +import base64, sys + +stSpam, stHam, stDump = 0, 1, 2 + +# The markers parameters is in form ('start1', 'stop1'), ('start2', 'stop2')... 
# Return is (marker-index, substrate)
def readPemBlocksFromFile(fileObj, *markers):
    """Scan *fileObj* for the first block delimited by any marker pair.

    Each element of *markers* is a ('start', 'stop') pair of PEM boundary
    lines.  Returns a (marker-index, substrate) tuple: the index of the
    pair whose block was found and the base64-decoded payload between the
    two markers, or (-1, '') when no block is present.
    """
    # State machine: skipping noise -> collecting payload -> decoding.
    SKIP, COLLECT, DECODE = 0, 1, 2
    startMarkers = dict([(pair[0], pos) for pos, pair in enumerate(markers)])
    stopMarkers = dict([(pair[1], pos) for pos, pair in enumerate(markers)])
    idx, substrate = -1, ''
    certLines = []
    state = SKIP
    while 1:
        certLine = fileObj.readline()
        if not certLine:
            break
        certLine = certLine.strip()
        if state == SKIP:
            if certLine in startMarkers:
                certLines = []
                idx = startMarkers[certLine]
                state = COLLECT
                continue
        if state == COLLECT:
            # Only the stop marker of the *matching* pair ends the block;
            # a stop marker of a different pair is treated as payload.
            if certLine in stopMarkers and stopMarkers[certLine] == idx:
                state = DECODE
            else:
                certLines.append(certLine)
        if state == DECODE:
            if sys.version_info[0] <= 2:
                substrate = ''.join([base64.b64decode(x) for x in certLines])
            else:
                substrate = ''.encode().join(
                    [base64.b64decode(x.encode()) for x in certLines]
                )
            break
    return idx, substrate

# Backward compatibility routine
def readPemFromFile(fileObj,
                    startMarker='-----BEGIN CERTIFICATE-----',
                    endMarker='-----END CERTIFICATE-----'):
    """Return just the substrate of the first PEM block (legacy API)."""
    _, substrate = readPemBlocksFromFile(fileObj, (startMarker, endMarker))
    return substrate

def readBase64FromFile(fileObj):
    """Base64-decode every line of *fileObj* and concatenate the results."""
    if sys.version_info[0] <= 2:
        return ''.join([base64.b64decode(x) for x in fileObj.readlines()])
    else:
        return ''.encode().join(
            [base64.b64decode(x.encode()) for x in fileObj.readlines()]
        )
class SimpleSyntax(univ.Choice):
    """Application-independent value syntax (RFC 1155 section 3.2)."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('number', univ.Integer()),
        namedtype.NamedType('string', univ.OctetString()),
        namedtype.NamedType('object', univ.ObjectIdentifier()),
        namedtype.NamedType('empty', univ.Null())
    )

class IpAddress(univ.OctetString):
    """IpAddress ::= [APPLICATION 0] IMPLICIT OCTET STRING (SIZE (4))."""
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0)
    )
    # Bug fix: the size constraint must extend OctetString's subtypeSpec,
    # not univ.Integer's -- IpAddress is an OCTET STRING type.
    subtypeSpec = univ.OctetString.subtypeSpec + constraint.ValueSizeConstraint(
        4, 4
    )

class NetworkAddress(univ.Choice):
    """NetworkAddress ::= CHOICE { internet IpAddress }."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('internet', IpAddress())
    )

class Counter(univ.Integer):
    """Counter ::= [APPLICATION 1] IMPLICIT INTEGER (0..4294967295)."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 1)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )

class Gauge(univ.Integer):
    """Gauge ::= [APPLICATION 2] IMPLICIT INTEGER (0..4294967295)."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )

class TimeTicks(univ.Integer):
    """TimeTicks ::= [APPLICATION 3] IMPLICIT INTEGER (0..4294967295)."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 3)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )

class Opaque(univ.OctetString):
    """Opaque ::= [APPLICATION 4] IMPLICIT OCTET STRING."""
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 4)
    )

class ApplicationSyntax(univ.Choice):
    """Application-wide value syntax (RFC 1155 section 3.2)."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('address', NetworkAddress()),
        namedtype.NamedType('counter', Counter()),
        namedtype.NamedType('gauge', Gauge()),
        namedtype.NamedType('ticks', TimeTicks()),
        namedtype.NamedType('arbitrary', Opaque())
    )

class ObjectSyntax(univ.Choice):
    """ObjectSyntax ::= CHOICE { simple ..., application-wide ... }."""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('simple', SimpleSyntax()),
        namedtype.NamedType('application-wide', ApplicationSyntax())
    )
/dev/null +++ b/pyasn1_modules/rfc1157.py @@ -0,0 +1,90 @@ +# +# SNMPv1 message syntax +# +# ASN.1 source from: +# http://www.ietf.org/rfc/rfc1157.txt +# +# Sample captures from: +# http://wiki.wireshark.org/SampleCaptures/ +# +from pyasn1.type import univ, namedtype, namedval, tag, constraint +from pyasn1_modules import rfc1155 + +class Version(univ.Integer): + namedValues = namedval.NamedValues( + ('version-1', 0) + ) + defaultValue = 0 + +class Community(univ.OctetString): pass + +class RequestID(univ.Integer): pass +class ErrorStatus(univ.Integer): + namedValues = namedval.NamedValues( + ('noError', 0), + ('tooBig', 1), + ('noSuchName', 2), + ('badValue', 3), + ('readOnly', 4), + ('genErr', 5) + ) +class ErrorIndex(univ.Integer): pass + +class VarBind(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('name', rfc1155.ObjectName()), + namedtype.NamedType('value', rfc1155.ObjectSyntax()) + ) +class VarBindList(univ.SequenceOf): + componentType = VarBind() + +class _RequestBase(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('request-id', RequestID()), + namedtype.NamedType('error-status', ErrorStatus()), + namedtype.NamedType('error-index', ErrorIndex()), + namedtype.NamedType('variable-bindings', VarBindList()) + ) + +class GetRequestPDU(_RequestBase): + tagSet = _RequestBase.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0) + ) +class GetNextRequestPDU(_RequestBase): + tagSet = _RequestBase.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1) + ) +class GetResponsePDU(_RequestBase): + tagSet = _RequestBase.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2) + ) +class SetRequestPDU(_RequestBase): + tagSet = _RequestBase.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3) + ) + +class TrapPDU(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('enterprise', 
univ.ObjectIdentifier()), + namedtype.NamedType('agent-addr', rfc1155.NetworkAddress()), + namedtype.NamedType('generic-trap', univ.Integer().clone(namedValues=namedval.NamedValues(('coldStart', 0), ('warmStart', 1), ('linkDown', 2), ('linkUp', 3), ('authenticationFailure', 4), ('egpNeighborLoss', 5), ('enterpriseSpecific', 6)))), + namedtype.NamedType('specific-trap', univ.Integer()), + namedtype.NamedType('time-stamp', rfc1155.TimeTicks()), + namedtype.NamedType('variable-bindings', VarBindList()) + ) + +class Pdus(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('get-request', GetRequestPDU()), + namedtype.NamedType('get-next-request', GetNextRequestPDU()), + namedtype.NamedType('get-response', GetResponsePDU()), + namedtype.NamedType('set-request', SetRequestPDU()), + namedtype.NamedType('trap', TrapPDU()) + ) + +class Message(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('community', Community()), + namedtype.NamedType('data', Pdus()) + ) diff --git a/pyasn1_modules/rfc1901.py b/pyasn1_modules/rfc1901.py new file mode 100644 index 0000000..8cd7e7d --- /dev/null +++ b/pyasn1_modules/rfc1901.py @@ -0,0 +1,15 @@ +# +# SNMPv2c message syntax +# +# ASN.1 source from: +# http://www.ietf.org/rfc/rfc1901.txt +# +from pyasn1.type import univ, namedtype, namedval + +class Message(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', univ.Integer(namedValues = namedval.NamedValues(('version-2c', 1)))), + namedtype.NamedType('community', univ.OctetString()), + namedtype.NamedType('data', univ.Any()) + ) + diff --git a/pyasn1_modules/rfc1902.py b/pyasn1_modules/rfc1902.py new file mode 100644 index 0000000..df0b0c3 --- /dev/null +++ b/pyasn1_modules/rfc1902.py @@ -0,0 +1,105 @@ +# +# SNMPv2c message syntax +# +# ASN.1 source from: +# http://www.ietf.org/rfc/rfc1902.txt +# +from pyasn1.type import univ, namedtype, namedval, tag, 
class Integer(univ.Integer):
    """INTEGER (-2147483648..2147483647) -- RFC 1902 section 7.1.1."""
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        -2147483648, 2147483647
    )

class Integer32(univ.Integer):
    """Integer32 ::= INTEGER (-2147483648..2147483647)."""
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        -2147483648, 2147483647
    )

class OctetString(univ.OctetString):
    """OCTET STRING (SIZE (0..65535))."""
    # Bug fix: the size constraint must extend OctetString's subtypeSpec;
    # the original mistakenly extended univ.Integer's.
    subtypeSpec = univ.OctetString.subtypeSpec + constraint.ValueSizeConstraint(
        0, 65535
    )

class IpAddress(univ.OctetString):
    """IpAddress ::= [APPLICATION 0] IMPLICIT OCTET STRING (SIZE (4))."""
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x00)
    )
    subtypeSpec = univ.OctetString.subtypeSpec + constraint.ValueSizeConstraint(
        4, 4
    )

class Counter32(univ.Integer):
    """Counter32 ::= [APPLICATION 1] IMPLICIT INTEGER (0..4294967295)."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x01)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )

class Gauge32(univ.Integer):
    """Gauge32 ::= [APPLICATION 2] IMPLICIT INTEGER (0..4294967295)."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x02)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )

class Unsigned32(univ.Integer):
    """Unsigned32 -- deliberately shares Gauge32's tag (RFC 1902 7.1.11)."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x02)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )

class TimeTicks(univ.Integer):
    """TimeTicks ::= [APPLICATION 3] IMPLICIT INTEGER (0..4294967295)."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x03)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 4294967295
    )

class Opaque(univ.OctetString):
    """Opaque ::= [APPLICATION 4] IMPLICIT OCTET STRING."""
    tagSet = univ.OctetString.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x04)
    )

class Counter64(univ.Integer):
    """Counter64 ::= [APPLICATION 6] IMPLICIT INTEGER (0..2**64-1)."""
    tagSet = univ.Integer.tagSet.tagImplicitly(
        tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 0x06)
    )
    subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(
        0, 18446744073709551615
    )
18446744073709551615 + ) + +class Bits(univ.OctetString): pass + +class ObjectName(univ.ObjectIdentifier): pass + +class SimpleSyntax(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('integer-value', Integer()), + namedtype.NamedType('string-value', OctetString()), + namedtype.NamedType('objectID-value', univ.ObjectIdentifier()) + ) + +class ApplicationSyntax(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('ipAddress-value', IpAddress()), + namedtype.NamedType('counter-value', Counter32()), + namedtype.NamedType('timeticks-value', TimeTicks()), + namedtype.NamedType('arbitrary-value', Opaque()), + namedtype.NamedType('big-counter-value', Counter64()), +# This conflicts with Counter32 +# namedtype.NamedType('unsigned-integer-value', Unsigned32()), + namedtype.NamedType('gauge32-value', Gauge32()) + ) # BITS misplaced? + +class ObjectSyntax(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('simple', SimpleSyntax()), + namedtype.NamedType('application-wide', ApplicationSyntax()) + ) + diff --git a/pyasn1_modules/rfc1905.py b/pyasn1_modules/rfc1905.py new file mode 100644 index 0000000..bec60f8 --- /dev/null +++ b/pyasn1_modules/rfc1905.py @@ -0,0 +1,100 @@ +# +# SNMPv2c PDU syntax +# +# ASN.1 source from: +# http://www.ietf.org/rfc/rfc1905.txt +# +from pyasn1.type import univ, namedtype, namedval, tag, constraint +from pyasn1_modules import rfc1902 + +max_bindings = rfc1902.Integer(2147483647) + +class _BindValue(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('value', rfc1902.ObjectSyntax()), + namedtype.NamedType('unSpecified', univ.Null()), + namedtype.NamedType('noSuchObject', univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('noSuchInstance', univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('endOfMibView', 
univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) + ) + +class VarBind(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('name', rfc1902.ObjectName()), + namedtype.NamedType('', _BindValue()) + ) + +class VarBindList(univ.SequenceOf): + componentType = VarBind() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint( + 0, max_bindings + ) + +class PDU(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('request-id', rfc1902.Integer32()), + namedtype.NamedType('error-status', univ.Integer(namedValues=namedval.NamedValues(('noError', 0), ('tooBig', 1), ('noSuchName', 2), ('badValue', 3), ('readOnly', 4), ('genErr', 5), ('noAccess', 6), ('wrongType', 7), ('wrongLength', 8), ('wrongEncoding', 9), ('wrongValue', 10), ('noCreation', 11), ('inconsistentValue', 12), ('resourceUnavailable', 13), ('commitFailed', 14), ('undoFailed', 15), ('authorizationError', 16), ('notWritable', 17), ('inconsistentName', 18)))), + namedtype.NamedType('error-index', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, max_bindings))), + namedtype.NamedType('variable-bindings', VarBindList()) + ) + +class BulkPDU(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('request-id', rfc1902.Integer32()), + namedtype.NamedType('non-repeaters', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, max_bindings))), + namedtype.NamedType('max-repetitions', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, max_bindings))), + namedtype.NamedType('variable-bindings', VarBindList()) + ) + +class GetRequestPDU(PDU): + tagSet = PDU.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0) + ) + +class GetNextRequestPDU(PDU): + tagSet = PDU.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1) + ) + +class ResponsePDU(PDU): + tagSet = PDU.tagSet.tagImplicitly( + 
tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2) + ) + +class SetRequestPDU(PDU): + tagSet = PDU.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3) + ) + +class GetBulkRequestPDU(BulkPDU): + tagSet = PDU.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5) + ) + +class InformRequestPDU(PDU): + tagSet = PDU.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6) + ) + +class SNMPv2TrapPDU(PDU): + tagSet = PDU.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7) + ) + +class ReportPDU(PDU): + tagSet = PDU.tagSet.tagImplicitly( + tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8) + ) + +class PDUs(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('get-request', GetRequestPDU()), + namedtype.NamedType('get-next-request', GetNextRequestPDU()), + namedtype.NamedType('get-bulk-request', GetBulkRequestPDU()), + namedtype.NamedType('response', ResponsePDU()), + namedtype.NamedType('set-request', SetRequestPDU()), + namedtype.NamedType('inform-request', InformRequestPDU()), + namedtype.NamedType('snmpV2-trap', SNMPv2TrapPDU()), + namedtype.NamedType('report', ReportPDU()) + ) + diff --git a/pyasn1_modules/rfc2251.py b/pyasn1_modules/rfc2251.py new file mode 100644 index 0000000..3074c67 --- /dev/null +++ b/pyasn1_modules/rfc2251.py @@ -0,0 +1,319 @@ +# +# LDAP message syntax +# +# ASN.1 source from: +# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/ldap.asn +# +# Sample captures from: +# http://wiki.wireshark.org/SampleCaptures/ +# +from pyasn1.type import tag, namedtype, namedval, univ, constraint,char,useful +from pyasn1.codec.der import decoder, encoder + +maxInt = univ.Integer(2147483647) + +class LDAPString(univ.OctetString): pass +class LDAPOID(univ.OctetString): pass + +class LDAPDN(LDAPString): pass +class RelativeLDAPDN(LDAPString): pass +class AttributeType(LDAPString): pass +class 
AttributeDescription(LDAPString): pass + +class AttributeDescriptionList(univ.SequenceOf): + componentType = AttributeDescription() + +class AttributeValue(univ.OctetString): pass + +class AssertionValue(univ.OctetString): pass + +class AttributeValueAssertion(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('attributeDesc', AttributeDescription()), + namedtype.NamedType('assertionValue', AssertionValue()) + ) + +class Attribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeDescription()), + namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue())) + ) + +class MatchingRuleId(LDAPString): pass + +class Control(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('controlType', LDAPOID()), + namedtype.DefaultedNamedType('criticality', univ.Boolean('False')), + namedtype.OptionalNamedType('controlValue', univ.OctetString()) + ) + +class Controls(univ.SequenceOf): + componentType = Control() + +class LDAPURL(LDAPString): pass + +class Referral(univ.SequenceOf): + componentType = LDAPURL() + +class SaslCredentials(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('mechanism', LDAPString()), + namedtype.OptionalNamedType('credentials', univ.OctetString()) + ) + +class AuthenticationChoice(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('simple', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('reserved-1', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('reserved-2', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.NamedType('sasl', SaslCredentials().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) + ) + +class BindRequest(univ.Sequence): + tagSet = 
univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 0) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 127))), + namedtype.NamedType('name', LDAPDN()), + namedtype.NamedType('authentication', AuthenticationChoice()) + ) + +class PartialAttributeList(univ.SequenceOf): + componentType = univ.Sequence(componentType=namedtype.NamedTypes(namedtype.NamedType('type', AttributeDescription()), namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue())))) + +class SearchResultEntry(univ.Sequence): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 4) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('objectName', LDAPDN()), + namedtype.NamedType('attributes', PartialAttributeList()) + ) + +class MatchingRuleAssertion(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('matchingRule', MatchingRuleId().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('type', AttributeDescription().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.NamedType('matchValue', AssertionValue().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), + namedtype.DefaultedNamedType('dnAttributes', univ.Boolean('False').subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))) + ) + +class SubstringFilter(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeDescription()), + namedtype.NamedType('substrings', univ.SequenceOf(componentType=univ.Choice(componentType=namedtype.NamedTypes(namedtype.NamedType('initial', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.NamedType('any', 
LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), namedtype.NamedType('final', LDAPString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))))))) + ) + +# Ugly hack to handle recursive Filter reference (up to 3-levels deep). + +class Filter3(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))), + namedtype.NamedType('substrings', SubstringFilter().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))), + namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))), + namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))), + namedtype.NamedType('present', AttributeDescription().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))), + namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))), + namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9))) + ) + +class Filter2(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('and', univ.SetOf(componentType=Filter3()).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('or', univ.SetOf(componentType=Filter3()).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), + namedtype.NamedType('not', Filter3().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))), + namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))), + 
namedtype.NamedType('substrings', SubstringFilter().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))), + namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))), + namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))), + namedtype.NamedType('present', AttributeDescription().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))), + namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))), + namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9))) + ) + +class Filter(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('and', univ.SetOf(componentType=Filter2()).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('or', univ.SetOf(componentType=Filter2()).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), + namedtype.NamedType('not', Filter2().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))), + namedtype.NamedType('equalityMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))), + namedtype.NamedType('substrings', SubstringFilter().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))), + namedtype.NamedType('greaterOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))), + namedtype.NamedType('lessOrEqual', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))), + namedtype.NamedType('present', 
AttributeDescription().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))), + namedtype.NamedType('approxMatch', AttributeValueAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 8))), + namedtype.NamedType('extensibleMatch', MatchingRuleAssertion().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9))) + ) + +# End of Filter hack + +class SearchRequest(univ.Sequence): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 3) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('baseObject', LDAPDN()), + namedtype.NamedType('scope', univ.Enumerated(namedValues=namedval.NamedValues(('baseObject', 0), ('singleLevel', 1), ('wholeSubtree', 2)))), + namedtype.NamedType('derefAliases', univ.Enumerated(namedValues=namedval.NamedValues(('neverDerefAliases', 0), ('derefInSearching', 1), ('derefFindingBaseObj', 2), ('derefAlways', 3)))), + namedtype.NamedType('sizeLimit', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, maxInt))), + namedtype.NamedType('timeLimit', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, maxInt))), + namedtype.NamedType('typesOnly', univ.Boolean()), + namedtype.NamedType('filter', Filter()), + namedtype.NamedType('attributes', AttributeDescriptionList()) + ) + +class UnbindRequest(univ.Null): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatSimple, 2) + ) + +class BindResponse(univ.Sequence): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('resultCode', univ.Enumerated(namedValues=namedval.NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2), ('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5), ('compareTrue', 6), ('authMethodNotSupported', 7), 
('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10), ('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12), ('confidentialityRequired', 13), ('saslBindInProgress', 14), ('noSuchAttribute', 16), ('undefinedAttributeType', 17), ('inappropriateMatching', 18), ('constraintViolation', 19), ('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21), ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34), ('reserved-35', 35), ('aliasDereferencingProblem', 36), ('inappropriateAuthentication', 48), ('invalidCredentials', 49), ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52), ('unwillingToPerform', 53), ('loopDetect', 54), ('namingViolation', 64), ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66), ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68), ('objectClassModsProhibited', 69), ('reserved-70', 70), ('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81), ('reserved-82', 82), ('reserved-83', 83), ('reserved-84', 84), ('reserved-85', 85), ('reserved-86', 86), ('reserved-87', 87), ('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)))), + namedtype.NamedType('matchedDN', LDAPDN()), + namedtype.NamedType('errorMessage', LDAPString()), + namedtype.OptionalNamedType('referral', Referral().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))), + namedtype.OptionalNamedType('serverSaslCreds', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7))) + ) + +class LDAPResult(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('resultCode', univ.Enumerated(namedValues=namedval.NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2), ('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5), ('compareTrue', 6), ('authMethodNotSupported', 7), ('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10), ('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12), 
('confidentialityRequired', 13), ('saslBindInProgress', 14), ('noSuchAttribute', 16), ('undefinedAttributeType', 17), ('inappropriateMatching', 18), ('constraintViolation', 19), ('attributeOrValueExists', 20), ('invalidAttributeSyntax', 21), ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34), ('reserved-35', 35), ('aliasDereferencingProblem', 36), ('inappropriateAuthentication', 48), ('invalidCredentials', 49), ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52), ('unwillingToPerform', 53), ('loopDetect', 54), ('namingViolation', 64), ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66), ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68), ('objectClassModsProhibited', 69), ('reserved-70', 70), ('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81), ('reserved-82', 82), ('reserved-83', 83), ('reserved-84', 84), ('reserved-85', 85), ('reserved-86', 86), ('reserved-87', 87), ('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)))), + namedtype.NamedType('matchedDN', LDAPDN()), + namedtype.NamedType('errorMessage', LDAPString()), + namedtype.OptionalNamedType('referral', Referral().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))) + ) + +class SearchResultReference(univ.SequenceOf): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 19) + ) + componentType = LDAPURL() + +class SearchResultDone(LDAPResult): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 5) + ) + +class AttributeTypeAndValues(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeDescription()), + namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue())) + ) + +class ModifyRequest(univ.Sequence): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 6) + ) + componentType = namedtype.NamedTypes( + 
namedtype.NamedType('object', LDAPDN()), + namedtype.NamedType('modification', univ.SequenceOf(componentType=univ.Sequence(componentType=namedtype.NamedTypes(namedtype.NamedType('operation', univ.Enumerated(namedValues=namedval.NamedValues(('add', 0), ('delete', 1), ('replace', 2)))), namedtype.NamedType('modification', AttributeTypeAndValues()))))) + ) + +class ModifyResponse(LDAPResult): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 7) + ) + +class AttributeList(univ.SequenceOf): + componentType = univ.Sequence(componentType=namedtype.NamedTypes(namedtype.NamedType('type', AttributeDescription()), namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue())))) + +class AddRequest(univ.Sequence): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 8) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('entry', LDAPDN()), + namedtype.NamedType('attributes', AttributeList()) + ) + +class AddResponse(LDAPResult): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 9) + ) + +class DelRequest(LDAPResult): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 10) + ) + +class DelResponse(LDAPResult): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 11) + ) + +class ModifyDNRequest(univ.Sequence): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 12) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('entry', LDAPDN()), + namedtype.NamedType('newrdn', RelativeLDAPDN()), + namedtype.NamedType('deleteoldrdn', univ.Boolean()), + namedtype.OptionalNamedType('newSuperior', LDAPDN().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + + ) + +class ModifyDNResponse(LDAPResult): 
+ tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 13) + ) + +class CompareRequest(univ.Sequence): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 14) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('entry', LDAPDN()), + namedtype.NamedType('ava', AttributeValueAssertion()) + ) + +class CompareResponse(LDAPResult): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 15) + ) + +class AbandonRequest(LDAPResult): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 16) + ) + +class ExtendedRequest(univ.Sequence): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 23) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('requestName', LDAPOID().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('requestValue', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class ExtendedResponse(univ.Sequence): + tagSet = univ.Sequence.tagSet.tagImplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 24) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('resultCode', univ.Enumerated(namedValues=namedval.NamedValues(('success', 0), ('operationsError', 1), ('protocolError', 2), ('timeLimitExceeded', 3), ('sizeLimitExceeded', 4), ('compareFalse', 5), ('compareTrue', 6), ('authMethodNotSupported', 7), ('strongAuthRequired', 8), ('reserved-9', 9), ('referral', 10), ('adminLimitExceeded', 11), ('unavailableCriticalExtension', 12), ('confidentialityRequired', 13), ('saslBindInProgress', 14), ('noSuchAttribute', 16), ('undefinedAttributeType', 17), ('inappropriateMatching', 18), ('constraintViolation', 19), ('attributeOrValueExists', 20), 
('invalidAttributeSyntax', 21), ('noSuchObject', 32), ('aliasProblem', 33), ('invalidDNSyntax', 34), ('reserved-35', 35), ('aliasDereferencingProblem', 36), ('inappropriateAuthentication', 48), ('invalidCredentials', 49), ('insufficientAccessRights', 50), ('busy', 51), ('unavailable', 52), ('unwillingToPerform', 53), ('loopDetect', 54), ('namingViolation', 64), ('objectClassViolation', 65), ('notAllowedOnNonLeaf', 66), ('notAllowedOnRDN', 67), ('entryAlreadyExists', 68), ('objectClassModsProhibited', 69), ('reserved-70', 70), ('affectsMultipleDSAs', 71), ('other', 80), ('reserved-81', 81), ('reserved-82', 82), ('reserved-83', 83), ('reserved-84', 84), ('reserved-85', 85), ('reserved-86', 86), ('reserved-87', 87), ('reserved-88', 88), ('reserved-89', 89), ('reserved-90', 90)))), + namedtype.NamedType('matchedDN', LDAPDN()), + namedtype.NamedType('errorMessage', LDAPString()), + namedtype.OptionalNamedType('referral', Referral().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))), + + namedtype.OptionalNamedType('responseName', LDAPOID().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 10))), + namedtype.OptionalNamedType('response', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 11))) + ) + +class MessageID(univ.Integer): + subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint( + 0, maxInt + ) + +class LDAPMessage(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('messageID', MessageID()), + namedtype.NamedType('protocolOp', univ.Choice(componentType=namedtype.NamedTypes(namedtype.NamedType('bindRequest', BindRequest()), namedtype.NamedType('bindResponse', BindResponse()), namedtype.NamedType('unbindRequest', UnbindRequest()), namedtype.NamedType('searchRequest', SearchRequest()), namedtype.NamedType('searchResEntry', SearchResultEntry()), namedtype.NamedType('searchResDone', SearchResultDone()), 
namedtype.NamedType('searchResRef', SearchResultReference()), namedtype.NamedType('modifyRequest', ModifyRequest()), namedtype.NamedType('modifyResponse', ModifyResponse()), namedtype.NamedType('addRequest', AddRequest()), namedtype.NamedType('addResponse', AddResponse()), namedtype.NamedType('delRequest', DelRequest()), namedtype.NamedType('delResponse', DelResponse()), namedtype.NamedType('modDNRequest', ModifyDNRequest()), namedtype.NamedType('modDNResponse', ModifyDNResponse()), namedtype.NamedType('compareRequest', CompareRequest()), namedtype.NamedType('compareResponse', CompareResponse()), namedtype.NamedType('abandonRequest', AbandonRequest()), namedtype.NamedType('extendedReq', ExtendedRequest()), namedtype.NamedType('extendedResp', ExtendedResponse())))), + namedtype.OptionalNamedType('controls', Controls().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) diff --git a/pyasn1_modules/rfc2314.py b/pyasn1_modules/rfc2314.py new file mode 100644 index 0000000..86b11fb --- /dev/null +++ b/pyasn1_modules/rfc2314.py @@ -0,0 +1,33 @@ +# +# PKCS#10 syntax +# +# ASN.1 source from: +# http://tools.ietf.org/html/rfc2314 +# +# Sample captures could be obtained with "openssl req" command +# +from pyasn1.type import tag, namedtype, namedval, univ, constraint +from pyasn1_modules.rfc2459 import * + +class Attributes(univ.SetOf): + componentType = Attribute() + +class Version(univ.Integer): pass + +class CertificationRequestInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('subject', Name()), + namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()), + namedtype.NamedType('attributes', Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) + +class Signature(univ.BitString): pass +class SignatureAlgorithmIdentifier(AlgorithmIdentifier): pass + +class CertificationRequest(univ.Sequence): + componentType = 
namedtype.NamedTypes( + namedtype.NamedType('certificationRequestInfo', CertificationRequestInfo()), + namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()), + namedtype.NamedType('signature', Signature()) + ) diff --git a/pyasn1_modules/rfc2315.py b/pyasn1_modules/rfc2315.py new file mode 100644 index 0000000..76bb957 --- /dev/null +++ b/pyasn1_modules/rfc2315.py @@ -0,0 +1,205 @@ +# +# PKCS#7 message syntax +# +# ASN.1 source from: +# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/pkcs7.asn +# +# Sample captures from: +# openssl crl2pkcs7 -nocrl -certfile cert1.cer -out outfile.p7b +# +from pyasn1.type import tag,namedtype,namedval,univ,constraint,char,useful +from pyasn1_modules.rfc2459 import * + +class Attribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeType()), + namedtype.NamedType('values', univ.SetOf(componentType=AttributeValue())) + ) + +class AttributeValueAssertion(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('attributeType', AttributeType()), + namedtype.NamedType('attributeValue', AttributeValue()) + ) + +pkcs_7 = univ.ObjectIdentifier('1.2.840.113549.1.7') +data = univ.ObjectIdentifier('1.2.840.113549.1.7.1') +signedData = univ.ObjectIdentifier('1.2.840.113549.1.7.2') +envelopedData = univ.ObjectIdentifier('1.2.840.113549.1.7.3') +signedAndEnvelopedData = univ.ObjectIdentifier('1.2.840.113549.1.7.4') +digestedData = univ.ObjectIdentifier('1.2.840.113549.1.7.5') +encryptedData = univ.ObjectIdentifier('1.2.840.113549.1.7.6') + +class ContentType(univ.ObjectIdentifier): pass + +class ContentEncryptionAlgorithmIdentifier(AlgorithmIdentifier): pass + +class EncryptedContent(univ.OctetString): pass + +class EncryptedContentInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('contentType', ContentType()), + namedtype.NamedType('contentEncryptionAlgorithm', ContentEncryptionAlgorithmIdentifier()), + 
namedtype.OptionalNamedType('encryptedContent', EncryptedContent().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) + +class Version(univ.Integer): pass # overrides x509.Version + +class EncryptedData(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()) + ) + +class DigestAlgorithmIdentifier(AlgorithmIdentifier): pass + +class DigestAlgorithmIdentifiers(univ.SetOf): + componentType = DigestAlgorithmIdentifier() + +class Digest(univ.OctetString): pass + +class ContentInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('contentType', ContentType()), + namedtype.OptionalNamedType('content', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) + +class DigestedData(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()), + namedtype.NamedType('contentInfo', ContentInfo()), + namedtype.NamedType('digest', Digest) + ) + +class IssuerAndSerialNumber(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('issuer', Name()), + namedtype.NamedType('serialNumber', CertificateSerialNumber()) + ) + +class KeyEncryptionAlgorithmIdentifier(AlgorithmIdentifier): pass + +class EncryptedKey(univ.OctetString): pass + +class RecipientInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()), + namedtype.NamedType('keyEncryptionAlgorithm', KeyEncryptionAlgorithmIdentifier()), + namedtype.NamedType('encryptedKey', EncryptedKey()) + ) + +class RecipientInfos(univ.SetOf): + componentType = RecipientInfo() + +class Attributes(univ.SetOf): + componentType = Attribute() + +class ExtendedCertificateInfo(univ.Sequence): 
+ componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('certificate', Certificate()), + namedtype.NamedType('attributes', Attributes()) + ) + +class SignatureAlgorithmIdentifier(AlgorithmIdentifier): pass + +class Signature(univ.BitString): pass + +class ExtendedCertificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('extendedCertificateInfo', ExtendedCertificateInfo()), + namedtype.NamedType('signatureAlgorithm', SignatureAlgorithmIdentifier()), + namedtype.NamedType('signature', Signature()) + ) + +class ExtendedCertificateOrCertificate(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('certificate', Certificate()), + namedtype.NamedType('extendedCertificate', ExtendedCertificate().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) + +class ExtendedCertificatesAndCertificates(univ.SetOf): + componentType = ExtendedCertificateOrCertificate() + +class SerialNumber(univ.Integer): pass + +class CRLEntry(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('userCertificate', SerialNumber()), + namedtype.NamedType('revocationDate', useful.UTCTime()) + ) + +class TBSCertificateRevocationList(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('signature', AlgorithmIdentifier()), + namedtype.NamedType('issuer', Name()), + namedtype.NamedType('lastUpdate', useful.UTCTime()), + namedtype.NamedType('nextUpdate', useful.UTCTime()), + namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=CRLEntry())) + ) + +class CertificateRevocationList(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('tbsCertificateRevocationList', TBSCertificateRevocationList()), + namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), + namedtype.NamedType('signature', univ.BitString()) + ) + +class CertificateRevocationLists(univ.SetOf): + 
componentType = CertificateRevocationList() + +class DigestEncryptionAlgorithmIdentifier(AlgorithmIdentifier): pass + +class EncryptedDigest(univ.OctetString): pass + +class SignerInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('issuerAndSerialNumber', IssuerAndSerialNumber()), + namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()), + namedtype.OptionalNamedType('authenticatedAttributes', Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('digestEncryptionAlgorithm', DigestEncryptionAlgorithmIdentifier()), + namedtype.NamedType('encryptedDigest', EncryptedDigest()), + namedtype.OptionalNamedType('unauthenticatedAttributes', Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + +class SignerInfos(univ.SetOf): + componentType = SignerInfo() + +class SignedAndEnvelopedData(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('recipientInfos', RecipientInfos()), + namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()), + namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()), + namedtype.OptionalNamedType('certificates', ExtendedCertificatesAndCertificates().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('crls', CertificateRevocationLists().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), + namedtype.NamedType('signerInfos', SignerInfos()) + ) + +class EnvelopedData(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('recipientInfos', RecipientInfos()), + namedtype.NamedType('encryptedContentInfo', EncryptedContentInfo()) + ) + +class DigestInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + 
namedtype.NamedType('digestAlgorithm', DigestAlgorithmIdentifier()), + namedtype.NamedType('digest', Digest()) + ) + +class SignedData(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('digestAlgorithms', DigestAlgorithmIdentifiers()), + namedtype.NamedType('contentInfo', ContentInfo()), + namedtype.OptionalNamedType('certificates', ExtendedCertificatesAndCertificates().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('crls', CertificateRevocationLists().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), + namedtype.NamedType('signerInfos', SignerInfos()) + ) + +class Data(univ.OctetString): pass diff --git a/pyasn1_modules/rfc2437.py b/pyasn1_modules/rfc2437.py new file mode 100644 index 0000000..3abf6dc --- /dev/null +++ b/pyasn1_modules/rfc2437.py @@ -0,0 +1,53 @@ +# +# PKCS#1 syntax +# +# ASN.1 source from: +# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2.asn +# +# Sample captures could be obtained with "openssl genrsa" command +# +from pyasn1.type import tag, namedtype, namedval, univ, constraint +from pyasn1_modules.rfc2459 import AlgorithmIdentifier + +pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1') +rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1') +md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2') +md4WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.3') +md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4') +sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5') +rsaOAEPEncryptionSET = univ.ObjectIdentifier('1.2.840.113549.1.1.6') +id_RSAES_OAEP = univ.ObjectIdentifier('1.2.840.113549.1.1.7') +id_mgf1 = univ.ObjectIdentifier('1.2.840.113549.1.1.8') +id_pSpecified = univ.ObjectIdentifier('1.2.840.113549.1.1.9') +id_sha1 = univ.ObjectIdentifier('1.3.14.3.2.26') + +MAX = 16 + +class Version(univ.Integer): pass + 
+class RSAPrivateKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), + namedtype.NamedType('privateExponent', univ.Integer()), + namedtype.NamedType('prime1', univ.Integer()), + namedtype.NamedType('prime2', univ.Integer()), + namedtype.NamedType('exponent1', univ.Integer()), + namedtype.NamedType('exponent2', univ.Integer()), + namedtype.NamedType('coefficient', univ.Integer()) + ) + +class RSAPublicKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()) + ) + +# XXX defaults not set +class RSAES_OAEP_params(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('hashFunc', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('maskGenFunc', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), + namedtype.NamedType('pSourceFunc', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) + ) diff --git a/pyasn1_modules/rfc2459.py b/pyasn1_modules/rfc2459.py new file mode 100644 index 0000000..c52ab09 --- /dev/null +++ b/pyasn1_modules/rfc2459.py @@ -0,0 +1,904 @@ +# +# X.509 message syntax +# +# ASN.1 source from: +# http://www.trl.ibm.com/projects/xml/xss4j/data/asn1/grammars/x509.asn +# http://www.ietf.org/rfc/rfc2459.txt +# +# Sample captures from: +# http://wiki.wireshark.org/SampleCaptures/ +# +from pyasn1.type import tag,namedtype,namedval,univ,constraint,char,useful + +MAX = 64 # XXX ? 
+ +# +# PKIX1Explicit88 +# + +# Upper Bounds +ub_name = univ.Integer(32768) +ub_common_name = univ.Integer(64) +ub_locality_name = univ.Integer(128) +ub_state_name = univ.Integer(128) +ub_organization_name = univ.Integer(64) +ub_organizational_unit_name = univ.Integer(64) +ub_title = univ.Integer(64) +ub_match = univ.Integer(128) +ub_emailaddress_length = univ.Integer(128) +ub_common_name_length = univ.Integer(64) +ub_country_name_alpha_length = univ.Integer(2) +ub_country_name_numeric_length = univ.Integer(3) +ub_domain_defined_attributes = univ.Integer(4) +ub_domain_defined_attribute_type_length = univ.Integer(8) +ub_domain_defined_attribute_value_length = univ.Integer(128) +ub_domain_name_length = univ.Integer(16) +ub_extension_attributes = univ.Integer(256) +ub_e163_4_number_length = univ.Integer(15) +ub_e163_4_sub_address_length = univ.Integer(40) +ub_generation_qualifier_length = univ.Integer(3) +ub_given_name_length = univ.Integer(16) +ub_initials_length = univ.Integer(5) +ub_integer_options = univ.Integer(256) +ub_numeric_user_id_length = univ.Integer(32) +ub_organization_name_length = univ.Integer(64) +ub_organizational_unit_name_length = univ.Integer(32) +ub_organizational_units = univ.Integer(4) +ub_pds_name_length = univ.Integer(16) +ub_pds_parameter_length = univ.Integer(30) +ub_pds_physical_address_lines = univ.Integer(6) +ub_postal_code_length = univ.Integer(16) +ub_surname_length = univ.Integer(40) +ub_terminal_id_length = univ.Integer(24) +ub_unformatted_address_length = univ.Integer(180) +ub_x121_address_length = univ.Integer(16) + +class UniversalString(char.UniversalString): pass +class BMPString(char.BMPString): pass +class UTF8String(char.UTF8String): pass + +id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7') +id_pe = univ.ObjectIdentifier('1.3.6.1.5.5.7.1') +id_qt = univ.ObjectIdentifier('1.3.6.1.5.5.7.2') +id_kp = univ.ObjectIdentifier('1.3.6.1.5.5.7.3') +id_ad = univ.ObjectIdentifier('1.3.6.1.5.5.7.48') + +id_qt_cps = 
univ.ObjectIdentifier('1.3.6.1.5.5.7.2.1') +id_qt_unotice = univ.ObjectIdentifier('1.3.6.1.5.5.7.2.2') + +id_ad_ocsp = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.1') +id_ad_caIssuers = univ.ObjectIdentifier('1.3.6.1.5.5.7.48.2') + +class AttributeValue(univ.Any): pass + +class AttributeType(univ.ObjectIdentifier): pass + +class AttributeTypeAndValue(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeType()), + namedtype.NamedType('value', AttributeValue()) + ) + +class Attribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', AttributeType()), + namedtype.NamedType('vals', univ.SetOf(componentType=AttributeValue())) + ) + +id_at = univ.ObjectIdentifier('2.5.4') +id_at_name = univ.ObjectIdentifier('2.5.4.41') +# preserve misspelled variable for compatibility +id_at_sutname = id_at_surname = univ.ObjectIdentifier('2.5.4.4') +id_at_givenName = univ.ObjectIdentifier('2.5.4.42') +id_at_initials = univ.ObjectIdentifier('2.5.4.43') +id_at_generationQualifier = univ.ObjectIdentifier('2.5.4.44') + +class X520name(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_name))) + ) + +id_at_commonName = univ.ObjectIdentifier('2.5.4.3') + +class X520CommonName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', 
char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_common_name))) + ) + +id_at_localityName = univ.ObjectIdentifier('2.5.4.7') + +class X520LocalityName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_locality_name))) + ) + +id_at_stateOrProvinceName = univ.ObjectIdentifier('2.5.4.8') + +class X520StateOrProvinceName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))), + 
namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_state_name))) + ) + +id_at_organizationName = univ.ObjectIdentifier('2.5.4.10') + +class X520OrganizationName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organization_name))) + ) + +id_at_organizationalUnitName = univ.ObjectIdentifier('2.5.4.11') + +class X520OrganizationalUnitName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_organizational_unit_name))) + ) + +id_at_title = 
univ.ObjectIdentifier('2.5.4.12') + +class X520Title(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_title))) + ) + +id_at_dnQualifier = univ.ObjectIdentifier('2.5.4.46') + +class X520dnQualifier(char.PrintableString): pass + +id_at_countryName = univ.ObjectIdentifier('2.5.4.6') + +class X520countryName(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(2, 2) + +pkcs_9 = univ.ObjectIdentifier('1.2.840.113549.1.9') + +emailAddress = univ.ObjectIdentifier('1.2.840.113549.1.9.1') + +class Pkcs9email(char.IA5String): + subtypeSpec = char.IA5String.subtypeSpec + constraint.ValueSizeConstraint(1, ub_emailaddress_length) + +# ---- + +class DSAPrivateKey(univ.Sequence): + """PKIX compliant DSA private key structure""" + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('v1', 0)))), + namedtype.NamedType('p', univ.Integer()), + namedtype.NamedType('q', univ.Integer()), + namedtype.NamedType('g', univ.Integer()), + namedtype.NamedType('public', univ.Integer()), + namedtype.NamedType('private', univ.Integer()) + ) + +# ---- + +class RelativeDistinguishedName(univ.SetOf): + componentType = AttributeTypeAndValue() + +class RDNSequence(univ.SequenceOf): + componentType = RelativeDistinguishedName() + +class Name(univ.Choice): + componentType = 
namedtype.NamedTypes( + namedtype.NamedType('', RDNSequence()) + ) + +class DirectoryString(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('teletexString', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('printableString', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('universalString', char.UniversalString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + namedtype.NamedType('ia5String', char.IA5String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) # hm, this should not be here!? XXX + ) + +# certificate and CRL specific structures begin here + +class AlgorithmIdentifier(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('algorithm', univ.ObjectIdentifier()), + namedtype.OptionalNamedType('parameters', univ.Any()) + ) + +class Extension(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('extnID', univ.ObjectIdentifier()), + namedtype.DefaultedNamedType('critical', univ.Boolean('False')), + namedtype.NamedType('extnValue', univ.Any()) + ) + +class Extensions(univ.SequenceOf): + componentType = Extension() + sizeSpec = univ.SequenceOf.sizeSpec + constraint.ValueSizeConstraint(1, MAX) + +class SubjectPublicKeyInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('algorithm', AlgorithmIdentifier()), + namedtype.NamedType('subjectPublicKey', univ.BitString()) + ) + +class UniqueIdentifier(univ.BitString): pass + +class Time(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('utcTime', useful.UTCTime()), + namedtype.NamedType('generalTime', 
useful.GeneralizedTime()) + ) + +class Validity(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('notBefore', Time()), + namedtype.NamedType('notAfter', Time()) + ) + +class CertificateSerialNumber(univ.Integer): pass + +class Version(univ.Integer): + namedValues = namedval.NamedValues( + ('v1', 0), ('v2', 1), ('v3', 2) + ) + +class TBSCertificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.DefaultedNamedType('version', Version('v1').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('serialNumber', CertificateSerialNumber()), + namedtype.NamedType('signature', AlgorithmIdentifier()), + namedtype.NamedType('issuer', Name()), + namedtype.NamedType('validity', Validity()), + namedtype.NamedType('subject', Name()), + namedtype.NamedType('subjectPublicKeyInfo', SubjectPublicKeyInfo()), + namedtype.OptionalNamedType('issuerUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('subjectUniqueID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('extensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) + ) + +class Certificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('tbsCertificate', TBSCertificate()), + namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), + namedtype.NamedType('signatureValue', univ.BitString()) + ) + +# CRL structures + +class RevokedCertificate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('userCertificate', CertificateSerialNumber()), + namedtype.NamedType('revocationDate', Time()), + namedtype.OptionalNamedType('crlEntryExtensions', Extensions()) + ) + +class TBSCertList(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('version', 
Version()), + namedtype.NamedType('signature', AlgorithmIdentifier()), + namedtype.NamedType('issuer', Name()), + namedtype.NamedType('thisUpdate', Time()), + namedtype.OptionalNamedType('nextUpdate', Time()), + namedtype.OptionalNamedType('revokedCertificates', univ.SequenceOf(componentType=RevokedCertificate())), + namedtype.OptionalNamedType('crlExtensions', Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) + +class CertificateList(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('tbsCertList', TBSCertList()), + namedtype.NamedType('signatureAlgorithm', AlgorithmIdentifier()), + namedtype.NamedType('signature', univ.BitString()) + ) + +# Algorithm OIDs and parameter structures + +pkcs_1 = univ.ObjectIdentifier('1.2.840.113549.1.1') +rsaEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.1') +md2WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.2') +md5WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.4') +sha1WithRSAEncryption = univ.ObjectIdentifier('1.2.840.113549.1.1.5') +id_dsa_with_sha1 = univ.ObjectIdentifier('1.2.840.10040.4.3') + +class Dss_Sig_Value(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('r', univ.Integer()), + namedtype.NamedType('s', univ.Integer()) + ) + +dhpublicnumber = univ.ObjectIdentifier('1.2.840.10046.2.1') + +class ValidationParms(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('seed', univ.BitString()), + namedtype.NamedType('pgenCounter', univ.Integer()) + ) + +class DomainParameters(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('p', univ.Integer()), + namedtype.NamedType('g', univ.Integer()), + namedtype.NamedType('q', univ.Integer()), + namedtype.NamedType('j', univ.Integer()), + namedtype.OptionalNamedType('validationParms', ValidationParms()) + ) + +id_dsa = univ.ObjectIdentifier('1.2.840.10040.4.1') + +class Dss_Parms(univ.Sequence): + 
componentType = namedtype.NamedTypes( + namedtype.NamedType('p', univ.Integer()), + namedtype.NamedType('q', univ.Integer()), + namedtype.NamedType('g', univ.Integer()) + ) + +# x400 address syntax starts here + +teletex_domain_defined_attributes = univ.Integer(6) + +class TeletexDomainDefinedAttribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))), + namedtype.NamedType('value', char.TeletexString()) + ) + +class TeletexDomainDefinedAttributes(univ.SequenceOf): + componentType = TeletexDomainDefinedAttribute() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) + +terminal_type = univ.Integer(23) + +class TerminalType(univ.Integer): + subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, ub_integer_options) + namedValues = namedval.NamedValues( + ('telex', 3), + ('teletelex', 4), + ('g3-facsimile', 5), + ('g4-facsimile', 6), + ('ia5-terminal', 7), + ('videotex', 8) + ) + +class PresentationAddress(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('pSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('sSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('tSelector', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('nAddresses', univ.SetOf(componentType=univ.OctetString()).subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3), subtypeSpec=constraint.ValueSizeConstraint(1, MAX))), + ) + +extended_network_address = univ.Integer(22) + +class E163_4_address(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('number', 
char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_number_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('sub-address', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_e163_4_sub_address_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class ExtendedNetworkAddress(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('e163-4-address', E163_4_address()), + namedtype.NamedType('psap-address', PresentationAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + +class PDSParameter(univ.Set): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('printable-string', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))), + namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length))) + ) + +local_postal_attributes = univ.Integer(21) + +class LocalPostalAttributes(PDSParameter): pass + +class UniquePostalName(PDSParameter): pass + +unique_postal_name = univ.Integer(20) + +poste_restante_address = univ.Integer(19) + +class PosteRestanteAddress(PDSParameter): pass + +post_office_box_address = univ.Integer(18) + +class PostOfficeBoxAddress(PDSParameter): pass + +street_address = univ.Integer(17) + +class StreetAddress(PDSParameter): pass + +class UnformattedPostalAddress(univ.Set): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('printable-address', univ.SequenceOf(componentType=char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_parameter_length)).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_pds_physical_address_lines)))), + namedtype.OptionalNamedType('teletex-string', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 
ub_unformatted_address_length))) + ) + +physical_delivery_office_name = univ.Integer(10) + +class PhysicalDeliveryOfficeName(PDSParameter): pass + +physical_delivery_office_number = univ.Integer(11) + +class PhysicalDeliveryOfficeNumber(PDSParameter): pass + +extension_OR_address_components = univ.Integer(12) + +class ExtensionORAddressComponents(PDSParameter): pass + +physical_delivery_personal_name = univ.Integer(13) + +class PhysicalDeliveryPersonalName(PDSParameter): pass + +physical_delivery_organization_name = univ.Integer(14) + +class PhysicalDeliveryOrganizationName(PDSParameter): pass + +extension_physical_delivery_address_components = univ.Integer(15) + +class ExtensionPhysicalDeliveryAddressComponents(PDSParameter): pass + +unformatted_postal_address = univ.Integer(16) + +postal_code = univ.Integer(9) + +class PostalCode(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('numeric-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))), + namedtype.NamedType('printable-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_postal_code_length))) + ) + +class PhysicalDeliveryCountryName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))), + namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length))) + ) + +class PDSName(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_pds_name_length) + +physical_delivery_country_name = univ.Integer(8) + +class TeletexOrganizationalUnitName(char.TeletexString): + subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, 
ub_organizational_unit_name_length) + +pds_name = univ.Integer(7) + +teletex_organizational_unit_names = univ.Integer(5) + +class TeletexOrganizationalUnitNames(univ.SequenceOf): + componentType = TeletexOrganizationalUnitName() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units) + +teletex_personal_name = univ.Integer(4) + +class TeletexPersonalName(univ.Set): + componentType = namedtype.NamedTypes( + namedtype.NamedType('surname', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('given-name', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('initials', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('generation-qualifier', char.TeletexString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) + ) + +teletex_organization_name = univ.Integer(3) + +class TeletexOrganizationName(char.TeletexString): + subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length) + +teletex_common_name = univ.Integer(2) + +class TeletexCommonName(char.TeletexString): + subtypeSpec = char.TeletexString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length) + +class CommonName(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_common_name_length) + +common_name = univ.Integer(1) + +class ExtensionAttribute(univ.Sequence): + componentType = namedtype.NamedTypes( + 
namedtype.NamedType('extension-attribute-type', univ.Integer().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_extension_attributes), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('extension-attribute-value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class ExtensionAttributes(univ.SetOf): + componentType = ExtensionAttribute() + subtypeSpec = univ.SetOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_extension_attributes) + +class BuiltInDomainDefinedAttribute(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('type', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_type_length))), + namedtype.NamedType('value', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_defined_attribute_value_length))) + ) + +class BuiltInDomainDefinedAttributes(univ.SequenceOf): + componentType = BuiltInDomainDefinedAttribute() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_domain_defined_attributes) + +class OrganizationalUnitName(char.PrintableString): + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_unit_name_length) + +class OrganizationalUnitNames(univ.SequenceOf): + componentType = OrganizationalUnitName() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organizational_units) + +class PersonalName(univ.Set): + componentType = namedtype.NamedTypes( + namedtype.NamedType('surname', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_surname_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('given-name', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_given_name_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 
1))), + namedtype.OptionalNamedType('initials', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_initials_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('generation-qualifier', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_generation_qualifier_length), explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))) + ) + +class NumericUserIdentifier(char.NumericString): + subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_numeric_user_id_length) + +class OrganizationName(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_organization_name_length) + +class PrivateDomainName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))), + namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, ub_domain_name_length))) + ) + +class TerminalIdentifier(char.PrintableString): + subtypeSpec = char.PrintableString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_terminal_id_length) + +class X121Address(char.NumericString): + subtypeSpec = char.NumericString.subtypeSpec + constraint.ValueSizeConstraint(1, ub_x121_address_length) + +class NetworkAddress(X121Address): pass + +class AdministrationDomainName(univ.Choice): + tagSet = univ.Choice.tagSet.tagExplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 2) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('numeric', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))), + namedtype.NamedType('printable', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, ub_domain_name_length))) + ) + +class CountryName(univ.Choice): + 
tagSet = univ.Choice.tagSet.tagExplicitly( + tag.Tag(tag.tagClassApplication, tag.tagFormatConstructed, 1) + ) + componentType = namedtype.NamedTypes( + namedtype.NamedType('x121-dcc-code', char.NumericString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_numeric_length, ub_country_name_numeric_length))), + namedtype.NamedType('iso-3166-alpha2-code', char.PrintableString().subtype(subtypeSpec=constraint.ValueSizeConstraint(ub_country_name_alpha_length, ub_country_name_alpha_length))) + ) + +class BuiltInStandardAttributes(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('country-name', CountryName()), + namedtype.OptionalNamedType('administration-domain-name', AdministrationDomainName()), + namedtype.OptionalNamedType('network-address', NetworkAddress().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('terminal-identifier', TerminalIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('private-domain-name', PrivateDomainName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('organization-name', OrganizationName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), + namedtype.OptionalNamedType('numeric-user-identifier', NumericUserIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))), + namedtype.OptionalNamedType('personal-name', PersonalName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))), + namedtype.OptionalNamedType('organizational-unit-names', OrganizationalUnitNames().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))) + ) + +class ORAddress(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('built-in-standard-attributes', BuiltInStandardAttributes()), + 
namedtype.OptionalNamedType('built-in-domain-defined-attributes', BuiltInDomainDefinedAttributes()), + namedtype.OptionalNamedType('extension-attributes', ExtensionAttributes()) + ) + +# +# PKIX1Implicit88 +# + +id_ce_invalidityDate = univ.ObjectIdentifier('2.5.29.24') + +class InvalidityDate(useful.GeneralizedTime): pass + +id_holdinstruction_none = univ.ObjectIdentifier('2.2.840.10040.2.1') +id_holdinstruction_callissuer = univ.ObjectIdentifier('2.2.840.10040.2.2') +id_holdinstruction_reject = univ.ObjectIdentifier('2.2.840.10040.2.3') + +holdInstruction = univ.ObjectIdentifier('2.2.840.10040.2') + +id_ce_holdInstructionCode = univ.ObjectIdentifier('2.5.29.23') + +class HoldInstructionCode(univ.ObjectIdentifier): pass + +id_ce_cRLReasons = univ.ObjectIdentifier('2.5.29.21') + +class CRLReason(univ.Enumerated): + namedValues = namedval.NamedValues( + ('unspecified', 0), + ('keyCompromise', 1), + ('cACompromise', 2), + ('affiliationChanged', 3), + ('superseded', 4), + ('cessationOfOperation', 5), + ('certificateHold', 6), + ('removeFromCRL', 8) + ) + +id_ce_cRLNumber = univ.ObjectIdentifier('2.5.29.20') + +class CRLNumber(univ.Integer): + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(0, MAX) + +class BaseCRLNumber(CRLNumber): pass + +id_kp_serverAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.1') +id_kp_clientAuth = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.2') +id_kp_codeSigning = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.3') +id_kp_emailProtection = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.4') +id_kp_ipsecEndSystem = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.5') +id_kp_ipsecTunnel = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.6') +id_kp_ipsecUser = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.7') +id_kp_timeStamping = univ.ObjectIdentifier('1.3.6.1.5.5.7.3.8') +id_pe_authorityInfoAccess = univ.ObjectIdentifier('1.3.6.1.5.5.7.1.1') +id_ce_extKeyUsage = univ.ObjectIdentifier('2.5.29.37') + +class KeyPurposeId(univ.ObjectIdentifier): pass + +class 
ExtKeyUsageSyntax(univ.SequenceOf): + componentType = KeyPurposeId() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +class ReasonFlags(univ.BitString): + namedValues = namedval.NamedValues( + ('unused', 0), + ('keyCompromise', 1), + ('cACompromise', 2), + ('affiliationChanged', 3), + ('superseded', 4), + ('cessationOfOperation', 5), + ('certificateHold', 6) + ) + + +class SkipCerts(univ.Integer): + subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueSizeConstraint(0, MAX) + +id_ce_policyConstraints = univ.ObjectIdentifier('2.5.29.36') + +class PolicyConstraints(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('requireExplicitPolicy', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('inhibitPolicyMapping', SkipCerts().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + +id_ce_basicConstraints = univ.ObjectIdentifier('2.5.29.19') + +class BasicConstraints(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.DefaultedNamedType('cA', univ.Boolean(False)), + namedtype.OptionalNamedType('pathLenConstraint', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, MAX))) + ) + +id_ce_subjectDirectoryAttributes = univ.ObjectIdentifier('2.5.29.9') + +class SubjectDirectoryAttributes(univ.SequenceOf): + componentType = Attribute() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +class EDIPartyName(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('nameAssigner', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('partyName', DirectoryString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class AnotherName(univ.Sequence): + componentType = namedtype.NamedTypes( + 
namedtype.NamedType('type-id', univ.ObjectIdentifier()), + namedtype.NamedType('value', univ.Any().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + +class GeneralName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('otherName', AnotherName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('rfc822Name', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('dNSName', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.NamedType('x400Address', ORAddress().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))), + namedtype.NamedType('directoryName', Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))), + namedtype.NamedType('ediPartyName', EDIPartyName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))), + namedtype.NamedType('uniformResourceIdentifier', char.IA5String().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))), + namedtype.NamedType('iPAddress', univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))), + namedtype.NamedType('registeredID', univ.ObjectIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8))) + ) + +class GeneralNames(univ.SequenceOf): + componentType = GeneralName() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +class AccessDescription(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('accessMethod', univ.ObjectIdentifier()), + namedtype.NamedType('accessLocation', GeneralName()) + ) + +class AuthorityInfoAccessSyntax(univ.SequenceOf): + componentType = AccessDescription() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +id_ce_deltaCRLIndicator = 
univ.ObjectIdentifier('2.5.29.27') + +class DistributionPointName(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('fullName', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('nameRelativeToCRLIssuer', RelativeDistinguishedName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + +class DistributionPoint(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('reasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('cRLIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))) + ) +class BaseDistance(univ.Integer): + subtypeSpec = univ.Integer.subtypeSpec + constraint.ValueRangeConstraint(0, MAX) + +id_ce_cRLDistributionPoints = univ.ObjectIdentifier('2.5.29.31') + +class CRLDistPointsSyntax(univ.SequenceOf): + componentType = DistributionPoint() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) +id_ce_issuingDistributionPoint = univ.ObjectIdentifier('2.5.29.28') + +class IssuingDistributionPoint(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('distributionPoint', DistributionPointName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('onlyContainsUserCerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('onlyContainsCACerts', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('onlySomeReasons', ReasonFlags().subtype(implicitTag=tag.Tag(tag.tagClassContext, 
tag.tagFormatSimple, 3))), + namedtype.NamedType('indirectCRL', univ.Boolean(False).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))) + ) + +class GeneralSubtree(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('base', GeneralName()), + namedtype.DefaultedNamedType('minimum', BaseDistance(0).subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('maximum', BaseDistance().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + +class GeneralSubtrees(univ.SequenceOf): + componentType = GeneralSubtree() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +id_ce_nameConstraints = univ.ObjectIdentifier('2.5.29.30') + +class NameConstraints(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('permittedSubtrees', GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('excludedSubtrees', GeneralSubtrees().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))) + ) + + +class DisplayText(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('visibleString', char.VisibleString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))), + namedtype.NamedType('bmpString', char.BMPString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))), + namedtype.NamedType('utf8String', char.UTF8String().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 200))) + ) + +class NoticeReference(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('organization', DisplayText()), + namedtype.NamedType('noticeNumbers', univ.SequenceOf(componentType=univ.Integer())) + ) + +class UserNotice(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('noticeRef', NoticeReference()), + 
namedtype.OptionalNamedType('explicitText', DisplayText()) + ) + +class CPSuri(char.IA5String): pass + +class PolicyQualifierId(univ.ObjectIdentifier): + subtypeSpec = univ.ObjectIdentifier.subtypeSpec + constraint.SingleValueConstraint(id_qt_cps, id_qt_unotice) + +class CertPolicyId(univ.ObjectIdentifier): pass + +class PolicyQualifierInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('policyQualifierId', PolicyQualifierId()), + namedtype.NamedType('qualifier', univ.Any()) + ) + +id_ce_certificatePolicies = univ.ObjectIdentifier('2.5.29.32') + +class PolicyInformation(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('policyIdentifier', CertPolicyId()), + namedtype.OptionalNamedType('policyQualifiers', univ.SequenceOf(componentType=PolicyQualifierInfo()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) + ) + +class CertificatePolicies(univ.SequenceOf): + componentType = PolicyInformation() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +id_ce_policyMappings = univ.ObjectIdentifier('2.5.29.33') + +class PolicyMapping(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('issuerDomainPolicy', CertPolicyId()), + namedtype.NamedType('subjectDomainPolicy', CertPolicyId()) + ) + +class PolicyMappings(univ.SequenceOf): + componentType = PolicyMapping() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +id_ce_privateKeyUsagePeriod = univ.ObjectIdentifier('2.5.29.16') + +class PrivateKeyUsagePeriod(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('notBefore', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('notAfter', useful.GeneralizedTime().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +id_ce_keyUsage = univ.ObjectIdentifier('2.5.29.15') + +class 
KeyUsage(univ.BitString): + namedValues = namedval.NamedValues( + ('digitalSignature', 0), + ('nonRepudiation', 1), + ('keyEncipherment', 2), + ('dataEncipherment', 3), + ('keyAgreement', 4), + ('keyCertSign', 5), + ('cRLSign', 6), + ('encipherOnly', 7), + ('decipherOnly', 8) + ) + +id_ce = univ.ObjectIdentifier('2.5.29') + +id_ce_authorityKeyIdentifier = univ.ObjectIdentifier('2.5.29.35') + +class KeyIdentifier(univ.OctetString): pass + +id_ce_subjectKeyIdentifier = univ.ObjectIdentifier('2.5.29.14') + +class SubjectKeyIdentifier(KeyIdentifier): pass + +class AuthorityKeyIdentifier(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('keyIdentifier', KeyIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('authorityCertIssuer', GeneralNames().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('authorityCertSerialNumber', CertificateSerialNumber().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) + ) + +id_ce_certificateIssuer = univ.ObjectIdentifier('2.5.29.29') + +class CertificateIssuer(GeneralNames): pass + +id_ce_subjectAltName = univ.ObjectIdentifier('2.5.29.17') + +class SubjectAltName(GeneralNames): pass + +id_ce_issuerAltName = univ.ObjectIdentifier('2.5.29.18') + +class IssuerAltName(GeneralNames): pass diff --git a/pyasn1_modules/rfc2511.py b/pyasn1_modules/rfc2511.py new file mode 100644 index 0000000..2fc592c --- /dev/null +++ b/pyasn1_modules/rfc2511.py @@ -0,0 +1,176 @@ +# +# X.509 certificate Request Message Format (CRMF) syntax +# +# ASN.1 source from: +# http://tools.ietf.org/html/rfc2511 +# +# Sample captures could be obtained with OpenSSL +# +from pyasn1.type import tag, namedtype, namedval, univ, constraint, char,useful +from pyasn1_modules.rfc2459 import * +from pyasn1_modules import rfc2315 + +MAX=16 + +id_pkix = univ.ObjectIdentifier('1.3.6.1.5.5.7') +id_pkip = 
univ.ObjectIdentifier('1.3.6.1.5.5.7.5') +id_regCtrl = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1') +id_regCtrl_regToken = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.1') +id_regCtrl_authenticator = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.2') +id_regCtrl_pkiPublicationInfo = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.3') +id_regCtrl_pkiArchiveOptions = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.4') +id_regCtrl_oldCertID = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.5') +id_regCtrl_protocolEncrKey = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.1.6') +id_regInfo = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.2') +id_regInfo_utf8Pairs = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.2.1') +id_regInfo_certReq = univ.ObjectIdentifier('1.3.6.1.5.5.7.5.2.2') + +# This should be in PKIX Certificate Extensions module + +class GeneralName(univ.OctetString): pass + +# end of PKIX Certificate Extensions module + +class UTF8Pairs(char.UTF8String): pass + +class ProtocolEncrKey(SubjectPublicKeyInfo): pass + +class CertId(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('issuer', GeneralName()), + namedtype.NamedType('serialNumber', univ.Integer()) + ) + +class OldCertId(CertId): pass + +class KeyGenParameters(univ.OctetString): pass + +class EncryptedValue(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('intendedAlg', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.OptionalNamedType('symmAlg', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), + namedtype.OptionalNamedType('encSymmKey', univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))), + namedtype.OptionalNamedType('keyAlg', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))), + namedtype.OptionalNamedType('valueHint', 
univ.OctetString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))), + namedtype.NamedType('encValue', univ.BitString()) + ) + +class EncryptedKey(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('encryptedValue', EncryptedValue()), + namedtype.NamedType('envelopedData', rfc2315.EnvelopedData().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) + +class PKIArchiveOptions(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('encryptedPrivKey', EncryptedKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('keyGenParameters', KeyGenParameters().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('archiveRemGenPrivKey', univ.Boolean().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) + ) + +class SinglePubInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('pubMethod', univ.Integer(namedValues=namedval.NamedValues(('dontCare', 0), ('x500', 1), ('web', 2), ('ldap', 3)))), + namedtype.OptionalNamedType('pubLocation', GeneralName()) + ) + +class PKIPublicationInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('action', univ.Integer(namedValues=namedval.NamedValues(('dontPublish', 0), ('pleasePublish', 1)))), + namedtype.OptionalNamedType('pubInfos', univ.SequenceOf(componentType=SinglePubInfo()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) + ) + +class Authenticator(char.UTF8String): pass +class RegToken(char.UTF8String): pass + +class SubsequentMessage(univ.Integer): + namedValues = namedval.NamedValues( + ('encrCert', 0), + ('challengeResp', 1) + ) + +class POPOPrivKey(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('thisMessage', univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + 
namedtype.NamedType('subsequentMessage', SubsequentMessage().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('dhMAC', univ.BitString().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) + ) + +class PBMParameter(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('salt', univ.OctetString()), + namedtype.NamedType('owf', AlgorithmIdentifier()), + namedtype.NamedType('iterationCount', univ.Integer()), + namedtype.NamedType('mac', AlgorithmIdentifier()) + ) + +class PKMACValue(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('algId', AlgorithmIdentifier()), + namedtype.NamedType('value', univ.BitString()) + ) + +class POPOSigningKeyInput(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('authInfo', univ.Choice(componentType=namedtype.NamedTypes(namedtype.NamedType('sender', GeneralName().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), namedtype.NamedType('publicKeyMAC', PKMACValue())))), + namedtype.NamedType('publicKey', SubjectPublicKeyInfo()) + ) + +class POPOSigningKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('poposkInput', POPOSigningKeyInput().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))), + namedtype.NamedType('algorithmIdentifier', AlgorithmIdentifier()), + namedtype.NamedType('signature', univ.BitString()) + ) + +class ProofOfPossession(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('raVerified', univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('signature', POPOSigningKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), + namedtype.NamedType('keyEncipherment', POPOPrivKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))), + 
namedtype.NamedType('keyAgreement', POPOPrivKey().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))) + ) + +class Controls(univ.SequenceOf): + componentType = AttributeTypeAndValue() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +class OptionalValidity(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('notBefore', Time().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('notAfter', Time().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class CertTemplate(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('version', Version().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('serialNumber', univ.Integer().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.OptionalNamedType('signingAlg', AlgorithmIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2))), + namedtype.OptionalNamedType('issuer', Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 3))), + namedtype.OptionalNamedType('validity', OptionalValidity().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 4))), + namedtype.OptionalNamedType('subject', Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 5))), + namedtype.OptionalNamedType('publicKey', SubjectPublicKeyInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 6))), + namedtype.OptionalNamedType('issuerUID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 7))), + namedtype.OptionalNamedType('subjectUID', UniqueIdentifier().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8))), + namedtype.OptionalNamedType('extensions', 
Extensions().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 9))) + ) + +class CertRequest(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('certReqId', univ.Integer()), + namedtype.NamedType('certTemplate', CertTemplate()), + namedtype.OptionalNamedType('controls', Controls()) + ) + +class CertReq(CertRequest): pass + +class CertReqMsg(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('certReq', CertRequest()), + namedtype.OptionalNamedType('pop', ProofOfPossession()), + namedtype.OptionalNamedType('regInfo', univ.SequenceOf(componentType=AttributeTypeAndValue()).subtype(subtypeSpec=constraint.ValueSizeConstraint(1, MAX))) + ) + +class CertReqMessages(univ.SequenceOf): + componentType = CertReqMsg() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) diff --git a/pyasn1_modules/rfc2560.py b/pyasn1_modules/rfc2560.py new file mode 100644 index 0000000..0be1091 --- /dev/null +++ b/pyasn1_modules/rfc2560.py @@ -0,0 +1,171 @@ +# +# OCSP request/response syntax +# +# Derived from a minimal OCSP library (RFC2560) code written by +# Bud P. 
Bruegger +# Copyright: Ancitel, S.p.a, Rome, Italy +# License: BSD +# + +# +# current limitations: +# * request and response works only for a single certificate +# * only some values are parsed out of the response +# * the request doesn't set a nonce nor signature +# * there is no signature validation of the response +# * dates are left as strings in GeneralizedTime format -- datetime.datetime +# would be nicer +# +from pyasn1.type import tag, namedtype, namedval, univ, constraint, useful +from pyasn1_modules import rfc2459 + +# Start of OCSP module definitions + +# This should be in directory Authentication Framework (X.509) module + +class CRLReason(univ.Enumerated): + namedValues = namedval.NamedValues( + ('unspecified', 0), + ('keyCompromise', 1), + ('cACompromise', 2), + ('affiliationChanged', 3), + ('superseded', 4), + ('cessationOfOperation', 5), + ('certificateHold', 6), + ('removeFromCRL', 8), + ('privilegeWithdrawn', 9), + ('aACompromise', 10) + ) + +# end of directory Authentication Framework (X.509) module + +# This should be in PKIX Certificate Extensions module + +class GeneralName(univ.OctetString): pass + +# end of PKIX Certificate Extensions module + +id_kp_OCSPSigning = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 3, 9)) +id_pkix_ocsp = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1)) +id_pkix_ocsp_basic = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 1)) +id_pkix_ocsp_nonce = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 2)) +id_pkix_ocsp_crl = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 3)) +id_pkix_ocsp_response = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 4)) +id_pkix_ocsp_nocheck = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 5)) +id_pkix_ocsp_archive_cutoff = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 6)) +id_pkix_ocsp_service_locator = univ.ObjectIdentifier((1, 3, 6, 1, 5, 5, 7, 48, 1, 7)) + +class AcceptableResponses(univ.SequenceOf): + componentType = univ.ObjectIdentifier() + +class 
ArchiveCutoff(useful.GeneralizedTime): pass + +class UnknownInfo(univ.Null): pass + +class RevokedInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('revocationTime', useful.GeneralizedTime()), + namedtype.OptionalNamedType('revocationReason', CRLReason().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + +class CertID(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('hashAlgorithm', rfc2459.AlgorithmIdentifier()), + namedtype.NamedType('issuerNameHash', univ.OctetString()), + namedtype.NamedType('issuerKeyHash', univ.OctetString()), + namedtype.NamedType('serialNumber', rfc2459.CertificateSerialNumber()) + ) + +class CertStatus(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('good', univ.Null().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('revoked', RevokedInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('unknown', UnknownInfo().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) + ) + +class SingleResponse(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('certID', CertID()), + namedtype.NamedType('certStatus', CertStatus()), + namedtype.NamedType('thisUpdate', useful.GeneralizedTime()), + namedtype.OptionalNamedType('nextUpdate', useful.GeneralizedTime().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('singleExtensions', rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class KeyHash(univ.OctetString): pass + +class ResponderID(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('byName', rfc2459.Name().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('byKey', 
KeyHash().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) + ) + +class Version(univ.Integer): + namedValues = namedval.NamedValues(('v1', 0)) + +class ResponseData(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.DefaultedNamedType('version', Version('v1').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.NamedType('responderID', ResponderID()), + namedtype.NamedType('producedAt', useful.GeneralizedTime()), + namedtype.NamedType('responses', univ.SequenceOf(SingleResponse())), + namedtype.OptionalNamedType('responseExtensions', rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))) + ) + +class BasicOCSPResponse(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('tbsResponseData', ResponseData()), + namedtype.NamedType('signatureAlgorithm', rfc2459.AlgorithmIdentifier()), + namedtype.NamedType('signature', univ.BitString()), + namedtype.OptionalNamedType('certs', univ.SequenceOf(rfc2459.Certificate()).subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + +class ResponseBytes(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('responseType', univ.ObjectIdentifier()), + namedtype.NamedType('response', univ.OctetString()) + ) + +class OCSPResponseStatus(univ.Enumerated): + namedValues = namedval.NamedValues( + ('successful', 0), + ('malformedRequest', 1), + ('internalError', 2), + ('tryLater', 3), + ('undefinedStatus', 4), # should never occur + ('sigRequired', 5), + ('unauthorized', 6) + ) + +class OCSPResponse(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('responseStatus', OCSPResponseStatus()), + namedtype.OptionalNamedType('responseBytes', ResponseBytes().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + +class Request(univ.Sequence): + componentType = namedtype.NamedTypes( + 
namedtype.NamedType('reqCert', CertID()), + namedtype.OptionalNamedType('singleRequestExtensions', rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + +class Signature(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('signatureAlgorithm', rfc2459.AlgorithmIdentifier()), + namedtype.NamedType('signature', univ.BitString()), + namedtype.OptionalNamedType('certs', univ.SequenceOf(rfc2459.Certificate()).subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) + +class TBSRequest(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.DefaultedNamedType('version', Version('v1').subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('requestorName', GeneralName().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 1))), + namedtype.NamedType('requestList', univ.SequenceOf(Request())), + namedtype.OptionalNamedType('requestExtensions', rfc2459.Extensions().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))) + ) + +class OCSPRequest(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('tbsRequest', TBSRequest()), + namedtype.OptionalNamedType('optionalSignature', Signature().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))) + ) diff --git a/pyasn1_modules/rfc3412.py b/pyasn1_modules/rfc3412.py new file mode 100644 index 0000000..e80ce31 --- /dev/null +++ b/pyasn1_modules/rfc3412.py @@ -0,0 +1,38 @@ +# +# SNMPv3 message syntax +# +# ASN.1 source from: +# http://www.ietf.org/rfc/rfc3412.txt +# +from pyasn1.type import univ, namedtype, namedval, tag, constraint +from pyasn1_modules import rfc1905 + +class ScopedPDU(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('contextEngineId', univ.OctetString()), + namedtype.NamedType('contextName', univ.OctetString()), + namedtype.NamedType('data', 
rfc1905.PDUs()) + ) + +class ScopedPduData(univ.Choice): + componentType = namedtype.NamedTypes( + namedtype.NamedType('plaintext', ScopedPDU()), + namedtype.NamedType('encryptedPDU', univ.OctetString()), + ) + +class HeaderData(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('msgID', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))), + namedtype.NamedType('msgMaxSize', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(484, 2147483647))), + namedtype.NamedType('msgFlags', univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(1, 1))), + namedtype.NamedType('msgSecurityModel', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(1, 2147483647))) + ) + +class SNMPv3Message(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('msgVersion', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))), + namedtype.NamedType('msgGlobalData', HeaderData()), + namedtype.NamedType('msgSecurityParameters', univ.OctetString()), + namedtype.NamedType('msgData', ScopedPduData()) + ) + diff --git a/pyasn1_modules/rfc3414.py b/pyasn1_modules/rfc3414.py new file mode 100644 index 0000000..580c88e --- /dev/null +++ b/pyasn1_modules/rfc3414.py @@ -0,0 +1,17 @@ +# +# SNMPv3 message syntax +# +# ASN.1 source from: +# http://www.ietf.org/rfc/rfc3414.txt +# +from pyasn1.type import univ, namedtype, namedval, tag, constraint + +class UsmSecurityParameters(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('msgAuthoritativeEngineID', univ.OctetString()), + namedtype.NamedType('msgAuthoritativeEngineBoots', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))), + namedtype.NamedType('msgAuthoritativeEngineTime', univ.Integer().subtype(subtypeSpec=constraint.ValueRangeConstraint(0, 2147483647))), + namedtype.NamedType('msgUserName', 
univ.OctetString().subtype(subtypeSpec=constraint.ValueSizeConstraint(0, 32))), + namedtype.NamedType('msgAuthenticationParameters', univ.OctetString()), + namedtype.NamedType('msgPrivacyParameters', univ.OctetString()) + ) diff --git a/pyasn1_modules/rfc3447.py b/pyasn1_modules/rfc3447.py new file mode 100644 index 0000000..96dea7f --- /dev/null +++ b/pyasn1_modules/rfc3447.py @@ -0,0 +1,35 @@ +# +# PKCS#1 syntax +# +# ASN.1 source from: +# ftp://ftp.rsasecurity.com/pub/pkcs/pkcs-1/pkcs-1v2-1.asn +# +# Sample captures could be obtained with "openssl genrsa" command +# +from pyasn1_modules.rfc2437 import * + +class OtherPrimeInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('prime', univ.Integer()), + namedtype.NamedType('exponent', univ.Integer()), + namedtype.NamedType('coefficient', univ.Integer()) + ) + +class OtherPrimeInfos(univ.SequenceOf): + componentType = OtherPrimeInfo() + subtypeSpec = univ.SequenceOf.subtypeSpec + \ + constraint.ValueSizeConstraint(1, MAX) + +class RSAPrivateKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', univ.Integer(namedValues=namedval.NamedValues(('two-prime', 0), ('multi', 1)))), + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), + namedtype.NamedType('privateExponent', univ.Integer()), + namedtype.NamedType('prime1', univ.Integer()), + namedtype.NamedType('prime2', univ.Integer()), + namedtype.NamedType('exponent1', univ.Integer()), + namedtype.NamedType('exponent2', univ.Integer()), + namedtype.NamedType('coefficient', univ.Integer()), + namedtype.OptionalNamedType('otherPrimeInfos', OtherPrimeInfos()) + ) diff --git a/pyasn1_modules/rfc4210.py b/pyasn1_modules/rfc4210.py new file mode 100644 index 0000000..c577560 --- /dev/null +++ b/pyasn1_modules/rfc4210.py @@ -0,0 +1,720 @@ +# +# Certificate Management Protocol structures as per RFC4210 +# +# Based on Alex Railean's work +# +from pyasn1.type 
import tag,namedtype,namedval,univ,constraint,char,useful +from pyasn1_modules import rfc2459, rfc2511, rfc2314 + +MAX = 64 + +class KeyIdentifier(univ.OctetString): pass + +class CMPCertificate(rfc2459.Certificate): pass + +class OOBCert(CMPCertificate): pass + +class CertAnnContent(CMPCertificate): pass + +class PKIFreeText(univ.SequenceOf): + """ + PKIFreeText ::= SEQUENCE SIZE (1..MAX) OF UTF8String + """ + componentType = char.UTF8String() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +class PollRepContent(univ.SequenceOf): + """ + PollRepContent ::= SEQUENCE OF SEQUENCE { + certReqId INTEGER, + checkAfter INTEGER, -- time in seconds + reason PKIFreeText OPTIONAL + } + """ + class CertReq(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('certReqId', univ.Integer()), + namedtype.NamedType('checkAfter', univ.Integer()), + namedtype.OptionalNamedType('reason', PKIFreeText()) + ) + componentType = CertReq() + +class PollReqContent(univ.SequenceOf): + """ + PollReqContent ::= SEQUENCE OF SEQUENCE { + certReqId INTEGER + } + + """ + class CertReq(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('certReqId', univ.Integer()) + ) + componentType = CertReq() + +class InfoTypeAndValue(univ.Sequence): + """ + InfoTypeAndValue ::= SEQUENCE { + infoType OBJECT IDENTIFIER, + infoValue ANY DEFINED BY infoType OPTIONAL + }""" + componentType = namedtype.NamedTypes( + namedtype.NamedType('infoType', univ.ObjectIdentifier()), + namedtype.OptionalNamedType('infoValue', univ.Any()) + ) + +class GenRepContent(univ.SequenceOf): + componentType = InfoTypeAndValue() + +class GenMsgContent(univ.SequenceOf): + componentType = InfoTypeAndValue() + +class PKIConfirmContent(univ.Null): pass + +class CRLAnnContent(univ.SequenceOf): + componentType = rfc2459.CertificateList() + +class CAKeyUpdAnnContent(univ.Sequence): + """ + CAKeyUpdAnnContent ::= SEQUENCE { + oldWithNew CMPCertificate, + 
newWithOld CMPCertificate, + newWithNew CMPCertificate + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('oldWithNew', CMPCertificate()), + namedtype.NamedType('newWithOld', CMPCertificate()), + namedtype.NamedType('newWithNew', CMPCertificate()) + ) + +class RevDetails(univ.Sequence): + """ + RevDetails ::= SEQUENCE { + certDetails CertTemplate, + crlEntryDetails Extensions OPTIONAL + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('certDetails', rfc2511.CertTemplate()), + namedtype.OptionalNamedType('crlEntryDetails', rfc2459.Extensions()) + ) + +class RevReqContent(univ.SequenceOf): + componentType = RevDetails() + +class CertOrEncCert(univ.Choice): + """ + CertOrEncCert ::= CHOICE { + certificate [0] CMPCertificate, + encryptedCert [1] EncryptedValue + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('certificate', CMPCertificate().subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0) + ) + ), + namedtype.NamedType('encryptedCert', rfc2511.EncryptedValue().subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1) + ) + ) + ) + +class CertifiedKeyPair(univ.Sequence): + """ + CertifiedKeyPair ::= SEQUENCE { + certOrEncCert CertOrEncCert, + privateKey [0] EncryptedValue OPTIONAL, + publicationInfo [1] PKIPublicationInfo OPTIONAL + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('certOrEncCert', CertOrEncCert()), + namedtype.OptionalNamedType('privateKey', rfc2511.EncryptedValue().subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0) + ) + ), + namedtype.OptionalNamedType('publicationInfo', rfc2511.PKIPublicationInfo().subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1) + ) + ) + ) + + +class POPODecKeyRespContent(univ.SequenceOf): + componentType = univ.Integer() + +class Challenge(univ.Sequence): + """ + Challenge ::= SEQUENCE { + owf AlgorithmIdentifier OPTIONAL, + witness 
OCTET STRING, + challenge OCTET STRING + } + """ + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('owf', rfc2459.AlgorithmIdentifier()), + namedtype.NamedType('witness', univ.OctetString()), + namedtype.NamedType('challenge', univ.OctetString()) + ) + +class PKIStatus(univ.Integer): + """ + PKIStatus ::= INTEGER { + accepted (0), + grantedWithMods (1), + rejection (2), + waiting (3), + revocationWarning (4), + revocationNotification (5), + keyUpdateWarning (6) + } + """ + namedValues = namedval.NamedValues( + ('accepted', 0), + ('grantedWithMods', 1), + ('rejection', 2), + ('waiting', 3), + ('revocationWarning', 4), + ('revocationNotification', 5), + ('keyUpdateWarning', 6) + ) + +class PKIFailureInfo(univ.BitString): + """ + PKIFailureInfo ::= BIT STRING { + badAlg (0), + badMessageCheck (1), + badRequest (2), + badTime (3), + badCertId (4), + badDataFormat (5), + wrongAuthority (6), + incorrectData (7), + missingTimeStamp (8), + badPOP (9), + certRevoked (10), + certConfirmed (11), + wrongIntegrity (12), + badRecipientNonce (13), + timeNotAvailable (14), + unacceptedPolicy (15), + unacceptedExtension (16), + addInfoNotAvailable (17), + badSenderNonce (18), + badCertTemplate (19), + signerNotTrusted (20), + transactionIdInUse (21), + unsupportedVersion (22), + notAuthorized (23), + systemUnavail (24), + systemFailure (25), + duplicateCertReq (26) + """ + namedValues = namedval.NamedValues( + ('badAlg', 0), + ('badMessageCheck', 1), + ('badRequest', 2), + ('badTime', 3), + ('badCertId', 4), + ('badDataFormat', 5), + ('wrongAuthority', 6), + ('incorrectData', 7), + ('missingTimeStamp', 8), + ('badPOP', 9), + ('certRevoked', 10), + ('certConfirmed', 11), + ('wrongIntegrity', 12), + ('badRecipientNonce', 13), + ('timeNotAvailable', 14), + ('unacceptedPolicy', 15), + ('unacceptedExtension', 16), + ('addInfoNotAvailable', 17), + ('badSenderNonce', 18), + ('badCertTemplate', 19), + ('signerNotTrusted', 20), + ('transactionIdInUse', 21), + 
('unsupportedVersion', 22), + ('notAuthorized', 23), + ('systemUnavail', 24), + ('systemFailure', 25), + ('duplicateCertReq', 26) + ) + +class PKIStatusInfo(univ.Sequence): + """ + PKIStatusInfo ::= SEQUENCE { + status PKIStatus, + statusString PKIFreeText OPTIONAL, + failInfo PKIFailureInfo OPTIONAL + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('status', PKIStatus()), + namedtype.OptionalNamedType('statusString', PKIFreeText()), + namedtype.OptionalNamedType('failInfo', PKIFailureInfo()) + ) + +class ErrorMsgContent(univ.Sequence): + """ + ErrorMsgContent ::= SEQUENCE { + pKIStatusInfo PKIStatusInfo, + errorCode INTEGER OPTIONAL, + -- implementation-specific error codes + errorDetails PKIFreeText OPTIONAL + -- implementation-specific error details + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('pKIStatusInfo', PKIStatusInfo()), + namedtype.OptionalNamedType('errorCode', univ.Integer()), + namedtype.OptionalNamedType('errorDetails', PKIFreeText()) + ) + +class CertStatus(univ.Sequence): + """ + CertStatus ::= SEQUENCE { + certHash OCTET STRING, + certReqId INTEGER, + statusInfo PKIStatusInfo OPTIONAL + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('certHash', univ.OctetString()), + namedtype.NamedType('certReqId', univ.Integer()), + namedtype.OptionalNamedType('statusInfo', PKIStatusInfo()) + ) + +class CertConfirmContent(univ.SequenceOf): + componentType = CertStatus() + +class RevAnnContent(univ.Sequence): + """ + RevAnnContent ::= SEQUENCE { + status PKIStatus, + certId CertId, + willBeRevokedAt GeneralizedTime, + badSinceDate GeneralizedTime, + crlDetails Extensions OPTIONAL + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('status', PKIStatus()), + namedtype.NamedType('certId', rfc2511.CertId()), + namedtype.NamedType('willBeRevokedAt', useful.GeneralizedTime()), + namedtype.NamedType('badSinceDate', useful.GeneralizedTime()), + 
namedtype.OptionalNamedType('crlDetails', rfc2459.Extensions()) + ) + +class RevRepContent(univ.Sequence): + """ + RevRepContent ::= SEQUENCE { + status SEQUENCE SIZE (1..MAX) OF PKIStatusInfo, + revCerts [0] SEQUENCE SIZE (1..MAX) OF CertId + OPTIONAL, + crls [1] SEQUENCE SIZE (1..MAX) OF CertificateList + OPTIONAL + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('status', PKIStatusInfo()), + namedtype.OptionalNamedType('revCerts', univ.SequenceOf( + componentType=rfc2511.CertId() + ).subtype( + subtypeSpec=constraint.ValueSizeConstraint(1, MAX), + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0) + ) + ), + namedtype.OptionalNamedType('crls', univ.SequenceOf( + componentType=rfc2459.CertificateList() + ).subtype( + subtypeSpec=constraint.ValueSizeConstraint(1, MAX), + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1) + ) + ) + ) + +class KeyRecRepContent(univ.Sequence): + """ + KeyRecRepContent ::= SEQUENCE { + status PKIStatusInfo, + newSigCert [0] CMPCertificate OPTIONAL, + caCerts [1] SEQUENCE SIZE (1..MAX) OF + CMPCertificate OPTIONAL, + keyPairHist [2] SEQUENCE SIZE (1..MAX) OF + CertifiedKeyPair OPTIONAL + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('status', PKIStatusInfo()), + namedtype.OptionalNamedType('newSigCert', CMPCertificate().subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0) + ) + ), + namedtype.OptionalNamedType('caCerts', univ.SequenceOf( + componentType=CMPCertificate() + ).subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1), + subtypeSpec=constraint.ValueSizeConstraint(1, MAX) + ) + ), + namedtype.OptionalNamedType('keyPairHist', univ.SequenceOf( + componentType=CertifiedKeyPair() + ).subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 2), + subtypeSpec=constraint.ValueSizeConstraint(1, MAX) + ) + ) + ) + +class CertResponse(univ.Sequence): + """ + CertResponse ::= SEQUENCE 
{ + certReqId INTEGER, + status PKIStatusInfo, + certifiedKeyPair CertifiedKeyPair OPTIONAL, + rspInfo OCTET STRING OPTIONAL + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('certReqId', univ.Integer()), + namedtype.NamedType('status', PKIStatusInfo()), + namedtype.OptionalNamedType('certifiedKeyPair', CertifiedKeyPair()), + namedtype.OptionalNamedType('rspInfo', univ.OctetString()) + ) + +class CertRepMessage(univ.Sequence): + """ + CertRepMessage ::= SEQUENCE { + caPubs [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate + OPTIONAL, + response SEQUENCE OF CertResponse + } + """ + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('caPubs', univ.SequenceOf( + componentType=CMPCertificate() + ).subtype( + subtypeSpec=constraint.ValueSizeConstraint(1, MAX), + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,1) + ) + ), + namedtype.NamedType('response', univ.SequenceOf( + componentType=CertResponse()) + ) + ) + +class POPODecKeyChallContent(univ.SequenceOf): + componentType = Challenge() + +class OOBCertHash(univ.Sequence): + """ + OOBCertHash ::= SEQUENCE { + hashAlg [0] AlgorithmIdentifier OPTIONAL, + certId [1] CertId OPTIONAL, + hashVal BIT STRING + } + """ + componentType = namedtype.NamedTypes( + namedtype.OptionalNamedType('hashAlg', + rfc2459.AlgorithmIdentifier().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,0) + ) + ), + namedtype.OptionalNamedType('certId', rfc2511.CertId().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,1) + ) + ), + namedtype.NamedType('hashVal', univ.BitString()) + ) + +# pyasn1 does not naturally handle recursive definitions, thus this hack: +# NestedMessageContent ::= PKIMessages +class NestedMessageContent(univ.SequenceOf): + """ + NestedMessageContent ::= PKIMessages + """ + componentType = univ.Any() + +class DHBMParameter(univ.Sequence): + """ + DHBMParameter ::= SEQUENCE { + owf AlgorithmIdentifier, + -- AlgId for a One-Way 
Function (SHA-1 recommended) + mac AlgorithmIdentifier + -- the MAC AlgId (e.g., DES-MAC, Triple-DES-MAC [PKCS11], + } -- or HMAC [RFC2104, RFC2202]) + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('owf', rfc2459.AlgorithmIdentifier()), + namedtype.NamedType('mac', rfc2459.AlgorithmIdentifier()) + ) + +id_DHBasedMac = univ.ObjectIdentifier('1.2.840.113533.7.66.30') + +class PBMParameter(univ.Sequence): + """ + PBMParameter ::= SEQUENCE { + salt OCTET STRING, + owf AlgorithmIdentifier, + iterationCount INTEGER, + mac AlgorithmIdentifier + } + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('salt', univ.OctetString().subtype( + subtypeSpec=constraint.ValueSizeConstraint(0, 128) + ) + ), + namedtype.NamedType('owf', rfc2459.AlgorithmIdentifier()), + namedtype.NamedType('iterationCount', univ.Integer()), + namedtype.NamedType('mac', rfc2459.AlgorithmIdentifier()) + ) + +id_PasswordBasedMac = univ.ObjectIdentifier('1.2.840.113533.7.66.13') + +class PKIProtection(univ.BitString): pass + +# pyasn1 does not naturally handle recursive definitions, thus this hack: +# NestedMessageContent ::= PKIMessages +nestedMessageContent = NestedMessageContent().subtype(explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,20)) + +class PKIBody(univ.Choice): + """ + PKIBody ::= CHOICE { -- message-specific body elements + ir [0] CertReqMessages, --Initialization Request + ip [1] CertRepMessage, --Initialization Response + cr [2] CertReqMessages, --Certification Request + cp [3] CertRepMessage, --Certification Response + p10cr [4] CertificationRequest, --imported from [PKCS10] + popdecc [5] POPODecKeyChallContent, --pop Challenge + popdecr [6] POPODecKeyRespContent, --pop Response + kur [7] CertReqMessages, --Key Update Request + kup [8] CertRepMessage, --Key Update Response + krr [9] CertReqMessages, --Key Recovery Request + krp [10] KeyRecRepContent, --Key Recovery Response + rr [11] RevReqContent, --Revocation Request + rp [12] 
RevRepContent, --Revocation Response + ccr [13] CertReqMessages, --Cross-Cert. Request + ccp [14] CertRepMessage, --Cross-Cert. Response + ckuann [15] CAKeyUpdAnnContent, --CA Key Update Ann. + cann [16] CertAnnContent, --Certificate Ann. + rann [17] RevAnnContent, --Revocation Ann. + crlann [18] CRLAnnContent, --CRL Announcement + pkiconf [19] PKIConfirmContent, --Confirmation + nested [20] NestedMessageContent, --Nested Message + genm [21] GenMsgContent, --General Message + genp [22] GenRepContent, --General Response + error [23] ErrorMsgContent, --Error Message + certConf [24] CertConfirmContent, --Certificate confirm + pollReq [25] PollReqContent, --Polling request + pollRep [26] PollRepContent --Polling response + + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('ir', rfc2511.CertReqMessages().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,0) + ) + ), + namedtype.NamedType('ip', CertRepMessage().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,1) + ) + ), + namedtype.NamedType('cr', rfc2511.CertReqMessages().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,2) + ) + ), + namedtype.NamedType('cp', CertRepMessage().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,3) + ) + ), + namedtype.NamedType('p10cr', rfc2314.CertificationRequest().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,4) + ) + ), + namedtype.NamedType('popdecc', POPODecKeyChallContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,5) + ) + ), + namedtype.NamedType('popdecr', POPODecKeyRespContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,6) + ) + ), + namedtype.NamedType('kur', rfc2511.CertReqMessages().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,7) + ) + ), + namedtype.NamedType('kup', CertRepMessage().subtype( + 
explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,8) + ) + ), + namedtype.NamedType('krr', rfc2511.CertReqMessages().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,9) + ) + ), + namedtype.NamedType('krp', KeyRecRepContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,10) + ) + ), + namedtype.NamedType('rr', RevReqContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,11) + ) + ), + namedtype.NamedType('rp', RevRepContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,12) + ) + ), + namedtype.NamedType('ccr', rfc2511.CertReqMessages().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,13) + ) + ), + namedtype.NamedType('ccp', CertRepMessage().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,14) + ) + ), + namedtype.NamedType('ckuann', CAKeyUpdAnnContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,15) + ) + ), + namedtype.NamedType('cann', CertAnnContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,16) + ) + ), + namedtype.NamedType('rann', RevAnnContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,17) + ) + ), + namedtype.NamedType('crlann', CRLAnnContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,18) + ) + ), + namedtype.NamedType('pkiconf', PKIConfirmContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,19) + ) + ), + namedtype.NamedType('nested', nestedMessageContent), +# namedtype.NamedType('nested', NestedMessageContent().subtype( +# explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,20) +# ) +# ), + namedtype.NamedType('genm', GenMsgContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,21) + ) + ), + namedtype.NamedType('gen', GenRepContent().subtype( + 
explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,22) + ) + ), + namedtype.NamedType('error', ErrorMsgContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,23) + ) + ), + namedtype.NamedType('certConf', CertConfirmContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,24) + ) + ), + namedtype.NamedType('pollReq', PollReqContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,25) + ) + ), + namedtype.NamedType('pollRep', PollRepContent().subtype( + explicitTag=tag.Tag(tag.tagClassContext,tag.tagFormatConstructed,26) + ) + ) + ) + + +class PKIHeader(univ.Sequence): + """ + PKIHeader ::= SEQUENCE { + pvno INTEGER { cmp1999(1), cmp2000(2) }, + sender GeneralName, + recipient GeneralName, + messageTime [0] GeneralizedTime OPTIONAL, + protectionAlg [1] AlgorithmIdentifier OPTIONAL, + senderKID [2] KeyIdentifier OPTIONAL, + recipKID [3] KeyIdentifier OPTIONAL, + transactionID [4] OCTET STRING OPTIONAL, + senderNonce [5] OCTET STRING OPTIONAL, + recipNonce [6] OCTET STRING OPTIONAL, + freeText [7] PKIFreeText OPTIONAL, + generalInfo [8] SEQUENCE SIZE (1..MAX) OF + InfoTypeAndValue OPTIONAL + } + + """ + componentType = namedtype.NamedTypes( + namedtype.NamedType('pvno', univ.Integer( + namedValues=namedval.NamedValues( + ('cmp1999', 1), + ('cmp2000', 2) + ) + ) + ), + namedtype.NamedType('sender', rfc2459.GeneralName()), + namedtype.NamedType('recipient', rfc2459.GeneralName()), + namedtype.OptionalNamedType('messageTime', useful.GeneralizedTime().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))), + namedtype.OptionalNamedType('protectionAlg', rfc2459.AlgorithmIdentifier().subtype( + explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1))), + namedtype.OptionalNamedType('senderKID', rfc2459.KeyIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 2))), + namedtype.OptionalNamedType('recipKID', 
rfc2459.KeyIdentifier().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 3))),
        namedtype.OptionalNamedType('transactionID', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 4))),
        namedtype.OptionalNamedType('senderNonce', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 5))),
        namedtype.OptionalNamedType('recipNonce', univ.OctetString().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 6))),
        namedtype.OptionalNamedType('freeText', PKIFreeText().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 7))),
        # NOTE(review): the SIZE (1..MAX) constraint and the [8] explicit tag are
        # attached to the InfoTypeAndValue component here, not to the enclosing
        # SequenceOf. RFC 4210 tags the SEQUENCE OF itself (compare how
        # PKIMessage.extraCerts below applies .subtype() to the SequenceOf).
        # Verify against interoperability captures before changing.
        namedtype.OptionalNamedType('generalInfo',
            univ.SequenceOf(
                componentType=InfoTypeAndValue().subtype(
                    subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
                    explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 8)
                )
            )
        )
    )

class ProtectedPart(univ.Sequence):
    """
    ProtectedPart ::= SEQUENCE {
        header    PKIHeader,
        body      PKIBody
    }
    """
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('header', PKIHeader()),
        # NOTE(review): RFC 4210 names this second field 'body'; the component
        # is kept as 'infoValue' to match the docstring's source module as
        # shipped. Since DER encoding is positional the wire format is
        # unaffected, but dict-style accessors (msg['infoValue']) depend on
        # this name — confirm all callers before renaming.
        namedtype.NamedType('infoValue', PKIBody())
    )

class PKIMessage(univ.Sequence):
    """
    PKIMessage ::= SEQUENCE {
        header           PKIHeader,
        body             PKIBody,
        protection   [0] PKIProtection OPTIONAL,
        extraCerts   [1] SEQUENCE SIZE (1..MAX) OF CMPCertificate
                         OPTIONAL
    }"""
    componentType = namedtype.NamedTypes(
        namedtype.NamedType('header', PKIHeader()),
        namedtype.NamedType('body', PKIBody()),
        namedtype.OptionalNamedType('protection', PKIProtection().subtype(explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatSimple, 0))),
        # Here the size constraint and [1] tag are correctly applied to the
        # SequenceOf as a whole (contrast with generalInfo above).
        namedtype.OptionalNamedType( 'extraCerts',
            univ.SequenceOf(
                componentType=CMPCertificate()
            ).subtype(
                subtypeSpec=constraint.ValueSizeConstraint(1, MAX),
                explicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 1)
            )
        )
    )

class PKIMessages(univ.SequenceOf):
    """
    PKIMessages ::= SEQUENCE SIZE (1..MAX) OF PKIMessage
""" + componentType = PKIMessage() + subtypeSpec = univ.SequenceOf.subtypeSpec + constraint.ValueSizeConstraint(1, MAX) + +# pyasn1 does not naturally handle recursive definitions, thus this hack: +# NestedMessageContent ::= PKIMessages +NestedMessageContent.componentType = PKIMessages() +nestedMessageContent.componentType = PKIMessages() diff --git a/pyasn1_modules/rfc5208.py b/pyasn1_modules/rfc5208.py new file mode 100644 index 0000000..d1d2c16 --- /dev/null +++ b/pyasn1_modules/rfc5208.py @@ -0,0 +1,39 @@ +# +# PKCS#8 syntax +# +# ASN.1 source from: +# http://tools.ietf.org/html/rfc5208 +# +# Sample captures could be obtained with "openssl pkcs8 -topk8" command +# +from pyasn1.type import tag, namedtype, namedval, univ, constraint +from pyasn1_modules.rfc2459 import * +from pyasn1_modules import rfc2251 + +class KeyEncryptionAlgorithms(AlgorithmIdentifier): pass + +class PrivateKeyAlgorithms(AlgorithmIdentifier): pass + +class EncryptedData(univ.OctetString): pass + +class EncryptedPrivateKeyInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('encryptionAlgorithm', AlgorithmIdentifier()), + namedtype.NamedType('encryptedData', EncryptedData()) + ) + +class PrivateKey(univ.OctetString): pass + +class Attributes(univ.SetOf): + componentType = rfc2251.Attribute() + +class Version(univ.Integer): + namedValues = namedval.NamedValues(('v1', 0), ('v2', 1)) + +class PrivateKeyInfo(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', Version()), + namedtype.NamedType('privateKeyAlgorithm', AlgorithmIdentifier()), + namedtype.NamedType('privateKey', PrivateKey()), + namedtype.OptionalNamedType('attributes', Attributes().subtype(implicitTag=tag.Tag(tag.tagClassContext, tag.tagFormatConstructed, 0))) + ) diff --git a/rsa/__init__.py b/rsa/__init__.py new file mode 100644 index 0000000..c572c06 --- /dev/null +++ b/rsa/__init__.py @@ -0,0 +1,42 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. 
Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +"""RSA module + +Module for calculating large primes, and RSA encryption, decryption, signing +and verification. Includes generating public and private keys. + +WARNING: this implementation does not use random padding, compression of the +cleartext input to prevent repetitions, or other common security improvements. +Use with care. + +""" + +from rsa.key import newkeys, PrivateKey, PublicKey +from rsa.pkcs1 import encrypt, decrypt, sign, verify, DecryptionError, \ + VerificationError + +__author__ = "Sybren Stuvel, Barry Mead and Yesudeep Mangalapilly" +__date__ = "2016-03-29" +__version__ = '3.4.2' + +# Do doctest if we're run directly +if __name__ == "__main__": + import doctest + + doctest.testmod() + +__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify", 'PublicKey', + 'PrivateKey', 'DecryptionError', 'VerificationError'] diff --git a/rsa/_compat.py b/rsa/_compat.py new file mode 100644 index 0000000..93393d9 --- /dev/null +++ b/rsa/_compat.py @@ -0,0 +1,148 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Python compatibility wrappers.""" + +from __future__ import absolute_import + +import sys +from struct import pack + +try: + MAX_INT = sys.maxsize +except AttributeError: + MAX_INT = sys.maxint + +MAX_INT64 = (1 << 63) - 1 +MAX_INT32 = (1 << 31) - 1 +MAX_INT16 = (1 << 15) - 1 + +# Determine the word size of the processor. +if MAX_INT == MAX_INT64: + # 64-bit processor. + MACHINE_WORD_SIZE = 64 +elif MAX_INT == MAX_INT32: + # 32-bit processor. + MACHINE_WORD_SIZE = 32 +else: + # Else we just assume 64-bit processor keeping up with modern times. + MACHINE_WORD_SIZE = 64 + +try: + # < Python3 + unicode_type = unicode +except NameError: + # Python3. + unicode_type = str + +# Fake byte literals. +if str is unicode_type: + def byte_literal(s): + return s.encode('latin1') +else: + def byte_literal(s): + return s + +# ``long`` is no more. Do type detection using this instead. +try: + integer_types = (int, long) +except NameError: + integer_types = (int,) + +b = byte_literal + +# To avoid calling b() multiple times in tight loops. +ZERO_BYTE = b('\x00') +EMPTY_BYTE = b('') + + +def is_bytes(obj): + """ + Determines whether the given value is a byte string. + + :param obj: + The value to test. + :returns: + ``True`` if ``value`` is a byte string; ``False`` otherwise. + """ + return isinstance(obj, bytes) + + +def is_integer(obj): + """ + Determines whether the given value is an integer. + + :param obj: + The value to test. + :returns: + ``True`` if ``value`` is an integer; ``False`` otherwise. 
+ """ + return isinstance(obj, integer_types) + + +def byte(num): + """ + Converts a number between 0 and 255 (both inclusive) to a base-256 (byte) + representation. + + Use it as a replacement for ``chr`` where you are expecting a byte + because this will work on all current versions of Python:: + + :param num: + An unsigned integer between 0 and 255 (both inclusive). + :returns: + A single byte. + """ + return pack("B", num) + + +def get_word_alignment(num, force_arch=64, + _machine_word_size=MACHINE_WORD_SIZE): + """ + Returns alignment details for the given number based on the platform + Python is running on. + + :param num: + Unsigned integral number. + :param force_arch: + If you don't want to use 64-bit unsigned chunks, set this to + anything other than 64. 32-bit chunks will be preferred then. + Default 64 will be used when on a 64-bit machine. + :param _machine_word_size: + (Internal) The machine word size used for alignment. + :returns: + 4-tuple:: + + (word_bits, word_bytes, + max_uint, packing_format_type) + """ + max_uint64 = 0xffffffffffffffff + max_uint32 = 0xffffffff + max_uint16 = 0xffff + max_uint8 = 0xff + + if force_arch == 64 and _machine_word_size >= 64 and num > max_uint32: + # 64-bit unsigned integer. + return 64, 8, max_uint64, "Q" + elif num > max_uint16: + # 32-bit unsigned integer + return 32, 4, max_uint32, "L" + elif num > max_uint8: + # 16-bit unsigned integer. + return 16, 2, max_uint16, "H" + else: + # 8-bit unsigned integer. + return 8, 1, max_uint8, "B" diff --git a/rsa/_version133.py b/rsa/_version133.py new file mode 100644 index 0000000..ff03b45 --- /dev/null +++ b/rsa/_version133.py @@ -0,0 +1,441 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Deprecated version of the RSA module + +.. deprecated:: 2.0 + + This submodule is deprecated and will be completely removed as of version 4.0. + +Module for calculating large primes, and RSA encryption, decryption, +signing and verification. Includes generating public and private keys. + +WARNING: this code implements the mathematics of RSA. It is not suitable for +real-world secure cryptography purposes. It has not been reviewed by a security +expert. It does not include padding of data. There are many ways in which the +output of this module, when used without any modification, can be sucessfully +attacked. +""" + +__author__ = "Sybren Stuvel, Marloes de Boer and Ivo Tamboer" +__date__ = "2010-02-05" +__version__ = '1.3.3' + +# NOTE: Python's modulo can return negative numbers. We compensate for +# this behaviour using the abs() function + +try: + import cPickle as pickle +except ImportError: + import pickle +from pickle import dumps, loads +import base64 +import math +import os +import random +import sys +import types +import zlib + +from rsa._compat import byte + +# Display a warning that this insecure version is imported. 
import warnings
warnings.warn('Insecure version of the RSA module is imported as %s, be careful'
        % __name__)
warnings.warn('This submodule is deprecated and will be completely removed as of version 4.0.',
              DeprecationWarning)


def gcd(p, q):
    """Returns the greatest common divisor of p and q


    >>> gcd(42, 6)
    6
    """
    # BUGFIX(review): this hunk was garbled — the fragment "if p 0:" was all
    # that remained of gcd's body, bytes2int() was missing entirely, and the
    # tail of int2bytes() had been merged into gcd. Restored from upstream
    # python-rsa (_version133, v1.3.3 as vendored in rsa 3.4.2).
    if p < q:
        return gcd(q, p)
    if q == 0:
        return p
    return gcd(q, abs(p % q))


def bytes2int(bytes):
    """Converts a list of bytes or a string to an integer

    >>> (((128 * 256) + 64) * 256) + 15
    8405007
    >>> l = [128, 64, 15]
    >>> bytes2int(l)
    8405007
    """

    if not (type(bytes) is types.ListType or type(bytes) is types.StringType):
        raise TypeError("You must pass a string or a list")

    # Convert byte stream to integer, most significant byte first.
    integer = 0
    for byte in bytes:
        integer *= 256
        if type(byte) is types.StringType:
            byte = ord(byte)
        integer += byte

    return integer


def int2bytes(number):
    """Converts a number to a string of bytes

    >>> bytes2int(int2bytes(123456789))
    123456789
    """

    if not (type(number) is types.LongType or type(number) is types.IntType):
        raise TypeError("You must pass a long or an int")

    string = ""

    # Peel off the least significant byte each pass; Python-2 integer
    # division via /= is intentional in this deprecated py2-only module.
    while number > 0:
        string = "%s%s" % (byte(number & 0xFF), string)
        number /= 256

    return string


def fast_exponentiation(a, p, n):
    """Calculates r = a^p mod n
    """
    # Square-and-multiply using the bits of p, least significant first.
    result = a % n
    remainders = []
    while p != 1:
        remainders.append(p & 1)
        p = p >> 1
    while remainders:
        rem = remainders.pop()
        result = ((a ** rem) * result ** 2) % n
    return result


def read_random_int(nbits):
    """Reads a random integer of approximately nbits bits rounded up
    to whole bytes"""

    nbytes = ceil(nbits / 8.)
    randomdata = os.urandom(nbytes)
    return bytes2int(randomdata)


def ceil(x):
    """ceil(x) -> int(math.ceil(x))"""

    return int(math.ceil(x))


def randint(minvalue, maxvalue):
    """Returns a random integer x with minvalue <= x <= maxvalue"""

    # Safety - get a lot of random data even if the range is fairly
    # small
    min_nbits = 32

    # The range of the random numbers we need to generate
    range = maxvalue - minvalue

    # Which is this number of bytes
    rangebytes = ceil(math.log(range, 2) / 8.)
+ + # Convert to bits, but make sure it's always at least min_nbits*2 + rangebits = max(rangebytes * 8, min_nbits * 2) + + # Take a random number of bits between min_nbits and rangebits + nbits = random.randint(min_nbits, rangebits) + + return (read_random_int(nbits) % range) + minvalue + +def fermat_little_theorem(p): + """Returns 1 if p may be prime, and something else if p definitely + is not prime""" + + a = randint(1, p-1) + return fast_exponentiation(a, p-1, p) + +def jacobi(a, b): + """Calculates the value of the Jacobi symbol (a/b) + """ + + if a % b == 0: + return 0 + result = 1 + while a > 1: + if a & 1: + if ((a-1)*(b-1) >> 2) & 1: + result = -result + b, a = a, b % a + else: + if ((b ** 2 - 1) >> 3) & 1: + result = -result + a = a >> 1 + return result + +def jacobi_witness(x, n): + """Returns False if n is an Euler pseudo-prime with base x, and + True otherwise. + """ + + j = jacobi(x, n) % n + f = fast_exponentiation(x, (n-1)/2, n) + + if j == f: return False + return True + +def randomized_primality_testing(n, k): + """Calculates whether n is composite (which is always correct) or + prime (which is incorrect with error probability 2**-k) + + Returns False if the number if composite, and True if it's + probably prime. + """ + + q = 0.5 # Property of the jacobi_witness function + + # t = int(math.ceil(k / math.log(1/q, 2))) + t = ceil(k / math.log(1/q, 2)) + for i in range(t+1): + x = randint(1, n-1) + if jacobi_witness(x, n): return False + + return True + +def is_prime(number): + """Returns True if the number is prime, and False otherwise. + """ + + """ + if not fermat_little_theorem(number) == 1: + # Not prime, according to Fermat's little theorem + return False + """ + + if randomized_primality_testing(number, 5): + # Prime, according to Jacobi + return True + + # Not prime + return False + + +def getprime(nbits): + """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In + other words: nbits is rounded up to whole bytes. 
+ """ + + nbytes = int(math.ceil(nbits/8.)) + + while True: + integer = read_random_int(nbits) + + # Make sure it's odd + integer |= 1 + + # Test for primeness + if is_prime(integer): break + + # Retry if not prime + + return integer + +def are_relatively_prime(a, b): + """Returns True if a and b are relatively prime, and False if they + are not. + """ + + d = gcd(a, b) + return (d == 1) + +def find_p_q(nbits): + """Returns a tuple of two different primes of nbits bits""" + + p = getprime(nbits) + while True: + q = getprime(nbits) + if not q == p: break + + return (p, q) + +def extended_euclid_gcd(a, b): + """Returns a tuple (d, i, j) such that d = gcd(a, b) = ia + jb + """ + + if b == 0: + return (a, 1, 0) + + q = abs(a % b) + r = long(a / b) + (d, k, l) = extended_euclid_gcd(b, q) + + return (d, l, k - l*r) + +# Main function: calculate encryption and decryption keys +def calculate_keys(p, q, nbits): + """Calculates an encryption and a decryption key for p and q, and + returns them as a tuple (e, d)""" + + n = p * q + phi_n = (p-1) * (q-1) + + while True: + # Make sure e has enough bits so we ensure "wrapping" through + # modulo n + e = getprime(max(8, nbits/2)) + if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break + + (d, i, j) = extended_euclid_gcd(e, phi_n) + + if not d == 1: + raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n)) + + if not (e * i) % phi_n == 1: + raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n)) + + return (e, i) + + +def gen_keys(nbits): + """Generate RSA keys of nbits bits. Returns (p, q, e, d). + + Note: this can take a long time, depending on the key size. + """ + + while True: + (p, q) = find_p_q(nbits) + (e, d) = calculate_keys(p, q, nbits) + + # For some reason, d is sometimes negative. 
We don't know how + # to fix it (yet), so we keep trying until everything is shiny + if d > 0: break + + return (p, q, e, d) + +def gen_pubpriv_keys(nbits): + """Generates public and private keys, and returns them as (pub, + priv). + + The public key consists of a dict {e: ..., , n: ....). The private + key consists of a dict {d: ...., p: ...., q: ....). + """ + + (p, q, e, d) = gen_keys(nbits) + + return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} ) + +def encrypt_int(message, ekey, n): + """Encrypts a message using encryption key 'ekey', working modulo + n""" + + if type(message) is types.IntType: + return encrypt_int(long(message), ekey, n) + + if not type(message) is types.LongType: + raise TypeError("You must pass a long or an int") + + if message > 0 and \ + math.floor(math.log(message, 2)) > math.floor(math.log(n, 2)): + raise OverflowError("The message is too long") + + return fast_exponentiation(message, ekey, n) + +def decrypt_int(cyphertext, dkey, n): + """Decrypts a cypher text using the decryption key 'dkey', working + modulo n""" + + return encrypt_int(cyphertext, dkey, n) + +def sign_int(message, dkey, n): + """Signs 'message' using key 'dkey', working modulo n""" + + return decrypt_int(message, dkey, n) + +def verify_int(signed, ekey, n): + """verifies 'signed' using key 'ekey', working modulo n""" + + return encrypt_int(signed, ekey, n) + +def picklechops(chops): + """Pickles and base64encodes it's argument chops""" + + value = zlib.compress(dumps(chops)) + encoded = base64.encodestring(value) + return encoded.strip() + +def unpicklechops(string): + """base64decodes and unpickes it's argument string into chops""" + + return loads(zlib.decompress(base64.decodestring(string))) + +def chopstring(message, key, n, funcref): + """Splits 'message' into chops that are at most as long as n, + converts these into integers, and calls funcref(integer, key, n) + for each chop. + + Used by 'encrypt' and 'sign'. 
+ """ + + msglen = len(message) + mbits = msglen * 8 + nbits = int(math.floor(math.log(n, 2))) + nbytes = nbits / 8 + blocks = msglen / nbytes + + if msglen % nbytes > 0: + blocks += 1 + + cypher = [] + + for bindex in range(blocks): + offset = bindex * nbytes + block = message[offset:offset+nbytes] + value = bytes2int(block) + cypher.append(funcref(value, key, n)) + + return picklechops(cypher) + +def gluechops(chops, key, n, funcref): + """Glues chops back together into a string. calls + funcref(integer, key, n) for each chop. + + Used by 'decrypt' and 'verify'. + """ + message = "" + + chops = unpicklechops(chops) + + for cpart in chops: + mpart = funcref(cpart, key, n) + message += int2bytes(mpart) + + return message + +def encrypt(message, key): + """Encrypts a string 'message' with the public key 'key'""" + + return chopstring(message, key['e'], key['n'], encrypt_int) + +def sign(message, key): + """Signs a string 'message' with the private key 'key'""" + + return chopstring(message, key['d'], key['p']*key['q'], decrypt_int) + +def decrypt(cypher, key): + """Decrypts a cypher with the private key 'key'""" + + return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int) + +def verify(cypher, key): + """Verifies a cypher with the public key 'key'""" + + return gluechops(cypher, key['e'], key['n'], encrypt_int) + +# Do doctest if we're not imported +if __name__ == "__main__": + import doctest + doctest.testmod() + +__all__ = ["gen_pubpriv_keys", "encrypt", "decrypt", "sign", "verify"] + diff --git a/rsa/_version200.py b/rsa/_version200.py new file mode 100644 index 0000000..1a16949 --- /dev/null +++ b/rsa/_version200.py @@ -0,0 +1,513 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Deprecated version of the RSA module + +.. deprecated:: 3.0 + + This submodule is deprecated and will be completely removed as of version 4.0. + +""" + +__author__ = "Sybren Stuvel, Marloes de Boer, Ivo Tamboer, and Barry Mead" +__date__ = "2010-02-08" +__version__ = '2.0' + +import math +import os +import random +import sys +import types +from rsa._compat import byte + +# Display a warning that this insecure version is imported. +import warnings +warnings.warn('Insecure version of the RSA module is imported as %s' % __name__) +warnings.warn('This submodule is deprecated and will be completely removed as of version 4.0.', + DeprecationWarning) + + +def bit_size(number): + """Returns the number of bits required to hold a specific long number""" + + return int(math.ceil(math.log(number,2))) + +def gcd(p, q): + """Returns the greatest common divisor of p and q + >>> gcd(48, 180) + 12 + """ + # Iterateive Version is faster and uses much less stack space + while q != 0: + if p < q: (p,q) = (q,p) + (p,q) = (q, p % q) + return p + + +def bytes2int(bytes): + r"""Converts a list of bytes or a string to an integer + """ + + if not (type(bytes) is types.ListType or type(bytes) is types.StringType): + raise TypeError("You must pass a string or a list") + + # Convert byte stream to integer + integer = 0 + for byte in bytes: + integer *= 256 + if type(byte) is types.StringType: byte = ord(byte) + integer += byte + + return integer + +def int2bytes(number): + """ + Converts a number to a string of bytes + """ + + if not (type(number) is types.LongType or 
type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + string = "" + + while number > 0: + string = "%s%s" % (byte(number & 0xFF), string) + number /= 256 + + return string + +def to64(number): + """Converts a number in the range of 0 to 63 into base 64 digit + character in the range of '0'-'9', 'A'-'Z', 'a'-'z','-','_'. + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + if 0 <= number <= 9: #00-09 translates to '0' - '9' + return byte(number + 48) + + if 10 <= number <= 35: + return byte(number + 55) #10-35 translates to 'A' - 'Z' + + if 36 <= number <= 61: + return byte(number + 61) #36-61 translates to 'a' - 'z' + + if number == 62: # 62 translates to '-' (minus) + return byte(45) + + if number == 63: # 63 translates to '_' (underscore) + return byte(95) + + raise ValueError('Invalid Base64 value: %i' % number) + + +def from64(number): + """Converts an ordinal character value in the range of + 0-9,A-Z,a-z,-,_ to a number in the range of 0-63. + """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + if 48 <= number <= 57: #ord('0') - ord('9') translates to 0-9 + return(number - 48) + + if 65 <= number <= 90: #ord('A') - ord('Z') translates to 10-35 + return(number - 55) + + if 97 <= number <= 122: #ord('a') - ord('z') translates to 36-61 + return(number - 61) + + if number == 45: #ord('-') translates to 62 + return(62) + + if number == 95: #ord('_') translates to 63 + return(63) + + raise ValueError('Invalid Base64 value: %i' % number) + + +def int2str64(number): + """Converts a number to a string of base64 encoded characters in + the range of '0'-'9','A'-'Z,'a'-'z','-','_'. 
+ """ + + if not (type(number) is types.LongType or type(number) is types.IntType): + raise TypeError("You must pass a long or an int") + + string = "" + + while number > 0: + string = "%s%s" % (to64(number & 0x3F), string) + number /= 64 + + return string + + +def str642int(string): + """Converts a base64 encoded string into an integer. + The chars of this string in in the range '0'-'9','A'-'Z','a'-'z','-','_' + """ + + if not (type(string) is types.ListType or type(string) is types.StringType): + raise TypeError("You must pass a string or a list") + + integer = 0 + for byte in string: + integer *= 64 + if type(byte) is types.StringType: byte = ord(byte) + integer += from64(byte) + + return integer + +def read_random_int(nbits): + """Reads a random integer of approximately nbits bits rounded up + to whole bytes""" + + nbytes = int(math.ceil(nbits/8.)) + randomdata = os.urandom(nbytes) + return bytes2int(randomdata) + +def randint(minvalue, maxvalue): + """Returns a random integer x with minvalue <= x <= maxvalue""" + + # Safety - get a lot of random data even if the range is fairly + # small + min_nbits = 32 + + # The range of the random numbers we need to generate + range = (maxvalue - minvalue) + 1 + + # Which is this number of bytes + rangebytes = ((bit_size(range) + 7) / 8) + + # Convert to bits, but make sure it's always at least min_nbits*2 + rangebits = max(rangebytes * 8, min_nbits * 2) + + # Take a random number of bits between min_nbits and rangebits + nbits = random.randint(min_nbits, rangebits) + + return (read_random_int(nbits) % range) + minvalue + +def jacobi(a, b): + """Calculates the value of the Jacobi symbol (a/b) + where both a and b are positive integers, and b is odd + """ + + if a == 0: return 0 + result = 1 + while a > 1: + if a & 1: + if ((a-1)*(b-1) >> 2) & 1: + result = -result + a, b = b % a, a + else: + if (((b * b) - 1) >> 3) & 1: + result = -result + a >>= 1 + if a == 0: return 0 + return result + +def jacobi_witness(x, n): + 
"""Returns False if n is an Euler pseudo-prime with base x, and + True otherwise. + """ + + j = jacobi(x, n) % n + f = pow(x, (n-1)/2, n) + + if j == f: return False + return True + +def randomized_primality_testing(n, k): + """Calculates whether n is composite (which is always correct) or + prime (which is incorrect with error probability 2**-k) + + Returns False if the number is composite, and True if it's + probably prime. + """ + + # 50% of Jacobi-witnesses can report compositness of non-prime numbers + + for i in range(k): + x = randint(1, n-1) + if jacobi_witness(x, n): return False + + return True + +def is_prime(number): + """Returns True if the number is prime, and False otherwise. + """ + + if randomized_primality_testing(number, 6): + # Prime, according to Jacobi + return True + + # Not prime + return False + + +def getprime(nbits): + """Returns a prime number of max. 'math.ceil(nbits/8)*8' bits. In + other words: nbits is rounded up to whole bytes. + """ + + while True: + integer = read_random_int(nbits) + + # Make sure it's odd + integer |= 1 + + # Test for primeness + if is_prime(integer): break + + # Retry if not prime + + return integer + +def are_relatively_prime(a, b): + """Returns True if a and b are relatively prime, and False if they + are not. + + >>> are_relatively_prime(2, 3) + 1 + >>> are_relatively_prime(2, 4) + 0 + """ + + d = gcd(a, b) + return (d == 1) + +def find_p_q(nbits): + """Returns a tuple of two different primes of nbits bits""" + pbits = nbits + (nbits/16) #Make sure that p and q aren't too close + qbits = nbits - (nbits/16) #or the factoring programs can factor n + p = getprime(pbits) + while True: + q = getprime(qbits) + #Make sure p and q are different. 
+ if not q == p: break + return (p, q) + +def extended_gcd(a, b): + """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb + """ + # r = gcd(a,b) i = multiplicitive inverse of a mod b + # or j = multiplicitive inverse of b mod a + # Neg return values for i or j are made positive mod b or a respectively + # Iterateive Version is faster and uses much less stack space + x = 0 + y = 1 + lx = 1 + ly = 0 + oa = a #Remember original a/b to remove + ob = b #negative values from return results + while b != 0: + q = long(a/b) + (a, b) = (b, a % b) + (x, lx) = ((lx - (q * x)),x) + (y, ly) = ((ly - (q * y)),y) + if (lx < 0): lx += ob #If neg wrap modulo orignal b + if (ly < 0): ly += oa #If neg wrap modulo orignal a + return (a, lx, ly) #Return only positive values + +# Main function: calculate encryption and decryption keys +def calculate_keys(p, q, nbits): + """Calculates an encryption and a decryption key for p and q, and + returns them as a tuple (e, d)""" + + n = p * q + phi_n = (p-1) * (q-1) + + while True: + # Make sure e has enough bits so we ensure "wrapping" through + # modulo n + e = max(65537,getprime(nbits/4)) + if are_relatively_prime(e, n) and are_relatively_prime(e, phi_n): break + + (d, i, j) = extended_gcd(e, phi_n) + + if not d == 1: + raise Exception("e (%d) and phi_n (%d) are not relatively prime" % (e, phi_n)) + if (i < 0): + raise Exception("New extended_gcd shouldn't return negative values") + if not (e * i) % phi_n == 1: + raise Exception("e (%d) and i (%d) are not mult. inv. modulo phi_n (%d)" % (e, i, phi_n)) + + return (e, i) + + +def gen_keys(nbits): + """Generate RSA keys of nbits bits. Returns (p, q, e, d). + + Note: this can take a long time, depending on the key size. + """ + + (p, q) = find_p_q(nbits) + (e, d) = calculate_keys(p, q, nbits) + + return (p, q, e, d) + +def newkeys(nbits): + """Generates public and private keys, and returns them as (pub, + priv). + + The public key consists of a dict {e: ..., , n: ....). 
The private + key consists of a dict {d: ...., p: ...., q: ....). + """ + nbits = max(9,nbits) # Don't let nbits go below 9 bits + (p, q, e, d) = gen_keys(nbits) + + return ( {'e': e, 'n': p*q}, {'d': d, 'p': p, 'q': q} ) + +def encrypt_int(message, ekey, n): + """Encrypts a message using encryption key 'ekey', working modulo n""" + + if type(message) is types.IntType: + message = long(message) + + if not type(message) is types.LongType: + raise TypeError("You must pass a long or int") + + if message < 0 or message > n: + raise OverflowError("The message is too long") + + #Note: Bit exponents start at zero (bit counts start at 1) this is correct + safebit = bit_size(n) - 2 #compute safe bit (MSB - 1) + message += (1 << safebit) #add safebit to ensure folding + + return pow(message, ekey, n) + +def decrypt_int(cyphertext, dkey, n): + """Decrypts a cypher text using the decryption key 'dkey', working + modulo n""" + + message = pow(cyphertext, dkey, n) + + safebit = bit_size(n) - 2 #compute safe bit (MSB - 1) + message -= (1 << safebit) #remove safebit before decode + + return message + +def encode64chops(chops): + """base64encodes chops and combines them into a ',' delimited string""" + + chips = [] #chips are character chops + + for value in chops: + chips.append(int2str64(value)) + + #delimit chops with comma + encoded = ','.join(chips) + + return encoded + +def decode64chops(string): + """base64decodes and makes a ',' delimited string into chops""" + + chips = string.split(',') #split chops at commas + + chops = [] + + for string in chips: #make char chops (chips) into chops + chops.append(str642int(string)) + + return chops + +def chopstring(message, key, n, funcref): + """Chops the 'message' into integers that fit into n, + leaving room for a safebit to be added to ensure that all + messages fold during exponentiation. The MSB of the number n + is not independant modulo n (setting it could cause overflow), so + use the next lower bit for the safebit. 
Therefore reserve 2-bits + in the number n for non-data bits. Calls specified encryption + function for each chop. + + Used by 'encrypt' and 'sign'. + """ + + msglen = len(message) + mbits = msglen * 8 + #Set aside 2-bits so setting of safebit won't overflow modulo n. + nbits = bit_size(n) - 2 # leave room for safebit + nbytes = nbits / 8 + blocks = msglen / nbytes + + if msglen % nbytes > 0: + blocks += 1 + + cypher = [] + + for bindex in range(blocks): + offset = bindex * nbytes + block = message[offset:offset+nbytes] + value = bytes2int(block) + cypher.append(funcref(value, key, n)) + + return encode64chops(cypher) #Encode encrypted ints to base64 strings + +def gluechops(string, key, n, funcref): + """Glues chops back together into a string. calls + funcref(integer, key, n) for each chop. + + Used by 'decrypt' and 'verify'. + """ + message = "" + + chops = decode64chops(string) #Decode base64 strings into integer chops + + for cpart in chops: + mpart = funcref(cpart, key, n) #Decrypt each chop + message += int2bytes(mpart) #Combine decrypted strings into a msg + + return message + +def encrypt(message, key): + """Encrypts a string 'message' with the public key 'key'""" + if 'n' not in key: + raise Exception("You must use the public key with encrypt") + + return chopstring(message, key['e'], key['n'], encrypt_int) + +def sign(message, key): + """Signs a string 'message' with the private key 'key'""" + if 'p' not in key: + raise Exception("You must use the private key with sign") + + return chopstring(message, key['d'], key['p']*key['q'], encrypt_int) + +def decrypt(cypher, key): + """Decrypts a string 'cypher' with the private key 'key'""" + if 'p' not in key: + raise Exception("You must use the private key with decrypt") + + return gluechops(cypher, key['d'], key['p']*key['q'], decrypt_int) + +def verify(cypher, key): + """Verifies a string 'cypher' with the public key 'key'""" + if 'n' not in key: + raise Exception("You must use the public key with verify") + 
+ return gluechops(cypher, key['e'], key['n'], decrypt_int) + +# Do doctest if we're not imported +if __name__ == "__main__": + import doctest + doctest.testmod() + +__all__ = ["newkeys", "encrypt", "decrypt", "sign", "verify"] + diff --git a/rsa/asn1.py b/rsa/asn1.py new file mode 100644 index 0000000..b724b8f --- /dev/null +++ b/rsa/asn1.py @@ -0,0 +1,53 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""ASN.1 definitions. + +Not all ASN.1-handling code use these definitions, but when it does, they should be here. 
+""" + +from pyasn1.type import univ, namedtype, tag + + +class PubKeyHeader(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('oid', univ.ObjectIdentifier()), + namedtype.NamedType('parameters', univ.Null()), + ) + + +class OpenSSLPubKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('header', PubKeyHeader()), + + # This little hack (the implicit tag) allows us to get a Bit String as Octet String + namedtype.NamedType('key', univ.OctetString().subtype( + implicitTag=tag.Tag(tagClass=0, tagFormat=0, tagId=3))), + ) + + +class AsnPubKey(univ.Sequence): + """ASN.1 contents of DER encoded public key: + + RSAPublicKey ::= SEQUENCE { + modulus INTEGER, -- n + publicExponent INTEGER, -- e + """ + + componentType = namedtype.NamedTypes( + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), + ) diff --git a/rsa/bigfile.py b/rsa/bigfile.py new file mode 100644 index 0000000..3a09716 --- /dev/null +++ b/rsa/bigfile.py @@ -0,0 +1,135 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Large file support + +.. deprecated:: 3.4 + + The VARBLOCK format is NOT recommended for general use, has been deprecated since + Python-RSA 3.4, and will be removed in a future release. It's vulnerable to a + number of attacks: + + 1. 
decrypt/encrypt_bigfile() does not implement `Authenticated encryption`_ nor + uses MACs to verify messages before decrypting public key encrypted messages. + + 2. decrypt/encrypt_bigfile() does not use hybrid encryption (it uses plain RSA) + and has no method for chaining, so block reordering is possible. + + See `issue #19 on Github`_ for more information. + +.. _Authenticated encryption: https://en.wikipedia.org/wiki/Authenticated_encryption +.. _issue #19 on Github: https://github.com/sybrenstuvel/python-rsa/issues/13 + + +This module contains functions to: + + - break a file into smaller blocks, and encrypt them, and store the + encrypted blocks in another file. + + - take such an encrypted files, decrypt its blocks, and reconstruct the + original file. + +The encrypted file format is as follows, where || denotes byte concatenation: + + FILE := VERSION || BLOCK || BLOCK ... + + BLOCK := LENGTH || DATA + + LENGTH := varint-encoded length of the subsequent data. Varint comes from + Google Protobuf, and encodes an integer into a variable number of bytes. + Each byte uses the 7 lowest bits to encode the value. The highest bit set + to 1 indicates the next byte is also part of the varint. The last byte will + have this bit set to 0. + +This file format is called the VARBLOCK format, in line with the varint format +used to denote the block sizes. + +""" + +import warnings + +from rsa import key, common, pkcs1, varblock +from rsa._compat import byte + + +def encrypt_bigfile(infile, outfile, pub_key): + """Encrypts a file, writing it to 'outfile' in VARBLOCK format. + + .. deprecated:: 3.4 + This function was deprecated in Python-RSA version 3.4 due to security issues + in the VARBLOCK format. See the documentation_ for more information. + + .. 
_documentation: https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files + + :param infile: file-like object to read the cleartext from + :param outfile: file-like object to write the crypto in VARBLOCK format to + :param pub_key: :py:class:`rsa.PublicKey` to encrypt with + + """ + + warnings.warn("The 'rsa.bigfile.encrypt_bigfile' function was deprecated in Python-RSA version " + "3.4 due to security issues in the VARBLOCK format. See " + "https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files " + "for more information.", + DeprecationWarning, stacklevel=2) + + if not isinstance(pub_key, key.PublicKey): + raise TypeError('Public key required, but got %r' % pub_key) + + key_bytes = common.bit_size(pub_key.n) // 8 + blocksize = key_bytes - 11 # keep space for PKCS#1 padding + + # Write the version number to the VARBLOCK file + outfile.write(byte(varblock.VARBLOCK_VERSION)) + + # Encrypt and write each block + for block in varblock.yield_fixedblocks(infile, blocksize): + crypto = pkcs1.encrypt(block, pub_key) + + varblock.write_varint(outfile, len(crypto)) + outfile.write(crypto) + + +def decrypt_bigfile(infile, outfile, priv_key): + """Decrypts an encrypted VARBLOCK file, writing it to 'outfile' + + .. deprecated:: 3.4 + This function was deprecated in Python-RSA version 3.4 due to security issues + in the VARBLOCK format. See the documentation_ for more information. + + .. _documentation: https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files + + :param infile: file-like object to read the crypto in VARBLOCK format from + :param outfile: file-like object to write the cleartext to + :param priv_key: :py:class:`rsa.PrivateKey` to decrypt with + + """ + + warnings.warn("The 'rsa.bigfile.decrypt_bigfile' function was deprecated in Python-RSA version " + "3.4 due to security issues in the VARBLOCK format. 
See " + "https://stuvel.eu/python-rsa-doc/usage.html#working-with-big-files " + "for more information.", + DeprecationWarning, stacklevel=2) + + if not isinstance(priv_key, key.PrivateKey): + raise TypeError('Private key required, but got %r' % priv_key) + + for block in varblock.yield_varblocks(infile): + cleartext = pkcs1.decrypt(block, priv_key) + outfile.write(cleartext) + + +__all__ = ['encrypt_bigfile', 'decrypt_bigfile'] diff --git a/rsa/cli.py b/rsa/cli.py new file mode 100644 index 0000000..3a21878 --- /dev/null +++ b/rsa/cli.py @@ -0,0 +1,383 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Commandline scripts. + +These scripts are called by the executables defined in setup.py. +""" + +from __future__ import with_statement, print_function + +import abc +import sys +from optparse import OptionParser + +import rsa +import rsa.bigfile +import rsa.pkcs1 + +HASH_METHODS = sorted(rsa.pkcs1.HASH_METHODS.keys()) + + +def keygen(): + """Key generator.""" + + # Parse the CLI options + parser = OptionParser(usage='usage: %prog [options] keysize', + description='Generates a new RSA keypair of "keysize" bits.') + + parser.add_option('--pubout', type='string', + help='Output filename for the public key. The public key is ' + 'not saved if this option is not present. 
You can use ' + 'pyrsa-priv2pub to create the public key file later.') + + parser.add_option('-o', '--out', type='string', + help='Output filename for the private key. The key is ' + 'written to stdout if this option is not present.') + + parser.add_option('--form', + help='key format of the private and public keys - default PEM', + choices=('PEM', 'DER'), default='PEM') + + (cli, cli_args) = parser.parse_args(sys.argv[1:]) + + if len(cli_args) != 1: + parser.print_help() + raise SystemExit(1) + + try: + keysize = int(cli_args[0]) + except ValueError: + parser.print_help() + print('Not a valid number: %s' % cli_args[0], file=sys.stderr) + raise SystemExit(1) + + print('Generating %i-bit key' % keysize, file=sys.stderr) + (pub_key, priv_key) = rsa.newkeys(keysize) + + # Save public key + if cli.pubout: + print('Writing public key to %s' % cli.pubout, file=sys.stderr) + data = pub_key.save_pkcs1(format=cli.form) + with open(cli.pubout, 'wb') as outfile: + outfile.write(data) + + # Save private key + data = priv_key.save_pkcs1(format=cli.form) + + if cli.out: + print('Writing private key to %s' % cli.out, file=sys.stderr) + with open(cli.out, 'wb') as outfile: + outfile.write(data) + else: + print('Writing private key to stdout', file=sys.stderr) + sys.stdout.write(data) + + +class CryptoOperation(object): + """CLI callable that operates with input, output, and a key.""" + + __metaclass__ = abc.ABCMeta + + keyname = 'public' # or 'private' + usage = 'usage: %%prog [options] %(keyname)s_key' + description = None + operation = 'decrypt' + operation_past = 'decrypted' + operation_progressive = 'decrypting' + input_help = 'Name of the file to %(operation)s. Reads from stdin if ' \ + 'not specified.' + output_help = 'Name of the file to write the %(operation_past)s file ' \ + 'to. Written to stdout if this option is not present.' 
+ expected_cli_args = 1 + has_output = True + + key_class = rsa.PublicKey + + def __init__(self): + self.usage = self.usage % self.__class__.__dict__ + self.input_help = self.input_help % self.__class__.__dict__ + self.output_help = self.output_help % self.__class__.__dict__ + + @abc.abstractmethod + def perform_operation(self, indata, key, cli_args=None): + """Performs the program's operation. + + Implement in a subclass. + + :returns: the data to write to the output. + """ + + def __call__(self): + """Runs the program.""" + + (cli, cli_args) = self.parse_cli() + + key = self.read_key(cli_args[0], cli.keyform) + + indata = self.read_infile(cli.input) + + print(self.operation_progressive.title(), file=sys.stderr) + outdata = self.perform_operation(indata, key, cli_args) + + if self.has_output: + self.write_outfile(outdata, cli.output) + + def parse_cli(self): + """Parse the CLI options + + :returns: (cli_opts, cli_args) + """ + + parser = OptionParser(usage=self.usage, description=self.description) + + parser.add_option('-i', '--input', type='string', help=self.input_help) + + if self.has_output: + parser.add_option('-o', '--output', type='string', help=self.output_help) + + parser.add_option('--keyform', + help='Key format of the %s key - default PEM' % self.keyname, + choices=('PEM', 'DER'), default='PEM') + + (cli, cli_args) = parser.parse_args(sys.argv[1:]) + + if len(cli_args) != self.expected_cli_args: + parser.print_help() + raise SystemExit(1) + + return cli, cli_args + + def read_key(self, filename, keyform): + """Reads a public or private key.""" + + print('Reading %s key from %s' % (self.keyname, filename), file=sys.stderr) + with open(filename, 'rb') as keyfile: + keydata = keyfile.read() + + return self.key_class.load_pkcs1(keydata, keyform) + + def read_infile(self, inname): + """Read the input file""" + + if inname: + print('Reading input from %s' % inname, file=sys.stderr) + with open(inname, 'rb') as infile: + return infile.read() + + 
print('Reading input from stdin', file=sys.stderr) + return sys.stdin.read() + + def write_outfile(self, outdata, outname): + """Write the output file""" + + if outname: + print('Writing output to %s' % outname, file=sys.stderr) + with open(outname, 'wb') as outfile: + outfile.write(outdata) + else: + print('Writing output to stdout', file=sys.stderr) + sys.stdout.write(outdata) + + +class EncryptOperation(CryptoOperation): + """Encrypts a file.""" + + keyname = 'public' + description = ('Encrypts a file. The file must be shorter than the key ' + 'length in order to be encrypted. For larger files, use the ' + 'pyrsa-encrypt-bigfile command.') + operation = 'encrypt' + operation_past = 'encrypted' + operation_progressive = 'encrypting' + + def perform_operation(self, indata, pub_key, cli_args=None): + """Encrypts files.""" + + return rsa.encrypt(indata, pub_key) + + +class DecryptOperation(CryptoOperation): + """Decrypts a file.""" + + keyname = 'private' + description = ('Decrypts a file. The original file must be shorter than ' + 'the key length in order to have been encrypted. For larger ' + 'files, use the pyrsa-decrypt-bigfile command.') + operation = 'decrypt' + operation_past = 'decrypted' + operation_progressive = 'decrypting' + key_class = rsa.PrivateKey + + def perform_operation(self, indata, priv_key, cli_args=None): + """Decrypts files.""" + + return rsa.decrypt(indata, priv_key) + + +class SignOperation(CryptoOperation): + """Signs a file.""" + + keyname = 'private' + usage = 'usage: %%prog [options] private_key hash_method' + description = ('Signs a file, outputs the signature. Choose the hash ' + 'method from %s' % ', '.join(HASH_METHODS)) + operation = 'sign' + operation_past = 'signature' + operation_progressive = 'Signing' + key_class = rsa.PrivateKey + expected_cli_args = 2 + + output_help = ('Name of the file to write the signature to. 
Written ' + 'to stdout if this option is not present.') + + def perform_operation(self, indata, priv_key, cli_args): + """Signs files.""" + + hash_method = cli_args[1] + if hash_method not in HASH_METHODS: + raise SystemExit('Invalid hash method, choose one of %s' % + ', '.join(HASH_METHODS)) + + return rsa.sign(indata, priv_key, hash_method) + + +class VerifyOperation(CryptoOperation): + """Verify a signature.""" + + keyname = 'public' + usage = 'usage: %%prog [options] public_key signature_file' + description = ('Verifies a signature, exits with status 0 upon success, ' + 'prints an error message and exits with status 1 upon error.') + operation = 'verify' + operation_past = 'verified' + operation_progressive = 'Verifying' + key_class = rsa.PublicKey + expected_cli_args = 2 + has_output = False + + def perform_operation(self, indata, pub_key, cli_args): + """Verifies files.""" + + signature_file = cli_args[1] + + with open(signature_file, 'rb') as sigfile: + signature = sigfile.read() + + try: + rsa.verify(indata, signature, pub_key) + except rsa.VerificationError: + raise SystemExit('Verification failed.') + + print('Verification OK', file=sys.stderr) + + +class BigfileOperation(CryptoOperation): + """CryptoOperation that doesn't read the entire file into memory.""" + + def __init__(self): + CryptoOperation.__init__(self) + + self.file_objects = [] + + def __del__(self): + """Closes any open file handles.""" + + for fobj in self.file_objects: + fobj.close() + + def __call__(self): + """Runs the program.""" + + (cli, cli_args) = self.parse_cli() + + key = self.read_key(cli_args[0], cli.keyform) + + # Get the file handles + infile = self.get_infile(cli.input) + outfile = self.get_outfile(cli.output) + + # Call the operation + print(self.operation_progressive.title(), file=sys.stderr) + self.perform_operation(infile, outfile, key, cli_args) + + def get_infile(self, inname): + """Returns the input file object""" + + if inname: + print('Reading input from %s' % 
inname, file=sys.stderr) + fobj = open(inname, 'rb') + self.file_objects.append(fobj) + else: + print('Reading input from stdin', file=sys.stderr) + fobj = sys.stdin + + return fobj + + def get_outfile(self, outname): + """Returns the output file object""" + + if outname: + print('Will write output to %s' % outname, file=sys.stderr) + fobj = open(outname, 'wb') + self.file_objects.append(fobj) + else: + print('Will write output to stdout', file=sys.stderr) + fobj = sys.stdout + + return fobj + + +class EncryptBigfileOperation(BigfileOperation): + """Encrypts a file to VARBLOCK format.""" + + keyname = 'public' + description = ('Encrypts a file to an encrypted VARBLOCK file. The file ' + 'can be larger than the key length, but the output file is only ' + 'compatible with Python-RSA.') + operation = 'encrypt' + operation_past = 'encrypted' + operation_progressive = 'encrypting' + + def perform_operation(self, infile, outfile, pub_key, cli_args=None): + """Encrypts files to VARBLOCK.""" + + return rsa.bigfile.encrypt_bigfile(infile, outfile, pub_key) + + +class DecryptBigfileOperation(BigfileOperation): + """Decrypts a file in VARBLOCK format.""" + + keyname = 'private' + description = ('Decrypts an encrypted VARBLOCK file that was encrypted ' + 'with pyrsa-encrypt-bigfile') + operation = 'decrypt' + operation_past = 'decrypted' + operation_progressive = 'decrypting' + key_class = rsa.PrivateKey + + def perform_operation(self, infile, outfile, priv_key, cli_args=None): + """Decrypts a VARBLOCK file.""" + + return rsa.bigfile.decrypt_bigfile(infile, outfile, priv_key) + + +encrypt = EncryptOperation() +decrypt = DecryptOperation() +sign = SignOperation() +verify = VerifyOperation() +encrypt_bigfile = EncryptBigfileOperation() +decrypt_bigfile = DecryptBigfileOperation() diff --git a/rsa/common.py b/rsa/common.py new file mode 100644 index 0000000..e074334 --- /dev/null +++ b/rsa/common.py @@ -0,0 +1,188 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. 
Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Common functionality shared by several modules.""" + + +def bit_size(num): + """ + Number of bits needed to represent a integer excluding any prefix + 0 bits. + + As per definition from https://wiki.python.org/moin/BitManipulation and + to match the behavior of the Python 3 API. + + Usage:: + + >>> bit_size(1023) + 10 + >>> bit_size(1024) + 11 + >>> bit_size(1025) + 11 + + :param num: + Integer value. If num is 0, returns 0. Only the absolute value of the + number is considered. Therefore, signed integers will be abs(num) + before the number's bit length is determined. + :returns: + Returns the number of bits in the integer. + """ + if num == 0: + return 0 + if num < 0: + num = -num + + # Make sure this is an int and not a float. + num & 1 + + hex_num = "%x" % num + return ((len(hex_num) - 1) * 4) + { + '0': 0, '1': 1, '2': 2, '3': 2, + '4': 3, '5': 3, '6': 3, '7': 3, + '8': 4, '9': 4, 'a': 4, 'b': 4, + 'c': 4, 'd': 4, 'e': 4, 'f': 4, + }[hex_num[0]] + + +def _bit_size(number): + """ + Returns the number of bits required to hold a specific long number. + """ + if number < 0: + raise ValueError('Only nonnegative numbers possible: %s' % number) + + if number == 0: + return 0 + + # This works, even with very large numbers. When using math.log(number, 2), + # you'll get rounding errors and it'll fail. 
+ bits = 0 + while number: + bits += 1 + number >>= 1 + + return bits + + +def byte_size(number): + """ + Returns the number of bytes required to hold a specific long number. + + The number of bytes is rounded up. + + Usage:: + + >>> byte_size(1 << 1023) + 128 + >>> byte_size((1 << 1024) - 1) + 128 + >>> byte_size(1 << 1024) + 129 + + :param number: + An unsigned integer + :returns: + The number of bytes required to hold a specific long number. + """ + quanta, mod = divmod(bit_size(number), 8) + if mod or number == 0: + quanta += 1 + return quanta + # return int(math.ceil(bit_size(number) / 8.0)) + + +def extended_gcd(a, b): + """Returns a tuple (r, i, j) such that r = gcd(a, b) = ia + jb + """ + # r = gcd(a,b) i = multiplicitive inverse of a mod b + # or j = multiplicitive inverse of b mod a + # Neg return values for i or j are made positive mod b or a respectively + # Iterateive Version is faster and uses much less stack space + x = 0 + y = 1 + lx = 1 + ly = 0 + oa = a # Remember original a/b to remove + ob = b # negative values from return results + while b != 0: + q = a // b + (a, b) = (b, a % b) + (x, lx) = ((lx - (q * x)), x) + (y, ly) = ((ly - (q * y)), y) + if lx < 0: + lx += ob # If neg wrap modulo orignal b + if ly < 0: + ly += oa # If neg wrap modulo orignal a + return a, lx, ly # Return only positive values + + +def inverse(x, n): + """Returns x^-1 (mod n) + + >>> inverse(7, 4) + 3 + >>> (inverse(143, 4) * 143) % 4 + 1 + """ + + (divider, inv, _) = extended_gcd(x, n) + + if divider != 1: + raise ValueError("x (%d) and n (%d) are not relatively prime" % (x, n)) + + return inv + + +def crt(a_values, modulo_values): + """Chinese Remainder Theorem. + + Calculates x such that x = a[i] (mod m[i]) for each i. 
+ + :param a_values: the a-values of the above equation + :param modulo_values: the m-values of the above equation + :returns: x such that x = a[i] (mod m[i]) for each i + + + >>> crt([2, 3], [3, 5]) + 8 + + >>> crt([2, 3, 2], [3, 5, 7]) + 23 + + >>> crt([2, 3, 0], [7, 11, 15]) + 135 + """ + + m = 1 + x = 0 + + for modulo in modulo_values: + m *= modulo + + for (m_i, a_i) in zip(modulo_values, a_values): + M_i = m // m_i + inv = inverse(M_i, m_i) + + x = (x + a_i * M_i * inv) % m + + return x + + +if __name__ == '__main__': + import doctest + + doctest.testmod() diff --git a/rsa/core.py b/rsa/core.py new file mode 100644 index 0000000..b3114d9 --- /dev/null +++ b/rsa/core.py @@ -0,0 +1,57 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Core mathematical operations. + +This is the actual core RSA implementation, which is only defined +mathematically on integers. 
+""" + +from rsa._compat import is_integer + + +def assert_int(var, name): + if is_integer(var): + return + + raise TypeError('%s should be an integer, not %s' % (name, var.__class__)) + + +def encrypt_int(message, ekey, n): + """Encrypts a message using encryption key 'ekey', working modulo n""" + + assert_int(message, 'message') + assert_int(ekey, 'ekey') + assert_int(n, 'n') + + if message < 0: + raise ValueError('Only non-negative numbers are supported') + + if message > n: + raise OverflowError("The message %i is too long for n=%i" % (message, n)) + + return pow(message, ekey, n) + + +def decrypt_int(cyphertext, dkey, n): + """Decrypts a cypher text using the decryption key 'dkey', working modulo n""" + + assert_int(cyphertext, 'cyphertext') + assert_int(dkey, 'dkey') + assert_int(n, 'n') + + message = pow(cyphertext, dkey, n) + return message diff --git a/rsa/key.py b/rsa/key.py new file mode 100644 index 0000000..64600a2 --- /dev/null +++ b/rsa/key.py @@ -0,0 +1,739 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""RSA key generation code. + +Create new keys with the newkeys() function. It will give you a PublicKey and a +PrivateKey object. + +Loading and saving keys requires the pyasn1 module. This module is imported as +late as possible, such that other functionality will remain working in absence +of pyasn1. + +.. 
note:: + + Storing public and private keys via the `pickle` module is possible. + However, it is insecure to load a key from an untrusted source. + The pickle module is not secure against erroneous or maliciously + constructed data. Never unpickle data received from an untrusted + or unauthenticated source. + +""" + +import logging +from rsa._compat import b + +import rsa.prime +import rsa.pem +import rsa.common +import rsa.randnum +import rsa.core + +log = logging.getLogger(__name__) +DEFAULT_EXPONENT = 65537 + + +class AbstractKey(object): + """Abstract superclass for private and public keys.""" + + __slots__ = ('n', 'e') + + def __init__(self, n, e): + self.n = n + self.e = e + + @classmethod + def load_pkcs1(cls, keyfile, format='PEM'): + """Loads a key in PKCS#1 DER or PEM format. + + :param keyfile: contents of a DER- or PEM-encoded file that contains + the public key. + :param format: the format of the file to load; 'PEM' or 'DER' + + :return: a PublicKey object + """ + + methods = { + 'PEM': cls._load_pkcs1_pem, + 'DER': cls._load_pkcs1_der, + } + + method = cls._assert_format_exists(format, methods) + return method(keyfile) + + @staticmethod + def _assert_format_exists(file_format, methods): + """Checks whether the given file format exists in 'methods'. + """ + + try: + return methods[file_format] + except KeyError: + formats = ', '.join(sorted(methods.keys())) + raise ValueError('Unsupported format: %r, try one of %s' % (file_format, + formats)) + + def save_pkcs1(self, format='PEM'): + """Saves the public key in PKCS#1 DER or PEM format. + + :param format: the format to save; 'PEM' or 'DER' + :returns: the DER- or PEM-encoded public key. + """ + + methods = { + 'PEM': self._save_pkcs1_pem, + 'DER': self._save_pkcs1_der, + } + + method = self._assert_format_exists(format, methods) + return method() + + def blind(self, message, r): + """Performs blinding on the message using random number 'r'. + + :param message: the message, as integer, to blind. 
+ :type message: int + :param r: the random number to blind with. + :type r: int + :return: the blinded message. + :rtype: int + + The blinding is such that message = unblind(decrypt(blind(encrypt(message))). + + See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29 + """ + + return (message * pow(r, self.e, self.n)) % self.n + + def unblind(self, blinded, r): + """Performs blinding on the message using random number 'r'. + + :param blinded: the blinded message, as integer, to unblind. + :param r: the random number to unblind with. + :return: the original message. + + The blinding is such that message = unblind(decrypt(blind(encrypt(message))). + + See https://en.wikipedia.org/wiki/Blinding_%28cryptography%29 + """ + + return (rsa.common.inverse(r, self.n) * blinded) % self.n + + +class PublicKey(AbstractKey): + """Represents a public RSA key. + + This key is also known as the 'encryption key'. It contains the 'n' and 'e' + values. + + Supports attributes as well as dictionary-like access. Attribute accesss is + faster, though. + + >>> PublicKey(5, 3) + PublicKey(5, 3) + + >>> key = PublicKey(5, 3) + >>> key.n + 5 + >>> key['n'] + 5 + >>> key.e + 3 + >>> key['e'] + 3 + + """ + + __slots__ = ('n', 'e') + + def __getitem__(self, key): + return getattr(self, key) + + def __repr__(self): + return 'PublicKey(%i, %i)' % (self.n, self.e) + + def __getstate__(self): + """Returns the key as tuple for pickling.""" + return self.n, self.e + + def __setstate__(self, state): + """Sets the key from tuple.""" + self.n, self.e = state + + def __eq__(self, other): + if other is None: + return False + + if not isinstance(other, PublicKey): + return False + + return self.n == other.n and self.e == other.e + + def __ne__(self, other): + return not (self == other) + + @classmethod + def _load_pkcs1_der(cls, keyfile): + """Loads a key in PKCS#1 DER format. + + :param keyfile: contents of a DER-encoded file that contains the public + key. 
+ :return: a PublicKey object + + First let's construct a DER encoded key: + + >>> import base64 + >>> b64der = 'MAwCBQCNGmYtAgMBAAE=' + >>> der = base64.standard_b64decode(b64der) + + This loads the file: + + >>> PublicKey._load_pkcs1_der(der) + PublicKey(2367317549, 65537) + + """ + + from pyasn1.codec.der import decoder + from rsa.asn1 import AsnPubKey + + (priv, _) = decoder.decode(keyfile, asn1Spec=AsnPubKey()) + return cls(n=int(priv['modulus']), e=int(priv['publicExponent'])) + + def _save_pkcs1_der(self): + """Saves the public key in PKCS#1 DER format. + + @returns: the DER-encoded public key. + """ + + from pyasn1.codec.der import encoder + from rsa.asn1 import AsnPubKey + + # Create the ASN object + asn_key = AsnPubKey() + asn_key.setComponentByName('modulus', self.n) + asn_key.setComponentByName('publicExponent', self.e) + + return encoder.encode(asn_key) + + @classmethod + def _load_pkcs1_pem(cls, keyfile): + """Loads a PKCS#1 PEM-encoded public key file. + + The contents of the file before the "-----BEGIN RSA PUBLIC KEY-----" and + after the "-----END RSA PUBLIC KEY-----" lines is ignored. + + :param keyfile: contents of a PEM-encoded file that contains the public + key. + :return: a PublicKey object + """ + + der = rsa.pem.load_pem(keyfile, 'RSA PUBLIC KEY') + return cls._load_pkcs1_der(der) + + def _save_pkcs1_pem(self): + """Saves a PKCS#1 PEM-encoded public key file. + + :return: contents of a PEM-encoded file that contains the public key. + """ + + der = self._save_pkcs1_der() + return rsa.pem.save_pem(der, 'RSA PUBLIC KEY') + + @classmethod + def load_pkcs1_openssl_pem(cls, keyfile): + """Loads a PKCS#1.5 PEM-encoded public key file from OpenSSL. + + These files can be recognised in that they start with BEGIN PUBLIC KEY + rather than BEGIN RSA PUBLIC KEY. + + The contents of the file before the "-----BEGIN PUBLIC KEY-----" and + after the "-----END PUBLIC KEY-----" lines is ignored. 
+ + :param keyfile: contents of a PEM-encoded file that contains the public + key, from OpenSSL. + :return: a PublicKey object + """ + + der = rsa.pem.load_pem(keyfile, 'PUBLIC KEY') + return cls.load_pkcs1_openssl_der(der) + + @classmethod + def load_pkcs1_openssl_der(cls, keyfile): + """Loads a PKCS#1 DER-encoded public key file from OpenSSL. + + :param keyfile: contents of a DER-encoded file that contains the public + key, from OpenSSL. + :return: a PublicKey object + + """ + + from rsa.asn1 import OpenSSLPubKey + from pyasn1.codec.der import decoder + from pyasn1.type import univ + + (keyinfo, _) = decoder.decode(keyfile, asn1Spec=OpenSSLPubKey()) + + if keyinfo['header']['oid'] != univ.ObjectIdentifier('1.2.840.113549.1.1.1'): + raise TypeError("This is not a DER-encoded OpenSSL-compatible public key") + + return cls._load_pkcs1_der(keyinfo['key'][1:]) + + +class PrivateKey(AbstractKey): + """Represents a private RSA key. + + This key is also known as the 'decryption key'. It contains the 'n', 'e', + 'd', 'p', 'q' and other values. + + Supports attributes as well as dictionary-like access. Attribute accesss is + faster, though. + + >>> PrivateKey(3247, 65537, 833, 191, 17) + PrivateKey(3247, 65537, 833, 191, 17) + + exp1, exp2 and coef can be given, but if None or omitted they will be calculated: + + >>> pk = PrivateKey(3727264081, 65537, 3349121513, 65063, 57287, exp2=4) + >>> pk.exp1 + 55063 + >>> pk.exp2 # this is of course not a correct value, but it is the one we passed. 
+ 4 + >>> pk.coef + 50797 + + If you give exp1, exp2 or coef, they will be used as-is: + + >>> pk = PrivateKey(1, 2, 3, 4, 5, 6, 7, 8) + >>> pk.exp1 + 6 + >>> pk.exp2 + 7 + >>> pk.coef + 8 + + """ + + __slots__ = ('n', 'e', 'd', 'p', 'q', 'exp1', 'exp2', 'coef') + + def __init__(self, n, e, d, p, q, exp1=None, exp2=None, coef=None): + AbstractKey.__init__(self, n, e) + self.d = d + self.p = p + self.q = q + + # Calculate the other values if they aren't supplied + if exp1 is None: + self.exp1 = int(d % (p - 1)) + else: + self.exp1 = exp1 + + if exp2 is None: + self.exp2 = int(d % (q - 1)) + else: + self.exp2 = exp2 + + if coef is None: + self.coef = rsa.common.inverse(q, p) + else: + self.coef = coef + + def __getitem__(self, key): + return getattr(self, key) + + def __repr__(self): + return 'PrivateKey(%(n)i, %(e)i, %(d)i, %(p)i, %(q)i)' % self + + def __getstate__(self): + """Returns the key as tuple for pickling.""" + return self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef + + def __setstate__(self, state): + """Sets the key from tuple.""" + self.n, self.e, self.d, self.p, self.q, self.exp1, self.exp2, self.coef = state + + def __eq__(self, other): + if other is None: + return False + + if not isinstance(other, PrivateKey): + return False + + return (self.n == other.n and + self.e == other.e and + self.d == other.d and + self.p == other.p and + self.q == other.q and + self.exp1 == other.exp1 and + self.exp2 == other.exp2 and + self.coef == other.coef) + + def __ne__(self, other): + return not (self == other) + + def blinded_decrypt(self, encrypted): + """Decrypts the message using blinding to prevent side-channel attacks. 
+ + :param encrypted: the encrypted message + :type encrypted: int + + :returns: the decrypted message + :rtype: int + """ + + blind_r = rsa.randnum.randint(self.n - 1) + blinded = self.blind(encrypted, blind_r) # blind before decrypting + decrypted = rsa.core.decrypt_int(blinded, self.d, self.n) + + return self.unblind(decrypted, blind_r) + + def blinded_encrypt(self, message): + """Encrypts the message using blinding to prevent side-channel attacks. + + :param message: the message to encrypt + :type message: int + + :returns: the encrypted message + :rtype: int + """ + + blind_r = rsa.randnum.randint(self.n - 1) + blinded = self.blind(message, blind_r) # blind before encrypting + encrypted = rsa.core.encrypt_int(blinded, self.d, self.n) + return self.unblind(encrypted, blind_r) + + @classmethod + def _load_pkcs1_der(cls, keyfile): + """Loads a key in PKCS#1 DER format. + + :param keyfile: contents of a DER-encoded file that contains the private + key. + :return: a PrivateKey object + + First let's construct a DER encoded key: + + >>> import base64 + >>> b64der = 'MC4CAQACBQDeKYlRAgMBAAECBQDHn4npAgMA/icCAwDfxwIDANcXAgInbwIDAMZt' + >>> der = base64.standard_b64decode(b64der) + + This loads the file: + + >>> PrivateKey._load_pkcs1_der(der) + PrivateKey(3727264081, 65537, 3349121513, 65063, 57287) + + """ + + from pyasn1.codec.der import decoder + (priv, _) = decoder.decode(keyfile) + + # ASN.1 contents of DER encoded private key: + # + # RSAPrivateKey ::= SEQUENCE { + # version Version, + # modulus INTEGER, -- n + # publicExponent INTEGER, -- e + # privateExponent INTEGER, -- d + # prime1 INTEGER, -- p + # prime2 INTEGER, -- q + # exponent1 INTEGER, -- d mod (p-1) + # exponent2 INTEGER, -- d mod (q-1) + # coefficient INTEGER, -- (inverse of q) mod p + # otherPrimeInfos OtherPrimeInfos OPTIONAL + # } + + if priv[0] != 0: + raise ValueError('Unable to read this file, version %s != 0' % priv[0]) + + as_ints = tuple(int(x) for x in priv[1:9]) + return cls(*as_ints) + + 
def _save_pkcs1_der(self): + """Saves the private key in PKCS#1 DER format. + + @returns: the DER-encoded private key. + """ + + from pyasn1.type import univ, namedtype + from pyasn1.codec.der import encoder + + class AsnPrivKey(univ.Sequence): + componentType = namedtype.NamedTypes( + namedtype.NamedType('version', univ.Integer()), + namedtype.NamedType('modulus', univ.Integer()), + namedtype.NamedType('publicExponent', univ.Integer()), + namedtype.NamedType('privateExponent', univ.Integer()), + namedtype.NamedType('prime1', univ.Integer()), + namedtype.NamedType('prime2', univ.Integer()), + namedtype.NamedType('exponent1', univ.Integer()), + namedtype.NamedType('exponent2', univ.Integer()), + namedtype.NamedType('coefficient', univ.Integer()), + ) + + # Create the ASN object + asn_key = AsnPrivKey() + asn_key.setComponentByName('version', 0) + asn_key.setComponentByName('modulus', self.n) + asn_key.setComponentByName('publicExponent', self.e) + asn_key.setComponentByName('privateExponent', self.d) + asn_key.setComponentByName('prime1', self.p) + asn_key.setComponentByName('prime2', self.q) + asn_key.setComponentByName('exponent1', self.exp1) + asn_key.setComponentByName('exponent2', self.exp2) + asn_key.setComponentByName('coefficient', self.coef) + + return encoder.encode(asn_key) + + @classmethod + def _load_pkcs1_pem(cls, keyfile): + """Loads a PKCS#1 PEM-encoded private key file. + + The contents of the file before the "-----BEGIN RSA PRIVATE KEY-----" and + after the "-----END RSA PRIVATE KEY-----" lines is ignored. + + :param keyfile: contents of a PEM-encoded file that contains the private + key. + :return: a PrivateKey object + """ + + der = rsa.pem.load_pem(keyfile, b('RSA PRIVATE KEY')) + return cls._load_pkcs1_der(der) + + def _save_pkcs1_pem(self): + """Saves a PKCS#1 PEM-encoded private key file. + + :return: contents of a PEM-encoded file that contains the private key. 
+ """ + + der = self._save_pkcs1_der() + return rsa.pem.save_pem(der, b('RSA PRIVATE KEY')) + + +def find_p_q(nbits, getprime_func=rsa.prime.getprime, accurate=True): + """Returns a tuple of two different primes of nbits bits each. + + The resulting p * q has exacty 2 * nbits bits, and the returned p and q + will not be equal. + + :param nbits: the number of bits in each of p and q. + :param getprime_func: the getprime function, defaults to + :py:func:`rsa.prime.getprime`. + + *Introduced in Python-RSA 3.1* + + :param accurate: whether to enable accurate mode or not. + :returns: (p, q), where p > q + + >>> (p, q) = find_p_q(128) + >>> from rsa import common + >>> common.bit_size(p * q) + 256 + + When not in accurate mode, the number of bits can be slightly less + + >>> (p, q) = find_p_q(128, accurate=False) + >>> from rsa import common + >>> common.bit_size(p * q) <= 256 + True + >>> common.bit_size(p * q) > 240 + True + + """ + + total_bits = nbits * 2 + + # Make sure that p and q aren't too close or the factoring programs can + # factor n. + shift = nbits // 16 + pbits = nbits + shift + qbits = nbits - shift + + # Choose the two initial primes + log.debug('find_p_q(%i): Finding p', nbits) + p = getprime_func(pbits) + log.debug('find_p_q(%i): Finding q', nbits) + q = getprime_func(qbits) + + def is_acceptable(p, q): + """Returns True iff p and q are acceptable: + + - p and q differ + - (p * q) has the right nr of bits (when accurate=True) + """ + + if p == q: + return False + + if not accurate: + return True + + # Make sure we have just the right amount of bits + found_size = rsa.common.bit_size(p * q) + return total_bits == found_size + + # Keep choosing other primes until they match our requirements. 
+ change_p = False + while not is_acceptable(p, q): + # Change p on one iteration and q on the other + if change_p: + p = getprime_func(pbits) + else: + q = getprime_func(qbits) + + change_p = not change_p + + # We want p > q as described on + # http://www.di-mgt.com.au/rsa_alg.html#crt + return max(p, q), min(p, q) + + +def calculate_keys_custom_exponent(p, q, exponent): + """Calculates an encryption and a decryption key given p, q and an exponent, + and returns them as a tuple (e, d) + + :param p: the first large prime + :param q: the second large prime + :param exponent: the exponent for the key; only change this if you know + what you're doing, as the exponent influences how difficult your + private key can be cracked. A very common choice for e is 65537. + :type exponent: int + + """ + + phi_n = (p - 1) * (q - 1) + + try: + d = rsa.common.inverse(exponent, phi_n) + except ValueError: + raise ValueError("e (%d) and phi_n (%d) are not relatively prime" % + (exponent, phi_n)) + + if (exponent * d) % phi_n != 1: + raise ValueError("e (%d) and d (%d) are not mult. inv. modulo " + "phi_n (%d)" % (exponent, d, phi_n)) + + return exponent, d + + +def calculate_keys(p, q): + """Calculates an encryption and a decryption key given p and q, and + returns them as a tuple (e, d) + + :param p: the first large prime + :param q: the second large prime + + :return: tuple (e, d) with the encryption and decryption exponents. + """ + + return calculate_keys_custom_exponent(p, q, DEFAULT_EXPONENT) + + +def gen_keys(nbits, getprime_func, accurate=True, exponent=DEFAULT_EXPONENT): + """Generate RSA keys of nbits bits. Returns (p, q, e, d). + + Note: this can take a long time, depending on the key size. + + :param nbits: the total number of bits in ``p`` and ``q``. Both ``p`` and + ``q`` will use ``nbits/2`` bits. + :param getprime_func: either :py:func:`rsa.prime.getprime` or a function + with similar signature. 
+ :param exponent: the exponent for the key; only change this if you know + what you're doing, as the exponent influences how difficult your + private key can be cracked. A very common choice for e is 65537. + :type exponent: int + """ + + # Regenerate p and q values, until calculate_keys doesn't raise a + # ValueError. + while True: + (p, q) = find_p_q(nbits // 2, getprime_func, accurate) + try: + (e, d) = calculate_keys_custom_exponent(p, q, exponent=exponent) + break + except ValueError: + pass + + return p, q, e, d + + +def newkeys(nbits, accurate=True, poolsize=1, exponent=DEFAULT_EXPONENT): + """Generates public and private keys, and returns them as (pub, priv). + + The public key is also known as the 'encryption key', and is a + :py:class:`rsa.PublicKey` object. The private key is also known as the + 'decryption key' and is a :py:class:`rsa.PrivateKey` object. + + :param nbits: the number of bits required to store ``n = p*q``. + :param accurate: when True, ``n`` will have exactly the number of bits you + asked for. However, this makes key generation much slower. When False, + `n`` may have slightly less bits. + :param poolsize: the number of processes to use to generate the prime + numbers. If set to a number > 1, a parallel algorithm will be used. + This requires Python 2.6 or newer. + :param exponent: the exponent for the key; only change this if you know + what you're doing, as the exponent influences how difficult your + private key can be cracked. A very common choice for e is 65537. + :type exponent: int + + :returns: a tuple (:py:class:`rsa.PublicKey`, :py:class:`rsa.PrivateKey`) + + The ``poolsize`` parameter was added in *Python-RSA 3.1* and requires + Python 2.6 or newer. 
+ + """ + + if nbits < 16: + raise ValueError('Key too small') + + if poolsize < 1: + raise ValueError('Pool size (%i) should be >= 1' % poolsize) + + # Determine which getprime function to use + if poolsize > 1: + from rsa import parallel + import functools + + getprime_func = functools.partial(parallel.getprime, poolsize=poolsize) + else: + getprime_func = rsa.prime.getprime + + # Generate the key components + (p, q, e, d) = gen_keys(nbits, getprime_func, accurate=accurate, exponent=exponent) + + # Create the key objects + n = p * q + + return ( + PublicKey(n, e), + PrivateKey(n, e, d, p, q) + ) + + +__all__ = ['PublicKey', 'PrivateKey', 'newkeys'] + +if __name__ == '__main__': + import doctest + + try: + for count in range(100): + (failures, tests) = doctest.testmod() + if failures: + break + + if (count and count % 10 == 0) or count == 1: + print('%i times' % count) + except KeyboardInterrupt: + print('Aborted') + else: + print('Doctests done') diff --git a/rsa/parallel.py b/rsa/parallel.py new file mode 100644 index 0000000..edc924f --- /dev/null +++ b/rsa/parallel.py @@ -0,0 +1,100 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions for parallel computation on multiple cores. + +Introduced in Python-RSA 3.1. + +.. note:: + + Requires Python 2.6 or newer. 
+ +""" + +from __future__ import print_function + +import multiprocessing as mp + +import rsa.prime +import rsa.randnum + + +def _find_prime(nbits, pipe): + while True: + integer = rsa.randnum.read_random_odd_int(nbits) + + # Test for primeness + if rsa.prime.is_prime(integer): + pipe.send(integer) + return + + +def getprime(nbits, poolsize): + """Returns a prime number that can be stored in 'nbits' bits. + + Works in multiple threads at the same time. + + >>> p = getprime(128, 3) + >>> rsa.prime.is_prime(p-1) + False + >>> rsa.prime.is_prime(p) + True + >>> rsa.prime.is_prime(p+1) + False + + >>> from rsa import common + >>> common.bit_size(p) == 128 + True + + """ + + (pipe_recv, pipe_send) = mp.Pipe(duplex=False) + + # Create processes + try: + procs = [mp.Process(target=_find_prime, args=(nbits, pipe_send)) + for _ in range(poolsize)] + # Start processes + for p in procs: + p.start() + + result = pipe_recv.recv() + finally: + pipe_recv.close() + pipe_send.close() + + # Terminate processes + for p in procs: + p.terminate() + + return result + + +__all__ = ['getprime'] + +if __name__ == '__main__': + print('Running doctests 1000x or until failure') + import doctest + + for count in range(100): + (failures, tests) = doctest.testmod() + if failures: + break + + if count and count % 10 == 0: + print('%i times' % count) + + print('Doctests done') diff --git a/rsa/pem.py b/rsa/pem.py new file mode 100644 index 0000000..0f68cb2 --- /dev/null +++ b/rsa/pem.py @@ -0,0 +1,125 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions that load and write PEM-encoded files.""" + +import base64 +from rsa._compat import b, is_bytes + + +def _markers(pem_marker): + """ + Returns the start and end PEM markers + """ + + if is_bytes(pem_marker): + pem_marker = pem_marker.decode('utf-8') + + return (b('-----BEGIN %s-----' % pem_marker), + b('-----END %s-----' % pem_marker)) + + +def load_pem(contents, pem_marker): + """Loads a PEM file. + + :param contents: the contents of the file to interpret + :param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' + when your file has '-----BEGIN RSA PRIVATE KEY-----' and + '-----END RSA PRIVATE KEY-----' markers. + + :return: the base64-decoded content between the start and end markers. + + @raise ValueError: when the content is invalid, for example when the start + marker cannot be found. + + """ + + # We want bytes, not text. If it's text, it can be converted to ASCII bytes. 
+ if not is_bytes(contents): + contents = contents.encode('ascii') + + (pem_start, pem_end) = _markers(pem_marker) + + pem_lines = [] + in_pem_part = False + + for line in contents.splitlines(): + line = line.strip() + + # Skip empty lines + if not line: + continue + + # Handle start marker + if line == pem_start: + if in_pem_part: + raise ValueError('Seen start marker "%s" twice' % pem_start) + + in_pem_part = True + continue + + # Skip stuff before first marker + if not in_pem_part: + continue + + # Handle end marker + if in_pem_part and line == pem_end: + in_pem_part = False + break + + # Load fields + if b(':') in line: + continue + + pem_lines.append(line) + + # Do some sanity checks + if not pem_lines: + raise ValueError('No PEM start marker "%s" found' % pem_start) + + if in_pem_part: + raise ValueError('No PEM end marker "%s" found' % pem_end) + + # Base64-decode the contents + pem = b('').join(pem_lines) + return base64.standard_b64decode(pem) + + +def save_pem(contents, pem_marker): + """Saves a PEM file. + + :param contents: the contents to encode in PEM format + :param pem_marker: the marker of the PEM content, such as 'RSA PRIVATE KEY' + when your file has '-----BEGIN RSA PRIVATE KEY-----' and + '-----END RSA PRIVATE KEY-----' markers. + + :return: the base64-encoded content between the start and end markers. + + """ + + (pem_start, pem_end) = _markers(pem_marker) + + b64 = base64.standard_b64encode(contents).replace(b('\n'), b('')) + pem_lines = [pem_start] + + for block_start in range(0, len(b64), 64): + block = b64[block_start:block_start + 64] + pem_lines.append(block) + + pem_lines.append(pem_end) + pem_lines.append(b('')) + + return b('\n').join(pem_lines) diff --git a/rsa/pkcs1.py b/rsa/pkcs1.py new file mode 100644 index 0000000..28f0dc5 --- /dev/null +++ b/rsa/pkcs1.py @@ -0,0 +1,381 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. 
Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions for PKCS#1 version 1.5 encryption and signing + +This module implements certain functionality from PKCS#1 version 1.5. For a +very clear example, read http://www.di-mgt.com.au/rsa_alg.html#pkcs1schemes + +At least 8 bytes of random padding is used when encrypting a message. This makes +these methods much more secure than the ones in the ``rsa`` module. + +WARNING: this module leaks information when decryption fails. The exceptions +that are raised contain the Python traceback information, which can be used to +deduce where in the process the failure occurred. DO NOT PASS SUCH INFORMATION +to your users. +""" + +import hashlib +import os + +from rsa._compat import b +from rsa import common, transform, core + +# ASN.1 codes that describe the hash algorithm used. 
+HASH_ASN1 = { + 'MD5': b('\x30\x20\x30\x0c\x06\x08\x2a\x86\x48\x86\xf7\x0d\x02\x05\x05\x00\x04\x10'), + 'SHA-1': b('\x30\x21\x30\x09\x06\x05\x2b\x0e\x03\x02\x1a\x05\x00\x04\x14'), + 'SHA-256': b('\x30\x31\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x01\x05\x00\x04\x20'), + 'SHA-384': b('\x30\x41\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x02\x05\x00\x04\x30'), + 'SHA-512': b('\x30\x51\x30\x0d\x06\x09\x60\x86\x48\x01\x65\x03\x04\x02\x03\x05\x00\x04\x40'), +} + +HASH_METHODS = { + 'MD5': hashlib.md5, + 'SHA-1': hashlib.sha1, + 'SHA-256': hashlib.sha256, + 'SHA-384': hashlib.sha384, + 'SHA-512': hashlib.sha512, +} + + +class CryptoError(Exception): + """Base class for all exceptions in this module.""" + + +class DecryptionError(CryptoError): + """Raised when decryption fails.""" + + +class VerificationError(CryptoError): + """Raised when verification fails.""" + + +def _pad_for_encryption(message, target_length): + r"""Pads the message for encryption, returning the padded message. + + :return: 00 02 RANDOM_DATA 00 MESSAGE + + >>> block = _pad_for_encryption(b'hello', 16) + >>> len(block) + 16 + >>> block[0:2] + b'\x00\x02' + >>> block[-6:] + b'\x00hello' + + """ + + max_msglength = target_length - 11 + msglength = len(message) + + if msglength > max_msglength: + raise OverflowError('%i bytes needed for message, but there is only' + ' space for %i' % (msglength, max_msglength)) + + # Get random padding + padding = b('') + padding_length = target_length - msglength - 3 + + # We remove 0-bytes, so we'll end up with less padding than we've asked for, + # so keep adding data until we're at the correct length. + while len(padding) < padding_length: + needed_bytes = padding_length - len(padding) + + # Always read at least 8 bytes more than we need, and trim off the rest + # after removing the 0-bytes. 
This increases the chance of getting + # enough bytes, especially when needed_bytes is small + new_padding = os.urandom(needed_bytes + 5) + new_padding = new_padding.replace(b('\x00'), b('')) + padding = padding + new_padding[:needed_bytes] + + assert len(padding) == padding_length + + return b('').join([b('\x00\x02'), + padding, + b('\x00'), + message]) + + +def _pad_for_signing(message, target_length): + r"""Pads the message for signing, returning the padded message. + + The padding is always a repetition of FF bytes. + + :return: 00 01 PADDING 00 MESSAGE + + >>> block = _pad_for_signing(b'hello', 16) + >>> len(block) + 16 + >>> block[0:2] + b'\x00\x01' + >>> block[-6:] + b'\x00hello' + >>> block[2:-6] + b'\xff\xff\xff\xff\xff\xff\xff\xff' + + """ + + max_msglength = target_length - 11 + msglength = len(message) + + if msglength > max_msglength: + raise OverflowError('%i bytes needed for message, but there is only' + ' space for %i' % (msglength, max_msglength)) + + padding_length = target_length - msglength - 3 + + return b('').join([b('\x00\x01'), + padding_length * b('\xff'), + b('\x00'), + message]) + + +def encrypt(message, pub_key): + """Encrypts the given message using PKCS#1 v1.5 + + :param message: the message to encrypt. Must be a byte string no longer than + ``k-11`` bytes, where ``k`` is the number of bytes needed to encode + the ``n`` component of the public key. + :param pub_key: the :py:class:`rsa.PublicKey` to encrypt with. + :raise OverflowError: when the message is too large to fit in the padded + block. 
+ + >>> from rsa import key, common + >>> (pub_key, priv_key) = key.newkeys(256) + >>> message = b'hello' + >>> crypto = encrypt(message, pub_key) + + The crypto text should be just as long as the public key 'n' component: + + >>> len(crypto) == common.byte_size(pub_key.n) + True + + """ + + keylength = common.byte_size(pub_key.n) + padded = _pad_for_encryption(message, keylength) + + payload = transform.bytes2int(padded) + encrypted = core.encrypt_int(payload, pub_key.e, pub_key.n) + block = transform.int2bytes(encrypted, keylength) + + return block + + +def decrypt(crypto, priv_key): + r"""Decrypts the given message using PKCS#1 v1.5 + + The decryption is considered 'failed' when the resulting cleartext doesn't + start with the bytes 00 02, or when the 00 byte between the padding and + the message cannot be found. + + :param crypto: the crypto text as returned by :py:func:`rsa.encrypt` + :param priv_key: the :py:class:`rsa.PrivateKey` to decrypt with. + :raise DecryptionError: when the decryption fails. No details are given as + to why the code thinks the decryption fails, as this would leak + information about the private key. + + + >>> import rsa + >>> (pub_key, priv_key) = rsa.newkeys(256) + + It works with strings: + + >>> crypto = encrypt(b'hello', pub_key) + >>> decrypt(crypto, priv_key) + b'hello' + + And with binary data: + + >>> crypto = encrypt(b'\x00\x00\x00\x00\x01', pub_key) + >>> decrypt(crypto, priv_key) + b'\x00\x00\x00\x00\x01' + + Altering the encrypted information will *likely* cause a + :py:class:`rsa.pkcs1.DecryptionError`. If you want to be *sure*, use + :py:func:`rsa.sign`. + + + .. warning:: + + Never display the stack trace of a + :py:class:`rsa.pkcs1.DecryptionError` exception. It shows where in the + code the exception occurred, and thus leaks information about the key. + It's only a tiny bit of information, but every bit makes cracking the + keys easier. 
+ + >>> crypto = encrypt(b'hello', pub_key) + >>> crypto = crypto[0:5] + b'X' + crypto[6:] # change a byte + >>> decrypt(crypto, priv_key) + Traceback (most recent call last): + ... + rsa.pkcs1.DecryptionError: Decryption failed + + """ + + blocksize = common.byte_size(priv_key.n) + encrypted = transform.bytes2int(crypto) + decrypted = priv_key.blinded_decrypt(encrypted) + cleartext = transform.int2bytes(decrypted, blocksize) + + # If we can't find the cleartext marker, decryption failed. + if cleartext[0:2] != b('\x00\x02'): + raise DecryptionError('Decryption failed') + + # Find the 00 separator between the padding and the message + try: + sep_idx = cleartext.index(b('\x00'), 2) + except ValueError: + raise DecryptionError('Decryption failed') + + return cleartext[sep_idx + 1:] + + +def sign(message, priv_key, hash): + """Signs the message with the private key. + + Hashes the message, then signs the hash with the given key. This is known + as a "detached signature", because the message itself isn't altered. + + :param message: the message to sign. Can be an 8-bit string or a file-like + object. If ``message`` has a ``read()`` method, it is assumed to be a + file-like object. + :param priv_key: the :py:class:`rsa.PrivateKey` to sign with + :param hash: the hash method used on the message. Use 'MD5', 'SHA-1', + 'SHA-256', 'SHA-384' or 'SHA-512'. + :return: a message signature block. + :raise OverflowError: if the private key is too small to contain the + requested hash. 
+ + """ + + # Get the ASN1 code for this hash method + if hash not in HASH_ASN1: + raise ValueError('Invalid hash method: %s' % hash) + asn1code = HASH_ASN1[hash] + + # Calculate the hash + hash = _hash(message, hash) + + # Encrypt the hash with the private key + cleartext = asn1code + hash + keylength = common.byte_size(priv_key.n) + padded = _pad_for_signing(cleartext, keylength) + + payload = transform.bytes2int(padded) + encrypted = priv_key.blinded_encrypt(payload) + block = transform.int2bytes(encrypted, keylength) + + return block + + +def verify(message, signature, pub_key): + """Verifies that the signature matches the message. + + The hash method is detected automatically from the signature. + + :param message: the signed message. Can be an 8-bit string or a file-like + object. If ``message`` has a ``read()`` method, it is assumed to be a + file-like object. + :param signature: the signature block, as created with :py:func:`rsa.sign`. + :param pub_key: the :py:class:`rsa.PublicKey` of the person signing the message. + :raise VerificationError: when the signature doesn't match the message. + + """ + + keylength = common.byte_size(pub_key.n) + encrypted = transform.bytes2int(signature) + decrypted = core.decrypt_int(encrypted, pub_key.e, pub_key.n) + clearsig = transform.int2bytes(decrypted, keylength) + + # Get the hash method + method_name = _find_method_hash(clearsig) + message_hash = _hash(message, method_name) + + # Reconstruct the expected padded hash + cleartext = HASH_ASN1[method_name] + message_hash + expected = _pad_for_signing(cleartext, keylength) + + # Compare with the signed one + if expected != clearsig: + raise VerificationError('Verification failed') + + return True + + +def _hash(message, method_name): + """Returns the message digest. + + :param message: the signed message. Can be an 8-bit string or a file-like + object. If ``message`` has a ``read()`` method, it is assumed to be a + file-like object. 
+ :param method_name: the hash method, must be a key of + :py:const:`HASH_METHODS`. + + """ + + if method_name not in HASH_METHODS: + raise ValueError('Invalid hash method: %s' % method_name) + + method = HASH_METHODS[method_name] + hasher = method() + + if hasattr(message, 'read') and hasattr(message.read, '__call__'): + # Late import to prevent DeprecationWarnings. + from . import varblock + + # read as 1K blocks + for block in varblock.yield_fixedblocks(message, 1024): + hasher.update(block) + else: + # hash the message object itself. + hasher.update(message) + + return hasher.digest() + + +def _find_method_hash(clearsig): + """Finds the hash method. + + :param clearsig: full padded ASN1 and hash. + :return: the used hash method. + :raise VerificationFailed: when the hash method cannot be found + """ + + for (hashname, asn1code) in HASH_ASN1.items(): + if asn1code in clearsig: + return hashname + + raise VerificationError('Verification failed') + + +__all__ = ['encrypt', 'decrypt', 'sign', 'verify', + 'DecryptionError', 'VerificationError', 'CryptoError'] + +if __name__ == '__main__': + print('Running doctests 1000x or until failure') + import doctest + + for count in range(1000): + (failures, tests) = doctest.testmod() + if failures: + break + + if count and count % 100 == 0: + print('%i times' % count) + + print('Doctests done') diff --git a/rsa/prime.py b/rsa/prime.py new file mode 100644 index 0000000..6f23f9d --- /dev/null +++ b/rsa/prime.py @@ -0,0 +1,178 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Numerical functions related to primes. + +Implementation based on the book Algorithm Design by Michael T. Goodrich and +Roberto Tamassia, 2002. +""" + +import rsa.randnum + +__all__ = ['getprime', 'are_relatively_prime'] + + +def gcd(p, q): + """Returns the greatest common divisor of p and q + + >>> gcd(48, 180) + 12 + """ + + while q != 0: + (p, q) = (q, p % q) + return p + + +def miller_rabin_primality_testing(n, k): + """Calculates whether n is composite (which is always correct) or prime + (which theoretically is incorrect with error probability 4**-k), by + applying Miller-Rabin primality testing. + + For reference and implementation example, see: + https://en.wikipedia.org/wiki/Miller%E2%80%93Rabin_primality_test + + :param n: Integer to be tested for primality. + :type n: int + :param k: Number of rounds (witnesses) of Miller-Rabin testing. + :type k: int + :return: False if the number is composite, True if it's probably prime. + :rtype: bool + """ + + # prevent potential infinite loop when d = 0 + if n < 2: + return False + + # Decompose (n - 1) to write it as (2 ** r) * d + # While d is even, divide it by 2 and increase the exponent. + d = n - 1 + r = 0 + + while not (d & 1): + r += 1 + d >>= 1 + + # Test k witnesses. + for _ in range(k): + # Generate random integer a, where 2 <= a <= (n - 2) + a = rsa.randnum.randint(n - 4) + 2 + + x = pow(a, d, n) + if x == 1 or x == n - 1: + continue + + for _ in range(r - 1): + x = pow(x, 2, n) + if x == 1: + # n is composite. 
+ return False + if x == n - 1: + # Exit inner loop and continue with next witness. + break + else: + # If loop doesn't break, n is composite. + return False + + return True + + +def is_prime(number): + """Returns True if the number is prime, and False otherwise. + + >>> is_prime(2) + True + >>> is_prime(42) + False + >>> is_prime(41) + True + >>> [x for x in range(901, 1000) if is_prime(x)] + [907, 911, 919, 929, 937, 941, 947, 953, 967, 971, 977, 983, 991, 997] + """ + + # Check for small numbers. + if number < 10: + return number in [2, 3, 5, 7] + + # Check for even numbers. + if not (number & 1): + return False + + # According to NIST FIPS 186-4, Appendix C, Table C.3, minimum number of + # rounds of M-R testing, using an error probability of 2 ** (-100), for + # different p, q bitsizes are: + # * p, q bitsize: 512; rounds: 7 + # * p, q bitsize: 1024; rounds: 4 + # * p, q bitsize: 1536; rounds: 3 + # See: http://nvlpubs.nist.gov/nistpubs/FIPS/NIST.FIPS.186-4.pdf + return miller_rabin_primality_testing(number, 7) + + +def getprime(nbits): + """Returns a prime number that can be stored in 'nbits' bits. + + >>> p = getprime(128) + >>> is_prime(p-1) + False + >>> is_prime(p) + True + >>> is_prime(p+1) + False + + >>> from rsa import common + >>> common.bit_size(p) == 128 + True + """ + + assert nbits > 3 # the loop wil hang on too small numbers + + while True: + integer = rsa.randnum.read_random_odd_int(nbits) + + # Test for primeness + if is_prime(integer): + return integer + + # Retry if not prime + + +def are_relatively_prime(a, b): + """Returns True if a and b are relatively prime, and False if they + are not. 
+ + >>> are_relatively_prime(2, 3) + True + >>> are_relatively_prime(2, 4) + False + """ + + d = gcd(a, b) + return d == 1 + + +if __name__ == '__main__': + print('Running doctests 1000x or until failure') + import doctest + + for count in range(1000): + (failures, tests) = doctest.testmod() + if failures: + break + + if count and count % 100 == 0: + print('%i times' % count) + + print('Doctests done') diff --git a/rsa/randnum.py b/rsa/randnum.py new file mode 100644 index 0000000..3c788a5 --- /dev/null +++ b/rsa/randnum.py @@ -0,0 +1,98 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Functions for generating random numbers.""" + +# Source inspired by code by Yesudeep Mangalapilly + +import os + +from rsa import common, transform +from rsa._compat import byte + + +def read_random_bits(nbits): + """Reads 'nbits' random bits. + + If nbits isn't a whole number of bytes, an extra byte will be appended with + only the lower bits set. + """ + + nbytes, rbits = divmod(nbits, 8) + + # Get the random bytes + randomdata = os.urandom(nbytes) + + # Add the remaining random bits + if rbits > 0: + randomvalue = ord(os.urandom(1)) + randomvalue >>= (8 - rbits) + randomdata = byte(randomvalue) + randomdata + + return randomdata + + +def read_random_int(nbits): + """Reads a random integer of approximately nbits bits. 
+ """ + + randomdata = read_random_bits(nbits) + value = transform.bytes2int(randomdata) + + # Ensure that the number is large enough to just fill out the required + # number of bits. + value |= 1 << (nbits - 1) + + return value + + +def read_random_odd_int(nbits): + """Reads a random odd integer of approximately nbits bits. + + >>> read_random_odd_int(512) & 1 + 1 + """ + + value = read_random_int(nbits) + + # Make sure it's odd + return value | 1 + + +def randint(maxvalue): + """Returns a random integer x with 1 <= x <= maxvalue + + May take a very long time in specific situations. If maxvalue needs N bits + to store, the closer maxvalue is to (2 ** N) - 1, the faster this function + is. + """ + + bit_size = common.bit_size(maxvalue) + + tries = 0 + while True: + value = read_random_int(bit_size) + if value <= maxvalue: + break + + if tries and tries % 10 == 0: + # After a lot of tries to get the right number of bits but still + # smaller than maxvalue, decrease the number of bits by 1. That'll + # dramatically increase the chances to get a large enough number. + bit_size -= 1 + tries += 1 + + return value diff --git a/rsa/transform.py b/rsa/transform.py new file mode 100644 index 0000000..16061a9 --- /dev/null +++ b/rsa/transform.py @@ -0,0 +1,224 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Data transformation functions. + +From bytes to a number, number to bytes, etc. 
+""" + +from __future__ import absolute_import + +try: + # We'll use psyco if available on 32-bit architectures to speed up code. + # Using psyco (if available) cuts down the execution time on Python 2.5 + # at least by half. + import psyco + + psyco.full() +except ImportError: + pass + +import binascii +from struct import pack +from rsa import common +from rsa._compat import is_integer, b, byte, get_word_alignment, ZERO_BYTE, EMPTY_BYTE + + +def bytes2int(raw_bytes): + r"""Converts a list of bytes or an 8-bit string to an integer. + + When using unicode strings, encode it to some encoding like UTF8 first. + + >>> (((128 * 256) + 64) * 256) + 15 + 8405007 + >>> bytes2int(b'\x80@\x0f') + 8405007 + + """ + + return int(binascii.hexlify(raw_bytes), 16) + + +def _int2bytes(number, block_size=None): + r"""Converts a number to a string of bytes. + + Usage:: + + >>> _int2bytes(123456789) + b'\x07[\xcd\x15' + >>> bytes2int(_int2bytes(123456789)) + 123456789 + + >>> _int2bytes(123456789, 6) + b'\x00\x00\x07[\xcd\x15' + >>> bytes2int(_int2bytes(123456789, 128)) + 123456789 + + >>> _int2bytes(123456789, 3) + Traceback (most recent call last): + ... + OverflowError: Needed 4 bytes for number, but block size is 3 + + @param number: the number to convert + @param block_size: the number of bytes to output. If the number encoded to + bytes is less than this, the block will be zero-padded. When not given, + the returned block is not padded. + + @throws OverflowError when block_size is given and the number takes up more + bytes than fit into the block. + """ + + # Type checking + if not is_integer(number): + raise TypeError("You must pass an integer for 'number', not %s" % + number.__class__) + + if number < 0: + raise ValueError('Negative numbers cannot be used: %i' % number) + + # Do some bounds checking + if number == 0: + needed_bytes = 1 + raw_bytes = [ZERO_BYTE] + else: + needed_bytes = common.byte_size(number) + raw_bytes = [] + + # You cannot compare None > 0 in Python 3x. 
It will fail with a TypeError. + if block_size and block_size > 0: + if needed_bytes > block_size: + raise OverflowError('Needed %i bytes for number, but block size ' + 'is %i' % (needed_bytes, block_size)) + + # Convert the number to bytes. + while number > 0: + raw_bytes.insert(0, byte(number & 0xFF)) + number >>= 8 + + # Pad with zeroes to fill the block + if block_size and block_size > 0: + padding = (block_size - needed_bytes) * ZERO_BYTE + else: + padding = EMPTY_BYTE + + return padding + EMPTY_BYTE.join(raw_bytes) + + +def bytes_leading(raw_bytes, needle=ZERO_BYTE): + """ + Finds the number of prefixed byte occurrences in the haystack. + + Useful when you want to deal with padding. + + :param raw_bytes: + Raw bytes. + :param needle: + The byte to count. Default \000. + :returns: + The number of leading needle bytes. + """ + + leading = 0 + # Indexing keeps compatibility between Python 2.x and Python 3.x + _byte = needle[0] + for x in raw_bytes: + if x == _byte: + leading += 1 + else: + break + return leading + + +def int2bytes(number, fill_size=None, chunk_size=None, overflow=False): + """ + Convert an unsigned integer to bytes (base-256 representation):: + + Does not preserve leading zeros if you don't specify a chunk size or + fill size. + + .. NOTE: + You must not specify both fill_size and chunk_size. Only one + of them is allowed. + + :param number: + Integer value + :param fill_size: + If the optional fill size is given the length of the resulting + byte string is expected to be the fill size and will be padded + with prefix zero bytes to satisfy that length. + :param chunk_size: + If optional chunk size is given and greater than zero, pad the front of + the byte string with binary zeros so that the length is a multiple of + ``chunk_size``. + :param overflow: + ``False`` (default). If this is ``True``, no ``OverflowError`` + will be raised when the fill_size is shorter than the length + of the generated byte sequence. 
Instead the byte sequence will + be returned as is. + :returns: + Raw bytes (base-256 representation). + :raises: + ``OverflowError`` when fill_size is given and the number takes up more + bytes than fit into the block. This requires the ``overflow`` + argument to this function to be set to ``False`` otherwise, no + error will be raised. + """ + + if number < 0: + raise ValueError("Number must be an unsigned integer: %d" % number) + + if fill_size and chunk_size: + raise ValueError("You can either fill or pad chunks, but not both") + + # Ensure these are integers. + number & 1 + + raw_bytes = b('') + + # Pack the integer one machine word at a time into bytes. + num = number + word_bits, _, max_uint, pack_type = get_word_alignment(num) + pack_format = ">%s" % pack_type + while num > 0: + raw_bytes = pack(pack_format, num & max_uint) + raw_bytes + num >>= word_bits + # Obtain the index of the first non-zero byte. + zero_leading = bytes_leading(raw_bytes) + if number == 0: + raw_bytes = ZERO_BYTE + # De-padding. + raw_bytes = raw_bytes[zero_leading:] + + length = len(raw_bytes) + if fill_size and fill_size > 0: + if not overflow and length > fill_size: + raise OverflowError( + "Need %d bytes for number, but fill size is %d" % + (length, fill_size) + ) + raw_bytes = raw_bytes.rjust(fill_size, ZERO_BYTE) + elif chunk_size and chunk_size > 0: + remainder = length % chunk_size + if remainder: + padding_size = chunk_size - remainder + raw_bytes = raw_bytes.rjust(length + padding_size, ZERO_BYTE) + return raw_bytes + + +if __name__ == '__main__': + import doctest + + doctest.testmod() diff --git a/rsa/util.py b/rsa/util.py new file mode 100644 index 0000000..29d5eb1 --- /dev/null +++ b/rsa/util.py @@ -0,0 +1,79 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""Utility functions.""" + +from __future__ import with_statement, print_function + +import sys +from optparse import OptionParser + +import rsa.key + + +def private_to_public(): + """Reads a private key and outputs the corresponding public key.""" + + # Parse the CLI options + parser = OptionParser(usage='usage: %prog [options]', + description='Reads a private key and outputs the ' + 'corresponding public key. Both private and public keys use ' + 'the format described in PKCS#1 v1.5') + + parser.add_option('-i', '--input', dest='infilename', type='string', + help='Input filename. Reads from stdin if not specified') + parser.add_option('-o', '--output', dest='outfilename', type='string', + help='Output filename. 
Writes to stdout of not specified') + + parser.add_option('--inform', dest='inform', + help='key format of input - default PEM', + choices=('PEM', 'DER'), default='PEM') + + parser.add_option('--outform', dest='outform', + help='key format of output - default PEM', + choices=('PEM', 'DER'), default='PEM') + + (cli, cli_args) = parser.parse_args(sys.argv) + + # Read the input data + if cli.infilename: + print('Reading private key from %s in %s format' % + (cli.infilename, cli.inform), file=sys.stderr) + with open(cli.infilename, 'rb') as infile: + in_data = infile.read() + else: + print('Reading private key from stdin in %s format' % cli.inform, + file=sys.stderr) + in_data = sys.stdin.read().encode('ascii') + + assert type(in_data) == bytes, type(in_data) + + # Take the public fields and create a public key + priv_key = rsa.key.PrivateKey.load_pkcs1(in_data, cli.inform) + pub_key = rsa.key.PublicKey(priv_key.n, priv_key.e) + + # Save to the output file + out_data = pub_key.save_pkcs1(cli.outform) + + if cli.outfilename: + print('Writing public key to %s in %s format' % + (cli.outfilename, cli.outform), file=sys.stderr) + with open(cli.outfilename, 'wb') as outfile: + outfile.write(out_data) + else: + print('Writing public key to stdout in %s format' % cli.outform, + file=sys.stderr) + sys.stdout.write(out_data.decode('ascii')) diff --git a/rsa/varblock.py b/rsa/varblock.py new file mode 100644 index 0000000..1c8d839 --- /dev/null +++ b/rsa/varblock.py @@ -0,0 +1,179 @@ +# -*- coding: utf-8 -*- +# +# Copyright 2011 Sybren A. Stüvel +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# https://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +"""VARBLOCK file support + +.. deprecated:: 3.4 + + The VARBLOCK format is NOT recommended for general use, has been deprecated since + Python-RSA 3.4, and will be removed in a future release. It's vulnerable to a + number of attacks: + + 1. decrypt/encrypt_bigfile() does not implement `Authenticated encryption`_ nor + uses MACs to verify messages before decrypting public key encrypted messages. + + 2. decrypt/encrypt_bigfile() does not use hybrid encryption (it uses plain RSA) + and has no method for chaining, so block reordering is possible. + + See `issue #19 on Github`_ for more information. + +.. _Authenticated encryption: https://en.wikipedia.org/wiki/Authenticated_encryption +.. _issue #19 on Github: https://github.com/sybrenstuvel/python-rsa/issues/13 + + +The VARBLOCK file format is as follows, where || denotes byte concatenation: + + FILE := VERSION || BLOCK || BLOCK ... + + BLOCK := LENGTH || DATA + + LENGTH := varint-encoded length of the subsequent data. Varint comes from + Google Protobuf, and encodes an integer into a variable number of bytes. + Each byte uses the 7 lowest bits to encode the value. The highest bit set + to 1 indicates the next byte is also part of the varint. The last byte will + have this bit set to 0. + +This file format is called the VARBLOCK format, in line with the varint format +used to denote the block sizes. 
+ +""" + +import warnings + +from rsa._compat import byte, b + +ZERO_BYTE = b('\x00') +VARBLOCK_VERSION = 1 + +warnings.warn("The 'rsa.varblock' module was deprecated in Python-RSA version " + "3.4 due to security issues in the VARBLOCK format. See " + "https://github.com/sybrenstuvel/python-rsa/issues/13 for more information.", + DeprecationWarning) + + +def read_varint(infile): + """Reads a varint from the file. + + When the first byte to be read indicates EOF, (0, 0) is returned. When an + EOF occurs when at least one byte has been read, an EOFError exception is + raised. + + :param infile: the file-like object to read from. It should have a read() + method. + :returns: (varint, length), the read varint and the number of read bytes. + """ + + varint = 0 + read_bytes = 0 + + while True: + char = infile.read(1) + if len(char) == 0: + if read_bytes == 0: + return 0, 0 + raise EOFError('EOF while reading varint, value is %i so far' % + varint) + + byte = ord(char) + varint += (byte & 0x7F) << (7 * read_bytes) + + read_bytes += 1 + + if not byte & 0x80: + return varint, read_bytes + + +def write_varint(outfile, value): + """Writes a varint to a file. + + :param outfile: the file-like object to write to. It should have a write() + method. + :returns: the number of written bytes. + """ + + # there is a big difference between 'write the value 0' (this case) and + # 'there is nothing left to write' (the false-case of the while loop) + + if value == 0: + outfile.write(ZERO_BYTE) + return 1 + + written_bytes = 0 + while value > 0: + to_write = value & 0x7f + value >>= 7 + + if value > 0: + to_write |= 0x80 + + outfile.write(byte(to_write)) + written_bytes += 1 + + return written_bytes + + +def yield_varblocks(infile): + """Generator, yields each block in the input file. + + :param infile: file to read, is expected to have the VARBLOCK format as + described in the module's docstring. + @yields the contents of each block. 
+ """ + + # Check the version number + first_char = infile.read(1) + if len(first_char) == 0: + raise EOFError('Unable to read VARBLOCK version number') + + version = ord(first_char) + if version != VARBLOCK_VERSION: + raise ValueError('VARBLOCK version %i not supported' % version) + + while True: + (block_size, read_bytes) = read_varint(infile) + + # EOF at block boundary, that's fine. + if read_bytes == 0 and block_size == 0: + break + + block = infile.read(block_size) + + read_size = len(block) + if read_size != block_size: + raise EOFError('Block size is %i, but could read only %i bytes' % + (block_size, read_size)) + + yield block + + +def yield_fixedblocks(infile, blocksize): + """Generator, yields each block of ``blocksize`` bytes in the input file. + + :param infile: file to read and separate in blocks. + :returns: a generator that yields the contents of each block + """ + + while True: + block = infile.read(blocksize) + + read_bytes = len(block) + if read_bytes == 0: + break + + yield block + + if read_bytes < blocksize: + break diff --git a/server.py b/server.py new file mode 100644 index 0000000..c3a22b4 --- /dev/null +++ b/server.py @@ -0,0 +1,232 @@ +#!/usr/bin/env python +import json +import io +import os + +import config +import jinja2 +import webapp2 +import logging + +from google.appengine.api import memcache +from google.appengine.ext import ndb + +############################################################################### +# Web request handlers. 
# +############################################################################### + +class MainHandler(webapp2.RequestHandler): + """A servlet to handle requests to load the main grudsby web page.""" + def get(self, path=''): + template_values = { + } + template = JINJA2_ENVIRONMENT.get_template('index.html') + self.response.out.write(template.render(template_values)) + + +class RegionHandler(webapp2.RequestHandler): + """A servlet to handle requests for details about a Region.""" + def get(self): + polygon_id = self.request.get('polygon_id') + loadedPath = getString(polygon_id + "_region") + try: + content = json.dumps(json.loads(loadedPath), sort_keys=True, indent=2) + except ValueError, e: + content = json.dumps({'error': 'Stored data not formatted correctly'}) + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(content) + +class SaveRegionHandler(webapp2.RequestHandler): + """A servlet to save details about a Region.""" + def get(self): + rawJson = str(self.request.get('jsonData')) + try: + polygonData = json.loads(rawJson) + except ValueError, e: + False + if ('coordinates' in polygonData and 'regionID' in polygonData): + content = json.dumps({'result': 'Successfully received region ' + polygonData['regionID']}) + setString(polygonData['regionID']+"_region", json.dumps(polygonData, ensure_ascii=False)) + else: + content = json.dumps({'error': 'Request not formatted correctly'}) + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(content) + +class MowingPlanHandler(webapp2.RequestHandler): + """A servlet to handle requests for details about a Mowing Plan.""" + def get(self): + polygon_id = self.request.get('polygon_id') + loadedPath = getString(polygon_id + "_plan") + try: + content = json.dumps(json.loads(loadedPath), sort_keys=True, indent=2) + except ValueError, e: + content = json.dumps({'error': 'Stored data not formatted correctly'}) + self.response.headers['Content-Type'] = 
'application/json' + self.response.out.write(content) + +class SaveMowingPlanHandler(webapp2.RequestHandler): + """A servlet to save details about a Mowing Region.""" + def get(self): + rawJson = str(self.request.get('jsonData')) + try: + polygonData = json.loads(rawJson) + except ValueError, e: + False + if ('coordinates' in polygonData and 'regionID' in polygonData): + content = json.dumps({'result': 'Successfully received plan ' + polygonData['regionID']}) + setString(polygonData['regionID']+"_plan", json.dumps(polygonData, ensure_ascii=False)) + else: + content = json.dumps({'error': 'Request not formatted correctly'}) + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(content) + def post(self): + dataJson = self.request.get('jsonData') + try: + polygonData = json.loads(dataJson) + except ValueError, e: + False + if ('coordinates' in polygonData and 'regionID' in polygonData): + content = json.dumps({'result': 'Successfully received plan ' + polygonData['regionID']}) + setString(polygonData['regionID']+"_plan", json.dumps(polygonData, ensure_ascii=False)) + else: + content = json.dumps({'error': 'Request not formatted correctly'}) + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(content) + +class ApprovalHandler(webapp2.RequestHandler): + """A servlet to handle requests for details about an Approval.""" + def get(self): + polygon_id = self.request.get('polygon_id') + loadedPath = getString(polygon_id + "_approval") + try: + content = json.dumps(json.loads(loadedPath), sort_keys=True, indent=2) + except ValueError, e: + content = json.dumps({'error': 'Stored data not formatted correctly'}) + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(content) + + +class SetApprovalHandler(webapp2.RequestHandler): + """A servlet to save an Approval.""" + def get(self): + rawJson = str(self.request.get('jsonData')) + try: + polygonData = json.loads(rawJson) + except 
ValueError, e: + False + if ('approval' in polygonData and 'regionID' in polygonData): + content = json.dumps({'result': 'Successfully received approval ' + polygonData['regionID']}) + setString(polygonData['regionID']+"_approval", json.dumps(polygonData, ensure_ascii=False)) + else: + content = json.dumps({'error': 'Request not formatted correctly'}) + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(content) + +class MowerPosHandler(webapp2.RequestHandler): + """A servlet to handle requests for details about an Approval.""" + def get(self): + loadedPath = getString("mowerPos") + try: + content = json.dumps(json.loads(loadedPath), sort_keys=True, indent=2) + except ValueError, e: + content = json.dumps({'error': 'Stored data not formatted correctly'}) + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(content) + + +class SetMowerPosHandler(webapp2.RequestHandler): + """A servlet to save an Approval.""" + def get(self): + rawJson = str(self.request.get('jsonData')) + try: + mowerPos = json.loads(rawJson) + except ValueError, e: + False + if ('lat' in mowerPos and 'lng' in mowerPos and 'rot' in mowerPos): + content = json.dumps({'result': 'Successfully received moweor position.'}) + setString("mowerPos", json.dumps(mowerPos, ensure_ascii=False)) + else: + content = json.dumps({'error': 'Request not formatted correctly'}) + self.response.headers['Content-Type'] = 'application/json' + self.response.out.write(content) + + + + +# Define webapp2 routing from URL paths to web request handlers. 
See: +# http://webapp-improved.appspot.com/tutorials/quickstart.html +app = webapp2.WSGIApplication([ + #('/details', DetailsHandler), + ('/', MainHandler), + ('/region', RegionHandler), + ('/saveRegion', SaveRegionHandler), + ('/plan', MowingPlanHandler), + ('/savePlan', SaveMowingPlanHandler), + ('/approval', ApprovalHandler), + ('/setApproval', SetApprovalHandler), + ('/mowerPos', MowerPosHandler), + ('/setMowerPos', SetMowerPosHandler), +]) + + +############################################################################### +# Helpers. # +############################################################################### + + +class StoredData(ndb.Model): + """A main model for representing a data entry.""" + dataID = ndb.StringProperty(indexed=True) + storedData = ndb.StringProperty(indexed=False) + +def getString(stringID): + """Get a stored string based on ID""" + data_query = StoredData.query( + StoredData.dataID == stringID) + data = data_query.fetch(1) + if (len(data) == 0): + return "" + else: + return data[0].storedData + +def setString(stringID, newValue): + """Store a string by ID and value""" + data_query = StoredData.query( + StoredData.dataID == stringID) + data = data_query.fetch(1) + if (len(data) == 0): + newData = StoredData() + newData.dataID = stringID + newData.storedData = newValue + newData.put() + else: + refreshedData = data[0] + refreshedData.storedData = newValue + refreshedData.put() + + +############################################################################### +# Constants. # +############################################################################### + + +# Memcache is used to avoid exceeding our EE quota. Entries in the cache expire +# 24 hours after they are added. See: +# https://cloud.google.com/appengine/docs/python/memcache/ +MEMCACHE_EXPIRATION = 60 * 60 * 24 + +# The ImageCollection of the night-time lights dataset. 
See: +# https://earthengine.google.org/#detail/NOAA%2FDMSP-OLS%2FNIGHTTIME_LIGHTS +IMAGE_COLLECTION_ID = 'USDA/NAIP/DOQQ' + +############################################################################### +# Initialization. # +############################################################################### + +# Create the Jinja templating system we use to dynamically generate HTML. See: +# http://jinja.pocoo.org/docs/dev/ +JINJA2_ENVIRONMENT = jinja2.Environment( + loader=jinja2.FileSystemLoader(os.path.dirname(__file__)), + autoescape=True, + extensions=['jinja2.ext.autoescape']) + diff --git a/six.py b/six.py new file mode 100644 index 0000000..190c023 --- /dev/null +++ b/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. 
+ +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson " +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. 
+ delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." 
+ fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. + + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + 
MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", 
"ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." 
+ attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", 
"moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + 
MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", "urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = 
_urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + 
_meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, 
**kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = 
getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. 
+ if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. 
+ class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. +# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. 
Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/static/google_earthengine_powered_400px.png b/static/google_earthengine_powered_400px.png new file mode 100644 index 0000000..b74c685 Binary files /dev/null and b/static/google_earthengine_powered_400px.png differ diff --git a/static/grudsby_top.png b/static/grudsby_top.png new file mode 100644 index 0000000..93d3bd2 Binary files /dev/null and b/static/grudsby_top.png differ diff --git a/static/grudsby_top_tiny.png b/static/grudsby_top_tiny.png new file mode 100644 index 0000000..c6f2c47 Binary files /dev/null and b/static/grudsby_top_tiny.png differ diff --git a/static/script.js b/static/script.js new file mode 100644 index 0000000..9faf0cd --- /dev/null +++ b/static/script.js @@ -0,0 +1,362 @@ +/** + * @fileoverview Runs the grudsby application. The code is executed in the + * user's browser. It communicates with the App Engine backend, renders output + * to the screen, and handles user interactions. 
+ */ + +var RotateIcon = function(options){ + this.options = options || {}; + this.rImg = options.img || new Image(); + this.rImg.src = this.rImg.src || this.options.url; + this.options.width = this.options.width || this.rImg.width || 52; + this.options.height = this.options.height || this.rImg.height || 60; + canvas = document.createElement("canvas"); + canvas.width = this.options.width; + canvas.height = this.options.height; + this.context = canvas.getContext("2d"); + this.canvas = canvas; +}; +RotateIcon.makeIcon = function(url) { + + + return new RotateIcon({url: url}); +}; +RotateIcon.prototype.setRotation = function(options){ + var canvas = this.context, + angle = options.deg ? options.deg * Math.PI / 180: + options.rad, + centerX = this.options.width/2, + centerY = this.options.height/2; + + canvas.clearRect(0, 0, this.options.width, this.options.height); + canvas.save(); + canvas.translate(centerX, centerY); + canvas.rotate(angle); + canvas.translate(-centerX, -centerY); + canvas.drawImage(this.rImg, 0, 0); + canvas.restore(); + return this; +}; + +RotateIcon.prototype.getUrl = function(){ + return this.canvas.toDataURL('image/png'); +}; + +grudsby = {}; // Our namespace. + +grudsby.boot = function() { + // Load external libraries. + google.load('visualization', '1.0'); + google.load('jquery', '1'); + google.load('maps', '3', {'other_params': 'libraries=drawing&key=AIzaSyC6xdjB0IXsF7N04S64n_5blQOqb0dkYcU'}); + + // Create the grudsby app. + google.setOnLoadCallback(function() { + var app = new grudsby.App(); + }); +}; + + +/////////////////////////////////////////////////////////////////////////////// +// The application. // +/////////////////////////////////////////////////////////////////////////////// + + +grudsby.App = function() { + // Create and display the map. + this.map = this.createMap(); + + // Add the polygons to the map. + this.addPolygon(); + + // Register a click handler to toggle polygon drawing mode. 
+ $('.drawRegion').click(this.drawRegion.bind(this)); + + // Register a click handler to toggle polygon drawing mode. + $('.approvePlan').click(this.approvePlan.bind(this)); + + // Timer for loading the mowing plan + this.checkPlan(); + var timer = setInterval(this.checkPlan.bind(this),250); + + // Write the unapproved state at the start of loading the page. + this.loadApproval(); + + + var marker = new google.maps.Marker({ + position: {lat: 40.444505, lng: -79.940777}, + map: this.map, + title: 'grudsby', + zIndex:400 + }); + var step = 1; + var angle = 1; + setInterval(function(){ + $.get('/mowerPos').done((function(data) { + if (data['error']) + { + console.log("Failure: %s", data.error); + } + else + { + $('.grudsby').attr('src', RotateIcon.makeIcon("/static/grudsby_top_tiny.png") + .setRotation({deg: -data.rot}) + .getUrl()); + marker.setOptions({ + icon: { + url:$('.grudsby').attr('src'), + size: new google.maps.Size(80, 80), + origin: new google.maps.Point(0, 0), + anchor: new google.maps.Point(40, 40), + scaledSize: new google.maps.Size(80, 80) }, + position: {lat: data.lat, lng: data.lng}, + + }); + }; + }).bind(this)); + }, 500); + + +}; + + +grudsby.App.prototype.createMap = function(mapType) { + var mapOptions = { + center: grudsby.App.DEFAULT_CENTER, + disableDefaultUI: true, + zoom: grudsby.App.DEFAULT_ZOOM + }; + var mapEl = $('.map').get(0); + var map = new google.maps.Map(mapEl, mapOptions); + //map.overlayMapTypes.push(mapType); // For adding overlays + map.setMapTypeId('satellite'); + grudsby.App.polyDrawingManager = new google.maps.drawing.DrawingManager({ + drawingMode: null, + drawingControl: false, + map: map, + polygonOptions: { + fillColor: 'white', + strokeColor: 'white', + editable: false + } + }); + google.maps.event.addListener(grudsby.App.polyDrawingManager, 'polygoncomplete', this.regionComplete.bind(this)); + return map; +}; + +grudsby.App.prototype.drawRegion = function() { + if (grudsby.App.polyDrawingManager.getDrawingMode() == 
google.maps.drawing.OverlayType.POLYGON) + { + grudsby.App.polyDrawingManager.setOptions({ + drawingMode: null + }); + $('.drawRegion').get(0).innerHTML="Draw New Region"; + if (grudsby.App.currentRegion!=null){ + grudsby.App.currentRegion.setEditable(true); + }; + } + else + { + grudsby.App.polyDrawingManager.setOptions({ + drawingMode: google.maps.drawing.OverlayType.POLYGON + }); + $('.drawRegion').get(0).innerHTML='Edit Current Region'; + if (grudsby.App.currentRegion!=null){ + grudsby.App.currentRegion.setEditable(false); + }; + }; +}; + +grudsby.App.prototype.regionComplete = function(polygon) { + grudsby.App.polyDrawingManager.setOptions({ + drawingMode: null + }); + $('.drawRegion').get(0).innerHTML="Draw New Region"; + if (grudsby.App.currentRegion!=null){ + grudsby.App.currentRegion.setMap(null); + }; + polygon.setEditable(true); + polygon.setOptions({ + zIndex:10 + }); + grudsby.App.currentRegion = polygon; + this.planInvalidated(); + var polyPath = polygon.getPath() + google.maps.event.addListener(polyPath, 'set_at', this.polyMoved.bind(this)); + google.maps.event.addListener(polyPath, 'insert_at', this.polyMoved.bind(this)); + +}; + +grudsby.App.prototype.approvePlan = function() { + if ((grudsby.App.planApproved == false) && (grudsby.App.currentRegion!=null)) { + $('.approvePlan').get(0).innerHTML="Mowing Plan Approved"; + $('.approvePlan').get(0).setAttribute("style","background-color: #4CAF50"); + grudsby.App.planApproved = true; + this.saveApproval(); + } + else { + this.planInvalidated(); + }; +}; + +grudsby.App.prototype.planInvalidated = function() { + $('.approvePlan').get(0).innerHTML="Approve Mowing Plan"; + $('.approvePlan').get(0).setAttribute("style","background-color: #f44336"); + grudsby.App.planApproved = false; + this.saveApproval(); + this.savePolygon(); +}; + +grudsby.App.prototype.addPolygon = function() { + polygonId = "sve"; + $.get('/region?polygon_id=' + polygonId).done((function(data) { + if (data['error']) + { + 
console.log("Failure: %s", data.error); + } + else + { + grudsby.App.currentRegion = new google.maps.Polygon({ + paths: data.coordinates, + fillColor: 'white', + strokeColor: 'white', + strokeWeight: 3, + map: this.map, + editable: true, + zIndex: 10 + }); + var polyPath = grudsby.App.currentRegion.getPath() + google.maps.event.addListener(polyPath, 'set_at', this.polyMoved.bind(this)); + google.maps.event.addListener(polyPath, 'insert_at', this.polyMoved.bind(this)); + }; + }).bind(this)); +}; + +grudsby.App.prototype.savePolygon = function() { + var dataOut = { + "coordinates":grudsby.App.currentRegion.getPath().getArray(), + "regionID":"sve" + }; + + $.get('/saveRegion?jsonData=' + JSON.stringify(dataOut)).done((function(data) { + if (data['error']) + { + console.log("Failure: %s", data.error); + } + else + { + + }; + }).bind(this)); + +}; + +grudsby.App.prototype.polyMoved = function() { + this.planInvalidated(); +} + +grudsby.App.prototype.checkPlan = function() { + polygonId = "sve"; + $.get('/plan?polygon_id=' + polygonId).done((function(data) { + if (data['error']) + { + console.log("Failure: %s", data.error); + } + else + { + if (grudsby.App.currentPlanText != data) + { + var oldPlan = grudsby.App.currentPlan; + if (data.coordinates.length > 1) + { + grudsby.App.currentPlan = new google.maps.Polyline({ + path: data.coordinates, + strokeColor: 'red', + strokeWeight: 2, + strokeOpacity: 1.0, + editable: false, + zIndex: 100 + }); + + grudsby.App.currentPlan.setMap(this.map); + } + if (oldPlan!=null) + { + oldPlan.setMap(null); + }; + }; + grudsby.App.currentPlanText = data; + }; + }).bind(this)); +} + +grudsby.App.prototype.saveApproval = function() { + var dataOut = { + "approval":"false", + "regionID":"sve" + }; + if (grudsby.App.planApproved) + { + dataOut.approval = "true"; + }; + + $.get('/setApproval?jsonData=' +JSON.stringify(dataOut)).done((function(data) { + if (data['error']) + { + console.log("Failure: %s", data.error); + } + else + { + + }; + 
}).bind(this)); +} + +grudsby.App.prototype.loadApproval = function() { + $.get('/approval?polygon_id=sve').done((function(data) { + if (data['error']) + { + console.log("Failure: %s", data.error); + } + else + { + grudsby.App.planApproved = false; + if (data.approval == "true") + { + grudsby.App.planApproved = true; + $('.approvePlan').get(0).innerHTML="Mowing Plan Approved"; + $('.approvePlan').get(0).setAttribute("style","background-color: #4CAF50"); + } + }; + }).bind(this)); +} + + +/////////////////////////////////////////////////////////////////////////////// +// Static helpers and constants. // +/////////////////////////////////////////////////////////////////////////////// + +/** @type {number} The default zoom level for the map. */ +grudsby.App.DEFAULT_ZOOM = 20; + + +/** @type {Object} The default center of the map. */ +grudsby.App.DEFAULT_CENTER = {lng: -78.906790, lat: 33.671536}; + +/** @type {Object} The polygon drawing manager. */ +grudsby.App.polyDrawingManager; + + +/** @type {Object} The current polygon. */ +grudsby.App.currentRegion; + + +grudsby.App.currentPlanText = ""; + + +/** @type {Object} The current polygon. */ +grudsby.App.currentPlan; + +/** @type {Object} Tracks whether the user has approved the region. */ +grudsby.App.planApproved = false; + diff --git a/static/style.css b/static/style.css new file mode 100644 index 0000000..760dc03 --- /dev/null +++ b/static/style.css @@ -0,0 +1,215 @@ +/** User interface styles for the grudsby Lights app. */ + +.template { + display: none; +} + +/* Shared across all screen sizes. 
*/ + +.grudsby h1 { + font-family: 'Roboto Condensed', sans-serif; + position: absolute; + z-index: 1; + margin: 0px; + color: white; + text-shadow: 0px 0px 20px black; + top: 22px; + left: 30px; +} + +.grudsby .drawRegion { + position: absolute; + background-color: #4CAF50; /* Green */ + border: none; + color: white; + padding: 15px 32px; + text-align: center; + text-decoration: none; + display: inline-block; + font-size: 16px; + left: 30px; + z-index: 1; + width:230px; +} + + +.grudsby .approvePlan { + position: absolute; + background-color: #f44336; /* red */ + border: none; + color: white; + padding: 15px 32px; + text-align: center; + text-decoration: none; + display: inline-block; + font-size: 16px; + left: 30px; + z-index: 1; + width:230px; +} + +.grudsby .drawRegion:hover { + box-shadow: 0 12px 16px 0 rgba(0,0,0,0.24), 0 17px 50px 0 rgba(0,0,0,0.19); +} + + +.grudsby .approvePlan:hover { + box-shadow: 0 12px 16px 0 rgba(0,0,0,0.24), 0 17px 50px 0 rgba(0,0,0,0.19); +} + +html, +body, +.grudsby, +.grudsby .map { + height: 100%; + margin: 0px; + padding: 0px; + overflow: hidden; +} + + +/* Small screens. */ + +@media (max-width: 500px) { + .grudsby h1 { + font-size: 19px; + } + .grudsby .approvePlan { + top: 115px; + height:45px; + } + .grudsby .drawRegion { + top: 55px; + height:45px; + } +} + + +/* Large screens. 
*/ + +@media (min-width: 500px) { + .grudsby h1 { + font-size: 37px; + } + .grudsby .approvePlan { + top: 160px; + height:55px; + } + .grudsby .drawRegion { + top: 80px; + height:55px; + } + +} + +/*.grudsby .panel { + display: none; + position: absolute; + z-index: 1; + background-color: white; + box-shadow: 0px 0px 20px black; + font-family: 'Roboto', sans-serif; + padding: 10px; + box-sizing: border-box; +} + +.grudsby .panel h2 { + margin: 0px; + padding-bottom: 10px; +} + +.grudsby .panel .details { + overflow: hidden; + max-height: 500px; +} + +.grudsby .panel .error { + display: none; +} + +.grudsby .powered-by-ee { + position: absolute; + z-index: 1; + bottom: 10px; + left: 50%; + margin-left: -150px; + width: 300px; +} +*/ +/* Small screens. */ + +/*@media (max-width: 900px) { + .grudsby h1 { + top: 22px; + font-size: 37px; + left: 30px; + } + + /*.grudsby .panel { + width: 100%; + bottom: 0px; + height: 50px; + } + + .grudsby .panel .details { + visibility: hidden; + max-height: 400px; + padding-right: 10px; + } + + .grudsby .panel .close { + display: none; + } + + .grudsby .panel .toggler { + position: absolute; + top: 10px; + right: 10px; + cursor: pointer; + } + + .grudsby .panel.expanded .details { + visibility: visible; + } + + .grudsby .panel.expanded { + height: 278px; + } + + .grudsby .powered-by-ee { + width: 180px; + } +} + +/* Large screens. 
*/ +/* +@media (min-width: 900px) { + .grudsby h1 { + left: 30px; + top: 30px; + font-size: 56px; + opacity: .98; + } + + /* + .grudsby .panel { + width: 400px; + left: 40px; + top: 126px; + padding: 14px; + opacity: .98; + border-radius: 4px; + } + + .grudsby .panel .close { + position: absolute; + top: 14px; + right: 14px; + cursor: pointer; + } + + .grudsby .panel .toggler { + display: none; + } + */ +/*}*/ diff --git a/storage.py b/storage.py new file mode 100644 index 0000000..54895fa --- /dev/null +++ b/storage.py @@ -0,0 +1,9 @@ +class Region(ndb.Model): + region = ndb.StringProperty(indexed=False) + +class Path(ndb.Model): + path = ndb.StringProperty(indexed=False) + +class Approval(ndb.Model): + approval = ndb.StringProperty(indexed=False) +