diff --git a/AUTHORS b/AUTHORS index 20a2ceafde..0de4027d1d 100644 --- a/AUTHORS +++ b/AUTHORS @@ -36,3 +36,13 @@ Stephen Goss James Murty Thomas Ballinger Rick Harding +Kirill Pinchuk +Ales Zoulek +Casey Banner +Roman Imankulov +Rodrigue Alcazar +Jeremy Avnet +Matt Chisholm +Mark Merritt +Max Arnold +Szymon Reichmann diff --git a/docs/api/core/decorators.rst b/docs/api/core/decorators.rst index 78228ea2a9..ae0c2a9c4b 100644 --- a/docs/api/core/decorators.rst +++ b/docs/api/core/decorators.rst @@ -3,4 +3,4 @@ Decorators ========== .. automodule:: fabric.decorators - :members: hosts, roles, runs_once + :members: hosts, roles, runs_once, task, with_settings diff --git a/docs/api/core/tasks.rst b/docs/api/core/tasks.rst new file mode 100644 index 0000000000..8c69ac76b9 --- /dev/null +++ b/docs/api/core/tasks.rst @@ -0,0 +1,6 @@ +===== +Tasks +===== + +.. automodule:: fabric.tasks + :members: Task diff --git a/docs/changes/0.9.6.rst b/docs/changes/0.9.6.rst new file mode 100644 index 0000000000..ade90155b7 --- /dev/null +++ b/docs/changes/0.9.6.rst @@ -0,0 +1,12 @@ +======================== +Changes in version 0.9.6 +======================== + +The following changes were implemented in Fabric 0.9.6: + +Bugfixes +======== + +* :issue:`347`: `~fabric.contrib.files.append` incorrectly tested for ``str`` + instead of ``types.StringTypes``, causing it to split up Unicode strings as + if they were one character per line. This has been fixed. diff --git a/docs/changes/0.9.7.rst b/docs/changes/0.9.7.rst new file mode 100644 index 0000000000..a13c1b69e4 --- /dev/null +++ b/docs/changes/0.9.7.rst @@ -0,0 +1,10 @@ +======================== +Changes in version 0.9.7 +======================== + +The following changes were implemented in Fabric 0.9.7: + +Bugfixes +======== + +* :issue:`329`: `~fabric.operations.reboot` would have problems reconnecting post-reboot (resulting in a traceback) if ``env.host_string`` was not fully-formed (did not contain user and port specifiers.) 
This has been fixed. diff --git a/docs/changes/1.0.1.rst b/docs/changes/1.0.1.rst index ab6268963c..8c5c94cc25 100644 --- a/docs/changes/1.0.1.rst +++ b/docs/changes/1.0.1.rst @@ -12,13 +12,21 @@ Bugfixes * :issue:`301`: Fixed a bug in `~fabric.operations.local`'s behavior when ``capture=False`` and ``output.stdout`` (or ``.stderr``) was also ``False``. Thanks to Chris Rose for the catch. +* :issue:`310`: Update edge case in `~fabric.operations.put` where using the + ``mode`` kwarg alongside ``use_sudo=True`` runs a hidden + `~fabric.operations.sudo` command. The ``mode`` kwarg needs to be octal but + was being interpolated in the ``sudo`` call as a string/integer. Thanks to + Adam Ernst for the catch and suggested fix. * :issue:`311`: `~fabric.contrib.files.append` was supposed to have its ``partial`` kwarg's default flipped from ``True`` to ``False``. However, only the documentation was altered. This has been fixed. Thanks to Adam Ernst for bringing it to our attention. * :issue:`312`: Tweak internal I/O related loops to prevent high CPU usage and - poor screen-printing behavior on some systems. Thanks to Redmine user ``cbr - grind`` for the initial patch. + poor screen-printing behavior on some systems. Thanks to Kirill Pinchuk for + the initial patch. +* :issue:`320`: Some users reported problems with dropped input, particularly + while entering `~fabric.operations.sudo` passwords. This was fixed via the + same change as for :issue:`312`. Documentation ============= diff --git a/docs/changes/1.0.2.rst b/docs/changes/1.0.2.rst new file mode 100644 index 0000000000..7e7cacf05f --- /dev/null +++ b/docs/changes/1.0.2.rst @@ -0,0 +1,35 @@ +======================== +Changes in version 1.0.2 +======================== + +.. note:: + This release also includes all applicable changes from the :doc:`0.9.7 + release `. + +Bugfixes +======== + +* :issue:`258`: Bugfix to a previous, incorrectly applied fix regarding + `~fabric.operations.local` on Windows platforms. 
+* :issue:`324`: Update `~fabric.operations.run`/`~fabric.operations.sudo`'s + ``combine_stderr`` kwarg so that it correctly overrides the global setting in + all cases. This required changing its default value to ``None``, but the + default behavior (behaving as if the setting were ``True``) has not changed. + Thanks to Matthew Woodcraft and Connor Smith for the catch. +* :issue:`337`: Fix logic bug in `~fabric.operations.put` preventing use of + ``mirror_local_mode``. Thanks to Roman Imankulov for catch & patch. +* :issue:`352` (also :issue:`320`): Seemingly random issues with output lockup + and input problems (e.g. sudo prompts incorrectly rejecting passwords) appear + to have been caused by an I/O race condition. This has been fixed. Thanks to + Max Arnold and Paul Oswald for the detailed reports and to Max for the + diagnosis and patch. + + +Documentation +============= + +* Updated the API documentation for `~fabric.context_managers.cd` to explicitly + point users to `~fabric.context_managers.lcd` for modifying local paths. +* Clarified the behavior of `~fabric.contrib.project.rsync_project` re: how + trailing slashes in ``local_dir`` affect ``remote_dir``. Thanks to Mark + Merritt for the catch. diff --git a/docs/changes/1.1.1.rst b/docs/changes/1.1.1.rst new file mode 100644 index 0000000000..e2ef5e29e6 --- /dev/null +++ b/docs/changes/1.1.1.rst @@ -0,0 +1,19 @@ +======================== +Changes in version 1.1.1 +======================== + +Bugfixes +======== + +* The public API for `~fabric.tasks.Task` mentioned use of the ``run()`` + method, but Fabric's main execution loop had not been updated to look for and + call it, forcing users who subclassed `~fabric.tasks.Task` to define + ``__call__()`` instead. This was an oversight and has been corrected. 
+ + +Documentation +============= + +* The documentation for use of `~fabric.tasks.Task` subclasses (contained under + :ref:`new-style-tasks`) has been slightly fleshed out and has also grown an + example snippet or two. diff --git a/docs/changes/1.1.rst b/docs/changes/1.1.rst index a574bc3590..bcd8d31d2e 100644 --- a/docs/changes/1.1.rst +++ b/docs/changes/1.1.rst @@ -4,20 +4,91 @@ Changes in version 1.1 This page lists all changes made to Fabric in its 1.1.0 release. +.. note:: + This release also includes all applicable changes from the :doc:`1.0.2 + release `. + +Highlights +========== + +* :issue:`76`: :ref:`New-style tasks ` have been added. With + the addition of the `~fabric.decorators.task` decorator and the + `~fabric.tasks.Task` class, you can now "opt-in" and explicitly mark task + functions as tasks, and Fabric will ignore the rest. The original behavior + (now referred to as :ref:`"classic" tasks `) will still take + effect if no new-style tasks are found. Major thanks to Travis Swicegood for + the original implementation. +* :issue:`56`: Namespacing is now possible: Fabric will crawl imported module + objects looking for new-style task objects and build a dotted hierarchy + (tasks named e.g. ``web.deploy`` or ``db.migrations.run``), allowing for + greater organization. See :ref:`namespaces` for details. Thanks again to + Travis Swicegood. + Feature additions ================= +* :issue:`10`: `~fabric.contrib.upload_project` now allows control over the + local and remote directory paths, and has improved error handling. Thanks to + Rodrigue Alcazar for the patch. +* As part of :issue:`56` (highlighted above), added :option:`--list-format + <-F>` to allow specification of a nested output format from :option:`--list + <-l>`. * :issue:`107`: `~fabric.operations.require`'s ``provided_by`` kwarg now accepts iterables in addition to single values. Thanks to Thomas Ballinger for the patch. 
+* :issue:`117`: `~fabric.contrib.files.upload_template` now supports the + `~fabric.operations.put` flags ``mirror_local_mode`` and ``mode``. Thanks to + Joe Stump for the suggestion and Thomas Ballinger for the patch. +* :issue:`154`: `~fabric.contrib.files.sed` now allows customized regex flags + to be specified via a new ``flags`` parameter. Thanks to Nick Trew for the + suggestion and Morgan Goose for initial implementation. +* :issue:`170`: Allow :ref:`exclusion ` of specific hosts from + the final run list. Thanks to Casey Banner for the suggestion and patch. +* :issue:`189`: Added :option:`--abort-on-prompts`/:ref:`env.abort_on_prompts + ` to allow a more non-interactive behavior, + aborting/exiting instead of trying to prompt the running user. Thanks to + Jeremy Avnet and Matt Chisholm for the initial patch. +* :issue:`273`: `~fabric.contrib.files.upload_template` now offers control over + whether it attempts to create backups of pre-existing destination files. + Thanks to Ales Zoulek for the suggestion and initial patch. +* :issue:`283`: Added the `~fabric.decorators.with_settings` decorator to allow + application of env var settings to an entire function, as an alternative to + using the `~fabric.context_managers.settings` context manager. Thanks to + Travis Swicegood for the patch. +* :issue:`353`: Added :option:`--keepalive`/:ref:`env.keepalive ` to + allow specification of an SSH keepalive parameter for troublesome network + connections. Thanks to Mark Merritt for catch & patch. Bugfixes ======== -* N/A +* :issue:`115`: An implementation detail causing host lists to lose order + when deduped by the ``fab`` execution loop, has been patched to preserve + order instead. So e.g. ``fab -H a,b,c`` (or setting ``env.hosts = ['a', 'b', + 'c']``) will now always run on ``a``, then ``b``, then ``c``. Previously, + there was a chance the order could get mixed up during deduplication. Thanks + to Rohit Aggarwal for the report. 
+* :issue:`345`: `~fabric.contrib.files.contains` returned the stdout of its + internal ``grep`` command instead of success/failure, causing incorrect + behavior when stderr exists and is combined with stdout. This has been + corrected. Thanks to Szymon Reichmann for catch and patch. Documentation updates ===================== -* N/A +* Documentation for task declaration has been moved from + :doc:`/usage/execution` into its own docs page, :doc:`/usage/tasks`, as a + result of the changes added in :issue:`76` and :issue:`56`. +* :issue:`184`: Make the usage of `~fabric.contrib.project.rsync_project`'s + ``local_dir`` argument more obvious, regarding its use in the ``rsync`` call. + (Specifically, so users know they can pass in multiple, space-joined + directory names instead of just one single directory.) + +Internals +========= + +* :issue:`307`: A whole pile of minor PEP8 tweaks. Thanks to Markus Gattol for + highlighting the ``pep8`` tool and to Rick Harding for the patch. +* :issue:`314`: Test utility decorator improvements. Thanks to Rick Harding for + initial catch & patch. diff --git a/docs/conf.py b/docs/conf.py index 6c64ca30c5..d34e6f6518 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -120,7 +120,8 @@ def issues_role(name, rawtext, text, lineno, inliner, options={}, content=[]): from fabric.api import local, hide with hide('everything'): - fabric_tags = local('git tag | sort -r | egrep "(0\.9|1\.0)\.."', True).split() + get_tags = 'git tag | sort -r | egrep "(0\.9|1\.[[:digit:]]+)\.."' + fabric_tags = local(get_tags, True).split() html_context = {'fabric_tags': fabric_tags} diff --git a/docs/tutorial.rst b/docs/tutorial.rst index c065b5f694..9ae6c3016e 100644 --- a/docs/tutorial.rst +++ b/docs/tutorial.rst @@ -55,7 +55,7 @@ That's all there is to it. This functionality allows Fabric to be used as a functions you instruct it to. There's nothing magic about it -- anything you can do in a normal Python script can be done in a fabfile! -.. 
seealso:: :ref:`execution-strategy`, :ref:`tasks-and-imports`, :doc:`usage/fab` +.. seealso:: :ref:`execution-strategy`, :doc:`/usage/tasks`, :doc:`/usage/fab` Task arguments diff --git a/docs/usage/env.rst b/docs/usage/env.rst index e2af31d5db..15a413e924 100644 --- a/docs/usage/env.rst +++ b/docs/usage/env.rst @@ -104,6 +104,23 @@ as `~fabric.context_managers.cd`. Note that many of these may be set via ``fab``'s command-line switches -- see :doc:`fab` for details. Cross-links will be provided where appropriate. +.. _abort-on-prompts: + +``abort_on_prompts`` +-------------------- + +**Default:** ``False`` + +When ``True``, Fabric will run in a non-interactive mode, calling +`~fabric.utils.abort` anytime it would normally prompt the user for input (such +as password prompts, "What host to connect to?" prompts, fabfile invocation of +`~fabric.operations.prompt`, and so forth.) This allows users to ensure a Fabric +session will always terminate cleanly instead of blocking on user input forever +when unforeseen circumstances arise. + +.. versionadded:: 1.1 +.. seealso:: :option:`--abort-on-prompts` + ``all_hosts`` ------------- @@ -185,6 +202,19 @@ host key is actually valid (e.g. cloud servers such as EC2.) .. seealso:: :doc:`ssh` +.. _exclude-hosts: + +``exclude_hosts`` +----------------- + +**Default:** ``[]`` + +Specifies a list of host strings to be :ref:`skipped over ` +during ``fab`` execution. Typically set via :option:`--exclude-hosts/-x <-x>`. + +.. versionadded:: 1.1 + + ``fabfile`` ----------- @@ -229,6 +259,20 @@ The global host list used when composing per-task host lists. .. seealso:: :doc:`execution` +.. _keepalive: + +``keepalive`` +------------- + +**Default:** ``0`` (i.e. no keepalive) + +An integer specifying an SSH keepalive interval to use; basically maps to the +SSH config option ``ClientAliveInterval``. Useful if you find connections are +timing out due to meddlesome network hardware or what have you. + +.. seealso:: :option:`--keepalive` +.. 
versionadded:: 1.1 + .. _key-filename: ``key_filename`` diff --git a/docs/usage/execution.rst b/docs/usage/execution.rst index c2d514f735..6152c52aec 100644 --- a/docs/usage/execution.rst +++ b/docs/usage/execution.rst @@ -66,76 +66,10 @@ down to the individual function calls) enables shell script-like logic where you may introspect the output or return code of a given command and decide what to do next. - -.. _tasks-and-imports: - Defining tasks ============== -When looking for tasks to execute, Fabric imports your fabfile and will -consider any callable object, **except** for the following: - -* Callables whose name starts with an underscore (``_``). In other words, - Python's usual "private" convention holds true here. -* Callables defined within Fabric itself. Fabric's own functions such as - `~fabric.operations.run` and `~fabric.operations.sudo` will not show up in - your task list. - -.. note:: - - To see exactly which callables in your fabfile may be executed via ``fab``, - use :option:`fab --list <-l>`. - -Imports -------- - -Python's ``import`` statement effectively includes the imported objects in your -module's namespace. Since Fabric's fabfiles are just Python modules, this means -that imports are also considered as possible tasks, alongside anything defined -in the fabfile itself. - -Because of this, we strongly recommend that you use the ``import module`` form -of importing, followed by ``module.callable()``, which will result in a cleaner -fabfile API than doing ``from module import callable``. - -For example, here's a sample fabfile which uses ``urllib.urlopen`` to get some -data out of a webservice:: - - from urllib import urlopen - - from fabric.api import run - - def webservice_read(): - objects = urlopen('http://my/web/service/?foo=bar').read().split() - print(objects) - -This looks simple enough, and will run without error. 
However, look what -happens if we run :option:`fab --list <-l>` on this fabfile:: - - $ fab --list - Available commands: - - webservice_read List some directories. - urlopen urlopen(url [, data]) -> open file-like object - -Our fabfile of only one task is showing two "tasks", which is bad enough, and -an unsuspecting user might accidentally try to call ``fab urlopen``, which -probably won't work very well. Imagine any real-world fabfile, which is likely -to be much more complex, and hopefully you can see how this could get messy -fast. - -For reference, here's the recommended way to do it:: - - import urllib - - from fabric.api import run - - def webservice_read(): - objects = urllib.urlopen('http://my/web/service/?foo=bar').read().split() - print(objects) - -It's a simple change, but it'll make anyone using your fabfile a bit happier. - +For details on what constitutes a Fabric task and how to organize them, please see :doc:`/usage/tasks`. Defining host lists =================== @@ -265,10 +199,10 @@ look up in ``env.roledefs``. Globally, via the command line ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ -In addition to modifying ``env.hosts`` and ``env.roles`` at the module level, -you may define them by passing comma-separated string arguments to the -command-line switches :option:`--hosts/-H <-H>` and :option:`--roles/-R <-R>`, -e.g.:: +In addition to modifying ``env.hosts``, ``env.roles``, and +``env.exclude_hosts`` at the module level, you may define them by passing +comma-separated string arguments to the command-line switches +:option:`--hosts/-H <-H>` and :option:`--roles/-R <-R>`, e.g.:: $ fab -H host1,host2 mytask @@ -298,7 +232,7 @@ instead:: run('ls /var/www') When this fabfile is run as ``fab -H host1,host2 mytask``, ``env.hosts`` will -end contain ``['host1', 'host2', 'host3', 'host4']`` at the time that +then contain ``['host1', 'host2', 'host3', 'host4']`` at the time that ``mytask`` is executed. .. 
note:: @@ -344,6 +278,7 @@ To specify per-task hosts for ``mytask``, execute it like so:: This will override any other host list and ensure ``mytask`` always runs on just those two hosts. + Per-task, via decorators ~~~~~~~~~~~~~~~~~~~~~~~~ @@ -424,6 +359,36 @@ Assuming no command-line hosts or roles are given when ``mytask`` is executed, this fabfile will call ``mytask`` on a host list of ``['a', 'b', 'c']`` -- the union of ``role1`` and the contents of the `~fabric.decorators.hosts` call. +.. _excluding-hosts: + +Excluding specific hosts +------------------------ + +At times, it is useful to exclude one or more specific hosts, e.g. to override +a few bad or otherwise undesirable hosts which are pulled in from a role or an +autogenerated host list. This may be accomplished globally with +:option:`--exclude-hosts/-x <-x>`:: + + $ fab -R myrole -x host2,host5 mytask + +If ``myrole`` was defined as ``['host1', 'host2', ..., 'host15']``, the above +invocation would run with an effective host list of ``['host1', 'host3', +'host4', 'host6', ..., 'host15']``. + + .. note:: + Using this option does not modify ``env.hosts`` -- it only causes the + main execution loop to skip the requested hosts. + +Exclusions may be specified per-task by using an extra ``exclude_hosts`` kwarg, +which is implemented similarly to the abovementioned ``hosts`` and ``roles`` +per-task kwargs, in that it is stripped from the actual task invocation. This +example would have the same result as the global exclude above:: + + $ fab -R myrole mytask:exclude_hosts="host2;host5" + +Note that the host list is semicolon-separated, just as with the ``hosts`` +per-task argument. + .. _failures: diff --git a/docs/usage/fab.rst b/docs/usage/fab.rst index 5dc2008dd1..96d48de9a2 100644 --- a/docs/usage/fab.rst +++ b/docs/usage/fab.rst @@ -90,6 +90,13 @@ below. .. versionadded:: 0.9.1 +.. 
cmdoption:: --abort-on-prompts + + Sets :ref:`env.abort_on_prompts ` to ``True``, forcing + Fabric to abort whenever it would prompt for input. + + .. versionadded:: 1.1 + .. cmdoption:: -c RCFILE, --config=RCFILE Sets :ref:`env.rcfile ` to the given file path, which Fabric will @@ -113,7 +120,17 @@ below. alternately an explicit file path to load as the fabfile (e.g. ``/path/to/my/fabfile.py``.) -.. seealso:: :doc:`fabfiles` + .. seealso:: :doc:`fabfiles` + +.. cmdoption:: -F LIST_FORMAT, --list-format=LIST_FORMAT + + Allows control over the output format of :option:`--list <-l>`. ``short`` is + equivalent to :option:`--shortlist`, ``normal`` is the same as simply + omitting this option entirely (i.e. the default), and ``nested`` prints out + a nested namespace tree. + + .. versionadded:: 1.1 + .. seealso:: :option:`--shortlist`, :option:`--list <-l>` .. cmdoption:: -h, --help @@ -131,6 +148,13 @@ below. Sets :ref:`env.hosts ` to the given comma-delimited list of host strings. +.. cmdoption:: -x HOSTS, --exclude-hosts=HOSTS + + Sets :ref:`env.exclude_hosts ` to the given comma-delimited + list of host strings to then keep out of the final host list. + + .. versionadded:: 1.1 + .. cmdoption:: -i KEY_FILENAME When set to a file path, will load the given file as an SSH identity file @@ -144,6 +168,12 @@ below. .. versionadded:: 0.9.1 +.. cmdoption:: --keepalive=KEEPALIVE + + Sets :ref:`env.keepalive ` to the given (integer) value, specifying an SSH keepalive interval. + + .. versionadded:: 1.1 + .. cmdoption:: -l, --list Imports a fabfile as normal, but then prints a list of all discovered tasks @@ -152,7 +182,7 @@ below. .. versionchanged:: 0.9.1 Added docstring to output. - .. seealso:: :option:`--shortlist` + .. seealso:: :option:`--shortlist`, :option:`--list-format <-F>` .. 
cmdoption:: -p PASSWORD, --password=PASSWORD diff --git a/docs/usage/fabfiles.rst b/docs/usage/fabfiles.rst index 4e742a4e7a..52c626ce72 100644 --- a/docs/usage/fabfiles.rst +++ b/docs/usage/fabfiles.rst @@ -88,4 +88,4 @@ Defining tasks and importing callables For important information on what exactly Fabric will consider as a task when it loads your fabfile, as well as notes on how best to import other code, -please see :ref:`tasks-and-imports` in the :doc:`execution` documentation. +please see :doc:`/usage/tasks` in the :doc:`execution` documentation. diff --git a/docs/usage/tasks.rst b/docs/usage/tasks.rst new file mode 100644 index 0000000000..ca7eb55c6d --- /dev/null +++ b/docs/usage/tasks.rst @@ -0,0 +1,345 @@ +============== +Defining tasks +============== + +As of Fabric 1.1, there are two distinct methods you may use in order to define +which objects in your fabfile show up as tasks: + +* The "new" method starting in 1.1 considers instances of `~fabric.tasks.Task` + or its subclasses, and also descends into imported modules to allow building + nested namespaces. +* The "classic" method from 1.0 and earlier considers all public callable + objects (functions, classes etc) and only considers the objects in the + fabfile itself with no recursing into imported modules. + +.. note:: + These two methods are **mutually exclusive**: if Fabric finds *any* + new-style task objects in your fabfile or in modules it imports, it will + assume you've committed to this method of task declaration and won't + consider any non-`~fabric.tasks.Task` callables. If *no* new-style tasks + are found, it reverts to the classic behavior. + +The rest of this document explores these two methods in detail. + +.. note:: + + To see exactly what tasks in your fabfile may be executed via ``fab``, use + :option:`fab --list <-l>`. + +.. 
_new-style-tasks: + +New-style tasks +=============== + +Fabric 1.1 introduced the `~fabric.tasks.Task` class to facilitate new features +and enable some programming best practices, specifically: + +* **Object-oriented tasks**. Inheritance and all that comes with it can make + for much more sensible code reuse than passing around simple function + objects. The classic style of task declaration didn't entirely rule this + out, but it also didn't make it terribly easy. +* **Namespaces**. Having an explicit method of declaring tasks makes it easier + to set up recursive namespaces without e.g. polluting your task list with the + contents of Python's ``os`` module (which would show up as valid "tasks" + under the classic methodology.) + +With the introduction of `~fabric.tasks.Task`, there are two ways to set up new +tasks: + +* Decorate a regular module level function with `~fabric.decorators.task`, + which transparently wraps the function in a `~fabric.tasks.Task` subclass. + The function name will be used as the task name when invoking. +* Subclass `~fabric.tasks.Task` (`~fabric.tasks.Task` itself is intended to be + abstract), define a ``run`` method, and instantiate your subclass at module + level. Instances' ``name`` attributes are used as the task name; if omitted + the instance's variable name will be used instead. + +Use of new-style tasks also allows you to set up task namespaces (see below.) + +The `~fabric.decorators.task` decorator is pretty straightforward, but using `~fabric.tasks.Task` is less obvious, so we'll cover it in detail here. + + +``Task`` subclasses +------------------- + +If you're used to :ref:`classic-style tasks `, an easy way to +think about `~fabric.tasks.Task` subclasses is that their ``run`` method is +directly equivalent to a classic task; its arguments are the task arguments +(other than ``self``) and its body is what gets executed. 
For example, this +new-style task:: + + class MyTask(Task): + name = "deploy" + def run(self, environment, domain="whatever.com"): + run("git clone foo") + sudo("service apache2 restart") + + instance = MyTask() + +is exactly equivalent to this function-based task (which, if you dropped the +``@task``, would also be a normal classic-style task):: + + @task + def deploy(environment, domain="whatever.com"): + run("git clone foo") + sudo("service apache2 restart") + +Note how we had to instantiate an instance of our class; that's simply normal +Python object-oriented programming at work. While it's a small bit of +boilerplate right now -- for example, Fabric doesn't care about the name you +give the instantiation, only the instance's ``name`` attribute -- it's well +worth the benefit of having the power of classes available. + +We may also extend the API in the future to make this experience a bit +smoother. + + +.. _namespaces: + +Namespaces +---------- + +With :ref:`classic tasks <classic-tasks>`, fabfiles were limited to a single, +flat set of task names with no real way to organize them. In Fabric 1.1 and +newer, if you declare tasks the new way (via `~fabric.decorators.task` or your +own `~fabric.tasks.Task` subclass instances) you may take advantage of +**namespacing**: + +* Any module objects imported into your fabfile will be recursed into, looking + for additional task objects. +* Within submodules, you may control which objects are "exported" by using the + standard Python ``__all__`` module-level variable name (though they should + still be valid new-style task objects.) +* These tasks will be given new dotted-notation names based on the modules they + came from, similar to Python's own import syntax. + +Let's build up a fabfile package from simple to complex and see how this works. + +Basic +~~~~~ + +We start with a single `__init__.py` containing a few tasks (the Fabric API +import omitted for brevity):: + + @task + def deploy(): + ... + + @task + def compress(): + ...
+ +The output of ``fab --list`` would look something like this:: + + deploy + compress + +There's just one namespace here: the "root" or global namespace. Looks simple +now, but in a real-world fabfile with dozens of tasks, it can get difficult to +manage. + +Importing a submodule +~~~~~~~~~~~~~~~~~~~~~ + +As mentioned above, Fabric will examine any imported module objects for tasks, +regardless of where that module exists on your Python import path. For now we +just want to include our own, "nearby" tasks, so we'll make a new submodule in +our package for dealing with, say, load balancers -- ``lb.py``:: + + @task + def add_backend(): + ... + +And we'll add this to the top of ``__init__.py``:: + + import lb + +Now ``fab --list`` shows us:: + + deploy + compress + lb.add_backend + +Again, with only one task in its own submodule, it looks kind of silly, but the +benefits should be pretty obvious. + +Going deeper +~~~~~~~~~~~~ + +Namespacing isn't limited to just one level. Let's say we had a larger setup +and wanted a namespace for database related tasks, with additional +differentiation inside that. We make a sub-package named ``db/`` and inside it, +a ``migrations.py`` module:: + + @task + def list(): + ... + + @task + def run(): + ... + +We need to make sure that this module is visible to anybody importing ``db``, +so we add it to the sub-package's ``__init__.py``:: + + import migrations + +As a final step, we import the sub-package into our root-level ``__init__.py``, +so now its first few lines look like this:: + + import lb + import db + +After all that, our file tree looks like this:: + + . + ├── __init__.py + ├── db + │   ├── __init__.py + │   └── migrations.py + └── lb.py + +and ``fab --list`` shows:: + + deploy + compress + lb.add_backend + db.migrations.list + db.migrations.run + +We could also have specified (or imported) tasks directly into +``db/__init__.py``, and they would show up as ``db.`` as you might +expect. 
+ +Limiting with ``__all__`` +~~~~~~~~~~~~~~~~~~~~~~~~~ + +You may limit what Fabric "sees" when it examines imported modules, by using +the Python convention of a module level ``__all__`` variable (a list of +variable names.) If we didn't want the ``db.migrations.run`` task to show up by +default for some reason, we could add this to the top of ``db/migrations.py``:: + + __all__ = ['list'] + +Note the lack of ``'run'`` there. You could, if needed, import ``run`` directly +into some other part of the hierarchy, but otherwise it'll remain hidden. + +Switching it up +~~~~~~~~~~~~~~~ + +We've been keeping our fabfile package neatly organized and importing it in a +straightforward manner, but the filesystem layout doesn't actually matter here. +All Fabric's loader cares about is the names the modules are given when they're +imported. + +For example, if we changed the top of our root ``__init__.py`` to look like +this:: + + import db as database + +Our task list would change thusly:: + + deploy + compress + lb.add_backend + database.migrations.list + database.migrations.run + +This applies to any other import -- you could import third party modules into +your own task hierarchy, or grab a deeply nested module and make it appear near +the top level. + +Nested list output +~~~~~~~~~~~~~~~~~~ + +As a final note, we've been using the default Fabric :option:`--list <-l>` +output during this section -- it makes it more obvious what the actual task +names are. However, you can get a more nested or tree-like view by passing +``nested`` to the :option:`--list-format <-F>` option:: + + $ fab --list-format=nested --list + Available commands (remember to call as module.[...].task): + + deploy + compress + lb: + add_backend + database: + migrations: + list + run + +While it slightly obfuscates the "real" task names, this view provides a handy +way of noting the organization of tasks in large namespaces. + + +.. 
_classic-tasks: + +Classic tasks +============= + +When no new-style `~fabric.tasks.Task`-based tasks are found, Fabric will +consider any callable object found in your fabfile, **except** the following: + +* Callables whose name starts with an underscore (``_``). In other words, + Python's usual "private" convention holds true here. +* Callables defined within Fabric itself. Fabric's own functions such as + `~fabric.operations.run` and `~fabric.operations.sudo` will not show up in + your task list. + + +Imports +------- + +Python's ``import`` statement effectively includes the imported objects in your +module's namespace. Since Fabric's fabfiles are just Python modules, this means +that imports are also considered as possible classic-style tasks, alongside +anything defined in the fabfile itself. + + .. note:: + This only applies to imported *callable objects* -- not modules. + Imported modules only come into play if they contain :ref:`new-style + tasks `, at which point this section no longer + applies. + +Because of this, we strongly recommend that you use the ``import module`` form +of importing, followed by ``module.callable()``, which will result in a cleaner +fabfile API than doing ``from module import callable``. + +For example, here's a sample fabfile which uses ``urllib.urlopen`` to get some +data out of a webservice:: + + from urllib import urlopen + + from fabric.api import run + + def webservice_read(): + objects = urlopen('http://my/web/service/?foo=bar').read().split() + print(objects) + +This looks simple enough, and will run without error. However, look what +happens if we run :option:`fab --list <-l>` on this fabfile:: + + $ fab --list + Available commands: + + webservice_read List some directories. + urlopen urlopen(url [, data]) -> open file-like object + +Our fabfile of only one task is showing two "tasks", which is bad enough, and +an unsuspecting user might accidentally try to call ``fab urlopen``, which +probably won't work very well. 
Imagine any real-world fabfile, which is likely +to be much more complex, and hopefully you can see how this could get messy +fast. + +For reference, here's the recommended way to do it:: + + import urllib + + from fabric.api import run + + def webservice_read(): + objects = urllib.urlopen('http://my/web/service/?foo=bar').read().split() + print(objects) + +It's a simple change, but it'll make anyone using your fabfile a bit happier. diff --git a/fabfile.py b/fabfile/__init__.py similarity index 82% rename from fabfile.py rename to fabfile/__init__.py index d9bc12b634..c0739b416d 100644 --- a/fabfile.py +++ b/fabfile/__init__.py @@ -7,16 +7,16 @@ import nose from fabric.api import * -from fabric.contrib.project import rsync_project # Need to import this as fabric.version for reload() purposes import fabric.version # But nothing is stopping us from making a convenient binding! _version = fabric.version.get_version -docs_host = 'jforcier@fabfile.org' +import docs +@task def test(args=None): """ Run all unit tests and doctests. @@ -31,33 +31,10 @@ def test(args=None): abort("Nose encountered an error; you may be missing newly added test dependencies. Try running 'pip install -r requirements.txt'.") -def build_docs(clean='no', browse='no'): - """ - Generate the Sphinx documentation. - """ - c = "" - if clean.lower() in ['yes', 'y']: - c = "clean " - b = "" - if browse.lower() in ['yes', 'y']: - b = " && open _build/html/index.html" - local('cd docs; make %shtml%s' % (c, b)) - - -@hosts(docs_host) -def push_docs(): - """ - Build docs and zip for upload to RTD - """ - build_docs(clean='yes') - v = _version('short') - local("cd docs/_build/html && zip -r ../%s.zip ." % v) - - -def _code_version_is_tagged(): +def code_version_is_tagged(): return local('git tag | egrep "^%s$"' % _version('short')) -def _update_code_version(force): +def update_code_version(force): """ Update version data structure in-code and commit that change to git. 
@@ -76,12 +53,14 @@ def _update_code_version(force): local("git add %s" % version_file) local("git commit -m \"Cut %s\"" % _version('verbose')) -def _commits_since_tag(): +def commits_since_tag(): """ Has any work been done since the last tag? """ return local("git log %s.." % _version('short')) + +@task def tag(force='no', push='no'): """ Tag a new release. @@ -101,12 +80,12 @@ def tag(force='no', push='no'): # Does the current in-code version exist as a Git tag already? # If so, this means we haven't updated the in-code version specifier # yet, and need to do so. - if _code_version_is_tagged(): + if code_version_is_tagged(): # That is, if any work has been done since. Sanity check! - if not _commits_since_tag() and not force: + if not commits_since_tag() and not force: abort("No work done since last tag!") # Open editor, update version, commit that change to Git. - _update_code_version(force) + update_code_version(force) # If the tag doesn't exist, the user has already updated version info # and we can just move on. else: @@ -124,6 +103,7 @@ def tag(force='no', push='no'): local("git push origin %s" % _version('short')) +@task def build(): """ Build (but don't upload) via setup.py @@ -131,6 +111,7 @@ def build(): local('python setup.py sdist') +@task def upload(): """ Build, register and upload to PyPI @@ -138,6 +119,7 @@ def upload(): local('python setup.py sdist register upload') +@task def release(force='no'): """ Tag/push, build, upload new version and build/upload documentation. diff --git a/fabfile/docs.py b/fabfile/docs.py new file mode 100644 index 0000000000..5fbd0e710d --- /dev/null +++ b/fabfile/docs.py @@ -0,0 +1,42 @@ +from __future__ import with_statement + +from fabric.api import * +from fabric.contrib.project import rsync_project +from fabric.version import get_version + + +docs_host = 'jforcier@fabfile.org' + + +@task +def build(clean='no', browse_='no'): + """ + Generate the Sphinx documentation. 
+ """ + c = "" + if clean.lower() in ['yes', 'y']: + c = "clean " + b = "" + with lcd('docs'): + local('make %shtml%s' % (c, b)) + if browse_.lower() in ['yes', 'y']: + browse() + + +@task +def browse(): + """ + Open the current dev docs in a browser tab. + """ + local("open docs/_build/html/index.html") + + +@task +@hosts(docs_host) +def push(): + """ + Build docs and zip for upload to RTD + """ + build(clean='yes') + v = get_version('short') + local("cd docs/_build/html && zip -r ../%s.zip ." % v) diff --git a/fabric/api.py b/fabric/api.py index 5f4763fc0c..8a3445509b 100644 --- a/fabric/api.py +++ b/fabric/api.py @@ -7,7 +7,7 @@ well when you're using setup.py to install e.g. paramiko! """ from fabric.context_managers import cd, hide, settings, show, path, prefix, lcd -from fabric.decorators import hosts, roles, runs_once +from fabric.decorators import hosts, roles, runs_once, with_settings, task from fabric.operations import (require, prompt, put, get, run, sudo, local, reboot, open_shell) from fabric.state import env, output diff --git a/fabric/auth.py b/fabric/auth.py index 7e770c6e6b..3f5975f299 100644 --- a/fabric/auth.py +++ b/fabric/auth.py @@ -7,6 +7,7 @@ def get_password(): from fabric.state import env return env.passwords.get(env.host_string, env.password) + def set_password(password): from fabric.state import env env.password = env.passwords[env.host_string] = password diff --git a/fabric/colors.py b/fabric/colors.py index 4894e661ac..88736fb605 100644 --- a/fabric/colors.py +++ b/fabric/colors.py @@ -16,14 +16,17 @@ from fabric.colors import red, green - print(red("This sentence is red, except for " + green("these words, which are green") + ".")) + print(red("This sentence is red, except for " + \ + green("these words, which are green") + ".")) If ``bold`` is set to ``True``, the ANSI flag for bolding will be flipped on for that particular invocation, which usually shows up as a bold or brighter version of the original color on most terminals. 
""" + def _wrap_with(code): + def inner(text, bold=False): c = code if bold: diff --git a/fabric/context_managers.py b/fabric/context_managers.py index 964e2573bf..8ddd5bbe41 100644 --- a/fabric/context_managers.py +++ b/fabric/context_managers.py @@ -143,12 +143,15 @@ def my_task(): def cd(path): """ - Context manager that keeps directory state when calling operations. + Context manager that keeps directory state when calling remote operations. Any calls to `run`, `sudo`, `get`, or `put` within the wrapped block will implicitly have a string similar to ``"cd && "`` prefixed in order - to give the sense that there is actually statefulness involved. `cd` only - affects the remote paths for `get` and `put` -- local paths are untouched. + to give the sense that there is actually statefulness involved. + + .. note:: + `cd` only affects *remote* paths -- to modify *local* paths, use + `~fabric.context_managers.lcd`. Because use of `cd` affects all such invocations, any code making use of those operations, such as much of the ``contrib`` section, will also be @@ -195,6 +198,8 @@ def cd(path): .. versionchanged:: 1.0 Applies to `get` and `put` in addition to the command-running operations. + + .. seealso:: `~fabric.context_managers.lcd` """ return _change_cwd('cwd', path) diff --git a/fabric/contrib/files.py b/fabric/contrib/files.py index d21315c072..fe0f006a32 100644 --- a/fabric/contrib/files.py +++ b/fabric/contrib/files.py @@ -6,8 +6,10 @@ import hashlib import tempfile +import types import re import os +from StringIO import StringIO from fabric.api import * @@ -49,7 +51,8 @@ def first(*args, **kwargs): def upload_template(filename, destination, context=None, use_jinja=False, - template_dir=None, use_sudo=False): + template_dir=None, use_sudo=False, backup=True, mirror_local_mode=False, + mode=None): """ Render and upload a template text file to a remote host. 
@@ -62,24 +65,37 @@ def upload_template(filename, destination, context=None, use_jinja=False, templating library available, Jinja will be used to render the template instead. Templates will be loaded from the invoking user's current working directory by default, or from ``template_dir`` if given. - + The resulting rendered file will be uploaded to the remote file path - ``destination`` (which should include the desired remote filename.) If the - destination file already exists, it will be renamed with a ``.bak`` - extension. + ``destination``. If the destination file already exists, it will be + renamed with a ``.bak`` extension unless ``backup=False`` is specified. By default, the file will be copied to ``destination`` as the logged-in user; specify ``use_sudo=True`` to use `sudo` instead. + + The ``mirror_local_mode`` and ``mode`` kwargs are passed directly to an + internal `~fabric.operations.put` call; please see its documentation for + details on these two options. + + .. versionchanged:: 1.1 + Added the ``backup``, ``mirror_local_mode`` and ``mode`` kwargs. """ - basename = os.path.basename(filename) - temp_destination = '/tmp/' + basename - - # This temporary file should not be automatically deleted on close, as we - # need it there to upload it (Windows locks the file for reading while - # open). 
- tempfile_fd, tempfile_name = tempfile.mkstemp() - output = open(tempfile_name, "w+b") - # Init + func = use_sudo and sudo or run + # Normalize destination to be an actual filename, due to using StringIO + with settings(hide('everything'), warn_only=True): + if func('test -d %s' % destination).succeeded: + sep = "" if destination.endswith('/') else "/" + destination += sep + os.path.basename(filename) + + # Use mode kwarg to implement mirror_local_mode, again due to using + # StringIO + if mirror_local_mode and mode is None: + mode = os.stat(filename).st_mode + # To prevent put() from trying to do this + # logic itself + mirror_local_mode = False + + # Process template text = None if use_jinja: try: @@ -93,33 +109,27 @@ def upload_template(filename, destination, context=None, use_jinja=False, text = inputfile.read() if context: text = text % context - output.write(text) - output.close() - # Upload the file. - put(tempfile_name, temp_destination) - os.close(tempfile_fd) - os.remove(tempfile_name) + # Back up original file + if backup and exists(destination): + func("cp %s{,.bak}" % destination) - func = use_sudo and sudo or run - # Back up any original file (need to do figure out ultimate destination) - to_backup = destination - with settings(hide('everything'), warn_only=True): - # Is destination a directory? - if func('test -f %s' % to_backup).failed: - # If so, tack on the filename to get "real" destination - to_backup = destination + '/' + basename - if exists(to_backup): - func("cp %s %s.bak" % (to_backup, to_backup)) - # Actually move uploaded template to destination - func("mv %s %s" % (temp_destination, destination)) + # Upload the file. 
+ put( + local_path=StringIO(text), + remote_path=destination, + use_sudo=use_sudo, + mirror_local_mode=mirror_local_mode, + mode=mode + ) -def sed(filename, before, after, limit='', use_sudo=False, backup='.bak'): +def sed(filename, before, after, limit='', use_sudo=False, backup='.bak', + flags=''): """ Run a search-and-replace on ``filename`` with given regex patterns. - Equivalent to ``sed -i -r -e "// s///g + Equivalent to ``sed -i -r -e "// s///g "``. For convenience, ``before`` and ``after`` will automatically escape forward @@ -131,6 +141,14 @@ def sed(filename, before, after, limit='', use_sudo=False, backup='.bak'): `sed` will pass ``shell=False`` to `run`/`sudo`, in order to avoid problems with many nested levels of quotes and backslashes. + + Other options may be specified with sed-compatible regex flags -- for + example, to make the search and replace case insensitive, specify + ``flags="i"``. The ``g`` flag is always specified regardless, so you do not + need to remember to include it when overriding this parameter. + + .. versionadded:: 1.1 + The ``flags`` parameter. 
""" func = use_sudo and sudo or run # Characters to be escaped in both @@ -144,6 +162,7 @@ def sed(filename, before, after, limit='', use_sudo=False, backup='.bak'): if limit: limit = r'/%s/ ' % limit # Test the OS because of differences between sed versions + with hide('running', 'stdout'): platform = run("uname") if platform in ('NetBSD', 'OpenBSD'): @@ -154,13 +173,13 @@ def sed(filename, before, after, limit='', use_sudo=False, backup='.bak'): tmp = "/tmp/%s" % hasher.hexdigest() # Use temp file to work around lack of -i expr = r"""cp -p %(filename)s %(tmp)s \ -&& sed -r -e '%(limit)ss/%(before)s/%(after)s/g' %(filename)s > %(tmp)s \ +&& sed -r -e '%(limit)ss/%(before)s/%(after)s/%(flags)sg' %(filename)s > %(tmp)s \ && cp -p %(filename)s %(filename)s%(backup)s \ && mv %(tmp)s %(filename)s""" command = expr % locals() else: - expr = r"sed -i%s -r -e '%ss/%s/%s/g' %s" - command = expr % (backup, limit, before, after, filename) + expr = r"sed -i%s -r -e '%ss/%s/%s/%sg' %s" + command = expr % (backup, limit, before, after, flags, filename) return func(command, shell=False) @@ -211,7 +230,7 @@ def comment(filename, regex, use_sudo=False, char='#', backup='.bak'): sometimes do when inserted by hand. Neither will they have a trailing space unless you specify e.g. ``char='# '``. - .. note:: + .. note:: In order to preserve the line being commented out, this function will wrap your ``regex`` argument in parentheses, so you don't need to. 
It @@ -264,7 +283,7 @@ def contains(filename, text, exact=False, use_sudo=False): return func('egrep "%s" "%s"' % ( text.replace('"', r'\"'), filename.replace('"', r'\"') - )) + )).succeeded def append(filename, text, use_sudo=False, partial=False, escape=True): @@ -299,7 +318,7 @@ def append(filename, text, use_sudo=False, partial=False, escape=True): """ func = use_sudo and sudo or run # Normalize non-list input to be a list - if isinstance(text, str): + if isinstance(text, types.StringTypes): text = [text] for line in text: regex = '^' + re.escape(line) + ('' if partial else '$') diff --git a/fabric/contrib/project.py b/fabric/contrib/project.py index d2c6cfdc14..aa6a65b2ee 100644 --- a/fabric/contrib/project.py +++ b/fabric/contrib/project.py @@ -3,12 +3,15 @@ """ from os import getcwd, sep +import os.path from datetime import datetime +from tempfile import mkdtemp from fabric.network import needs_host from fabric.operations import local, run, put from fabric.state import env, output +__all__ = ['rsync_project', 'upload_project'] @needs_host def rsync_project(remote_dir, local_dir=None, exclude=(), delete=False, @@ -33,13 +36,26 @@ def rsync_project(remote_dir, local_dir=None, exclude=(), delete=False, ``rsync_project()`` takes the following parameters: * ``remote_dir``: the only required parameter, this is the path to the - **parent** directory on the remote server; the project directory will be - created inside this directory. For example, if one's project directory is - named ``myproject`` and one invokes ``rsync_project('/home/username/')``, - the resulting project directory will be ``/home/username/myproject/``. + directory on the remote server. Due to how ``rsync`` is implemented, the + exact behavior depends on the value of ``local_dir``: + + * If ``local_dir`` ends with a trailing slash, the files will be + dropped inside of ``remote_dir``. E.g. 
+ ``rsync_project("/home/username/project", "foldername/")`` will drop + the contents of ``foldername`` inside of ``/home/username/project``. + * If ``local_dir`` does **not** end with a trailing slash (and this + includes the default scenario, when ``local_dir`` is not specified), + ``remote_dir`` is effectively the "parent" directory, and a new + directory named after ``local_dir`` will be created inside of it. So + ``rsync_project("/home/username", "foldername")`` would create a new + directory ``/home/username/foldername`` (if needed) and place the + files there. + * ``local_dir``: by default, ``rsync_project`` uses your current working - directory as the source directory; you may override this with - ``local_dir``, which should be a directory path. + directory as the source directory. This may be overridden by specifying + ``local_dir``, which is a string passed verbatim to ``rsync``, and thus + may be a single directory (``"my_directory"``) or multiple directories + (``"dir1 dir2"``). See the ``rsync`` documentation for details. * ``exclude``: optional, may be a single string, or an iterable of strings, and is used to pass one or more ``--exclude`` options to ``rsync``. * ``delete``: a boolean controlling whether ``rsync``'s ``--delete`` option @@ -100,23 +116,42 @@ def rsync_project(remote_dir, local_dir=None, exclude=(), delete=False, return local(cmd) -def upload_project(): +def upload_project(local_dir=None, remote_dir=""): """ - Upload the current project to a remote system, tar/gzipping during the move. - - This function makes use of the ``/tmp/`` directory and the ``tar`` and - ``gzip`` programs/libraries; thus it will not work too well on Win32 - systems unless one is using Cygwin or something similar. - - ``upload_project`` will attempt to clean up the tarfiles when it finishes - executing. + Upload the current project to a remote system via ``tar``/``gzip``. 
+ + ``local_dir`` specifies the local project directory to upload, and defaults + to the current working directory. + + ``remote_dir`` specifies the target directory to upload into (meaning that + a copy of ``local_dir`` will appear as a subdirectory of ``remote_dir``) + and defaults to the remote user's home directory. + + This function makes use of the ``tar`` and ``gzip`` programs/libraries, + thus it will not work too well on Win32 systems unless one is using Cygwin + or something similar. It will attempt to clean up the local and remote + tarfiles when it finishes executing, even in the event of a failure. + + .. versionchanged:: 1.1 + Added the ``local_dir`` and ``remote_dir`` kwargs. """ - tar_file = "/tmp/fab.%s.tar" % datetime.utcnow().strftime( - '%Y_%m_%d_%H-%M-%S') - cwd_name = getcwd().split(sep)[-1] - tgz_name = cwd_name + ".tar.gz" - local("tar -czf %s ." % tar_file) - put(tar_file, cwd_name + ".tar.gz") - local("rm -f " + tar_file) - run("tar -xzf " + tgz_name) - run("rm -f " + tgz_name) + local_dir = local_dir or os.getcwd() + + # Remove final '/' in local_dir so that basename() works + local_dir = local_dir.rstrip(os.sep) + + local_path, local_name = os.path.split(local_dir) + tar_file = "%s.tar.gz" % local_name + target_tar = os.path.join(remote_dir, tar_file) + tmp_folder = mkdtemp() + + try: + tar_path = os.path.join(tmp_folder, tar_file) + local("tar -czf %s -C %s %s" % (tar_path, local_path, local_name)) + put(tar_path, target_tar) + try: + run("tar -xzf %s" % tar_file) + finally: + run("rm -f %s" % tar_file) + finally: + local("rm -rf %s" % tmp_folder) diff --git a/fabric/decorators.py b/fabric/decorators.py index e68eeecf75..b7a4bbcde8 100644 --- a/fabric/decorators.py +++ b/fabric/decorators.py @@ -1,10 +1,21 @@ """ Convenience decorators for use in fabfiles. 
""" +from __future__ import with_statement from functools import wraps from types import StringTypes +from fabric import tasks +from .context_managers import settings + + +def task(func): + """ + Decorator declaring the wrapped function as a :ref:`new-style task `. + """ + return tasks.WrappedCallableTask(func) + def hosts(*host_list): """ @@ -29,6 +40,7 @@ def my_func(): Allow a single, iterable argument (``@hosts(iterable)``) to be used instead of requiring ``@hosts(*iterable)``. """ + def attach_hosts(func): @wraps(func) def inner_decorator(*args, **kwargs): @@ -100,3 +112,29 @@ def decorated(*args, **kwargs): decorated.return_value = func(*args, **kwargs) return decorated.return_value return decorated + + +def with_settings(**kw_settings): + """ + Decorator equivalent of ``fabric.context_managers.settings``. + + Allows you to wrap an entire function as if it was called inside a block + with the ``settings`` context manager. This may be useful if you know you + want a given setting applied to an entire function body, or wish to + retrofit old code without indenting everything. + + For example, to turn aborts into warnings for an entire task function:: + + @with_settings(warn_only=True) + def foo(): + ... + + .. seealso:: `~fabric.context_managers.settings` + .. 
versionadded:: 1.1 + """ + def outer(func): + def inner(*args, **kwargs): + with settings(**kw_settings): + return func(*args, **kwargs) + return inner + return outer diff --git a/fabric/io.py b/fabric/io.py index 85f52b8c70..f4cc268275 100644 --- a/fabric/io.py +++ b/fabric/io.py @@ -20,7 +20,7 @@ def _flush(pipe, text): def _endswith(char_list, substring): - tail = char_list[-1*len(substring):] + tail = char_list[-1 * len(substring):] substring = list(substring) return tail == substring @@ -80,7 +80,7 @@ def output_loop(chan, which, capture): # backwards compatible with Fabric 0.9.x behavior; the user # will still see the prompt on their screen (no way to avoid # this) but at least it won't clutter up the captured text. - del capture[-1*len(env.sudo_prompt):] + del capture[-1 * len(env.sudo_prompt):] # If the password we just tried was bad, prompt the user again. if (not password) or reprompt: # Print the prompt and/or the "try again" notice if @@ -95,9 +95,11 @@ def output_loop(chan, which, capture): # Prompt for, and store, password. Give empty prompt so the # initial display "hides" just after the actually-displayed # prompt from the remote end. + chan.input_enabled = False password = fabric.network.prompt_for_password( prompt=" ", no_colon=True, stream=pipe ) + chan.input_enabled = True # Update env.password, env.passwords if necessary set_password(password) # Reset reprompt flag @@ -118,7 +120,7 @@ def input_loop(chan, using_pty): else: r, w, x = select([sys.stdin], [], [], 0.0) have_char = (r and r[0] == sys.stdin) - if have_char: + if have_char and chan.input_enabled: # Send all local stdin to remote end's stdin byte = msvcrt.getch() if win32 else sys.stdin.read(1) chan.sendall(byte) diff --git a/fabric/main.py b/fabric/main.py index 25a66d02e3..d99059ee92 100644 --- a/fabric/main.py +++ b/fabric/main.py @@ -9,16 +9,18 @@ to individuals leveraging Fabric as a library, should be kept elsewhere. 
""" -from operator import add +from collections import defaultdict +from operator import add, isMappingType from optparse import OptionParser import os import sys +import types -from fabric import api # For checking callables against the API -from fabric.contrib import console, files, project # Ditto +from fabric import api, state # For checking callables against the API, & easy mocking +from fabric.contrib import console, files, project # Ditto from fabric.network import denormalize, interpret_host_string, disconnect_all -from fabric import state # For easily-mockable access to roles, env and etc from fabric.state import commands, connections, env_options +from fabric.tasks import Task from fabric.utils import abort, indent from fabric.logger import logger @@ -31,6 +33,26 @@ [] ) +# Module recursion cache +class _ModuleCache(object): + """ + Set-like object operating on modules and storing __name__s internally. + """ + def __init__(self): + self.cache = set() + + def __contains__(self, value): + return value.__name__ in self.cache + + def add(self, value): + return self.cache.add(value.__name__) + + def clear(self): + return self.cache.clear() + +_seen = _ModuleCache() + + def load_settings(path): """ Take given file path and return dictionary of any key=value pairs found. 
@@ -140,9 +162,76 @@ def load_fabfile(path, importer=None): if index is not None: sys.path.insert(index + 1, directory) del sys.path[0] - # Return our two-tuple - tasks = dict(filter(is_task, vars(imported).items())) - return imported.__doc__, tasks + + # Actually load tasks + docstring, new_style, classic = load_tasks_from_module(imported) + tasks = new_style if state.env.new_style_tasks else classic + # Clean up after ourselves + _seen.clear() + return docstring, tasks + + +def load_tasks_from_module(imported): + """ + Handles loading all of the tasks for a given `imported` module + """ + # Obey the use of .__all__ if it is present + imported_vars = vars(imported) + if "__all__" in imported_vars: + imported_vars = [(name, imported_vars[name]) for name in \ + imported_vars if name in imported_vars["__all__"]] + else: + imported_vars = imported_vars.items() + # Return a two-tuple value. First is the documentation, second is a + # dictionary of callables only (and don't include Fab operations or + # underscored callables) + new_style, classic = extract_tasks(imported_vars) + return imported.__doc__, new_style, classic + + +def extract_tasks(imported_vars): + """ + Handle extracting tasks from a given list of variables + """ + new_style_tasks = defaultdict(dict) + classic_tasks = {} + if 'new_style_tasks' not in state.env: + state.env.new_style_tasks = False + for tup in imported_vars: + name, obj = tup + if is_task_object(obj): + state.env.new_style_tasks = True + new_style_tasks[obj.name] = obj + elif is_task(tup): + classic_tasks[name] = obj + elif is_task_module(obj): + docs, newstyle, classic = load_tasks_from_module(obj) + for task_name, task in newstyle.items(): + new_style_tasks[name][task_name] = task + return (new_style_tasks, classic_tasks) + + +def is_task_module(a): + """ + Determine if the provided value is a task module + """ + #return (type(a) is types.ModuleType and + # any(map(is_task_object, vars(a).values()))) + if type(a) is types.ModuleType and 
a not in _seen: + # Flag module as seen + _seen.add(a) + # Signal that we need to check it out + return True + + +def is_task_object(a): + """ + Determine if the provided value is a ``Task`` object. + + This returning True signals that all tasks within the fabfile + module must be Task objects. + """ + return isinstance(a, Task) and a.use_task_objects def parse_options(): @@ -159,7 +248,8 @@ def parse_options(): # # Define options that don't become `env` vars (typically ones which cause - # Fabric to do something other than its normal execution, such as --version) + # Fabric to do something other than its normal execution, such as + # --version) # # Version number (optparse gives you --version but we have to do it @@ -184,7 +274,15 @@ def parse_options(): action='store_true', dest='shortlist', default=False, - help="print non-verbose list of possible commands and exit" + help="alias for -F short --list" + ) + + # Control behavior of --list + LIST_FORMAT_OPTIONS = ('short', 'normal', 'nested') + parser.add_option('-F', '--list-format', + choices=LIST_FORMAT_OPTIONS, + default='normal', + help="formats --list, choices: %s" % ", ".join(LIST_FORMAT_OPTIONS) ) # Display info about a specific command @@ -208,31 +306,68 @@ def parse_options(): opts, args = parser.parse_args() return parser, opts, args +def _sift_tasks(mapping): + tasks, collections = [], [] + for name, value in mapping.iteritems(): + (collections if isMappingType(value) else tasks).append(name) + tasks = sorted(tasks) + collections = sorted(collections) + return tasks, collections -def _command_names(): - return sorted(commands.keys()) - +def _task_names(mapping): + """ + Flatten & sort task names in a breadth-first fashion. -def list_commands(docstring): + Tasks are always listed before submodules at the same level, but within + those two groups, sorting is alphabetical. """ - Print all found commands/tasks, then exit. 
Invoked with ``-l/--list.`` + tasks, collections = _sift_tasks(mapping) + for collection in collections: + module = mapping[collection] + join = lambda x: ".".join((collection, x)) + tasks.extend(map(join, _task_names(module))) + return tasks - If ``docstring`` is non-empty, it will be printed before the task list. +def _crawl(name, mapping): """ if docstring: trailer = "\n" if not docstring.endswith("\n") else "" logger.info(docstring + trailer) logger.info("Available commands:\n") + ``name`` of ``'a.b.c'`` => ``mapping['a']['b']['c']`` + """ + key, _, rest = name.partition('.') + value = mapping[key] + if not rest: + return value + return _crawl(rest, value) + +def crawl(name, mapping): + try: + return _crawl(name, mapping) + except (KeyError, TypeError): + return None + +def _print_docstring(docstrings, name): + if not docstrings: + return False + docstring = crawl(name, state.commands).__doc__ + if type(docstring) in types.StringTypes: + return docstring + + +def _normal_list(docstrings=True): + result = [] + task_names = _task_names(state.commands) # Want separator between name, description to be straight col - max_len = reduce(lambda a, b: max(a, len(b)), commands.keys(), 0) + max_len = reduce(lambda a, b: max(a, len(b)), task_names, 0) sep = ' ' trail = '...' 
- for name in _command_names(): + for name in task_names: output = None - # Print first line of docstring - func = commands[name] - if func.__doc__: - lines = filter(None, func.__doc__.splitlines()) + docstring = _print_docstring(docstrings, name) + if docstring: + lines = filter(None, docstring.splitlines()) first_line = lines[0].strip() # Truncate it if it's longer than N chars size = 75 - (max_len + len(sep) + len(trail)) @@ -242,26 +377,60 @@ def list_commands(docstring): # Or nothing (so just the name) else: output = name - logger.info(indent(output)) - sys.exit(0) + result.append(indent(output)) + return result + +def _nested_list(mapping, level=1): + result = [] + tasks, collections = _sift_tasks(mapping) + # Tasks come first + result.extend(map(lambda x: indent(x, spaces=level * 4), tasks)) + for collection in collections: + module = mapping[collection] + # Section/module "header" + result.append(indent(collection + ":", spaces=level * 4)) + # Recurse + result.extend(_nested_list(module, level + 1)) + return result + +COMMANDS_HEADER = "Available commands" +NESTED_REMINDER = " (remember to call as module.[...].task)" + +def list_commands(docstring, format_): + """ + Print all found commands/tasks, then exit. Invoked with ``-l/--list.`` + If ``docstring`` is non-empty, it will be printed before the task list. -def shortlist(): - """ - Print all task names separated by newlines with no embellishment. + ``format_`` should conform to the options specified in + ``LIST_FORMAT_OPTIONS``, e.g. ``"short"``, ``"normal"``. 
""" - logger.info("\n".join(_command_names())) - sys.exit(0) - + # Short-circuit with simple short output + if format_ == "short": + return _task_names(state.commands) + # Otherwise, handle more verbose modes + result = [] + # Docstring at top, if applicable + if docstring: + trailer = "\n" if not docstring.endswith("\n") else "" + result.append(docstring + trailer) + header = COMMANDS_HEADER + if format_ == "nested": + header += NESTED_REMINDER + result.append(header + ":\n") + c = _normal_list() if format_ == "normal" else _nested_list(state.commands) + result.extend(c) + return result -def display_command(command): +def display_command(name): """ Print command function's docstring, then exit. Invoked with -d/--display. """ # Sanity check - if command not in commands: - abort("Command '%s' not found, exiting." % command) - cmd = commands[command] + command = crawl(name, state.commands) + if command is None: + msg = "Task '%s' does not appear to exist. Valid task names:\n%s" + abort(msg % (name, "\n".join(_normal_list(False)))) # Print out nicely presented docstring if found if cmd.__doc__: logger.info("Displaying detailed information for command '%s':" % command) @@ -287,7 +456,7 @@ def _escape_split(sep, argstr): return argstr.split(sep) before, _, after = argstr.partition(escaped_sep) - startlist = before.split(sep) # a regular split is fine here + startlist = before.split(sep) # a regular split is fine here unfinished = startlist[-1] startlist = startlist[:-1] @@ -298,7 +467,7 @@ def _escape_split(sep, argstr): # part of the string sent in recursion is the rest of the escaped value. 
unfinished += sep + endlist[0] - return startlist + [unfinished] + endlist[1:] # put together all the parts + return startlist + [unfinished] + endlist[1:] # put together all the parts def parse_arguments(arguments): @@ -313,13 +482,14 @@ def parse_arguments(arguments): kwargs = {} hosts = [] roles = [] + exclude_hosts = [] if ':' in cmd: cmd, argstr = cmd.split(':', 1) for pair in _escape_split(',', argstr): k, _, v = pair.partition('=') if _: - # Catch, interpret host/hosts/role/roles kwargs - if k in ['host', 'hosts', 'role', 'roles']: + # Catch, interpret host/hosts/role/roles/exclude_hosts kwargs + if k in ['host', 'hosts', 'role', 'roles','exclude_hosts']: if k == 'host': hosts = [v.strip()] elif k == 'hosts': @@ -328,12 +498,14 @@ def parse_arguments(arguments): roles = [v.strip()] elif k == 'roles': roles = [x.strip() for x in v.split(';')] + elif k == 'exclude_hosts': + exclude_hosts = [x.strip() for x in v.split(';')] # Otherwise, record as usual else: kwargs[k] = v else: args.append(k) - cmds.append((cmd, args, kwargs, hosts, roles)) + cmds.append((cmd, args, kwargs, hosts, roles, exclude_hosts)) return cmds @@ -344,7 +516,7 @@ def parse_remainder(arguments): return ' '.join(arguments) -def _merge(hosts, roles): +def _merge(hosts, roles, exclude=[]): """ Merge given host and role lists into one list of deduped hosts. """ @@ -363,9 +535,15 @@ def _merge(hosts, roles): if callable(value): value = value() role_hosts += value - # Return deduped combo of hosts and role_hosts - return list(set(_clean_hosts(hosts + role_hosts))) + # Return deduped combo of hosts and role_hosts, preserving order within + # them (vs using set(), which may lose ordering). 
+ cleaned_hosts = _clean_hosts(list(hosts) + list(role_hosts)) + all_hosts = [] + for host in cleaned_hosts: + if host not in all_hosts: + all_hosts.append(host) + return all_hosts def _clean_hosts(host_list): """ @@ -373,8 +551,7 @@ def _clean_hosts(host_list): """ return [host.strip() for host in host_list] - -def get_hosts(command, cli_hosts, cli_roles): +def get_hosts(command, cli_hosts, cli_roles, cli_exclude_hosts): """ Return the host list the given command should be using. @@ -383,18 +560,18 @@ def get_hosts(command, cli_hosts, cli_roles): """ # Command line per-command takes precedence over anything else. if cli_hosts or cli_roles: - return _merge(cli_hosts, cli_roles) + return _merge(cli_hosts, cli_roles, cli_exclude_hosts) # Decorator-specific hosts/roles go next func_hosts = getattr(command, 'hosts', []) func_roles = getattr(command, 'roles', []) + func_exclude_hosts = getattr(command, 'exclude_hosts', []) if func_hosts or func_roles: - return _merge(func_hosts, func_roles) + return _merge(func_hosts, func_roles, func_exclude_hosts) # Finally, the env is checked (which might contain globally set lists from # the CLI or from module-level code). This will be the empty list if these # have not been set -- which is fine, this method should return an empty # list if no hosts have been set anywhere. - return _merge(state.env['hosts'], state.env['roles']) - + return _merge(state.env['hosts'], state.env['roles'], state.env['exclude_hosts']) def update_output_levels(show, hide): """ @@ -413,6 +590,14 @@ def update_output_levels(show, hide): state.output[key] = False +def _run_task(task, args, kwargs): + # First, try class-based tasks + if hasattr(task, 'run') and callable(task.run): + return task.run(*args, **kwargs) + # Fallback to callable behavior + return task(*args, **kwargs) + + def main(): """ Main command-line execution loop. 
@@ -431,8 +616,8 @@ def main(): for option in env_options: state.env[option.dest] = getattr(options, option.dest) - # Handle --hosts, --roles (comma separated string => list) - for key in ['hosts', 'roles']: + # Handle --hosts, --roles, --exclude-hosts (comma separated string => list) + for key in ['hosts', 'roles', 'exclude_hosts']: if key in state.env and isinstance(state.env[key], str): state.env[key] = state.env[key].split(',') @@ -468,10 +653,10 @@ def main(): # dict if fabfile: docstring, callables = load_fabfile(fabfile) - commands.update(callables) + state.commands.update(callables) # Abort if no commands found - if not commands and not remainder_arguments: + if not state.commands and not remainder_arguments: abort("Fabfile didn't contain any commands!") # Now that we're settled on a fabfile, inform user. @@ -481,13 +666,15 @@ def main(): else: logger.info("No fabfile loaded -- remainder command only") - # Non-verbose command list + # Shortlist is now just an alias for the "short" list format; + # it overrides use of --list-format if somebody were to specify both if options.shortlist: - shortlist() + options.list_format = 'short' - # Handle list-commands option (now that commands are loaded) + # List available commands if options.list_commands: - list_commands(docstring) + print("\n".join(list_commands(docstring, options.list_format))) + sys.exit(0) # Handle show (command-specific help) option if options.display: @@ -496,7 +683,7 @@ def main(): # If user didn't specify any commands to run, show help if not (arguments or remainder_arguments): parser.print_help() - sys.exit(0) # Or should it exit with error (1)? + sys.exit(0) # Or should it exit with error (1)? 
# Parse arguments into commands to run (plus args/kwargs/hosts) commands_to_run = parse_arguments(arguments) @@ -507,7 +694,7 @@ def main(): # Figure out if any specified task names are invalid unknown_commands = [] for tup in commands_to_run: - if tup[0] not in commands: + if crawl(tup[0], state.commands) is None: unknown_commands.append(tup[0]) # Abort if any unknown commands were specified @@ -518,22 +705,22 @@ def main(): # Generate remainder command and insert into commands, commands_to_run if remainder_command: r = '' - commands[r] = lambda: api.run(remainder_command) - commands_to_run.append((r, [], {}, [], [])) + state.commands[r] = lambda: api.run(remainder_command) + commands_to_run.append((r, [], {}, [], [], [])) if state.output.debug: names = ", ".join(x[0] for x in commands_to_run) logger.info("Commands to run: %s" % names) # At this point all commands must exist, so execute them in order. - for name, args, kwargs, cli_hosts, cli_roles in commands_to_run: + for name, args, kwargs, cli_hosts, cli_roles, cli_exclude_hosts in commands_to_run: # Get callable by itself - command = commands[name] - # Set current command name (used for some error messages) + task = crawl(name, state.commands) + # Set current task name (used for some error messages) state.env.command = name # Set host list (also copy to env) state.env.all_hosts = hosts = get_hosts( - command, cli_hosts, cli_roles) + task, cli_hosts, cli_roles, cli_exclude_hosts) # If hosts found, execute the function on each host in turn for host in hosts: # Preserve user @@ -544,12 +731,12 @@ def main(): if state.output.running: logger.info("[%s] Executing task '%s'" % (host, name)) # Actually run command - commands[name](*args, **kwargs) + _run_task(task, args, kwargs) # Put old user back state.env.user = prev_user # If no hosts found, assume local-only and run once if not hosts: - commands[name](*args, **kwargs) + _run_task(task, args, kwargs) # If we got here, no errors occurred, so print a final note. 
if state.output.status: logger.info("\nDone.") diff --git a/fabric/network.py b/fabric/network.py index 256a75976f..bf1bc1e672 100644 --- a/fabric/network.py +++ b/fabric/network.py @@ -10,17 +10,17 @@ import socket import sys -from fabric.utils import abort from fabric.auth import get_password, set_password from fabric.logger import logger +from fabric.utils import abort, handle_prompt_abort try: import warnings warnings.simplefilter('ignore', DeprecationWarning) import paramiko as ssh except ImportError: - abort("paramiko is a required module. Please install it:\n\t$ sudo easy_install paramiko") - + abort("paramiko is a required module. Please install it:\n\t" + "$ sudo easy_install paramiko") host_pattern = r'((?P.+)@)?(?P[^:]+)(:(?P\d+))?' @@ -53,7 +53,7 @@ class HostConnectionCache(dict): ``user1@example.com`` will create a connection to ``example.com``, logged in as ``user1``; later specifying ``user2@example.com`` will create a new, 2nd connection as ``user2``. - + The same applies to ports: specifying two different ports will result in two different connections to the same host being made. If no port is given, 22 is assumed, so ``example.com`` is equivalent to ``example.com:22``. @@ -69,9 +69,14 @@ def __getitem__(self, key): # Return the value either way return dict.__getitem__(self, real_key) + def __setitem__(self, key, value): + return dict.__setitem__(self, normalize_to_string(key), value) + def __delitem__(self, key): - return dict.__delitem__(self, join_host_strings(*normalize(key))) + return dict.__delitem__(self, normalize_to_string(key)) + def __contains__(self, key): + return dict.__contains__(self, normalize_to_string(key)) def normalize(host_string, omit_port=False): """ @@ -98,8 +103,8 @@ def denormalize(host_string): """ Strips out default values for the given host string. - If the user part is the default user, it is removed; if the port is port 22, - it also is removed. 
+ If the user part is the default user, it is removed; + if the port is port 22, it also is removed. """ from state import env r = host_regex.match(host_string).groupdict() @@ -116,8 +121,8 @@ def join_host_strings(user, host, port=None): """ Turns user/host/port strings into ``user@host:port`` combined string. - This function is not responsible for handling missing user/port strings; for - that, see the ``normalize`` function. + This function is not responsible for handling missing user/port strings; + for that, see the ``normalize`` function. If ``port`` is omitted, the returned string will be of the form ``user@host``. @@ -128,6 +133,13 @@ def join_host_strings(user, host, port=None): return "%s@%s%s" % (user, host, port_string) +def normalize_to_string(host_string): + """ + normalize() returns a tuple; this returns another valid host string. + """ + return join_host_strings(*normalize(host_string)) + + def connect(user, host, port): """ Create and return a new SSHClient instance connected to given host. @@ -148,7 +160,6 @@ def connect(user, host, port): if not env.reject_unknown_hosts: client.set_missing_host_key_policy(ssh.AutoAddPolicy()) - # # Connection attempt loop # @@ -172,12 +183,19 @@ def connect(user, host, port): look_for_keys=not env.no_keys ) connected = True + + # set a keepalive if desired + if env.keepalive: + client.get_transport().set_keepalive(env.keepalive) + return client # BadHostKeyException corresponds to key mismatch, i.e. what on the # command line results in the big banner error about man-in-the-middle # attacks. except ssh.BadHostKeyException: - abort("Host key for %s did not match pre-existing key! Server's key was changed recently, or possible man-in-the-middle attack." % env.host) + abort("Host key for %s did not match pre-existing key! Server's" + " key was changed recently, or possible man-in-the-middle" + "attack." 
% env.host) # Prompt for new password to try on auth failure except ( ssh.AuthenticationException, @@ -244,9 +262,11 @@ def connect(user, host, port): host, e[1]) ) + def prompt_for_password(prompt=None, no_colon=False, stream=None): """ - Prompts for and returns a new password if required; otherwise, returns None. + Prompts for and returns a new password if required; otherwise, returns + None. A trailing colon is appended unless ``no_colon`` is True. @@ -261,6 +281,7 @@ def prompt_for_password(prompt=None, no_colon=False, stream=None): defaults to ``sys.stderr``. """ from fabric.state import env + handle_prompt_abort() stream = stream or sys.stderr # Construct prompt default = "[%s] Login password" % env.host_string @@ -285,7 +306,7 @@ def needs_host(func): This decorator is basically a safety net for silly users who forgot to specify the host/host list in one way or another. It should be used to wrap operations which require a network connection. - + Due to how we execute commands per-host in ``main()``, it's not possible to specify multiple hosts at this point in time, so only a single host will be prompted for. @@ -296,10 +317,13 @@ def needs_host(func): command (in the case where multiple commands have no hosts set, of course.) """ from fabric.state import env + @wraps(func) def host_prompting_wrapper(*args, **kwargs): + handle_prompt_abort() while not env.get('host_string', False): - host_string = raw_input("No hosts found. Please specify (single) host string for connection: ") + host_string = raw_input("No hosts found. 
Please specify (single)" + " host string for connection: ") interpret_host_string(host_string) return func(*args, **kwargs) return host_prompting_wrapper diff --git a/fabric/operations.py b/fabric/operations.py index 735280a3e8..d32474ec6a 100644 --- a/fabric/operations.py +++ b/fabric/operations.py @@ -19,12 +19,13 @@ from fabric.context_managers import settings, char_buffered from fabric.io import output_loop, input_loop from fabric.network import needs_host +from fabric.sftp import SFTP from fabric.state import (env, connections, output, win32, default_channel, io_sleep) -from fabric.utils import abort, indent, warn, puts from fabric.thread_handling import ThreadHandler from fabric.sftp import SFTP from fabric.logger import logger +from fabric.utils import abort, indent, warn, puts, handle_prompt_abort # For terminal size logic below if not win32: @@ -145,6 +146,9 @@ def require(*keys, **kwargs): Note: it is assumed that the keyword arguments apply to all given keys as a group. If you feel the need to specify more than one ``used_for``, for example, you should break your logic into multiple calls to ``require()``. + + .. versionchanged:: 1.1 + Allow iterable ``provided_by`` values instead of just single values. """ # If all keys exist, we're good, so keep going. missing_keys = filter(lambda x: x not in env, keys) @@ -219,6 +223,13 @@ def prompt(text, key=None, default='', validate=None): Either way, `prompt` will re-prompt until validation passes (or the user hits ``Ctrl-C``). + .. note:: + `~fabric.operations.prompt` honors :ref:`env.abort_on_prompts + ` and will call `~fabric.utils.abort` instead of + prompting if that flag is set to ``True``. If you want to block on user + input regardless, try wrapping with + `~fabric.context_managers.settings`. 
+ Examples:: # Simplest form: @@ -234,7 +245,12 @@ def prompt(text, key=None, default='', validate=None): release = prompt('Please supply a release name', validate=r'^\w+-\d+(\.\d+)?$') + # Prompt regardless of the global abort-on-prompts setting: + with settings(abort_on_prompts=False): + prompt('I seriously need an answer on this! ') + """ + handle_prompt_abort() # Store previous env value for later display, if necessary if key: previous_value = env.get(key) @@ -263,6 +279,7 @@ def prompt(text, key=None, default='', validate=None): # Reset value so we stay in the loop value = None logger.info("Validation failed for the following reason: %s" % e.message) + logger.info(indent(e.message)) # String / regex must match and will be empty if validation fails. else: # Need to transform regex into full-matching one if it's not. @@ -697,7 +714,7 @@ def _prefix_env_vars(command): return path + command -def _execute(channel, command, pty=True, combine_stderr=True, +def _execute(channel, command, pty=True, combine_stderr=None, invoke_shell=False): """ Execute ``command`` over ``channel``. @@ -705,6 +722,9 @@ def _execute(channel, command, pty=True, combine_stderr=True, ``pty`` controls whether a pseudo-terminal is created. ``combine_stderr`` controls whether we call ``channel.set_combine_stderr``. + By default, the global setting for this behavior (:ref:`env.combine_stderr + `) is consulted, but you may specify ``True`` or ``False`` + here to override it. 
``invoke_shell`` controls whether we use ``exec_command`` or ``invoke_shell`` (plus a handful of other things, such as always forcing a @@ -716,8 +736,9 @@ def _execute(channel, command, pty=True, combine_stderr=True, """ with char_buffered(sys.stdin): # Combine stdout and stderr to get around oddball mixing issues - if combine_stderr or env.combine_stderr: - channel.set_combine_stderr(True) + if combine_stderr is None: + combine_stderr = env.combine_stderr + channel.set_combine_stderr(combine_stderr) # Assume pty use, and allow overriding of this either via kwarg or env # var. (invoke_shell always wants a pty no matter what.) @@ -869,7 +890,7 @@ def _run_command(command, shell=True, pty=True, combine_stderr=True, @needs_host -def run(command, shell=True, pty=True, combine_stderr=True): +def run(command, shell=True, pty=True, combine_stderr=None): """ Run a shell command on a remote host. @@ -917,12 +938,17 @@ def run(command, shell=True, pty=True, combine_stderr=True): .. versionchanged:: 1.0 The default value of ``pty`` is now ``True``. + + .. versionchanged:: 1.0.2 + The default value of ``combine_stderr`` is now ``None`` instead of + ``True``. However, the default *behavior* is unchanged, as the global + setting is still ``True``. """ return _run_command(command, shell, pty, combine_stderr) @needs_host -def sudo(command, shell=True, pty=True, combine_stderr=True, user=None): +def sudo(command, shell=True, pty=True, combine_stderr=None, user=None): """ Run a shell command on a remote host, with superuser privileges. 
@@ -1005,7 +1031,7 @@ def local(command, capture=False): out_stream = None if output.stdout else dev_null err_stream = None if output.stderr else dev_null try: - cmd_arg = [wrapped_command] if win32 else wrapped_command + cmd_arg = wrapped_command if win32 else [wrapped_command] p = subprocess.Popen(cmd_arg, shell=True, stdout=out_stream, stderr=err_stream) (stdout, stderr) = p.communicate() diff --git a/fabric/sftp.py b/fabric/sftp.py index 29954f6c21..01b149f52d 100644 --- a/fabric/sftp.py +++ b/fabric/sftp.py @@ -17,27 +17,23 @@ class SFTP(object): def __init__(self, host_string): self.ftp = connections[host_string].open_sftp() - # Recall that __getattr__ is the "fallback" attribute getter, and is thus # pretty safe to use for facade-like behavior as we're doing here. def __getattr__(self, attr): return getattr(self.ftp, attr) - def isdir(self, path): try: return stat.S_ISDIR(self.ftp.lstat(path).st_mode) except IOError: return False - def islink(self, path): try: return stat.S_ISLNK(self.ftp.lstat(path).st_mode) except IOError: return False - def exists(self, path): try: self.ftp.lstat(path).st_mode @@ -45,7 +41,6 @@ def exists(self, path): return False return True - def glob(self, path): from fabric.state import win32 dirpart, pattern = os.path.split(path) @@ -60,7 +55,6 @@ def glob(self, path): ret = [os.path.join(dirpart, name) for name in names] return ret - def walk(self, top, topdown=True, onerror=None, followlinks=False): from os.path import join, isdir, islink @@ -96,7 +90,6 @@ def walk(self, top, topdown=True, onerror=None, followlinks=False): if not topdown: yield top, dirs, nondirs - def mkdir(self, path, use_sudo): from fabric.api import sudo, hide if use_sudo: @@ -105,7 +98,6 @@ def mkdir(self, path, use_sudo): else: self.ftp.mkdir(path) - def get(self, remote_path, local_path, local_is_path, rremote=None): # rremote => relative remote path, so get(/var/log) would result in # this function being called with @@ -157,7 +149,6 @@ def get(self, 
remote_path, local_path, local_is_path, rremote=None): result = real_local_path return result - def get_dir(self, remote_path, local_path): # Decide what needs to be stripped from remote paths so they're all # relative to the given remote_path @@ -197,7 +188,6 @@ def get_dir(self, remote_path, local_path): result.append(self.get(rpath, lpath, True, rremote)) return result - def put(self, local_path, remote_path, use_sudo, mirror_local_mode, mode, local_is_path): from fabric.api import sudo, hide @@ -236,14 +226,14 @@ def put(self, local_path, remote_path, use_sudo, mirror_local_mode, mode, if not local_is_path: os.remove(real_local_path) # Handle modes if necessary - if local_is_path and (mirror_local_mode or mode is not None): + if (local_is_path and mirror_local_mode) or (mode is not None): lmode = os.stat(local_path).st_mode if mirror_local_mode else mode lmode = lmode & 07777 rmode = rattrs.st_mode & 07777 if lmode != rmode: if use_sudo: with hide('everything'): - sudo('chmod %s \"%s\"' % (lmode, remote_path)) + sudo('chmod %o \"%s\"' % (lmode, remote_path)) else: self.ftp.chmod(remote_path, lmode) if use_sudo: @@ -253,7 +243,6 @@ def put(self, local_path, remote_path, use_sudo, mirror_local_mode, mode, remote_path = target_path return remote_path - def put_dir(self, local_path, remote_path, use_sudo, mirror_local_mode, mode): if os.path.basename(local_path): @@ -272,13 +261,13 @@ def put_dir(self, local_path, remote_path, use_sudo, mirror_local_mode, self.mkdir(rcontext, use_sudo) for d in dirs: - n = os.path.join(rcontext,d) + n = os.path.join(rcontext, d) if not self.exists(n): self.mkdir(n, use_sudo) for f in files: - local_path = os.path.join(context,f) - n = os.path.join(rcontext,f) + local_path = os.path.join(context, f) + n = os.path.join(rcontext, f) p = self.put(local_path, n, use_sudo, mirror_local_mode, mode, True) remote_paths.append(p) diff --git a/fabric/state.py b/fabric/state.py index 343510c91c..c59f158aa5 100644 --- a/fabric/state.py +++ 
b/fabric/state.py @@ -23,7 +23,7 @@ # # Environment dictionary - support structures -# +# class _AttributeDict(dict): """ @@ -91,7 +91,7 @@ def _rc_path(): from win32com.shell.shell import SHGetSpecialFolderPath from win32com.shell.shellcon import CSIDL_PROFILE return "%s/%s" % ( - SHGetSpecialFolderPath(0,CSIDL_PROFILE), + SHGetSpecialFolderPath(0, CSIDL_PROFILE), rc_file ) @@ -144,6 +144,11 @@ def _rc_path(): help="comma-separated list of roles to operate on" ), + make_option('-x', '--exclude-hosts', + default=[], + help="comma-separated list of hosts to exclude" + ), + make_option('-i', action='append', dest='key_filename', @@ -203,8 +208,22 @@ def _rc_path(): action='store_false', default=True, help="do not use pseudo-terminal in run/sudo" - ) - + ), + + # Abort on prompting flag + make_option('--abort-on-prompts', + action='store_true', + default=False, + help="Abort instead of prompting (for password, host, etc)" + ), + + # Keepalive + make_option('--keepalive', + dest='keepalive', + type=int, + default=0, + help="enables a keepalive every n seconds" + ), ] @@ -224,11 +243,12 @@ def _rc_path(): 'combine_stderr': True, 'command': None, 'command_prefixes': [], - 'cwd': '', # Must be empty string, not None, for concatenation purposes + 'cwd': '', # Must be empty string, not None, for concatenation purposes 'echo_stdin': True, + 'exclude_hosts': [], 'host': None, 'host_string': None, - 'lcwd': '', # Must be empty string, not None, for concatenation purposes + 'lcwd': '', # Must be empty string, not None, for concatenation purposes 'local_user': _get_system_username(), 'output_prefix': True, 'passwords': {}, @@ -236,7 +256,7 @@ def _rc_path(): 'path_behavior': 'append', 'port': None, 'real_fabfile': None, - 'roledefs': {}, + 'roles': [], 'roledefs': {}, # -S so sudo accepts passwd via stdin, -p with our known-value prompt for # later detection (thus %s -- gets filled with env.sudo_prompt at runtime) @@ -251,7 +271,6 @@ def _rc_path(): for option in env_options: 
env[option.dest] = option.default - # # Command dictionary # @@ -267,11 +286,14 @@ def _rc_path(): connections = HostConnectionCache() + def default_channel(): """ Return a channel object based on ``env.host_string``. """ - return connections[env.host_string].get_transport().open_session() + chan = connections[env.host_string].get_transport().open_session() + chan.input_enabled = True + return chan # @@ -298,7 +320,7 @@ class _AliasDict(_AttributeDict): This also means they will not show up in e.g. ``dict.keys()``. ..note:: - + Aliases are recursive, so you may refer to an alias within the key list of another alias. Naturally, this means that you can end up with infinite loops if you're not careful. diff --git a/fabric/tasks.py b/fabric/tasks.py new file mode 100644 index 0000000000..88d5cdc177 --- /dev/null +++ b/fabric/tasks.py @@ -0,0 +1,47 @@ +from functools import wraps + +class Task(object): + """ + Abstract base class for objects wishing to be picked up as Fabric tasks. + + Instances of subclasses will be treated as valid tasks when present in + fabfiles loaded by the :doc:`fab ` tool. + + For details on how to implement and use `~fabric.tasks.Task` subclasses, + please see the usage documentation on :ref:`new-style tasks + `. + + .. versionadded:: 1.1 + """ + name = 'undefined' + use_task_objects = True + + # TODO: make it so that this wraps other decorators as expected + + def run(self): + raise NotImplementedError + + +class WrappedCallableTask(Task): + """ + Wraps a given callable transparently, while marking it as a valid Task. + + Generally used via the `~fabric.decorators.task` decorator and not + directly. + + .. 
versionadded:: 1.1 + """ + def __init__(self, callable): + super(WrappedCallableTask, self).__init__() + self.wrapped = callable + self.__name__ = self.name = callable.__name__ + self.__doc__ = callable.__doc__ + + def __call__(self, *args, **kwargs): + return self.run(*args, **kwargs) + + def run(self, *args, **kwargs): + return self.wrapped(*args, **kwargs) + + def __getattr__(self, k): + return getattr(self.wrapped, k) diff --git a/fabric/thread_handling.py b/fabric/thread_handling.py index 25aa3a2326..790a576bef 100644 --- a/fabric/thread_handling.py +++ b/fabric/thread_handling.py @@ -6,6 +6,7 @@ class ThreadHandler(object): def __init__(self, name, callable, *args, **kwargs): # Set up exception handling self.exception = None + def wrapper(*args, **kwargs): try: callable(*args, **kwargs) diff --git a/fabric/utils.py b/fabric/utils.py index ab6e6ad2f0..f6c737fa13 100644 --- a/fabric/utils.py +++ b/fabric/utils.py @@ -12,7 +12,7 @@ def abort(msg): """ Abort execution, print ``msg`` to stderr and exit with error status (1.) - This function currently makes use of `sys.exit`_, which raises + This function currently makes use of `sys.exit`_, which raises `SystemExit`_. Therefore, it's possible to detect and recover from inner calls to `abort` by using ``except SystemExit`` or similar. @@ -21,7 +21,7 @@ def abort(msg): """ from fabric.state import output if output.aborts: - logger.error( "FATAL: %s" % msg ) + logger.error( "Fatal error: %s" % msg ) logger.error( "Aborting." ) sys.exit(1) @@ -37,7 +37,7 @@ def warn(msg): """ from fabric.state import output if output.warnings: - logger.warn( "%s" % msg ) + logger.warn( "Warning: %s" % msg ) def indent(text, spaces=4, strip=False): @@ -68,7 +68,7 @@ def indent(text, spaces=4, strip=False): return output -def puts(text, show_prefix=True, end="\n", flush=False): +def puts(text, show_prefix=True, end="", flush=False): """ An alias for ``print`` whose output is managed by Fabric's output controls. 
@@ -121,3 +121,9 @@ def fastprint(text, show_prefix=False, end="", flush=True): .. seealso:: `~fabric.utils.puts` """ return puts(text=text, show_prefix=show_prefix, end=end, flush=flush) + + +def handle_prompt_abort(): + import fabric.state + if fabric.state.env.abort_on_prompts: + abort("Needed to prompt, but abort-on-prompts was set to True!") diff --git a/fabric/version.py b/fabric/version.py index 5c6c735afc..bf49200cf2 100644 --- a/fabric/version.py +++ b/fabric/version.py @@ -21,7 +21,8 @@ def git_sha(): return p.communicate()[0] -VERSION = (1, 0, 0, 'final', 0) +VERSION = (1, 2, 0, 'alpha', 0) + def get_version(form='short'): """ @@ -92,6 +93,6 @@ def get_version(form='short'): try: return versions[form] except KeyError: - raise TypeError, '"%s" is not a valid form specifier.' % form + raise TypeError('"%s" is not a valid form specifier.' % form) __version__ = get_version('short') diff --git a/setup.py b/setup.py index cbdf292a96..2353782268 100644 --- a/setup.py +++ b/setup.py @@ -9,9 +9,10 @@ readme = open('README').read() +v = get_version('short') long_description = """ To find out what's new in this version of Fabric, please see `the changelog -`_. +`_. You can also install the `in-development version `_ using @@ -24,7 +25,7 @@ ---- For more information, please see the Fabric website or execute ``fab --help``. 
-""" % (get_version('short'), readme) +""" % (v, v, readme) setup( name='Fabric', diff --git a/tests/Python26SocketServer.py b/tests/Python26SocketServer.py index f01cb5f2cc..b26854023a 100644 --- a/tests/Python26SocketServer.py +++ b/tests/Python26SocketServer.py @@ -138,17 +138,17 @@ class will essentially render the service "deaf" while one request is except ImportError: import dummy_threading as threading -__all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer", - "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler", - "StreamRequestHandler","DatagramRequestHandler", +__all__ = ["TCPServer", "UDPServer", "ForkingUDPServer", "ForkingTCPServer", + "ThreadingUDPServer", "ThreadingTCPServer", "BaseRequestHandler", + "StreamRequestHandler", "DatagramRequestHandler", "ThreadingMixIn", "ForkingMixIn"] if hasattr(socket, "AF_UNIX"): - __all__.extend(["UnixStreamServer","UnixDatagramServer", + __all__.extend(["UnixStreamServer", "UnixDatagramServer", "ThreadingUnixStreamServer", "ThreadingUnixDatagramServer"]) -class BaseServer: +class BaseServer: """Base class for server classes. Methods for the caller: @@ -329,12 +329,12 @@ def handle_error(self, request, client_address): The default is to print a traceback and continue. """ - print '-'*40 + print '-' * 40 print 'Exception happened during processing of request from', print client_address import traceback - traceback.print_exc() # XXX But this goes to stderr! - print '-'*40 + traceback.print_exc() # XXX But this goes to stderr! + print '-' * 40 class TCPServer(BaseServer): @@ -391,7 +391,8 @@ class TCPServer(BaseServer): allow_reuse_address = False - def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True): + def __init__(self, server_address, RequestHandlerClass, + bind_and_activate=True): """Constructor. 
May be extended, do not override.""" BaseServer.__init__(self, server_address, RequestHandlerClass) self.socket = socket.socket(self.address_family, @@ -470,8 +471,8 @@ def close_request(self, request): # No need to close anything. pass -class ForkingMixIn: +class ForkingMixIn: """Mix-in class to handle each request in a new process.""" timeout = 300 @@ -480,7 +481,8 @@ class ForkingMixIn: def collect_children(self): """Internal routine to wait for children that have exited.""" - if self.active_children is None: return + if self.active_children is None: + return while len(self.active_children) >= self.max_children: # XXX: This will wait for any child process, not just ones # spawned by this library. This could confuse other @@ -490,7 +492,8 @@ def collect_children(self): pid, status = os.waitpid(0, 0) except os.error: pid = None - if pid not in self.active_children: continue + if pid not in self.active_children: + continue self.active_children.remove(pid) # XXX: This loop runs more system calls than it ought @@ -503,12 +506,13 @@ def collect_children(self): pid, status = os.waitpid(child, os.WNOHANG) except os.error: pid = None - if not pid: continue + if not pid: + continue try: self.active_children.remove(pid) except ValueError, e: - raise ValueError('%s. x=%d and list=%r' % (e.message, pid, - self.active_children)) + raise ValueError('%s. x=%d and list=%r' % \ + (e.message, pid, self.active_children)) def handle_timeout(self): """Wait for zombies after self.timeout seconds of inactivity. 
@@ -563,18 +567,28 @@ def process_request_thread(self, request, client_address): def process_request(self, request, client_address): """Start a new thread to process the request.""" - t = threading.Thread(target = self.process_request_thread, - args = (request, client_address)) + t = threading.Thread(target=self.process_request_thread, + args=(request, client_address)) if self.daemon_threads: - t.setDaemon (1) + t.setDaemon(1) t.start() -class ForkingUDPServer(ForkingMixIn, UDPServer): pass -class ForkingTCPServer(ForkingMixIn, TCPServer): pass +class ForkingUDPServer(ForkingMixIn, UDPServer): + pass + + +class ForkingTCPServer(ForkingMixIn, TCPServer): + pass + + +class ThreadingUDPServer(ThreadingMixIn, UDPServer): + pass + + +class ThreadingTCPServer(ThreadingMixIn, TCPServer): + pass -class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass -class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass if hasattr(socket, 'AF_UNIX'): @@ -584,9 +598,12 @@ class UnixStreamServer(TCPServer): class UnixDatagramServer(UDPServer): address_family = socket.AF_UNIX - class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass + class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): + pass + + class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): + pass - class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass class BaseRequestHandler: diff --git a/tests/fake_filesystem.py b/tests/fake_filesystem.py index 42bc084512..c9627c5480 100644 --- a/tests/fake_filesystem.py +++ b/tests/fake_filesystem.py @@ -7,6 +7,7 @@ class FakeFile(StringIO): + def __init__(self, value=None, path=None): init = lambda x: StringIO.__init__(self, x) if value is None: diff --git a/tests/integration.py b/tests/integration.py new file mode 100644 index 0000000000..1a264651c5 --- /dev/null +++ b/tests/integration.py @@ -0,0 +1,28 @@ +# "Integration test" for Fabric to be run occasionally / before releasing. 
+# Executes idempotent/nonthreatening commands against localhost by default. + +from __future__ import with_statement + +from fabric.api import * + + +@hosts('localhost') +def test(): + flags = (True, False) + funcs = (run, sudo) + cmd = "ls /" + line = "#" * 72 + for shell in flags: + for pty in flags: + for combine_stderr in flags: + for func in funcs: + print(">>> %s(%s, shell=%s, pty=%s, combine_stderr=%s)" % ( + func.func_name, cmd, shell, pty, combine_stderr)) + print(line) + func( + cmd, + shell=shell, + pty=pty, + combine_stderr=combine_stderr + ) + print(line + "\n") diff --git a/tests/server.py b/tests/server.py index eba404a5c4..f241c1afda 100644 --- a/tests/server.py +++ b/tests/server.py @@ -56,7 +56,11 @@ fabric requirements.txt setup.py -tests""" +tests""", + "both_streams": [ + "stdout", + "stderr" + ] } FILES = FakeFilesystem({ '/file.txt': 'contents', @@ -67,7 +71,7 @@ '/tree/file2.txt': 'y', '/tree/subfolder/file3.txt': 'z', '/etc/apache2/apache2.conf': 'Include other.conf', - HOME: None # So $HOME is a directory + HOME: None # So $HOME is a directory }) PASSWORDS = { 'root': 'root', @@ -139,7 +143,7 @@ def check_auth_password(self, username, password): def check_auth_publickey(self, username, key): self.username = username - return ssh.AUTH_SUCCESSFUL if self.pubkeys else ssh.AUTH_FAILED + return ssh.AUTH_SUCCESSFUL if self.pubkeys else ssh.AUTH_FAILED def get_allowed_auths(self, username): return 'password,publickey' @@ -202,6 +206,7 @@ class PrependList(list): def prepend(self, val): self.insert(0, val) + def expand(path): """ '/foo/bar/biz' => ('/', 'foo', 'bar', 'biz') @@ -220,6 +225,7 @@ def expand(path): ret.prepend(directory if directory == os.path.sep else '') return ret + def contains(folder, path): """ contains(('a', 'b', 'c'), ('a', 'b')) => True @@ -227,6 +233,7 @@ def contains(folder, path): """ return False if len(path) >= len(folder) else folder[:len(path)] == path + def missing_folders(paths): """ missing_folders(['a/b/c']) => 
['a', 'a/b', 'a/b/c'] @@ -236,7 +243,7 @@ def missing_folders(paths): for path in paths: expanded = expand(path) for i in range(len(expanded)): - folder = os.path.join(*expanded[:len(expanded)-i]) + folder = os.path.join(*expanded[:len(expanded) - i]) if folder and folder not in pool: pool.add(folder) ret.append(folder) @@ -272,7 +279,7 @@ def list_folder(self, path): candidates = [x for x in expanded_files if contains(x, expanded_path)] children = [] for candidate in candidates: - cut = candidate[:len(expanded_path)+1] + cut = candidate[:len(expanded_path) + 1] if cut not in children: children.append(cut) results = [self.stat(os.path.join(*x)) for x in children] @@ -326,6 +333,7 @@ def mkdir(self, path, attr): self.files[path] = None return ssh.SFTP_OK + def serve_responses(responses, files, passwords, home, pubkeys, port): """ Return a threading TCP based SocketServer listening on ``port``. diff --git a/tests/support/__init__.py b/tests/support/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/tests/support/decorated_fabfile.py b/tests/support/decorated_fabfile.py new file mode 100644 index 0000000000..fcab3a2e9a --- /dev/null +++ b/tests/support/decorated_fabfile.py @@ -0,0 +1,8 @@ +from fabric.decorators import task + +@task +def foo(): + pass + +def bar(): + pass diff --git a/tests/support/decorated_fabfile_with_classbased_task.py b/tests/support/decorated_fabfile_with_classbased_task.py new file mode 100644 index 0000000000..143c392085 --- /dev/null +++ b/tests/support/decorated_fabfile_with_classbased_task.py @@ -0,0 +1,12 @@ +from fabric import tasks +from fabric.decorators import task + +class ClassBasedTask(tasks.Task): + def __init__(self): + self.name = "foo" + self.use_decorated = True + + def run(self, *args, **kwargs): + pass + +foo = ClassBasedTask() diff --git a/tests/support/decorated_fabfile_with_modules.py b/tests/support/decorated_fabfile_with_modules.py new file mode 100644 index 0000000000..e35aadae61 --- /dev/null 
+++ b/tests/support/decorated_fabfile_with_modules.py @@ -0,0 +1,9 @@ +from fabric.decorators import task +import module_fabtasks as tasks + +@task +def foo(): + pass + +def bar(): + pass diff --git a/tests/support/deep.py b/tests/support/deep.py new file mode 100644 index 0000000000..1960a71192 --- /dev/null +++ b/tests/support/deep.py @@ -0,0 +1 @@ +import submodule diff --git a/tests/support/docstring.py b/tests/support/docstring.py new file mode 100644 index 0000000000..76801afda1 --- /dev/null +++ b/tests/support/docstring.py @@ -0,0 +1,8 @@ +from fabric.decorators import task + +@task +def foo(): + """ + Foos! + """ + pass diff --git a/tests/support/explicit_fabfile.py b/tests/support/explicit_fabfile.py new file mode 100644 index 0000000000..c00f4f53f2 --- /dev/null +++ b/tests/support/explicit_fabfile.py @@ -0,0 +1,7 @@ +__all__ = ['foo'] + +def foo(): + pass + +def bar(): + pass diff --git a/tests/support/implicit_fabfile.py b/tests/support/implicit_fabfile.py new file mode 100644 index 0000000000..8489841709 --- /dev/null +++ b/tests/support/implicit_fabfile.py @@ -0,0 +1,5 @@ +def foo(): + pass + +def bar(): + pass diff --git a/tests/support/module_fabtasks.py b/tests/support/module_fabtasks.py new file mode 100644 index 0000000000..2c54ef9adf --- /dev/null +++ b/tests/support/module_fabtasks.py @@ -0,0 +1,5 @@ +def hello(): + print "hello" + +def world(): + print "world" diff --git a/tests/support/submodule/__init__.py b/tests/support/submodule/__init__.py new file mode 100644 index 0000000000..50bf4be035 --- /dev/null +++ b/tests/support/submodule/__init__.py @@ -0,0 +1,4 @@ +import subsubmodule + +def classic_task(): + pass diff --git a/tests/support/submodule/subsubmodule/__init__.py b/tests/support/submodule/subsubmodule/__init__.py new file mode 100644 index 0000000000..95d91db646 --- /dev/null +++ b/tests/support/submodule/subsubmodule/__init__.py @@ -0,0 +1,5 @@ +from fabric.api import task + +@task +def deeptask(): + pass diff --git 
a/tests/support/tree/__init__.py b/tests/support/tree/__init__.py new file mode 100644 index 0000000000..b7f3bf9f97 --- /dev/null +++ b/tests/support/tree/__init__.py @@ -0,0 +1,12 @@ +from fabric.api import task + +import system, db + + +@task +def deploy(): + pass + +@task +def build_docs(): + pass diff --git a/tests/support/tree/db.py b/tests/support/tree/db.py new file mode 100644 index 0000000000..318982fb47 --- /dev/null +++ b/tests/support/tree/db.py @@ -0,0 +1,6 @@ +from fabric.api import task + + +@task +def migrate(): + pass diff --git a/tests/support/tree/system/__init__.py b/tests/support/tree/system/__init__.py new file mode 100644 index 0000000000..a7d2bfe08e --- /dev/null +++ b/tests/support/tree/system/__init__.py @@ -0,0 +1,7 @@ +from fabric.api import task + +import debian + +@task +def install_package(): + pass diff --git a/tests/support/tree/system/debian.py b/tests/support/tree/system/debian.py new file mode 100644 index 0000000000..f1e17c29d3 --- /dev/null +++ b/tests/support/tree/system/debian.py @@ -0,0 +1,6 @@ +from fabric.api import task + + +@task +def update_apt(): + pass diff --git a/tests/test_context_managers.py b/tests/test_context_managers.py index 8eabfa2168..edd589be17 100644 --- a/tests/test_context_managers.py +++ b/tests/test_context_managers.py @@ -30,7 +30,7 @@ def test_cwd_with_absolute_paths(): """ cd() should append arg if non-absolute or overwrite otherwise """ - existing = '/some/existing/path' + existing = '/some/existing/path' additional = 'another' absolute = '/absolute/path' with settings(cwd=existing): diff --git a/tests/test_contrib.py b/tests/test_contrib.py new file mode 100644 index 0000000000..b64327b81a --- /dev/null +++ b/tests/test_contrib.py @@ -0,0 +1,51 @@ +from __future__ import with_statement + +from fabric.api import hide, get, show +from fabric.contrib.files import upload_template, contains + +from utils import FabricTest, eq_contents +from server import server + + +class TestContrib(FabricTest): + # 
Make sure it knows / is a directory. + # This is in lieu of starting down the "actual honest to god fake operating + # system" road...:( + @server(responses={'test -d /': ""}) + def test_upload_template_uses_correct_remote_filename(self): + """ + upload_template() shouldn't munge final remote filename + """ + template = self.mkfile('template.txt', 'text') + with hide('everything'): + upload_template(template, '/') + assert self.exists_remotely('/template.txt') + + @server() + def test_upload_template_handles_file_destination(self): + """ + upload_template() should work OK with file and directory destinations + """ + template = self.mkfile('template.txt', '%(varname)s') + local = self.path('result.txt') + remote = '/configfile.txt' + var = 'foobar' + with hide('everything'): + upload_template(template, remote, {'varname': var}) + get(remote, local) + eq_contents(local, var) + + @server(responses={ + 'egrep "text" "/file.txt"': ( + "sudo: unable to resolve host fabric", + "", + 1 + )} + ) + def test_contains_checks_only_succeeded_flag(self): + """ + contains() should return False on bad grep even if stdout isn't empty + """ + with hide('everything'): + result = contains('/file.txt', 'text', use_sudo=True) + assert result == False diff --git a/tests/test_decorators.py b/tests/test_decorators.py index 4b6144428d..1f55807132 100644 --- a/tests/test_decorators.py +++ b/tests/test_decorators.py @@ -1,8 +1,15 @@ -from nose.tools import eq_ +from nose.tools import eq_, ok_ from fudge import Fake, with_fakes +import random -from fabric import decorators +from fabric import decorators, tasks +from fabric.state import env +def test_task_returns_an_instance_of_wrappedfunctask_object(): + def foo(): + pass + task = decorators.task(foo) + ok_(isinstance(task, tasks.WrappedCallableTask)) def fake_function(*args, **kwargs): """ @@ -38,3 +45,13 @@ def test_runs_once_returns_same_value_each_run(): task = decorators.runs_once(fake_function().returns(return_value)) for i in range(2): 
eq_(task(), return_value) + +def test_with_settings_passes_env_vars_into_decorated_function(): + env.value = True + random_return = random.randint(1000, 2000) + def some_task(): + return env.value + decorated_task = decorators.with_settings(value=random_return)(some_task) + ok_(some_task(), msg="sanity check") + eq_(random_return, decorated_task()) + diff --git a/tests/test_main.py b/tests/test_main.py index 2582011826..cd056df0b3 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -1,58 +1,88 @@ +from __future__ import with_statement + import sys import copy +from contextlib import contextmanager -from fudge.patcher import with_patched_object -from fudge import Fake -from nose.tools import eq_, raises +from fudge import Fake, patched_context +from nose.tools import ok_, eq_, raises -from fabric.decorators import hosts, roles +from fabric.decorators import hosts, roles, task from fabric.main import (get_hosts, parse_arguments, _merge, _escape_split, - load_fabfile) + load_fabfile, list_commands, _task_names, _crawl, crawl, + COMMANDS_HEADER, NESTED_REMINDER) + import fabric.state from fabric.state import _AttributeDict -from utils import mock_streams +from utils import mock_streams, patched_env, eq_, FabricTest +import os +import sys +# +# Basic CLI stuff +# + def test_argument_parsing(): for args, output in [ # Basic - ('abc', ('abc', [], {}, [], [])), + ('abc', ('abc', [], {}, [], [], [])), # Arg - ('ab:c', ('ab', ['c'], {}, [], [])), + ('ab:c', ('ab', ['c'], {}, [], [], [])), # Kwarg - ('a:b=c', ('a', [], {'b':'c'}, [], [])), + ('a:b=c', ('a', [], {'b':'c'}, [], [], [])), # Arg and kwarg - ('a:b=c,d', ('a', ['d'], {'b':'c'}, [], [])), + ('a:b=c,d', ('a', ['d'], {'b':'c'}, [], [], [])), # Multiple kwargs - ('a:b=c,d=e', ('a', [], {'b':'c','d':'e'}, [], [])), + ('a:b=c,d=e', ('a', [], {'b':'c','d':'e'}, [], [], [])), # Host - ('abc:host=foo', ('abc', [], {}, ['foo'], [])), + ('abc:host=foo', ('abc', [], {}, ['foo'], [], [])), # Hosts with single host - 
('abc:hosts=foo', ('abc', [], {}, ['foo'], [])), + ('abc:hosts=foo', ('abc', [], {}, ['foo'], [], [])), # Hosts with multiple hosts # Note: in a real shell, one would need to quote or escape "foo;bar". # But in pure-Python that would get interpreted literally, so we don't. - ('abc:hosts=foo;bar', ('abc', [], {}, ['foo', 'bar'], [])), - # Empty string args - ("task:x=y,z=", ('task', [], {'x': 'y', 'z': ''}, [], [])), - ("task:foo,,x=y", ('task', ['foo', ''], {'x': 'y'}, [], [])), + ('abc:hosts=foo;bar', ('abc', [], {}, ['foo', 'bar'], [], [])), + + # Exclude hosts + ('abc:hosts=foo;bar,exclude_hosts=foo', ('abc', [], {}, ['foo', 'bar'], [], ['foo'])), + ('abc:hosts=foo;bar,exclude_hosts=foo;bar', ('abc', [], {}, ['foo', 'bar'], [], ['foo','bar'])), + # Empty string args + ("task:x=y,z=", ('task', [], {'x': 'y', 'z': ''}, [], [], [])), + ("task:foo,,x=y", ('task', ['foo', ''], {'x': 'y'}, [], [], [])), ]: yield eq_, parse_arguments([args]), [output] +def test_escaped_task_arg_split(): + """ + Allow backslashes to escape the task argument separator character + """ + argstr = r"foo,bar\,biz\,baz,what comes after baz?" 
+ eq_( + _escape_split(',', argstr), + ['foo', 'bar,biz,baz', 'what comes after baz?'] + ) + + +# +# Host/role decorators +# + def eq_hosts(command, host_list): - eq_(set(get_hosts(command, [], [])), set(host_list)) - + eq_(set(get_hosts(command, [], [], [])), set(host_list)) def test_hosts_decorator_by_itself(): """ Use of @hosts only """ host_list = ['a', 'b'] + @hosts(*host_list) def command(): pass + eq_hosts(command, host_list) @@ -61,9 +91,7 @@ def command(): 'r2': ['b', 'c'] } -@with_patched_object( - 'fabric.state', 'env', _AttributeDict({'roledefs': fake_roles}) -) +@patched_env({'roledefs': fake_roles}) def test_roles_decorator_by_itself(): """ Use of @roles only @@ -74,9 +102,7 @@ def command(): eq_hosts(command, ['a', 'b']) -@with_patched_object( - 'fabric.state', 'env', _AttributeDict({'roledefs': fake_roles}) -) +@patched_env({'roledefs': fake_roles}) def test_hosts_and_roles_together(): """ Use of @roles and @hosts together results in union of both @@ -87,8 +113,34 @@ def command(): pass eq_hosts(command, ['a', 'b', 'c']) +tuple_roles = { + 'r1': ('a', 'b'), + 'r2': ('b', 'c'), +} -@with_patched_object('fabric.state', 'env', {'hosts': ['foo']}) + +@patched_env({'roledefs': tuple_roles}) +def test_roles_as_tuples(): + """ + Test that a list of roles as a tuple succeeds + """ + @roles('r1') + def command(): + pass + eq_hosts(command, ['a', 'b']) + + +@patched_env({'hosts': ('foo', 'bar')}) +def test_hosts_as_tuples(): + """ + Test that a list of hosts as a tuple succeeds + """ + def command(): + pass + eq_hosts(command, ['foo', 'bar']) + + +@patched_env({'hosts': ['foo']}) def test_hosts_decorator_overrides_env_hosts(): """ If @hosts is used it replaces any env.hosts value @@ -97,12 +149,32 @@ def test_hosts_decorator_overrides_env_hosts(): def command(): pass eq_hosts(command, ['bar']) + assert 'foo' not in get_hosts(command, [], [], []) + +@patched_env({'hosts': ['foo']}) +def test_hosts_decorator_overrides_env_hosts_with_task_decorator_first(): + 
""" + If @hosts is used it replaces any env.hosts value even with @task + """ + @task + @hosts('bar') + def command(): + pass + eq_hosts(command, ['bar']) assert 'foo' not in get_hosts(command, [], []) +@patched_env({'hosts': ['foo']}) +def test_hosts_decorator_overrides_env_hosts_with_task_decorator_last(): + @hosts('bar') + @task + def command(): + pass + eq_hosts(command, ['bar']) + assert 'foo' not in get_hosts(command, [], []) -@with_patched_object( - 'fabric.state', 'env', {'hosts': [' foo ', 'bar '], 'roles': []} -) + +@patched_env({'hosts': [' foo ', 'bar '], 'roles': [], + 'exclude_hosts':[]}) def test_hosts_stripped_env_hosts(): """ Make sure hosts defined in env.hosts are cleaned of extra spaces @@ -117,9 +189,7 @@ def command(): 'r2': ['b', 'c'], } -@with_patched_object( - 'fabric.state', 'env', _AttributeDict({'roledefs': spaced_roles}) -) +@patched_env({'roledefs': spaced_roles}) def test_roles_stripped_env_hosts(): """ Make sure hosts defined in env.roles are cleaned of extra spaces @@ -135,26 +205,31 @@ def test_hosts_decorator_expands_single_iterable(): @hosts(iterable) should behave like @hosts(*iterable) """ host_list = ['foo', 'bar'] + @hosts(host_list) def command(): pass - eq_(command.hosts, host_list) + eq_(command.hosts, host_list) def test_roles_decorator_expands_single_iterable(): """ @roles(iterable) should behave like @roles(*iterable) """ role_list = ['foo', 'bar'] + @roles(role_list) def command(): pass + eq_(command.roles, role_list) -@with_patched_object( - 'fabric.state', 'env', _AttributeDict({'roledefs': fake_roles}) -) +# +# Basic role behavior +# + +@patched_env({'roledefs': fake_roles}) @raises(SystemExit) @mock_streams('stderr') def test_aborts_on_nonexistent_roles(): @@ -166,9 +241,7 @@ def test_aborts_on_nonexistent_roles(): lazy_role = {'r1': lambda: ['a', 'b']} -@with_patched_object( - 'fabric.state', 'env', _AttributeDict({'roledefs': lazy_role}) -) +@patched_env({'roledefs': lazy_role}) def test_lazy_roles(): """ Roles 
may be callables returning lists, as well as regular lists @@ -179,16 +252,9 @@ def command(): eq_hosts(command, ['a', 'b']) -def test_escaped_task_arg_split(): - """ - Allow backslashes to escape the task argument separator character - """ - argstr = r"foo,bar\,biz\,baz,what comes after baz?" - eq_( - _escape_split(',', argstr), - ['foo', 'bar,biz,baz', 'what comes after baz?'] - ) - +# +# Fabfile loading +# def run_load_fabfile(path, sys_path): # Module-esque object @@ -205,7 +271,6 @@ def run_load_fabfile(path, sys_path): # Restore sys.path = orig_path - def test_load_fabfile_should_not_remove_real_path_elements(): for fabfile_path, sys_dot_path in ( # Directory not in path @@ -221,3 +286,179 @@ def test_load_fabfile_should_not_remove_real_path_elements(): ('fabfile.py', ['', 'some_dir', 'some_other_dir']), ): yield run_load_fabfile, fabfile_path, sys_dot_path + + +# +# Namespacing and new-style tasks +# + +def fabfile(name): + return os.path.join(os.path.dirname(__file__), 'support', name) + +@contextmanager +def path_prefix(module): + i = 0 + sys.path.insert(i, os.path.dirname(module)) + yield + sys.path.pop(i) + +class TestNamespaces(FabricTest): + def setup(self): + # Parent class preserves current env + super(TestNamespaces, self).setup() + # Reset new-style-tests flag so running tests via Fab itself doesn't + # muck with it. 
+ import fabric.state + if 'new_style_tasks' in fabric.state.env: + del fabric.state.env['new_style_tasks'] + + def test_implicit_discovery(self): + """ + Default to automatically collecting all tasks in a fabfile module + """ + implicit = fabfile("implicit_fabfile.py") + with path_prefix(implicit): + docs, funcs = load_fabfile(implicit) + eq_(len(funcs), 2) + ok_("foo" in funcs) + ok_("bar" in funcs) + + def test_explicit_discovery(self): + """ + If __all__ is present, only collect the tasks it specifies + """ + explicit = fabfile("explicit_fabfile.py") + with path_prefix(explicit): + docs, funcs = load_fabfile(explicit) + eq_(len(funcs), 1) + ok_("foo" in funcs) + ok_("bar" not in funcs) + + def test_should_load_decorated_tasks_only_if_one_is_found(self): + """ + If any new-style tasks are found, *only* new-style tasks should load + """ + module = fabfile('decorated_fabfile.py') + with path_prefix(module): + docs, funcs = load_fabfile(module) + eq_(len(funcs), 1) + ok_('foo' in funcs) + + def test_class_based_tasks_are_found_with_proper_name(self): + """ + Wrapped new-style tasks should preserve their function names + """ + module = fabfile('decorated_fabfile_with_classbased_task.py') + from fabric.state import env + with path_prefix(module): + docs, funcs = load_fabfile(module) + eq_(len(funcs), 1) + ok_('foo' in funcs) + + def test_recursion_steps_into_nontask_modules(self): + """ + Recursive loading will continue through modules with no tasks + """ + module = fabfile('deep') + with path_prefix(module): + docs, funcs = load_fabfile(module) + eq_(len(funcs), 1) + ok_('submodule.subsubmodule.deeptask' in _task_names(funcs)) + + def test_newstyle_task_presence_skips_classic_task_modules(self): + """ + Classic-task-only modules shouldn't add tasks if any new-style tasks exist + """ + module = fabfile('deep') + with path_prefix(module): + docs, funcs = load_fabfile(module) + eq_(len(funcs), 1) + ok_('submodule.classic_task' not in _task_names(funcs)) + + +# +# 
--list output +# + +def eq_output(docstring, format_, expected): + return eq_( + "\n".join(list_commands(docstring, format_)), + expected + ) + +def list_output(module, format_, expected): + module = fabfile(module) + with path_prefix(module): + docstring, tasks = load_fabfile(module) + with patched_context(fabric.state, 'commands', tasks): + eq_output(docstring, format_, expected) + +def test_list_output(): + lead = ":\n\n " + normal_head = COMMANDS_HEADER + lead + nested_head = COMMANDS_HEADER + NESTED_REMINDER + lead + for desc, module, format_, expected in ( + ("shorthand (& with namespacing)", 'deep', 'short', "submodule.subsubmodule.deeptask"), + ("normal (& with namespacing)", 'deep', 'normal', normal_head + "submodule.subsubmodule.deeptask"), + ("normal (with docstring)", 'docstring', 'normal', normal_head + "foo Foos!"), + ("nested (leaf only)", 'deep', 'nested', nested_head + """submodule: + subsubmodule: + deeptask"""), + ("nested (full)", 'tree', 'nested', nested_head + """build_docs + deploy + db: + migrate + system: + install_package + debian: + update_apt"""), + ): + list_output.description = "--list output: %s" % desc + yield list_output, module, format_, expected + del list_output.description + + +def test_task_names(): + for desc, input_, output in ( + ('top level (single)', {'a': 5}, ['a']), + ('top level (multiple, sorting)', {'a': 5, 'b': 6}, ['a', 'b']), + ('just nested', {'a': {'b': 5}}, ['a.b']), + ('mixed', {'a': 5, 'b': {'c': 6}}, ['a', 'b.c']), + ('top level comes before nested', {'z': 5, 'b': {'c': 6}}, ['z', 'b.c']), + ('peers sorted equally', {'z': 5, 'b': {'c': 6}, 'd': {'e': 7}}, ['z', 'b.c', 'd.e']), + ( + 'complex tree', + { + 'z': 5, + 'b': { + 'c': 6, + 'd': { + 'e': { + 'f': '7' + } + }, + 'g': 8 + }, + 'h': 9, + 'w': { + 'y': 10 + } + }, + ['h', 'z', 'b.c', 'b.g', 'b.d.e.f', 'w.y'] + ), + ): + eq_.description = "task name flattening: %s" % desc + yield eq_, _task_names(input_), output + del eq_.description + + +def 
test_crawl(): + for desc, name, mapping, output in ( + ("base case", 'a', {'a': 5}, 5), + ("one level", 'a.b', {'a': {'b': 5}}, 5), + ("deep", 'a.b.c.d.e', {'a': {'b': {'c': {'d': {'e': 5}}}}}, 5), + ("full tree", 'a.b.c', {'a': {'b': {'c': 5}, 'd': 6}, 'z': 7}, 5) + ): + eq_.description = "crawling dotted names: %s" % desc + yield eq_, _crawl(name, mapping), output + del eq_.description diff --git a/tests/test_network.py b/tests/test_network.py index a6b10146e2..6d40295ad8 100644 --- a/tests/test_network.py +++ b/tests/test_network.py @@ -6,22 +6,22 @@ import sys import paramiko -from nose.tools import with_setup +from nose.tools import with_setup, raises, ok_ from fudge import (Fake, clear_calls, clear_expectations, patch_object, verify, with_patched_object, patched_context, with_fakes) -from fabric.context_managers import settings, hide, show -from fabric.network import (HostConnectionCache, join_host_strings, normalize, - denormalize) -from fabric.io import output_loop -import fabric.network # So I can call patch_object correctly. Sigh. -from fabric.state import env, output, _get_system_username -from fabric.operations import run, sudo from utils import * from server import (server, PORT, RESPONSES, PASSWORDS, CLIENT_PRIVKEY, USER, CLIENT_PRIVKEY_PASSPHRASE) +from fabric.context_managers import settings, hide, show +from fabric.network import (HostConnectionCache, join_host_strings, normalize, + denormalize) +from fabric.io import output_loop +import fabric.network # So I can call patch_object correctly. Sigh. +from fabric.state import env, output, _get_system_username +from fabric.operations import run, sudo, prompt # # Subroutines, e.g. 
host string normalization @@ -95,14 +95,12 @@ def test_host_string_denormalization(self): 'localhost', username + '@localhost:22'), ): eq_.description = "Host-string denormalization: %s" % description - yield eq_, denormalize(string1), denormalize(string2) + yield eq_, denormalize(string1), denormalize(string2) del eq_.description - # # Connection caching # - @staticmethod @with_fakes def check_connection_calls(host_strings, num_calls): @@ -136,17 +134,33 @@ def test_connection_caching(self): TestNetwork.check_connection_calls.description = description yield TestNetwork.check_connection_calls, host_strings, num_calls + def test_connection_cache_deletion(self): + """ + HostConnectionCache should delete correctly w/ non-full keys + """ + hcc = HostConnectionCache() + fake = Fake('connect', callable=True) + with patched_context('fabric.network', 'connect', fake): + for host_string in ('hostname', 'user@hostname', + 'user@hostname:222'): + # Prime + hcc[host_string] + # Test + ok_(host_string in hcc) + # Delete + del hcc[host_string] + # Test + ok_(host_string not in hcc) + # # Connection loop flow # - @server() def test_saved_authentication_returns_client_object(self): cache = HostConnectionCache() assert isinstance(cache[env.host_string], paramiko.SSHClient) - @server() @with_fakes def test_prompts_for_password_without_good_authentication(self): @@ -156,6 +170,24 @@ def test_prompts_for_password_without_good_authentication(self): cache[env.host_string] + @raises(SystemExit) + @with_patched_object(output, 'aborts', False) + def test_aborts_on_prompt_with_abort_on_prompt(self): + env.abort_on_prompts = True + prompt("This will abort") + + + @server() + @raises(SystemExit) + @with_patched_object(output, 'aborts', False) + def test_aborts_on_password_prompt_with_abort_on_prompt(self): + env.password = None + env.abort_on_prompts = True + with password_response(PASSWORDS[env.user], times_called=1): + cache = HostConnectionCache() + cache[env.host_string] + + 
@mock_streams('stdout') @server() def test_trailing_newline_line_drop(self): @@ -176,7 +208,6 @@ def test_trailing_newline_line_drop(self): # Also test that the captured value matches, too. eq_(output_string, result) - @server() def test_sudo_prompt_kills_capturing(self): """ @@ -186,7 +217,6 @@ def test_sudo_prompt_kills_capturing(self): with hide('everything'): eq_(sudo(cmd), RESPONSES[cmd]) - @server() def test_password_memory_on_user_switch(self): """ @@ -223,7 +253,6 @@ def _to_user(user): ): sudo("ls /simple") - @mock_streams('stderr') @server() def test_password_prompt_displays_host_string(self): @@ -238,7 +267,6 @@ def test_password_prompt_displays_host_string(self): regex = r'^\[%s\] Login password: ' % env.host_string assert_contains(regex, sys.stderr.getvalue()) - @mock_streams('stderr') @server(pubkeys=True) def test_passphrase_prompt_displays_host_string(self): @@ -254,7 +282,6 @@ def test_passphrase_prompt_displays_host_string(self): regex = r'^\[%s\] Login password: ' % env.host_string assert_contains(regex, sys.stderr.getvalue()) - def test_sudo_prompt_display_passthrough(self): """ Sudo prompt should display (via passthrough) when stdout/stderr shown @@ -297,8 +324,7 @@ def _prompt_display(display_output): [%(prefix)s] Login password: [%(prefix)s] out: Sorry, try again. 
[%(prefix)s] out: sudo password: """ % {'prefix': env.host_string} - eq_(expected[1:], sys.stdall.getvalue()) - + eq_(expected=expected[1:], result=sys.stdall.getvalue()) @mock_streams('both') @server( @@ -330,8 +356,7 @@ def test_consecutive_sudos_should_not_have_blank_line(self): [%(prefix)s] out: result1 [%(prefix)s] out: result2 """ % {'prefix': env.host_string} - eq_(expected[1:], sys.stdall.getvalue()) - + eq_(expected=expected[1:], result=sys.stdall.getvalue()) @mock_streams('both') @server(pubkeys=True, responses={'silent': '', 'normal': 'foo'}) @@ -363,8 +388,7 @@ def test_silent_commands_should_not_have_blank_line(self): [%(prefix)s] run: normal [%(prefix)s] out: foo """ % {'prefix': env.host_string} - eq_(expected[1:], sys.stdall.getvalue()) - + eq_(expected=expected[1:], result=sys.stdall.getvalue()) @mock_streams('both') @server( @@ -392,8 +416,7 @@ def test_io_should_print_prefix_if_ouput_prefix_is_true(self): [%(prefix)s] out: result1 [%(prefix)s] out: result2 """ % {'prefix': env.host_string} - eq_(expected[1:], sys.stdall.getvalue()) - + eq_(expected=expected[1:], result=sys.stdall.getvalue()) @mock_streams('both') @server( @@ -422,4 +445,4 @@ def test_io_should_not_print_prefix_if_ouput_prefix_is_false(self): result1 result2 """ % {'prefix': env.host_string} - eq_(expected[1:], sys.stdall.getvalue()) + eq_(expected=expected[1:], result=sys.stdall.getvalue()) diff --git a/tests/test_operations.py b/tests/test_operations.py index c8bbfb8a6d..ba172a7c51 100644 --- a/tests/test_operations.py +++ b/tests/test_operations.py @@ -3,20 +3,24 @@ import os import shutil import sys -import tempfile import types from contextlib import nested from StringIO import StringIO +import unittest +import random +import types + from nose.tools import raises, eq_ from fudge import with_patched_object -from fabric.state import env +from fabric.state import env, output from fabric.operations import require, prompt, _sudo_prefix, _shell_wrap, \ _shell_escape -from 
fabric.api import get, put, hide, show, cd, lcd, local +from fabric.api import get, put, hide, show, cd, lcd, local, run, sudo from fabric.sftp import SFTP +from fabric.decorators import with_settings from utils import * from server import (server, PORT, RESPONSES, FILES, PASSWORDS, CLIENT_PRIVKEY, USER, CLIENT_PRIVKEY_PASSPHRASE) @@ -25,6 +29,7 @@ # require() # + def test_require_single_existing_key(): """ When given a single existing key, require() throws no exceptions @@ -67,7 +72,7 @@ def test_require_mixed_state_keys(): require('foo', 'version') -@mock_streams('stderr') +@mock_streams('both') def test_require_mixed_state_keys_prints_missing_only(): """ When given mixed-state keys, require() prints missing keys only @@ -75,9 +80,9 @@ def test_require_mixed_state_keys_prints_missing_only(): try: require('foo', 'version') except SystemExit: - err = sys.stderr.getvalue() - assert 'version' not in err - assert 'foo' in err + all = sys.stdall.getvalue() + assert 'version' not in all + assert 'foo' in all @mock_streams('stderr') @@ -111,6 +116,7 @@ def fake_providing_function(): def p(x): print x, + @mock_streams('stdout') @with_patched_object(sys.modules['__builtin__'], 'raw_input', p) def test_prompt_appends_space(): @@ -119,7 +125,7 @@ def test_prompt_appends_space(): """ s = "This is my prompt" prompt(s) - eq_(sys.stdout.getvalue(), s + ' ') + eq_(result=sys.stdout.getvalue(), expected=s + ' ') @mock_streams('stdout') @@ -131,8 +137,8 @@ def test_prompt_with_default(): s = "This is my prompt" d = "default!" 
prompt(s, default=d) - eq_(sys.stdout.getvalue(), "%s [%s] " % (s, d)) - + eq_(result=sys.stdout.getvalue(), expected="%s [%s] " % (s, d)) + # # run()/sudo() @@ -155,6 +161,7 @@ def test_sudo_prefix_without_user(): eq_(_sudo_prefix(user=None), env.sudo_prefix % env.sudo_prompt) +@with_settings(use_shell=True) def test_shell_wrap(): prefix = "prefix" command = "command" @@ -173,6 +180,7 @@ def test_shell_wrap(): del eq_.description +@with_settings(use_shell=True) def test_shell_wrap_escapes_command_if_shell_is_true(): """ _shell_wrap() escapes given command if shell=True @@ -216,33 +224,60 @@ def test_shell_escape_escapes_backticks(): eq_(_shell_escape(cmd), "touch test.pid && kill \`cat test.pid\`") -# -# get() and put() -# - -class TestFileTransfers(FabricTest): - def setup(self): - super(TestFileTransfers, self).setup() - self.tmpdir = tempfile.mkdtemp() - - def teardown(self): - super(TestFileTransfers, self).teardown() - shutil.rmtree(self.tmpdir) +class TestCombineStderr(FabricTest): + @server() + def test_local_none_global_true(self): + """ + combine_stderr: no kwarg => uses global value (True) + """ + output.everything = False + r = run("both_streams") + # Note: the exact way the streams are jumbled here is an implementation + # detail of our fake SSH server and may change in the future. 
+ eq_("ssttddoeurtr", r.stdout) + eq_(r.stderr, "") - def path(self, *path_parts): - return os.path.join(self.tmpdir, *path_parts) + @server() + def test_local_none_global_false(self): + """ + combine_stderr: no kwarg => uses global value (False) + """ + output.everything = False + env.combine_stderr = False + r = run("both_streams") + eq_("stdout", r.stdout) + eq_("stderr", r.stderr) - def exists_remotely(self, path): - return SFTP(env.host_string).exists(path) + @server() + def test_local_true_global_false(self): + """ + combine_stderr: True kwarg => overrides global False value + """ + output.everything = False + env.combine_stderr = False + r = run("both_streams", combine_stderr=True) + eq_("ssttddoeurtr", r.stdout) + eq_(r.stderr, "") - def exists_locally(self, path): - return os.path.exists(path) + @server() + def test_local_false_global_true(self): + """ + combine_stderr: False kwarg => overrides global True value + """ + output.everything = False + env.combine_stderr = True + r = run("both_streams", combine_stderr=False) + eq_("stdout", r.stdout) + eq_("stderr", r.stderr) +# +# get() and put() +# +class TestFileTransfers(FabricTest): # # get() # - @server(files={'/home/user/.bashrc': 'bash!'}, home='/home/user') def test_get_relative_remote_dir_uses_home(self): """ @@ -252,8 +287,6 @@ def test_get_relative_remote_dir_uses_home(self): # Another if-it-doesn't-error-out-it-passed test; meh. 
eq_(get('.bashrc', self.path()), [self.path('.bashrc')]) - - @server() def test_get_single_file(self): """ @@ -265,7 +298,6 @@ def test_get_single_file(self): get(remote, local) eq_contents(local, FILES[remote]) - @server() def test_get_sibling_globs(self): """ @@ -277,7 +309,6 @@ def test_get_sibling_globs(self): for remote in remotes: eq_contents(self.path(remote), FILES[remote]) - @server() def test_get_single_file_in_folder(self): """ @@ -288,7 +319,6 @@ def test_get_single_file_in_folder(self): get('folder', self.tmpdir) eq_contents(self.path(remote), FILES[remote]) - @server() def test_get_tree(self): """ @@ -300,7 +330,6 @@ def test_get_tree(self): for path, contents in leaves: eq_contents(self.path(path[1:]), contents) - @server() def test_get_tree_with_implicit_local_path(self): """ @@ -320,7 +349,6 @@ def test_get_tree_with_implicit_local_path(self): if os.path.exists(dirname): shutil.rmtree(dirname) - @server() def test_get_absolute_path_should_save_relative(self): """ @@ -333,7 +361,6 @@ def test_get_absolute_path_should_save_relative(self): assert self.exists_locally(os.path.join(lpath, 'subfolder')) assert not self.exists_locally(os.path.join(lpath, 'tree/subfolder')) - @server() def test_path_formatstr_nonrecursively_is_just_filename(self): """ @@ -345,7 +372,6 @@ def test_path_formatstr_nonrecursively_is_just_filename(self): get('/tree/subfolder/file3.txt', ltarget) assert self.exists_locally(os.path.join(lpath, 'file3.txt')) - @server() @mock_streams('stderr') def _invalid_file_obj_situations(self, remote_path): @@ -365,7 +391,6 @@ def test_directory_and_file_object_invalid(self): """ self._invalid_file_obj_situations('/tree') - @server() def test_get_single_file_absolutely(self): """ @@ -376,7 +401,6 @@ def test_get_single_file_absolutely(self): get(target, self.tmpdir) eq_contents(self.path(os.path.basename(target)), FILES[target]) - @server() def test_get_file_with_nonexistent_target(self): """ @@ -388,7 +412,6 @@ def 
test_get_file_with_nonexistent_target(self): get(target, local) eq_contents(local, FILES[target]) - @server() @mock_streams('stderr') def test_get_file_with_existing_file_target(self): @@ -404,7 +427,6 @@ def test_get_file_with_existing_file_target(self): assert "%s already exists" % local in sys.stderr.getvalue() eq_contents(local, FILES[target]) - @server() def test_get_file_to_directory(self): """ @@ -418,7 +440,6 @@ def test_get_file_to_directory(self): get(target, self.tmpdir) eq_contents(self.path(target), FILES[target]) - @server(port=2200) @server(port=2201) def test_get_from_multiple_servers(self): @@ -443,7 +464,6 @@ def test_get_from_multiple_servers(self): tmp, "127.0.0.1-%s" % port, 'file3.txt' )) - @server() def test_get_from_empty_directory_uses_cwd(self): """ @@ -456,7 +476,6 @@ def test_get_from_empty_directory_uses_cwd(self): for x in "file.txt file2.txt tree/file1.txt".split(): assert os.path.exists(os.path.join(self.tmpdir, x)) - @server() def _get_to_cwd(self, arg): path = 'file.txt' @@ -485,7 +504,6 @@ def test_get_to_None_uses_default_format_string(self): """ self._get_to_cwd(None) - @server() def test_get_should_accept_file_like_objects(self): """ @@ -497,7 +515,6 @@ def test_get_should_accept_file_like_objects(self): get(target, fake_file) eq_(fake_file.getvalue(), FILES[target]) - @server() def test_get_interpolation_without_host(self): """ @@ -514,7 +531,6 @@ def test_get_interpolation_without_host(self): get('/folder/file3.txt', local_path) assert self.exists_locally(tmp + "bar/file3.txt") - @server() def test_get_returns_list_of_local_paths(self): """ @@ -526,7 +542,6 @@ def test_get_returns_list_of_local_paths(self): files = ['file1.txt', 'file2.txt', 'subfolder/file3.txt'] eq_(map(lambda x: os.path.join(d, 'tree', x), files), retval) - @server() def test_get_returns_none_for_stringio(self): """ @@ -535,18 +550,17 @@ def test_get_returns_none_for_stringio(self): with hide('everything'): eq_([], get('/file.txt', StringIO())) - @server() 
def test_get_return_value_failed_attribute(self): """ - get()'s return value should indicate any paths which failed to download. + get()'s return value should indicate any paths which failed to + download. """ with settings(hide('everything'), warn_only=True): retval = get('/doesnt/exist', self.path()) eq_(['/doesnt/exist'], retval.failed) assert not retval.succeeded - @server() def test_get_should_not_use_windows_slashes_in_remote_paths(self): """ @@ -557,28 +571,22 @@ def test_get_should_not_use_windows_slashes_in_remote_paths(self): sftp = SFTP(env.host_string) eq_(sftp.glob(path), [path]) - - # # put() # - @server() def test_put_file_to_existing_directory(self): """ put() a single file into an existing remote directory """ text = "foo!" - local = self.path('foo.txt') + local = self.mkfile('foo.txt', text) local2 = self.path('foo2.txt') - with open(local, 'w') as fd: - fd.write(text) with hide('everything'): put(local, '/') get('/foo.txt', local2) eq_contents(local2, text) - @server() def test_put_to_empty_directory_uses_cwd(self): """ @@ -597,7 +605,6 @@ def test_put_to_empty_directory_uses_cwd(self): get('foo.txt', local2) eq_contents(local2, text) - @server() def test_put_from_empty_directory_uses_cwd(self): """ @@ -623,7 +630,6 @@ def test_put_from_empty_directory_uses_cwd(self): # Restore cwd os.chdir(old_cwd) - @server() def test_put_should_accept_file_like_objects(self): """ @@ -641,7 +647,6 @@ def test_put_should_accept_file_like_objects(self): # Sanity test of file pointer eq_(pointer, fake_file.tell()) - @server() @raises(ValueError) def test_put_should_raise_exception_for_nonexistent_local_path(self): @@ -650,7 +655,6 @@ def test_put_should_raise_exception_for_nonexistent_local_path(self): """ put('thisfiledoesnotexist', '/tmp') - @server() def test_put_returns_list_of_remote_paths(self): """ @@ -664,7 +668,6 @@ def test_put_returns_list_of_remote_paths(self): retval = put(f, p) eq_(retval, [p]) - @server() def 
test_put_returns_list_of_remote_paths_with_stringio(self): """ @@ -674,7 +677,6 @@ def test_put_returns_list_of_remote_paths_with_stringio(self): with hide('everything'): eq_(put(StringIO('contents'), f), [f]) - @server() def test_put_return_value_failed_attribute(self): """ @@ -686,12 +688,9 @@ def test_put_return_value_failed_attribute(self): eq_([""], retval.failed) assert not retval.succeeded - - # # Interactions with cd() # - @server() def test_cd_should_apply_to_put(self): """ @@ -706,7 +705,6 @@ def test_cd_should_apply_to_put(self): put(local, f) assert self.exists_remotely('%s/%s' % (d, f)) - @server(files={'/tmp/test.txt': 'test'}) def test_cd_should_apply_to_get(self): """ @@ -717,7 +715,6 @@ def test_cd_should_apply_to_get(self): get('test.txt', local) assert os.path.exists(local) - @server() def test_cd_should_not_apply_to_absolute_put(self): """ @@ -731,7 +728,6 @@ def test_cd_should_not_apply_to_absolute_put(self): assert not self.exists_remotely('/tmp/test.txt') assert self.exists_remotely('/test.txt') - @server(files={'/test.txt': 'test'}) def test_cd_should_not_apply_to_absolute_get(self): """ @@ -742,7 +738,6 @@ def test_cd_should_not_apply_to_absolute_get(self): get('/test.txt', local) assert os.path.exists(local) - @server() def test_lcd_should_apply_to_put(self): """ @@ -758,7 +753,6 @@ def test_lcd_should_apply_to_put(self): put(f, '/') assert self.exists_remotely('/%s' % f) - @server() def test_lcd_should_apply_to_get(self): """ diff --git a/tests/test_project.py b/tests/test_project.py new file mode 100644 index 0000000000..ef4ce9a032 --- /dev/null +++ b/tests/test_project.py @@ -0,0 +1,161 @@ +import unittest +import os + +import fudge +from fudge.inspector import arg + +from fabric.contrib import project + + +class UploadProjectTestCase(unittest.TestCase): + """Test case for :func: `fabric.contrib.project.upload_project`.""" + + fake_tmp = "testtempfolder" + + + def setUp(self): + fudge.clear_expectations() + + # We need to mock out run, 
local, and put + + self.fake_run = fudge.Fake('project.run', callable=True) + self.patched_run = fudge.patch_object( + project, + 'run', + self.fake_run + ) + + self.fake_local = fudge.Fake('local', callable=True) + self.patched_local = fudge.patch_object( + project, + 'local', + self.fake_local + ) + + self.fake_put = fudge.Fake('put', callable=True) + self.patched_put = fudge.patch_object( + project, + 'put', + self.fake_put + ) + + # We don't want to create temp folders + self.fake_mkdtemp = fudge.Fake( + 'mkdtemp', + expect_call=True + ).returns(self.fake_tmp) + self.patched_mkdtemp = fudge.patch_object( + project, + 'mkdtemp', + self.fake_mkdtemp + ) + + + def tearDown(self): + self.patched_run.restore() + self.patched_local.restore() + self.patched_put.restore() + + fudge.clear_expectations() + + + @fudge.with_fakes + def test_temp_folder_is_used(self): + """A unique temp folder is used for creating the archive to upload.""" + + # Exercise + project.upload_project() + + + @fudge.with_fakes + def test_project_is_archived_locally(self): + """The project should be archived locally before being uploaded.""" + + # local() is called more than once so we need an extra next_call() + # otherwise fudge compares the args to the last call to local() + self.fake_local.with_args(arg.startswith("tar -czf")).next_call() + + # Exercise + project.upload_project() + + + @fudge.with_fakes + def test_current_directory_is_uploaded_by_default(self): + """By default the project uploaded is the current working directory.""" + + cwd_path, cwd_name = os.path.split(os.getcwd()) + + # local() is called more than once so we need an extra next_call() + # otherwise fudge compares the args to the last call to local() + self.fake_local.with_args( + arg.endswith("-C %s %s" % (cwd_path, cwd_name)) + ).next_call() + + # Exercise + project.upload_project() + + + @fudge.with_fakes + def test_path_to_local_project_can_be_specified(self): + """It should be possible to specify which local folder to 
upload.""" + + project_path = "path/to/my/project" + + # local() is called more than once so we need an extra next_call() + # otherwise fudge compares the args to the last call to local() + self.fake_local.with_args( + arg.endswith("-C %s %s" % os.path.split(project_path)) + ).next_call() + + # Exercise + project.upload_project(local_dir=project_path) + + + @fudge.with_fakes + def test_path_to_local_project_can_end_in_separator(self): + """A local path ending in a separator should be handled correctly.""" + + project_path = "path/to/my" + base = "project" + + # local() is called more than once so we need an extra next_call() + # otherwise fudge compares the args to the last call to local() + self.fake_local.with_args( + arg.endswith("-C %s %s" % (project_path, base)) + ).next_call() + + # Exercise + project.upload_project(local_dir="%s/%s/" % (project_path, base)) + + + @fudge.with_fakes + def test_default_remote_folder_is_home(self): + """Project is uploaded to remote home by default.""" + + local_dir = "folder" + + # local() is called more than once so we need an extra next_call() + # otherwise fudge compares the args to the last call to local() + self.fake_put.with_args( + "%s/folder.tar.gz" % self.fake_tmp, "folder.tar.gz" + ).next_call() + + # Exercise + project.upload_project(local_dir=local_dir) + + @fudge.with_fakes + def test_path_to_remote_folder_can_be_specified(self): + """It should be possible to specify which local folder to upload to.""" + + local_dir = "folder" + remote_path = "path/to/remote/folder" + + # local() is called more than once so we need an extra next_call() + # otherwise fudge compares the args to the last call to local() + self.fake_put.with_args( + "%s/folder.tar.gz" % self.fake_tmp, "%s/folder.tar.gz" % remote_path + ).next_call() + + # Exercise + project.upload_project(local_dir=local_dir, remote_dir=remote_path) + diff --git a/tests/test_state.py b/tests/test_state.py index b9a4d4f49d..e6fcea52b9 100644 --- a/tests/test_state.py 
+++ b/tests/test_state.py @@ -8,7 +8,7 @@ def test_dict_aliasing(): Assigning values to aliases updates aliased keys """ ad = _AliasDict( - {'bar': False, 'biz': True, 'baz': False}, + {'bar': False, 'biz': True, 'baz': False}, aliases={'foo': ['bar', 'biz', 'baz']} ) # Before @@ -28,7 +28,7 @@ def test_nested_dict_aliasing(): Aliases can be nested """ ad = _AliasDict( - {'bar': False, 'biz': True}, + {'bar': False, 'biz': True}, aliases={'foo': ['bar', 'nested'], 'nested': ['biz']} ) # Before @@ -46,7 +46,7 @@ def test_dict_alias_expansion(): Alias expansion """ ad = _AliasDict( - {'bar': False, 'biz': True}, + {'bar': False, 'biz': True}, aliases={'foo': ['bar', 'nested'], 'nested': ['biz']} ) eq_(ad.expand_aliases(['foo']), ['bar', 'biz']) diff --git a/tests/test_tasks.py b/tests/test_tasks.py new file mode 100644 index 0000000000..e760f01f4d --- /dev/null +++ b/tests/test_tasks.py @@ -0,0 +1,98 @@ +import unittest +from nose.tools import eq_, raises +import random + +from fabric import tasks + +def test_base_task_provides_undefined_name(): + task = tasks.Task() + eq_("undefined", task.name) + +@raises(NotImplementedError) +def test_base_task_raises_exception_on_call_to_run(): + task = tasks.Task() + task.run() + +class TestWrappedCallableTask(unittest.TestCase): + def test_run_is_wrapped_callable(self): + def foo(): pass + + task = tasks.WrappedCallableTask(foo) + self.assertEqual(task.wrapped, foo) + + def test_name_is_the_name_of_the_wrapped_callable(self): + def foo(): pass + foo.__name__ = "random_name_%d" % random.randint(1000, 2000) + + task = tasks.WrappedCallableTask(foo) + self.assertEqual(task.name, foo.__name__) + + def test_reads_double_under_doc_from_callable(self): + def foo(): pass + foo.__doc__ = "Some random __doc__: %d" % random.randint(1000, 2000) + + task = tasks.WrappedCallableTask(foo) + self.assertEqual(task.__doc__, foo.__doc__) + + def test_dispatches_to_wrapped_callable_on_run(self): + random_value = "some random value %d" % 
random.randint(1000, 2000) + def foo(): return random_value + + task = tasks.WrappedCallableTask(foo) + self.assertEqual(random_value, task()) + + def test_passes_all_regular_args_to_run(self): + def foo(*args): return args + + random_args = tuple([random.randint(1000, 2000) for i in range(random.randint(1, 5))]) + task = tasks.WrappedCallableTask(foo) + self.assertEqual(random_args, task(*random_args)) + + def test_passes_all_keyword_args_to_run(self): + def foo(**kwargs): return kwargs + + random_kwargs = {} + for i in range(random.randint(1, 5)): + random_key = ("foo", "bar", "baz", "foobar", "barfoo")[i] + random_kwargs[random_key] = random.randint(1000, 2000) + + task = tasks.WrappedCallableTask(foo) + self.assertEqual(random_kwargs, task(**random_kwargs)) + + def test_calling_the_object_is_the_same_as_run(self): + random_return = random.randint(1000, 2000) + def foo(): return random_return + + task = tasks.WrappedCallableTask(foo) + self.assertEqual(task(), task.run()) + + +# Reminder: decorator syntax, e.g.: +# @foo +# def bar():... +# +# is semantically equivalent to: +# def bar():... 
+# bar = foo(bar) +# +# this simplifies testing :) + +def test_decorator_incompatibility_on_task(): + from fabric.decorators import task, hosts, runs_once, roles + def foo(): return "foo" + foo = task(foo) + + # since we aren't setting foo to be the newly decorated thing, its cool + hosts('me@localhost')(foo) + runs_once(foo) + roles('www')(foo) + +def test_decorator_closure_hiding(): + from fabric.decorators import task, hosts + def foo(): print env.host_string + foo = hosts("me@localhost")(foo) + foo = task(foo) + + # this broke in the old way, due to closure stuff hiding in the + # function, but task making an object + eq_(["me@localhost"], foo.hosts) diff --git a/tests/test_utils.py b/tests/test_utils.py index 04284c1b5f..3d980f0611 100644 --- a/tests/test_utils.py +++ b/tests/test_utils.py @@ -9,19 +9,24 @@ from fabric.state import output, env from fabric.utils import warn, indent, abort, puts, fastprint -from fabric import utils # For patching +from fabric import utils # For patching from fabric.context_managers import settings -from utils import mock_streams +from utils import mock_streams, eq_ -@mock_streams('stderr') +@mock_streams('both') @with_patched_object(output, 'warnings', True) def test_warn(): """ warn() should print 'Warning' plus given text """ warn("Test") - assert "\nWarning: Test\n\n" == sys.stderr.getvalue() + result = sys.stdall.getvalue() + expected = """ +Warning: Test +""" + eq_(expected=expected[1:], result=result) +# "\nWarning: Test\n\n" == "%s" % result def test_indent(): @@ -71,10 +76,14 @@ def test_abort_message(): except SystemExit: pass result = sys.stderr.getvalue() - eq_("\nFatal error: Test\n\nAborting.\n", result) - + expected = """ +Fatal error: Test +Aborting. 
+""" + eq_(expected=expected[1:], result=result) + -@mock_streams('stdout') +@mock_streams('both') def test_puts_with_user_output_on(): """ puts() should print input to sys.stdout if "user" output level is on @@ -82,20 +91,20 @@ def test_puts_with_user_output_on(): s = "string!" output.user = True puts(s, show_prefix=False) - eq_(sys.stdout.getvalue(), s + "\n") + eq_(sys.stdall.getvalue(), s + "\n") -@mock_streams('stdout') +@mock_streams('both') def test_puts_with_user_output_off(): """ puts() shouldn't print input to sys.stdout if "user" output level is off """ output.user = False puts("You aren't reading this.") - eq_(sys.stdout.getvalue(), "") + eq_(sys.stdall.getvalue(), "") -@mock_streams('stdout') +@mock_streams('both') def test_puts_with_prefix(): """ puts() should prefix output with env.host_string if non-empty @@ -104,10 +113,11 @@ def test_puts_with_prefix(): h = "localhost" with settings(host_string=h): puts(s) - eq_(sys.stdout.getvalue(), "[%s] %s" % (h, s + "\n")) + expected = "[%s] %s\n" % (h, s) + eq_(result=sys.stdall.getvalue(),expected=expected ) -@mock_streams('stdout') +@mock_streams('both') def test_puts_without_prefix(): """ puts() shouldn't prefix output with env.host_string if show_prefix is False @@ -115,7 +125,9 @@ def test_puts_without_prefix(): s = "my output" h = "localhost" puts(s, show_prefix=False) - eq_(sys.stdout.getvalue(), "%s" % (s + "\n")) + result = sys.stdall.getvalue() + expected = "%s\n" % s + eq_(expected=expected, result=result) def test_fastprint_calls_puts(): diff --git a/tests/utils.py b/tests/utils.py index ce3754e8e7..96969e6ea8 100644 --- a/tests/utils.py +++ b/tests/utils.py @@ -1,27 +1,34 @@ from __future__ import with_statement -from StringIO import StringIO # No need for cStringIO at this time +from StringIO import StringIO # No need for cStringIO at this time from contextlib import contextmanager +from copy import deepcopy +from fudge.patcher import with_patched_object from functools import wraps, partial 
from types import StringTypes import copy import getpass +import os import re +import shutil import sys +import tempfile from fudge import Fake, patched_context, clear_expectations from fabric.context_managers import settings from fabric.network import interpret_host_string from fabric.state import env, output +from fabric.sftp import SFTP import fabric.network +from fabric.logger import consolelogger from server import PORT, PASSWORDS, USER, HOST class FabricTest(object): """ - Nose-oriented test runner class that wipes env after every test. + Nose-oriented test runner which wipes state.env and provides file helpers. """ def setup(self): # Clear Fudge mock expectations @@ -38,10 +45,28 @@ def setup(self): # Command response mocking is easier without having to account for # shell wrapping everywhere. env.use_shell = False + # Temporary local file dir + self.tmpdir = tempfile.mkdtemp() def teardown(self): env.update(self.previous_env) output.update(self.previous_output) + shutil.rmtree(self.tmpdir) + + def path(self, *path_parts): + return os.path.join(self.tmpdir, *path_parts) + + def mkfile(self, path, contents): + dest = self.path(path) + with open(dest, 'w') as fd: + fd.write(contents) + return dest + + def exists_remotely(self, path): + return SFTP(env.host_string).exists(path) + + def exists_locally(self, path): + return os.path.exists(path) class CarbonCopy(StringIO): @@ -94,6 +119,7 @@ def func() both = (which == 'both') stdout = (which == 'stdout') or both stderr = (which == 'stderr') or both + def mocked_streams_decorator(func): @wraps(func) def inner_wrapper(*args, **kwargs): @@ -101,12 +127,14 @@ def inner_wrapper(*args, **kwargs): sys.stdall = StringIO() fake_stdout = CarbonCopy(cc=sys.stdall) fake_stderr = CarbonCopy(cc=sys.stdall) + consolelogger.stream = fake_stderr else: fake_stdout, fake_stderr = StringIO(), StringIO() if stdout: my_stdout, sys.stdout = sys.stdout, fake_stdout if stderr: my_stderr, sys.stderr = sys.stderr, fake_stderr + 
consolelogger.stream = fake_stderr try: ret = func(*args, **kwargs) finally: @@ -114,8 +142,10 @@ def inner_wrapper(*args, **kwargs): sys.stdout = my_stdout if stderr: sys.stderr = my_stderr + consolelogger.stream = my_stderr if both: del sys.stdall + consolelogger.stream = my_stderr return inner_wrapper return mocked_streams_decorator @@ -183,28 +213,51 @@ def line_prefix(prefix, string): return "\n".join(prefix + x for x in string.splitlines()) -def eq_(a, b, msg=None): +def eq_(result, expected, msg=None): """ Shadow of the Nose builtin which presents easier to read multiline output. """ - default_msg = """ -Expected: -%s - -Got: -%s + params = {'expected': expected, 'result': result} + aka = """ --------------------------------- aka ----------------------------------------- Expected: -%r +%(expected)r Got: -%r -""" % (a, b, a, b) - assert a == b, msg or default_msg +%(result)r +""" % params + default_msg = """ +Expected: +%(expected)s + +Got: +%(result)s +""" % params + if (repr(result) != str(result)) or (repr(expected) != str(expected)): + default_msg += aka + assert result == expected, msg or default_msg def eq_contents(path, text): with open(path) as fd: eq_(text, fd.read()) + + +def patched_env(updates): + """ + Execute a function with a patched copy of ``fabric.state.env``. + + ``fabric.state.env`` is patched during the wrapped functions' run, with an + equivalent copy that has been ``update``d with the given ``updates``. + + E.g. with ``fabric.state.env = {'foo': 'bar', 'biz': 'baz'}``, a function + decorated with ``@patched_env({'foo': 'notbar'})`` would see + ``fabric.state.env`` as equal to ``{'biz': 'baz', 'foo': 'notbar'}``. + """ + from fabric.state import env + def wrapper(func): + new_env = deepcopy(env).update(updates) + return with_patched_object('fabric.state', 'env', new_env) + return wrapper