Imported Upstream version 1.1.2
Chris Lamb
12 years ago
30 | 30 | Erich Heine |
31 | 31 | Travis Swicegood |
32 | 32 | Paul Smith |
33 | Alex Koshelev | |
34 | Stephen Goss | |
35 | James Murty | |
36 | Thomas Ballinger | |
33 | 37 | Rick Harding |
34 | 38 | Kirill Pinchuk |
39 | Ales Zoulek | |
40 | Casey Banner | |
35 | 41 | Roman Imankulov |
42 | Rodrigue Alcazar | |
43 | Jeremy Avnet | |
44 | Matt Chisholm | |
45 | Mark Merritt | |
36 | 46 | Max Arnold |
47 | Szymon Reichmann |
0 | 0 | Metadata-Version: 1.0 |
1 | 1 | Name: Fabric |
2 | Version: 1.0.2 | |
2 | Version: 1.1.2 | |
3 | 3 | Summary: Fabric is a simple, Pythonic tool for remote execution and deployment. |
4 | 4 | Home-page: http://fabfile.org |
5 | 5 | Author: Jeff Forcier |
7 | 7 | License: UNKNOWN |
8 | 8 | Description: |
9 | 9 | To find out what's new in this version of Fabric, please see `the changelog |
10 | <http://docs.fabfile.org/en/1.0.2/changes/1.0.2.html>`_. | |
10 | <http://docs.fabfile.org/en/1.1.2/changes/1.1.2.html>`_. | |
11 | 11 | |
12 | 12 | You can also install the `in-development version |
13 | 13 | <https://github.com/bitprophet/fabric/tarball/master#egg=fabric-dev>`_ using |
2 | 2 | LICENSE |
3 | 3 | MANIFEST.in |
4 | 4 | README |
5 | fabfile.py | |
6 | 5 | requirements.txt |
7 | 6 | setup.py |
8 | 7 | Fabric.egg-info/PKG-INFO |
25 | 24 | docs/api/core/decorators.rst |
26 | 25 | docs/api/core/network.rst |
27 | 26 | docs/api/core/operations.rst |
27 | docs/api/core/tasks.rst | |
28 | 28 | docs/api/core/utils.rst |
29 | 29 | docs/changes/0.9.1.rst |
30 | 30 | docs/changes/0.9.2.rst |
37 | 37 | docs/changes/1.0.1.rst |
38 | 38 | docs/changes/1.0.2.rst |
39 | 39 | docs/changes/1.0.rst |
40 | docs/changes/1.1.1.rst | |
41 | docs/changes/1.1.2.rst | |
42 | docs/changes/1.1.rst | |
40 | 43 | docs/usage/env.rst |
41 | 44 | docs/usage/execution.rst |
42 | 45 | docs/usage/fab.rst |
45 | 48 | docs/usage/library.rst |
46 | 49 | docs/usage/output_controls.rst |
47 | 50 | docs/usage/ssh.rst |
51 | docs/usage/tasks.rst | |
52 | fabfile/__init__.py | |
53 | fabfile/docs.py | |
48 | 54 | fabric/__init__.py |
49 | 55 | fabric/api.py |
50 | 56 | fabric/auth.py |
57 | 63 | fabric/operations.py |
58 | 64 | fabric/sftp.py |
59 | 65 | fabric/state.py |
66 | fabric/tasks.py | |
60 | 67 | fabric/thread_handling.py |
61 | 68 | fabric/utils.py |
62 | 69 | fabric/version.py |
70 | 77 | tests/integration.py |
71 | 78 | tests/server.py |
72 | 79 | tests/test_context_managers.py |
80 | tests/test_contrib.py | |
73 | 81 | tests/test_decorators.py |
74 | 82 | tests/test_main.py |
75 | 83 | tests/test_network.py |
76 | 84 | tests/test_operations.py |
85 | tests/test_project.py | |
77 | 86 | tests/test_server.py |
78 | 87 | tests/test_state.py |
88 | tests/test_tasks.py | |
79 | 89 | tests/test_utils.py |
80 | 90 | tests/test_version.py |
81 | tests/utils.py⏎ | |
91 | tests/utils.py | |
92 | tests/support/__init__.py | |
93 | tests/support/decorated_fabfile.py | |
94 | tests/support/decorated_fabfile_with_classbased_task.py | |
95 | tests/support/decorated_fabfile_with_modules.py | |
96 | tests/support/deep.py | |
97 | tests/support/docstring.py | |
98 | tests/support/explicit_fabfile.py | |
99 | tests/support/implicit_fabfile.py | |
100 | tests/support/mapping.py | |
101 | tests/support/module_fabtasks.py | |
102 | tests/support/submodule/__init__.py | |
103 | tests/support/submodule/subsubmodule/__init__.py | |
104 | tests/support/tree/__init__.py | |
105 | tests/support/tree/db.py | |
106 | tests/support/tree/system/__init__.py | |
107 | tests/support/tree/system/debian.py⏎ |
0 | 0 | Metadata-Version: 1.0 |
1 | 1 | Name: Fabric |
2 | Version: 1.0.2 | |
2 | Version: 1.1.2 | |
3 | 3 | Summary: Fabric is a simple, Pythonic tool for remote execution and deployment. |
4 | 4 | Home-page: http://fabfile.org |
5 | 5 | Author: Jeff Forcier |
7 | 7 | License: UNKNOWN |
8 | 8 | Description: |
9 | 9 | To find out what's new in this version of Fabric, please see `the changelog |
10 | <http://docs.fabfile.org/en/1.0.2/changes/1.0.2.html>`_. | |
10 | <http://docs.fabfile.org/en/1.1.2/changes/1.1.2.html>`_. | |
11 | 11 | |
12 | 12 | You can also install the `in-development version |
13 | 13 | <https://github.com/bitprophet/fabric/tarball/master#egg=fabric-dev>`_ using |
2 | 2 | ========== |
3 | 3 | |
4 | 4 | .. automodule:: fabric.decorators |
5 | :members: hosts, roles, runs_once | |
5 | :members: hosts, roles, runs_once, task, with_settings |
0 | ======================== | |
1 | Changes in version 1.1.1 | |
2 | ======================== | |
3 | ||
4 | Bugfixes | |
5 | ======== | |
6 | ||
7 | * The public API for `~fabric.tasks.Task` mentioned use of the ``run()`` | |
8 | method, but Fabric's main execution loop had not been updated to look for and | |
9 | call it, forcing users who subclassed `~fabric.tasks.Task` to define | |
10 | ``__call__()`` instead. This was an oversight and has been corrected. | |
11 | ||
12 | .. seealso:: :ref:`task-subclasses` |
0 | ======================== | |
1 | Changes in version 1.1.2 | |
2 | ======================== | |
3 | ||
4 | Bugfixes | |
5 | ======== | |
6 | ||
7 | * :issue:`375`: The logic used to separate tasks from modules when running | |
8 | ``fab --list`` incorrectly considered task classes implementing the mapping | |
9 | interface to be modules, not individual tasks. This has been corrected. | |
10 | Thanks to Vladimir Mihailenco for the catch. |
0 | ====================== | |
1 | Changes in version 1.1 | |
2 | ====================== | |
3 | ||
4 | This page lists all changes made to Fabric in its 1.1.0 release. | |
5 | ||
6 | .. note:: | |
7 | This release also includes all applicable changes from the :doc:`1.0.2 | |
8 | release </changes/1.0.2>`. | |
9 | ||
10 | Highlights | |
11 | ========== | |
12 | ||
13 | * :issue:`76`: :ref:`New-style tasks <new-style-tasks>` have been added. With | |
14 | the addition of the `~fabric.decorators.task` decorator and the | |
15 | `~fabric.tasks.Task` class, you can now "opt-in" and explicitly mark task | |
16 | functions as tasks, and Fabric will ignore the rest. The original behavior | |
17 | (now referred to as :ref:`"classic" tasks <classic-tasks>`) will still take | |
18 | effect if no new-style tasks are found. Major thanks to Travis Swicegood for | |
19 | the original implementation. | |
20 | * :issue:`56`: Namespacing is now possible: Fabric will crawl imported module | |
21 | objects looking for new-style task objects and build a dotted hierarchy | |
22 | (tasks named e.g. ``web.deploy`` or ``db.migrations.run``), allowing for | |
23 | greater organization. See :ref:`namespaces` for details. Thanks again to | |
24 | Travis Swicegood. | |
25 | ||
26 | ||
27 | Feature additions | |
28 | ================= | |
29 | ||
30 | * :issue:`10`: `~fabric.contrib.upload_project` now allows control over the | |
31 | local and remote directory paths, and has improved error handling. Thanks to | |
32 | Rodrigue Alcazar for the patch. | |
33 | * As part of :issue:`56` (highlighted above), added :option:`--list-format | |
34 | <-F>` to allow specification of a nested output format from :option:`--list | |
35 | <-l>`. | |
36 | * :issue:`107`: `~fabric.operations.require`'s ``provided_by`` kwarg now | |
37 | accepts iterables in addition to single values. Thanks to Thomas Ballinger | |
38 | for the patch. | |
39 | * :issue:`117`: `~fabric.contrib.files.upload_template` now supports the | |
40 | `~fabric.operations.put` flags ``mirror_local_mode`` and ``mode``. Thanks to | |
41 | Joe Stump for the suggestion and Thomas Ballinger for the patch. | |
42 | * :issue:`154`: `~fabric.contrib.files.sed` now allows customized regex flags | |
43 | to be specified via a new ``flags`` parameter. Thanks to Nick Trew for the | |
44 | suggestion and Morgan Goose for initial implementation. | |
45 | * :issue:`170`: Allow :ref:`exclusion <excluding-hosts>` of specific hosts from | |
46 | the final run list. Thanks to Casey Banner for the suggestion and patch. | |
47 | * :issue:`189`: Added :option:`--abort-on-prompts`/:ref:`env.abort_on_prompts | |
48 | <abort-on-prompts>` to allow a more non-interactive behavior, | |
49 | aborting/exiting instead of trying to prompt the running user. Thanks to | |
50 | Jeremy Avnet and Matt Chisholm for the initial patch. | |
51 | * :issue:`273`: `~fabric.contrib.files.upload_template` now offers control over | |
52 | whether it attempts to create backups of pre-existing destination files. | |
53 | Thanks to Ales Zoulek for the suggestion and initial patch. | |
54 | * :issue:`283`: Added the `~fabric.decorators.with_settings` decorator to allow | |
55 | application of env var settings to an entire function, as an alternative to | |
56 | using the `~fabric.context_managers.settings` context manager. Thanks to | |
57 | Travis Swicegood for the patch. | |
58 | * :issue:`353`: Added :option:`--keepalive`/:ref:`env.keepalive <keepalive>` to | |
59 | allow specification of an SSH keepalive parameter for troublesome network | |
60 | connections. Thanks to Mark Merritt for catch & patch. | |
61 | ||
62 | Bugfixes | |
63 | ======== | |
64 | ||
65 | * :issue:`115`: An implementation detail causing host lists to lose order | |
66 | when deduped by the ``fab`` execution loop, has been patched to preserve | |
67 | order instead. So e.g. ``fab -H a,b,c`` (or setting ``env.hosts = ['a', 'b', | |
68 | 'c']``) will now always run on ``a``, then ``b``, then ``c``. Previously, | |
69 | there was a chance the order could get mixed up during deduplication. Thanks | |
70 | to Rohit Aggarwal for the report. | |
71 | * :issue:`345`: `~fabric.contrib.files.contains` returned the stdout of its | |
72 | internal ``grep`` command instead of success/failure, causing incorrect | |
73 | behavior when stderr exists and is combined with stdout. This has been | |
74 | corrected. Thanks to Szymon Reichmann for catch and patch. | |
75 | ||
76 | Documentation updates | |
77 | ===================== | |
78 | ||
79 | * Documentation for task declaration has been moved from | |
80 | :doc:`/usage/execution` into its own docs page, :doc:`/usage/tasks`, as a | |
81 | result of the changes added in :issue:`76` and :issue:`56`. | |
82 | * :issue:`184`: Make the usage of `~fabric.contrib.project.rsync_project`'s | |
83 | ``local_dir`` argument more obvious, regarding its use in the ``rsync`` call. | |
84 | (Specifically, so users know they can pass in multiple, space-joined | |
85 | directory names instead of just one single directory.) | |
86 | ||
87 | Internals | |
88 | ========= | |
89 | ||
90 | * :issue:`307`: A whole pile of minor PEP8 tweaks. Thanks to Markus Gattol for | |
91 | highlighting the ``pep8`` tool and to Rick Harding for the patch. | |
92 | * :issue:`314`: Test utility decorator improvements. Thanks to Rick Harding for | |
93 | initial catch & patch. |
54 | 54 | functions you instruct it to. There's nothing magic about it -- anything |
55 | 55 | you can do in a normal Python script can be done in a fabfile! |
56 | 56 | |
57 | .. seealso:: :ref:`execution-strategy`, :ref:`tasks-and-imports`, :doc:`usage/fab` | |
57 | .. seealso:: :ref:`execution-strategy`, :doc:`/usage/tasks`, :doc:`/usage/fab` | |
58 | 58 | |
59 | 59 | |
60 | 60 | Task arguments |
103 | 103 | Note that many of these may be set via ``fab``'s command-line switches -- see |
104 | 104 | :doc:`fab` for details. Cross-links will be provided where appropriate. |
105 | 105 | |
106 | .. _abort-on-prompts: | |
107 | ||
108 | ``abort_on_prompts`` | |
109 | -------------------- | |
110 | ||
111 | **Default:** ``False`` | |
112 | ||
113 | When ``True``, Fabric will run in a non-interactive mode, calling | |
114 | `~fabric.utils.abort` anytime it would normally prompt the user for input (such | |
115 | as password prompts, "What host to connect to?" prompts, fabfile invocation of | |
116 | `~fabric.operations.prompt`, and so forth.) This allows users to ensure a Fabric | |
117 | session will always terminate cleanly instead of blocking on user input forever | |
118 | when unforeseen circumstances arise. | |
119 | ||
120 | .. versionadded:: 1.1 | |
121 | .. seealso:: :option:`--abort-on-prompts` | |
122 | ||
106 | 123 | ``all_hosts`` |
107 | 124 | ------------- |
108 | 125 | |
184 | 201 | |
185 | 202 | .. seealso:: :doc:`ssh` |
186 | 203 | |
204 | .. _exclude-hosts: | |
205 | ||
206 | ``exclude_hosts`` | |
207 | ----------------- | |
208 | ||
209 | **Default:** ``[]`` | |
210 | ||
211 | Specifies a list of host strings to be :ref:`skipped over <exclude-hosts>` | |
212 | during ``fab`` execution. Typically set via :option:`--exclude-hosts/-x <-x>`. | |
213 | ||
214 | .. versionadded:: 1.1 | |
215 | ||
216 | ||
187 | 217 | ``fabfile`` |
188 | 218 | ----------- |
189 | 219 | |
227 | 257 | The global host list used when composing per-task host lists. |
228 | 258 | |
229 | 259 | .. seealso:: :doc:`execution` |
260 | ||
261 | .. _keepalive: | |
262 | ||
263 | ``keepalive`` | |
264 | ------------- | |
265 | ||
266 | **Default:** ``0`` (i.e. no keepalive) | |
267 | ||
268 | An integer specifying an SSH keepalive interval to use; basically maps to the | |
269 | SSH config option ``ClientAliveInterval``. Useful if you find connections are | |
270 | timing out due to meddlesome network hardware or what have you. | |
271 | ||
272 | .. seealso:: :option:`--keepalive` | |
273 | .. versionadded:: 1.1 | |
230 | 274 | |
231 | 275 | .. _key-filename: |
232 | 276 |
65 | 65 | you may introspect the output or return code of a given command and decide what |
66 | 66 | to do next. |
67 | 67 | |
68 | ||
69 | .. _tasks-and-imports: | |
70 | ||
71 | 68 | Defining tasks |
72 | 69 | ============== |
73 | 70 | |
74 | When looking for tasks to execute, Fabric imports your fabfile and will | |
75 | consider any callable object, **except** for the following: | |
76 | ||
77 | * Callables whose name starts with an underscore (``_``). In other words, | |
78 | Python's usual "private" convention holds true here. | |
79 | * Callables defined within Fabric itself. Fabric's own functions such as | |
80 | `~fabric.operations.run` and `~fabric.operations.sudo` will not show up in | |
81 | your task list. | |
82 | ||
83 | .. note:: | |
84 | ||
85 | To see exactly which callables in your fabfile may be executed via ``fab``, | |
86 | use :option:`fab --list <-l>`. | |
87 | ||
88 | Imports | |
89 | ------- | |
90 | ||
91 | Python's ``import`` statement effectively includes the imported objects in your | |
92 | module's namespace. Since Fabric's fabfiles are just Python modules, this means | |
93 | that imports are also considered as possible tasks, alongside anything defined | |
94 | in the fabfile itself. | |
95 | ||
96 | Because of this, we strongly recommend that you use the ``import module`` form | |
97 | of importing, followed by ``module.callable()``, which will result in a cleaner | |
98 | fabfile API than doing ``from module import callable``. | |
99 | ||
100 | For example, here's a sample fabfile which uses ``urllib.urlopen`` to get some | |
101 | data out of a webservice:: | |
102 | ||
103 | from urllib import urlopen | |
104 | ||
105 | from fabric.api import run | |
106 | ||
107 | def webservice_read(): | |
108 | objects = urlopen('http://my/web/service/?foo=bar').read().split() | |
109 | print(objects) | |
110 | ||
111 | This looks simple enough, and will run without error. However, look what | |
112 | happens if we run :option:`fab --list <-l>` on this fabfile:: | |
113 | ||
114 | $ fab --list | |
115 | Available commands: | |
116 | ||
117 | webservice_read List some directories. | |
118 | urlopen urlopen(url [, data]) -> open file-like object | |
119 | ||
120 | Our fabfile of only one task is showing two "tasks", which is bad enough, and | |
121 | an unsuspecting user might accidentally try to call ``fab urlopen``, which | |
122 | probably won't work very well. Imagine any real-world fabfile, which is likely | |
123 | to be much more complex, and hopefully you can see how this could get messy | |
124 | fast. | |
125 | ||
126 | For reference, here's the recommended way to do it:: | |
127 | ||
128 | import urllib | |
129 | ||
130 | from fabric.api import run | |
131 | ||
132 | def webservice_read(): | |
133 | objects = urllib.urlopen('http://my/web/service/?foo=bar').read().split() | |
134 | print(objects) | |
135 | ||
136 | It's a simple change, but it'll make anyone using your fabfile a bit happier. | |
137 | ||
71 | For details on what constitutes a Fabric task and how to organize them, please see :doc:`/usage/tasks`. | |
138 | 72 | |
139 | 73 | Defining host lists |
140 | 74 | =================== |
264 | 198 | Globally, via the command line |
265 | 199 | ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ |
266 | 200 | |
267 | In addition to modifying ``env.hosts`` and ``env.roles`` at the module level, | |
268 | you may define them by passing comma-separated string arguments to the | |
269 | command-line switches :option:`--hosts/-H <-H>` and :option:`--roles/-R <-R>`, | |
270 | e.g.:: | |
201 | In addition to modifying ``env.hosts``, ``env.roles``, and | |
202 | ``env.exclude_hosts`` at the module level, you may define them by passing | |
203 | comma-separated string arguments to the command-line switches | |
204 | :option:`--hosts/-H <-H>` and :option:`--roles/-R <-R>`, e.g.:: | |
271 | 205 | |
272 | 206 | $ fab -H host1,host2 mytask |
273 | 207 | |
297 | 231 | run('ls /var/www') |
298 | 232 | |
299 | 233 | When this fabfile is run as ``fab -H host1,host2 mytask``, ``env.hosts`` will |
300 | end contain ``['host1', 'host2', 'host3', 'host4']`` at the time that | |
234 | then contain ``['host1', 'host2', 'host3', 'host4']`` at the time that | |
301 | 235 | ``mytask`` is executed. |
302 | 236 | |
303 | 237 | .. note:: |
342 | 276 | |
343 | 277 | This will override any other host list and ensure ``mytask`` always runs on |
344 | 278 | just those two hosts. |
279 | ||
345 | 280 | |
346 | 281 | Per-task, via decorators |
347 | 282 | ~~~~~~~~~~~~~~~~~~~~~~~~ |
422 | 357 | Assuming no command-line hosts or roles are given when ``mytask`` is executed, |
423 | 358 | this fabfile will call ``mytask`` on a host list of ``['a', 'b', 'c']`` -- the |
424 | 359 | union of ``role1`` and the contents of the `~fabric.decorators.hosts` call. |
360 | ||
361 | .. _excluding-hosts: | |
362 | ||
363 | Excluding specific hosts | |
364 | ------------------------ | |
365 | ||
366 | At times, it is useful to exclude one or more specific hosts, e.g. to override | |
367 | a few bad or otherwise undesirable hosts which are pulled in from a role or an | |
368 | autogenerated host list. This may be accomplished globally with | |
369 | :option:`--exclude-hosts/-x <-x>`:: | |
370 | ||
371 | $ fab -R myrole -x host2,host5 mytask | |
372 | ||
373 | If ``myrole`` was defined as ``['host1', 'host2', ..., 'host15']``, the above | |
374 | invocation would run with an effective host list of ``['host1', 'host3', | |
375 | 'host4', 'host6', ..., 'host15']``. | |
376 | ||
377 | .. note:: | |
378 | Using this option does not modify ``env.hosts`` -- it only causes the | |
379 | main execution loop to skip the requested hosts. | |
380 | ||
381 | Exclusions may be specified per-task by using an extra ``exclude_hosts`` kwarg, | |
382 | which is implemented similarly to the abovementioned ``hosts`` and ``roles`` | |
383 | per-task kwargs, in that it is stripped from the actual task invocation. This | |
384 | example would have the same result as the global exclude above:: | |
385 | ||
386 | $ fab -R myrole mytask:exclude_hosts="host2;host5" | |
387 | ||
388 | Note that the host list is semicolon-separated, just as with the ``hosts`` | |
389 | per-task argument. | |
425 | 390 | |
426 | 391 | |
427 | 392 | .. _failures: |
89 | 89 | |
90 | 90 | .. versionadded:: 0.9.1 |
91 | 91 | |
92 | .. cmdoption:: --abort-on-prompts | |
93 | ||
94 | Sets :ref:`env.abort_on_prompts <abort-on-prompts>` to ``True``, forcing | |
95 | Fabric to abort whenever it would prompt for input. | |
96 | ||
97 | .. versionadded:: 1.1 | |
98 | ||
92 | 99 | .. cmdoption:: -c RCFILE, --config=RCFILE |
93 | 100 | |
94 | 101 | Sets :ref:`env.rcfile <rcfile>` to the given file path, which Fabric will |
112 | 119 | alternately an explicit file path to load as the fabfile (e.g. |
113 | 120 | ``/path/to/my/fabfile.py``.) |
114 | 121 | |
115 | .. seealso:: :doc:`fabfiles` | |
122 | .. seealso:: :doc:`fabfiles` | |
123 | ||
124 | .. cmdoption:: -F LIST_FORMAT, --list-format=LIST_FORMAT | |
125 | ||
126 | Allows control over the output format of :option:`--list <-l>`. ``short`` is | |
127 | equivalent to :option:`--shortlist`, ``normal`` is the same as simply | |
128 | omitting this option entirely (i.e. the default), and ``nested`` prints out | |
129 | a nested namespace tree. | |
130 | ||
131 | .. versionadded:: 1.1 | |
132 | .. seealso:: :option:`--shortlist`, :option:`--list <-l>` | |
116 | 133 | |
117 | 134 | .. cmdoption:: -h, --help |
118 | 135 | |
129 | 146 | |
130 | 147 | Sets :ref:`env.hosts <hosts>` to the given comma-delimited list of host |
131 | 148 | strings. |
149 | ||
150 | .. cmdoption:: -x HOSTS, --exclude-hosts=HOSTS | |
151 | ||
152 | Sets :ref:`env.exclude_hosts <exclude-hosts>` to the given comma-delimited | |
153 | list of host strings to then keep out of the final host list. | |
154 | ||
155 | .. versionadded:: 1.1 | |
132 | 156 | |
133 | 157 | .. cmdoption:: -i KEY_FILENAME |
134 | 158 | |
143 | 167 | |
144 | 168 | .. versionadded:: 0.9.1 |
145 | 169 | |
170 | .. cmdoption:: --keepalive=KEEPALIVE | |
171 | ||
172 | Sets :ref:`env.keepalive <keepalive>` to the given (integer) value, specifying an SSH keepalive interval. | |
173 | ||
174 | .. versionadded:: 1.1 | |
175 | ||
146 | 176 | .. cmdoption:: -l, --list |
147 | 177 | |
148 | 178 | Imports a fabfile as normal, but then prints a list of all discovered tasks |
151 | 181 | |
152 | 182 | .. versionchanged:: 0.9.1 |
153 | 183 | Added docstring to output. |
154 | .. seealso:: :option:`--shortlist` | |
184 | .. seealso:: :option:`--shortlist`, :option:`--list-format <-F>` | |
155 | 185 | |
156 | 186 | .. cmdoption:: -p PASSWORD, --password=PASSWORD |
157 | 187 |
87 | 87 | |
88 | 88 | For important information on what exactly Fabric will consider as a task when |
89 | 89 | it loads your fabfile, as well as notes on how best to import other code, |
90 | please see :ref:`tasks-and-imports` in the :doc:`execution` documentation. | |
90 | please see :doc:`/usage/tasks` in the :doc:`execution` documentation. |
0 | ============== | |
1 | Defining tasks | |
2 | ============== | |
3 | ||
4 | As of Fabric 1.1, there are two distinct methods you may use in order to define | |
5 | which objects in your fabfile show up as tasks: | |
6 | ||
7 | * The "new" method starting in 1.1 considers instances of `~fabric.tasks.Task` | |
8 | or its subclasses, and also descends into imported modules to allow building | |
9 | nested namespaces. | |
10 | * The "classic" method from 1.0 and earlier considers all public callable | |
11 | objects (functions, classes etc) and only considers the objects in the | |
12 | fabfile itself with no recursing into imported modules. | |
13 | ||
14 | .. note:: | |
15 | These two methods are **mutually exclusive**: if Fabric finds *any* | |
16 | new-style task objects in your fabfile or in modules it imports, it will | |
17 | assume you've committed to this method of task declaration and won't | |
18 | consider any non-`~fabric.tasks.Task` callables. If *no* new-style tasks | |
19 | are found, it reverts to the classic behavior. | |
20 | ||
21 | The rest of this document explores these two methods in detail. | |
22 | ||
23 | .. note:: | |
24 | ||
25 | To see exactly what tasks in your fabfile may be executed via ``fab``, use | |
26 | :option:`fab --list <-l>`. | |
27 | ||
28 | .. _new-style-tasks: | |
29 | ||
30 | New-style tasks | |
31 | =============== | |
32 | ||
33 | Fabric 1.1 introduced the `~fabric.tasks.Task` class to facilitate new features | |
34 | and enable some programming best practices, specifically: | |
35 | ||
36 | * **Object-oriented tasks**. Inheritance and all that comes with it can make | |
37 | for much more sensible code reuse than passing around simple function | |
38 | objects. The classic style of task declaration didn't entirely rule this | |
39 | out, but it also didn't make it terribly easy. | |
40 | * **Namespaces**. Having an explicit method of declaring tasks makes it easier | |
41 | to set up recursive namespaces without e.g. polluting your task list with the | |
42 | contents of Python's ``os`` module (which would show up as valid "tasks" | |
43 | under the classic methodology.) | |
44 | ||
45 | With the introduction of `~fabric.tasks.Task`, there are two ways to set up new | |
46 | tasks: | |
47 | ||
48 | * Decorate a regular module level function with `~fabric.decorators.task`, | |
49 | which transparently wraps the function in a `~fabric.tasks.Task` subclass. | |
50 | The function name will be used as the task name when invoking. | |
51 | * Subclass `~fabric.tasks.Task` (`~fabric.tasks.Task` itself is intended to be | |
52 | abstract), define a ``run`` method, and instantiate your subclass at module | |
53 | level. Instances' ``name`` attributes are used as the task name; if omitted | |
54 | the instance's variable name will be used instead. | |
55 | ||
56 | Use of new-style tasks also allows you to set up task namespaces (see below.) | |
57 | ||
58 | The `~fabric.decorators.task` decorator is pretty straightforward, but using `~fabric.tasks.Task` is less obvious, so we'll cover it in detail here. | |
59 | ||
60 | .. _task-subclasses: | |
61 | ||
62 | ``Task`` subclasses | |
63 | ------------------- | |
64 | ||
65 | If you're used to :ref:`classic-style tasks <classic-tasks>`, an easy way to | |
66 | think about `~fabric.tasks.Task` subclasses is that their ``run`` method is | |
67 | directly equivalent to a classic task; its arguments are the task arguments | |
68 | (other than ``self``) and its body is what gets executed. For example, this | |
69 | new-style task:: | |
70 | ||
71 | class MyTask(Task): | |
72 | name = "deploy" | |
73 | def run(self, environment, domain="whatever.com"): | |
74 | run("git clone foo") | |
75 | sudo("service apache2 restart") | |
76 | ||
77 | instance = MyTask() | |
78 | ||
79 | is exactly equivalent to this function-based task (which, if you dropped the | |
80 | ``@task``, would also be a normal classic-style task):: | |
81 | ||
82 | @task | |
83 | def deploy(environment, domain="whatever.com"): | |
84 | run("git clone foo") | |
85 | sudo("service apache2 restart") | |
86 | ||
87 | Note how we had to instantiate an instance of our class; that's simply normal | |
88 | Python object-oriented programming at work. While it's a small bit of | |
89 | boilerplate right now -- for example, Fabric doesn't care about the name you | |
90 | give the instantiation, only the instance's ``name`` attribute -- it's well | |
91 | worth the benefit of having the power of classes available. | |
92 | ||
93 | We may also extend the API in the future to make this experience a bit | |
94 | smoother. | |
95 | ||
96 | ||
97 | .. _namespaces: | |
98 | ||
99 | Namespaces | |
100 | ---------- | |
101 | ||
102 | With :ref:`classic tasks <classic-tasks>`, fabfiles were limited to a single, | |
103 | flat set of task names with no real way to organize them. In Fabric 1.1 and | |
104 | newer, if you declare tasks the new way (via `~fabric.decorators.task` or your | |
105 | own `~fabric.tasks.Task` subclass instances) you may take advantage of | |
106 | **namespacing**: | |
107 | ||
108 | * Any module objects imported into your fabfile will be recursed into, looking | |
109 | for additional task objects. | |
110 | * Within submodules, you may control which objects are "exported" by using the | |
111 | standard Python ``__all__`` module-level variable name (though they should | |
112 | still be valid new-style task objects.) | |
113 | * These tasks will be given new dotted-notation names based on the modules they | |
114 | came from, similar to Python's own import syntax. | |
115 | ||
116 | Let's build up a fabfile package from simple to complex and see how this works. | |
117 | ||
118 | Basic | |
119 | ~~~~~ | |
120 | ||
121 | We start with a single ``__init__.py`` containing a few tasks (the Fabric API | |
122 | import omitted for brevity):: | |
123 | ||
124 | @task | |
125 | def deploy(): | |
126 | ... | |
127 | ||
128 | @task | |
129 | def compress(): | |
130 | ... | |
131 | ||
132 | The output of ``fab --list`` would look something like this:: | |
133 | ||
134 | deploy | |
135 | compress | |
136 | ||
137 | There's just one namespace here: the "root" or global namespace. Looks simple | |
138 | now, but in a real-world fabfile with dozens of tasks, it can get difficult to | |
139 | manage. | |
140 | ||
141 | Importing a submodule | |
142 | ~~~~~~~~~~~~~~~~~~~~~ | |
143 | ||
144 | As mentioned above, Fabric will examine any imported module objects for tasks, | |
145 | regardless of where that module exists on your Python import path. For now we | |
146 | just want to include our own, "nearby" tasks, so we'll make a new submodule in | |
147 | our package for dealing with, say, load balancers -- ``lb.py``:: | |
148 | ||
149 | @task | |
150 | def add_backend(): | |
151 | ... | |
152 | ||
153 | And we'll add this to the top of ``__init__.py``:: | |
154 | ||
155 | import lb | |
156 | ||
157 | Now ``fab --list`` shows us:: | |
158 | ||
159 | deploy | |
160 | compress | |
161 | lb.add_backend | |
162 | ||
163 | Again, with only one task in its own submodule, it looks kind of silly, but the | |
164 | benefits should be pretty obvious. | |
165 | ||
166 | Going deeper | |
167 | ~~~~~~~~~~~~ | |
168 | ||
169 | Namespacing isn't limited to just one level. Let's say we had a larger setup | |
170 | and wanted a namespace for database related tasks, with additional | |
171 | differentiation inside that. We make a sub-package named ``db/`` and inside it, | |
172 | a ``migrations.py`` module:: | |
173 | ||
174 | @task | |
175 | def list(): | |
176 | ... | |
177 | ||
178 | @task | |
179 | def run(): | |
180 | ... | |
181 | ||
182 | We need to make sure that this module is visible to anybody importing ``db``, | |
183 | so we add it to the sub-package's ``__init__.py``:: | |
184 | ||
185 | import migrations | |
186 | ||
187 | As a final step, we import the sub-package into our root-level ``__init__.py``, | |
188 | so now its first few lines look like this:: | |
189 | ||
190 | import lb | |
191 | import db | |
192 | ||
193 | After all that, our file tree looks like this:: | |
194 | ||
195 | . | |
196 | ├── __init__.py | |
197 | ├── db | |
198 | │ ├── __init__.py | |
199 | │ └── migrations.py | |
200 | └── lb.py | |
201 | ||
202 | and ``fab --list`` shows:: | |
203 | ||
204 | deploy | |
205 | compress | |
206 | lb.add_backend | |
207 | db.migrations.list | |
208 | db.migrations.run | |
209 | ||
210 | We could also have specified (or imported) tasks directly into | |
211 | ``db/__init__.py``, and they would show up as ``db.<whatever>`` as you might | |
212 | expect. | |
213 | ||
214 | Limiting with ``__all__`` | |
215 | ~~~~~~~~~~~~~~~~~~~~~~~~~ | |
216 | ||
217 | You may limit what Fabric "sees" when it examines imported modules, by using | |
218 | the Python convention of a module level ``__all__`` variable (a list of | |
219 | variable names.) If we didn't want the ``db.migrations.run`` task to show up by | |
220 | default for some reason, we could add this to the top of ``db/migrations.py``:: | |
221 | ||
222 | __all__ = ['list'] | |
223 | ||
224 | Note the lack of ``'run'`` there. You could, if needed, import ``run`` directly | |
225 | into some other part of the hierarchy, but otherwise it'll remain hidden. | |
226 | ||
227 | Switching it up | |
228 | ~~~~~~~~~~~~~~~ | |
229 | ||
230 | We've been keeping our fabfile package neatly organized and importing it in a | |
231 | straightforward manner, but the filesystem layout doesn't actually matter here. | |
232 | All Fabric's loader cares about is the names the modules are given when they're | |
233 | imported. | |
234 | ||
235 | For example, if we changed the top of our root ``__init__.py`` to look like | |
236 | this:: | |
237 | ||
238 | import db as database | |
239 | ||
240 | Our task list would change as follows:: | |
241 | ||
242 | deploy | |
243 | compress | |
244 | lb.add_backend | |
245 | database.migrations.list | |
246 | database.migrations.run | |
247 | ||
248 | This applies to any other import -- you could import third party modules into | |
249 | your own task hierarchy, or grab a deeply nested module and make it appear near | |
250 | the top level. | |
251 | ||
252 | Nested list output | |
253 | ~~~~~~~~~~~~~~~~~~ | |
254 | ||
255 | As a final note, we've been using the default Fabric :option:`--list <-l>` | |
256 | output during this section -- it makes it more obvious what the actual task | |
257 | names are. However, you can get a more nested or tree-like view by passing | |
258 | ``nested`` to the :option:`--list-format <-F>` option:: | |
259 | ||
260 | $ fab --list-format=nested --list | |
261 | Available commands (remember to call as module.[...].task): | |
262 | ||
263 | deploy | |
264 | compress | |
265 | lb: | |
266 | add_backend | |
267 | database: | |
268 | migrations: | |
269 | list | |
270 | run | |
271 | ||
272 | While it slightly obfuscates the "real" task names, this view provides a handy | |
273 | way of noting the organization of tasks in large namespaces. | |
274 | ||
275 | ||
276 | .. _classic-tasks: | |
277 | ||
278 | Classic tasks | |
279 | ============= | |
280 | ||
281 | When no new-style `~fabric.tasks.Task`-based tasks are found, Fabric will | |
282 | consider any callable object found in your fabfile, **except** the following: | |
283 | ||
284 | * Callables whose name starts with an underscore (``_``). In other words, | |
285 | Python's usual "private" convention holds true here. | |
286 | * Callables defined within Fabric itself. Fabric's own functions such as | |
287 | `~fabric.operations.run` and `~fabric.operations.sudo` will not show up in | |
288 | your task list. | |
289 | ||
290 | ||
291 | Imports | |
292 | ------- | |
293 | ||
294 | Python's ``import`` statement effectively includes the imported objects in your | |
295 | module's namespace. Since Fabric's fabfiles are just Python modules, this means | |
296 | that imports are also considered as possible classic-style tasks, alongside | |
297 | anything defined in the fabfile itself. | |
298 | ||
299 | .. note:: | |
300 | This only applies to imported *callable objects* -- not modules. | |
301 | Imported modules only come into play if they contain :ref:`new-style | |
302 | tasks <new-style-tasks>`, at which point this section no longer | |
303 | applies. | |
304 | ||
305 | Because of this, we strongly recommend that you use the ``import module`` form | |
306 | of importing, followed by ``module.callable()``, which will result in a cleaner | |
307 | fabfile API than doing ``from module import callable``. | |
308 | ||
309 | For example, here's a sample fabfile which uses ``urllib.urlopen`` to get some | |
310 | data out of a webservice:: | |
311 | ||
312 | from urllib import urlopen | |
313 | ||
314 | from fabric.api import run | |
315 | ||
316 | def webservice_read(): | |
317 | objects = urlopen('http://my/web/service/?foo=bar').read().split() | |
318 | print(objects) | |
319 | ||
320 | This looks simple enough, and will run without error. However, look what | |
321 | happens if we run :option:`fab --list <-l>` on this fabfile:: | |
322 | ||
323 | $ fab --list | |
324 | Available commands: | |
325 | ||
326 | webservice_read Read data from a web service. |
327 | urlopen urlopen(url [, data]) -> open file-like object | |
328 | ||
329 | Our fabfile of only one task is showing two "tasks", which is bad enough, and | |
330 | an unsuspecting user might accidentally try to call ``fab urlopen``, which | |
331 | probably won't work very well. Imagine any real-world fabfile, which is likely | |
332 | to be much more complex, and hopefully you can see how this could get messy | |
333 | fast. | |
334 | ||
335 | For reference, here's the recommended way to do it:: | |
336 | ||
337 | import urllib | |
338 | ||
339 | from fabric.api import run | |
340 | ||
341 | def webservice_read(): | |
342 | objects = urllib.urlopen('http://my/web/service/?foo=bar').read().split() | |
343 | print(objects) | |
344 | ||
345 | It's a simple change, but it'll make anyone using your fabfile a bit happier. |
0 | """ | |
1 | Fabric's own fabfile. | |
2 | """ | |
3 | ||
4 | from __future__ import with_statement | |
5 | ||
6 | import nose | |
7 | ||
8 | from fabric.api import * | |
9 | # Need to import this as fabric.version for reload() purposes | |
10 | import fabric.version | |
11 | # But nothing is stopping us from making a convenient binding! | |
12 | _version = fabric.version.get_version | |
13 | ||
14 | ||
15 | import docs | |
16 | ||
17 | ||
18 | @task | |
19 | def test(args=None): | |
20 | """ | |
21 | Run all unit tests and doctests. | |
22 | ||
23 | Specify string argument ``args`` for additional args to ``nosetests``. | |
24 | """ | |
25 | default_args = "-sv --with-doctest --nologcapture --with-color" | |
26 | default_args += (" " + args) if args else "" | |
27 | try: | |
28 | nose.core.run(argv=[''] + default_args.split()) | |
29 | except SystemExit: | |
30 | abort("Nose encountered an error; you may be missing newly added test dependencies. Try running 'pip install -r requirements.txt'.") | |
31 | ||
32 | ||
33 | def code_version_is_tagged(): | |
34 | return local('git tag | egrep "^%s$"' % _version('short')) | |
35 | ||
36 | def update_code_version(force): | |
37 | """ | |
38 | Update version data structure in-code and commit that change to git. | |
39 | ||
40 | Normally, if the version file has not been modified, we abort assuming the | |
41 | user quit without saving. Specify ``force=yes`` to override this. | |
42 | """ | |
43 | version_file = "fabric/version.py" | |
44 | raw_input("Work has been done since last tag, version update is needed. Hit Enter to load version info in your editor: ") | |
45 | local("$EDITOR %s" % version_file) | |
46 | # Try to detect whether user bailed out of the edit | |
47 | if not local("git diff -- %s" % version_file) and not force: | |
48 | abort("You seem to have aborted the file edit, so I'm aborting too.") | |
49 | # Reload version module to get new version | |
50 | reload(fabric.version) | |
51 | # Commit the version update | |
52 | local("git add %s" % version_file) | |
53 | local("git commit -m \"Cut %s\"" % _version('verbose')) | |
54 | ||
55 | def commits_since_tag(): | |
56 | """ | |
57 | Has any work been done since the last tag? | |
58 | """ | |
59 | return local("git log %s.." % _version('short')) | |
60 | ||
61 | ||
62 | @task | |
63 | def tag(force='no', push='no'): | |
64 | """ | |
65 | Tag a new release. | |
66 | ||
67 | Normally, if a Git tag exists matching the current version, and no Git | |
68 | commits appear after that tag, we abort assuming the user is making a | |
69 | mistake or forgot to commit their work. | |
70 | ||
71 | To override this -- i.e. to re-tag and re-upload -- specify ``force=yes``. | |
72 | We assume you know what you're doing if you use this. | |
73 | ||
74 | By default we do not push the tag remotely; specify ``push=yes`` to force a | |
75 | ``git push origin <tag>``. | |
76 | """ | |
77 | force = force.lower() in ['y', 'yes'] | |
78 | with settings(warn_only=True): | |
79 | # Does the current in-code version exist as a Git tag already? | |
80 | # If so, this means we haven't updated the in-code version specifier | |
81 | # yet, and need to do so. | |
82 | if code_version_is_tagged(): | |
83 | # That is, if any work has been done since. Sanity check! | |
84 | if not commits_since_tag() and not force: | |
85 | abort("No work done since last tag!") | |
86 | # Open editor, update version, commit that change to Git. | |
87 | update_code_version(force) | |
88 | # If the tag doesn't exist, the user has already updated version info | |
89 | # and we can just move on. | |
90 | else: | |
91 | print("Version has already been updated, no need to edit...") | |
92 | # At this point, we've incremented the in-code version and just need to | |
93 | # tag it in Git. | |
94 | f = 'f' if force else '' | |
95 | local("git tag -%sam \"Fabric %s\" %s" % ( | |
96 | f, | |
97 | _version('verbose'), | |
98 | _version('short') | |
99 | )) | |
100 | # And push to the central server, if we were told to | |
101 | if push.lower() in ['y', 'yes']: | |
102 | local("git push origin %s" % _version('short')) | |
103 | ||
104 | ||
105 | @task | |
106 | def build(): | |
107 | """ | |
108 | Build (but don't upload) via setup.py | |
109 | """ | |
110 | local('python setup.py sdist') | |
111 | ||
112 | ||
113 | @task | |
114 | def upload(): | |
115 | """ | |
116 | Build, register and upload to PyPI | |
117 | """ | |
118 | local('python setup.py sdist register upload') | |
119 | ||
120 | ||
121 | @task | |
122 | def release(force='no'): | |
123 | """ | |
124 | Tag/push, build, upload new version and build/upload documentation. | |
125 | """ | |
126 | tag(force=force, push='yes') | |
127 | upload() |
0 | from __future__ import with_statement | |
1 | ||
2 | from fabric.api import * | |
3 | from fabric.contrib.project import rsync_project | |
4 | from fabric.version import get_version | |
5 | ||
6 | ||
7 | docs_host = 'jforcier@fabfile.org' | |
8 | ||
9 | ||
10 | @task | |
11 | def build(clean='no', browse_='no'): | |
12 | """ | |
13 | Generate the Sphinx documentation. | |
14 | """ | |
15 | c = "" | |
16 | if clean.lower() in ['yes', 'y']: | |
17 | c = "clean " | |
18 | b = "" | |
19 | with lcd('docs'): | |
20 | local('make %shtml%s' % (c, b)) | |
21 | if browse_.lower() in ['yes', 'y']: | |
22 | browse() | |
23 | ||
24 | ||
25 | @task | |
26 | def browse(): | |
27 | """ | |
28 | Open the current dev docs in a browser tab. | |
29 | """ | |
30 | local("open docs/_build/html/index.html") | |
31 | ||
32 | ||
33 | @task | |
34 | @hosts(docs_host) | |
35 | def push(): | |
36 | """ | |
37 | Build docs and zip for upload to RTD | |
38 | """ | |
39 | build(clean='yes') | |
40 | v = get_version('short') | |
41 | local("cd docs/_build/html && zip -r ../%s.zip ." % v) |
0 | """ | |
1 | Fabric's own fabfile. | |
2 | """ | |
3 | ||
4 | from __future__ import with_statement | |
5 | ||
6 | import nose | |
7 | ||
8 | from fabric.api import * | |
9 | from fabric.contrib.project import rsync_project | |
10 | # Need to import this as fabric.version for reload() purposes | |
11 | import fabric.version | |
12 | # But nothing is stopping us from making a convenient binding! | |
13 | _version = fabric.version.get_version | |
14 | ||
15 | ||
16 | docs_host = 'jforcier@fabfile.org' | |
17 | ||
18 | ||
19 | def test(args=None): | |
20 | """ | |
21 | Run all unit tests and doctests. | |
22 | ||
23 | Specify string argument ``args`` for additional args to ``nosetests``. | |
24 | """ | |
25 | default_args = "-sv --with-doctest --nologcapture --with-color" | |
26 | default_args += (" " + args) if args else "" | |
27 | try: | |
28 | nose.core.run(argv=[''] + default_args.split()) | |
29 | except SystemExit: | |
30 | abort("Nose encountered an error; you may be missing newly added test dependencies. Try running 'pip install -r requirements.txt'.") | |
31 | ||
32 | ||
33 | def build_docs(clean='no', browse='no'): | |
34 | """ | |
35 | Generate the Sphinx documentation. | |
36 | """ | |
37 | c = "" | |
38 | if clean.lower() in ['yes', 'y']: | |
39 | c = "clean " | |
40 | b = "" | |
41 | with lcd('docs'): | |
42 | local('make %shtml%s' % (c, b)) | |
43 | if browse.lower() in ['yes', 'y']: | |
44 | browse_docs() | |
45 | ||
46 | ||
47 | def browse_docs(): | |
48 | """ | |
49 | Open the current dev docs in a browser tab. | |
50 | """ | |
51 | local("open docs/_build/html/index.html") | |
52 | ||
53 | ||
54 | ||
55 | @hosts(docs_host) | |
56 | def push_docs(): | |
57 | """ | |
58 | Build docs and zip for upload to RTD | |
59 | """ | |
60 | build_docs(clean='yes') | |
61 | v = _version('short') | |
62 | local("cd docs/_build/html && zip -r ../%s.zip ." % v) | |
63 | ||
64 | ||
65 | def _code_version_is_tagged(): | |
66 | return local('git tag | egrep "^%s$"' % _version('short')) | |
67 | ||
68 | def _update_code_version(force): | |
69 | """ | |
70 | Update version data structure in-code and commit that change to git. | |
71 | ||
72 | Normally, if the version file has not been modified, we abort assuming the | |
73 | user quit without saving. Specify ``force=yes`` to override this. | |
74 | """ | |
75 | version_file = "fabric/version.py" | |
76 | raw_input("Work has been done since last tag, version update is needed. Hit Enter to load version info in your editor: ") | |
77 | local("$EDITOR %s" % version_file) | |
78 | # Try to detect whether user bailed out of the edit | |
79 | if not local("git diff -- %s" % version_file) and not force: | |
80 | abort("You seem to have aborted the file edit, so I'm aborting too.") | |
81 | # Reload version module to get new version | |
82 | reload(fabric.version) | |
83 | # Commit the version update | |
84 | local("git add %s" % version_file) | |
85 | local("git commit -m \"Cut %s\"" % _version('verbose')) | |
86 | ||
87 | def _commits_since_tag(): | |
88 | """ | |
89 | Has any work been done since the last tag? | |
90 | """ | |
91 | return local("git log %s.." % _version('short')) | |
92 | ||
93 | def tag(force='no', push='no'): | |
94 | """ | |
95 | Tag a new release. | |
96 | ||
97 | Normally, if a Git tag exists matching the current version, and no Git | |
98 | commits appear after that tag, we abort assuming the user is making a | |
99 | mistake or forgot to commit their work. | |
100 | ||
101 | To override this -- i.e. to re-tag and re-upload -- specify ``force=yes``. | |
102 | We assume you know what you're doing if you use this. | |
103 | ||
104 | By default we do not push the tag remotely; specify ``push=yes`` to force a | |
105 | ``git push origin <tag>``. | |
106 | """ | |
107 | force = force.lower() in ['y', 'yes'] | |
108 | with settings(warn_only=True): | |
109 | # Does the current in-code version exist as a Git tag already? | |
110 | # If so, this means we haven't updated the in-code version specifier | |
111 | # yet, and need to do so. | |
112 | if _code_version_is_tagged(): | |
113 | # That is, if any work has been done since. Sanity check! | |
114 | if not _commits_since_tag() and not force: | |
115 | abort("No work done since last tag!") | |
116 | # Open editor, update version, commit that change to Git. | |
117 | _update_code_version(force) | |
118 | # If the tag doesn't exist, the user has already updated version info | |
119 | # and we can just move on. | |
120 | else: | |
121 | print("Version has already been updated, no need to edit...") | |
122 | # At this point, we've incremented the in-code version and just need to | |
123 | # tag it in Git. | |
124 | f = 'f' if force else '' | |
125 | local("git tag -%sam \"Fabric %s\" %s" % ( | |
126 | f, | |
127 | _version('verbose'), | |
128 | _version('short') | |
129 | )) | |
130 | # And push to the central server, if we were told to | |
131 | if push.lower() in ['y', 'yes']: | |
132 | local("git push origin %s" % _version('short')) | |
133 | ||
134 | ||
135 | def build(): | |
136 | """ | |
137 | Build (but don't upload) via setup.py | |
138 | """ | |
139 | local('python setup.py sdist') | |
140 | ||
141 | ||
142 | def upload(): | |
143 | """ | |
144 | Build, register and upload to PyPI | |
145 | """ | |
146 | local('python setup.py sdist register upload') | |
147 | ||
148 | ||
149 | def release(force='no'): | |
150 | """ | |
151 | Tag/push, build, upload new version and build/upload documentation. | |
152 | """ | |
153 | tag(force=force, push='yes') | |
154 | upload() |
6 | 6 | well when you're using setup.py to install e.g. paramiko! |
7 | 7 | """ |
8 | 8 | from fabric.context_managers import cd, hide, settings, show, path, prefix, lcd |
9 | from fabric.decorators import hosts, roles, runs_once | |
9 | from fabric.decorators import hosts, roles, runs_once, with_settings, task | |
10 | 10 | from fabric.operations import (require, prompt, put, get, run, sudo, local, |
11 | 11 | reboot, open_shell) |
12 | 12 | from fabric.state import env, output |
6 | 6 | from fabric.state import env |
7 | 7 | return env.passwords.get(env.host_string, env.password) |
8 | 8 | |
9 | ||
9 | 10 | def set_password(password): |
10 | 11 | from fabric.state import env |
11 | 12 | env.password = env.passwords[env.host_string] = password |
15 | 15 | |
16 | 16 | from fabric.colors import red, green |
17 | 17 | |
18 | print(red("This sentence is red, except for " + green("these words, which are green") + ".")) | |
18 | print(red("This sentence is red, except for " + \ | |
19 | green("these words, which are green") + ".")) | |
19 | 20 | |
20 | 21 | If ``bold`` is set to ``True``, the ANSI flag for bolding will be flipped on |
21 | 22 | for that particular invocation, which usually shows up as a bold or brighter |
22 | 23 | version of the original color on most terminals. |
23 | 24 | """ |
24 | 25 | |
26 | ||
25 | 27 | def _wrap_with(code): |
28 | ||
26 | 29 | def inner(text, bold=False): |
27 | 30 | c = code |
28 | 31 | if bold: |
8 | 8 | import types |
9 | 9 | import re |
10 | 10 | import os |
11 | from StringIO import StringIO | |
11 | 12 | |
12 | 13 | from fabric.api import * |
13 | 14 | |
49 | 50 | |
50 | 51 | |
51 | 52 | def upload_template(filename, destination, context=None, use_jinja=False, |
52 | template_dir=None, use_sudo=False): | |
53 | template_dir=None, use_sudo=False, backup=True, mirror_local_mode=False, | |
54 | mode=None): | |
53 | 55 | """ |
54 | 56 | Render and upload a template text file to a remote host. |
55 | 57 | |
62 | 64 | templating library available, Jinja will be used to render the template |
63 | 65 | instead. Templates will be loaded from the invoking user's current working |
64 | 66 | directory by default, or from ``template_dir`` if given. |
65 | ||
67 | ||
66 | 68 | The resulting rendered file will be uploaded to the remote file path |
67 | ``destination`` (which should include the desired remote filename.) If the | |
68 | destination file already exists, it will be renamed with a ``.bak`` | |
69 | extension. | |
69 | ``destination``. If the destination file already exists, it will be | |
70 | renamed with a ``.bak`` extension unless ``backup=False`` is specified. | |
70 | 71 | |
71 | 72 | By default, the file will be copied to ``destination`` as the logged-in |
72 | 73 | user; specify ``use_sudo=True`` to use `sudo` instead. |
73 | """ | |
74 | basename = os.path.basename(filename) | |
75 | temp_destination = '/tmp/' + basename | |
76 | ||
77 | # This temporary file should not be automatically deleted on close, as we | |
78 | # need it there to upload it (Windows locks the file for reading while | |
79 | # open). | |
80 | tempfile_fd, tempfile_name = tempfile.mkstemp() | |
81 | output = open(tempfile_name, "w+b") | |
82 | # Init | |
74 | ||
75 | The ``mirror_local_mode`` and ``mode`` kwargs are passed directly to an | |
76 | internal `~fabric.operations.put` call; please see its documentation for | |
77 | details on these two options. | |
78 | ||
79 | .. versionchanged:: 1.1 | |
80 | Added the ``backup``, ``mirror_local_mode`` and ``mode`` kwargs. | |
81 | """ | |
82 | func = use_sudo and sudo or run | |
83 | # Normalize destination to be an actual filename, due to using StringIO | |
84 | with settings(hide('everything'), warn_only=True): | |
85 | if func('test -d %s' % destination).succeeded: | |
86 | sep = "" if destination.endswith('/') else "/" | |
87 | destination += sep + os.path.basename(filename) | |
88 | ||
89 | # Use mode kwarg to implement mirror_local_mode, again due to using | |
90 | # StringIO | |
91 | if mirror_local_mode and mode is None: | |
92 | mode = os.stat(filename).st_mode | |
93 | # To prevent put() from trying to do this | |
94 | # logic itself | |
95 | mirror_local_mode = False | |
96 | ||
97 | # Process template | |
83 | 98 | text = None |
84 | 99 | if use_jinja: |
85 | 100 | try: |
93 | 108 | text = inputfile.read() |
94 | 109 | if context: |
95 | 110 | text = text % context |
96 | output.write(text) | |
97 | output.close() | |
111 | ||
112 | # Back up original file | |
113 | if backup and exists(destination): | |
114 | func("cp %s{,.bak}" % destination) | |
98 | 115 | |
99 | 116 | # Upload the file. |
100 | put(tempfile_name, temp_destination) | |
101 | os.close(tempfile_fd) | |
102 | os.remove(tempfile_name) | |
103 | ||
104 | func = use_sudo and sudo or run | |
105 | # Back up any original file (need to do figure out ultimate destination) | |
106 | to_backup = destination | |
107 | with settings(hide('everything'), warn_only=True): | |
108 | # Is destination a directory? | |
109 | if func('test -f %s' % to_backup).failed: | |
110 | # If so, tack on the filename to get "real" destination | |
111 | to_backup = destination + '/' + basename | |
112 | if exists(to_backup): | |
113 | func("cp %s %s.bak" % (to_backup, to_backup)) | |
114 | # Actually move uploaded template to destination | |
115 | func("mv %s %s" % (temp_destination, destination)) | |
116 | ||
117 | ||
118 | def sed(filename, before, after, limit='', use_sudo=False, backup='.bak'): | |
117 | put( | |
118 | local_path=StringIO(text), | |
119 | remote_path=destination, | |
120 | use_sudo=use_sudo, | |
121 | mirror_local_mode=mirror_local_mode, | |
122 | mode=mode | |
123 | ) | |
124 | ||
125 | ||
126 | def sed(filename, before, after, limit='', use_sudo=False, backup='.bak', | |
127 | flags=''): | |
119 | 128 | """ |
120 | 129 | Run a search-and-replace on ``filename`` with given regex patterns. |
121 | 130 | |
122 | Equivalent to ``sed -i<backup> -r -e "/<limit>/ s/<before>/<after>/g | |
131 | Equivalent to ``sed -i<backup> -r -e "/<limit>/ s/<before>/<after>/<flags>g | |
123 | 132 | <filename>"``. |
124 | 133 | |
125 | 134 | For convenience, ``before`` and ``after`` will automatically escape forward |
131 | 140 | |
132 | 141 | `sed` will pass ``shell=False`` to `run`/`sudo`, in order to avoid problems |
133 | 142 | with many nested levels of quotes and backslashes. |
143 | ||
144 | Other options may be specified with sed-compatible regex flags -- for | |
145 | example, to make the search and replace case insensitive, specify | |
146 | ``flags="i"``. The ``g`` flag is always specified regardless, so you do not | |
147 | need to remember to include it when overriding this parameter. | |
148 | ||
149 | .. versionadded:: 1.1 | |
150 | The ``flags`` parameter. | |
134 | 151 | """ |
135 | 152 | func = use_sudo and sudo or run |
136 | 153 | # Characters to be escaped in both |
144 | 161 | if limit: |
145 | 162 | limit = r'/%s/ ' % limit |
146 | 163 | # Test the OS because of differences between sed versions |
164 | ||
147 | 165 | with hide('running', 'stdout'): |
148 | 166 | platform = run("uname") |
149 | 167 | if platform in ('NetBSD', 'OpenBSD'): |
154 | 172 | tmp = "/tmp/%s" % hasher.hexdigest() |
155 | 173 | # Use temp file to work around lack of -i |
156 | 174 | expr = r"""cp -p %(filename)s %(tmp)s \ |
157 | && sed -r -e '%(limit)ss/%(before)s/%(after)s/g' %(filename)s > %(tmp)s \ | |
175 | && sed -r -e '%(limit)ss/%(before)s/%(after)s/%(flags)sg' %(filename)s > %(tmp)s \ | |
158 | 176 | && cp -p %(filename)s %(filename)s%(backup)s \ |
159 | 177 | && mv %(tmp)s %(filename)s""" |
160 | 178 | command = expr % locals() |
161 | 179 | else: |
162 | expr = r"sed -i%s -r -e '%ss/%s/%s/g' %s" | |
163 | command = expr % (backup, limit, before, after, filename) | |
180 | expr = r"sed -i%s -r -e '%ss/%s/%s/%sg' %s" | |
181 | command = expr % (backup, limit, before, after, flags, filename) | |
164 | 182 | return func(command, shell=False) |
165 | 183 | |
166 | 184 | |
211 | 229 | sometimes do when inserted by hand. Neither will they have a trailing space |
212 | 230 | unless you specify e.g. ``char='# '``. |
213 | 231 | |
214 | .. note:: | |
232 | .. note:: | |
215 | 233 | |
216 | 234 | In order to preserve the line being commented out, this function will |
217 | 235 | wrap your ``regex`` argument in parentheses, so you don't need to. It |
264 | 282 | return func('egrep "%s" "%s"' % ( |
265 | 283 | text.replace('"', r'\"'), |
266 | 284 | filename.replace('"', r'\"') |
267 | )) | |
285 | )).succeeded | |
268 | 286 | |
269 | 287 | |
270 | 288 | def append(filename, text, use_sudo=False, partial=False, escape=True): |
2 | 2 | """ |
3 | 3 | |
4 | 4 | from os import getcwd, sep |
5 | import os.path | |
5 | 6 | from datetime import datetime |
7 | from tempfile import mkdtemp | |
6 | 8 | |
7 | 9 | from fabric.network import needs_host |
8 | 10 | from fabric.operations import local, run, put |
9 | 11 | from fabric.state import env, output |
10 | 12 | |
13 | __all__ = ['rsync_project', 'upload_project'] | |
11 | 14 | |
12 | 15 | @needs_host |
13 | 16 | def rsync_project(remote_dir, local_dir=None, exclude=(), delete=False, |
48 | 51 | files there. |
49 | 52 | |
50 | 53 | * ``local_dir``: by default, ``rsync_project`` uses your current working |
51 | directory as the source directory; you may override this with | |
52 | ``local_dir``, which should be a directory path. | |
54 | directory as the source directory. This may be overridden by specifying | |
55 | ``local_dir``, which is a string passed verbatim to ``rsync``, and thus | |
56 | may be a single directory (``"my_directory"``) or multiple directories | |
57 | (``"dir1 dir2"``). See the ``rsync`` documentation for details. | |
53 | 58 | * ``exclude``: optional, may be a single string, or an iterable of strings, |
54 | 59 | and is used to pass one or more ``--exclude`` options to ``rsync``. |
55 | 60 | * ``delete``: a boolean controlling whether ``rsync``'s ``--delete`` option |
110 | 115 | return local(cmd) |
111 | 116 | |
112 | 117 | |
113 | def upload_project(): | |
118 | def upload_project(local_dir=None, remote_dir=""): | |
114 | 119 | """ |
115 | Upload the current project to a remote system, tar/gzipping during the move. | |
120 | Upload the current project to a remote system via ``tar``/``gzip``. | |
116 | 121 | |
117 | This function makes use of the ``/tmp/`` directory and the ``tar`` and | |
118 | ``gzip`` programs/libraries; thus it will not work too well on Win32 | |
119 | systems unless one is using Cygwin or something similar. | |
122 | ``local_dir`` specifies the local project directory to upload, and defaults | |
123 | to the current working directory. | |
124 | ||
125 | ``remote_dir`` specifies the target directory to upload into (meaning that | |
126 | a copy of ``local_dir`` will appear as a subdirectory of ``remote_dir``) | |
127 | and defaults to the remote user's home directory. | |
120 | 128 | |
121 | ``upload_project`` will attempt to clean up the tarfiles when it finishes | |
122 | executing. | |
129 | This function makes use of the ``tar`` and ``gzip`` programs/libraries, | |
130 | thus it will not work too well on Win32 systems unless one is using Cygwin | |
131 | or something similar. It will attempt to clean up the local and remote | |
132 | tarfiles when it finishes executing, even in the event of a failure. | |
133 | ||
134 | .. versionchanged:: 1.1 | |
135 | Added the ``local_dir`` and ``remote_dir`` kwargs. | |
123 | 136 | """ |
124 | tar_file = "/tmp/fab.%s.tar" % datetime.utcnow().strftime( | |
125 | '%Y_%m_%d_%H-%M-%S') | |
126 | cwd_name = getcwd().split(sep)[-1] | |
127 | tgz_name = cwd_name + ".tar.gz" | |
128 | local("tar -czf %s ." % tar_file) | |
129 | put(tar_file, cwd_name + ".tar.gz") | |
130 | local("rm -f " + tar_file) | |
131 | run("tar -xzf " + tgz_name) | |
132 | run("rm -f " + tgz_name) | |
137 | local_dir = local_dir or os.getcwd() | |
138 | ||
139 | # Remove final '/' in local_dir so that basename() works | |
140 | local_dir = local_dir.rstrip(os.sep) | |
141 | ||
142 | local_path, local_name = os.path.split(local_dir) | |
143 | tar_file = "%s.tar.gz" % local_name | |
144 | target_tar = os.path.join(remote_dir, tar_file) | |
145 | tmp_folder = mkdtemp() | |
146 | ||
147 | try: | |
148 | tar_path = os.path.join(tmp_folder, tar_file) | |
149 | local("tar -czf %s -C %s %s" % (tar_path, local_path, local_name)) | |
150 | put(tar_path, target_tar) | |
151 | try: | |
152 | run("tar -xzf %s" % tar_file) | |
153 | finally: | |
154 | run("rm -f %s" % tar_file) | |
155 | finally: | |
156 | local("rm -rf %s" % tmp_folder) |
0 | 0 | """ |
1 | 1 | Convenience decorators for use in fabfiles. |
2 | 2 | """ |
3 | from __future__ import with_statement | |
3 | 4 | |
4 | 5 | from functools import wraps |
5 | 6 | from types import StringTypes |
7 | ||
8 | from fabric import tasks | |
9 | from .context_managers import settings | |
10 | ||
11 | ||
12 | def task(func): | |
13 | """ | |
14 | Decorator declaring the wrapped function as a :ref:`new-style task <new-style-tasks>`. | |
15 | """ | |
16 | return tasks.WrappedCallableTask(func) | |
6 | 17 | |
7 | 18 | |
8 | 19 | def hosts(*host_list): |
28 | 39 | Allow a single, iterable argument (``@hosts(iterable)``) to be used |
29 | 40 | instead of requiring ``@hosts(*iterable)``. |
30 | 41 | """ |
42 | ||
31 | 43 | def attach_hosts(func): |
32 | 44 | @wraps(func) |
33 | 45 | def inner_decorator(*args, **kwargs): |
99 | 111 | decorated.return_value = func(*args, **kwargs) |
100 | 112 | return decorated.return_value |
101 | 113 | return decorated |
114 | ||
115 | ||
116 | def with_settings(**kw_settings): | |
117 | """ | |
118 | Decorator equivalent of ``fabric.context_managers.settings``. | |
119 | ||
120 | Allows you to wrap an entire function as if it was called inside a block | |
121 | with the ``settings`` context manager. This may be useful if you know you | |
122 | want a given setting applied to an entire function body, or wish to | |
123 | retrofit old code without indenting everything. | |
124 | ||
125 | For example, to turn aborts into warnings for an entire task function:: | |
126 | ||
127 | @with_settings(warn_only=True) | |
128 | def foo(): | |
129 | ... | |
130 | ||
131 | .. seealso:: `~fabric.context_managers.settings` | |
132 | .. versionadded:: 1.1 | |
133 | """ | |
134 | def outer(func): | |
135 | def inner(*args, **kwargs): | |
136 | with settings(**kw_settings): | |
137 | return func(*args, **kwargs) | |
138 | return inner | |
139 | return outer |
18 | 18 | |
19 | 19 | |
20 | 20 | def _endswith(char_list, substring): |
21 | tail = char_list[-1*len(substring):] | |
21 | tail = char_list[-1 * len(substring):] | |
22 | 22 | substring = list(substring) |
23 | 23 | return tail == substring |
24 | 24 | |
78 | 78 | # backwards compatible with Fabric 0.9.x behavior; the user |
79 | 79 | # will still see the prompt on their screen (no way to avoid |
80 | 80 | # this) but at least it won't clutter up the captured text. |
81 | del capture[-1*len(env.sudo_prompt):] | |
81 | del capture[-1 * len(env.sudo_prompt):] | |
82 | 82 | # If the password we just tried was bad, prompt the user again. |
83 | 83 | if (not password) or reprompt: |
84 | 84 | # Print the prompt and/or the "try again" notice if |
8 | 8 | to individuals leveraging Fabric as a library, should be kept elsewhere. |
9 | 9 | """ |
10 | 10 | |
11 | from operator import add | |
11 | from collections import defaultdict | |
12 | from operator import add, isMappingType | |
12 | 13 | from optparse import OptionParser |
13 | 14 | import os |
14 | 15 | import sys |
15 | ||
16 | from fabric import api # For checking callables against the API | |
17 | from fabric.contrib import console, files, project # Ditto | |
16 | import types | |
17 | ||
18 | from fabric import api, state # For checking callables against the API, & easy mocking | |
19 | from fabric.contrib import console, files, project # Ditto | |
18 | 20 | from fabric.network import denormalize, interpret_host_string, disconnect_all |
19 | from fabric import state # For easily-mockable access to roles, env and etc | |
20 | 21 | from fabric.state import commands, connections, env_options |
22 | from fabric.tasks import Task | |
21 | 23 | from fabric.utils import abort, indent |
22 | 24 | |
23 | 25 | |
24 | 26 | # One-time calculation of "all internal callables" to avoid doing this on every |
25 | # check of a given fabfile callable (in is_task()). | |
27 | # check of a given fabfile callable (in is_classic_task()). | |
26 | 28 | _modules = [api, project, files, console] |
27 | 29 | _internals = reduce(lambda x, y: x + filter(callable, vars(y).values()), |
28 | 30 | _modules, |
29 | 31 | [] |
30 | 32 | ) |
33 | ||
34 | # Module recursion cache | |
35 | class _ModuleCache(object): | |
36 | """ | |
37 | Set-like object operating on modules and storing __name__s internally. | |
38 | """ | |
39 | def __init__(self): | |
40 | self.cache = set() | |
41 | ||
42 | def __contains__(self, value): | |
43 | return value.__name__ in self.cache | |
44 | ||
45 | def add(self, value): | |
46 | return self.cache.add(value.__name__) | |
47 | ||
48 | def clear(self): | |
49 | return self.cache.clear() | |
50 | ||
51 | _seen = _ModuleCache() | |
52 | ||
31 | 53 | |
32 | 54 | def load_settings(path): |
33 | 55 | """ |
88 | 110 | # Implicit 'return None' if nothing was found |
89 | 111 | |
90 | 112 | |
91 | def is_task(tup): | |
113 | def is_classic_task(tup): | |
92 | 114 | """ |
93 | 115 | Takes (name, object) tuple, returns True if it's a non-Fab public callable. |
94 | 116 | """ |
138 | 160 | if index is not None: |
139 | 161 | sys.path.insert(index + 1, directory) |
140 | 162 | del sys.path[0] |
141 | # Return our two-tuple | |
142 | tasks = dict(filter(is_task, vars(imported).items())) | |
143 | return imported.__doc__, tasks | |
163 | ||
164 | # Actually load tasks | |
165 | docstring, new_style, classic = load_tasks_from_module(imported) | |
166 | tasks = new_style if state.env.new_style_tasks else classic | |
167 | # Clean up after ourselves | |
168 | _seen.clear() | |
169 | return docstring, tasks | |
170 | ||
171 | ||
172 | def load_tasks_from_module(imported): | |
173 | """ | |
174 | Handles loading all of the tasks for a given `imported` module | |
175 | """ | |
176 | # Obey the use of <module>.__all__ if it is present | |
177 | imported_vars = vars(imported) | |
178 | if "__all__" in imported_vars: | |
179 | imported_vars = [(name, imported_vars[name]) for name in \ | |
180 | imported_vars if name in imported_vars["__all__"]] | |
181 | else: | |
182 | imported_vars = imported_vars.items() | |
183 | # Return a two-tuple value. First is the documentation, second is a | |
184 | # dictionary of callables only (and don't include Fab operations or | |
185 | # underscored callables) | |
186 | new_style, classic = extract_tasks(imported_vars) | |
187 | return imported.__doc__, new_style, classic | |
188 | ||
189 | ||
190 | def extract_tasks(imported_vars): | |
191 | """ | |
192 | Handle extracting tasks from a given list of variables | |
193 | """ | |
194 | new_style_tasks = defaultdict(dict) | |
195 | classic_tasks = {} | |
196 | if 'new_style_tasks' not in state.env: | |
197 | state.env.new_style_tasks = False | |
198 | for tup in imported_vars: | |
199 | name, obj = tup | |
200 | if is_task_object(obj): | |
201 | state.env.new_style_tasks = True | |
202 | new_style_tasks[obj.name] = obj | |
203 | elif is_classic_task(tup): | |
204 | classic_tasks[name] = obj | |
205 | elif is_task_module(obj): | |
206 | docs, newstyle, classic = load_tasks_from_module(obj) | |
207 | for task_name, task in newstyle.items(): | |
208 | new_style_tasks[name][task_name] = task | |
209 | return (new_style_tasks, classic_tasks) | |
210 | ||
211 | ||
212 | def is_task_module(a): | |
213 | """ | |
214 | Determine if the provided value is a task module | |
215 | """ | |
216 | #return (type(a) is types.ModuleType and | |
217 | # any(map(is_task_object, vars(a).values()))) | |
218 | if type(a) is types.ModuleType and a not in _seen: | |
219 | # Flag module as seen | |
220 | _seen.add(a) | |
221 | # Signal that we need to check it out | |
222 | return True | |
223 | ||
224 | ||
225 | def is_task_object(a): | |
226 | """ | |
227 | Determine if the provided value is a ``Task`` object. | |
228 | ||
229 | This returning True signals that all tasks within the fabfile | |
230 | module must be Task objects. | |
231 | """ | |
232 | return isinstance(a, Task) and a.use_task_objects | |
144 | 233 | |
145 | 234 | |
146 | 235 | def parse_options(): |
157 | 246 | |
158 | 247 | # |
159 | 248 | # Define options that don't become `env` vars (typically ones which cause |
160 | # Fabric to do something other than its normal execution, such as --version) | |
249 | # Fabric to do something other than its normal execution, such as | |
250 | # --version) | |
161 | 251 | # |
162 | 252 | |
163 | 253 | # Version number (optparse gives you --version but we have to do it |
182 | 272 | action='store_true', |
183 | 273 | dest='shortlist', |
184 | 274 | default=False, |
185 | help="print non-verbose list of possible commands and exit" | |
275 | help="alias for -F short --list" | |
276 | ) | |
277 | ||
278 | # Control behavior of --list | |
279 | LIST_FORMAT_OPTIONS = ('short', 'normal', 'nested') | |
280 | parser.add_option('-F', '--list-format', | |
281 | choices=LIST_FORMAT_OPTIONS, | |
282 | default='normal', | |
283 | help="formats --list, choices: %s" % ", ".join(LIST_FORMAT_OPTIONS) | |
186 | 284 | ) |
187 | 285 | |
188 | 286 | # Display info about a specific command |
206 | 304 | opts, args = parser.parse_args() |
207 | 305 | return parser, opts, args |
208 | 306 | |
209 | ||
210 | def _command_names(): | |
211 | return sorted(commands.keys()) | |
212 | ||
213 | ||
214 | def list_commands(docstring): | |
215 | """ | |
216 | Print all found commands/tasks, then exit. Invoked with ``-l/--list.`` | |
217 | ||
218 | If ``docstring`` is non-empty, it will be printed before the task list. | |
219 | """ | |
220 | if docstring: | |
221 | trailer = "\n" if not docstring.endswith("\n") else "" | |
222 | print(docstring + trailer) | |
223 | print("Available commands:\n") | |
307 | def _is_task(name, value): | |
308 | """ | |
309 | Is the object a task as opposed to e.g. a dict or int? | |
310 | """ | |
311 | return is_classic_task((name, value)) or is_task_object(value) | |
312 | ||
313 | def _sift_tasks(mapping): | |
314 | tasks, collections = [], [] | |
315 | for name, value in mapping.iteritems(): | |
316 | if _is_task(name, value): | |
317 | tasks.append(name) | |
318 | elif isMappingType(value): | |
319 | collections.append(name) | |
320 | tasks = sorted(tasks) | |
321 | collections = sorted(collections) | |
322 | return tasks, collections | |
323 | ||
324 | def _task_names(mapping): | |
325 | """ | |
326 | Flatten & sort task names in a breadth-first fashion. | |
327 | ||
328 | Tasks are always listed before submodules at the same level, but within | |
329 | those two groups, sorting is alphabetical. | |
330 | """ | |
331 | tasks, collections = _sift_tasks(mapping) | |
332 | for collection in collections: | |
333 | module = mapping[collection] | |
334 | join = lambda x: ".".join((collection, x)) | |
335 | tasks.extend(map(join, _task_names(module))) | |
336 | return tasks | |
337 | ||
338 | def _crawl(name, mapping): | |
339 | """ | |
340 | ``name`` of ``'a.b.c'`` => ``mapping['a']['b']['c']`` | |
341 | """ | |
342 | key, _, rest = name.partition('.') | |
343 | value = mapping[key] | |
344 | if not rest: | |
345 | return value | |
346 | return _crawl(rest, value) | |
347 | ||
348 | def crawl(name, mapping): | |
349 | try: | |
350 | return _crawl(name, mapping) | |
351 | except (KeyError, TypeError): | |
352 | return None | |
353 | ||
354 | def _print_docstring(docstrings, name): | |
355 | if not docstrings: | |
356 | return False | |
357 | docstring = crawl(name, state.commands).__doc__ | |
358 | if type(docstring) in types.StringTypes: | |
359 | return docstring | |
360 | ||
361 | ||
362 | def _normal_list(docstrings=True): | |
363 | result = [] | |
364 | task_names = _task_names(state.commands) | |
224 | 365 | # Want separator between name, description to be straight col |
225 | max_len = reduce(lambda a, b: max(a, len(b)), commands.keys(), 0) | |
366 | max_len = reduce(lambda a, b: max(a, len(b)), task_names, 0) | |
226 | 367 | sep = ' ' |
227 | 368 | trail = '...' |
228 | for name in _command_names(): | |
369 | for name in task_names: | |
229 | 370 | output = None |
230 | # Print first line of docstring | |
231 | func = commands[name] | |
232 | if func.__doc__: | |
233 | lines = filter(None, func.__doc__.splitlines()) | |
371 | docstring = _print_docstring(docstrings, name) | |
372 | if docstring: | |
373 | lines = filter(None, docstring.splitlines()) | |
234 | 374 | first_line = lines[0].strip() |
235 | 375 | # Truncate it if it's longer than N chars |
236 | 376 | size = 75 - (max_len + len(sep) + len(trail)) |
240 | 380 | # Or nothing (so just the name) |
241 | 381 | else: |
242 | 382 | output = name |
243 | print(indent(output)) | |
244 | sys.exit(0) | |
245 | ||
246 | ||
247 | def shortlist(): | |
248 | """ | |
249 | Print all task names separated by newlines with no embellishment. | |
250 | """ | |
251 | print("\n".join(_command_names())) | |
252 | sys.exit(0) | |
253 | ||
254 | ||
255 | def display_command(command): | |
383 | result.append(indent(output)) | |
384 | return result | |
385 | ||
386 | ||
387 | def _nested_list(mapping, level=1): | |
388 | result = [] | |
389 | tasks, collections = _sift_tasks(mapping) | |
390 | # Tasks come first | |
391 | result.extend(map(lambda x: indent(x, spaces=level * 4), tasks)) | |
392 | for collection in collections: | |
393 | module = mapping[collection] | |
394 | # Section/module "header" | |
395 | result.append(indent(collection + ":", spaces=level * 4)) | |
396 | # Recurse | |
397 | result.extend(_nested_list(module, level + 1)) | |
398 | return result | |
399 | ||
400 | COMMANDS_HEADER = "Available commands" | |
401 | NESTED_REMINDER = " (remember to call as module.[...].task)" | |
402 | ||
403 | def list_commands(docstring, format_): | |
404 | """ | |
405 | Print all found commands/tasks, then exit. Invoked with ``-l/--list.`` | |
406 | ||
407 | If ``docstring`` is non-empty, it will be printed before the task list. | |
408 | ||
409 | ``format_`` should conform to the options specified in | |
410 | ``LIST_FORMAT_OPTIONS``, e.g. ``"short"``, ``"normal"``. | |
411 | """ | |
412 | # Short-circuit with simple short output | |
413 | if format_ == "short": | |
414 | return _task_names(state.commands) | |
415 | # Otherwise, handle more verbose modes | |
416 | result = [] | |
417 | # Docstring at top, if applicable | |
418 | if docstring: | |
419 | trailer = "\n" if not docstring.endswith("\n") else "" | |
420 | result.append(docstring + trailer) | |
421 | header = COMMANDS_HEADER | |
422 | if format_ == "nested": | |
423 | header += NESTED_REMINDER | |
424 | result.append(header + ":\n") | |
425 | c = _normal_list() if format_ == "normal" else _nested_list(state.commands) | |
426 | result.extend(c) | |
427 | return result | |
428 | ||
429 | ||
430 | def display_command(name): | |
256 | 431 | """ |
257 | 432 | Print command function's docstring, then exit. Invoked with -d/--display. |
258 | 433 | """ |
259 | 434 | # Sanity check |
260 | if command not in commands: | |
261 | abort("Command '%s' not found, exiting." % command) | |
262 | cmd = commands[command] | |
435 | command = crawl(name, state.commands) | |
436 | if command is None: | |
437 | msg = "Task '%s' does not appear to exist. Valid task names:\n%s" | |
438 | abort(msg % (name, "\n".join(_normal_list(False)))) | |
263 | 439 | # Print out nicely presented docstring if found |
264 | if cmd.__doc__: | |
265 | print("Displaying detailed information for command '%s':" % command) | |
440 | if command.__doc__: | |
441 | print("Displaying detailed information for task '%s':" % name) | |
266 | 442 | print('') |
267 | print(indent(cmd.__doc__, strip=True)) | |
443 | print(indent(command.__doc__, strip=True)) | |
268 | 444 | print('') |
269 | 445 | # Or print notice if not |
270 | 446 | else: |
271 | print("No detailed information available for command '%s':" % command) | |
447 | print("No detailed information available for task '%s':" % name) | |
272 | 448 | sys.exit(0) |
273 | 449 | |
274 | 450 | |
285 | 461 | return argstr.split(sep) |
286 | 462 | |
287 | 463 | before, _, after = argstr.partition(escaped_sep) |
288 | startlist = before.split(sep) # a regular split is fine here | |
464 | startlist = before.split(sep) # a regular split is fine here | |
289 | 465 | unfinished = startlist[-1] |
290 | 466 | startlist = startlist[:-1] |
291 | 467 | |
296 | 472 | # part of the string sent in recursion is the rest of the escaped value. |
297 | 473 | unfinished += sep + endlist[0] |
298 | 474 | |
299 | return startlist + [unfinished] + endlist[1:] # put together all the parts | |
475 | return startlist + [unfinished] + endlist[1:] # put together all the parts | |
300 | 476 | |
301 | 477 | |
302 | 478 | def parse_arguments(arguments): |
311 | 487 | kwargs = {} |
312 | 488 | hosts = [] |
313 | 489 | roles = [] |
490 | exclude_hosts = [] | |
314 | 491 | if ':' in cmd: |
315 | 492 | cmd, argstr = cmd.split(':', 1) |
316 | 493 | for pair in _escape_split(',', argstr): |
317 | 494 | k, _, v = pair.partition('=') |
318 | 495 | if _: |
319 | # Catch, interpret host/hosts/role/roles kwargs | |
320 | if k in ['host', 'hosts', 'role', 'roles']: | |
496 | # Catch, interpret host/hosts/role/roles/exclude_hosts kwargs | |
497 | if k in ['host', 'hosts', 'role', 'roles','exclude_hosts']: | |
321 | 498 | if k == 'host': |
322 | 499 | hosts = [v.strip()] |
323 | 500 | elif k == 'hosts': |
326 | 503 | roles = [v.strip()] |
327 | 504 | elif k == 'roles': |
328 | 505 | roles = [x.strip() for x in v.split(';')] |
506 | elif k == 'exclude_hosts': | |
507 | exclude_hosts = [x.strip() for x in v.split(';')] | |
329 | 508 | # Otherwise, record as usual |
330 | 509 | else: |
331 | 510 | kwargs[k] = v |
332 | 511 | else: |
333 | 512 | args.append(k) |
334 | cmds.append((cmd, args, kwargs, hosts, roles)) | |
513 | cmds.append((cmd, args, kwargs, hosts, roles, exclude_hosts)) | |
335 | 514 | return cmds |
336 | 515 | |
337 | 516 | |
342 | 521 | return ' '.join(arguments) |
343 | 522 | |
344 | 523 | |
345 | def _merge(hosts, roles): | |
524 | def _merge(hosts, roles, exclude=[]): | |
346 | 525 | """ |
347 | 526 | Merge given host and role lists into one list of deduped hosts. |
348 | 527 | """ |
361 | 540 | if callable(value): |
362 | 541 | value = value() |
363 | 542 | role_hosts += value |
364 | # Return deduped combo of hosts and role_hosts | |
365 | return list(set(_clean_hosts(hosts + role_hosts))) | |
366 | ||
543 | ||
544 | # Return deduped combo of hosts and role_hosts, preserving order within | |
545 | # them (vs using set(), which may lose ordering). | |
546 | cleaned_hosts = _clean_hosts(list(hosts) + list(role_hosts)) | |
547 | all_hosts = [] | |
548 | for host in cleaned_hosts: | |
549 | if host not in all_hosts: | |
550 | all_hosts.append(host) | |
551 | return all_hosts | |
367 | 552 | |
368 | 553 | def _clean_hosts(host_list): |
369 | 554 | """ |
371 | 556 | """ |
372 | 557 | return [host.strip() for host in host_list] |
373 | 558 | |
374 | ||
375 | def get_hosts(command, cli_hosts, cli_roles): | |
559 | def get_hosts(command, cli_hosts, cli_roles, cli_exclude_hosts): | |
376 | 560 | """ |
377 | 561 | Return the host list the given command should be using. |
378 | 562 | |
381 | 565 | """ |
382 | 566 | # Command line per-command takes precedence over anything else. |
383 | 567 | if cli_hosts or cli_roles: |
384 | return _merge(cli_hosts, cli_roles) | |
568 | return _merge(cli_hosts, cli_roles, cli_exclude_hosts) | |
385 | 569 | # Decorator-specific hosts/roles go next |
386 | 570 | func_hosts = getattr(command, 'hosts', []) |
387 | 571 | func_roles = getattr(command, 'roles', []) |
572 | func_exclude_hosts = getattr(command, 'exclude_hosts', []) | |
388 | 573 | if func_hosts or func_roles: |
389 | return _merge(func_hosts, func_roles) | |
574 | return _merge(func_hosts, func_roles, func_exclude_hosts) | |
390 | 575 | # Finally, the env is checked (which might contain globally set lists from |
391 | 576 | # the CLI or from module-level code). This will be the empty list if these |
392 | 577 | # have not been set -- which is fine, this method should return an empty |
393 | 578 | # list if no hosts have been set anywhere. |
394 | return _merge(state.env['hosts'], state.env['roles']) | |
395 | ||
579 | return _merge(state.env['hosts'], state.env['roles'], state.env['exclude_hosts']) | |
396 | 580 | |
397 | 581 | def update_output_levels(show, hide): |
398 | 582 | """ |
411 | 595 | state.output[key] = False |
412 | 596 | |
413 | 597 | |
598 | def _run_task(task, args, kwargs): | |
599 | # First, try class-based tasks | |
600 | if hasattr(task, 'run') and callable(task.run): | |
601 | return task.run(*args, **kwargs) | |
602 | # Fallback to callable behavior | |
603 | return task(*args, **kwargs) | |
604 | ||
605 | ||
414 | 606 | def main(): |
415 | 607 | """ |
416 | 608 | Main command-line execution loop. |
429 | 621 | for option in env_options: |
430 | 622 | state.env[option.dest] = getattr(options, option.dest) |
431 | 623 | |
432 | # Handle --hosts, --roles (comma separated string => list) | |
433 | for key in ['hosts', 'roles']: | |
624 | # Handle --hosts, --roles, --exclude-hosts (comma separated string => list) | |
625 | for key in ['hosts', 'roles', 'exclude_hosts']: | |
434 | 626 | if key in state.env and isinstance(state.env[key], str): |
435 | 627 | state.env[key] = state.env[key].split(',') |
436 | 628 | |
466 | 658 | # dict |
467 | 659 | if fabfile: |
468 | 660 | docstring, callables = load_fabfile(fabfile) |
469 | commands.update(callables) | |
661 | state.commands.update(callables) | |
470 | 662 | |
471 | 663 | # Abort if no commands found |
472 | if not commands and not remainder_arguments: | |
664 | if not state.commands and not remainder_arguments: | |
473 | 665 | abort("Fabfile didn't contain any commands!") |
474 | 666 | |
475 | 667 | # Now that we're settled on a fabfile, inform user. |
479 | 671 | else: |
480 | 672 | print("No fabfile loaded -- remainder command only") |
481 | 673 | |
482 | # Non-verbose command list | |
674 | # Shortlist is now just an alias for the "short" list format; | |
675 | # it overrides use of --list-format if somebody were to specify both | |
483 | 676 | if options.shortlist: |
484 | shortlist() | |
485 | ||
486 | # Handle list-commands option (now that commands are loaded) | |
677 | options.list_format = 'short' | |
678 | ||
679 | # List available commands | |
487 | 680 | if options.list_commands: |
488 | list_commands(docstring) | |
681 | print("\n".join(list_commands(docstring, options.list_format))) | |
682 | sys.exit(0) | |
489 | 683 | |
490 | 684 | # Handle show (command-specific help) option |
491 | 685 | if options.display: |
494 | 688 | # If user didn't specify any commands to run, show help |
495 | 689 | if not (arguments or remainder_arguments): |
496 | 690 | parser.print_help() |
497 | sys.exit(0) # Or should it exit with error (1)? | |
691 | sys.exit(0) # Or should it exit with error (1)? | |
498 | 692 | |
499 | 693 | # Parse arguments into commands to run (plus args/kwargs/hosts) |
500 | 694 | commands_to_run = parse_arguments(arguments) |
505 | 699 | # Figure out if any specified task names are invalid |
506 | 700 | unknown_commands = [] |
507 | 701 | for tup in commands_to_run: |
508 | if tup[0] not in commands: | |
702 | if crawl(tup[0], state.commands) is None: | |
509 | 703 | unknown_commands.append(tup[0]) |
510 | 704 | |
511 | 705 | # Abort if any unknown commands were specified |
516 | 710 | # Generate remainder command and insert into commands, commands_to_run |
517 | 711 | if remainder_command: |
518 | 712 | r = '<remainder>' |
519 | commands[r] = lambda: api.run(remainder_command) | |
520 | commands_to_run.append((r, [], {}, [], [])) | |
713 | state.commands[r] = lambda: api.run(remainder_command) | |
714 | commands_to_run.append((r, [], {}, [], [], [])) | |
521 | 715 | |
522 | 716 | if state.output.debug: |
523 | 717 | names = ", ".join(x[0] for x in commands_to_run) |
524 | 718 | print("Commands to run: %s" % names) |
525 | 719 | |
526 | 720 | # At this point all commands must exist, so execute them in order. |
527 | for name, args, kwargs, cli_hosts, cli_roles in commands_to_run: | |
721 | for name, args, kwargs, cli_hosts, cli_roles, cli_exclude_hosts in commands_to_run: | |
528 | 722 | # Get callable by itself |
529 | command = commands[name] | |
530 | # Set current command name (used for some error messages) | |
723 | task = crawl(name, state.commands) | |
724 | # Set current task name (used for some error messages) | |
531 | 725 | state.env.command = name |
532 | 726 | # Set host list (also copy to env) |
533 | 727 | state.env.all_hosts = hosts = get_hosts( |
534 | command, cli_hosts, cli_roles) | |
728 | task, cli_hosts, cli_roles, cli_exclude_hosts) | |
535 | 729 | # If hosts found, execute the function on each host in turn |
536 | 730 | for host in hosts: |
537 | 731 | # Preserve user |
542 | 736 | if state.output.running: |
543 | 737 | print("[%s] Executing task '%s'" % (host, name)) |
544 | 738 | # Actually run command |
545 | commands[name](*args, **kwargs) | |
739 | _run_task(task, args, kwargs) | |
546 | 740 | # Put old user back |
547 | 741 | state.env.user = prev_user |
548 | 742 | # If no hosts found, assume local-only and run once |
549 | 743 | if not hosts: |
550 | commands[name](*args, **kwargs) | |
744 | _run_task(task, args, kwargs) | |
551 | 745 | # If we got here, no errors occurred, so print a final note. |
552 | 746 | if state.output.status: |
553 | 747 | print("\nDone.") |
9 | 9 | import socket |
10 | 10 | import sys |
11 | 11 | |
12 | from fabric.utils import abort | |
13 | 12 | from fabric.auth import get_password, set_password |
13 | from fabric.utils import abort, handle_prompt_abort | |
14 | 14 | |
15 | 15 | try: |
16 | 16 | import warnings |
17 | 17 | warnings.simplefilter('ignore', DeprecationWarning) |
18 | 18 | import paramiko as ssh |
19 | 19 | except ImportError: |
20 | abort("paramiko is a required module. Please install it:\n\t$ sudo easy_install paramiko") | |
21 | ||
20 | abort("paramiko is a required module. Please install it:\n\t" | |
21 | "$ sudo easy_install paramiko") | |
22 | 22 | |
23 | 23 | |
24 | 24 | host_pattern = r'((?P<user>.+)@)?(?P<host>[^:]+)(:(?P<port>\d+))?' |
51 | 51 | ``user1@example.com`` will create a connection to ``example.com``, logged |
52 | 52 | in as ``user1``; later specifying ``user2@example.com`` will create a new, |
53 | 53 | 2nd connection as ``user2``. |
54 | ||
54 | ||
55 | 55 | The same applies to ports: specifying two different ports will result in |
56 | 56 | two different connections to the same host being made. If no port is given, |
57 | 57 | 22 is assumed, so ``example.com`` is equivalent to ``example.com:22``. |
101 | 101 | """ |
102 | 102 | Strips out default values for the given host string. |
103 | 103 | |
104 | If the user part is the default user, it is removed; if the port is port 22, | |
105 | it also is removed. | |
104 | If the user part is the default user, it is removed; | |
105 | if the port is port 22, it also is removed. | |
106 | 106 | """ |
107 | 107 | from state import env |
108 | 108 | r = host_regex.match(host_string).groupdict() |
119 | 119 | """ |
120 | 120 | Turns user/host/port strings into ``user@host:port`` combined string. |
121 | 121 | |
122 | This function is not responsible for handling missing user/port strings; for | |
123 | that, see the ``normalize`` function. | |
122 | This function is not responsible for handling missing user/port strings; | |
123 | for that, see the ``normalize`` function. | |
124 | 124 | |
125 | 125 | If ``port`` is omitted, the returned string will be of the form |
126 | 126 | ``user@host``. |
157 | 157 | # Unless user specified not to, accept/add new, unknown host keys |
158 | 158 | if not env.reject_unknown_hosts: |
159 | 159 | client.set_missing_host_key_policy(ssh.AutoAddPolicy()) |
160 | ||
161 | 160 | |
162 | 161 | # |
163 | 162 | # Connection attempt loop |
182 | 181 | look_for_keys=not env.no_keys |
183 | 182 | ) |
184 | 183 | connected = True |
184 | ||
185 | # set a keepalive if desired | |
186 | if env.keepalive: | |
187 | client.get_transport().set_keepalive(env.keepalive) | |
188 | ||
185 | 189 | return client |
186 | 190 | # BadHostKeyException corresponds to key mismatch, i.e. what on the |
187 | 191 | # command line results in the big banner error about man-in-the-middle |
188 | 192 | # attacks. |
189 | 193 | except ssh.BadHostKeyException: |
190 | abort("Host key for %s did not match pre-existing key! Server's key was changed recently, or possible man-in-the-middle attack." % env.host) | |
194 | abort("Host key for %s did not match pre-existing key! Server's" | |
195 | " key was changed recently, or possible man-in-the-middle" | |
196 | "attack." % env.host) | |
191 | 197 | # Prompt for new password to try on auth failure |
192 | 198 | except ( |
193 | 199 | ssh.AuthenticationException, |
254 | 260 | host, e[1]) |
255 | 261 | ) |
256 | 262 | |
263 | ||
257 | 264 | def prompt_for_password(prompt=None, no_colon=False, stream=None): |
258 | 265 | """ |
259 | Prompts for and returns a new password if required; otherwise, returns None. | |
266 | Prompts for and returns a new password if required; otherwise, returns | |
267 | None. | |
260 | 268 | |
261 | 269 | A trailing colon is appended unless ``no_colon`` is True. |
262 | 270 | |
271 | 279 | defaults to ``sys.stderr``. |
272 | 280 | """ |
273 | 281 | from fabric.state import env |
282 | handle_prompt_abort() | |
274 | 283 | stream = stream or sys.stderr |
275 | 284 | # Construct prompt |
276 | 285 | default = "[%s] Login password" % env.host_string |
295 | 304 | This decorator is basically a safety net for silly users who forgot to |
296 | 305 | specify the host/host list in one way or another. It should be used to wrap |
297 | 306 | operations which require a network connection. |
298 | ||
307 | ||
299 | 308 | Due to how we execute commands per-host in ``main()``, it's not possible to |
300 | 309 | specify multiple hosts at this point in time, so only a single host will be |
301 | 310 | prompted for. |
306 | 315 | command (in the case where multiple commands have no hosts set, of course.) |
307 | 316 | """ |
308 | 317 | from fabric.state import env |
318 | ||
309 | 319 | @wraps(func) |
310 | 320 | def host_prompting_wrapper(*args, **kwargs): |
321 | handle_prompt_abort() | |
311 | 322 | while not env.get('host_string', False): |
312 | host_string = raw_input("No hosts found. Please specify (single) host string for connection: ") | |
323 | host_string = raw_input("No hosts found. Please specify (single)" | |
324 | " host string for connection: ") | |
313 | 325 | interpret_host_string(host_string) |
314 | 326 | return func(*args, **kwargs) |
315 | 327 | return host_prompting_wrapper |
18 | 18 | from fabric.context_managers import settings, char_buffered |
19 | 19 | from fabric.io import output_loop, input_loop |
20 | 20 | from fabric.network import needs_host |
21 | from fabric.sftp import SFTP | |
21 | 22 | from fabric.state import (env, connections, output, win32, default_channel, |
22 | 23 | io_sleep) |
23 | from fabric.utils import abort, indent, warn, puts | |
24 | 24 | from fabric.thread_handling import ThreadHandler |
25 | from fabric.sftp import SFTP | |
25 | from fabric.utils import abort, indent, warn, puts, handle_prompt_abort | |
26 | 26 | |
27 | 27 | # For terminal size logic below |
28 | 28 | if not win32: |
135 | 135 | so format it appropriately. |
136 | 136 | |
137 | 137 | The optional keyword argument ``provided_by`` may be a list of functions or |
138 | function names which the user should be able to execute in order to set the | |
139 | key or keys; it will be included in the error output if requirements are | |
140 | not met. | |
138 | function names or a single function or function name which the user should | |
139 | be able to execute in order to set the key or keys; it will be included in | |
140 | the error output if requirements are not met. | |
141 | 141 | |
142 | 142 | Note: it is assumed that the keyword arguments apply to all given keys as a |
143 | 143 | group. If you feel the need to specify more than one ``used_for``, for |
144 | 144 | example, you should break your logic into multiple calls to ``require()``. |
145 | ||
146 | .. versionchanged:: 1.1 | |
147 | Allow iterable ``provided_by`` values instead of just single values. | |
145 | 148 | """ |
146 | 149 | # If all keys exist, we're good, so keep going. |
147 | 150 | missing_keys = filter(lambda x: x not in env, keys) |
169 | 172 | # And print provided_by if given |
170 | 173 | if 'provided_by' in kwargs: |
171 | 174 | funcs = kwargs['provided_by'] |
172 | # Pluralize this too | |
175 | # non-iterable is given, treat it as a list of this single item | |
176 | if not hasattr(funcs, '__iter__'): | |
177 | funcs = [funcs] | |
173 | 178 | if len(funcs) > 1: |
174 | 179 | command = "one of the following commands" |
175 | 180 | else: |
214 | 219 | Either way, `prompt` will re-prompt until validation passes (or the user |
215 | 220 | hits ``Ctrl-C``). |
216 | 221 | |
222 | .. note:: | |
223 | `~fabric.operations.prompt` honors :ref:`env.abort_on_prompts | |
224 | <abort-on-prompts>` and will call `~fabric.utils.abort` instead of | |
225 | prompting if that flag is set to ``True``. If you want to block on user | |
226 | input regardless, try wrapping with | |
227 | `~fabric.context_managers.settings`. | |
228 | ||
217 | 229 | Examples:: |
218 | 230 | |
219 | 231 | # Simplest form: |
229 | 241 | release = prompt('Please supply a release name', |
230 | 242 | validate=r'^\w+-\d+(\.\d+)?$') |
231 | 243 | |
232 | """ | |
244 | # Prompt regardless of the global abort-on-prompts setting: | |
245 | with settings(abort_on_prompts=False): | |
246 | prompt('I seriously need an answer on this! ') | |
247 | ||
248 | """ | |
249 | handle_prompt_abort() | |
233 | 250 | # Store previous env value for later display, if necessary |
234 | 251 | if key: |
235 | 252 | previous_value = env.get(key) |
16 | 16 | def __init__(self, host_string): |
17 | 17 | self.ftp = connections[host_string].open_sftp() |
18 | 18 | |
19 | ||
20 | 19 | # Recall that __getattr__ is the "fallback" attribute getter, and is thus |
21 | 20 | # pretty safe to use for facade-like behavior as we're doing here. |
22 | 21 | def __getattr__(self, attr): |
23 | 22 | return getattr(self.ftp, attr) |
24 | 23 | |
25 | ||
26 | 24 | def isdir(self, path): |
27 | 25 | try: |
28 | 26 | return stat.S_ISDIR(self.ftp.lstat(path).st_mode) |
29 | 27 | except IOError: |
30 | 28 | return False |
31 | 29 | |
32 | ||
33 | 30 | def islink(self, path): |
34 | 31 | try: |
35 | 32 | return stat.S_ISLNK(self.ftp.lstat(path).st_mode) |
36 | 33 | except IOError: |
37 | 34 | return False |
38 | ||
39 | 35 | |
40 | 36 | def exists(self, path): |
41 | 37 | try: |
43 | 39 | except IOError: |
44 | 40 | return False |
45 | 41 | return True |
46 | ||
47 | 42 | |
48 | 43 | def glob(self, path): |
49 | 44 | from fabric.state import win32 |
59 | 54 | ret = [os.path.join(dirpart, name) for name in names] |
60 | 55 | return ret |
61 | 56 | |
62 | ||
63 | 57 | def walk(self, top, topdown=True, onerror=None, followlinks=False): |
64 | 58 | from os.path import join, isdir, islink |
65 | 59 | |
95 | 89 | if not topdown: |
96 | 90 | yield top, dirs, nondirs |
97 | 91 | |
98 | ||
99 | 92 | def mkdir(self, path, use_sudo): |
100 | 93 | from fabric.api import sudo, hide |
101 | 94 | if use_sudo: |
103 | 96 | sudo('mkdir %s' % path) |
104 | 97 | else: |
105 | 98 | self.ftp.mkdir(path) |
106 | ||
107 | 99 | |
108 | 100 | def get(self, remote_path, local_path, local_is_path, rremote=None): |
109 | 101 | # rremote => relative remote path, so get(/var/log) would result in |
156 | 148 | result = real_local_path |
157 | 149 | return result |
158 | 150 | |
159 | ||
160 | 151 | def get_dir(self, remote_path, local_path): |
161 | 152 | # Decide what needs to be stripped from remote paths so they're all |
162 | 153 | # relative to the given remote_path |
195 | 186 | # on both ends. |
196 | 187 | result.append(self.get(rpath, lpath, True, rremote)) |
197 | 188 | return result |
198 | ||
199 | 189 | |
200 | 190 | def put(self, local_path, remote_path, use_sudo, mirror_local_mode, mode, |
201 | 191 | local_is_path): |
252 | 242 | remote_path = target_path |
253 | 243 | return remote_path |
254 | 244 | |
255 | ||
256 | 245 | def put_dir(self, local_path, remote_path, use_sudo, mirror_local_mode, |
257 | 246 | mode): |
258 | 247 | if os.path.basename(local_path): |
271 | 260 | self.mkdir(rcontext, use_sudo) |
272 | 261 | |
273 | 262 | for d in dirs: |
274 | n = os.path.join(rcontext,d) | |
263 | n = os.path.join(rcontext, d) | |
275 | 264 | if not self.exists(n): |
276 | 265 | self.mkdir(n, use_sudo) |
277 | 266 | |
278 | 267 | for f in files: |
279 | local_path = os.path.join(context,f) | |
280 | n = os.path.join(rcontext,f) | |
268 | local_path = os.path.join(context, f) | |
269 | n = os.path.join(rcontext, f) | |
281 | 270 | p = self.put(local_path, n, use_sudo, mirror_local_mode, mode, |
282 | 271 | True) |
283 | 272 | remote_paths.append(p) |
22 | 22 | |
23 | 23 | # |
24 | 24 | # Environment dictionary - support structures |
25 | # | |
25 | # | |
26 | 26 | |
27 | 27 | class _AttributeDict(dict): |
28 | 28 | """ |
90 | 90 | from win32com.shell.shell import SHGetSpecialFolderPath |
91 | 91 | from win32com.shell.shellcon import CSIDL_PROFILE |
92 | 92 | return "%s/%s" % ( |
93 | SHGetSpecialFolderPath(0,CSIDL_PROFILE), | |
93 | SHGetSpecialFolderPath(0, CSIDL_PROFILE), | |
94 | 94 | rc_file |
95 | 95 | ) |
96 | 96 | |
141 | 141 | make_option('-R', '--roles', |
142 | 142 | default=[], |
143 | 143 | help="comma-separated list of roles to operate on" |
144 | ), | |
145 | ||
146 | make_option('-x', '--exclude-hosts', | |
147 | default=[], | |
148 | help="comma-separated list of hosts to exclude" | |
144 | 149 | ), |
145 | 150 | |
146 | 151 | make_option('-i', |
202 | 207 | action='store_false', |
203 | 208 | default=True, |
204 | 209 | help="do not use pseudo-terminal in run/sudo" |
205 | ) | |
206 | ||
210 | ), | |
211 | ||
212 | # Abort on prompting flag | |
213 | make_option('--abort-on-prompts', | |
214 | action='store_true', | |
215 | default=False, | |
216 | help="Abort instead of prompting (for password, host, etc)" | |
217 | ), | |
218 | ||
219 | # Keepalive | |
220 | make_option('--keepalive', | |
221 | dest='keepalive', | |
222 | type=int, | |
223 | default=0, | |
224 | help="enables a keepalive every n seconds" | |
225 | ), | |
207 | 226 | ] |
208 | 227 | |
209 | 228 | |
223 | 242 | 'combine_stderr': True, |
224 | 243 | 'command': None, |
225 | 244 | 'command_prefixes': [], |
226 | 'cwd': '', # Must be empty string, not None, for concatenation purposes | |
245 | 'cwd': '', # Must be empty string, not None, for concatenation purposes | |
227 | 246 | 'echo_stdin': True, |
247 | 'exclude_hosts': [], | |
228 | 248 | 'host': None, |
229 | 249 | 'host_string': None, |
230 | 'lcwd': '', # Must be empty string, not None, for concatenation purposes | |
250 | 'lcwd': '', # Must be empty string, not None, for concatenation purposes | |
231 | 251 | 'local_user': _get_system_username(), |
232 | 252 | 'output_prefix': True, |
233 | 253 | 'passwords': {}, |
235 | 255 | 'path_behavior': 'append', |
236 | 256 | 'port': None, |
237 | 257 | 'real_fabfile': None, |
238 | 'roledefs': {}, | |
258 | 'roles': [], | |
239 | 259 | 'roledefs': {}, |
240 | 260 | # -S so sudo accepts passwd via stdin, -p with our known-value prompt for |
241 | 261 | # later detection (thus %s -- gets filled with env.sudo_prompt at runtime) |
250 | 270 | for option in env_options: |
251 | 271 | env[option.dest] = option.default |
252 | 272 | |
253 | ||
254 | 273 | # |
255 | 274 | # Command dictionary |
256 | 275 | # |
265 | 284 | # |
266 | 285 | |
267 | 286 | connections = HostConnectionCache() |
287 | ||
268 | 288 | |
269 | 289 | def default_channel(): |
270 | 290 | """ |
299 | 319 | This also means they will not show up in e.g. ``dict.keys()``. |
300 | 320 | |
301 | 321 | ..note:: |
302 | ||
322 | ||
303 | 323 | Aliases are recursive, so you may refer to an alias within the key list |
304 | 324 | of another alias. Naturally, this means that you can end up with |
305 | 325 | infinite loops if you're not careful. |
0 | from functools import wraps | |
1 | ||
2 | class Task(object): | |
3 | """ | |
4 | Abstract base class for objects wishing to be picked up as Fabric tasks. | |
5 | ||
6 | Instances of subclasses will be treated as valid tasks when present in | |
7 | fabfiles loaded by the :doc:`fab </usage/fab>` tool. | |
8 | ||
9 | For details on how to implement and use `~fabric.tasks.Task` subclasses, | |
10 | please see the usage documentation on :ref:`new-style tasks | |
11 | <new-style-tasks>`. | |
12 | ||
13 | .. versionadded:: 1.1 | |
14 | """ | |
15 | name = 'undefined' | |
16 | use_task_objects = True | |
17 | ||
18 | # TODO: make it so that this wraps other decorators as expected | |
19 | ||
20 | def run(self): | |
21 | raise NotImplementedError | |
22 | ||
23 | ||
24 | class WrappedCallableTask(Task): | |
25 | """ | |
26 | Wraps a given callable transparently, while marking it as a valid Task. | |
27 | ||
28 | Generally used via the `~fabric.decorators.task` decorator and not | |
29 | directly. | |
30 | ||
31 | .. versionadded:: 1.1 | |
32 | """ | |
33 | def __init__(self, callable): | |
34 | super(WrappedCallableTask, self).__init__() | |
35 | self.wrapped = callable | |
36 | self.__name__ = self.name = callable.__name__ | |
37 | self.__doc__ = callable.__doc__ | |
38 | ||
39 | def __call__(self, *args, **kwargs): | |
40 | return self.run(*args, **kwargs) | |
41 | ||
42 | def run(self, *args, **kwargs): | |
43 | return self.wrapped(*args, **kwargs) | |
44 | ||
45 | def __getattr__(self, k): | |
46 | return getattr(self.wrapped, k) |
5 | 5 | def __init__(self, name, callable, *args, **kwargs): |
6 | 6 | # Set up exception handling |
7 | 7 | self.exception = None |
8 | ||
8 | 9 | def wrapper(*args, **kwargs): |
9 | 10 | try: |
10 | 11 | callable(*args, **kwargs) |
9 | 9 | """ |
10 | 10 | Abort execution, print ``msg`` to stderr and exit with error status (1.) |
11 | 11 | |
12 | This function currently makes use of `sys.exit`_, which raises | |
12 | This function currently makes use of `sys.exit`_, which raises | |
13 | 13 | `SystemExit`_. Therefore, it's possible to detect and recover from inner |
14 | 14 | calls to `abort` by using ``except SystemExit`` or similar. |
15 | 15 | |
120 | 120 | .. seealso:: `~fabric.utils.puts` |
121 | 121 | """ |
122 | 122 | return puts(text=text, show_prefix=show_prefix, end=end, flush=flush) |
123 | ||
124 | ||
125 | def handle_prompt_abort(): | |
126 | import fabric.state | |
127 | if fabric.state.env.abort_on_prompts: | |
128 | abort("Needed to prompt, but abort-on-prompts was set to True!") |
20 | 20 | return p.communicate()[0] |
21 | 21 | |
22 | 22 | |
23 | VERSION = (1, 0, 2, 'final', 0) | |
23 | VERSION = (1, 1, 2, 'final', 0) | |
24 | ||
24 | 25 | |
25 | 26 | def get_version(form='short'): |
26 | 27 | """ |
91 | 92 | try: |
92 | 93 | return versions[form] |
93 | 94 | except KeyError: |
94 | raise TypeError, '"%s" is not a valid form specifier.' % form | |
95 | raise TypeError('"%s" is not a valid form specifier.' % form) | |
95 | 96 | |
96 | 97 | __version__ = get_version('short') |
137 | 137 | except ImportError: |
138 | 138 | import dummy_threading as threading |
139 | 139 | |
140 | __all__ = ["TCPServer","UDPServer","ForkingUDPServer","ForkingTCPServer", | |
141 | "ThreadingUDPServer","ThreadingTCPServer","BaseRequestHandler", | |
142 | "StreamRequestHandler","DatagramRequestHandler", | |
140 | __all__ = ["TCPServer", "UDPServer", "ForkingUDPServer", "ForkingTCPServer", | |
141 | "ThreadingUDPServer", "ThreadingTCPServer", "BaseRequestHandler", | |
142 | "StreamRequestHandler", "DatagramRequestHandler", | |
143 | 143 | "ThreadingMixIn", "ForkingMixIn"] |
144 | 144 | if hasattr(socket, "AF_UNIX"): |
145 | __all__.extend(["UnixStreamServer","UnixDatagramServer", | |
145 | __all__.extend(["UnixStreamServer", "UnixDatagramServer", | |
146 | 146 | "ThreadingUnixStreamServer", |
147 | 147 | "ThreadingUnixDatagramServer"]) |
148 | 148 | |
149 | ||
149 | 150 | class BaseServer: |
150 | ||
151 | 151 | """Base class for server classes. |
152 | 152 | |
153 | 153 | Methods for the caller: |
328 | 328 | The default is to print a traceback and continue. |
329 | 329 | |
330 | 330 | """ |
331 | print '-'*40 | |
331 | print '-' * 40 | |
332 | 332 | print 'Exception happened during processing of request from', |
333 | 333 | print client_address |
334 | 334 | import traceback |
335 | traceback.print_exc() # XXX But this goes to stderr! | |
336 | print '-'*40 | |
335 | traceback.print_exc() # XXX But this goes to stderr! | |
336 | print '-' * 40 | |
337 | 337 | |
338 | 338 | |
339 | 339 | class TCPServer(BaseServer): |
390 | 390 | |
391 | 391 | allow_reuse_address = False |
392 | 392 | |
393 | def __init__(self, server_address, RequestHandlerClass, bind_and_activate=True): | |
393 | def __init__(self, server_address, RequestHandlerClass, | |
394 | bind_and_activate=True): | |
394 | 395 | """Constructor. May be extended, do not override.""" |
395 | 396 | BaseServer.__init__(self, server_address, RequestHandlerClass) |
396 | 397 | self.socket = socket.socket(self.address_family, |
469 | 470 | # No need to close anything. |
470 | 471 | pass |
471 | 472 | |
473 | ||
472 | 474 | class ForkingMixIn: |
473 | ||
474 | 475 | """Mix-in class to handle each request in a new process.""" |
475 | 476 | |
476 | 477 | timeout = 300 |
479 | 480 | |
480 | 481 | def collect_children(self): |
481 | 482 | """Internal routine to wait for children that have exited.""" |
482 | if self.active_children is None: return | |
483 | if self.active_children is None: | |
484 | return | |
483 | 485 | while len(self.active_children) >= self.max_children: |
484 | 486 | # XXX: This will wait for any child process, not just ones |
485 | 487 | # spawned by this library. This could confuse other |
489 | 491 | pid, status = os.waitpid(0, 0) |
490 | 492 | except os.error: |
491 | 493 | pid = None |
492 | if pid not in self.active_children: continue | |
494 | if pid not in self.active_children: | |
495 | continue | |
493 | 496 | self.active_children.remove(pid) |
494 | 497 | |
495 | 498 | # XXX: This loop runs more system calls than it ought |
502 | 505 | pid, status = os.waitpid(child, os.WNOHANG) |
503 | 506 | except os.error: |
504 | 507 | pid = None |
505 | if not pid: continue | |
508 | if not pid: | |
509 | continue | |
506 | 510 | try: |
507 | 511 | self.active_children.remove(pid) |
508 | 512 | except ValueError, e: |
509 | raise ValueError('%s. x=%d and list=%r' % (e.message, pid, | |
510 | self.active_children)) | |
513 | raise ValueError('%s. x=%d and list=%r' % \ | |
514 | (e.message, pid, self.active_children)) | |
511 | 515 | |
512 | 516 | def handle_timeout(self): |
513 | 517 | """Wait for zombies after self.timeout seconds of inactivity. |
562 | 566 | |
563 | 567 | def process_request(self, request, client_address): |
564 | 568 | """Start a new thread to process the request.""" |
565 | t = threading.Thread(target = self.process_request_thread, | |
566 | args = (request, client_address)) | |
569 | t = threading.Thread(target=self.process_request_thread, | |
570 | args=(request, client_address)) | |
567 | 571 | if self.daemon_threads: |
568 | t.setDaemon (1) | |
572 | t.setDaemon(1) | |
569 | 573 | t.start() |
570 | 574 | |
571 | 575 | |
572 | class ForkingUDPServer(ForkingMixIn, UDPServer): pass | |
573 | class ForkingTCPServer(ForkingMixIn, TCPServer): pass | |
574 | ||
575 | class ThreadingUDPServer(ThreadingMixIn, UDPServer): pass | |
576 | class ThreadingTCPServer(ThreadingMixIn, TCPServer): pass | |
576 | class ForkingUDPServer(ForkingMixIn, UDPServer): | |
577 | pass | |
578 | ||
579 | ||
580 | class ForkingTCPServer(ForkingMixIn, TCPServer): | |
581 | pass | |
582 | ||
583 | ||
584 | class ThreadingUDPServer(ThreadingMixIn, UDPServer): | |
585 | pass | |
586 | ||
587 | ||
588 | class ThreadingTCPServer(ThreadingMixIn, TCPServer): | |
589 | pass | |
590 | ||
577 | 591 | |
578 | 592 | if hasattr(socket, 'AF_UNIX'): |
579 | 593 | |
583 | 597 | class UnixDatagramServer(UDPServer): |
584 | 598 | address_family = socket.AF_UNIX |
585 | 599 | |
586 | class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): pass | |
587 | ||
588 | class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): pass | |
600 | class ThreadingUnixStreamServer(ThreadingMixIn, UnixStreamServer): | |
601 | pass | |
602 | ||
603 | class ThreadingUnixDatagramServer(ThreadingMixIn, UnixDatagramServer): | |
604 | pass | |
605 | ||
589 | 606 | |
590 | 607 | class BaseRequestHandler: |
591 | 608 |
6 | 6 | |
7 | 7 | |
8 | 8 | class FakeFile(StringIO): |
9 | ||
9 | 10 | def __init__(self, value=None, path=None): |
10 | 11 | init = lambda x: StringIO.__init__(self, x) |
11 | 12 | if value is None: |
70 | 70 | '/tree/file2.txt': 'y', |
71 | 71 | '/tree/subfolder/file3.txt': 'z', |
72 | 72 | '/etc/apache2/apache2.conf': 'Include other.conf', |
73 | HOME: None # So $HOME is a directory | |
73 | HOME: None # So $HOME is a directory | |
74 | 74 | }) |
75 | 75 | PASSWORDS = { |
76 | 76 | 'root': 'root', |
142 | 142 | |
143 | 143 | def check_auth_publickey(self, username, key): |
144 | 144 | self.username = username |
145 | return ssh.AUTH_SUCCESSFUL if self.pubkeys else ssh.AUTH_FAILED | |
145 | return ssh.AUTH_SUCCESSFUL if self.pubkeys else ssh.AUTH_FAILED | |
146 | 146 | |
147 | 147 | def get_allowed_auths(self, username): |
148 | 148 | return 'password,publickey' |
205 | 205 | def prepend(self, val): |
206 | 206 | self.insert(0, val) |
207 | 207 | |
208 | ||
208 | 209 | def expand(path): |
209 | 210 | """ |
210 | 211 | '/foo/bar/biz' => ('/', 'foo', 'bar', 'biz') |
223 | 224 | ret.prepend(directory if directory == os.path.sep else '') |
224 | 225 | return ret |
225 | 226 | |
227 | ||
226 | 228 | def contains(folder, path): |
227 | 229 | """ |
228 | 230 | contains(('a', 'b', 'c'), ('a', 'b')) => True |
229 | 231 | contains('a', 'b', 'c'), ('f',)) => False |
230 | 232 | """ |
231 | 233 | return False if len(path) >= len(folder) else folder[:len(path)] == path |
234 | ||
232 | 235 | |
233 | 236 | def missing_folders(paths): |
234 | 237 | """ |
239 | 242 | for path in paths: |
240 | 243 | expanded = expand(path) |
241 | 244 | for i in range(len(expanded)): |
242 | folder = os.path.join(*expanded[:len(expanded)-i]) | |
245 | folder = os.path.join(*expanded[:len(expanded) - i]) | |
243 | 246 | if folder and folder not in pool: |
244 | 247 | pool.add(folder) |
245 | 248 | ret.append(folder) |
275 | 278 | candidates = [x for x in expanded_files if contains(x, expanded_path)] |
276 | 279 | children = [] |
277 | 280 | for candidate in candidates: |
278 | cut = candidate[:len(expanded_path)+1] | |
281 | cut = candidate[:len(expanded_path) + 1] | |
279 | 282 | if cut not in children: |
280 | 283 | children.append(cut) |
281 | 284 | results = [self.stat(os.path.join(*x)) for x in children] |
328 | 331 | def mkdir(self, path, attr): |
329 | 332 | self.files[path] = None |
330 | 333 | return ssh.SFTP_OK |
334 | ||
331 | 335 | |
332 | 336 | def serve_responses(responses, files, passwords, home, pubkeys, port): |
333 | 337 | """ |
0 | from fabric import tasks | |
1 | from fabric.decorators import task | |
2 | ||
3 | class ClassBasedTask(tasks.Task): | |
4 | def __init__(self): | |
5 | self.name = "foo" | |
6 | self.use_decorated = True | |
7 | ||
8 | def run(self, *args, **kwargs): | |
9 | pass | |
10 | ||
11 | foo = ClassBasedTask() |
0 | from fabric.decorators import task | |
1 | import module_fabtasks as tasks | |
2 | ||
3 | @task | |
4 | def foo(): | |
5 | pass | |
6 | ||
7 | def bar(): | |
8 | pass |
0 | import submodule |
0 | from fabric.tasks import Task | |
1 | ||
2 | class MappingTask(dict, Task): | |
3 | def run(self): | |
4 | pass | |
5 | ||
6 | mapping_task = MappingTask() | |
7 | mapping_task.name = "mapping_task" |
0 | from fabric.api import task | |
1 | ||
2 | import system, db | |
3 | ||
4 | ||
5 | @task | |
6 | def deploy(): | |
7 | pass | |
8 | ||
9 | @task | |
10 | def build_docs(): | |
11 | pass |
29 | 29 | """ |
30 | 30 | cd() should append arg if non-absolute or overwrite otherwise |
31 | 31 | """ |
32 | existing = '/some/existing/path' | |
32 | existing = '/some/existing/path' | |
33 | 33 | additional = 'another' |
34 | 34 | absolute = '/absolute/path' |
35 | 35 | with settings(cwd=existing): |
0 | from __future__ import with_statement | |
1 | ||
2 | from fabric.api import hide, get, show | |
3 | from fabric.contrib.files import upload_template, contains | |
4 | ||
5 | from utils import FabricTest, eq_contents | |
6 | from server import server | |
7 | ||
8 | ||
9 | class TestContrib(FabricTest): | |
10 | # Make sure it knows / is a directory. | |
11 | # This is in lieu of starting down the "actual honest to god fake operating | |
12 | # system" road...:( | |
13 | @server(responses={'test -d /': ""}) | |
14 | def test_upload_template_uses_correct_remote_filename(self): | |
15 | """ | |
16 | upload_template() shouldn't munge final remote filename | |
17 | """ | |
18 | template = self.mkfile('template.txt', 'text') | |
19 | with hide('everything'): | |
20 | upload_template(template, '/') | |
21 | assert self.exists_remotely('/template.txt') | |
22 | ||
23 | @server() | |
24 | def test_upload_template_handles_file_destination(self): | |
25 | """ | |
26 | upload_template() should work OK with file and directory destinations | |
27 | """ | |
28 | template = self.mkfile('template.txt', '%(varname)s') | |
29 | local = self.path('result.txt') | |
30 | remote = '/configfile.txt' | |
31 | var = 'foobar' | |
32 | with hide('everything'): | |
33 | upload_template(template, remote, {'varname': var}) | |
34 | get(remote, local) | |
35 | eq_contents(local, var) | |
36 | ||
37 | @server(responses={ | |
38 | 'egrep "text" "/file.txt"': ( | |
39 | "sudo: unable to resolve host fabric", | |
40 | "", | |
41 | 1 | |
42 | )} | |
43 | ) | |
44 | def test_contains_checks_only_succeeded_flag(self): | |
45 | """ | |
46 | contains() should return False on bad grep even if stdout isn't empty | |
47 | """ | |
48 | with hide('everything'): | |
49 | result = contains('/file.txt', 'text', use_sudo=True) | |
50 | assert result == False |
0 | from nose.tools import eq_ | |
0 | from nose.tools import eq_, ok_ | |
1 | 1 | from fudge import Fake, with_fakes |
2 | import random | |
2 | 3 | |
3 | from fabric import decorators | |
4 | from fabric import decorators, tasks | |
5 | from fabric.state import env | |
4 | 6 | |
7 | def test_task_returns_an_instance_of_wrappedfunctask_object(): | |
8 | def foo(): | |
9 | pass | |
10 | task = decorators.task(foo) | |
11 | ok_(isinstance(task, tasks.WrappedCallableTask)) | |
5 | 12 | |
6 | 13 | def fake_function(*args, **kwargs): |
7 | 14 | """ |
37 | 44 | task = decorators.runs_once(fake_function().returns(return_value)) |
38 | 45 | for i in range(2): |
39 | 46 | eq_(task(), return_value) |
47 | ||
48 | def test_with_settings_passes_env_vars_into_decorated_function(): | |
49 | env.value = True | |
50 | random_return = random.randint(1000, 2000) | |
51 | def some_task(): | |
52 | return env.value | |
53 | decorated_task = decorators.with_settings(value=random_return)(some_task) | |
54 | ok_(some_task(), msg="sanity check") | |
55 | eq_(random_return, decorated_task()) | |
56 |
0 | from __future__ import with_statement | |
1 | ||
2 | import copy | |
3 | from operator import isMappingType | |
4 | import os | |
0 | 5 | import sys |
1 | import copy | |
2 | ||
3 | from fudge.patcher import with_patched_object | |
4 | from fudge import Fake | |
5 | from nose.tools import eq_, raises | |
6 | ||
7 | from fabric.decorators import hosts, roles | |
6 | from contextlib import contextmanager | |
7 | ||
8 | from fudge import Fake, patched_context | |
9 | from nose.tools import ok_, eq_, raises | |
10 | ||
11 | from fabric.decorators import hosts, roles, task | |
8 | 12 | from fabric.main import (get_hosts, parse_arguments, _merge, _escape_split, |
9 | load_fabfile) | |
13 | load_fabfile, list_commands, _task_names, _crawl, crawl, | |
14 | COMMANDS_HEADER, NESTED_REMINDER) | |
10 | 15 | import fabric.state |
11 | 16 | from fabric.state import _AttributeDict |
12 | ||
13 | from utils import mock_streams | |
14 | ||
17 | from fabric.tasks import Task | |
18 | ||
19 | from utils import mock_streams, patched_env, eq_, FabricTest | |
20 | ||
21 | ||
22 | # | |
23 | # Basic CLI stuff | |
24 | # | |
15 | 25 | |
16 | 26 | def test_argument_parsing(): |
17 | 27 | for args, output in [ |
18 | 28 | # Basic |
19 | ('abc', ('abc', [], {}, [], [])), | |
29 | ('abc', ('abc', [], {}, [], [], [])), | |
20 | 30 | # Arg |
21 | ('ab:c', ('ab', ['c'], {}, [], [])), | |
31 | ('ab:c', ('ab', ['c'], {}, [], [], [])), | |
22 | 32 | # Kwarg |
23 | ('a:b=c', ('a', [], {'b':'c'}, [], [])), | |
33 | ('a:b=c', ('a', [], {'b':'c'}, [], [], [])), | |
24 | 34 | # Arg and kwarg |
25 | ('a:b=c,d', ('a', ['d'], {'b':'c'}, [], [])), | |
35 | ('a:b=c,d', ('a', ['d'], {'b':'c'}, [], [], [])), | |
26 | 36 | # Multiple kwargs |
27 | ('a:b=c,d=e', ('a', [], {'b':'c','d':'e'}, [], [])), | |
37 | ('a:b=c,d=e', ('a', [], {'b':'c','d':'e'}, [], [], [])), | |
28 | 38 | # Host |
29 | ('abc:host=foo', ('abc', [], {}, ['foo'], [])), | |
39 | ('abc:host=foo', ('abc', [], {}, ['foo'], [], [])), | |
30 | 40 | # Hosts with single host |
31 | ('abc:hosts=foo', ('abc', [], {}, ['foo'], [])), | |
41 | ('abc:hosts=foo', ('abc', [], {}, ['foo'], [], [])), | |
32 | 42 | # Hosts with multiple hosts |
33 | 43 | # Note: in a real shell, one would need to quote or escape "foo;bar". |
34 | 44 | # But in pure-Python that would get interpreted literally, so we don't. |
35 | ('abc:hosts=foo;bar', ('abc', [], {}, ['foo', 'bar'], [])), | |
36 | # Empty string args | |
37 | ("task:x=y,z=", ('task', [], {'x': 'y', 'z': ''}, [], [])), | |
38 | ("task:foo,,x=y", ('task', ['foo', ''], {'x': 'y'}, [], [])), | |
45 | ('abc:hosts=foo;bar', ('abc', [], {}, ['foo', 'bar'], [], [])), | |
46 | ||
47 | # Exclude hosts | |
48 | ('abc:hosts=foo;bar,exclude_hosts=foo', ('abc', [], {}, ['foo', 'bar'], [], ['foo'])), | |
49 | ('abc:hosts=foo;bar,exclude_hosts=foo;bar', ('abc', [], {}, ['foo', 'bar'], [], ['foo','bar'])), | |
50 | # Empty string args | |
51 | ("task:x=y,z=", ('task', [], {'x': 'y', 'z': ''}, [], [], [])), | |
52 | ("task:foo,,x=y", ('task', ['foo', ''], {'x': 'y'}, [], [], [])), | |
39 | 53 | ]: |
40 | 54 | yield eq_, parse_arguments([args]), [output] |
41 | ||
42 | ||
43 | def eq_hosts(command, host_list): | |
44 | eq_(set(get_hosts(command, [], [])), set(host_list)) | |
45 | ||
46 | ||
47 | def test_hosts_decorator_by_itself(): | |
48 | """ | |
49 | Use of @hosts only | |
50 | """ | |
51 | host_list = ['a', 'b'] | |
52 | @hosts(*host_list) | |
53 | def command(): | |
54 | pass | |
55 | eq_hosts(command, host_list) | |
56 | ||
57 | ||
58 | fake_roles = { | |
59 | 'r1': ['a', 'b'], | |
60 | 'r2': ['b', 'c'] | |
61 | } | |
62 | ||
63 | @with_patched_object( | |
64 | 'fabric.state', 'env', _AttributeDict({'roledefs': fake_roles}) | |
65 | ) | |
66 | def test_roles_decorator_by_itself(): | |
67 | """ | |
68 | Use of @roles only | |
69 | """ | |
70 | @roles('r1') | |
71 | def command(): | |
72 | pass | |
73 | eq_hosts(command, ['a', 'b']) | |
74 | ||
75 | ||
76 | @with_patched_object( | |
77 | 'fabric.state', 'env', _AttributeDict({'roledefs': fake_roles}) | |
78 | ) | |
79 | def test_hosts_and_roles_together(): | |
80 | """ | |
81 | Use of @roles and @hosts together results in union of both | |
82 | """ | |
83 | @roles('r1', 'r2') | |
84 | @hosts('a') | |
85 | def command(): | |
86 | pass | |
87 | eq_hosts(command, ['a', 'b', 'c']) | |
88 | ||
89 | ||
90 | @with_patched_object('fabric.state', 'env', {'hosts': ['foo']}) | |
91 | def test_hosts_decorator_overrides_env_hosts(): | |
92 | """ | |
93 | If @hosts is used it replaces any env.hosts value | |
94 | """ | |
95 | @hosts('bar') | |
96 | def command(): | |
97 | pass | |
98 | eq_hosts(command, ['bar']) | |
99 | assert 'foo' not in get_hosts(command, [], []) | |
100 | ||
101 | ||
102 | @with_patched_object( | |
103 | 'fabric.state', 'env', {'hosts': [' foo ', 'bar '], 'roles': []} | |
104 | ) | |
105 | def test_hosts_stripped_env_hosts(): | |
106 | """ | |
107 | Make sure hosts defined in env.hosts are cleaned of extra spaces | |
108 | """ | |
109 | def command(): | |
110 | pass | |
111 | eq_hosts(command, ['foo', 'bar']) | |
112 | ||
113 | ||
114 | spaced_roles = { | |
115 | 'r1': [' a ', ' b '], | |
116 | 'r2': ['b', 'c'], | |
117 | } | |
118 | ||
119 | @with_patched_object( | |
120 | 'fabric.state', 'env', _AttributeDict({'roledefs': spaced_roles}) | |
121 | ) | |
122 | def test_roles_stripped_env_hosts(): | |
123 | """ | |
124 | Make sure hosts defined in env.roles are cleaned of extra spaces | |
125 | """ | |
126 | @roles('r1') | |
127 | def command(): | |
128 | pass | |
129 | eq_hosts(command, ['a', 'b']) | |
130 | ||
131 | ||
132 | def test_hosts_decorator_expands_single_iterable(): | |
133 | """ | |
134 | @hosts(iterable) should behave like @hosts(*iterable) | |
135 | """ | |
136 | host_list = ['foo', 'bar'] | |
137 | @hosts(host_list) | |
138 | def command(): | |
139 | pass | |
140 | eq_(command.hosts, host_list) | |
141 | ||
142 | ||
143 | def test_roles_decorator_expands_single_iterable(): | |
144 | """ | |
145 | @roles(iterable) should behave like @roles(*iterable) | |
146 | """ | |
147 | role_list = ['foo', 'bar'] | |
148 | @roles(role_list) | |
149 | def command(): | |
150 | pass | |
151 | eq_(command.roles, role_list) | |
152 | ||
153 | ||
154 | @with_patched_object( | |
155 | 'fabric.state', 'env', _AttributeDict({'roledefs': fake_roles}) | |
156 | ) | |
157 | @raises(SystemExit) | |
158 | @mock_streams('stderr') | |
159 | def test_aborts_on_nonexistent_roles(): | |
160 | """ | |
161 | Aborts if any given roles aren't found | |
162 | """ | |
163 | _merge([], ['badrole']) | |
164 | ||
165 | ||
166 | lazy_role = {'r1': lambda: ['a', 'b']} | |
167 | ||
168 | @with_patched_object( | |
169 | 'fabric.state', 'env', _AttributeDict({'roledefs': lazy_role}) | |
170 | ) | |
171 | def test_lazy_roles(): | |
172 | """ | |
173 | Roles may be callables returning lists, as well as regular lists | |
174 | """ | |
175 | @roles('r1') | |
176 | def command(): | |
177 | pass | |
178 | eq_hosts(command, ['a', 'b']) | |
179 | 55 | |
180 | 56 | |
181 | 57 | def test_escaped_task_arg_split(): |
188 | 64 | ['foo', 'bar,biz,baz', 'what comes after baz?'] |
189 | 65 | ) |
190 | 66 | |
67 | ||
68 | # | |
69 | # Host/role decorators | |
70 | # | |
71 | ||
72 | def eq_hosts(command, host_list): | |
73 | eq_(set(get_hosts(command, [], [], [])), set(host_list)) | |
74 | ||
75 | def test_hosts_decorator_by_itself(): | |
76 | """ | |
77 | Use of @hosts only | |
78 | """ | |
79 | host_list = ['a', 'b'] | |
80 | ||
81 | @hosts(*host_list) | |
82 | def command(): | |
83 | pass | |
84 | ||
85 | eq_hosts(command, host_list) | |
86 | ||
87 | ||
88 | fake_roles = { | |
89 | 'r1': ['a', 'b'], | |
90 | 'r2': ['b', 'c'] | |
91 | } | |
92 | ||
93 | @patched_env({'roledefs': fake_roles}) | |
94 | def test_roles_decorator_by_itself(): | |
95 | """ | |
96 | Use of @roles only | |
97 | """ | |
98 | @roles('r1') | |
99 | def command(): | |
100 | pass | |
101 | eq_hosts(command, ['a', 'b']) | |
102 | ||
103 | ||
104 | @patched_env({'roledefs': fake_roles}) | |
105 | def test_hosts_and_roles_together(): | |
106 | """ | |
107 | Use of @roles and @hosts together results in union of both | |
108 | """ | |
109 | @roles('r1', 'r2') | |
110 | @hosts('a') | |
111 | def command(): | |
112 | pass | |
113 | eq_hosts(command, ['a', 'b', 'c']) | |
114 | ||
115 | tuple_roles = { | |
116 | 'r1': ('a', 'b'), | |
117 | 'r2': ('b', 'c'), | |
118 | } | |
119 | ||
120 | ||
121 | @patched_env({'roledefs': tuple_roles}) | |
122 | def test_roles_as_tuples(): | |
123 | """ | |
124 | Test that a list of roles as a tuple succeeds | |
125 | """ | |
126 | @roles('r1') | |
127 | def command(): | |
128 | pass | |
129 | eq_hosts(command, ['a', 'b']) | |
130 | ||
131 | ||
132 | @patched_env({'hosts': ('foo', 'bar')}) | |
133 | def test_hosts_as_tuples(): | |
134 | """ | |
135 | Test that a list of hosts as a tuple succeeds | |
136 | """ | |
137 | def command(): | |
138 | pass | |
139 | eq_hosts(command, ['foo', 'bar']) | |
140 | ||
141 | ||
142 | @patched_env({'hosts': ['foo']}) | |
143 | def test_hosts_decorator_overrides_env_hosts(): | |
144 | """ | |
145 | If @hosts is used it replaces any env.hosts value | |
146 | """ | |
147 | @hosts('bar') | |
148 | def command(): | |
149 | pass | |
150 | eq_hosts(command, ['bar']) | |
151 | assert 'foo' not in get_hosts(command, [], [], []) | |
152 | ||
153 | @patched_env({'hosts': ['foo']}) | |
154 | def test_hosts_decorator_overrides_env_hosts_with_task_decorator_first(): | |
155 | """ | |
156 | If @hosts is used it replaces any env.hosts value even with @task | |
157 | """ | |
158 | @task | |
159 | @hosts('bar') | |
160 | def command(): | |
161 | pass | |
162 | eq_hosts(command, ['bar']) | |
163 | assert 'foo' not in get_hosts(command, [], []) | |
164 | ||
165 | @patched_env({'hosts': ['foo']}) | |
166 | def test_hosts_decorator_overrides_env_hosts_with_task_decorator_last(): | |
167 | @hosts('bar') | |
168 | @task | |
169 | def command(): | |
170 | pass | |
171 | eq_hosts(command, ['bar']) | |
172 | assert 'foo' not in get_hosts(command, [], []) | |
173 | ||
174 | ||
175 | @patched_env({'hosts': [' foo ', 'bar '], 'roles': [], | |
176 | 'exclude_hosts':[]}) | |
177 | def test_hosts_stripped_env_hosts(): | |
178 | """ | |
179 | Make sure hosts defined in env.hosts are cleaned of extra spaces | |
180 | """ | |
181 | def command(): | |
182 | pass | |
183 | eq_hosts(command, ['foo', 'bar']) | |
184 | ||
185 | ||
186 | spaced_roles = { | |
187 | 'r1': [' a ', ' b '], | |
188 | 'r2': ['b', 'c'], | |
189 | } | |
190 | ||
191 | @patched_env({'roledefs': spaced_roles}) | |
192 | def test_roles_stripped_env_hosts(): | |
193 | """ | |
194 | Make sure hosts defined in env.roles are cleaned of extra spaces | |
195 | """ | |
196 | @roles('r1') | |
197 | def command(): | |
198 | pass | |
199 | eq_hosts(command, ['a', 'b']) | |
200 | ||
201 | ||
202 | def test_hosts_decorator_expands_single_iterable(): | |
203 | """ | |
204 | @hosts(iterable) should behave like @hosts(*iterable) | |
205 | """ | |
206 | host_list = ['foo', 'bar'] | |
207 | ||
208 | @hosts(host_list) | |
209 | def command(): | |
210 | pass | |
211 | ||
212 | eq_(command.hosts, host_list) | |
213 | ||
214 | def test_roles_decorator_expands_single_iterable(): | |
215 | """ | |
216 | @roles(iterable) should behave like @roles(*iterable) | |
217 | """ | |
218 | role_list = ['foo', 'bar'] | |
219 | ||
220 | @roles(role_list) | |
221 | def command(): | |
222 | pass | |
223 | ||
224 | eq_(command.roles, role_list) | |
225 | ||
226 | ||
227 | # | |
228 | # Basic role behavior | |
229 | # | |
230 | ||
231 | @patched_env({'roledefs': fake_roles}) | |
232 | @raises(SystemExit) | |
233 | @mock_streams('stderr') | |
234 | def test_aborts_on_nonexistent_roles(): | |
235 | """ | |
236 | Aborts if any given roles aren't found | |
237 | """ | |
238 | _merge([], ['badrole']) | |
239 | ||
240 | ||
241 | lazy_role = {'r1': lambda: ['a', 'b']} | |
242 | ||
243 | @patched_env({'roledefs': lazy_role}) | |
244 | def test_lazy_roles(): | |
245 | """ | |
246 | Roles may be callables returning lists, as well as regular lists | |
247 | """ | |
248 | @roles('r1') | |
249 | def command(): | |
250 | pass | |
251 | eq_hosts(command, ['a', 'b']) | |
252 | ||
253 | ||
254 | # | |
255 | # Fabfile loading | |
256 | # | |
191 | 257 | |
192 | 258 | def run_load_fabfile(path, sys_path): |
193 | 259 | # Module-esque object |
203 | 269 | eq_(sys.path, sys_path) |
204 | 270 | # Restore |
205 | 271 | sys.path = orig_path |
206 | ||
207 | 272 | |
208 | 273 | def test_load_fabfile_should_not_remove_real_path_elements(): |
209 | 274 | for fabfile_path, sys_dot_path in ( |
220 | 285 | ('fabfile.py', ['', 'some_dir', 'some_other_dir']), |
221 | 286 | ): |
222 | 287 | yield run_load_fabfile, fabfile_path, sys_dot_path |
288 | ||
289 | ||
290 | # | |
291 | # Namespacing and new-style tasks | |
292 | # | |
293 | ||
294 | def fabfile(name): | |
295 | return os.path.join(os.path.dirname(__file__), 'support', name) | |
296 | ||
297 | @contextmanager | |
298 | def path_prefix(module): | |
299 | i = 0 | |
300 | sys.path.insert(i, os.path.dirname(module)) | |
301 | yield | |
302 | sys.path.pop(i) | |
303 | ||
304 | class TestNamespaces(FabricTest): | |
305 | def setup(self): | |
306 | # Parent class preserves current env | |
307 | super(TestNamespaces, self).setup() | |
308 | # Reset new-style-tests flag so running tests via Fab itself doesn't | |
309 | # muck with it. | |
310 | import fabric.state | |
311 | if 'new_style_tasks' in fabric.state.env: | |
312 | del fabric.state.env['new_style_tasks'] | |
313 | ||
314 | def test_implicit_discovery(self): | |
315 | """ | |
316 | Default to automatically collecting all tasks in a fabfile module | |
317 | """ | |
318 | implicit = fabfile("implicit_fabfile.py") | |
319 | with path_prefix(implicit): | |
320 | docs, funcs = load_fabfile(implicit) | |
321 | eq_(len(funcs), 2) | |
322 | ok_("foo" in funcs) | |
323 | ok_("bar" in funcs) | |
324 | ||
325 | def test_explicit_discovery(self): | |
326 | """ | |
327 | If __all__ is present, only collect the tasks it specifies | |
328 | """ | |
329 | explicit = fabfile("explicit_fabfile.py") | |
330 | with path_prefix(explicit): | |
331 | docs, funcs = load_fabfile(explicit) | |
332 | eq_(len(funcs), 1) | |
333 | ok_("foo" in funcs) | |
334 | ok_("bar" not in funcs) | |
335 | ||
336 | def test_should_load_decorated_tasks_only_if_one_is_found(self): | |
337 | """ | |
338 | If any new-style tasks are found, *only* new-style tasks should load | |
339 | """ | |
340 | module = fabfile('decorated_fabfile.py') | |
341 | with path_prefix(module): | |
342 | docs, funcs = load_fabfile(module) | |
343 | eq_(len(funcs), 1) | |
344 | ok_('foo' in funcs) | |
345 | ||
346 | def test_class_based_tasks_are_found_with_proper_name(self): | |
347 | """ | |
348 | Wrapped new-style tasks should preserve their function names | |
349 | """ | |
350 | module = fabfile('decorated_fabfile_with_classbased_task.py') | |
351 | from fabric.state import env | |
352 | with path_prefix(module): | |
353 | docs, funcs = load_fabfile(module) | |
354 | eq_(len(funcs), 1) | |
355 | ok_('foo' in funcs) | |
356 | ||
357 | def test_recursion_steps_into_nontask_modules(self): | |
358 | """ | |
359 | Recursive loading will continue through modules with no tasks | |
360 | """ | |
361 | module = fabfile('deep') | |
362 | with path_prefix(module): | |
363 | docs, funcs = load_fabfile(module) | |
364 | eq_(len(funcs), 1) | |
365 | ok_('submodule.subsubmodule.deeptask' in _task_names(funcs)) | |
366 | ||
367 | def test_newstyle_task_presence_skips_classic_task_modules(self): | |
368 | """ | |
369 | Classic-task-only modules shouldn't add tasks if any new-style tasks exist | |
370 | """ | |
371 | module = fabfile('deep') | |
372 | with path_prefix(module): | |
373 | docs, funcs = load_fabfile(module) | |
374 | eq_(len(funcs), 1) | |
375 | ok_('submodule.classic_task' not in _task_names(funcs)) | |
376 | ||
377 | ||
378 | # | |
379 | # --list output | |
380 | # | |
381 | ||
382 | def eq_output(docstring, format_, expected): | |
383 | return eq_( | |
384 | "\n".join(list_commands(docstring, format_)), | |
385 | expected | |
386 | ) | |
387 | ||
388 | def list_output(module, format_, expected): | |
389 | module = fabfile(module) | |
390 | with path_prefix(module): | |
391 | docstring, tasks = load_fabfile(module) | |
392 | with patched_context(fabric.state, 'commands', tasks): | |
393 | eq_output(docstring, format_, expected) | |
394 | ||
395 | def test_list_output(): | |
396 | lead = ":\n\n " | |
397 | normal_head = COMMANDS_HEADER + lead | |
398 | nested_head = COMMANDS_HEADER + NESTED_REMINDER + lead | |
399 | for desc, module, format_, expected in ( | |
400 | ("shorthand (& with namespacing)", 'deep', 'short', "submodule.subsubmodule.deeptask"), | |
401 | ("normal (& with namespacing)", 'deep', 'normal', normal_head + "submodule.subsubmodule.deeptask"), | |
402 | ("normal (with docstring)", 'docstring', 'normal', normal_head + "foo Foos!"), | |
403 | ("nested (leaf only)", 'deep', 'nested', nested_head + """submodule: | |
404 | subsubmodule: | |
405 | deeptask"""), | |
406 | ("nested (full)", 'tree', 'nested', nested_head + """build_docs | |
407 | deploy | |
408 | db: | |
409 | migrate | |
410 | system: | |
411 | install_package | |
412 | debian: | |
413 | update_apt"""), | |
414 | ): | |
415 | list_output.description = "--list output: %s" % desc | |
416 | yield list_output, module, format_, expected | |
417 | del list_output.description | |
418 | ||
419 | ||
420 | def name_to_task(name): | |
421 | t = Task() | |
422 | t.name = name | |
423 | return t | |
424 | ||
425 | def strings_to_tasks(d): | |
426 | ret = {} | |
427 | for key, value in d.iteritems(): | |
428 | if isMappingType(value): | |
429 | val = strings_to_tasks(value) | |
430 | else: | |
431 | val = name_to_task(value) | |
432 | ret[key] = val | |
433 | return ret | |
434 | ||
435 | def test_task_names(): | |
436 | for desc, input_, output in ( | |
437 | ('top level (single)', {'a': 5}, ['a']), | |
438 | ('top level (multiple, sorting)', {'a': 5, 'b': 6}, ['a', 'b']), | |
439 | ('just nested', {'a': {'b': 5}}, ['a.b']), | |
440 | ('mixed', {'a': 5, 'b': {'c': 6}}, ['a', 'b.c']), | |
441 | ('top level comes before nested', {'z': 5, 'b': {'c': 6}}, ['z', 'b.c']), | |
442 | ('peers sorted equally', {'z': 5, 'b': {'c': 6}, 'd': {'e': 7}}, ['z', 'b.c', 'd.e']), | |
443 | ( | |
444 | 'complex tree', | |
445 | { | |
446 | 'z': 5, | |
447 | 'b': { | |
448 | 'c': 6, | |
449 | 'd': { | |
450 | 'e': { | |
451 | 'f': '7' | |
452 | } | |
453 | }, | |
454 | 'g': 8 | |
455 | }, | |
456 | 'h': 9, | |
457 | 'w': { | |
458 | 'y': 10 | |
459 | } | |
460 | }, | |
461 | ['h', 'z', 'b.c', 'b.g', 'b.d.e.f', 'w.y'] | |
462 | ), | |
463 | ): | |
464 | eq_.description = "task name flattening: %s" % desc | |
465 | yield eq_, _task_names(strings_to_tasks(input_)), output | |
466 | del eq_.description | |
467 | ||
468 | ||
469 | def test_crawl(): | |
470 | for desc, name, mapping, output in ( | |
471 | ("base case", 'a', {'a': 5}, 5), | |
472 | ("one level", 'a.b', {'a': {'b': 5}}, 5), | |
473 | ("deep", 'a.b.c.d.e', {'a': {'b': {'c': {'d': {'e': 5}}}}}, 5), | |
474 | ("full tree", 'a.b.c', {'a': {'b': {'c': 5}, 'd': 6}, 'z': 7}, 5) | |
475 | ): | |
476 | eq_.description = "crawling dotted names: %s" % desc | |
477 | yield eq_, _crawl(name, mapping), output | |
478 | del eq_.description | |
479 | ||
480 | ||
481 | def test_mapping_task_classes(): | |
482 | """ | |
483 | Task classes implementing the mapping interface shouldn't break --list | |
484 | """ | |
485 | docstring, tasks = load_fabfile(fabfile('mapping')) | |
486 | list_output('mapping', 'normal', COMMANDS_HEADER + """:\n | |
487 | mapping_task""") |
5 | 5 | import sys |
6 | 6 | |
7 | 7 | import paramiko |
8 | from nose.tools import with_setup, ok_ | |
8 | from nose.tools import with_setup, raises, ok_ | |
9 | 9 | from fudge import (Fake, clear_calls, clear_expectations, patch_object, verify, |
10 | 10 | with_patched_object, patched_context, with_fakes) |
11 | 11 | |
13 | 13 | from fabric.network import (HostConnectionCache, join_host_strings, normalize, |
14 | 14 | denormalize) |
15 | 15 | from fabric.io import output_loop |
16 | import fabric.network # So I can call patch_object correctly. Sigh. | |
16 | import fabric.network # So I can call patch_object correctly. Sigh. | |
17 | 17 | from fabric.state import env, output, _get_system_username |
18 | from fabric.operations import run, sudo | |
18 | from fabric.operations import run, sudo, prompt | |
19 | 19 | |
20 | 20 | from utils import * |
21 | 21 | from server import (server, PORT, RESPONSES, PASSWORDS, CLIENT_PRIVKEY, USER, |
94 | 94 | 'localhost', username + '@localhost:22'), |
95 | 95 | ): |
96 | 96 | eq_.description = "Host-string denormalization: %s" % description |
97 | yield eq_, denormalize(string1), denormalize(string2) | |
97 | yield eq_, denormalize(string1), denormalize(string2) | |
98 | 98 | del eq_.description |
99 | ||
100 | 99 | |
101 | 100 | # |
102 | 101 | # Connection caching |
103 | 102 | # |
104 | ||
105 | 103 | @staticmethod |
106 | 104 | @with_fakes |
107 | 105 | def check_connection_calls(host_strings, num_calls): |
135 | 133 | TestNetwork.check_connection_calls.description = description |
136 | 134 | yield TestNetwork.check_connection_calls, host_strings, num_calls |
137 | 135 | |
138 | ||
139 | 136 | def test_connection_cache_deletion(self): |
140 | 137 | """ |
141 | 138 | HostConnectionCache should delete correctly w/ non-full keys |
158 | 155 | # |
159 | 156 | # Connection loop flow |
160 | 157 | # |
161 | ||
162 | 158 | @server() |
163 | 159 | def test_saved_authentication_returns_client_object(self): |
164 | 160 | cache = HostConnectionCache() |
165 | 161 | assert isinstance(cache[env.host_string], paramiko.SSHClient) |
166 | 162 | |
167 | ||
168 | 163 | @server() |
169 | 164 | @with_fakes |
170 | 165 | def test_prompts_for_password_without_good_authentication(self): |
171 | 166 | env.password = None |
167 | with password_response(PASSWORDS[env.user], times_called=1): | |
168 | cache = HostConnectionCache() | |
169 | cache[env.host_string] | |
170 | ||
171 | ||
172 | @raises(SystemExit) | |
173 | @with_patched_object(output, 'aborts', False) | |
174 | def test_aborts_on_prompt_with_abort_on_prompt(self): | |
175 | env.abort_on_prompts = True | |
176 | prompt("This will abort") | |
177 | ||
178 | ||
179 | @server() | |
180 | @raises(SystemExit) | |
181 | @with_patched_object(output, 'aborts', False) | |
182 | def test_aborts_on_password_prompt_with_abort_on_prompt(self): | |
183 | env.password = None | |
184 | env.abort_on_prompts = True | |
172 | 185 | with password_response(PASSWORDS[env.user], times_called=1): |
173 | 186 | cache = HostConnectionCache() |
174 | 187 | cache[env.host_string] |
194 | 207 | # Also test that the captured value matches, too. |
195 | 208 | eq_(output_string, result) |
196 | 209 | |
197 | ||
198 | 210 | @server() |
199 | 211 | def test_sudo_prompt_kills_capturing(self): |
200 | 212 | """ |
203 | 215 | cmd = "ls /simple" |
204 | 216 | with hide('everything'): |
205 | 217 | eq_(sudo(cmd), RESPONSES[cmd]) |
206 | ||
207 | 218 | |
208 | 219 | @server() |
209 | 220 | def test_password_memory_on_user_switch(self): |
241 | 252 | ): |
242 | 253 | sudo("ls /simple") |
243 | 254 | |
244 | ||
245 | 255 | @mock_streams('stderr') |
246 | 256 | @server() |
247 | 257 | def test_password_prompt_displays_host_string(self): |
256 | 266 | regex = r'^\[%s\] Login password: ' % env.host_string |
257 | 267 | assert_contains(regex, sys.stderr.getvalue()) |
258 | 268 | |
259 | ||
260 | 269 | @mock_streams('stderr') |
261 | 270 | @server(pubkeys=True) |
262 | 271 | def test_passphrase_prompt_displays_host_string(self): |
271 | 280 | run("ls /simple") |
272 | 281 | regex = r'^\[%s\] Login password: ' % env.host_string |
273 | 282 | assert_contains(regex, sys.stderr.getvalue()) |
274 | ||
275 | 283 | |
276 | 284 | def test_sudo_prompt_display_passthrough(self): |
277 | 285 | """ |
317 | 325 | [%(prefix)s] out: sudo password: """ % {'prefix': env.host_string} |
318 | 326 | eq_(expected[1:], sys.stdall.getvalue()) |
319 | 327 | |
320 | ||
321 | 328 | @mock_streams('both') |
322 | 329 | @server( |
323 | 330 | pubkeys=True, |
350 | 357 | """ % {'prefix': env.host_string} |
351 | 358 | eq_(expected[1:], sys.stdall.getvalue()) |
352 | 359 | |
353 | ||
354 | 360 | @mock_streams('both') |
355 | 361 | @server(pubkeys=True, responses={'silent': '', 'normal': 'foo'}) |
356 | 362 | def test_silent_commands_should_not_have_blank_line(self): |
383 | 389 | """ % {'prefix': env.host_string} |
384 | 390 | eq_(expected[1:], sys.stdall.getvalue()) |
385 | 391 | |
386 | ||
387 | 392 | @mock_streams('both') |
388 | 393 | @server( |
389 | 394 | pubkeys=True, |
412 | 417 | """ % {'prefix': env.host_string} |
413 | 418 | eq_(expected[1:], sys.stdall.getvalue()) |
414 | 419 | |
415 | ||
416 | 420 | @mock_streams('both') |
417 | 421 | @server( |
418 | 422 | pubkeys=True, |
2 | 2 | import os |
3 | 3 | import shutil |
4 | 4 | import sys |
5 | import tempfile | |
6 | 5 | import types |
7 | 6 | from contextlib import nested |
8 | 7 | from StringIO import StringIO |
8 | ||
9 | import unittest | |
10 | import random | |
11 | import types | |
9 | 12 | |
10 | 13 | from nose.tools import raises, eq_ |
11 | 14 | from fudge import with_patched_object |
16 | 19 | from fabric.api import get, put, hide, show, cd, lcd, local, run, sudo |
17 | 20 | from fabric.sftp import SFTP |
18 | 21 | |
22 | from fabric.decorators import with_settings | |
19 | 23 | from utils import * |
20 | 24 | from server import (server, PORT, RESPONSES, FILES, PASSWORDS, CLIENT_PRIVKEY, |
21 | 25 | USER, CLIENT_PRIVKEY_PASSPHRASE) |
23 | 27 | # |
24 | 28 | # require() |
25 | 29 | # |
30 | ||
26 | 31 | |
27 | 32 | def test_require_single_existing_key(): |
28 | 33 | """ |
79 | 84 | assert 'foo' in err |
80 | 85 | |
81 | 86 | |
87 | @mock_streams('stderr') | |
88 | @raises(SystemExit) | |
89 | def test_require_iterable_provided_by_key(): | |
90 | """ | |
91 | When given a provided_by iterable value, require() raises SystemExit | |
92 | """ | |
93 | # 'version' is one of the default values, so we know it'll be there | |
94 | def fake_providing_function(): | |
95 | pass | |
96 | require('foo', provided_by=[fake_providing_function]) | |
97 | ||
98 | ||
99 | @mock_streams('stderr') | |
100 | @raises(SystemExit) | |
101 | def test_require_noniterable_provided_by_key(): | |
102 | """ | |
103 | When given a provided_by noniterable value, require() raises SystemExit | |
104 | """ | |
105 | # 'version' is one of the default values, so we know it'll be there | |
106 | def fake_providing_function(): | |
107 | pass | |
108 | require('foo', provided_by=fake_providing_function) | |
109 | ||
110 | ||
82 | 111 | # |
83 | 112 | # prompt() |
84 | 113 | # |
85 | 114 | |
86 | 115 | def p(x): |
87 | 116 | print x, |
117 | ||
88 | 118 | |
89 | 119 | @mock_streams('stdout') |
90 | 120 | @with_patched_object(sys.modules['__builtin__'], 'raw_input', p) |
107 | 137 | d = "default!" |
108 | 138 | prompt(s, default=d) |
109 | 139 | eq_(sys.stdout.getvalue(), "%s [%s] " % (s, d)) |
110 | ||
140 | ||
111 | 141 | |
112 | 142 | # |
113 | 143 | # run()/sudo() |
130 | 160 | eq_(_sudo_prefix(user=None), env.sudo_prefix % env.sudo_prompt) |
131 | 161 | |
132 | 162 | |
163 | @with_settings(use_shell=True) | |
133 | 164 | def test_shell_wrap(): |
134 | 165 | prefix = "prefix" |
135 | 166 | command = "command" |
148 | 179 | del eq_.description |
149 | 180 | |
150 | 181 | |
182 | @with_settings(use_shell=True) | |
151 | 183 | def test_shell_wrap_escapes_command_if_shell_is_true(): |
152 | 184 | """ |
153 | 185 | _shell_wrap() escapes given command if shell=True |
242 | 274 | # |
243 | 275 | |
244 | 276 | class TestFileTransfers(FabricTest): |
245 | def setup(self): | |
246 | super(TestFileTransfers, self).setup() | |
247 | self.tmpdir = tempfile.mkdtemp() | |
248 | ||
249 | def teardown(self): | |
250 | super(TestFileTransfers, self).teardown() | |
251 | shutil.rmtree(self.tmpdir) | |
252 | ||
253 | def path(self, *path_parts): | |
254 | return os.path.join(self.tmpdir, *path_parts) | |
255 | ||
256 | def exists_remotely(self, path): | |
257 | return SFTP(env.host_string).exists(path) | |
258 | ||
259 | def exists_locally(self, path): | |
260 | return os.path.exists(path) | |
261 | ||
262 | ||
263 | 277 | # |
264 | 278 | # get() |
265 | 279 | # |
266 | ||
267 | 280 | @server(files={'/home/user/.bashrc': 'bash!'}, home='/home/user') |
268 | 281 | def test_get_relative_remote_dir_uses_home(self): |
269 | 282 | """ |
273 | 286 | # Another if-it-doesn't-error-out-it-passed test; meh. |
274 | 287 | eq_(get('.bashrc', self.path()), [self.path('.bashrc')]) |
275 | 288 | |
276 | ||
277 | ||
278 | 289 | @server() |
279 | 290 | def test_get_single_file(self): |
280 | 291 | """ |
285 | 296 | with hide('everything'): |
286 | 297 | get(remote, local) |
287 | 298 | eq_contents(local, FILES[remote]) |
288 | ||
289 | 299 | |
290 | 300 | @server() |
291 | 301 | def test_get_sibling_globs(self): |
298 | 308 | for remote in remotes: |
299 | 309 | eq_contents(self.path(remote), FILES[remote]) |
300 | 310 | |
301 | ||
302 | 311 | @server() |
303 | 312 | def test_get_single_file_in_folder(self): |
304 | 313 | """ |
308 | 317 | with hide('everything'): |
309 | 318 | get('folder', self.tmpdir) |
310 | 319 | eq_contents(self.path(remote), FILES[remote]) |
311 | ||
312 | 320 | |
313 | 321 | @server() |
314 | 322 | def test_get_tree(self): |
320 | 328 | leaves = filter(lambda x: x[0].startswith('/tree'), FILES.items()) |
321 | 329 | for path, contents in leaves: |
322 | 330 | eq_contents(self.path(path[1:]), contents) |
323 | ||
324 | 331 | |
325 | 332 | @server() |
326 | 333 | def test_get_tree_with_implicit_local_path(self): |
341 | 348 | if os.path.exists(dirname): |
342 | 349 | shutil.rmtree(dirname) |
343 | 350 | |
344 | ||
345 | 351 | @server() |
346 | 352 | def test_get_absolute_path_should_save_relative(self): |
347 | 353 | """ |
354 | 360 | assert self.exists_locally(os.path.join(lpath, 'subfolder')) |
355 | 361 | assert not self.exists_locally(os.path.join(lpath, 'tree/subfolder')) |
356 | 362 | |
357 | ||
358 | 363 | @server() |
359 | 364 | def test_path_formatstr_nonrecursively_is_just_filename(self): |
360 | 365 | """ |
365 | 370 | with hide('everything'): |
366 | 371 | get('/tree/subfolder/file3.txt', ltarget) |
367 | 372 | assert self.exists_locally(os.path.join(lpath, 'file3.txt')) |
368 | ||
369 | 373 | |
370 | 374 | @server() |
371 | 375 | @mock_streams('stderr') |
386 | 390 | """ |
387 | 391 | self._invalid_file_obj_situations('/tree') |
388 | 392 | |
389 | ||
390 | 393 | @server() |
391 | 394 | def test_get_single_file_absolutely(self): |
392 | 395 | """ |
397 | 400 | get(target, self.tmpdir) |
398 | 401 | eq_contents(self.path(os.path.basename(target)), FILES[target]) |
399 | 402 | |
400 | ||
401 | 403 | @server() |
402 | 404 | def test_get_file_with_nonexistent_target(self): |
403 | 405 | """ |
408 | 410 | with hide('everything'): |
409 | 411 | get(target, local) |
410 | 412 | eq_contents(local, FILES[target]) |
411 | ||
412 | 413 | |
413 | 414 | @server() |
414 | 415 | @mock_streams('stderr') |
425 | 426 | assert "%s already exists" % local in sys.stderr.getvalue() |
426 | 427 | eq_contents(local, FILES[target]) |
427 | 428 | |
428 | ||
429 | 429 | @server() |
430 | 430 | def test_get_file_to_directory(self): |
431 | 431 | """ |
438 | 438 | with hide('everything'): |
439 | 439 | get(target, self.tmpdir) |
440 | 440 | eq_contents(self.path(target), FILES[target]) |
441 | ||
442 | 441 | |
443 | 442 | @server(port=2200) |
444 | 443 | @server(port=2201) |
464 | 463 | tmp, "127.0.0.1-%s" % port, 'file3.txt' |
465 | 464 | )) |
466 | 465 | |
467 | ||
468 | 466 | @server() |
469 | 467 | def test_get_from_empty_directory_uses_cwd(self): |
470 | 468 | """ |
476 | 474 | # server.FILES. |
477 | 475 | for x in "file.txt file2.txt tree/file1.txt".split(): |
478 | 476 | assert os.path.exists(os.path.join(self.tmpdir, x)) |
479 | ||
480 | 477 | |
481 | 478 | @server() |
482 | 479 | def _get_to_cwd(self, arg): |
506 | 503 | """ |
507 | 504 | self._get_to_cwd(None) |
508 | 505 | |
509 | ||
510 | 506 | @server() |
511 | 507 | def test_get_should_accept_file_like_objects(self): |
512 | 508 | """ |
517 | 513 | with hide('everything'): |
518 | 514 | get(target, fake_file) |
519 | 515 | eq_(fake_file.getvalue(), FILES[target]) |
520 | ||
521 | 516 | |
522 | 517 | @server() |
523 | 518 | def test_get_interpolation_without_host(self): |
535 | 530 | get('/folder/file3.txt', local_path) |
536 | 531 | assert self.exists_locally(tmp + "bar/file3.txt") |
537 | 532 | |
538 | ||
539 | 533 | @server() |
540 | 534 | def test_get_returns_list_of_local_paths(self): |
541 | 535 | """ |
547 | 541 | files = ['file1.txt', 'file2.txt', 'subfolder/file3.txt'] |
548 | 542 | eq_(map(lambda x: os.path.join(d, 'tree', x), files), retval) |
549 | 543 | |
550 | ||
551 | 544 | @server() |
552 | 545 | def test_get_returns_none_for_stringio(self): |
553 | 546 | """ |
556 | 549 | with hide('everything'): |
557 | 550 | eq_([], get('/file.txt', StringIO())) |
558 | 551 | |
559 | ||
560 | 552 | @server() |
561 | 553 | def test_get_return_value_failed_attribute(self): |
562 | 554 | """ |
563 | get()'s return value should indicate any paths which failed to download. | |
555 | get()'s return value should indicate any paths which failed to | |
556 | download. | |
564 | 557 | """ |
565 | 558 | with settings(hide('everything'), warn_only=True): |
566 | 559 | retval = get('/doesnt/exist', self.path()) |
567 | 560 | eq_(['/doesnt/exist'], retval.failed) |
568 | 561 | assert not retval.succeeded |
569 | 562 | |
570 | ||
571 | 563 | @server() |
572 | 564 | def test_get_should_not_use_windows_slashes_in_remote_paths(self): |
573 | 565 | """ |
578 | 570 | sftp = SFTP(env.host_string) |
579 | 571 | eq_(sftp.glob(path), [path]) |
580 | 572 | |
581 | ||
582 | ||
583 | 573 | # |
584 | 574 | # put() |
585 | 575 | # |
586 | ||
587 | 576 | @server() |
588 | 577 | def test_put_file_to_existing_directory(self): |
589 | 578 | """ |
590 | 579 | put() a single file into an existing remote directory |
580 | """ | |
581 | text = "foo!" | |
582 | local = self.mkfile('foo.txt', text) | |
583 | local2 = self.path('foo2.txt') | |
584 | with hide('everything'): | |
585 | put(local, '/') | |
586 | get('/foo.txt', local2) | |
587 | eq_contents(local2, text) | |
588 | ||
589 | @server() | |
590 | def test_put_to_empty_directory_uses_cwd(self): | |
591 | """ | |
592 | put() expands empty remote arg to remote cwd | |
593 | ||
594 | Not a terribly sharp test -- we just get() with a relative path and are | |
595 | testing to make sure they match up -- but should still suffice. | |
591 | 596 | """ |
592 | 597 | text = "foo!" |
593 | 598 | local = self.path('foo.txt') |
595 | 600 | with open(local, 'w') as fd: |
596 | 601 | fd.write(text) |
597 | 602 | with hide('everything'): |
598 | put(local, '/') | |
599 | get('/foo.txt', local2) | |
600 | eq_contents(local2, text) | |
601 | ||
602 | ||
603 | @server() | |
604 | def test_put_to_empty_directory_uses_cwd(self): | |
605 | """ | |
606 | put() expands empty remote arg to remote cwd | |
607 | ||
608 | Not a terribly sharp test -- we just get() with a relative path and are | |
609 | testing to make sure they match up -- but should still suffice. | |
610 | """ | |
611 | text = "foo!" | |
612 | local = self.path('foo.txt') | |
613 | local2 = self.path('foo2.txt') | |
614 | with open(local, 'w') as fd: | |
615 | fd.write(text) | |
616 | with hide('everything'): | |
617 | 603 | put(local) |
618 | 604 | get('foo.txt', local2) |
619 | 605 | eq_contents(local2, text) |
620 | ||
621 | 606 | |
622 | 607 | @server() |
623 | 608 | def test_put_from_empty_directory_uses_cwd(self): |
644 | 629 | # Restore cwd |
645 | 630 | os.chdir(old_cwd) |
646 | 631 | |
647 | ||
648 | 632 | @server() |
649 | 633 | def test_put_should_accept_file_like_objects(self): |
650 | 634 | """ |
662 | 646 | # Sanity test of file pointer |
663 | 647 | eq_(pointer, fake_file.tell()) |
664 | 648 | |
665 | ||
666 | 649 | @server() |
667 | 650 | @raises(ValueError) |
668 | 651 | def test_put_should_raise_exception_for_nonexistent_local_path(self): |
670 | 653 | put(nonexistent_file) should raise a ValueError |
671 | 654 | """ |
672 | 655 | put('thisfiledoesnotexist', '/tmp') |
673 | ||
674 | 656 | |
675 | 657 | @server() |
676 | 658 | def test_put_returns_list_of_remote_paths(self): |
685 | 667 | retval = put(f, p) |
686 | 668 | eq_(retval, [p]) |
687 | 669 | |
688 | ||
689 | 670 | @server() |
690 | 671 | def test_put_returns_list_of_remote_paths_with_stringio(self): |
691 | 672 | """ |
694 | 675 | f = 'uploaded.txt' |
695 | 676 | with hide('everything'): |
696 | 677 | eq_(put(StringIO('contents'), f), [f]) |
697 | ||
698 | 678 | |
699 | 679 | @server() |
700 | 680 | def test_put_return_value_failed_attribute(self): |
707 | 687 | eq_(["<StringIO>"], retval.failed) |
708 | 688 | assert not retval.succeeded |
709 | 689 | |
710 | ||
711 | ||
712 | 690 | # |
713 | 691 | # Interactions with cd() |
714 | 692 | # |
715 | ||
716 | 693 | @server() |
717 | 694 | def test_cd_should_apply_to_put(self): |
718 | 695 | """ |
727 | 704 | put(local, f) |
728 | 705 | assert self.exists_remotely('%s/%s' % (d, f)) |
729 | 706 | |
730 | ||
731 | 707 | @server(files={'/tmp/test.txt': 'test'}) |
732 | 708 | def test_cd_should_apply_to_get(self): |
733 | 709 | """ |
737 | 713 | with nested(cd('/tmp'), hide('everything')): |
738 | 714 | get('test.txt', local) |
739 | 715 | assert os.path.exists(local) |
740 | ||
741 | 716 | |
742 | 717 | @server() |
743 | 718 | def test_cd_should_not_apply_to_absolute_put(self): |
752 | 727 | assert not self.exists_remotely('/tmp/test.txt') |
753 | 728 | assert self.exists_remotely('/test.txt') |
754 | 729 | |
755 | ||
756 | 730 | @server(files={'/test.txt': 'test'}) |
757 | 731 | def test_cd_should_not_apply_to_absolute_get(self): |
758 | 732 | """ |
762 | 736 | with nested(cd('/tmp'), hide('everything')): |
763 | 737 | get('/test.txt', local) |
764 | 738 | assert os.path.exists(local) |
765 | ||
766 | 739 | |
767 | 740 | @server() |
768 | 741 | def test_lcd_should_apply_to_put(self): |
778 | 751 | with nested(lcd(self.path(d)), hide('everything')): |
779 | 752 | put(f, '/') |
780 | 753 | assert self.exists_remotely('/%s' % f) |
781 | ||
782 | 754 | |
783 | 755 | @server() |
784 | 756 | def test_lcd_should_apply_to_get(self): |
0 | import unittest | |
1 | import os | |
2 | ||
3 | import fudge | |
4 | from fudge.inspector import arg | |
5 | ||
6 | from fabric.contrib import project | |
7 | ||
8 | ||
9 | class UploadProjectTestCase(unittest.TestCase): | |
10 | """Test case for :func: `fabric.contrib.project.upload_project`.""" | |
11 | ||
12 | fake_tmp = "testtempfolder" | |
13 | ||
14 | ||
15 | def setUp(self): | |
16 | fudge.clear_expectations() | |
17 | ||
18 | # We need to mock out run, local, and put | |
19 | ||
20 | self.fake_run = fudge.Fake('project.run', callable=True) | |
21 | self.patched_run = fudge.patch_object( | |
22 | project, | |
23 | 'run', | |
24 | self.fake_run | |
25 | ) | |
26 | ||
27 | self.fake_local = fudge.Fake('local', callable=True) | |
28 | self.patched_local = fudge.patch_object( | |
29 | project, | |
30 | 'local', | |
31 | self.fake_local | |
32 | ) | |
33 | ||
34 | self.fake_put = fudge.Fake('put', callable=True) | |
35 | self.patched_put = fudge.patch_object( | |
36 | project, | |
37 | 'put', | |
38 | self.fake_put | |
39 | ) | |
40 | ||
41 | # We don't want to create temp folders | |
42 | self.fake_mkdtemp = fudge.Fake( | |
43 | 'mkdtemp', | |
44 | expect_call=True | |
45 | ).returns(self.fake_tmp) | |
46 | self.patched_mkdtemp = fudge.patch_object( | |
47 | project, | |
48 | 'mkdtemp', | |
49 | self.fake_mkdtemp | |
50 | ) | |
51 | ||
52 | ||
53 | def tearDown(self): | |
54 | self.patched_run.restore() | |
55 | self.patched_local.restore() | |
56 | self.patched_put.restore() | |
57 | ||
58 | fudge.clear_expectations() | |
59 | ||
60 | ||
61 | @fudge.with_fakes | |
62 | def test_temp_folder_is_used(self): | |
63 | """A unique temp folder is used for creating the archive to upload.""" | |
64 | ||
65 | # Exercise | |
66 | project.upload_project() | |
67 | ||
68 | ||
69 | @fudge.with_fakes | |
70 | def test_project_is_archived_locally(self): | |
71 | """The project should be archived locally before being uploaded.""" | |
72 | ||
73 | # local() is called more than once so we need an extra next_call() | |
74 | # otherwise fudge compares the args to the last call to local() | |
75 | self.fake_local.with_args(arg.startswith("tar -czf")).next_call() | |
76 | ||
77 | # Exercise | |
78 | project.upload_project() | |
79 | ||
80 | ||
81 | @fudge.with_fakes | |
82 | def test_current_directory_is_uploaded_by_default(self): | |
83 | """By default the project uploaded is the current working directory.""" | |
84 | ||
85 | cwd_path, cwd_name = os.path.split(os.getcwd()) | |
86 | ||
87 | # local() is called more than once so we need an extra next_call() | |
88 | # otherwise fudge compares the args to the last call to local() | |
89 | self.fake_local.with_args( | |
90 | arg.endswith("-C %s %s" % (cwd_path, cwd_name)) | |
91 | ).next_call() | |
92 | ||
93 | # Exercise | |
94 | project.upload_project() | |
95 | ||
96 | ||
97 | @fudge.with_fakes | |
98 | def test_path_to_local_project_can_be_specified(self): | |
99 | """It should be possible to specify which local folder to upload.""" | |
100 | ||
101 | project_path = "path/to/my/project" | |
102 | ||
103 | # local() is called more than once so we need an extra next_call() | |
104 | # otherwise fudge compares the args to the last call to local() | |
105 | self.fake_local.with_args( | |
106 | arg.endswith("-C %s %s" % os.path.split(project_path)) | |
107 | ).next_call() | |
108 | ||
109 | # Exercise | |
110 | project.upload_project(local_dir=project_path) | |
111 | ||
112 | ||
113 | @fudge.with_fakes | |
114 | def test_path_to_local_project_can_end_in_separator(self): | |
115 | """A local path ending in a separator should be handled correctly.""" | |
116 | ||
117 | project_path = "path/to/my" | |
118 | base = "project" | |
119 | ||
120 | # local() is called more than once so we need an extra next_call() | |
121 | # otherwise fudge compares the args to the last call to local() | |
122 | self.fake_local.with_args( | |
123 | arg.endswith("-C %s %s" % (project_path, base)) | |
124 | ).next_call() | |
125 | ||
126 | # Exercise | |
127 | project.upload_project(local_dir="%s/%s/" % (project_path, base)) | |
128 | ||
129 | ||
130 | @fudge.with_fakes | |
131 | def test_default_remote_folder_is_home(self): | |
132 | """Project is uploaded to remote home by default.""" | |
133 | ||
134 | local_dir = "folder" | |
135 | ||
136 | # local() is called more than once so we need an extra next_call() | |
137 | # otherwise fudge compares the args to the last call to local() | |
138 | self.fake_put.with_args( | |
139 | "%s/folder.tar.gz" % self.fake_tmp, "folder.tar.gz" | |
140 | ).next_call() | |
141 | ||
142 | # Exercise | |
143 | project.upload_project(local_dir=local_dir) | |
144 | ||
    @fudge.with_fakes
    def test_path_to_remote_folder_can_be_specified(self):
        """It should be possible to specify which remote folder to upload to."""
        # (Docstring fixed: this test covers the *remote* target folder,
        # not the local one.)
        local_dir = "folder"
        remote_path = "path/to/remote/folder"

        # put() is faked more than once across these tests, so advance to the
        # fake's next expected call; otherwise fudge would compare these args
        # against the previously recorded call.
        # With remote_dir given, the tarball must be uploaded under it.
        self.fake_put.with_args(
            "%s/folder.tar.gz" % self.fake_tmp, "%s/folder.tar.gz" % remote_path
        ).next_call()

        # Exercise
        project.upload_project(local_dir=local_dir, remote_dir=remote_path)
160 |
7 | 7 | Assigning values to aliases updates aliased keys |
8 | 8 | """ |
9 | 9 | ad = _AliasDict( |
10 | {'bar': False, 'biz': True, 'baz': False}, | |
10 | {'bar': False, 'biz': True, 'baz': False}, | |
11 | 11 | aliases={'foo': ['bar', 'biz', 'baz']} |
12 | 12 | ) |
13 | 13 | # Before |
27 | 27 | Aliases can be nested |
28 | 28 | """ |
29 | 29 | ad = _AliasDict( |
30 | {'bar': False, 'biz': True}, | |
30 | {'bar': False, 'biz': True}, | |
31 | 31 | aliases={'foo': ['bar', 'nested'], 'nested': ['biz']} |
32 | 32 | ) |
33 | 33 | # Before |
45 | 45 | Alias expansion |
46 | 46 | """ |
47 | 47 | ad = _AliasDict( |
48 | {'bar': False, 'biz': True}, | |
48 | {'bar': False, 'biz': True}, | |
49 | 49 | aliases={'foo': ['bar', 'nested'], 'nested': ['biz']} |
50 | 50 | ) |
51 | 51 | eq_(ad.expand_aliases(['foo']), ['bar', 'biz']) |
0 | import unittest | |
1 | from nose.tools import eq_, raises | |
2 | import random | |
3 | ||
4 | from fabric import tasks | |
5 | ||
def test_base_task_provides_undefined_name():
    """A bare Task instance reports the placeholder name "undefined"."""
    eq_("undefined", tasks.Task().name)
9 | ||
@raises(NotImplementedError)
def test_base_task_raises_exception_on_call_to_run():
    """Task.run() is abstract: calling it must raise NotImplementedError."""
    tasks.Task().run()
14 | ||
class TestWrappedCallableTask(unittest.TestCase):
    """Unit tests for tasks.WrappedCallableTask's delegation to its callable."""

    def test_run_is_wrapped_callable(self):
        # The wrapped attribute must be the exact callable handed in.
        def target(): pass
        self.assertEqual(tasks.WrappedCallableTask(target).wrapped, target)

    def test_name_is_the_name_of_the_wrapped_callable(self):
        def target(): pass
        target.__name__ = "random_name_%d" % random.randint(1000, 2000)
        self.assertEqual(tasks.WrappedCallableTask(target).name, target.__name__)

    def test_reads_double_under_doc_from_callable(self):
        def target(): pass
        target.__doc__ = "Some random __doc__: %d" % random.randint(1000, 2000)
        self.assertEqual(tasks.WrappedCallableTask(target).__doc__, target.__doc__)

    def test_dispatches_to_wrapped_callable_on_run(self):
        sentinel = "some random value %d" % random.randint(1000, 2000)
        def target(): return sentinel
        self.assertEqual(sentinel, tasks.WrappedCallableTask(target)())

    def test_passes_all_regular_args_to_run(self):
        # A randomly sized tuple of positional args must round-trip intact.
        def target(*args): return args
        positional = tuple(random.randint(1000, 2000)
                           for _ in range(random.randint(1, 5)))
        self.assertEqual(positional,
                         tasks.WrappedCallableTask(target)(*positional))

    def test_passes_all_keyword_args_to_run(self):
        # A randomly sized dict of keyword args must round-trip intact.
        def target(**kwargs): return kwargs
        names = ("foo", "bar", "baz", "foobar", "barfoo")
        keyword = dict((names[i], random.randint(1000, 2000))
                       for i in range(random.randint(1, 5)))
        self.assertEqual(keyword,
                         tasks.WrappedCallableTask(target)(**keyword))

    def test_calling_the_object_is_the_same_as_run(self):
        marker = random.randint(1000, 2000)
        def target(): return marker
        task = tasks.WrappedCallableTask(target)
        self.assertEqual(task(), task.run())
67 | ||
68 | ||
69 | # Reminder: decorator syntax, e.g.: | |
70 | # @foo | |
71 | # def bar():... | |
72 | # | |
73 | # is semantically equivalent to: | |
74 | # def bar():... | |
75 | # bar = foo(bar) | |
76 | # | |
77 | # this simplifies testing :) | |
78 | ||
def test_decorator_incompatibility_on_task():
    """Other fabric decorators must not choke when handed a Task object."""
    from fabric.decorators import task, hosts, runs_once, roles
    def inner(): return "foo"
    inner = task(inner)

    # We deliberately do NOT rebind ``inner`` to the results below: the point
    # is only that each decorator accepts a Task instance without blowing up.
    hosts('me@localhost')(inner)
    runs_once(inner)
    roles('www')(inner)
88 | ||
89 | def test_decorator_closure_hiding(): | |
90 | from fabric.decorators import task, hosts | |
91 | def foo(): print env.host_string | |
92 | foo = hosts("me@localhost")(foo) | |
93 | foo = task(foo) | |
94 | ||
95 | # this broke in the old way, due to closure stuff hiding in the | |
96 | # function, but task making an object | |
97 | eq_(["me@localhost"], foo.hosts) |
8 | 8 | |
9 | 9 | from fabric.state import output, env |
10 | 10 | from fabric.utils import warn, indent, abort, puts, fastprint |
11 | from fabric import utils # For patching | |
11 | from fabric import utils # For patching | |
12 | 12 | from fabric.context_managers import settings |
13 | 13 | from utils import mock_streams |
14 | 14 | |
71 | 71 | pass |
72 | 72 | result = sys.stderr.getvalue() |
73 | 73 | eq_("\nFatal error: Test\n\nAborting.\n", result) |
74 | ||
74 | ||
75 | 75 | |
76 | 76 | @mock_streams('stdout') |
77 | 77 | def test_puts_with_user_output_on(): |
0 | 0 | from __future__ import with_statement |
1 | 1 | |
2 | from StringIO import StringIO # No need for cStringIO at this time | |
2 | from StringIO import StringIO # No need for cStringIO at this time | |
3 | 3 | from contextlib import contextmanager |
4 | from copy import deepcopy | |
5 | from fudge.patcher import with_patched_object | |
4 | 6 | from functools import wraps, partial |
5 | 7 | from types import StringTypes |
6 | 8 | import copy |
7 | 9 | import getpass |
10 | import os | |
8 | 11 | import re |
12 | import shutil | |
9 | 13 | import sys |
14 | import tempfile | |
10 | 15 | |
11 | 16 | from fudge import Fake, patched_context, clear_expectations |
12 | 17 | |
13 | 18 | from fabric.context_managers import settings |
14 | 19 | from fabric.network import interpret_host_string |
15 | 20 | from fabric.state import env, output |
21 | from fabric.sftp import SFTP | |
16 | 22 | import fabric.network |
17 | 23 | |
18 | 24 | from server import PORT, PASSWORDS, USER, HOST |
20 | 26 | |
21 | 27 | class FabricTest(object): |
22 | 28 | """ |
23 | Nose-oriented test runner class that wipes env after every test. | |
29 | Nose-oriented test runner which wipes state.env and provides file helpers. | |
24 | 30 | """ |
25 | 31 | def setup(self): |
26 | 32 | # Clear Fudge mock expectations |
37 | 43 | # Command response mocking is easier without having to account for |
38 | 44 | # shell wrapping everywhere. |
39 | 45 | env.use_shell = False |
46 | # Temporary local file dir | |
47 | self.tmpdir = tempfile.mkdtemp() | |
40 | 48 | |
    def teardown(self):
        # Restore the env/output state snapshotted during setup(), then
        # remove the per-test temporary directory created there.
        env.update(self.previous_env)
        output.update(self.previous_output)
        shutil.rmtree(self.tmpdir)
53 | ||
    def path(self, *path_parts):
        # Join the given parts onto this test's temporary directory.
        return os.path.join(self.tmpdir, *path_parts)
56 | ||
57 | def mkfile(self, path, contents): | |
58 | dest = self.path(path) | |
59 | with open(dest, 'w') as fd: | |
60 | fd.write(contents) | |
61 | return dest | |
62 | ||
    def exists_remotely(self, path):
        # Delegate to Fabric's SFTP wrapper for the current host_string.
        return SFTP(env.host_string).exists(path)
65 | ||
    def exists_locally(self, path):
        # Local counterpart of exists_remotely(); plain filesystem check.
        return os.path.exists(path)
44 | 68 | |
45 | 69 | |
46 | 70 | class CarbonCopy(StringIO): |
93 | 117 | both = (which == 'both') |
94 | 118 | stdout = (which == 'stdout') or both |
95 | 119 | stderr = (which == 'stderr') or both |
120 | ||
96 | 121 | def mocked_streams_decorator(func): |
97 | 122 | @wraps(func) |
98 | 123 | def inner_wrapper(*args, **kwargs): |
182 | 207 | return "\n".join(prefix + x for x in string.splitlines()) |
183 | 208 | |
184 | 209 | |
185 | def eq_(a, b, msg=None): | |
210 | def eq_(result, expected, msg=None): | |
186 | 211 | """ |
187 | 212 | Shadow of the Nose builtin which presents easier to read multiline output. |
188 | 213 | """ |
214 | params = {'expected': expected, 'result': result} | |
215 | aka = """ | |
216 | ||
217 | --------------------------------- aka ----------------------------------------- | |
218 | ||
219 | Expected: | |
220 | %(expected)r | |
221 | ||
222 | Got: | |
223 | %(result)r | |
224 | """ % params | |
189 | 225 | default_msg = """ |
190 | 226 | Expected: |
191 | %s | |
227 | %(expected)s | |
192 | 228 | |
193 | 229 | Got: |
194 | %s | |
195 | ||
196 | --------------------------------- aka ----------------------------------------- | |
197 | ||
198 | Expected: | |
199 | %r | |
200 | ||
201 | Got: | |
202 | %r | |
203 | """ % (a, b, a, b) | |
204 | assert a == b, msg or default_msg | |
230 | %(result)s | |
231 | """ % params | |
232 | if (repr(result) != str(result)) or (repr(expected) != str(expected)): | |
233 | default_msg += aka | |
234 | assert result == expected, msg or default_msg | |
205 | 235 | |
206 | 236 | |
def eq_contents(path, text):
    """Assert that the file at ``path`` contains exactly ``text``."""
    with open(path) as handle:
        actual = handle.read()
    eq_(text, actual)
240 | ||
241 | ||
def patched_env(updates):
    """
    Execute a function with a patched copy of ``fabric.state.env``.

    ``fabric.state.env`` is patched during the wrapped functions' run, with an
    equivalent copy that has been ``update``d with the given ``updates``.

    E.g. with ``fabric.state.env = {'foo': 'bar', 'biz': 'baz'}``, a function
    decorated with ``@patched_env({'foo': 'notbar'})`` would see
    ``fabric.state.env`` as equal to ``{'biz': 'baz', 'foo': 'notbar'}``.
    """
    from fabric.state import env
    def wrapper(func):
        # BUG FIX: dict.update() returns None, so copying and updating must
        # be separate statements -- ``deepcopy(env).update(updates)`` used to
        # patch env with None.
        new_env = deepcopy(env)
        new_env.update(updates)
        # BUG FIX: with_patched_object(...) returns a decorator; it must be
        # applied to ``func``. Previously the unapplied decorator itself was
        # returned, silently discarding the decorated function.
        return with_patched_object('fabric.state', 'env', new_env)(func)
    return wrapper