add mitogen module, brings ansible 2.8 support
This commit is contained in:
parent
62eb3f28fb
commit
f8cf17f18a
82 changed files with 6 additions and 21264 deletions
3
.gitmodules
vendored
Normal file
3
.gitmodules
vendored
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
[submodule "mitogen"]
|
||||||
|
path = mitogen
|
||||||
|
url = https://github.com/dw/mitogen.git
|
|
@ -1,8 +1,8 @@
|
||||||
[defaults]
|
[defaults]
|
||||||
host_key_checking = False
|
host_key_checking = False
|
||||||
retry_files_enabled = False
|
retry_files_enabled = False
|
||||||
strategy_plugins = ./mitogen-0.2.7/ansible_mitogen/plugins/strategy
|
strategy_plugins = ./mitogen/ansible_mitogen/plugins/strategy
|
||||||
strategy = mitogen_linear
|
#strategy = mitogen_linear
|
||||||
|
|
||||||
[ssh_connection]
|
[ssh_connection]
|
||||||
pipelining = True
|
pipelining = True
|
||||||
|
|
1
mitogen
Submodule
1
mitogen
Submodule
|
@ -0,0 +1 @@
|
||||||
|
Subproject commit 4fedf88d3868830d02435ab2148ed6662bb3a434
|
|
@ -1,26 +0,0 @@
|
||||||
Copyright 2019, David Wilson
|
|
||||||
|
|
||||||
Redistribution and use in source and binary forms, with or without
|
|
||||||
modification, are permitted provided that the following conditions are met:
|
|
||||||
|
|
||||||
1. Redistributions of source code must retain the above copyright notice, this
|
|
||||||
list of conditions and the following disclaimer.
|
|
||||||
|
|
||||||
2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
this list of conditions and the following disclaimer in the documentation
|
|
||||||
and/or other materials provided with the distribution.
|
|
||||||
|
|
||||||
3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
may be used to endorse or promote products derived from this software without
|
|
||||||
specific prior written permission.
|
|
||||||
|
|
||||||
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
|
||||||
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
||||||
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
||||||
SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
||||||
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
||||||
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
|
@ -1 +0,0 @@
|
||||||
include LICENSE
|
|
|
@ -1,23 +0,0 @@
|
||||||
Metadata-Version: 1.1
|
|
||||||
Name: mitogen
|
|
||||||
Version: 0.2.7
|
|
||||||
Summary: Library for writing distributed self-replicating programs.
|
|
||||||
Home-page: https://github.com/dw/mitogen/
|
|
||||||
Author: David Wilson
|
|
||||||
Author-email: UNKNOWN
|
|
||||||
License: New BSD
|
|
||||||
Description: UNKNOWN
|
|
||||||
Platform: UNKNOWN
|
|
||||||
Classifier: Environment :: Console
|
|
||||||
Classifier: Intended Audience :: System Administrators
|
|
||||||
Classifier: License :: OSI Approved :: BSD License
|
|
||||||
Classifier: Operating System :: POSIX
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 2.4
|
|
||||||
Classifier: Programming Language :: Python :: 2.5
|
|
||||||
Classifier: Programming Language :: Python :: 2.6
|
|
||||||
Classifier: Programming Language :: Python :: 2.7
|
|
||||||
Classifier: Programming Language :: Python :: 3.6
|
|
||||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
||||||
Classifier: Topic :: System :: Distributed Computing
|
|
||||||
Classifier: Topic :: System :: Systems Administration
|
|
|
@ -1,13 +0,0 @@
|
||||||
|
|
||||||
# Mitogen
|
|
||||||
|
|
||||||
<!-- [![Build Status](https://travis-ci.org/dw/mitogen.png?branch=master)](https://travis-ci.org/dw/mitogen}) -->
|
|
||||||
<a href="https://mitogen.readthedocs.io/">Please see the documentation</a>.
|
|
||||||
|
|
||||||
![](https://i.imgur.com/eBM6LhJ.gif)
|
|
||||||
|
|
||||||
[![Total alerts](https://img.shields.io/lgtm/alerts/g/dw/mitogen.svg?logo=lgtm&logoWidth=18)](https://lgtm.com/projects/g/dw/mitogen/alerts/)
|
|
||||||
|
|
||||||
[![Build Status](https://travis-ci.org/dw/mitogen.svg?branch=master)](https://travis-ci.org/dw/mitogen)
|
|
||||||
|
|
||||||
[![Pipelines Status](https://dev.azure.com/dw-mitogen/Mitogen/_apis/build/status/dw.mitogen?branchName=master)](https://dev.azure.com/dw-mitogen/Mitogen/_build/latest?definitionId=1?branchName=master)
|
|
|
@ -1,269 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""
|
|
||||||
As Mitogen separates asynchronous IO out to a broker thread, communication
|
|
||||||
necessarily involves context switching and waking that thread. When application
|
|
||||||
threads and the broker share a CPU, this can be almost invisibly fast - around
|
|
||||||
25 microseconds for a full A->B->A round-trip.
|
|
||||||
|
|
||||||
However when threads are scheduled on different CPUs, round-trip delays
|
|
||||||
regularly vary wildly, and easily into milliseconds. Many contributing factors
|
|
||||||
exist, not least scenarios like:
|
|
||||||
|
|
||||||
1. A is preempted immediately after waking B, but before releasing the GIL.
|
|
||||||
2. B wakes from IO wait only to immediately enter futex wait.
|
|
||||||
3. A may wait 10ms or more for another timeslice, as the scheduler on its CPU
|
|
||||||
runs threads unrelated to its transaction (i.e. not B), wake only to release
|
|
||||||
its GIL, before entering IO sleep waiting for a reply from B, which cannot
|
|
||||||
exist yet.
|
|
||||||
4. B wakes, acquires GIL, performs work, and sends reply to A, causing it to
|
|
||||||
wake. B is preempted before releasing GIL.
|
|
||||||
5. A wakes from IO wait only to immediately enter futex wait.
|
|
||||||
6. B may wait 10ms or more for another timeslice, wake only to release its GIL,
|
|
||||||
before sleeping again.
|
|
||||||
7. A wakes, acquires GIL, finally receives reply.
|
|
||||||
|
|
||||||
Per above if we are unlucky, on an even moderately busy machine it is possible
|
|
||||||
to lose milliseconds just in scheduling delay, and the effect is compounded
|
|
||||||
when pairs of threads in process A are communicating with pairs of threads in
|
|
||||||
process B using the same scheme, such as when Ansible WorkerProcess is
|
|
||||||
communicating with ContextService in the connection multiplexer. In the worst
|
|
||||||
case it could involve 4 threads working in lockstep spread across 4 busy CPUs.
|
|
||||||
|
|
||||||
Since multithreading in Python is essentially useless except for waiting on IO
|
|
||||||
due to the presence of the GIL, at least in Ansible there is no good reason for
|
|
||||||
threads in the same process to run on distinct CPUs - they always operate in
|
|
||||||
lockstep due to the GIL, and are thus vulnerable to issues like above.
|
|
||||||
|
|
||||||
Linux lacks any natural API to describe what we want, it only permits
|
|
||||||
individual threads to be constrained to run on specific CPUs, and for that
|
|
||||||
constraint to be inherited by new threads and forks of the constrained thread.
|
|
||||||
|
|
||||||
This module therefore implements a CPU pinning policy for Ansible processes,
|
|
||||||
providing methods that should be called early in any new process, either to
|
|
||||||
rebalance which CPU it is pinned to, or in the case of subprocesses, to remove
|
|
||||||
the pinning entirely. It is likely to require ongoing tweaking, since pinning
|
|
||||||
necessarily involves preventing the scheduler from making load balancing
|
|
||||||
decisions.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import ctypes
|
|
||||||
import mmap
|
|
||||||
import multiprocessing
|
|
||||||
import os
|
|
||||||
import struct
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
try:
|
|
||||||
_libc = ctypes.CDLL(None, use_errno=True)
|
|
||||||
_strerror = _libc.strerror
|
|
||||||
_strerror.restype = ctypes.c_char_p
|
|
||||||
_pthread_mutex_init = _libc.pthread_mutex_init
|
|
||||||
_pthread_mutex_lock = _libc.pthread_mutex_lock
|
|
||||||
_pthread_mutex_unlock = _libc.pthread_mutex_unlock
|
|
||||||
_sched_setaffinity = _libc.sched_setaffinity
|
|
||||||
except (OSError, AttributeError):
|
|
||||||
_libc = None
|
|
||||||
_strerror = None
|
|
||||||
_pthread_mutex_init = None
|
|
||||||
_pthread_mutex_lock = None
|
|
||||||
_pthread_mutex_unlock = None
|
|
||||||
_sched_setaffinity = None
|
|
||||||
|
|
||||||
|
|
||||||
class pthread_mutex_t(ctypes.Structure):
|
|
||||||
"""
|
|
||||||
Wrap pthread_mutex_t to allow storing a lock in shared memory.
|
|
||||||
"""
|
|
||||||
_fields_ = [
|
|
||||||
('data', ctypes.c_uint8 * 512),
|
|
||||||
]
|
|
||||||
|
|
||||||
def init(self):
|
|
||||||
if _pthread_mutex_init(self.data, 0):
|
|
||||||
raise Exception(_strerror(ctypes.get_errno()))
|
|
||||||
|
|
||||||
def acquire(self):
|
|
||||||
if _pthread_mutex_lock(self.data):
|
|
||||||
raise Exception(_strerror(ctypes.get_errno()))
|
|
||||||
|
|
||||||
def release(self):
|
|
||||||
if _pthread_mutex_unlock(self.data):
|
|
||||||
raise Exception(_strerror(ctypes.get_errno()))
|
|
||||||
|
|
||||||
|
|
||||||
class State(ctypes.Structure):
|
|
||||||
"""
|
|
||||||
Contents of shared memory segment. This allows :meth:`Manager.assign` to be
|
|
||||||
called from any child, since affinity assignment must happen from within
|
|
||||||
the context of the new child process.
|
|
||||||
"""
|
|
||||||
_fields_ = [
|
|
||||||
('lock', pthread_mutex_t),
|
|
||||||
('counter', ctypes.c_uint8),
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
class Policy(object):
|
|
||||||
"""
|
|
||||||
Process affinity policy.
|
|
||||||
"""
|
|
||||||
def assign_controller(self):
|
|
||||||
"""
|
|
||||||
Assign the Ansible top-level policy to this process.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def assign_muxprocess(self):
|
|
||||||
"""
|
|
||||||
Assign the MuxProcess policy to this process.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def assign_worker(self):
|
|
||||||
"""
|
|
||||||
Assign the WorkerProcess policy to this process.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def assign_subprocess(self):
|
|
||||||
"""
|
|
||||||
Assign the helper subprocess policy to this process.
|
|
||||||
"""
|
|
||||||
|
|
||||||
class FixedPolicy(Policy):
|
|
||||||
"""
|
|
||||||
:class:`Policy` for machines where the only control method available is
|
|
||||||
fixed CPU placement. The scheme here was tested on an otherwise idle 16
|
|
||||||
thread machine.
|
|
||||||
|
|
||||||
- The connection multiplexer is pinned to CPU 0.
|
|
||||||
- The Ansible top-level (strategy) is pinned to CPU 1.
|
|
||||||
- WorkerProcesses are pinned sequentually to 2..N, wrapping around when no
|
|
||||||
more CPUs exist.
|
|
||||||
- Children such as SSH may be scheduled on any CPU except 0/1.
|
|
||||||
|
|
||||||
If the machine has less than 4 cores available, the top-level and workers
|
|
||||||
are pinned between CPU 2..N, i.e. no CPU is reserved for the top-level
|
|
||||||
process.
|
|
||||||
|
|
||||||
This could at least be improved by having workers pinned to independent
|
|
||||||
cores, before reusing the second hyperthread of an existing core.
|
|
||||||
|
|
||||||
A hook is installed that causes :meth:`reset` to run in the child of any
|
|
||||||
process created with :func:`mitogen.parent.detach_popen`, ensuring
|
|
||||||
CPU-intensive children like SSH are not forced to share the same core as
|
|
||||||
the (otherwise potentially very busy) parent.
|
|
||||||
"""
|
|
||||||
def __init__(self, cpu_count=None):
|
|
||||||
#: For tests.
|
|
||||||
self.cpu_count = cpu_count or multiprocessing.cpu_count()
|
|
||||||
self.mem = mmap.mmap(-1, 4096)
|
|
||||||
self.state = State.from_buffer(self.mem)
|
|
||||||
self.state.lock.init()
|
|
||||||
|
|
||||||
if self.cpu_count < 2:
|
|
||||||
# uniprocessor
|
|
||||||
self._reserve_mux = False
|
|
||||||
self._reserve_controller = False
|
|
||||||
self._reserve_mask = 0
|
|
||||||
self._reserve_shift = 0
|
|
||||||
elif self.cpu_count < 4:
|
|
||||||
# small SMP
|
|
||||||
self._reserve_mux = True
|
|
||||||
self._reserve_controller = False
|
|
||||||
self._reserve_mask = 1
|
|
||||||
self._reserve_shift = 1
|
|
||||||
else:
|
|
||||||
# big SMP
|
|
||||||
self._reserve_mux = True
|
|
||||||
self._reserve_controller = True
|
|
||||||
self._reserve_mask = 3
|
|
||||||
self._reserve_shift = 2
|
|
||||||
|
|
||||||
def _set_affinity(self, mask):
|
|
||||||
mitogen.parent._preexec_hook = self._clear
|
|
||||||
self._set_cpu_mask(mask)
|
|
||||||
|
|
||||||
def _balance(self):
|
|
||||||
self.state.lock.acquire()
|
|
||||||
try:
|
|
||||||
n = self.state.counter
|
|
||||||
self.state.counter += 1
|
|
||||||
finally:
|
|
||||||
self.state.lock.release()
|
|
||||||
|
|
||||||
self._set_cpu(self._reserve_shift + (
|
|
||||||
(n % (self.cpu_count - self._reserve_shift))
|
|
||||||
))
|
|
||||||
|
|
||||||
def _set_cpu(self, cpu):
|
|
||||||
self._set_affinity(1 << cpu)
|
|
||||||
|
|
||||||
def _clear(self):
|
|
||||||
all_cpus = (1 << self.cpu_count) - 1
|
|
||||||
self._set_affinity(all_cpus & ~self._reserve_mask)
|
|
||||||
|
|
||||||
def assign_controller(self):
|
|
||||||
if self._reserve_controller:
|
|
||||||
self._set_cpu(1)
|
|
||||||
else:
|
|
||||||
self._balance()
|
|
||||||
|
|
||||||
def assign_muxprocess(self):
|
|
||||||
self._set_cpu(0)
|
|
||||||
|
|
||||||
def assign_worker(self):
|
|
||||||
self._balance()
|
|
||||||
|
|
||||||
def assign_subprocess(self):
|
|
||||||
self._clear()
|
|
||||||
|
|
||||||
|
|
||||||
class LinuxPolicy(FixedPolicy):
|
|
||||||
def _mask_to_bytes(self, mask):
|
|
||||||
"""
|
|
||||||
Convert the (type long) mask to a cpu_set_t.
|
|
||||||
"""
|
|
||||||
chunks = []
|
|
||||||
shiftmask = (2 ** 64) - 1
|
|
||||||
for x in range(16):
|
|
||||||
chunks.append(struct.pack('<Q', mask & shiftmask))
|
|
||||||
mask >>= 64
|
|
||||||
return mitogen.core.b('').join(chunks)
|
|
||||||
|
|
||||||
def _set_cpu_mask(self, mask):
|
|
||||||
s = self._mask_to_bytes(mask)
|
|
||||||
_sched_setaffinity(os.getpid(), len(s), s)
|
|
||||||
|
|
||||||
|
|
||||||
if _sched_setaffinity is not None:
|
|
||||||
policy = LinuxPolicy()
|
|
||||||
else:
|
|
||||||
policy = Policy()
|
|
|
@ -1,318 +0,0 @@
|
||||||
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
|
|
||||||
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
|
|
||||||
interchange format.
|
|
||||||
|
|
||||||
:mod:`simplejson` exposes an API familiar to users of the standard library
|
|
||||||
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
|
|
||||||
version of the :mod:`json` library contained in Python 2.6, but maintains
|
|
||||||
compatibility with Python 2.4 and Python 2.5 and (currently) has
|
|
||||||
significant performance advantages, even without using the optional C
|
|
||||||
extension for speedups.
|
|
||||||
|
|
||||||
Encoding basic Python object hierarchies::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
|
|
||||||
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
|
|
||||||
>>> print json.dumps("\"foo\bar")
|
|
||||||
"\"foo\bar"
|
|
||||||
>>> print json.dumps(u'\u1234')
|
|
||||||
"\u1234"
|
|
||||||
>>> print json.dumps('\\')
|
|
||||||
"\\"
|
|
||||||
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
|
|
||||||
{"a": 0, "b": 0, "c": 0}
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO()
|
|
||||||
>>> json.dump(['streaming API'], io)
|
|
||||||
>>> io.getvalue()
|
|
||||||
'["streaming API"]'
|
|
||||||
|
|
||||||
Compact encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
|
|
||||||
'[1,2,3,{"4":5,"6":7}]'
|
|
||||||
|
|
||||||
Pretty printing::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
|
|
||||||
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
|
|
||||||
{
|
|
||||||
"4": 5,
|
|
||||||
"6": 7
|
|
||||||
}
|
|
||||||
|
|
||||||
Decoding JSON::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
|
|
||||||
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
|
|
||||||
True
|
|
||||||
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
|
|
||||||
True
|
|
||||||
>>> from StringIO import StringIO
|
|
||||||
>>> io = StringIO('["streaming API"]')
|
|
||||||
>>> json.load(io)[0] == 'streaming API'
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object decoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def as_complex(dct):
|
|
||||||
... if '__complex__' in dct:
|
|
||||||
... return complex(dct['real'], dct['imag'])
|
|
||||||
... return dct
|
|
||||||
...
|
|
||||||
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
|
|
||||||
... object_hook=as_complex)
|
|
||||||
(1+2j)
|
|
||||||
>>> import decimal
|
|
||||||
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
|
|
||||||
True
|
|
||||||
|
|
||||||
Specializing JSON object encoding::
|
|
||||||
|
|
||||||
>>> import simplejson as json
|
|
||||||
>>> def encode_complex(obj):
|
|
||||||
... if isinstance(obj, complex):
|
|
||||||
... return [obj.real, obj.imag]
|
|
||||||
... raise TypeError(repr(o) + " is not JSON serializable")
|
|
||||||
...
|
|
||||||
>>> json.dumps(2 + 1j, default=encode_complex)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
|
|
||||||
'[2.0, 1.0]'
|
|
||||||
|
|
||||||
|
|
||||||
Using simplejson.tool from the shell to validate and pretty-print::
|
|
||||||
|
|
||||||
$ echo '{"json":"obj"}' | python -m simplejson.tool
|
|
||||||
{
|
|
||||||
"json": "obj"
|
|
||||||
}
|
|
||||||
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
|
|
||||||
Expecting property name: line 1 column 2 (char 2)
|
|
||||||
"""
|
|
||||||
__version__ = '2.0.9'
|
|
||||||
__all__ = [
|
|
||||||
'dump', 'dumps', 'load', 'loads',
|
|
||||||
'JSONDecoder', 'JSONEncoder',
|
|
||||||
]
|
|
||||||
|
|
||||||
__author__ = 'Bob Ippolito <bob@redivi.com>'
|
|
||||||
|
|
||||||
from decoder import JSONDecoder
|
|
||||||
from encoder import JSONEncoder
|
|
||||||
|
|
||||||
_default_encoder = JSONEncoder(
|
|
||||||
skipkeys=False,
|
|
||||||
ensure_ascii=True,
|
|
||||||
check_circular=True,
|
|
||||||
allow_nan=True,
|
|
||||||
indent=None,
|
|
||||||
separators=None,
|
|
||||||
encoding='utf-8',
|
|
||||||
default=None,
|
|
||||||
)
|
|
||||||
|
|
||||||
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
|
|
||||||
allow_nan=True, cls=None, indent=None, separators=None,
|
|
||||||
encoding='utf-8', default=None, **kw):
|
|
||||||
"""Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
|
|
||||||
``.write()``-supporting file-like object).
|
|
||||||
|
|
||||||
If ``skipkeys`` is true then ``dict`` keys that are not basic types
|
|
||||||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
|
||||||
will be skipped instead of raising a ``TypeError``.
|
|
||||||
|
|
||||||
If ``ensure_ascii`` is false, then the some chunks written to ``fp``
|
|
||||||
may be ``unicode`` instances, subject to normal Python ``str`` to
|
|
||||||
``unicode`` coercion rules. Unless ``fp.write()`` explicitly
|
|
||||||
understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
|
|
||||||
to cause an error.
|
|
||||||
|
|
||||||
If ``check_circular`` is false, then the circular reference check
|
|
||||||
for container types will be skipped and a circular reference will
|
|
||||||
result in an ``OverflowError`` (or worse).
|
|
||||||
|
|
||||||
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
|
||||||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
|
|
||||||
in strict compliance of the JSON specification, instead of using the
|
|
||||||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
|
||||||
|
|
||||||
If ``indent`` is a non-negative integer, then JSON array elements and object
|
|
||||||
members will be pretty-printed with that indent level. An indent level
|
|
||||||
of 0 will only insert newlines. ``None`` is the most compact representation.
|
|
||||||
|
|
||||||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
|
|
||||||
then it will be used instead of the default ``(', ', ': ')`` separators.
|
|
||||||
``(',', ':')`` is the most compact JSON representation.
|
|
||||||
|
|
||||||
``encoding`` is the character encoding for str instances, default is UTF-8.
|
|
||||||
|
|
||||||
``default(obj)`` is a function that should return a serializable version
|
|
||||||
of obj or raise TypeError. The default simply raises TypeError.
|
|
||||||
|
|
||||||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
|
||||||
``.default()`` method to serialize additional types), specify it with
|
|
||||||
the ``cls`` kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# cached encoder
|
|
||||||
if (not skipkeys and ensure_ascii and
|
|
||||||
check_circular and allow_nan and
|
|
||||||
cls is None and indent is None and separators is None and
|
|
||||||
encoding == 'utf-8' and default is None and not kw):
|
|
||||||
iterable = _default_encoder.iterencode(obj)
|
|
||||||
else:
|
|
||||||
if cls is None:
|
|
||||||
cls = JSONEncoder
|
|
||||||
iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
|
||||||
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
|
||||||
separators=separators, encoding=encoding,
|
|
||||||
default=default, **kw).iterencode(obj)
|
|
||||||
# could accelerate with writelines in some versions of Python, at
|
|
||||||
# a debuggability cost
|
|
||||||
for chunk in iterable:
|
|
||||||
fp.write(chunk)
|
|
||||||
|
|
||||||
|
|
||||||
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
|
|
||||||
allow_nan=True, cls=None, indent=None, separators=None,
|
|
||||||
encoding='utf-8', default=None, **kw):
|
|
||||||
"""Serialize ``obj`` to a JSON formatted ``str``.
|
|
||||||
|
|
||||||
If ``skipkeys`` is false then ``dict`` keys that are not basic types
|
|
||||||
(``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
|
|
||||||
will be skipped instead of raising a ``TypeError``.
|
|
||||||
|
|
||||||
If ``ensure_ascii`` is false, then the return value will be a
|
|
||||||
``unicode`` instance subject to normal Python ``str`` to ``unicode``
|
|
||||||
coercion rules instead of being escaped to an ASCII ``str``.
|
|
||||||
|
|
||||||
If ``check_circular`` is false, then the circular reference check
|
|
||||||
for container types will be skipped and a circular reference will
|
|
||||||
result in an ``OverflowError`` (or worse).
|
|
||||||
|
|
||||||
If ``allow_nan`` is false, then it will be a ``ValueError`` to
|
|
||||||
serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
|
|
||||||
strict compliance of the JSON specification, instead of using the
|
|
||||||
JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
|
|
||||||
|
|
||||||
If ``indent`` is a non-negative integer, then JSON array elements and
|
|
||||||
object members will be pretty-printed with that indent level. An indent
|
|
||||||
level of 0 will only insert newlines. ``None`` is the most compact
|
|
||||||
representation.
|
|
||||||
|
|
||||||
If ``separators`` is an ``(item_separator, dict_separator)`` tuple
|
|
||||||
then it will be used instead of the default ``(', ', ': ')`` separators.
|
|
||||||
``(',', ':')`` is the most compact JSON representation.
|
|
||||||
|
|
||||||
``encoding`` is the character encoding for str instances, default is UTF-8.
|
|
||||||
|
|
||||||
``default(obj)`` is a function that should return a serializable version
|
|
||||||
of obj or raise TypeError. The default simply raises TypeError.
|
|
||||||
|
|
||||||
To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
|
|
||||||
``.default()`` method to serialize additional types), specify it with
|
|
||||||
the ``cls`` kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
# cached encoder
|
|
||||||
if (not skipkeys and ensure_ascii and
|
|
||||||
check_circular and allow_nan and
|
|
||||||
cls is None and indent is None and separators is None and
|
|
||||||
encoding == 'utf-8' and default is None and not kw):
|
|
||||||
return _default_encoder.encode(obj)
|
|
||||||
if cls is None:
|
|
||||||
cls = JSONEncoder
|
|
||||||
return cls(
|
|
||||||
skipkeys=skipkeys, ensure_ascii=ensure_ascii,
|
|
||||||
check_circular=check_circular, allow_nan=allow_nan, indent=indent,
|
|
||||||
separators=separators, encoding=encoding, default=default,
|
|
||||||
**kw).encode(obj)
|
|
||||||
|
|
||||||
|
|
||||||
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
|
|
||||||
|
|
||||||
|
|
||||||
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
|
|
||||||
parse_int=None, parse_constant=None, **kw):
|
|
||||||
"""Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
|
|
||||||
a JSON document) to a Python object.
|
|
||||||
|
|
||||||
If the contents of ``fp`` is encoded with an ASCII based encoding other
|
|
||||||
than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
|
|
||||||
be specified. Encodings that are not ASCII based (such as UCS-2) are
|
|
||||||
not allowed, and should be wrapped with
|
|
||||||
``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
|
|
||||||
object and passed to ``loads()``
|
|
||||||
|
|
||||||
``object_hook`` is an optional function that will be called with the
|
|
||||||
result of any object literal decode (a ``dict``). The return value of
|
|
||||||
``object_hook`` will be used instead of the ``dict``. This feature
|
|
||||||
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
|
|
||||||
|
|
||||||
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
|
|
||||||
kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
return loads(fp.read(),
|
|
||||||
encoding=encoding, cls=cls, object_hook=object_hook,
|
|
||||||
parse_float=parse_float, parse_int=parse_int,
|
|
||||||
parse_constant=parse_constant, **kw)
|
|
||||||
|
|
||||||
|
|
||||||
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
|
|
||||||
parse_int=None, parse_constant=None, **kw):
|
|
||||||
"""Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
|
|
||||||
document) to a Python object.
|
|
||||||
|
|
||||||
If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
|
|
||||||
other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
|
|
||||||
must be specified. Encodings that are not ASCII based (such as UCS-2)
|
|
||||||
are not allowed and should be decoded to ``unicode`` first.
|
|
||||||
|
|
||||||
``object_hook`` is an optional function that will be called with the
|
|
||||||
result of any object literal decode (a ``dict``). The return value of
|
|
||||||
``object_hook`` will be used instead of the ``dict``. This feature
|
|
||||||
can be used to implement custom decoders (e.g. JSON-RPC class hinting).
|
|
||||||
|
|
||||||
``parse_float``, if specified, will be called with the string
|
|
||||||
of every JSON float to be decoded. By default this is equivalent to
|
|
||||||
float(num_str). This can be used to use another datatype or parser
|
|
||||||
for JSON floats (e.g. decimal.Decimal).
|
|
||||||
|
|
||||||
``parse_int``, if specified, will be called with the string
|
|
||||||
of every JSON int to be decoded. By default this is equivalent to
|
|
||||||
int(num_str). This can be used to use another datatype or parser
|
|
||||||
for JSON integers (e.g. float).
|
|
||||||
|
|
||||||
``parse_constant``, if specified, will be called with one of the
|
|
||||||
following strings: -Infinity, Infinity, NaN, null, true, false.
|
|
||||||
This can be used to raise an exception if invalid JSON numbers
|
|
||||||
are encountered.
|
|
||||||
|
|
||||||
To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
|
|
||||||
kwarg.
|
|
||||||
|
|
||||||
"""
|
|
||||||
if (cls is None and encoding is None and object_hook is None and
|
|
||||||
parse_int is None and parse_float is None and
|
|
||||||
parse_constant is None and not kw):
|
|
||||||
return _default_decoder.decode(s)
|
|
||||||
if cls is None:
|
|
||||||
cls = JSONDecoder
|
|
||||||
if object_hook is not None:
|
|
||||||
kw['object_hook'] = object_hook
|
|
||||||
if parse_float is not None:
|
|
||||||
kw['parse_float'] = parse_float
|
|
||||||
if parse_int is not None:
|
|
||||||
kw['parse_int'] = parse_int
|
|
||||||
if parse_constant is not None:
|
|
||||||
kw['parse_constant'] = parse_constant
|
|
||||||
return cls(encoding=encoding, **kw).decode(s)
|
|
|
@ -1,354 +0,0 @@
|
||||||
"""Implementation of JSONDecoder
|
|
||||||
"""
|
|
||||||
import re
|
|
||||||
import sys
|
|
||||||
import struct
|
|
||||||
|
|
||||||
from simplejson.scanner import make_scanner
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import scanstring as c_scanstring
|
|
||||||
except ImportError:
|
|
||||||
c_scanstring = None
|
|
||||||
|
|
||||||
__all__ = ['JSONDecoder']
|
|
||||||
|
|
||||||
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
|
|
||||||
|
|
||||||
def _floatconstants():
|
|
||||||
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
|
|
||||||
if sys.byteorder != 'big':
|
|
||||||
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
|
|
||||||
nan, inf = struct.unpack('dd', _BYTES)
|
|
||||||
return nan, inf, -inf
|
|
||||||
|
|
||||||
NaN, PosInf, NegInf = _floatconstants()
|
|
||||||
|
|
||||||
|
|
||||||
def linecol(doc, pos):
    """Map character offset *pos* within *doc* to a ``(line, column)`` pair.

    Lines are numbered from 1.  On the first line the column equals the
    raw offset (0-based); after a newline it is the 1-based distance from
    that newline — this asymmetry matches the original simplejson
    behaviour that error messages rely on.
    """
    line = 1 + doc.count('\n', 0, pos)
    if line > 1:
        col = pos - doc.rindex('\n', 0, pos)
    else:
        col = pos
    return line, col
|
|
||||||
|
|
||||||
|
|
||||||
def errmsg(msg, doc, pos, end=None):
    """Format a parse-error message with line/column context.

    With only *pos* the message describes a single location; when *end*
    is given it describes a span.  %-formatting (not ``str.format``) is
    kept deliberately: this function is also invoked from the C
    ``_speedups`` extension.
    """
    start_line, start_col = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (
            msg, start_line, start_col, pos)
    end_line, end_col = linecol(doc, end)
    return '%s: line %d column %d - line %d column %d (char %d - %d)' % (
        msg, start_line, start_col, end_line, end_col, pos, end)
|
|
||||||
|
|
||||||
|
|
||||||
# Literal tokens outside the strict JSON spec, mapped to their float values;
# used as the default parse_constant via _CONSTANTS.__getitem__.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}

# Matches a run of plain characters followed by the terminator that ends it:
# a closing quote, a backslash, or a literal control character.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character escape sequences (\uXXXX is handled separately).
# NOTE: values are Python 2 ``u''`` literals; this module is Python 2 only.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}

# Encoding assumed for byte strings when the caller does not supply one.
DEFAULT_ENCODING = "utf-8"
|
|
||||||
|
|
||||||
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote.

    NOTE(review): Python 2 only — relies on the ``unicode`` type and
    ``unichr``.  ``_b`` and ``_m`` are default-argument bindings of module
    globals, a CPython speed idiom (LOAD_FAST instead of LOAD_GLOBAL).
    """
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    # Index of the opening quote, kept only for error messages.
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters.
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows.
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                raise ValueError(errmsg(msg, s, end))
            else:
                # Non-strict mode: pass the raw control character through.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        # If not a unicode escape sequence, must be in the lookup table.
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise ValueError(errmsg(msg, s, end))
            end += 1
        else:
            # \uXXXX escape sequence: four hex digits after the 'u'.
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise ValueError(errmsg(msg, s, end))
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems: a high surrogate
            # must be followed by \uXXXX with the low surrogate.
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise ValueError(errmsg(msg, s, end))
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise ValueError(errmsg(msg, s, end))
                uni2 = int(esc2, 16)
                # Combine the pair into a single code point above U+FFFF.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character.
        _append(char)
    return u''.join(chunks), end
|
|
||||||
|
|
||||||
|
|
||||||
# Use the C speedup if the extension module imported successfully;
# c_scanstring is None when _speedups is unavailable.
scanstring = c_scanstring or py_scanstring

# Matcher for (possibly empty) runs of JSON whitespace, plus the raw
# character set for cheap single-character membership tests.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
|
|
||||||
|
|
||||||
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON object starting just after its ``{``.

    The first parameter is a ``(document, index)`` tuple unpacked via
    Python 2 tuple-parameter syntax (removed in Python 3; this module is
    Python 2 only).  Returns ``(dict_or_object_hook_result, end_index)``
    where *end_index* is the offset just past the closing ``}``.
    Raises ValueError with line/column context on malformed input.
    """
    pairs = {}
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty.
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"' (start of the first key).
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object.
        if nextchar == '}':
            return pairs, end + 1
        elif nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)

        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise ValueError(errmsg("Expecting : delimiter", s, end))

        end += 1

        # Skip whitespace after ':' — fast path for zero or one space,
        # falling back to the regex for longer runs.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value

        # Find the character after the value: must be '}' or ','.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1

        if nextchar == '}':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))

        # Skip whitespace after ',' before the next key's opening quote.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''

        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))

    # Give the caller's hook a chance to transform the finished dict
    # (e.g. JSON-RPC class hinting).
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
|
|
||||||
|
|
||||||
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array starting just after its ``[``.

    The first parameter is a ``(document, index)`` tuple unpacked via
    Python 2 tuple-parameter syntax.  Returns ``(list, end_index)`` with
    *end_index* just past the closing ``]``; raises ValueError on
    malformed input.
    """
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array.
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        _append(value)
        # Character after the value must be ']' or ','.
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))

        # Skip whitespace after ',' — fast path for zero or one space.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass

    return values, end
|
|
||||||
|
|
||||||
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.

    """

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True):
        """``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default). It has no
        effect when decoding ``unicode`` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``. This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded. By default this is equivalent to
        float(num_str). This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded. By default this is equivalent to
        int(num_str). This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.

        ``strict``, when True (the default), rejects literal control
        characters inside strings.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        # ``or`` fallbacks: any falsy argument selects the default.
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # The scanner reads these attributes; make_scanner(self) closes
        # over this fully-configured instance.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        self.scan_once = make_scanner(self)

    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)

        Raises ValueError if anything other than whitespace follows the
        document.
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.

        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            # The scanner signals "no value here" via StopIteration.
            raise ValueError("No JSON object could be decoded")
        return obj, end
|
|
|
@ -1,440 +0,0 @@
|
||||||
"""Implementation of JSONEncoder
|
|
||||||
"""
|
|
||||||
import re
|
|
||||||
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
|
|
||||||
except ImportError:
|
|
||||||
c_encode_basestring_ascii = None
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import make_encoder as c_make_encoder
|
|
||||||
except ImportError:
|
|
||||||
c_make_encoder = None
|
|
||||||
|
|
||||||
# Characters that must be escaped inside a JSON string (non-ASCII allowed).
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# Characters that must be escaped when producing ASCII-only output.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects any non-ASCII byte, i.e. a str that may need UTF-8 decoding.
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Shorthand escapes; every remaining control character falls back to \uXXXX.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))

# float('inf') is guaranteed by the language (Python 2.6+); the previous
# float('1e66666') overflow trick was self-described as "probably not
# guaranteed" to produce an infinity on all machines.
INFINITY = float('inf')
# How floats are rendered; repr gives round-trippable output.
FLOAT_REPR = repr
|
|
||||||
|
|
||||||
def encode_basestring(s):
    """Quote *s* as a JSON string literal.

    Only the mandatory escapes (quotes, backslash, control characters)
    are applied; non-ASCII characters pass through unescaped.
    """
    return '"' + ESCAPE.sub(lambda m: ESCAPE_DCT[m.group(0)], s) + '"'
|
|
||||||
|
|
||||||
|
|
||||||
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string.

    Non-ASCII characters are emitted as ``\\uXXXX`` escapes (surrogate
    pairs for code points above U+FFFF).

    NOTE(review): Python 2 only — ``str.decode`` does not exist on
    Python 3 byte strings.
    """
    # A byte string containing non-ASCII bytes is assumed to be UTF-8.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                return '\\u%04x' % (n,)
            else:
                # Encode as a UTF-16 surrogate pair.
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'


# Use the C speedup if the extension module imported successfully.
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
|
|
||||||
|
|
||||||
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).

    """
    # Class-level defaults; overridden per-instance when ``separators``
    # is passed to the constructor.
    item_separator = ', '
    key_separator = ': '

    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None. If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such. This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level. An indent level of 0 will only insert newlines.
        None is the most compact representation.

        If specified, separators should be a (item_separator, key_separator)
        tuple. The default is (', ', ': '). To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.

        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            # Shadow the ``default`` method on this instance.
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)

        """
        raise TypeError(repr(o) + " is not JSON serializable")

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'

        """
        # This is for extremely simple cases and benchmarks: a bare
        # string can be encoded without the full machinery.
        # NOTE(review): Python 2 only — uses ``basestring``.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed. The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        # ``markers`` tracks ids of containers currently being encoded;
        # None disables circular-reference detection entirely.
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder to decode byte strings first.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)

        def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
            # Check for specials. Note that this type of test is processor- and/or
            # platform-specific, so do tests which don't depend on the internals.
            if o != o:
                # NaN is the only float that is not equal to itself.
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)

            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))

            return text

        # Use the C encoder only for the plain one-shot compact case;
        # indent/sort_keys require the Python implementation.
        if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
|
|
||||||
|
|
||||||
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        ## (default-argument bindings make these LOAD_FAST in the loops).
        ## NOTE(review): Python 2 only — binds ``basestring`` and ``long``.
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
        ):
    """Build and return the recursive generator ``_iterencode(o, indent_level)``
    that yields string chunks of the JSON encoding of ``o``."""

    def _iterencode_list(lst, _current_indent_level):
        # Yield the chunks for a JSON array.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Circular-reference detection by container identity.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item, ``buf`` carries only the separator.
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _iterencode_dict(dct, _current_indent_level):
        # Yield the chunks for a JSON object.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them. Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Top-level dispatch on the type of ``o``.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            # Unknown type: let the user's ``default`` convert it, then
            # encode whatever it returned.
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]

    return _iterencode
|
|
|
@ -1,65 +0,0 @@
|
||||||
"""JSON token scanner
|
|
||||||
"""
|
|
||||||
import re
|
|
||||||
try:
|
|
||||||
from simplejson._speedups import make_scanner as c_make_scanner
|
|
||||||
except ImportError:
|
|
||||||
c_make_scanner = None
|
|
||||||
|
|
||||||
# Public API of this module.
__all__ = ['make_scanner']

# JSON number grammar: optional sign, integer part with no leading zeros,
# optional fraction, optional exponent.  Groups: (integer, frac, exp).
NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))
|
|
||||||
|
|
||||||
def py_make_scanner(context):
    """Build a ``_scan_once(string, idx)`` scanner closed over *context*
    (a configured JSONDecoder).

    The scanner returns ``(value, end_index)`` for the JSON value starting
    at ``idx``, or raises StopIteration when no value starts there.
    NOTE(review): StopIteration is used as a control-flow signal here;
    callers (raw_decode, JSONObject, JSONArray) translate it to ValueError.
    """
    # Bind every context attribute to a local once, so the recursive
    # scanner pays no attribute-lookup cost per token.
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook

    def _scan_once(string, idx):
        try:
            nextchar = string[idx]
        except IndexError:
            # Ran off the end of the document: no value here.
            raise StopIteration

        # Dispatch on the first character of the token.
        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5

        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            # A fraction or exponent makes it a float; otherwise an int.
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            # Non-spec constants, handled via the configurable hook.
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration

    return _scan_once

# Use the C speedup if the extension module imported successfully.
make_scanner = c_make_scanner or py_make_scanner
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,48 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Stable names for PluginLoader instances across Ansible versions.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
|
|
||||||
try:
|
|
||||||
from ansible.plugins.loader import action_loader
|
|
||||||
from ansible.plugins.loader import connection_loader
|
|
||||||
from ansible.plugins.loader import module_loader
|
|
||||||
from ansible.plugins.loader import module_utils_loader
|
|
||||||
from ansible.plugins.loader import shell_loader
|
|
||||||
from ansible.plugins.loader import strategy_loader
|
|
||||||
except ImportError: # Ansible <2.4
|
|
||||||
from ansible.plugins import action_loader
|
|
||||||
from ansible.plugins import connection_loader
|
|
||||||
from ansible.plugins import module_loader
|
|
||||||
from ansible.plugins import module_utils_loader
|
|
||||||
from ansible.plugins import shell_loader
|
|
||||||
from ansible.plugins import strategy_loader
|
|
|
@ -1,127 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.utils
|
|
||||||
|
|
||||||
try:
|
|
||||||
from __main__ import display
|
|
||||||
except ImportError:
|
|
||||||
from ansible.utils.display import Display
|
|
||||||
display = Display()
|
|
||||||
|
|
||||||
|
|
||||||
#: The process name set via :func:`set_process_name`.
|
|
||||||
_process_name = None
|
|
||||||
|
|
||||||
#: The PID of the process that last called :func:`set_process_name`, so its
|
|
||||||
#: value can be ignored in unknown fork children.
|
|
||||||
_process_pid = None
|
|
||||||
|
|
||||||
|
|
||||||
def set_process_name(name):
|
|
||||||
"""
|
|
||||||
Set a name to adorn log messages with.
|
|
||||||
"""
|
|
||||||
global _process_name
|
|
||||||
_process_name = name
|
|
||||||
|
|
||||||
global _process_pid
|
|
||||||
_process_pid = os.getpid()
|
|
||||||
|
|
||||||
|
|
||||||
class Handler(logging.Handler):
|
|
||||||
"""
|
|
||||||
Use Mitogen's log format, but send the result to a Display method.
|
|
||||||
"""
|
|
||||||
def __init__(self, normal_method):
|
|
||||||
logging.Handler.__init__(self)
|
|
||||||
self.formatter = mitogen.utils.log_get_formatter()
|
|
||||||
self.normal_method = normal_method
|
|
||||||
|
|
||||||
#: Set of target loggers that produce warnings and errors that spam the
|
|
||||||
#: console needlessly. Their log level is forced to INFO. A better strategy
|
|
||||||
#: may simply be to bury all target logs in DEBUG output, but not by
|
|
||||||
#: overriding their log level as done here.
|
|
||||||
NOISY_LOGGERS = frozenset([
|
|
||||||
'dnf', # issue #272; warns when a package is already installed.
|
|
||||||
'boto', # issue #541; normal boto retry logic can cause ERROR logs.
|
|
||||||
])
|
|
||||||
|
|
||||||
def emit(self, record):
|
|
||||||
mitogen_name = getattr(record, 'mitogen_name', '')
|
|
||||||
if mitogen_name == 'stderr':
|
|
||||||
record.levelno = logging.ERROR
|
|
||||||
if mitogen_name in self.NOISY_LOGGERS and record.levelno >= logging.WARNING:
|
|
||||||
record.levelno = logging.DEBUG
|
|
||||||
|
|
||||||
if _process_pid == os.getpid():
|
|
||||||
process_name = _process_name
|
|
||||||
else:
|
|
||||||
process_name = '?'
|
|
||||||
|
|
||||||
s = '[%-4s %d] %s' % (process_name, os.getpid(), self.format(record))
|
|
||||||
if record.levelno >= logging.ERROR:
|
|
||||||
display.error(s, wrap_text=False)
|
|
||||||
elif record.levelno >= logging.WARNING:
|
|
||||||
display.warning(s, formatted=True)
|
|
||||||
else:
|
|
||||||
self.normal_method(s)
|
|
||||||
|
|
||||||
|
|
||||||
def setup():
|
|
||||||
"""
|
|
||||||
Install handlers for Mitogen loggers to redirect them into the Ansible
|
|
||||||
display framework. Ansible installs its own logging framework handlers when
|
|
||||||
C.DEFAULT_LOG_PATH is set, therefore disable propagation for our handlers.
|
|
||||||
"""
|
|
||||||
l_mitogen = logging.getLogger('mitogen')
|
|
||||||
l_mitogen_io = logging.getLogger('mitogen.io')
|
|
||||||
l_ansible_mitogen = logging.getLogger('ansible_mitogen')
|
|
||||||
|
|
||||||
for logger in l_mitogen, l_mitogen_io, l_ansible_mitogen:
|
|
||||||
logger.handlers = [Handler(display.vvv)]
|
|
||||||
logger.propagate = False
|
|
||||||
|
|
||||||
if display.verbosity > 2:
|
|
||||||
l_ansible_mitogen.setLevel(logging.DEBUG)
|
|
||||||
l_mitogen.setLevel(logging.DEBUG)
|
|
||||||
else:
|
|
||||||
# Mitogen copies the active log level into new children, allowing them
|
|
||||||
# to filter tiny messages before they hit the network, and therefore
|
|
||||||
# before they wake the IO loop. Explicitly setting INFO saves ~4%
|
|
||||||
# running against just the local machine.
|
|
||||||
l_mitogen.setLevel(logging.ERROR)
|
|
||||||
l_ansible_mitogen.setLevel(logging.ERROR)
|
|
||||||
|
|
||||||
if display.verbosity > 3:
|
|
||||||
l_mitogen_io.setLevel(logging.DEBUG)
|
|
|
@ -1,432 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import pwd
|
|
||||||
import random
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
try:
|
|
||||||
from shlex import quote as shlex_quote
|
|
||||||
except ImportError:
|
|
||||||
from pipes import quote as shlex_quote
|
|
||||||
|
|
||||||
from ansible.module_utils._text import to_bytes
|
|
||||||
from ansible.parsing.utils.jsonify import jsonify
|
|
||||||
|
|
||||||
import ansible
|
|
||||||
import ansible.constants
|
|
||||||
import ansible.plugins
|
|
||||||
import ansible.plugins.action
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.select
|
|
||||||
import mitogen.utils
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
import ansible_mitogen.planner
|
|
||||||
import ansible_mitogen.target
|
|
||||||
from ansible.module_utils._text import to_text
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class ActionModuleMixin(ansible.plugins.action.ActionBase):
|
|
||||||
"""
|
|
||||||
The Mitogen-patched PluginLoader dynamically mixes this into every action
|
|
||||||
class that Ansible attempts to load. It exists to override all the
|
|
||||||
assumptions built into the base action class that should really belong in
|
|
||||||
some middle layer, or at least in the connection layer.
|
|
||||||
|
|
||||||
Functionality is defined here for:
|
|
||||||
|
|
||||||
* Capturing the final set of task variables and giving Connection a chance
|
|
||||||
to update its idea of the correct execution environment, before any
|
|
||||||
attempt is made to call a Connection method. While it's not expected for
|
|
||||||
the interpreter to change on a per-task basis, Ansible permits this, and
|
|
||||||
so it must be supported.
|
|
||||||
|
|
||||||
* Overriding lots of methods that try to call out to shell for mundane
|
|
||||||
reasons, such as copying files around, changing file permissions,
|
|
||||||
creating temporary directories and suchlike.
|
|
||||||
|
|
||||||
* Short-circuiting any use of Ansiballz or related code for executing a
|
|
||||||
module remotely using shell commands and SSH.
|
|
||||||
|
|
||||||
* Short-circuiting most of the logic in dealing with the fact that Ansible
|
|
||||||
always runs become: tasks across at least the SSH user account and the
|
|
||||||
destination user account, and handling the security permission issues
|
|
||||||
that crop up due to this. Mitogen always runs a task completely within
|
|
||||||
the target user account, so it's not a problem for us.
|
|
||||||
"""
|
|
||||||
def __init__(self, task, connection, *args, **kwargs):
|
|
||||||
"""
|
|
||||||
Verify the received connection is really a Mitogen connection. If not,
|
|
||||||
transmute this instance back into the original unadorned base class.
|
|
||||||
|
|
||||||
This allows running the Mitogen strategy in mixed-target playbooks,
|
|
||||||
where some targets use SSH while others use WinRM or some fancier UNIX
|
|
||||||
connection plug-in. That's because when the Mitogen strategy is active,
|
|
||||||
ActionModuleMixin is unconditionally mixed into any action module that
|
|
||||||
is instantiated, and there is no direct way for the monkey-patch to
|
|
||||||
know what kind of connection will be used upfront.
|
|
||||||
"""
|
|
||||||
super(ActionModuleMixin, self).__init__(task, connection, *args, **kwargs)
|
|
||||||
if not isinstance(connection, ansible_mitogen.connection.Connection):
|
|
||||||
_, self.__class__ = type(self).__bases__
|
|
||||||
|
|
||||||
def run(self, tmp=None, task_vars=None):
|
|
||||||
"""
|
|
||||||
Override run() to notify Connection of task-specific data, so it has a
|
|
||||||
chance to know e.g. the Python interpreter in use.
|
|
||||||
"""
|
|
||||||
self._connection.on_action_run(
|
|
||||||
task_vars=task_vars,
|
|
||||||
delegate_to_hostname=self._task.delegate_to,
|
|
||||||
loader_basedir=self._loader.get_basedir(),
|
|
||||||
)
|
|
||||||
return super(ActionModuleMixin, self).run(tmp, task_vars)
|
|
||||||
|
|
||||||
COMMAND_RESULT = {
|
|
||||||
'rc': 0,
|
|
||||||
'stdout': '',
|
|
||||||
'stdout_lines': [],
|
|
||||||
'stderr': ''
|
|
||||||
}
|
|
||||||
|
|
||||||
def fake_shell(self, func, stdout=False):
|
|
||||||
"""
|
|
||||||
Execute a function and decorate its return value in the style of
|
|
||||||
_low_level_execute_command(). This produces a return value that looks
|
|
||||||
like some shell command was run, when really func() was implemented
|
|
||||||
entirely in Python.
|
|
||||||
|
|
||||||
If the function raises :py:class:`mitogen.core.CallError`, this will be
|
|
||||||
translated into a failed shell command with a non-zero exit status.
|
|
||||||
|
|
||||||
:param func:
|
|
||||||
Function invoked as `func()`.
|
|
||||||
:returns:
|
|
||||||
See :py:attr:`COMMAND_RESULT`.
|
|
||||||
"""
|
|
||||||
dct = self.COMMAND_RESULT.copy()
|
|
||||||
try:
|
|
||||||
rc = func()
|
|
||||||
if stdout:
|
|
||||||
dct['stdout'] = repr(rc)
|
|
||||||
except mitogen.core.CallError:
|
|
||||||
LOG.exception('While emulating a shell command')
|
|
||||||
dct['rc'] = 1
|
|
||||||
dct['stderr'] = traceback.format_exc()
|
|
||||||
|
|
||||||
return dct
|
|
||||||
|
|
||||||
def _remote_file_exists(self, path):
|
|
||||||
"""
|
|
||||||
Determine if `path` exists by directly invoking os.path.exists() in the
|
|
||||||
target user account.
|
|
||||||
"""
|
|
||||||
LOG.debug('_remote_file_exists(%r)', path)
|
|
||||||
return self._connection.get_chain().call(
|
|
||||||
ansible_mitogen.target.file_exists,
|
|
||||||
mitogen.utils.cast(path)
|
|
||||||
)
|
|
||||||
|
|
||||||
def _configure_module(self, module_name, module_args, task_vars=None):
|
|
||||||
"""
|
|
||||||
Mitogen does not use the Ansiballz framework. This call should never
|
|
||||||
happen when ActionMixin is active, so crash if it does.
|
|
||||||
"""
|
|
||||||
assert False, "_configure_module() should never be called."
|
|
||||||
|
|
||||||
def _is_pipelining_enabled(self, module_style, wrap_async=False):
|
|
||||||
"""
|
|
||||||
Mitogen does not use SSH pipelining. This call should never happen when
|
|
||||||
ActionMixin is active, so crash if it does.
|
|
||||||
"""
|
|
||||||
assert False, "_is_pipelining_enabled() should never be called."
|
|
||||||
|
|
||||||
def _generate_tmp_path(self):
|
|
||||||
return os.path.join(
|
|
||||||
self._connection.get_good_temp_dir(),
|
|
||||||
'ansible_mitogen_action_%016x' % (
|
|
||||||
random.getrandbits(8*8),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
def _generate_tmp_path(self):
|
|
||||||
return os.path.join(
|
|
||||||
self._connection.get_good_temp_dir(),
|
|
||||||
'ansible_mitogen_action_%016x' % (
|
|
||||||
random.getrandbits(8*8),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
def _make_tmp_path(self, remote_user=None):
|
|
||||||
"""
|
|
||||||
Create a temporary subdirectory as a child of the temporary directory
|
|
||||||
managed by the remote interpreter.
|
|
||||||
"""
|
|
||||||
LOG.debug('_make_tmp_path(remote_user=%r)', remote_user)
|
|
||||||
path = self._generate_tmp_path()
|
|
||||||
LOG.debug('Temporary directory: %r', path)
|
|
||||||
self._connection.get_chain().call_no_reply(os.mkdir, path)
|
|
||||||
self._connection._shell.tmpdir = path
|
|
||||||
return path
|
|
||||||
|
|
||||||
def _remove_tmp_path(self, tmp_path):
|
|
||||||
"""
|
|
||||||
Replace the base implementation's invocation of rm -rf, replacing it
|
|
||||||
with a pipelined call to :func:`ansible_mitogen.target.prune_tree`.
|
|
||||||
"""
|
|
||||||
LOG.debug('_remove_tmp_path(%r)', tmp_path)
|
|
||||||
if tmp_path is None and ansible.__version__ > '2.6':
|
|
||||||
tmp_path = self._connection._shell.tmpdir # 06f73ad578d
|
|
||||||
if tmp_path is not None:
|
|
||||||
self._connection.get_chain().call_no_reply(
|
|
||||||
ansible_mitogen.target.prune_tree,
|
|
||||||
tmp_path,
|
|
||||||
)
|
|
||||||
self._connection._shell.tmpdir = None
|
|
||||||
|
|
||||||
def _transfer_data(self, remote_path, data):
|
|
||||||
"""
|
|
||||||
Used by the base _execute_module(), and in <2.4 also by the template
|
|
||||||
action module, and probably others.
|
|
||||||
"""
|
|
||||||
if isinstance(data, dict):
|
|
||||||
data = jsonify(data)
|
|
||||||
if not isinstance(data, bytes):
|
|
||||||
data = to_bytes(data, errors='surrogate_or_strict')
|
|
||||||
|
|
||||||
LOG.debug('_transfer_data(%r, %s ..%d bytes)',
|
|
||||||
remote_path, type(data), len(data))
|
|
||||||
self._connection.put_data(remote_path, data)
|
|
||||||
return remote_path
|
|
||||||
|
|
||||||
#: Actions listed here cause :func:`_fixup_perms2` to avoid a needless
|
|
||||||
#: roundtrip, as they modify file modes separately afterwards. This is due
|
|
||||||
#: to the method prototype having a default of `execute=True`.
|
|
||||||
FIXUP_PERMS_RED_HERRING = set(['copy'])
|
|
||||||
|
|
||||||
def _fixup_perms2(self, remote_paths, remote_user=None, execute=True):
|
|
||||||
"""
|
|
||||||
Mitogen always executes ActionBase helper methods in the context of the
|
|
||||||
target user account, so it is never necessary to modify permissions
|
|
||||||
except to ensure the execute bit is set if requested.
|
|
||||||
"""
|
|
||||||
LOG.debug('_fixup_perms2(%r, remote_user=%r, execute=%r)',
|
|
||||||
remote_paths, remote_user, execute)
|
|
||||||
if execute and self._task.action not in self.FIXUP_PERMS_RED_HERRING:
|
|
||||||
return self._remote_chmod(remote_paths, mode='u+x')
|
|
||||||
return self.COMMAND_RESULT.copy()
|
|
||||||
|
|
||||||
def _remote_chmod(self, paths, mode, sudoable=False):
|
|
||||||
"""
|
|
||||||
Issue an asynchronous set_file_mode() call for every path in `paths`,
|
|
||||||
then format the resulting return value list with fake_shell().
|
|
||||||
"""
|
|
||||||
LOG.debug('_remote_chmod(%r, mode=%r, sudoable=%r)',
|
|
||||||
paths, mode, sudoable)
|
|
||||||
return self.fake_shell(lambda: mitogen.select.Select.all(
|
|
||||||
self._connection.get_chain().call_async(
|
|
||||||
ansible_mitogen.target.set_file_mode, path, mode
|
|
||||||
)
|
|
||||||
for path in paths
|
|
||||||
))
|
|
||||||
|
|
||||||
def _remote_chown(self, paths, user, sudoable=False):
|
|
||||||
"""
|
|
||||||
Issue an asynchronous os.chown() call for every path in `paths`, then
|
|
||||||
format the resulting return value list with fake_shell().
|
|
||||||
"""
|
|
||||||
LOG.debug('_remote_chown(%r, user=%r, sudoable=%r)',
|
|
||||||
paths, user, sudoable)
|
|
||||||
ent = self._connection.get_chain().call(pwd.getpwnam, user)
|
|
||||||
return self.fake_shell(lambda: mitogen.select.Select.all(
|
|
||||||
self._connection.get_chain().call_async(
|
|
||||||
os.chown, path, ent.pw_uid, ent.pw_gid
|
|
||||||
)
|
|
||||||
for path in paths
|
|
||||||
))
|
|
||||||
|
|
||||||
def _remote_expand_user(self, path, sudoable=True):
|
|
||||||
"""
|
|
||||||
Replace the base implementation's attempt to emulate
|
|
||||||
os.path.expanduser() with an actual call to os.path.expanduser().
|
|
||||||
|
|
||||||
:param bool sudoable:
|
|
||||||
If :data:`True`, indicate unqualified tilde ("~" with no username)
|
|
||||||
should be evaluated in the context of the login account, not any
|
|
||||||
become_user.
|
|
||||||
"""
|
|
||||||
LOG.debug('_remote_expand_user(%r, sudoable=%r)', path, sudoable)
|
|
||||||
if not path.startswith('~'):
|
|
||||||
# /home/foo -> /home/foo
|
|
||||||
return path
|
|
||||||
if sudoable or not self._play_context.become:
|
|
||||||
if path == '~':
|
|
||||||
# ~ -> /home/dmw
|
|
||||||
return self._connection.homedir
|
|
||||||
if path.startswith('~/'):
|
|
||||||
# ~/.ansible -> /home/dmw/.ansible
|
|
||||||
return os.path.join(self._connection.homedir, path[2:])
|
|
||||||
# ~root/.ansible -> /root/.ansible
|
|
||||||
return self._connection.get_chain(use_login=(not sudoable)).call(
|
|
||||||
os.path.expanduser,
|
|
||||||
mitogen.utils.cast(path),
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_task_timeout_secs(self):
|
|
||||||
"""
|
|
||||||
Return the task "async:" value, portable across 2.4-2.5.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
return self._task.async_val
|
|
||||||
except AttributeError:
|
|
||||||
return getattr(self._task, 'async')
|
|
||||||
|
|
||||||
def _temp_file_gibberish(self, module_args, wrap_async):
|
|
||||||
# Ansible>2.5 module_utils reuses the action's temporary directory if
|
|
||||||
# one exists. Older versions error if this key is present.
|
|
||||||
if ansible.__version__ > '2.5':
|
|
||||||
if wrap_async:
|
|
||||||
# Sharing is not possible with async tasks, as in that case,
|
|
||||||
# the directory must outlive the action plug-in.
|
|
||||||
module_args['_ansible_tmpdir'] = None
|
|
||||||
else:
|
|
||||||
module_args['_ansible_tmpdir'] = self._connection._shell.tmpdir
|
|
||||||
|
|
||||||
# If _ansible_tmpdir is unset, Ansible>2.6 module_utils will use
|
|
||||||
# _ansible_remote_tmp as the location to create the module's temporary
|
|
||||||
# directory. Older versions error if this key is present.
|
|
||||||
if ansible.__version__ > '2.6':
|
|
||||||
module_args['_ansible_remote_tmp'] = (
|
|
||||||
self._connection.get_good_temp_dir()
|
|
||||||
)
|
|
||||||
|
|
||||||
def _execute_module(self, module_name=None, module_args=None, tmp=None,
|
|
||||||
task_vars=None, persist_files=False,
|
|
||||||
delete_remote_tmp=True, wrap_async=False):
|
|
||||||
"""
|
|
||||||
Collect up a module's execution environment then use it to invoke
|
|
||||||
target.run_module() or helpers.run_module_async() in the target
|
|
||||||
context.
|
|
||||||
"""
|
|
||||||
if module_name is None:
|
|
||||||
module_name = self._task.action
|
|
||||||
if module_args is None:
|
|
||||||
module_args = self._task.args
|
|
||||||
if task_vars is None:
|
|
||||||
task_vars = {}
|
|
||||||
|
|
||||||
self._update_module_args(module_name, module_args, task_vars)
|
|
||||||
env = {}
|
|
||||||
self._compute_environment_string(env)
|
|
||||||
self._temp_file_gibberish(module_args, wrap_async)
|
|
||||||
|
|
||||||
self._connection._connect()
|
|
||||||
result = ansible_mitogen.planner.invoke(
|
|
||||||
ansible_mitogen.planner.Invocation(
|
|
||||||
action=self,
|
|
||||||
connection=self._connection,
|
|
||||||
module_name=mitogen.core.to_text(module_name),
|
|
||||||
module_args=mitogen.utils.cast(module_args),
|
|
||||||
task_vars=task_vars,
|
|
||||||
templar=self._templar,
|
|
||||||
env=mitogen.utils.cast(env),
|
|
||||||
wrap_async=wrap_async,
|
|
||||||
timeout_secs=self.get_task_timeout_secs(),
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
if ansible.__version__ < '2.5' and delete_remote_tmp and \
|
|
||||||
getattr(self._connection._shell, 'tmpdir', None) is not None:
|
|
||||||
# Built-in actions expected tmpdir to be cleaned up automatically
|
|
||||||
# on _execute_module().
|
|
||||||
self._remove_tmp_path(self._connection._shell.tmpdir)
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
def _postprocess_response(self, result):
|
|
||||||
"""
|
|
||||||
Apply fixups mimicking ActionBase._execute_module(); this is copied
|
|
||||||
verbatim from action/__init__.py, the guts of _parse_returned_data are
|
|
||||||
garbage and should be removed or reimplemented once tests exist.
|
|
||||||
|
|
||||||
:param dict result:
|
|
||||||
Dictionary with format::
|
|
||||||
|
|
||||||
{
|
|
||||||
"rc": int,
|
|
||||||
"stdout": "stdout data",
|
|
||||||
"stderr": "stderr data"
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
data = self._parse_returned_data(result)
|
|
||||||
|
|
||||||
# Cutpasted from the base implementation.
|
|
||||||
if 'stdout' in data and 'stdout_lines' not in data:
|
|
||||||
data['stdout_lines'] = (data['stdout'] or u'').splitlines()
|
|
||||||
if 'stderr' in data and 'stderr_lines' not in data:
|
|
||||||
data['stderr_lines'] = (data['stderr'] or u'').splitlines()
|
|
||||||
|
|
||||||
return data
|
|
||||||
|
|
||||||
def _low_level_execute_command(self, cmd, sudoable=True, in_data=None,
|
|
||||||
executable=None,
|
|
||||||
encoding_errors='surrogate_then_replace',
|
|
||||||
chdir=None):
|
|
||||||
"""
|
|
||||||
Override the base implementation by simply calling
|
|
||||||
target.exec_command() in the target context.
|
|
||||||
"""
|
|
||||||
LOG.debug('_low_level_execute_command(%r, in_data=%r, exe=%r, dir=%r)',
|
|
||||||
cmd, type(in_data), executable, chdir)
|
|
||||||
if executable is None: # executable defaults to False
|
|
||||||
executable = self._play_context.executable
|
|
||||||
if executable:
|
|
||||||
cmd = executable + ' -c ' + shlex_quote(cmd)
|
|
||||||
|
|
||||||
rc, stdout, stderr = self._connection.exec_command(
|
|
||||||
cmd=cmd,
|
|
||||||
in_data=in_data,
|
|
||||||
sudoable=sudoable,
|
|
||||||
mitogen_chdir=chdir,
|
|
||||||
)
|
|
||||||
stdout_text = to_text(stdout, errors=encoding_errors)
|
|
||||||
|
|
||||||
return {
|
|
||||||
'rc': rc,
|
|
||||||
'stdout': stdout_text,
|
|
||||||
'stdout_lines': stdout_text.splitlines(),
|
|
||||||
'stderr': stderr,
|
|
||||||
}
|
|
|
@ -1,157 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import collections
|
|
||||||
import imp
|
|
||||||
import os
|
|
||||||
|
|
||||||
import mitogen.master
|
|
||||||
|
|
||||||
|
|
||||||
PREFIX = 'ansible.module_utils.'
|
|
||||||
|
|
||||||
|
|
||||||
Module = collections.namedtuple('Module', 'name path kind parent')
|
|
||||||
|
|
||||||
|
|
||||||
def get_fullname(module):
|
|
||||||
"""
|
|
||||||
Reconstruct a Module's canonical path by recursing through its parents.
|
|
||||||
"""
|
|
||||||
bits = [str(module.name)]
|
|
||||||
while module.parent:
|
|
||||||
bits.append(str(module.parent.name))
|
|
||||||
module = module.parent
|
|
||||||
return '.'.join(reversed(bits))
|
|
||||||
|
|
||||||
|
|
||||||
def get_code(module):
|
|
||||||
"""
|
|
||||||
Compile and return a Module's code object.
|
|
||||||
"""
|
|
||||||
fp = open(module.path)
|
|
||||||
try:
|
|
||||||
return compile(fp.read(), str(module.name), 'exec')
|
|
||||||
finally:
|
|
||||||
fp.close()
|
|
||||||
|
|
||||||
|
|
||||||
def is_pkg(module):
|
|
||||||
"""
|
|
||||||
Return :data:`True` if a Module represents a package.
|
|
||||||
"""
|
|
||||||
return module.kind == imp.PKG_DIRECTORY
|
|
||||||
|
|
||||||
|
|
||||||
def find(name, path=(), parent=None):
|
|
||||||
"""
|
|
||||||
Return a Module instance describing the first matching module found on the
|
|
||||||
search path.
|
|
||||||
|
|
||||||
:param str name:
|
|
||||||
Module name.
|
|
||||||
:param list path:
|
|
||||||
List of directory names to search for the module.
|
|
||||||
:param Module parent:
|
|
||||||
Optional module parent.
|
|
||||||
"""
|
|
||||||
assert isinstance(path, tuple)
|
|
||||||
head, _, tail = name.partition('.')
|
|
||||||
try:
|
|
||||||
tup = imp.find_module(head, list(path))
|
|
||||||
except ImportError:
|
|
||||||
return parent
|
|
||||||
|
|
||||||
fp, modpath, (suffix, mode, kind) = tup
|
|
||||||
if fp:
|
|
||||||
fp.close()
|
|
||||||
|
|
||||||
if parent and modpath == parent.path:
|
|
||||||
# 'from timeout import timeout', where 'timeout' is a function but also
|
|
||||||
# the name of the module being imported.
|
|
||||||
return None
|
|
||||||
|
|
||||||
if kind == imp.PKG_DIRECTORY:
|
|
||||||
modpath = os.path.join(modpath, '__init__.py')
|
|
||||||
|
|
||||||
module = Module(head, modpath, kind, parent)
|
|
||||||
# TODO: this code is entirely wrong on Python 3.x, but works well enough
|
|
||||||
# for Ansible. We need a new find_child() that only looks in the package
|
|
||||||
# directory, never falling back to the parent search path.
|
|
||||||
if tail and kind == imp.PKG_DIRECTORY:
|
|
||||||
return find_relative(module, tail, path)
|
|
||||||
return module
|
|
||||||
|
|
||||||
|
|
||||||
def find_relative(parent, name, path=()):
|
|
||||||
if parent.kind == imp.PKG_DIRECTORY:
|
|
||||||
path = (os.path.dirname(parent.path),) + path
|
|
||||||
return find(name, path, parent=parent)
|
|
||||||
|
|
||||||
|
|
||||||
def scan_fromlist(code):
|
|
||||||
for level, modname_s, fromlist in mitogen.master.scan_code_imports(code):
|
|
||||||
for name in fromlist:
|
|
||||||
yield level, '%s.%s' % (modname_s, name)
|
|
||||||
if not fromlist:
|
|
||||||
yield level, modname_s
|
|
||||||
|
|
||||||
|
|
||||||
def scan(module_name, module_path, search_path):
|
|
||||||
module = Module(module_name, module_path, imp.PY_SOURCE, None)
|
|
||||||
stack = [module]
|
|
||||||
seen = set()
|
|
||||||
|
|
||||||
while stack:
|
|
||||||
module = stack.pop(0)
|
|
||||||
for level, fromname in scan_fromlist(get_code(module)):
|
|
||||||
if not fromname.startswith(PREFIX):
|
|
||||||
continue
|
|
||||||
|
|
||||||
imported = find(fromname[len(PREFIX):], search_path)
|
|
||||||
if imported is None or imported in seen:
|
|
||||||
continue
|
|
||||||
|
|
||||||
seen.add(imported)
|
|
||||||
stack.append(imported)
|
|
||||||
parent = imported.parent
|
|
||||||
while parent:
|
|
||||||
fullname = get_fullname(parent)
|
|
||||||
module = Module(fullname, parent.path, parent.kind, None)
|
|
||||||
if module not in seen:
|
|
||||||
seen.add(module)
|
|
||||||
stack.append(module)
|
|
||||||
parent = parent.parent
|
|
||||||
|
|
||||||
return sorted(
|
|
||||||
(PREFIX + get_fullname(module), module.path, is_pkg(module))
|
|
||||||
for module in seen
|
|
||||||
)
|
|
|
@ -1,84 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Classes to detect each case from [0] and prepare arguments necessary for the
|
|
||||||
corresponding Runner class within the target, including preloading requisite
|
|
||||||
files/modules known missing.
|
|
||||||
|
|
||||||
[0] "Ansible Module Architecture", developing_program_flow_modules.html
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
|
|
||||||
|
|
||||||
def parse_script_interpreter(source):
|
|
||||||
"""
|
|
||||||
Parse the script interpreter portion of a UNIX hashbang using the rules
|
|
||||||
Linux uses.
|
|
||||||
|
|
||||||
:param str source: String like "/usr/bin/env python".
|
|
||||||
|
|
||||||
:returns:
|
|
||||||
Tuple of `(interpreter, arg)`, where `intepreter` is the script
|
|
||||||
interpreter and `arg` is its sole argument if present, otherwise
|
|
||||||
:py:data:`None`.
|
|
||||||
"""
|
|
||||||
# Find terminating newline. Assume last byte of binprm_buf if absent.
|
|
||||||
nl = source.find(b'\n', 0, 128)
|
|
||||||
if nl == -1:
|
|
||||||
nl = min(128, len(source))
|
|
||||||
|
|
||||||
# Split once on the first run of whitespace. If no whitespace exists,
|
|
||||||
# bits just contains the interpreter filename.
|
|
||||||
bits = source[0:nl].strip().split(None, 1)
|
|
||||||
if len(bits) == 1:
|
|
||||||
return mitogen.core.to_text(bits[0]), None
|
|
||||||
return mitogen.core.to_text(bits[0]), mitogen.core.to_text(bits[1])
|
|
||||||
|
|
||||||
|
|
||||||
def parse_hashbang(source):
|
|
||||||
"""
|
|
||||||
Parse a UNIX "hashbang line" using the syntax supported by Linux.
|
|
||||||
|
|
||||||
:param str source: String like "#!/usr/bin/env python".
|
|
||||||
|
|
||||||
:returns:
|
|
||||||
Tuple of `(interpreter, arg)`, where `intepreter` is the script
|
|
||||||
interpreter and `arg` is its sole argument if present, otherwise
|
|
||||||
:py:data:`None`.
|
|
||||||
"""
|
|
||||||
# Linux requires first 2 bytes with no whitespace, pretty sure it's the
|
|
||||||
# same everywhere. See binfmt_script.c.
|
|
||||||
if not source.startswith(b'#!'):
|
|
||||||
return None, None
|
|
||||||
|
|
||||||
return parse_script_interpreter(source[2:])
|
|
|
@ -1,499 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
"""
|
|
||||||
Classes to detect each case from [0] and prepare arguments necessary for the
|
|
||||||
corresponding Runner class within the target, including preloading requisite
|
|
||||||
files/modules known missing.
|
|
||||||
|
|
||||||
[0] "Ansible Module Architecture", developing_program_flow_modules.html
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import random
|
|
||||||
|
|
||||||
from ansible.executor import module_common
|
|
||||||
import ansible.errors
|
|
||||||
import ansible.module_utils
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.select
|
|
||||||
|
|
||||||
import ansible_mitogen.loaders
|
|
||||||
import ansible_mitogen.parsing
|
|
||||||
import ansible_mitogen.target
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
NO_METHOD_MSG = 'Mitogen: no invocation method found for: '
|
|
||||||
NO_INTERPRETER_MSG = 'module (%s) is missing interpreter line'
|
|
||||||
NO_MODULE_MSG = 'The module %s was not found in configured module paths.'
|
|
||||||
|
|
||||||
|
|
||||||
class Invocation(object):
|
|
||||||
"""
|
|
||||||
Collect up a module's execution environment then use it to invoke
|
|
||||||
target.run_module() or helpers.run_module_async() in the target context.
|
|
||||||
"""
|
|
||||||
def __init__(self, action, connection, module_name, module_args,
|
|
||||||
task_vars, templar, env, wrap_async, timeout_secs):
|
|
||||||
#: ActionBase instance invoking the module. Required to access some
|
|
||||||
#: output postprocessing methods that don't belong in ActionBase at
|
|
||||||
#: all.
|
|
||||||
self.action = action
|
|
||||||
#: Ansible connection to use to contact the target. Must be an
|
|
||||||
#: ansible_mitogen connection.
|
|
||||||
self.connection = connection
|
|
||||||
#: Name of the module ('command', 'shell', etc.) to execute.
|
|
||||||
self.module_name = module_name
|
|
||||||
#: Final module arguments.
|
|
||||||
self.module_args = module_args
|
|
||||||
#: Task variables, needed to extract ansible_*_interpreter.
|
|
||||||
self.task_vars = task_vars
|
|
||||||
#: Templar, needed to extract ansible_*_interpreter.
|
|
||||||
self.templar = templar
|
|
||||||
#: Final module environment.
|
|
||||||
self.env = env
|
|
||||||
#: Boolean, if :py:data:`True`, launch the module asynchronously.
|
|
||||||
self.wrap_async = wrap_async
|
|
||||||
#: Integer, if >0, limit the time an asynchronous job may run for.
|
|
||||||
self.timeout_secs = timeout_secs
|
|
||||||
#: Initially ``None``, but set by :func:`invoke`. The path on the
|
|
||||||
#: master to the module's implementation file.
|
|
||||||
self.module_path = None
|
|
||||||
#: Initially ``None``, but set by :func:`invoke`. The raw source or
|
|
||||||
#: binary contents of the module.
|
|
||||||
self.module_source = None
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return 'Invocation(module_name=%s)' % (self.module_name,)
|
|
||||||
|
|
||||||
|
|
||||||
class Planner(object):
|
|
||||||
"""
|
|
||||||
A Planner receives a module name and the contents of its implementation
|
|
||||||
file, indicates whether or not it understands how to run the module, and
|
|
||||||
exports a method to run the module.
|
|
||||||
"""
|
|
||||||
def __init__(self, invocation):
|
|
||||||
self._inv = invocation
|
|
||||||
|
|
||||||
def detect(self):
|
|
||||||
"""
|
|
||||||
Return true if the supplied `invocation` matches the module type
|
|
||||||
implemented by this planner.
|
|
||||||
"""
|
|
||||||
raise NotImplementedError()
|
|
||||||
|
|
||||||
def should_fork(self):
|
|
||||||
"""
|
|
||||||
Asynchronous tasks must always be forked.
|
|
||||||
"""
|
|
||||||
return self._inv.wrap_async
|
|
||||||
|
|
||||||
def get_push_files(self):
|
|
||||||
"""
|
|
||||||
Return a list of files that should be propagated to the target context
|
|
||||||
using PushFileService. The default implementation pushes nothing.
|
|
||||||
"""
|
|
||||||
return []
|
|
||||||
|
|
||||||
def get_module_deps(self):
|
|
||||||
"""
|
|
||||||
Return a list of the Python module names imported by the module.
|
|
||||||
"""
|
|
||||||
return []
|
|
||||||
|
|
||||||
def get_kwargs(self, **kwargs):
|
|
||||||
"""
|
|
||||||
If :meth:`detect` returned :data:`True`, plan for the module's
|
|
||||||
execution, including granting access to or delivering any files to it
|
|
||||||
that are known to be absent, and finally return a dict::
|
|
||||||
|
|
||||||
{
|
|
||||||
# Name of the class from runners.py that implements the
|
|
||||||
# target-side execution of this module type.
|
|
||||||
"runner_name": "...",
|
|
||||||
|
|
||||||
# Remaining keys are passed to the constructor of the class
|
|
||||||
# named by `runner_name`.
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
new = dict((mitogen.core.UnicodeType(k), kwargs[k])
|
|
||||||
for k in kwargs)
|
|
||||||
new.setdefault('good_temp_dir',
|
|
||||||
self._inv.connection.get_good_temp_dir())
|
|
||||||
new.setdefault('cwd', self._inv.connection.get_default_cwd())
|
|
||||||
new.setdefault('extra_env', self._inv.connection.get_default_env())
|
|
||||||
new.setdefault('emulate_tty', True)
|
|
||||||
new.setdefault('service_context', self._inv.connection.parent)
|
|
||||||
return new
|
|
||||||
|
|
||||||
def __repr__(self):
|
|
||||||
return '%s()' % (type(self).__name__,)
|
|
||||||
|
|
||||||
|
|
||||||
class BinaryPlanner(Planner):
|
|
||||||
"""
|
|
||||||
Binary modules take their arguments and will return data to Ansible in the
|
|
||||||
same way as want JSON modules.
|
|
||||||
"""
|
|
||||||
runner_name = 'BinaryRunner'
|
|
||||||
|
|
||||||
def detect(self):
|
|
||||||
return module_common._is_binary(self._inv.module_source)
|
|
||||||
|
|
||||||
def get_push_files(self):
|
|
||||||
return [mitogen.core.to_text(self._inv.module_path)]
|
|
||||||
|
|
||||||
def get_kwargs(self, **kwargs):
|
|
||||||
return super(BinaryPlanner, self).get_kwargs(
|
|
||||||
runner_name=self.runner_name,
|
|
||||||
module=self._inv.module_name,
|
|
||||||
path=self._inv.module_path,
|
|
||||||
json_args=json.dumps(self._inv.module_args),
|
|
||||||
env=self._inv.env,
|
|
||||||
**kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ScriptPlanner(BinaryPlanner):
|
|
||||||
"""
|
|
||||||
Common functionality for script module planners -- handle interpreter
|
|
||||||
detection and rewrite.
|
|
||||||
"""
|
|
||||||
def _rewrite_interpreter(self, path):
|
|
||||||
"""
|
|
||||||
Given the original interpreter binary extracted from the script's
|
|
||||||
interpreter line, look up the associated `ansible_*_interpreter`
|
|
||||||
variable, render it and return it.
|
|
||||||
|
|
||||||
:param str path:
|
|
||||||
Absolute UNIX path to original interpreter.
|
|
||||||
|
|
||||||
:returns:
|
|
||||||
Shell fragment prefix used to execute the script via "/bin/sh -c".
|
|
||||||
While `ansible_*_interpreter` documentation suggests shell isn't
|
|
||||||
involved here, the vanilla implementation uses it and that use is
|
|
||||||
exploited in common playbooks.
|
|
||||||
"""
|
|
||||||
key = u'ansible_%s_interpreter' % os.path.basename(path).strip()
|
|
||||||
try:
|
|
||||||
template = self._inv.task_vars[key]
|
|
||||||
except KeyError:
|
|
||||||
return path
|
|
||||||
|
|
||||||
return mitogen.utils.cast(self._inv.templar.template(template))
|
|
||||||
|
|
||||||
def _get_interpreter(self):
|
|
||||||
path, arg = ansible_mitogen.parsing.parse_hashbang(
|
|
||||||
self._inv.module_source
|
|
||||||
)
|
|
||||||
if path is None:
|
|
||||||
raise ansible.errors.AnsibleError(NO_INTERPRETER_MSG % (
|
|
||||||
self._inv.module_name,
|
|
||||||
))
|
|
||||||
|
|
||||||
fragment = self._rewrite_interpreter(path)
|
|
||||||
if arg:
|
|
||||||
fragment += ' ' + arg
|
|
||||||
|
|
||||||
return fragment, path.startswith('python')
|
|
||||||
|
|
||||||
def get_kwargs(self, **kwargs):
|
|
||||||
interpreter_fragment, is_python = self._get_interpreter()
|
|
||||||
return super(ScriptPlanner, self).get_kwargs(
|
|
||||||
interpreter_fragment=interpreter_fragment,
|
|
||||||
is_python=is_python,
|
|
||||||
**kwargs
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class JsonArgsPlanner(ScriptPlanner):
|
|
||||||
"""
|
|
||||||
Script that has its interpreter directive and the task arguments
|
|
||||||
substituted into its source as a JSON string.
|
|
||||||
"""
|
|
||||||
runner_name = 'JsonArgsRunner'
|
|
||||||
|
|
||||||
def detect(self):
|
|
||||||
return module_common.REPLACER_JSONARGS in self._inv.module_source
|
|
||||||
|
|
||||||
|
|
||||||
class WantJsonPlanner(ScriptPlanner):
|
|
||||||
"""
|
|
||||||
If a module has the string WANT_JSON in it anywhere, Ansible treats it as a
|
|
||||||
non-native module that accepts a filename as its only command line
|
|
||||||
parameter. The filename is for a temporary file containing a JSON string
|
|
||||||
containing the module's parameters. The module needs to open the file, read
|
|
||||||
and parse the parameters, operate on the data, and print its return data as
|
|
||||||
a JSON encoded dictionary to stdout before exiting.
|
|
||||||
|
|
||||||
These types of modules are self-contained entities. As of Ansible 2.1,
|
|
||||||
Ansible only modifies them to change a shebang line if present.
|
|
||||||
"""
|
|
||||||
runner_name = 'WantJsonRunner'
|
|
||||||
|
|
||||||
def detect(self):
|
|
||||||
return b'WANT_JSON' in self._inv.module_source
|
|
||||||
|
|
||||||
|
|
||||||
class NewStylePlanner(ScriptPlanner):
|
|
||||||
"""
|
|
||||||
The Ansiballz framework differs from module replacer in that it uses real
|
|
||||||
Python imports of things in ansible/module_utils instead of merely
|
|
||||||
preprocessing the module.
|
|
||||||
"""
|
|
||||||
runner_name = 'NewStyleRunner'
|
|
||||||
marker = b'from ansible.module_utils.'
|
|
||||||
|
|
||||||
def detect(self):
|
|
||||||
return self.marker in self._inv.module_source
|
|
||||||
|
|
||||||
def _get_interpreter(self):
|
|
||||||
return None, None
|
|
||||||
|
|
||||||
def get_push_files(self):
|
|
||||||
return super(NewStylePlanner, self).get_push_files() + [
|
|
||||||
mitogen.core.to_text(path)
|
|
||||||
for fullname, path, is_pkg in self.get_module_map()['custom']
|
|
||||||
]
|
|
||||||
|
|
||||||
def get_module_deps(self):
|
|
||||||
return self.get_module_map()['builtin']
|
|
||||||
|
|
||||||
#: Module names appearing in this set always require forking, usually due
|
|
||||||
#: to some terminal leakage that cannot be worked around in any sane
|
|
||||||
#: manner.
|
|
||||||
ALWAYS_FORK_MODULES = frozenset([
|
|
||||||
'dnf', # issue #280; py-dnf/hawkey need therapy
|
|
||||||
'firewalld', # issue #570: ansible module_utils caches dbus conn
|
|
||||||
])
|
|
||||||
|
|
||||||
def should_fork(self):
|
|
||||||
"""
|
|
||||||
In addition to asynchronous tasks, new-style modules should be forked
|
|
||||||
if:
|
|
||||||
|
|
||||||
* the user specifies mitogen_task_isolation=fork, or
|
|
||||||
* the new-style module has a custom module search path, or
|
|
||||||
* the module is known to leak like a sieve.
|
|
||||||
"""
|
|
||||||
return (
|
|
||||||
super(NewStylePlanner, self).should_fork() or
|
|
||||||
(self._inv.task_vars.get('mitogen_task_isolation') == 'fork') or
|
|
||||||
(self._inv.module_name in self.ALWAYS_FORK_MODULES) or
|
|
||||||
(len(self.get_module_map()['custom']) > 0)
|
|
||||||
)
|
|
||||||
|
|
||||||
def get_search_path(self):
|
|
||||||
return tuple(
|
|
||||||
path
|
|
||||||
for path in ansible_mitogen.loaders.module_utils_loader._get_paths(
|
|
||||||
subdirs=False
|
|
||||||
)
|
|
||||||
if os.path.isdir(path)
|
|
||||||
)
|
|
||||||
|
|
||||||
_module_map = None
|
|
||||||
|
|
||||||
def get_module_map(self):
|
|
||||||
if self._module_map is None:
|
|
||||||
self._module_map = self._inv.connection.parent.call_service(
|
|
||||||
service_name='ansible_mitogen.services.ModuleDepService',
|
|
||||||
method_name='scan',
|
|
||||||
|
|
||||||
module_name='ansible_module_%s' % (self._inv.module_name,),
|
|
||||||
module_path=self._inv.module_path,
|
|
||||||
search_path=self.get_search_path(),
|
|
||||||
builtin_path=module_common._MODULE_UTILS_PATH,
|
|
||||||
context=self._inv.connection.context,
|
|
||||||
)
|
|
||||||
return self._module_map
|
|
||||||
|
|
||||||
def get_kwargs(self):
|
|
||||||
return super(NewStylePlanner, self).get_kwargs(
|
|
||||||
module_map=self.get_module_map(),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class ReplacerPlanner(NewStylePlanner):
|
|
||||||
"""
|
|
||||||
The Module Replacer framework is the original framework implementing
|
|
||||||
new-style modules. It is essentially a preprocessor (like the C
|
|
||||||
Preprocessor for those familiar with that programming language). It does
|
|
||||||
straight substitutions of specific substring patterns in the module file.
|
|
||||||
There are two types of substitutions.
|
|
||||||
|
|
||||||
* Replacements that only happen in the module file. These are public
|
|
||||||
replacement strings that modules can utilize to get helpful boilerplate
|
|
||||||
or access to arguments.
|
|
||||||
|
|
||||||
"from ansible.module_utils.MOD_LIB_NAME import *" is replaced with the
|
|
||||||
contents of the ansible/module_utils/MOD_LIB_NAME.py. These should only
|
|
||||||
be used with new-style Python modules.
|
|
||||||
|
|
||||||
"#<<INCLUDE_ANSIBLE_MODULE_COMMON>>" is equivalent to
|
|
||||||
"from ansible.module_utils.basic import *" and should also only apply to
|
|
||||||
new-style Python modules.
|
|
||||||
|
|
||||||
"# POWERSHELL_COMMON" substitutes the contents of
|
|
||||||
"ansible/module_utils/powershell.ps1". It should only be used with
|
|
||||||
new-style Powershell modules.
|
|
||||||
"""
|
|
||||||
runner_name = 'ReplacerRunner'
|
|
||||||
|
|
||||||
def detect(self):
|
|
||||||
return module_common.REPLACER in self._inv.module_source
|
|
||||||
|
|
||||||
|
|
||||||
class OldStylePlanner(ScriptPlanner):
|
|
||||||
runner_name = 'OldStyleRunner'
|
|
||||||
|
|
||||||
def detect(self):
|
|
||||||
# Everything else.
|
|
||||||
return True
|
|
||||||
|
|
||||||
|
|
||||||
_planners = [
|
|
||||||
BinaryPlanner,
|
|
||||||
# ReplacerPlanner,
|
|
||||||
NewStylePlanner,
|
|
||||||
JsonArgsPlanner,
|
|
||||||
WantJsonPlanner,
|
|
||||||
OldStylePlanner,
|
|
||||||
]
|
|
||||||
|
|
||||||
|
|
||||||
def get_module_data(name):
|
|
||||||
path = ansible_mitogen.loaders.module_loader.find_plugin(name, '')
|
|
||||||
if path is None:
|
|
||||||
raise ansible.errors.AnsibleError(NO_MODULE_MSG % (name,))
|
|
||||||
|
|
||||||
with open(path, 'rb') as fp:
|
|
||||||
source = fp.read()
|
|
||||||
return mitogen.core.to_text(path), source
|
|
||||||
|
|
||||||
|
|
||||||
def _propagate_deps(invocation, planner, context):
|
|
||||||
invocation.connection.parent.call_service(
|
|
||||||
service_name='mitogen.service.PushFileService',
|
|
||||||
method_name='propagate_paths_and_modules',
|
|
||||||
context=context,
|
|
||||||
paths=planner.get_push_files(),
|
|
||||||
modules=planner.get_module_deps(),
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def _invoke_async_task(invocation, planner):
|
|
||||||
job_id = '%016x' % random.randint(0, 2**64)
|
|
||||||
context = invocation.connection.spawn_isolated_child()
|
|
||||||
_propagate_deps(invocation, planner, context)
|
|
||||||
|
|
||||||
with mitogen.core.Receiver(context.router) as started_recv:
|
|
||||||
call_recv = context.call_async(
|
|
||||||
ansible_mitogen.target.run_module_async,
|
|
||||||
job_id=job_id,
|
|
||||||
timeout_secs=invocation.timeout_secs,
|
|
||||||
started_sender=started_recv.to_sender(),
|
|
||||||
kwargs=planner.get_kwargs(),
|
|
||||||
)
|
|
||||||
|
|
||||||
# Wait for run_module_async() to crash, or for AsyncRunner to indicate
|
|
||||||
# the job file has been written.
|
|
||||||
for msg in mitogen.select.Select([started_recv, call_recv]):
|
|
||||||
if msg.receiver is call_recv:
|
|
||||||
# It can only be an exception.
|
|
||||||
raise msg.unpickle()
|
|
||||||
break
|
|
||||||
|
|
||||||
return {
|
|
||||||
'stdout': json.dumps({
|
|
||||||
# modules/utilities/logic/async_wrapper.py::_run_module().
|
|
||||||
'changed': True,
|
|
||||||
'started': 1,
|
|
||||||
'finished': 0,
|
|
||||||
'ansible_job_id': job_id,
|
|
||||||
})
|
|
||||||
}
|
|
||||||
|
|
||||||
|
|
||||||
def _invoke_isolated_task(invocation, planner):
|
|
||||||
context = invocation.connection.spawn_isolated_child()
|
|
||||||
_propagate_deps(invocation, planner, context)
|
|
||||||
try:
|
|
||||||
return context.call(
|
|
||||||
ansible_mitogen.target.run_module,
|
|
||||||
kwargs=planner.get_kwargs(),
|
|
||||||
)
|
|
||||||
finally:
|
|
||||||
context.shutdown()
|
|
||||||
|
|
||||||
|
|
||||||
def _get_planner(invocation):
|
|
||||||
for klass in _planners:
|
|
||||||
planner = klass(invocation)
|
|
||||||
if planner.detect():
|
|
||||||
LOG.debug('%r accepted %r (filename %r)', planner,
|
|
||||||
invocation.module_name, invocation.module_path)
|
|
||||||
return planner
|
|
||||||
LOG.debug('%r rejected %r', planner, invocation.module_name)
|
|
||||||
raise ansible.errors.AnsibleError(NO_METHOD_MSG + repr(invocation))
|
|
||||||
|
|
||||||
|
|
||||||
def invoke(invocation):
|
|
||||||
"""
|
|
||||||
Find a Planner subclass corresnding to `invocation` and use it to invoke
|
|
||||||
the module.
|
|
||||||
|
|
||||||
:param Invocation invocation:
|
|
||||||
:returns:
|
|
||||||
Module return dict.
|
|
||||||
:raises ansible.errors.AnsibleError:
|
|
||||||
Unrecognized/unsupported module type.
|
|
||||||
"""
|
|
||||||
(invocation.module_path,
|
|
||||||
invocation.module_source) = get_module_data(invocation.module_name)
|
|
||||||
planner = _get_planner(invocation)
|
|
||||||
|
|
||||||
if invocation.wrap_async:
|
|
||||||
response = _invoke_async_task(invocation, planner)
|
|
||||||
elif planner.should_fork():
|
|
||||||
response = _invoke_isolated_task(invocation, planner)
|
|
||||||
else:
|
|
||||||
_propagate_deps(invocation, planner, invocation.connection.context)
|
|
||||||
response = invocation.connection.get_chain().call(
|
|
||||||
ansible_mitogen.target.run_module,
|
|
||||||
kwargs=planner.get_kwargs(),
|
|
||||||
)
|
|
||||||
|
|
||||||
return invocation.action._postprocess_response(response)
|
|
|
@ -1,54 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
"""
|
|
||||||
Fetch the connection configuration stack that would be used to connect to a
|
|
||||||
target, without actually connecting to it.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
from ansible.plugins.action import ActionBase
|
|
||||||
|
|
||||||
|
|
||||||
class ActionModule(ActionBase):
|
|
||||||
def run(self, tmp=None, task_vars=None):
|
|
||||||
if not isinstance(self._connection,
|
|
||||||
ansible_mitogen.connection.Connection):
|
|
||||||
return {
|
|
||||||
'skipped': True,
|
|
||||||
}
|
|
||||||
|
|
||||||
return {
|
|
||||||
'changed': True,
|
|
||||||
'result': self._connection._build_stack(),
|
|
||||||
'_ansible_verbose_always': True,
|
|
||||||
}
|
|
|
@ -1,44 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'mitogen_doas'
|
|
|
@ -1,51 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'docker'
|
|
||||||
|
|
||||||
@property
|
|
||||||
def docker_cmd(self):
|
|
||||||
"""
|
|
||||||
Ansible 2.3 synchronize module wants to know how we run Docker.
|
|
||||||
"""
|
|
||||||
return 'docker'
|
|
|
@ -1,44 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'jail'
|
|
|
@ -1,71 +0,0 @@
|
||||||
# coding: utf-8
|
|
||||||
# Copyright 2018, Yannig Perré
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
from ansible.plugins.connection import kubectl
|
|
||||||
except ImportError:
|
|
||||||
kubectl = None
|
|
||||||
|
|
||||||
from ansible.errors import AnsibleConnectionFailure
|
|
||||||
from ansible.module_utils.six import iteritems
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'kubectl'
|
|
||||||
|
|
||||||
not_supported_msg = (
|
|
||||||
'The "mitogen_kubectl" plug-in requires a version of Ansible '
|
|
||||||
'that ships with the "kubectl" connection plug-in.'
|
|
||||||
)
|
|
||||||
|
|
||||||
def __init__(self, *args, **kwargs):
|
|
||||||
if kubectl is None:
|
|
||||||
raise AnsibleConnectionFailure(self.not_supported_msg)
|
|
||||||
super(Connection, self).__init__(*args, **kwargs)
|
|
||||||
|
|
||||||
def get_extra_args(self):
|
|
||||||
parameters = []
|
|
||||||
for key, option in iteritems(kubectl.CONNECTION_OPTIONS):
|
|
||||||
if self.get_task_var('ansible_' + key) is not None:
|
|
||||||
parameters += [ option, self.get_task_var('ansible_' + key) ]
|
|
||||||
|
|
||||||
return parameters
|
|
|
@ -1,86 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
import ansible_mitogen.process
|
|
||||||
|
|
||||||
|
|
||||||
if sys.version_info > (3,):
|
|
||||||
viewkeys = dict.keys
|
|
||||||
elif sys.version_info > (2, 7):
|
|
||||||
viewkeys = dict.viewkeys
|
|
||||||
else:
|
|
||||||
viewkeys = lambda dct: set(dct)
|
|
||||||
|
|
||||||
|
|
||||||
def dict_diff(old, new):
|
|
||||||
"""
|
|
||||||
Return a dict representing the differences between the dicts `old` and
|
|
||||||
`new`. Deleted keys appear as a key with the value :data:`None`, added and
|
|
||||||
changed keys appear as a key with the new value.
|
|
||||||
"""
|
|
||||||
old_keys = viewkeys(old)
|
|
||||||
new_keys = viewkeys(dict(new))
|
|
||||||
out = {}
|
|
||||||
for key in new_keys - old_keys:
|
|
||||||
out[key] = new[key]
|
|
||||||
for key in old_keys - new_keys:
|
|
||||||
out[key] = None
|
|
||||||
for key in old_keys & new_keys:
|
|
||||||
if old[key] != new[key]:
|
|
||||||
out[key] = new[key]
|
|
||||||
return out
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'local'
|
|
||||||
|
|
||||||
def get_default_cwd(self):
|
|
||||||
# https://github.com/ansible/ansible/issues/14489
|
|
||||||
return self.loader_basedir
|
|
||||||
|
|
||||||
def get_default_env(self):
|
|
||||||
"""
|
|
||||||
Vanilla Ansible local commands execute with an environment inherited
|
|
||||||
from WorkerProcess, we must emulate that.
|
|
||||||
"""
|
|
||||||
return dict_diff(
|
|
||||||
old=ansible_mitogen.process.MuxProcess.original_env,
|
|
||||||
new=os.environ,
|
|
||||||
)
|
|
|
@ -1,44 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'lxc'
|
|
|
@ -1,44 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'lxd'
|
|
|
@ -1,44 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'machinectl'
|
|
|
@ -1,44 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'setns'
|
|
|
@ -1,65 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
DOCUMENTATION = """
|
|
||||||
author: David Wilson <dw@botanicus.net>
|
|
||||||
connection: mitogen_ssh
|
|
||||||
short_description: Connect over SSH via Mitogen
|
|
||||||
description:
|
|
||||||
- This connects using an OpenSSH client controlled by the Mitogen for
|
|
||||||
Ansible extension. It accepts every option the vanilla ssh plugin
|
|
||||||
accepts.
|
|
||||||
version_added: "2.5"
|
|
||||||
options:
|
|
||||||
"""
|
|
||||||
|
|
||||||
import ansible.plugins.connection.ssh
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'ssh'
|
|
||||||
vanilla_class = ansible.plugins.connection.ssh.Connection
|
|
||||||
|
|
||||||
@staticmethod
|
|
||||||
def _create_control_path(*args, **kwargs):
|
|
||||||
"""Forward _create_control_path() to the implementation in ssh.py."""
|
|
||||||
# https://github.com/dw/mitogen/issues/342
|
|
||||||
return Connection.vanilla_class._create_control_path(*args, **kwargs)
|
|
|
@ -1,44 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'mitogen_su'
|
|
|
@ -1,44 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
except ImportError:
|
|
||||||
base_dir = os.path.dirname(__file__)
|
|
||||||
sys.path.insert(0, os.path.abspath(os.path.join(base_dir, '../../..')))
|
|
||||||
del base_dir
|
|
||||||
|
|
||||||
import ansible_mitogen.connection
|
|
||||||
|
|
||||||
|
|
||||||
class Connection(ansible_mitogen.connection.Connection):
|
|
||||||
transport = 'mitogen_sudo'
|
|
|
@ -1,61 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
#
|
|
||||||
# This is not the real Strategy implementation module, it simply exists as a
|
|
||||||
# proxy to the real module, which is loaded using Python's regular import
|
|
||||||
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
|
|
||||||
# results in ansible_mitogen plugin modules being loaded twice: once by
|
|
||||||
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
|
|
||||||
# stuffed into sys.modules even though attempting to import it will trigger an
|
|
||||||
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
|
|
||||||
#
|
|
||||||
# Therefore we have a proxy module that imports it under the real name, and
|
|
||||||
# sets up the duff PluginLoader-imported module to just contain objects from
|
|
||||||
# the real module, so duplicate types don't exist in memory, and things like
|
|
||||||
# debuggers and isinstance() work predictably.
|
|
||||||
#
|
|
||||||
|
|
||||||
BASE_DIR = os.path.abspath(
|
|
||||||
os.path.join(os.path.dirname(__file__), '../../..')
|
|
||||||
)
|
|
||||||
|
|
||||||
if BASE_DIR not in sys.path:
|
|
||||||
sys.path.insert(0, BASE_DIR)
|
|
||||||
|
|
||||||
import ansible_mitogen.strategy
|
|
||||||
import ansible.plugins.strategy.linear
|
|
||||||
|
|
||||||
|
|
||||||
class StrategyModule(ansible_mitogen.strategy.StrategyMixin,
|
|
||||||
ansible.plugins.strategy.linear.StrategyModule):
|
|
||||||
pass
|
|
|
@ -1,62 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
#
|
|
||||||
# This is not the real Strategy implementation module, it simply exists as a
|
|
||||||
# proxy to the real module, which is loaded using Python's regular import
|
|
||||||
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
|
|
||||||
# results in ansible_mitogen plugin modules being loaded twice: once by
|
|
||||||
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
|
|
||||||
# stuffed into sys.modules even though attempting to import it will trigger an
|
|
||||||
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
|
|
||||||
#
|
|
||||||
# Therefore we have a proxy module that imports it under the real name, and
|
|
||||||
# sets up the duff PluginLoader-imported module to just contain objects from
|
|
||||||
# the real module, so duplicate types don't exist in memory, and things like
|
|
||||||
# debuggers and isinstance() work predictably.
|
|
||||||
#
|
|
||||||
|
|
||||||
BASE_DIR = os.path.abspath(
|
|
||||||
os.path.join(os.path.dirname(__file__), '../../..')
|
|
||||||
)
|
|
||||||
|
|
||||||
if BASE_DIR not in sys.path:
|
|
||||||
sys.path.insert(0, BASE_DIR)
|
|
||||||
|
|
||||||
import ansible_mitogen.loaders
|
|
||||||
import ansible_mitogen.strategy
|
|
||||||
|
|
||||||
|
|
||||||
Base = ansible_mitogen.loaders.strategy_loader.get('free', class_only=True)
|
|
||||||
|
|
||||||
class StrategyModule(ansible_mitogen.strategy.StrategyMixin, Base):
|
|
||||||
pass
|
|
|
@ -1,67 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
#
|
|
||||||
# This is not the real Strategy implementation module, it simply exists as a
|
|
||||||
# proxy to the real module, which is loaded using Python's regular import
|
|
||||||
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
|
|
||||||
# results in ansible_mitogen plugin modules being loaded twice: once by
|
|
||||||
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
|
|
||||||
# stuffed into sys.modules even though attempting to import it will trigger an
|
|
||||||
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
|
|
||||||
#
|
|
||||||
# Therefore we have a proxy module that imports it under the real name, and
|
|
||||||
# sets up the duff PluginLoader-imported module to just contain objects from
|
|
||||||
# the real module, so duplicate types don't exist in memory, and things like
|
|
||||||
# debuggers and isinstance() work predictably.
|
|
||||||
#
|
|
||||||
|
|
||||||
BASE_DIR = os.path.abspath(
|
|
||||||
os.path.join(os.path.dirname(__file__), '../../..')
|
|
||||||
)
|
|
||||||
|
|
||||||
if BASE_DIR not in sys.path:
|
|
||||||
sys.path.insert(0, BASE_DIR)
|
|
||||||
|
|
||||||
import ansible_mitogen.loaders
|
|
||||||
import ansible_mitogen.strategy
|
|
||||||
|
|
||||||
|
|
||||||
Base = ansible_mitogen.loaders.strategy_loader.get('host_pinned', class_only=True)
|
|
||||||
|
|
||||||
if Base is None:
|
|
||||||
raise ImportError(
|
|
||||||
'The host_pinned strategy is only available in Ansible 2.7 or newer.'
|
|
||||||
)
|
|
||||||
|
|
||||||
class StrategyModule(ansible_mitogen.strategy.StrategyMixin, Base):
|
|
||||||
pass
|
|
|
@ -1,62 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
|
|
||||||
#
|
|
||||||
# This is not the real Strategy implementation module, it simply exists as a
|
|
||||||
# proxy to the real module, which is loaded using Python's regular import
|
|
||||||
# mechanism, to prevent Ansible's PluginLoader from making up a fake name that
|
|
||||||
# results in ansible_mitogen plugin modules being loaded twice: once by
|
|
||||||
# PluginLoader with a name like "ansible.plugins.strategy.mitogen", which is
|
|
||||||
# stuffed into sys.modules even though attempting to import it will trigger an
|
|
||||||
# ImportError, and once under its canonical name, "ansible_mitogen.strategy".
|
|
||||||
#
|
|
||||||
# Therefore we have a proxy module that imports it under the real name, and
|
|
||||||
# sets up the duff PluginLoader-imported module to just contain objects from
|
|
||||||
# the real module, so duplicate types don't exist in memory, and things like
|
|
||||||
# debuggers and isinstance() work predictably.
|
|
||||||
#
|
|
||||||
|
|
||||||
BASE_DIR = os.path.abspath(
|
|
||||||
os.path.join(os.path.dirname(__file__), '../../..')
|
|
||||||
)
|
|
||||||
|
|
||||||
if BASE_DIR not in sys.path:
|
|
||||||
sys.path.insert(0, BASE_DIR)
|
|
||||||
|
|
||||||
import ansible_mitogen.loaders
|
|
||||||
import ansible_mitogen.strategy
|
|
||||||
|
|
||||||
|
|
||||||
Base = ansible_mitogen.loaders.strategy_loader.get('linear', class_only=True)
|
|
||||||
|
|
||||||
class StrategyModule(ansible_mitogen.strategy.StrategyMixin, Base):
|
|
||||||
pass
|
|
|
@ -1,358 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import atexit
|
|
||||||
import errno
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import signal
|
|
||||||
import socket
|
|
||||||
import sys
|
|
||||||
import time
|
|
||||||
|
|
||||||
try:
|
|
||||||
import faulthandler
|
|
||||||
except ImportError:
|
|
||||||
faulthandler = None
|
|
||||||
|
|
||||||
import mitogen
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.debug
|
|
||||||
import mitogen.master
|
|
||||||
import mitogen.parent
|
|
||||||
import mitogen.service
|
|
||||||
import mitogen.unix
|
|
||||||
import mitogen.utils
|
|
||||||
|
|
||||||
import ansible
|
|
||||||
import ansible.constants as C
|
|
||||||
import ansible_mitogen.logging
|
|
||||||
import ansible_mitogen.services
|
|
||||||
|
|
||||||
from mitogen.core import b
|
|
||||||
import ansible_mitogen.affinity
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
ANSIBLE_PKG_OVERRIDE = (
|
|
||||||
u"__version__ = %r\n"
|
|
||||||
u"__author__ = %r\n"
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
def clean_shutdown(sock):
|
|
||||||
"""
|
|
||||||
Shut the write end of `sock`, causing `recv` in the worker process to wake
|
|
||||||
up with a 0-byte read and initiate mux process exit, then wait for a 0-byte
|
|
||||||
read from the read end, which will occur after the the child closes the
|
|
||||||
descriptor on exit.
|
|
||||||
|
|
||||||
This is done using :mod:`atexit` since Ansible lacks any more sensible hook
|
|
||||||
to run code during exit, and unless some synchronization exists with
|
|
||||||
MuxProcess, debug logs may appear on the user's terminal *after* the prompt
|
|
||||||
has been printed.
|
|
||||||
"""
|
|
||||||
sock.shutdown(socket.SHUT_WR)
|
|
||||||
sock.recv(1)
|
|
||||||
|
|
||||||
|
|
||||||
def getenv_int(key, default=0):
|
|
||||||
"""
|
|
||||||
Get an integer-valued environment variable `key`, if it exists and parses
|
|
||||||
as an integer, otherwise return `default`.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
return int(os.environ.get(key, str(default)))
|
|
||||||
except ValueError:
|
|
||||||
return default
|
|
||||||
|
|
||||||
|
|
||||||
def save_pid(name):
|
|
||||||
"""
|
|
||||||
When debugging and profiling, it is very annoying to poke through the
|
|
||||||
process list to discover the currently running Ansible and MuxProcess IDs,
|
|
||||||
especially when trying to catch an issue during early startup. So here, if
|
|
||||||
a magic environment variable set, stash them in hidden files in the CWD::
|
|
||||||
|
|
||||||
alias muxpid="cat .ansible-mux.pid"
|
|
||||||
alias anspid="cat .ansible-controller.pid"
|
|
||||||
|
|
||||||
gdb -p $(muxpid)
|
|
||||||
perf top -p $(anspid)
|
|
||||||
"""
|
|
||||||
if os.environ.get('MITOGEN_SAVE_PIDS'):
|
|
||||||
with open('.ansible-%s.pid' % (name,), 'w') as fp:
|
|
||||||
fp.write(str(os.getpid()))
|
|
||||||
|
|
||||||
|
|
||||||
class MuxProcess(object):
|
|
||||||
"""
|
|
||||||
Implement a subprocess forked from the Ansible top-level, as a safe place
|
|
||||||
to contain the Mitogen IO multiplexer thread, keeping its use of the
|
|
||||||
logging package (and the logging package's heavy use of locks) far away
|
|
||||||
from the clutches of os.fork(), which is used continuously by the
|
|
||||||
multiprocessing package in the top-level process.
|
|
||||||
|
|
||||||
The problem with running the multiplexer in that process is that should the
|
|
||||||
multiplexer thread be in the process of emitting a log entry (and holding
|
|
||||||
its lock) at the point of fork, in the child, the first attempt to log any
|
|
||||||
log entry using the same handler will deadlock the child, as in the memory
|
|
||||||
image the child received, the lock will always be marked held.
|
|
||||||
|
|
||||||
See https://bugs.python.org/issue6721 for a thorough description of the
|
|
||||||
class of problems this worker is intended to avoid.
|
|
||||||
"""
|
|
||||||
|
|
||||||
#: In the top-level process, this references one end of a socketpair(),
|
|
||||||
#: which the MuxProcess blocks reading from in order to determine when
|
|
||||||
#: the master process dies. Once the read returns, the MuxProcess will
|
|
||||||
#: begin shutting itself down.
|
|
||||||
worker_sock = None
|
|
||||||
|
|
||||||
#: In the worker process, this references the other end of
|
|
||||||
#: :py:attr:`worker_sock`.
|
|
||||||
child_sock = None
|
|
||||||
|
|
||||||
#: In the top-level process, this is the PID of the single MuxProcess
|
|
||||||
#: that was spawned.
|
|
||||||
worker_pid = None
|
|
||||||
|
|
||||||
#: A copy of :data:`os.environ` at the time the multiplexer process was
|
|
||||||
#: started. It's used by mitogen_local.py to find changes made to the
|
|
||||||
#: top-level environment (e.g. vars plugins -- issue #297) that must be
|
|
||||||
#: applied to locally executed commands and modules.
|
|
||||||
original_env = None
|
|
||||||
|
|
||||||
#: In both processes, this is the temporary UNIX socket used for
|
|
||||||
#: forked WorkerProcesses to contact the MuxProcess
|
|
||||||
unix_listener_path = None
|
|
||||||
|
|
||||||
#: Singleton.
|
|
||||||
_instance = None
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def start(cls, _init_logging=True):
|
|
||||||
"""
|
|
||||||
Arrange for the subprocess to be started, if it is not already running.
|
|
||||||
|
|
||||||
The parent process picks a UNIX socket path the child will use prior to
|
|
||||||
fork, creates a socketpair used essentially as a semaphore, then blocks
|
|
||||||
waiting for the child to indicate the UNIX socket is ready for use.
|
|
||||||
|
|
||||||
:param bool _init_logging:
|
|
||||||
For testing, if :data:`False`, don't initialize logging.
|
|
||||||
"""
|
|
||||||
if cls.worker_sock is not None:
|
|
||||||
return
|
|
||||||
|
|
||||||
if faulthandler is not None:
|
|
||||||
faulthandler.enable()
|
|
||||||
|
|
||||||
mitogen.utils.setup_gil()
|
|
||||||
cls.unix_listener_path = mitogen.unix.make_socket_path()
|
|
||||||
cls.worker_sock, cls.child_sock = socket.socketpair()
|
|
||||||
atexit.register(lambda: clean_shutdown(cls.worker_sock))
|
|
||||||
mitogen.core.set_cloexec(cls.worker_sock.fileno())
|
|
||||||
mitogen.core.set_cloexec(cls.child_sock.fileno())
|
|
||||||
|
|
||||||
cls.profiling = os.environ.get('MITOGEN_PROFILING') is not None
|
|
||||||
if cls.profiling:
|
|
||||||
mitogen.core.enable_profiling()
|
|
||||||
if _init_logging:
|
|
||||||
ansible_mitogen.logging.setup()
|
|
||||||
|
|
||||||
cls.original_env = dict(os.environ)
|
|
||||||
cls.child_pid = os.fork()
|
|
||||||
if cls.child_pid:
|
|
||||||
save_pid('controller')
|
|
||||||
ansible_mitogen.logging.set_process_name('top')
|
|
||||||
ansible_mitogen.affinity.policy.assign_controller()
|
|
||||||
cls.child_sock.close()
|
|
||||||
cls.child_sock = None
|
|
||||||
mitogen.core.io_op(cls.worker_sock.recv, 1)
|
|
||||||
else:
|
|
||||||
save_pid('mux')
|
|
||||||
ansible_mitogen.logging.set_process_name('mux')
|
|
||||||
ansible_mitogen.affinity.policy.assign_muxprocess()
|
|
||||||
cls.worker_sock.close()
|
|
||||||
cls.worker_sock = None
|
|
||||||
self = cls()
|
|
||||||
self.worker_main()
|
|
||||||
|
|
||||||
def worker_main(self):
|
|
||||||
"""
|
|
||||||
The main function of for the mux process: setup the Mitogen broker
|
|
||||||
thread and ansible_mitogen services, then sleep waiting for the socket
|
|
||||||
connected to the parent to be closed (indicating the parent has died).
|
|
||||||
"""
|
|
||||||
self._setup_master()
|
|
||||||
self._setup_services()
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Let the parent know our listening socket is ready.
|
|
||||||
mitogen.core.io_op(self.child_sock.send, b('1'))
|
|
||||||
# Block until the socket is closed, which happens on parent exit.
|
|
||||||
mitogen.core.io_op(self.child_sock.recv, 1)
|
|
||||||
finally:
|
|
||||||
self.broker.shutdown()
|
|
||||||
self.broker.join()
|
|
||||||
|
|
||||||
# Test frameworks living somewhere higher on the stack of the
|
|
||||||
# original parent process may try to catch sys.exit(), so do a C
|
|
||||||
# level exit instead.
|
|
||||||
os._exit(0)
|
|
||||||
|
|
||||||
def _enable_router_debug(self):
|
|
||||||
if 'MITOGEN_ROUTER_DEBUG' in os.environ:
|
|
||||||
self.router.enable_debug()
|
|
||||||
|
|
||||||
def _enable_stack_dumps(self):
|
|
||||||
secs = getenv_int('MITOGEN_DUMP_THREAD_STACKS', default=0)
|
|
||||||
if secs:
|
|
||||||
mitogen.debug.dump_to_logger(secs=secs)
|
|
||||||
|
|
||||||
def _setup_simplejson(self, responder):
|
|
||||||
"""
|
|
||||||
We support serving simplejson for Python 2.4 targets on Ansible 2.3, at
|
|
||||||
least so the package's own CI Docker scripts can run without external
|
|
||||||
help, however newer versions of simplejson no longer support Python
|
|
||||||
2.4. Therefore override any installed/loaded version with a
|
|
||||||
2.4-compatible version we ship in the compat/ directory.
|
|
||||||
"""
|
|
||||||
responder.whitelist_prefix('simplejson')
|
|
||||||
|
|
||||||
# issue #536: must be at end of sys.path, in case existing newer
|
|
||||||
# version is already loaded.
|
|
||||||
compat_path = os.path.join(os.path.dirname(__file__), 'compat')
|
|
||||||
sys.path.append(compat_path)
|
|
||||||
|
|
||||||
for fullname, is_pkg, suffix in (
|
|
||||||
(u'simplejson', True, '__init__.py'),
|
|
||||||
(u'simplejson.decoder', False, 'decoder.py'),
|
|
||||||
(u'simplejson.encoder', False, 'encoder.py'),
|
|
||||||
(u'simplejson.scanner', False, 'scanner.py'),
|
|
||||||
):
|
|
||||||
path = os.path.join(compat_path, 'simplejson', suffix)
|
|
||||||
fp = open(path, 'rb')
|
|
||||||
try:
|
|
||||||
source = fp.read()
|
|
||||||
finally:
|
|
||||||
fp.close()
|
|
||||||
|
|
||||||
responder.add_source_override(
|
|
||||||
fullname=fullname,
|
|
||||||
path=path,
|
|
||||||
source=source,
|
|
||||||
is_pkg=is_pkg,
|
|
||||||
)
|
|
||||||
|
|
||||||
def _setup_responder(self, responder):
|
|
||||||
"""
|
|
||||||
Configure :class:`mitogen.master.ModuleResponder` to only permit
|
|
||||||
certain packages, and to generate custom responses for certain modules.
|
|
||||||
"""
|
|
||||||
responder.whitelist_prefix('ansible')
|
|
||||||
responder.whitelist_prefix('ansible_mitogen')
|
|
||||||
self._setup_simplejson(responder)
|
|
||||||
|
|
||||||
# Ansible 2.3 is compatible with Python 2.4 targets, however
|
|
||||||
# ansible/__init__.py is not. Instead, executor/module_common.py writes
|
|
||||||
# out a 2.4-compatible namespace package for unknown reasons. So we
|
|
||||||
# copy it here.
|
|
||||||
responder.add_source_override(
|
|
||||||
fullname='ansible',
|
|
||||||
path=ansible.__file__,
|
|
||||||
source=(ANSIBLE_PKG_OVERRIDE % (
|
|
||||||
ansible.__version__,
|
|
||||||
ansible.__author__,
|
|
||||||
)).encode(),
|
|
||||||
is_pkg=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
def _setup_master(self):
|
|
||||||
"""
|
|
||||||
Construct a Router, Broker, and mitogen.unix listener
|
|
||||||
"""
|
|
||||||
self.broker = mitogen.master.Broker(install_watcher=False)
|
|
||||||
self.router = mitogen.master.Router(
|
|
||||||
broker=self.broker,
|
|
||||||
max_message_size=4096 * 1048576,
|
|
||||||
)
|
|
||||||
self._setup_responder(self.router.responder)
|
|
||||||
mitogen.core.listen(self.broker, 'shutdown', self.on_broker_shutdown)
|
|
||||||
mitogen.core.listen(self.broker, 'exit', self.on_broker_exit)
|
|
||||||
self.listener = mitogen.unix.Listener(
|
|
||||||
router=self.router,
|
|
||||||
path=self.unix_listener_path,
|
|
||||||
backlog=C.DEFAULT_FORKS,
|
|
||||||
)
|
|
||||||
self._enable_router_debug()
|
|
||||||
self._enable_stack_dumps()
|
|
||||||
|
|
||||||
def _setup_services(self):
|
|
||||||
"""
|
|
||||||
Construct a ContextService and a thread to service requests for it
|
|
||||||
arriving from worker processes.
|
|
||||||
"""
|
|
||||||
self.pool = mitogen.service.Pool(
|
|
||||||
router=self.router,
|
|
||||||
services=[
|
|
||||||
mitogen.service.FileService(router=self.router),
|
|
||||||
mitogen.service.PushFileService(router=self.router),
|
|
||||||
ansible_mitogen.services.ContextService(self.router),
|
|
||||||
ansible_mitogen.services.ModuleDepService(self.router),
|
|
||||||
],
|
|
||||||
size=getenv_int('MITOGEN_POOL_SIZE', default=32),
|
|
||||||
)
|
|
||||||
LOG.debug('Service pool configured: size=%d', self.pool.size)
|
|
||||||
|
|
||||||
def on_broker_shutdown(self):
|
|
||||||
"""
|
|
||||||
Respond to broker shutdown by beginning service pool shutdown. Do not
|
|
||||||
join on the pool yet, since that would block the broker thread which
|
|
||||||
then cannot clean up pending handlers, which is required for the
|
|
||||||
threads to exit gracefully.
|
|
||||||
"""
|
|
||||||
# In normal operation we presently kill the process because there is
|
|
||||||
# not yet any way to cancel connect().
|
|
||||||
self.pool.stop(join=self.profiling)
|
|
||||||
|
|
||||||
def on_broker_exit(self):
|
|
||||||
"""
|
|
||||||
Respond to the broker thread about to exit by sending SIGTERM to
|
|
||||||
ourself. In future this should gracefully join the pool, but TERM is
|
|
||||||
fine for now.
|
|
||||||
"""
|
|
||||||
if not self.profiling:
|
|
||||||
# In normal operation we presently kill the process because there is
|
|
||||||
# not yet any way to cancel connect(). When profiling, threads
|
|
||||||
# including the broker must shut down gracefully, otherwise pstats
|
|
||||||
# won't be written.
|
|
||||||
os.kill(os.getpid(), signal.SIGTERM)
|
|
|
@ -1,928 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
"""
|
|
||||||
These classes implement execution for each style of Ansible module. They are
|
|
||||||
instantiated in the target context by way of target.py::run_module().
|
|
||||||
|
|
||||||
Each class in here has a corresponding Planner class in planners.py that knows
|
|
||||||
how to build arguments for it, preseed related data, etc.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import atexit
|
|
||||||
import codecs
|
|
||||||
import imp
|
|
||||||
import os
|
|
||||||
import re
|
|
||||||
import shlex
|
|
||||||
import shutil
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
import traceback
|
|
||||||
import types
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import ansible_mitogen.target # TODO: circular import
|
|
||||||
from mitogen.core import b
|
|
||||||
from mitogen.core import bytes_partition
|
|
||||||
from mitogen.core import str_partition
|
|
||||||
from mitogen.core import str_rpartition
|
|
||||||
from mitogen.core import to_text
|
|
||||||
|
|
||||||
try:
|
|
||||||
import ctypes
|
|
||||||
except ImportError:
|
|
||||||
# Python 2.4
|
|
||||||
ctypes = None
|
|
||||||
|
|
||||||
try:
|
|
||||||
import json
|
|
||||||
except ImportError:
|
|
||||||
# Python 2.4
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Cannot use cStringIO as it does not support Unicode.
|
|
||||||
from StringIO import StringIO
|
|
||||||
except ImportError:
|
|
||||||
from io import StringIO
|
|
||||||
|
|
||||||
try:
|
|
||||||
from shlex import quote as shlex_quote
|
|
||||||
except ImportError:
|
|
||||||
from pipes import quote as shlex_quote
|
|
||||||
|
|
||||||
# Absolute imports for <2.5.
|
|
||||||
logging = __import__('logging')
|
|
||||||
|
|
||||||
|
|
||||||
# Prevent accidental import of an Ansible module from hanging on stdin read.
|
|
||||||
import ansible.module_utils.basic
|
|
||||||
ansible.module_utils.basic._ANSIBLE_ARGS = '{}'
|
|
||||||
|
|
||||||
# For tasks that modify /etc/resolv.conf, non-Debian derivative glibcs cache
|
|
||||||
# resolv.conf at startup and never implicitly reload it. Cope with that via an
|
|
||||||
# explicit call to res_init() on each task invocation. BSD-alikes export it
|
|
||||||
# directly, Linux #defines it as "__res_init".
|
|
||||||
libc__res_init = None
|
|
||||||
if ctypes:
|
|
||||||
libc = ctypes.CDLL(None)
|
|
||||||
for symbol in 'res_init', '__res_init':
|
|
||||||
try:
|
|
||||||
libc__res_init = getattr(libc, symbol)
|
|
||||||
except AttributeError:
|
|
||||||
pass
|
|
||||||
|
|
||||||
iteritems = getattr(dict, 'iteritems', dict.items)
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
if mitogen.core.PY3:
|
|
||||||
shlex_split = shlex.split
|
|
||||||
else:
|
|
||||||
def shlex_split(s, comments=False):
|
|
||||||
return [mitogen.core.to_text(token)
|
|
||||||
for token in shlex.split(str(s), comments=comments)]
|
|
||||||
|
|
||||||
|
|
||||||
class EnvironmentFileWatcher(object):
    """
    Usually Ansible edits to /etc/environment and ~/.pam_environment are
    reflected in subsequent tasks if become:true or SSH multiplexing is
    disabled, due to sudo and/or SSH reinvoking pam_env. Rather than emulate
    existing semantics, do our best to ensure edits are always reflected.

    This can't perfectly replicate the existing behaviour, but it can safely
    update and remove keys that appear to originate in `path`, and that do not
    conflict with any existing environment key inherited from elsewhere.

    A more robust future approach may simply be to arrange for the persistent
    interpreter to restart when a change is detected.

    :param str path:
        Path to a pam_env-style environment file; ``~`` is expanded.
    """
    def __init__(self, path):
        self.path = os.path.expanduser(path)
        #: Inode data at time of last check.
        self._st = self._stat()
        #: List of inherited keys that appear to originate from this file
        #: (their current os.environ value matches the file's value).
        self._keys = [key for key, value in self._load()
                      if value == os.environ.get(key)]
        LOG.debug('%r installed; existing keys: %r', self, self._keys)

    def __repr__(self):
        return 'EnvironmentFileWatcher(%r)' % (self.path,)

    def _stat(self):
        # Returns None rather than raising when the file does not exist (or
        # is unreadable), so a missing file is a normal steady state.
        try:
            return os.stat(self.path)
        except OSError:
            return None

    def _load(self):
        """
        Return the file's `(key, value)` pairs as a list, or an empty list
        when the file cannot be opened.
        """
        try:
            fp = codecs.open(self.path, 'r', encoding='utf-8')
            try:
                return list(self._parse(fp))
            finally:
                fp.close()
        except IOError:
            return []

    def _parse(self, fp):
        """
        Parse lines in the simple KEY=value subset of the pam_env format; see
        linux-pam-1.3.1/modules/pam_env/pam_env.c#L207
        """
        for line in fp:
            # ' #export foo=some var ' -> ['#export', 'foo=some var ']
            bits = shlex_split(line, comments=True)
            if (not bits) or bits[0].startswith('#'):
                continue

            # A leading 'export' keyword is optional in these files.
            if bits[0] == u'export':
                bits.pop(0)

            key, sep, value = str_partition(u' '.join(bits), u'=')
            if key and sep:
                yield key, value

    def _on_file_changed(self):
        # Inject any key not already present in the environment; keys set
        # here are remembered so a later change can remove them again.
        LOG.debug('%r: file changed, reloading', self)
        for key, value in self._load():
            if key in os.environ:
                LOG.debug('%r: existing key %r=%r exists, not setting %r',
                          self, key, os.environ[key], value)
            else:
                LOG.debug('%r: setting key %r to %r', self, key, value)
                self._keys.append(key)
                os.environ[key] = value

    def _remove_existing(self):
        """
        When a change is detected, remove keys that existed in the old file.
        """
        for key in self._keys:
            if key in os.environ:
                LOG.debug('%r: removing old key %r', self, key)
                del os.environ[key]
        self._keys = []

    def check(self):
        """
        Compare the current :func:`os.stat` of the pam_env style environment
        file against the result recorded at the previous check. If the file
        changed, appeared, or disappeared since then, remove keys previously
        injected from it and (when it still exists) reload its contents into
        the environment.
        """
        st = self._stat()
        if self._st == st:
            return

        self._st = st
        self._remove_existing()

        if st is None:
            LOG.debug('%r: file has disappeared', self)
        else:
            self._on_file_changed()
|
|
||||||
|
|
||||||
# Singletons watching the two pam_env-style files whose edits Ansible users
# expect to take effect; polled by Runner._setup_environ() before each task.
_pam_env_watcher = EnvironmentFileWatcher('~/.pam_environment')
_etc_env_watcher = EnvironmentFileWatcher('/etc/environment')
|
|
||||||
|
|
||||||
|
|
||||||
def utf8(s):
    """
    Coerce an object to bytes if it is Unicode.

    Non-text objects (including bytes) are returned unmodified.
    """
    if not isinstance(s, mitogen.core.UnicodeType):
        return s
    return s.encode('utf-8')
|
|
||||||
|
|
||||||
|
|
||||||
def reopen_readonly(fp):
    """
    Replace the file descriptor belonging to the file object `fp` with one
    open on the same file (`fp.name`), but opened with :py:data:`os.O_RDONLY`.
    This enables temporary files to be executed on Linux, which usually throws
    ``ETXTBSY`` if any writeable handle exists pointing to a file passed to
    `execve()`.
    """
    fd = os.open(fp.name, os.O_RDONLY)
    # dup2() atomically closes fp's old descriptor and substitutes the
    # read-only one, so fp remains a usable (read-only) file object.
    os.dup2(fd, fp.fileno())
    os.close(fd)
|
|
||||||
|
|
||||||
|
|
||||||
class Runner(object):
    """
    Ansible module runner. After instantiation (with kwargs supplied by the
    corresponding Planner), `.run()` is invoked, upon which `setup()`,
    `_run()`, and `revert()` are invoked, with the return value of `_run()`
    returned by `run()`.

    Subclasses may override `_run()` and extend `setup()` and `revert()`.

    :param str module:
        Name of the module to execute, e.g. "shell"
    :param mitogen.core.Context service_context:
        Context to which we should direct FileService calls. For now, always
        the connection multiplexer process on the controller.
    :param str json_args:
        Ansible module arguments. A mixture of user and internal keys created
        by :meth:`ansible.plugins.action.ActionBase._execute_module`.

        This is passed as a string rather than a dict in order to mimic the
        implicit bytes/str conversion behaviour of a 2.x controller running
        against a 3.x target.
    :param str good_temp_dir:
        The writeable temporary directory for this user account reported by
        :func:`ansible_mitogen.target.init_child` passed via the controller.
        This is specified explicitly to remain compatible with Ansible<2.5, and
        for forked tasks where init_child never runs.
    :param dict extra_env:
        Environment variables applied before `env`; entries from `env` win on
        conflict.
    :param dict env:
        Additional environment variables to set during the run. Keys with
        :data:`None` are unset if present.
    :param str cwd:
        If not :data:`None`, change to this directory before executing.
    :param mitogen.core.ExternalContext econtext:
        When `detach` is :data:`True`, a reference to the ExternalContext the
        runner is executing in.
    :param bool detach:
        When :data:`True`, indicate the runner should detach the context from
        its parent after setup has completed successfully.
    """
    def __init__(self, module, service_context, json_args, good_temp_dir,
                 extra_env=None, cwd=None, env=None, econtext=None,
                 detach=False):
        self.module = module
        self.service_context = service_context
        self.econtext = econtext
        self.detach = detach
        self.args = json.loads(mitogen.core.to_text(json_args))
        self.good_temp_dir = good_temp_dir
        self.extra_env = extra_env
        self.env = env
        self.cwd = cwd
        #: If not :data:`None`, :meth:`get_temp_dir` had to create a temporary
        #: directory for this run, because we're in an asynchronous task, or
        #: because the originating action did not create a directory.
        self._temp_dir = None

    def get_temp_dir(self):
        """
        Return the task's temporary directory: the one the action plugin
        already created (``_ansible_tmpdir``) when present, otherwise a
        lazily-created private directory under `good_temp_dir`.
        """
        path = self.args.get('_ansible_tmpdir')
        if path is not None:
            return path

        if self._temp_dir is None:
            self._temp_dir = tempfile.mkdtemp(
                prefix='ansible_mitogen_runner_',
                dir=self.good_temp_dir,
            )

        return self._temp_dir

    def revert_temp_dir(self):
        """
        Recursively delete the private temporary directory, if one was created
        by :meth:`get_temp_dir`.
        """
        if self._temp_dir is not None:
            ansible_mitogen.target.prune_tree(self._temp_dir)
            self._temp_dir = None

    def setup(self):
        """
        Prepare for running a module, including fetching necessary dependencies
        from the parent, as :meth:`run` may detach prior to beginning
        execution. The base implementation simply prepares the environment.
        """
        self._setup_cwd()
        self._setup_environ()

    def _setup_cwd(self):
        """
        For situations like sudo to a non-privileged account, CWD could be
        $HOME of the old account, which could have mode go=, which means it is
        impossible to restore the old directory, so don't even try.
        """
        if self.cwd:
            os.chdir(self.cwd)

    def _setup_environ(self):
        """
        Apply changes from /etc/environment files before creating a
        TemporaryEnvironment to snapshot environment state prior to module run.
        """
        _pam_env_watcher.check()
        _etc_env_watcher.check()
        # Merge order: extra_env first, then per-task env overrides it.
        env = dict(self.extra_env or {})
        if self.env:
            env.update(self.env)
        self._env = TemporaryEnvironment(env)

    def revert(self):
        """
        Revert any changes made to the process after running a module. The base
        implementation simply restores the original environment.
        """
        self._env.revert()
        self.revert_temp_dir()

    def _run(self):
        """
        The _run() method is expected to return a dictionary in the form of
        ActionBase._low_level_execute_command() output, i.e. having::

            {
                "rc": int,
                "stdout": "stdout data",
                "stderr": "stderr data"
            }
        """
        raise NotImplementedError()

    def run(self):
        """
        Set up the process environment in preparation for running an Ansible
        module. This monkey-patches the Ansible libraries in various places to
        prevent it from trying to kill the process on completion, and to
        prevent it from reading sys.stdin.

        :returns:
            Module result dictionary.
        """
        self.setup()
        if self.detach:
            # Asynchronous task: disconnect from the parent once dependencies
            # were fetched during setup().
            self.econtext.detach()

        try:
            return self._run()
        finally:
            # revert() must always run; modules mutate process-global state.
            self.revert()
|
|
||||||
|
|
||||||
|
|
||||||
class AtExitWrapper(object):
    """
    issue #397, #454: Newer Ansibles use :func:`atexit.register` to trigger
    tmpdir cleanup when AnsibleModule.tmpdir is responsible for creating its
    own temporary directory, however with Mitogen processes are preserved
    across tasks, meaning cleanup must happen earlier.

    Patch :func:`atexit.register`, catching :func:`shutil.rmtree` calls so they
    can be executed on task completion, rather than on process shutdown.
    """
    # Wrapped in a dict to avoid instance method decoration.
    original = {
        'register': atexit.register
    }

    def __init__(self):
        assert atexit.register == self.original['register'], \
            "AtExitWrapper installed twice."
        atexit.register = self._atexit__register
        #: Captured (func, args, kwargs) rmtree registrations, replayed by
        #: run_callbacks().
        self.deferred = []

    def revert(self):
        """
        Restore the original :func:`atexit.register`.
        """
        assert atexit.register == self._atexit__register, \
            "AtExitWrapper not installed."
        atexit.register = self.original['register']

    def run_callbacks(self):
        """
        Invoke each deferred callback in LIFO order, logging (and otherwise
        ignoring) any exception so one failed cleanup cannot stop the rest.
        """
        while self.deferred:
            func, pos_args, kw_args = self.deferred.pop()
            try:
                func(*pos_args, **kw_args)
            except Exception:
                LOG.exception('While running atexit callbacks')

    def _atexit__register(self, func, *targs, **kwargs):
        """
        Intercept :func:`atexit.register` calls, diverting any to
        :func:`shutil.rmtree` into a private list.
        """
        if func != shutil.rmtree:
            # Anything else really is process-lifetime cleanup: pass through.
            self.original['register'](func, *targs, **kwargs)
            return
        self.deferred.append((func, targs, kwargs))
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleUtilsImporter(object):
    """
    PEP 302-style sys.meta_path importer serving module_utils files whose
    source is fetched on demand from the controller via
    :func:`ansible_mitogen.target.get_small_file`.

    :param mitogen.core.Context context:
        Context from which module source is fetched.
    :param list module_utils:
        List of `(fullname, path, is_pkg)` tuples.
    """
    def __init__(self, context, module_utils):
        self._context = context
        self._by_fullname = dict(
            (fullname, (path, is_pkg))
            for fullname, path, is_pkg in module_utils
        )
        #: Fullnames successfully imported through us; purged from
        #: sys.modules again by revert().
        self._loaded = set()
        sys.meta_path.insert(0, self)

    def revert(self):
        """
        Uninstall the importer and forget every module it loaded.
        """
        sys.meta_path.remove(self)
        for fullname in self._loaded:
            sys.modules.pop(fullname, None)

    def find_module(self, fullname, path=None):
        # PEP 302 finder: only claim names explicitly registered with us.
        if fullname in self._by_fullname:
            return self

    def load_module(self, fullname):
        """
        PEP 302 loader: fetch, compile and exec the registered source for
        `fullname`, returning the resulting module object.
        """
        path, is_pkg = self._by_fullname[fullname]
        source = ansible_mitogen.target.get_small_file(self._context, path)
        code = compile(source, path, 'exec', 0, 1)
        # setdefault() so a concurrent or recursive import reuses the same
        # module object rather than replacing it.
        mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
        mod.__file__ = "master:%s" % (path,)
        mod.__loader__ = self
        if is_pkg:
            mod.__path__ = []
            mod.__package__ = str(fullname)
        else:
            # Parent package name; str() keeps the type native on Python 2.
            mod.__package__ = str(str_rpartition(to_text(fullname), '.')[0])
        exec(code, mod.__dict__)
        self._loaded.add(fullname)
        return mod
|
|
||||||
|
|
||||||
|
|
||||||
class TemporaryEnvironment(object):
    """
    Apply environment changes from `env` until :meth:`revert` is called. Values
    in the dict may be :data:`None` to indicate the relevant key should be
    deleted.
    """
    def __init__(self, env=None):
        #: Snapshot of the pre-existing environment, restored by revert().
        self.original = dict(os.environ)
        self.env = env or {}
        for key, value in iteritems(self.env):
            key = mitogen.core.to_text(key)
            # Bug fix: test for deletion *before* text conversion.
            # Previously the value was passed through to_text() first, which
            # turns None into the string u'None', making this branch
            # unreachable -- a None value set the variable to "None" instead
            # of removing it as documented above.
            if value is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = str(mitogen.core.to_text(value))

    def revert(self):
        """
        Revert changes made by the module to the process environment. This must
        always run, as some modules (e.g. git.py) set variables like GIT_SSH
        that must be cleared out between runs.
        """
        os.environ.clear()
        os.environ.update(self.original)
|
|
||||||
|
|
||||||
|
|
||||||
class TemporaryArgv(object):
    """
    Replace :data:`sys.argv` with a stringified copy of `argv` until
    :meth:`revert` is called.
    """
    def __init__(self, argv):
        #: Copy of sys.argv at construction time, restored by revert().
        self.original = sys.argv[:]
        sys.argv[:] = [str(arg) for arg in argv]

    def revert(self):
        """
        Restore the :data:`sys.argv` captured at construction time.
        """
        sys.argv[:] = self.original
|
|
||||||
|
|
||||||
|
|
||||||
class NewStyleStdio(object):
    """
    Patch ansible.module_utils.basic argument globals: capture stdout/stderr
    into StringIO buffers, feed the JSON-encoded arguments via both
    ``_ANSIBLE_ARGS`` and stdin, and point ``get_module_path()`` at the
    runner's temporary directory.
    """
    def __init__(self, args, temp_dir):
        self.temp_dir = temp_dir
        self.original_stdout = sys.stdout
        self.original_stderr = sys.stderr
        self.original_stdin = sys.stdin
        sys.stdout = StringIO()
        sys.stderr = StringIO()
        encoded = json.dumps({'ANSIBLE_MODULE_ARGS': args})
        # _ANSIBLE_ARGS expects bytes, stdin expects text.
        ansible.module_utils.basic._ANSIBLE_ARGS = utf8(encoded)
        sys.stdin = StringIO(mitogen.core.to_text(encoded))

        # get_module_path() may be absent on some Ansible versions; preserve
        # whatever was there (possibly None) so revert() can restore it.
        self.original_get_path = getattr(ansible.module_utils.basic,
                                         'get_module_path', None)
        ansible.module_utils.basic.get_module_path = self._get_path

    def _get_path(self):
        # Replacement for get_module_path(): modules see our temp dir.
        return self.temp_dir

    def revert(self):
        """
        Restore the stdio streams and ``get_module_path``, and reset
        ``_ANSIBLE_ARGS`` so stale arguments cannot leak into a later run.
        """
        ansible.module_utils.basic.get_module_path = self.original_get_path
        sys.stdout = self.original_stdout
        sys.stderr = self.original_stderr
        sys.stdin = self.original_stdin
        ansible.module_utils.basic._ANSIBLE_ARGS = '{}'
|
|
||||||
|
|
||||||
|
|
||||||
class ProgramRunner(Runner):
    """
    Base class for runners that run external programs.

    :param str path:
        Absolute path to the program file on the master, as it can be retrieved
        via :class:`mitogen.service.FileService`.
    :param bool emulate_tty:
        If :data:`True`, execute the program with `stdout` and `stderr` merged
        into a single pipe, emulating Ansible behaviour when an SSH TTY is in
        use.
    """
    def __init__(self, path, emulate_tty=None, **kwargs):
        super(ProgramRunner, self).__init__(**kwargs)
        self.emulate_tty = emulate_tty
        self.path = path

    def setup(self):
        super(ProgramRunner, self).setup()
        self._setup_program()

    def _get_program_filename(self):
        """
        Return the filename used for the program on disk. Ansible uses the
        original filename for non-Ansiballz runs, and "ansible_module_" +
        filename for Ansiballz runs.
        """
        return os.path.basename(self.path)

    # File object for the on-disk program copy; None until _setup_program()
    # runs, then held open (read-only) until revert().
    program_fp = None

    def _setup_program(self):
        """
        Create a temporary file containing the program code. The code is
        fetched via :meth:`_get_program`.
        """
        filename = self._get_program_filename()
        path = os.path.join(self.get_temp_dir(), filename)
        self.program_fp = open(path, 'wb')
        self.program_fp.write(self._get_program())
        self.program_fp.flush()
        # Must be executable, and no writeable descriptor may remain open,
        # otherwise execve() can fail with ETXTBSY on Linux.
        os.chmod(self.program_fp.name, int('0700', 8))
        reopen_readonly(self.program_fp)

    def _get_program(self):
        """
        Fetch the module binary from the master if necessary.
        """
        return ansible_mitogen.target.get_small_file(
            context=self.service_context,
            path=self.path,
        )

    def _get_program_args(self):
        """
        Return any arguments to pass to the program.
        """
        return []

    def revert(self):
        """
        Delete the temporary program file.
        """
        if self.program_fp:
            self.program_fp.close()
        super(ProgramRunner, self).revert()

    def _get_argv(self):
        """
        Return the final argument vector used to execute the program.
        """
        return [
            self.args.get('_ansible_shell_executable', '/bin/sh'),
            '-c',
            self._get_shell_fragment(),
        ]

    def _get_shell_fragment(self):
        # Quoted program path followed by its individually quoted arguments.
        return "%s %s" % (
            shlex_quote(self.program_fp.name),
            ' '.join(map(shlex_quote, self._get_program_args())),
        )

    def _run(self):
        """
        Execute the program, returning the rc/stdout/stderr dict described by
        :meth:`Runner._run`. A failure to execute at all is reported as rc=1
        with the exception text on stderr.
        """
        try:
            rc, stdout, stderr = ansible_mitogen.target.exec_args(
                args=self._get_argv(),
                emulate_tty=self.emulate_tty,
            )
        except Exception:
            LOG.exception('While running %s', self._get_argv())
            e = sys.exc_info()[1]
            return {
                u'rc': 1,
                u'stdout': u'',
                u'stderr': u'%s: %s' % (type(e), e),
            }

        return {
            u'rc': rc,
            u'stdout': mitogen.core.to_text(stdout),
            u'stderr': mitogen.core.to_text(stderr),
        }
|
|
||||||
|
|
||||||
|
|
||||||
class ArgsFileRunner(Runner):
    """
    Mix-in for runners whose program expects its arguments in a file whose
    path is appended to the argument vector, rather than on stdin or embedded
    in the program source.
    """
    def setup(self):
        super(ArgsFileRunner, self).setup()
        self._setup_args()

    def _setup_args(self):
        """
        Create a temporary file containing the module's arguments. The
        arguments are formatted via :meth:`_get_args_contents`.
        """
        self.args_fp = tempfile.NamedTemporaryFile(
            prefix='ansible_mitogen',
            suffix='-args',
            dir=self.get_temp_dir(),
        )
        self.args_fp.write(utf8(self._get_args_contents()))
        self.args_fp.flush()
        # Close the writeable descriptor to the *arguments* file. The
        # previous code reopened self.program_fp here, which appears to be a
        # copy/paste artifact: program_fp is already reopened read-only by
        # ProgramRunner._setup_program(), while the args file's writeable
        # handle was left open.
        reopen_readonly(self.args_fp)

    def _get_args_contents(self):
        """
        Return the module arguments formatted as JSON.
        """
        return json.dumps(self.args)

    def _get_program_args(self):
        """
        Return the argument vector suffix: the path of the arguments file.
        """
        return [self.args_fp.name]

    def revert(self):
        """
        Delete the temporary argument file.
        """
        self.args_fp.close()
        super(ArgsFileRunner, self).revert()
|
|
||||||
|
|
||||||
|
|
||||||
class BinaryRunner(ArgsFileRunner, ProgramRunner):
    """
    Execute a binary module: the program file is copied to disk and invoked
    with the path of a JSON-formatted arguments file as its argument.
    """
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class ScriptRunner(ProgramRunner):
    """
    Runner for modules shipped as interpreter scripts: the script's hashbang
    line is rewritten from `interpreter_fragment`, and execution happens via
    /bin/sh with that fragment prefixed to the quoted script path.

    :param str interpreter_fragment:
        The ansible_*_interpreter value used as the execution prefix.
    :param bool is_python:
        When :data:`True`, a UTF-8 coding cookie is inserted after the
        rewritten hashbang line.
    """
    def __init__(self, interpreter_fragment, is_python, **kwargs):
        super(ScriptRunner, self).__init__(**kwargs)
        self.interpreter_fragment = interpreter_fragment
        self.is_python = is_python

    # Coding cookie line inserted into Python scripts; see _rewrite_source().
    b_ENCODING_STRING = b('# -*- coding: utf-8 -*-')

    def _get_program(self):
        # Rewrite the fetched source before it is written to disk.
        return self._rewrite_source(
            super(ScriptRunner, self)._get_program()
        )

    def _get_argv(self):
        return [
            self.args.get('_ansible_shell_executable', '/bin/sh'),
            '-c',
            self._get_shell_fragment(),
        ]

    def _get_shell_fragment(self):
        """
        Scripts are eligible for having their hashbang line rewritten, and to
        be executed via /bin/sh using the ansible_*_interpreter value used as a
        shell fragment prefixing the invocation.
        """
        return "%s %s %s" % (
            self.interpreter_fragment,
            shlex_quote(self.program_fp.name),
            ' '.join(map(shlex_quote, self._get_program_args())),
        )

    def _rewrite_source(self, s):
        """
        Mutate the source according to the per-task parameters.
        """
        # While Ansible rewrites the #! using ansible_*_interpreter, it is
        # never actually used to execute the script, instead it is a shell
        # fragment consumed by shell/__init__.py::build_module_command().
        new = [b('#!') + utf8(self.interpreter_fragment)]
        if self.is_python:
            new.append(self.b_ENCODING_STRING)

        # Drop the original first (hashbang) line; keep the rest verbatim.
        _, _, rest = bytes_partition(s, b('\n'))
        new.append(rest)
        return b('\n').join(new)
|
|
||||||
|
|
||||||
|
|
||||||
class NewStyleRunner(ScriptRunner):
    """
    Execute a new-style Ansible module, where Module Replacer-related tricks
    aren't required: the module source is compiled and exec'd in this process
    as ``__main__``, with stdio, argv and atexit patched around the call.
    """
    #: path => new-style module bytecode.
    _code_by_path = {}

    def __init__(self, module_map, **kwargs):
        """
        :param dict module_map:
            Mapping with ``'custom'`` (a list of `(fullname, path, is_pkg)`
            tuples) and ``'builtin'`` (a list of module names) describing the
            module's module_utils dependencies.
        """
        super(NewStyleRunner, self).__init__(**kwargs)
        self.module_map = module_map

    def _setup_imports(self):
        """
        Ensure the local importer and PushFileService has everything for the
        Ansible module before setup() completes, but before detach() is called
        in an asynchronous task.

        The master automatically streams modules towards us concurrent to the
        runner invocation, however there is no public API to synchronize on the
        completion of those preloads. Instead simply reuse the importer's
        synchronization mechanism by importing everything the module will need
        prior to detaching.
        """
        for fullname, _, _ in self.module_map['custom']:
            mitogen.core.import_module(fullname)
        for fullname in self.module_map['builtin']:
            mitogen.core.import_module(fullname)

    def _setup_excepthook(self):
        """
        Starting with Ansible 2.6, some modules (file.py) install a
        sys.excepthook and never clean it up. So we must preserve the original
        excepthook and restore it after the run completes.
        """
        self.original_excepthook = sys.excepthook

    def setup(self):
        super(NewStyleRunner, self).setup()

        self._stdio = NewStyleStdio(self.args, self.get_temp_dir())
        # It is possible that not supplying the script filename will break some
        # module, but this has never been a bug report. Instead act like an
        # interpreter that had its script piped on stdin.
        self._argv = TemporaryArgv([''])
        self._importer = ModuleUtilsImporter(
            context=self.service_context,
            module_utils=self.module_map['custom'],
        )
        self._setup_imports()
        self._setup_excepthook()
        self.atexit_wrapper = AtExitWrapper()
        if libc__res_init:
            # Flush the resolver state in case /etc/resolv.conf changed since
            # this interpreter started.
            libc__res_init()

    def _revert_excepthook(self):
        sys.excepthook = self.original_excepthook

    def revert(self):
        # NOTE(review): self._importer.revert() is never called here, so the
        # ModuleUtilsImporter stays installed on sys.meta_path across tasks —
        # confirm whether this is intentional.
        self.atexit_wrapper.revert()
        self._argv.revert()
        self._stdio.revert()
        self._revert_excepthook()
        super(NewStyleRunner, self).revert()

    def _get_program_filename(self):
        """
        See ProgramRunner._get_program_filename().
        """
        return 'ansible_module_' + os.path.basename(self.path)

    def _setup_args(self):
        # Arguments are delivered in-process via NewStyleStdio; no args file.
        pass

    # issue #555: in old times it was considered good form to reload sys and
    # change the default encoding. This hack was removed from Ansible long ago,
    # but not before permeating into many third party modules.
    PREHISTORIC_HACK_RE = re.compile(
        b(r'reload\s*\(\s*sys\s*\)\s*'
          r'sys\s*\.\s*setdefaultencoding\([^)]+\)')
    )

    def _setup_program(self):
        # Unlike ProgramRunner, the source is kept in memory and exec'd
        # rather than written to disk.
        source = ansible_mitogen.target.get_small_file(
            context=self.service_context,
            path=self.path,
        )
        self.source = self.PREHISTORIC_HACK_RE.sub(b(''), source)

    def _get_code(self):
        """
        Compile the module source, memoizing the code object process-wide by
        path in :attr:`_code_by_path`.
        """
        try:
            return self._code_by_path[self.path]
        except KeyError:
            return self._code_by_path.setdefault(self.path, compile(
                # Py2.4 doesn't support kwargs.
                self.source,            # source
                "master:" + self.path,  # filename
                'exec',                 # mode
                0,                      # flags
                True,                   # dont_inherit
            ))

    if mitogen.core.PY3:
        main_module_name = '__main__'
    else:
        # Python 2 requires a native (byte) string for a module name.
        main_module_name = b('__main__')

    def _handle_magic_exception(self, mod, exc):
        """
        Beginning with Ansible >2.6, some modules (file.py) install a
        sys.excepthook which is a closure over AnsibleModule, redirecting the
        magical exception to AnsibleModule.fail_json().

        For extra special needs bonus points, the class is not defined in
        module_utils, but is defined in the module itself, meaning there is no
        type for isinstance() that outlasts the invocation.
        """
        klass = getattr(mod, 'AnsibleModuleError', None)
        if klass and isinstance(exc, klass):
            mod.module.fail_json(**exc.results)

    def _run_code(self, code, mod):
        """
        Exec `code` with `mod`'s namespace as globals, giving
        :meth:`_handle_magic_exception` a chance before re-raising.
        """
        try:
            if mitogen.core.PY3:
                exec(code, vars(mod))
            else:
                # The Py2 'exec' statement is a syntax error under Py3, so it
                # must itself be hidden inside exec().
                exec('exec code in vars(mod)')
        except Exception:
            self._handle_magic_exception(mod, sys.exc_info()[1])
            raise

    def _run(self):
        """
        Execute the module in-process and synthesize the rc/stdout/stderr
        dict from SystemExit and the captured stdio buffers.
        """
        mod = types.ModuleType(self.main_module_name)
        mod.__package__ = None
        # Some Ansible modules use __file__ to find the Ansiballz temporary
        # directory. We must provide some temporary path in __file__, but we
        # don't want to pointlessly write the module to disk when it never
        # actually needs to exist. So just pass the filename as it would exist.
        mod.__file__ = os.path.join(
            self.get_temp_dir(),
            'ansible_module_' + os.path.basename(self.path),
        )

        code = self._get_code()
        # NOTE(review): rc stays 2 when the module body returns without
        # raising SystemExit; presumably exit_json()/fail_json() always raise
        # — confirm.
        rc = 2
        try:
            try:
                self._run_code(code, mod)
            except SystemExit:
                exc = sys.exc_info()[1]
                rc = exc.args[0]
            except Exception:
                # This writes to stderr by default.
                traceback.print_exc()
                rc = 1

        finally:
            # Emulate process exit: replay the deferred atexit rmtree calls.
            self.atexit_wrapper.run_callbacks()

        return {
            u'rc': rc,
            u'stdout': mitogen.core.to_text(sys.stdout.getvalue()),
            u'stderr': mitogen.core.to_text(sys.stderr.getvalue()),
        }
|
|
||||||
|
|
||||||
|
|
||||||
class JsonArgsRunner(ScriptRunner):
    """
    Execute a JSONARGS-style module: a script whose source contains the
    literal placeholder below, replaced before execution with the
    JSON-encoded task arguments.
    """
    JSON_ARGS = b('<<INCLUDE_ANSIBLE_MODULE_JSON_ARGS>>')

    def _get_args_contents(self):
        # Encoded to bytes so replace() below operates on like types.
        return json.dumps(self.args).encode()

    def _rewrite_source(self, s):
        # Splice the arguments in after the base class has rewritten the
        # hashbang line.
        return (
            super(JsonArgsRunner, self)._rewrite_source(s)
            .replace(self.JSON_ARGS, self._get_args_contents())
        )
|
|
||||||
|
|
||||||
|
|
||||||
class WantJsonRunner(ArgsFileRunner, ScriptRunner):
    """
    Execute a WANT_JSON module: a script that receives the path of a file
    containing the JSON-encoded task arguments on its command line.
    """
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class OldStyleRunner(ArgsFileRunner, ScriptRunner):
    """
    Execute an old-style module: a script receiving its arguments as a
    "key=value key=value" string in a file passed on the command line.
    """
    def _get_args_contents(self):
        """
        Mimic the argument formatting behaviour of
        ActionBase._execute_module().
        """
        return ' '.join(
            '%s=%s' % (key, shlex_quote(str(self.args[key])))
            for key in self.args
        ) + ' '  # Bug-for-bug :(
|
|
|
@ -1,537 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
"""
|
|
||||||
Classes in this file define Mitogen 'services' that run (initially) within the
|
|
||||||
connection multiplexer process that is forked off the top-level controller
|
|
||||||
process.
|
|
||||||
|
|
||||||
Once a worker process connects to a multiplexer process
|
|
||||||
(Connection._connect()), it communicates with these services to establish new
|
|
||||||
connections, grant access to files by children, and register for notification
|
|
||||||
when a child has completed a job.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import os.path
|
|
||||||
import sys
|
|
||||||
import threading
|
|
||||||
|
|
||||||
import ansible.constants
|
|
||||||
|
|
||||||
import mitogen
|
|
||||||
import mitogen.service
|
|
||||||
import mitogen.utils
|
|
||||||
import ansible_mitogen.loaders
|
|
||||||
import ansible_mitogen.module_finder
|
|
||||||
import ansible_mitogen.target
|
|
||||||
|
|
||||||
|
|
||||||
# Module-level logger for this service module.
LOG = logging.getLogger(__name__)

# Force load of plugin to ensure ConfigManager has definitions loaded. Done
# during module import to ensure a single-threaded environment; PluginLoader
# is not thread-safe.
ansible_mitogen.loaders.shell_loader.get('sh')
|
|
||||||
|
|
||||||
if sys.version_info[0] == 3:
    def reraise(tp, value, tb):
        """
        Python 3 replacement for the Python 2 ``raise tp, value, tb``
        statement: re-raise exception instance `value` of type `tp` with
        traceback `tb` attached.
        """
        if value is None:
            # No instance supplied; construct one from the type.
            value = tp()
        if value.__traceback__ is not tb:
            raise value.with_traceback(tb)
        raise value
else:
    # Python 2 only: the three-argument raise form is a syntax error on
    # Python 3, so it must be hidden from the compiler inside exec().
    exec(
        "def reraise(tp, value, tb=None):\n"
        "    raise tp, value, tb\n"
    )
|
|
||||||
|
|
||||||
|
|
||||||
def _get_candidate_temp_dirs():
    """
    Build the list of candidate directories in which the target may create its
    temporary directory: the configured ``remote_tmp`` followed by any
    ``system_tmpdirs``, cast to plain types suitable for serialization.
    """
    try:
        # >=2.5: 'sh' shell plug-in options are exposed via the config
        # system's plugin options API.
        options = ansible.constants.config.get_plugin_options('shell', 'sh')
        remote_tmp = options.get('remote_tmp') or ansible.constants.DEFAULT_REMOTE_TMP
        system_tmpdirs = options.get('system_tmpdirs', ('/var/tmp', '/tmp'))
    except AttributeError:
        # 2.3: no plugin options API; fall back to the legacy constant and
        # hard-wired defaults.
        remote_tmp = ansible.constants.DEFAULT_REMOTE_TMP
        system_tmpdirs = ('/var/tmp', '/tmp')

    # cast() converts Ansible's string subclasses into plain picklable types.
    return mitogen.utils.cast([remote_tmp] + list(system_tmpdirs))
|
|
||||||
|
|
||||||
|
|
||||||
def key_from_dict(**kwargs):
    """
    Return a unique string representation of a dict as quickly as possible.
    Used to generate deduplication keys from a request.
    """
    def emit(node, out):
        # Depth-first walk, visiting children in reverse order so output
        # matches the historical stack-based traversal.
        if isinstance(node, dict):
            for item in sorted(node.items(), reverse=True):
                emit(item, out)
        elif isinstance(node, (list, tuple)):
            for item in reversed(node):
                emit(item, out)
        else:
            out.append(str(node))

    parts = []
    emit(kwargs, parts)
    return ''.join(parts)
|
|
||||||
|
|
||||||
|
|
||||||
class Error(Exception):
    """
    Base exception for errors raised by this module, e.g. when a connection
    specification names an unsupported connection method.
    """
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class ContextService(mitogen.service.Service):
    """
    Used by workers to fetch the single Context instance corresponding to a
    connection configuration, creating the matching connection if it does not
    exist.

    For connection methods and their parameters, see:
        https://mitogen.readthedocs.io/en/latest/api.html#context-factories

    This concentrates connections in the top-level process, which may become a
    bottleneck. The bottleneck can be removed using per-CPU connection
    processes and arranging for the worker to select one according to a hash of
    the connection parameters (sharding).
    """
    #: Maximum number of interpreters per `via` parent before an unreferenced
    #: one is recycled; overridable via $MITOGEN_MAX_INTERPRETERS.
    max_interpreters = int(os.getenv('MITOGEN_MAX_INTERPRETERS', '20'))

    def __init__(self, *args, **kwargs):
        """
        Initialize all bookkeeping state. Every mapping below is guarded by
        :attr:`_lock`, as service methods may be invoked concurrently.
        """
        super(ContextService, self).__init__(*args, **kwargs)
        # Guards all of the mappings below.
        self._lock = threading.Lock()
        #: Records the :meth:`get` result dict for successful calls, returned
        #: for identical subsequent calls. Keyed by :meth:`key_from_dict`.
        self._response_by_key = {}
        #: List of :class:`mitogen.core.Latch` awaiting the result for a
        #: particular key.
        self._latches_by_key = {}
        #: Mapping of :class:`mitogen.core.Context` -> reference count. Each
        #: call to :meth:`get` increases this by one. Calls to :meth:`put`
        #: decrease it by one.
        self._refs_by_context = {}
        #: List of contexts in creation order by via= parameter. When
        #: :attr:`max_interpreters` is reached, the most recently used context
        #: is destroyed to make room for any additional context.
        self._lru_by_via = {}
        #: :func:`key_from_dict` result by Context.
        self._key_by_context = {}
        #: Mapping of Context -> parent Context
        self._via_by_context = {}
|
|
||||||
|
|
||||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
|
||||||
@mitogen.service.arg_spec({
|
|
||||||
'context': mitogen.core.Context
|
|
||||||
})
|
|
||||||
def reset(self, context):
|
|
||||||
"""
|
|
||||||
Return a reference, forcing close and discard of the underlying
|
|
||||||
connection. Used for 'meta: reset_connection' or when some other error
|
|
||||||
is detected.
|
|
||||||
"""
|
|
||||||
LOG.debug('%r.reset(%r)', self, context)
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
self._shutdown_unlocked(context)
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
|
||||||
@mitogen.service.arg_spec({
|
|
||||||
'context': mitogen.core.Context
|
|
||||||
})
|
|
||||||
def put(self, context):
|
|
||||||
"""
|
|
||||||
Return a reference, making it eligable for recycling once its reference
|
|
||||||
count reaches zero.
|
|
||||||
"""
|
|
||||||
LOG.debug('%r.put(%r)', self, context)
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
if self._refs_by_context.get(context, 0) == 0:
|
|
||||||
LOG.warning('%r.put(%r): refcount was 0. shutdown_all called?',
|
|
||||||
self, context)
|
|
||||||
return
|
|
||||||
self._refs_by_context[context] -= 1
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
def _produce_response(self, key, response):
|
|
||||||
"""
|
|
||||||
Reply to every waiting request matching a configuration key with a
|
|
||||||
response dictionary, deleting the list of waiters when done.
|
|
||||||
|
|
||||||
:param str key:
|
|
||||||
Result of :meth:`key_from_dict`
|
|
||||||
:param dict response:
|
|
||||||
Response dictionary
|
|
||||||
:returns:
|
|
||||||
Number of waiters that were replied to.
|
|
||||||
"""
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
latches = self._latches_by_key.pop(key)
|
|
||||||
count = len(latches)
|
|
||||||
for latch in latches:
|
|
||||||
latch.put(response)
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
return count
|
|
||||||
|
|
||||||
def _forget_context_unlocked(self, context):
|
|
||||||
key = self._key_by_context.get(context)
|
|
||||||
if key is None:
|
|
||||||
LOG.debug('%r: attempt to forget unknown %r', self, context)
|
|
||||||
return
|
|
||||||
|
|
||||||
self._response_by_key.pop(key, None)
|
|
||||||
self._latches_by_key.pop(key, None)
|
|
||||||
self._key_by_context.pop(context, None)
|
|
||||||
self._refs_by_context.pop(context, None)
|
|
||||||
self._via_by_context.pop(context, None)
|
|
||||||
self._lru_by_via.pop(context, None)
|
|
||||||
|
|
||||||
def _shutdown_unlocked(self, context, lru=None, new_context=None):
|
|
||||||
"""
|
|
||||||
Arrange for `context` to be shut down, and optionally add `new_context`
|
|
||||||
to the LRU list while holding the lock.
|
|
||||||
"""
|
|
||||||
LOG.info('%r._shutdown_unlocked(): shutting down %r', self, context)
|
|
||||||
context.shutdown()
|
|
||||||
via = self._via_by_context.get(context)
|
|
||||||
if via:
|
|
||||||
lru = self._lru_by_via.get(via)
|
|
||||||
if lru:
|
|
||||||
if context in lru:
|
|
||||||
lru.remove(context)
|
|
||||||
if new_context:
|
|
||||||
lru.append(new_context)
|
|
||||||
self._forget_context_unlocked(context)
|
|
||||||
|
|
||||||
def _update_lru_unlocked(self, new_context, spec, via):
|
|
||||||
"""
|
|
||||||
Update the LRU ("MRU"?) list associated with the connection described
|
|
||||||
by `kwargs`, destroying the most recently created context if the list
|
|
||||||
is full. Finally add `new_context` to the list.
|
|
||||||
"""
|
|
||||||
self._via_by_context[new_context] = via
|
|
||||||
|
|
||||||
lru = self._lru_by_via.setdefault(via, [])
|
|
||||||
if len(lru) < self.max_interpreters:
|
|
||||||
lru.append(new_context)
|
|
||||||
return
|
|
||||||
|
|
||||||
for context in reversed(lru):
|
|
||||||
if self._refs_by_context[context] == 0:
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
LOG.warning('via=%r reached maximum number of interpreters, '
|
|
||||||
'but they are all marked as in-use.', via)
|
|
||||||
return
|
|
||||||
|
|
||||||
self._shutdown_unlocked(context, lru=lru, new_context=new_context)
|
|
||||||
|
|
||||||
def _update_lru(self, new_context, spec, via):
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
self._update_lru_unlocked(new_context, spec, via)
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
|
||||||
def dump(self):
|
|
||||||
"""
|
|
||||||
For testing, return a list of dicts describing every currently
|
|
||||||
connected context.
|
|
||||||
"""
|
|
||||||
return [
|
|
||||||
{
|
|
||||||
'context_name': context.name,
|
|
||||||
'via': getattr(self._via_by_context.get(context),
|
|
||||||
'name', None),
|
|
||||||
'refs': self._refs_by_context.get(context),
|
|
||||||
}
|
|
||||||
for context, key in sorted(self._key_by_context.items(),
|
|
||||||
key=lambda c_k: c_k[0].context_id)
|
|
||||||
]
|
|
||||||
|
|
||||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
|
||||||
def shutdown_all(self):
|
|
||||||
"""
|
|
||||||
For testing use, arrange for all connections to be shut down.
|
|
||||||
"""
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
for context in list(self._key_by_context):
|
|
||||||
self._shutdown_unlocked(context)
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
def _on_context_disconnect(self, context):
|
|
||||||
"""
|
|
||||||
Respond to Context disconnect event by deleting any record of the no
|
|
||||||
longer reachable context. This method runs in the Broker thread and
|
|
||||||
must not to block.
|
|
||||||
"""
|
|
||||||
self._lock.acquire()
|
|
||||||
try:
|
|
||||||
LOG.info('%r: Forgetting %r due to stream disconnect', self, context)
|
|
||||||
self._forget_context_unlocked(context)
|
|
||||||
finally:
|
|
||||||
self._lock.release()
|
|
||||||
|
|
||||||
    # Module sources forwarded to every new child immediately after connection,
    # since any runner will need them and demand-loading costs round-trips.
    ALWAYS_PRELOAD = (
        'ansible.module_utils.basic',
        'ansible.module_utils.json_utils',
        'ansible.release',
        'ansible_mitogen.runner',
        'ansible_mitogen.target',
        'mitogen.fork',
        'mitogen.service',
    )
|
|
||||||
|
|
||||||
    def _send_module_forwards(self, context):
        """
        Proactively send the :data:`ALWAYS_PRELOAD` module sources to `context`
        via the router's module responder.
        """
        self.router.responder.forward_modules(context, self.ALWAYS_PRELOAD)
|
|
||||||
|
|
||||||
    # Cached result of the module-level _get_candidate_temp_dirs(); None until
    # first use. The assignment below shadows this class default per-instance.
    _candidate_temp_dirs = None

    def _get_candidate_temp_dirs(self):
        """
        Return a list of locations to try to create the single temporary
        directory used by the run. This simply caches the (expensive) plugin
        load of :func:`_get_candidate_temp_dirs`.
        """
        if self._candidate_temp_dirs is None:
            self._candidate_temp_dirs = _get_candidate_temp_dirs()
        return self._candidate_temp_dirs
|
|
||||||
|
|
||||||
    def _connect(self, key, spec, via=None):
        """
        Actual connect implementation. Arranges for the Mitogen connection to
        be created and enqueues an asynchronous call to start the forked task
        parent in the remote context.

        :param key:
            Deduplication key representing the connection configuration.
        :param spec:
            Connection specification.
        :param via:
            Parent context to proxy through, or :data:`None` for a direct
            connection.
        :raises Error:
            If `spec['method']` does not name a Router connection method.
        :returns:
            Dict like::

                {
                    'context': mitogen.core.Context or None,
                    'via': mitogen.core.Context or None,
                    'init_child_result': {
                        'fork_context': mitogen.core.Context,
                        'home_dir': str or None,
                    },
                    'msg': str or None
                }

            Where `context` is a reference to the newly constructed context,
            `init_child_result` is the result of executing
            :func:`ansible_mitogen.target.init_child` in that context, `msg` is
            an error message and the remaining fields are :data:`None`, or
            `msg` is :data:`None` and the remaining fields are set.
        """
        try:
            method = getattr(self.router, spec['method'])
        except AttributeError:
            # NOTE(review): formats with spec['transport']; if a spec lacks
            # that key this raises KeyError instead of Error — confirm specs
            # always carry 'transport'.
            raise Error('unsupported method: %(transport)s' % spec)

        # unidirectional=True presumably restricts routing so the child may
        # only communicate with its parents — confirm against Mitogen docs.
        context = method(via=via, unidirectional=True, **spec['kwargs'])
        if via and spec.get('enable_lru'):
            self._update_lru(context, spec, via)

        # Forget the context when its disconnect event fires.
        mitogen.core.listen(context, 'disconnect',
                            lambda: self._on_context_disconnect(context))

        # Push commonly required module sources before init_child runs, so it
        # need not demand-load them.
        self._send_module_forwards(context)
        init_child_result = context.call(
            ansible_mitogen.target.init_child,
            log_level=LOG.getEffectiveLevel(),
            candidate_temp_dirs=self._get_candidate_temp_dirs(),
        )

        # Optional debugging aid: dump the new child's thread stacks.
        if os.environ.get('MITOGEN_DUMP_THREAD_STACKS'):
            from mitogen import debug
            context.call(debug.dump_to_logger)

        self._key_by_context[context] = key
        self._refs_by_context[context] = 0
        return {
            'context': context,
            'via': via,
            'init_child_result': init_child_result,
            'msg': None,
        }
|
|
||||||
|
|
||||||
    def _wait_or_start(self, spec, via=None):
        """
        Return a :class:`mitogen.core.Latch` that will receive the connection
        result for `spec`: either a cached response, the response from the
        connection attempt this call initiates, or a ``sys.exc_info()`` tuple
        if that attempt fails.
        """
        latch = mitogen.core.Latch()
        key = key_from_dict(via=via, **spec)
        self._lock.acquire()
        try:
            response = self._response_by_key.get(key)
            if response is not None:
                # Connection already exists: bump its refcount and reply
                # immediately.
                self._refs_by_context[response['context']] += 1
                latch.put(response)
                return latch

            latches = self._latches_by_key.setdefault(key, [])
            first = len(latches) == 0
            latches.append(latch)
        finally:
            self._lock.release()

        if first:
            # I'm the first requestee, so I will create the connection.
            try:
                response = self._connect(key, spec, via=via)
                count = self._produce_response(key, response)
                # Only record the response for non-error results.
                self._response_by_key[key] = response
                # Set the reference count to the number of waiters.
                self._refs_by_context[response['context']] += count
            except Exception:
                # Deliver the exc_info() tuple to every waiter; get()
                # re-raises it in the caller via reraise().
                self._produce_response(key, sys.exc_info())

        return latch
|
|
||||||
|
|
||||||
    # Error text returned by get() when the channel drops while a connection
    # attempt is still in progress.
    disconnect_msg = (
        'Channel was disconnected while connection attempt was in progress; '
        'this may be caused by an abnormal Ansible exit, or due to an '
        'unreliable target.'
    )
|
|
||||||
|
|
||||||
@mitogen.service.expose(mitogen.service.AllowParents())
|
|
||||||
@mitogen.service.arg_spec({
|
|
||||||
'stack': list
|
|
||||||
})
|
|
||||||
def get(self, msg, stack):
|
|
||||||
"""
|
|
||||||
Return a Context referring to an established connection with the given
|
|
||||||
configuration, establishing new connections as necessary.
|
|
||||||
|
|
||||||
:param list stack:
|
|
||||||
Connection descriptions. Each element is a dict containing 'method'
|
|
||||||
and 'kwargs' keys describing the Router method and arguments.
|
|
||||||
Subsequent elements are proxied via the previous.
|
|
||||||
|
|
||||||
:returns dict:
|
|
||||||
* context: mitogen.parent.Context or None.
|
|
||||||
* init_child_result: Result of :func:`init_child`.
|
|
||||||
* msg: StreamError exception text or None.
|
|
||||||
* method_name: string failing method name.
|
|
||||||
"""
|
|
||||||
via = None
|
|
||||||
for spec in stack:
|
|
||||||
try:
|
|
||||||
result = self._wait_or_start(spec, via=via).get()
|
|
||||||
if isinstance(result, tuple): # exc_info()
|
|
||||||
reraise(*result)
|
|
||||||
via = result['context']
|
|
||||||
except mitogen.core.ChannelError:
|
|
||||||
return {
|
|
||||||
'context': None,
|
|
||||||
'init_child_result': None,
|
|
||||||
'method_name': spec['method'],
|
|
||||||
'msg': self.disconnect_msg,
|
|
||||||
}
|
|
||||||
except mitogen.core.StreamError as e:
|
|
||||||
return {
|
|
||||||
'context': None,
|
|
||||||
'init_child_result': None,
|
|
||||||
'method_name': spec['method'],
|
|
||||||
'msg': str(e),
|
|
||||||
}
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
class ModuleDepService(mitogen.service.Service):
    """
    Scan a new-style module and produce a cached mapping of module_utils names
    to their resolved filesystem paths.
    """
    # Requests are processed one at a time (presumably because _cache is not
    # guarded by a lock — confirm).
    invoker_class = mitogen.service.SerializedInvoker

    def __init__(self, *args, **kwargs):
        super(ModuleDepService, self).__init__(*args, **kwargs)
        # Maps (module_name, search_path) -> {'builtin': [...], 'custom': [...]}.
        self._cache = {}

    def _get_builtin_names(self, builtin_path, resolved):
        """
        Return the names of scanned dependencies whose files live under the
        built-in module_utils directory `builtin_path`.
        """
        return [
            mitogen.core.to_text(fullname)
            for fullname, path, is_pkg in resolved
            if os.path.abspath(path).startswith(builtin_path)
        ]

    def _get_custom_tups(self, builtin_path, resolved):
        """
        Return (name, path, is_pkg) tuples for scanned dependencies living
        outside `builtin_path`, i.e. user- or role-supplied module_utils.
        """
        return [
            (mitogen.core.to_text(fullname),
             mitogen.core.to_text(path),
             is_pkg)
            for fullname, path, is_pkg in resolved
            if not os.path.abspath(path).startswith(builtin_path)
        ]

    @mitogen.service.expose(policy=mitogen.service.AllowParents())
    @mitogen.service.arg_spec({
        'module_name': mitogen.core.UnicodeType,
        'module_path': mitogen.core.FsPathTypes,
        'search_path': tuple,
        'builtin_path': mitogen.core.FsPathTypes,
        'context': mitogen.core.Context,
    })
    def scan(self, module_name, module_path, search_path, builtin_path, context):
        """
        Scan `module_path` for module_utils dependencies, classifying each as
        built-in or custom. Results are cached by (module_name, search_path).

        :param context:
            Declared in the arg_spec but unused in this method body.
        """
        key = (module_name, search_path)
        if key not in self._cache:
            resolved = ansible_mitogen.module_finder.scan(
                module_name=module_name,
                module_path=module_path,
                search_path=tuple(search_path) + (builtin_path,),
            )
            # Normalize once so the prefix matching in the helpers is
            # consistent.
            builtin_path = os.path.abspath(builtin_path)
            builtin = self._get_builtin_names(builtin_path, resolved)
            custom = self._get_custom_tups(builtin_path, resolved)
            self._cache[key] = {
                'builtin': builtin,
                'custom': custom,
            }
        return self._cache[key]
|
|
|
@ -1,296 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
import os
|
|
||||||
import signal
|
|
||||||
import threading
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import ansible_mitogen.affinity
|
|
||||||
import ansible_mitogen.loaders
|
|
||||||
import ansible_mitogen.mixins
|
|
||||||
import ansible_mitogen.process
|
|
||||||
|
|
||||||
import ansible
|
|
||||||
import ansible.executor.process.worker
|
|
||||||
|
|
||||||
|
|
||||||
# Inclusive range of Ansible releases this extension supports.
ANSIBLE_VERSION_MIN = '2.3'
ANSIBLE_VERSION_MAX = '2.7'

# Error shown when the installed Ansible is newer than ANSIBLE_VERSION_MAX.
NEW_VERSION_MSG = (
    "Your Ansible version (%s) is too recent. The most recent version\n"
    "supported by Mitogen for Ansible is %s.x. Please check the Mitogen\n"
    "release notes to see if a new version is available, otherwise\n"
    "subscribe to the corresponding GitHub issue to be notified when\n"
    "support becomes available.\n"
    "\n"
    "  https://mitogen.rtfd.io/en/latest/changelog.html\n"
    "  https://github.com/dw/mitogen/issues/\n"
)
# Error shown when the installed Ansible predates ANSIBLE_VERSION_MIN.
OLD_VERSION_MSG = (
    "Your version of Ansible (%s) is too old. The oldest version supported by "
    "Mitogen for Ansible is %s."
)
|
|
||||||
|
|
||||||
def _version_tuple(v, parts):
    """
    Parse the first `parts` dot-separated components of version string `v`
    into a tuple of ints, ignoring any non-numeric suffix within a component
    (e.g. ``_version_tuple('2.8.0rc1', 2) == (2, 8)``).
    """
    out = []
    for comp in v.split('.')[:parts]:
        digits = ''
        for ch in comp:
            if not ch.isdigit():
                break
            digits += ch
        out.append(int(digits or 0))
    return tuple(out)


def _assert_supported_release():
    """
    Throw AnsibleError with a descriptive message in case of being loaded into
    an unsupported Ansible release.
    """
    v = ansible.__version__

    # Compare numerically rather than lexicographically: string-slice
    # comparison mishandles multi-digit components (e.g. '2.10' < '2.3').
    min_parts = ANSIBLE_VERSION_MIN.count('.') + 1
    max_parts = ANSIBLE_VERSION_MAX.count('.') + 1

    if _version_tuple(v, min_parts) < _version_tuple(ANSIBLE_VERSION_MIN, min_parts):
        raise ansible.errors.AnsibleError(
            OLD_VERSION_MSG % (v, ANSIBLE_VERSION_MIN)
        )

    if _version_tuple(v, max_parts) > _version_tuple(ANSIBLE_VERSION_MAX, max_parts):
        raise ansible.errors.AnsibleError(
            NEW_VERSION_MSG % (v, ANSIBLE_VERSION_MAX)
        )
|
|
||||||
|
|
||||||
def _patch_awx_callback():
    """
    issue #400: AWX loads a display callback that suffers from thread-safety
    issues. Detect the presence of older AWX versions and patch the bug.
    """
    # AWX uses sitecustomize.py to force-load this package. If it exists, we're
    # running under AWX.
    try:
        from awx_display_callback.events import EventContext
        from awx_display_callback.events import event_context
        # NOTE(review): event_context appears unused below — possibly imported
        # for its side effects; confirm before removing.
    except ImportError:
        # Not running under AWX: nothing to patch.
        return

    if hasattr(EventContext(), '_local'):
        # Patched version.
        return

    def patch_add_local(self, **kwargs):
        # Keep per-thread state in a threading.local() instead of shared
        # instance attributes, avoiding cross-thread clobbering.
        tls = vars(self._local)
        ctx = tls.setdefault('_ctx', {})
        ctx.update(kwargs)

    EventContext._local = threading.local()
    EventContext.add_local = patch_add_local

# Apply at import time, before any callback activity starts.
_patch_awx_callback()
|
|
||||||
|
|
||||||
|
|
||||||
def wrap_action_loader__get(name, *args, **kwargs):
    """
    While the mitogen strategy is active, trap action_loader.get() calls,
    augmenting any fetched class with ActionModuleMixin, which replaces
    various helper methods inherited from ActionBase with implementations
    that avoid the use of shell fragments wherever possible.

    This is used instead of static subclassing as it generalizes to third
    party action modules outside the Ansible tree.
    """
    klass = action_loader__get(name, class_only=True)
    if not klass:
        # Unknown action: preserve the loader's "not found" result.
        return None

    mixed = type(str(name),
                 (ansible_mitogen.mixins.ActionModuleMixin, klass),
                 {})
    if kwargs.get('class_only'):
        return mixed
    return mixed(*args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def wrap_connection_loader__get(name, *args, **kwargs):
    """
    While the strategy is active, rewrite connection_loader.get() calls for
    some transports into requests for a compatible Mitogen transport.
    """
    redirected = ('docker', 'kubectl', 'jail', 'local', 'lxc',
                  'lxd', 'machinectl', 'setns', 'ssh')
    if name in redirected:
        name = 'mitogen_%s' % (name,)
    return connection_loader__get(name, *args, **kwargs)
|
|
||||||
|
|
||||||
|
|
||||||
def wrap_worker__run(*args, **kwargs):
    """
    While the strategy is active, wrap WorkerProcess.run(): set the worker's
    process name and CPU affinity, then execute the original run() under the
    Mitogen profiling hook.
    """
    # Ignore parent's attempts to murder us when we still need to write
    # profiling output. (_profile_hook having been renamed implies profiling
    # is enabled.)
    if mitogen.core._profile_hook.__name__ != '_profile_hook':
        signal.signal(signal.SIGTERM, signal.SIG_IGN)

    # NOTE(review): relies on ansible_mitogen.logging being importable as an
    # attribute of the package (loaded as a side effect elsewhere) — confirm.
    ansible_mitogen.logging.set_process_name('task')
    ansible_mitogen.affinity.policy.assign_worker()
    return mitogen.core._profile_hook('WorkerProcess',
        lambda: worker__run(*args, **kwargs)
    )
|
|
||||||
|
|
||||||
|
|
||||||
class StrategyMixin(object):
|
|
||||||
"""
|
|
||||||
This mix-in enhances any built-in strategy by arranging for various Mitogen
|
|
||||||
services to be initialized in the Ansible top-level process, and for worker
|
|
||||||
processes to grow support for using those top-level services to communicate
|
|
||||||
with and execute modules on remote hosts.
|
|
||||||
|
|
||||||
Mitogen:
|
|
||||||
|
|
||||||
A private Broker IO multiplexer thread is created to dispatch IO
|
|
||||||
between the local Router and any connected streams, including streams
|
|
||||||
connected to Ansible WorkerProcesses, and SSH commands implementing
|
|
||||||
connections to remote machines.
|
|
||||||
|
|
||||||
A Router is created that implements message dispatch to any locally
|
|
||||||
registered handlers, and message routing for remote streams. Router is
|
|
||||||
the junction point through which WorkerProcesses and remote SSH contexts
|
|
||||||
can communicate.
|
|
||||||
|
|
||||||
Router additionally adds message handlers for a variety of base
|
|
||||||
services, review the Standard Handles section of the How It Works guide
|
|
||||||
in the documentation.
|
|
||||||
|
|
||||||
A ContextService is installed as a message handler in the master
|
|
||||||
process and run on a private thread. It is responsible for accepting
|
|
||||||
requests to establish new SSH connections from worker processes, and
|
|
||||||
ensuring precisely one connection exists and is reused for subsequent
|
|
||||||
playbook steps. The service presently runs in a single thread, so to
|
|
||||||
begin with, new SSH connections are serialized.
|
|
||||||
|
|
||||||
Finally a mitogen.unix listener is created through which WorkerProcess
|
|
||||||
can establish a connection back into the master process, in order to
|
|
||||||
avail of ContextService. A UNIX listener socket is necessary as there
|
|
||||||
is no more sane mechanism to arrange for IPC between the Router in the
|
|
||||||
master process, and the corresponding Router in the worker process.
|
|
||||||
|
|
||||||
Ansible:
|
|
||||||
|
|
||||||
PluginLoader monkey patches are installed to catch attempts to create
|
|
||||||
connection and action plug-ins.
|
|
||||||
|
|
||||||
For connection plug-ins, if the desired method is "local" or "ssh", it
|
|
||||||
is redirected to the "mitogen" connection plug-in. That plug-in
|
|
||||||
implements communication via a UNIX socket connection to the top-level
|
|
||||||
Ansible process, and uses ContextService running in the top-level
|
|
||||||
process to actually establish and manage the connection.
|
|
||||||
|
|
||||||
For action plug-ins, the original class is looked up as usual, but a
|
|
||||||
new subclass is created dynamically in order to mix-in
|
|
||||||
ansible_mitogen.target.ActionModuleMixin, which overrides many of the
|
|
||||||
methods usually inherited from ActionBase in order to replace them with
|
|
||||||
pure-Python equivalents that avoid the use of shell.
|
|
||||||
|
|
||||||
In particular, _execute_module() is overridden with an implementation
|
|
||||||
that uses ansible_mitogen.target.run_module() executed in the target
|
|
||||||
Context. run_module() implements module execution by importing the
|
|
||||||
module as if it were a normal Python module, and capturing its output
|
|
||||||
in the remote process. Since the Mitogen module loader is active in the
|
|
||||||
remote process, all the heavy lifting of transferring the action module
|
|
||||||
and its dependencies are automatically handled by Mitogen.
|
|
||||||
"""
|
|
||||||
    def _install_wrappers(self):
        """
        Install our PluginLoader monkey patches and update global variables
        with references to the real functions, so :meth:`_remove_wrappers`
        can restore them later.
        """
        global action_loader__get
        action_loader__get = ansible_mitogen.loaders.action_loader.get
        ansible_mitogen.loaders.action_loader.get = wrap_action_loader__get

        global connection_loader__get
        connection_loader__get = ansible_mitogen.loaders.connection_loader.get
        ansible_mitogen.loaders.connection_loader.get = wrap_connection_loader__get

        global worker__run
        worker__run = ansible.executor.process.worker.WorkerProcess.run
        ansible.executor.process.worker.WorkerProcess.run = wrap_worker__run
|
|
||||||
|
|
||||||
    def _remove_wrappers(self):
        """
        Uninstall the PluginLoader monkey patches, restoring the original
        functions captured by :meth:`_install_wrappers`.
        """
        ansible_mitogen.loaders.action_loader.get = action_loader__get
        ansible_mitogen.loaders.connection_loader.get = connection_loader__get
        ansible.executor.process.worker.WorkerProcess.run = worker__run
|
|
||||||
|
|
||||||
def _add_plugin_paths(self):
|
|
||||||
"""
|
|
||||||
Add the Mitogen plug-in directories to the ModuleLoader path, avoiding
|
|
||||||
the need for manual configuration.
|
|
||||||
"""
|
|
||||||
base_dir = os.path.join(os.path.dirname(__file__), 'plugins')
|
|
||||||
ansible_mitogen.loaders.connection_loader.add_directory(
|
|
||||||
os.path.join(base_dir, 'connection')
|
|
||||||
)
|
|
||||||
ansible_mitogen.loaders.action_loader.add_directory(
|
|
||||||
os.path.join(base_dir, 'action')
|
|
||||||
)
|
|
||||||
|
|
||||||
    def _queue_task(self, host, task, task_vars, play_context):
        """
        Many PluginLoader caches are defective as they are only populated in
        the ephemeral WorkerProcess. Touch each plug-in path before forking to
        ensure all workers receive a hot cache.
        """
        # Warm the module, connection, and action loader caches for this
        # task's plug-ins before delegating to the real strategy.
        ansible_mitogen.loaders.module_loader.find_plugin(
            name=task.action,
            mod_type='',
        )
        ansible_mitogen.loaders.connection_loader.get(
            name=play_context.connection,
            class_only=True,
        )
        ansible_mitogen.loaders.action_loader.get(
            name=task.action,
            class_only=True,
        )

        return super(StrategyMixin, self)._queue_task(
            host=host,
            task=task,
            task_vars=task_vars,
            play_context=play_context,
        )
|
|
||||||
|
|
||||||
    def run(self, iterator, play_context, result=0):
        """
        Arrange for a mitogen.master.Router to be available for the duration of
        the strategy's real run() method.

        :param iterator: play iterator passed through to the wrapped strategy.
        :param play_context: PlayContext for the current play.
        :param int result: accepted for signature compatibility with the
            wrapped strategy; not used directly here.
        """
        # Refuse to run on Ansible releases this extension does not support.
        _assert_supported_release()

        # NOTE(review): presumably a no-op when the mux is already running --
        # confirm MuxProcess.start() is idempotent.
        ansible_mitogen.process.MuxProcess.start()
        run = super(StrategyMixin, self).run
        self._add_plugin_paths()
        self._install_wrappers()
        try:
            # _profile_hook() lets Mitogen's profiling machinery account for
            # time spent inside the real strategy run.
            return mitogen.core._profile_hook('Strategy',
                lambda: run(iterator, play_context)
            )
        finally:
            # Always undo the monkey-patches, even if the strategy raised.
            self._remove_wrappers()
|
|
|
@ -1,777 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
"""
|
|
||||||
Helper functions intended to be executed on the target. These are entrypoints
|
|
||||||
for file transfer, module execution and sundry bits like changing file modes.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import errno
|
|
||||||
import grp
|
|
||||||
import operator
|
|
||||||
import os
|
|
||||||
import pwd
|
|
||||||
import re
|
|
||||||
import signal
|
|
||||||
import stat
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
import traceback
|
|
||||||
import types
|
|
||||||
|
|
||||||
# Absolute imports for <2.5.
|
|
||||||
logging = __import__('logging')
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.fork
|
|
||||||
import mitogen.parent
|
|
||||||
import mitogen.service
|
|
||||||
from mitogen.core import b
|
|
||||||
|
|
||||||
try:
|
|
||||||
import json
|
|
||||||
except ImportError:
|
|
||||||
import simplejson as json
|
|
||||||
|
|
||||||
try:
|
|
||||||
reduce
|
|
||||||
except NameError:
|
|
||||||
# Python 3.x.
|
|
||||||
from functools import reduce
|
|
||||||
|
|
||||||
try:
|
|
||||||
BaseException
|
|
||||||
except NameError:
|
|
||||||
# Python 2.4
|
|
||||||
BaseException = Exception
|
|
||||||
|
|
||||||
|
|
||||||
# Ansible since PR #41749 inserts "import __main__" into
|
|
||||||
# ansible.module_utils.basic. Mitogen's importer will refuse such an import, so
|
|
||||||
# we must setup a fake "__main__" before that module is ever imported. The
|
|
||||||
# str() is to cast Unicode to bytes on Python 2.6.
|
|
||||||
if not sys.modules.get(str('__main__')):
|
|
||||||
sys.modules[str('__main__')] = types.ModuleType(str('__main__'))
|
|
||||||
|
|
||||||
import ansible.module_utils.json_utils
|
|
||||||
import ansible_mitogen.runner
|
|
||||||
|
|
||||||
|
|
||||||
#: Module-level logger.
LOG = logging.getLogger(__name__)

#: Error text raised by find_good_temp_dir(); %(paths)s is substituted with
#: the newline-joined list of candidate paths that were tried.
MAKE_TEMP_FAILED_MSG = (
    u"Unable to find a useable temporary directory. This likely means no\n"
    u"system-supplied TMP directory can be written to, or all directories\n"
    u"were mounted on 'noexec' filesystems.\n"
    u"\n"
    u"The following paths were tried:\n"
    u"    %(paths)s\n"
    u"\n"
    u"Please check '-vvv' output for a log of individual path errors."
)

# Python 2.4/2.5 cannot support fork+threads whatsoever, it doesn't even fix up
# interpreter state. So 2.4/2.5 interpreters start .local() contexts for
# isolation instead. Since we don't have any crazy memory sharing problems to
# avoid, there is no virginal fork parent either. The child is started directly
# from the login/become process. In future this will be default everywhere,
# fork is brainwrong from the stone age.
FORK_SUPPORTED = sys.version_info >= (2, 6)

#: Initialized to an econtext.parent.Context pointing at a pristine fork of
#: the target Python interpreter before it executes any code or imports.
_fork_parent = None

#: Set by :func:`init_child` to the name of a writeable and executable
#: temporary directory accessible by the active user account.
good_temp_dir = None
|
|
||||||
|
|
||||||
|
|
||||||
def subprocess__Popen__close_fds(self, but):
    """
    issue #362, #435: subprocess.Popen(close_fds=True) aka.
    AnsibleModule.run_command() loops the entire FD space on Python<3.2.
    CentOS>5 ships with 1,048,576 FDs by default, resulting in huge (>500ms)
    latency starting children. Therefore replace Popen._close_fds on Linux with
    a version that is O(fds) rather than O(_SC_OPEN_MAX).
    """
    try:
        entries = os.listdir(u'/proc/self/fd')
    except OSError:
        # /proc may be absent (e.g. inside some containers); fall back to the
        # stock implementation saved at patch time.
        self._original_close_fds(but)
        return

    for entry in entries:
        if not entry.isdigit():
            continue
        fd = int(entry, 10)
        # Preserve stdin/stdout/stderr and the one descriptor the child
        # needs to keep.
        if fd <= 2 or fd == but:
            continue
        try:
            os.close(fd)
        except OSError:
            pass
|
|
||||||
|
|
||||||
|
|
||||||
# Install the O(fds) close_fds replacement (see subprocess__Popen__close_fds
# above) only on Linux Python 2.x target processes, and only when the private
# hook being overridden actually exists in this Python build.
if (
    sys.platform.startswith(u'linux') and
    sys.version < u'3.0' and
    hasattr(subprocess.Popen, u'_close_fds') and
    not mitogen.is_master
):
    # Keep the original so the replacement can fall back to it when /proc is
    # unavailable.
    subprocess.Popen._original_close_fds = subprocess.Popen._close_fds
    subprocess.Popen._close_fds = subprocess__Popen__close_fds
|
|
||||||
|
|
||||||
|
|
||||||
def get_small_file(context, path):
    """
    Basic in-memory caching module fetcher. This generates one roundtrip for
    every previously unseen file, so it is only a temporary solution.

    :param context:
        Context we should direct FileService requests to. For now (and probably
        forever) this is just the top-level Mitogen connection manager process.
    :param path:
        Path to fetch from FileService, must previously have been registered by
        a privileged context using the `register` command.
    :returns:
        Bytestring file data.
    """
    # NOTE(review): get_or_create_pool() presumably reuses any service pool
    # already running in this process -- confirm against mitogen.service.
    pool = mitogen.service.get_or_create_pool(router=context.router)
    service = pool.get_service(u'mitogen.service.PushFileService')
    return service.get(path)
|
|
||||||
|
|
||||||
|
|
||||||
def transfer_file(context, in_path, out_path, sync=False, set_owner=False):
    """
    Streamily download a file from the connection multiplexer process in the
    controller.

    :param mitogen.core.Context context:
        Reference to the context hosting the FileService that will transmit the
        file.
    :param bytes in_path:
        FileService registered name of the input file.
    :param bytes out_path:
        Name of the output path on the local disk.
    :param bool sync:
        If :data:`True`, ensure the file content and metadata are fully on disk
        before renaming the temporary file over the existing file. This should
        ensure in the case of system crash, either the entire old or new file
        are visible post-reboot.
    :param bool set_owner:
        If :data:`True`, look up the metadata username and group on the local
        system and set the file owner using :func:`os.fchown`.
    """
    out_path = os.path.abspath(out_path)
    # Download into a temporary sibling, then rename over the destination so
    # readers never observe a partially written file.
    fd, tmp_path = tempfile.mkstemp(suffix='.tmp',
                                    prefix='.ansible_mitogen_transfer-',
                                    dir=os.path.dirname(out_path))
    fp = os.fdopen(fd, 'wb', mitogen.core.CHUNK_SIZE)
    LOG.debug('transfer_file(%r) temporary file: %s', out_path, tmp_path)

    try:
        try:
            ok, metadata = mitogen.service.FileService.get(
                context=context,
                path=in_path,
                out_fp=fp,
            )
            if not ok:
                raise IOError('transfer of %r was interrupted.' % (in_path,))

            set_file_mode(tmp_path, metadata['mode'], fd=fp.fileno())
            if set_owner:
                set_file_owner(tmp_path, metadata['owner'], metadata['group'],
                               fd=fp.fileno())
            if sync:
                # Bug fix: fsync must run while the descriptor is still open.
                # Previously it ran after fp.close(), so fp.fileno() raised
                # ValueError whenever sync=True.
                fp.flush()
                os.fsync(fp.fileno())
        finally:
            fp.close()

        os.rename(tmp_path, out_path)
    except BaseException:
        # Clean up the temporary file on any failure before re-raising.
        os.unlink(tmp_path)
        raise

    os.utime(out_path, (metadata['atime'], metadata['mtime']))
|
|
||||||
|
|
||||||
|
|
||||||
def prune_tree(path):
    """
    Like shutil.rmtree(), but log errors rather than discard them, and do not
    waste multiple os.stat() calls discovering whether the object can be
    deleted, just try deleting it instead.

    :param str path: file or directory to delete recursively.
    """
    try:
        os.unlink(path)
        return
    except OSError:
        # sys.exc_info() is used rather than `except OSError as e` for
        # Python 2.4/2.5 compatibility.
        e = sys.exc_info()[1]
        if not (os.path.isdir(path) and
                e.args[0] in (errno.EPERM, errno.EISDIR)):
            # A genuine unlink failure (not "it is a directory"): report it.
            LOG.error('prune_tree(%r): %s', path, e)
            return

    try:
        # Ensure write access for readonly directories. Ignore error in case
        # path is on a weird filesystem (e.g. vfat).
        os.chmod(path, int('0700', 8))
    except OSError:
        e = sys.exc_info()[1]
        LOG.warning('prune_tree(%r): %s', path, e)

    try:
        # Recurse into the directory, then remove the now-empty directory.
        for name in os.listdir(path):
            if name not in ('.', '..'):
                prune_tree(os.path.join(path, name))
        os.rmdir(path)
    except OSError:
        e = sys.exc_info()[1]
        LOG.error('prune_tree(%r): %s', path, e)
|
|
||||||
|
|
||||||
|
|
||||||
def is_good_temp_dir(path):
    """
    Return :data:`True` if `path` can be used as a temporary directory, logging
    any failures that may cause it to be unsuitable. If the directory doesn't
    exist, we attempt to create it using :func:`os.makedirs`.

    :param str path: candidate directory path.
    :returns: :data:`True` when the directory is writeable and its files are
        executable (i.e. not mounted noexec).
    """
    # sys.exc_info() is used rather than `except ... as e` throughout for
    # Python 2.4/2.5 compatibility.
    if not os.path.exists(path):
        try:
            os.makedirs(path, mode=int('0700', 8))
        except OSError:
            e = sys.exc_info()[1]
            LOG.debug('temp dir %r unusable: did not exist and attempting '
                      'to create it failed: %s', path, e)
            return False

    try:
        # Successfully creating a file proves the directory is writeable.
        tmp = tempfile.NamedTemporaryFile(
            prefix='ansible_mitogen_is_good_temp_dir',
            dir=path,
        )
    except (OSError, IOError):
        e = sys.exc_info()[1]
        LOG.debug('temp dir %r unusable: %s', path, e)
        return False

    try:
        try:
            os.chmod(tmp.name, int('0700', 8))
        except OSError:
            e = sys.exc_info()[1]
            LOG.debug('temp dir %r unusable: chmod failed: %s', path, e)
            return False

        try:
            # access(.., X_OK) is sufficient to detect noexec.
            if not os.access(tmp.name, os.X_OK):
                raise OSError('filesystem appears to be mounted noexec')
        except OSError:
            e = sys.exc_info()[1]
            LOG.debug('temp dir %r unusable: %s', path, e)
            return False
    finally:
        # NamedTemporaryFile deletes the probe file on close.
        tmp.close()

    return True
|
|
||||||
|
|
||||||
|
|
||||||
def find_good_temp_dir(candidate_temp_dirs):
    """
    Given a list of candidate temp directories extracted from ``ansible.cfg``,
    combine it with the Python-builtin list of candidate directories used by
    :mod:`tempfile`, then iteratively try each until one is found that is both
    writeable and executable.

    :param list candidate_temp_dirs:
        List of candidate $variable-expanded and tilde-expanded directory paths
        that may be usable as a temporary directory.
    """
    # Configured candidates first, then tempfile's built-in fallbacks.
    paths = [
        os.path.expandvars(os.path.expanduser(candidate))
        for candidate in candidate_temp_dirs
    ]
    paths += tempfile._candidate_tempdir_list()

    for candidate in paths:
        if is_good_temp_dir(candidate):
            LOG.debug('Selected temp directory: %r (from %r)',
                      candidate, paths)
            return candidate

    raise IOError(MAKE_TEMP_FAILED_MSG % {
        'paths': '\n    '.join(paths),
    })
|
|
||||||
|
|
||||||
|
|
||||||
@mitogen.core.takes_econtext
def init_child(econtext, log_level, candidate_temp_dirs):
    """
    Called by ContextService immediately after connection; arranges for the
    (presently) spotless Python interpreter to be forked, where the newly
    forked interpreter becomes the parent of any newly forked future
    interpreters.

    This is necessary to prevent modules that are executed in-process from
    polluting the global interpreter state in a way that effects explicitly
    isolated modules.

    :param econtext:
        ExternalContext for this process, injected by the
        :func:`mitogen.core.takes_econtext` decorator.
    :param int log_level:
        Logging package level active in the master.
    :param list[str] candidate_temp_dirs:
        List of $variable-expanded and tilde-expanded directory names to add to
        candidate list of temporary directories.

    :returns:
        Dict like::

            {
                'fork_context': mitogen.core.Context or None,
                'good_temp_dir': ...
                'home_dir': str
            }

        Where `fork_context` refers to the newly forked 'fork parent' context
        the controller will use to start forked jobs, and `home_dir` is the
        home directory for the active user account.
    """
    # Copying the master's log level causes log messages to be filtered before
    # they reach LogForwarder, thus reducing an influx of tiny messages waking
    # the connection multiplexer process in the master.
    LOG.setLevel(log_level)
    logging.getLogger('ansible_mitogen').setLevel(log_level)

    # issue #536: if the json module is available, remove simplejson from the
    # importer whitelist to avoid confusing certain Ansible modules.
    if json.__name__ == 'json':
        econtext.importer.whitelist.remove('simplejson')

    global _fork_parent
    if FORK_SUPPORTED:
        # Fork the pristine 'fork parent' now, before any Ansible module code
        # can be imported into this interpreter.
        mitogen.parent.upgrade_router(econtext)
        _fork_parent = econtext.router.fork()

    global good_temp_dir
    good_temp_dir = find_good_temp_dir(candidate_temp_dirs)

    return {
        u'fork_context': _fork_parent,
        u'home_dir': mitogen.core.to_text(os.path.expanduser('~')),
        u'good_temp_dir': good_temp_dir,
    }
|
|
||||||
|
|
||||||
|
|
||||||
@mitogen.core.takes_econtext
def spawn_isolated_child(econtext):
    """
    For helper functions executed in the fork parent context, arrange for
    the context's router to be upgraded as necessary and for a new child to be
    prepared.

    The actual fork occurs from the 'virginal fork parent', which does not have
    any Ansible modules loaded prior to fork, to avoid conflicts resulting from
    custom module_utils paths.

    :param econtext:
        ExternalContext for this process, injected by the
        :func:`mitogen.core.takes_econtext` decorator.
    :returns:
        The newly started child context.
    """
    mitogen.parent.upgrade_router(econtext)
    if FORK_SUPPORTED:
        context = econtext.router.fork()
    else:
        # Python 2.4/2.5: fork+threads is unsupported, start a fresh local
        # interpreter instead.
        context = econtext.router.local()
    # Fixed: the debug message previously referenced this function's old name
    # ('create_fork_child'), making logs misleading.
    LOG.debug('spawn_isolated_child() -> %r', context)
    return context
|
|
||||||
|
|
||||||
|
|
||||||
def run_module(kwargs):
    """
    Set up the process environment in preparation for running an Ansible
    module. This monkey-patches the Ansible libraries in various places to
    prevent it from trying to kill the process on completion, and to prevent it
    from reading sys.stdin.
    """
    # The 'runner_name' key selects which Runner subclass executes the module;
    # the remaining keys become its constructor arguments.
    name = kwargs.pop('runner_name')
    runner_cls = getattr(ansible_mitogen.runner, name)
    runner = runner_cls(**mitogen.core.Kwargs(kwargs))
    return runner.run()
|
|
||||||
|
|
||||||
|
|
||||||
def _get_async_dir():
|
|
||||||
return os.path.expanduser(
|
|
||||||
os.environ.get('ANSIBLE_ASYNC_DIR', '~/.ansible_async')
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class AsyncRunner(object):
    """
    Execute a module on the target while recording its progress and final
    result into an async job status file named after the job ID.
    """
    def __init__(self, job_id, timeout_secs, started_sender, econtext, kwargs):
        self.job_id = job_id
        self.timeout_secs = timeout_secs
        self.started_sender = started_sender
        self.econtext = econtext
        self.kwargs = kwargs
        # Set by _on_sigalrm() so _run() knows not to overwrite the timeout
        # status with a partial module result.
        self._timed_out = False
        self._init_path()

    def _init_path(self):
        # Status file lives at <async_dir>/<job_id>; create the directory on
        # first use.
        async_dir = _get_async_dir()
        if not os.path.exists(async_dir):
            os.makedirs(async_dir)
        self.path = os.path.join(async_dir, self.job_id)

    def _update(self, dct):
        """
        Update an async job status file.
        """
        LOG.info('%r._update(%r, %r)', self, self.job_id, dct)
        dct.setdefault('ansible_job_id', self.job_id)
        dct.setdefault('data', '')

        # Write to a sibling temp file then rename, so readers never observe
        # a partially written status file.
        fp = open(self.path + '.tmp', 'w')
        try:
            fp.write(json.dumps(dct))
        finally:
            fp.close()
        os.rename(self.path + '.tmp', self.path)

    def _on_sigalrm(self, signum, frame):
        """
        Respond to SIGALRM (job timeout) by updating the job file and killing
        the process.
        """
        msg = "Job reached maximum time limit of %d seconds." % (
            self.timeout_secs,
        )
        self._update({
            "failed": 1,
            "finished": 1,
            "msg": msg,
        })
        self._timed_out = True
        self.econtext.broker.shutdown()

    def _install_alarm(self):
        # One-shot alarm; never cancelled, since a timeout terminates this
        # process via broker.shutdown().
        signal.signal(signal.SIGALRM, self._on_sigalrm)
        signal.alarm(self.timeout_secs)

    def _run_module(self):
        # Force detached, non-TTY execution regardless of caller kwargs.
        kwargs = dict(self.kwargs, **{
            'detach': True,
            'econtext': self.econtext,
            'emulate_tty': False,
        })
        return run_module(kwargs)

    def _parse_result(self, dct):
        # Strip any non-JSON noise surrounding the module's stdout before
        # decoding, preserving Ansible's warnings from the filter step.
        filtered, warnings = (
            ansible.module_utils.json_utils.
            _filter_non_json_lines(dct['stdout'])
        )
        result = json.loads(filtered)
        result.setdefault('warnings', []).extend(warnings)
        result['stderr'] = dct['stderr'] or result.get('stderr', '')
        self._update(result)

    def _run(self):
        """
        1. Immediately updates the status file to mark the job as started.
        2. Installs a timer/signal handler to implement the time limit.
        3. Runs as with run_module(), writing the result to the status file.

        :param dict kwargs:
            Runner keyword arguments.
        :param str job_id:
            String job ID.
        :param int timeout_secs:
            If >0, limit the task's maximum run time.
        """
        self._update({
            'started': 1,
            'finished': 0,
            'pid': os.getpid()
        })
        # Unblock the controller: the initial status file now exists.
        self.started_sender.send(True)

        if self.timeout_secs > 0:
            self._install_alarm()

        dct = self._run_module()
        if not self._timed_out:
            # After SIGALRM fires, there is a window between broker responding
            # to shutdown() by killing the process, and work continuing on the
            # main thread. If main thread was asleep in at least
            # basic.py/select.select(), an EINTR will be raised. We want to
            # discard that exception.
            try:
                self._parse_result(dct)
            except Exception:
                self._update({
                    "failed": 1,
                    "msg": traceback.format_exc(),
                    "data": dct['stdout'],  # temporary notice only
                    "stderr": dct['stderr']
                })

    def run(self):
        try:
            try:
                self._run()
            except Exception:
                self._update({
                    "failed": 1,
                    "msg": traceback.format_exc(),
                })
        finally:
            # This detached process exists only to run one job; shut down the
            # broker so the process exits.
            self.econtext.broker.shutdown()
|
|
||||||
|
|
||||||
|
|
||||||
@mitogen.core.takes_econtext
def run_module_async(kwargs, job_id, timeout_secs, started_sender, econtext):
    """
    Execute a module with its run status and result written to a file,
    terminating on the process on completion. This function must run in a child
    forked using :func:`create_fork_child`.

    @param mitogen.core.Sender started_sender:
        A sender that will receive :data:`True` once the job has reached a
        point where its initial job file has been written. This is required to
        avoid a race where an overly eager controller can check for a task
        before it has reached that point in execution, which is possible at
        least on Python 2.4, where forking is not available for async tasks.
    """
    runner = AsyncRunner(
        job_id=job_id,
        timeout_secs=timeout_secs,
        started_sender=started_sender,
        econtext=econtext,
        kwargs=kwargs,
    )
    runner.run()
|
|
||||||
|
|
||||||
|
|
||||||
def get_user_shell():
    """
    For commands executed directly via an SSH command-line, SSH looks up the
    user's shell via getpwuid() and only defaults to /bin/sh if that field is
    missing or empty.
    """
    try:
        shell = pwd.getpwuid(os.geteuid()).pw_shell
    except KeyError:
        # No passwd entry for the effective UID.
        shell = None

    if shell:
        return shell
    return '/bin/sh'
|
|
||||||
|
|
||||||
|
|
||||||
def exec_args(args, in_data='', chdir=None, shell=None, emulate_tty=False):
    """
    Run a command in a subprocess, emulating the argument handling behaviour of
    SSH.

    :param list[str]:
        Argument vector.
    :param bytes in_data:
        Optional standard input for the command.
    :param bool emulate_tty:
        If :data:`True`, arrange for stdout and stderr to be merged into the
        stdout pipe and for LF to be translated into CRLF, emulating the
        behaviour of a TTY.
    :return:
        (return code, stdout bytes, stderr bytes)
    """
    LOG.debug('exec_args(%r, ..., chdir=%r)', args, chdir)
    assert isinstance(args, list)

    # TTY emulation merges stderr into the stdout pipe.
    stderr_target = subprocess.STDOUT if emulate_tty else subprocess.PIPE

    proc = subprocess.Popen(
        args=args,
        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
        stderr=stderr_target,
        cwd=chdir,
    )
    out, err = proc.communicate(in_data)

    if emulate_tty:
        out = out.replace(b('\n'), b('\r\n'))
    return proc.returncode, out, err or b('')
|
|
||||||
|
|
||||||
|
|
||||||
def exec_command(cmd, in_data='', chdir=None, shell=None, emulate_tty=False):
    """
    Run a command in a subprocess, emulating the argument handling behaviour of
    SSH.

    :param bytes cmd:
        String command line, passed to user's shell.
    :param bytes in_data:
        Optional standard input for the command.
    :param chdir:
        Optional working directory, forwarded to :func:`exec_args`.
    :param shell:
        Forwarded to :func:`exec_args` for interface parity; exec_args does
        not consume it.
    :param bool emulate_tty:
        If :data:`True`, merge stderr into stdout and translate LF to CRLF.
    :return:
        (return code, stdout bytes, stderr bytes)
    """
    assert isinstance(cmd, mitogen.core.UnicodeType)
    # Run via the login shell, as SSH would for a plain command string.
    return exec_args(
        args=[get_user_shell(), '-c', cmd],
        in_data=in_data,
        chdir=chdir,
        shell=shell,
        emulate_tty=emulate_tty,
    )
|
|
||||||
|
|
||||||
|
|
||||||
def read_path(path):
    """
    Fetch the contents of a filesystem `path` as bytes.

    :param str path: path of the file to read.
    :returns: file content as bytes.
    """
    # Fixed: the descriptor was previously leaked until garbage collection
    # (open(path).read() with no close). try/finally rather than `with` keeps
    # compatibility with the ancient interpreters this file supports.
    fp = open(path, 'rb')
    try:
        return fp.read()
    finally:
        fp.close()
|
|
||||||
|
|
||||||
|
|
||||||
def set_file_owner(path, owner, group=None, fd=None):
    """
    Change the owner and group of a file.

    :param str path: target path, used when no usable descriptor is given.
    :param str owner: username to own the file, or falsy to use the current
        effective UID.
    :param str group: group name for the file, or falsy to use the current
        effective GID.
    :param int fd: optional open descriptor for the same file; preferred
        where os.fchown() exists as it is immune to path races.
    """
    if owner:
        uid = pwd.getpwnam(owner).pw_uid
    else:
        uid = os.geteuid()

    if group:
        gid = grp.getgrnam(group).gr_gid
    else:
        gid = os.getegid()

    if fd is not None and hasattr(os, 'fchown'):
        # Fixed: os.fchown()/os.chown() take uid and gid as separate
        # arguments; previously a single (uid, gid) tuple was passed, which
        # raised TypeError on every call.
        os.fchown(fd, uid, gid)
    else:
        # Python<2.6 lacked os.fchown().
        os.chown(path, uid, gid)
|
|
||||||
|
|
||||||
|
|
||||||
def write_path(path, s, owner=None, group=None, mode=None,
               utimes=None, sync=False):
    """
    Writes bytes `s` to a filesystem `path`, atomically replacing any existing
    file by renaming a fully written temporary sibling over it.

    :param str path: destination path.
    :param bytes s: data to write.
    :param str owner: optional username to own the file.
    :param str group: optional group name for the file.
    :param mode: optional numeric or chmod(1)-style mode specification.
    :param tuple utimes: optional (atime, mtime) pair applied after rename.
    :param bool sync: if :data:`True`, fsync the data before renaming into
        place so a crash leaves either the old or the new file intact.
    """
    path = os.path.abspath(path)
    fd, tmp_path = tempfile.mkstemp(suffix='.tmp',
                                    prefix='.ansible_mitogen_transfer-',
                                    dir=os.path.dirname(path))
    fp = os.fdopen(fd, 'wb', mitogen.core.CHUNK_SIZE)
    LOG.debug('write_path(path=%r) temporary file: %s', path, tmp_path)

    try:
        try:
            if mode:
                set_file_mode(tmp_path, mode, fd=fp.fileno())
            if owner or group:
                set_file_owner(tmp_path, owner, group, fd=fp.fileno())
            fp.write(s)
            if sync:
                # Bug fix: fsync must run while the descriptor is still open.
                # Previously it ran after fp.close(), so fp.fileno() raised
                # ValueError whenever sync=True.
                fp.flush()
                os.fsync(fp.fileno())
        finally:
            fp.close()

        os.rename(tmp_path, path)
    except BaseException:
        # Clean up the temporary file on any failure before re-raising.
        os.unlink(tmp_path)
        raise

    if utimes:
        os.utime(path, utimes)
|
|
||||||
|
|
||||||
|
|
||||||
#: Parses a single chmod(1)-style clause into (who, op, perms) groups,
#: e.g. "go+rx" -> ('go', '+', 'rx').
CHMOD_CLAUSE_PAT = re.compile(r'([uoga]*)([+\-=])([ugo]|[rwx]*)')

#: Permission-bit mask covered by each "who" letter.
CHMOD_MASKS = {
    'u': stat.S_IRWXU,
    'g': stat.S_IRWXG,
    'o': stat.S_IRWXO,
    'a': (stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO),
}

#: Individual r/w/x bits for each "who" letter.
CHMOD_BITS = {
    'u': {'r': stat.S_IRUSR, 'w': stat.S_IWUSR, 'x': stat.S_IXUSR},
    'g': {'r': stat.S_IRGRP, 'w': stat.S_IWGRP, 'x': stat.S_IXGRP},
    'o': {'r': stat.S_IROTH, 'w': stat.S_IWOTH, 'x': stat.S_IXOTH},
    'a': {
        'r': (stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH),
        'w': (stat.S_IWUSR | stat.S_IWGRP | stat.S_IWOTH),
        'x': (stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH)
    }
}
|
|
||||||
|
|
||||||
|
|
||||||
def apply_mode_spec(spec, mode):
    """
    Given a symbolic file mode change specification in the style of chmod(1)
    `spec`, apply changes in the specification to the numeric file mode `mode`.

    :param spec: comma-separated clauses such as ``"u+rwx,go-w"``.
    :param int mode: starting numeric mode.
    :returns: the updated numeric mode.
    """
    for clause in mitogen.core.to_text(spec).split(','):
        # NOTE(review): match is None for a clause the pattern rejects, which
        # raises AttributeError below -- confirm callers pre-validate specs.
        match = CHMOD_CLAUSE_PAT.match(clause)
        who, op, perms = match.groups()
        for ch in who or 'a':
            mask = CHMOD_MASKS[ch]
            bits = CHMOD_BITS[ch]
            # Save the bits currently set for this "who" class, then clear
            # them before applying the operator.
            cur_perm_bits = mode & mask
            new_perm_bits = reduce(operator.or_, (bits[p] for p in perms), 0)
            mode &= ~mask
            if op == '=':
                mode |= new_perm_bits
            elif op == '+':
                mode |= new_perm_bits | cur_perm_bits
            else:
                # '-': keep the old bits minus the requested ones.
                mode |= cur_perm_bits & ~new_perm_bits
    return mode
|
|
||||||
|
|
||||||
|
|
||||||
def set_file_mode(path, spec, fd=None):
    """
    Update the permissions of a file using the same syntax as chmod(1).

    :param str path: target path.
    :param spec: an integer mode, a string of octal digits, or a
        chmod(1)-style symbolic specification.
    :param int fd: optional open descriptor for the same file; preferred
        where os.fchmod() exists as it is immune to path races.
    """
    if isinstance(spec, int):
        new_mode = spec
    elif not mitogen.core.PY3 and isinstance(spec, long):
        # `long` only exists on Python 2; short-circuit evaluation of the
        # PY3 test keeps the name from being evaluated on Python 3.
        new_mode = spec
    elif spec.isdigit():
        # Octal digit string, e.g. "0755".
        new_mode = int(spec, 8)
    else:
        # Symbolic spec needs the current mode as its starting point.
        mode = os.stat(path).st_mode
        new_mode = apply_mode_spec(spec, mode)

    if fd is not None and hasattr(os, 'fchmod'):
        os.fchmod(fd, new_mode)
    else:
        # Python<2.6 lacked os.fchmod().
        os.chmod(path, new_mode)
|
|
||||||
|
|
||||||
|
|
||||||
def file_exists(path):
    """
    Return :data:`True` if `path` exists. This is a wrapper function over
    :func:`os.path.exists`, since its implementation module varies across
    Python versions.
    """
    return bool(os.path.exists(path))
|
|
|
@ -1,636 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
from __future__ import absolute_import
|
|
||||||
from __future__ import unicode_literals
|
|
||||||
|
|
||||||
"""
|
|
||||||
Mitogen extends Ansible's target configuration mechanism in several ways that
|
|
||||||
require some care:
|
|
||||||
|
|
||||||
* Per-task configurables in Ansible like ansible_python_interpreter are
|
|
||||||
connection-layer configurables in Mitogen. They must be extracted during each
|
|
||||||
task execution to form the complete connection-layer configuration.
|
|
||||||
|
|
||||||
* Mitogen has extra configurables not supported by Ansible at all, such as
|
|
||||||
mitogen_ssh_debug_level. These are extracted the same way as
|
|
||||||
ansible_python_interpreter.
|
|
||||||
|
|
||||||
* Mitogen allows connections to be delegated to other machines. Ansible has no
|
|
||||||
internal framework for this, and so Mitogen must figure out a delegated
|
|
||||||
connection configuration all on its own. It cannot reuse much of the Ansible
|
|
||||||
machinery for building a connection configuration, as that machinery is
|
|
||||||
deeply spread out and hard-wired to expect Ansible's usual mode of operation.
|
|
||||||
|
|
||||||
For normal and delegate_to connections, Ansible's PlayContext is reused where
|
|
||||||
possible to maximize compatibility, but for proxy hops, configurations are
|
|
||||||
built up using the HostVars magic class to call VariableManager.get_vars()
|
|
||||||
behind the scenes on our behalf. Where Ansible has multiple sources of a
|
|
||||||
configuration item, for example, ansible_ssh_extra_args, Mitogen must (ideally
|
|
||||||
perfectly) reproduce how Ansible arrives at its value, without using mechanisms
|
|
||||||
that are hard-wired or change across Ansible versions.
|
|
||||||
|
|
||||||
That is what this file is for. It exports two spec classes, one that takes all
|
|
||||||
information from PlayContext, and another that takes (almost) all information
|
|
||||||
from HostVars.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import abc
|
|
||||||
import os
|
|
||||||
import ansible.utils.shlex
|
|
||||||
import ansible.constants as C
|
|
||||||
|
|
||||||
from ansible.module_utils.six import with_metaclass
|
|
||||||
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
|
|
||||||
|
|
||||||
def parse_python_path(s):
    """
    Parse the value configured for ansible_python_interpreter using shell
    quoting rules and return the corresponding argument vector.

    Returns :data:`None` when `s` is empty or :data:`None`.
    """
    return ansible.utils.shlex.shlex_split(s) if s else None
|
|
||||||
|
|
||||||
|
|
||||||
def optional_secret(value):
    """
    Wrap `value` in :class:`mitogen.core.Secret` so it is masked from logs,
    or return :data:`None` when no value was supplied.
    """
    if value is None:
        return None
    return mitogen.core.Secret(value)
|
|
||||||
|
|
||||||
|
|
||||||
def first_true(it, default=None):
    """
    Return the first truthy element of `it`, or `default` when every
    element is falsy (or `it` is empty).
    """
    return next((candidate for candidate in it if candidate), default)
|
|
||||||
|
|
||||||
|
|
||||||
class Spec(with_metaclass(abc.ABCMeta, object)):
    """
    A source for variables that comprise a connection configuration.

    Concrete subclasses decide where each value comes from (Ansible's
    PlayContext for real task connections, or HostVars for mitogen_via=
    delegation); consumers only use the uniform accessors below.
    """

    @abc.abstractmethod
    def transport(self):
        """
        The name of the Ansible plug-in implementing the connection.
        """

    @abc.abstractmethod
    def inventory_name(self):
        """
        The name of the target being connected to as it appears in Ansible's
        inventory.
        """

    @abc.abstractmethod
    def remote_addr(self):
        """
        The network address of the target, or for container and other special
        targets, some other unique identifier.
        """

    @abc.abstractmethod
    def remote_user(self):
        """
        The username of the login account on the target.
        """

    @abc.abstractmethod
    def password(self):
        """
        The password of the login account on the target.
        """

    @abc.abstractmethod
    def become(self):
        """
        :data:`True` if privilege escalation should be active.
        """

    @abc.abstractmethod
    def become_method(self):
        """
        The name of the Ansible become method to use.
        """

    @abc.abstractmethod
    def become_user(self):
        """
        The username of the target account for become.
        """

    @abc.abstractmethod
    def become_pass(self):
        """
        The password of the target account for become.
        """

    @abc.abstractmethod
    def port(self):
        """
        The port of the login service on the target machine.
        """

    @abc.abstractmethod
    def python_path(self):
        """
        Path to the Python interpreter on the target machine.
        """

    @abc.abstractmethod
    def private_key_file(self):
        """
        Path to the SSH private key file to use to login.
        """

    @abc.abstractmethod
    def ssh_executable(self):
        """
        Path to the SSH executable.
        """

    @abc.abstractmethod
    def timeout(self):
        """
        The generic timeout for all connections.
        """

    @abc.abstractmethod
    def ansible_ssh_timeout(self):
        """
        The SSH-specific timeout for a connection.
        """

    @abc.abstractmethod
    def ssh_args(self):
        """
        The list of additional arguments that should be included in an SSH
        invocation.
        """

    @abc.abstractmethod
    def become_exe(self):
        """
        The path to the executable implementing the become method on the
        remote machine.
        """

    @abc.abstractmethod
    def sudo_args(self):
        """
        The list of additional arguments that should be included in a become
        invocation.
        """
        # TODO: split out into sudo_args/become_args.

    @abc.abstractmethod
    def mitogen_via(self):
        """
        The value of the mitogen_via= variable for this connection. Indicates
        the connection should be established via an intermediary.
        """

    @abc.abstractmethod
    def mitogen_kind(self):
        """
        The type of container to use with the "setns" transport.
        """

    @abc.abstractmethod
    def mitogen_mask_remote_name(self):
        """
        Specifies whether to set a fixed "remote_name" field. The remote_name
        is the suffix of `argv[0]` for remote interpreters. By default it
        includes identifying information from the local process, which may be
        undesirable in some circumstances.
        """

    @abc.abstractmethod
    def mitogen_docker_path(self):
        """
        The path to the "docker" program for the 'docker' transport.
        """

    @abc.abstractmethod
    def mitogen_kubectl_path(self):
        """
        The path to the "kubectl" program for the 'kubectl' transport.
        """

    @abc.abstractmethod
    def mitogen_lxc_path(self):
        """
        The path to the "lxc" program for the 'lxd' transport.
        """

    @abc.abstractmethod
    def mitogen_lxc_attach_path(self):
        """
        The path to the "lxc-attach" program for the 'lxc' transport.
        """

    @abc.abstractmethod
    def mitogen_lxc_info_path(self):
        """
        The path to the "lxc-info" program for the 'lxc' transport.
        """

    @abc.abstractmethod
    def mitogen_machinectl_path(self):
        """
        The path to the "machinectl" program for the 'setns' transport.
        """

    @abc.abstractmethod
    def mitogen_ssh_debug_level(self):
        """
        The SSH debug level.
        """

    @abc.abstractmethod
    def mitogen_ssh_compression(self):
        """
        Whether SSH compression is enabled.
        """

    @abc.abstractmethod
    def extra_args(self):
        """
        Connection-specific arguments.
        """
|
|
||||||
|
|
||||||
|
|
||||||
class PlayContextSpec(Spec):
    """
    PlayContextSpec takes almost all its information as-is from Ansible's
    PlayContext. It is used for normal connections and delegate_to
    connections, and should always be accurate.
    """
    def __init__(self, connection, play_context, transport, inventory_name):
        # :param connection: the active ansible_mitogen Connection instance,
        #   used here only for get_task_var()/get_extra_args() lookups.
        # :param play_context: the Ansible PlayContext for this task.
        # :param str transport: the connection plug-in name.
        # :param str inventory_name: the target's inventory hostname.
        self._connection = connection
        self._play_context = play_context
        self._transport = transport
        self._inventory_name = inventory_name

    def transport(self):
        return self._transport

    def inventory_name(self):
        return self._inventory_name

    def remote_addr(self):
        return self._play_context.remote_addr

    def remote_user(self):
        return self._play_context.remote_user

    def become(self):
        return self._play_context.become

    def become_method(self):
        return self._play_context.become_method

    def become_user(self):
        return self._play_context.become_user

    def become_pass(self):
        # Wrapped in Secret so it is masked from Mitogen's logging.
        return optional_secret(self._play_context.become_pass)

    def password(self):
        return optional_secret(self._play_context.password)

    def port(self):
        return self._play_context.port

    def python_path(self):
        # ansible_python_interpreter is a per-task variable in Ansible, so it
        # must come from the task vars rather than from PlayContext.
        s = self._connection.get_task_var('ansible_python_interpreter')
        # #511, #536: executor/module_common.py::_get_shebang() hard-wires
        # "/usr/bin/python" as the default interpreter path if no other
        # interpreter is specified.
        return parse_python_path(s or '/usr/bin/python')

    def private_key_file(self):
        return self._play_context.private_key_file

    def ssh_executable(self):
        return self._play_context.ssh_executable

    def timeout(self):
        return self._play_context.timeout

    def ansible_ssh_timeout(self):
        # Ordered fallback chain: task-level ansible_timeout, then
        # ansible_ssh_timeout, then the generic timeout. Order matters.
        return (
            self._connection.get_task_var('ansible_timeout') or
            self._connection.get_task_var('ansible_ssh_timeout') or
            self.timeout()
        )

    def ssh_args(self):
        # Concatenation of ssh_args + ssh_common_args + ssh_extra_args, each
        # shell-split. getattr() guards against older Ansible releases where
        # an attribute may be absent from PlayContext.
        return [
            mitogen.core.to_text(term)
            for s in (
                getattr(self._play_context, 'ssh_args', ''),
                getattr(self._play_context, 'ssh_common_args', ''),
                getattr(self._play_context, 'ssh_extra_args', '')
            )
            for term in ansible.utils.shlex.shlex_split(s or '')
        ]

    def become_exe(self):
        return self._play_context.become_exe

    def sudo_args(self):
        # First truthy source wins: become_flags, legacy sudo_flags, then the
        # global defaults (the getattr() fallbacks cover Ansible 2.3).
        return [
            mitogen.core.to_text(term)
            for term in ansible.utils.shlex.shlex_split(
                first_true((
                    self._play_context.become_flags,
                    self._play_context.sudo_flags,
                    # Ansible 2.3.
                    getattr(C, 'DEFAULT_BECOME_FLAGS', ''),
                    getattr(C, 'DEFAULT_SUDO_FLAGS', '')
                ), default='')
            )
        ]

    def mitogen_via(self):
        return self._connection.get_task_var('mitogen_via')

    def mitogen_kind(self):
        return self._connection.get_task_var('mitogen_kind')

    def mitogen_mask_remote_name(self):
        return self._connection.get_task_var('mitogen_mask_remote_name')

    def mitogen_docker_path(self):
        return self._connection.get_task_var('mitogen_docker_path')

    def mitogen_kubectl_path(self):
        return self._connection.get_task_var('mitogen_kubectl_path')

    def mitogen_lxc_path(self):
        return self._connection.get_task_var('mitogen_lxc_path')

    def mitogen_lxc_attach_path(self):
        return self._connection.get_task_var('mitogen_lxc_attach_path')

    def mitogen_lxc_info_path(self):
        return self._connection.get_task_var('mitogen_lxc_info_path')

    def mitogen_machinectl_path(self):
        return self._connection.get_task_var('mitogen_machinectl_path')

    def mitogen_ssh_debug_level(self):
        return self._connection.get_task_var('mitogen_ssh_debug_level')

    def mitogen_ssh_compression(self):
        return self._connection.get_task_var('mitogen_ssh_compression')

    def extra_args(self):
        return self._connection.get_extra_args()
|
|
||||||
|
|
||||||
|
|
||||||
class MitogenViaSpec(Spec):
    """
    MitogenViaSpec takes most of its information from the HostVars of the
    running task. HostVars is a lightweight wrapper around VariableManager, so
    it is better to say that VariableManager.get_vars() is the ultimate source
    of MitogenViaSpec's information.

    Due to this, mitogen_via= hosts must have all their configuration
    information represented as host and group variables. We cannot use any
    per-task configuration, as all that data belongs to the real target host.

    Ansible uses all kinds of strange historical logic for calculating
    variables, including making their precedence configurable. MitogenViaSpec
    must ultimately reimplement all of that logic. It is likely that if you
    are having a configuration problem with connection delegation, the answer
    to your problem lies in the method implementations below!
    """
    def __init__(self, inventory_name, host_vars, become_method, become_user,
                 play_context):
        """
        :param str inventory_name:
            The inventory name of the intermediary machine, i.e. not the
            target machine.
        :param dict host_vars:
            The HostVars magic dictionary provided by Ansible in task_vars.
        :param str become_method:
            If the mitogen_via= spec included a become method, the method it
            specifies.
        :param str become_user:
            If the mitogen_via= spec included a become user, the user it
            specifies.
        :param PlayContext play_context:
            For some global values **only**, the PlayContext used to describe
            the real target machine. Values from this object are **strictly
            restricted** to values that are Ansible-global, e.g. the passwords
            specified interactively.
        """
        self._inventory_name = inventory_name
        self._host_vars = host_vars
        self._become_method = become_method
        self._become_user = become_user
        # Dangerous! You may find a variable you want in this object, but it's
        # almost certainly for the wrong machine!
        self._dangerous_play_context = play_context

    def transport(self):
        return (
            self._host_vars.get('ansible_connection') or
            C.DEFAULT_TRANSPORT
        )

    def inventory_name(self):
        return self._inventory_name

    def remote_addr(self):
        # Fallback chain mirrors play_context.py::MAGIC_VARIABLE_MAPPING;
        # order matters.
        return (
            self._host_vars.get('ansible_ssh_host') or
            self._host_vars.get('ansible_host') or
            self._inventory_name
        )

    def remote_user(self):
        return (
            self._host_vars.get('ansible_ssh_user') or
            self._host_vars.get('ansible_user') or
            C.DEFAULT_REMOTE_USER
        )

    def become(self):
        # Become is active iff the mitogen_via= spec named a become user.
        return bool(self._become_user)

    def become_method(self):
        return (
            self._become_method or
            self._host_vars.get('ansible_become_method') or
            C.DEFAULT_BECOME_METHOD
        )

    def become_user(self):
        return self._become_user

    def become_pass(self):
        # Wrapped in Secret so it is masked from Mitogen's logging.
        return optional_secret(
            self._host_vars.get('ansible_become_password') or
            self._host_vars.get('ansible_become_pass')
        )

    def password(self):
        return optional_secret(
            self._host_vars.get('ansible_ssh_pass') or
            self._host_vars.get('ansible_password')
        )

    def port(self):
        return (
            self._host_vars.get('ansible_ssh_port') or
            self._host_vars.get('ansible_port') or
            C.DEFAULT_REMOTE_PORT
        )

    def python_path(self):
        s = self._host_vars.get('ansible_python_interpreter')
        # #511, #536: executor/module_common.py::_get_shebang() hard-wires
        # "/usr/bin/python" as the default interpreter path if no other
        # interpreter is specified.
        return parse_python_path(s or '/usr/bin/python')

    def private_key_file(self):
        # TODO: must come from PlayContext too.
        return (
            self._host_vars.get('ansible_ssh_private_key_file') or
            self._host_vars.get('ansible_private_key_file') or
            C.DEFAULT_PRIVATE_KEY_FILE
        )

    def ssh_executable(self):
        return (
            self._host_vars.get('ansible_ssh_executable') or
            C.ANSIBLE_SSH_EXECUTABLE
        )

    def timeout(self):
        # TODO: must come from PlayContext too.
        return C.DEFAULT_TIMEOUT

    def ansible_ssh_timeout(self):
        return (
            self._host_vars.get('ansible_timeout') or
            self._host_vars.get('ansible_ssh_timeout') or
            self.timeout()
        )

    def ssh_args(self):
        # Concatenation of ssh_args + ssh_common_args + ssh_extra_args, each
        # resolved from host vars with environment-variable fallback, then
        # shell-split.
        return [
            mitogen.core.to_text(term)
            for s in (
                (
                    self._host_vars.get('ansible_ssh_args') or
                    getattr(C, 'ANSIBLE_SSH_ARGS', None) or
                    os.environ.get('ANSIBLE_SSH_ARGS')
                    # TODO: ini entry. older versions.
                ),
                (
                    self._host_vars.get('ansible_ssh_common_args') or
                    os.environ.get('ANSIBLE_SSH_COMMON_ARGS')
                    # TODO: ini entry.
                ),
                (
                    self._host_vars.get('ansible_ssh_extra_args') or
                    os.environ.get('ANSIBLE_SSH_EXTRA_ARGS')
                    # TODO: ini entry.
                ),
            )
            for term in ansible.utils.shlex.shlex_split(s)
            if s
        ]

    def become_exe(self):
        return (
            self._host_vars.get('ansible_become_exe') or
            C.DEFAULT_BECOME_EXE
        )

    def sudo_args(self):
        return [
            mitogen.core.to_text(term)
            for s in (
                self._host_vars.get('ansible_sudo_flags') or '',
                self._host_vars.get('ansible_become_flags') or '',
            )
            for term in ansible.utils.shlex.shlex_split(s)
        ]

    def mitogen_via(self):
        return self._host_vars.get('mitogen_via')

    def mitogen_kind(self):
        return self._host_vars.get('mitogen_kind')

    def mitogen_mask_remote_name(self):
        return self._host_vars.get('mitogen_mask_remote_name')

    def mitogen_docker_path(self):
        return self._host_vars.get('mitogen_docker_path')

    def mitogen_kubectl_path(self):
        return self._host_vars.get('mitogen_kubectl_path')

    def mitogen_lxc_path(self):
        # Bug fix: previously read `self.host_vars`, an attribute that is
        # never set (__init__ stores the mapping as `self._host_vars`), so
        # this accessor raised AttributeError whenever consulted.
        return self._host_vars.get('mitogen_lxc_path')

    def mitogen_lxc_attach_path(self):
        return self._host_vars.get('mitogen_lxc_attach_path')

    def mitogen_lxc_info_path(self):
        return self._host_vars.get('mitogen_lxc_info_path')

    def mitogen_machinectl_path(self):
        return self._host_vars.get('mitogen_machinectl_path')

    def mitogen_ssh_debug_level(self):
        return self._host_vars.get('mitogen_ssh_debug_level')

    def mitogen_ssh_compression(self):
        return self._host_vars.get('mitogen_ssh_compression')

    def extra_args(self):
        return []  # TODO
|
|
|
@ -1,23 +0,0 @@
|
||||||
Metadata-Version: 1.1
|
|
||||||
Name: mitogen
|
|
||||||
Version: 0.2.7
|
|
||||||
Summary: Library for writing distributed self-replicating programs.
|
|
||||||
Home-page: https://github.com/dw/mitogen/
|
|
||||||
Author: David Wilson
|
|
||||||
Author-email: UNKNOWN
|
|
||||||
License: New BSD
|
|
||||||
Description: UNKNOWN
|
|
||||||
Platform: UNKNOWN
|
|
||||||
Classifier: Environment :: Console
|
|
||||||
Classifier: Intended Audience :: System Administrators
|
|
||||||
Classifier: License :: OSI Approved :: BSD License
|
|
||||||
Classifier: Operating System :: POSIX
|
|
||||||
Classifier: Programming Language :: Python
|
|
||||||
Classifier: Programming Language :: Python :: 2.4
|
|
||||||
Classifier: Programming Language :: Python :: 2.5
|
|
||||||
Classifier: Programming Language :: Python :: 2.6
|
|
||||||
Classifier: Programming Language :: Python :: 2.7
|
|
||||||
Classifier: Programming Language :: Python :: 3.6
|
|
||||||
Classifier: Programming Language :: Python :: Implementation :: CPython
|
|
||||||
Classifier: Topic :: System :: Distributed Computing
|
|
||||||
Classifier: Topic :: System :: Systems Administration
|
|
|
@ -1,78 +0,0 @@
|
||||||
LICENSE
|
|
||||||
MANIFEST.in
|
|
||||||
README.md
|
|
||||||
setup.cfg
|
|
||||||
setup.py
|
|
||||||
ansible_mitogen/__init__.py
|
|
||||||
ansible_mitogen/affinity.py
|
|
||||||
ansible_mitogen/connection.py
|
|
||||||
ansible_mitogen/loaders.py
|
|
||||||
ansible_mitogen/logging.py
|
|
||||||
ansible_mitogen/mixins.py
|
|
||||||
ansible_mitogen/module_finder.py
|
|
||||||
ansible_mitogen/parsing.py
|
|
||||||
ansible_mitogen/planner.py
|
|
||||||
ansible_mitogen/process.py
|
|
||||||
ansible_mitogen/runner.py
|
|
||||||
ansible_mitogen/services.py
|
|
||||||
ansible_mitogen/strategy.py
|
|
||||||
ansible_mitogen/target.py
|
|
||||||
ansible_mitogen/transport_config.py
|
|
||||||
ansible_mitogen/compat/__init__.py
|
|
||||||
ansible_mitogen/compat/simplejson/__init__.py
|
|
||||||
ansible_mitogen/compat/simplejson/decoder.py
|
|
||||||
ansible_mitogen/compat/simplejson/encoder.py
|
|
||||||
ansible_mitogen/compat/simplejson/scanner.py
|
|
||||||
ansible_mitogen/plugins/__init__.py
|
|
||||||
ansible_mitogen/plugins/action/__init__.py
|
|
||||||
ansible_mitogen/plugins/action/mitogen_get_stack.py
|
|
||||||
ansible_mitogen/plugins/connection/__init__.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_doas.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_docker.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_jail.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_kubectl.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_local.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_lxc.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_lxd.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_machinectl.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_setns.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_ssh.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_su.py
|
|
||||||
ansible_mitogen/plugins/connection/mitogen_sudo.py
|
|
||||||
ansible_mitogen/plugins/strategy/__init__.py
|
|
||||||
ansible_mitogen/plugins/strategy/mitogen.py
|
|
||||||
ansible_mitogen/plugins/strategy/mitogen_free.py
|
|
||||||
ansible_mitogen/plugins/strategy/mitogen_host_pinned.py
|
|
||||||
ansible_mitogen/plugins/strategy/mitogen_linear.py
|
|
||||||
mitogen/__init__.py
|
|
||||||
mitogen/core.py
|
|
||||||
mitogen/debug.py
|
|
||||||
mitogen/doas.py
|
|
||||||
mitogen/docker.py
|
|
||||||
mitogen/fakessh.py
|
|
||||||
mitogen/fork.py
|
|
||||||
mitogen/jail.py
|
|
||||||
mitogen/kubectl.py
|
|
||||||
mitogen/lxc.py
|
|
||||||
mitogen/lxd.py
|
|
||||||
mitogen/master.py
|
|
||||||
mitogen/minify.py
|
|
||||||
mitogen/os_fork.py
|
|
||||||
mitogen/parent.py
|
|
||||||
mitogen/profiler.py
|
|
||||||
mitogen/select.py
|
|
||||||
mitogen/service.py
|
|
||||||
mitogen/setns.py
|
|
||||||
mitogen/ssh.py
|
|
||||||
mitogen/su.py
|
|
||||||
mitogen/sudo.py
|
|
||||||
mitogen/unix.py
|
|
||||||
mitogen/utils.py
|
|
||||||
mitogen.egg-info/PKG-INFO
|
|
||||||
mitogen.egg-info/SOURCES.txt
|
|
||||||
mitogen.egg-info/dependency_links.txt
|
|
||||||
mitogen.egg-info/not-zip-safe
|
|
||||||
mitogen.egg-info/top_level.txt
|
|
||||||
mitogen/compat/__init__.py
|
|
||||||
mitogen/compat/pkgutil.py
|
|
||||||
mitogen/compat/tokenize.py
|
|
|
@ -1 +0,0 @@
|
||||||
|
|
|
@ -1 +0,0 @@
|
||||||
|
|
|
@ -1,2 +0,0 @@
|
||||||
ansible_mitogen
|
|
||||||
mitogen
|
|
|
@ -1,120 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe

"""
On the Mitogen master, this is imported from ``mitogen/__init__.py`` as would
be expected. On the slave, it is built dynamically during startup.
"""


#: Library version as a tuple.
__version__ = (0, 2, 7)


#: This is :data:`False` in slave contexts. Previously it was used to prevent
#: re-execution of :mod:`__main__` in single file programs, however that now
#: happens automatically.
is_master = True


#: This is `0` in a master, otherwise it is the master-assigned ID unique to
#: the slave context used for message routing.
context_id = 0


#: This is :data:`None` in a master, otherwise it is the master-assigned ID
#: unique to the slave's parent context.
parent_id = None


#: This is an empty list in a master, otherwise it is a list of parent context
#: IDs ordered from most direct to least direct.
parent_ids = []


import os
#: Default for :func:`main`'s `profiling` argument: enabled when the
#: MITOGEN_PROFILING environment variable is set to any value.
_default_profiling = os.environ.get('MITOGEN_PROFILING') is not None
# Keep the package namespace clean: `os` is only needed at import time.
del os
|
|
||||||
|
|
||||||
|
|
||||||
def main(log_level='INFO', profiling=_default_profiling):
    """
    Convenience decorator primarily useful for writing discardable test
    scripts.

    In the master process, when `func` is defined in the :mod:`__main__`
    module, arranges for `func(router)` to be invoked immediately, with
    :py:class:`mitogen.master.Router` construction and destruction handled
    just as in :py:func:`mitogen.utils.run_with_router`. In slaves, this
    function does nothing.

    :param str log_level:
        Logging package level to configure via
        :py:func:`mitogen.utils.log_to_file`.

    :param bool profiling:
        If :py:data:`True`, equivalent to setting
        :py:attr:`mitogen.master.Router.profiling` prior to router
        construction. This causes ``/tmp`` files to be created everywhere at
        the end of a successful run with :py:mod:`cProfile` output for every
        thread.

    Example:

    ::

        import mitogen
        import requests

        def get_url(url):
            return requests.get(url).text

        @mitogen.main()
        def main(router):
            z = router.ssh(hostname='k3')
            print(z.call(get_url, 'https://example.org/'))

    """

    def wrapper(func):
        # Only execute the decorated function when it is defined in the
        # script actually being run; return it untouched when this module
        # (or the decorated module) is merely imported.
        if func.__module__ != '__main__':
            return func
        import mitogen.parent
        import mitogen.utils
        if profiling:
            mitogen.core.enable_profiling()
            mitogen.master.Router.profiling = profiling
        # Bug fix: the bare name `utils` was never bound here — `import
        # mitogen.utils` binds only `mitogen` — so the previous
        # `utils.log_to_file(...)` / `utils.run_with_router` raised
        # NameError when a __main__-defined function was decorated.
        mitogen.utils.log_to_file(level=log_level)
        return mitogen.core._profile_hook(
            'app.main',
            mitogen.utils.run_with_router,
            func,
        )
    return wrapper
|
|
|
@ -1,593 +0,0 @@
|
||||||
"""Utilities to support packages."""
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
# NOTE: This module must remain compatible with Python 2.3, as it is shared
|
|
||||||
# by setuptools for distribution with Python 2.3 and up.
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import imp
|
|
||||||
import os.path
|
|
||||||
from types import ModuleType
|
|
||||||
|
|
||||||
# Public API of this vendored pkgutil copy — mirrors the stdlib module's
# export list (see the Python 2.3 compatibility note above).
__all__ = [
    'get_importer', 'iter_importers', 'get_loader', 'find_loader',
    'walk_packages', 'iter_modules', 'get_data',
    'ImpImporter', 'ImpLoader', 'read_code', 'extend_path',
]
|
|
||||||
|
|
||||||
def read_code(stream):
    """Read a code object from an open .pyc stream, or None on mismatch."""
    # This helper is needed in order for the PEP 302 emulation to
    # correctly handle compiled files
    import marshal

    magic = stream.read(4)
    if magic != imp.get_magic():
        # Bytecode was produced by a different interpreter version.
        return None

    stream.read(4) # Skip timestamp
    return marshal.load(stream)
|
|
||||||
|
|
||||||
|
|
||||||
def simplegeneric(func):
    """Build a minimal single-dispatch generic function.

    The returned callable dispatches on the class of its first positional
    argument, walking the class's MRO for a registered implementation and
    falling back to *func* when none is found.  Implementations are added
    through the ``.register(type[, func])`` attribute, which also works as
    a decorator when called with only a type.
    """
    dispatch_table = {}

    def dispatcher(*args, **kw):
        first = args[0]
        # Old-style instances expose their class only via __class__;
        # everything else is handled by type().
        try:
            klass = first.__class__
        except AttributeError:
            klass = type(first)
        try:
            lineage = klass.__mro__
        except AttributeError:
            # Old-style class: synthesize a new-style subclass purely to
            # borrow its MRO, then drop the synthetic class itself.
            try:
                class klass(klass, object):
                    pass
                lineage = klass.__mro__[1:]
            except TypeError:
                lineage = object,  # must be an ExtensionClass or some such :(
        for candidate in lineage:
            if candidate in dispatch_table:
                return dispatch_table[candidate](*args, **kw)
        return func(*args, **kw)

    try:
        dispatcher.__name__ = func.__name__
    except (TypeError, AttributeError):
        pass  # Python 2.3 doesn't allow functions to be renamed

    def register(typ, func=None):
        # register(T) returns a decorator; register(T, f) registers directly.
        if func is None:
            return lambda f: register(typ, f)
        dispatch_table[typ] = func
        return func

    dispatcher.__dict__ = func.__dict__
    dispatcher.__doc__ = func.__doc__
    dispatcher.register = register
    return dispatcher
|
|
||||||
|
|
||||||
|
|
||||||
def walk_packages(path=None, prefix='', onerror=None):
    """Yields (module_loader, name, ispkg) for all modules recursively
    on path, or, if path is None, all accessible modules.

    'path' should be either None or a list of paths to look for
    modules in.

    'prefix' is a string to output on the front of every module name
    on output.

    Note that this function must import all *packages* (NOT all
    modules!) on the given path, in order to access the __path__
    attribute to find submodules.

    'onerror' is a function which gets called with one argument (the
    name of the package which was being imported) if any exception
    occurs while trying to import a package.  If no onerror function is
    supplied, ImportErrors are caught and ignored, while all other
    exceptions are propagated, terminating the search.

    Examples:

    # list all modules python can access
    walk_packages()

    # list all submodules of ctypes
    walk_packages(ctypes.__path__, ctypes.__name__+'.')
    """

    def seen(p, m={}):
        # Deliberate mutable default: 'm' persists across calls and serves
        # as the set of path entries already visited during this walk.
        if p in m:
            return True
        m[p] = True

    for importer, name, ispkg in iter_modules(path, prefix):
        yield importer, name, ispkg

        if ispkg:
            # Packages must actually be imported so their __path__ can be
            # inspected for submodules.
            try:
                __import__(name)
            except ImportError:
                if onerror is not None:
                    onerror(name)
            except Exception:
                if onerror is not None:
                    onerror(name)
                else:
                    raise
            else:
                path = getattr(sys.modules[name], '__path__', None) or []

                # don't traverse path items we've seen before
                path = [p for p in path if not seen(p)]

                for item in walk_packages(path, name+'.', onerror):
                    yield item
|
|
||||||
|
|
||||||
|
|
||||||
def iter_modules(path=None, prefix=''):
    """Yields (module_loader, name, ispkg) for all submodules on path,
    or, if path is None, all top-level modules on sys.path.

    'path' should be either None or a list of paths to look for
    modules in.

    'prefix' is a string to output on the front of every module name
    on output.
    """

    if path is None:
        importers = iter_importers()
    else:
        importers = map(get_importer, path)

    # De-duplicate: the first importer that provides a name wins, mirroring
    # the precedence of the import system itself.
    yielded = {}
    for i in importers:
        for name, ispkg in iter_importer_modules(i, prefix):
            if name not in yielded:
                yielded[name] = 1
                yield i, name, ispkg
|
|
||||||
|
|
||||||
|
|
||||||
#@simplegeneric
def iter_importer_modules(importer, prefix=''):
    # Default implementation: delegate to the importer's own iter_modules()
    # hook when present; importers without one contribute no modules.
    if not hasattr(importer, 'iter_modules'):
        return []
    return importer.iter_modules(prefix)

# Turn the default above into a per-importer-type dispatch point (decorator
# syntax is avoided for Python 2.3 compatibility).
iter_importer_modules = simplegeneric(iter_importer_modules)
|
|
||||||
|
|
||||||
|
|
||||||
class ImpImporter:
    """PEP 302 Importer that wraps Python's "classic" import algorithm

    ImpImporter(dirname) produces a PEP 302 importer that searches that
    directory.  ImpImporter(None) produces a PEP 302 importer that searches
    the current sys.path, plus any modules that are frozen or built-in.

    Note that ImpImporter does not currently support being used by placement
    on sys.meta_path.
    """

    def __init__(self, path=None):
        # Directory to search, or None for the default sys.path search.
        self.path = path

    def find_module(self, fullname, path=None):
        # Note: we ignore 'path' argument since it is only used via meta_path
        subname = fullname.split(".")[-1]
        if subname != fullname and self.path is None:
            # A dotted name with no directory to search cannot be ours.
            return None
        if self.path is None:
            path = None
        else:
            path = [os.path.realpath(self.path)]
        try:
            file, filename, etc = imp.find_module(subname, path)
        except ImportError:
            return None
        return ImpLoader(fullname, file, filename, etc)

    def iter_modules(self, prefix=''):
        # Yields (name, ispkg) for every module/package directly under
        # self.path; yields nothing for the default (path=None) importer.
        if self.path is None or not os.path.isdir(self.path):
            return

        yielded = {}
        import inspect
        try:
            filenames = os.listdir(self.path)
        except OSError:
            # ignore unreadable directories like import does
            filenames = []
        filenames.sort() # handle packages before same-named modules

        for fn in filenames:
            modname = inspect.getmodulename(fn)
            if modname=='__init__' or modname in yielded:
                continue

            path = os.path.join(self.path, fn)
            ispkg = False

            if not modname and os.path.isdir(path) and '.' not in fn:
                # A plain subdirectory counts as a package only when it
                # contains an __init__ module of some kind.
                modname = fn
                try:
                    dircontents = os.listdir(path)
                except OSError:
                    # ignore unreadable directories like import does
                    dircontents = []
                for fn in dircontents:
                    subname = inspect.getmodulename(fn)
                    if subname=='__init__':
                        ispkg = True
                        break
                else:
                    continue    # not a package

            if modname and '.' not in modname:
                yielded[modname] = 1
                yield prefix + modname, ispkg
|
|
||||||
|
|
||||||
|
|
||||||
class ImpLoader:
    """PEP 302 Loader that wraps Python's "classic" import algorithm
    """
    # Lazily-populated caches used by get_code() / get_source().
    code = source = None

    def __init__(self, fullname, file, filename, etc):
        # 'etc' is the (suffix, mode, type) triple from imp.find_module().
        self.file = file
        self.filename = filename
        self.fullname = fullname
        self.etc = etc

    def load_module(self, fullname):
        self._reopen()
        try:
            mod = imp.load_module(fullname, self.file, self.filename, self.etc)
        finally:
            if self.file:
                self.file.close()
        # Note: we don't set __loader__ because we want the module to look
        # normal; i.e. this is just a wrapper for standard import machinery
        return mod

    def get_data(self, pathname):
        # PEP 302 extension: return the raw bytes of an arbitrary file.
        return open(pathname, "rb").read()

    def _reopen(self):
        # Reopen self.file if a previous operation closed it; the open mode
        # depends on the module type recorded by imp.find_module().
        if self.file and self.file.closed:
            mod_type = self.etc[2]
            if mod_type==imp.PY_SOURCE:
                self.file = open(self.filename, 'rU')
            elif mod_type in (imp.PY_COMPILED, imp.C_EXTENSION):
                self.file = open(self.filename, 'rb')

    def _fix_name(self, fullname):
        # This loader is bound to a single module; reject any other name.
        if fullname is None:
            fullname = self.fullname
        elif fullname != self.fullname:
            raise ImportError("Loader for module %s cannot handle "
                              "module %s" % (self.fullname, fullname))
        return fullname

    def is_package(self, fullname):
        fullname = self._fix_name(fullname)
        return self.etc[2]==imp.PKG_DIRECTORY

    def get_code(self, fullname=None):
        fullname = self._fix_name(fullname)
        if self.code is None:
            mod_type = self.etc[2]
            if mod_type==imp.PY_SOURCE:
                source = self.get_source(fullname)
                self.code = compile(source, self.filename, 'exec')
            elif mod_type==imp.PY_COMPILED:
                self._reopen()
                try:
                    self.code = read_code(self.file)
                finally:
                    self.file.close()
            elif mod_type==imp.PKG_DIRECTORY:
                # Delegate to the package's __init__ module.
                self.code = self._get_delegate().get_code()
        return self.code

    def get_source(self, fullname=None):
        fullname = self._fix_name(fullname)
        if self.source is None:
            mod_type = self.etc[2]
            if mod_type==imp.PY_SOURCE:
                self._reopen()
                try:
                    self.source = self.file.read()
                finally:
                    self.file.close()
            elif mod_type==imp.PY_COMPILED:
                # For a .pyc, fall back to the sibling .py when it exists.
                if os.path.exists(self.filename[:-1]):
                    f = open(self.filename[:-1], 'rU')
                    self.source = f.read()
                    f.close()
            elif mod_type==imp.PKG_DIRECTORY:
                self.source = self._get_delegate().get_source()
        return self.source


    def _get_delegate(self):
        # Loader for the package's __init__ module (PKG_DIRECTORY case).
        return ImpImporter(self.filename).find_module('__init__')

    def get_filename(self, fullname=None):
        fullname = self._fix_name(fullname)
        mod_type = self.etc[2]
        if self.etc[2]==imp.PKG_DIRECTORY:
            return self._get_delegate().get_filename()
        elif self.etc[2] in (imp.PY_SOURCE, imp.PY_COMPILED, imp.C_EXTENSION):
            return self.filename
        return None
|
|
||||||
|
|
||||||
|
|
||||||
# Register a dedicated module iterator for zipimporter, when available.
try:
    import zipimport
    from zipimport import zipimporter

    def iter_zipimport_modules(importer, prefix=''):
        # Walk the archive's cached directory listing, considering only
        # entries under this importer's internal prefix.
        dirlist = zipimport._zip_directory_cache[importer.archive].keys()
        dirlist.sort()
        _prefix = importer.prefix
        plen = len(_prefix)
        yielded = {}
        import inspect
        for fn in dirlist:
            if not fn.startswith(_prefix):
                continue

            fn = fn[plen:].split(os.sep)

            # A "pkg/__init__.py*" entry marks 'pkg' as a package.
            if len(fn)==2 and fn[1].startswith('__init__.py'):
                if fn[0] not in yielded:
                    yielded[fn[0]] = 1
                    yield fn[0], True

            if len(fn)!=1:
                continue

            modname = inspect.getmodulename(fn[0])
            if modname=='__init__':
                continue

            if modname and '.' not in modname and modname not in yielded:
                yielded[modname] = 1
                yield prefix + modname, False

    iter_importer_modules.register(zipimporter, iter_zipimport_modules)

except ImportError:
    # zipimport is optional; without it the default iterator is used.
    pass
|
|
||||||
|
|
||||||
|
|
||||||
def get_importer(path_item):
    """Retrieve a PEP 302 importer for the given path item

    The returned importer is cached in sys.path_importer_cache
    if it was newly created by a path hook.

    If there is no importer, a wrapper around the basic import
    machinery is returned. This wrapper is never inserted into
    the importer cache (None is inserted instead).

    The cache (or part of it) can be cleared manually if a
    rescan of sys.path_hooks is necessary.
    """
    try:
        importer = sys.path_importer_cache[path_item]
    except KeyError:
        # Cache miss: offer the item to each registered path hook in turn.
        for path_hook in sys.path_hooks:
            try:
                importer = path_hook(path_item)
                break
            except ImportError:
                pass
        else:
            importer = None
        # setdefault() so a concurrent fill of the cache is not clobbered.
        sys.path_importer_cache.setdefault(path_item, importer)

    if importer is None:
        # No hook claimed the item: fall back to classic-import emulation.
        try:
            importer = ImpImporter(path_item)
        except ImportError:
            importer = None
    return importer
|
|
||||||
|
|
||||||
|
|
||||||
def iter_importers(fullname=""):
    """Yield PEP 302 importers for the given module name

    If fullname contains a '.', the importers will be for the package
    containing fullname, otherwise they will be importers for sys.meta_path,
    sys.path, and Python's "classic" import machinery, in that order.  If
    the named module is in a package, that package is imported as a side
    effect of invoking this function.

    Non PEP 302 mechanisms (e.g. the Windows registry) used by the
    standard import machinery to find files in alternative locations
    are partially supported, but are searched AFTER sys.path. Normally,
    these locations are searched BEFORE sys.path, preventing sys.path
    entries from shadowing them.

    For this to cause a visible difference in behaviour, there must
    be a module or package name that is accessible via both sys.path
    and one of the non PEP 302 file system mechanisms. In this case,
    the emulation will find the former version, while the builtin
    import mechanism will find the latter.

    Items of the following types can be affected by this discrepancy:
        imp.C_EXTENSION, imp.PY_SOURCE, imp.PY_COMPILED, imp.PKG_DIRECTORY
    """
    if fullname.startswith('.'):
        raise ImportError("Relative module names not supported")
    if '.' in fullname:
        # Get the containing package's __path__
        pkg = '.'.join(fullname.split('.')[:-1])
        if pkg not in sys.modules:
            __import__(pkg)
        path = getattr(sys.modules[pkg], '__path__', None) or []
    else:
        # Top-level names consult the meta path first, then sys.path.
        for importer in sys.meta_path:
            yield importer
        path = sys.path
    for item in path:
        yield get_importer(item)
    if '.' not in fullname:
        # Finally, the classic-import fallback (frozen/built-in modules).
        yield ImpImporter()
|
|
||||||
|
|
||||||
def get_loader(module_or_name):
    """Get a PEP 302 "loader" object for module_or_name

    If the module or package is accessible via the normal import
    mechanism, a wrapper around the relevant part of that machinery
    is returned.  Returns None if the module cannot be found or imported.
    If the named module is not already imported, its containing package
    (if any) is imported, in order to establish the package __path__.

    This function uses iter_importers(), and is thus subject to the same
    limitations regarding platform-specific special import locations such
    as the Windows registry.
    """
    if module_or_name in sys.modules:
        # Accept either a module object or a module name; an already
        # imported name is resolved to its module object.
        module_or_name = sys.modules[module_or_name]
    if isinstance(module_or_name, ModuleType):
        module = module_or_name
        loader = getattr(module, '__loader__', None)
        if loader is not None:
            return loader
        fullname = module.__name__
    else:
        fullname = module_or_name
    return find_loader(fullname)
|
|
||||||
|
|
||||||
def find_loader(fullname):
    """Locate a PEP 302 "loader" object for *fullname*.

    If fullname contains dots, its containing package is imported as a
    side effect (via iter_importers) to establish the package __path__.
    Returns None when no importer claims the module.  Subject to the same
    limitations as iter_importers() regarding platform-specific special
    import locations such as the Windows registry.
    """
    result = None
    for candidate in iter_importers(fullname):
        found = candidate.find_module(fullname)
        if found is not None:
            result = found
            break
    return result
|
|
||||||
|
|
||||||
|
|
||||||
def extend_path(path, name):
    """Extend a package's path.

    Intended use is to place the following code in a package's __init__.py:

        from pkgutil import extend_path
        __path__ = extend_path(__path__, __name__)

    This will add to the package's __path__ all subdirectories of
    directories on sys.path named after the package.  This is useful
    if one wants to distribute different parts of a single logical
    package as multiple directories.

    It also looks for *.pkg files beginning where * matches the name
    argument.  This feature is similar to *.pth files (see site.py),
    except that it doesn't special-case lines starting with 'import'.
    A *.pkg file is trusted at face value: apart from checking for
    duplicates, all entries found in a *.pkg file are added to the
    path, regardless of whether they exist on the filesystem.  (This
    is a feature.)

    If the input path is not a list (as is the case for frozen
    packages) it is returned unchanged.  The input path is not
    modified; an extended copy is returned.  Items are only appended
    to the copy at the end.

    It is assumed that sys.path is a sequence.  Items of sys.path that
    are not (unicode or 8-bit) strings referring to existing
    directories are ignored.  Unicode items of sys.path that cause
    errors when used as filenames may cause this function to raise an
    exception (in line with os.path.isdir() behavior).
    """

    if not isinstance(path, list):
        # This could happen e.g. when this is called from inside a
        # frozen package.  Return the path unchanged in that case.
        return path

    pname = os.path.join(*name.split('.')) # Reconstitute as relative path
    # Just in case os.extsep != '.'
    sname = os.extsep.join(name.split('.'))
    sname_pkg = sname + os.extsep + "pkg"
    init_py = "__init__" + os.extsep + "py"

    path = path[:] # Start with a copy of the existing path

    for dir in sys.path:
        # NOTE: 'basestring' and the 'except IOError, msg' syntax below
        # keep this vendored module Python 2 compatible.
        if not isinstance(dir, basestring) or not os.path.isdir(dir):
            continue
        subdir = os.path.join(dir, pname)
        # XXX This may still add duplicate entries to path on
        # case-insensitive filesystems
        initfile = os.path.join(subdir, init_py)
        if subdir not in path and os.path.isfile(initfile):
            path.append(subdir)
        # XXX Is this the right thing for subpackages like zope.app?
        # It looks for a file named "zope.app.pkg"
        pkgfile = os.path.join(dir, sname_pkg)
        if os.path.isfile(pkgfile):
            try:
                f = open(pkgfile)
            except IOError, msg:
                sys.stderr.write("Can't open %s: %s\n" %
                                 (pkgfile, msg))
            else:
                for line in f:
                    line = line.rstrip('\n')
                    if not line or line.startswith('#'):
                        continue
                    path.append(line) # Don't check for existence!
                f.close()

    return path
|
|
||||||
|
|
||||||
def get_data(package, resource):
    """Get a resource from a package.

    This is a wrapper round the PEP 302 loader get_data API. The package
    argument should be the name of a package, in standard module format
    (foo.bar). The resource argument should be in the form of a relative
    filename, using '/' as the path separator. The parent directory name '..'
    is not allowed, and nor is a rooted name (starting with a '/').

    The function returns a binary string, which is the contents of the
    specified resource.

    For packages located in the filesystem, which have already been imported,
    this is the rough equivalent of

        d = os.path.dirname(sys.modules[package].__file__)
        data = open(os.path.join(d, resource), 'rb').read()

    If the package cannot be located or loaded, or it uses a PEP 302 loader
    which does not support get_data(), then None is returned.
    """

    loader = get_loader(package)
    if loader is None or not hasattr(loader, 'get_data'):
        return None
    # Import the package if needed so __file__ is available for the lookup.
    mod = sys.modules.get(package) or loader.load_module(package)
    if mod is None or not hasattr(mod, '__file__'):
        return None

    # Modify the resource name to be compatible with the loader.get_data
    # signature - an os.path format "filename" starting with the dirname of
    # the package's __file__
    parts = resource.split('/')
    parts.insert(0, os.path.dirname(mod.__file__))
    resource_name = os.path.join(*parts)
    return loader.get_data(resource_name)
|
|
|
@ -1,453 +0,0 @@
|
||||||
"""Tokenization help for Python programs.
|
|
||||||
|
|
||||||
generate_tokens(readline) is a generator that breaks a stream of
|
|
||||||
text into Python tokens. It accepts a readline-like method which is called
|
|
||||||
repeatedly to get the next line of input (or "" for EOF). It generates
|
|
||||||
5-tuples with these members:
|
|
||||||
|
|
||||||
the token type (see token.py)
|
|
||||||
the token (a string)
|
|
||||||
the starting (row, column) indices of the token (a 2-tuple of ints)
|
|
||||||
the ending (row, column) indices of the token (a 2-tuple of ints)
|
|
||||||
the original line (string)
|
|
||||||
|
|
||||||
It is designed to match the working of the Python tokenizer exactly, except
|
|
||||||
that it produces COMMENT tokens for comments and gives type OP for all
|
|
||||||
operators
|
|
||||||
|
|
||||||
Older entry points
|
|
||||||
tokenize_loop(readline, tokeneater)
|
|
||||||
tokenize(readline, tokeneater=printtoken)
|
|
||||||
are the same, except instead of generating tokens, tokeneater is a callback
|
|
||||||
function to which the 5 fields described above are passed as 5 arguments,
|
|
||||||
each time a new token is found."""
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
__author__ = 'Ka-Ping Yee <ping@lfw.org>'
|
|
||||||
__credits__ = ('GvR, ESR, Tim Peters, Thomas Wouters, Fred Drake, '
|
|
||||||
'Skip Montanaro, Raymond Hettinger')
|
|
||||||
|
|
||||||
from itertools import chain
|
|
||||||
import string, re
|
|
||||||
from token import *
|
|
||||||
|
|
||||||
import token
|
|
||||||
__all__ = [x for x in dir(token) if not x.startswith("_")]
|
|
||||||
__all__ += ["COMMENT", "tokenize", "generate_tokens", "NL", "untokenize"]
|
|
||||||
del token
|
|
||||||
|
|
||||||
COMMENT = N_TOKENS
|
|
||||||
tok_name[COMMENT] = 'COMMENT'
|
|
||||||
NL = N_TOKENS + 1
|
|
||||||
tok_name[NL] = 'NL'
|
|
||||||
N_TOKENS += 2
|
|
||||||
|
|
||||||
# Regex-building helpers.  NOTE: 'any' deliberately shadows the builtin
# within this module, matching the historical stdlib tokenize source.
def group(*choices):
    """Join the alternatives into a single regex alternation group."""
    return '(' + '|'.join(choices) + ')'

def any(*choices):
    """Regex matching zero or more repetitions of any alternative."""
    return group(*choices) + '*'

def maybe(*choices):
    """Regex matching at most one occurrence of any alternative."""
    return group(*choices) + '?'
|
|
||||||
|
|
||||||
# Regular expressions for Python 2 lexical elements.  Patterns are matched
# leftmost-then-longest, so longer alternatives must come first.
Whitespace = r'[ \f\t]*'
Comment = r'#[^\r\n]*'
Ignore = Whitespace + any(r'\\\r?\n' + Whitespace) + maybe(Comment)
Name = r'[a-zA-Z_]\w*'

# Numeric literals, including the Python 2 'L'/'l' long suffix.
Hexnumber = r'0[xX][\da-fA-F]+[lL]?'
Octnumber = r'(0[oO][0-7]+)|(0[0-7]*)[lL]?'
Binnumber = r'0[bB][01]+[lL]?'
Decnumber = r'[1-9]\d*[lL]?'
Intnumber = group(Hexnumber, Binnumber, Octnumber, Decnumber)
Exponent = r'[eE][-+]?\d+'
Pointfloat = group(r'\d+\.\d*', r'\.\d+') + maybe(Exponent)
Expfloat = r'\d+' + Exponent
Floatnumber = group(Pointfloat, Expfloat)
Imagnumber = group(r'\d+[jJ]', Floatnumber + r'[jJ]')
Number = group(Imagnumber, Floatnumber, Intnumber)

# Tail end of ' string.
Single = r"[^'\\]*(?:\\.[^'\\]*)*'"
# Tail end of " string.
Double = r'[^"\\]*(?:\\.[^"\\]*)*"'
# Tail end of ''' string.
Single3 = r"[^'\\]*(?:(?:\\.|'(?!''))[^'\\]*)*'''"
# Tail end of """ string.
Double3 = r'[^"\\]*(?:(?:\\.|"(?!""))[^"\\]*)*"""'
Triple = group("[uUbB]?[rR]?'''", '[uUbB]?[rR]?"""')
# Single-line ' or " string.
String = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*'",
               r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*"')

# Because of leftmost-then-longest match semantics, be sure to put the
# longest operators first (e.g., if = came before ==, == would get
# recognized as two instances of =).
Operator = group(r"\*\*=?", r">>=?", r"<<=?", r"<>", r"!=",
                 r"//=?",
                 r"[+\-*/%&|^=<>]=?",
                 r"~")

Bracket = '[][(){}]'
Special = group(r'\r?\n', r'[:;.,`@]')
Funny = group(Operator, Bracket, Special)

PlainToken = group(Number, Funny, String, Name)
Token = Ignore + PlainToken

# First (or only) line of ' or " string.
ContStr = group(r"[uUbB]?[rR]?'[^\n'\\]*(?:\\.[^\n'\\]*)*" +
                group("'", r'\\\r?\n'),
                r'[uUbB]?[rR]?"[^\n"\\]*(?:\\.[^\n"\\]*)*' +
                group('"', r'\\\r?\n'))
PseudoExtras = group(r'\\\r?\n|\Z', Comment, Triple)
PseudoToken = Whitespace + group(PseudoExtras, Number, Funny, ContStr, Name)

# Pre-compiled programs used by the tokenizer loop; 'endprogs' maps each
# string-opening prefix to the pattern matching the rest of that string
# (None entries are bare prefix characters awaiting a quote).
tokenprog, pseudoprog, single3prog, double3prog = map(
    re.compile, (Token, PseudoToken, Single3, Double3))
endprogs = {"'": re.compile(Single), '"': re.compile(Double),
            "'''": single3prog, '"""': double3prog,
            "r'''": single3prog, 'r"""': double3prog,
            "u'''": single3prog, 'u"""': double3prog,
            "ur'''": single3prog, 'ur"""': double3prog,
            "R'''": single3prog, 'R"""': double3prog,
            "U'''": single3prog, 'U"""': double3prog,
            "uR'''": single3prog, 'uR"""': double3prog,
            "Ur'''": single3prog, 'Ur"""': double3prog,
            "UR'''": single3prog, 'UR"""': double3prog,
            "b'''": single3prog, 'b"""': double3prog,
            "br'''": single3prog, 'br"""': double3prog,
            "B'''": single3prog, 'B"""': double3prog,
            "bR'''": single3prog, 'bR"""': double3prog,
            "Br'''": single3prog, 'Br"""': double3prog,
            "BR'''": single3prog, 'BR"""': double3prog,
            'r': None, 'R': None, 'u': None, 'U': None,
            'b': None, 'B': None}
|
|
||||||
|
|
||||||
# All prefix+quote combinations that open a triple-quoted string (dicts
# are used as sets for Python 2.3 compatibility).
triple_quoted = {}
for t in ("'''", '"""',
          "r'''", 'r"""', "R'''", 'R"""',
          "u'''", 'u"""', "U'''", 'U"""',
          "ur'''", 'ur"""', "Ur'''", 'Ur"""',
          "uR'''", 'uR"""', "UR'''", 'UR"""',
          "b'''", 'b"""', "B'''", 'B"""',
          "br'''", 'br"""', "Br'''", 'Br"""',
          "bR'''", 'bR"""', "BR'''", 'BR"""'):
    triple_quoted[t] = t
# All prefix+quote combinations that open a single-quoted string.
single_quoted = {}
for t in ("'", '"',
          "r'", 'r"', "R'", 'R"',
          "u'", 'u"', "U'", 'U"',
          "ur'", 'ur"', "Ur'", 'Ur"',
          "uR'", 'uR"', "UR'", 'UR"',
          "b'", 'b"', "B'", 'B"',
          "br'", 'br"', "Br'", 'Br"',
          "bR'", 'bR"', "BR'", 'BR"' ):
    single_quoted[t] = t

# Number of columns a tab advances to when measuring indentation.
tabsize = 8

# Raised when input ends inside an unterminated token.
class TokenError(Exception): pass

# Raised by a tokeneater callback to stop tokenize() cleanly.
class StopTokenizing(Exception): pass
|
|
||||||
|
|
||||||
def printtoken(type, token, srow_scol, erow_ecol, line): # for testing
    """Print one token as "srow,scol-erow,ecol:  TYPE  VALUE"."""
    (srow, scol), (erow, ecol) = srow_scol, erow_ecol
    fields = (srow, scol, erow, ecol, tok_name[type], repr(token))
    print("%d,%d-%d,%d:\t%s\t%s" % fields)
|
|
||||||
|
|
||||||
def tokenize(readline, tokeneater=printtoken):
    """
    The tokenize() function accepts two parameters: one representing the
    input stream, and one providing an output mechanism for tokenize().

    The first parameter, readline, must be a callable object which provides
    the same interface as the readline() method of built-in file objects.
    Each call to the function should return one line of input as a string.

    The second parameter, tokeneater, must also be a callable object. It is
    called once for each token, with five arguments, corresponding to the
    tuples generated by generate_tokens().
    """
    try:
        tokenize_loop(readline, tokeneater)
    except StopTokenizing:
        # The tokeneater may raise StopTokenizing to end tokenization early.
        pass
|
|
||||||
|
|
||||||
# backwards compatible interface
def tokenize_loop(readline, tokeneater):
    """Feed every token produced from *readline* to *tokeneater*."""
    for info in generate_tokens(readline):
        tokeneater(*info)
|
|
||||||
|
|
||||||
class Untokenizer:
    """Rebuild source text from a token stream.

    Tracks the (row, col) end position of the previously emitted token so
    that original spacing can be reproduced for full 5-tuple tokens.
    """

    def __init__(self):
        self.tokens = []       # accumulated output fragments
        self.prev_row = 1      # end row of the previous token
        self.prev_col = 0      # end column of the previous token

    def add_whitespace(self, start):
        # Emit filler (backslash-continuations and spaces) to advance from
        # the previous token's end position to 'start'.
        row, col = start
        if row < self.prev_row or row == self.prev_row and col < self.prev_col:
            raise ValueError("start ({},{}) precedes previous end ({},{})"
                             .format(row, col, self.prev_row, self.prev_col))
        row_offset = row - self.prev_row
        if row_offset:
            self.tokens.append("\\\n" * row_offset)
            self.prev_col = 0
        col_offset = col - self.prev_col
        if col_offset:
            self.tokens.append(" " * col_offset)

    def untokenize(self, iterable):
        # Full reconstruction from 5-tuples; falls back to compat() as soon
        # as a positionless 2-tuple is encountered.
        it = iter(iterable)
        indents = []
        startline = False
        for t in it:
            if len(t) == 2:
                # No position info available: switch to the lossy path.
                self.compat(t, it)
                break
            tok_type, token, start, end, line = t
            if tok_type == ENDMARKER:
                break
            if tok_type == INDENT:
                indents.append(token)
                continue
            elif tok_type == DEDENT:
                indents.pop()
                self.prev_row, self.prev_col = end
                continue
            elif tok_type in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                # First token on a new logical line: re-emit the current
                # indentation if the token starts at or past it.
                indent = indents[-1]
                if start[1] >= len(indent):
                    self.tokens.append(indent)
                    self.prev_col = len(indent)
                startline = False
            self.add_whitespace(start)
            self.tokens.append(token)
            self.prev_row, self.prev_col = end
            if tok_type in (NEWLINE, NL):
                self.prev_row += 1
                self.prev_col = 0
        return "".join(self.tokens)

    def compat(self, token, iterable):
        # Lossy reconstruction from (type, string) 2-tuples: inserts single
        # spaces after names/numbers and between consecutive strings.
        indents = []
        toks_append = self.tokens.append
        startline = token[0] in (NEWLINE, NL)
        prevstring = False

        for tok in chain([token], iterable):
            toknum, tokval = tok[:2]

            if toknum in (NAME, NUMBER):
                tokval += ' '

            # Insert a space between two consecutive strings
            if toknum == STRING:
                if prevstring:
                    tokval = ' ' + tokval
                prevstring = True
            else:
                prevstring = False

            if toknum == INDENT:
                indents.append(tokval)
                continue
            elif toknum == DEDENT:
                indents.pop()
                continue
            elif toknum in (NEWLINE, NL):
                startline = True
            elif startline and indents:
                toks_append(indents[-1])
                startline = False
            toks_append(tokval)
|
|
||||||
|
|
||||||
def untokenize(iterable):
    """Transform tokens back into Python source code.

    Each element returned by the iterable must be a token sequence
    with at least two elements, a token number and token value. If
    only two tokens are passed, the resulting output is poor.

    Round-trip invariant for full input:
        Untokenized source will match input source exactly

    Round-trip invariant for limited intput:
        # Output text will tokenize the back to the input
        t1 = [tok[:2] for tok in generate_tokens(f.readline)]
        newcode = untokenize(t1)
        readline = iter(newcode.splitlines(1)).next
        t2 = [tok[:2] for tok in generate_tokens(readline)]
        assert t1 == t2
    """
    # Delegate all of the work to a throwaway Untokenizer instance.
    return Untokenizer().untokenize(iterable)
|
|
||||||
|
|
||||||
def generate_tokens(readline):
    """
    The generate_tokens() generator requires one argument, readline, which
    must be a callable object which provides the same interface as the
    readline() method of built-in file objects. Each call to the function
    should return one line of input as a string. Alternately, readline
    can be a callable function terminating with StopIteration:
        readline = open(myfile).next    # Example of alternate readline

    The generator produces 5-tuples with these members: the token type; the
    token string; a 2-tuple (srow, scol) of ints specifying the row and
    column where the token begins in the source; a 2-tuple (erow, ecol) of
    ints specifying the row and column where the token ends in the source;
    and the line on which the token was found. The line passed is the
    logical line; continuation lines are included.
    """
    # Tokenizer state: line number, paren nesting depth, backslash-continuation
    # flag, pending multi-line string buffer, and the indentation stack.
    lnum = parenlev = continued = 0
    namechars, numchars = string.ascii_letters + '_', '0123456789'
    contstr, needcont = '', 0
    contline = None
    indents = [0]

    while 1:                                   # loop over lines in stream
        try:
            line = readline()
        except StopIteration:
            line = ''
        lnum += 1
        pos, max = 0, len(line)

        if contstr:                            # continued string
            if not line:
                raise TokenError("EOF in multi-line string", strstart)
            endmatch = endprog.match(line)
            if endmatch:
                pos = end = endmatch.end(0)
                yield (STRING, contstr + line[:end],
                       strstart, (lnum, end), contline + line)
                contstr, needcont = '', 0
                contline = None
            elif needcont and line[-2:] != '\\\n' and line[-3:] != '\\\r\n':
                # Single-quoted string that was not properly continued.
                yield (ERRORTOKEN, contstr + line,
                       strstart, (lnum, len(line)), contline)
                contstr = ''
                contline = None
                continue
            else:
                contstr = contstr + line
                contline = contline + line
                continue

        elif parenlev == 0 and not continued:  # new statement
            if not line: break
            column = 0
            while pos < max:                   # measure leading whitespace
                if line[pos] == ' ':
                    column += 1
                elif line[pos] == '\t':
                    column = (column//tabsize + 1)*tabsize
                elif line[pos] == '\f':
                    column = 0
                else:
                    break
                pos += 1
            if pos == max:
                break

            if line[pos] in '#\r\n':           # skip comments or blank lines
                if line[pos] == '#':
                    comment_token = line[pos:].rstrip('\r\n')
                    nl_pos = pos + len(comment_token)
                    yield (COMMENT, comment_token,
                           (lnum, pos), (lnum, pos + len(comment_token)), line)
                    yield (NL, line[nl_pos:],
                           (lnum, nl_pos), (lnum, len(line)), line)
                else:
                    yield ((NL, COMMENT)[line[pos] == '#'], line[pos:],
                           (lnum, pos), (lnum, len(line)), line)
                continue

            if column > indents[-1]:           # count indents or dedents
                indents.append(column)
                yield (INDENT, line[:pos], (lnum, 0), (lnum, pos), line)
            while column < indents[-1]:
                if column not in indents:
                    raise IndentationError(
                        "unindent does not match any outer indentation level",
                        ("<tokenize>", lnum, pos, line))
                indents = indents[:-1]
                yield (DEDENT, '', (lnum, pos), (lnum, pos), line)

        else:                                  # continued statement
            if not line:
                raise TokenError("EOF in multi-line statement", (lnum, 0))
            continued = 0

        while pos < max:
            pseudomatch = pseudoprog.match(line, pos)
            if pseudomatch:                                # scan for tokens
                start, end = pseudomatch.span(1)
                spos, epos, pos = (lnum, start), (lnum, end), end
                if start == end:
                    continue
                token, initial = line[start:end], line[start]

                if initial in numchars or \
                   (initial == '.' and token != '.'):      # ordinary number
                    yield (NUMBER, token, spos, epos, line)
                elif initial in '\r\n':
                    # Inside brackets a newline is non-logical (NL); at
                    # statement level it terminates the statement (NEWLINE).
                    if parenlev > 0:
                        n = NL
                    else:
                        n = NEWLINE
                    yield (n, token, spos, epos, line)
                elif initial == '#':
                    assert not token.endswith("\n")
                    yield (COMMENT, token, spos, epos, line)
                elif token in triple_quoted:
                    endprog = endprogs[token]
                    endmatch = endprog.match(line, pos)
                    if endmatch:                           # all on one line
                        pos = endmatch.end(0)
                        token = line[start:pos]
                        yield (STRING, token, spos, (lnum, pos), line)
                    else:
                        strstart = (lnum, start)           # multiple lines
                        contstr = line[start:]
                        contline = line
                        break
                elif initial in single_quoted or \
                    token[:2] in single_quoted or \
                    token[:3] in single_quoted:
                    if token[-1] == '\n':                  # continued string
                        strstart = (lnum, start)
                        endprog = (endprogs[initial] or endprogs[token[1]] or
                                   endprogs[token[2]])
                        contstr, needcont = line[start:], 1
                        contline = line
                        break
                    else:                                  # ordinary string
                        yield (STRING, token, spos, epos, line)
                elif initial in namechars:                 # ordinary name
                    yield (NAME, token, spos, epos, line)
                elif initial == '\\':                      # continued stmt
                    continued = 1
                else:
                    if initial in '([{':
                        parenlev += 1
                    elif initial in ')]}':
                        parenlev -= 1
                    yield (OP, token, spos, epos, line)
            else:
                # No pseudo-token matched: emit the offending character as
                # an error token and advance one column.
                yield (ERRORTOKEN, line[pos],
                       (lnum, pos), (lnum, pos+1), line)
                pos += 1

    for indent in indents[1:]:                 # pop remaining indent levels
        yield (DEDENT, '', (lnum, 0), (lnum, 0), '')
    yield (ENDMARKER, '', (lnum, 0), (lnum, 0), '')
|
|
||||||
|
|
||||||
if __name__ == '__main__':                     # testing
    # Ad-hoc manual test: tokenize the file named on the command line,
    # or stdin when no argument is given.
    import sys
    if len(sys.argv) > 1:
        # Use a context manager so the source file is closed deterministically
        # (the original leaked the handle until garbage collection).
        with open(sys.argv[1]) as fp:
            tokenize(fp.readline)
    else:
        tokenize(sys.stdin.readline)
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,236 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
"""
|
|
||||||
Basic signal handler for dumping thread stacks.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import difflib
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import gc
|
|
||||||
import signal
|
|
||||||
import sys
|
|
||||||
import threading
|
|
||||||
import time
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
_last = None
|
|
||||||
|
|
||||||
|
|
||||||
def enable_evil_interrupts():
    """Fire SIGALRM every 10ms with a no-op handler, via ITIMER_REAL.

    Used while debugging to force frequent EINTR wakeups in blocked threads.
    """
    def _ignore(signum, frame):
        pass
    signal.signal(signal.SIGALRM, _ignore)
    signal.setitimer(signal.ITIMER_REAL, 0.01, 0.01)
|
|
||||||
|
|
||||||
|
|
||||||
def disable_evil_interrupts():
    """Cancel the periodic SIGALRM timer installed by enable_evil_interrupts()."""
    signal.setitimer(signal.ITIMER_REAL, 0, 0)
|
|
||||||
|
|
||||||
|
|
||||||
def _hex(n):
|
|
||||||
return '%08x' % n
|
|
||||||
|
|
||||||
|
|
||||||
def get_subclasses(klass):
    """
    Rather than statically import every interesting subclass, forcing it all to
    be transferred and potentially disrupting the debugged environment,
    enumerate only those loaded in memory. Also returns the original class.
    """
    # Depth-first walk over __subclasses__(), collecting every class seen.
    seen = set()
    pending = [klass]
    while pending:
        current = pending.pop()
        seen.add(current)
        pending.extend(current.__subclasses__())
    return seen
|
|
||||||
|
|
||||||
|
|
||||||
def get_routers():
    """Map hex id() string -> every live Router (or subclass) instance.

    Instances are discovered through the garbage collector's referrer lists
    rather than any registry, so no extra imports are needed.
    """
    routers = {}
    for klass in get_subclasses(mitogen.core.Router):
        for referrer in gc.get_referrers(klass):
            if isinstance(referrer, mitogen.core.Router):
                routers[_hex(id(referrer))] = referrer
    return routers
|
|
||||||
|
|
||||||
|
|
||||||
def get_router_info():
    """Summarize each live router: unique stream, context and handle counts."""
    summary = {}
    for id_, router in get_routers().items():
        summary[id_] = {
            'id': id_,
            'streams': len(set(router._stream_by_id.values())),
            'contexts': len(set(router._context_by_id.values())),
            'handles': len(router._handle_map),
        }
    return {'routers': summary}
|
|
||||||
|
|
||||||
|
|
||||||
def get_stream_info(router_id):
    """Describe every stream attached to the router keyed by hex *router_id*.

    NOTE(review): raises AttributeError when *router_id* is unknown, since
    get_routers().get() then returns None — preserved from the original.
    """
    router = get_routers().get(router_id)
    streams = {}
    for via_id, stream in router._stream_by_id.items():
        streams[_hex(id(stream))] = {
            'name': stream.name,
            'remote_id': stream.remote_id,
            'sent_module_count': len(getattr(stream, 'sent_modules', [])),
            'routes': sorted(getattr(stream, 'routes', [])),
            'type': type(stream).__module__,
        }
    return {'streams': streams}
|
|
||||||
|
|
||||||
|
|
||||||
def format_stacks():
    """Render a traceback for every live thread as one printable string."""
    names = {t.ident: t.name for t in threading.enumerate()}

    lines = ['', '']
    for thread_id, frame in sys._current_frames().items():
        lines.append("# PID %d ThreadID: (%s) %s; %r" % (
            os.getpid(),
            names.get(thread_id, '<no name>'),
            thread_id,
            frame,
        ))
        #frame = frame.f_back.f_back

        for filename, lineno, func, text in traceback.extract_stack(frame):
            lines.append(
                'File: "%s", line %d, in %s' % (
                    filename,
                    lineno,
                    func
                )
            )
            if text:
                lines.append(' ' + text.strip())
        lines.append('')

    lines += ['', '']
    return '\n'.join(lines)
|
|
||||||
|
|
||||||
|
|
||||||
def get_snapshot():
    """
    Return the current thread dump, and when a previous dump exists in the
    module-global `_last`, append a unified diff against it.
    """
    global _last

    current = format_stacks()
    snap = current
    if _last:
        snap += '\n'
        delta = list(difflib.unified_diff(
            a=_last.splitlines(),
            b=current.splitlines(),
            fromfile='then',
            tofile='now'
        ))
        if delta:
            snap += '\n'.join(delta) + '\n'
        else:
            snap += '(no change since last time)\n'
    _last = current
    return snap
|
|
||||||
|
|
||||||
|
|
||||||
def _handler(*_):
    """Signal handler: write a thread-dump snapshot to the controlling tty."""
    # Line-buffered so the dump appears immediately; the with-block closes
    # the tty handle exactly as the original explicit close() did.
    with open('/dev/tty', 'w', 1) as fp:
        fp.write(get_snapshot())
|
|
||||||
|
|
||||||
|
|
||||||
def install_handler():
    """Arrange for SIGUSR2 to dump every thread's stack to the controlling tty."""
    signal.signal(signal.SIGUSR2, _handler)
|
|
||||||
|
|
||||||
|
|
||||||
def _logging_main(secs):
    """Daemon-thread body: log a thread-dump snapshot every *secs* seconds, forever."""
    while True:
        time.sleep(secs)
        LOG.info('PERIODIC THREAD DUMP\n\n%s', get_snapshot())
|
|
||||||
|
|
||||||
|
|
||||||
def dump_to_logger(secs=5):
    """
    Start a background thread that logs a snapshot of every thread's stack
    to this module's logger every *secs* seconds.

    :param int secs:
        Interval between dumps, in seconds.
    """
    th = threading.Thread(
        target=_logging_main,
        kwargs={'secs': secs},
        name='mitogen.debug.dump_to_logger',
    )
    # Marked daemon so the dumper never prevents interpreter exit.
    # FIX: Thread.setDaemon() is deprecated (since Python 3.10); assign the
    # `daemon` attribute instead, which behaves identically.
    th.daemon = True
    th.start()
|
|
||||||
|
|
||||||
|
|
||||||
class ContextDebugger(object):
    """
    Attach a DEBUG message handler to a router, and automatically configure
    context debugging in every child context as it registers.
    """

    @classmethod
    @mitogen.core.takes_econtext
    def _configure_context(cls, econtext):
        # Runs inside the child: upgrade to a parent-capable router, then
        # attach a debugger instance to the external context.
        mitogen.parent.upgrade_router(econtext)
        econtext.debugger = cls(econtext.router)

    def __init__(self, router):
        """
        :param mitogen.core.Router router:
            Router whose DEBUG messages and stream registrations to handle.
        """
        self.router = router
        self.router.add_handler(
            func=self._on_debug_msg,
            handle=mitogen.core.DEBUG,
            persist=True,
            policy=mitogen.core.has_parent_authority,
        )
        mitogen.core.listen(router, 'register', self._on_stream_register)
        LOG.debug('Context debugging configured.')

    def _on_stream_register(self, context, stream):
        # New child appeared: ask it to configure its own debugger.
        LOG.debug('_on_stream_register: sending configure() to %r', stream)
        context.call_async(ContextDebugger._configure_context)

    def _on_debug_msg(self, msg):
        # Dispatch each request on a fresh thread so slow introspection never
        # blocks the broker.
        if msg != mitogen.core._DEAD:
            threading.Thread(
                target=self._handle_debug_msg,
                name='ContextDebuggerHandler',
                args=(msg,)
            ).start()

    def _handle_debug_msg(self, msg):
        # Unpickle (method_name, args, kwargs) and reply with the result, or
        # with a CallError on failure.
        try:
            method, args, kwargs = msg.unpickle()
            # BUG FIX: the original wrote getattr(cls, method) inside this
            # instance method, where `cls` is undefined — every request died
            # with NameError. Dispatch on self instead.
            msg.reply(getattr(self, method)(*args, **kwargs))
        except Exception:
            e = sys.exc_info()[1]
            msg.reply(mitogen.core.CallError(e))
|
|
|
@ -1,113 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
from mitogen.core import b
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class PasswordError(mitogen.core.StreamError):
    """Raised when a doas password is required, missing, or rejected."""
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Construct a child context by running the mitogen bootstrap under doas(1),
    answering the password prompt over the hybrid TTY when one is configured.
    """
    create_child = staticmethod(mitogen.parent.hybrid_tty_create_child)
    child_is_immediate_subprocess = False

    # Connection defaults, each overridable via construct().
    username = 'root'
    password = None
    doas_path = 'doas'
    password_prompt = b('Password:')
    incorrect_prompts = (
        b('doas: authentication failed'),
    )

    def construct(self, username=None, password=None, doas_path=None,
                  password_prompt=None, incorrect_prompts=None, **kwargs):
        """
        Record connection parameters, keeping the class-level default for any
        argument left as None. Prompt strings are lower-cased for the
        case-insensitive matching done in _connect_input_loop().
        """
        super(Stream, self).construct(**kwargs)
        if username is not None:
            self.username = username
        if password is not None:
            self.password = password
        if doas_path is not None:
            self.doas_path = doas_path
        if password_prompt is not None:
            self.password_prompt = password_prompt.lower()
        if incorrect_prompts is not None:
            # BUG FIX: the original stored map(str.lower, incorrect_prompts).
            # On Python 3 that is a one-shot iterator: the any() scan in
            # _connect_input_loop() exhausted it on the first buffer, silently
            # disabling failed-auth detection for the rest of the handshake
            # (and str.lower broke on bytes prompts). Materialize a list and
            # lower-case via the element's own method so bytes and str both
            # work.
            self.incorrect_prompts = [p.lower() for p in incorrect_prompts]

    def _get_name(self):
        """Stream name shown in logs: 'doas.<username>'."""
        return u'doas.' + mitogen.core.to_text(self.username)

    def get_boot_command(self):
        """Prefix the parent bootstrap command with the doas invocation."""
        bits = [self.doas_path, '-u', self.username, '--']
        bits = bits + super(Stream, self).get_boot_command()
        LOG.debug('doas command line: %r', bits)
        return bits

    password_incorrect_msg = 'doas password is incorrect'
    password_required_msg = 'doas password is required'

    def _connect_input_loop(self, it):
        """
        Scan child output for the bootstrap marker, password prompts and
        authentication failures, sending the password at most once.

        :raises PasswordError:
            Password missing when prompted, or rejected.
        :raises mitogen.core.StreamError:
            Output ended before bootstrap completed.
        """
        password_sent = False
        for buf in it:
            LOG.debug('%r: received %r', self, buf)
            if buf.endswith(self.EC0_MARKER):
                self._ec0_received()
                return
            if any(s in buf.lower() for s in self.incorrect_prompts):
                if password_sent:
                    raise PasswordError(self.password_incorrect_msg)
            elif self.password_prompt in buf.lower():
                if self.password is None:
                    raise PasswordError(self.password_required_msg)
                if password_sent:
                    raise PasswordError(self.password_incorrect_msg)
                LOG.debug('sending password')
                self.diag_stream.transmit_side.write(
                    mitogen.core.to_text(self.password + '\n').encode('utf-8')
                )
                password_sent = True
        raise mitogen.core.StreamError('bootstrap failed')

    def _connect_bootstrap(self):
        """Drive the handshake over both stdio and diagnostic fds."""
        it = mitogen.parent.iter_read(
            fds=[self.receive_side.fd, self.diag_stream.receive_side.fd],
            deadline=self.connect_deadline,
        )
        try:
            self._connect_input_loop(it)
        finally:
            # Always release the reader, even when a PasswordError escapes.
            it.close()
|
|
|
@ -1,81 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Construct a child context inside a Docker container: either exec into a
    running container, or run a throwaway container from an image.
    """
    child_is_immediate_subprocess = False

    # Connection defaults; at least one of `container` or `image` is required.
    container = None
    image = None
    username = None
    docker_path = 'docker'

    # TODO: better way of capturing errors such as "No such container."
    create_child_args = {
        'merge_stdio': True
    }

    def construct(self, container=None, image=None,
                  docker_path=None, username=None,
                  **kwargs):
        """
        Record connection parameters.

        :raises ValueError:
            Neither *container* nor *image* was supplied.
        """
        if not (container or image):
            # FIX: was `assert container or image` — assertions are stripped
            # when Python runs with -O, silently skipping input validation.
            raise ValueError('either container or image must be specified')
        super(Stream, self).construct(**kwargs)
        if container:
            self.container = container
        if image:
            self.image = image
        if docker_path:
            self.docker_path = docker_path
        if username:
            self.username = username

    def _get_name(self):
        """Stream name shown in logs: 'docker.<container-or-image>'."""
        return u'docker.' + (self.container or self.image)

    def get_boot_command(self):
        """Wrap the parent bootstrap command in `docker exec` or `docker run`."""
        args = ['--interactive']
        if self.username:
            args += ['--user=' + self.username]

        bits = [self.docker_path]
        if self.container:
            bits += ['exec'] + args + [self.container]
        elif self.image:
            # --rm so the throwaway container is cleaned up on exit.
            bits += ['run'] + args + ['--rm', self.image]

        return bits + super(Stream, self).get_boot_command()
|
|
|
@ -1,461 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
"""
|
|
||||||
:mod:`mitogen.fakessh` is a stream implementation that starts a subprocess with
|
|
||||||
its environment modified such that ``PATH`` searches for `ssh` return a Mitogen
|
|
||||||
implementation of SSH. When invoked, this implementation arranges for the
|
|
||||||
command line supplied by the caller to be executed in a remote context, reusing
|
|
||||||
the parent context's (possibly proxied) connection to that remote context.
|
|
||||||
|
|
||||||
This allows tools like `rsync` and `scp` to transparently reuse the connections
|
|
||||||
and tunnels already established by the host program to connect to a target
|
|
||||||
machine, without wasteful redundant SSH connection setup, 3-way handshakes, or
|
|
||||||
firewall hopping configurations, and enables these tools to be used in
|
|
||||||
impossible scenarios, such as over `sudo` with ``requiretty`` enabled.
|
|
||||||
|
|
||||||
The fake `ssh` command source is written to a temporary file on disk, and
|
|
||||||
consists of a copy of the :py:mod:`mitogen.core` source code (just like any
|
|
||||||
other child context), with a line appended to cause it to connect back to the
|
|
||||||
host process over an FD it inherits. As there is no reliance on an existing
|
|
||||||
filesystem file, it is possible for child contexts to use fakessh.
|
|
||||||
|
|
||||||
As a consequence of connecting back through an inherited FD, only one SSH
|
|
||||||
invocation is possible, which is fine for tools like `rsync`, however in future
|
|
||||||
this restriction will be lifted.
|
|
||||||
|
|
||||||
Sequence:
|
|
||||||
|
|
||||||
1. ``fakessh`` Context and Stream created by parent context. The stream's
|
|
||||||
buffer has a :py:func:`_fakessh_main` :py:data:`CALL_FUNCTION
|
|
||||||
<mitogen.core.CALL_FUNCTION>` enqueued.
|
|
||||||
2. Target program (`rsync/scp/sftp`) invoked, which internally executes
|
|
||||||
`ssh` from ``PATH``.
|
|
||||||
3. :py:mod:`mitogen.core` bootstrap begins, recovers the stream FD
|
|
||||||
inherited via the target program, established itself as the fakessh
|
|
||||||
context.
|
|
||||||
4. :py:func:`_fakessh_main` :py:data:`CALL_FUNCTION
|
|
||||||
<mitogen.core.CALL_FUNCTION>` is read by fakessh context,
|
|
||||||
|
|
||||||
a. sets up :py:class:`IoPump` for stdio, registers
|
|
||||||
stdin_handle for local context.
|
|
||||||
b. Enqueues :py:data:`CALL_FUNCTION <mitogen.core.CALL_FUNCTION>` for
|
|
||||||
:py:func:`_start_slave` invoked in target context,
|
|
||||||
|
|
||||||
i. the program from the `ssh` command line is started
|
|
||||||
ii. sets up :py:class:`IoPump` for `ssh` command line process's
|
|
||||||
stdio pipes
|
|
||||||
iii. returns `(control_handle, stdin_handle)` to
|
|
||||||
:py:func:`_fakessh_main`
|
|
||||||
|
|
||||||
5. :py:func:`_fakessh_main` receives control/stdin handles from from
|
|
||||||
:py:func:`_start_slave`,
|
|
||||||
|
|
||||||
a. registers remote's stdin_handle with local :py:class:`IoPump`.
|
|
||||||
b. sends `("start", local_stdin_handle)` to remote's control_handle
|
|
||||||
c. registers local :py:class:`IoPump` with
|
|
||||||
:py:class:`mitogen.core.Broker`.
|
|
||||||
d. loops waiting for `local stdout closed && remote stdout closed`
|
|
||||||
|
|
||||||
6. :py:func:`_start_slave` control channel receives `("start", stdin_handle)`,
|
|
||||||
|
|
||||||
a. registers remote's stdin_handle with local :py:class:`IoPump`
|
|
||||||
b. registers local :py:class:`IoPump` with
|
|
||||||
:py:class:`mitogen.core.Broker`.
|
|
||||||
c. loops waiting for `local stdout closed && remote stdout closed`
|
|
||||||
"""
|
|
||||||
|
|
||||||
import getopt
|
|
||||||
import inspect
|
|
||||||
import os
|
|
||||||
import shutil
|
|
||||||
import socket
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
import threading
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.master
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
from mitogen.core import LOG, IOLOG
|
|
||||||
|
|
||||||
|
|
||||||
SSH_GETOPTS = (
|
|
||||||
"1246ab:c:e:fgi:kl:m:no:p:qstvx"
|
|
||||||
"ACD:E:F:I:KL:MNO:PQ:R:S:TVw:W:XYy"
|
|
||||||
)
|
|
||||||
|
|
||||||
_mitogen = None
|
|
||||||
|
|
||||||
|
|
||||||
class IoPump(mitogen.core.BasicStream):
    """
    Bidirectional pump between a pair of raw file descriptors and the broker:
    buffers writes until the broker reports the fd writable, and fires a
    'receive' event for every chunk read from the other fd.
    """
    # Bytes queued by write() but not yet flushed to transmit_side.
    _output_buf = ''
    # Set by close(); disconnect is deferred until the buffer drains.
    _closed = False

    def __init__(self, broker, stdin_fd, stdout_fd):
        # Note the crossed naming: we READ the process's stdout and WRITE its
        # stdin.
        self._broker = broker
        self.receive_side = mitogen.core.Side(self, stdout_fd)
        self.transmit_side = mitogen.core.Side(self, stdin_fd)

    def write(self, s):
        # Queue *s* and ask the broker to start polling the fd for
        # writability.
        self._output_buf += s
        self._broker._start_transmit(self)

    def close(self):
        self._closed = True
        # If local process hasn't exitted yet, ensure its write buffer is
        # drained before lazily triggering disconnect in on_transmit.
        if self.transmit_side.fd is not None:
            self._broker._start_transmit(self)

    def on_shutdown(self, broker):
        # Broker shutdown behaves exactly like a user-initiated close().
        self.close()

    def on_transmit(self, broker):
        # Flush as much of the buffer as the fd accepts; a None return from
        # Side.write() signals the far side disconnected.
        written = self.transmit_side.write(self._output_buf)
        IOLOG.debug('%r.on_transmit() -> len %r', self, written)
        if written is None:
            self.on_disconnect(broker)
        else:
            self._output_buf = self._output_buf[written:]

        if not self._output_buf:
            # Buffer drained: stop polling, and complete any pending close().
            broker._stop_transmit(self)
            if self._closed:
                self.on_disconnect(broker)

    def on_receive(self, broker):
        # Forward readable bytes as a 'receive' event; empty read means EOF.
        s = self.receive_side.read()
        IOLOG.debug('%r.on_receive() -> len %r', self, len(s))
        if s:
            mitogen.core.fire(self, 'receive', s)
        else:
            self.on_disconnect(broker)

    def __repr__(self):
        return 'IoPump(%r, %r)' % (
            self.receive_side.fd,
            self.transmit_side.fd,
        )
|
|
||||||
|
|
||||||
|
|
||||||
class Process(object):
    """
    Manages the lifetime and pipe connections of the SSH command running in the
    slave.

    One instance lives on each side of the bridge: the master side wraps this
    process's own stdio (FDs 1/0), the slave side wraps the pipes of the
    spawned subprocess.
    """
    def __init__(self, router, stdin_fd, stdout_fd, proc=None):
        """
        :param router:
            Router used to allocate message handles and reach the peer.
        :param int stdin_fd:
            FD written with data received from the peer.
        :param int stdout_fd:
            FD read for data to forward to the peer.
        :param proc:
            Optional subprocess.Popen; when given, its exit is monitored and
            reported over the control channel.
        """
        self.router = router
        self.stdin_fd = stdin_fd
        self.stdout_fd = stdout_fd
        self.proc = proc
        # Handles the peer sends control and stdin messages to.
        self.control_handle = router.add_handler(self._on_control)
        self.stdin_handle = router.add_handler(self._on_stdin)
        self.pump = IoPump(router.broker, stdin_fd, stdout_fd)
        # Senders towards the peer; assigned by start_master()/_on_start().
        self.stdin = None
        self.control = None
        self.wake_event = threading.Event()

        mitogen.core.listen(self.pump, 'disconnect', self._on_pump_disconnect)
        mitogen.core.listen(self.pump, 'receive', self._on_pump_receive)

        if proc:
            pmon = mitogen.parent.ProcessMonitor.instance()
            pmon.add(proc.pid, self._on_proc_exit)

    def __repr__(self):
        return 'Process(%r, %r)' % (self.stdin_fd, self.stdout_fd)

    def _on_proc_exit(self, status):
        """Forward the local child's exit status over the control channel."""
        LOG.debug('%r._on_proc_exit(%r)', self, status)
        self.control.put(('exit', status))

    def _on_stdin(self, msg):
        """Receive stdin data from the peer and write it into the pump."""
        if msg.is_dead:
            # Bug fix: the original logged the unbound local `data` here,
            # raising NameError before the pump could be closed.
            IOLOG.debug('%r._on_stdin() -> %r', self, msg)
            self.pump.close()
            return

        data = msg.unpickle()
        IOLOG.debug('%r._on_stdin() -> len %d', self, len(data))
        self.pump.write(data)

    def _on_control(self, msg):
        """Dispatch a ('command', arg) tuple to the matching _on_<command>."""
        if not msg.is_dead:
            command, arg = msg.unpickle(throw=False)
            LOG.debug('%r._on_control(%r, %s)', self, command, arg)

            func = getattr(self, '_on_%s' % (command,), None)
            if func:
                return func(msg, arg)

            LOG.warning('%r: unknown command %r', self, command)

    def _on_start(self, msg, arg):
        """Peer sent ('start', (control_handle, stdin_handle)): begin pumping."""
        dest = mitogen.core.Context(self.router, msg.src_id)
        self.control = mitogen.core.Sender(dest, arg[0])
        self.stdin = mitogen.core.Sender(dest, arg[1])
        self.router.broker.start_receive(self.pump)

    def _on_exit(self, msg, arg):
        """Peer reported exit: kill the local child, or stop our broker."""
        LOG.debug('on_exit: proc = %r', self.proc)
        if self.proc:
            self.proc.terminate()
        else:
            self.router.broker.shutdown()

    def _on_pump_receive(self, s):
        """Data read from the local FD: forward it to the peer's stdin."""
        IOLOG.info('%r._on_pump_receive(len %d)', self, len(s))
        self.stdin.put(s)

    def _on_pump_disconnect(self):
        """Local FD closed: notify listeners and wake any wait()er."""
        LOG.debug('%r._on_pump_disconnect()', self)
        mitogen.core.fire(self, 'disconnect')
        self.stdin.close()
        self.wake_event.set()

    def start_master(self, stdin, control):
        """Adopt peer senders and tell the slave our handles so IO can start."""
        self.stdin = stdin
        self.control = control
        control.put(('start', (self.control_handle, self.stdin_handle)))
        self.router.broker.start_receive(self.pump)

    def wait(self):
        """Block the calling thread until the pump disconnects."""
        while not self.wake_event.isSet():
            # Timeout is used so that sleep is interruptible, as blocking
            # variants of libc thread operations cannot be interrupted e.g. via
            # KeyboardInterrupt. isSet() test and wait() are separate since in
            # <2.7 wait() always returns None.
            self.wake_event.wait(0.1)
|
|
||||||
|
|
||||||
|
|
||||||
@mitogen.core.takes_router
def _start_slave(src_id, cmdline, router):
    """
    This runs in the target context; it is invoked by _fakessh_main running in
    the fakessh context immediately after startup. It starts the slave process
    (at the point where it has a stdin handle for the target but nothing yet
    to write stdout to) and returns the handles the master needs to complete
    the connection.

    :param int src_id:
        Context ID of the calling fakessh context (currently unused here).
    :param str cmdline:
        Shell command line, already catted together ssh-style by the caller.
    :returns:
        Tuple of `(control_handle, stdin_handle)` for the new Process.
    """
    LOG.debug('_start_slave(%r, %r)', router, cmdline)

    proc = subprocess.Popen(
        cmdline,
        # SSH server always uses user's shell.
        shell=True,
        # SSH server always executes new commands in the user's HOME.
        cwd=os.path.expanduser('~'),

        stdin=subprocess.PIPE,
        stdout=subprocess.PIPE,
    )

    process = Process(
        router,
        proc.stdin.fileno(),
        proc.stdout.fileno(),
        proc,
    )

    return process.control_handle, process.stdin_handle
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# SSH client interface.
|
|
||||||
#
|
|
||||||
|
|
||||||
|
|
||||||
def exit():
    """
    Shut down the fakessh process by stopping its broker.

    NOTE: intentionally shadows the `exit` builtin within this module.
    """
    _mitogen.broker.shutdown()
|
|
||||||
|
|
||||||
|
|
||||||
def die(msg, *args):
    """
    Write `msg` (optionally %-formatted with `args`) to stderr, then shut
    down the fakessh broker via exit().
    """
    text = msg % args if args else msg
    sys.stderr.write('%s\n' % (text,))
    exit()
|
|
||||||
|
|
||||||
|
|
||||||
def parse_args():
    """
    Parse sys.argv as an ssh(1)-style command line. Options may appear both
    before and after the hostname, so option parsing restarts once after the
    hostname has been consumed.

    :returns:
        Tuple of `(hostname, allopts, args)` where `hostname` is the first
        non-option argument (or ``None``), `allopts` is the merged getopt
        option list, and `args` is the remaining remote command vector.
    """
    hostname = None
    remain = sys.argv[1:]
    allopts = []
    # Bug fix: initialize so an empty argv returns (None, [], []) instead of
    # raising NameError on the return statement below.
    args = []
    restarted = 0

    while remain and restarted < 2:
        opts, args = getopt.getopt(remain, SSH_GETOPTS)
        remain = remain[:]  # getopt bug!
        allopts += opts
        if not args:
            break

        if not hostname:
            hostname = args.pop(0)
            # Resume scanning just past the hostname so trailing options
            # (ssh allows them after the host) are parsed on the next pass.
            remain = remain[remain.index(hostname) + 1:]

        restarted += 1

    return hostname, allopts, args
|
|
||||||
|
|
||||||
|
|
||||||
@mitogen.core.takes_econtext
def _fakessh_main(dest_context_id, econtext):
    """
    Entry point running inside the fakessh context: parse the ssh(1)-style
    command line, start the slave in context `dest_context_id`, then bridge
    this process's stdio (FDs 1 and 0) to the slave until both sides close.
    """
    hostname, opts, args = parse_args()
    if not hostname:
        die('Missing hostname')

    subsystem = False
    for opt, optarg in opts:
        if opt == '-s':
            subsystem = True
        else:
            LOG.debug('Warning option %s %s is ignored.', opt, optarg)

    LOG.debug('hostname: %r', hostname)
    LOG.debug('opts: %r', opts)
    LOG.debug('args: %r', args)

    if subsystem:
        die('-s <subsystem> is not yet supported')

    if not args:
        die('fakessh: login mode not supported and no command specified')

    dest = mitogen.parent.Context(econtext.router, dest_context_id)

    # Even though SSH receives an argument vector, it still cats the vector
    # together before sending to the server, the server just uses /bin/sh -c to
    # run the command. We must remain puke-for-puke compatible.
    control_handle, stdin_handle = dest.call(_start_slave,
        mitogen.context_id, ' '.join(args))

    LOG.debug('_fakessh_main: received control_handle=%r, stdin_handle=%r',
              control_handle, stdin_handle)

    # FDs 1/0: this process's own stdout/stdin become the master-side pump.
    process = Process(econtext.router, 1, 0)
    process.start_master(
        stdin=mitogen.core.Sender(dest, stdin_handle),
        control=mitogen.core.Sender(dest, control_handle),
    )
    process.wait()
    process.control.put(('exit', None))
|
|
||||||
|
|
||||||
|
|
||||||
def _get_econtext_config(context, sock2):
    """
    Build the ExternalContext bootstrap configuration dict for the fakessh
    child, wiring both its in_fd and out_fd to the socketpair end `sock2`.
    """
    ancestry = [mitogen.context_id] + mitogen.parent_ids[:]
    router = context.router
    fd = sock2.fileno()
    return {
        'context_id': context.context_id,
        'core_src_fd': None,
        'debug': getattr(router, 'debug', False),
        'in_fd': fd,
        'log_level': mitogen.parent.get_log_level(),
        'max_message_size': router.max_message_size,
        'out_fd': fd,
        'parent_ids': ancestry,
        'profiling': getattr(router, 'profiling', False),
        'unidirectional': getattr(router, 'unidirectional', False),
        'setup_stdio': False,
        'version': mitogen.__version__,
    }
|
|
||||||
|
|
||||||
|
|
||||||
#
|
|
||||||
# Public API.
|
|
||||||
#
|
|
||||||
|
|
||||||
@mitogen.core.takes_econtext
@mitogen.core.takes_router
def run(dest, router, args, deadline=None, econtext=None):
    """
    Run the command specified by `args` such that ``PATH`` searches for SSH by
    the command will cause its attempt to use SSH to execute a remote program
    to be redirected to use mitogen to execute that program using the context
    `dest` instead.

    :param mitogen.core.Context dest:
        The destination context to execute the SSH command line in.
    :param mitogen.core.Router router:
        Router the fakessh context is constructed on.
    :param list[str] args:
        Command line arguments for local program, e.g.
        ``['rsync', '/tmp', 'remote:/tmp']``
    :param deadline:
        Currently unused; retained for interface compatibility.
    :returns:
        Exit status of the child process.
    """
    if econtext is not None:
        mitogen.parent.upgrade_router(econtext)

    context_id = router.allocate_id()
    fakessh = mitogen.parent.Context(router, context_id)
    fakessh.name = u'fakessh.%d' % (context_id,)

    # Socketpair carrying the fakessh context's bootstrap and stream traffic.
    sock1, sock2 = socket.socketpair()

    stream = mitogen.core.Stream(router, context_id)
    stream.name = u'fakessh'
    stream.accept(sock1.fileno(), sock1.fileno())
    router.register(fakessh, stream)

    # Held in socket buffer until process is booted.
    fakessh.call_async(_fakessh_main, dest.context_id)

    tmp_path = tempfile.mkdtemp(prefix='mitogen_fakessh')
    try:
        # Write a self-contained 'ssh' script that boots an ExternalContext
        # over the inherited socketpair FD.
        ssh_path = os.path.join(tmp_path, 'ssh')
        fp = open(ssh_path, 'w')
        try:
            fp.write('#!%s\n' % (mitogen.parent.get_sys_executable(),))
            fp.write(inspect.getsource(mitogen.core))
            fp.write('\n')
            # Bug fix: the original referenced an undefined name `context`
            # here; the Context constructed above is bound to `fakessh`.
            fp.write('ExternalContext(%r).main()\n' % (
                _get_econtext_config(fakessh, sock2),
            ))
        finally:
            fp.close()

        os.chmod(ssh_path, int('0755', 8))
        env = os.environ.copy()
        env.update({
            'PATH': '%s:%s' % (tmp_path, env.get('PATH', '')),
            'ARGV0': mitogen.parent.get_sys_executable(),
            'SSH_PATH': ssh_path,
        })

        proc = subprocess.Popen(args, env=env)
        return proc.wait()
    finally:
        shutil.rmtree(tmp_path)
|
|
|
@ -1,223 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import random
|
|
||||||
import sys
|
|
||||||
import threading
|
|
||||||
import traceback
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
# Module-level logger shared by the fork connection method.
LOG = logging.getLogger('mitogen')

# Python 2.4/2.5 cannot support fork+threads whatsoever, it doesn't even fix up
# interpreter state. So 2.4/2.5 interpreters start .local() contexts for
# isolation instead. Since we don't have any crazy memory sharing problems to
# avoid, there is no virginal fork parent either. The child is started directly
# from the login/become process. In future this will be default everywhere,
# fork is brainwrong from the stone age.
FORK_SUPPORTED = sys.version_info >= (2, 6)
|
|
||||||
|
|
||||||
|
|
||||||
class Error(mitogen.core.StreamError):
    """
    Raised when the fork method cannot be used, e.g. on Python interpreters
    older than 2.6 (see FORK_SUPPORTED).
    """
    pass
|
|
||||||
|
|
||||||
|
|
||||||
def fixup_prngs():
    """
    Add 256 bits of /dev/urandom to OpenSSL's PRNG in the child, and re-seed
    the random package with the same data, so parent and child do not share
    PRNG state after fork.
    """
    entropy = os.urandom(256 // 8)
    random.seed(entropy)
    # Only touch OpenSSL's pool if the ssl module was already imported.
    if 'ssl' in sys.modules:
        sys.modules['ssl'].RAND_add(entropy, 75.0)
|
|
||||||
|
|
||||||
|
|
||||||
def reset_logging_framework():
    """
    After fork, ensure any logging.Handler locks are recreated, as a variety of
    threads in the parent may have been using the logging package at the moment
    of fork.

    It is not possible to solve this problem in general; see
    https://github.com/dw/mitogen/issues/150 for a full discussion.
    """
    # Replace the module-level lock guarding handler (de)registration.
    logging._lock = threading.RLock()

    # The root logger does not appear in the loggerDict.
    for name in [None] + list(logging.Logger.manager.loggerDict):
        for handler in logging.getLogger(name).handlers:
            handler.createLock()

    # Drop any mitogen LogHandler from the root logger; presumably it routes
    # records to the parent's stream, which is invalid in the child — the
    # child installs its own handlers during bootstrap. TODO confirm.
    root = logging.getLogger()
    root.handlers = [
        handler
        for handler in root.handlers
        if not isinstance(handler, mitogen.core.LogHandler)
    ]
|
|
||||||
|
|
||||||
|
|
||||||
def on_fork():
    """
    Should be called by any program integrating Mitogen each time the process
    is forked, in the context of the new child.

    Recreates every lock and per-process resource that fork(2) duplicates in
    a potentially inconsistent state.
    """
    reset_logging_framework()  # Must be first!
    fixup_prngs()
    # Recreate fork-unsafe shared state inside mitogen.core.
    mitogen.core.Latch._on_fork()
    mitogen.core.Side._on_fork()
    mitogen.core.ExternalContext.service_stub_lock = threading.Lock()

    # mitogen.service may not be loaded; reset its pool lock only if it is.
    mitogen__service = sys.modules.get('mitogen.service')
    if mitogen__service:
        mitogen__service._pool_lock = threading.Lock()
|
|
||||||
|
|
||||||
|
|
||||||
def handle_child_crash():
    """
    Respond to _child_main() crashing by ensuring the relevant exception is
    logged to /dev/tty, then hard-exit without running inherited atexit
    handlers.
    """
    tty = open('/dev/tty', 'wb')
    try:
        report = '\n\nFORKED CHILD PID %d CRASHED\n%s\n\n' % (
            os.getpid(),
            traceback.format_exc(),
        )
        # Bug fix: the descriptor is opened in binary mode, so encode
        # explicitly; on Python 3 writing str to a 'wb' file raises TypeError
        # and the crash report would be lost. str.encode also works on 2.x.
        tty.write(report.encode('utf-8'))
    finally:
        tty.close()
    os._exit(1)
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Stream implementing the 'fork' connection method: the child context is
    created by fork(2)ing the current process rather than executing a new
    interpreter over SSH or a local shell.
    """
    child_is_immediate_subprocess = True

    #: Reference to the importer, if any, recovered from the parent.
    importer = None

    #: User-supplied function for cleaning up child process state.
    on_fork = None

    python_version_msg = (
        "The mitogen.fork method is not supported on Python versions "
        "prior to 2.6, since those versions made no attempt to repair "
        "critical interpreter state following a fork. Please use the "
        "local() method instead."
    )

    def construct(self, old_router, max_message_size, on_fork=None,
                  debug=False, profiling=False, unidirectional=False,
                  on_start=None):
        """
        Validate interpreter support and record fork-specific options.
        Note `unidirectional` is accepted but deliberately forced to False
        below — the fork method only supports a subset of options.
        """
        if not FORK_SUPPORTED:
            raise Error(self.python_version_msg)

        # fork method only supports a tiny subset of options.
        super(Stream, self).construct(max_message_size=max_message_size,
                                      debug=debug, profiling=profiling,
                                      unidirectional=False)
        self.on_fork = on_fork
        self.on_start = on_start

        # Reuse the parent's in-memory importer so the child need not refetch
        # modules, when the old router forwards modules.
        responder = getattr(old_router, 'responder', None)
        if isinstance(responder, mitogen.parent.ModuleForwarder):
            self.importer = responder.importer

    name_prefix = u'fork'

    def start_child(self):
        """
        fork(2) the current process. The parent returns (pid, fd, None) for
        the stream machinery; the child never returns — it runs
        _wrap_child_main() and exits via os._exit().
        """
        parentfp, childfp = mitogen.parent.create_socketpair()
        self.pid = os.fork()
        if self.pid:
            childfp.close()
            # Decouple the socket from the lifetime of the Python socket object.
            fd = os.dup(parentfp.fileno())
            parentfp.close()
            return self.pid, fd, None
        else:
            parentfp.close()
            self._wrap_child_main(childfp)

    def _wrap_child_main(self, childfp):
        """Run _child_main(), reporting any crash via handle_child_crash()."""
        try:
            self._child_main(childfp)
        except BaseException:
            handle_child_crash()

    def _child_main(self, childfp):
        """
        Child-side bootstrap: repair post-fork state, wire the socketpair to
        the FDs ExternalContext.main() expects, then run the context.
        """
        on_fork()
        if self.on_fork:
            self.on_fork()
        mitogen.core.set_block(childfp.fileno())

        # Expected by the ExternalContext.main().
        os.dup2(childfp.fileno(), 1)
        os.dup2(childfp.fileno(), 100)

        # Overwritten by ExternalContext.main(); we must replace the
        # parent-inherited descriptors that were closed by Side._on_fork() to
        # avoid ExternalContext.main() accidentally allocating new files over
        # the standard handles.
        os.dup2(childfp.fileno(), 0)

        # Avoid corrupting the stream on fork crash by dupping /dev/null over
        # stderr. Instead, handle_child_crash() uses /dev/tty to log errors.
        devnull = os.open('/dev/null', os.O_WRONLY)
        if devnull != 2:
            os.dup2(devnull, 2)
            os.close(devnull)

        # If we're unlucky, childfp.fileno() may coincidentally be one of our
        # desired FDs. In that case closing it breaks ExternalContext.main().
        if childfp.fileno() not in (0, 1, 100):
            childfp.close()

        config = self.get_econtext_config()
        config['core_src_fd'] = None
        config['importer'] = self.importer
        config['setup_package'] = False
        if self.on_start:
            config['on_start'] = self.on_start

        try:
            try:
                mitogen.core.ExternalContext(config).main()
            except Exception:
                # TODO: report exception somehow.
                os._exit(72)
        finally:
            # Don't trigger atexit handlers, they were copied from the parent.
            os._exit(0)

    def _connect_bootstrap(self):
        # None required: the forked child is already a functioning context.
        pass
|
|
|
@ -1,65 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Connection method that starts the child inside a FreeBSD jail using
    jexec(8), optionally as a specific user.
    """
    child_is_immediate_subprocess = False
    create_child_args = {
        'merge_stdio': True
    }

    #: Name of the target jail.
    container = None
    #: Optional user to become inside the jail (jexec -U).
    username = None
    #: Path to the jexec binary.
    jexec_path = '/usr/sbin/jexec'

    def construct(self, container, jexec_path=None, username=None, **kwargs):
        """Record the target jail name and optional user/binary overrides."""
        super(Stream, self).construct(**kwargs)
        self.username = username
        self.container = container
        self.jexec_path = jexec_path or self.jexec_path

    def _get_name(self):
        return u'jail.%s' % (self.container,)

    def get_boot_command(self):
        argv = [self.jexec_path]
        if self.username:
            argv += ['-U', self.username]
        argv.append(self.container)
        return argv + super(Stream, self).get_boot_command()
|
|
|
@ -1,65 +0,0 @@
|
||||||
# Copyright 2018, Yannig Perre
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Connection method that runs the child inside a Kubernetes pod using
    ``kubectl exec``.
    """
    child_is_immediate_subprocess = True

    #: Name of the target pod.
    pod = None
    #: Path to the kubectl binary.
    kubectl_path = 'kubectl'
    #: Extra arguments inserted before the 'exec' subcommand.
    kubectl_args = None

    # TODO: better way of capturing errors such as "No such container."
    create_child_args = {
        'merge_stdio': True
    }

    def construct(self, pod, kubectl_path=None, kubectl_args=None, **kwargs):
        """Record the target pod and optional kubectl binary/arguments."""
        super(Stream, self).construct(**kwargs)
        assert pod
        self.pod = pod
        self.kubectl_args = kubectl_args or []
        self.kubectl_path = kubectl_path or self.kubectl_path

    def _get_name(self):
        return u'kubectl.%s%s' % (self.pod, self.kubectl_args)

    def get_boot_command(self):
        prefix = [self.kubectl_path]
        prefix.extend(self.kubectl_args)
        prefix.extend(['exec', '-it', self.pod, '--'])
        return prefix + super(Stream, self).get_boot_command()
|
|
|
@ -1,75 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Connection method that attaches to a running LXC container using
    lxc-attach(1).
    """
    child_is_immediate_subprocess = False
    create_child_args = {
        # If lxc-attach finds any of stdin, stdout, stderr connected to a TTY,
        # to prevent input injection it creates a proxy pty, forcing all IO to
        # be buffered in <4KiB chunks. So ensure stderr is also routed to the
        # socketpair.
        'merge_stdio': True
    }

    #: Name of the target container.
    container = None
    #: Path to the lxc-attach binary.
    lxc_attach_path = 'lxc-attach'

    eof_error_hint = (
        'Note: many versions of LXC do not report program execution failure '
        'meaningfully. Please check the host logs (/var/log) for more '
        'information.'
    )

    def construct(self, container, lxc_attach_path=None, **kwargs):
        """Record the target container and optional lxc-attach override."""
        super(Stream, self).construct(**kwargs)
        self.container = container
        self.lxc_attach_path = lxc_attach_path or self.lxc_attach_path

    def _get_name(self):
        return u'lxc.%s' % (self.container,)

    def get_boot_command(self):
        argv = [
            self.lxc_attach_path,
            '--clear-env',
            '--name', self.container,
            '--',
        ]
        return argv + super(Stream, self).get_boot_command()
|
|
|
@ -1,77 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
|
|
||||||
child_is_immediate_subprocess = False
|
|
||||||
create_child_args = {
|
|
||||||
# If lxc finds any of stdin, stdout, stderr connected to a TTY, to
|
|
||||||
# prevent input injection it creates a proxy pty, forcing all IO to be
|
|
||||||
# buffered in <4KiB chunks. So ensure stderr is also routed to the
|
|
||||||
# socketpair.
|
|
||||||
'merge_stdio': True
|
|
||||||
}
|
|
||||||
|
|
||||||
container = None
|
|
||||||
lxc_path = 'lxc'
|
|
||||||
python_path = 'python'
|
|
||||||
|
|
||||||
eof_error_hint = (
|
|
||||||
'Note: many versions of LXC do not report program execution failure '
|
|
||||||
'meaningfully. Please check the host logs (/var/log) for more '
|
|
||||||
'information.'
|
|
||||||
)
|
|
||||||
|
|
||||||
def construct(self, container, lxc_path=None, **kwargs):
|
|
||||||
super(Stream, self).construct(**kwargs)
|
|
||||||
self.container = container
|
|
||||||
if lxc_path:
|
|
||||||
self.lxc_path = lxc_path
|
|
||||||
|
|
||||||
def _get_name(self):
|
|
||||||
return u'lxd.' + self.container
|
|
||||||
|
|
||||||
def get_boot_command(self):
|
|
||||||
bits = [
|
|
||||||
self.lxc_path,
|
|
||||||
'exec',
|
|
||||||
'--mode=noninteractive',
|
|
||||||
self.container,
|
|
||||||
'--',
|
|
||||||
]
|
|
||||||
return bits + super(Stream, self).get_boot_command()
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,139 +0,0 @@
|
||||||
# Copyright 2017, Alex Willmer
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
|
||||||
from io import StringIO
|
|
||||||
except ImportError:
|
|
||||||
from StringIO import StringIO
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
|
|
||||||
if sys.version_info < (2, 7, 11):
|
|
||||||
from mitogen.compat import tokenize
|
|
||||||
else:
|
|
||||||
import tokenize
|
|
||||||
|
|
||||||
|
|
||||||
def minimize_source(source):
|
|
||||||
"""Remove comments and docstrings from Python `source`, preserving line
|
|
||||||
numbers and syntax of empty blocks.
|
|
||||||
|
|
||||||
:param str source:
|
|
||||||
The source to minimize.
|
|
||||||
|
|
||||||
:returns str:
|
|
||||||
The minimized source.
|
|
||||||
"""
|
|
||||||
source = mitogen.core.to_text(source)
|
|
||||||
tokens = tokenize.generate_tokens(StringIO(source).readline)
|
|
||||||
tokens = strip_comments(tokens)
|
|
||||||
tokens = strip_docstrings(tokens)
|
|
||||||
tokens = reindent(tokens)
|
|
||||||
return tokenize.untokenize(tokens)
|
|
||||||
|
|
||||||
|
|
||||||
def strip_comments(tokens):
|
|
||||||
"""Drop comment tokens from a `tokenize` stream.
|
|
||||||
|
|
||||||
Comments on lines 1-2 are kept, to preserve hashbang and encoding.
|
|
||||||
Trailing whitespace is remove from all lines.
|
|
||||||
"""
|
|
||||||
prev_typ = None
|
|
||||||
prev_end_col = 0
|
|
||||||
for typ, tok, (start_row, start_col), (end_row, end_col), line in tokens:
|
|
||||||
if typ in (tokenize.NL, tokenize.NEWLINE):
|
|
||||||
if prev_typ in (tokenize.NL, tokenize.NEWLINE):
|
|
||||||
start_col = 0
|
|
||||||
else:
|
|
||||||
start_col = prev_end_col
|
|
||||||
end_col = start_col + 1
|
|
||||||
elif typ == tokenize.COMMENT and start_row > 2:
|
|
||||||
continue
|
|
||||||
prev_typ = typ
|
|
||||||
prev_end_col = end_col
|
|
||||||
yield typ, tok, (start_row, start_col), (end_row, end_col), line
|
|
||||||
|
|
||||||
|
|
||||||
def strip_docstrings(tokens):
|
|
||||||
"""Replace docstring tokens with NL tokens in a `tokenize` stream.
|
|
||||||
|
|
||||||
Any STRING token not part of an expression is deemed a docstring.
|
|
||||||
Indented docstrings are not yet recognised.
|
|
||||||
"""
|
|
||||||
stack = []
|
|
||||||
state = 'wait_string'
|
|
||||||
for t in tokens:
|
|
||||||
typ = t[0]
|
|
||||||
if state == 'wait_string':
|
|
||||||
if typ in (tokenize.NL, tokenize.COMMENT):
|
|
||||||
yield t
|
|
||||||
elif typ in (tokenize.DEDENT, tokenize.INDENT, tokenize.STRING):
|
|
||||||
stack.append(t)
|
|
||||||
elif typ == tokenize.NEWLINE:
|
|
||||||
stack.append(t)
|
|
||||||
start_line, end_line = stack[0][2][0], stack[-1][3][0]+1
|
|
||||||
for i in range(start_line, end_line):
|
|
||||||
yield tokenize.NL, '\n', (i, 0), (i,1), '\n'
|
|
||||||
for t in stack:
|
|
||||||
if t[0] in (tokenize.DEDENT, tokenize.INDENT):
|
|
||||||
yield t[0], t[1], (i+1, t[2][1]), (i+1, t[3][1]), t[4]
|
|
||||||
del stack[:]
|
|
||||||
else:
|
|
||||||
stack.append(t)
|
|
||||||
for t in stack: yield t
|
|
||||||
del stack[:]
|
|
||||||
state = 'wait_newline'
|
|
||||||
elif state == 'wait_newline':
|
|
||||||
if typ == tokenize.NEWLINE:
|
|
||||||
state = 'wait_string'
|
|
||||||
yield t
|
|
||||||
|
|
||||||
|
|
||||||
def reindent(tokens, indent=' '):
|
|
||||||
"""Replace existing indentation in a token steam, with `indent`.
|
|
||||||
"""
|
|
||||||
old_levels = []
|
|
||||||
old_level = 0
|
|
||||||
new_level = 0
|
|
||||||
for typ, tok, (start_row, start_col), (end_row, end_col), line in tokens:
|
|
||||||
if typ == tokenize.INDENT:
|
|
||||||
old_levels.append(old_level)
|
|
||||||
old_level = len(tok)
|
|
||||||
new_level += 1
|
|
||||||
tok = indent * new_level
|
|
||||||
elif typ == tokenize.DEDENT:
|
|
||||||
old_level = old_levels.pop()
|
|
||||||
new_level -= 1
|
|
||||||
start_col = max(0, start_col - old_level + new_level)
|
|
||||||
if start_row == end_row:
|
|
||||||
end_col = start_col + len(tok)
|
|
||||||
yield typ, tok, (start_row, start_col), (end_row, end_col), line
|
|
|
@ -1,183 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
"""
|
|
||||||
Support for operating in a mixed threading/forking environment.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import socket
|
|
||||||
import sys
|
|
||||||
import weakref
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
|
|
||||||
|
|
||||||
# List of weakrefs. On Python 2.4, mitogen.core registers its Broker on this
|
|
||||||
# list and mitogen.service registers its Pool too.
|
|
||||||
_brokers = weakref.WeakKeyDictionary()
|
|
||||||
_pools = weakref.WeakKeyDictionary()
|
|
||||||
|
|
||||||
|
|
||||||
def _notice_broker_or_pool(obj):
|
|
||||||
"""
|
|
||||||
Used by :mod:`mitogen.core` and :mod:`mitogen.service` to automatically
|
|
||||||
register every broker and pool on Python 2.4/2.5.
|
|
||||||
"""
|
|
||||||
if isinstance(obj, mitogen.core.Broker):
|
|
||||||
_brokers[obj] = True
|
|
||||||
else:
|
|
||||||
_pools[obj] = True
|
|
||||||
|
|
||||||
|
|
||||||
def wrap_os__fork():
|
|
||||||
corker = Corker(
|
|
||||||
brokers=list(_brokers),
|
|
||||||
pools=list(_pools),
|
|
||||||
)
|
|
||||||
try:
|
|
||||||
corker.cork()
|
|
||||||
return os__fork()
|
|
||||||
finally:
|
|
||||||
corker.uncork()
|
|
||||||
|
|
||||||
|
|
||||||
# If Python 2.4/2.5 where threading state is not fixed up, subprocess.Popen()
|
|
||||||
# may still deadlock due to the broker thread. In this case, pause os.fork() so
|
|
||||||
# that all active threads are paused during fork.
|
|
||||||
if sys.version_info < (2, 6):
|
|
||||||
os__fork = os.fork
|
|
||||||
os.fork = wrap_os__fork
|
|
||||||
|
|
||||||
|
|
||||||
class Corker(object):
|
|
||||||
"""
|
|
||||||
Arrange for :class:`mitogen.core.Broker` and optionally
|
|
||||||
:class:`mitogen.service.Pool` to be temporarily "corked" while fork
|
|
||||||
operations may occur.
|
|
||||||
|
|
||||||
In a mixed threading/forking environment, it is critical no threads are
|
|
||||||
active at the moment of fork, as they could hold mutexes whose state is
|
|
||||||
unrecoverably snapshotted in the locked state in the fork child, causing
|
|
||||||
deadlocks at random future moments.
|
|
||||||
|
|
||||||
To ensure a target thread has all locks dropped, it is made to write a
|
|
||||||
large string to a socket with a small buffer that has :data:`os.O_NONBLOCK`
|
|
||||||
disabled. CPython will drop the GIL and enter the ``write()`` system call,
|
|
||||||
where it will block until the socket buffer is drained, or the write side
|
|
||||||
is closed.
|
|
||||||
|
|
||||||
:class:`mitogen.core.Poller` is used to ensure the thread really has
|
|
||||||
blocked outside any Python locks, by checking if the socket buffer has
|
|
||||||
started to fill.
|
|
||||||
|
|
||||||
Since this necessarily involves posting a message to every existent thread
|
|
||||||
and verifying acknowledgement, it will never be a fast operation.
|
|
||||||
|
|
||||||
This does not yet handle the case of corking being initiated from within a
|
|
||||||
thread that is also a cork target.
|
|
||||||
|
|
||||||
:param brokers:
|
|
||||||
Sequence of :class:`mitogen.core.Broker` instances to cork.
|
|
||||||
:param pools:
|
|
||||||
Sequence of :class:`mitogen.core.Pool` instances to cork.
|
|
||||||
"""
|
|
||||||
def __init__(self, brokers=(), pools=()):
|
|
||||||
self.brokers = brokers
|
|
||||||
self.pools = pools
|
|
||||||
|
|
||||||
def _do_cork(self, s, wsock):
|
|
||||||
try:
|
|
||||||
try:
|
|
||||||
while True:
|
|
||||||
# at least EINTR is possible. Do our best to keep handling
|
|
||||||
# outside the GIL in this case using sendall().
|
|
||||||
wsock.sendall(s)
|
|
||||||
except socket.error:
|
|
||||||
pass
|
|
||||||
finally:
|
|
||||||
wsock.close()
|
|
||||||
|
|
||||||
def _cork_one(self, s, obj):
|
|
||||||
"""
|
|
||||||
Construct a socketpair, saving one side of it, and passing the other to
|
|
||||||
`obj` to be written to by one of its threads.
|
|
||||||
"""
|
|
||||||
rsock, wsock = mitogen.parent.create_socketpair(size=4096)
|
|
||||||
mitogen.core.set_cloexec(rsock.fileno())
|
|
||||||
mitogen.core.set_cloexec(wsock.fileno())
|
|
||||||
mitogen.core.set_block(wsock) # gevent
|
|
||||||
self._rsocks.append(rsock)
|
|
||||||
obj.defer(self._do_cork, s, wsock)
|
|
||||||
|
|
||||||
def _verify_one(self, rsock):
|
|
||||||
"""
|
|
||||||
Pause until the socket `rsock` indicates readability, due to
|
|
||||||
:meth:`_do_cork` triggering a blocking write on another thread.
|
|
||||||
"""
|
|
||||||
poller = mitogen.core.Poller()
|
|
||||||
poller.start_receive(rsock.fileno())
|
|
||||||
try:
|
|
||||||
while True:
|
|
||||||
for fd in poller.poll():
|
|
||||||
return
|
|
||||||
finally:
|
|
||||||
poller.close()
|
|
||||||
|
|
||||||
def cork(self):
|
|
||||||
"""
|
|
||||||
Arrange for any associated brokers and pools to be paused with no locks
|
|
||||||
held. This will not return until each thread acknowledges it has ceased
|
|
||||||
execution.
|
|
||||||
"""
|
|
||||||
s = mitogen.core.b('CORK') * ((128 // 4) * 1024)
|
|
||||||
self._rsocks = []
|
|
||||||
|
|
||||||
# Pools must be paused first, as existing work may require the
|
|
||||||
# participation of a broker in order to complete.
|
|
||||||
for pool in self.pools:
|
|
||||||
if not pool.closed:
|
|
||||||
for x in range(pool.size):
|
|
||||||
self._cork_one(s, pool)
|
|
||||||
|
|
||||||
for broker in self.brokers:
|
|
||||||
if broker._alive:
|
|
||||||
self._cork_one(s, broker)
|
|
||||||
|
|
||||||
# Pause until we can detect every thread has entered write().
|
|
||||||
for rsock in self._rsocks:
|
|
||||||
self._verify_one(rsock)
|
|
||||||
|
|
||||||
def uncork(self):
|
|
||||||
"""
|
|
||||||
Arrange for paused threads to resume operation.
|
|
||||||
"""
|
|
||||||
for rsock in self._rsocks:
|
|
||||||
rsock.close()
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,166 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
"""mitogen.profiler
|
|
||||||
Record and report cProfile statistics from a run. Creates one aggregated
|
|
||||||
output file, one aggregate containing only workers, and one for the
|
|
||||||
top-level process.
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
mitogen.profiler record <dest_path> <tool> [args ..]
|
|
||||||
mitogen.profiler report <dest_path> [sort_mode]
|
|
||||||
mitogen.profiler stat <sort_mode> <tool> [args ..]
|
|
||||||
|
|
||||||
Mode:
|
|
||||||
record: Record a trace.
|
|
||||||
report: Report on a previously recorded trace.
|
|
||||||
stat: Record and report in a single step.
|
|
||||||
|
|
||||||
Where:
|
|
||||||
dest_path: Filesystem prefix to write .pstats files to.
|
|
||||||
sort_mode: Sorting mode; defaults to "cumulative". See:
|
|
||||||
https://docs.python.org/2/library/profile.html#pstats.Stats.sort_stats
|
|
||||||
|
|
||||||
Example:
|
|
||||||
mitogen.profiler record /tmp/mypatch ansible-playbook foo.yml
|
|
||||||
mitogen.profiler dump /tmp/mypatch-worker.pstats
|
|
||||||
"""
|
|
||||||
|
|
||||||
from __future__ import print_function
|
|
||||||
import os
|
|
||||||
import pstats
|
|
||||||
import cProfile
|
|
||||||
import shutil
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
import time
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
|
|
||||||
|
|
||||||
def try_merge(stats, path):
|
|
||||||
try:
|
|
||||||
stats.add(path)
|
|
||||||
return True
|
|
||||||
except Exception as e:
|
|
||||||
print('Failed. Race? Will retry. %s' % (e,))
|
|
||||||
return False
|
|
||||||
|
|
||||||
|
|
||||||
def merge_stats(outpath, inpaths):
|
|
||||||
first, rest = inpaths[0], inpaths[1:]
|
|
||||||
for x in range(5):
|
|
||||||
try:
|
|
||||||
stats = pstats.Stats(first)
|
|
||||||
except EOFError:
|
|
||||||
time.sleep(0.2)
|
|
||||||
continue
|
|
||||||
|
|
||||||
print("Writing %r..." % (outpath,))
|
|
||||||
for path in rest:
|
|
||||||
#print("Merging %r into %r.." % (os.path.basename(path), outpath))
|
|
||||||
for x in range(5):
|
|
||||||
if try_merge(stats, path):
|
|
||||||
break
|
|
||||||
time.sleep(0.2)
|
|
||||||
|
|
||||||
stats.dump_stats(outpath)
|
|
||||||
|
|
||||||
|
|
||||||
def generate_stats(outpath, tmpdir):
|
|
||||||
print('Generating stats..')
|
|
||||||
all_paths = []
|
|
||||||
paths_by_ident = {}
|
|
||||||
|
|
||||||
for name in os.listdir(tmpdir):
|
|
||||||
if name.endswith('-dump.pstats'):
|
|
||||||
ident, _, pid = name.partition('-')
|
|
||||||
path = os.path.join(tmpdir, name)
|
|
||||||
all_paths.append(path)
|
|
||||||
paths_by_ident.setdefault(ident, []).append(path)
|
|
||||||
|
|
||||||
merge_stats('%s-all.pstat' % (outpath,), all_paths)
|
|
||||||
for ident, paths in paths_by_ident.items():
|
|
||||||
merge_stats('%s-%s.pstat' % (outpath, ident), paths)
|
|
||||||
|
|
||||||
|
|
||||||
def do_record(tmpdir, path, *args):
|
|
||||||
env = os.environ.copy()
|
|
||||||
fmt = '%(identity)s-%(pid)s.%(now)s-dump.%(ext)s'
|
|
||||||
env['MITOGEN_PROFILING'] = '1'
|
|
||||||
env['MITOGEN_PROFILE_FMT'] = os.path.join(tmpdir, fmt)
|
|
||||||
rc = subprocess.call(args, env=env)
|
|
||||||
generate_stats(path, tmpdir)
|
|
||||||
return rc
|
|
||||||
|
|
||||||
|
|
||||||
def do_report(tmpdir, path, sort='cumulative'):
|
|
||||||
stats = pstats.Stats(path).sort_stats(sort)
|
|
||||||
stats.print_stats(100)
|
|
||||||
|
|
||||||
|
|
||||||
def do_stat(tmpdir, sort, *args):
|
|
||||||
valid_sorts = pstats.Stats.sort_arg_dict_default
|
|
||||||
if sort not in valid_sorts:
|
|
||||||
sys.stderr.write('Invalid sort %r, must be one of %s\n' %
|
|
||||||
(sort, ', '.join(sorted(valid_sorts))))
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
outfile = os.path.join(tmpdir, 'combined')
|
|
||||||
do_record(tmpdir, outfile, *args)
|
|
||||||
aggs = ('app.main', 'mitogen.broker', 'mitogen.child_main',
|
|
||||||
'mitogen.service.pool', 'Strategy', 'WorkerProcess',
|
|
||||||
'all')
|
|
||||||
for agg in aggs:
|
|
||||||
path = '%s-%s.pstat' % (outfile, agg)
|
|
||||||
if os.path.exists(path):
|
|
||||||
print()
|
|
||||||
print()
|
|
||||||
print('------ Aggregation %r ------' % (agg,))
|
|
||||||
print()
|
|
||||||
do_report(tmpdir, path, sort)
|
|
||||||
print()
|
|
||||||
|
|
||||||
|
|
||||||
def main():
|
|
||||||
if len(sys.argv) < 2 or sys.argv[1] not in ('record', 'report', 'stat'):
|
|
||||||
sys.stderr.write(__doc__)
|
|
||||||
sys.exit(1)
|
|
||||||
|
|
||||||
func = globals()['do_' + sys.argv[1]]
|
|
||||||
tmpdir = tempfile.mkdtemp(prefix='mitogen.profiler')
|
|
||||||
try:
|
|
||||||
sys.exit(func(tmpdir, *sys.argv[2:]) or 0)
|
|
||||||
finally:
|
|
||||||
shutil.rmtree(tmpdir)
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
main()
|
|
|
@ -1,333 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
|
|
||||||
|
|
||||||
class Error(mitogen.core.Error):
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class Event(object):
|
|
||||||
"""
|
|
||||||
Represents one selected event.
|
|
||||||
"""
|
|
||||||
#: The first Receiver or Latch the event traversed.
|
|
||||||
source = None
|
|
||||||
|
|
||||||
#: The :class:`mitogen.core.Message` delivered to a receiver, or the object
|
|
||||||
#: posted to a latch.
|
|
||||||
data = None
|
|
||||||
|
|
||||||
|
|
||||||
class Select(object):
|
|
||||||
"""
|
|
||||||
Support scatter/gather asynchronous calls and waiting on multiple
|
|
||||||
:class:`receivers <mitogen.core.Receiver>`,
|
|
||||||
:class:`channels <mitogen.core.Channel>`,
|
|
||||||
:class:`latches <mitogen.core.Latch>`, and
|
|
||||||
:class:`sub-selects <Select>`.
|
|
||||||
|
|
||||||
If `oneshot` is :data:`True`, then remove each receiver as it yields a
|
|
||||||
result; since :meth:`__iter__` terminates once the final receiver is
|
|
||||||
removed, this makes it convenient to respond to calls made in parallel:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
total = 0
|
|
||||||
recvs = [c.call_async(long_running_operation) for c in contexts]
|
|
||||||
|
|
||||||
for msg in mitogen.select.Select(recvs):
|
|
||||||
print('Got %s from %s' % (msg, msg.receiver))
|
|
||||||
total += msg.unpickle()
|
|
||||||
|
|
||||||
# Iteration ends when last Receiver yields a result.
|
|
||||||
print('Received total %s from %s receivers' % (total, len(recvs)))
|
|
||||||
|
|
||||||
:class:`Select` may drive a long-running scheduler:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
with mitogen.select.Select(oneshot=False) as select:
|
|
||||||
while running():
|
|
||||||
for msg in select:
|
|
||||||
process_result(msg.receiver.context, msg.unpickle())
|
|
||||||
for context, workfunc in get_new_work():
|
|
||||||
select.add(context.call_async(workfunc))
|
|
||||||
|
|
||||||
:class:`Select` may be nested:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
subselects = [
|
|
||||||
mitogen.select.Select(get_some_work()),
|
|
||||||
mitogen.select.Select(get_some_work()),
|
|
||||||
mitogen.select.Select([
|
|
||||||
mitogen.select.Select(get_some_work()),
|
|
||||||
mitogen.select.Select(get_some_work())
|
|
||||||
])
|
|
||||||
]
|
|
||||||
|
|
||||||
for msg in mitogen.select.Select(selects):
|
|
||||||
print(msg.unpickle())
|
|
||||||
|
|
||||||
:class:`Select` may be used to mix inter-thread and inter-process IO:
|
|
||||||
|
|
||||||
latch = mitogen.core.Latch()
|
|
||||||
start_thread(latch)
|
|
||||||
recv = remote_host.call_async(os.getuid)
|
|
||||||
|
|
||||||
sel = Select([latch, recv])
|
|
||||||
event = sel.get_event()
|
|
||||||
if event.source is latch:
|
|
||||||
# woken by a local thread
|
|
||||||
else:
|
|
||||||
# woken by function call result
|
|
||||||
"""
|
|
||||||
|
|
||||||
notify = None
|
|
||||||
|
|
||||||
def __init__(self, receivers=(), oneshot=True):
|
|
||||||
self._receivers = []
|
|
||||||
self._oneshot = oneshot
|
|
||||||
self._latch = mitogen.core.Latch()
|
|
||||||
for recv in receivers:
|
|
||||||
self.add(recv)
|
|
||||||
|
|
||||||
@classmethod
|
|
||||||
def all(cls, receivers):
|
|
||||||
"""
|
|
||||||
Take an iterable of receivers and retrieve a :class:`Message` from
|
|
||||||
each, returning the result of calling `msg.unpickle()` on each in turn.
|
|
||||||
Results are returned in the order they arrived.
|
|
||||||
|
|
||||||
This is sugar for handling batch :meth:`Context.call_async
|
|
||||||
<mitogen.parent.Context.call_async>` invocations:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
print('Total disk usage: %.02fMiB' % (sum(
|
|
||||||
mitogen.select.Select.all(
|
|
||||||
context.call_async(get_disk_usage)
|
|
||||||
for context in contexts
|
|
||||||
) / 1048576.0
|
|
||||||
),))
|
|
||||||
|
|
||||||
However, unlike in a naive comprehension such as:
|
|
||||||
|
|
||||||
.. code-block:: python
|
|
||||||
|
|
||||||
recvs = [c.call_async(get_disk_usage) for c in contexts]
|
|
||||||
sum(recv.get().unpickle() for recv in recvs)
|
|
||||||
|
|
||||||
Result processing happens in the order results arrive, rather than the
|
|
||||||
order requests were issued, so :meth:`all` should always be faster.
|
|
||||||
"""
|
|
||||||
return list(msg.unpickle() for msg in cls(receivers))
|
|
||||||
|
|
||||||
def _put(self, value):
|
|
||||||
self._latch.put(value)
|
|
||||||
if self.notify:
|
|
||||||
self.notify(self)
|
|
||||||
|
|
||||||
def __bool__(self):
|
|
||||||
"""
|
|
||||||
Return :data:`True` if any receivers are registered with this select.
|
|
||||||
"""
|
|
||||||
return bool(self._receivers)
|
|
||||||
|
|
||||||
__nonzero__ = __bool__
|
|
||||||
|
|
||||||
def __enter__(self):
|
|
||||||
return self
|
|
||||||
|
|
||||||
def __exit__(self, e_type, e_val, e_tb):
|
|
||||||
self.close()
|
|
||||||
|
|
||||||
def iter_data(self):
|
|
||||||
"""
|
|
||||||
Yield :attr:`Event.data` until no receivers remain in the select,
|
|
||||||
either because `oneshot` is :data:`True`, or each receiver was
|
|
||||||
explicitly removed via :meth:`remove`.
|
|
||||||
|
|
||||||
:meth:`__iter__` is an alias for :meth:`iter_data`, allowing loops
|
|
||||||
like::
|
|
||||||
|
|
||||||
for msg in Select([recv1, recv2]):
|
|
||||||
print msg.unpickle()
|
|
||||||
"""
|
|
||||||
while self._receivers:
|
|
||||||
yield self.get_event().data
|
|
||||||
|
|
||||||
__iter__ = iter_data
|
|
||||||
|
|
||||||
def iter_events(self):
|
|
||||||
"""
|
|
||||||
Yield :class:`Event` instances until no receivers remain in the select.
|
|
||||||
"""
|
|
||||||
while self._receivers:
|
|
||||||
yield self.get_event()
|
|
||||||
|
|
||||||
loop_msg = 'Adding this Select instance would create a Select cycle'
|
|
||||||
|
|
||||||
def _check_no_loop(self, recv):
|
|
||||||
if recv is self:
|
|
||||||
raise Error(self.loop_msg)
|
|
||||||
|
|
||||||
for recv_ in self._receivers:
|
|
||||||
if recv_ == recv:
|
|
||||||
raise Error(self.loop_msg)
|
|
||||||
if isinstance(recv_, Select):
|
|
||||||
recv_._check_no_loop(recv)
|
|
||||||
|
|
||||||
owned_msg = 'Cannot add: Receiver is already owned by another Select'
|
|
||||||
|
|
||||||
def add(self, recv):
|
|
||||||
"""
|
|
||||||
Add a :class:`mitogen.core.Receiver`, :class:`Select` or
|
|
||||||
:class:`mitogen.core.Latch` to the select.
|
|
||||||
|
|
||||||
:raises mitogen.select.Error:
|
|
||||||
An attempt was made to add a :class:`Select` to which this select
|
|
||||||
is indirectly a member of.
|
|
||||||
"""
|
|
||||||
if isinstance(recv, Select):
|
|
||||||
recv._check_no_loop(self)
|
|
||||||
|
|
||||||
self._receivers.append(recv)
|
|
||||||
if recv.notify is not None:
|
|
||||||
raise Error(self.owned_msg)
|
|
||||||
|
|
||||||
recv.notify = self._put
|
|
||||||
# Avoid race by polling once after installation.
|
|
||||||
if not recv.empty():
|
|
||||||
self._put(recv)
|
|
||||||
|
|
||||||
not_present_msg = 'Instance is not a member of this Select'
|
|
||||||
|
|
||||||
def remove(self, recv):
|
|
||||||
"""
|
|
||||||
Remove an object from from the select. Note that if the receiver has
|
|
||||||
notified prior to :meth:`remove`, it will still be returned by a
|
|
||||||
subsequent :meth:`get`. This may change in a future version.
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
if recv.notify != self._put:
|
|
||||||
raise ValueError
|
|
||||||
self._receivers.remove(recv)
|
|
||||||
recv.notify = None
|
|
||||||
except (IndexError, ValueError):
|
|
||||||
raise Error(self.not_present_msg)
|
|
||||||
|
|
||||||
def close(self):
|
|
||||||
"""
|
|
||||||
Remove the select's notifier function from each registered receiver,
|
|
||||||
mark the associated latch as closed, and cause any thread currently
|
|
||||||
sleeping in :meth:`get` to be woken with
|
|
||||||
:class:`mitogen.core.LatchError`.
|
|
||||||
|
|
||||||
This is necessary to prevent memory leaks in long-running receivers. It
|
|
||||||
is called automatically when the Python :keyword:`with` statement is
|
|
||||||
used.
|
|
||||||
"""
|
|
||||||
for recv in self._receivers[:]:
|
|
||||||
self.remove(recv)
|
|
||||||
self._latch.close()
|
|
||||||
|
|
||||||
def empty(self):
    """
    Report whether a call to :meth:`get` would block.

    Mirroring :class:`Queue.Queue`, a :data:`True` result is advisory only:
    a message may be posted at any moment between :meth:`empty` and
    :meth:`get`. Conversely :data:`False` may be returned while :meth:`get`
    would still block, if another thread drained a receiver added to this
    select; consuming each receiver from a single thread avoids this.
    """
    latch = self._latch
    return latch.empty()
|
|
||||||
|
|
||||||
empty_msg = 'Cannot get(), Select instance is empty'

def get(self, timeout=None, block=True):
    """
    Shorthand for ``get_event(timeout, block).data``: wait for the next
    available event and return only its payload.
    """
    event = self.get_event(timeout, block)
    return event.data
|
|
||||||
|
|
||||||
def get_event(self, timeout=None, block=True):
    """
    Fetch the next available :class:`Event` from any source, or raise
    :class:`mitogen.core.TimeoutError` if no value is available within
    `timeout` seconds.

    On success, the message's :attr:`receiver
    <mitogen.core.Message.receiver>` attribute is set to the receiver.

    :param float timeout:
        Timeout in seconds.
    :param bool block:
        If :data:`False`, immediately raise
        :class:`mitogen.core.TimeoutError` if the select is empty.
    :return:
        :class:`Event`.
    :raises mitogen.core.TimeoutError:
        Timeout was reached.
    :raises mitogen.core.LatchError:
        :meth:`close` has been called, and the underlying latch is no
        longer valid.
    """
    # A select with no receivers could never be satisfied; fail fast
    # rather than sleeping on the latch forever.
    if not self._receivers:
        raise Error(self.empty_msg)

    event = Event()
    while True:
        # _put() enqueues the notifying receiver (or child Select) itself;
        # the actual payload is drained from it non-blockingly below.
        recv = self._latch.get(timeout=timeout, block=block)
        try:
            if isinstance(recv, Select):
                # Nested select: delegate, adopting its populated event.
                event = recv.get_event(block=False)
            else:
                event.source = recv
                event.data = recv.get(block=False)
            if self._oneshot:
                # Oneshot mode: detach the receiver after its first result.
                self.remove(recv)
            if isinstance(recv, mitogen.core.Receiver):
                # Remove in 0.3.x.
                event.data.receiver = recv
            return event
        except mitogen.core.TimeoutError:
            # A receiver may have been queued with no result if another
            # thread drained it before we woke up, or because another
            # thread drained it between add() calling recv.empty() and
            # self._put(). In this case just sleep again.
            continue
|
|
File diff suppressed because it is too large
Load diff
|
@ -1,238 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import ctypes
|
|
||||||
import grp
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import pwd
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)

# Handle to the process's own C library, with errno capture enabled so that
# failed setns(2) calls can be reported accurately.
LIBC = ctypes.CDLL(None, use_errno=True)
LIBC__strerror = LIBC.strerror
# strerror() returns a C string; declare restype so ctypes converts it.
LIBC__strerror.restype = ctypes.c_char_p


class Error(mitogen.core.StreamError):
    """
    Raised for any connection failure specific to this module.
    """
    pass
|
|
||||||
|
|
||||||
|
|
||||||
def setns(kind, fd):
    """
    Join the namespace referenced by the open file descriptor `fd` via
    setns(2). `kind` names the namespace type and is used only in the
    error message.

    :raises OSError:
        The underlying setns(2) call failed.
    """
    # nstype=0: the kernel does not verify the namespace type of `fd`.
    if LIBC.setns(int(fd), 0) == -1:
        errno = ctypes.get_errno()
        # NOTE(review): LIBC__strerror() returns bytes on Python 3, so %s
        # may render as b'...' here — confirm before depending on the
        # exact message text.
        msg = 'setns(%s, %s): %s' % (fd, kind, LIBC__strerror(errno))
        raise OSError(errno, msg)
|
|
||||||
|
|
||||||
|
|
||||||
def _run_command(args):
    """
    Execute `args` as a subprocess with stderr folded into stdout, and
    return the decoded output on success.

    :param list args:
        Argument vector for :class:`subprocess.Popen`.
    :returns:
        Output decoded as UTF-8 with undecodable bytes replaced.
    :raises Error:
        The command could not be started, or exited with nonzero status.
    """
    argv = mitogen.parent.Argv(args)
    try:
        proc = subprocess.Popen(
            args=args,
            stdout=subprocess.PIPE,
            stderr=subprocess.STDOUT
        )
    except OSError:
        e = sys.exc_info()[1]
        raise Error('could not execute %s: %s', argv, e)

    output, _ = proc.communicate()
    if not proc.returncode:
        return output.decode('utf-8', 'replace')

    # Reuse the Argv wrapper built above rather than constructing a second
    # one; also fixes the "exitted" typo in the original message.
    raise Error("%s exited with status %d: %s",
                argv, proc.returncode, output)
|
|
||||||
|
|
||||||
|
|
||||||
def get_docker_pid(path, name):
    """
    Return the PID of the first process of Docker container `name`, as
    reported by the `docker` binary at `path`.

    :raises Error:
        The inspect output could not be parsed as an integer.
    """
    inspect_cmd = [path, 'inspect', '--format={{.State.Pid}}', name]
    output = _run_command(inspect_cmd)
    try:
        pid = int(output)
    except ValueError:
        raise Error("could not find PID from docker output.\n%s", output)
    return pid
|
|
||||||
|
|
||||||
|
|
||||||
def get_lxc_pid(path, name):
    """
    Return the PID of the init process of classic LXC container `name`,
    parsed from the output of the `lxc-info` binary at `path`.

    :raises Error:
        No 'PID:' line was present in the output.
    """
    info = _run_command([path, '-n', name])
    for record in info.splitlines():
        fields = record.split()
        if fields[:1] == ['PID:']:
            return int(fields[1])

    raise Error("could not find PID from lxc-info output.\n%s", info)
|
|
||||||
|
|
||||||
|
|
||||||
def get_lxd_pid(path, name):
    """
    Return the PID of the init process of LXD container `name`, parsed
    from the output of `<path> info <name>`.

    :raises Error:
        No 'Pid:' line was present in the output.
    """
    output = _run_command([path, 'info', name])
    for line in output.splitlines():
        bits = line.split()
        if bits and bits[0] == 'Pid:':
            return int(bits[1])

    # Previously blamed "lxc output", obscuring which helper failed; this
    # is the LXD lookup.
    raise Error("could not find PID from lxd output.\n%s", output)
|
|
||||||
|
|
||||||
|
|
||||||
def get_machinectl_pid(path, name):
    """
    Return the leader PID of the systemd-machined container `name`, parsed
    from the output of the `machinectl` binary at `path`.

    :raises Error:
        No 'Leader:' line was present in the output.
    """
    status = _run_command([path, 'status', name])
    for record in status.splitlines():
        fields = record.split()
        if fields[:1] == ['Leader:']:
            return int(fields[1])

    raise Error("could not find PID from machinectl output.\n%s", status)
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Connection type that migrates the forked child into the namespaces and
    root filesystem of a local container's leader process, optionally
    dropping privilege to `username` before bootstrapping.
    """

    # The child is launched via /bin/sh (see get_boot_command), so it is
    # not an immediate subprocess of this process.
    child_is_immediate_subprocess = False

    #: Name of the target container.
    container = None
    #: Account to switch to inside the container.
    username = 'root'
    #: Container runtime; must be a key of GET_LEADER_BY_KIND.
    kind = None
    python_path = 'python'
    docker_path = 'docker'
    lxc_path = 'lxc'
    lxc_info_path = 'lxc-info'
    machinectl_path = 'machinectl'

    # Maps kind -> (attribute holding the tool path, leader PID lookup
    # function defined above).
    GET_LEADER_BY_KIND = {
        'docker': ('docker_path', get_docker_pid),
        'lxc': ('lxc_info_path', get_lxc_pid),
        'lxd': ('lxc_path', get_lxd_pid),
        'machinectl': ('machinectl_path', get_machinectl_pid),
    }

    def construct(self, container, kind, username=None, docker_path=None,
                  lxc_path=None, lxc_info_path=None, machinectl_path=None,
                  **kwargs):
        """
        Validate `kind` and record the connection parameters; optional
        arguments that are falsy keep the class-level defaults.

        :raises Error:
            `kind` is not a supported container runtime.
        """
        super(Stream, self).construct(**kwargs)
        if kind not in self.GET_LEADER_BY_KIND:
            raise Error('unsupported container kind: %r', kind)

        self.container = container
        self.kind = kind
        if username:
            self.username = username
        if docker_path:
            self.docker_path = docker_path
        if lxc_path:
            self.lxc_path = lxc_path
        if lxc_info_path:
            self.lxc_info_path = lxc_info_path
        if machinectl_path:
            self.machinectl_path = machinectl_path

    # Order matters. https://github.com/karelzak/util-linux/commit/854d0fe/
    NS_ORDER = ('ipc', 'uts', 'net', 'pid', 'mnt', 'user')

    def preexec_fn(self):
        """
        Run in the forked child prior to exec: chroot into the leader's
        root filesystem, join its differing namespaces in NS_ORDER, then
        switch credentials and environment to the target user.

        :raises Error:
            Any failure while opening namespaces or switching user.
        """
        nspath = '/proc/%d/ns/' % (self.leader_pid,)
        selfpath = '/proc/self/ns/'
        try:
            # Open only namespaces that differ from our own; identical
            # links mean the namespace is already shared.
            ns_fps = [
                open(nspath + name)
                for name in self.NS_ORDER
                if os.path.exists(nspath + name) and (
                    os.readlink(nspath + name) != os.readlink(selfpath + name)
                )
            ]
        except Exception:
            e = sys.exc_info()[1]
            raise Error(str(e))

        # Enter the leader's root filesystem before joining namespaces.
        os.chdir('/proc/%s/root' % (self.leader_pid,))
        os.chroot('.')
        os.chdir('/')
        for fp in ns_fps:
            setns(fp.name, fp.fileno())
            fp.close()

        # Discard account-database state cached from the parent's view.
        for sym in 'endpwent', 'endgrent', 'endspent', 'endsgent':
            try:
                getattr(LIBC, sym)()
            except AttributeError:
                # Not every libc exposes the shadow/gshadow variants.
                pass

        try:
            os.setgroups([grent.gr_gid
                          for grent in grp.getgrall()
                          if self.username in grent.gr_mem])
            pwent = pwd.getpwnam(self.username)
            os.setreuid(pwent.pw_uid, pwent.pw_uid)
            # shadow-4.4/libmisc/setupenv.c. Not done: MAIL, PATH
            os.environ.update({
                'HOME': pwent.pw_dir,
                'SHELL': pwent.pw_shell or '/bin/sh',
                'LOGNAME': self.username,
                'USER': self.username,
            })
            if ((os.path.exists(pwent.pw_dir) and
                    os.access(pwent.pw_dir, os.X_OK))):
                os.chdir(pwent.pw_dir)
        except Exception:
            e = sys.exc_info()[1]
            raise Error(self.username_msg, self.username, self.container,
                        type(e).__name__, e)

    username_msg = 'while transitioning to user %r in container %r: %s: %s'

    def get_boot_command(self):
        """
        Wrap the base bootstrap command in a `/bin/sh -c` invocation.
        """
        # With setns(CLONE_NEWPID), new children of the caller receive a new
        # PID namespace, however the caller's namespace won't change. That
        # causes subsequent calls to clone() specifying CLONE_THREAD to fail
        # with EINVAL, as threads in the same process can't have varying PID
        # namespaces, meaning starting new threads in the exec'd program will
        # fail. The solution is forking, so inject a /bin/sh call to achieve
        # this.
        argv = super(Stream, self).get_boot_command()
        # bash will exec() if a single command was specified and the shell has
        # nothing left to do, so "; exit $?" gives bash a reason to live.
        return ['/bin/sh', '-c', '%s; exit $?' % (mitogen.parent.Argv(argv),)]

    def create_child(self, args):
        # preexec_fn runs between fork and exec to perform the migration.
        return mitogen.parent.create_child(args, preexec_fn=self.preexec_fn)

    def _get_name(self):
        # Stream name shown in logs, e.g. 'setns.mycontainer'.
        return u'setns.' + self.container

    def connect(self):
        """
        Resolve the container leader's PID with the configured tool, then
        continue with the regular parent connection sequence.
        """
        self.name = self._get_name()
        attr, func = self.GET_LEADER_BY_KIND[self.kind]
        tool_path = getattr(self, attr)
        self.leader_pid = func(tool_path, self.container)
        LOG.debug('Leader PID for %s container %r: %d',
                  self.kind, self.container, self.leader_pid)
        super(Stream, self).connect()
|
|
|
@ -1,317 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
"""
|
|
||||||
Functionality to allow establishing new slave contexts over an SSH connection.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import re
|
|
||||||
|
|
||||||
try:
|
|
||||||
from shlex import quote as shlex_quote
|
|
||||||
except ImportError:
|
|
||||||
from pipes import quote as shlex_quote
|
|
||||||
|
|
||||||
import mitogen.parent
|
|
||||||
from mitogen.core import b
|
|
||||||
from mitogen.core import bytes_partition
|
|
||||||
|
|
||||||
try:
|
|
||||||
any
|
|
||||||
except NameError:
|
|
||||||
from mitogen.core import any
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger('mitogen')

# sshpass uses 'assword' because it doesn't lowercase the input.
PASSWORD_PROMPT = b('password')
# Lowercased fragments matched against ssh's interactive output.
HOSTKEY_REQ_PROMPT = b('are you sure you want to continue connecting (yes/no)?')
HOSTKEY_FAIL = b('host key verification failed.')

# [user@host: ] permission denied
PERMDENIED_RE = re.compile(
    ('(?:[^@]+@[^:]+: )?'  # Absent in OpenSSH <7.5
     'Permission denied').encode(),
    re.I
)


# Prefixes of ssh -v/-vv/-vvv diagnostic lines, consumed by filter_debug().
DEBUG_PREFIXES = (b('debug1:'), b('debug2:'), b('debug3:'))
|
|
||||||
|
|
||||||
|
|
||||||
def filter_debug(stream, it):
    """
    Read line chunks from it, either yielding them directly, or building up and
    logging individual lines if they look like SSH debug output.

    This contains the mess of dealing with both line-oriented input, and partial
    lines such as the password prompt.

    Yields `(line, partial)` tuples, where `line` is the line, `partial` is
    :data:`True` if no terminating newline character was present and no more
    data exists in the read buffer. Consuming code can use this to unreliably
    detect the presence of an interactive prompt.
    """
    # The `partial` test is unreliable, but is only problematic when verbosity
    # is enabled: it's possible for a combination of SSH banner, password
    # prompt, verbose output, timing and OS buffering specifics to create a
    # situation where an otherwise newline-terminated line appears to not be
    # terminated, due to a partial read(). If something is broken when
    # ssh_debug_level>0, this is the first place to look.
    # Three-state machine: 'start_of_line' classifies the coming line as
    # debug noise or real output; the other two states consume it.
    state = 'start_of_line'
    buf = b('')
    for chunk in it:
        buf += chunk
        while buf:
            if state == 'start_of_line':
                if len(buf) < 8:
                    # short read near buffer limit, block awaiting at least 8
                    # bytes so we can discern a debug line, or the minimum
                    # interesting token from above or the bootstrap
                    # ('password', 'MITO000\n').
                    break
                elif any(buf.startswith(p) for p in DEBUG_PREFIXES):
                    state = 'in_debug'
                else:
                    state = 'in_plain'
            elif state == 'in_debug':
                # Hold the chunk until the debug line completes, then log it
                # instead of yielding it to the caller.
                if b('\n') not in buf:
                    break
                line, _, buf = bytes_partition(buf, b('\n'))
                LOG.debug('%s: %s', stream.name,
                          mitogen.core.to_text(line.rstrip()))
                state = 'start_of_line'
            elif state == 'in_plain':
                # Forward real output, flagging prompt-like partial lines.
                line, nl, buf = bytes_partition(buf, b('\n'))
                yield line + nl, not (nl or buf)
                if nl:
                    state = 'start_of_line'
|
|
||||||
|
|
||||||
class PasswordError(mitogen.core.StreamError):
    """
    Raised when SSH authentication fails: a password was required but not
    supplied, or the supplied credentials were rejected.
    """
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class HostKeyError(mitogen.core.StreamError):
    """
    Raised when host key verification fails, or ssh prompts for host key
    acceptance in a configuration where that should not happen.
    """
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Construct a child context over the `ssh` command line tool, driving its
    interactive output (host key confirmation, password prompt) through a
    diagnostic stream when the configuration requires a PTY.
    """

    child_is_immediate_subprocess = False

    #: Default to whatever is available as 'python' on the remote machine,
    #: overriding sys.executable use.
    python_path = 'python'

    #: Number of -v invocations to pass on command line.
    ssh_debug_level = 0

    #: The path to the SSH binary.
    ssh_path = 'ssh'

    hostname = None
    username = None
    port = None

    identity_file = None
    password = None
    ssh_args = None

    check_host_keys_msg = 'check_host_keys= must be set to accept, enforce or ignore'

    def construct(self, hostname, username=None, ssh_path=None, port=None,
                  check_host_keys='enforce', password=None, identity_file=None,
                  compression=True, ssh_args=None, keepalive_enabled=True,
                  keepalive_count=3, keepalive_interval=15,
                  identities_only=True, ssh_debug_level=None, **kwargs):
        """
        Record the SSH connection configuration; falsy optional values
        keep the class-level defaults.

        :raises ValueError:
            `check_host_keys` is not one of accept/enforce/ignore.
        """
        super(Stream, self).construct(**kwargs)
        if check_host_keys not in ('accept', 'enforce', 'ignore'):
            raise ValueError(self.check_host_keys_msg)

        self.hostname = hostname
        self.username = username
        self.port = port
        self.check_host_keys = check_host_keys
        self.password = password
        self.identity_file = identity_file
        self.identities_only = identities_only
        self.compression = compression
        self.keepalive_enabled = keepalive_enabled
        self.keepalive_count = keepalive_count
        self.keepalive_interval = keepalive_interval
        if ssh_path:
            self.ssh_path = ssh_path
        if ssh_args:
            self.ssh_args = ssh_args
        if ssh_debug_level:
            self.ssh_debug_level = ssh_debug_level

        self._init_create_child()

    def _requires_pty(self):
        """
        Return :data:`True` if the configuration requires a PTY to be
        allocated. This is only true if we must interactively accept host keys,
        or type a password.
        """
        return (self.check_host_keys == 'accept' or
                self.password is not None)

    def _init_create_child(self):
        """
        Initialize the base class :attr:`create_child` and
        :attr:`create_child_args` according to whether we need a PTY or not.
        """
        if self._requires_pty():
            self.create_child = mitogen.parent.hybrid_tty_create_child
        else:
            self.create_child = mitogen.parent.create_child
            self.create_child_args = {
                'stderr_pipe': True,
            }

    def get_boot_command(self):
        """
        Assemble the full `ssh` argument vector, appending the shell-quoted
        base bootstrap command produced by the superclass.
        """
        bits = [self.ssh_path]
        if self.ssh_debug_level:
            # ssh accepts at most -vvv.
            bits += ['-' + ('v' * min(3, self.ssh_debug_level))]
        else:
            # issue #307: suppress any login banner, as it may contain the
            # password prompt, and there is no robust way to tell the
            # difference.
            bits += ['-o', 'LogLevel ERROR']
        if self.username:
            bits += ['-l', self.username]
        if self.port is not None:
            bits += ['-p', str(self.port)]
        if self.identities_only and (self.identity_file or self.password):
            bits += ['-o', 'IdentitiesOnly yes']
        if self.identity_file:
            bits += ['-i', self.identity_file]
        if self.compression:
            bits += ['-o', 'Compression yes']
        if self.keepalive_enabled:
            bits += [
                '-o', 'ServerAliveInterval %s' % (self.keepalive_interval,),
                '-o', 'ServerAliveCountMax %s' % (self.keepalive_count,),
            ]
        if not self._requires_pty():
            # No interaction is possible, so fail rather than prompt.
            bits += ['-o', 'BatchMode yes']
        if self.check_host_keys == 'enforce':
            bits += ['-o', 'StrictHostKeyChecking yes']
        if self.check_host_keys == 'accept':
            bits += ['-o', 'StrictHostKeyChecking ask']
        elif self.check_host_keys == 'ignore':
            bits += [
                '-o', 'StrictHostKeyChecking no',
                '-o', 'UserKnownHostsFile /dev/null',
                '-o', 'GlobalKnownHostsFile /dev/null',
            ]
        if self.ssh_args:
            # User-supplied arguments come last so they take precedence.
            bits += self.ssh_args
        bits.append(self.hostname)
        base = super(Stream, self).get_boot_command()
        return bits + [shlex_quote(s).strip() for s in base]

    def _get_name(self):
        # Stream name shown in logs, e.g. 'ssh.host:2222'.
        s = u'ssh.' + mitogen.core.to_text(self.hostname)
        if self.port:
            s += u':%s' % (self.port,)
        return s

    auth_incorrect_msg = 'SSH authentication is incorrect'
    password_incorrect_msg = 'SSH password is incorrect'
    password_required_msg = 'SSH password was requested, but none specified'
    hostkey_config_msg = (
        'SSH requested permission to accept unknown host key, but '
        'check_host_keys=ignore. This is likely due to ssh_args= '
        'conflicting with check_host_keys=. Please correct your '
        'configuration.'
    )
    hostkey_failed_msg = (
        'Host key checking is enabled, and SSH reported an unrecognized or '
        'mismatching host key.'
    )

    def _host_key_prompt(self):
        """
        Respond to ssh's interactive host key confirmation prompt.

        :raises HostKeyError:
            The prompt appeared despite check_host_keys not being 'accept'.
        """
        if self.check_host_keys == 'accept':
            LOG.debug('%s: accepting host key', self.name)
            self.diag_stream.transmit_side.write(b('yes\n'))
            return

        # _host_key_prompt() should never be reached with ignore or enforce
        # mode, SSH should have handled that. User's ssh_args= is conflicting
        # with ours.
        raise HostKeyError(self.hostkey_config_msg)

    def _connect_input_loop(self, it):
        """
        Scan ssh output until the bootstrap marker appears, answering host
        key and password prompts along the way.

        :raises HostKeyError:
        :raises PasswordError:
        :raises mitogen.core.StreamError:
            Output ended without the bootstrap marker appearing.
        """
        password_sent = False
        for buf, partial in filter_debug(self, it):
            LOG.debug('%s: stdout: %s', self.name, buf.rstrip())
            if buf.endswith(self.EC0_MARKER):
                self._ec0_received()
                return
            elif HOSTKEY_REQ_PROMPT in buf.lower():
                self._host_key_prompt()
            elif HOSTKEY_FAIL in buf.lower():
                raise HostKeyError(self.hostkey_failed_msg)
            elif PERMDENIED_RE.match(buf):
                # issue #271: work around conflict with user shell reporting
                # 'permission denied' e.g. during chdir($HOME) by only matching
                # it at the start of the line.
                if self.password is not None and password_sent:
                    raise PasswordError(self.password_incorrect_msg)
                elif PASSWORD_PROMPT in buf and self.password is None:
                    # Permission denied (password,pubkey)
                    # NOTE(review): unlike the prompt check below, this test
                    # does not lowercase `buf` — confirm the case-sensitivity
                    # is intentional.
                    raise PasswordError(self.password_required_msg)
                else:
                    raise PasswordError(self.auth_incorrect_msg)
            elif partial and PASSWORD_PROMPT in buf.lower():
                if self.password is None:
                    raise PasswordError(self.password_required_msg)
                LOG.debug('%s: sending password', self.name)
                self.diag_stream.transmit_side.write(
                    (self.password + '\n').encode()
                )
                password_sent = True

        raise mitogen.core.StreamError('bootstrap failed')

    def _connect_bootstrap(self):
        """
        Watch the stdout stream and, when present, the diagnostic stream
        until bootstrap completes, closing the reader on exit.
        """
        fds = [self.receive_side.fd]
        if self.diag_stream is not None:
            fds.append(self.diag_stream.receive_side.fd)

        it = mitogen.parent.iter_read(fds=fds, deadline=self.connect_deadline)
        try:
            self._connect_input_loop(it)
        finally:
            it.close()
|
|
|
@ -1,128 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import logging
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
from mitogen.core import b
|
|
||||||
|
|
||||||
try:
|
|
||||||
any
|
|
||||||
except NameError:
|
|
||||||
from mitogen.core import any
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class PasswordError(mitogen.core.StreamError):
    """
    Raised when su requires a password that is missing, or rejected the
    password that was supplied.
    """
    pass
|
|
||||||
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Construct a child context over the `su` command line tool, answering
    its password prompt through the stream's TTY.
    """

    # TODO: BSD su cannot handle stdin being a socketpair, but it does let the
    # child inherit fds from the parent. So we can still pass a socketpair in
    # for hybrid_tty_create_child(), there just needs to be either a shell
    # snippet or bootstrap support for fixing things up afterwards.
    create_child = staticmethod(mitogen.parent.tty_create_child)
    child_is_immediate_subprocess = False

    #: Once connected, points to the corresponding DiagLogStream, allowing it to
    #: be disconnected at the same time this stream is being torn down.

    username = 'root'
    password = None
    su_path = 'su'
    # Lowercased fragment announcing su's password prompt.
    password_prompt = b('password:')
    # Lowercased fragments indicating authentication failure, per platform.
    incorrect_prompts = (
        b('su: sorry'),                    # BSD
        b('su: authentication failure'),   # Linux
        b('su: incorrect password'),       # CentOS 6
        b('authentication is denied'),     # AIX
    )

    def construct(self, username=None, password=None, su_path=None,
                  password_prompt=None, incorrect_prompts=None, **kwargs):
        """
        Record su connection parameters; arguments left as :data:`None`
        keep the class-level defaults.
        """
        super(Stream, self).construct(**kwargs)
        if username is not None:
            self.username = username
        if password is not None:
            self.password = password
        if su_path is not None:
            self.su_path = su_path
        if password_prompt is not None:
            self.password_prompt = password_prompt.lower()
        if incorrect_prompts is not None:
            # NOTE(review): on Python 3 map() yields a one-shot iterator, so
            # the any() scan in _connect_input_loop would see an exhausted
            # sequence after its first use; str.lower also fails on bytes
            # prompts. Confirm intended types/behavior before relying on
            # custom incorrect_prompts.
            self.incorrect_prompts = map(str.lower, incorrect_prompts)

    def _get_name(self):
        # Stream name shown in logs, e.g. 'su.root'.
        return u'su.' + mitogen.core.to_text(self.username)

    def get_boot_command(self):
        """
        Wrap the base bootstrap command in `su <username> -c ...`.
        """
        argv = mitogen.parent.Argv(super(Stream, self).get_boot_command())
        return [self.su_path, self.username, '-c', str(argv)]

    password_incorrect_msg = 'su password is incorrect'
    password_required_msg = 'su password is required'

    def _connect_input_loop(self, it):
        """
        Scan su's output until the bootstrap marker appears, supplying the
        password at most once when prompted.

        :raises PasswordError:
            The password was missing or rejected.
        :raises mitogen.core.StreamError:
            Output ended without the bootstrap marker appearing.
        """
        password_sent = False

        for buf in it:
            LOG.debug('%r: received %r', self, buf)
            if buf.endswith(self.EC0_MARKER):
                self._ec0_received()
                return
            if any(s in buf.lower() for s in self.incorrect_prompts):
                # A failure banner seen before any password was sent is
                # ignored here; only a post-password failure is fatal.
                if password_sent:
                    raise PasswordError(self.password_incorrect_msg)
            elif self.password_prompt in buf.lower():
                if self.password is None:
                    raise PasswordError(self.password_required_msg)
                if password_sent:
                    # A second prompt means the first password was rejected.
                    raise PasswordError(self.password_incorrect_msg)
                LOG.debug('sending password')
                self.transmit_side.write(
                    mitogen.core.to_text(self.password + '\n').encode('utf-8')
                )
                password_sent = True

        raise mitogen.core.StreamError('bootstrap failed')

    def _connect_bootstrap(self):
        """
        Drive _connect_input_loop() over the stream's read side, closing
        the reader on exit.
        """
        it = mitogen.parent.iter_read(
            fds=[self.receive_side.fd],
            deadline=self.connect_deadline,
        )
        try:
            self._connect_input_loop(it)
        finally:
            it.close()
|
|
|
@ -1,277 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import base64
|
|
||||||
import logging
|
|
||||||
import optparse
|
|
||||||
import re
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.parent
|
|
||||||
from mitogen.core import b
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# These are base64-encoded UTF-8 as our existing minifier/module server
# struggles with Unicode Python source in some (forgotten) circumstances.
# Each entry is the localized sudo password prompt word from the named
# translation catalogue.
PASSWORD_PROMPTS = [
    'cGFzc3dvcmQ=',  # english
    'bG96aW5rYQ==',  # sr@latin.po
    '44OR44K544Ov44O844OJ',  # ja.po
    '4Kaq4Ka+4Ka44KaT4Kef4Ka+4Kaw4KeN4Kah',  # bn.po
    '2YPZhNmF2Kkg2KfZhNiz2LE=',  # ar.po
    'cGFzYWhpdHph',  # eu.po
    '0L/QsNGA0L7Qu9GM',  # uk.po
    'cGFyb29s',  # et.po
    'c2FsYXNhbmE=',  # fi.po
    '4Kiq4Ki+4Ki44Ki14Kiw4Kih',  # pa.po
    'Y29udHJhc2lnbm8=',  # ia.po
    'Zm9jYWwgZmFpcmU=',  # ga.po
    '16HXodee15Q=',  # he.po
    '4Kqq4Kq+4Kq44Kq14Kqw4KuN4Kqh',  # gu.po
    '0L/QsNGA0L7Qu9Cw',  # bg.po
    '4Kyq4K2N4Kyw4Kys4K2H4Ky2IOCsuOCsmeCtjeCsleCth+CspA==',  # or.po
    '4K6V4K6f4K614K+B4K6a4K+N4K6a4K+K4K6y4K+N',  # ta.po
    'cGFzc3dvcnQ=',  # de.po
    '7JWU7Zi4',  # ko.po
    '0LvQvtC30LjQvdC60LA=',  # sr.po
    'beG6rXQga2jhuql1',  # vi.po
    'c2VuaGE=',  # pt_BR.po
    'cGFzc3dvcmQ=',  # it.po
    'aGVzbG8=',  # cs.po
    '5a+G56K877ya',  # zh_TW.po
    'aGVzbG8=',  # sk.po
    '4LC44LCC4LCV4LGH4LCk4LCq4LCm4LCu4LGB',  # te.po
    '0L/QsNGA0L7Qu9GM',  # kk.po
    'aGFzxYJv',  # pl.po
    'Y29udHJhc2VueWE=',  # ca.po
    'Y29udHJhc2XDsWE=',  # es.po
    '4LSF4LSf4LSv4LS+4LSz4LS14LS+4LSV4LWN4LSV4LWN',  # ml.po
    'c2VuaGE=',  # pt.po
    '5a+G56CB77ya',  # zh_CN.po
    '4KSX4KWB4KSq4KWN4KSk4KS24KSs4KWN4KSm',  # mr.po
    'bMO2c2Vub3Jk',  # sv.po
    '4YOe4YOQ4YOg4YOd4YOa4YOY',  # ka.po
    '4KS24KSs4KWN4KSm4KSV4KWC4KSf',  # hi.po
    'YWRnYW5nc2tvZGU=',  # da.po
    '4La74LeE4LeD4LeK4La04Lav4La6',  # si.po
    'cGFzc29yZA==',  # nb.po
    'd2FjaHR3b29yZA==',  # nl.po
    '4Kaq4Ka+4Ka44KaT4Kef4Ka+4Kaw4KeN4Kah',  # bn_IN.po
    'cGFyb2xh',  # tr.po
    '4LKX4LOB4LKq4LON4LKk4LKq4LKm',  # kn.po
    'c2FuZGk=',  # id.po
    '0L/QsNGA0L7Qu9GM',  # ru.po
    'amVsc3rDsw==',  # hu.po
    'bW90IGRlIHBhc3Nl',  # fr.po
    'aXBoYXNpd2VkaQ==',  # zu.po
    '4Z6W4Z624Z6A4Z+S4Z6Z4Z6f4Z6Y4Z+S4Z6E4Z624Z6P4Z+LwqDhn5Y=',  # km.po
    '4KaX4KeB4Kaq4KeN4Kak4Ka24Kas4KeN4Kam',  # as.po
]


# Alternation regex matching any of the decoded prompts; applied against the
# child's lower-cased output to detect a password request in any locale.
PASSWORD_PROMPT_RE = re.compile(
    u'|'.join(
        base64.b64decode(s).decode('utf-8')
        for s in PASSWORD_PROMPTS
    )
)
|
|
||||||
|
|
||||||
# Byte-string form of the English prompt word.
PASSWORD_PROMPT = b('password')

# (supported, kind, longopt, shortopt) descriptions of sudo's command-line
# options. ``kind`` is 'bool' for flags and 'str' for value-taking options;
# unsupported options are left commented for reference.
SUDO_OPTIONS = [
    #(False, 'bool', '--askpass', '-A')
    #(False, 'str', '--auth-type', '-a')
    #(False, 'bool', '--background', '-b')
    #(False, 'str', '--close-from', '-C')
    #(False, 'str', '--login-class', 'c')
    (True, 'bool', '--preserve-env', '-E'),
    #(False, 'bool', '--edit', '-e')
    #(False, 'str', '--group', '-g')
    (True, 'bool', '--set-home', '-H'),
    #(False, 'str', '--host', '-h')
    (False, 'bool', '--login', '-i'),
    #(False, 'bool', '--remove-timestamp', '-K')
    #(False, 'bool', '--reset-timestamp', '-k')
    #(False, 'bool', '--list', '-l')
    #(False, 'bool', '--preserve-groups', '-P')
    #(False, 'str', '--prompt', '-p')

    # SELinux options. Passed through as-is.
    (False, 'str', '--role', '-r'),
    (False, 'str', '--type', '-t'),

    # These options are supplied by default by Ansible, but are ignored, as
    # sudo always runs under a TTY with Mitogen.
    (True, 'bool', '--stdin', '-S'),
    (True, 'bool', '--non-interactive', '-n'),

    #(False, 'str', '--shell', '-s')
    #(False, 'str', '--other-user', '-U')
    (False, 'str', '--user', '-u'),
    #(False, 'bool', '--version', '-V')
    #(False, 'bool', '--validate', '-v')
]
|
||||||
|
|
||||||
class OptionParser(optparse.OptionParser):
    """
    optparse.OptionParser subclass whose exit paths raise
    :class:`mitogen.core.StreamError` instead of printing to stdout/stderr
    and calling :func:`sys.exit`, since parsing happens deep inside
    connection setup rather than in a command-line program.
    """
    def help(self):
        # NOTE(review): optparse's built-in -h handler calls print_help(),
        # not help(); this override looks defensive — confirm intent.
        self.exit()

    def error(self, msg):
        # Called by optparse for any user error; route through exit().
        self.exit(msg=msg)

    def exit(self, status=0, msg=None):
        msg = 'sudo: ' + (msg or 'unsupported option')
        raise mitogen.core.StreamError(msg)
|
|
||||||
|
|
||||||
def make_sudo_parser():
    """Construct an :class:`OptionParser` mirroring sudo's option surface."""
    parser = OptionParser()
    for _supported, kind, longopt, shortopt in SUDO_OPTIONS:
        # Flags become store_true booleans; everything else takes a value.
        extra = {'action': 'store_true'} if kind == 'bool' else {}
        parser.add_option(longopt, shortopt, **extra)
    return parser
|
|
||||||
|
|
||||||
def parse_sudo_flags(args):
    """
    Parse a list of sudo command-line flags, raising
    :class:`mitogen.core.StreamError` if any unsupported positional
    arguments remain.
    """
    opts, remainder = make_sudo_parser().parse_args(args)
    if remainder:
        raise mitogen.core.StreamError(
            'unsupported sudo arguments:' + str(remainder)
        )
    return opts
|
|
||||||
|
|
||||||
class PasswordError(mitogen.core.StreamError):
    """Raised when sudo needs a password that is missing or incorrect."""
    pass
|
|
||||||
|
|
||||||
def option(default, *args):
    """
    Return the first argument that is not :data:`None`, or *default* if
    every argument is :data:`None`.
    """
    return next((arg for arg in args if arg is not None), default)
|
|
||||||
|
|
||||||
class Stream(mitogen.parent.Stream):
    """
    Stream that starts the child under sudo(1), answering sudo's password
    prompt on the diagnostic (PTY) stream if one appears.
    """
    # Hybrid TTY: bootstrap IO over pipes, sudo's prompt over a PTY.
    create_child = staticmethod(mitogen.parent.hybrid_tty_create_child)
    child_is_immediate_subprocess = False

    # Defaults; each may be overridden via construct().
    sudo_path = 'sudo'      # Path to the sudo binary.
    username = 'root'       # Target account (-u).
    password = None         # Password to supply, or None.
    preserve_env = False    # -E flag.
    set_home = False        # -H flag.
    login = False           # -i flag.

    # SELinux context options, passed through as sudo -r / -t.
    selinux_role = None
    selinux_type = None

    def construct(self, username=None, sudo_path=None, password=None,
                  preserve_env=None, set_home=None, sudo_args=None,
                  login=None, selinux_role=None, selinux_type=None, **kwargs):
        """
        Merge explicit keyword arguments with flags parsed out of
        *sudo_args*: explicit arguments win, then parsed flags, then the
        class defaults (see :func:`option`).
        """
        super(Stream, self).construct(**kwargs)
        opts = parse_sudo_flags(sudo_args or [])

        self.username = option(self.username, username, opts.user)
        self.sudo_path = option(self.sudo_path, sudo_path)
        # An empty password string is treated the same as no password.
        self.password = password or None
        self.preserve_env = option(self.preserve_env,
                                   preserve_env, opts.preserve_env)
        self.set_home = option(self.set_home, set_home, opts.set_home)
        self.login = option(self.login, login, opts.login)
        self.selinux_role = option(self.selinux_role, selinux_role, opts.role)
        self.selinux_type = option(self.selinux_type, selinux_type, opts.type)

    def _get_name(self):
        """Return this stream's display name, e.g. ``sudo.root``."""
        return u'sudo.' + mitogen.core.to_text(self.username)

    def get_boot_command(self):
        """Return the argv used to spawn the child under sudo."""
        # Note: sudo did not introduce long-format option processing until July
        # 2013, so even though we parse long-format options, supply short-form
        # to the sudo command.
        bits = [self.sudo_path, '-u', self.username]
        if self.preserve_env:
            bits += ['-E']
        if self.set_home:
            bits += ['-H']
        if self.login:
            bits += ['-i']
        if self.selinux_role:
            bits += ['-r', self.selinux_role]
        if self.selinux_type:
            bits += ['-t', self.selinux_type]

        bits = bits + ['--'] + super(Stream, self).get_boot_command()
        LOG.debug('sudo command line: %r', bits)
        return bits

    # Messages used when raising PasswordError.
    password_incorrect_msg = 'sudo password is incorrect'
    password_required_msg = 'sudo password is required'

    def _connect_input_loop(self, it):
        """
        Watch child output for a localized sudo password prompt, sending the
        configured password at most once, until the first-stage bootstrap
        marker (EC0) arrives.

        :raises PasswordError:
            A prompt appeared but no password is configured, or the prompt
            reappeared after the password was sent.
        :raises mitogen.core.StreamError:
            The child's output ended without the bootstrap marker.
        """
        password_sent = False

        for buf in it:
            LOG.debug('%s: received %r', self.name, buf)
            if buf.endswith(self.EC0_MARKER):
                # First bootstrap stage completed successfully.
                self._ec0_received()
                return

            match = PASSWORD_PROMPT_RE.search(buf.decode('utf-8').lower())
            if match is not None:
                LOG.debug('%s: matched password prompt %r',
                          self.name, match.group(0))
                if self.password is None:
                    raise PasswordError(self.password_required_msg)
                if password_sent:
                    # Prompt reappeared after we answered it: rejected.
                    raise PasswordError(self.password_incorrect_msg)
                # The prompt arrives on the PTY, so reply on the diag stream.
                self.diag_stream.transmit_side.write(
                    (mitogen.core.to_text(self.password) + '\n').encode('utf-8')
                )
                password_sent = True

        raise mitogen.core.StreamError('bootstrap failed')

    def _connect_bootstrap(self):
        """Read both the stdio and diagnostic fds until bootstrap completes."""
        fds = [self.receive_side.fd]
        if self.diag_stream is not None:
            fds.append(self.diag_stream.receive_side.fd)

        it = mitogen.parent.iter_read(
            fds=fds,
            deadline=self.connect_deadline,
        )

        try:
            self._connect_input_loop(it)
        finally:
            it.close()
|
|
@ -1,168 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
"""
|
|
||||||
Permit connection of additional contexts that may act with the authority of
|
|
||||||
this context. For now, the UNIX socket is always mode 0600, i.e. can only be
|
|
||||||
accessed by root or the same UID. Therefore we can always trust connections to
|
|
||||||
have the same privilege (auth_id) as the current process.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import errno
|
|
||||||
import os
|
|
||||||
import socket
|
|
||||||
import struct
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.master
|
|
||||||
|
|
||||||
from mitogen.core import LOG
|
|
||||||
|
|
||||||
|
|
||||||
def is_path_dead(path):
    """
    Return :data:`True` if no listener is reachable at the UNIX socket
    *path* — i.e. connecting fails with ENOENT or ECONNREFUSED.
    """
    probe = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    try:
        try:
            probe.connect(path)
        except socket.error:
            # sys.exc_info() keeps this compatible with ancient Pythons.
            exc = sys.exc_info()[1]
            return exc.args[0] in (errno.ECONNREFUSED, errno.ENOENT)
    finally:
        probe.close()
    return False
|
|
||||||
|
|
||||||
def make_socket_path():
    """
    Return a fresh temporary filesystem path suitable for binding a UNIX
    socket. NOTE(review): tempfile.mktemp() is inherently race-prone; the
    caller is expected to bind() it immediately.
    """
    path = tempfile.mktemp(prefix='mitogen_unix_', suffix='.sock')
    return path
|
|
||||||
|
|
||||||
class Listener(mitogen.core.BasicStream):
    """
    Accept connections on a mode-0600 UNIX socket, assigning each connecting
    process a fresh context ID and registering it with the router. Because
    of the socket mode, every connection is trusted with this process's
    authority (see module docstring).
    """
    keep_alive = True

    def __repr__(self):
        return '%s.%s(%r)' % (
            __name__,
            self.__class__.__name__,
            self.path,
        )

    def __init__(self, router, path=None, backlog=100):
        """
        :param router:
            Router new client contexts are registered with.
        :param path:
            Socket path; a temporary one is generated when omitted.
        :param backlog:
            listen(2) backlog.
        """
        self._router = router
        self.path = path or make_socket_path()
        self._sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)

        # Remove a leftover socket file from a previous run, but only when
        # nothing is listening on it any more.
        if os.path.exists(self.path) and is_path_dead(self.path):
            LOG.debug('%r: deleting stale %r', self, self.path)
            os.unlink(self.path)

        self._sock.bind(self.path)
        # 0600: only root or the same UID may connect — the trust basis.
        os.chmod(self.path, int('0600', 8))
        self._sock.listen(backlog)
        self.receive_side = mitogen.core.Side(self, self._sock.fileno())
        router.broker.start_receive(self)

    def _unlink_socket(self):
        # Best-effort removal of the socket file.
        try:
            os.unlink(self.path)
        except OSError:
            e = sys.exc_info()[1]
            # Prevent a shutdown race with the parent process.
            if e.args[0] != errno.ENOENT:
                raise

    def on_shutdown(self, broker):
        """Stop accepting connections and remove the socket file."""
        broker.stop_receive(self)
        self._unlink_socket()
        self._sock.close()
        self.receive_side.closed = True

    def _accept_client(self, sock):
        """
        Handshake with one newly connected client: read its PID, allocate a
        context ID, send back (new_id, our_id, our_pid), then register the
        resulting stream with the router.
        """
        sock.setblocking(True)
        try:
            # The client announces itself with its PID as a big-endian u32.
            pid, = struct.unpack('>L', sock.recv(4))
        except (struct.error, socket.error):
            LOG.error('%r: failed to read remote identity: %s',
                      self, sys.exc_info()[1])
            return

        context_id = self._router.id_allocator.allocate()
        context = mitogen.parent.Context(self._router, context_id)
        stream = mitogen.core.Stream(self._router, context_id)
        stream.name = u'unix_client.%d' % (pid,)
        # Connections on this socket act with this process's authority.
        stream.auth_id = mitogen.context_id
        stream.is_privileged = True

        try:
            sock.send(struct.pack('>LLL', context_id, mitogen.context_id,
                                  os.getpid()))
        except socket.error:
            LOG.error('%r: failed to assign identity to PID %d: %s',
                      self, pid, sys.exc_info()[1])
            return

        LOG.debug('%r: accepted %r', self, stream)
        stream.accept(sock.fileno(), sock.fileno())
        self._router.register(context, stream)

    def on_receive(self, broker):
        """Broker callback: accept and handshake one pending connection."""
        sock, _ = self._sock.accept()
        try:
            self._accept_client(sock)
        finally:
            sock.close()
||||||
|
|
||||||
|
|
||||||
def connect(path, broker=None):
    """
    Connect to a :class:`Listener` at *path*, adopting the context ID it
    assigns to this process.

    :param str path:
        Filesystem path of the listening UNIX socket.
    :param broker:
        Optional existing broker to reuse for the new router.
    :returns:
        `(router, context)` tuple for communicating with the listener.
    """
    LOG.debug('unix.connect(path=%r)', path)
    sock = socket.socket(socket.AF_UNIX, socket.SOCK_STREAM)
    sock.connect(path)
    # Handshake: send our PID; receive our assigned context ID, the
    # listener's context ID, and the listener's PID.
    sock.send(struct.pack('>L', os.getpid()))
    mitogen.context_id, remote_id, pid = struct.unpack('>LLL', sock.recv(12))
    mitogen.parent_id = remote_id
    mitogen.parent_ids = [remote_id]

    LOG.debug('unix.connect(): local ID is %r, remote is %r',
              mitogen.context_id, remote_id)

    router = mitogen.master.Router(broker=broker)
    stream = mitogen.core.Stream(router, remote_id)
    # NOTE(review): sock is closed below, so accept() presumably duplicates
    # the fd — confirm against mitogen.core.Stream.accept.
    stream.accept(sock.fileno(), sock.fileno())
    stream.name = u'unix_listener.%d' % (pid,)

    context = mitogen.parent.Context(router, remote_id)
    router.register(context, stream)

    # Tear the stream down when the broker shuts down.
    mitogen.core.listen(router.broker, 'shutdown',
                        lambda: router.disconnect_stream(stream))

    sock.close()
    return router, context
|
|
@ -1,227 +0,0 @@
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
|
|
||||||
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
|
|
||||||
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
|
|
||||||
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
|
|
||||||
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
|
|
||||||
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
|
|
||||||
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
|
|
||||||
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
|
|
||||||
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
|
|
||||||
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
|
|
||||||
# POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
# !mitogen: minify_safe
|
|
||||||
|
|
||||||
import datetime
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
import mitogen
|
|
||||||
import mitogen.core
|
|
||||||
import mitogen.master
|
|
||||||
import mitogen.parent
|
|
||||||
|
|
||||||
|
|
||||||
LOG = logging.getLogger('mitogen')

# Python 2/3 portability shim: ``dict.iteritems`` only exists on Python 2.
# (The previous ``iteritems = getattr(dict, 'iteritems', dict.items)`` line
# was dead code, immediately overwritten by this conditional.)
if mitogen.core.PY3:
    iteritems = dict.items
else:
    iteritems = dict.iteritems
|
|
||||||
|
|
||||||
def setup_gil():
    """
    Set extremely long GIL release interval to let threads naturally progress
    through CPU-heavy sequences without forcing the wake of another thread that
    may contend trying to run the same CPU-heavy code. For the new-style
    Ansible work, this drops runtime ~33% and involuntary context switches by
    >80%, essentially making threads cooperatively scheduled.
    """
    # Python 2: check interval measured in bytecode instructions.
    if hasattr(sys, 'setcheckinterval'):
        sys.setcheckinterval(100000)

    # Python 3: switch interval measured in seconds.
    if hasattr(sys, 'setswitchinterval'):
        sys.setswitchinterval(10)
|
|
||||||
|
|
||||||
def disable_site_packages():
    """
    Remove all entries mentioning ``site-packages`` or ``Extras`` from
    :attr:`sys.path`. Used primarily for testing on OS X within a virtualenv,
    where OS X bundles some ancient version of the :mod:`six` module.
    """
    # Filter in place so code holding a reference to sys.path sees the change.
    sys.path[:] = [
        entry for entry in sys.path
        if 'site-packages' not in entry and 'Extras' not in entry
    ]
|
||||||
|
|
||||||
|
|
||||||
def _formatTime(record, datefmt=None):
|
|
||||||
dt = datetime.datetime.fromtimestamp(record.created)
|
|
||||||
return dt.strftime(datefmt)
|
|
||||||
|
|
||||||
|
|
||||||
def log_get_formatter():
    """Return the :class:`logging.Formatter` used for Mitogen's output."""
    datefmt = '%H:%M:%S'
    if sys.version_info > (2, 6):
        # Microseconds require the datetime-based _formatTime below.
        datefmt = datefmt + '.%f'

    formatter = logging.Formatter(
        '%(asctime)s %(levelname).1s %(name)s: %(message)s',
        datefmt,
    )
    formatter.formatTime = _formatTime
    return formatter
|
||||||
|
|
||||||
def log_to_file(path=None, io=False, level='INFO'):
    """
    Install a new :class:`logging.Handler` writing applications logs to the
    filesystem. Useful when debugging slave IO problems.

    Parameters to this function may be overridden at runtime using environment
    variables. See :ref:`logging-env-vars`.

    :param str path:
        If not :data:`None`, a filesystem path to write logs to. Otherwise,
        logs are written to :data:`sys.stderr`.

    :param bool io:
        If :data:`True`, include extremely verbose IO logs in the output.
        Useful for debugging hangs, less useful for debugging application code.

    :param str level:
        Name of the :mod:`logging` package constant that is the minimum level
        to log at. Useful levels are ``DEBUG``, ``INFO``, ``WARNING``, and
        ``ERROR``.
    """
    log = logging.getLogger('')
    if path:
        # Line-buffered; close-on-exec so children don't inherit the log fd.
        fp = open(path, 'w', 1)
        mitogen.core.set_cloexec(fp.fileno())
    else:
        fp = sys.stderr

    # Environment variable overrides the caller-supplied level; the special
    # pseudo-level 'IO' implies DEBUG plus the mitogen.io logger.
    level = os.environ.get('MITOGEN_LOG_LEVEL', level).upper()
    io = level == 'IO'
    if io:
        level = 'DEBUG'
        logging.getLogger('mitogen.io').setLevel(level)

    level = getattr(logging, level, logging.INFO)
    log.setLevel(level)

    # Prevent accidental duplicate log_to_file() calls from generating
    # duplicate output.
    for handler_ in reversed(log.handlers):
        if getattr(handler_, 'is_mitogen', None):
            log.handlers.remove(handler_)

    handler = logging.StreamHandler(fp)
    # Marker attribute used by the dedup loop above.
    handler.is_mitogen = True
    handler.formatter = log_get_formatter()
    log.handlers.insert(0, handler)
|
|
||||||
|
|
||||||
def run_with_router(func, *args, **kwargs):
    """
    Arrange for `func(router, *args, **kwargs)` to run with a temporary
    :class:`mitogen.master.Router`, ensuring the Router and Broker are
    correctly shut down during normal or exceptional return.

    :returns:
        `func`'s return value.
    """
    broker = mitogen.master.Broker()
    router = mitogen.master.Router(broker)
    try:
        return func(router, *args, **kwargs)
    finally:
        # Shut down even if func() raised, then wait for the broker thread.
        broker.shutdown()
        broker.join()
|
|
||||||
|
|
||||||
def with_router(func):
    """
    Decorator version of :func:`run_with_router`. Example:

    .. code-block:: python

        @with_router
        def do_stuff(router, arg):
            pass

        do_stuff(blah, 123)
    """
    def wrapper(*args, **kwargs):
        return run_with_router(func, *args, **kwargs)
    # ``__name__`` exists on both Python 2 and 3. The previous PY3 branch set
    # only a ``func_name`` attribute, leaving wrapper.__name__ as 'wrapper'.
    wrapper.__name__ = func.__name__
    # Kept for backwards compatibility with callers reading func_name.
    wrapper.func_name = func.__name__
    return wrapper
|
|
||||||
|
|
||||||
# Types cast() returns unchanged: immutable builtin scalars plus Mitogen's
# own wire-safe types.
PASSTHROUGH = (
    int, float, bool,
    type(None),
    mitogen.core.Context,
    mitogen.core.CallError,
    mitogen.core.Blob,
    mitogen.core.Secret,
)
|
|
||||||
|
|
||||||
def cast(obj):
    """
    Many tools love to subclass built-in types in order to implement useful
    functionality, such as annotating the safety of a Unicode string, or adding
    additional methods to a dict. However, cPickle loves to preserve those
    subtypes during serialization, resulting in CallError during :meth:`call
    <mitogen.parent.Context.call>` in the target when it tries to deserialize
    the data.

    This function walks the object graph `obj`, producing a copy with any
    custom sub-types removed. The functionality is not default since the
    resulting walk may be computationally expensive given a large enough graph.

    See :ref:`serialization-rules` for a list of supported types.

    :param obj:
        Object to undecorate.
    :returns:
        Undecorated object.
    """
    if isinstance(obj, dict):
        # Recursively undecorate both keys and values.
        return dict((cast(k), cast(v)) for k, v in iteritems(obj))
    if isinstance(obj, (list, tuple)):
        # Note: tuples are deliberately flattened to lists.
        return [cast(v) for v in obj]
    if isinstance(obj, PASSTHROUGH):
        return obj
    # Rebuild text/bytes as the plain base type, dropping any subclass.
    if isinstance(obj, mitogen.core.UnicodeType):
        return mitogen.core.UnicodeType(obj)
    if isinstance(obj, mitogen.core.BytesType):
        return mitogen.core.BytesType(obj)

    raise TypeError("Cannot serialize: %r: %r" % (type(obj), obj))
@ -1,15 +0,0 @@
|
||||||
[coverage:run]
|
|
||||||
branch = true
|
|
||||||
source =
|
|
||||||
mitogen
|
|
||||||
omit =
|
|
||||||
mitogen/compat/*
|
|
||||||
|
|
||||||
[flake8]
|
|
||||||
ignore = E402,E128,W503,E731
|
|
||||||
exclude = mitogen/compat
|
|
||||||
|
|
||||||
[egg_info]
|
|
||||||
tag_build =
|
|
||||||
tag_date = 0
|
|
||||||
|
|
|
@ -1,67 +0,0 @@
|
||||||
#!/usr/bin/env python2
|
|
||||||
# Copyright 2019, David Wilson
|
|
||||||
#
|
|
||||||
# Redistribution and use in source and binary forms, with or without
|
|
||||||
# modification, are permitted provided that the following conditions are met:
|
|
||||||
#
|
|
||||||
# 1. Redistributions of source code must retain the above copyright notice, this
|
|
||||||
# list of conditions and the following disclaimer.
|
|
||||||
#
|
|
||||||
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
|
||||||
# this list of conditions and the following disclaimer in the documentation
|
|
||||||
# and/or other materials provided with the distribution.
|
|
||||||
#
|
|
||||||
# 3. Neither the name of the copyright holder nor the names of its contributors
|
|
||||||
# may be used to endorse or promote products derived from this software without
|
|
||||||
# specific prior written permission.
|
|
||||||
#
|
|
||||||
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
|
||||||
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
|
||||||
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
|
||||||
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
|
|
||||||
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
|
|
||||||
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
|
|
||||||
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
|
|
||||||
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
|
|
||||||
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
|
|
||||||
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
|
||||||
|
|
||||||
import os
|
|
||||||
|
|
||||||
from setuptools import find_packages, setup
|
|
||||||
|
|
||||||
|
|
||||||
def grep_version():
    """
    Extract the package version tuple from ``mitogen/__init__.py`` without
    importing it, returning a dotted string such as ``'0.2.7'``.
    """
    import ast

    path = os.path.join(os.path.dirname(__file__), 'mitogen/__init__.py')
    with open(path) as fp:
        for line in fp:
            if line.startswith('__version__'):
                _, _, s = line.partition('=')
                # literal_eval parses the version tuple without eval()'s
                # arbitrary code execution.
                return '.'.join(map(str, ast.literal_eval(s.strip())))
|
||||||
|
|
||||||
# Package metadata; the version string is read out of mitogen/__init__.py by
# grep_version() defined above.
setup(
    name = 'mitogen',
    version = grep_version(),
    description = 'Library for writing distributed self-replicating programs.',
    author = 'David Wilson',
    license = 'New BSD',
    url = 'https://github.com/dw/mitogen/',
    packages = find_packages(exclude=['tests', 'examples']),
    # zip_safe=False: the module loader reads source files from disk.
    zip_safe = False,
    classifiers = [
        'Environment :: Console',
        'Intended Audience :: System Administrators',
        'License :: OSI Approved :: BSD License',
        'Operating System :: POSIX',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.4',
        'Programming Language :: Python :: 2.5',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: Implementation :: CPython',
        'Topic :: System :: Distributed Computing',
        'Topic :: System :: Systems Administration',
    ],
)
|
|
Loading…
Reference in a new issue