]> git-server-git.apps.pok.os.sepia.ceph.com Git - remoto.git/commitdiff
backends: allow easier localhost detection for needs_ssh()
authorAlfredo Deza <alfredo@deza.pe>
Tue, 12 Feb 2019 14:21:32 +0000 (09:21 -0500)
committerAlfredo Deza <alfredo@deza.pe>
Wed, 13 Feb 2019 20:14:36 +0000 (15:14 -0500)
Signed-off-by: Alfredo Deza <alfredo@deza.pe>
31 files changed:
.gitignore [new file with mode: 0644]
CHANGELOG.rst [new file with mode: 0644]
LICENSE [new file with mode: 0644]
MANIFEST.in [new file with mode: 0644]
README.rst [new file with mode: 0644]
remoto/__init__.py [new file with mode: 0644]
remoto/backends/__init__.py [new file with mode: 0644]
remoto/backends/docker.py [new file with mode: 0644]
remoto/backends/kubernetes.py [new file with mode: 0644]
remoto/backends/local.py [new file with mode: 0644]
remoto/backends/openshift.py [new file with mode: 0644]
remoto/backends/podman.py [new file with mode: 0644]
remoto/backends/ssh.py [new file with mode: 0644]
remoto/connection.py [new file with mode: 0644]
remoto/exc.py [new file with mode: 0644]
remoto/file_sync.py [new file with mode: 0644]
remoto/log.py [new file with mode: 0644]
remoto/process.py [new file with mode: 0644]
remoto/tests/__init__.py [new file with mode: 0644]
remoto/tests/backends/test_backends.py [new file with mode: 0644]
remoto/tests/backends/test_kubernetes.py [new file with mode: 0644]
remoto/tests/conftest.py [new file with mode: 0644]
remoto/tests/fake_module.py [new file with mode: 0644]
remoto/tests/test_connection.py [new file with mode: 0644]
remoto/tests/test_log.py [new file with mode: 0644]
remoto/tests/test_process.py [new file with mode: 0644]
remoto/tests/test_rsync.py [new file with mode: 0644]
remoto/tests/test_util.py [new file with mode: 0644]
remoto/util.py [new file with mode: 0644]
setup.py [new file with mode: 0644]
tox.ini [new file with mode: 0644]

diff --git a/.gitignore b/.gitignore
new file mode 100644 (file)
index 0000000..64b9a81
--- /dev/null
@@ -0,0 +1,33 @@
+*.py[cod]
+
+# C extensions
+*.so
+
+# Packages
+*.egg
+*.egg-info
+dist
+build
+eggs
+parts
+var
+sdist
+develop-eggs
+.installed.cfg
+lib64
+
+# Installer logs
+pip-log.txt
+
+# Unit test / coverage reports
+.coverage*
+.tox
+nosetests.xml
+
+# Translations
+*.mo
+
+# Mr Developer
+.mr.developer.cfg
+.project
+.pydevproject
diff --git a/CHANGELOG.rst b/CHANGELOG.rst
new file mode 100644 (file)
index 0000000..3c35b1b
--- /dev/null
@@ -0,0 +1,186 @@
+0.0.35
+------
+8-Jan-2019
+
+* Fix the botched 0.0.34 version which had stale commits from 0.0.32 - No code
+  changes.
+
+
+0.0.34
+------
+12-Dec-2018
+
+* Allow ``ssh_options`` to extend ssh flags in the ``Connection()`` object
+
+
+0.0.33
+------
+17-Jul-2018
+
+* ``extend_env`` needs to be removed from ``**kw`` **only** when present.
+
+
+0.0.32
+------
+16-Jul-2018
+
+* ``extend_env`` needs to be removed from ``**kw`` as it is being passed onto
+  subprocess, which renders it invalid
+
+
+0.0.31
+------
+10-Jul-2018
+
+* Extend environment variables, do not overwrite
+
+
+0.0.30
+------
+05-Jul-2016
+
+* Fix test issue with py3
+* Remove vendored execnet
+* Include tests when building
+* Strip carriage-returns from messages in logs
+
+0.0.29
+------
+17-May-2016
+* Catch possible errors when remotes are missing the right Python interpreter
+
+0.0.28
+------
+11-May-2016
+* Avoid needless list comprehension that caused issues with Python 3
+* Do not bare return when clients expect a three item tuple always
+* Fix an issue where ``process.check`` would need to raise exit but the
+  response had an error.
+
+0.0.27
+------
+22-Dec-2015
+* Fix a problem where stderr/stdout variables would be undefined on certain
+  conditions when running a remote command.
+
+0.0.26
+------
+15-Dec-2015
+* Fix (issue 19) where stdout and stderr would be prematurely ended and not
+  fully logged.
+
+0.0.25
+------
+21-Apr-2015
+* Fix (issue 15) where a child process could finish but output would not be
+  flushed to stdout/stderr.
+
+0.0.24
+------
+* Ship the ``LICENSE`` file and ``tests`` directory as part of the
+  distribution.
+
+0.0.23
+------
+* Output the exact same order of remote ``stdout`` and ``stderr``
+
+0.0.22
+------
+* Create a better detection mechanism for remote ``sudo`` needs
+
+0.0.21
+------
+* Do not override remote environment variables to set the ``$PATH``
+
+0.0.20
+------
+* Fix unneeded ssh connection when using FQDN hosts
+
+0.0.19
+------
+* Fix ``vendor.py`` to really include the proper tag for ``execnet``
+
+0.0.18
+------
+* Use execnet 1.2post2 that fixes a problem with ``None`` globals (see issue
+  #1)
+
+0.0.17
+------
+* add some imports to init so that they are easier to use
+* make vendor libraries optional
+
+0.0.16
+------
+* spit stdout before stderr as errors should be read last
+
+0.0.15
+------
+* eat typeerror when closing the connection (execnet)
+
+0.0.14
+------
+* Use new execnet 1.2.0
+* Use new connection defaults for execnet
+
+0.0.13
+------
+* Add a ``sync`` function to be able to synchronize directories between hosts.
+
+0.0.12
+------
+* Map ``stderr`` to ``WARNING`` log level
+* Do not spit out ``remoto``'s own tracebacks when raising remote errors
+  because some exception occurred; just do it for non-remoto exceptions
+* Use version 1.1.1 of execnet with patches.
+
+0.0.11
+------
+* Catch more TypeError problems when closing the connections.
+
+0.0.10
+------
+* Allow configuration to raise on non-zero exit status
+
+0.0.9
+-----
+* If the exit status is non-zero on the remote end, raise an exception
+
+0.0.8
+-----
+* Raise RuntimeError on remote exceptions so others can actually
+  catch that.
+
+0.0.7
+-----
+* Patches execnet to allow local popen with sudo python
+
+0.0.6
+-----
+* Add a global timeout option
+* All processes use PATH variables passed to Popen
+* Do not mangle commands if they need sudo
+* Allow sudo python
+
+0.0.5
+-----
+* Allow more than one thread to be started in the connection
+* log at debug level the name of the function to be remotely
+  executed
+
+0.0.4
+-----
+* Create a way to execute functions remotely
+
+0.0.3
+-----
+* If the hostname passed in to the connection matches the local
+  hostname, then do a local connection (not an ssh one)
+
+0.0.2
+-----
+* Allow a context manager for running one-off commands with the connection
+  object.
+* ``process.run`` can now take in a timeout value so that it does not hang in
+  remote processes
diff --git a/LICENSE b/LICENSE
new file mode 100644 (file)
index 0000000..3589596
--- /dev/null
+++ b/LICENSE
@@ -0,0 +1,20 @@
+The MIT License (MIT)
+Copyright (c) 2013 Alfredo Deza
+
+Permission is hereby granted, free of charge, to any person obtaining a copy
+of this software and associated documentation files (the "Software"), to deal
+in the Software without restriction, including without limitation the rights
+to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
+copies of the Software, and to permit persons to whom the Software is
+furnished to do so, subject to the following conditions:
+
+The above copyright notice and this permission notice shall be included in all
+copies or substantial portions of the Software.
+
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
+IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE
+OR OTHER DEALINGS IN THE SOFTWARE.
diff --git a/MANIFEST.in b/MANIFEST.in
new file mode 100644 (file)
index 0000000..2f42070
--- /dev/null
@@ -0,0 +1,3 @@
+include setup.py
+include LICENSE
+include README.rst
diff --git a/README.rst b/README.rst
new file mode 100644 (file)
index 0000000..c4aa720
--- /dev/null
@@ -0,0 +1,148 @@
+remoto
+======
+A very simplistic remote-command-executor using ``ssh`` and Python in the
+remote end.
+
+All the heavy lifting is done by execnet, while this minimal API provides the
+bare minimum to handle easy logging and connections from the remote end.
+
+``remoto`` is a bit opinionated as it was conceived to replace helpers and
+remote utilities for ``ceph-deploy``, a tool to run remote commands to configure
+and setup the distributed file system Ceph.
+
+
+Example Usage
+-------------
+The usage aims to be extremely straightforward, with a very minimal set of
+helpers and utilities for remote processes and logging output.
+
+The most basic example will use the ``run`` helper to execute a command on the
+remote end. It does require a logging object, which needs to be one that, at
+the very least, has both ``error`` and ``debug``. Those are called for
+``stderr`` and ``stdout`` respectively.
+
+This is how it would look with a basic logger passed in::
+
+    >>> import logging
+    >>> logging.basicConfig(level=logging.DEBUG)
+    >>> logger = logging.getLogger('hostname')
+    >>> conn = remoto.Connection('hostname', logger=logger)
+    >>> run(conn, ['ls', '-a'])
+    INFO:hostname:Running command: ls -a
+    DEBUG:hostname:.
+    DEBUG:hostname:..
+    DEBUG:hostname:.bash_history
+    DEBUG:hostname:.bash_logout
+    DEBUG:hostname:.bash_profile
+    DEBUG:hostname:.bashrc
+    DEBUG:hostname:.gem
+    DEBUG:hostname:.lesshst
+    DEBUG:hostname:.pki
+    DEBUG:hostname:.puppet
+    DEBUG:hostname:.ssh
+    DEBUG:hostname:.vim
+    DEBUG:hostname:.viminfo
+
+The ``run`` helper will display the ``stderr`` and ``stdout`` as ``ERROR`` and
+``DEBUG`` respectively.
+
+For other types of usage (like checking exit status codes, or raising upon
+them) ``remoto`` does provide them too.
+
+
+Remote Commands
+===============
+
+``process.run``
+---------------
+Calling remote commands can be done in a few different ways. The most simple
+one is with ``process.run``::
+
+    >>> from remoto.process import run
+    >>> from remoto import Connection
+    >>> logger = logging.getLogger('myhost')
+    >>> conn = Connection('myhost', logger=logger)
+    >>> run(conn, ['whoami'])
+    INFO:myhost:Running command: whoami
+    DEBUG:myhost:root
+
+Note however, that you are not capturing results or information from the remote
+end. The intention here is only to be able to run a command and log its output.
+It is a *fire and forget* call.
+
+
+``process.check``
+-----------------
+This callable, allows the caller to deal with the ``stderr``, ``stdout`` and
+exit code. It returns it in a 3 item tuple::
+
+    >>> from remoto.process import check
+    >>> check(conn, ['ls', '/nonexistent/path'])
+    ([], ['ls: cannot access /nonexistent/path: No such file or directory'], 2)
+
+Note that the ``stdout`` and ``stderr`` items are returned as lists with the ``\n``
+characters removed.
+
+This is useful if you need to process the information back locally, as opposed
+to just firing and forgetting (while logging, like ``process.run``).
+
+
+Remote Functions
+================
+
+To execute remote functions (ideally) you would need to define them in a module
+and add the following to the end of that module::
+
+    if __name__ == '__channelexec__':
+        for item in channel:
+            channel.send(eval(item))
+
+
+If you had a function in a module named ``foo`` that looks like this::
+
+    import os
+
+    def listdir(path):
+        return os.listdir(path)
+
+To be able to execute that ``listdir`` function remotely you would need to pass
+the module to the connection object and then call that function::
+
+    >>> import foo
+    >>> conn = Connection('hostname')
+    >>> remote_foo = conn.import_module(foo)
+    >>> remote_foo.listdir('.')
+    ['.bash_logout',
+     '.profile',
+     '.veewee_version',
+     '.lesshst',
+     'python',
+     '.vbox_version',
+     'ceph',
+     '.cache',
+     '.ssh']
+
+Note that functions to be executed remotely **cannot** accept objects as
+arguments, just normal Python data structures, like tuples, lists and
+dictionaries. Also safe to use are ints and strings.
+
+
+Automatic detection for remote connections
+------------------------------------------
+There is automatic detection for the need to connect remotely (via SSH) or
+not, which is inferred from the hostname of the current host (vs. the host
+being connected to).
+
+If the local host has the same hostname as the remote host, a local connection
+(via `Popen`) will be opened and used instead of `ssh`, avoiding the issues
+of being able to ssh into the same host.
+
+Automatic detection for using `sudo`
+------------------------------------
+This magical detection can be enabled by using the `detect_sudo` flag in the
+`Connection` class. It is disabled by default.
+
+When enabled, it will prefix any command with `sudo`. This is useful for
+libraries that need super user permissions and want to avoid passing `sudo`
+everywhere, which can be non-trivial if dealing with `root` users that are
+connecting via SSH.
diff --git a/remoto/__init__.py b/remoto/__init__.py
new file mode 100644 (file)
index 0000000..09a29dd
--- /dev/null
@@ -0,0 +1,7 @@
+from .connection import Connection
+from .file_sync import rsync
+from . import process
+from . import connection
+
+
+__version__ = '0.0.35'
diff --git a/remoto/backends/__init__.py b/remoto/backends/__init__.py
new file mode 100644 (file)
index 0000000..49871fb
--- /dev/null
@@ -0,0 +1,274 @@
+import inspect
+import json
+import socket
+import sys
+import execnet
+import logging
+from remoto.process import check
+
+
+class BaseConnection(object):
+    """
+    Base class for Connection objects. Provides a generic interface to execnet
+    for setting up the connection
+    """
+    executable = ''
+    remote_import_system = 'legacy'
+
+    def __init__(self, hostname, logger=None, sudo=False, threads=1, eager=True,
+                 detect_sudo=False, interpreter=None, ssh_options=None):
+        self.sudo = sudo
+        self.hostname = hostname
+        self.ssh_options = ssh_options
+        self.logger = logger or basic_remote_logger()
+        self.remote_module = None
+        self.channel = None
+        self.global_timeout = None  # wait for ever
+
+        self.interpreter = interpreter or 'python%s' % sys.version_info[0]
+
+        if eager:
+            try:
+                if detect_sudo:
+                    self.sudo = self._detect_sudo()
+                self.gateway = self._make_gateway(hostname)
+            except OSError:
+                self.logger.error(
+                    "Can't communicate with remote host, possibly because "
+                    "%s is not installed there" % self.interpreter
+                )
+                raise
+
+    def _make_gateway(self, hostname):
+        gateway = execnet.makegateway(
+            self._make_connection_string(hostname)
+        )
+        gateway.reconfigure(py2str_as_py3str=False, py3str_as_py2str=False)
+        return gateway
+
+    def _detect_sudo(self, _execnet=None):
+        """
+        ``sudo`` detection has to create a different connection to the remote
+        host so that we can reliably ensure that ``getuser()`` will return the
+        right information.
+
+        After getting the user info it closes the connection and returns
+        a boolean
+        """
+        exc = _execnet or execnet
+        gw = exc.makegateway(
+            self._make_connection_string(self.hostname, use_sudo=False)
+        )
+
+        channel = gw.remote_exec(
+            'import getpass; channel.send(getpass.getuser())'
+        )
+
+        result = channel.receive()
+        gw.exit()
+
+        if result == 'root':
+            return False
+        self.logger.debug('connection detected need for sudo')
+        return True
+
+    def _make_connection_string(self, hostname, _needs_ssh=None, use_sudo=None):
+        _needs_ssh = _needs_ssh or needs_ssh
+        interpreter = self.interpreter
+        if use_sudo is not None:
+            if use_sudo:
+                interpreter = 'sudo ' + interpreter
+        elif self.sudo:
+            interpreter = 'sudo ' + interpreter
+        if _needs_ssh(hostname):
+            if self.ssh_options:
+                return 'ssh=%s %s//python=%s' % (
+                    self.ssh_options, hostname, interpreter
+                )
+            else:
+                return 'ssh=%s//python=%s' % (hostname, interpreter)
+        return 'popen//python=%s' % interpreter
+
+    def __enter__(self):
+        return self
+
+    def __exit__(self, exc_type, exc_val, exc_tb):
+        self.exit()
+        return False
+
+    def cmd(self, cmd):
+        """
+        In the base connection class, this method just returns the ``cmd``
+        as-is. Other implementations will end up doing transformations to the
+        command by prefixing it with other flags needed. See
+        :class:`KubernetesConnection` for an example
+        """
+        return cmd
+
+    def execute(self, function, **kw):
+        return self.gateway.remote_exec(function, **kw)
+
+    def exit(self):
+        self.gateway.exit()
+
+    def import_module(self, module):
+        """
+        Allows remote execution of a local module. Depending on the
+        ``remote_import_system`` attribute it may use execnet's implementation
+        or remoto's own based on JSON.
+
+        .. note:: It is not possible to use execnet's remote execution model on
+                  connections that aren't SSH or Local.
+        """
+        if self.remote_import_system is not None:
+            if self.remote_import_system == 'json':
+                self.remote_module = JsonModuleExecute(self, module, self.logger)
+            else:
+                self.remote_module = LegacyModuleExecute(self.gateway, module, self.logger)
+        else:
+            self.remote_module = LegacyModuleExecute(self.gateway, module, self.logger)
+        return self.remote_module
+
+
+class LegacyModuleExecute(object):
+    """
+    This (now legacy) class, is the way ``execnet`` does its remote module
+    execution: it sends it over a channel, and does a send/receive for
+    exchanging information. This only works when there is native support in
+    execnet for a given connection. This currently means it would only work for
+    ssh and local (Popen) connections, and will not work for anything like
+    kubernetes or containers.
+    """
+
+    def __init__(self, gateway, module, logger=None):
+        self.channel = gateway.remote_exec(module)
+        self.module = module
+        self.logger = logger
+
+    def __getattr__(self, name):
+        if not hasattr(self.module, name):
+            msg = "module %s does not have attribute %s" % (str(self.module), name)
+            raise AttributeError(msg)
+        docstring = self._get_func_doc(getattr(self.module, name))
+
+        def wrapper(*args):
+            arguments = self._convert_args(args)
+            if docstring:
+                self.logger.debug(docstring)
+            self.channel.send("%s(%s)" % (name, arguments))
+            try:
+                return self.channel.receive()
+            except Exception as error:
+                # Error will come as a string of a traceback, remove everything
+                # up to the actual exception since we do get garbage otherwise
+                # that points to non-existent lines in the compiled code
+                exc_line = str(error)
+                for tb_line in reversed(str(error).split('\n')):
+                    if tb_line:
+                        exc_line = tb_line
+                        break
+                raise RuntimeError(exc_line)
+
+        return wrapper
+
+    def _get_func_doc(self, func):
+        try:
+            return getattr(func, 'func_doc').strip()
+        except AttributeError:
+            return ''
+
+    def _convert_args(self, args):
+        if args:
+            if len(args) > 1:
+                arguments = str(args).rstrip(')').lstrip('(')
+            else:
+                arguments = str(args).rstrip(',)').lstrip('(')
+        else:
+            arguments = ''
+        return arguments
+
+
+dump_template = """
+if __name__ == '__main__':
+    import json
+    obj = {'return': None, 'exception': None}
+    try:
+        obj['return'] = %s%s
+    except Exception:
+        obj['exception'] = traceback.format_exc()
+    try:
+        print(json.dumps(obj).decode('utf-8'))
+    except AttributeError:
+        print(json.dumps(obj))
+"""
+
+
+class JsonModuleExecute(object):
+    """
+    This remote execution class allows to ship Python code over to the remote
+    node, load it via ``stdin`` and call any function with arguments. The
+    resulting response is dumped over JSON so that it can get printed to
+    ``stdout``, then captured locally, loaded into regular Python and returned.
+
+    If the remote end generates an exception with a traceback, that is captured
+    as well and raised accordingly.
+    """
+
+    def __init__(self, conn, module, logger=None):
+        self.conn = conn
+        self.module = module
+        self._module_source = inspect.getsource(module)
+        self.logger = logger
+
+    def __getattr__(self, name):
+        if not hasattr(self.module, name):
+            msg = "module %s does not have attribute %s" % (str(self.module), name)
+            raise AttributeError(msg)
+        docstring = self._get_func_doc(getattr(self.module, name))
+
+        def wrapper(*args):
+            if docstring:
+                self.logger.debug(docstring)
+            if len(args):
+                source = self._module_source + dump_template % (name, repr(args))
+            else:
+                source = self._module_source + dump_template % (name, '()')
+
+            out, err, code = check(self.conn, ['python'], stdin=source.encode('utf-8'))
+            response = json.loads(out[0])
+            if response['exception']:
+                raise Exception(response['exception'])
+            return response['return']
+
+        return wrapper
+
+    def _get_func_doc(self, func):
+        try:
+            return getattr(func, 'func_doc').strip()
+        except AttributeError:
+            return ''
+
+
+def basic_remote_logger():
+    logging.basicConfig()
+    logger = logging.getLogger(socket.gethostname())
+    logger.setLevel(logging.DEBUG)
+    return logger
+
+
+def needs_ssh(hostname, _socket=None):
+    """
+    Determine whether connecting to ``hostname`` requires SSH; returns
+    False when the hostname refers to the local machine.
+    """
+    if hostname.lower() in ['localhost', '127.0.0.1', '127.0.1.1']:
+        return False
+    _socket = _socket or socket
+    fqdn = _socket.getfqdn()
+    if hostname == fqdn:
+        return False
+    local_hostname = _socket.gethostname()
+    local_short_hostname = local_hostname.split('.')[0]
+    if local_hostname == hostname or local_short_hostname == hostname:
+        return False
+    return True
diff --git a/remoto/backends/docker.py b/remoto/backends/docker.py
new file mode 100644 (file)
index 0000000..e57718b
--- /dev/null
@@ -0,0 +1,45 @@
+from . import BaseConnection
+
+
+class DockerConnection(BaseConnection):
+    """
+    This connection class allows to (optionally) define a remote hostname
+    to connect that holds a given container::
+
+        >>> conn = DockerConnection(hostname='srv-1', container_id='asdf-lkjh')
+
+    Either ``container_id`` or ``container_name`` can be provided to connect to
+    a given container.
+
+    .. note:: ``hostname`` defaults to 'localhost' when undefined
+    """
+
+    executable = 'docker'
+    remote_import_system = 'json'
+
+    def __init__(self, hostname=None, container_id=None, container_name=None, user=None, **kw):
+        self.hostname = hostname or 'localhost'
+        self.identifier = container_id or container_name
+        if not self.identifier:
+            raise TypeError('Either container_id or container_name must be provided')
+        self.user = user
+        super(DockerConnection, self).__init__(hostname=self.hostname, **kw)
+
+    def command_template(self):
+        if self.user:
+            prefix = [
+                self.executable, 'exec', '-i',
+                '-u', self.user,
+                self.identifier, '/bin/sh', '-c'
+            ]
+        else:
+            prefix = [
+                self.executable, 'exec', '-i',
+                self.identifier, '/bin/sh', '-c'
+            ]
+        return prefix
+
+    def cmd(self, cmd):
+        tmpl = self.command_template()
+        tmpl.append(' '.join(cmd))
+        return tmpl
diff --git a/remoto/backends/kubernetes.py b/remoto/backends/kubernetes.py
new file mode 100644 (file)
index 0000000..e524012
--- /dev/null
@@ -0,0 +1,30 @@
+from . import BaseConnection
+
+
+class KubernetesConnection(BaseConnection):
+
+    executable = 'kubectl'
+    remote_import_system = 'json'
+
+    def __init__(self, pod_name, namespace=None, **kw):
+        self.namespace = namespace
+        self.pod_name = pod_name
+        super(KubernetesConnection, self).__init__(hostname='localhost', **kw)
+
+    def command_template(self):
+        if self.namespace:
+            prefix = [
+                self.executable, 'exec', '-i', '-n',
+                self.namespace, self.pod_name, '--', '/bin/sh', '-c'
+            ]
+        else:
+            prefix = [
+                self.executable, 'exec', '-i',
+                self.pod_name, '--', '/bin/sh', '-c'
+            ]
+        return prefix
+
+    def cmd(self, cmd):
+        tmpl = self.command_template()
+        tmpl.append(' '.join(cmd))
+        return tmpl
diff --git a/remoto/backends/local.py b/remoto/backends/local.py
new file mode 100644 (file)
index 0000000..05a001e
--- /dev/null
@@ -0,0 +1,23 @@
+from . import BaseConnection
+import socket
+
+
+class LocalConnection(BaseConnection):
+
+    def __init__(self, **kw):
+        # hostname gets ignored, and forced to be localhost always
+        kw.pop('hostname', None)
+        super(LocalConnection, self).__init__(
+            hostname='localhost',
+            detect_sudo=False,
+            **kw
+        )
+
+    def _make_connection_string(self, hostname, _needs_ssh=None, use_sudo=None):
+        interpreter = self.interpreter
+        if use_sudo is not None:
+            if use_sudo:
+                interpreter = 'sudo ' + interpreter
+        elif self.sudo:
+            interpreter = 'sudo ' + interpreter
+        return 'popen//python=%s' % interpreter
diff --git a/remoto/backends/openshift.py b/remoto/backends/openshift.py
new file mode 100644 (file)
index 0000000..e093ec3
--- /dev/null
@@ -0,0 +1,6 @@
+from .kubernetes import KubernetesConnection
+
+
+class OpenshiftConnection(KubernetesConnection):
+
+    executable = 'oc'
diff --git a/remoto/backends/podman.py b/remoto/backends/podman.py
new file mode 100644 (file)
index 0000000..c6c743c
--- /dev/null
@@ -0,0 +1,6 @@
+from .docker import DockerConnection
+
+
+class PodmanConnection(DockerConnection):
+
+    executable = 'podman'
diff --git a/remoto/backends/ssh.py b/remoto/backends/ssh.py
new file mode 100644 (file)
index 0000000..f562fec
--- /dev/null
@@ -0,0 +1 @@
+from . import BaseConnection as SshConnection
diff --git a/remoto/connection.py b/remoto/connection.py
new file mode 100644 (file)
index 0000000..47c2895
--- /dev/null
@@ -0,0 +1,48 @@
+import logging
+# compatibility for older clients that rely on the previous ``Connection`` class
+from remoto.backends import BaseConnection as Connection # noqa
+from remoto.backends import ssh, openshift, kubernetes, local, podman, docker
+
+
+logger = logging.getLogger('remoto')
+
+
+def get(name, fallback='ssh'):
+    """
+    Retrieve the matching backend class from a string. If no backend can be
+    matched, it raises an error.
+
+    >>> get('ssh')
+    <class 'remoto.backends.BaseConnection'>
+    >>> get()
+    <class 'remoto.backends.BaseConnection'>
+    >>> get('non-existent')
+    <class 'remoto.backends.BaseConnection'>
+    >>> get('non-existent', 'openshift')
+    <class 'remoto.backends.openshift.OpenshiftConnection'>
+    """
+    mapping = {
+        'ssh': ssh.SshConnection,
+        'oc': openshift.OpenshiftConnection,
+        'openshift': openshift.OpenshiftConnection,
+        'kubernetes': kubernetes.KubernetesConnection,
+        'k8s': kubernetes.KubernetesConnection,
+        'local': local.LocalConnection,
+        'popen': local.LocalConnection,
+        'localhost': local.LocalConnection,
+        'docker': docker.DockerConnection,
+        'podman': podman.PodmanConnection,
+    }
+    if not name:
+        # falls back to just plain local/ssh
+        name = 'ssh'
+
+    name = name.strip().lower()
+    connection_class = mapping.get(name)
+    if not connection_class:
+        logger.warning('no connection backend found for: "%s"' % name)
+        if fallback:
+            logger.info('falling back to "%s"' % fallback)
+            # this assumes that ``fallback`` is a valid mapping name
+            return mapping.get(fallback)
+    return connection_class
diff --git a/remoto/exc.py b/remoto/exc.py
new file mode 100644 (file)
index 0000000..03f891f
--- /dev/null
@@ -0,0 +1,6 @@
+import execnet
+
+HostNotFound = execnet.HostNotFound
+RemoteError = execnet.RemoteError
+TimeoutError = execnet.TimeoutError
+DataFormatError = execnet.DataFormatError
diff --git a/remoto/file_sync.py b/remoto/file_sync.py
new file mode 100644 (file)
index 0000000..db61c9f
--- /dev/null
@@ -0,0 +1,45 @@
+import execnet
+from remoto.backends import basic_remote_logger
+from remoto.backends import BaseConnection as Connection
+
+
+class _RSync(execnet.RSync):
+    """
+    Inherits from ``execnet.RSync`` so that we can log nicely with the user
+    logger instance (if any) back with the ``_report_send_file`` method
+    """
+
+    def __init__(self, sourcedir, callback=None, verbose=True, logger=None):
+        self.logger = logger
+        super(_RSync, self).__init__(sourcedir, callback, verbose)
+
+    def _report_send_file(self, gateway, modified_rel_path):
+        if self._verbose:
+            self.logger.info("syncing file: %s" % modified_rel_path)
+
+
+def rsync(hosts, source, destination, logger=None, sudo=False):
+    """
+    Grabs the hosts (or single host), creates the connection object for each
+    and set the rsync execnet engine to push the files.
+
+    It assumes that all of the destinations for the different hosts is the
+    same. This deviates from what execnet does because it has the flexibility
+    to push to different locations.
+    """
+    logger = logger or basic_remote_logger()
+    sync = _RSync(source, logger=logger)
+
+    # setup_targets
+    if not isinstance(hosts, list):
+        hosts = [hosts]
+
+    for host in hosts:
+        conn = Connection(
+            host,
+            logger,
+            sudo,
+        )
+        sync.add_target(conn.gateway, destination)
+
+    return sync.send()
diff --git a/remoto/log.py b/remoto/log.py
new file mode 100644 (file)
index 0000000..0d42f43
--- /dev/null
@@ -0,0 +1,28 @@
+
+
+def reporting(conn, result, timeout=None):
+    timeout = timeout or conn.global_timeout # -1 a.k.a. wait for ever
+    log_map = {
+        'debug': conn.logger.debug,
+        'error': conn.logger.error,
+        'warning': conn.logger.warning
+    }
+
+    while True:
+        try:
+            received = result.receive(timeout)
+            level_received, message = list(received.items())[0]
+            if not isinstance(message, str):
+                message = message.decode('utf-8')
+            log_map[level_received](message.strip('\r\n'))
+        except EOFError:
+            break
+        except Exception as err:
+            # the things we need to do here :(
+            # because execnet magic, we cannot catch this as
+            # `except TimeoutError`
+            if err.__class__.__name__ == 'TimeoutError':
+                msg = 'No data was received after %s seconds, disconnecting...' % timeout
+                conn.logger.warning(msg)
+                break
+            raise
diff --git a/remoto/process.py b/remoto/process.py
new file mode 100644 (file)
index 0000000..64a5216
--- /dev/null
@@ -0,0 +1,213 @@
+import traceback
+from .log import reporting
+from .util import admin_command, RemoteError
+
+
def _remote_run(channel, cmd, **kw):
    """
    Remote end of :func:`run`: execute ``cmd`` with ``subprocess.Popen`` and
    stream every stdout/stderr line back over the execnet ``channel`` as
    single-item dicts (``{'debug': line}`` / ``{'warning': line}``).

    Note: this function runs on the remote interpreter via execnet, so all
    imports must live inside the function body.
    """
    import subprocess
    import sys
    from select import select
    # when True (default), a non-zero exit raises; otherwise it is logged
    stop_on_nonzero = kw.pop('stop_on_nonzero', True)

    process = subprocess.Popen(
        cmd,
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
        close_fds=True,
        **kw
    )

    while True:
        # block until at least one of the two pipes has data to read
        reads, _, _ = select(
            [process.stdout.fileno(), process.stderr.fileno()],
            [], []
        )

        for descriptor in reads:
            if descriptor == process.stdout.fileno():
                read = process.stdout.readline()
                if read:
                    channel.send({'debug': read})
                    sys.stdout.flush()

            if descriptor == process.stderr.fileno():
                read = process.stderr.readline()
                if read:
                    channel.send({'warning': read})
                    sys.stderr.flush()

        if process.poll() is not None:
            # ensure we do not have anything pending in stdout or stderr
            # unfortunately, we cannot abstract this repetitive loop into its
            # own function because execnet does not allow for non-global (or
            # even nested functions). This must be repeated here.
            while True:
                err_read = out_read = None
                for descriptor in reads:
                    if descriptor == process.stdout.fileno():
                        out_read = process.stdout.readline()
                        if out_read:
                            channel.send({'debug': out_read})
                            sys.stdout.flush()

                    if descriptor == process.stderr.fileno():
                        err_read = process.stderr.readline()
                        if err_read:
                            channel.send({'warning': err_read})
                            sys.stderr.flush()
                # At this point we have gone through all the possible
                # descriptors and `read` was empty, so we now can break out of
                # this since all stdout/stderr has been properly flushed to
                # logging
                if not err_read and not out_read:
                    break

            break

    returncode = process.wait()
    if returncode != 0:
        if stop_on_nonzero:
            raise RuntimeError(
                "command returned non-zero exit status: %s" % returncode
            )
        else:
            channel.send({'warning': "command returned non-zero exit status: %s" % returncode})
+
+
def extend_env(conn, arguments):
    """
    Retrieve the remote environment so that ``$PATH`` can be extended
    explicitly without wiping out the rest of the variables, then merge in
    any caller-provided ``extend_env`` mapping.

    :param conn: A connection object (provides ``gateway`` and ``logger``)
    :param arguments: The keyword arguments destined for ``subprocess.Popen``
    :returns: ``arguments`` with ``env`` set (and ``extend_env`` consumed)
    """
    # retrieve the remote environment variables for the host
    try:
        result = conn.gateway.remote_exec("import os; channel.send(os.environ.copy())")
        env = result.receive()
    except Exception:
        conn.logger.exception('failed to retrieve the remote environment variables')
        env = {}

    # get the $PATH and extend it (do not overwrite). A separating colon is
    # required when a remote $PATH already exists, otherwise its last entry
    # would get fused with '/usr/local/bin' producing an invalid entry.
    path = env.get('PATH', '')
    if path and not path.endswith(':'):
        path += ':'
    env['PATH'] = path + '/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin'
    arguments['env'] = env
    if arguments.get('extend_env'):
        arguments['env'].update(arguments.pop('extend_env'))
    return arguments
+
+
def run(conn, command, exit=False, timeout=None, **kw):
    """
    A real-time-logging implementation of a remote subprocess.Popen call where
    a command is just executed on the remote end and no other handling is done.

    :param conn: A connection oject
    :param command: The command to pass in to the remote subprocess.Popen
    :param exit: If this call should close the connection at the end
    :param timeout: How many seconds to wait after no remote data is received
                    (defaults to wait for ever)
    """
    stop_on_error = kw.pop('stop_on_error', True)
    if not kw.get('env'):
        # extend the remote $PATH explicitly instead of clobbering the
        # whole environment
        kw = extend_env(conn, kw)

    command = conn.cmd(command)
    timeout = timeout or conn.global_timeout

    conn.logger.info('Running command: %s' % ' '.join(admin_command(conn.sudo, command)))
    result = conn.execute(_remote_run, cmd=command, **kw)
    try:
        reporting(conn, result, timeout)
    except Exception:
        remote_trace = traceback.format_exc()
        remote_error = RemoteError(remote_trace)
        # a plain RuntimeError carries the useful message on one line;
        # anything else gets its full traceback logged
        if remote_error.exception_name == 'RuntimeError':
            conn.logger.error(remote_error.exception_line)
        else:
            for line in remote_trace.split('\n'):
                conn.logger.error(line)
        if stop_on_error:
            raise RuntimeError('Failed to execute command: %s' % ' '.join(command))
    if exit:
        conn.exit()
+
+
+def _remote_check(channel, cmd, **kw):
+    import subprocess
+    stdin = kw.pop('stdin', None)
+    process = subprocess.Popen(
+        cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, stdin=subprocess.PIPE, **kw
+    )
+
+    if stdin:
+        if not isinstance(stdin, bytes):
+            stdin.encode('utf-8', errors='ignore')
+        stdout_stream, stderr_stream = process.communicate(stdin)
+    else:
+        stdout_stream = process.stdout.read()
+        stderr_stream = process.stderr.read()
+
+    try:
+        stdout_stream = stdout_stream.decode('utf-8')
+        stderr_stream = stderr_stream.decode('utf-8')
+    except AttributeError:
+        pass
+
+    stdout = stdout_stream.splitlines()
+    stderr = stderr_stream.splitlines()
+    channel.send((stdout, stderr, process.wait()))
+
+
def check(conn, command, exit=False, timeout=None, **kw):
    """
    Execute a remote command with ``subprocess.Popen`` but report back the
    results in a tuple with three items: stdout, stderr, and exit status.

    This helper function *does not* provide any logging as it is the caller's
    responsibility to do so.
    """
    command = conn.cmd(command)
    stop_on_error = kw.pop('stop_on_error', True)
    timeout = timeout or conn.global_timeout

    if not kw.get('env'):
        # extend the remote $PATH explicitly instead of clobbering the
        # whole environment
        kw = extend_env(conn, kw)

    conn.logger.info('Running command: %s' % ' '.join(admin_command(conn.sudo, command)))
    result = conn.execute(_remote_check, cmd=command, **kw)
    response = None
    try:
        response = result.receive(timeout)
    except Exception as err:
        # execnet raises its own TimeoutError class, so it cannot be caught
        # directly with ``except TimeoutError``
        if err.__class__.__name__ == 'TimeoutError':
            conn.logger.warning(
                'No data was received after %s seconds, disconnecting...' % timeout
            )
            # there is no stdout, stderr, or exit code but make the exit code
            # an error condition (non-zero) regardless
            return [], [], -1
        remote_trace = traceback.format_exc()
        remote_error = RemoteError(remote_trace)
        if remote_error.exception_name == 'RuntimeError':
            conn.logger.error(remote_error.exception_line)
        else:
            for line in remote_trace.split('\n'):
                conn.logger.error(line)
        if stop_on_error:
            raise RuntimeError('Failed to execute command: %s' % ' '.join(command))
    if exit:
        conn.exit()
    return response
diff --git a/remoto/tests/__init__.py b/remoto/tests/__init__.py
new file mode 100644 (file)
index 0000000..e69de29
diff --git a/remoto/tests/backends/test_backends.py b/remoto/tests/backends/test_backends.py
new file mode 100644 (file)
index 0000000..e4d4e71
--- /dev/null
@@ -0,0 +1,171 @@
+import sys
+from mock import Mock, patch
+from py.test import raises
+from remoto import backends
+from remoto.tests import fake_module
+from remoto.tests.conftest import Capture, Factory
+
+
class FakeSocket(object):
    """Stand-in for the ``socket`` module exposing canned host names."""

    def __init__(self, gethostname, getfqdn=None):
        # mimic the module-level functions with zero-argument callables;
        # the FQDN defaults to the short hostname when not given
        self._hostname = gethostname
        self._fqdn = getfqdn or gethostname
        self.gethostname = lambda: self._hostname
        self.getfqdn = lambda: self._fqdn
+
+
class TestNeedsSsh(object):
    """Hostname/FQDN matching used to decide whether SSH is required."""

    def test_short_hostname_matches(self):
        fake_socket = FakeSocket('foo.example.org')
        assert backends.needs_ssh('foo', fake_socket) is False

    def test_long_hostname_matches(self):
        fake_socket = FakeSocket('foo.example.org')
        assert backends.needs_ssh('foo.example.org', fake_socket) is False

    def test_hostname_does_not_match(self):
        fake_socket = FakeSocket('foo')
        assert backends.needs_ssh('meh', fake_socket) is True

    def test_fqdn_hostname_matches_short_hostname(self):
        fake_socket = FakeSocket('foo', getfqdn='foo.example.org')
        assert backends.needs_ssh('foo.example.org', fake_socket) is False
+
+
class FakeGateway(object):
    """Minimal execnet gateway stub: accepts remote_exec and does nothing."""

    def remote_exec(self, module):
        return None
+
+
class TestLegacyRemoteModule(object):
    """Importing and executing functions from a legacy remote module."""

    def setup(self):
        self.conn = backends.BaseConnection('localhost', sudo=True, eager=False)
        self.conn.gateway = FakeGateway()

    def test_importing_it_sets_it_as_remote_module(self):
        self.conn.import_module(fake_module)
        assert self.conn.remote_module.module == fake_module

    def test_importing_it_returns_the_module_too(self):
        imported = self.conn.import_module(fake_module)
        assert imported.module == fake_module

    def test_execute_the_remote_module_send(self):
        channel_stub = Factory(send=Capture(), receive=Capture())
        self.conn.gateway.channel = self.conn.gateway
        imported = self.conn.import_module(fake_module)
        imported.channel = channel_stub
        imported.function('argument')
        assert channel_stub.send.calls[0]['args'][0] == "function('argument')"

    def test_execute_the_remote_module_receive(self):
        channel_stub = Factory(receive=Capture(return_values=[True]), send=Capture())
        self.conn.gateway.channel = self.conn.gateway
        imported = self.conn.import_module(fake_module)
        imported.channel = channel_stub
        assert imported.function('argument') is True
+
+
class TestLegacyModuleExecuteArgs(object):
    """Conversion of Python call arguments into remote source strings."""

    def setup(self):
        self.remote_module = backends.LegacyModuleExecute(FakeGateway(), None)

    def test_single_argument(self):
        converted = self.remote_module._convert_args(('foo',))
        assert converted == "'foo'"

    def test_more_than_one_argument(self):
        converted = self.remote_module._convert_args(('foo', 'bar', 1))
        assert converted == "'foo', 'bar', 1"

    def test_dictionary_as_argument(self):
        converted = self.remote_module._convert_args(({'some key': 1},))
        assert converted == "{'some key': 1}"
+
+
class TestLegacyModuleExecuteGetAttr(object):
    """Attribute access on a missing remote module attribute must raise."""

    def setup(self):
        self.remote_module = backends.LegacyModuleExecute(FakeGateway(), None)

    def test_raise_attribute_error(self):
        with raises(AttributeError) as err:
            self.remote_module.foo()
        expected = 'module None does not have attribute foo'
        assert err.value.args[0] == expected
+
+
class TestMakeConnectionString(object):
    """Connection-string assembly for popen/ssh, sudo, and interpreters."""

    def _conn(self, **kw):
        # every test targets localhost with a non-eager connection
        kw.setdefault('eager', False)
        return backends.BaseConnection('localhost', **kw)

    def test_makes_sudo_python_no_ssh(self):
        conn = self._conn(sudo=True, interpreter='python')
        result = conn._make_connection_string('localhost', _needs_ssh=lambda x: False)
        assert result == 'popen//python=sudo python'

    def test_makes_sudo_python_with_ssh(self):
        conn = self._conn(sudo=True, interpreter='python')
        result = conn._make_connection_string('localhost', _needs_ssh=lambda x: True)
        assert result == 'ssh=localhost//python=sudo python'

    def test_makes_sudo_python_with_ssh_options(self):
        conn = self._conn(
            sudo=True, interpreter='python', ssh_options='-F vagrant_ssh_config')
        result = conn._make_connection_string('localhost', _needs_ssh=lambda x: True)
        assert result == 'ssh=-F vagrant_ssh_config localhost//python=sudo python'

    def test_makes_python_no_ssh(self):
        conn = self._conn(sudo=False, interpreter='python')
        result = conn._make_connection_string('localhost', _needs_ssh=lambda x: False)
        assert result == 'popen//python=python'

    def test_makes_python_with_ssh(self):
        conn = self._conn(sudo=False, interpreter='python')
        result = conn._make_connection_string('localhost', _needs_ssh=lambda x: True)
        assert result == 'ssh=localhost//python=python'

    def test_makes_sudo_python_with_forced_sudo(self):
        conn = self._conn(sudo=True, interpreter='python')
        result = conn._make_connection_string(
            'localhost', _needs_ssh=lambda x: False, use_sudo=True)
        assert result == 'popen//python=sudo python'

    def test_does_not_make_sudo_python_with_forced_sudo(self):
        conn = self._conn(sudo=True, interpreter='python')
        result = conn._make_connection_string(
            'localhost', _needs_ssh=lambda x: False, use_sudo=False)
        assert result == 'popen//python=python'

    def test_detects_python3(self):
        with patch.object(sys, 'version_info', (3, 5, 1)):
            conn = self._conn(sudo=True)
            result = conn._make_connection_string('localhost', _needs_ssh=lambda x: False)
            assert result == 'popen//python=sudo python3'

    def test_detects_python2(self):
        with patch.object(sys, 'version_info', (2, 7, 11)):
            conn = self._conn(sudo=False)
            result = conn._make_connection_string('localhost', _needs_ssh=lambda x: True)
            assert result == 'ssh=localhost//python=python2'
+
+
class TestDetectSudo(object):
    """_detect_sudo should flag sudo only for non-root remote users."""

    def setup(self):
        # a single Mock plays execnet, gateway, and channel at once
        fake = Mock()
        fake.return_value = fake
        fake.makegateway.return_value = fake
        fake.remote_exec.return_value = fake
        self.execnet = fake

    def test_does_not_need_sudo(self):
        self.execnet.receive.return_value = 'root'
        conn = backends.BaseConnection('localhost', sudo=True, eager=False)
        assert conn._detect_sudo(_execnet=self.execnet) is False

    def test_does_need_sudo(self):
        self.execnet.receive.return_value = 'alfredo'
        conn = backends.BaseConnection('localhost', sudo=True, eager=False)
        assert conn._detect_sudo(_execnet=self.execnet) is True
diff --git a/remoto/tests/backends/test_kubernetes.py b/remoto/tests/backends/test_kubernetes.py
new file mode 100644 (file)
index 0000000..48952bd
--- /dev/null
@@ -0,0 +1,36 @@
+from remoto.backends import kubernetes
+
+
class TestCommandTemplate(object):
    """kubectl exec prefix built by KubernetesConnection.command_template."""

    def test_using_podname_only(self):
        connection = kubernetes.KubernetesConnection('rook-ceph-asdf')
        expected = ['kubectl', 'exec', '-i', 'rook-ceph-asdf', '--', '/bin/sh', '-c']
        assert connection.command_template() == expected

    def test_using_namespace(self):
        connection = kubernetes.KubernetesConnection('rook-ceph-asdf', 'rook-ceph')
        expected = [
            'kubectl', 'exec', '-i', '-n', 'rook-ceph',
            'rook-ceph-asdf', '--', '/bin/sh', '-c'
        ]
        assert connection.command_template() == expected
+
+
class TestCommand(object):
    """cmd() should append the flattened command to the kubectl prefix."""

    def test_podname_conn_appends(self):
        connection = kubernetes.KubernetesConnection('rook-ceph-asdf', 'rook-ceph')
        expected = [
            'kubectl', 'exec', '-i', '-n', 'rook-ceph',
            'rook-ceph-asdf', '--', '/bin/sh', '-c', 'ceph --version'
        ]
        assert connection.cmd(['ceph', '--version']) == expected

    def test_namespace_appends(self):
        connection = kubernetes.KubernetesConnection('rook-ceph-asdf', 'rook-ceph')
        expected = [
            'kubectl', 'exec', '-i', '-n', 'rook-ceph',
            'rook-ceph-asdf', '--', '/bin/sh', '-c', 'ceph health'
        ]
        assert connection.cmd(['ceph', 'health']) == expected
diff --git a/remoto/tests/conftest.py b/remoto/tests/conftest.py
new file mode 100644 (file)
index 0000000..3faf263
--- /dev/null
@@ -0,0 +1,25 @@
+import pytest
+
+
class Capture(object):
    """Callable that records every invocation for later inspection.

    ``return_values`` (consumed from the end, one per call) or
    ``always_returns`` control what each call returns; by default calls
    return ``None``.
    """

    def __init__(self, *a, **kw):
        self.a = a
        self.kw = kw
        self.calls = []
        self.return_values = kw.get('return_values', False)
        self.always_returns = kw.get('always_returns', False)

    def __call__(self, *args, **kwargs):
        self.calls.append(dict(args=args, kwargs=kwargs))
        if self.always_returns:
            return self.always_returns
        if self.return_values:
            # pop from the end, mirroring a LIFO of prepared return values
            return self.return_values.pop()
+
+
class Factory(object):
    """Build an object whose attributes are the given keyword arguments."""

    def __init__(self, **kw):
        self.__dict__.update(kw)
diff --git a/remoto/tests/fake_module.py b/remoto/tests/fake_module.py
new file mode 100644 (file)
index 0000000..0252def
--- /dev/null
@@ -0,0 +1,7 @@
+"""
+this is just a stub module to use to test the `import_module` functionality in
+remoto
+"""
+
def function(conn):
    """Accept a connection argument and always succeed."""
    return True
diff --git a/remoto/tests/test_connection.py b/remoto/tests/test_connection.py
new file mode 100644 (file)
index 0000000..cf9271d
--- /dev/null
@@ -0,0 +1,29 @@
+import pytest
+from remoto.connection import get
+
+
base_names = [
    'ssh', 'oc', 'openshift', 'kubernetes', 'k8s', 'local', 'popen',
    'localhost', 'docker', 'podman',
]

# the connection lookup must tolerate capitalization and surrounding spaces
capitalized_names = [name.capitalize() for name in base_names]
spaced_names = [" %s " % name for name in base_names]

valid_names = base_names + capitalized_names + spaced_names
+
+
class TestGet(object):
    """connection.get lookups with valid, bogus, and custom fallback names."""

    @pytest.mark.parametrize('name', valid_names)
    def test_valid_names(self, name):
        assert get(name).__name__.endswith('Connection')

    def test_fallback(self):
        assert get('non-existent').__name__ == 'BaseConnection'

    def test_custom_fallback(self):
        assert get('non-existent', 'openshift').__name__ == 'OpenshiftConnection'
diff --git a/remoto/tests/test_log.py b/remoto/tests/test_log.py
new file mode 100644 (file)
index 0000000..f6fcf78
--- /dev/null
@@ -0,0 +1,76 @@
+from pytest import raises
+from remoto import log
+from remoto.exc import TimeoutError
+from mock import Mock
+
+
class TestReporting(object):
    """remoto.log.reporting: dispatching, newline stripping, and errors."""

    def _report(self, *received):
        # run reporting() against a mocked connection/result pair and hand
        # back the connection so callers can inspect the logger
        conn = Mock()
        result = Mock()
        result.receive.side_effect = list(received)
        log.reporting(conn, result)
        return conn

    def test_reporting_when_channel_is_empty(self):
        self._report(EOFError)

    def test_write_debug_statements(self):
        conn = self._report({'debug': 'a debug message'}, EOFError)
        assert conn.logger.debug.called is True
        assert conn.logger.info.called is False

    def test_write_info_statements(self):
        conn = self._report({'error': 'an error message'}, EOFError)
        assert conn.logger.debug.called is False
        assert conn.logger.error.called is True

    def test_strip_new_lines(self):
        conn = self._report({'error': 'an error message\n\n'}, EOFError)
        assert conn.logger.error.call_args[0][0] == 'an error message'

    def test_strip_new_line(self):
        conn = self._report({'error': 'an error message\n'}, EOFError)
        assert conn.logger.error.call_args[0][0] == 'an error message'

    def test_strip_new_line_and_carriage_return(self):
        conn = self._report({'error': 'an error message\r\n'}, EOFError)
        assert conn.logger.error.call_args[0][0] == 'an error message'

    def test_strip_return(self):
        conn = self._report({'error': 'an error message\r'}, EOFError)
        assert conn.logger.error.call_args[0][0] == 'an error message'

    def test_timeout_error(self):
        conn = self._report(TimeoutError)
        assert 'No data was received after ' in conn.logger.warning.call_args[0][0]

    def test_raises_other_errors(self):
        conn = Mock()
        result = Mock()
        result.receive.side_effect = OSError
        with raises(OSError):
            log.reporting(conn, result)
diff --git a/remoto/tests/test_process.py b/remoto/tests/test_process.py
new file mode 100644 (file)
index 0000000..5509705
--- /dev/null
@@ -0,0 +1,40 @@
+from mock import Mock
+from remoto import process
+
+
class TestExtendPath(object):
    """process.extend_env must extend, not replace, the remote $PATH."""

    def setup(self):
        self.path = '/usr/local/bin:/bin:/usr/bin:/usr/local/sbin:/usr/sbin:/sbin'

    def _conn(self, remote_env):
        # a Mock that answers gateway.remote_exec(...).receive() with the
        # given remote environment dictionary
        fake_conn = Mock()
        fake_conn.gateway.remote_exec.return_value = fake_conn
        fake_conn.receive.return_value = remote_env
        return fake_conn

    def test_no_environment_sets_path(self):
        result = process.extend_env(self._conn({}), {})
        assert result['env']['PATH'] == self.path

    def test_custom_path_does_not_get_overridden(self):
        result = process.extend_env(self._conn({'PATH': '/home/alfredo/bin'}), {})
        new_path = result['env']['PATH']
        assert new_path.endswith(self.path)
        assert '/home/alfredo/bin' in new_path

    def test_custom_env_var_extends_existing_env(self):
        result = process.extend_env(
            self._conn({'PATH': '/home/alfredo/bin'}),
            {'extend_env': {'CEPH_VOLUME_DEBUG': '1'}})
        assert result['env']['PATH'].endswith(self.path)
        assert result['env']['CEPH_VOLUME_DEBUG'] == '1'

    def test_extend_env_gets_removed(self):
        result = process.extend_env(
            self._conn({'PATH': '/home/alfredo/bin'}),
            {'extend_env': {'CEPH_VOLUME_DEBUG': '1'}})
        assert result.get('extend_env') is None
diff --git a/remoto/tests/test_rsync.py b/remoto/tests/test_rsync.py
new file mode 100644 (file)
index 0000000..dc5b71d
--- /dev/null
@@ -0,0 +1,31 @@
+from mock import Mock, patch
+from remoto import file_sync
+
+
class TestRsync(object):
    """file_sync.rsync should register one rsync target per host."""

    def make_fake_sync(self):
        sync_stub = Mock()
        sync_stub.return_value = sync_stub
        sync_stub.targets = []
        sync_stub.add_target = lambda gw, destination: sync_stub.targets.append(destination)
        return sync_stub

    @patch('remoto.file_sync.Connection', Mock())
    def test_rsync_fallback_to_host_list(self):
        sync_stub = self.make_fake_sync()
        with patch('remoto.file_sync._RSync', sync_stub):
            file_sync.rsync('host1', '/source', '/destination')

        # a bare string is treated as a single-host list: one target
        assert len(sync_stub.targets) == 1

    @patch('remoto.file_sync.Connection', Mock())
    def test_rsync_use_host_list(self):
        sync_stub = self.make_fake_sync()
        with patch('remoto.file_sync._RSync', sync_stub):
            file_sync.rsync(['host1', 'host2'], '/source', '/destination')

        # two hosts: one target each
        assert len(sync_stub.targets) == 2
diff --git a/remoto/tests/test_util.py b/remoto/tests/test_util.py
new file mode 100644 (file)
index 0000000..0556156
--- /dev/null
@@ -0,0 +1,34 @@
+from remoto import util
+
+
class TestAdminCommand(object):
    """util.admin_command sudo-prefixing behavior."""

    def test_prepend_list_if_sudo(self):
        assert util.admin_command(True, ['ls']) == ['sudo', 'ls']

    def test_skip_prepend_if_not_sudo(self):
        assert util.admin_command(False, ['ls']) == ['ls']

    def test_command_that_is_not_a_list(self):
        assert util.admin_command(True, 'ls') == ['sudo', 'ls']
+
+
class TestRemoteError(object):
    """util.RemoteError parsing of a remote traceback string."""

    def setup(self):
        self.traceback = '\n'.join([
            'Traceback (most recent call last):',
            '  File "<string>", line 1, in <module>',
            "NameError: name 'foo' is not defined",
        ])

    def test_exception_name(self):
        assert util.RemoteError(self.traceback).exception_name == 'NameError'

    def test_exception_line(self):
        expected = "NameError: name 'foo' is not defined"
        assert util.RemoteError(self.traceback).exception_line == expected
diff --git a/remoto/util.py b/remoto/util.py
new file mode 100644 (file)
index 0000000..d3151f0
--- /dev/null
@@ -0,0 +1,32 @@
+
+
def admin_command(sudo, command):
    """
    If sudo is needed, make sure the command is prepended
    correctly, otherwise return the command as it came.

    :param sudo: A boolean representing the intention of having a sudo command
                 (or not)
    :param command: A list of the actual command to execute with Popen.
    """
    if not sudo:
        return command
    if not isinstance(command, list):
        command = [command]
    return ['sudo'] + list(command)
+
+
class RemoteError(object):
    """
    Parse a remote traceback string, exposing the name of the exception
    (``exception_name``) and the full line that raised it
    (``exception_line``).
    """

    def __init__(self, traceback):
        self.orig_traceback = traceback
        self.exception_line = ''
        self.exception_name = self.get_exception_name()

    def get_exception_name(self):
        # scan from the bottom: the raising line is the last non-empty one
        for line in reversed(self.orig_traceback.split('\n')):
            if not line:
                continue
            for token in line.split():
                if token.endswith(':'):  # exception!
                    self.exception_line = line
                    return token.strip().strip(':')
diff --git a/setup.py b/setup.py
new file mode 100644 (file)
index 0000000..064d763
--- /dev/null
+++ b/setup.py
@@ -0,0 +1,37 @@
import re

from setuptools import setup, find_packages


# pull __version__ (and friends) out of the package without importing it
module_file = open("remoto/__init__.py").read()
metadata = dict(re.findall(r"__([a-z]+)__\s*=\s*['\"]([^'\"]*)['\"]", module_file))
long_description = open('README.rst').read()
install_requires = []


setup(
    name='remoto',
    description='Execute remote commands or processes.',
    packages=find_packages(),
    author='Alfredo Deza',
    author_email='contact@deza.pe',
    version=metadata['version'],
    url='http://github.com/alfredodeza/remoto',
    license="MIT",
    zip_safe=False,
    keywords="remote, commands, unix, ssh, socket, execute, terminal",
    install_requires=[
        'execnet',
    ] + install_requires,
    long_description=long_description,
    classifiers=[
        'Development Status :: 4 - Beta',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Topic :: Utilities',
        'Operating System :: MacOS :: MacOS X',
        'Operating System :: POSIX',
        'Programming Language :: Python :: 2.6',
        'Programming Language :: Python :: 2.7',
        'Programming Language :: Python :: 3.3',
    ]
)
diff --git a/tox.ini b/tox.ini
new file mode 100644 (file)
index 0000000..1f71940
--- /dev/null
+++ b/tox.ini
@@ -0,0 +1,8 @@
+[tox]
+envlist =  py26, py27, py33
+
+[testenv]
+deps =
+    pytest
+    mock
+commands = py.test -v remoto/tests