"""Common utilities for the various process_* implementations.

This file is only meant to be imported by the platform-specific implementations
of subprocess utilities, and it contains tools that are common to all of them.
"""

#-----------------------------------------------------------------------------
#  Copyright (C) 2010-2011  The IPython Development Team
#
#  Distributed under the terms of the BSD License.  The full license is in
#  the file COPYING, distributed as part of this software.
#-----------------------------------------------------------------------------

#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
import subprocess
import shlex
import sys
import os

from IPython.utils import py3compat

#-----------------------------------------------------------------------------
# Function definitions
#-----------------------------------------------------------------------------

def read_no_interrupt(p):
    """Read from a pipe ignoring EINTR errors.

    This is necessary because, when reading from pipes with GUI event loops
    running in the background, interrupted system calls (EINTR) are often
    raised and would otherwise stop the command from completing."""
    import errno

    try:
        return p.read()
    except IOError as err:
        if err.errno != errno.EINTR:
            raise

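# Illustrative sketch: a callback handed to process_handler() below can use
# read_no_interrupt() to drain the child's stdout without being aborted by
# stray signals, e.g.
#
#   data = read_no_interrupt(p.stdout)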

def process_handler(cmd, callback, stderr=subprocess.PIPE):
    """Open a command in a shell subprocess and execute a callback.

    This function provides common scaffolding for creating subprocess.Popen()
    calls.  It creates a Popen object and then calls the callback with it.

    Parameters
    ----------
    cmd : str or list
      A command to be executed by the system, using :class:`subprocess.Popen`.
      If a string is passed, it will be run in the system shell. If a list is
      passed, it will be used directly as arguments.

    callback : callable
      A one-argument function that will be called with the Popen object.

    stderr : file descriptor number, optional
      By default this is set to ``subprocess.PIPE``, but you can also pass the
      value ``subprocess.STDOUT`` to force the subprocess' stderr to go into
      the same file descriptor as its stdout.  This is useful to read stdout
      and stderr combined in the order they are generated.

    Returns
    -------
    The return value of the provided callback is returned.
    """
    sys.stdout.flush()
    sys.stderr.flush()
    # On win32, close_fds can't be true when using pipes for stdin/out/err
    close_fds = sys.platform != 'win32'
    # Determine if cmd should be run with the system shell.
    shell = isinstance(cmd, py3compat.string_types)
    # On POSIX systems, run shell commands with the user's preferred shell.
    executable = None
    if shell and os.name == 'posix' and 'SHELL' in os.environ:
        executable = os.environ['SHELL']
    p = subprocess.Popen(cmd, shell=shell,
                         executable=executable,
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE,
                         stderr=stderr,
                         close_fds=close_fds)

    try:
        out = callback(p)
    except KeyboardInterrupt:
        print('^C')
        sys.stdout.flush()
        sys.stderr.flush()
        out = None
    finally:
        # Make really sure that we don't leave processes behind, in case the
        # callback above raised an exception.  If the subprocess has not
        # finished yet (its returncode is still None), try to terminate it.
        if p.returncode is None:
            try:
                p.terminate()
                p.poll()
            except OSError:
                pass
        # One last try on our way out
        if p.returncode is None:
            try:
                p.kill()
            except OSError:
                pass

    return out

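# Illustrative usage sketch for process_handler(): the callback receives the
# Popen object and decides how to consume its streams.  For example, a
# hypothetical helper that only captures stdout could be written as:
#
#   def capture_stdout(cmd):
#       return process_handler(cmd, lambda p: p.communicate()[0])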

def getoutput(cmd):
    """Run a command and return its stdout/stderr as a string.

    Parameters
    ----------
    cmd : str or list
      A command to be executed in the system shell.

    Returns
    -------
    output : str
      A string containing the combination of stdout and stderr from the
      subprocess, in whatever order the subprocess originally wrote to its
      file descriptors (so the order of the information in this string is
      the correct order as would be seen if running the command in a
      terminal).
    """
    out = process_handler(cmd, lambda p: p.communicate()[0], subprocess.STDOUT)
    if out is None:
        return ''
    return py3compat.bytes_to_str(out)

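# Example usage of getoutput() (illustrative; the exact output depends on the
# platform and shell):
#
#   getoutput('echo hello')        # -> 'hello\n' on a typical POSIX shell
#   getoutput('echo oops 1>&2')    # stderr is folded into the returned string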

def getoutputerror(cmd):
    """Return (standard output, standard error) of executing cmd in a shell.

    Accepts the same arguments as os.system().

    Parameters
    ----------
    cmd : str or list
      A command to be executed in the system shell.

    Returns
    -------
    stdout : str
    stderr : str
    """
    return get_output_error_code(cmd)[:2]

def get_output_error_code(cmd):
    """Return (standard output, standard error, return code) of executing cmd
    in a shell.

    Accepts the same arguments as os.system().

    Parameters
    ----------
    cmd : str or list
      A command to be executed in the system shell.

    Returns
    -------
    stdout : str
    stderr : str
    returncode : int
    """

    out_err_p = process_handler(cmd, lambda p: (p.communicate(), p))
    if out_err_p is None:
        # The callback was interrupted before completing (see process_handler),
        # so no output or return code is available.
        return '', '', None
    (out, err), p = out_err_p
    return py3compat.bytes_to_str(out), py3compat.bytes_to_str(err), p.returncode

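# Example usage of get_output_error_code() (illustrative; results depend on
# the platform and shell):
#
#   out, err, rc = get_output_error_code('echo hello')
#   # -> out == 'hello\n', err == '', rc == 0 on a typical POSIX shell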
def arg_split(s, posix=False, strict=True):
    """Split a command line's arguments in a shell-like manner.

    This is a modified version of the standard library's shlex.split()
    function, but with a default of posix=False for splitting, so that quotes
    in inputs are respected.

    If strict=False, any error that shlex.split would raise instead results in
    the unparsed remainder becoming the last element of the returned list.
    This is because we sometimes use arg_split to parse things other than
    command-line args.
    """

    # Unfortunately, python's shlex module is buggy with unicode input:
    # http://bugs.python.org/issue1170
    # At least encoding the input when it's unicode seems to help, but there
    # may be more problems lurking.  Apparently this is fixed in python3.
    is_unicode = False
    if (not py3compat.PY3) and isinstance(s, unicode):
        is_unicode = True
        s = s.encode('utf-8')
    lex = shlex.shlex(s, posix=posix)
    lex.whitespace_split = True
    # Extract tokens, ensuring that things like leaving open quotes
    # does not cause this to raise.  This is important, because we
    # sometimes pass Python source through this (e.g. %timeit f(" ")),
    # and it shouldn't raise an exception.
    # It may be a bad idea to parse things that are not command-line args
    # through this function, but we do, so let's be safe about it.
    lex.commenters = ''  # fix for GH-1269: don't treat '#' as a comment char
    tokens = []
    while True:
        try:
            tokens.append(next(lex))
        except StopIteration:
            break
        except ValueError:
            if strict:
                raise
            # couldn't parse, get remaining blob as last token
            tokens.append(lex.token)
            break
    
    if is_unicode:
        # Convert the tokens back to unicode.
        tokens = [x.decode('utf-8') for x in tokens]
    return tokens
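

# Small self-contained demonstration of arg_split(); run this module directly
# to see it.  The expected results in the comments assume the default
# non-POSIX splitting described in the docstring above.
if __name__ == '__main__':
    # With posix=False (the default), quotes are preserved in the tokens.
    print(arg_split('ls -l "my dir"'))              # ['ls', '-l', '"my dir"']
    # With posix=True, quotes are stripped, as shlex.split() would do.
    print(arg_split('ls -l "my dir"', posix=True))  # ['ls', '-l', 'my dir']
    # With strict=False, an unterminated quote does not raise; the unparsed
    # remainder is returned as the last token instead.
    print(arg_split('echo "unterminated', strict=False))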