diff --git a/.cache/v/cache/lastfailed b/.cache/v/cache/lastfailed
new file mode 100644
index 0000000..9e26dfe
--- /dev/null
+++ b/.cache/v/cache/lastfailed
@@ -0,0 +1 @@
+{}
\ No newline at end of file
diff --git a/.travis.yml b/.travis.yml
new file mode 100644
index 0000000..673c4f8
--- /dev/null
+++ b/.travis.yml
@@ -0,0 +1,9 @@
+language: python
+python:
+- "3.5"
+#install:
+#  - pip install .
+
+script: pytest
+
+
diff --git a/tests/test_anagram.py b/tests/test_anagram.py
new file mode 100644
index 0000000..ff5e4b3
--- /dev/null
+++ b/tests/test_anagram.py
@@ -0,0 +1,10 @@
+import sys
+sys.path.append('.')
+import mylib
+
+
+def test_go():
+    assert mylib.is_anagram("abc", "cba")
+
+def test_nogo():
+    assert not mylib.is_anagram("fbc", "cba")
diff --git a/venv/bin/activate b/venv/bin/activate
new file mode 100644
index 0000000..42eba12
--- /dev/null
+++ b/venv/bin/activate
@@ -0,0 +1,76 @@
+# This file must be used with "source bin/activate" *from bash*
+# you cannot run it directly
+
+deactivate () {
+    # reset old environment variables
+    if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
+        PATH="$_OLD_VIRTUAL_PATH"
+        export PATH
+        unset _OLD_VIRTUAL_PATH
+    fi
+    if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then
+        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
+        export PYTHONHOME
+        unset _OLD_VIRTUAL_PYTHONHOME
+    fi
+
+    # This should detect bash and zsh, which have a hash command that must
+    # be called to get it to forget past commands.  Without forgetting
+    # past commands the $PATH changes we made may not be respected
+    if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+        hash -r
+    fi
+
+    if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
+        PS1="$_OLD_VIRTUAL_PS1"
+        export PS1
+        unset _OLD_VIRTUAL_PS1
+    fi
+
+    unset VIRTUAL_ENV
+    if [ ! "$1" = "nondestructive" ] ; then
+    # Self destruct!
+        unset -f deactivate
+    fi
+}
+
+# unset irrelevant variables
+deactivate nondestructive
+
+VIRTUAL_ENV="/home/yosi/co-learning-python-without-test/venv"
+export VIRTUAL_ENV
+
+_OLD_VIRTUAL_PATH="$PATH"
+PATH="$VIRTUAL_ENV/bin:$PATH"
+export PATH
+
+# unset PYTHONHOME if set
+# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
+# could use `if (set -u; : $PYTHONHOME) ;` in bash
+if [ -n "$PYTHONHOME" ] ; then
+    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
+    unset PYTHONHOME
+fi
+
+if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then
+    _OLD_VIRTUAL_PS1="$PS1"
+    if [ "x(venv) " != x ] ; then
+        PS1="(venv) $PS1"
+    else
+    if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
+        # special case for Aspen magic directories
+        # see http://www.zetadev.com/software/aspen/
+        PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
+    else
+        PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
+    fi
+    fi
+    export PS1
+fi
+
+# This should detect bash and zsh, which have a hash command that must
+# be called to get it to forget past commands.  Without forgetting
+# past commands the $PATH changes we made may not be respected
+if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
+    hash -r
+fi
diff --git a/venv/bin/activate.csh b/venv/bin/activate.csh
new file mode 100644
index 0000000..229729c
--- /dev/null
+++ b/venv/bin/activate.csh
@@ -0,0 +1,37 @@
+# This file must be used with "source bin/activate.csh" *from csh*.
+# You cannot run it directly.
+# Created by Davide Di Blasi <davidedb@gmail.com>.
+# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>
+
+alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'
+
+# Unset irrelevant variables.
+deactivate nondestructive
+
+setenv VIRTUAL_ENV "/home/yosi/co-learning-python-without-test/venv"
+
+set _OLD_VIRTUAL_PATH="$PATH"
+setenv PATH "$VIRTUAL_ENV/bin:$PATH"
+
+
+set _OLD_VIRTUAL_PROMPT="$prompt"
+
+if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
+    if ("venv" != "") then
+        set env_name = "venv"
+    else
+        if (`basename "$VIRTUAL_ENV"` == "__") then
+            # special case for Aspen magic directories
+            # see http://www.zetadev.com/software/aspen/
+            set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
+        else
+            set env_name = `basename "$VIRTUAL_ENV"`
+        endif
+    endif
+    set prompt = "[$env_name] $prompt"
+    unset env_name
+endif
+
+alias pydoc python -m pydoc
+
+rehash
diff --git a/venv/bin/activate.fish b/venv/bin/activate.fish
new file mode 100644
index 0000000..47ddc52
--- /dev/null
+++ b/venv/bin/activate.fish
@@ -0,0 +1,74 @@
+# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
+# you cannot run it directly
+
+function deactivate -d "Exit virtualenv and return to normal shell environment"
+    # reset old environment variables
+    if test -n "$_OLD_VIRTUAL_PATH"
+        set -gx PATH $_OLD_VIRTUAL_PATH
+        set -e _OLD_VIRTUAL_PATH
+    end
+    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
+        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
+        set -e _OLD_VIRTUAL_PYTHONHOME
+    end
+
+    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
+        functions -e fish_prompt
+        set -e _OLD_FISH_PROMPT_OVERRIDE
+        . ( begin
+                printf "function fish_prompt\n\t#"
+                functions _old_fish_prompt
+            end | psub )
+        functions -e _old_fish_prompt
+    end
+
+    set -e VIRTUAL_ENV
+    if test "$argv[1]" != "nondestructive"
+        # Self destruct!
+        functions -e deactivate
+    end
+end
+
+# unset irrelevant variables
+deactivate nondestructive
+
+set -gx VIRTUAL_ENV "/home/yosi/co-learning-python-without-test/venv"
+
+set -gx _OLD_VIRTUAL_PATH $PATH
+set -gx PATH "$VIRTUAL_ENV/bin" $PATH
+
+# unset PYTHONHOME if set
+if set -q PYTHONHOME
+    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
+    set -e PYTHONHOME
+end
+
+if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
+    # fish uses a function instead of an env var to generate the prompt.
+
+    # save the current fish_prompt function as the function _old_fish_prompt
+    . ( begin
+            printf "function _old_fish_prompt\n\t#"
+            functions fish_prompt
+        end | psub )
+
+    # with the original prompt function renamed, we can override with our own.
+    function fish_prompt
+        # Prompt override?
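+        # (Editor's illustrative note, not part of the upstream template:
+        # the substituted prompt string "(venv) " is non-empty, so this
+        # branch normally wins and an old prompt such as "~>" is shown as
+        # "(venv) ~>" once activate.fish has been sourced.)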
+ if test -n "$(venv) " + printf "%s%s%s" "$(venv) " (set_color normal) (_old_fish_prompt) + return + end + # ...Otherwise, prepend env + set -l _checkbase (basename "$VIRTUAL_ENV") + if test $_checkbase = "__" + # special case for Aspen magic directories + # see http://www.zetadev.com/software/aspen/ + printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt) + else + printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt) + end + end + + set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV" +end diff --git a/venv/bin/easy_install b/venv/bin/easy_install new file mode 100755 index 0000000..8b4d1aa --- /dev/null +++ b/venv/bin/easy_install @@ -0,0 +1,11 @@ +#!/home/yosi/co-learning-python-without-test/venv/bin/python3.5 + +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/easy_install-3.5 b/venv/bin/easy_install-3.5 new file mode 100755 index 0000000..8b4d1aa --- /dev/null +++ b/venv/bin/easy_install-3.5 @@ -0,0 +1,11 @@ +#!/home/yosi/co-learning-python-without-test/venv/bin/python3.5 + +# -*- coding: utf-8 -*- +import re +import sys + +from setuptools.command.easy_install import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip b/venv/bin/pip new file mode 100755 index 0000000..223bea9 --- /dev/null +++ b/venv/bin/pip @@ -0,0 +1,11 @@ +#!/home/yosi/co-learning-python-without-test/venv/bin/python3.5 + +# -*- coding: utf-8 -*- +import re +import sys + +from pip import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3 b/venv/bin/pip3 new file mode 100755 index 0000000..223bea9 --- /dev/null +++ b/venv/bin/pip3 @@ -0,0 +1,11 @@ +#!/home/yosi/co-learning-python-without-test/venv/bin/python3.5 + +# -*- coding: utf-8 -*- +import re +import sys + +from pip import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pip3.5 b/venv/bin/pip3.5 new file mode 100755 index 0000000..223bea9 --- /dev/null +++ b/venv/bin/pip3.5 @@ -0,0 +1,11 @@ +#!/home/yosi/co-learning-python-without-test/venv/bin/python3.5 + +# -*- coding: utf-8 -*- +import re +import sys + +from pip import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/py.test b/venv/bin/py.test new file mode 100755 index 0000000..eb8a5c5 --- /dev/null +++ b/venv/bin/py.test @@ -0,0 +1,11 @@ +#!/home/yosi/co-learning-python-without-test/venv/bin/python3.5 + +# -*- coding: utf-8 -*- +import re +import sys + +from pytest import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/pytest b/venv/bin/pytest new file mode 100755 index 0000000..eb8a5c5 --- /dev/null +++ b/venv/bin/pytest @@ -0,0 +1,11 @@ +#!/home/yosi/co-learning-python-without-test/venv/bin/python3.5 + +# -*- coding: utf-8 -*- +import re +import sys + +from pytest import main + +if __name__ == '__main__': + sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0]) + sys.exit(main()) diff --git a/venv/bin/python b/venv/bin/python 
new file mode 120000 index 0000000..f549cea --- /dev/null +++ b/venv/bin/python @@ -0,0 +1 @@ +python3.5 \ No newline at end of file diff --git a/venv/bin/python3 b/venv/bin/python3 new file mode 120000 index 0000000..f549cea --- /dev/null +++ b/venv/bin/python3 @@ -0,0 +1 @@ +python3.5 \ No newline at end of file diff --git a/venv/bin/python3.5 b/venv/bin/python3.5 new file mode 120000 index 0000000..baab9cb --- /dev/null +++ b/venv/bin/python3.5 @@ -0,0 +1 @@ +/usr/bin/python3.5 \ No newline at end of file diff --git a/venv/lib/python3.5/site-packages/_pytest/__init__.py b/venv/lib/python3.5/site-packages/_pytest/__init__.py new file mode 100644 index 0000000..6e41f05 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/__init__.py @@ -0,0 +1,8 @@ +__all__ = ['__version__'] + +try: + from ._version import version as __version__ +except ImportError: + # broken installation, we don't even try + # unknown only works because we do poor mans version compare + __version__ = 'unknown' diff --git a/venv/lib/python3.5/site-packages/_pytest/_argcomplete.py b/venv/lib/python3.5/site-packages/_pytest/_argcomplete.py new file mode 100644 index 0000000..8c93e4c --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/_argcomplete.py @@ -0,0 +1,102 @@ + +"""allow bash-completion for argparse with argcomplete if installed +needs argcomplete>=0.5.6 for python 3.2/3.3 (older versions fail +to find the magic string, so _ARGCOMPLETE env. var is never set, and +this does not need special code. + +argcomplete does not support python 2.5 (although the changes for that +are minor). + +Function try_argcomplete(parser) should be called directly before +the call to ArgumentParser.parse_args(). + +The filescompleter is what you normally would use on the positional +arguments specification, in order to get "dirname/" after "dirn" +instead of the default "dirname ": + + optparser.add_argument(Config._file_or_dir, nargs='*' + ).completer=filescompleter + +Other, application specific, completers should go in the file +doing the add_argument calls as they need to be specified as .completer +attributes as well. (If argcomplete is not installed, the function the +attribute points to will not be used). + +SPEEDUP +======= +The generic argcomplete script for bash-completion +(/etc/bash_completion.d/python-argcomplete.sh ) +uses a python program to determine startup script generated by pip. +You can speed up completion somewhat by changing this script to include + # PYTHON_ARGCOMPLETE_OK +so the the python-argcomplete-check-easy-install-script does not +need to be called to find the entry point of the code and see if that is +marked with PYTHON_ARGCOMPLETE_OK + +INSTALL/DEBUGGING +================= +To include this support in another application that has setup.py generated +scripts: +- add the line: + # PYTHON_ARGCOMPLETE_OK + near the top of the main python entry point +- include in the file calling parse_args(): + from _argcomplete import try_argcomplete, filescompleter + , call try_argcomplete just before parse_args(), and optionally add + filescompleter to the positional arguments' add_argument() +If things do not work right away: +- switch on argcomplete debugging with (also helpful when doing custom + completers): + export _ARC_DEBUG=1 +- run: + python-argcomplete-check-easy-install-script $(which appname) + echo $? 
+ will echo 0 if the magic line has been found, 1 if not +- sometimes it helps to find early on errors using: + _ARGCOMPLETE=1 _ARC_DEBUG=1 appname + which should throw a KeyError: 'COMPLINE' (which is properly set by the + global argcomplete script). +""" +from __future__ import absolute_import, division, print_function +import sys +import os +from glob import glob + +class FastFilesCompleter: + 'Fast file completer class' + def __init__(self, directories=True): + self.directories = directories + + def __call__(self, prefix, **kwargs): + """only called on non option completions""" + if os.path.sep in prefix[1:]: # + prefix_dir = len(os.path.dirname(prefix) + os.path.sep) + else: + prefix_dir = 0 + completion = [] + globbed = [] + if '*' not in prefix and '?' not in prefix: + if prefix[-1] == os.path.sep: # we are on unix, otherwise no bash + globbed.extend(glob(prefix + '.*')) + prefix += '*' + globbed.extend(glob(prefix)) + for x in sorted(globbed): + if os.path.isdir(x): + x += '/' + # append stripping the prefix (like bash, not like compgen) + completion.append(x[prefix_dir:]) + return completion + + +if os.environ.get('_ARGCOMPLETE'): + try: + import argcomplete.completers + except ImportError: + sys.exit(-1) + filescompleter = FastFilesCompleter() + + def try_argcomplete(parser): + argcomplete.autocomplete(parser) +else: + def try_argcomplete(parser): pass + filescompleter = None diff --git a/venv/lib/python3.5/site-packages/_pytest/_code/__init__.py b/venv/lib/python3.5/site-packages/_pytest/_code/__init__.py new file mode 100644 index 0000000..815c13b --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/_code/__init__.py @@ -0,0 +1,10 @@ +""" python inspection/code generation API """ +from __future__ import absolute_import, division, print_function +from .code import Code # noqa +from .code import ExceptionInfo # noqa +from .code import Frame # noqa +from .code import Traceback # noqa +from .code import getrawcode # noqa +from .source import Source # noqa +from .source import compile_ as compile # noqa +from .source import getfslineno # noqa diff --git a/venv/lib/python3.5/site-packages/_pytest/_code/_py2traceback.py b/venv/lib/python3.5/site-packages/_pytest/_code/_py2traceback.py new file mode 100644 index 0000000..d45ee01 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/_code/_py2traceback.py @@ -0,0 +1,82 @@ +# copied from python-2.7.3's traceback.py +# CHANGES: +# - some_str is replaced, trying to create unicode strings +# +from __future__ import absolute_import, division, print_function +import types + +def format_exception_only(etype, value): + """Format the exception part of a traceback. + + The arguments are the exception type and value such as given by + sys.last_type and sys.last_value. The return value is a list of + strings, each ending in a newline. + + Normally, the list contains a single string; however, for + SyntaxError exceptions, it contains several lines that (when + printed) display detailed information about where the syntax + error occurred. + + The message indicating which exception occurred is always the last + string in the list. + + """ + + # An instance should not have a meaningful value parameter, but + # sometimes does, particularly for string exceptions, such as + # >>> raise string1, string2 # deprecated + # + # Clear these out first because issubtype(string1, SyntaxError) + # would throw another exception and mask the original problem. 
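+    # Illustrative sketch (editor's note, not upstream code): for an ordinary
+    # exception, format_exception_only(ValueError, ValueError('boom')) returns
+    # ['ValueError: boom\n']; for a SyntaxError the list instead holds several
+    # lines that quote the offending source line and point at it with a caret.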
+    if (isinstance(etype, BaseException) or
+        isinstance(etype, types.InstanceType) or
+        etype is None or type(etype) is str):
+        return [_format_final_exc_line(etype, value)]
+
+    stype = etype.__name__
+
+    if not issubclass(etype, SyntaxError):
+        return [_format_final_exc_line(stype, value)]
+
+    # It was a syntax error; show exactly where the problem was found.
+    lines = []
+    try:
+        msg, (filename, lineno, offset, badline) = value.args
+    except Exception:
+        pass
+    else:
+        filename = filename or "<string>"
+        lines.append('  File "%s", line %d\n' % (filename, lineno))
+        if badline is not None:
+            if isinstance(badline, bytes):  # python 2 only
+                badline = badline.decode('utf-8', 'replace')
+            lines.append(u'    %s\n' % badline.strip())
+            if offset is not None:
+                caretspace = badline.rstrip('\n')[:offset].lstrip()
+                # non-space whitespace (like tabs) must be kept for alignment
+                caretspace = ((c.isspace() and c or ' ') for c in caretspace)
+                # only three spaces to account for offset1 == pos 0
+                lines.append('   %s^\n' % ''.join(caretspace))
+        value = msg
+
+    lines.append(_format_final_exc_line(stype, value))
+    return lines
+
+def _format_final_exc_line(etype, value):
+    """Return the single final line -- normal case for format_exception_only"""
+    valuestr = _some_str(value)
+    if value is None or not valuestr:
+        line = "%s\n" % etype
+    else:
+        line = "%s: %s\n" % (etype, valuestr)
+    return line
+
+def _some_str(value):
+    try:
+        return unicode(value)
+    except Exception:
+        try:
+            return str(value)
+        except Exception:
+            pass
+    return '<unprintable %s object>' % type(value).__name__
diff --git a/venv/lib/python3.5/site-packages/_pytest/_code/code.py b/venv/lib/python3.5/site-packages/_pytest/_code/code.py
new file mode 100644
index 0000000..5b7cc41
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/_pytest/_code/code.py
@@ -0,0 +1,895 @@
+from __future__ import absolute_import, division, print_function
+import sys
+from inspect import CO_VARARGS, CO_VARKEYWORDS
+import re
+from weakref import ref
+from _pytest.compat import _PY2, _PY3, PY35, safe_str
+
+import py
+builtin_repr = repr
+
+reprlib = py.builtin._tryimport('repr', 'reprlib')
+
+if _PY3:
+    from traceback import format_exception_only
+else:
+    from ._py2traceback import format_exception_only
+
+
+class Code(object):
+    """ wrapper around Python code objects """
+    def __init__(self, rawcode):
+        if not hasattr(rawcode, "co_filename"):
+            rawcode = getrawcode(rawcode)
+        try:
+            self.filename = rawcode.co_filename
+            self.firstlineno = rawcode.co_firstlineno - 1
+            self.name = rawcode.co_name
+        except AttributeError:
+            raise TypeError("not a code object: %r" % (rawcode,))
+        self.raw = rawcode
+
+    def __eq__(self, other):
+        return self.raw == other.raw
+
+    __hash__ = None
+
+    def __ne__(self, other):
+        return not self == other
+
+    @property
+    def path(self):
+        """ return a path object pointing to source code (note that it
+        might not point to an actually existing file). """
+        try:
+            p = py.path.local(self.raw.co_filename)
+            # maybe don't try this checking
+            if not p.check():
+                raise OSError("py.path check failed.")
+        except OSError:
+            # XXX maybe try harder like the weird logic
+            # in the standard lib [linecache.updatecache] does?
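+            # fall back to the plain co_filename string when no file exists
+            # on disk, e.g. for code compiled from a string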
+            p = self.raw.co_filename
+
+        return p
+
+    @property
+    def fullsource(self):
+        """ return a _pytest._code.Source object for the full source file of the code
+        """
+        from _pytest._code import source
+        full, _ = source.findsource(self.raw)
+        return full
+
+    def source(self):
+        """ return a _pytest._code.Source object for the code object's source only
+        """
+        # return source only for that part of code
+        import _pytest._code
+        return _pytest._code.Source(self.raw)
+
+    def getargs(self, var=False):
+        """ return a tuple with the argument names for the code object
+
+        if 'var' is set True also return the names of the variable and
+        keyword arguments when present
+        """
+        # handy shortcut for getting args
+        raw = self.raw
+        argcount = raw.co_argcount
+        if var:
+            argcount += raw.co_flags & CO_VARARGS
+            argcount += raw.co_flags & CO_VARKEYWORDS
+        return raw.co_varnames[:argcount]
+
+class Frame(object):
+    """Wrapper around a Python frame holding f_locals and f_globals
+    in which expressions can be evaluated."""
+
+    def __init__(self, frame):
+        self.lineno = frame.f_lineno - 1
+        self.f_globals = frame.f_globals
+        self.f_locals = frame.f_locals
+        self.raw = frame
+        self.code = Code(frame.f_code)
+
+    @property
+    def statement(self):
+        """ statement this frame is at """
+        import _pytest._code
+        if self.code.fullsource is None:
+            return _pytest._code.Source("")
+        return self.code.fullsource.getstatement(self.lineno)
+
+    def eval(self, code, **vars):
+        """ evaluate 'code' in the frame
+
+        'vars' are optional additional local variables
+
+        returns the result of the evaluation
+        """
+        f_locals = self.f_locals.copy()
+        f_locals.update(vars)
+        return eval(code, self.f_globals, f_locals)
+
+    def exec_(self, code, **vars):
+        """ exec 'code' in the frame
+
+        'vars' are optional additional local variables
+        """
+        f_locals = self.f_locals.copy()
+        f_locals.update(vars)
+        py.builtin.exec_(code, self.f_globals, f_locals)
+
+    def repr(self, object):
+        """ return a 'safe' (non-recursive, one-line) string repr for 'object'
+        """
+        return py.io.saferepr(object)
+
+    def is_true(self, object):
+        return object
+
+    def getargs(self, var=False):
+        """ return a list of tuples (name, value) for all arguments
+
+        if 'var' is set True also include the variable and keyword
+        arguments when present
+        """
+        retval = []
+        for arg in self.code.getargs(var):
+            try:
+                retval.append((arg, self.f_locals[arg]))
+            except KeyError:
+                pass  # this can occur when using Psyco
+        return retval
+
+class TracebackEntry(object):
+    """ a single entry in a traceback """
+
+    _repr_style = None
+    exprinfo = None
+
+    def __init__(self, rawentry, excinfo=None):
+        self._excinfo = excinfo
+        self._rawentry = rawentry
+        self.lineno = rawentry.tb_lineno - 1
+
+    def set_repr_style(self, mode):
+        assert mode in ("short", "long")
+        self._repr_style = mode
+
+    @property
+    def frame(self):
+        import _pytest._code
+        return _pytest._code.Frame(self._rawentry.tb_frame)
+
+    @property
+    def relline(self):
+        return self.lineno - self.frame.code.firstlineno
+
+    def __repr__(self):
+        return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1)
+
+    @property
+    def statement(self):
+        """ _pytest._code.Source object for the current statement """
+        source = self.frame.code.fullsource
+        return source.getstatement(self.lineno)
+
+    @property
+    def path(self):
+        """ path to the source code """
+        return self.frame.code.path
+
+    def getlocals(self):
+        return self.frame.f_locals
+    locals = property(getlocals, None, None, "locals of underlying frame")
+
+    def getfirstlinesource(self):
+        # on
Jython this firstlineno can be -1 apparently + return max(self.frame.code.firstlineno, 0) + + def getsource(self, astcache=None): + """ return failing source code. """ + # we use the passed in astcache to not reparse asttrees + # within exception info printing + from _pytest._code.source import getstatementrange_ast + source = self.frame.code.fullsource + if source is None: + return None + key = astnode = None + if astcache is not None: + key = self.frame.code.path + if key is not None: + astnode = astcache.get(key, None) + start = self.getfirstlinesource() + try: + astnode, _, end = getstatementrange_ast(self.lineno, source, + astnode=astnode) + except SyntaxError: + end = self.lineno + 1 + else: + if key is not None: + astcache[key] = astnode + return source[start:end] + + source = property(getsource) + + def ishidden(self): + """ return True if the current frame has a var __tracebackhide__ + resolving to True + + If __tracebackhide__ is a callable, it gets called with the + ExceptionInfo instance and can decide whether to hide the traceback. + + mostly for internal use + """ + try: + tbh = self.frame.f_locals['__tracebackhide__'] + except KeyError: + try: + tbh = self.frame.f_globals['__tracebackhide__'] + except KeyError: + return False + + if py.builtin.callable(tbh): + return tbh(None if self._excinfo is None else self._excinfo()) + else: + return tbh + + def __str__(self): + try: + fn = str(self.path) + except py.error.Error: + fn = '???' + name = self.frame.code.name + try: + line = str(self.statement).lstrip() + except KeyboardInterrupt: + raise + except: + line = "???" + return " File %r:%d in %s\n %s\n" %(fn, self.lineno+1, name, line) + + def name(self): + return self.frame.code.raw.co_name + name = property(name, None, None, "co_name of underlaying code") + +class Traceback(list): + """ Traceback objects encapsulate and offer higher level + access to Traceback entries. + """ + Entry = TracebackEntry + def __init__(self, tb, excinfo=None): + """ initialize from given python traceback object and ExceptionInfo """ + self._excinfo = excinfo + if hasattr(tb, 'tb_next'): + def f(cur): + while cur is not None: + yield self.Entry(cur, excinfo=excinfo) + cur = cur.tb_next + list.__init__(self, f(tb)) + else: + list.__init__(self, tb) + + def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None): + """ return a Traceback instance wrapping part of this Traceback + + by provding any combination of path, lineno and firstlineno, the + first frame to start the to-be-returned traceback is determined + + this allows cutting the first part of a Traceback instance e.g. 
+ for formatting reasons (removing some uninteresting bits that deal + with handling of the exception/traceback) + """ + for x in self: + code = x.frame.code + codepath = code.path + if ((path is None or codepath == path) and + (excludepath is None or not hasattr(codepath, 'relto') or + not codepath.relto(excludepath)) and + (lineno is None or x.lineno == lineno) and + (firstlineno is None or x.frame.code.firstlineno == firstlineno)): + return Traceback(x._rawentry, self._excinfo) + return self + + def __getitem__(self, key): + val = super(Traceback, self).__getitem__(key) + if isinstance(key, type(slice(0))): + val = self.__class__(val) + return val + + def filter(self, fn=lambda x: not x.ishidden()): + """ return a Traceback instance with certain items removed + + fn is a function that gets a single argument, a TracebackEntry + instance, and should return True when the item should be added + to the Traceback, False when not + + by default this removes all the TracebackEntries which are hidden + (see ishidden() above) + """ + return Traceback(filter(fn, self), self._excinfo) + + def getcrashentry(self): + """ return last non-hidden traceback entry that lead + to the exception of a traceback. + """ + for i in range(-1, -len(self)-1, -1): + entry = self[i] + if not entry.ishidden(): + return entry + return self[-1] + + def recursionindex(self): + """ return the index of the frame/TracebackEntry where recursion + originates if appropriate, None if no recursion occurred + """ + cache = {} + for i, entry in enumerate(self): + # id for the code.raw is needed to work around + # the strange metaprogramming in the decorator lib from pypi + # which generates code objects that have hash/value equality + #XXX needs a test + key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno + #print "checking for recursion at", key + l = cache.setdefault(key, []) + if l: + f = entry.frame + loc = f.f_locals + for otherloc in l: + if f.is_true(f.eval(co_equal, + __recursioncache_locals_1=loc, + __recursioncache_locals_2=otherloc)): + return i + l.append(entry.frame.f_locals) + return None + + +co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2', + '?', 'eval') + +class ExceptionInfo(object): + """ wraps sys.exc_info() objects and offers + help for navigating the traceback. 
+ """ + _striptext = '' + _assert_start_repr = "AssertionError(u\'assert " if _PY2 else "AssertionError(\'assert " + + def __init__(self, tup=None, exprinfo=None): + import _pytest._code + if tup is None: + tup = sys.exc_info() + if exprinfo is None and isinstance(tup[1], AssertionError): + exprinfo = getattr(tup[1], 'msg', None) + if exprinfo is None: + exprinfo = py.io.saferepr(tup[1]) + if exprinfo and exprinfo.startswith(self._assert_start_repr): + self._striptext = 'AssertionError: ' + self._excinfo = tup + #: the exception class + self.type = tup[0] + #: the exception instance + self.value = tup[1] + #: the exception raw traceback + self.tb = tup[2] + #: the exception type name + self.typename = self.type.__name__ + #: the exception traceback (_pytest._code.Traceback instance) + self.traceback = _pytest._code.Traceback(self.tb, excinfo=ref(self)) + + def __repr__(self): + return "" % (self.typename, len(self.traceback)) + + def exconly(self, tryshort=False): + """ return the exception as a string + + when 'tryshort' resolves to True, and the exception is a + _pytest._code._AssertionError, only the actual exception part of + the exception representation is returned (so 'AssertionError: ' is + removed from the beginning) + """ + lines = format_exception_only(self.type, self.value) + text = ''.join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext):] + return text + + def errisinstance(self, exc): + """ return True if the exception is an instance of exc """ + return isinstance(self.value, exc) + + def _getreprcrash(self): + exconly = self.exconly(tryshort=True) + entry = self.traceback.getcrashentry() + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + return ReprFileLocation(path, lineno+1, exconly) + + def getrepr(self, showlocals=False, style="long", + abspath=False, tbfilter=True, funcargs=False): + """ return str()able representation of this exception info. + showlocals: show locals per traceback entry + style: long|short|no|native traceback style + tbfilter: hide entries (where __tracebackhide__ is true) + + in case of style==native, tbfilter and showlocals is ignored. + """ + if style == 'native': + return ReprExceptionInfo(ReprTracebackNative( + py.std.traceback.format_exception( + self.type, + self.value, + self.traceback[0]._rawentry, + )), self._getreprcrash()) + + fmt = FormattedExcinfo(showlocals=showlocals, style=style, + abspath=abspath, tbfilter=tbfilter, funcargs=funcargs) + return fmt.repr_excinfo(self) + + def __str__(self): + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return str(loc) + + def __unicode__(self): + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return unicode(loc) + + def match(self, regexp): + """ + Match the regular expression 'regexp' on the string representation of + the exception. If it matches then True is returned (so that it is + possible to write 'assert excinfo.match()'). If it doesn't match an + AssertionError is raised. + """ + __tracebackhide__ = True + if not re.search(regexp, str(self.value)): + assert 0, "Pattern '{0!s}' not found in '{1!s}'".format( + regexp, self.value) + return True + + +class FormattedExcinfo(object): + """ presenting information about failing Functions and Generators. 
""" + # for traceback entries + flow_marker = ">" + fail_marker = "E" + + def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False): + self.showlocals = showlocals + self.style = style + self.tbfilter = tbfilter + self.funcargs = funcargs + self.abspath = abspath + self.astcache = {} + + def _getindent(self, source): + # figure out indent for given source + try: + s = str(source.getstatement(len(source)-1)) + except KeyboardInterrupt: + raise + except: + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except: + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry): + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def _saferepr(self, obj): + return py.io.saferepr(obj) + + def repr_args(self, entry): + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + args.append((argname, self._saferepr(argvalue))) + return ReprFuncArgs(args) + + def get_source(self, source, line_index=-1, excinfo=None, short=False): + """ return formatted and marked up source lines. """ + import _pytest._code + lines = [] + if source is None or line_index >= len(source.lines): + source = _pytest._code.Source("???") + line_index = 0 + if line_index < 0: + line_index += len(source) + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + for line in source.lines[line_index+1:]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_exconly(self, excinfo, indent=4, markall=False): + lines = [] + indent = " " * indent + # get the real exception information out + exlines = excinfo.exconly(tryshort=True).split('\n') + failindent = self.fail_marker + indent[1:] + for line in exlines: + lines.append(failindent + line) + if not markall: + failindent = indent + return lines + + def repr_locals(self, locals): + if self.showlocals: + lines = [] + keys = [loc for loc in locals if loc[0] != "@"] + keys.sort() + for name in keys: + value = locals[name] + if name == '__builtins__': + lines.append("__builtins__ = ") + else: + # This formatting could all be handled by the + # _repr() function, which is only reprlib.Repr in + # disguise, so is very configurable. 
+ str_repr = self._saferepr(value) + #if len(str_repr) < 70 or not isinstance(value, + # (list, tuple, dict)): + lines.append("%-10s = %s" %(name, str_repr)) + #else: + # self._line("%-10s =\\" % (name,)) + # # XXX + # py.std.pprint.pprint(value, stream=self.excinfowriter) + return ReprLocals(lines) + + def repr_traceback_entry(self, entry, excinfo=None): + import _pytest._code + source = self._getentrysource(entry) + if source is None: + source = _pytest._code.Source("???") + line_index = 0 + else: + # entry.getfirstlinesource() can be -1, should be 0 on jython + line_index = entry.lineno - max(entry.getfirstlinesource(), 0) + + lines = [] + style = entry._repr_style + if style is None: + style = self.style + if style in ("short", "long"): + short = style == "short" + reprargs = self.repr_args(entry) if not short else None + s = self.get_source(source, line_index, excinfo, short=short) + lines.extend(s) + if short: + message = "in %s" %(entry.name) + else: + message = excinfo and excinfo.typename or "" + path = self._makepath(entry.path) + filelocrepr = ReprFileLocation(path, entry.lineno+1, message) + localsrepr = None + if not short: + localsrepr = self.repr_locals(entry.locals) + return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style) + if excinfo: + lines.extend(self.get_exconly(excinfo, indent=4)) + return ReprEntry(lines, None, None, None, style) + + def _makepath(self, path): + if not self.abspath: + try: + np = py.path.local().bestrelpath(path) + except OSError: + return path + if len(np) < len(str(path)): + path = np + return path + + def repr_traceback(self, excinfo): + traceback = excinfo.traceback + if self.tbfilter: + traceback = traceback.filter() + + if is_recursion_error(excinfo): + traceback, extraline = self._truncate_recursive_traceback(traceback) + else: + extraline = None + + last = traceback[-1] + entries = [] + for index, entry in enumerate(traceback): + einfo = (last == entry) and excinfo or None + reprentry = self.repr_traceback_entry(entry, einfo) + entries.append(reprentry) + return ReprTraceback(entries, extraline, style=self.style) + + def _truncate_recursive_traceback(self, traceback): + """ + Truncate the given recursive traceback trying to find the starting point + of the recursion. + + The detection is done by going through each traceback entry and finding the + point in which the locals of the frame are equal to the locals of a previous frame (see ``recursionindex()``. + + Handle the situation where the recursion process might raise an exception (for example + comparing numpy arrays using equality raises a TypeError), in which case we do our best to + warn the user of the error and show a limited traceback. + """ + try: + recursionindex = traceback.recursionindex() + except Exception as e: + max_frames = 10 + extraline = ( + '!!! Recursion error detected, but an error occurred locating the origin of recursion.\n' + ' The following exception happened when comparing locals in the stack frame:\n' + ' {exc_type}: {exc_msg}\n' + ' Displaying first and last {max_frames} stack frames out of {total}.' + ).format(exc_type=type(e).__name__, exc_msg=safe_str(e), max_frames=max_frames, total=len(traceback)) + traceback = traceback[:max_frames] + traceback[-max_frames:] + else: + if recursionindex is not None: + extraline = "!!! 
Recursion detected (same locals & position)" + traceback = traceback[:recursionindex + 1] + else: + extraline = None + + return traceback, extraline + + def repr_excinfo(self, excinfo): + if _PY2: + reprtraceback = self.repr_traceback(excinfo) + reprcrash = excinfo._getreprcrash() + + return ReprExceptionInfo(reprtraceback, reprcrash) + else: + repr_chain = [] + e = excinfo.value + descr = None + while e is not None: + if excinfo: + reprtraceback = self.repr_traceback(excinfo) + reprcrash = excinfo._getreprcrash() + else: + # fallback to native repr if the exception doesn't have a traceback: + # ExceptionInfo objects require a full traceback to work + reprtraceback = ReprTracebackNative(py.std.traceback.format_exception(type(e), e, None)) + reprcrash = None + + repr_chain += [(reprtraceback, reprcrash, descr)] + if e.__cause__ is not None: + e = e.__cause__ + excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None + descr = 'The above exception was the direct cause of the following exception:' + elif e.__context__ is not None: + e = e.__context__ + excinfo = ExceptionInfo((type(e), e, e.__traceback__)) if e.__traceback__ else None + descr = 'During handling of the above exception, another exception occurred:' + else: + e = None + repr_chain.reverse() + return ExceptionChainRepr(repr_chain) + + +class TerminalRepr(object): + def __str__(self): + s = self.__unicode__() + if _PY2: + s = s.encode('utf-8') + return s + + def __unicode__(self): + # FYI this is called from pytest-xdist's serialization of exception + # information. + io = py.io.TextIO() + tw = py.io.TerminalWriter(file=io) + self.toterminal(tw) + return io.getvalue().strip() + + def __repr__(self): + return "<%s instance at %0x>" %(self.__class__, id(self)) + + +class ExceptionRepr(TerminalRepr): + def __init__(self): + self.sections = [] + + def addsection(self, name, content, sep="-"): + self.sections.append((name, content, sep)) + + def toterminal(self, tw): + for name, content, sep in self.sections: + tw.sep(sep, name) + tw.line(content) + + +class ExceptionChainRepr(ExceptionRepr): + def __init__(self, chain): + super(ExceptionChainRepr, self).__init__() + self.chain = chain + # reprcrash and reprtraceback of the outermost (the newest) exception + # in the chain + self.reprtraceback = chain[-1][0] + self.reprcrash = chain[-1][1] + + def toterminal(self, tw): + for element in self.chain: + element[0].toterminal(tw) + if element[2] is not None: + tw.line("") + tw.line(element[2], yellow=True) + super(ExceptionChainRepr, self).toterminal(tw) + + +class ReprExceptionInfo(ExceptionRepr): + def __init__(self, reprtraceback, reprcrash): + super(ReprExceptionInfo, self).__init__() + self.reprtraceback = reprtraceback + self.reprcrash = reprcrash + + def toterminal(self, tw): + self.reprtraceback.toterminal(tw) + super(ReprExceptionInfo, self).toterminal(tw) + +class ReprTraceback(TerminalRepr): + entrysep = "_ " + + def __init__(self, reprentries, extraline, style): + self.reprentries = reprentries + self.extraline = extraline + self.style = style + + def toterminal(self, tw): + # the entries might have different styles + for i, entry in enumerate(self.reprentries): + if entry.style == "long": + tw.line("") + entry.toterminal(tw) + if i < len(self.reprentries) - 1: + next_entry = self.reprentries[i+1] + if entry.style == "long" or \ + entry.style == "short" and next_entry.style == "long": + tw.sep(self.entrysep) + + if self.extraline: + tw.line(self.extraline) + +class ReprTracebackNative(ReprTraceback): + 
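+    # Holds pre-formatted (native) traceback lines as a single
+    # ReprEntryNative entry instead of applying pytest's own formatting.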
def __init__(self, tblines): + self.style = "native" + self.reprentries = [ReprEntryNative(tblines)] + self.extraline = None + +class ReprEntryNative(TerminalRepr): + style = "native" + + def __init__(self, tblines): + self.lines = tblines + + def toterminal(self, tw): + tw.write("".join(self.lines)) + +class ReprEntry(TerminalRepr): + localssep = "_ " + + def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style): + self.lines = lines + self.reprfuncargs = reprfuncargs + self.reprlocals = reprlocals + self.reprfileloc = filelocrepr + self.style = style + + def toterminal(self, tw): + if self.style == "short": + self.reprfileloc.toterminal(tw) + for line in self.lines: + red = line.startswith("E ") + tw.line(line, bold=True, red=red) + #tw.line("") + return + if self.reprfuncargs: + self.reprfuncargs.toterminal(tw) + for line in self.lines: + red = line.startswith("E ") + tw.line(line, bold=True, red=red) + if self.reprlocals: + #tw.sep(self.localssep, "Locals") + tw.line("") + self.reprlocals.toterminal(tw) + if self.reprfileloc: + if self.lines: + tw.line("") + self.reprfileloc.toterminal(tw) + + def __str__(self): + return "%s\n%s\n%s" % ("\n".join(self.lines), + self.reprlocals, + self.reprfileloc) + +class ReprFileLocation(TerminalRepr): + def __init__(self, path, lineno, message): + self.path = str(path) + self.lineno = lineno + self.message = message + + def toterminal(self, tw): + # filename and lineno output for each entry, + # using an output format that most editors unterstand + msg = self.message + i = msg.find("\n") + if i != -1: + msg = msg[:i] + tw.write(self.path, bold=True, red=True) + tw.line(":%s: %s" % (self.lineno, msg)) + +class ReprLocals(TerminalRepr): + def __init__(self, lines): + self.lines = lines + + def toterminal(self, tw): + for line in self.lines: + tw.line(line) + +class ReprFuncArgs(TerminalRepr): + def __init__(self, args): + self.args = args + + def toterminal(self, tw): + if self.args: + linesofar = "" + for name, value in self.args: + ns = "%s = %s" %(name, value) + if len(ns) + len(linesofar) + 2 > tw.fullwidth: + if linesofar: + tw.line(linesofar) + linesofar = ns + else: + if linesofar: + linesofar += ", " + ns + else: + linesofar = ns + if linesofar: + tw.line(linesofar) + tw.line("") + + +def getrawcode(obj, trycall=True): + """ return code object for given function. 
""" + try: + return obj.__code__ + except AttributeError: + obj = getattr(obj, 'im_func', obj) + obj = getattr(obj, 'func_code', obj) + obj = getattr(obj, 'f_code', obj) + obj = getattr(obj, '__code__', obj) + if trycall and not hasattr(obj, 'co_firstlineno'): + if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj): + x = getrawcode(obj.__call__, trycall=False) + if hasattr(x, 'co_firstlineno'): + return x + return obj + + +if PY35: # RecursionError introduced in 3.5 + def is_recursion_error(excinfo): + return excinfo.errisinstance(RecursionError) # noqa +else: + def is_recursion_error(excinfo): + if not excinfo.errisinstance(RuntimeError): + return False + try: + return "maximum recursion depth exceeded" in str(excinfo.value) + except UnicodeError: + return False diff --git a/venv/lib/python3.5/site-packages/_pytest/_code/source.py b/venv/lib/python3.5/site-packages/_pytest/_code/source.py new file mode 100644 index 0000000..8e61484 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/_code/source.py @@ -0,0 +1,414 @@ +from __future__ import absolute_import, division, generators, print_function + +from bisect import bisect_right +import sys +import inspect, tokenize +import py +cpy_compile = compile + +try: + import _ast + from _ast import PyCF_ONLY_AST as _AST_FLAG +except ImportError: + _AST_FLAG = 0 + _ast = None + + +class Source(object): + """ a immutable object holding a source code fragment, + possibly deindenting it. + """ + _compilecounter = 0 + def __init__(self, *parts, **kwargs): + self.lines = lines = [] + de = kwargs.get('deindent', True) + rstrip = kwargs.get('rstrip', True) + for part in parts: + if not part: + partlines = [] + if isinstance(part, Source): + partlines = part.lines + elif isinstance(part, (tuple, list)): + partlines = [x.rstrip("\n") for x in part] + elif isinstance(part, py.builtin._basestring): + partlines = part.split('\n') + if rstrip: + while partlines: + if partlines[-1].strip(): + break + partlines.pop() + else: + partlines = getsource(part, deindent=de).lines + if de: + partlines = deindent(partlines) + lines.extend(partlines) + + def __eq__(self, other): + try: + return self.lines == other.lines + except AttributeError: + if isinstance(other, str): + return str(self) == other + return False + + __hash__ = None + + def __getitem__(self, key): + if isinstance(key, int): + return self.lines[key] + else: + if key.step not in (None, 1): + raise IndexError("cannot slice a Source with a step") + newsource = Source() + newsource.lines = self.lines[key.start:key.stop] + return newsource + + def __len__(self): + return len(self.lines) + + def strip(self): + """ return new source object with trailing + and leading blank lines removed. + """ + start, end = 0, len(self) + while start < end and not self.lines[start].strip(): + start += 1 + while end > start and not self.lines[end-1].strip(): + end -= 1 + source = Source() + source.lines[:] = self.lines[start:end] + return source + + def putaround(self, before='', after='', indent=' ' * 4): + """ return a copy of the source object with + 'before' and 'after' wrapped around it. + """ + before = Source(before) + after = Source(after) + newsource = Source() + lines = [ (indent + line) for line in self.lines] + newsource.lines = before.lines + lines + after.lines + return newsource + + def indent(self, indent=' ' * 4): + """ return a copy of the source object with + all lines indented by the given indent-string. 
+ """ + newsource = Source() + newsource.lines = [(indent+line) for line in self.lines] + return newsource + + def getstatement(self, lineno, assertion=False): + """ return Source statement which contains the + given linenumber (counted from 0). + """ + start, end = self.getstatementrange(lineno, assertion) + return self[start:end] + + def getstatementrange(self, lineno, assertion=False): + """ return (start, end) tuple which spans the minimal + statement region which containing the given lineno. + """ + if not (0 <= lineno < len(self)): + raise IndexError("lineno out of range") + ast, start, end = getstatementrange_ast(lineno, self) + return start, end + + def deindent(self, offset=None): + """ return a new source object deindented by offset. + If offset is None then guess an indentation offset from + the first non-blank line. Subsequent lines which have a + lower indentation offset will be copied verbatim as + they are assumed to be part of multilines. + """ + # XXX maybe use the tokenizer to properly handle multiline + # strings etc.pp? + newsource = Source() + newsource.lines[:] = deindent(self.lines, offset) + return newsource + + def isparseable(self, deindent=True): + """ return True if source is parseable, heuristically + deindenting it by default. + """ + try: + import parser + except ImportError: + syntax_checker = lambda x: compile(x, 'asd', 'exec') + else: + syntax_checker = parser.suite + + if deindent: + source = str(self.deindent()) + else: + source = str(self) + try: + #compile(source+'\n', "x", "exec") + syntax_checker(source+'\n') + except KeyboardInterrupt: + raise + except Exception: + return False + else: + return True + + def __str__(self): + return "\n".join(self.lines) + + def compile(self, filename=None, mode='exec', + flag=generators.compiler_flag, + dont_inherit=0, _genframe=None): + """ return compiled code object. if filename is None + invent an artificial filename which displays + the source/line position of the caller frame. + """ + if not filename or py.path.local(filename).check(file=0): + if _genframe is None: + _genframe = sys._getframe(1) # the caller + fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno + base = "<%d-codegen " % self._compilecounter + self.__class__._compilecounter += 1 + if not filename: + filename = base + '%s:%d>' % (fn, lineno) + else: + filename = base + '%r %s:%d>' % (filename, fn, lineno) + source = "\n".join(self.lines) + '\n' + try: + co = cpy_compile(source, filename, mode, flag) + except SyntaxError: + ex = sys.exc_info()[1] + # re-represent syntax errors from parsing python strings + msglines = self.lines[:ex.lineno] + if ex.offset: + msglines.append(" "*ex.offset + '^') + msglines.append("(code was compiled probably from here: %s)" % filename) + newex = SyntaxError('\n'.join(msglines)) + newex.offset = ex.offset + newex.lineno = ex.lineno + newex.text = ex.text + raise newex + else: + if flag & _AST_FLAG: + return co + lines = [(x + "\n") for x in self.lines] + py.std.linecache.cache[filename] = (1, None, lines, filename) + return co + +# +# public API shortcut functions +# + +def compile_(source, filename=None, mode='exec', flags= + generators.compiler_flag, dont_inherit=0): + """ compile the given source to a raw code object, + and maintain an internal cache which allows later + retrieval of the source code for the code object + and any recursively created code objects. + """ + if _ast is not None and isinstance(source, _ast.AST): + # XXX should Source support having AST? 
+ return cpy_compile(source, filename, mode, flags, dont_inherit) + _genframe = sys._getframe(1) # the caller + s = Source(source) + co = s.compile(filename, mode, flags, _genframe=_genframe) + return co + + +def getfslineno(obj): + """ Return source location (path, lineno) for the given object. + If the source cannot be determined return ("", -1) + """ + import _pytest._code + try: + code = _pytest._code.Code(obj) + except TypeError: + try: + fn = (py.std.inspect.getsourcefile(obj) or + py.std.inspect.getfile(obj)) + except TypeError: + return "", -1 + + fspath = fn and py.path.local(fn) or None + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except IOError: + pass + else: + fspath = code.path + lineno = code.firstlineno + assert isinstance(lineno, int) + return fspath, lineno + +# +# helper functions +# + +def findsource(obj): + try: + sourcelines, lineno = py.std.inspect.findsource(obj) + except py.builtin._sysex: + raise + except: + return None, -1 + source = Source() + source.lines = [line.rstrip() for line in sourcelines] + return source, lineno + + +def getsource(obj, **kwargs): + import _pytest._code + obj = _pytest._code.getrawcode(obj) + try: + strsrc = inspect.getsource(obj) + except IndentationError: + strsrc = "\"Buggy python version consider upgrading, cannot get source\"" + assert isinstance(strsrc, str) + return Source(strsrc, **kwargs) + + +def deindent(lines, offset=None): + if offset is None: + for line in lines: + line = line.expandtabs() + s = line.lstrip() + if s: + offset = len(line)-len(s) + break + else: + offset = 0 + if offset == 0: + return list(lines) + newlines = [] + + def readline_generator(lines): + for line in lines: + yield line + '\n' + while True: + yield '' + + it = readline_generator(lines) + + try: + for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)): + if sline > len(lines): + break # End of input reached + if sline > len(newlines): + line = lines[sline - 1].expandtabs() + if line.lstrip() and line[:offset].isspace(): + line = line[offset:] # Deindent + newlines.append(line) + + for i in range(sline, eline): + # Don't deindent continuing lines of + # multiline tokens (i.e. multiline strings) + newlines.append(lines[i]) + except (IndentationError, tokenize.TokenError): + pass + # Add any lines we didn't see. E.g. if an exception was raised. 
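+    # illustrative example (editor's sketch):
+    #   deindent(["    def f():", "        pass"]) -> ["def f():", "    pass"]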
+ newlines.extend(lines[len(newlines):]) + return newlines + + +def get_statement_startend2(lineno, node): + import ast + # flatten all statements and except handlers into one lineno-list + # AST's line numbers start indexing at 1 + l = [] + for x in ast.walk(node): + if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler): + l.append(x.lineno - 1) + for name in "finalbody", "orelse": + val = getattr(x, name, None) + if val: + # treat the finally/orelse part as its own statement + l.append(val[0].lineno - 1 - 1) + l.sort() + insert_index = bisect_right(l, lineno) + start = l[insert_index - 1] + if insert_index >= len(l): + end = None + else: + end = l[insert_index] + return start, end + + +def getstatementrange_ast(lineno, source, assertion=False, astnode=None): + if astnode is None: + content = str(source) + if sys.version_info < (2,7): + content += "\n" + try: + astnode = compile(content, "source", "exec", 1024) # 1024 for AST + except ValueError: + start, end = getstatementrange_old(lineno, source, assertion) + return None, start, end + start, end = get_statement_startend2(lineno, astnode) + # we need to correct the end: + # - ast-parsing strips comments + # - there might be empty lines + # - we might have lesser indented code blocks at the end + if end is None: + end = len(source.lines) + + if end > start + 1: + # make sure we don't span differently indented code blocks + # by using the BlockFinder helper used which inspect.getsource() uses itself + block_finder = inspect.BlockFinder() + # if we start with an indented line, put blockfinder to "started" mode + block_finder.started = source.lines[start][0].isspace() + it = ((x + "\n") for x in source.lines[start:end]) + try: + for tok in tokenize.generate_tokens(lambda: next(it)): + block_finder.tokeneater(*tok) + except (inspect.EndOfBlock, IndentationError): + end = block_finder.last + start + except Exception: + pass + + # the end might still point to a comment or empty line, correct it + while end: + line = source.lines[end - 1].lstrip() + if line.startswith("#") or not line: + end -= 1 + else: + break + return astnode, start, end + + +def getstatementrange_old(lineno, source, assertion=False): + """ return (start, end) tuple which spans the minimal + statement region which containing the given lineno. + raise an IndexError if no such statementrange can be found. + """ + # XXX this logic is only used on python2.4 and below + # 1. find the start of the statement + from codeop import compile_command + for start in range(lineno, -1, -1): + if assertion: + line = source.lines[start] + # the following lines are not fully tested, change with care + if 'super' in line and 'self' in line and '__init__' in line: + raise IndexError("likely a subclass") + if "assert" not in line and "raise" not in line: + continue + trylines = source.lines[start:lineno+1] + # quick hack to prepare parsing an indented line with + # compile_command() (which errors on "return" outside defs) + trylines.insert(0, 'def xxx():') + trysource = '\n '.join(trylines) + # ^ space here + try: + compile_command(trysource) + except (SyntaxError, OverflowError, ValueError): + continue + + # 2. 
find the end of the statement + for end in range(lineno+1, len(source)+1): + trysource = source[start:end] + if trysource.isparseable(): + return start, end + raise SyntaxError("no valid source range around line %d " % (lineno,)) + + diff --git a/venv/lib/python3.5/site-packages/_pytest/_pluggy.py b/venv/lib/python3.5/site-packages/_pytest/_pluggy.py new file mode 100644 index 0000000..6cc1d3d --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/_pluggy.py @@ -0,0 +1,11 @@ +""" +imports symbols from vendored "pluggy" if available, otherwise +falls back to importing "pluggy" from the default namespace. +""" +from __future__ import absolute_import, division, print_function +try: + from _pytest.vendored_packages.pluggy import * # noqa + from _pytest.vendored_packages.pluggy import __version__ # noqa +except ImportError: + from pluggy import * # noqa + from pluggy import __version__ # noqa diff --git a/venv/lib/python3.5/site-packages/_pytest/_version.py b/venv/lib/python3.5/site-packages/_pytest/_version.py new file mode 100644 index 0000000..e03109e --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/_version.py @@ -0,0 +1,4 @@ +# coding: utf-8 +# file generated by setuptools_scm +# don't change, don't track in version control +version = '3.1.3' diff --git a/venv/lib/python3.5/site-packages/_pytest/assertion/__init__.py b/venv/lib/python3.5/site-packages/_pytest/assertion/__init__.py new file mode 100644 index 0000000..acb034d --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/assertion/__init__.py @@ -0,0 +1,149 @@ +""" +support for presenting detailed information in failing assertions. +""" +from __future__ import absolute_import, division, print_function +import py +import sys + +from _pytest.assertion import util +from _pytest.assertion import rewrite +from _pytest.assertion import truncate + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption('--assert', + action="store", + dest="assertmode", + choices=("rewrite", "plain",), + default="rewrite", + metavar="MODE", + help="""Control assertion debugging tools. 'plain' + performs no assertion debugging. 'rewrite' + (the default) rewrites assert statements in + test modules on import to provide assert + expression information.""") + + + +def register_assert_rewrite(*names): + """Register one or more module names to be rewritten on import. + + This function will make sure that this module or all modules inside + the package will get their assert statements rewritten. + Thus you should make sure to call this before the module is + actually imported, usually in your __init__.py if you are a plugin + using a package. + + :raise TypeError: if the given module names are not strings. 
+ """ + for name in names: + if not isinstance(name, str): + msg = 'expected module names as *args, got {0} instead' + raise TypeError(msg.format(repr(names))) + for hook in sys.meta_path: + if isinstance(hook, rewrite.AssertionRewritingHook): + importhook = hook + break + else: + importhook = DummyRewriteHook() + importhook.mark_rewrite(*names) + + +class DummyRewriteHook(object): + """A no-op import hook for when rewriting is disabled.""" + + def mark_rewrite(self, *names): + pass + + +class AssertionState: + """State for the assertion plugin.""" + + def __init__(self, config, mode): + self.mode = mode + self.trace = config.trace.root.get("assertion") + self.hook = None + + +def install_importhook(config): + """Try to install the rewrite hook, raise SystemError if it fails.""" + # Both Jython and CPython 2.6.0 have AST bugs that make the + # assertion rewriting hook malfunction. + if (sys.platform.startswith('java') or + sys.version_info[:3] == (2, 6, 0)): + raise SystemError('rewrite not supported') + + config._assertstate = AssertionState(config, 'rewrite') + config._assertstate.hook = hook = rewrite.AssertionRewritingHook(config) + sys.meta_path.insert(0, hook) + config._assertstate.trace('installed rewrite import hook') + + def undo(): + hook = config._assertstate.hook + if hook is not None and hook in sys.meta_path: + sys.meta_path.remove(hook) + + config.add_cleanup(undo) + return hook + + +def pytest_collection(session): + # this hook is only called when test modules are collected + # so for example not in the master process of pytest-xdist + # (which does not collect test modules) + assertstate = getattr(session.config, '_assertstate', None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(session) + + +def pytest_runtest_setup(item): + """Setup the pytest_assertrepr_compare hook + + The newinterpret and rewrite modules will use util._reprcompare if + it exists to use custom reporting via the + pytest_assertrepr_compare hook. This sets up this custom + comparison for the test. + """ + def callbinrepr(op, left, right): + """Call the pytest_assertrepr_compare hook and prepare the result + + This uses the first result from the hook and then ensures the + following: + * Overly verbose explanations are truncated unless configured otherwise + (eg. if running in verbose mode). + * Embedded newlines are escaped to help util.format_explanation() + later. + * If the rewrite mode is used embedded %-characters are replaced + to protect later % formatting. + + The result can be formatted by util.format_explanation() for + pretty printing. 
+ """ + hook_result = item.ihook.pytest_assertrepr_compare( + config=item.config, op=op, left=left, right=right) + for new_expl in hook_result: + if new_expl: + new_expl = truncate.truncate_if_required(new_expl, item) + new_expl = [line.replace("\n", "\\n") for line in new_expl] + res = py.builtin._totext("\n~").join(new_expl) + if item.config.getvalue("assertmode") == "rewrite": + res = res.replace("%", "%%") + return res + util._reprcompare = callbinrepr + + +def pytest_runtest_teardown(item): + util._reprcompare = None + + +def pytest_sessionfinish(session): + assertstate = getattr(session.config, '_assertstate', None) + if assertstate: + if assertstate.hook is not None: + assertstate.hook.set_session(None) + + +# Expose this plugin's implementation for the pytest_assertrepr_compare hook +pytest_assertrepr_compare = util.assertrepr_compare diff --git a/venv/lib/python3.5/site-packages/_pytest/assertion/rewrite.py b/venv/lib/python3.5/site-packages/_pytest/assertion/rewrite.py new file mode 100644 index 0000000..6ec54d7 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/assertion/rewrite.py @@ -0,0 +1,941 @@ +"""Rewrite assertion AST to produce nice error messages""" +from __future__ import absolute_import, division, print_function +import ast +import _ast +import errno +import itertools +import imp +import marshal +import os +import re +import struct +import sys +import types + +import py +from _pytest.assertion import util + + +# pytest caches rewritten pycs in __pycache__. +if hasattr(imp, "get_tag"): + PYTEST_TAG = imp.get_tag() + "-PYTEST" +else: + if hasattr(sys, "pypy_version_info"): + impl = "pypy" + elif sys.platform == "java": + impl = "jython" + else: + impl = "cpython" + ver = sys.version_info + PYTEST_TAG = "%s-%s%s-PYTEST" % (impl, ver[0], ver[1]) + del ver, impl + +PYC_EXT = ".py" + (__debug__ and "c" or "o") +PYC_TAIL = "." + PYTEST_TAG + PYC_EXT + +REWRITE_NEWLINES = sys.version_info[:2] != (2, 7) and sys.version_info < (3, 2) +ASCII_IS_DEFAULT_ENCODING = sys.version_info[0] < 3 + +if sys.version_info >= (3,5): + ast_Call = ast.Call +else: + ast_Call = lambda a,b,c: ast.Call(a, b, c, None, None) + + +class AssertionRewritingHook(object): + """PEP302 Import hook which rewrites asserts.""" + + def __init__(self, config): + self.config = config + self.fnpats = config.getini("python_files") + self.session = None + self.modules = {} + self._rewritten_names = set() + self._register_with_pkg_resources() + self._must_rewrite = set() + + def set_session(self, session): + self.session = session + + def find_module(self, name, path=None): + state = self.config._assertstate + state.trace("find_module called for: %s" % name) + names = name.rsplit(".", 1) + lastname = names[-1] + pth = None + if path is not None: + # Starting with Python 3.3, path is a _NamespacePath(), which + # causes problems if not converted to list. + path = list(path) + if len(path) == 1: + pth = path[0] + if pth is None: + try: + fd, fn, desc = imp.find_module(lastname, path) + except ImportError: + return None + if fd is not None: + fd.close() + tp = desc[2] + if tp == imp.PY_COMPILED: + if hasattr(imp, "source_from_cache"): + try: + fn = imp.source_from_cache(fn) + except ValueError: + # Python 3 doesn't like orphaned but still-importable + # .pyc files. + fn = fn[:-1] + else: + fn = fn[:-1] + elif tp != imp.PY_SOURCE: + # Don't know what this is. 
+ return None + else: + fn = os.path.join(pth, name.rpartition(".")[2] + ".py") + + fn_pypath = py.path.local(fn) + if not self._should_rewrite(name, fn_pypath, state): + return None + + self._rewritten_names.add(name) + + # The requested module looks like a test file, so rewrite it. This is + # the most magical part of the process: load the source, rewrite the + # asserts, and load the rewritten source. We also cache the rewritten + # module code in a special pyc. We must be aware of the possibility of + # concurrent pytest processes rewriting and loading pycs. To avoid + # tricky race conditions, we maintain the following invariant: The + # cached pyc is always a complete, valid pyc. Operations on it must be + # atomic. POSIX's atomic rename comes in handy. + write = not sys.dont_write_bytecode + cache_dir = os.path.join(fn_pypath.dirname, "__pycache__") + if write: + try: + os.mkdir(cache_dir) + except OSError: + e = sys.exc_info()[1].errno + if e == errno.EEXIST: + # Either the __pycache__ directory already exists (the + # common case) or it's blocked by a non-dir node. In the + # latter case, we'll ignore it in _write_pyc. + pass + elif e in [errno.ENOENT, errno.ENOTDIR]: + # One of the path components was not a directory, likely + # because we're in a zip file. + write = False + elif e in [errno.EACCES, errno.EROFS, errno.EPERM]: + state.trace("read only directory: %r" % fn_pypath.dirname) + write = False + else: + raise + cache_name = fn_pypath.basename[:-3] + PYC_TAIL + pyc = os.path.join(cache_dir, cache_name) + # Notice that even if we're in a read-only directory, I'm going + # to check for a cached pyc. This may not be optimal... + co = _read_pyc(fn_pypath, pyc, state.trace) + if co is None: + state.trace("rewriting %r" % (fn,)) + source_stat, co = _rewrite_test(self.config, fn_pypath) + if co is None: + # Probably a SyntaxError in the test. + return None + if write: + _make_rewritten_pyc(state, source_stat, pyc, co) + else: + state.trace("found cached rewritten pyc for %r" % (fn,)) + self.modules[name] = co, pyc + return self + + def _should_rewrite(self, name, fn_pypath, state): + # always rewrite conftest files + fn = str(fn_pypath) + if fn_pypath.basename == 'conftest.py': + state.trace("rewriting conftest file: %r" % (fn,)) + return True + + if self.session is not None: + if self.session.isinitpath(fn): + state.trace("matched test file (was specified on cmdline): %r" % + (fn,)) + return True + + # modules not passed explicitly on the command line are only + # rewritten if they match the naming convention for test files + for pat in self.fnpats: + if fn_pypath.fnmatch(pat): + state.trace("matched test file %r" % (fn,)) + return True + + for marked in self._must_rewrite: + if name.startswith(marked): + state.trace("matched marked file %r (from %r)" % (name, marked)) + return True + + return False + + def mark_rewrite(self, *names): + """Mark import names as needing to be re-written. + + The named module or package as well as any nested modules will + be re-written on import. 
+ """ + already_imported = set(names).intersection(set(sys.modules)) + if already_imported: + for name in already_imported: + if name not in self._rewritten_names: + self._warn_already_imported(name) + self._must_rewrite.update(names) + + def _warn_already_imported(self, name): + self.config.warn( + 'P1', + 'Module already imported so can not be re-written: %s' % name) + + def load_module(self, name): + # If there is an existing module object named 'fullname' in + # sys.modules, the loader must use that existing module. (Otherwise, + # the reload() builtin will not work correctly.) + if name in sys.modules: + return sys.modules[name] + + co, pyc = self.modules.pop(name) + # I wish I could just call imp.load_compiled here, but __file__ has to + # be set properly. In Python 3.2+, this all would be handled correctly + # by load_compiled. + mod = sys.modules[name] = imp.new_module(name) + try: + mod.__file__ = co.co_filename + # Normally, this attribute is 3.2+. + mod.__cached__ = pyc + mod.__loader__ = self + py.builtin.exec_(co, mod.__dict__) + except: + if name in sys.modules: + del sys.modules[name] + raise + return sys.modules[name] + + + + def is_package(self, name): + try: + fd, fn, desc = imp.find_module(name) + except ImportError: + return False + if fd is not None: + fd.close() + tp = desc[2] + return tp == imp.PKG_DIRECTORY + + @classmethod + def _register_with_pkg_resources(cls): + """ + Ensure package resources can be loaded from this loader. May be called + multiple times, as the operation is idempotent. + """ + try: + import pkg_resources + # access an attribute in case a deferred importer is present + pkg_resources.__name__ + except ImportError: + return + + # Since pytest tests are always located in the file system, the + # DefaultProvider is appropriate. + pkg_resources.register_loader_type(cls, pkg_resources.DefaultProvider) + + def get_data(self, pathname): + """Optional PEP302 get_data API. + """ + with open(pathname, 'rb') as f: + return f.read() + + +def _write_pyc(state, co, source_stat, pyc): + # Technically, we don't have to have the same pyc format as + # (C)Python, since these "pycs" should never be seen by builtin + # import. However, there's little reason deviate, and I hope + # sometime to be able to use imp.load_compiled to load them. (See + # the comment in load_module above.) + try: + fp = open(pyc, "wb") + except IOError: + err = sys.exc_info()[1].errno + state.trace("error writing pyc file at %s: errno=%s" %(pyc, err)) + # we ignore any failure to write the cache file + # there are many reasons, permission-denied, __pycache__ being a + # file etc. 
+ return False + try: + fp.write(imp.get_magic()) + mtime = int(source_stat.mtime) + size = source_stat.size & 0xFFFFFFFF + fp.write(struct.pack("<ll", mtime, size)) + marshal.dump(co, fp) + finally: + fp.close() + return True + + +unary_map = { + ast.Not: "not %s", + ast.Invert: "~%s", + ast.USub: "-%s", + ast.UAdd: "+%s" +} + +binop_map = { + ast.BitOr: "|", + ast.BitXor: "^", + ast.BitAnd: "&", + ast.LShift: "<<", + ast.RShift: ">>", + ast.Add: "+", + ast.Sub: "-", + ast.Mult: "*", + ast.Div: "/", + ast.FloorDiv: "//", + ast.Mod: "%%", # escaped for string formatting + ast.Eq: "==", + ast.NotEq: "!=", + ast.Lt: "<", + ast.LtE: "<=", + ast.Gt: ">", + ast.GtE: ">=", + ast.Pow: "**", + ast.Is: "is", + ast.IsNot: "is not", + ast.In: "in", + ast.NotIn: "not in" +} +# Python 3.5+ compatibility +try: + binop_map[ast.MatMult] = "@" +except AttributeError: + pass + +# Python 3.4+ compatibility +if hasattr(ast, "NameConstant"): + _NameConstant = ast.NameConstant +else: + def _NameConstant(c): + return ast.Name(str(c), ast.Load()) + + +def set_location(node, lineno, col_offset): + """Set node location information recursively.""" + def _fix(node, lineno, col_offset): + if "lineno" in node._attributes: + node.lineno = lineno + if "col_offset" in node._attributes: + node.col_offset = col_offset + for child in ast.iter_child_nodes(node): + _fix(child, lineno, col_offset) + _fix(node, lineno, col_offset) + return node + + +class AssertionRewriter(ast.NodeVisitor): + """Assertion rewriting implementation. + + The main entrypoint is to call .run() with an ast.Module instance, + this will then find all the assert statements and re-write them to + provide intermediate values and a detailed assertion error. See + http://pybites.blogspot.be/2011/07/behind-scenes-of-pytests-new-assertion.html + for an overview of how this works. + + The entry point here is .run() which will iterate over all the + statements in an ast.Module and for each ast.Assert statement it + finds call .visit() with it. Then .visit_Assert() takes over and + is responsible for creating new ast statements to replace the + original assert statement: it re-writes the test of an assertion + to provide intermediate values and replace it with an if statement + which raises an assertion error with a detailed explanation in + case the expression is false. + + For this .visit_Assert() uses the visitor pattern to visit all the + AST nodes of the ast.Assert.test field, each visit call returning + an AST node and the corresponding explanation string. During this + state is kept in several instance attributes: + + :statements: All the AST statements which will replace the assert + statement. + + :variables: This is populated by .variable() with each variable + used by the statements so that they can all be set to None at + the end of the statements. + + :variable_counter: Counter to create new unique variables needed + by statements. Variables are created using .variable() and + have the form of "@py_assert0". + + :on_failure: The AST statements which will be executed if the + assertion test fails. This is the code which will construct + the failure message and raises the AssertionError. + + :explanation_specifiers: A dict filled by .explanation_param() + with %-formatting placeholders and their corresponding + expressions to use in the building of an assertion message. + This is used by .pop_format_context() to build a message. + + :stack: A stack of the explanation_specifiers dicts maintained by + .push_format_context() and .pop_format_context() which allows + to build another %-formatted string while already building one. + + This state is reset on every new assert statement visited and used + by the other visitors.
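The effect of the rewriting described in this docstring can be pictured with a hand-written equivalent. A rough sketch of what "assert f(x) == y" turns into; names are simplified, since the real temporaries ("@py_assert0", ...) are deliberately not legal identifiers and the real message is assembled by the %-format machinery below:

    def f(x):
        return x + 1

    x, y = 1, 3
    py_assert0 = f(x)                      # intermediate value is captured
    py_assert1 = y
    py_assert2 = py_assert0 == py_assert1
    if not py_assert2:                     # message is only built on failure
        raise AssertionError("assert %r == %r" % (py_assert0, py_assert1))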
+ + """ + + def __init__(self, module_path, config): + super(AssertionRewriter, self).__init__() + self.module_path = module_path + self.config = config + + def run(self, mod): + """Find all assert statements in *mod* and rewrite them.""" + if not mod.body: + # Nothing to do. + return + # Insert some special imports at the top of the module but after any + # docstrings and __future__ imports. + aliases = [ast.alias(py.builtin.builtins.__name__, "@py_builtins"), + ast.alias("_pytest.assertion.rewrite", "@pytest_ar")] + expect_docstring = True + pos = 0 + lineno = 0 + for item in mod.body: + if (expect_docstring and isinstance(item, ast.Expr) and + isinstance(item.value, ast.Str)): + doc = item.value.s + if "PYTEST_DONT_REWRITE" in doc: + # The module has disabled assertion rewriting. + return + lineno += len(doc) - 1 + expect_docstring = False + elif (not isinstance(item, ast.ImportFrom) or item.level > 0 or + item.module != "__future__"): + lineno = item.lineno + break + pos += 1 + imports = [ast.Import([alias], lineno=lineno, col_offset=0) + for alias in aliases] + mod.body[pos:pos] = imports + # Collect asserts. + nodes = [mod] + while nodes: + node = nodes.pop() + for name, field in ast.iter_fields(node): + if isinstance(field, list): + new = [] + for i, child in enumerate(field): + if isinstance(child, ast.Assert): + # Transform assert. + new.extend(self.visit(child)) + else: + new.append(child) + if isinstance(child, ast.AST): + nodes.append(child) + setattr(node, name, new) + elif (isinstance(field, ast.AST) and + # Don't recurse into expressions as they can't contain + # asserts. + not isinstance(field, ast.expr)): + nodes.append(field) + + def variable(self): + """Get a new variable.""" + # Use a character invalid in python identifiers to avoid clashing. + name = "@py_assert" + str(next(self.variable_counter)) + self.variables.append(name) + return name + + def assign(self, expr): + """Give *expr* a name.""" + name = self.variable() + self.statements.append(ast.Assign([ast.Name(name, ast.Store())], expr)) + return ast.Name(name, ast.Load()) + + def display(self, expr): + """Call py.io.saferepr on the expression.""" + return self.helper("saferepr", expr) + + def helper(self, name, *args): + """Call a helper in this module.""" + py_name = ast.Name("@pytest_ar", ast.Load()) + attr = ast.Attribute(py_name, "_" + name, ast.Load()) + return ast_Call(attr, list(args), []) + + def builtin(self, name): + """Return the builtin called *name*.""" + builtin_name = ast.Name("@py_builtins", ast.Load()) + return ast.Attribute(builtin_name, name, ast.Load()) + + def explanation_param(self, expr): + """Return a new named %-formatting placeholder for expr. + + This creates a %-formatting placeholder for expr in the + current formatting context, e.g. ``%(py0)s``. The placeholder + and expr are placed in the current format context so that it + can be used on the next call to .pop_format_context(). + + """ + specifier = "py" + str(next(self.variable_counter)) + self.explanation_specifiers[specifier] = expr + return "%(" + specifier + ")s" + + def push_format_context(self): + """Create a new formatting context. + + The format context is used for when an explanation wants to + have a variable value formatted in the assertion message. In + this case the value required can be added using + .explanation_param(). Finally .pop_format_context() is used + to format a string of %-formatted values as added by + .explanation_param(). 
+ + """ + self.explanation_specifiers = {} + self.stack.append(self.explanation_specifiers) + + def pop_format_context(self, expl_expr): + """Format the %-formatted string with current format context. + + The expl_expr should be an ast.Str instance constructed from + the %-placeholders created by .explanation_param(). This will + add the required code to format said string to .on_failure and + return the ast.Name instance of the formatted string. + + """ + current = self.stack.pop() + if self.stack: + self.explanation_specifiers = self.stack[-1] + keys = [ast.Str(key) for key in current.keys()] + format_dict = ast.Dict(keys, list(current.values())) + form = ast.BinOp(expl_expr, ast.Mod(), format_dict) + name = "@py_format" + str(next(self.variable_counter)) + self.on_failure.append(ast.Assign([ast.Name(name, ast.Store())], form)) + return ast.Name(name, ast.Load()) + + def generic_visit(self, node): + """Handle expressions we don't have custom code for.""" + assert isinstance(node, ast.expr) + res = self.assign(node) + return res, self.explanation_param(self.display(res)) + + def visit_Assert(self, assert_): + """Return the AST statements to replace the ast.Assert instance. + + This re-writes the test of an assertion to provide + intermediate values and replace it with an if statement which + raises an assertion error with a detailed explanation in case + the expression is false. + + """ + if isinstance(assert_.test, ast.Tuple) and self.config is not None: + fslocation = (self.module_path, assert_.lineno) + self.config.warn('R1', 'assertion is always true, perhaps ' + 'remove parentheses?', fslocation=fslocation) + self.statements = [] + self.variables = [] + self.variable_counter = itertools.count() + self.stack = [] + self.on_failure = [] + self.push_format_context() + # Rewrite assert into a bunch of statements. + top_condition, explanation = self.visit(assert_.test) + # Create failure message. + body = self.on_failure + negation = ast.UnaryOp(ast.Not(), top_condition) + self.statements.append(ast.If(negation, body, [])) + if assert_.msg: + assertmsg = self.helper('format_assertmsg', assert_.msg) + explanation = "\n>assert " + explanation + else: + assertmsg = ast.Str("") + explanation = "assert " + explanation + template = ast.BinOp(assertmsg, ast.Add(), ast.Str(explanation)) + msg = self.pop_format_context(template) + fmt = self.helper("format_explanation", msg) + err_name = ast.Name("AssertionError", ast.Load()) + exc = ast_Call(err_name, [fmt], []) + if sys.version_info[0] >= 3: + raise_ = ast.Raise(exc, None) + else: + raise_ = ast.Raise(exc, None, None) + body.append(raise_) + # Clear temporary variables by setting them to None. + if self.variables: + variables = [ast.Name(name, ast.Store()) + for name in self.variables] + clear = ast.Assign(variables, _NameConstant(None)) + self.statements.append(clear) + # Fix line numbers. + for stmt in self.statements: + set_location(stmt, assert_.lineno, assert_.col_offset) + return self.statements + + def visit_Name(self, name): + # Display the repr of the name if it's a local variable or + # _should_repr_global_name() thinks it's acceptable. 
+ locs = ast_Call(self.builtin("locals"), [], []) + inlocs = ast.Compare(ast.Str(name.id), [ast.In()], [locs]) + dorepr = self.helper("should_repr_global_name", name) + test = ast.BoolOp(ast.Or(), [inlocs, dorepr]) + expr = ast.IfExp(test, self.display(name), ast.Str(name.id)) + return name, self.explanation_param(expr) + + def visit_BoolOp(self, boolop): + res_var = self.variable() + expl_list = self.assign(ast.List([], ast.Load())) + app = ast.Attribute(expl_list, "append", ast.Load()) + is_or = int(isinstance(boolop.op, ast.Or)) + body = save = self.statements + fail_save = self.on_failure + levels = len(boolop.values) - 1 + self.push_format_context() + # Process each operand, short-circuting if needed. + for i, v in enumerate(boolop.values): + if i: + fail_inner = [] + # cond is set in a prior loop iteration below + self.on_failure.append(ast.If(cond, fail_inner, [])) # noqa + self.on_failure = fail_inner + self.push_format_context() + res, expl = self.visit(v) + body.append(ast.Assign([ast.Name(res_var, ast.Store())], res)) + expl_format = self.pop_format_context(ast.Str(expl)) + call = ast_Call(app, [expl_format], []) + self.on_failure.append(ast.Expr(call)) + if i < levels: + cond = res + if is_or: + cond = ast.UnaryOp(ast.Not(), cond) + inner = [] + self.statements.append(ast.If(cond, inner, [])) + self.statements = body = inner + self.statements = save + self.on_failure = fail_save + expl_template = self.helper("format_boolop", expl_list, ast.Num(is_or)) + expl = self.pop_format_context(expl_template) + return ast.Name(res_var, ast.Load()), self.explanation_param(expl) + + def visit_UnaryOp(self, unary): + pattern = unary_map[unary.op.__class__] + operand_res, operand_expl = self.visit(unary.operand) + res = self.assign(ast.UnaryOp(unary.op, operand_res)) + return res, pattern % (operand_expl,) + + def visit_BinOp(self, binop): + symbol = binop_map[binop.op.__class__] + left_expr, left_expl = self.visit(binop.left) + right_expr, right_expl = self.visit(binop.right) + explanation = "(%s %s %s)" % (left_expl, symbol, right_expl) + res = self.assign(ast.BinOp(left_expr, binop.op, right_expr)) + return res, explanation + + def visit_Call_35(self, call): + """ + visit `ast.Call` nodes on Python3.5 and after + """ + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + for arg in call.args: + res, expl = self.visit(arg) + arg_expls.append(expl) + new_args.append(res) + for keyword in call.keywords: + res, expl = self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + if keyword.arg: + arg_expls.append(keyword.arg + "=" + expl) + else: ## **args have `arg` keywords with an .arg of None + arg_expls.append("**" + expl) + + expl = "%s(%s)" % (func_expl, ', '.join(arg_expls)) + new_call = ast.Call(new_func, new_args, new_kwargs) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl) + return res, outer_expl + + def visit_Starred(self, starred): + # From Python 3.5, a Starred node can appear in a function call + res, expl = self.visit(starred.value) + return starred, '*' + expl + + def visit_Call_legacy(self, call): + """ + visit `ast.Call nodes on 3.4 and below` + """ + new_func, func_expl = self.visit(call.func) + arg_expls = [] + new_args = [] + new_kwargs = [] + new_star = new_kwarg = None + for arg in call.args: + res, expl = self.visit(arg) + new_args.append(res) + arg_expls.append(expl) + for keyword in call.keywords: + res, expl 
= self.visit(keyword.value) + new_kwargs.append(ast.keyword(keyword.arg, res)) + arg_expls.append(keyword.arg + "=" + expl) + if call.starargs: + new_star, expl = self.visit(call.starargs) + arg_expls.append("*" + expl) + if call.kwargs: + new_kwarg, expl = self.visit(call.kwargs) + arg_expls.append("**" + expl) + expl = "%s(%s)" % (func_expl, ', '.join(arg_expls)) + new_call = ast.Call(new_func, new_args, new_kwargs, + new_star, new_kwarg) + res = self.assign(new_call) + res_expl = self.explanation_param(self.display(res)) + outer_expl = "%s\n{%s = %s\n}" % (res_expl, res_expl, expl) + return res, outer_expl + + # ast.Call signature changed on 3.5, + # conditionally change which methods is named + # visit_Call depending on Python version + if sys.version_info >= (3, 5): + visit_Call = visit_Call_35 + else: + visit_Call = visit_Call_legacy + + + def visit_Attribute(self, attr): + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + value, value_expl = self.visit(attr.value) + res = self.assign(ast.Attribute(value, attr.attr, ast.Load())) + res_expl = self.explanation_param(self.display(res)) + pat = "%s\n{%s = %s.%s\n}" + expl = pat % (res_expl, res_expl, value_expl, attr.attr) + return res, expl + + def visit_Compare(self, comp): + self.push_format_context() + left_res, left_expl = self.visit(comp.left) + if isinstance(comp.left, (_ast.Compare, _ast.BoolOp)): + left_expl = "({0})".format(left_expl) + res_variables = [self.variable() for i in range(len(comp.ops))] + load_names = [ast.Name(v, ast.Load()) for v in res_variables] + store_names = [ast.Name(v, ast.Store()) for v in res_variables] + it = zip(range(len(comp.ops)), comp.ops, comp.comparators) + expls = [] + syms = [] + results = [left_res] + for i, op, next_operand in it: + next_res, next_expl = self.visit(next_operand) + if isinstance(next_operand, (_ast.Compare, _ast.BoolOp)): + next_expl = "({0})".format(next_expl) + results.append(next_res) + sym = binop_map[op.__class__] + syms.append(ast.Str(sym)) + expl = "%s %s %s" % (left_expl, sym, next_expl) + expls.append(ast.Str(expl)) + res_expr = ast.Compare(left_res, [op], [next_res]) + self.statements.append(ast.Assign([store_names[i]], res_expr)) + left_res, left_expl = next_res, next_expl + # Use pytest.assertion.util._reprcompare if that's available. + expl_call = self.helper("call_reprcompare", + ast.Tuple(syms, ast.Load()), + ast.Tuple(load_names, ast.Load()), + ast.Tuple(expls, ast.Load()), + ast.Tuple(results, ast.Load())) + if len(comp.ops) > 1: + res = ast.BoolOp(ast.And(), load_names) + else: + res = load_names[0] + return res, self.explanation_param(self.pop_format_context(expl_call)) diff --git a/venv/lib/python3.5/site-packages/_pytest/assertion/truncate.py b/venv/lib/python3.5/site-packages/_pytest/assertion/truncate.py new file mode 100644 index 0000000..1e13063 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/assertion/truncate.py @@ -0,0 +1,102 @@ +""" +Utilities for truncating assertion output. + +Current default behaviour is to truncate assertion explanations at +~8 terminal lines, unless running in "-vv" mode or running on CI. +""" +from __future__ import absolute_import, division, print_function +import os + +import py + + +DEFAULT_MAX_LINES = 8 +DEFAULT_MAX_CHARS = 8 * 80 +USAGE_MSG = "use '-vv' to show" + + +def truncate_if_required(explanation, item, max_length=None): + """ + Truncate this assertion explanation if the given test item is eligible. 
+ """ + if _should_truncate_item(item): + return _truncate_explanation(explanation) + return explanation + + +def _should_truncate_item(item): + """ + Whether or not this test item is eligible for truncation. + """ + verbose = item.config.option.verbose + return verbose < 2 and not _running_on_ci() + + +def _running_on_ci(): + """Check if we're currently running on a CI system.""" + env_vars = ['CI', 'BUILD_NUMBER'] + return any(var in os.environ for var in env_vars) + + +def _truncate_explanation(input_lines, max_lines=None, max_chars=None): + """ + Truncate given list of strings that makes up the assertion explanation. + + Truncates to either 8 lines, or 640 characters - whichever the input reaches + first. The remaining lines will be replaced by a usage message. + """ + + if max_lines is None: + max_lines = DEFAULT_MAX_LINES + if max_chars is None: + max_chars = DEFAULT_MAX_CHARS + + # Check if truncation required + input_char_count = len("".join(input_lines)) + if len(input_lines) <= max_lines and input_char_count <= max_chars: + return input_lines + + # Truncate first to max_lines, and then truncate to max_chars if max_chars + # is exceeded. + truncated_explanation = input_lines[:max_lines] + truncated_explanation = _truncate_by_char_count(truncated_explanation, max_chars) + + # Add ellipsis to final line + truncated_explanation[-1] = truncated_explanation[-1] + "..." + + # Append useful message to explanation + truncated_line_count = len(input_lines) - len(truncated_explanation) + truncated_line_count += 1 # Account for the part-truncated final line + msg = '...Full output truncated' + if truncated_line_count == 1: + msg += ' ({0} line hidden)'.format(truncated_line_count) + else: + msg += ' ({0} lines hidden)'.format(truncated_line_count) + msg += ", {0}" .format(USAGE_MSG) + truncated_explanation.extend([ + py.builtin._totext(""), + py.builtin._totext(msg), + ]) + return truncated_explanation + + +def _truncate_by_char_count(input_lines, max_chars): + # Check if truncation required + if len("".join(input_lines)) <= max_chars: + return input_lines + + # Find point at which input length exceeds total allowed length + iterated_char_count = 0 + for iterated_index, input_line in enumerate(input_lines): + if iterated_char_count + len(input_line) > max_chars: + break + iterated_char_count += len(input_line) + + # Create truncated explanation with modified final line + truncated_result = input_lines[:iterated_index] + final_line = input_lines[iterated_index] + if final_line: + final_line_truncate_point = max_chars - iterated_char_count + final_line = final_line[:final_line_truncate_point] + truncated_result.append(final_line) + return truncated_result diff --git a/venv/lib/python3.5/site-packages/_pytest/assertion/util.py b/venv/lib/python3.5/site-packages/_pytest/assertion/util.py new file mode 100644 index 0000000..06eda8d --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/assertion/util.py @@ -0,0 +1,301 @@ +"""Utilities for assertion debugging""" +from __future__ import absolute_import, division, print_function +import pprint + +import _pytest._code +import py +try: + from collections import Sequence +except ImportError: + Sequence = list + + +u = py.builtin._totext + +# The _reprcompare attribute on the util module is used by the new assertion +# interpretation code and assertion rewriter to detect this plugin was +# loaded and in turn call the hooks defined here as part of the +# DebugInterpreter. 
+_reprcompare = None + + +# the re-encoding is needed for python2 repr +# with non-ascii characters (see issue 877 and 1379) +def ecu(s): + try: + return u(s, 'utf-8', 'replace') + except TypeError: + return s + + +def format_explanation(explanation): + """This formats an explanation + + Normally all embedded newlines are escaped, however there are + three exceptions: \n{, \n} and \n~. The first two are intended + cover nested explanations, see function and attribute explanations + for examples (.visit_Call(), visit_Attribute()). The last one is + for when one explanation needs to span multiple lines, e.g. when + displaying diffs. + """ + explanation = ecu(explanation) + lines = _split_explanation(explanation) + result = _format_lines(lines) + return u('\n').join(result) + + +def _split_explanation(explanation): + """Return a list of individual lines in the explanation + + This will return a list of lines split on '\n{', '\n}' and '\n~'. + Any other newlines will be escaped and appear in the line as the + literal '\n' characters. + """ + raw_lines = (explanation or u('')).split('\n') + lines = [raw_lines[0]] + for l in raw_lines[1:]: + if l and l[0] in ['{', '}', '~', '>']: + lines.append(l) + else: + lines[-1] += '\\n' + l + return lines + + +def _format_lines(lines): + """Format the individual lines + + This will replace the '{', '}' and '~' characters of our mini + formatting language with the proper 'where ...', 'and ...' and ' + + ...' text, taking care of indentation along the way. + + Return a list of formatted lines. + """ + result = lines[:1] + stack = [0] + stackcnt = [0] + for line in lines[1:]: + if line.startswith('{'): + if stackcnt[-1]: + s = u('and ') + else: + s = u('where ') + stack.append(len(result)) + stackcnt[-1] += 1 + stackcnt.append(0) + result.append(u(' +') + u(' ')*(len(stack)-1) + s + line[1:]) + elif line.startswith('}'): + stack.pop() + stackcnt.pop() + result[stack[-1]] += line[1:] + else: + assert line[0] in ['~', '>'] + stack[-1] += 1 + indent = len(stack) if line.startswith('~') else len(stack) - 1 + result.append(u(' ')*indent + line[1:]) + assert len(stack) == 1 + return result + + +# Provide basestring in python3 +try: + basestring = basestring +except NameError: + basestring = str + + +def assertrepr_compare(config, op, left, right): + """Return specialised explanations for some operators/operands""" + width = 80 - 15 - len(op) - 2 # 15 chars indentation, 1 space around op + left_repr = py.io.saferepr(left, maxsize=int(width//2)) + right_repr = py.io.saferepr(right, maxsize=width-len(left_repr)) + + summary = u('%s %s %s') % (ecu(left_repr), op, ecu(right_repr)) + + issequence = lambda x: (isinstance(x, (list, tuple, Sequence)) and + not isinstance(x, basestring)) + istext = lambda x: isinstance(x, basestring) + isdict = lambda x: isinstance(x, dict) + isset = lambda x: isinstance(x, (set, frozenset)) + + def isiterable(obj): + try: + iter(obj) + return not istext(obj) + except TypeError: + return False + + verbose = config.getoption('verbose') + explanation = None + try: + if op == '==': + if istext(left) and istext(right): + explanation = _diff_text(left, right, verbose) + else: + if issequence(left) and issequence(right): + explanation = _compare_eq_sequence(left, right, verbose) + elif isset(left) and isset(right): + explanation = _compare_eq_set(left, right, verbose) + elif isdict(left) and isdict(right): + explanation = _compare_eq_dict(left, right, verbose) + if isiterable(left) and isiterable(right): + expl = _compare_eq_iterable(left, right, 
verbose) + if explanation is not None: + explanation.extend(expl) + else: + explanation = expl + elif op == 'not in': + if istext(left) and istext(right): + explanation = _notin_text(left, right, verbose) + except Exception: + explanation = [ + u('(pytest_assertion plugin: representation of details failed. ' + 'Probably an object has a faulty __repr__.)'), + u(_pytest._code.ExceptionInfo())] + + if not explanation: + return None + + return [summary] + explanation + + +def _diff_text(left, right, verbose=False): + """Return the explanation for the diff between text or bytes + + Unless --verbose is used this will skip leading and trailing + characters which are identical to keep the diff minimal. + + If the input are bytes they will be safely converted to text. + """ + from difflib import ndiff + explanation = [] + if isinstance(left, py.builtin.bytes): + left = u(repr(left)[1:-1]).replace(r'\n', '\n') + if isinstance(right, py.builtin.bytes): + right = u(repr(right)[1:-1]).replace(r'\n', '\n') + if not verbose: + i = 0 # just in case left or right has zero length + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + break + if i > 42: + i -= 10 # Provide some context + explanation = [u('Skipping %s identical leading ' + 'characters in diff, use -v to show') % i] + left = left[i:] + right = right[i:] + if len(left) == len(right): + for i in range(len(left)): + if left[-i] != right[-i]: + break + if i > 42: + i -= 10 # Provide some context + explanation += [u('Skipping %s identical trailing ' + 'characters in diff, use -v to show') % i] + left = left[:-i] + right = right[:-i] + keepends = True + explanation += [line.strip('\n') + for line in ndiff(left.splitlines(keepends), + right.splitlines(keepends))] + return explanation + + +def _compare_eq_iterable(left, right, verbose=False): + if not verbose: + return [u('Use -v to get the full diff')] + # dynamic import to speedup pytest + import difflib + + try: + left_formatting = pprint.pformat(left).splitlines() + right_formatting = pprint.pformat(right).splitlines() + explanation = [u('Full diff:')] + except Exception: + # hack: PrettyPrinter.pformat() in python 2 fails when formatting items that can't be sorted(), ie, calling + # sorted() on a list would raise. See issue #718. + # As a workaround, the full diff is generated by using the repr() string of each item of each container. 
+ left_formatting = sorted(repr(x) for x in left) + right_formatting = sorted(repr(x) for x in right) + explanation = [u('Full diff (fallback to calling repr on each item):')] + explanation.extend(line.strip() for line in difflib.ndiff(left_formatting, right_formatting)) + return explanation + + +def _compare_eq_sequence(left, right, verbose=False): + explanation = [] + for i in range(min(len(left), len(right))): + if left[i] != right[i]: + explanation += [u('At index %s diff: %r != %r') + % (i, left[i], right[i])] + break + if len(left) > len(right): + explanation += [u('Left contains more items, first extra item: %s') + % py.io.saferepr(left[len(right)],)] + elif len(left) < len(right): + explanation += [ + u('Right contains more items, first extra item: %s') % + py.io.saferepr(right[len(left)],)] + return explanation + + +def _compare_eq_set(left, right, verbose=False): + explanation = [] + diff_left = left - right + diff_right = right - left + if diff_left: + explanation.append(u('Extra items in the left set:')) + for item in diff_left: + explanation.append(py.io.saferepr(item)) + if diff_right: + explanation.append(u('Extra items in the right set:')) + for item in diff_right: + explanation.append(py.io.saferepr(item)) + return explanation + + +def _compare_eq_dict(left, right, verbose=False): + explanation = [] + common = set(left).intersection(set(right)) + same = dict((k, left[k]) for k in common if left[k] == right[k]) + if same and verbose < 2: + explanation += [u('Omitting %s identical items, use -vv to show') % + len(same)] + elif same: + explanation += [u('Common items:')] + explanation += pprint.pformat(same).splitlines() + diff = set(k for k in common if left[k] != right[k]) + if diff: + explanation += [u('Differing items:')] + for k in diff: + explanation += [py.io.saferepr({k: left[k]}) + ' != ' + + py.io.saferepr({k: right[k]})] + extra_left = set(left) - set(right) + if extra_left: + explanation.append(u('Left contains more items:')) + explanation.extend(pprint.pformat( + dict((k, left[k]) for k in extra_left)).splitlines()) + extra_right = set(right) - set(left) + if extra_right: + explanation.append(u('Right contains more items:')) + explanation.extend(pprint.pformat( + dict((k, right[k]) for k in extra_right)).splitlines()) + return explanation + + +def _notin_text(term, text, verbose=False): + index = text.find(term) + head = text[:index] + tail = text[index+len(term):] + correct_text = head + tail + diff = _diff_text(correct_text, text, verbose) + newdiff = [u('%s is contained here:') % py.io.saferepr(term, maxsize=42)] + for line in diff: + if line.startswith(u('Skipping')): + continue + if line.startswith(u('- ')): + continue + if line.startswith(u('+ ')): + newdiff.append(u(' ') + line[2:]) + else: + newdiff.append(line) + return newdiff diff --git a/venv/lib/python3.5/site-packages/_pytest/cacheprovider.py b/venv/lib/python3.5/site-packages/_pytest/cacheprovider.py new file mode 100644 index 0000000..7fc08ff --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/cacheprovider.py @@ -0,0 +1,245 @@ +""" +merged implementation of the cache provider + +the name cache was not chosen to ensure pluggy automatically +ignores the external pytest-cache +""" +from __future__ import absolute_import, division, print_function +import py +import pytest +import json +from os.path import sep as _sep, altsep as _altsep + + +class Cache(object): + def __init__(self, config): + self.config = config + self._cachedir = config.rootdir.join(".cache") + self.trace = 
config.trace.root.get("cache") + if config.getvalue("cacheclear"): + self.trace("clearing cachedir") + if self._cachedir.check(): + self._cachedir.remove() + self._cachedir.mkdir() + + def makedir(self, name): + """ return a directory path object with the given name. If the + directory does not yet exist, it will be created. You can use it + to manage files likes e. g. store/retrieve database + dumps across test sessions. + + :param name: must be a string not containing a ``/`` separator. + Make sure the name contains your plugin or application + identifiers to prevent clashes with other cache users. + """ + if _sep in name or _altsep is not None and _altsep in name: + raise ValueError("name is not allowed to contain path separators") + return self._cachedir.ensure_dir("d", name) + + def _getvaluepath(self, key): + return self._cachedir.join('v', *key.split('/')) + + def get(self, key, default): + """ return cached value for the given key. If no value + was yet cached or the value cannot be read, the specified + default is returned. + + :param key: must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param default: must be provided in case of a cache-miss or + invalid cache values. + + """ + path = self._getvaluepath(key) + if path.check(): + try: + with path.open("r") as f: + return json.load(f) + except ValueError: + self.trace("cache-invalid at %s" % (path,)) + return default + + def set(self, key, value): + """ save value for the given key. + + :param key: must be a ``/`` separated value. Usually the first + name is the name of your plugin or your application. + :param value: must be of any combination of basic + python types, including nested types + like e. g. lists of dictionaries. + """ + path = self._getvaluepath(key) + try: + path.dirpath().ensure_dir() + except (py.error.EEXIST, py.error.EACCES): + self.config.warn( + code='I9', message='could not create cache path %s' % (path,) + ) + return + try: + f = path.open('w') + except py.error.ENOTDIR: + self.config.warn( + code='I9', message='cache could not write path %s' % (path,)) + else: + with f: + self.trace("cache-write %s: %r" % (key, value,)) + json.dump(value, f, indent=2, sort_keys=True) + + +class LFPlugin: + """ Plugin which implements the --lf (run last-failing) option """ + def __init__(self, config): + self.config = config + active_keys = 'lf', 'failedfirst' + self.active = any(config.getvalue(key) for key in active_keys) + if self.active: + self.lastfailed = config.cache.get("cache/lastfailed", {}) + else: + self.lastfailed = {} + + def pytest_report_header(self): + if self.active: + if not self.lastfailed: + mode = "run all (no recorded failures)" + else: + mode = "rerun last %d failures%s" % ( + len(self.lastfailed), + " first" if self.config.getvalue("failedfirst") else "") + return "run-last-failure: %s" % mode + + def pytest_runtest_logreport(self, report): + if report.failed and "xfail" not in report.keywords: + self.lastfailed[report.nodeid] = True + elif not report.failed: + if report.when == "call": + self.lastfailed.pop(report.nodeid, None) + + def pytest_collectreport(self, report): + passed = report.outcome in ('passed', 'skipped') + if passed: + if report.nodeid in self.lastfailed: + self.lastfailed.pop(report.nodeid) + self.lastfailed.update( + (item.nodeid, True) + for item in report.result) + else: + self.lastfailed[report.nodeid] = True + + def pytest_collection_modifyitems(self, session, config, items): + if self.active and self.lastfailed: + 
previously_failed = [] + previously_passed = [] + for item in items: + if item.nodeid in self.lastfailed: + previously_failed.append(item) + else: + previously_passed.append(item) + if not previously_failed and previously_passed: + # running a subset of all tests with recorded failures outside + # of the set of tests currently executing + pass + elif self.config.getvalue("lf"): + items[:] = previously_failed + config.hook.pytest_deselected(items=previously_passed) + else: + items[:] = previously_failed + previously_passed + + def pytest_sessionfinish(self, session): + config = self.config + if config.getvalue("cacheshow") or hasattr(config, "slaveinput"): + return + prev_failed = config.cache.get("cache/lastfailed", None) is not None + if (session.testscollected and prev_failed) or self.lastfailed: + config.cache.set("cache/lastfailed", self.lastfailed) + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption( + '--lf', '--last-failed', action='store_true', dest="lf", + help="rerun only the tests that failed " + "at the last run (or all if none failed)") + group.addoption( + '--ff', '--failed-first', action='store_true', dest="failedfirst", + help="run all tests but run the last failures first. " + "This may re-order tests and thus lead to " + "repeated fixture setup/teardown") + group.addoption( + '--cache-show', action='store_true', dest="cacheshow", + help="show cache contents, don't perform collection or tests") + group.addoption( + '--cache-clear', action='store_true', dest="cacheclear", + help="remove all cache contents at start of test run.") + + +def pytest_cmdline_main(config): + if config.option.cacheshow: + from _pytest.main import wrap_session + return wrap_session(config, cacheshow) + + + +@pytest.hookimpl(tryfirst=True) +def pytest_configure(config): + config.cache = Cache(config) + config.pluginmanager.register(LFPlugin(config), "lfplugin") + + +@pytest.fixture +def cache(request): + """ + Return a cache object that can persist state between testing sessions. + + cache.get(key, default) + cache.set(key, value) + + Keys must be a ``/`` separated value, where the first part is usually the + name of your plugin or application to avoid clashes with other cache users. + + Values can be any object handled by the json stdlib module. 
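A sketch of how a suite might lean on this fixture to persist an expensive computation between runs; the fixture name and cache key are illustrative:

    import pytest

    @pytest.fixture
    def expensive_data(cache):
        data = cache.get("example/expensive", None)
        if data is None:
            data = {"answer": 42}   # stand-in for the slow part
            cache.set("example/expensive", data)
        return data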
+ """ + return request.config.cache + + +def pytest_report_header(config): + if config.option.verbose: + relpath = py.path.local().bestrelpath(config.cache._cachedir) + return "cachedir: %s" % relpath + + +def cacheshow(config, session): + from pprint import pprint + tw = py.io.TerminalWriter() + tw.line("cachedir: " + str(config.cache._cachedir)) + if not config.cache._cachedir.check(): + tw.line("cache is empty") + return 0 + dummy = object() + basedir = config.cache._cachedir + vdir = basedir.join("v") + tw.sep("-", "cache values") + for valpath in sorted(vdir.visit(lambda x: x.isfile())): + key = valpath.relto(vdir).replace(valpath.sep, "/") + val = config.cache.get(key, dummy) + if val is dummy: + tw.line("%s contains unreadable content, " + "will be ignored" % key) + else: + tw.line("%s contains:" % key) + stream = py.io.TextIO() + pprint(val, stream=stream) + for line in stream.getvalue().splitlines(): + tw.line(" " + line) + + ddir = basedir.join("d") + if ddir.isdir() and ddir.listdir(): + tw.sep("-", "cache directories") + for p in sorted(basedir.join("d").visit()): + #if p.check(dir=1): + # print("%s/" % p.relto(basedir)) + if p.isfile(): + key = p.relto(basedir) + tw.line("%s is a file of length %d" % ( + key, p.size())) + return 0 diff --git a/venv/lib/python3.5/site-packages/_pytest/capture.py b/venv/lib/python3.5/site-packages/_pytest/capture.py new file mode 100644 index 0000000..3661f26 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/capture.py @@ -0,0 +1,542 @@ +""" +per-test stdout/stderr capturing mechanism. + +""" +from __future__ import absolute_import, division, print_function + +import contextlib +import sys +import os +import io +from io import UnsupportedOperation +from tempfile import TemporaryFile + +import py +import pytest +from _pytest.compat import CaptureIO + +unicode = py.builtin.text + +patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'} + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group._addoption( + '--capture', action="store", + default="fd" if hasattr(os, "dup") else "sys", + metavar="method", choices=['fd', 'sys', 'no'], + help="per-test capturing method: one of fd|sys|no.") + group._addoption( + '-s', action="store_const", const="no", dest="capture", + help="shortcut for --capture=no.") + + +@pytest.hookimpl(hookwrapper=True) +def pytest_load_initial_conftests(early_config, parser, args): + ns = early_config.known_args_namespace + if ns.capture == "fd": + _py36_windowsconsoleio_workaround() + _readline_workaround() + pluginmanager = early_config.pluginmanager + capman = CaptureManager(ns.capture) + pluginmanager.register(capman, "capturemanager") + + # make sure that capturemanager is properly reset at final shutdown + early_config.add_cleanup(capman.reset_capturings) + + # make sure logging does not raise exceptions at the end + def silence_logging_at_shutdown(): + if "logging" in sys.modules: + sys.modules["logging"].raiseExceptions = False + early_config.add_cleanup(silence_logging_at_shutdown) + + # finally trigger conftest loading but while capturing (issue93) + capman.init_capturings() + outcome = yield + out, err = capman.suspendcapture() + if outcome.excinfo is not None: + sys.stdout.write(out) + sys.stderr.write(err) + + +class CaptureManager: + def __init__(self, method): + self._method = method + + def _getcapture(self, method): + if method == "fd": + return MultiCapture(out=True, err=True, Capture=FDCapture) + elif method == "sys": + return MultiCapture(out=True, err=True, Capture=SysCapture) + 
elif method == "no": + return MultiCapture(out=False, err=False, in_=False) + else: + raise ValueError("unknown capturing method: %r" % method) + + def init_capturings(self): + assert not hasattr(self, "_capturing") + self._capturing = self._getcapture(self._method) + self._capturing.start_capturing() + + def reset_capturings(self): + cap = self.__dict__.pop("_capturing", None) + if cap is not None: + cap.pop_outerr_to_orig() + cap.stop_capturing() + + def resumecapture(self): + self._capturing.resume_capturing() + + def suspendcapture(self, in_=False): + self.deactivate_funcargs() + cap = getattr(self, "_capturing", None) + if cap is not None: + try: + outerr = cap.readouterr() + finally: + cap.suspend_capturing(in_=in_) + return outerr + + def activate_funcargs(self, pyfuncitem): + capfuncarg = pyfuncitem.__dict__.pop("_capfuncarg", None) + if capfuncarg is not None: + capfuncarg._start() + self._capfuncarg = capfuncarg + + def deactivate_funcargs(self): + capfuncarg = self.__dict__.pop("_capfuncarg", None) + if capfuncarg is not None: + capfuncarg.close() + + @pytest.hookimpl(hookwrapper=True) + def pytest_make_collect_report(self, collector): + if isinstance(collector, pytest.File): + self.resumecapture() + outcome = yield + out, err = self.suspendcapture() + rep = outcome.get_result() + if out: + rep.sections.append(("Captured stdout", out)) + if err: + rep.sections.append(("Captured stderr", err)) + else: + yield + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_setup(self, item): + self.resumecapture() + yield + self.suspendcapture_item(item, "setup") + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_call(self, item): + self.resumecapture() + self.activate_funcargs(item) + yield + #self.deactivate_funcargs() called from suspendcapture() + self.suspendcapture_item(item, "call") + + @pytest.hookimpl(hookwrapper=True) + def pytest_runtest_teardown(self, item): + self.resumecapture() + yield + self.suspendcapture_item(item, "teardown") + + @pytest.hookimpl(tryfirst=True) + def pytest_keyboard_interrupt(self, excinfo): + self.reset_capturings() + + @pytest.hookimpl(tryfirst=True) + def pytest_internalerror(self, excinfo): + self.reset_capturings() + + def suspendcapture_item(self, item, when, in_=False): + out, err = self.suspendcapture(in_=in_) + item.add_report_section(when, "stdout", out) + item.add_report_section(when, "stderr", err) + + +error_capsysfderror = "cannot use capsys and capfd at the same time" + + +@pytest.fixture +def capsys(request): + """Enable capturing of writes to sys.stdout/sys.stderr and make + captured output available via ``capsys.readouterr()`` method calls + which return a ``(out, err)`` tuple. + """ + if "capfd" in request.fixturenames: + raise request.raiseerror(error_capsysfderror) + request.node._capfuncarg = c = CaptureFixture(SysCapture, request) + return c + +@pytest.fixture +def capfd(request): + """Enable capturing of writes to file descriptors 1 and 2 and make + captured output available via ``capfd.readouterr()`` method calls + which return a ``(out, err)`` tuple. 
+ """ + if "capsys" in request.fixturenames: + request.raiseerror(error_capsysfderror) + if not hasattr(os, 'dup'): + pytest.skip("capfd funcarg needs os.dup") + request.node._capfuncarg = c = CaptureFixture(FDCapture, request) + return c + + +class CaptureFixture: + def __init__(self, captureclass, request): + self.captureclass = captureclass + self.request = request + + def _start(self): + self._capture = MultiCapture(out=True, err=True, in_=False, + Capture=self.captureclass) + self._capture.start_capturing() + + def close(self): + cap = self.__dict__.pop("_capture", None) + if cap is not None: + self._outerr = cap.pop_outerr_to_orig() + cap.stop_capturing() + + def readouterr(self): + try: + return self._capture.readouterr() + except AttributeError: + return self._outerr + + @contextlib.contextmanager + def disabled(self): + capmanager = self.request.config.pluginmanager.getplugin('capturemanager') + capmanager.suspendcapture_item(self.request.node, "call", in_=True) + try: + yield + finally: + capmanager.resumecapture() + + +def safe_text_dupfile(f, mode, default_encoding="UTF8"): + """ return a open text file object that's a duplicate of f on the + FD-level if possible. + """ + encoding = getattr(f, "encoding", None) + try: + fd = f.fileno() + except Exception: + if "b" not in getattr(f, "mode", "") and hasattr(f, "encoding"): + # we seem to have a text stream, let's just use it + return f + else: + newfd = os.dup(fd) + if "b" not in mode: + mode += "b" + f = os.fdopen(newfd, mode, 0) # no buffering + return EncodedFile(f, encoding or default_encoding) + + +class EncodedFile(object): + errors = "strict" # possibly needed by py3 code (issue555) + def __init__(self, buffer, encoding): + self.buffer = buffer + self.encoding = encoding + + def write(self, obj): + if isinstance(obj, unicode): + obj = obj.encode(self.encoding, "replace") + self.buffer.write(obj) + + def writelines(self, linelist): + data = ''.join(linelist) + self.write(data) + + def __getattr__(self, name): + return getattr(object.__getattribute__(self, "buffer"), name) + + +class MultiCapture(object): + out = err = in_ = None + + def __init__(self, out=True, err=True, in_=True, Capture=None): + if in_: + self.in_ = Capture(0) + if out: + self.out = Capture(1) + if err: + self.err = Capture(2) + + def start_capturing(self): + if self.in_: + self.in_.start() + if self.out: + self.out.start() + if self.err: + self.err.start() + + def pop_outerr_to_orig(self): + """ pop current snapshot out/err capture and flush to orig streams. """ + out, err = self.readouterr() + if out: + self.out.writeorg(out) + if err: + self.err.writeorg(err) + return out, err + + def suspend_capturing(self, in_=False): + if self.out: + self.out.suspend() + if self.err: + self.err.suspend() + if in_ and self.in_: + self.in_.suspend() + self._in_suspended = True + + def resume_capturing(self): + if self.out: + self.out.resume() + if self.err: + self.err.resume() + if hasattr(self, "_in_suspended"): + self.in_.resume() + del self._in_suspended + + def stop_capturing(self): + """ stop capturing and reset capturing streams """ + if hasattr(self, '_reset'): + raise ValueError("was already stopped") + self._reset = True + if self.out: + self.out.done() + if self.err: + self.err.done() + if self.in_: + self.in_.done() + + def readouterr(self): + """ return snapshot unicode value of stdout/stderr capturings. 
""" + return (self.out.snap() if self.out is not None else "", + self.err.snap() if self.err is not None else "") + +class NoCapture: + __init__ = start = done = suspend = resume = lambda *args: None + +class FDCapture: + """ Capture IO to/from a given os-level filedescriptor. """ + + def __init__(self, targetfd, tmpfile=None): + self.targetfd = targetfd + try: + self.targetfd_save = os.dup(self.targetfd) + except OSError: + self.start = lambda: None + self.done = lambda: None + else: + if targetfd == 0: + assert not tmpfile, "cannot set tmpfile with stdin" + tmpfile = open(os.devnull, "r") + self.syscapture = SysCapture(targetfd) + else: + if tmpfile is None: + f = TemporaryFile() + with f: + tmpfile = safe_text_dupfile(f, mode="wb+") + if targetfd in patchsysdict: + self.syscapture = SysCapture(targetfd, tmpfile) + else: + self.syscapture = NoCapture() + self.tmpfile = tmpfile + self.tmpfile_fd = tmpfile.fileno() + + def __repr__(self): + return "" % (self.targetfd, self.targetfd_save) + + def start(self): + """ Start capturing on targetfd using memorized tmpfile. """ + try: + os.fstat(self.targetfd_save) + except (AttributeError, OSError): + raise ValueError("saved filedescriptor not valid anymore") + os.dup2(self.tmpfile_fd, self.targetfd) + self.syscapture.start() + + def snap(self): + f = self.tmpfile + f.seek(0) + res = f.read() + if res: + enc = getattr(f, "encoding", None) + if enc and isinstance(res, bytes): + res = py.builtin._totext(res, enc, "replace") + f.truncate(0) + f.seek(0) + return res + return '' + + def done(self): + """ stop capturing, restore streams, return original capture file, + seeked to position zero. """ + targetfd_save = self.__dict__.pop("targetfd_save") + os.dup2(targetfd_save, self.targetfd) + os.close(targetfd_save) + self.syscapture.done() + self.tmpfile.close() + + def suspend(self): + self.syscapture.suspend() + os.dup2(self.targetfd_save, self.targetfd) + + def resume(self): + self.syscapture.resume() + os.dup2(self.tmpfile_fd, self.targetfd) + + def writeorg(self, data): + """ write to original file descriptor. """ + if py.builtin._istext(data): + data = data.encode("utf8") # XXX use encoding of original stream + os.write(self.targetfd_save, data) + + +class SysCapture: + def __init__(self, fd, tmpfile=None): + name = patchsysdict[fd] + self._old = getattr(sys, name) + self.name = name + if tmpfile is None: + if name == "stdin": + tmpfile = DontReadFromInput() + else: + tmpfile = CaptureIO() + self.tmpfile = tmpfile + + def start(self): + setattr(sys, self.name, self.tmpfile) + + def snap(self): + f = self.tmpfile + res = f.getvalue() + f.truncate(0) + f.seek(0) + return res + + def done(self): + setattr(sys, self.name, self._old) + del self._old + self.tmpfile.close() + + def suspend(self): + setattr(sys, self.name, self._old) + + def resume(self): + setattr(sys, self.name, self.tmpfile) + + def writeorg(self, data): + self._old.write(data) + self._old.flush() + + +class DontReadFromInput: + """Temporary stub class. Ideally when stdin is accessed, the + capturing should be turned off, with possibly all data captured + so far sent to the screen. This should be configurable, though, + because in automated test runs it is better to crash than + hang indefinitely. 
+ """ + + encoding = None + + def read(self, *args): + raise IOError("reading from stdin while output is captured") + readline = read + readlines = read + __iter__ = read + + def fileno(self): + raise UnsupportedOperation("redirected stdin is pseudofile, " + "has no fileno()") + + def isatty(self): + return False + + def close(self): + pass + + @property + def buffer(self): + if sys.version_info >= (3,0): + return self + else: + raise AttributeError('redirected stdin has no attribute buffer') + + +def _readline_workaround(): + """ + Ensure readline is imported so that it attaches to the correct stdio + handles on Windows. + + Pdb uses readline support where available--when not running from the Python + prompt, the readline module is not imported until running the pdb REPL. If + running pytest with the --pdb option this means the readline module is not + imported until after I/O capture has been started. + + This is a problem for pyreadline, which is often used to implement readline + support on Windows, as it does not attach to the correct handles for stdout + and/or stdin if they have been redirected by the FDCapture mechanism. This + workaround ensures that readline is imported before I/O capture is setup so + that it can attach to the actual stdin/out for the console. + + See https://github.com/pytest-dev/pytest/pull/1281 + """ + + if not sys.platform.startswith('win32'): + return + try: + import readline # noqa + except ImportError: + pass + + +def _py36_windowsconsoleio_workaround(): + """ + Python 3.6 implemented unicode console handling for Windows. This works + by reading/writing to the raw console handle using + ``{Read,Write}ConsoleW``. + + The problem is that we are going to ``dup2`` over the stdio file + descriptors when doing ``FDCapture`` and this will ``CloseHandle`` the + handles used by Python to write to the console. Though there is still some + weirdness and the console handle seems to only be closed randomly and not + on the first call to ``CloseHandle``, or maybe it gets reopened with the + same handle value when we suspend capturing. + + The workaround in this case will reopen stdio with a different fd which + also means a different handle by replicating the logic in + "Py_lifecycle.c:initstdio/create_stdio". 
+
+    See https://github.com/pytest-dev/py/issues/103
+    """
+    if not sys.platform.startswith('win32') or sys.version_info[:2] < (3, 6):
+        return
+
+    buffered = hasattr(sys.stdout.buffer, 'raw')
+    raw_stdout = sys.stdout.buffer.raw if buffered else sys.stdout.buffer
+
+    if not isinstance(raw_stdout, io._WindowsConsoleIO):
+        return
+
+    def _reopen_stdio(f, mode):
+        if not buffered and mode[0] == 'w':
+            buffering = 0
+        else:
+            buffering = -1
+
+        return io.TextIOWrapper(
+            open(os.dup(f.fileno()), mode, buffering),
+            f.encoding,
+            f.errors,
+            f.newlines,
+            f.line_buffering)
+
+    sys.__stdin__ = sys.stdin = _reopen_stdio(sys.stdin, 'rb')
+    sys.__stdout__ = sys.stdout = _reopen_stdio(sys.stdout, 'wb')
+    sys.__stderr__ = sys.stderr = _reopen_stdio(sys.stderr, 'wb')
diff --git a/venv/lib/python3.5/site-packages/_pytest/compat.py b/venv/lib/python3.5/site-packages/_pytest/compat.py
new file mode 100644
index 0000000..8c200af
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/_pytest/compat.py
@@ -0,0 +1,307 @@
+"""
+python version compatibility code
+"""
+from __future__ import absolute_import, division, print_function
+import sys
+import inspect
+import types
+import re
+import functools
+
+import py
+
+import _pytest
+
+
+
+try:
+    import enum
+except ImportError:  # pragma: no cover
+    # Only available in Python 3.4+ or as a backport
+    enum = None
+
+
+_PY3 = sys.version_info > (3, 0)
+_PY2 = not _PY3
+
+
+NoneType = type(None)
+NOTSET = object()
+
+PY35 = sys.version_info[:2] >= (3, 5)
+PY36 = sys.version_info[:2] >= (3, 6)
+MODULE_NOT_FOUND_ERROR = 'ModuleNotFoundError' if PY36 else 'ImportError'
+
+if hasattr(inspect, 'signature'):
+    def _format_args(func):
+        return str(inspect.signature(func))
+else:
+    def _format_args(func):
+        return inspect.formatargspec(*inspect.getargspec(func))
+
+isfunction = inspect.isfunction
+isclass = inspect.isclass
+# used to work around a python2 exception info leak
+exc_clear = getattr(sys, 'exc_clear', lambda: None)
+# The type of re.compile objects is not exposed in Python.
+REGEX_TYPE = type(re.compile(''))
+
+
+def is_generator(func):
+    genfunc = inspect.isgeneratorfunction(func)
+    return genfunc and not iscoroutinefunction(func)
+
+
+def iscoroutinefunction(func):
+    """Return True if func is a decorated coroutine function.
+
+    Note: copied and modified from Python 3.5's builtin coroutines.py to avoid importing asyncio directly,
+    which in turn also initializes the "logging" module as a side-effect (see issue #8).
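+
+    A hedged example (illustration only; requires Python 3.5+ syntax)::
+
+        async def fetch():
+            pass
+
+        iscoroutinefunction(fetch)         # True
+        iscoroutinefunction(lambda: None)  # False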
+ """ + return (getattr(func, '_is_coroutine', False) or + (hasattr(inspect, 'iscoroutinefunction') and inspect.iscoroutinefunction(func))) + + +def getlocation(function, curdir): + import inspect + fn = py.path.local(inspect.getfile(function)) + lineno = py.builtin._getcode(function).co_firstlineno + if fn.relto(curdir): + fn = fn.relto(curdir) + return "%s:%d" %(fn, lineno+1) + + +def num_mock_patch_args(function): + """ return number of arguments used up by mock arguments (if any) """ + patchings = getattr(function, "patchings", None) + if not patchings: + return 0 + mock = sys.modules.get("mock", sys.modules.get("unittest.mock", None)) + if mock is not None: + return len([p for p in patchings + if not p.attribute_name and p.new is mock.DEFAULT]) + return len(patchings) + + +def getfuncargnames(function, startindex=None): + # XXX merge with main.py's varnames + #assert not isclass(function) + realfunction = function + while hasattr(realfunction, "__wrapped__"): + realfunction = realfunction.__wrapped__ + if startindex is None: + startindex = inspect.ismethod(function) and 1 or 0 + if realfunction != function: + startindex += num_mock_patch_args(function) + function = realfunction + if isinstance(function, functools.partial): + argnames = inspect.getargs(_pytest._code.getrawcode(function.func))[0] + partial = function + argnames = argnames[len(partial.args):] + if partial.keywords: + for kw in partial.keywords: + argnames.remove(kw) + else: + argnames = inspect.getargs(_pytest._code.getrawcode(function))[0] + defaults = getattr(function, 'func_defaults', + getattr(function, '__defaults__', None)) or () + numdefaults = len(defaults) + if numdefaults: + return tuple(argnames[startindex:-numdefaults]) + return tuple(argnames[startindex:]) + + + +if sys.version_info[:2] == (2, 6): + def isclass(object): + """ Return true if the object is a class. Overrides inspect.isclass for + python 2.6 because it will return True for objects which always return + something on __getattr__ calls (see #1035). + Backport of https://hg.python.org/cpython/rev/35bf8f7a8edc + """ + return isinstance(object, (type, types.ClassType)) + + +if _PY3: + import codecs + imap = map + STRING_TYPES = bytes, str + UNICODE_TYPES = str, + + def _escape_strings(val): + """If val is pure ascii, returns it as a str(). Otherwise, escapes + bytes objects into a sequence of escaped bytes: + + b'\xc3\xb4\xc5\xd6' -> u'\\xc3\\xb4\\xc5\\xd6' + + and escapes unicode objects into a sequence of escaped unicode + ids, e.g.: + + '4\\nV\\U00043efa\\x0eMXWB\\x1e\\u3028\\u15fd\\xcd\\U0007d944' + + note: + the obvious "v.decode('unicode-escape')" will return + valid utf-8 unicode if it finds them in bytes, but we + want to return escaped bytes for any byte, even if they match + a utf-8 string. + + """ + if isinstance(val, bytes): + if val: + # source: http://goo.gl/bGsnwC + encoded_bytes, _ = codecs.escape_encode(val) + return encoded_bytes.decode('ascii') + else: + # empty bytes crashes codecs.escape_encode (#1087) + return '' + else: + return val.encode('unicode_escape').decode('ascii') +else: + STRING_TYPES = bytes, str, unicode + UNICODE_TYPES = unicode, + + from itertools import imap # NOQA + + def _escape_strings(val): + """In py2 bytes and str are the same type, so return if it's a bytes + object, return it unchanged if it is a full ascii string, + otherwise escape it into its binary form. + + If it's a unicode string, change the unicode characters into + unicode escapes. 
+ + """ + if isinstance(val, bytes): + try: + return val.encode('ascii') + except UnicodeDecodeError: + return val.encode('string-escape') + else: + return val.encode('unicode-escape') + + +def get_real_func(obj): + """ gets the real function object of the (possibly) wrapped object by + functools.wraps or functools.partial. + """ + start_obj = obj + for i in range(100): + new_obj = getattr(obj, '__wrapped__', None) + if new_obj is None: + break + obj = new_obj + else: + raise ValueError( + ("could not find real function of {start}" + "\nstopped at {current}").format( + start=py.io.saferepr(start_obj), + current=py.io.saferepr(obj))) + if isinstance(obj, functools.partial): + obj = obj.func + return obj + + +def getfslineno(obj): + # xxx let decorators etc specify a sane ordering + obj = get_real_func(obj) + if hasattr(obj, 'place_as'): + obj = obj.place_as + fslineno = _pytest._code.getfslineno(obj) + assert isinstance(fslineno[1], int), obj + return fslineno + + +def getimfunc(func): + try: + return func.__func__ + except AttributeError: + try: + return func.im_func + except AttributeError: + return func + + +def safe_getattr(object, name, default): + """ Like getattr but return default upon any Exception. + + Attribute access can potentially fail for 'evil' Python objects. + See issue #214. + """ + try: + return getattr(object, name, default) + except Exception: + return default + + +def _is_unittest_unexpected_success_a_failure(): + """Return if the test suite should fail if a @expectedFailure unittest test PASSES. + + From https://docs.python.org/3/library/unittest.html?highlight=unittest#unittest.TestResult.wasSuccessful: + Changed in version 3.4: Returns False if there were any + unexpectedSuccesses from tests marked with the expectedFailure() decorator. + """ + return sys.version_info >= (3, 4) + + +if _PY3: + def safe_str(v): + """returns v as string""" + return str(v) +else: + def safe_str(v): + """returns v as string, converting to ascii if necessary""" + try: + return str(v) + except UnicodeError: + if not isinstance(v, unicode): + v = unicode(v) + errors = 'replace' + return v.encode('utf-8', errors) + + +COLLECT_FAKEMODULE_ATTRIBUTES = ( + 'Collector', + 'Module', + 'Generator', + 'Function', + 'Instance', + 'Session', + 'Item', + 'Class', + 'File', + '_fillfuncargs', +) + + +def _setup_collect_fakemodule(): + from types import ModuleType + import pytest + pytest.collect = ModuleType('pytest.collect') + pytest.collect.__all__ = [] # used for setns + for attr in COLLECT_FAKEMODULE_ATTRIBUTES: + setattr(pytest.collect, attr, getattr(pytest, attr)) + + +if _PY2: + from py.io import TextIO as CaptureIO +else: + import io + + class CaptureIO(io.TextIOWrapper): + def __init__(self): + super(CaptureIO, self).__init__( + io.BytesIO(), + encoding='UTF-8', newline='', write_through=True, + ) + + def getvalue(self): + return self.buffer.getvalue().decode('UTF-8') + +class FuncargnamesCompatAttr(object): + """ helper class so that Metafunc, Function and FixtureRequest + don't need to each define the "funcargnames" compatibility attribute. + """ + @property + def funcargnames(self): + """ alias attribute for ``fixturenames`` for pre-2.3 compatibility""" + return self.fixturenames diff --git a/venv/lib/python3.5/site-packages/_pytest/config.py b/venv/lib/python3.5/site-packages/_pytest/config.py new file mode 100644 index 0000000..dadd5ca --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/config.py @@ -0,0 +1,1397 @@ +""" command line options, ini-file and conftest.py processing. 
""" +from __future__ import absolute_import, division, print_function +import argparse +import shlex +import traceback +import types +import warnings + +import py +# DON't import pytest here because it causes import cycle troubles +import sys +import os +import _pytest._code +import _pytest.hookspec # the extension point definitions +import _pytest.assertion +from _pytest._pluggy import PluginManager, HookimplMarker, HookspecMarker +from _pytest.compat import safe_str + +hookimpl = HookimplMarker("pytest") +hookspec = HookspecMarker("pytest") + +# pytest startup +# + + +class ConftestImportFailure(Exception): + def __init__(self, path, excinfo): + Exception.__init__(self, path, excinfo) + self.path = path + self.excinfo = excinfo + + def __str__(self): + etype, evalue, etb = self.excinfo + formatted = traceback.format_tb(etb) + # The level of the tracebacks we want to print is hand crafted :( + return repr(evalue) + '\n' + ''.join(formatted[2:]) + + +def main(args=None, plugins=None): + """ return exit code, after performing an in-process test run. + + :arg args: list of command line arguments. + + :arg plugins: list of plugin objects to be auto-registered during + initialization. + """ + try: + try: + config = _prepareconfig(args, plugins) + except ConftestImportFailure as e: + tw = py.io.TerminalWriter(sys.stderr) + for line in traceback.format_exception(*e.excinfo): + tw.line(line.rstrip(), red=True) + tw.line("ERROR: could not load %s\n" % (e.path), red=True) + return 4 + else: + try: + return config.hook.pytest_cmdline_main(config=config) + finally: + config._ensure_unconfigure() + except UsageError as e: + for msg in e.args: + sys.stderr.write("ERROR: %s\n" %(msg,)) + return 4 + +class cmdline: # compatibility namespace + main = staticmethod(main) + + +class UsageError(Exception): + """ error in pytest usage or invocation""" + + +class PrintHelp(Exception): + """Raised when pytest should print it's help to skip the rest of the + argument parsing and validation.""" + pass + + +def filename_arg(path, optname): + """ Argparse type validator for filename arguments. + + :path: path of filename + :optname: name of the option + """ + if os.path.isdir(path): + raise UsageError("{0} must be a filename, given: {1}".format(optname, path)) + return path + + +def directory_arg(path, optname): + """Argparse type validator for directory arguments. + + :path: path of directory + :optname: name of the option + """ + if not os.path.isdir(path): + raise UsageError("{0} must be a directory, given: {1}".format(optname, path)) + return path + + +_preinit = [] + +default_plugins = ( + "mark main terminal runner python fixtures debugging unittest capture skipping " + "tmpdir monkeypatch recwarn pastebin helpconfig nose assertion " + "junitxml resultlog doctest cacheprovider freeze_support " + "setuponly setupplan warnings").split() + + +builtin_plugins = set(default_plugins) +builtin_plugins.add("pytester") + + +def _preloadplugins(): + assert not _preinit + _preinit.append(get_config()) + +def get_config(): + if _preinit: + return _preinit.pop(0) + # subsequent calls to main will create a fresh instance + pluginmanager = PytestPluginManager() + config = Config(pluginmanager) + for spec in default_plugins: + pluginmanager.import_plugin(spec) + return config + +def get_plugin_manager(): + """ + Obtain a new instance of the + :py:class:`_pytest.config.PytestPluginManager`, with default plugins + already loaded. 
+
+    This function can be used by integrations with other tools, for example
+    hooking into pytest to run tests from within an IDE.
+    """
+    return get_config().pluginmanager

+def _prepareconfig(args=None, plugins=None):
+    warning = None
+    if args is None:
+        args = sys.argv[1:]
+    elif isinstance(args, py.path.local):
+        args = [str(args)]
+    elif not isinstance(args, (tuple, list)):
+        if not isinstance(args, str):
+            raise ValueError("not a string or argument list: %r" % (args,))
+        args = shlex.split(args, posix=sys.platform != "win32")
+        from _pytest import deprecated
+        warning = deprecated.MAIN_STR_ARGS
+    config = get_config()
+    pluginmanager = config.pluginmanager
+    try:
+        if plugins:
+            for plugin in plugins:
+                if isinstance(plugin, py.builtin._basestring):
+                    pluginmanager.consider_pluginarg(plugin)
+                else:
+                    pluginmanager.register(plugin)
+        if warning:
+            config.warn('C1', warning)
+        return pluginmanager.hook.pytest_cmdline_parse(
+                pluginmanager=pluginmanager, args=args)
+    except BaseException:
+        config._ensure_unconfigure()
+        raise
+
+
+class PytestPluginManager(PluginManager):
+    """
+    Overwrites :py:class:`pluggy.PluginManager <_pytest.vendored_packages.pluggy.PluginManager>` to add pytest-specific
+    functionality:
+
+    * loading plugins from the command line, the ``PYTEST_PLUGINS`` env variable and
+      ``pytest_plugins`` global variables found in plugins being loaded;
+    * ``conftest.py`` loading during start-up;
+    """
+    def __init__(self):
+        super(PytestPluginManager, self).__init__("pytest", implprefix="pytest_")
+        self._conftest_plugins = set()
+
+        # state related to local conftest plugins
+        self._path2confmods = {}
+        self._conftestpath2mod = {}
+        self._confcutdir = None
+        self._noconftest = False
+        self._duplicatepaths = set()
+
+        self.add_hookspecs(_pytest.hookspec)
+        self.register(self)
+        if os.environ.get('PYTEST_DEBUG'):
+            err = sys.stderr
+            encoding = getattr(err, 'encoding', 'utf8')
+            try:
+                err = py.io.dupfile(err, encoding=encoding)
+            except Exception:
+                pass
+            self.trace.root.setwriter(err.write)
+            self.enable_tracing()
+
+        # Config._consider_importhook will set a real object if required.
+        self.rewrite_hook = _pytest.assertion.DummyRewriteHook()
+
+    def addhooks(self, module_or_class):
+        """
+        .. deprecated:: 2.8
+
+        Use :py:meth:`pluggy.PluginManager.add_hookspecs <_pytest.vendored_packages.pluggy.PluginManager.add_hookspecs>` instead.
+ """ + warning = dict(code="I2", + fslocation=_pytest._code.getfslineno(sys._getframe(1)), + nodeid=None, + message="use pluginmanager.add_hookspecs instead of " + "deprecated addhooks() method.") + self._warn(warning) + return self.add_hookspecs(module_or_class) + + def parse_hookimpl_opts(self, plugin, name): + # pytest hooks are always prefixed with pytest_ + # so we avoid accessing possibly non-readable attributes + # (see issue #1073) + if not name.startswith("pytest_"): + return + # ignore some historic special names which can not be hooks anyway + if name == "pytest_plugins" or name.startswith("pytest_funcarg__"): + return + + method = getattr(plugin, name) + opts = super(PytestPluginManager, self).parse_hookimpl_opts(plugin, name) + if opts is not None: + for name in ("tryfirst", "trylast", "optionalhook", "hookwrapper"): + opts.setdefault(name, hasattr(method, name)) + return opts + + def parse_hookspec_opts(self, module_or_class, name): + opts = super(PytestPluginManager, self).parse_hookspec_opts( + module_or_class, name) + if opts is None: + method = getattr(module_or_class, name) + if name.startswith("pytest_"): + opts = {"firstresult": hasattr(method, "firstresult"), + "historic": hasattr(method, "historic")} + return opts + + def _verify_hook(self, hook, hookmethod): + super(PytestPluginManager, self)._verify_hook(hook, hookmethod) + if "__multicall__" in hookmethod.argnames: + fslineno = _pytest._code.getfslineno(hookmethod.function) + warning = dict(code="I1", + fslocation=fslineno, + nodeid=None, + message="%r hook uses deprecated __multicall__ " + "argument" % (hook.name)) + self._warn(warning) + + def register(self, plugin, name=None): + ret = super(PytestPluginManager, self).register(plugin, name) + if ret: + self.hook.pytest_plugin_registered.call_historic( + kwargs=dict(plugin=plugin, manager=self)) + + if isinstance(plugin, types.ModuleType): + self.consider_module(plugin) + return ret + + def getplugin(self, name): + # support deprecated naming because plugins (xdist e.g.) use it + return self.get_plugin(name) + + def hasplugin(self, name): + """Return True if the plugin with the given name is registered.""" + return bool(self.get_plugin(name)) + + def pytest_configure(self, config): + # XXX now that the pluginmanager exposes hookimpl(tryfirst...) + # we should remove tryfirst/trylast as markers + config.addinivalue_line("markers", + "tryfirst: mark a hook implementation function such that the " + "plugin machinery will try to call it first/as early as possible.") + config.addinivalue_line("markers", + "trylast: mark a hook implementation function such that the " + "plugin machinery will try to call it last/as late as possible.") + + def _warn(self, message): + kwargs = message if isinstance(message, dict) else { + 'code': 'I1', + 'message': message, + 'fslocation': None, + 'nodeid': None, + } + self.hook.pytest_logwarning.call_historic(kwargs=kwargs) + + # + # internal API for local conftest plugin handling + # + def _set_initial_conftests(self, namespace): + """ load initial conftest files given a preparsed "namespace". + As conftest files may add their own command line options + which have arguments ('--my-opt somepath') we might get some + false positives. All builtin and 3rd party plugins will have + been loaded, however, so common options will not confuse our logic + here. 
+ """ + current = py.path.local() + self._confcutdir = current.join(namespace.confcutdir, abs=True) \ + if namespace.confcutdir else None + self._noconftest = namespace.noconftest + testpaths = namespace.file_or_dir + foundanchor = False + for path in testpaths: + path = str(path) + # remove node-id syntax + i = path.find("::") + if i != -1: + path = path[:i] + anchor = current.join(path, abs=1) + if exists(anchor): # we found some file object + self._try_load_conftest(anchor) + foundanchor = True + if not foundanchor: + self._try_load_conftest(current) + + def _try_load_conftest(self, anchor): + self._getconftestmodules(anchor) + # let's also consider test* subdirs + if anchor.check(dir=1): + for x in anchor.listdir("test*"): + if x.check(dir=1): + self._getconftestmodules(x) + + def _getconftestmodules(self, path): + if self._noconftest: + return [] + try: + return self._path2confmods[path] + except KeyError: + if path.isfile(): + clist = self._getconftestmodules(path.dirpath()) + else: + # XXX these days we may rather want to use config.rootdir + # and allow users to opt into looking into the rootdir parent + # directories instead of requiring to specify confcutdir + clist = [] + for parent in path.parts(): + if self._confcutdir and self._confcutdir.relto(parent): + continue + conftestpath = parent.join("conftest.py") + if conftestpath.isfile(): + mod = self._importconftest(conftestpath) + clist.append(mod) + + self._path2confmods[path] = clist + return clist + + def _rget_with_confmod(self, name, path): + modules = self._getconftestmodules(path) + for mod in reversed(modules): + try: + return mod, getattr(mod, name) + except AttributeError: + continue + raise KeyError(name) + + def _importconftest(self, conftestpath): + try: + return self._conftestpath2mod[conftestpath] + except KeyError: + pkgpath = conftestpath.pypkgpath() + if pkgpath is None: + _ensure_removed_sysmodule(conftestpath.purebasename) + try: + mod = conftestpath.pyimport() + except Exception: + raise ConftestImportFailure(conftestpath, sys.exc_info()) + + self._conftest_plugins.add(mod) + self._conftestpath2mod[conftestpath] = mod + dirpath = conftestpath.dirpath() + if dirpath in self._path2confmods: + for path, mods in self._path2confmods.items(): + if path and path.relto(dirpath) or path == dirpath: + assert mod not in mods + mods.append(mod) + self.trace("loaded conftestmodule %r" %(mod)) + self.consider_conftest(mod) + return mod + + # + # API for bootstrapping plugin loading + # + # + + def consider_preparse(self, args): + for opt1,opt2 in zip(args, args[1:]): + if opt1 == "-p": + self.consider_pluginarg(opt2) + + def consider_pluginarg(self, arg): + if arg.startswith("no:"): + name = arg[3:] + self.set_blocked(name) + if not name.startswith("pytest_"): + self.set_blocked("pytest_" + name) + else: + self.import_plugin(arg) + + def consider_conftest(self, conftestmodule): + self.register(conftestmodule, name=conftestmodule.__file__) + + def consider_env(self): + self._import_plugin_specs(os.environ.get("PYTEST_PLUGINS")) + + def consider_module(self, mod): + self._import_plugin_specs(getattr(mod, 'pytest_plugins', [])) + + def _import_plugin_specs(self, spec): + plugins = _get_plugin_specs_as_list(spec) + for import_spec in plugins: + self.import_plugin(import_spec) + + def import_plugin(self, modname): + # most often modname refers to builtin modules, e.g. "pytester", + # "terminal" or "capture". 
Those plugins are registered under their
+        # basename for historic purposes but must be imported with the
+        # _pytest prefix.
+        assert isinstance(modname, (py.builtin.text, str)), "module name as text required, got %r" % modname
+        modname = str(modname)
+        if self.get_plugin(modname) is not None:
+            return
+        if modname in builtin_plugins:
+            importspec = "_pytest." + modname
+        else:
+            importspec = modname
+        self.rewrite_hook.mark_rewrite(importspec)
+        try:
+            __import__(importspec)
+        except ImportError as e:
+            new_exc = ImportError('Error importing plugin "%s": %s' % (modname, safe_str(e.args[0])))
+            # copy over name and path attributes
+            for attr in ('name', 'path'):
+                if hasattr(e, attr):
+                    setattr(new_exc, attr, getattr(e, attr))
+            raise new_exc
+        except Exception as e:
+            import pytest
+            if not hasattr(pytest, 'skip') or not isinstance(e, pytest.skip.Exception):
+                raise
+            self._warn("skipped plugin %r: %s" % ((modname, e.msg)))
+        else:
+            mod = sys.modules[importspec]
+            self.register(mod, modname)
+
+
+def _get_plugin_specs_as_list(specs):
+    """
+    Parses a list of "plugin specs" and returns a list of plugin names.
+
+    Plugin specs can be given as a list of strings separated by "," or already as a list/tuple in
+    which case it is returned as a list. Specs can also be `None` in which case an
+    empty list is returned.
+    """
+    if specs is not None:
+        if isinstance(specs, str):
+            specs = specs.split(',') if specs else []
+        if not isinstance(specs, (list, tuple)):
+            raise UsageError("Plugin specs must be a ','-separated string or a "
+                             "list/tuple of strings for plugin names. Given: %r" % specs)
+        return list(specs)
+    return []
+
+
+class Parser:
+    """ Parser for command line arguments and ini-file values.
+
+    :ivar extra_info: dict of generic param -> value to display in case
+        there's an error processing the command line arguments.
+    """
+
+    def __init__(self, usage=None, processopt=None):
+        self._anonymous = OptionGroup("custom options", parser=self)
+        self._groups = []
+        self._processopt = processopt
+        self._usage = usage
+        self._inidict = {}
+        self._ininames = []
+        self.extra_info = {}
+
+    def processoption(self, option):
+        if self._processopt:
+            if option.dest:
+                self._processopt(option)
+
+    def getgroup(self, name, description="", after=None):
+        """ get (or create) a named option Group.
+
+        :name: name of the option group.
+        :description: long description for --help output.
+        :after: name of another group, used for ordering --help output.
+
+        The returned group object has an ``addoption`` method with the same
+        signature as :py:func:`parser.addoption
+        <_pytest.config.Parser.addoption>` but will be shown in the
+        respective group in the output of ``pytest --help``.
+        """
+        for group in self._groups:
+            if group.name == name:
+                return group
+        group = OptionGroup(name, description, parser=self)
+        i = 0
+        for i, grp in enumerate(self._groups):
+            if grp.name == after:
+                break
+        self._groups.insert(i + 1, group)
+        return group
+
+    def addoption(self, *opts, **attrs):
+        """ register a command line option.
+
+        :opts: option names, can be short or long options.
+        :attrs: same attributes which the ``add_option()`` function of the
+           `argparse library
+           <https://docs.python.org/3/library/argparse.html#argparse.ArgumentParser.add_argument>`_
+           accepts.
+
+        After command line parsing options are available on the pytest config
+        object via ``config.option.NAME`` where ``NAME`` is usually set
+        by passing a ``dest`` attribute, for example
+        ``addoption("--long", dest="NAME", ...)``.
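+
+        A short usage sketch (hypothetical option name, added for
+        illustration; typically done from a ``pytest_addoption`` hook in a
+        ``conftest.py``)::
+
+            def pytest_addoption(parser):
+                parser.addoption("--runslow", action="store_true",
+                                 default=False, help="also run slow tests")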
+ """ + self._anonymous.addoption(*opts, **attrs) + + def parse(self, args, namespace=None): + from _pytest._argcomplete import try_argcomplete + self.optparser = self._getparser() + try_argcomplete(self.optparser) + return self.optparser.parse_args([str(x) for x in args], namespace=namespace) + + def _getparser(self): + from _pytest._argcomplete import filescompleter + optparser = MyOptionParser(self, self.extra_info) + groups = self._groups + [self._anonymous] + for group in groups: + if group.options: + desc = group.description or group.name + arggroup = optparser.add_argument_group(desc) + for option in group.options: + n = option.names() + a = option.attrs() + arggroup.add_argument(*n, **a) + # bash like autocompletion for dirs (appending '/') + optparser.add_argument(FILE_OR_DIR, nargs='*').completer=filescompleter + return optparser + + def parse_setoption(self, args, option, namespace=None): + parsedoption = self.parse(args, namespace=namespace) + for name, value in parsedoption.__dict__.items(): + setattr(option, name, value) + return getattr(parsedoption, FILE_OR_DIR) + + def parse_known_args(self, args, namespace=None): + """parses and returns a namespace object with known arguments at this + point. + """ + return self.parse_known_and_unknown_args(args, namespace=namespace)[0] + + def parse_known_and_unknown_args(self, args, namespace=None): + """parses and returns a namespace object with known arguments, and + the remaining arguments unknown at this point. + """ + optparser = self._getparser() + args = [str(x) for x in args] + return optparser.parse_known_args(args, namespace=namespace) + + def addini(self, name, help, type=None, default=None): + """ register an ini-file option. + + :name: name of the ini-variable + :type: type of the variable, can be ``pathlist``, ``args``, ``linelist`` + or ``bool``. + :default: default value if no ini-file option exists but is queried. + + The value of ini-variables can be retrieved via a call to + :py:func:`config.getini(name) <_pytest.config.Config.getini>`. + """ + assert type in (None, "pathlist", "args", "linelist", "bool") + self._inidict[name] = (help, type, default) + self._ininames.append(name) + + +class ArgumentError(Exception): + """ + Raised if an Argument instance is created with invalid or + inconsistent arguments. + """ + + def __init__(self, msg, option): + self.msg = msg + self.option_id = str(option) + + def __str__(self): + if self.option_id: + return "option %s: %s" % (self.option_id, self.msg) + else: + return self.msg + + +class Argument: + """class that mimics the necessary behaviour of optparse.Option + + its currently a least effort implementation + and ignoring choices and integer prefixes + https://docs.python.org/3/library/optparse.html#optparse-standard-option-types + """ + _typ_map = { + 'int': int, + 'string': str, + 'float': float, + 'complex': complex, + } + + def __init__(self, *names, **attrs): + """store parms in private vars for use in add_argument""" + self._attrs = attrs + self._short_opts = [] + self._long_opts = [] + self.dest = attrs.get('dest') + if '%default' in (attrs.get('help') or ''): + warnings.warn( + 'pytest now uses argparse. "%default" should be' + ' changed to "%(default)s" ', + DeprecationWarning, + stacklevel=3) + try: + typ = attrs['type'] + except KeyError: + pass + else: + # this might raise a keyerror as well, don't want to catch that + if isinstance(typ, py.builtin._basestring): + if typ == 'choice': + warnings.warn( + 'type argument to addoption() is a string %r.' 
+ ' For parsearg this is optional and when supplied' + ' should be a type.' + ' (options: %s)' % (typ, names), + DeprecationWarning, + stacklevel=3) + # argparse expects a type here take it from + # the type of the first element + attrs['type'] = type(attrs['choices'][0]) + else: + warnings.warn( + 'type argument to addoption() is a string %r.' + ' For parsearg this should be a type.' + ' (options: %s)' % (typ, names), + DeprecationWarning, + stacklevel=3) + attrs['type'] = Argument._typ_map[typ] + # used in test_parseopt -> test_parse_defaultgetter + self.type = attrs['type'] + else: + self.type = typ + try: + # attribute existence is tested in Config._processopt + self.default = attrs['default'] + except KeyError: + pass + self._set_opt_strings(names) + if not self.dest: + if self._long_opts: + self.dest = self._long_opts[0][2:].replace('-', '_') + else: + try: + self.dest = self._short_opts[0][1:] + except IndexError: + raise ArgumentError( + 'need a long or short option', self) + + def names(self): + return self._short_opts + self._long_opts + + def attrs(self): + # update any attributes set by processopt + attrs = 'default dest help'.split() + if self.dest: + attrs.append(self.dest) + for attr in attrs: + try: + self._attrs[attr] = getattr(self, attr) + except AttributeError: + pass + if self._attrs.get('help'): + a = self._attrs['help'] + a = a.replace('%default', '%(default)s') + #a = a.replace('%prog', '%(prog)s') + self._attrs['help'] = a + return self._attrs + + def _set_opt_strings(self, opts): + """directly from optparse + + might not be necessary as this is passed to argparse later on""" + for opt in opts: + if len(opt) < 2: + raise ArgumentError( + "invalid option string %r: " + "must be at least two characters long" % opt, self) + elif len(opt) == 2: + if not (opt[0] == "-" and opt[1] != "-"): + raise ArgumentError( + "invalid short option string %r: " + "must be of the form -x, (x any non-dash char)" % opt, + self) + self._short_opts.append(opt) + else: + if not (opt[0:2] == "--" and opt[2] != "-"): + raise ArgumentError( + "invalid long option string %r: " + "must start with --, followed by non-dash" % opt, + self) + self._long_opts.append(opt) + + def __repr__(self): + args = [] + if self._short_opts: + args += ['_short_opts: ' + repr(self._short_opts)] + if self._long_opts: + args += ['_long_opts: ' + repr(self._long_opts)] + args += ['dest: ' + repr(self.dest)] + if hasattr(self, 'type'): + args += ['type: ' + repr(self.type)] + if hasattr(self, 'default'): + args += ['default: ' + repr(self.default)] + return 'Argument({0})'.format(', '.join(args)) + + +class OptionGroup: + def __init__(self, name, description="", parser=None): + self.name = name + self.description = description + self.options = [] + self.parser = parser + + def addoption(self, *optnames, **attrs): + """ add an option to this group. + + if a shortened version of a long option is specified it will + be suppressed in the help. 
addoption('--twowords', '--two-words')
+        results in help showing '--two-words' only, but --twowords gets
+        accepted **and** the automatic destination is in args.twowords
+        """
+        conflict = set(optnames).intersection(
+            name for opt in self.options for name in opt.names())
+        if conflict:
+            raise ValueError("option names %s already added" % conflict)
+        option = Argument(*optnames, **attrs)
+        self._addoption_instance(option, shortupper=False)
+
+    def _addoption(self, *optnames, **attrs):
+        option = Argument(*optnames, **attrs)
+        self._addoption_instance(option, shortupper=True)
+
+    def _addoption_instance(self, option, shortupper=False):
+        if not shortupper:
+            for opt in option._short_opts:
+                if opt[0] == '-' and opt[1].islower():
+                    raise ValueError("lowercase shortoptions reserved")
+        if self.parser:
+            self.parser.processoption(option)
+        self.options.append(option)
+
+
+class MyOptionParser(argparse.ArgumentParser):
+    def __init__(self, parser, extra_info=None):
+        if not extra_info:
+            extra_info = {}
+        self._parser = parser
+        argparse.ArgumentParser.__init__(self, usage=parser._usage,
+            add_help=False, formatter_class=DropShorterLongHelpFormatter)
+        # extra_info is a dict of (param -> value) to display if there's
+        # a usage error, to provide more contextual information to the user
+        self.extra_info = extra_info
+
+    def parse_args(self, args=None, namespace=None):
+        """allow splitting of positional arguments"""
+        args, argv = self.parse_known_args(args, namespace)
+        if argv:
+            for arg in argv:
+                if arg and arg[0] == '-':
+                    lines = ['unrecognized arguments: %s' % (' '.join(argv))]
+                    for k, v in sorted(self.extra_info.items()):
+                        lines.append('  %s: %s' % (k, v))
+                    self.error('\n'.join(lines))
+        getattr(args, FILE_OR_DIR).extend(argv)
+        return args
+
+
+class DropShorterLongHelpFormatter(argparse.HelpFormatter):
+    """shorten help for long options that differ only in extra hyphens
+
+    - collapse **long** options that are the same except for extra hyphens
+    - special action attribute map_long_option allows suppressing additional
+      long options
+    - shortcut if there are only two options and one of them is a short one
+    - cache result on action object as this is called at least 2 times
+    """
+    def _format_action_invocation(self, action):
+        orgstr = argparse.HelpFormatter._format_action_invocation(self, action)
+        if orgstr and orgstr[0] != '-':  # only optional arguments
+            return orgstr
+        res = getattr(action, '_formatted_action_invocation', None)
+        if res:
+            return res
+        options = orgstr.split(', ')
+        if len(options) == 2 and (len(options[0]) == 2 or len(options[1]) == 2):
+            # a shortcut for '-h, --help' or '--abc', '-a'
+            action._formatted_action_invocation = orgstr
+            return orgstr
+        return_list = []
+        option_map = getattr(action, 'map_long_option', {})
+        if option_map is None:
+            option_map = {}
+        short_long = {}
+        for option in options:
+            if len(option) == 2 or option[2] == ' ':
+                continue
+            if not option.startswith('--'):
+                raise ArgumentError('long optional argument without "--": [%s]'
+                                    % (option), self)
+            xxoption = option[2:]
+            if xxoption.split()[0] not in option_map:
+                shortened = xxoption.replace('-', '')
+                if shortened not in short_long or \
+                   len(short_long[shortened]) < len(xxoption):
+                    short_long[shortened] = xxoption
+        # now short_long has been filled out to the longest with dashes
+        # **and** we keep the right option ordering from add_argument
+        for option in options:
+            if len(option) == 2 or option[2] == ' ':
+                return_list.append(option)
+            if option[2:] == short_long.get(option.replace('-', '')):
+                return_list.append(option.replace(' ', '=', 1))
+        action._formatted_action_invocation = ', '.join(return_list)
+        return action._formatted_action_invocation
+
+
+
+def _ensure_removed_sysmodule(modname):
+    try:
+        del sys.modules[modname]
+    except KeyError:
+        pass
+
+class CmdOptions(object):
+    """ holds cmdline options as attributes."""
+    def __init__(self, values=()):
+        self.__dict__.update(values)
+    def __repr__(self):
+        return "<CmdOptions %r>" % (self.__dict__,)
+    def copy(self):
+        return CmdOptions(self.__dict__)
+
+class Notset:
+    def __repr__(self):
+        return "<NOTSET>"
+
+
+notset = Notset()
+FILE_OR_DIR = 'file_or_dir'
+
+
+class Config(object):
+    """ access to configuration values, pluginmanager and plugin hooks. """
+
+    def __init__(self, pluginmanager):
+        #: access to command line option as attributes.
+        #: (deprecated), use :py:func:`getoption() <_pytest.config.Config.getoption>` instead
+        self.option = CmdOptions()
+        _a = FILE_OR_DIR
+        self._parser = Parser(
+            usage="%%(prog)s [options] [%s] [%s] [...]" % (_a, _a),
+            processopt=self._processopt,
+        )
+        #: a pluginmanager instance
+        self.pluginmanager = pluginmanager
+        self.trace = self.pluginmanager.trace.root.get("config")
+        self.hook = self.pluginmanager.hook
+        self._inicache = {}
+        self._override_ini = ()
+        self._opt2dest = {}
+        self._cleanup = []
+        self._warn = self.pluginmanager._warn
+        self.pluginmanager.register(self, "pytestconfig")
+        self._configured = False
+
+        def do_setns(dic):
+            import pytest
+            setns(pytest, dic)
+
+        self.hook.pytest_namespace.call_historic(do_setns, {})
+        self.hook.pytest_addoption.call_historic(kwargs=dict(parser=self._parser))
+
+    def add_cleanup(self, func):
+        """ Add a function to be called when the config object gets out of
+        use (usually coinciding with pytest_unconfigure)."""
+        self._cleanup.append(func)
+
+    def _do_configure(self):
+        assert not self._configured
+        self._configured = True
+        self.hook.pytest_configure.call_historic(kwargs=dict(config=self))
+
+    def _ensure_unconfigure(self):
+        if self._configured:
+            self._configured = False
+            self.hook.pytest_unconfigure(config=self)
+            self.hook.pytest_configure._call_history = []
+        while self._cleanup:
+            fin = self._cleanup.pop()
+            fin()
+
+    def warn(self, code, message, fslocation=None, nodeid=None):
+        """ generate a warning for this test session.
""" + self.hook.pytest_logwarning.call_historic(kwargs=dict( + code=code, message=message, + fslocation=fslocation, nodeid=nodeid)) + + def get_terminal_writer(self): + return self.pluginmanager.get_plugin("terminalreporter")._tw + + def pytest_cmdline_parse(self, pluginmanager, args): + # REF1 assert self == pluginmanager.config, (self, pluginmanager.config) + self.parse(args) + return self + + def notify_exception(self, excinfo, option=None): + if option and option.fulltrace: + style = "long" + else: + style = "native" + excrepr = excinfo.getrepr(funcargs=True, + showlocals=getattr(option, 'showlocals', False), + style=style, + ) + res = self.hook.pytest_internalerror(excrepr=excrepr, + excinfo=excinfo) + if not py.builtin.any(res): + for line in str(excrepr).split("\n"): + sys.stderr.write("INTERNALERROR> %s\n" %line) + sys.stderr.flush() + + def cwd_relative_nodeid(self, nodeid): + # nodeid's are relative to the rootpath, compute relative to cwd + if self.invocation_dir != self.rootdir: + fullpath = self.rootdir.join(nodeid) + nodeid = self.invocation_dir.bestrelpath(fullpath) + return nodeid + + @classmethod + def fromdictargs(cls, option_dict, args): + """ constructor useable for subprocesses. """ + config = get_config() + config.option.__dict__.update(option_dict) + config.parse(args, addopts=False) + for x in config.option.plugins: + config.pluginmanager.consider_pluginarg(x) + return config + + def _processopt(self, opt): + for name in opt._short_opts + opt._long_opts: + self._opt2dest[name] = opt.dest + + if hasattr(opt, 'default') and opt.dest: + if not hasattr(self.option, opt.dest): + setattr(self.option, opt.dest, opt.default) + + @hookimpl(trylast=True) + def pytest_load_initial_conftests(self, early_config): + self.pluginmanager._set_initial_conftests(early_config.known_args_namespace) + + def _initini(self, args): + ns, unknown_args = self._parser.parse_known_and_unknown_args(args, namespace=self.option.copy()) + r = determine_setup(ns.inifilename, ns.file_or_dir + unknown_args, warnfunc=self.warn) + self.rootdir, self.inifile, self.inicfg = r + self._parser.extra_info['rootdir'] = self.rootdir + self._parser.extra_info['inifile'] = self.inifile + self.invocation_dir = py.path.local() + self._parser.addini('addopts', 'extra command line options', 'args') + self._parser.addini('minversion', 'minimally required pytest version') + self._override_ini = ns.override_ini or () + + def _consider_importhook(self, args): + """Install the PEP 302 import hook if using assertion re-writing. + + Needs to parse the --assert= option from the commandline + and find all the installed plugins to mark them for re-writing + by the importhook. + """ + ns, unknown_args = self._parser.parse_known_and_unknown_args(args) + mode = ns.assertmode + if mode == 'rewrite': + try: + hook = _pytest.assertion.install_importhook(self) + except SystemError: + mode = 'plain' + else: + self._mark_plugins_for_rewrite(hook) + self._warn_about_missing_assertion(mode) + + def _mark_plugins_for_rewrite(self, hook): + """ + Given an importhook, mark for rewrite any top-level + modules or packages in the distribution package for + all pytest plugins. 
+ """ + import pkg_resources + self.pluginmanager.rewrite_hook = hook + + # 'RECORD' available for plugins installed normally (pip install) + # 'SOURCES.txt' available for plugins installed in dev mode (pip install -e) + # for installed plugins 'SOURCES.txt' returns an empty list, and vice-versa + # so it shouldn't be an issue + metadata_files = 'RECORD', 'SOURCES.txt' + + package_files = ( + entry.split(',')[0] + for entrypoint in pkg_resources.iter_entry_points('pytest11') + for metadata in metadata_files + for entry in entrypoint.dist._get_metadata(metadata) + ) + + for fn in package_files: + is_simple_module = os.sep not in fn and fn.endswith('.py') + is_package = fn.count(os.sep) == 1 and fn.endswith('__init__.py') + if is_simple_module: + module_name, ext = os.path.splitext(fn) + hook.mark_rewrite(module_name) + elif is_package: + package_name = os.path.dirname(fn) + hook.mark_rewrite(package_name) + + def _warn_about_missing_assertion(self, mode): + try: + assert False + except AssertionError: + pass + else: + if mode == 'plain': + sys.stderr.write("WARNING: ASSERTIONS ARE NOT EXECUTED" + " and FAILING TESTS WILL PASS. Are you" + " using python -O?") + else: + sys.stderr.write("WARNING: assertions not in test modules or" + " plugins will be ignored" + " because assert statements are not executed " + "by the underlying Python interpreter " + "(are you using python -O?)\n") + + def _preparse(self, args, addopts=True): + self._initini(args) + if addopts: + args[:] = shlex.split(os.environ.get('PYTEST_ADDOPTS', '')) + args + args[:] = self.getini("addopts") + args + self._checkversion() + self._consider_importhook(args) + self.pluginmanager.consider_preparse(args) + self.pluginmanager.load_setuptools_entrypoints('pytest11') + self.pluginmanager.consider_env() + self.known_args_namespace = ns = self._parser.parse_known_args(args, namespace=self.option.copy()) + confcutdir = self.known_args_namespace.confcutdir + if self.known_args_namespace.confcutdir is None and self.inifile: + confcutdir = py.path.local(self.inifile).dirname + self.known_args_namespace.confcutdir = confcutdir + try: + self.hook.pytest_load_initial_conftests(early_config=self, + args=args, parser=self._parser) + except ConftestImportFailure: + e = sys.exc_info()[1] + if ns.help or ns.version: + # we don't want to prevent --help/--version to work + # so just let is pass and print a warning at the end + self._warn("could not load initial conftests (%s)\n" % e.path) + else: + raise + + def _checkversion(self): + import pytest + minver = self.inicfg.get('minversion', None) + if minver: + ver = minver.split(".") + myver = pytest.__version__.split(".") + if myver < ver: + raise pytest.UsageError( + "%s:%d: requires pytest-%s, actual pytest-%s'" %( + self.inicfg.config.path, self.inicfg.lineof('minversion'), + minver, pytest.__version__)) + + def parse(self, args, addopts=True): + # parse given cmdline arguments into this config object. 
+
+        assert not hasattr(self, 'args'), (
+            "can only parse cmdline args at most once per Config object")
+        self._origargs = args
+        self.hook.pytest_addhooks.call_historic(
+            kwargs=dict(pluginmanager=self.pluginmanager))
+        self._preparse(args, addopts=addopts)
+        # XXX deprecated hook:
+        self.hook.pytest_cmdline_preparse(config=self, args=args)
+        self._parser.after_preparse = True
+        try:
+            args = self._parser.parse_setoption(args, self.option, namespace=self.option)
+            if not args:
+                cwd = os.getcwd()
+                if cwd == self.rootdir:
+                    args = self.getini('testpaths')
+                if not args:
+                    args = [cwd]
+            self.args = args
+        except PrintHelp:
+            pass
+
+    def addinivalue_line(self, name, line):
+        """ add a line to an ini-file option. The option must have been
+        declared but might not yet be set, in which case the line becomes
+        the first line in its value. """
+        x = self.getini(name)
+        assert isinstance(x, list)
+        x.append(line)  # modifies the cached list inline
+
+    def getini(self, name):
+        """ return configuration value from an :ref:`ini file <inifiles>`. If the
+        specified name hasn't been registered through a prior
+        :py:func:`parser.addini <_pytest.config.Parser.addini>`
+        call (usually from a plugin), a ValueError is raised. """
+        try:
+            return self._inicache[name]
+        except KeyError:
+            self._inicache[name] = val = self._getini(name)
+            return val
+
+    def _getini(self, name):
+        try:
+            description, type, default = self._parser._inidict[name]
+        except KeyError:
+            raise ValueError("unknown configuration value: %r" % (name,))
+        value = self._get_override_ini_value(name)
+        if value is None:
+            try:
+                value = self.inicfg[name]
+            except KeyError:
+                if default is not None:
+                    return default
+                if type is None:
+                    return ''
+                return []
+        if type == "pathlist":
+            dp = py.path.local(self.inicfg.config.path).dirpath()
+            l = []
+            for relpath in shlex.split(value):
+                l.append(dp.join(relpath, abs=True))
+            return l
+        elif type == "args":
+            return shlex.split(value)
+        elif type == "linelist":
+            return [t for t in map(lambda x: x.strip(), value.split("\n")) if t]
+        elif type == "bool":
+            return bool(_strtobool(value.strip()))
+        else:
+            assert type is None
+            return value
+
+    def _getconftest_pathlist(self, name, path):
+        try:
+            mod, relroots = self.pluginmanager._rget_with_confmod(name, path)
+        except KeyError:
+            return None
+        modpath = py.path.local(mod.__file__).dirpath()
+        l = []
+        for relroot in relroots:
+            if not isinstance(relroot, py.path.local):
+                relroot = relroot.replace("/", py.path.local.sep)
+                relroot = modpath.join(relroot, abs=True)
+            l.append(relroot)
+        return l
+
+    def _get_override_ini_value(self, name):
+        value = None
+        # override_ini is a list of lists, to support both
+        # "-o foo1=bar1 foo2=bar2" and "-o foo1=bar1 -o foo2=bar2" options.
+        # Always use the last item if multiple values are set for the same
+        # ini-name, e.g. "-o foo=bar1 -o foo=bar2" will set foo to bar2.
+        for ini_config_list in self._override_ini:
+            for ini_config in ini_config_list:
+                try:
+                    (key, user_ini_value) = ini_config.split("=", 1)
+                except ValueError:
+                    raise UsageError("-o/--override-ini expects option=value style.")
+                if key == name:
+                    value = user_ini_value
+        return value
+
+    def getoption(self, name, default=notset, skip=False):
+        """ return command line option value.
+
+        :arg name: name of the option.  You may also specify
+            the literal ``--OPT`` option instead of the "dest" option name.
+        :arg default: default value if no option of that name exists.
+        :arg skip: if True raise pytest.skip if the option does not exist
+            or has a None value.
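+
+        Illustrative sketch (hypothetical option names, not part of the
+        vendored source)::
+
+            config.getoption("verbose")    # "dest"-style name
+            config.getoption("--runslow")  # literal option spelling
+            config.getoption("missing", default=None)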
+ """ + name = self._opt2dest.get(name, name) + try: + val = getattr(self.option, name) + if val is None and skip: + raise AttributeError(name) + return val + except AttributeError: + if default is not notset: + return default + if skip: + import pytest + pytest.skip("no %r option found" %(name,)) + raise ValueError("no option named %r" % (name,)) + + def getvalue(self, name, path=None): + """ (deprecated, use getoption()) """ + return self.getoption(name) + + def getvalueorskip(self, name, path=None): + """ (deprecated, use getoption(skip=True)) """ + return self.getoption(name, skip=True) + +def exists(path, ignore=EnvironmentError): + try: + return path.check() + except ignore: + return False + +def getcfg(args, warnfunc=None): + """ + Search the list of arguments for a valid ini-file for pytest, + and return a tuple of (rootdir, inifile, cfg-dict). + + note: warnfunc is an optional function used to warn + about ini-files that use deprecated features. + This parameter should be removed when pytest + adopts standard deprecation warnings (#1804). + """ + from _pytest.deprecated import SETUP_CFG_PYTEST + inibasenames = ["pytest.ini", "tox.ini", "setup.cfg"] + args = [x for x in args if not str(x).startswith("-")] + if not args: + args = [py.path.local()] + for arg in args: + arg = py.path.local(arg) + for base in arg.parts(reverse=True): + for inibasename in inibasenames: + p = base.join(inibasename) + if exists(p): + iniconfig = py.iniconfig.IniConfig(p) + if 'pytest' in iniconfig.sections: + if inibasename == 'setup.cfg' and warnfunc: + warnfunc('C1', SETUP_CFG_PYTEST) + return base, p, iniconfig['pytest'] + if inibasename == 'setup.cfg' and 'tool:pytest' in iniconfig.sections: + return base, p, iniconfig['tool:pytest'] + elif inibasename == "pytest.ini": + # allowed to be empty + return base, p, {} + return None, None, None + + +def get_common_ancestor(paths): + common_ancestor = None + for path in paths: + if not path.exists(): + continue + if common_ancestor is None: + common_ancestor = path + else: + if path.relto(common_ancestor) or path == common_ancestor: + continue + elif common_ancestor.relto(path): + common_ancestor = path + else: + shared = path.common(common_ancestor) + if shared is not None: + common_ancestor = shared + if common_ancestor is None: + common_ancestor = py.path.local() + elif common_ancestor.isfile(): + common_ancestor = common_ancestor.dirpath() + return common_ancestor + + +def get_dirs_from_args(args): + def is_option(x): + return str(x).startswith('-') + + def get_file_part_from_node_id(x): + return str(x).split('::')[0] + + def get_dir_from_path(path): + if path.isdir(): + return path + return py.path.local(path.dirname) + + # These look like paths but may not exist + possible_paths = ( + py.path.local(get_file_part_from_node_id(arg)) + for arg in args + if not is_option(arg) + ) + + return [ + get_dir_from_path(path) + for path in possible_paths + if path.exists() + ] + + +def determine_setup(inifile, args, warnfunc=None): + dirs = get_dirs_from_args(args) + if inifile: + iniconfig = py.iniconfig.IniConfig(inifile) + try: + inicfg = iniconfig["pytest"] + except KeyError: + inicfg = None + rootdir = get_common_ancestor(dirs) + else: + ancestor = get_common_ancestor(dirs) + rootdir, inifile, inicfg = getcfg([ancestor], warnfunc=warnfunc) + if rootdir is None: + for rootdir in ancestor.parts(reverse=True): + if rootdir.join("setup.py").exists(): + break + else: + rootdir, inifile, inicfg = getcfg(dirs, warnfunc=warnfunc) + if rootdir is None: + rootdir = 
get_common_ancestor([py.path.local(), ancestor]) + is_fs_root = os.path.splitdrive(str(rootdir))[1] == os.sep + if is_fs_root: + rootdir = ancestor + return rootdir, inifile, inicfg or {} + + +def setns(obj, dic): + import pytest + for name, value in dic.items(): + if isinstance(value, dict): + mod = getattr(obj, name, None) + if mod is None: + modname = "pytest.%s" % name + mod = types.ModuleType(modname) + sys.modules[modname] = mod + mod.__all__ = [] + setattr(obj, name, mod) + obj.__all__.append(name) + setns(mod, value) + else: + setattr(obj, name, value) + obj.__all__.append(name) + #if obj != pytest: + # pytest.__all__.append(name) + setattr(pytest, name, value) + + +def create_terminal_writer(config, *args, **kwargs): + """Create a TerminalWriter instance configured according to the options + in the config object. Every code which requires a TerminalWriter object + and has access to a config object should use this function. + """ + tw = py.io.TerminalWriter(*args, **kwargs) + if config.option.color == 'yes': + tw.hasmarkup = True + if config.option.color == 'no': + tw.hasmarkup = False + return tw + + +def _strtobool(val): + """Convert a string representation of truth to true (1) or false (0). + + True values are 'y', 'yes', 't', 'true', 'on', and '1'; false values + are 'n', 'no', 'f', 'false', 'off', and '0'. Raises ValueError if + 'val' is anything else. + + .. note:: copied from distutils.util + """ + val = val.lower() + if val in ('y', 'yes', 't', 'true', 'on', '1'): + return 1 + elif val in ('n', 'no', 'f', 'false', 'off', '0'): + return 0 + else: + raise ValueError("invalid truth value %r" % (val,)) diff --git a/venv/lib/python3.5/site-packages/_pytest/debugging.py b/venv/lib/python3.5/site-packages/_pytest/debugging.py new file mode 100644 index 0000000..73a0a2e --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/debugging.py @@ -0,0 +1,123 @@ +""" interactive debugging with PDB, the Python Debugger. """ +from __future__ import absolute_import, division, print_function +import pdb +import sys + + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group._addoption( + '--pdb', dest="usepdb", action="store_true", + help="start the interactive Python debugger on errors.") + group._addoption( + '--pdbcls', dest="usepdb_cls", metavar="modulename:classname", + help="start a custom interactive Python debugger on errors. " + "For example: --pdbcls=IPython.terminal.debugger:TerminalPdb") + + +def pytest_configure(config): + if config.getvalue("usepdb_cls"): + modname, classname = config.getvalue("usepdb_cls").split(":") + __import__(modname) + pdb_cls = getattr(sys.modules[modname], classname) + else: + pdb_cls = pdb.Pdb + + if config.getvalue("usepdb"): + config.pluginmanager.register(PdbInvoke(), 'pdbinvoke') + + old = (pdb.set_trace, pytestPDB._pluginmanager) + + def fin(): + pdb.set_trace, pytestPDB._pluginmanager = old + pytestPDB._config = None + pytestPDB._pdb_cls = pdb.Pdb + + pdb.set_trace = pytestPDB.set_trace + pytestPDB._pluginmanager = config.pluginmanager + pytestPDB._config = config + pytestPDB._pdb_cls = pdb_cls + config._cleanup.append(fin) + +class pytestPDB: + """ Pseudo PDB that defers to the real pdb. """ + _pluginmanager = None + _config = None + _pdb_cls = pdb.Pdb + + @classmethod + def set_trace(cls): + """ invoke PDB set_trace debugging, dropping any IO capturing. 
""" + import _pytest.config + frame = sys._getframe().f_back + if cls._pluginmanager is not None: + capman = cls._pluginmanager.getplugin("capturemanager") + if capman: + capman.suspendcapture(in_=True) + tw = _pytest.config.create_terminal_writer(cls._config) + tw.line() + tw.sep(">", "PDB set_trace (IO-capturing turned off)") + cls._pluginmanager.hook.pytest_enter_pdb(config=cls._config) + cls._pdb_cls().set_trace(frame) + + +class PdbInvoke: + def pytest_exception_interact(self, node, call, report): + capman = node.config.pluginmanager.getplugin("capturemanager") + if capman: + out, err = capman.suspendcapture(in_=True) + sys.stdout.write(out) + sys.stdout.write(err) + _enter_pdb(node, call.excinfo, report) + + def pytest_internalerror(self, excrepr, excinfo): + for line in str(excrepr).split("\n"): + sys.stderr.write("INTERNALERROR> %s\n" % line) + sys.stderr.flush() + tb = _postmortem_traceback(excinfo) + post_mortem(tb) + + +def _enter_pdb(node, excinfo, rep): + # XXX we re-use the TerminalReporter's terminalwriter + # because this seems to avoid some encoding related troubles + # for not completely clear reasons. + tw = node.config.pluginmanager.getplugin("terminalreporter")._tw + tw.line() + tw.sep(">", "traceback") + rep.toterminal(tw) + tw.sep(">", "entering PDB") + tb = _postmortem_traceback(excinfo) + post_mortem(tb) + rep._pdbshown = True + return rep + + +def _postmortem_traceback(excinfo): + # A doctest.UnexpectedException is not useful for post_mortem. + # Use the underlying exception instead: + from doctest import UnexpectedException + if isinstance(excinfo.value, UnexpectedException): + return excinfo.value.exc_info[2] + else: + return excinfo._excinfo[2] + + +def _find_last_non_hidden_frame(stack): + i = max(0, len(stack) - 1) + while i and stack[i][0].f_locals.get("__tracebackhide__", False): + i -= 1 + return i + + +def post_mortem(t): + class Pdb(pytestPDB._pdb_cls): + def get_stack(self, f, t): + stack, i = pdb.Pdb.get_stack(self, f, t) + if f is None: + i = _find_last_non_hidden_frame(stack) + return stack, i + p = Pdb() + p.reset() + p.interaction(None, t) diff --git a/venv/lib/python3.5/site-packages/_pytest/deprecated.py b/venv/lib/python3.5/site-packages/_pytest/deprecated.py new file mode 100644 index 0000000..e75ff09 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/deprecated.py @@ -0,0 +1,24 @@ +""" +This module contains deprecation messages and bits of code used elsewhere in the codebase +that is planned to be removed in the next pytest release. + +Keeping it in a central location makes it easy to track what is deprecated and should +be removed when the time comes. +""" +from __future__ import absolute_import, division, print_function + +MAIN_STR_ARGS = 'passing a string to pytest.main() is deprecated, ' \ + 'pass a list of arguments instead.' + +YIELD_TESTS = 'yield tests are deprecated, and scheduled to be removed in pytest 4.0' + +FUNCARG_PREFIX = ( + '{name}: declaring fixtures using "pytest_funcarg__" prefix is deprecated ' + 'and scheduled to be removed in pytest 4.0. ' + 'Please remove the prefix and use the @pytest.fixture decorator instead.') + +SETUP_CFG_PYTEST = '[pytest] section in setup.cfg files is deprecated, use [tool:pytest] instead.' 
+ +GETFUNCARGVALUE = "use of getfuncargvalue is deprecated, use getfixturevalue" + +RESULT_LOG = '--result-log is deprecated and scheduled for removal in pytest 4.0' diff --git a/venv/lib/python3.5/site-packages/_pytest/doctest.py b/venv/lib/python3.5/site-packages/_pytest/doctest.py new file mode 100644 index 0000000..fde6dd7 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/doctest.py @@ -0,0 +1,360 @@ +""" discover and run doctests in modules and test files.""" +from __future__ import absolute_import, division, print_function + +import traceback + +import pytest +from _pytest._code.code import ExceptionInfo, ReprFileLocation, TerminalRepr +from _pytest.fixtures import FixtureRequest + + +DOCTEST_REPORT_CHOICE_NONE = 'none' +DOCTEST_REPORT_CHOICE_CDIFF = 'cdiff' +DOCTEST_REPORT_CHOICE_NDIFF = 'ndiff' +DOCTEST_REPORT_CHOICE_UDIFF = 'udiff' +DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE = 'only_first_failure' + +DOCTEST_REPORT_CHOICES = ( + DOCTEST_REPORT_CHOICE_NONE, + DOCTEST_REPORT_CHOICE_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF, + DOCTEST_REPORT_CHOICE_UDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE, +) + +def pytest_addoption(parser): + parser.addini('doctest_optionflags', 'option flags for doctests', + type="args", default=["ELLIPSIS"]) + parser.addini("doctest_encoding", 'encoding used for doctest files', default="utf-8") + group = parser.getgroup("collect") + group.addoption("--doctest-modules", + action="store_true", default=False, + help="run doctests in all .py modules", + dest="doctestmodules") + group.addoption("--doctest-report", + type=str.lower, default="udiff", + help="choose another output format for diffs on doctest failure", + choices=DOCTEST_REPORT_CHOICES, + dest="doctestreport") + group.addoption("--doctest-glob", + action="append", default=[], metavar="pat", + help="doctests file matching pattern, default: test*.txt", + dest="doctestglob") + group.addoption("--doctest-ignore-import-errors", + action="store_true", default=False, + help="ignore doctest ImportErrors", + dest="doctest_ignore_import_errors") + + +def pytest_collect_file(path, parent): + config = parent.config + if path.ext == ".py": + if config.option.doctestmodules: + return DoctestModule(path, parent) + elif _is_doctest(config, path, parent): + return DoctestTextfile(path, parent) + + +def _is_doctest(config, path, parent): + if path.ext in ('.txt', '.rst') and parent.session.isinitpath(path): + return True + globs = config.getoption("doctestglob") or ['test*.txt'] + for glob in globs: + if path.check(fnmatch=glob): + return True + return False + + +class ReprFailDoctest(TerminalRepr): + + def __init__(self, reprlocation, lines): + self.reprlocation = reprlocation + self.lines = lines + + def toterminal(self, tw): + for line in self.lines: + tw.line(line) + self.reprlocation.toterminal(tw) + + +class DoctestItem(pytest.Item): + def __init__(self, name, parent, runner=None, dtest=None): + super(DoctestItem, self).__init__(name, parent) + self.runner = runner + self.dtest = dtest + self.obj = None + self.fixture_request = None + + def setup(self): + if self.dtest is not None: + self.fixture_request = _setup_fixtures(self) + globs = dict(getfixture=self.fixture_request.getfixturevalue) + for name, value in self.fixture_request.getfixturevalue('doctest_namespace').items(): + globs[name] = value + self.dtest.globs.update(globs) + + def runtest(self): + _check_all_skipped(self.dtest) + self.runner.run(self.dtest) + + def repr_failure(self, excinfo): + import doctest + if 
excinfo.errisinstance((doctest.DocTestFailure, + doctest.UnexpectedException)): + doctestfailure = excinfo.value + example = doctestfailure.example + test = doctestfailure.test + filename = test.filename + if test.lineno is None: + lineno = None + else: + lineno = test.lineno + example.lineno + 1 + message = excinfo.type.__name__ + reprlocation = ReprFileLocation(filename, lineno, message) + checker = _get_checker() + report_choice = _get_report_choice(self.config.getoption("doctestreport")) + if lineno is not None: + lines = doctestfailure.test.docstring.splitlines(False) + # add line numbers to the left of the error message + lines = ["%03d %s" % (i + test.lineno + 1, x) + for (i, x) in enumerate(lines)] + # trim docstring error lines to 10 + lines = lines[example.lineno - 9:example.lineno + 1] + else: + lines = ['EXAMPLE LOCATION UNKNOWN, not showing all tests of that example'] + indent = '>>>' + for line in example.source.splitlines(): + lines.append('??? %s %s' % (indent, line)) + indent = '...' + if excinfo.errisinstance(doctest.DocTestFailure): + lines += checker.output_difference(example, + doctestfailure.got, report_choice).split("\n") + else: + inner_excinfo = ExceptionInfo(excinfo.value.exc_info) + lines += ["UNEXPECTED EXCEPTION: %s" % + repr(inner_excinfo.value)] + lines += traceback.format_exception(*excinfo.value.exc_info) + return ReprFailDoctest(reprlocation, lines) + else: + return super(DoctestItem, self).repr_failure(excinfo) + + def reportinfo(self): + return self.fspath, None, "[doctest] %s" % self.name + + +def _get_flag_lookup(): + import doctest + return dict(DONT_ACCEPT_TRUE_FOR_1=doctest.DONT_ACCEPT_TRUE_FOR_1, + DONT_ACCEPT_BLANKLINE=doctest.DONT_ACCEPT_BLANKLINE, + NORMALIZE_WHITESPACE=doctest.NORMALIZE_WHITESPACE, + ELLIPSIS=doctest.ELLIPSIS, + IGNORE_EXCEPTION_DETAIL=doctest.IGNORE_EXCEPTION_DETAIL, + COMPARISON_FLAGS=doctest.COMPARISON_FLAGS, + ALLOW_UNICODE=_get_allow_unicode_flag(), + ALLOW_BYTES=_get_allow_bytes_flag(), + ) + + +def get_optionflags(parent): + optionflags_str = parent.config.getini("doctest_optionflags") + flag_lookup_table = _get_flag_lookup() + flag_acc = 0 + for flag in optionflags_str: + flag_acc |= flag_lookup_table[flag] + return flag_acc + +class DoctestTextfile(pytest.Module): + obj = None + + def collect(self): + import doctest + + # inspired by doctest.testfile; ideally we would use it directly, + # but it doesn't support passing a custom checker + encoding = self.config.getini("doctest_encoding") + text = self.fspath.read_text(encoding) + filename = str(self.fspath) + name = self.fspath.basename + globs = {'__name__': '__main__'} + + optionflags = get_optionflags(self) + runner = doctest.DebugRunner(verbose=0, optionflags=optionflags, + checker=_get_checker()) + _fix_spoof_python2(runner, encoding) + + parser = doctest.DocTestParser() + test = parser.get_doctest(text, globs, name, filename, 0) + if test.examples: + yield DoctestItem(test.name, self, runner, test) + + +def _check_all_skipped(test): + """raises pytest.skip() if all examples in the given DocTest have the SKIP + option set. 
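# Illustrative sketch (not part of the vendored module): get_optionflags()
# above ORs the doctest_optionflags ini strings into one bitmask, e.g.:
import doctest

names = ["ELLIPSIS", "NORMALIZE_WHITESPACE"]  # example ini values
flags = 0
for name in names:
    flags |= getattr(doctest, name)  # each name is a doctest flag constant

assert flags & doctest.ELLIPSIS
assert flags & doctest.NORMALIZE_WHITESPACE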
+ """ + import doctest + all_skipped = all(x.options.get(doctest.SKIP, False) for x in test.examples) + if all_skipped: + pytest.skip('all tests skipped by +SKIP option') + + +class DoctestModule(pytest.Module): + def collect(self): + import doctest + if self.fspath.basename == "conftest.py": + module = self.config.pluginmanager._importconftest(self.fspath) + else: + try: + module = self.fspath.pyimport() + except ImportError: + if self.config.getvalue('doctest_ignore_import_errors'): + pytest.skip('unable to import module %r' % self.fspath) + else: + raise + # uses internal doctest module parsing mechanism + finder = doctest.DocTestFinder() + optionflags = get_optionflags(self) + runner = doctest.DebugRunner(verbose=0, optionflags=optionflags, + checker=_get_checker()) + + for test in finder.find(module, module.__name__): + if test.examples: # skip empty doctests + yield DoctestItem(test.name, self, runner, test) + + +def _setup_fixtures(doctest_item): + """ + Used by DoctestTextfile and DoctestItem to setup fixture information. + """ + def func(): + pass + + doctest_item.funcargs = {} + fm = doctest_item.session._fixturemanager + doctest_item._fixtureinfo = fm.getfixtureinfo(node=doctest_item, func=func, + cls=None, funcargs=False) + fixture_request = FixtureRequest(doctest_item) + fixture_request._fillfixtures() + return fixture_request + + +def _get_checker(): + """ + Returns a doctest.OutputChecker subclass that takes in account the + ALLOW_UNICODE option to ignore u'' prefixes in strings and ALLOW_BYTES + to strip b'' prefixes. + Useful when the same doctest should run in Python 2 and Python 3. + + An inner class is used to avoid importing "doctest" at the module + level. + """ + if hasattr(_get_checker, 'LiteralsOutputChecker'): + return _get_checker.LiteralsOutputChecker() + + import doctest + import re + + class LiteralsOutputChecker(doctest.OutputChecker): + """ + Copied from doctest_nose_plugin.py from the nltk project: + https://github.com/nltk/nltk + + Further extended to also support byte literals. + """ + + _unicode_literal_re = re.compile(r"(\W|^)[uU]([rR]?[\'\"])", re.UNICODE) + _bytes_literal_re = re.compile(r"(\W|^)[bB]([rR]?[\'\"])", re.UNICODE) + + def check_output(self, want, got, optionflags): + res = doctest.OutputChecker.check_output(self, want, got, + optionflags) + if res: + return True + + allow_unicode = optionflags & _get_allow_unicode_flag() + allow_bytes = optionflags & _get_allow_bytes_flag() + if not allow_unicode and not allow_bytes: + return False + + else: # pragma: no cover + def remove_prefixes(regex, txt): + return re.sub(regex, r'\1\2', txt) + + if allow_unicode: + want = remove_prefixes(self._unicode_literal_re, want) + got = remove_prefixes(self._unicode_literal_re, got) + if allow_bytes: + want = remove_prefixes(self._bytes_literal_re, want) + got = remove_prefixes(self._bytes_literal_re, got) + res = doctest.OutputChecker.check_output(self, want, got, + optionflags) + return res + + _get_checker.LiteralsOutputChecker = LiteralsOutputChecker + return _get_checker.LiteralsOutputChecker() + + +def _get_allow_unicode_flag(): + """ + Registers and returns the ALLOW_UNICODE flag. + """ + import doctest + return doctest.register_optionflag('ALLOW_UNICODE') + + +def _get_allow_bytes_flag(): + """ + Registers and returns the ALLOW_BYTES flag. 
+ """ + import doctest + return doctest.register_optionflag('ALLOW_BYTES') + + +def _get_report_choice(key): + """ + This function returns the actual `doctest` module flag value, we want to do it as late as possible to avoid + importing `doctest` and all its dependencies when parsing options, as it adds overhead and breaks tests. + """ + import doctest + + return { + DOCTEST_REPORT_CHOICE_UDIFF: doctest.REPORT_UDIFF, + DOCTEST_REPORT_CHOICE_CDIFF: doctest.REPORT_CDIFF, + DOCTEST_REPORT_CHOICE_NDIFF: doctest.REPORT_NDIFF, + DOCTEST_REPORT_CHOICE_ONLY_FIRST_FAILURE: doctest.REPORT_ONLY_FIRST_FAILURE, + DOCTEST_REPORT_CHOICE_NONE: 0, + }[key] + + +def _fix_spoof_python2(runner, encoding): + """ + Installs a "SpoofOut" into the given DebugRunner so it properly deals with unicode output. This + should patch only doctests for text files because they don't have a way to declare their + encoding. Doctests in docstrings from Python modules don't have the same problem given that + Python already decoded the strings. + + This fixes the problem related in issue #2434. + """ + from _pytest.compat import _PY2 + if not _PY2: + return + + from doctest import _SpoofOut + + class UnicodeSpoof(_SpoofOut): + + def getvalue(self): + result = _SpoofOut.getvalue(self) + if encoding: + result = result.decode(encoding) + return result + + runner._fakeout = UnicodeSpoof() + + +@pytest.fixture(scope='session') +def doctest_namespace(): + """ + Inject names into the doctest namespace. + """ + return dict() diff --git a/venv/lib/python3.5/site-packages/_pytest/fixtures.py b/venv/lib/python3.5/site-packages/_pytest/fixtures.py new file mode 100644 index 0000000..64d21b9 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/fixtures.py @@ -0,0 +1,1129 @@ +from __future__ import absolute_import, division, print_function +import sys + +from py._code.code import FormattedExcinfo + +import py +import warnings + +import inspect +import _pytest +from _pytest._code.code import TerminalRepr +from _pytest.compat import ( + NOTSET, exc_clear, _format_args, + getfslineno, get_real_func, + is_generator, isclass, getimfunc, + getlocation, getfuncargnames, + safe_getattr, +) +from _pytest.runner import fail +from _pytest.compat import FuncargnamesCompatAttr + +def pytest_sessionstart(session): + import _pytest.python + scopename2class.update({ + 'class': _pytest.python.Class, + 'module': _pytest.python.Module, + 'function': _pytest.main.Item, + }) + session._fixturemanager = FixtureManager(session) + + +scopename2class = {} + + +scope2props = dict(session=()) +scope2props["module"] = ("fspath", "module") +scope2props["class"] = scope2props["module"] + ("cls",) +scope2props["instance"] = scope2props["class"] + ("instance", ) +scope2props["function"] = scope2props["instance"] + ("function", "keywords") + +def scopeproperty(name=None, doc=None): + def decoratescope(func): + scopename = name or func.__name__ + + def provide(self): + if func.__name__ in scope2props[self.scope]: + return func(self) + raise AttributeError("%s not available in %s-scoped context" % ( + scopename, self.scope)) + + return property(provide, None, None, func.__doc__) + return decoratescope + + +def get_scope_node(node, scope): + cls = scopename2class.get(scope) + if cls is None: + if scope == "session": + return node.session + raise ValueError("unknown scope") + return node.getparent(cls) + + +def add_funcarg_pseudo_fixture_def(collector, metafunc, fixturemanager): + # this function will transform all collected calls to a functions + # if they use direct 
funcargs (i.e. direct parametrization) + # because we want later test execution to be able to rely on + # an existing FixtureDef structure for all arguments. + # XXX we can probably avoid this algorithm if we modify CallSpec2 + # to directly care for creating the fixturedefs within its methods. + if not metafunc._calls[0].funcargs: + return # this function call does not have direct parametrization + # collect funcargs of all callspecs into a list of values + arg2params = {} + arg2scope = {} + for callspec in metafunc._calls: + for argname, argvalue in callspec.funcargs.items(): + assert argname not in callspec.params + callspec.params[argname] = argvalue + arg2params_list = arg2params.setdefault(argname, []) + callspec.indices[argname] = len(arg2params_list) + arg2params_list.append(argvalue) + if argname not in arg2scope: + scopenum = callspec._arg2scopenum.get(argname, + scopenum_function) + arg2scope[argname] = scopes[scopenum] + callspec.funcargs.clear() + + # register artificial FixtureDef's so that later at test execution + # time we can rely on a proper FixtureDef to exist for fixture setup. + arg2fixturedefs = metafunc._arg2fixturedefs + for argname, valuelist in arg2params.items(): + # if we have a scope that is higher than function we need + # to make sure we only ever create an according fixturedef on + # a per-scope basis. We thus store and cache the fixturedef on the + # node related to the scope. + scope = arg2scope[argname] + node = None + if scope != "function": + node = get_scope_node(collector, scope) + if node is None: + assert scope == "class" and isinstance(collector, _pytest.python.Module) + # use module-level collector for class-scope (for now) + node = collector + if node and argname in node._name2pseudofixturedef: + arg2fixturedefs[argname] = [node._name2pseudofixturedef[argname]] + else: + fixturedef = FixtureDef(fixturemanager, '', argname, + get_direct_param_fixture_func, + arg2scope[argname], + valuelist, False, False) + arg2fixturedefs[argname] = [fixturedef] + if node is not None: + node._name2pseudofixturedef[argname] = fixturedef + + + +def getfixturemarker(obj): + """ return fixturemarker or None if it doesn't exist or raised + exceptions.""" + try: + return getattr(obj, "_pytestfixturefunction", None) + except Exception: + # some objects raise errors like request (from flask import request) + # we don't expect them to be fixture functions + return None + + + +def get_parametrized_fixture_keys(item, scopenum): + """ return list of keys for all parametrized arguments which match + the specified scope. 
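# Illustrative sketch (ordinary pytest usage): the direct parametrization
# handled by add_funcarg_pseudo_fixture_def() above comes from test code like:
import pytest

@pytest.mark.parametrize("value", [1, 2, 3])
def test_square(value):
    # "value" has no fixture function; a pseudo FixtureDef is synthesized
    assert value * value == value ** 2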
""" + assert scopenum < scopenum_function # function + try: + cs = item.callspec + except AttributeError: + pass + else: + # cs.indictes.items() is random order of argnames but + # then again different functions (items) can change order of + # arguments so it doesn't matter much probably + for argname, param_index in cs.indices.items(): + if cs._arg2scopenum[argname] != scopenum: + continue + if scopenum == 0: # session + key = (argname, param_index) + elif scopenum == 1: # module + key = (argname, param_index, item.fspath) + elif scopenum == 2: # class + key = (argname, param_index, item.fspath, item.cls) + yield key + + +# algorithm for sorting on a per-parametrized resource setup basis +# it is called for scopenum==0 (session) first and performs sorting +# down to the lower scopes such as to minimize number of "high scope" +# setups and teardowns + +def reorder_items(items): + argkeys_cache = {} + for scopenum in range(0, scopenum_function): + argkeys_cache[scopenum] = d = {} + for item in items: + keys = set(get_parametrized_fixture_keys(item, scopenum)) + if keys: + d[item] = keys + return reorder_items_atscope(items, set(), argkeys_cache, 0) + +def reorder_items_atscope(items, ignore, argkeys_cache, scopenum): + if scopenum >= scopenum_function or len(items) < 3: + return items + items_done = [] + while 1: + items_before, items_same, items_other, newignore = \ + slice_items(items, ignore, argkeys_cache[scopenum]) + items_before = reorder_items_atscope( + items_before, ignore, argkeys_cache,scopenum+1) + if items_same is None: + # nothing to reorder in this scope + assert items_other is None + return items_done + items_before + items_done.extend(items_before) + items = items_same + items_other + ignore = newignore + + +def slice_items(items, ignore, scoped_argkeys_cache): + # we pick the first item which uses a fixture instance in the + # requested scope and which we haven't seen yet. We slice the input + # items list into a list of items_nomatch, items_same and + # items_other + if scoped_argkeys_cache: # do we need to do work at all? + it = iter(items) + # first find a slicing key + for i, item in enumerate(it): + argkeys = scoped_argkeys_cache.get(item) + if argkeys is not None: + argkeys = argkeys.difference(ignore) + if argkeys: # found a slicing key + slicing_argkey = argkeys.pop() + items_before = items[:i] + items_same = [item] + items_other = [] + # now slice the remainder of the list + for item in it: + argkeys = scoped_argkeys_cache.get(item) + if argkeys and slicing_argkey in argkeys and \ + slicing_argkey not in ignore: + items_same.append(item) + else: + items_other.append(item) + newignore = ignore.copy() + newignore.add(slicing_argkey) + return (items_before, items_same, items_other, newignore) + return items, None, None, None + + +def fillfixtures(function): + """ fill missing funcargs for a test function. """ + try: + request = function._request + except AttributeError: + # XXX this special code path is only expected to execute + # with the oejskit plugin. It uses classes with funcargs + # and we thus have to work a bit to allow this. 
+ fm = function.session._fixturemanager + fi = fm.getfixtureinfo(function.parent, function.obj, None) + function._fixtureinfo = fi + request = function._request = FixtureRequest(function) + request._fillfixtures() + # prune out funcargs for jstests + newfuncargs = {} + for name in fi.argnames: + newfuncargs[name] = function.funcargs[name] + function.funcargs = newfuncargs + else: + request._fillfixtures() + + + +def get_direct_param_fixture_func(request): + return request.param + +class FuncFixtureInfo: + def __init__(self, argnames, names_closure, name2fixturedefs): + self.argnames = argnames + self.names_closure = names_closure + self.name2fixturedefs = name2fixturedefs + + +class FixtureRequest(FuncargnamesCompatAttr): + """ A request for a fixture from a test or fixture function. + + A request object gives access to the requesting test context + and has an optional ``param`` attribute in case + the fixture is parametrized indirectly. + """ + + def __init__(self, pyfuncitem): + self._pyfuncitem = pyfuncitem + #: fixture for which this request is being performed + self.fixturename = None + #: Scope string, one of "function", "class", "module", "session" + self.scope = "function" + self._fixture_values = {} # argname -> fixture value + self._fixture_defs = {} # argname -> FixtureDef + fixtureinfo = pyfuncitem._fixtureinfo + self._arg2fixturedefs = fixtureinfo.name2fixturedefs.copy() + self._arg2index = {} + self._fixturemanager = pyfuncitem.session._fixturemanager + + @property + def fixturenames(self): + # backward incompatible note: now a readonly property + return list(self._pyfuncitem._fixtureinfo.names_closure) + + @property + def node(self): + """ underlying collection node (depends on current request scope)""" + return self._getscopeitem(self.scope) + + + def _getnextfixturedef(self, argname): + fixturedefs = self._arg2fixturedefs.get(argname, None) + if fixturedefs is None: + # we arrive here because of a a dynamic call to + # getfixturevalue(argname) usage which was naturally + # not known at parsing/collection time + parentid = self._pyfuncitem.parent.nodeid + fixturedefs = self._fixturemanager.getfixturedefs(argname, parentid) + self._arg2fixturedefs[argname] = fixturedefs + # fixturedefs list is immutable so we maintain a decreasing index + index = self._arg2index.get(argname, 0) - 1 + if fixturedefs is None or (-index > len(fixturedefs)): + raise FixtureLookupError(argname, self) + self._arg2index[argname] = index + return fixturedefs[index] + + @property + def config(self): + """ the pytest config object associated with this request. """ + return self._pyfuncitem.config + + + @scopeproperty() + def function(self): + """ test function object if the request has a per-function scope. """ + return self._pyfuncitem.obj + + @scopeproperty("class") + def cls(self): + """ class (can be None) where the test function was collected. """ + clscol = self._pyfuncitem.getparent(_pytest.python.Class) + if clscol: + return clscol.obj + + @property + def instance(self): + """ instance (can be None) on which test function was collected. """ + # unittest support hack, see _pytest.unittest.TestCaseFunction + try: + return self._pyfuncitem._testcase + except AttributeError: + function = getattr(self, "function", None) + if function is not None: + return py.builtin._getimself(function) + + @scopeproperty() + def module(self): + """ python module object where the test function was collected. 
""" + return self._pyfuncitem.getparent(_pytest.python.Module).obj + + @scopeproperty() + def fspath(self): + """ the file system path of the test module which collected this test. """ + return self._pyfuncitem.fspath + + @property + def keywords(self): + """ keywords/markers dictionary for the underlying node. """ + return self.node.keywords + + @property + def session(self): + """ pytest session object. """ + return self._pyfuncitem.session + + def addfinalizer(self, finalizer): + """ add finalizer/teardown function to be called after the + last test within the requesting test context finished + execution. """ + # XXX usually this method is shadowed by fixturedef specific ones + self._addfinalizer(finalizer, scope=self.scope) + + def _addfinalizer(self, finalizer, scope): + colitem = self._getscopeitem(scope) + self._pyfuncitem.session._setupstate.addfinalizer( + finalizer=finalizer, colitem=colitem) + + def applymarker(self, marker): + """ Apply a marker to a single test function invocation. + This method is useful if you don't want to have a keyword/marker + on all function invocations. + + :arg marker: a :py:class:`_pytest.mark.MarkDecorator` object + created by a call to ``pytest.mark.NAME(...)``. + """ + try: + self.node.keywords[marker.markname] = marker + except AttributeError: + raise ValueError(marker) + + def raiseerror(self, msg): + """ raise a FixtureLookupError with the given message. """ + raise self._fixturemanager.FixtureLookupError(None, self, msg) + + def _fillfixtures(self): + item = self._pyfuncitem + fixturenames = getattr(item, "fixturenames", self.fixturenames) + for argname in fixturenames: + if argname not in item.funcargs: + item.funcargs[argname] = self.getfixturevalue(argname) + + def cached_setup(self, setup, teardown=None, scope="module", extrakey=None): + """ (deprecated) Return a testing resource managed by ``setup`` & + ``teardown`` calls. ``scope`` and ``extrakey`` determine when the + ``teardown`` function will be called so that subsequent calls to + ``setup`` would recreate the resource. With pytest-2.3 you often + do not need ``cached_setup()`` as you can directly declare a scope + on a fixture function and register a finalizer through + ``request.addfinalizer()``. + + :arg teardown: function receiving a previously setup resource. + :arg setup: a no-argument function creating a resource. + :arg scope: a string value out of ``function``, ``class``, ``module`` + or ``session`` indicating the caching lifecycle of the resource. + :arg extrakey: added to internal caching key of (funcargname, scope). + """ + if not hasattr(self.config, '_setupcache'): + self.config._setupcache = {} # XXX weakref? + cachekey = (self.fixturename, self._getscopeitem(scope), extrakey) + cache = self.config._setupcache + try: + val = cache[cachekey] + except KeyError: + self._check_scope(self.fixturename, self.scope, scope) + val = setup() + cache[cachekey] = val + if teardown is not None: + def finalizer(): + del cache[cachekey] + teardown(val) + self._addfinalizer(finalizer, scope=scope) + return val + + def getfixturevalue(self, argname): + """ Dynamically run a named fixture function. + + Declaring fixtures via function argument is recommended where possible. + But if you can only decide whether to use another fixture at test + setup time, you may use this function to retrieve it inside a fixture + or test function body. + """ + return self._get_active_fixturedef(argname).cached_result[0] + + def getfuncargvalue(self, argname): + """ Deprecated, use getfixturevalue. 
""" + from _pytest import deprecated + warnings.warn( + deprecated.GETFUNCARGVALUE, + DeprecationWarning) + return self.getfixturevalue(argname) + + def _get_active_fixturedef(self, argname): + try: + return self._fixture_defs[argname] + except KeyError: + try: + fixturedef = self._getnextfixturedef(argname) + except FixtureLookupError: + if argname == "request": + class PseudoFixtureDef: + cached_result = (self, [0], None) + scope = "function" + return PseudoFixtureDef + raise + # remove indent to prevent the python3 exception + # from leaking into the call + result = self._getfixturevalue(fixturedef) + self._fixture_values[argname] = result + self._fixture_defs[argname] = fixturedef + return fixturedef + + def _get_fixturestack(self): + current = self + l = [] + while 1: + fixturedef = getattr(current, "_fixturedef", None) + if fixturedef is None: + l.reverse() + return l + l.append(fixturedef) + current = current._parent_request + + def _getfixturevalue(self, fixturedef): + # prepare a subrequest object before calling fixture function + # (latter managed by fixturedef) + argname = fixturedef.argname + funcitem = self._pyfuncitem + scope = fixturedef.scope + try: + param = funcitem.callspec.getparam(argname) + except (AttributeError, ValueError): + param = NOTSET + param_index = 0 + if fixturedef.params is not None: + frame = inspect.stack()[3] + frameinfo = inspect.getframeinfo(frame[0]) + source_path = frameinfo.filename + source_lineno = frameinfo.lineno + source_path = py.path.local(source_path) + if source_path.relto(funcitem.config.rootdir): + source_path = source_path.relto(funcitem.config.rootdir) + msg = ( + "The requested fixture has no parameter defined for the " + "current test.\n\nRequested fixture '{0}' defined in:\n{1}" + "\n\nRequested here:\n{2}:{3}".format( + fixturedef.argname, + getlocation(fixturedef.func, funcitem.config.rootdir), + source_path, + source_lineno, + ) + ) + fail(msg) + else: + # indices might not be set if old-style metafunc.addcall() was used + param_index = funcitem.callspec.indices.get(argname, 0) + # if a parametrize invocation set a scope it will override + # the static scope defined with the fixture function + paramscopenum = funcitem.callspec._arg2scopenum.get(argname) + if paramscopenum is not None: + scope = scopes[paramscopenum] + + subrequest = SubRequest(self, scope, param, param_index, fixturedef) + + # check if a higher-level scoped fixture accesses a lower level one + subrequest._check_scope(argname, self.scope, scope) + + # clear sys.exc_info before invoking the fixture (python bug?) 
+ # if its not explicitly cleared it will leak into the call + exc_clear() + try: + # call the fixture function + val = fixturedef.execute(request=subrequest) + finally: + # if fixture function failed it might have registered finalizers + self.session._setupstate.addfinalizer(fixturedef.finish, + subrequest.node) + return val + + def _check_scope(self, argname, invoking_scope, requested_scope): + if argname == "request": + return + if scopemismatch(invoking_scope, requested_scope): + # try to report something helpful + lines = self._factorytraceback() + fail("ScopeMismatch: You tried to access the %r scoped " + "fixture %r with a %r scoped request object, " + "involved factories\n%s" % ( + (requested_scope, argname, invoking_scope, "\n".join(lines))), + pytrace=False) + + def _factorytraceback(self): + lines = [] + for fixturedef in self._get_fixturestack(): + factory = fixturedef.func + fs, lineno = getfslineno(factory) + p = self._pyfuncitem.session.fspath.bestrelpath(fs) + args = _format_args(factory) + lines.append("%s:%d: def %s%s" % ( + p, lineno, factory.__name__, args)) + return lines + + def _getscopeitem(self, scope): + if scope == "function": + # this might also be a non-function Item despite its attribute name + return self._pyfuncitem + node = get_scope_node(self._pyfuncitem, scope) + if node is None and scope == "class": + # fallback to function item itself + node = self._pyfuncitem + assert node + return node + + def __repr__(self): + return "" %(self.node) + + +class SubRequest(FixtureRequest): + """ a sub request for handling getting a fixture from a + test function/fixture. """ + def __init__(self, request, scope, param, param_index, fixturedef): + self._parent_request = request + self.fixturename = fixturedef.argname + if param is not NOTSET: + self.param = param + self.param_index = param_index + self.scope = scope + self._fixturedef = fixturedef + self.addfinalizer = fixturedef.addfinalizer + self._pyfuncitem = request._pyfuncitem + self._fixture_values = request._fixture_values + self._fixture_defs = request._fixture_defs + self._arg2fixturedefs = request._arg2fixturedefs + self._arg2index = request._arg2index + self._fixturemanager = request._fixturemanager + + def __repr__(self): + return "" % (self.fixturename, self._pyfuncitem) + + +class ScopeMismatchError(Exception): + """ A fixture function tries to use a different fixture function which + which has a lower scope (e.g. a Session one calls a function one) + """ + + +scopes = "session module class function".split() +scopenum_function = scopes.index("function") + + +def scopemismatch(currentscope, newscope): + return scopes.index(newscope) > scopes.index(currentscope) + + +def scope2index(scope, descr, where=None): + """Look up the index of ``scope`` and raise a descriptive value error + if not defined. + """ + try: + return scopes.index(scope) + except ValueError: + raise ValueError( + "{0} {1}has an unsupported scope value '{2}'".format( + descr, 'from {0} '.format(where) if where else '', + scope) + ) + + +class FixtureLookupError(LookupError): + """ could not return a requested Fixture (missing or invalid). 
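# Illustrative sketch (user-level code that trips _check_scope() above):
import pytest

@pytest.fixture  # function scope by default
def per_test():
    return object()

@pytest.fixture(scope="session")
def shared(per_test):
    # a session-scoped fixture requesting a function-scoped one makes
    # pytest fail with the ScopeMismatch message built above
    return per_test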
""" + def __init__(self, argname, request, msg=None): + self.argname = argname + self.request = request + self.fixturestack = request._get_fixturestack() + self.msg = msg + + def formatrepr(self): + tblines = [] + addline = tblines.append + stack = [self.request._pyfuncitem.obj] + stack.extend(map(lambda x: x.func, self.fixturestack)) + msg = self.msg + if msg is not None: + # the last fixture raise an error, let's present + # it at the requesting side + stack = stack[:-1] + for function in stack: + fspath, lineno = getfslineno(function) + try: + lines, _ = inspect.getsourcelines(get_real_func(function)) + except (IOError, IndexError, TypeError): + error_msg = "file %s, line %s: source code not available" + addline(error_msg % (fspath, lineno+1)) + else: + addline("file %s, line %s" % (fspath, lineno+1)) + for i, line in enumerate(lines): + line = line.rstrip() + addline(" " + line) + if line.lstrip().startswith('def'): + break + + if msg is None: + fm = self.request._fixturemanager + available = [] + parentid = self.request._pyfuncitem.parent.nodeid + for name, fixturedefs in fm._arg2fixturedefs.items(): + faclist = list(fm._matchfactories(fixturedefs, parentid)) + if faclist and name not in available: + available.append(name) + msg = "fixture %r not found" % (self.argname,) + msg += "\n available fixtures: %s" %(", ".join(sorted(available)),) + msg += "\n use 'pytest --fixtures [testpath]' for help on them." + + return FixtureLookupErrorRepr(fspath, lineno, tblines, msg, self.argname) + + +class FixtureLookupErrorRepr(TerminalRepr): + def __init__(self, filename, firstlineno, tblines, errorstring, argname): + self.tblines = tblines + self.errorstring = errorstring + self.filename = filename + self.firstlineno = firstlineno + self.argname = argname + + def toterminal(self, tw): + # tw.line("FixtureLookupError: %s" %(self.argname), red=True) + for tbline in self.tblines: + tw.line(tbline.rstrip()) + lines = self.errorstring.split("\n") + if lines: + tw.line('{0} {1}'.format(FormattedExcinfo.fail_marker, + lines[0].strip()), red=True) + for line in lines[1:]: + tw.line('{0} {1}'.format(FormattedExcinfo.flow_marker, + line.strip()), red=True) + tw.line() + tw.line("%s:%d" % (self.filename, self.firstlineno+1)) + + +def fail_fixturefunc(fixturefunc, msg): + fs, lineno = getfslineno(fixturefunc) + location = "%s:%s" % (fs, lineno+1) + source = _pytest._code.Source(fixturefunc) + fail(msg + ":\n\n" + str(source.indent()) + "\n" + location, + pytrace=False) + + +def call_fixture_func(fixturefunc, request, kwargs): + yieldctx = is_generator(fixturefunc) + if yieldctx: + it = fixturefunc(**kwargs) + res = next(it) + + def teardown(): + try: + next(it) + except StopIteration: + pass + else: + fail_fixturefunc(fixturefunc, + "yield_fixture function has more than one 'yield'") + + request.addfinalizer(teardown) + else: + res = fixturefunc(**kwargs) + return res + + +class FixtureDef: + """ A container for a factory definition. 
""" + def __init__(self, fixturemanager, baseid, argname, func, scope, params, + unittest=False, ids=None): + self._fixturemanager = fixturemanager + self.baseid = baseid or '' + self.has_location = baseid is not None + self.func = func + self.argname = argname + self.scope = scope + self.scopenum = scope2index( + scope or "function", + descr='fixture {0}'.format(func.__name__), + where=baseid + ) + self.params = params + startindex = unittest and 1 or None + self.argnames = getfuncargnames(func, startindex=startindex) + self.unittest = unittest + self.ids = ids + self._finalizer = [] + + def addfinalizer(self, finalizer): + self._finalizer.append(finalizer) + + def finish(self): + exceptions = [] + try: + while self._finalizer: + try: + func = self._finalizer.pop() + func() + except: + exceptions.append(sys.exc_info()) + if exceptions: + e = exceptions[0] + del exceptions # ensure we don't keep all frames alive because of the traceback + py.builtin._reraise(*e) + + finally: + ihook = self._fixturemanager.session.ihook + ihook.pytest_fixture_post_finalizer(fixturedef=self) + # even if finalization fails, we invalidate + # the cached fixture value + if hasattr(self, "cached_result"): + del self.cached_result + + def execute(self, request): + # get required arguments and register our own finish() + # with their finalization + for argname in self.argnames: + fixturedef = request._get_active_fixturedef(argname) + if argname != "request": + fixturedef.addfinalizer(self.finish) + + my_cache_key = request.param_index + cached_result = getattr(self, "cached_result", None) + if cached_result is not None: + result, cache_key, err = cached_result + if my_cache_key == cache_key: + if err is not None: + py.builtin._reraise(*err) + else: + return result + # we have a previous but differently parametrized fixture instance + # so we need to tear it down before creating a new one + self.finish() + assert not hasattr(self, "cached_result") + + ihook = self._fixturemanager.session.ihook + return ihook.pytest_fixture_setup(fixturedef=self, request=request) + + def __repr__(self): + return ("" % + (self.argname, self.scope, self.baseid)) + +def pytest_fixture_setup(fixturedef, request): + """ Execution of fixture setup. """ + kwargs = {} + for argname in fixturedef.argnames: + fixdef = request._get_active_fixturedef(argname) + result, arg_cache_key, exc = fixdef.cached_result + request._check_scope(argname, request.scope, fixdef.scope) + kwargs[argname] = result + + fixturefunc = fixturedef.func + if fixturedef.unittest: + if request.instance is not None: + # bind the unbound method to the TestCase instance + fixturefunc = fixturedef.func.__get__(request.instance) + else: + # the fixture function needs to be bound to the actual + # request.instance so that code working with "fixturedef" behaves + # as expected. 
+ if request.instance is not None: + fixturefunc = getimfunc(fixturedef.func) + if fixturefunc != fixturedef.func: + fixturefunc = fixturefunc.__get__(request.instance) + my_cache_key = request.param_index + try: + result = call_fixture_func(fixturefunc, request, kwargs) + except Exception: + fixturedef.cached_result = (None, my_cache_key, sys.exc_info()) + raise + fixturedef.cached_result = (result, my_cache_key, None) + return result + + +class FixtureFunctionMarker: + def __init__(self, scope, params, autouse=False, ids=None, name=None): + self.scope = scope + self.params = params + self.autouse = autouse + self.ids = ids + self.name = name + + def __call__(self, function): + if isclass(function): + raise ValueError( + "class fixtures not supported (may be in the future)") + function._pytestfixturefunction = self + return function + + + +def fixture(scope="function", params=None, autouse=False, ids=None, name=None): + """ (return a) decorator to mark a fixture factory function. + + This decorator can be used (with or without parameters) to define a + fixture function. The name of the fixture function can later be + referenced to cause its invocation ahead of running tests: test + modules or classes can use the pytest.mark.usefixtures(fixturename) + marker. Test functions can directly use fixture names as input + arguments in which case the fixture instance returned from the fixture + function will be injected. + + :arg scope: the scope for which this fixture is shared, one of + "function" (default), "class", "module" or "session". + + :arg params: an optional list of parameters which will cause multiple + invocations of the fixture function and all of the tests + using it. + + :arg autouse: if True, the fixture func is activated for all tests that + can see it. If False (the default) then an explicit + reference is needed to activate the fixture. + + :arg ids: list of string ids each corresponding to the params + so that they are part of the test id. If no ids are provided + they will be generated automatically from the params. + + :arg name: the name of the fixture. This defaults to the name of the + decorated function. If a fixture is used in the same module in + which it is defined, the function name of the fixture will be + shadowed by the function arg that requests the fixture; one way + to resolve this is to name the decorated function + ``fixture_`` and then use + ``@pytest.fixture(name='')``. + + Fixtures can optionally provide their values to test functions using a ``yield`` statement, + instead of ``return``. In this case, the code block after the ``yield`` statement is executed + as teardown code regardless of the test outcome. A fixture function must yield exactly once. + """ + if callable(scope) and params is None and autouse == False: + # direct decoration + return FixtureFunctionMarker( + "function", params, autouse, name=name)(scope) + if params is not None and not isinstance(params, (list, tuple)): + params = list(params) + return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name) + + +def yield_fixture(scope="function", params=None, autouse=False, ids=None, name=None): + """ (return a) decorator to mark a yield-fixture factory function. + + .. deprecated:: 3.0 + Use :py:func:`pytest.fixture` directly instead. 
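# Illustrative sketch (standard pytest usage of the fixture() arguments
# documented above):
import pytest

@pytest.fixture(scope="module", params=[1, 2], ids=["one", "two"], name="number")
def number_fixture(request):
    return request.param

def test_positive(number):
    # runs as test_positive[one] and test_positive[two]; the module-scoped
    # value is created once per parameter, not once per test
    assert number > 0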
+ """ + if callable(scope) and params is None and not autouse: + # direct decoration + return FixtureFunctionMarker( + "function", params, autouse, ids=ids, name=name)(scope) + else: + return FixtureFunctionMarker(scope, params, autouse, ids=ids, name=name) + + +defaultfuncargprefixmarker = fixture() + + +@fixture(scope="session") +def pytestconfig(request): + """ the pytest config object with access to command line opts.""" + return request.config + + +class FixtureManager: + """ + pytest fixtures definitions and information is stored and managed + from this class. + + During collection fm.parsefactories() is called multiple times to parse + fixture function definitions into FixtureDef objects and internal + data structures. + + During collection of test functions, metafunc-mechanics instantiate + a FuncFixtureInfo object which is cached per node/func-name. + This FuncFixtureInfo object is later retrieved by Function nodes + which themselves offer a fixturenames attribute. + + The FuncFixtureInfo object holds information about fixtures and FixtureDefs + relevant for a particular function. An initial list of fixtures is + assembled like this: + + - ini-defined usefixtures + - autouse-marked fixtures along the collection chain up from the function + - usefixtures markers at module/class/function level + - test function funcargs + + Subsequently the funcfixtureinfo.fixturenames attribute is computed + as the closure of the fixtures needed to setup the initial fixtures, + i. e. fixtures needed by fixture functions themselves are appended + to the fixturenames list. + + Upon the test-setup phases all fixturenames are instantiated, retrieved + by a lookup of their FuncFixtureInfo. + """ + + _argprefix = "pytest_funcarg__" + FixtureLookupError = FixtureLookupError + FixtureLookupErrorRepr = FixtureLookupErrorRepr + + def __init__(self, session): + self.session = session + self.config = session.config + self._arg2fixturedefs = {} + self._holderobjseen = set() + self._arg2finish = {} + self._nodeid_and_autousenames = [("", self.config.getini("usefixtures"))] + session.config.pluginmanager.register(self, "funcmanage") + + + def getfixtureinfo(self, node, func, cls, funcargs=True): + if funcargs and not hasattr(node, "nofuncargs"): + if cls is not None: + startindex = 1 + else: + startindex = None + argnames = getfuncargnames(func, startindex) + else: + argnames = () + usefixtures = getattr(func, "usefixtures", None) + initialnames = argnames + if usefixtures is not None: + initialnames = usefixtures.args + initialnames + fm = node.session._fixturemanager + names_closure, arg2fixturedefs = fm.getfixtureclosure(initialnames, + node) + return FuncFixtureInfo(argnames, names_closure, arg2fixturedefs) + + def pytest_plugin_registered(self, plugin): + nodeid = None + try: + p = py.path.local(plugin.__file__) + except AttributeError: + pass + else: + # construct the base nodeid which is later used to check + # what fixtures are visible for particular tests (as denoted + # by their test id) + if p.basename.startswith("conftest.py"): + nodeid = p.dirpath().relto(self.config.rootdir) + if p.sep != "/": + nodeid = nodeid.replace(p.sep, "/") + self.parsefactories(plugin, nodeid) + + def _getautousenames(self, nodeid): + """ return a tuple of fixture names to be used. 
""" + autousenames = [] + for baseid, basenames in self._nodeid_and_autousenames: + if nodeid.startswith(baseid): + if baseid: + i = len(baseid) + nextchar = nodeid[i:i+1] + if nextchar and nextchar not in ":/": + continue + autousenames.extend(basenames) + # make sure autousenames are sorted by scope, scopenum 0 is session + autousenames.sort( + key=lambda x: self._arg2fixturedefs[x][-1].scopenum) + return autousenames + + def getfixtureclosure(self, fixturenames, parentnode): + # collect the closure of all fixtures , starting with the given + # fixturenames as the initial set. As we have to visit all + # factory definitions anyway, we also return a arg2fixturedefs + # mapping so that the caller can reuse it and does not have + # to re-discover fixturedefs again for each fixturename + # (discovering matching fixtures for a given name/node is expensive) + + parentid = parentnode.nodeid + fixturenames_closure = self._getautousenames(parentid) + + def merge(otherlist): + for arg in otherlist: + if arg not in fixturenames_closure: + fixturenames_closure.append(arg) + + merge(fixturenames) + arg2fixturedefs = {} + lastlen = -1 + while lastlen != len(fixturenames_closure): + lastlen = len(fixturenames_closure) + for argname in fixturenames_closure: + if argname in arg2fixturedefs: + continue + fixturedefs = self.getfixturedefs(argname, parentid) + if fixturedefs: + arg2fixturedefs[argname] = fixturedefs + merge(fixturedefs[-1].argnames) + return fixturenames_closure, arg2fixturedefs + + def pytest_generate_tests(self, metafunc): + for argname in metafunc.fixturenames: + faclist = metafunc._arg2fixturedefs.get(argname) + if faclist: + fixturedef = faclist[-1] + if fixturedef.params is not None: + func_params = getattr(getattr(metafunc.function, 'parametrize', None), 'args', [[None]]) + # skip directly parametrized arguments + argnames = func_params[0] + if not isinstance(argnames, (tuple, list)): + argnames = [x.strip() for x in argnames.split(",") if x.strip()] + if argname not in func_params and argname not in argnames: + metafunc.parametrize(argname, fixturedef.params, + indirect=True, scope=fixturedef.scope, + ids=fixturedef.ids) + else: + continue # will raise FixtureLookupError at setup time + + def pytest_collection_modifyitems(self, items): + # separate parametrized setups + items[:] = reorder_items(items) + + def parsefactories(self, node_or_obj, nodeid=NOTSET, unittest=False): + if nodeid is not NOTSET: + holderobj = node_or_obj + else: + holderobj = node_or_obj.obj + nodeid = node_or_obj.nodeid + if holderobj in self._holderobjseen: + return + self._holderobjseen.add(holderobj) + autousenames = [] + for name in dir(holderobj): + # The attribute can be an arbitrary descriptor, so the attribute + # access below can raise. safe_getatt() ignores such exceptions. 
+ obj = safe_getattr(holderobj, name, None) + # fixture functions have a pytest_funcarg__ prefix (pre-2.3 style) + # or are "@pytest.fixture" marked + marker = getfixturemarker(obj) + if marker is None: + if not name.startswith(self._argprefix): + continue + if not callable(obj): + continue + marker = defaultfuncargprefixmarker + from _pytest import deprecated + self.config.warn('C1', deprecated.FUNCARG_PREFIX.format(name=name), nodeid=nodeid) + name = name[len(self._argprefix):] + elif not isinstance(marker, FixtureFunctionMarker): + # magic globals with __getattr__ might have got us a wrong + # fixture attribute + continue + else: + if marker.name: + name = marker.name + msg = 'fixtures cannot have "pytest_funcarg__" prefix ' \ + 'and be decorated with @pytest.fixture:\n%s' % name + assert not name.startswith(self._argprefix), msg + + fixture_def = FixtureDef(self, nodeid, name, obj, + marker.scope, marker.params, + unittest=unittest, ids=marker.ids) + + faclist = self._arg2fixturedefs.setdefault(name, []) + if fixture_def.has_location: + faclist.append(fixture_def) + else: + # fixturedefs with no location are at the front + # so this inserts the current fixturedef after the + # existing fixturedefs from external plugins but + # before the fixturedefs provided in conftests. + i = len([f for f in faclist if not f.has_location]) + faclist.insert(i, fixture_def) + if marker.autouse: + autousenames.append(name) + + if autousenames: + self._nodeid_and_autousenames.append((nodeid or '', autousenames)) + + def getfixturedefs(self, argname, nodeid): + """ + Gets a list of fixtures which are applicable to the given node id. + + :param str argname: name of the fixture to search for + :param str nodeid: full node id of the requesting test. + :return: list[FixtureDef] + """ + try: + fixturedefs = self._arg2fixturedefs[argname] + except KeyError: + return None + else: + return tuple(self._matchfactories(fixturedefs, nodeid)) + + def _matchfactories(self, fixturedefs, nodeid): + for fixturedef in fixturedefs: + if nodeid.startswith(fixturedef.baseid): + yield fixturedef + diff --git a/venv/lib/python3.5/site-packages/_pytest/freeze_support.py b/venv/lib/python3.5/site-packages/_pytest/freeze_support.py new file mode 100644 index 0000000..52f8608 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/freeze_support.py @@ -0,0 +1,44 @@ +""" +Provides a function to report all internal modules for using freezing tools +pytest +""" +from __future__ import absolute_import, division, print_function + + + +def freeze_includes(): + """ + Returns a list of module names used by py.test that should be + included by cx_freeze. + """ + import py + import _pytest + result = list(_iter_all_modules(py)) + result += list(_iter_all_modules(_pytest)) + return result + + +def _iter_all_modules(package, prefix=''): + """ + Iterates over the names of all modules that can be found in the given + package, recursively. + Example: + _iter_all_modules(_pytest) -> + ['_pytest.assertion.newinterpret', + '_pytest.capture', + '_pytest.core', + ... + ] + """ + import os + import pkgutil + if type(package) is not str: + path, prefix = package.__path__[0], package.__name__ + '.' 
+ else: + path = package + for _, name, is_package in pkgutil.iter_modules([path]): + if is_package: + for m in _iter_all_modules(os.path.join(path, name), prefix=name + '.'): + yield prefix + m + else: + yield prefix + name diff --git a/venv/lib/python3.5/site-packages/_pytest/helpconfig.py b/venv/lib/python3.5/site-packages/_pytest/helpconfig.py new file mode 100644 index 0000000..e3c6b6e --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/helpconfig.py @@ -0,0 +1,179 @@ +""" version info, help messages, tracing configuration. """ +from __future__ import absolute_import, division, print_function + +import py +import pytest +from _pytest.config import PrintHelp +import os, sys +from argparse import Action + + +class HelpAction(Action): + """This is an argparse Action that will raise an exception in + order to skip the rest of the argument parsing when --help is passed. + This prevents argparse from quitting due to missing required arguments + when any are defined, for example by ``pytest_addoption``. + This is similar to the way that the builtin argparse --help option is + implemented by raising SystemExit. + """ + + def __init__(self, + option_strings, + dest=None, + default=False, + help=None): + super(HelpAction, self).__init__( + option_strings=option_strings, + dest=dest, + const=True, + default=default, + nargs=0, + help=help) + + def __call__(self, parser, namespace, values, option_string=None): + setattr(namespace, self.dest, self.const) + + # We should only skip the rest of the parsing after preparse is done + if getattr(parser._parser, 'after_preparse', False): + raise PrintHelp + + +def pytest_addoption(parser): + group = parser.getgroup('debugconfig') + group.addoption('--version', action="store_true", + help="display pytest lib version and import information.") + group._addoption("-h", "--help", action=HelpAction, dest="help", + help="show help message and configuration info") + group._addoption('-p', action="append", dest="plugins", default = [], + metavar="name", + help="early-load given plugin (multi-allowed). " + "To avoid loading of plugins, use the `no:` prefix, e.g. " + "`no:doctest`.") + group.addoption('--traceconfig', '--trace-config', + action="store_true", default=False, + help="trace considerations of conftest.py files."), + group.addoption('--debug', + action="store_true", dest="debug", default=False, + help="store internal tracing debug information in 'pytestdebug.log'.") + group._addoption( + '-o', '--override-ini', nargs='*', dest="override_ini", + action="append", + help="override config option with option=value style, e.g. 
`-o xfail_strict=True`.") + + +@pytest.hookimpl(hookwrapper=True) +def pytest_cmdline_parse(): + outcome = yield + config = outcome.get_result() + if config.option.debug: + path = os.path.abspath("pytestdebug.log") + debugfile = open(path, 'w') + debugfile.write("versions pytest-%s, py-%s, " + "python-%s\ncwd=%s\nargs=%s\n\n" %( + pytest.__version__, py.__version__, + ".".join(map(str, sys.version_info)), + os.getcwd(), config._origargs)) + config.trace.root.setwriter(debugfile.write) + undo_tracing = config.pluginmanager.enable_tracing() + sys.stderr.write("writing pytestdebug information to %s\n" % path) + + def unset_tracing(): + debugfile.close() + sys.stderr.write("wrote pytestdebug information to %s\n" % + debugfile.name) + config.trace.root.setwriter(None) + undo_tracing() + + config.add_cleanup(unset_tracing) + +def pytest_cmdline_main(config): + if config.option.version: + p = py.path.local(pytest.__file__) + sys.stderr.write("This is pytest version %s, imported from %s\n" % + (pytest.__version__, p)) + plugininfo = getpluginversioninfo(config) + if plugininfo: + for line in plugininfo: + sys.stderr.write(line + "\n") + return 0 + elif config.option.help: + config._do_configure() + showhelp(config) + config._ensure_unconfigure() + return 0 + +def showhelp(config): + reporter = config.pluginmanager.get_plugin('terminalreporter') + tw = reporter._tw + tw.write(config._parser.optparser.format_help()) + tw.line() + tw.line() + tw.line("[pytest] ini-options in the first " + "pytest.ini|tox.ini|setup.cfg file found:") + tw.line() + + for name in config._parser._ininames: + help, type, default = config._parser._inidict[name] + if type is None: + type = "string" + spec = "%s (%s)" % (name, type) + line = " %-24s %s" %(spec, help) + tw.line(line[:tw.fullwidth]) + + tw.line() + tw.line("environment variables:") + vars = [ + ("PYTEST_ADDOPTS", "extra command line options"), + ("PYTEST_PLUGINS", "comma-separated plugins to load during startup"), + ("PYTEST_DEBUG", "set to enable debug tracing of pytest's internals") + ] + for name, help in vars: + tw.line(" %-24s %s" % (name, help)) + tw.line() + tw.line() + + tw.line("to see available markers type: pytest --markers") + tw.line("to see available fixtures type: pytest --fixtures") + tw.line("(shown according to specified file_or_dir or current dir " + "if not specified)") + + for warningreport in reporter.stats.get('warnings', []): + tw.line("warning : " + warningreport.message, red=True) + return + + +conftest_options = [ + ('pytest_plugins', 'list of plugin names to load'), +] + +def getpluginversioninfo(config): + lines = [] + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + lines.append("setuptools registered plugins:") + for plugin, dist in plugininfo: + loc = getattr(plugin, '__file__', repr(plugin)) + content = "%s-%s at %s" % (dist.project_name, dist.version, loc) + lines.append(" " + content) + return lines + +def pytest_report_header(config): + lines = [] + if config.option.debug or config.option.traceconfig: + lines.append("using: pytest-%s pylib-%s" % + (pytest.__version__,py.__version__)) + + verinfo = getpluginversioninfo(config) + if verinfo: + lines.extend(verinfo) + + if config.option.traceconfig: + lines.append("active plugins:") + items = config.pluginmanager.list_name_plugin() + for name, plugin in items: + if hasattr(plugin, '__file__'): + r = plugin.__file__ + else: + r = repr(plugin) + lines.append(" %-20s: %s" %(name, r)) + return lines diff --git 
diff --git a/venv/lib/python3.5/site-packages/_pytest/hookspec.py b/venv/lib/python3.5/site-packages/_pytest/hookspec.py
new file mode 100644
index 0000000..2c9a661
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/_pytest/hookspec.py
@@ -0,0 +1,353 @@
+""" hook specifications for pytest plugins, invoked from main.py and builtin plugins. """
+
+from _pytest._pluggy import HookspecMarker
+
+hookspec = HookspecMarker("pytest")
+
+# -------------------------------------------------------------------------
+# Initialization hooks called for every plugin
+# -------------------------------------------------------------------------
+
+@hookspec(historic=True)
+def pytest_addhooks(pluginmanager):
+    """called at plugin registration time to allow adding new hooks via a call to
+    pluginmanager.add_hookspecs(module_or_class, prefix)."""
+
+
+@hookspec(historic=True)
+def pytest_namespace():
+    """
+    DEPRECATED: this hook causes direct monkeypatching on pytest; its use is strongly discouraged.
+    Return a dict of name->object to be made globally available in
+    the pytest namespace. This hook is called at plugin registration
+    time.
+    """
+
+@hookspec(historic=True)
+def pytest_plugin_registered(plugin, manager):
+    """ a new pytest plugin got registered. """
+
+
+@hookspec(historic=True)
+def pytest_addoption(parser):
+    """register argparse-style options and ini-style config values,
+    called once at the beginning of a test run.
+
+    .. note::
+
+        This function should be implemented only in plugins or ``conftest.py``
+        files situated at the tests root directory due to how pytest
+        :ref:`discovers plugins during startup <pluginorder>`.
+
+    :arg parser: To add command line options, call
+        :py:func:`parser.addoption(...) <_pytest.config.Parser.addoption>`.
+        To add ini-file values call :py:func:`parser.addini(...)
+        <_pytest.config.Parser.addini>`.
+
+    Options can later be accessed through the
+    :py:class:`config <_pytest.config.Config>` object, respectively:
+
+    - :py:func:`config.getoption(name) <_pytest.config.Config.getoption>` to
+      retrieve the value of a command line option.
+
+    - :py:func:`config.getini(name) <_pytest.config.Config.getini>` to retrieve
+      a value read from an ini-style file.
+
+    The config object is passed around on many internal objects via the ``.config``
+    attribute or can be retrieved as the ``pytestconfig`` fixture or accessed
+    via (deprecated) ``pytest.config``.
+    """
+
+@hookspec(historic=True)
+def pytest_configure(config):
+    """ called after command line options have been parsed
+    and all plugins and initial conftest files been loaded.
+    This hook is called for every plugin.
+    """
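# --- Illustrative aside (a sketch, not part of the vendored files) ---
# The initialization hooks specified above are what a project's conftest.py
# implements. A minimal sketch, assuming a hypothetical --runslow flag and a
# "slow" marker (both names invented for this example; the collection hook
# used at the end is specified further below):

import pytest

def pytest_addoption(parser):
    # values registered here are read back later via config.getoption()
    parser.addoption("--runslow", action="store_true", default=False,
                     help="also run tests marked as slow")

def pytest_configure(config):
    # called once per plugin after option parsing; register the marker
    config.addinivalue_line("markers", "slow: marks a test as slow")

def pytest_collection_modifyitems(config, items):
    # skip slow tests unless --runslow was given
    if config.getoption("--runslow"):
        return
    skip_slow = pytest.mark.skip(reason="need --runslow option to run")
    for item in items:
        if "slow" in item.keywords:
            item.add_marker(skip_slow)
# --- end aside ---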
+# -------------------------------------------------------------------------
+# Bootstrapping hooks called for plugins registered early enough:
+# internal and 3rd party plugins as well as directly
+# discoverable conftest.py local plugins.
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_cmdline_parse(pluginmanager, args):
+    """return initialized config object, parsing the specified args.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+def pytest_cmdline_preparse(config, args):
+    """(deprecated) modify command line arguments before option parsing. """
+
+@hookspec(firstresult=True)
+def pytest_cmdline_main(config):
+    """ called for performing the main command line action. The default
+    implementation will invoke the configure hooks and runtest_mainloop.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+def pytest_load_initial_conftests(early_config, parser, args):
+    """ implements the loading of initial conftest files ahead
+    of command line option parsing. """
+
+
+# -------------------------------------------------------------------------
+# collection hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_collection(session):
+    """ perform the collection protocol for the given session.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+def pytest_collection_modifyitems(session, config, items):
+    """ called after collection has been performed, may filter or re-order
+    the items in-place."""
+
+def pytest_collection_finish(session):
+    """ called after collection has been performed and modified. """
+
+@hookspec(firstresult=True)
+def pytest_ignore_collect(path, config):
+    """ return True to prevent considering this path for collection.
+    This hook is consulted for all files and directories prior to calling
+    more specific hooks.
+
+    Stops at first non-None result, see :ref:`firstresult`
+    """
+
+@hookspec(firstresult=True)
+def pytest_collect_directory(path, parent):
+    """ called before traversing a directory for collection files.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+def pytest_collect_file(path, parent):
+    """ return collection Node or None for the given path. Any new node
+    needs to have the specified ``parent`` as a parent."""
+
+# logging hooks for collection
+def pytest_collectstart(collector):
+    """ collector starts collecting. """
+
+def pytest_itemcollected(item):
+    """ we just collected a test item. """
+
+def pytest_collectreport(report):
+    """ collector finished collecting. """
+
+def pytest_deselected(items):
+    """ called for test items deselected by keyword. """
+
+@hookspec(firstresult=True)
+def pytest_make_collect_report(collector):
+    """ perform ``collector.collect()`` and return a CollectReport.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+# -------------------------------------------------------------------------
+# Python test function related hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makemodule(path, parent):
+    """ return a Module collector or None for the given path.
+    This hook will be called for each matching test module path.
+    The pytest_collect_file hook needs to be used if you want to
+    create test modules for files that do not match as a test module.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+@hookspec(firstresult=True)
+def pytest_pycollect_makeitem(collector, name, obj):
+    """ return custom item/collector for a python object in a module, or None.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+@hookspec(firstresult=True)
+def pytest_pyfunc_call(pyfuncitem):
+    """ call underlying test function.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+def pytest_generate_tests(metafunc):
+    """ generate (multiple) parametrized calls to a test function."""
+
+@hookspec(firstresult=True)
+def pytest_make_parametrize_id(config, val, argname):
+    """Return a user-friendly string representation of the given ``val`` that will be used
+    by @pytest.mark.parametrize calls. Return None if the hook doesn't know about ``val``.
+    The parameter name is available as ``argname``, if required.
+
+    Stops at first non-None result, see :ref:`firstresult` """
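# --- Illustrative aside (a sketch, not part of the vendored files) ---
# pytest_collect_file() above is the extension point for collecting custom
# file types. A minimal conftest.py sketch that collects *.yml files as test
# items (class names and the failure message are invented for this example):

import pytest

def pytest_collect_file(path, parent):
    if path.ext == ".yml" and path.basename.startswith("test"):
        return YamlFile(path, parent)

class YamlFile(pytest.File):
    def collect(self):
        # one item per file here; a real plugin would parse the file
        yield YamlItem(self.fspath.basename, self)

class YamlItem(pytest.Item):
    def runtest(self):
        pytest.fail("yaml checks not implemented in this sketch")
# --- end aside ---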
+# -------------------------------------------------------------------------
+# generic runtest related hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_runtestloop(session):
+    """ called for performing the main runtest loop
+    (after collection finished).
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+def pytest_itemstart(item, node):
+    """ (deprecated, use pytest_runtest_logstart). """
+
+@hookspec(firstresult=True)
+def pytest_runtest_protocol(item, nextitem):
+    """ implements the runtest_setup/call/teardown protocol for
+    the given test item, including capturing exceptions and calling
+    reporting hooks.
+
+    :arg item: test item for which the runtest protocol is performed.
+
+    :arg nextitem: the scheduled-to-be-next test item (or None if this
+        is the end my friend). This argument is passed on to
+        :py:func:`pytest_runtest_teardown`.
+
+    :return boolean: True if no further hook implementations should be invoked.
+
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+def pytest_runtest_logstart(nodeid, location):
+    """ signal the start of running a single test item. """
+
+def pytest_runtest_setup(item):
+    """ called before ``pytest_runtest_call(item)``. """
+
+def pytest_runtest_call(item):
+    """ called to execute the test ``item``. """
+
+def pytest_runtest_teardown(item, nextitem):
+    """ called after ``pytest_runtest_call``.
+
+    :arg nextitem: the scheduled-to-be-next test item (None if no further
+        test item is scheduled). This argument can be used to
+        perform exact teardowns, i.e. calling just enough finalizers
+        so that nextitem only needs to call setup-functions.
+    """
+
+@hookspec(firstresult=True)
+def pytest_runtest_makereport(item, call):
+    """ return a :py:class:`_pytest.runner.TestReport` object
+    for the given :py:class:`pytest.Item <_pytest.main.Item>` and
+    :py:class:`_pytest.runner.CallInfo`.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+def pytest_runtest_logreport(report):
+    """ process a test setup/call/teardown report relating to
+    the respective phase of executing a test. """
+
+# -------------------------------------------------------------------------
+# Fixture related hooks
+# -------------------------------------------------------------------------
+
+@hookspec(firstresult=True)
+def pytest_fixture_setup(fixturedef, request):
+    """ performs fixture setup execution.
+
+    Stops at first non-None result, see :ref:`firstresult` """
+
+def pytest_fixture_post_finalizer(fixturedef):
+    """ called after fixture teardown, but before the cache is cleared so
+    the fixture result cache ``fixturedef.cached_result`` can
+    still be accessed."""
+
+# -------------------------------------------------------------------------
+# test session related hooks
+# -------------------------------------------------------------------------
+
+def pytest_sessionstart(session):
+    """ before session.main() is called. """
+
+def pytest_sessionfinish(session, exitstatus):
+    """ whole test run finishes. """
+
+def pytest_unconfigure(config):
+    """ called before test process is exited. """
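# --- Illustrative aside (a sketch, not part of the vendored files) ---
# A common implementation of the pytest_runtest_makereport spec above: a
# conftest.py hook wrapper that stores each phase's report on the item, so
# fixtures can later inspect e.g. item.rep_call.failed during teardown:

import pytest

@pytest.hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
    outcome = yield                # let the default implementation run
    rep = outcome.get_result()     # the TestReport being returned
    # attaches rep_setup / rep_call / rep_teardown attributes
    setattr(item, "rep_" + rep.when, rep)
# --- end aside ---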
""" + + +# ------------------------------------------------------------------------- +# hooks for customizing the assert methods +# ------------------------------------------------------------------------- + +def pytest_assertrepr_compare(config, op, left, right): + """return explanation for comparisons in failing assert expressions. + + Return None for no custom explanation, otherwise return a list + of strings. The strings will be joined by newlines but any newlines + *in* a string will be escaped. Note that all but the first line will + be indented slightly, the intention is for the first line to be a summary. + """ + +# ------------------------------------------------------------------------- +# hooks for influencing reporting (invoked from _pytest_terminal) +# ------------------------------------------------------------------------- + +def pytest_report_header(config, startdir): + """ return a string to be displayed as header info for terminal reporting. + + .. note:: + + This function should be implemented only in plugins or ``conftest.py`` + files situated at the tests root directory due to how pytest + :ref:`discovers plugins during startup `. + """ + +@hookspec(firstresult=True) +def pytest_report_teststatus(report): + """ return result-category, shortletter and verbose word for reporting. + + Stops at first non-None result, see :ref:`firstresult` """ + +def pytest_terminal_summary(terminalreporter, exitstatus): + """ add additional section in terminal summary reporting. """ + + +@hookspec(historic=True) +def pytest_logwarning(message, code, nodeid, fslocation): + """ process a warning specified by a message, a code string, + a nodeid and fslocation (both of which may be None + if the warning is not tied to a partilar node/location).""" + +# ------------------------------------------------------------------------- +# doctest hooks +# ------------------------------------------------------------------------- + +@hookspec(firstresult=True) +def pytest_doctest_prepare_content(content): + """ return processed content for a given doctest + + Stops at first non-None result, see :ref:`firstresult` """ + +# ------------------------------------------------------------------------- +# error handling and internal debugging hooks +# ------------------------------------------------------------------------- + +def pytest_internalerror(excrepr, excinfo): + """ called for internal errors. """ + +def pytest_keyboard_interrupt(excinfo): + """ called for keyboard interrupt. """ + +def pytest_exception_interact(node, call, report): + """called when an exception was raised which can potentially be + interactively handled. + + This hook is only called if an exception was raised + that is not an internal exception like ``skip.Exception``. + """ + +def pytest_enter_pdb(config): + """ called upon pdb.set_trace(), can be used by plugins to take special + action just before the python debugger enters in interactive mode. + + :arg config: pytest config object + :type config: _pytest.config.Config + """ diff --git a/venv/lib/python3.5/site-packages/_pytest/junitxml.py b/venv/lib/python3.5/site-packages/_pytest/junitxml.py new file mode 100644 index 0000000..3016337 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/junitxml.py @@ -0,0 +1,452 @@ +""" + report test results in JUnit-XML format, + for use with Jenkins and build integration servers. + + +Based on initial code from Ross Lawley. 
diff --git a/venv/lib/python3.5/site-packages/_pytest/junitxml.py b/venv/lib/python3.5/site-packages/_pytest/junitxml.py
new file mode 100644
index 0000000..3016337
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/_pytest/junitxml.py
@@ -0,0 +1,452 @@
+"""
+    report test results in JUnit-XML format,
+    for use with Jenkins and build integration servers.
+
+
+Based on initial code from Ross Lawley.
+
+Output conforms to https://github.com/jenkinsci/xunit-plugin/blob/master/
+src/main/resources/org/jenkinsci/plugins/xunit/types/model/xsd/junit-10.xsd
+"""
+from __future__ import absolute_import, division, print_function
+
+import functools
+import py
+import os
+import re
+import sys
+import time
+import pytest
+from _pytest.config import filename_arg
+
+# Python 2.X and 3.X compatibility
+if sys.version_info[0] < 3:
+    from codecs import open
+else:
+    unichr = chr
+    unicode = str
+    long = int
+
+
+class Junit(py.xml.Namespace):
+    pass
+
+
+# We need to get the subset of the invalid unicode ranges according to
+# XML 1.0 which are valid in this python build. Hence we calculate
+# this dynamically instead of hardcoding it. The spec range of valid
+# chars is: Char ::= #x9 | #xA | #xD | [#x20-#xD7FF] | [#xE000-#xFFFD]
+#                    | [#x10000-#x10FFFF]
+_legal_chars = (0x09, 0x0A, 0x0d)
+_legal_ranges = (
+    (0x20, 0x7E), (0x80, 0xD7FF), (0xE000, 0xFFFD), (0x10000, 0x10FFFF),
+)
+_legal_xml_re = [
+    unicode("%s-%s") % (unichr(low), unichr(high))
+    for (low, high) in _legal_ranges if low < sys.maxunicode
+]
+_legal_xml_re = [unichr(x) for x in _legal_chars] + _legal_xml_re
+illegal_xml_re = re.compile(unicode('[^%s]') % unicode('').join(_legal_xml_re))
+del _legal_chars
+del _legal_ranges
+del _legal_xml_re
+
+_py_ext_re = re.compile(r"\.py$")
+
+
+def bin_xml_escape(arg):
+    def repl(matchobj):
+        i = ord(matchobj.group())
+        if i <= 0xFF:
+            return unicode('#x%02X') % i
+        else:
+            return unicode('#x%04X') % i
+
+    return py.xml.raw(illegal_xml_re.sub(repl, py.xml.escape(arg)))
+
+
+class _NodeReporter(object):
+    def __init__(self, nodeid, xml):
+
+        self.id = nodeid
+        self.xml = xml
+        self.add_stats = self.xml.add_stats
+        self.duration = 0
+        self.properties = []
+        self.nodes = []
+        self.testcase = None
+        self.attrs = {}
+
+    def append(self, node):
+        self.xml.add_stats(type(node).__name__)
+        self.nodes.append(node)
+
+    def add_property(self, name, value):
+        self.properties.append((str(name), bin_xml_escape(value)))
+
+    def make_properties_node(self):
+        """Return a Junit node containing custom properties, if any.
+ """ + if self.properties: + return Junit.properties([ + Junit.property(name=name, value=value) + for name, value in self.properties + ]) + return '' + + def record_testreport(self, testreport): + assert not self.testcase + names = mangle_test_address(testreport.nodeid) + classnames = names[:-1] + if self.xml.prefix: + classnames.insert(0, self.xml.prefix) + attrs = { + "classname": ".".join(classnames), + "name": bin_xml_escape(names[-1]), + "file": testreport.location[0], + } + if testreport.location[1] is not None: + attrs["line"] = testreport.location[1] + if hasattr(testreport, "url"): + attrs["url"] = testreport.url + self.attrs = attrs + + def to_xml(self): + testcase = Junit.testcase(time=self.duration, **self.attrs) + testcase.append(self.make_properties_node()) + for node in self.nodes: + testcase.append(node) + return testcase + + def _add_simple(self, kind, message, data=None): + data = bin_xml_escape(data) + node = kind(data, message=message) + self.append(node) + + def write_captured_output(self, report): + for capname in ('out', 'err'): + content = getattr(report, 'capstd' + capname) + if content: + tag = getattr(Junit, 'system-' + capname) + self.append(tag(bin_xml_escape(content))) + + def append_pass(self, report): + self.add_stats('passed') + + def append_failure(self, report): + # msg = str(report.longrepr.reprtraceback.extraline) + if hasattr(report, "wasxfail"): + self._add_simple( + Junit.skipped, + "xfail-marked test passes unexpectedly") + else: + if hasattr(report.longrepr, "reprcrash"): + message = report.longrepr.reprcrash.message + elif isinstance(report.longrepr, (unicode, str)): + message = report.longrepr + else: + message = str(report.longrepr) + message = bin_xml_escape(message) + fail = Junit.failure(message=message) + fail.append(bin_xml_escape(report.longrepr)) + self.append(fail) + + def append_collect_error(self, report): + # msg = str(report.longrepr.reprtraceback.extraline) + self.append(Junit.error(bin_xml_escape(report.longrepr), + message="collection failure")) + + def append_collect_skipped(self, report): + self._add_simple( + Junit.skipped, "collection skipped", report.longrepr) + + def append_error(self, report): + if getattr(report, 'when', None) == 'teardown': + msg = "test teardown failure" + else: + msg = "test setup failure" + self._add_simple( + Junit.error, msg, report.longrepr) + + def append_skipped(self, report): + if hasattr(report, "wasxfail"): + self._add_simple( + Junit.skipped, "expected test failure", report.wasxfail + ) + else: + filename, lineno, skipreason = report.longrepr + if skipreason.startswith("Skipped: "): + skipreason = bin_xml_escape(skipreason[9:]) + self.append( + Junit.skipped("%s:%s: %s" % (filename, lineno, skipreason), + type="pytest.skip", + message=skipreason)) + self.write_captured_output(report) + + def finalize(self): + data = self.to_xml().unicode(indent=0) + self.__dict__.clear() + self.to_xml = lambda: py.xml.raw(data) + + +@pytest.fixture +def record_xml_property(request): + """Add extra xml properties to the tag for the calling test. + The fixture is callable with ``(name, value)``, with value being automatically + xml-encoded. 
+ """ + request.node.warn( + code='C3', + message='record_xml_property is an experimental feature', + ) + xml = getattr(request.config, "_xml", None) + if xml is not None: + node_reporter = xml.node_reporter(request.node.nodeid) + return node_reporter.add_property + else: + def add_property_noop(name, value): + pass + + return add_property_noop + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting") + group.addoption( + '--junitxml', '--junit-xml', + action="store", + dest="xmlpath", + metavar="path", + type=functools.partial(filename_arg, optname="--junitxml"), + default=None, + help="create junit-xml style report file at given path.") + group.addoption( + '--junitprefix', '--junit-prefix', + action="store", + metavar="str", + default=None, + help="prepend prefix to classnames in junit-xml output") + parser.addini("junit_suite_name", "Test suite name for JUnit report", default="pytest") + + +def pytest_configure(config): + xmlpath = config.option.xmlpath + # prevent opening xmllog on slave nodes (xdist) + if xmlpath and not hasattr(config, 'slaveinput'): + config._xml = LogXML(xmlpath, config.option.junitprefix, config.getini("junit_suite_name")) + config.pluginmanager.register(config._xml) + + +def pytest_unconfigure(config): + xml = getattr(config, '_xml', None) + if xml: + del config._xml + config.pluginmanager.unregister(xml) + + +def mangle_test_address(address): + path, possible_open_bracket, params = address.partition('[') + names = path.split("::") + try: + names.remove('()') + except ValueError: + pass + # convert file path to dotted path + names[0] = names[0].replace("/", '.') + names[0] = _py_ext_re.sub("", names[0]) + # put any params back + names[-1] += possible_open_bracket + params + return names + + +class LogXML(object): + def __init__(self, logfile, prefix, suite_name="pytest"): + logfile = os.path.expanduser(os.path.expandvars(logfile)) + self.logfile = os.path.normpath(os.path.abspath(logfile)) + self.prefix = prefix + self.suite_name = suite_name + self.stats = dict.fromkeys([ + 'error', + 'passed', + 'failure', + 'skipped', + ], 0) + self.node_reporters = {} # nodeid -> _NodeReporter + self.node_reporters_ordered = [] + self.global_properties = [] + # List of reports that failed on call but teardown is pending. + self.open_reports = [] + self.cnt_double_fail_tests = 0 + + def finalize(self, report): + nodeid = getattr(report, 'nodeid', report) + # local hack to handle xdist report order + slavenode = getattr(report, 'node', None) + reporter = self.node_reporters.pop((nodeid, slavenode)) + if reporter is not None: + reporter.finalize() + + def node_reporter(self, report): + nodeid = getattr(report, 'nodeid', report) + # local hack to handle xdist report order + slavenode = getattr(report, 'node', None) + + key = nodeid, slavenode + + if key in self.node_reporters: + # TODO: breasks for --dist=each + return self.node_reporters[key] + + reporter = _NodeReporter(nodeid, self) + + self.node_reporters[key] = reporter + self.node_reporters_ordered.append(reporter) + + return reporter + + def add_stats(self, key): + if key in self.stats: + self.stats[key] += 1 + + def _opentestcase(self, report): + reporter = self.node_reporter(report) + reporter.record_testreport(report) + return reporter + + def pytest_runtest_logreport(self, report): + """handle a setup/call/teardown report, generating the appropriate + xml tags as necessary. + + note: due to plugins like xdist, this hook may be called in interlaced + order with reports from other nodes. 
+    def pytest_runtest_logreport(self, report):
+        """handle a setup/call/teardown report, generating the appropriate
+        xml tags as necessary.
+
+        note: due to plugins like xdist, this hook may be called in interlaced
+        order with reports from other nodes. for example:
+
+        usual call order:
+            -> setup node1
+            -> call node1
+            -> teardown node1
+            -> setup node2
+            -> call node2
+            -> teardown node2
+
+        possible call order in xdist:
+            -> setup node1
+            -> call node1
+            -> setup node2
+            -> call node2
+            -> teardown node2
+            -> teardown node1
+        """
+        close_report = None
+        if report.passed:
+            if report.when == "call":  # ignore setup/teardown
+                reporter = self._opentestcase(report)
+                reporter.append_pass(report)
+        elif report.failed:
+            if report.when == "teardown":
+                # The following vars are needed when xdist plugin is used
+                report_wid = getattr(report, "worker_id", None)
+                report_ii = getattr(report, "item_index", None)
+                close_report = next(
+                    (rep for rep in self.open_reports
+                     if (rep.nodeid == report.nodeid and
+                         getattr(rep, "item_index", None) == report_ii and
+                         getattr(rep, "worker_id", None) == report_wid
+                         )
+                     ), None)
+                if close_report:
+                    # We need to open new testcase in case we have failure in
+                    # call and error in teardown in order to follow junit
+                    # schema
+                    self.finalize(close_report)
+                    self.cnt_double_fail_tests += 1
+            reporter = self._opentestcase(report)
+            if report.when == "call":
+                reporter.append_failure(report)
+                self.open_reports.append(report)
+            else:
+                reporter.append_error(report)
+        elif report.skipped:
+            reporter = self._opentestcase(report)
+            reporter.append_skipped(report)
+        self.update_testcase_duration(report)
+        if report.when == "teardown":
+            reporter = self._opentestcase(report)
+            reporter.write_captured_output(report)
+            self.finalize(report)
+            report_wid = getattr(report, "worker_id", None)
+            report_ii = getattr(report, "item_index", None)
+            close_report = next(
+                (rep for rep in self.open_reports
+                 if (rep.nodeid == report.nodeid and
+                     getattr(rep, "item_index", None) == report_ii and
+                     getattr(rep, "worker_id", None) == report_wid
+                     )
+                 ), None)
+            if close_report:
+                self.open_reports.remove(close_report)
+
+    def update_testcase_duration(self, report):
+        """accumulates total duration for nodeid from given report and updates
+        the Junit.testcase with the new total if already created.
+ """ + reporter = self.node_reporter(report) + reporter.duration += getattr(report, 'duration', 0.0) + + def pytest_collectreport(self, report): + if not report.passed: + reporter = self._opentestcase(report) + if report.failed: + reporter.append_collect_error(report) + else: + reporter.append_collect_skipped(report) + + def pytest_internalerror(self, excrepr): + reporter = self.node_reporter('internal') + reporter.attrs.update(classname="pytest", name='internal') + reporter._add_simple(Junit.error, 'internal error', excrepr) + + def pytest_sessionstart(self): + self.suite_start_time = time.time() + + def pytest_sessionfinish(self): + dirname = os.path.dirname(os.path.abspath(self.logfile)) + if not os.path.isdir(dirname): + os.makedirs(dirname) + logfile = open(self.logfile, 'w', encoding='utf-8') + suite_stop_time = time.time() + suite_time_delta = suite_stop_time - self.suite_start_time + + numtests = (self.stats['passed'] + self.stats['failure'] + + self.stats['skipped'] + self.stats['error'] - + self.cnt_double_fail_tests) + logfile.write('') + + logfile.write(Junit.testsuite( + self._get_global_properties_node(), + [x.to_xml() for x in self.node_reporters_ordered], + name=self.suite_name, + errors=self.stats['error'], + failures=self.stats['failure'], + skips=self.stats['skipped'], + tests=numtests, + time="%.3f" % suite_time_delta, ).unicode(indent=0)) + logfile.close() + + def pytest_terminal_summary(self, terminalreporter): + terminalreporter.write_sep("-", + "generated xml file: %s" % (self.logfile)) + + def add_global_property(self, name, value): + self.global_properties.append((str(name), bin_xml_escape(value))) + + def _get_global_properties_node(self): + """Return a Junit node containing custom properties, if any. + """ + if self.global_properties: + return Junit.properties( + [ + Junit.property(name=name, value=value) + for name, value in self.global_properties + ] + ) + return '' diff --git a/venv/lib/python3.5/site-packages/_pytest/main.py b/venv/lib/python3.5/site-packages/_pytest/main.py new file mode 100644 index 0000000..e6f679a --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/main.py @@ -0,0 +1,784 @@ +""" core implementation of testing process: init, session, runtest loop. 
""" +from __future__ import absolute_import, division, print_function + +import functools +import os +import sys + +import _pytest +import _pytest._code +import py +try: + from collections import MutableMapping as MappingMixin +except ImportError: + from UserDict import DictMixin as MappingMixin + +from _pytest.config import directory_arg, UsageError, hookimpl +from _pytest.runner import collect_one_node, exit + +tracebackcutdir = py.path.local(_pytest.__file__).dirpath() + +# exitcodes for the command line +EXIT_OK = 0 +EXIT_TESTSFAILED = 1 +EXIT_INTERRUPTED = 2 +EXIT_INTERNALERROR = 3 +EXIT_USAGEERROR = 4 +EXIT_NOTESTSCOLLECTED = 5 + + +def pytest_addoption(parser): + parser.addini("norecursedirs", "directory patterns to avoid for recursion", + type="args", default=['.*', 'build', 'dist', 'CVS', '_darcs', '{arch}', '*.egg', 'venv']) + parser.addini("testpaths", "directories to search for tests when no files or directories are given in the command line.", + type="args", default=[]) + #parser.addini("dirpatterns", + # "patterns specifying possible locations of test files", + # type="linelist", default=["**/test_*.txt", + # "**/test_*.py", "**/*_test.py"] + #) + group = parser.getgroup("general", "running and selection options") + group._addoption('-x', '--exitfirst', action="store_const", + dest="maxfail", const=1, + help="exit instantly on first error or failed test."), + group._addoption('--maxfail', metavar="num", + action="store", type=int, dest="maxfail", default=0, + help="exit after first num failures or errors.") + group._addoption('--strict', action="store_true", + help="run pytest in strict mode, warnings become errors.") + group._addoption("-c", metavar="file", type=str, dest="inifilename", + help="load configuration from `file` instead of trying to locate one of the implicit configuration files.") + group._addoption("--continue-on-collection-errors", action="store_true", + default=False, dest="continue_on_collection_errors", + help="Force test execution even if collection errors occur.") + + group = parser.getgroup("collect", "collection") + group.addoption('--collectonly', '--collect-only', action="store_true", + help="only collect tests, don't execute them."), + group.addoption('--pyargs', action="store_true", + help="try to interpret all arguments as python packages.") + group.addoption("--ignore", action="append", metavar="path", + help="ignore path during collection (multi-allowed).") + # when changing this to --conf-cut-dir, config.py Conftest.setinitial + # needs upgrading as well + group.addoption('--confcutdir', dest="confcutdir", default=None, + metavar="dir", type=functools.partial(directory_arg, optname="--confcutdir"), + help="only load conftest.py's relative to specified dir.") + group.addoption('--noconftest', action="store_true", + dest="noconftest", default=False, + help="Don't load any conftest.py files.") + group.addoption('--keepduplicates', '--keep-duplicates', action="store_true", + dest="keepduplicates", default=False, + help="Keep duplicate tests.") + + group = parser.getgroup("debugconfig", + "test session debugging and configuration") + group.addoption('--basetemp', dest="basetemp", default=None, metavar="dir", + help="base temporary directory for this test run.") + + + +def pytest_namespace(): + """keeping this one works around a deeper startup issue in pytest + + i tried to find it for a while but the amount of time turned unsustainable, + so i put a hack in to revisit later + """ + return {} + + +def pytest_configure(config): + 
+    __import__('pytest').config = config  # compatibility
+
+
+def wrap_session(config, doit):
+    """Skeleton command line program"""
+    session = Session(config)
+    session.exitstatus = EXIT_OK
+    initstate = 0
+    try:
+        try:
+            config._do_configure()
+            initstate = 1
+            config.hook.pytest_sessionstart(session=session)
+            initstate = 2
+            session.exitstatus = doit(config, session) or 0
+        except UsageError:
+            raise
+        except KeyboardInterrupt:
+            excinfo = _pytest._code.ExceptionInfo()
+            if initstate < 2 and isinstance(excinfo.value, exit.Exception):
+                sys.stderr.write('{0}: {1}\n'.format(
+                    excinfo.typename, excinfo.value.msg))
+            config.hook.pytest_keyboard_interrupt(excinfo=excinfo)
+            session.exitstatus = EXIT_INTERRUPTED
+        except:
+            excinfo = _pytest._code.ExceptionInfo()
+            config.notify_exception(excinfo, config.option)
+            session.exitstatus = EXIT_INTERNALERROR
+            if excinfo.errisinstance(SystemExit):
+                sys.stderr.write("mainloop: caught Spurious SystemExit!\n")
+
+    finally:
+        excinfo = None  # Explicitly break reference cycle.
+        session.startdir.chdir()
+        if initstate >= 2:
+            config.hook.pytest_sessionfinish(
+                session=session,
+                exitstatus=session.exitstatus)
+        config._ensure_unconfigure()
+    return session.exitstatus
+
+
+def pytest_cmdline_main(config):
+    return wrap_session(config, _main)
+
+
+def _main(config, session):
+    """ default command line protocol for initialization, session,
+    running tests and reporting. """
+    config.hook.pytest_collection(session=session)
+    config.hook.pytest_runtestloop(session=session)
+
+    if session.testsfailed:
+        return EXIT_TESTSFAILED
+    elif session.testscollected == 0:
+        return EXIT_NOTESTSCOLLECTED
+
+
+def pytest_collection(session):
+    return session.perform_collect()
+
+
+def pytest_runtestloop(session):
+    if (session.testsfailed and
+            not session.config.option.continue_on_collection_errors):
+        raise session.Interrupted(
+            "%d errors during collection" % session.testsfailed)
+
+    if session.config.option.collectonly:
+        return True
+
+    for i, item in enumerate(session.items):
+        nextitem = session.items[i+1] if i+1 < len(session.items) else None
+        item.config.hook.pytest_runtest_protocol(item=item, nextitem=nextitem)
+        if session.shouldstop:
+            raise session.Interrupted(session.shouldstop)
+    return True
+
+
+def pytest_ignore_collect(path, config):
+    ignore_paths = config._getconftest_pathlist("collect_ignore", path=path.dirpath())
+    ignore_paths = ignore_paths or []
+    excludeopt = config.getoption("ignore")
+    if excludeopt:
+        ignore_paths.extend([py.path.local(x) for x in excludeopt])
+
+    if py.path.local(path) in ignore_paths:
+        return True
+
+    # Skip duplicate paths.
+    keepduplicates = config.getoption("keepduplicates")
+    duplicate_paths = config.pluginmanager._duplicatepaths
+    if not keepduplicates:
+        if path in duplicate_paths:
+            return True
+        else:
+            duplicate_paths.add(path)
+
+    return False
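# --- Illustrative aside (a sketch, not part of the vendored files) ---
# pytest_ignore_collect() above consults the "collect_ignore" conftest
# variable, so collection can be pruned declaratively. A sketch of a
# conftest.py doing so (the listed paths are invented examples):

import sys

collect_ignore = ["setup.py"]                     # never collected
if sys.version_info[0] < 3:
    collect_ignore.append("tests/test_py3_only.py")

# the --ignore option feeds the same hook from the command line:
#   pytest --ignore=setup.py --ignore=tests/test_py3_only.py
# --- end aside ---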
+class FSHookProxy:
+    def __init__(self, fspath, pm, remove_mods):
+        self.fspath = fspath
+        self.pm = pm
+        self.remove_mods = remove_mods
+
+    def __getattr__(self, name):
+        x = self.pm.subset_hook_caller(name, remove_plugins=self.remove_mods)
+        self.__dict__[name] = x
+        return x
+
+class _CompatProperty(object):
+    def __init__(self, name):
+        self.name = name
+
+    def __get__(self, obj, owner):
+        if obj is None:
+            return self
+
+        # TODO: reenable in the features branch
+        # warnings.warn(
+        #     "usage of {owner!r}.{name} is deprecated, please use pytest.{name} instead".format(
+        #         name=self.name, owner=type(owner).__name__),
+        #     PendingDeprecationWarning, stacklevel=2)
+        return getattr(__import__('pytest'), self.name)
+
+
+
+class NodeKeywords(MappingMixin):
+    def __init__(self, node):
+        self.node = node
+        self.parent = node.parent
+        self._markers = {node.name: True}
+
+    def __getitem__(self, key):
+        try:
+            return self._markers[key]
+        except KeyError:
+            if self.parent is None:
+                raise
+            return self.parent.keywords[key]
+
+    def __setitem__(self, key, value):
+        self._markers[key] = value
+
+    def __delitem__(self, key):
+        raise ValueError("cannot delete key in keywords dict")
+
+    def __iter__(self):
+        seen = set(self._markers)
+        if self.parent is not None:
+            seen.update(self.parent.keywords)
+        return iter(seen)
+
+    def __len__(self):
+        # note: len() of a bare iterator would raise TypeError
+        return len(list(self.__iter__()))
+
+    def keys(self):
+        return list(self)
+
+    def __repr__(self):
+        return "<NodeKeywords for node %s>" % (self.node, )
+
+
+class Node(object):
+    """ base class for Collector and Item, the components of the test
+    collection tree. Collector subclasses have children, Items are terminal
+    nodes."""
+
+    def __init__(self, name, parent=None, config=None, session=None):
+        #: a unique name within the scope of the parent node
+        self.name = name
+
+        #: the parent collector node.
+        self.parent = parent
+
+        #: the pytest config object
+        self.config = config or parent.config
+
+        #: the session this node is part of
+        self.session = session or parent.session
+
+        #: filesystem path where this node was collected from (can be None)
+        self.fspath = getattr(parent, 'fspath', None)
+
+        #: keywords/markers collected from all scopes
+        self.keywords = NodeKeywords(self)
+
+        #: allow adding of extra keywords to use for matching
+        self.extra_keyword_matches = set()
+
+        # used for storing artificial fixturedefs for direct parametrization
+        self._name2pseudofixturedef = {}
+
+    @property
+    def ihook(self):
+        """ fspath sensitive hook proxy used to call pytest hooks"""
+        return self.session.gethookproxy(self.fspath)
+
+    Module = _CompatProperty("Module")
+    Class = _CompatProperty("Class")
+    Instance = _CompatProperty("Instance")
+    Function = _CompatProperty("Function")
+    File = _CompatProperty("File")
+    Item = _CompatProperty("Item")
+
+    def _getcustomclass(self, name):
+        maybe_compatprop = getattr(type(self), name)
+        if isinstance(maybe_compatprop, _CompatProperty):
+            return getattr(__import__('pytest'), name)
+        else:
+            cls = getattr(self, name)
+            # TODO: reenable in the features branch
+            # warnings.warn("use of node.%s is deprecated, "
+            #     "use pytest_pycollect_makeitem(...) to create custom "
+            #     "collection nodes" % name, category=DeprecationWarning)
+        return cls
+
+    def __repr__(self):
+        return "<%s %r>" % (self.__class__.__name__,
+                            getattr(self, 'name', None))
+
+    def warn(self, code, message):
+        """ generate a warning with the given code and message for this
+        item. """
+        assert isinstance(code, str)
+        fslocation = getattr(self, "location", None)
+        if fslocation is None:
+            fslocation = getattr(self, "fspath", None)
+        self.ihook.pytest_logwarning.call_historic(kwargs=dict(
+            code=code, message=message,
+            nodeid=self.nodeid, fslocation=fslocation))
+
+    # methods for ordering nodes
+    @property
+    def nodeid(self):
+        """ a ::-separated string denoting its collection tree address. """
+        try:
+            return self._nodeid
+        except AttributeError:
+            self._nodeid = x = self._makeid()
+            return x
+
+    def _makeid(self):
+        return self.parent.nodeid + "::" + self.name
+
+    def __hash__(self):
+        return hash(self.nodeid)
+
+    def setup(self):
+        pass
+
+    def teardown(self):
+        pass
+
+    def _memoizedcall(self, attrname, function):
+        exattrname = "_ex_" + attrname
+        failure = getattr(self, exattrname, None)
+        if failure is not None:
+            py.builtin._reraise(failure[0], failure[1], failure[2])
+        if hasattr(self, attrname):
+            return getattr(self, attrname)
+        try:
+            res = function()
+        except py.builtin._sysex:
+            raise
+        except:
+            failure = sys.exc_info()
+            setattr(self, exattrname, failure)
+            raise
+        setattr(self, attrname, res)
+        return res
+
+    def listchain(self):
+        """ return list of all parent collectors up to self,
+        starting from root of collection tree. """
+        chain = []
+        item = self
+        while item is not None:
+            chain.append(item)
+            item = item.parent
+        chain.reverse()
+        return chain
+
+    def add_marker(self, marker):
+        """ dynamically add a marker object to the node.
+
+        ``marker`` can be a string or pytest.mark.* instance.
+        """
+        from _pytest.mark import MarkDecorator, MARK_GEN
+        if isinstance(marker, py.builtin._basestring):
+            marker = getattr(MARK_GEN, marker)
+        elif not isinstance(marker, MarkDecorator):
+            raise ValueError("is not a string or pytest.mark.* Marker")
+        self.keywords[marker.name] = marker
+
+    def get_marker(self, name):
+        """ get a marker object from this node or None if
+        the node doesn't have a marker with that name. """
+        val = self.keywords.get(name, None)
+        if val is not None:
+            from _pytest.mark import MarkInfo, MarkDecorator
+            if isinstance(val, (MarkDecorator, MarkInfo)):
+                return val
+
+    def listextrakeywords(self):
+        """ Return a set of all extra keywords in self and any parents."""
+        extra_keywords = set()
+        item = self
+        for item in self.listchain():
+            extra_keywords.update(item.extra_keyword_matches)
+        return extra_keywords
+
+    def listnames(self):
+        return [x.name for x in self.listchain()]
+
+    def addfinalizer(self, fin):
+        """ register a function to be called when this node is finalized.
+
+        This method can only be called when this node is active
+        in a setup chain, for example during self.setup().
+ """ + self.session._setupstate.addfinalizer(fin, self) + + def getparent(self, cls): + """ get the next parent node (including ourself) + which is an instance of the given class""" + current = self + while current and not isinstance(current, cls): + current = current.parent + return current + + def _prunetraceback(self, excinfo): + pass + + def _repr_failure_py(self, excinfo, style=None): + fm = self.session._fixturemanager + if excinfo.errisinstance(fm.FixtureLookupError): + return excinfo.value.formatrepr() + tbfilter = True + if self.config.option.fulltrace: + style="long" + else: + tb = _pytest._code.Traceback([excinfo.traceback[-1]]) + self._prunetraceback(excinfo) + if len(excinfo.traceback) == 0: + excinfo.traceback = tb + tbfilter = False # prunetraceback already does it + if style == "auto": + style = "long" + # XXX should excinfo.getrepr record all data and toterminal() process it? + if style is None: + if self.config.option.tbstyle == "short": + style = "short" + else: + style = "long" + + try: + os.getcwd() + abspath = False + except OSError: + abspath = True + + return excinfo.getrepr(funcargs=True, abspath=abspath, + showlocals=self.config.option.showlocals, + style=style, tbfilter=tbfilter) + + repr_failure = _repr_failure_py + +class Collector(Node): + """ Collector instances create children through collect() + and thus iteratively build a tree. + """ + + class CollectError(Exception): + """ an error during collection, contains a custom message. """ + + def collect(self): + """ returns a list of children (items and collectors) + for this collection node. + """ + raise NotImplementedError("abstract") + + def repr_failure(self, excinfo): + """ represent a collection failure. """ + if excinfo.errisinstance(self.CollectError): + exc = excinfo.value + return str(exc.args[0]) + return self._repr_failure_py(excinfo, style="short") + + def _prunetraceback(self, excinfo): + if hasattr(self, 'fspath'): + traceback = excinfo.traceback + ntraceback = traceback.cut(path=self.fspath) + if ntraceback == traceback: + ntraceback = ntraceback.cut(excludepath=tracebackcutdir) + excinfo.traceback = ntraceback.filter() + +class FSCollector(Collector): + def __init__(self, fspath, parent=None, config=None, session=None): + fspath = py.path.local(fspath) # xxx only for test_resultlog.py? + name = fspath.basename + if parent is not None: + rel = fspath.relto(parent.fspath) + if rel: + name = rel + name = name.replace(os.sep, "/") + super(FSCollector, self).__init__(name, parent, config, session) + self.fspath = fspath + + def _makeid(self): + relpath = self.fspath.relto(self.config.rootdir) + if os.sep != "/": + relpath = relpath.replace(os.sep, "/") + return relpath + +class File(FSCollector): + """ base class for collecting tests from a file. """ + +class Item(Node): + """ a basic test invocation item. Note that for a single function + there might be multiple test invocation items. 
+ """ + nextitem = None + + def __init__(self, name, parent=None, config=None, session=None): + super(Item, self).__init__(name, parent, config, session) + self._report_sections = [] + + def add_report_section(self, when, key, content): + if content: + self._report_sections.append((when, key, content)) + + def reportinfo(self): + return self.fspath, None, "" + + @property + def location(self): + try: + return self._location + except AttributeError: + location = self.reportinfo() + # bestrelpath is a quite slow function + cache = self.config.__dict__.setdefault("_bestrelpathcache", {}) + try: + fspath = cache[location[0]] + except KeyError: + fspath = self.session.fspath.bestrelpath(location[0]) + cache[location[0]] = fspath + location = (fspath, location[1], str(location[2])) + self._location = location + return location + +class NoMatch(Exception): + """ raised if matching cannot locate a matching names. """ + +class Interrupted(KeyboardInterrupt): + """ signals an interrupted test run. """ + __module__ = 'builtins' # for py3 + +class Session(FSCollector): + Interrupted = Interrupted + + def __init__(self, config): + FSCollector.__init__(self, config.rootdir, parent=None, + config=config, session=self) + self.testsfailed = 0 + self.testscollected = 0 + self.shouldstop = False + self.trace = config.trace.root.get("collection") + self._norecursepatterns = config.getini("norecursedirs") + self.startdir = py.path.local() + self.config.pluginmanager.register(self, name="session") + + def _makeid(self): + return "" + + @hookimpl(tryfirst=True) + def pytest_collectstart(self): + if self.shouldstop: + raise self.Interrupted(self.shouldstop) + + @hookimpl(tryfirst=True) + def pytest_runtest_logreport(self, report): + if report.failed and not hasattr(report, 'wasxfail'): + self.testsfailed += 1 + maxfail = self.config.getvalue("maxfail") + if maxfail and self.testsfailed >= maxfail: + self.shouldstop = "stopping after %d failures" % ( + self.testsfailed) + pytest_collectreport = pytest_runtest_logreport + + def isinitpath(self, path): + return path in self._initialpaths + + def gethookproxy(self, fspath): + # check if we have the common case of running + # hooks with all conftest.py filesall conftest.py + pm = self.config.pluginmanager + my_conftestmodules = pm._getconftestmodules(fspath) + remove_mods = pm._conftest_plugins.difference(my_conftestmodules) + if remove_mods: + # one or more conftests are not in use at this fspath + proxy = FSHookProxy(fspath, pm, remove_mods) + else: + # all plugis are active for this fspath + proxy = self.config.hook + return proxy + + def perform_collect(self, args=None, genitems=True): + hook = self.config.hook + try: + items = self._perform_collect(args, genitems) + self.config.pluginmanager.check_pending() + hook.pytest_collection_modifyitems(session=self, + config=self.config, items=items) + finally: + hook.pytest_collection_finish(session=self) + self.testscollected = len(items) + return items + + def _perform_collect(self, args, genitems): + if args is None: + args = self.config.args + self.trace("perform_collect", self, args) + self.trace.root.indent += 1 + self._notfound = [] + self._initialpaths = set() + self._initialparts = [] + self.items = items = [] + for arg in args: + parts = self._parsearg(arg) + self._initialparts.append(parts) + self._initialpaths.add(parts[0]) + rep = collect_one_node(self) + self.ihook.pytest_collectreport(report=rep) + self.trace.root.indent -= 1 + if self._notfound: + errors = [] + for arg, exc in self._notfound: + line = 
"(no name %r in any of %r)" % (arg, exc.args[0]) + errors.append("not found: %s\n%s" % (arg, line)) + # XXX: test this + raise UsageError(*errors) + if not genitems: + return rep.result + else: + if rep.passed: + for node in rep.result: + self.items.extend(self.genitems(node)) + return items + + def collect(self): + for parts in self._initialparts: + arg = "::".join(map(str, parts)) + self.trace("processing argument", arg) + self.trace.root.indent += 1 + try: + for x in self._collect(arg): + yield x + except NoMatch: + # we are inside a make_report hook so + # we cannot directly pass through the exception + self._notfound.append((arg, sys.exc_info()[1])) + + self.trace.root.indent -= 1 + + def _collect(self, arg): + names = self._parsearg(arg) + path = names.pop(0) + if path.check(dir=1): + assert not names, "invalid arg %r" % (arg,) + for path in path.visit(fil=lambda x: x.check(file=1), + rec=self._recurse, bf=True, sort=True): + for x in self._collectfile(path): + yield x + else: + assert path.check(file=1) + for x in self.matchnodes(self._collectfile(path), names): + yield x + + def _collectfile(self, path): + ihook = self.gethookproxy(path) + if not self.isinitpath(path): + if ihook.pytest_ignore_collect(path=path, config=self.config): + return () + return ihook.pytest_collect_file(path=path, parent=self) + + def _recurse(self, path): + ihook = self.gethookproxy(path.dirpath()) + if ihook.pytest_ignore_collect(path=path, config=self.config): + return + for pat in self._norecursepatterns: + if path.check(fnmatch=pat): + return False + ihook = self.gethookproxy(path) + ihook.pytest_collect_directory(path=path, parent=self) + return True + + def _tryconvertpyarg(self, x): + """Convert a dotted module name to path. + + """ + import pkgutil + try: + loader = pkgutil.find_loader(x) + except ImportError: + return x + if loader is None: + return x + # This method is sometimes invoked when AssertionRewritingHook, which + # does not define a get_filename method, is already in place: + try: + path = loader.get_filename(x) + except AttributeError: + # Retrieve path from AssertionRewritingHook: + path = loader.modules[x][0].co_filename + if loader.is_package(x): + path = os.path.dirname(path) + return path + + def _parsearg(self, arg): + """ return (fspath, names) tuple after checking the file exists. 
""" + parts = str(arg).split("::") + if self.config.option.pyargs: + parts[0] = self._tryconvertpyarg(parts[0]) + relpath = parts[0].replace("/", os.sep) + path = self.config.invocation_dir.join(relpath, abs=True) + if not path.check(): + if self.config.option.pyargs: + raise UsageError( + "file or package not found: " + arg + + " (missing __init__.py?)") + else: + raise UsageError("file not found: " + arg) + parts[0] = path + return parts + + def matchnodes(self, matching, names): + self.trace("matchnodes", matching, names) + self.trace.root.indent += 1 + nodes = self._matchnodes(matching, names) + num = len(nodes) + self.trace("matchnodes finished -> ", num, "nodes") + self.trace.root.indent -= 1 + if num == 0: + raise NoMatch(matching, names[:1]) + return nodes + + def _matchnodes(self, matching, names): + if not matching or not names: + return matching + name = names[0] + assert name + nextnames = names[1:] + resultnodes = [] + for node in matching: + if isinstance(node, Item): + if not names: + resultnodes.append(node) + continue + assert isinstance(node, Collector) + rep = collect_one_node(node) + if rep.passed: + has_matched = False + for x in rep.result: + # TODO: remove parametrized workaround once collection structure contains parametrization + if x.name == name or x.name.split("[")[0] == name: + resultnodes.extend(self.matchnodes([x], nextnames)) + has_matched = True + # XXX accept IDs that don't have "()" for class instances + if not has_matched and len(rep.result) == 1 and x.name == "()": + nextnames.insert(0, name) + resultnodes.extend(self.matchnodes([x], nextnames)) + else: + # report collection failures here to avoid failing to run some test + # specified in the command line because the module could not be + # imported (#134) + node.ihook.pytest_collectreport(report=rep) + return resultnodes + + def genitems(self, node): + self.trace("genitems", node) + if isinstance(node, Item): + node.ihook.pytest_itemcollected(item=node) + yield node + else: + assert isinstance(node, Collector) + rep = collect_one_node(node) + if rep.passed: + for subnode in rep.result: + for x in self.genitems(subnode): + yield x + node.ihook.pytest_collectreport(report=rep) diff --git a/venv/lib/python3.5/site-packages/_pytest/mark.py b/venv/lib/python3.5/site-packages/_pytest/mark.py new file mode 100644 index 0000000..8b40a4f --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/mark.py @@ -0,0 +1,391 @@ +""" generic mechanism for marking and selecting python functions. 
""" +from __future__ import absolute_import, division, print_function + +import inspect +from collections import namedtuple +from operator import attrgetter +from .compat import imap + + +def alias(name): + return property(attrgetter(name), doc='alias for ' + name) + + +class ParameterSet(namedtuple('ParameterSet', 'values, marks, id')): + @classmethod + def param(cls, *values, **kw): + marks = kw.pop('marks', ()) + if isinstance(marks, MarkDecorator): + marks = marks, + else: + assert isinstance(marks, (tuple, list, set)) + + def param_extract_id(id=None): + return id + + id = param_extract_id(**kw) + return cls(values, marks, id) + + @classmethod + def extract_from(cls, parameterset, legacy_force_tuple=False): + """ + :param parameterset: + a legacy style parameterset that may or may not be a tuple, + and may or may not be wrapped into a mess of mark objects + + :param legacy_force_tuple: + enforce tuple wrapping so single argument tuple values + don't get decomposed and break tests + + """ + + if isinstance(parameterset, cls): + return parameterset + if not isinstance(parameterset, MarkDecorator) and legacy_force_tuple: + return cls.param(parameterset) + + newmarks = [] + argval = parameterset + while isinstance(argval, MarkDecorator): + newmarks.append(MarkDecorator(Mark( + argval.markname, argval.args[:-1], argval.kwargs))) + argval = argval.args[-1] + assert not isinstance(argval, ParameterSet) + if legacy_force_tuple: + argval = argval, + + return cls(argval, marks=newmarks, id=None) + + @property + def deprecated_arg_dict(self): + return dict((mark.name, mark) for mark in self.marks) + + +class MarkerError(Exception): + + """Error in use of a pytest marker/attribute.""" + + +def param(*values, **kw): + return ParameterSet.param(*values, **kw) + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group._addoption( + '-k', + action="store", dest="keyword", default='', metavar="EXPRESSION", + help="only run tests which match the given substring expression. " + "An expression is a python evaluatable expression " + "where all names are substring-matched against test names " + "and their parent classes. Example: -k 'test_method or test_" + "other' matches all test functions and classes whose name " + "contains 'test_method' or 'test_other'. " + "Additionally keywords are matched to classes and functions " + "containing extra names in their 'extra_keyword_matches' set, " + "as well as functions which have names assigned directly to them." + ) + + group._addoption( + "-m", + action="store", dest="markexpr", default="", metavar="MARKEXPR", + help="only run tests matching given mark expression. " + "example: -m 'mark1 and not mark2'." + ) + + group.addoption( + "--markers", action="store_true", + help="show markers (builtin, plugin and per-project ones)." 
+ ) + + parser.addini("markers", "markers for test functions", 'linelist') + + +def pytest_cmdline_main(config): + import _pytest.config + if config.option.markers: + config._do_configure() + tw = _pytest.config.create_terminal_writer(config) + for line in config.getini("markers"): + name, rest = line.split(":", 1) + tw.write("@pytest.mark.%s:" % name, bold=True) + tw.line(rest) + tw.line() + config._ensure_unconfigure() + return 0 + + +pytest_cmdline_main.tryfirst = True + + +def pytest_collection_modifyitems(items, config): + keywordexpr = config.option.keyword.lstrip() + matchexpr = config.option.markexpr + if not keywordexpr and not matchexpr: + return + # pytest used to allow "-" for negating + # but today we just allow "-" at the beginning, use "not" instead + # we probably remove "-" altogether soon + if keywordexpr.startswith("-"): + keywordexpr = "not " + keywordexpr[1:] + selectuntil = False + if keywordexpr[-1:] == ":": + selectuntil = True + keywordexpr = keywordexpr[:-1] + + remaining = [] + deselected = [] + for colitem in items: + if keywordexpr and not matchkeyword(colitem, keywordexpr): + deselected.append(colitem) + else: + if selectuntil: + keywordexpr = None + if matchexpr: + if not matchmark(colitem, matchexpr): + deselected.append(colitem) + continue + remaining.append(colitem) + + if deselected: + config.hook.pytest_deselected(items=deselected) + items[:] = remaining + + +class MarkMapping: + """Provides a local mapping for markers where item access + resolves to True if the marker is present. """ + def __init__(self, keywords): + mymarks = set() + for key, value in keywords.items(): + if isinstance(value, MarkInfo) or isinstance(value, MarkDecorator): + mymarks.add(key) + self._mymarks = mymarks + + def __getitem__(self, name): + return name in self._mymarks + + +class KeywordMapping: + """Provides a local mapping for keywords. + Given a list of names, map any substring of one of these names to True. + """ + def __init__(self, names): + self._names = names + + def __getitem__(self, subname): + for name in self._names: + if subname in name: + return True + return False + + +def matchmark(colitem, markexpr): + """Tries to match on any marker names, attached to the given colitem.""" + return eval(markexpr, {}, MarkMapping(colitem.keywords)) + + +def matchkeyword(colitem, keywordexpr): + """Tries to match given keyword expression to given collector item. + + Will match on the name of colitem, including the names of its parents. + Only matches names of items which are either a :class:`Class` or a + :class:`Function`. + Additionally, matches on names in the 'extra_keyword_matches' set of + any item, as well as names directly assigned to test functions. 
+ """ + mapped_names = set() + + # Add the names of the current item and any parent items + import pytest + for item in colitem.listchain(): + if not isinstance(item, pytest.Instance): + mapped_names.add(item.name) + + # Add the names added as extra keywords to current or parent items + for name in colitem.listextrakeywords(): + mapped_names.add(name) + + # Add the names attached to the current function through direct assignment + if hasattr(colitem, 'function'): + for name in colitem.function.__dict__: + mapped_names.add(name) + + mapping = KeywordMapping(mapped_names) + if " " not in keywordexpr: + # special case to allow for simple "-k pass" and "-k 1.3" + return mapping[keywordexpr] + elif keywordexpr.startswith("not ") and " " not in keywordexpr[4:]: + return not mapping[keywordexpr[4:]] + return eval(keywordexpr, {}, mapping) + + +def pytest_configure(config): + config._old_mark_config = MARK_GEN._config + if config.option.strict: + MARK_GEN._config = config + + +def pytest_unconfigure(config): + MARK_GEN._config = getattr(config, '_old_mark_config', None) + + +class MarkGenerator: + """ Factory for :class:`MarkDecorator` objects - exposed as + a ``pytest.mark`` singleton instance. Example:: + + import pytest + @pytest.mark.slowtest + def test_function(): + pass + + will set a 'slowtest' :class:`MarkInfo` object + on the ``test_function`` object. """ + _config = None + + + def __getattr__(self, name): + if name[0] == "_": + raise AttributeError("Marker name must NOT start with underscore") + if self._config is not None: + self._check(name) + return MarkDecorator(Mark(name, (), {})) + + def _check(self, name): + try: + if name in self._markers: + return + except AttributeError: + pass + self._markers = l = set() + for line in self._config.getini("markers"): + beginning = line.split(":", 1) + x = beginning[0].split("(", 1)[0] + l.add(x) + if name not in self._markers: + raise AttributeError("%r not a registered marker" % (name,)) + + +def istestfunc(func): + return hasattr(func, "__call__") and \ + getattr(func, "__name__", "") != "" + +class MarkDecorator: + """ A decorator for test functions and test classes. When applied + it will create :class:`MarkInfo` objects which may be + :ref:`retrieved by hooks as item keywords `. + MarkDecorator instances are often created like this:: + + mark1 = pytest.mark.NAME # simple MarkDecorator + mark2 = pytest.mark.NAME(name1=value) # parametrized MarkDecorator + + and can then be applied as decorators to test functions:: + + @mark2 + def test_function(): + pass + + When a MarkDecorator instance is called it does the following: + 1. If called with a single class as its only positional argument and no + additional keyword arguments, it attaches itself to the class so it + gets applied automatically to all test cases found in that class. + 2. If called with a single function as its only positional argument and + no additional keyword arguments, it attaches a MarkInfo object to the + function, containing all the arguments already stored internally in + the MarkDecorator. + 3. When called in any other case, it performs a 'fake construction' call, + i.e. it returns a new MarkDecorator instance with the original + MarkDecorator's content updated with the arguments passed to this + call. + + Note: The rules above prevent MarkDecorator objects from storing only a + single function or class reference as their positional argument with no + additional keyword or positional arguments. 
+ + """ + def __init__(self, mark): + assert isinstance(mark, Mark), repr(mark) + self.mark = mark + + name = alias('mark.name') + args = alias('mark.args') + kwargs = alias('mark.kwargs') + + @property + def markname(self): + return self.name # for backward-compat (2.4.1 had this attr) + + def __eq__(self, other): + return self.mark == other.mark + + def __repr__(self): + return "" % (self.mark,) + + def __call__(self, *args, **kwargs): + """ if passed a single callable argument: decorate it with mark info. + otherwise add *args/**kwargs in-place to mark information. """ + if args and not kwargs: + func = args[0] + is_class = inspect.isclass(func) + if len(args) == 1 and (istestfunc(func) or is_class): + if is_class: + if hasattr(func, 'pytestmark'): + mark_list = func.pytestmark + if not isinstance(mark_list, list): + mark_list = [mark_list] + # always work on a copy to avoid updating pytestmark + # from a superclass by accident + mark_list = mark_list + [self] + func.pytestmark = mark_list + else: + func.pytestmark = [self] + else: + holder = getattr(func, self.name, None) + if holder is None: + holder = MarkInfo(self.mark) + setattr(func, self.name, holder) + else: + holder.add_mark(self.mark) + return func + + mark = Mark(self.name, args, kwargs) + return self.__class__(self.mark.combined_with(mark)) + + + + + +class Mark(namedtuple('Mark', 'name, args, kwargs')): + + def combined_with(self, other): + assert self.name == other.name + return Mark( + self.name, self.args + other.args, + dict(self.kwargs, **other.kwargs)) + + +class MarkInfo(object): + """ Marking object created by :class:`MarkDecorator` instances. """ + def __init__(self, mark): + assert isinstance(mark, Mark), repr(mark) + self.combined = mark + self._marks = [mark] + + name = alias('combined.name') + args = alias('combined.args') + kwargs = alias('combined.kwargs') + + def __repr__(self): + return "".format(self.combined) + + def add_mark(self, mark): + """ add a MarkInfo with the given args and kwargs. """ + self._marks.append(mark) + self.combined = self.combined.combined_with(mark) + + def __iter__(self): + """ yield MarkInfo objects each relating to a marking-call. """ + return imap(MarkInfo, self._marks) + + +MARK_GEN = MarkGenerator() diff --git a/venv/lib/python3.5/site-packages/_pytest/monkeypatch.py b/venv/lib/python3.5/site-packages/_pytest/monkeypatch.py new file mode 100644 index 0000000..a70b23d --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/monkeypatch.py @@ -0,0 +1,259 @@ +""" monkeypatching and mocking functionality. """ +from __future__ import absolute_import, division, print_function + +import os +import sys +import re + +from py.builtin import _basestring +from _pytest.fixtures import fixture + +RE_IMPORT_ERROR_NAME = re.compile("^No module named (.*)$") + + +@fixture +def monkeypatch(): + """The returned ``monkeypatch`` fixture provides these + helper methods to modify objects, dictionaries or os.environ:: + + monkeypatch.setattr(obj, name, value, raising=True) + monkeypatch.delattr(obj, name, raising=True) + monkeypatch.setitem(mapping, name, value) + monkeypatch.delitem(obj, name, raising=True) + monkeypatch.setenv(name, value, prepend=False) + monkeypatch.delenv(name, value, raising=True) + monkeypatch.syspath_prepend(path) + monkeypatch.chdir(path) + + All modifications will be undone after the requesting + test function or fixture has finished. The ``raising`` + parameter determines if a KeyError or AttributeError + will be raised if the set/deletion operation has no target. 
+ """ + mpatch = MonkeyPatch() + yield mpatch + mpatch.undo() + + +def resolve(name): + # simplified from zope.dottedname + parts = name.split('.') + + used = parts.pop(0) + found = __import__(used) + for part in parts: + used += '.' + part + try: + found = getattr(found, part) + except AttributeError: + pass + else: + continue + # we use explicit un-nesting of the handling block in order + # to avoid nested exceptions on python 3 + try: + __import__(used) + except ImportError as ex: + # str is used for py2 vs py3 + expected = str(ex).split()[-1] + if expected == used: + raise + else: + raise ImportError( + 'import error in %s: %s' % (used, ex) + ) + found = annotated_getattr(found, part, used) + return found + + +def annotated_getattr(obj, name, ann): + try: + obj = getattr(obj, name) + except AttributeError: + raise AttributeError( + '%r object at %s has no attribute %r' % ( + type(obj).__name__, ann, name + ) + ) + return obj + + +def derive_importpath(import_path, raising): + if not isinstance(import_path, _basestring) or "." not in import_path: + raise TypeError("must be absolute import path string, not %r" % + (import_path,)) + module, attr = import_path.rsplit('.', 1) + target = resolve(module) + if raising: + annotated_getattr(target, attr, ann=module) + return attr, target + + +class Notset: + def __repr__(self): + return "" + + +notset = Notset() + + +class MonkeyPatch: + """ Object returned by the ``monkeypatch`` fixture keeping a record of setattr/item/env/syspath changes. + """ + + def __init__(self): + self._setattr = [] + self._setitem = [] + self._cwd = None + self._savesyspath = None + + def setattr(self, target, name, value=notset, raising=True): + """ Set attribute value on target, memorizing the old value. + By default raise AttributeError if the attribute did not exist. + + For convenience you can specify a string as ``target`` which + will be interpreted as a dotted import path, with the last part + being the attribute name. Example: + ``monkeypatch.setattr("os.getcwd", lambda x: "/")`` + would set the ``getcwd`` function of the ``os`` module. + + The ``raising`` value determines if the setattr should fail + if the attribute is not already present (defaults to True + which means it will raise). + """ + __tracebackhide__ = True + import inspect + + if value is notset: + if not isinstance(target, _basestring): + raise TypeError("use setattr(target, name, value) or " + "setattr(target, value) with target being a dotted " + "import string") + value = name + name, target = derive_importpath(target, raising) + + oldval = getattr(target, name, notset) + if raising and oldval is notset: + raise AttributeError("%r has no attribute %r" % (target, name)) + + # avoid class descriptors like staticmethod/classmethod + if inspect.isclass(target): + oldval = target.__dict__.get(name, notset) + self._setattr.append((target, name, oldval)) + setattr(target, name, value) + + def delattr(self, target, name=notset, raising=True): + """ Delete attribute ``name`` from ``target``, by default raise + AttributeError it the attribute did not previously exist. + + If no ``name`` is specified and ``target`` is a string + it will be interpreted as a dotted import path with the + last part being the attribute name. + + If ``raising`` is set to False, no exception will be raised if the + attribute is missing. 
+ """ + __tracebackhide__ = True + if name is notset: + if not isinstance(target, _basestring): + raise TypeError("use delattr(target, name) or " + "delattr(target) with target being a dotted " + "import string") + name, target = derive_importpath(target, raising) + + if not hasattr(target, name): + if raising: + raise AttributeError(name) + else: + self._setattr.append((target, name, getattr(target, name, notset))) + delattr(target, name) + + def setitem(self, dic, name, value): + """ Set dictionary entry ``name`` to value. """ + self._setitem.append((dic, name, dic.get(name, notset))) + dic[name] = value + + def delitem(self, dic, name, raising=True): + """ Delete ``name`` from dict. Raise KeyError if it doesn't exist. + + If ``raising`` is set to False, no exception will be raised if the + key is missing. + """ + if name not in dic: + if raising: + raise KeyError(name) + else: + self._setitem.append((dic, name, dic.get(name, notset))) + del dic[name] + + def setenv(self, name, value, prepend=None): + """ Set environment variable ``name`` to ``value``. If ``prepend`` + is a character, read the current environment variable value + and prepend the ``value`` adjoined with the ``prepend`` character.""" + value = str(value) + if prepend and name in os.environ: + value = value + prepend + os.environ[name] + self.setitem(os.environ, name, value) + + def delenv(self, name, raising=True): + """ Delete ``name`` from the environment. Raise KeyError it does not + exist. + + If ``raising`` is set to False, no exception will be raised if the + environment variable is missing. + """ + self.delitem(os.environ, name, raising=raising) + + def syspath_prepend(self, path): + """ Prepend ``path`` to ``sys.path`` list of import locations. """ + if self._savesyspath is None: + self._savesyspath = sys.path[:] + sys.path.insert(0, str(path)) + + def chdir(self, path): + """ Change the current working directory to the specified path. + Path can be a string or a py.path.local object. + """ + if self._cwd is None: + self._cwd = os.getcwd() + if hasattr(path, "chdir"): + path.chdir() + else: + os.chdir(path) + + def undo(self): + """ Undo previous changes. This call consumes the + undo stack. Calling it a second time has no effect unless + you do more monkeypatching after the undo call. + + There is generally no need to call `undo()`, since it is + called automatically during tear-down. + + Note that the same `monkeypatch` fixture is used across a + single test function invocation. If `monkeypatch` is used both by + the test function itself and one of the test fixtures, + calling `undo()` will undo all of the changes made in + both functions. + """ + for obj, name, value in reversed(self._setattr): + if value is not notset: + setattr(obj, name, value) + else: + delattr(obj, name) + self._setattr[:] = [] + for dictionary, name, value in reversed(self._setitem): + if value is notset: + try: + del dictionary[name] + except KeyError: + pass # was already deleted, so we have the desired state + else: + dictionary[name] = value + self._setitem[:] = [] + if self._savesyspath is not None: + sys.path[:] = self._savesyspath + self._savesyspath = None + + if self._cwd is not None: + os.chdir(self._cwd) + self._cwd = None diff --git a/venv/lib/python3.5/site-packages/_pytest/nose.py b/venv/lib/python3.5/site-packages/_pytest/nose.py new file mode 100644 index 0000000..9d4fc0b --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/nose.py @@ -0,0 +1,72 @@ +""" run test suites written for nose. 
""" +from __future__ import absolute_import, division, print_function + +import sys + +import py +from _pytest import unittest, runner, python +from _pytest.config import hookimpl + + +def get_skip_exceptions(): + skip_classes = set() + for module_name in ('unittest', 'unittest2', 'nose'): + mod = sys.modules.get(module_name) + if hasattr(mod, 'SkipTest'): + skip_classes.add(mod.SkipTest) + return tuple(skip_classes) + + +def pytest_runtest_makereport(item, call): + if call.excinfo and call.excinfo.errisinstance(get_skip_exceptions()): + # let's substitute the excinfo with a pytest.skip one + call2 = call.__class__( + lambda: runner.skip(str(call.excinfo.value)), call.when) + call.excinfo = call2.excinfo + + +@hookimpl(trylast=True) +def pytest_runtest_setup(item): + if is_potential_nosetest(item): + if isinstance(item.parent, python.Generator): + gen = item.parent + if not hasattr(gen, '_nosegensetup'): + call_optional(gen.obj, 'setup') + if isinstance(gen.parent, python.Instance): + call_optional(gen.parent.obj, 'setup') + gen._nosegensetup = True + if not call_optional(item.obj, 'setup'): + # call module level setup if there is no object level one + call_optional(item.parent.obj, 'setup') + #XXX this implies we only call teardown when setup worked + item.session._setupstate.addfinalizer((lambda: teardown_nose(item)), item) + +def teardown_nose(item): + if is_potential_nosetest(item): + if not call_optional(item.obj, 'teardown'): + call_optional(item.parent.obj, 'teardown') + #if hasattr(item.parent, '_nosegensetup'): + # #call_optional(item._nosegensetup, 'teardown') + # del item.parent._nosegensetup + + +def pytest_make_collect_report(collector): + if isinstance(collector, python.Generator): + call_optional(collector.obj, 'setup') + + +def is_potential_nosetest(item): + # extra check needed since we do not do nose style setup/teardown + # on direct unittest style classes + return isinstance(item, python.Function) and \ + not isinstance(item, unittest.TestCaseFunction) + + +def call_optional(obj, name): + method = getattr(obj, name, None) + isfixture = hasattr(method, "_pytestfixturefunction") + if method is not None and not isfixture and py.builtin.callable(method): + # If there's any problems allow the exception to raise rather than + # silently ignoring them + method() + return True diff --git a/venv/lib/python3.5/site-packages/_pytest/pastebin.py b/venv/lib/python3.5/site-packages/_pytest/pastebin.py new file mode 100644 index 0000000..6f3ce8f --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/pastebin.py @@ -0,0 +1,100 @@ +""" submit failure or test session information to a pastebin service. 
""" +from __future__ import absolute_import, division, print_function + +import pytest +import sys +import tempfile + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting") + group._addoption('--pastebin', metavar="mode", + action='store', dest="pastebin", default=None, + choices=['failed', 'all'], + help="send failed|all info to bpaste.net pastebin service.") + + +@pytest.hookimpl(trylast=True) +def pytest_configure(config): + import py + if config.option.pastebin == "all": + tr = config.pluginmanager.getplugin('terminalreporter') + # if no terminal reporter plugin is present, nothing we can do here; + # this can happen when this function executes in a slave node + # when using pytest-xdist, for example + if tr is not None: + # pastebin file will be utf-8 encoded binary file + config._pastebinfile = tempfile.TemporaryFile('w+b') + oldwrite = tr._tw.write + + def tee_write(s, **kwargs): + oldwrite(s, **kwargs) + if py.builtin._istext(s): + s = s.encode('utf-8') + config._pastebinfile.write(s) + + tr._tw.write = tee_write + + +def pytest_unconfigure(config): + if hasattr(config, '_pastebinfile'): + # get terminal contents and delete file + config._pastebinfile.seek(0) + sessionlog = config._pastebinfile.read() + config._pastebinfile.close() + del config._pastebinfile + # undo our patching in the terminal reporter + tr = config.pluginmanager.getplugin('terminalreporter') + del tr._tw.__dict__['write'] + # write summary + tr.write_sep("=", "Sending information to Paste Service") + pastebinurl = create_new_paste(sessionlog) + tr.write_line("pastebin session-log: %s\n" % pastebinurl) + + +def create_new_paste(contents): + """ + Creates a new paste using bpaste.net service. + + :contents: paste contents as utf-8 encoded bytes + :returns: url to the pasted contents + """ + import re + if sys.version_info < (3, 0): + from urllib import urlopen, urlencode + else: + from urllib.request import urlopen + from urllib.parse import urlencode + + params = { + 'code': contents, + 'lexer': 'python3' if sys.version_info[0] == 3 else 'python', + 'expiry': '1week', + } + url = 'https://bpaste.net' + response = urlopen(url, data=urlencode(params).encode('ascii')).read() + m = re.search(r'href="/raw/(\w+)"', response.decode('utf-8')) + if m: + return '%s/show/%s' % (url, m.group(1)) + else: + return 'bad response: ' + response + + +def pytest_terminal_summary(terminalreporter): + import _pytest.config + if terminalreporter.config.option.pastebin != "failed": + return + tr = terminalreporter + if 'failed' in tr.stats: + terminalreporter.write_sep("=", "Sending information to Paste Service") + for rep in terminalreporter.stats.get('failed'): + try: + msg = rep.longrepr.reprtraceback.reprentries[-1].reprfileloc + except AttributeError: + msg = tr._getfailureheadline(rep) + tw = _pytest.config.create_terminal_writer(terminalreporter.config, stringio=True) + rep.toterminal(tw) + s = tw.stringio.getvalue() + assert len(s) + pastebinurl = create_new_paste(s) + tr.write_line("%s --> %s" %(msg, pastebinurl)) diff --git a/venv/lib/python3.5/site-packages/_pytest/pytester.py b/venv/lib/python3.5/site-packages/_pytest/pytester.py new file mode 100644 index 0000000..901caa3 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/pytester.py @@ -0,0 +1,1151 @@ +""" (disabled by default) support for testing pytest and pytest plugins. 
""" +from __future__ import absolute_import, division, print_function + +import codecs +import gc +import os +import platform +import re +import subprocess +import sys +import time +import traceback +from fnmatch import fnmatch + +from weakref import WeakKeyDictionary + +from _pytest.capture import MultiCapture, SysCapture +from _pytest._code import Source +import py +import pytest +from _pytest.main import Session, EXIT_OK +from _pytest.assertion.rewrite import AssertionRewritingHook + + +def pytest_addoption(parser): + # group = parser.getgroup("pytester", "pytester (self-tests) options") + parser.addoption('--lsof', + action="store_true", dest="lsof", default=False, + help=("run FD checks if lsof is available")) + + parser.addoption('--runpytest', default="inprocess", dest="runpytest", + choices=("inprocess", "subprocess", ), + help=("run pytest sub runs in tests using an 'inprocess' " + "or 'subprocess' (python -m main) method")) + + +def pytest_configure(config): + # This might be called multiple times. Only take the first. + global _pytest_fullpath + try: + _pytest_fullpath + except NameError: + _pytest_fullpath = os.path.abspath(pytest.__file__.rstrip("oc")) + _pytest_fullpath = _pytest_fullpath.replace("$py.class", ".py") + + if config.getvalue("lsof"): + checker = LsofFdLeakChecker() + if checker.matching_platform(): + config.pluginmanager.register(checker) + + +class LsofFdLeakChecker(object): + def get_open_files(self): + out = self._exec_lsof() + open_files = self._parse_lsof_output(out) + return open_files + + def _exec_lsof(self): + pid = os.getpid() + return py.process.cmdexec("lsof -Ffn0 -p %d" % pid) + + def _parse_lsof_output(self, out): + def isopen(line): + return line.startswith('f') and ("deleted" not in line and + 'mem' not in line and "txt" not in line and 'cwd' not in line) + + open_files = [] + + for line in out.split("\n"): + if isopen(line): + fields = line.split('\0') + fd = fields[0][1:] + filename = fields[1][1:] + if filename.startswith('/'): + open_files.append((fd, filename)) + + return open_files + + def matching_platform(self): + try: + py.process.cmdexec("lsof -v") + except (py.process.cmdexec.Error, UnicodeDecodeError): + # cmdexec may raise UnicodeDecodeError on Windows systems + # with locale other than english: + # https://bitbucket.org/pytest-dev/py/issues/66 + return False + else: + return True + + @pytest.hookimpl(hookwrapper=True, tryfirst=True) + def pytest_runtest_protocol(self, item): + lines1 = self.get_open_files() + yield + if hasattr(sys, "pypy_version_info"): + gc.collect() + lines2 = self.get_open_files() + + new_fds = set([t[0] for t in lines2]) - set([t[0] for t in lines1]) + leaked_files = [t for t in lines2 if t[0] in new_fds] + if leaked_files: + error = [] + error.append("***** %s FD leakage detected" % len(leaked_files)) + error.extend([str(f) for f in leaked_files]) + error.append("*** Before:") + error.extend([str(f) for f in lines1]) + error.append("*** After:") + error.extend([str(f) for f in lines2]) + error.append(error[0]) + error.append("*** function %s:%s: %s " % item.location) + error.append("See issue #2366") + item.warn('', "\n".join(error)) + + +# XXX copied from execnet's conftest.py - needs to be merged +winpymap = { + 'python2.7': r'C:\Python27\python.exe', + 'python2.6': r'C:\Python26\python.exe', + 'python3.1': r'C:\Python31\python.exe', + 'python3.2': r'C:\Python32\python.exe', + 'python3.3': r'C:\Python33\python.exe', + 'python3.4': r'C:\Python34\python.exe', + 'python3.5': r'C:\Python35\python.exe', +} + +def 
getexecutable(name, cache={}):
+    try:
+        return cache[name]
+    except KeyError:
+        executable = py.path.local.sysfind(name)
+        if executable:
+            import subprocess
+            popen = subprocess.Popen([str(executable), "--version"],
+                                     universal_newlines=True,
+                                     stderr=subprocess.PIPE)
+            out, err = popen.communicate()
+            if name == "jython":
+                if not err or "2.5" not in err:
+                    executable = None
+                if "2.5.2" in err:
+                    executable = None  # http://bugs.jython.org/issue1790
+            elif popen.returncode != 0:
+                # Handle pyenv's 127.
+                executable = None
+        cache[name] = executable
+        return executable
+
+@pytest.fixture(params=['python2.6', 'python2.7', 'python3.3', "python3.4",
+                        'pypy', 'pypy3'])
+def anypython(request):
+    name = request.param
+    executable = getexecutable(name)
+    if executable is None:
+        if sys.platform == "win32":
+            executable = winpymap.get(name, None)
+            if executable:
+                executable = py.path.local(executable)
+                if executable.check():
+                    return executable
+        pytest.skip("no suitable %s found" % (name,))
+    return executable
+
+# used at least by pytest-xdist plugin
+@pytest.fixture
+def _pytest(request):
+    """ Return a helper which offers a gethookrecorder(hook)
+    method which returns a HookRecorder instance which helps
+    to make assertions about called hooks.
+    """
+    return PytestArg(request)
+
+class PytestArg:
+    def __init__(self, request):
+        self.request = request
+
+    def gethookrecorder(self, hook):
+        hookrecorder = HookRecorder(hook._pm)
+        self.request.addfinalizer(hookrecorder.finish_recording)
+        return hookrecorder
+
+
+def get_public_names(l):
+    """Only return names from iterator l without a leading underscore."""
+    return [x for x in l if x[0] != "_"]
+
+
+class ParsedCall:
+    def __init__(self, name, kwargs):
+        self.__dict__.update(kwargs)
+        self._name = name
+
+    def __repr__(self):
+        d = self.__dict__.copy()
+        del d['_name']
+        return "<ParsedCall %r(**%r)>" % (self._name, d)
+
+
+class HookRecorder:
+    """Record all hooks called in a plugin manager.
+
+    This wraps all the hook calls in the plugin manager, recording
+    each call before propagating the normal calls.
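+
+    An illustrative sketch (``pm`` is an assumed live plugin manager)::
+
+        rec = HookRecorder(pm)
+        # ... run pytest code that fires hooks ...
+        report = rec.popcall("pytest_collectreport")
+        rec.finish_recording()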
+ + """ + + def __init__(self, pluginmanager): + self._pluginmanager = pluginmanager + self.calls = [] + + def before(hook_name, hook_impls, kwargs): + self.calls.append(ParsedCall(hook_name, kwargs)) + + def after(outcome, hook_name, hook_impls, kwargs): + pass + + self._undo_wrapping = pluginmanager.add_hookcall_monitoring(before, after) + + def finish_recording(self): + self._undo_wrapping() + + def getcalls(self, names): + if isinstance(names, str): + names = names.split() + return [call for call in self.calls if call._name in names] + + def assert_contains(self, entries): + __tracebackhide__ = True + i = 0 + entries = list(entries) + backlocals = sys._getframe(1).f_locals + while entries: + name, check = entries.pop(0) + for ind, call in enumerate(self.calls[i:]): + if call._name == name: + print("NAMEMATCH", name, call) + if eval(check, backlocals, call.__dict__): + print("CHECKERMATCH", repr(check), "->", call) + else: + print("NOCHECKERMATCH", repr(check), "-", call) + continue + i += ind + 1 + break + print("NONAMEMATCH", name, "with", call) + else: + pytest.fail("could not find %r check %r" % (name, check)) + + def popcall(self, name): + __tracebackhide__ = True + for i, call in enumerate(self.calls): + if call._name == name: + del self.calls[i] + return call + lines = ["could not find call %r, in:" % (name,)] + lines.extend([" %s" % str(x) for x in self.calls]) + pytest.fail("\n".join(lines)) + + def getcall(self, name): + l = self.getcalls(name) + assert len(l) == 1, (name, l) + return l[0] + + # functionality for test reports + + def getreports(self, + names="pytest_runtest_logreport pytest_collectreport"): + return [x.report for x in self.getcalls(names)] + + def matchreport(self, inamepart="", + names="pytest_runtest_logreport pytest_collectreport", when=None): + """ return a testreport whose dotted import path matches """ + l = [] + for rep in self.getreports(names=names): + try: + if not when and rep.when != "call" and rep.passed: + # setup/teardown passing reports - let's ignore those + continue + except AttributeError: + pass + if when and getattr(rep, 'when', None) != when: + continue + if not inamepart or inamepart in rep.nodeid.split("::"): + l.append(rep) + if not l: + raise ValueError("could not find test report matching %r: " + "no test reports at all!" 
% (inamepart,))
+        if len(l) > 1:
+            raise ValueError(
+                "found 2 or more testreports matching %r: %s" % (inamepart, l))
+        return l[0]
+
+    def getfailures(self,
+                    names='pytest_runtest_logreport pytest_collectreport'):
+        return [rep for rep in self.getreports(names) if rep.failed]
+
+    def getfailedcollections(self):
+        return self.getfailures('pytest_collectreport')
+
+    def listoutcomes(self):
+        passed = []
+        skipped = []
+        failed = []
+        for rep in self.getreports(
+                "pytest_collectreport pytest_runtest_logreport"):
+            if rep.passed:
+                if getattr(rep, "when", None) == "call":
+                    passed.append(rep)
+            elif rep.skipped:
+                skipped.append(rep)
+            elif rep.failed:
+                failed.append(rep)
+        return passed, skipped, failed
+
+    def countoutcomes(self):
+        return [len(x) for x in self.listoutcomes()]
+
+    def assertoutcome(self, passed=0, skipped=0, failed=0):
+        realpassed, realskipped, realfailed = self.listoutcomes()
+        assert passed == len(realpassed)
+        assert skipped == len(realskipped)
+        assert failed == len(realfailed)
+
+    def clear(self):
+        self.calls[:] = []
+
+
+@pytest.fixture
+def linecomp(request):
+    return LineComp()
+
+
+@pytest.fixture(name='LineMatcher')
+def LineMatcher_fixture(request):
+    return LineMatcher
+
+
+@pytest.fixture
+def testdir(request, tmpdir_factory):
+    return Testdir(request, tmpdir_factory)
+
+
+rex_outcome = re.compile(r"(\d+) ([\w-]+)")
+
+
+class RunResult:
+    """The result of running a command.
+
+    Attributes:
+
+    :ret: The return value.
+    :outlines: List of lines captured from stdout.
+    :errlines: List of lines captured from stderr.
+    :stdout: :py:class:`LineMatcher` of stdout, use ``stdout.str()`` to
+       reconstruct stdout or the commonly used
+       ``stdout.fnmatch_lines()`` method.
+    :stderr: :py:class:`LineMatcher` of stderr.
+    :duration: Duration in seconds.
+
+    """
+    def __init__(self, ret, outlines, errlines, duration):
+        self.ret = ret
+        self.outlines = outlines
+        self.errlines = errlines
+        self.stdout = LineMatcher(outlines)
+        self.stderr = LineMatcher(errlines)
+        self.duration = duration
+
+    def parseoutcomes(self):
+        """ Return a dictionary of outcomestring->num from parsing
+        the terminal output that the test process produced."""
+        for line in reversed(self.outlines):
+            if 'seconds' in line:
+                outcomes = rex_outcome.findall(line)
+                if outcomes:
+                    d = {}
+                    for num, cat in outcomes:
+                        d[cat] = int(num)
+                    return d
+        raise ValueError("Pytest terminal report not found")
+
+    def assert_outcomes(self, passed=0, skipped=0, failed=0):
+        """ assert that the specified outcomes appear with the respective
+        numbers (0 means it didn't occur) in the text output from a test run."""
+        d = self.parseoutcomes()
+        assert passed == d.get("passed", 0)
+        assert skipped == d.get("skipped", 0)
+        assert failed == d.get("failed", 0)
+
+
+class Testdir:
+    """Temporary test directory with tools to test/run pytest itself.
+
+    This is based on the ``tmpdir`` fixture but provides a number of
+    methods which aid with testing pytest itself.  Unless
+    :py:meth:`chdir` is used all methods will use :py:attr:`tmpdir` as
+    current working directory.
+
+    Attributes:
+
+    :tmpdir: The :py:class:`py.path.local` instance of the temporary
+       directory.
+
+    :plugins: A list of plugins to use with :py:meth:`parseconfig` and
+       :py:meth:`runpytest`.  Initially this is an empty list but
+       plugins can be added to the list.  The type of items to add to
+       the list depend on the method which uses them so refer to them
+       for details.
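+
+    An illustrative sketch of typical use from a plugin test suite::
+
+        def test_example(testdir):
+            testdir.makepyfile("def test_pass(): pass")
+            result = testdir.runpytest()
+            result.assert_outcomes(passed=1)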
+ + """ + + def __init__(self, request, tmpdir_factory): + self.request = request + self._mod_collections = WeakKeyDictionary() + # XXX remove duplication with tmpdir plugin + basetmp = tmpdir_factory.ensuretemp("testdir") + name = request.function.__name__ + for i in range(100): + try: + tmpdir = basetmp.mkdir(name + str(i)) + except py.error.EEXIST: + continue + break + self.tmpdir = tmpdir + self.plugins = [] + self._savesyspath = (list(sys.path), list(sys.meta_path)) + self._savemodulekeys = set(sys.modules) + self.chdir() # always chdir + self.request.addfinalizer(self.finalize) + method = self.request.config.getoption("--runpytest") + if method == "inprocess": + self._runpytest_method = self.runpytest_inprocess + elif method == "subprocess": + self._runpytest_method = self.runpytest_subprocess + + def __repr__(self): + return "" % (self.tmpdir,) + + def finalize(self): + """Clean up global state artifacts. + + Some methods modify the global interpreter state and this + tries to clean this up. It does not remove the temporary + directory however so it can be looked at after the test run + has finished. + + """ + sys.path[:], sys.meta_path[:] = self._savesyspath + if hasattr(self, '_olddir'): + self._olddir.chdir() + self.delete_loaded_modules() + + def delete_loaded_modules(self): + """Delete modules that have been loaded during a test. + + This allows the interpreter to catch module changes in case + the module is re-imported. + """ + for name in set(sys.modules).difference(self._savemodulekeys): + # some zope modules used by twisted-related tests keeps internal + # state and can't be deleted; we had some trouble in the past + # with zope.interface for example + if not name.startswith("zope"): + del sys.modules[name] + + def make_hook_recorder(self, pluginmanager): + """Create a new :py:class:`HookRecorder` for a PluginManager.""" + assert not hasattr(pluginmanager, "reprec") + pluginmanager.reprec = reprec = HookRecorder(pluginmanager) + self.request.addfinalizer(reprec.finish_recording) + return reprec + + def chdir(self): + """Cd into the temporary directory. + + This is done automatically upon instantiation. + + """ + old = self.tmpdir.chdir() + if not hasattr(self, '_olddir'): + self._olddir = old + + def _makefile(self, ext, args, kwargs, encoding="utf-8"): + items = list(kwargs.items()) + if args: + source = py.builtin._totext("\n").join( + map(py.builtin._totext, args)) + py.builtin._totext("\n") + basename = self.request.function.__name__ + items.insert(0, (basename, source)) + ret = None + for name, value in items: + p = self.tmpdir.join(name).new(ext=ext) + p.dirpath().ensure_dir() + source = Source(value) + + def my_totext(s, encoding="utf-8"): + if py.builtin._isbytes(s): + s = py.builtin._totext(s, encoding=encoding) + return s + + source_unicode = "\n".join([my_totext(line) for line in source.lines]) + source = py.builtin._totext(source_unicode) + content = source.strip().encode(encoding) # + "\n" + #content = content.rstrip() + "\n" + p.write(content, "wb") + if ret is None: + ret = p + return ret + + def makefile(self, ext, *args, **kwargs): + """Create a new file in the testdir. + + ext: The extension the file should use, including the dot. + E.g. ".py". + + args: All args will be treated as strings and joined using + newlines. The result will be written as contents to the + file. The name of the file will be based on the test + function requesting this fixture. + E.g. 
"testdir.makefile('.txt', 'line1', 'line2')" + + kwargs: Each keyword is the name of a file, while the value of + it will be written as contents of the file. + E.g. "testdir.makefile('.ini', pytest='[pytest]\naddopts=-rs\n')" + + """ + return self._makefile(ext, args, kwargs) + + def makeconftest(self, source): + """Write a contest.py file with 'source' as contents.""" + return self.makepyfile(conftest=source) + + def makeini(self, source): + """Write a tox.ini file with 'source' as contents.""" + return self.makefile('.ini', tox=source) + + def getinicfg(self, source): + """Return the pytest section from the tox.ini config file.""" + p = self.makeini(source) + return py.iniconfig.IniConfig(p)['pytest'] + + def makepyfile(self, *args, **kwargs): + """Shortcut for .makefile() with a .py extension.""" + return self._makefile('.py', args, kwargs) + + def maketxtfile(self, *args, **kwargs): + """Shortcut for .makefile() with a .txt extension.""" + return self._makefile('.txt', args, kwargs) + + def syspathinsert(self, path=None): + """Prepend a directory to sys.path, defaults to :py:attr:`tmpdir`. + + This is undone automatically after the test. + """ + if path is None: + path = self.tmpdir + sys.path.insert(0, str(path)) + # a call to syspathinsert() usually means that the caller + # wants to import some dynamically created files. + # with python3 we thus invalidate import caches. + self._possibly_invalidate_import_caches() + + def _possibly_invalidate_import_caches(self): + # invalidate caches if we can (py33 and above) + try: + import importlib + except ImportError: + pass + else: + if hasattr(importlib, "invalidate_caches"): + importlib.invalidate_caches() + + def mkdir(self, name): + """Create a new (sub)directory.""" + return self.tmpdir.mkdir(name) + + def mkpydir(self, name): + """Create a new python package. + + This creates a (sub)directory with an empty ``__init__.py`` + file so that is recognised as a python package. + + """ + p = self.mkdir(name) + p.ensure("__init__.py") + return p + + Session = Session + def getnode(self, config, arg): + """Return the collection node of a file. + + :param config: :py:class:`_pytest.config.Config` instance, see + :py:meth:`parseconfig` and :py:meth:`parseconfigure` to + create the configuration. + + :param arg: A :py:class:`py.path.local` instance of the file. + + """ + session = Session(config) + assert '::' not in str(arg) + p = py.path.local(arg) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([str(p)], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK) + return res + + def getpathnode(self, path): + """Return the collection node of a file. + + This is like :py:meth:`getnode` but uses + :py:meth:`parseconfigure` to create the (configured) pytest + Config instance. + + :param path: A :py:class:`py.path.local` instance of the file. + + """ + config = self.parseconfigure(path) + session = Session(config) + x = session.fspath.bestrelpath(path) + config.hook.pytest_sessionstart(session=session) + res = session.perform_collect([x], genitems=False)[0] + config.hook.pytest_sessionfinish(session=session, exitstatus=EXIT_OK) + return res + + def genitems(self, colitems): + """Generate all test items from a collection node. + + This recurses into the collection node and returns a list of + all the test items contained within. 
+ + """ + session = colitems[0].session + result = [] + for colitem in colitems: + result.extend(session.genitems(colitem)) + return result + + def runitem(self, source): + """Run the "test_func" Item. + + The calling test instance (the class which contains the test + method) must provide a ``.getrunner()`` method which should + return a runner which can run the test protocol for a single + item, like e.g. :py:func:`_pytest.runner.runtestprotocol`. + + """ + # used from runner functional tests + item = self.getitem(source) + # the test class where we are called from wants to provide the runner + testclassinstance = self.request.instance + runner = testclassinstance.getrunner() + return runner(item) + + def inline_runsource(self, source, *cmdlineargs): + """Run a test module in process using ``pytest.main()``. + + This run writes "source" into a temporary file and runs + ``pytest.main()`` on it, returning a :py:class:`HookRecorder` + instance for the result. + + :param source: The source code of the test module. + + :param cmdlineargs: Any extra command line arguments to use. + + :return: :py:class:`HookRecorder` instance of the result. + + """ + p = self.makepyfile(source) + l = list(cmdlineargs) + [p] + return self.inline_run(*l) + + def inline_genitems(self, *args): + """Run ``pytest.main(['--collectonly'])`` in-process. + + Returns a tuple of the collected items and a + :py:class:`HookRecorder` instance. + + This runs the :py:func:`pytest.main` function to run all of + pytest inside the test process itself like + :py:meth:`inline_run`. However the return value is a tuple of + the collection items and a :py:class:`HookRecorder` instance. + + """ + rec = self.inline_run("--collect-only", *args) + items = [x.item for x in rec.getcalls("pytest_itemcollected")] + return items, rec + + def inline_run(self, *args, **kwargs): + """Run ``pytest.main()`` in-process, returning a HookRecorder. + + This runs the :py:func:`pytest.main` function to run all of + pytest inside the test process itself. This means it can + return a :py:class:`HookRecorder` instance which gives more + detailed results from then run then can be done by matching + stdout/stderr from :py:meth:`runpytest`. + + :param args: Any command line arguments to pass to + :py:func:`pytest.main`. + + :param plugin: (keyword-only) Extra plugin instances the + ``pytest.main()`` instance should use. + + :return: A :py:class:`HookRecorder` instance. + """ + # When running py.test inline any plugins active in the main + # test process are already imported. So this disables the + # warning which will trigger to say they can no longer be + # re-written, which is fine as they are already re-written. 
+ orig_warn = AssertionRewritingHook._warn_already_imported + + def revert(): + AssertionRewritingHook._warn_already_imported = orig_warn + + self.request.addfinalizer(revert) + AssertionRewritingHook._warn_already_imported = lambda *a: None + + rec = [] + + class Collect: + def pytest_configure(x, config): + rec.append(self.make_hook_recorder(config.pluginmanager)) + + plugins = kwargs.get("plugins") or [] + plugins.append(Collect()) + ret = pytest.main(list(args), plugins=plugins) + self.delete_loaded_modules() + if len(rec) == 1: + reprec = rec.pop() + else: + class reprec: + pass + reprec.ret = ret + + # typically we reraise keyboard interrupts from the child run + # because it's our user requesting interruption of the testing + if ret == 2 and not kwargs.get("no_reraise_ctrlc"): + calls = reprec.getcalls("pytest_keyboard_interrupt") + if calls and calls[-1].excinfo.type == KeyboardInterrupt: + raise KeyboardInterrupt() + return reprec + + def runpytest_inprocess(self, *args, **kwargs): + """ Return result of running pytest in-process, providing a similar + interface to what self.runpytest() provides. """ + if kwargs.get("syspathinsert"): + self.syspathinsert() + now = time.time() + capture = MultiCapture(Capture=SysCapture) + capture.start_capturing() + try: + try: + reprec = self.inline_run(*args, **kwargs) + except SystemExit as e: + + class reprec: + ret = e.args[0] + + except Exception: + traceback.print_exc() + + class reprec: + ret = 3 + finally: + out, err = capture.readouterr() + capture.stop_capturing() + sys.stdout.write(out) + sys.stderr.write(err) + + res = RunResult(reprec.ret, + out.split("\n"), err.split("\n"), + time.time()-now) + res.reprec = reprec + return res + + def runpytest(self, *args, **kwargs): + """ Run pytest inline or in a subprocess, depending on the command line + option "--runpytest" and return a :py:class:`RunResult`. + + """ + args = self._ensure_basetemp(args) + return self._runpytest_method(*args, **kwargs) + + def _ensure_basetemp(self, args): + args = [str(x) for x in args] + for x in args: + if str(x).startswith('--basetemp'): + #print ("basedtemp exists: %s" %(args,)) + break + else: + args.append("--basetemp=%s" % self.tmpdir.dirpath('basetemp')) + #print ("added basetemp: %s" %(args,)) + return args + + def parseconfig(self, *args): + """Return a new pytest Config instance from given commandline args. + + This invokes the pytest bootstrapping code in _pytest.config + to create a new :py:class:`_pytest.core.PluginManager` and + call the pytest_cmdline_parse hook to create new + :py:class:`_pytest.config.Config` instance. + + If :py:attr:`plugins` has been populated they should be plugin + modules which will be registered with the PluginManager. + + """ + args = self._ensure_basetemp(args) + + import _pytest.config + config = _pytest.config._prepareconfig(args, self.plugins) + # we don't know what the test will do with this half-setup config + # object and thus we make sure it gets unconfigured properly in any + # case (otherwise capturing could still be active, for example) + self.request.addfinalizer(config._ensure_unconfigure) + return config + + def parseconfigure(self, *args): + """Return a new pytest configured Config instance. + + This returns a new :py:class:`_pytest.config.Config` instance + like :py:meth:`parseconfig`, but also calls the + pytest_configure hook. 
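+
+        Illustrative (``-v`` ends up recorded on the option object)::
+
+            config = testdir.parseconfigure("-v")
+            assert config.option.verbose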
+ + """ + config = self.parseconfig(*args) + config._do_configure() + self.request.addfinalizer(config._ensure_unconfigure) + return config + + def getitem(self, source, funcname="test_func"): + """Return the test item for a test function. + + This writes the source to a python file and runs pytest's + collection on the resulting module, returning the test item + for the requested function name. + + :param source: The module source. + + :param funcname: The name of the test function for which the + Item must be returned. + + """ + items = self.getitems(source) + for item in items: + if item.name == funcname: + return item + assert 0, "%r item not found in module:\n%s\nitems: %s" %( + funcname, source, items) + + def getitems(self, source): + """Return all test items collected from the module. + + This writes the source to a python file and runs pytest's + collection on the resulting module, returning all test items + contained within. + + """ + modcol = self.getmodulecol(source) + return self.genitems([modcol]) + + def getmodulecol(self, source, configargs=(), withinit=False): + """Return the module collection node for ``source``. + + This writes ``source`` to a file using :py:meth:`makepyfile` + and then runs the pytest collection on it, returning the + collection node for the test module. + + :param source: The source code of the module to collect. + + :param configargs: Any extra arguments to pass to + :py:meth:`parseconfigure`. + + :param withinit: Whether to also write a ``__init__.py`` file + to the temporary directory to ensure it is a package. + + """ + kw = {self.request.function.__name__: Source(source).strip()} + path = self.makepyfile(**kw) + if withinit: + self.makepyfile(__init__ = "#") + self.config = config = self.parseconfigure(path, *configargs) + node = self.getnode(config, path) + + return node + + def collect_by_name(self, modcol, name): + """Return the collection node for name from the module collection. + + This will search a module collection node for a collection + node matching the given name. + + :param modcol: A module collection node, see + :py:meth:`getmodulecol`. + + :param name: The name of the node to return. + + """ + if modcol not in self._mod_collections: + self._mod_collections[modcol] = list(modcol.collect()) + for colitem in self._mod_collections[modcol]: + if colitem.name == name: + return colitem + + def popen(self, cmdargs, stdout, stderr, **kw): + """Invoke subprocess.Popen. + + This calls subprocess.Popen making sure the current working + directory is the PYTHONPATH. + + You probably want to use :py:meth:`run` instead. + + """ + env = os.environ.copy() + env['PYTHONPATH'] = os.pathsep.join(filter(None, [ + str(os.getcwd()), env.get('PYTHONPATH', '')])) + kw['env'] = env + return subprocess.Popen(cmdargs, + stdout=stdout, stderr=stderr, **kw) + + def run(self, *cmdargs): + """Run a command with arguments. + + Run a process using subprocess.Popen saving the stdout and + stderr. + + Returns a :py:class:`RunResult`. 
+ + """ + return self._run(*cmdargs) + + def _run(self, *cmdargs): + cmdargs = [str(x) for x in cmdargs] + p1 = self.tmpdir.join("stdout") + p2 = self.tmpdir.join("stderr") + print("running:", ' '.join(cmdargs)) + print(" in:", str(py.path.local())) + f1 = codecs.open(str(p1), "w", encoding="utf8") + f2 = codecs.open(str(p2), "w", encoding="utf8") + try: + now = time.time() + popen = self.popen(cmdargs, stdout=f1, stderr=f2, + close_fds=(sys.platform != "win32")) + ret = popen.wait() + finally: + f1.close() + f2.close() + f1 = codecs.open(str(p1), "r", encoding="utf8") + f2 = codecs.open(str(p2), "r", encoding="utf8") + try: + out = f1.read().splitlines() + err = f2.read().splitlines() + finally: + f1.close() + f2.close() + self._dump_lines(out, sys.stdout) + self._dump_lines(err, sys.stderr) + return RunResult(ret, out, err, time.time()-now) + + def _dump_lines(self, lines, fp): + try: + for line in lines: + print(line, file=fp) + except UnicodeEncodeError: + print("couldn't print to %s because of encoding" % (fp,)) + + def _getpytestargs(self): + # we cannot use "(sys.executable,script)" + # because on windows the script is e.g. a pytest.exe + return (sys.executable, _pytest_fullpath,) # noqa + + def runpython(self, script): + """Run a python script using sys.executable as interpreter. + + Returns a :py:class:`RunResult`. + """ + return self.run(sys.executable, script) + + def runpython_c(self, command): + """Run python -c "command", return a :py:class:`RunResult`.""" + return self.run(sys.executable, "-c", command) + + def runpytest_subprocess(self, *args, **kwargs): + """Run pytest as a subprocess with given arguments. + + Any plugins added to the :py:attr:`plugins` list will added + using the ``-p`` command line option. Addtionally + ``--basetemp`` is used put any temporary files and directories + in a numbered directory prefixed with "runpytest-" so they do + not conflict with the normal numberd pytest location for + temporary files and directories. + + Returns a :py:class:`RunResult`. + + """ + p = py.path.local.make_numbered_dir(prefix="runpytest-", + keep=None, rootdir=self.tmpdir) + args = ('--basetemp=%s' % p, ) + args + #for x in args: + # if '--confcutdir' in str(x): + # break + #else: + # pass + # args = ('--confcutdir=.',) + args + plugins = [x for x in self.plugins if isinstance(x, str)] + if plugins: + args = ('-p', plugins[0]) + args + args = self._getpytestargs() + args + return self.run(*args) + + def spawn_pytest(self, string, expect_timeout=10.0): + """Run pytest using pexpect. + + This makes sure to use the right pytest and sets up the + temporary directory locations. + + The pexpect child is returned. + + """ + basetemp = self.tmpdir.mkdir("temp-pexpect") + invoke = " ".join(map(str, self._getpytestargs())) + cmd = "%s --basetemp=%s %s" % (invoke, basetemp, string) + return self.spawn(cmd, expect_timeout=expect_timeout) + + def spawn(self, cmd, expect_timeout=10.0): + """Run a command using pexpect. + + The pexpect child is returned. 
+ """ + pexpect = pytest.importorskip("pexpect", "3.0") + if hasattr(sys, 'pypy_version_info') and '64' in platform.machine(): + pytest.skip("pypy-64 bit not supported") + if sys.platform.startswith("freebsd"): + pytest.xfail("pexpect does not work reliably on freebsd") + logfile = self.tmpdir.join("spawn.out").open("wb") + child = pexpect.spawn(cmd, logfile=logfile) + self.request.addfinalizer(logfile.close) + child.timeout = expect_timeout + return child + +def getdecoded(out): + try: + return out.decode("utf-8") + except UnicodeDecodeError: + return "INTERNAL not-utf8-decodeable, truncated string:\n%s" % ( + py.io.saferepr(out),) + + +class LineComp: + def __init__(self): + self.stringio = py.io.TextIO() + + def assert_contains_lines(self, lines2): + """ assert that lines2 are contained (linearly) in lines1. + return a list of extralines found. + """ + __tracebackhide__ = True + val = self.stringio.getvalue() + self.stringio.truncate(0) + self.stringio.seek(0) + lines1 = val.split("\n") + return LineMatcher(lines1).fnmatch_lines(lines2) + + +class LineMatcher: + """Flexible matching of text. + + This is a convenience class to test large texts like the output of + commands. + + The constructor takes a list of lines without their trailing + newlines, i.e. ``text.splitlines()``. + + """ + + def __init__(self, lines): + self.lines = lines + self._log_output = [] + + def str(self): + """Return the entire original text.""" + return "\n".join(self.lines) + + def _getlines(self, lines2): + if isinstance(lines2, str): + lines2 = Source(lines2) + if isinstance(lines2, Source): + lines2 = lines2.strip().lines + return lines2 + + def fnmatch_lines_random(self, lines2): + """Check lines exist in the output. + + The argument is a list of lines which have to occur in the + output, in any order. Each line can contain glob whildcards. + + """ + lines2 = self._getlines(lines2) + for line in lines2: + for x in self.lines: + if line == x or fnmatch(x, line): + self._log("matched: ", repr(line)) + break + else: + self._log("line %r not found in output" % line) + raise ValueError(self._log_text) + + def get_lines_after(self, fnline): + """Return all lines following the given line in the text. + + The given line can contain glob wildcards. + """ + for i, line in enumerate(self.lines): + if fnline == line or fnmatch(line, fnline): + return self.lines[i+1:] + raise ValueError("line %r not found in output" % fnline) + + def _log(self, *args): + self._log_output.append(' '.join((str(x) for x in args))) + + @property + def _log_text(self): + return '\n'.join(self._log_output) + + def fnmatch_lines(self, lines2): + """Search the text for matching lines. + + The argument is a list of lines which have to match and can + use glob wildcards. If they do not match an pytest.fail() is + called. The matches and non-matches are also printed on + stdout. 
+ + """ + lines2 = self._getlines(lines2) + lines1 = self.lines[:] + nextline = None + extralines = [] + __tracebackhide__ = True + for line in lines2: + nomatchprinted = False + while lines1: + nextline = lines1.pop(0) + if line == nextline: + self._log("exact match:", repr(line)) + break + elif fnmatch(nextline, line): + self._log("fnmatch:", repr(line)) + self._log(" with:", repr(nextline)) + break + else: + if not nomatchprinted: + self._log("nomatch:", repr(line)) + nomatchprinted = True + self._log(" and:", repr(nextline)) + extralines.append(nextline) + else: + self._log("remains unmatched: %r" % (line,)) + pytest.fail(self._log_text) diff --git a/venv/lib/python3.5/site-packages/_pytest/python.py b/venv/lib/python3.5/site-packages/_pytest/python.py new file mode 100644 index 0000000..06f74ce --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/python.py @@ -0,0 +1,1597 @@ +""" Python test discovery, setup and run of test functions. """ +from __future__ import absolute_import, division, print_function + +import fnmatch +import inspect +import sys +import os +import collections +import math +from itertools import count + +import py +from _pytest.mark import MarkerError +from _pytest.config import hookimpl + +import _pytest +import _pytest._pluggy as pluggy +from _pytest import fixtures +from _pytest import main +from _pytest.compat import ( + isclass, isfunction, is_generator, _escape_strings, + REGEX_TYPE, STRING_TYPES, NoneType, NOTSET, + get_real_func, getfslineno, safe_getattr, + safe_str, getlocation, enum, +) +from _pytest.runner import fail + +cutdir1 = py.path.local(pluggy.__file__.rstrip("oc")) +cutdir2 = py.path.local(_pytest.__file__).dirpath() +cutdir3 = py.path.local(py.__file__).dirpath() + + +def filter_traceback(entry): + """Return True if a TracebackEntry instance should be removed from tracebacks: + * dynamically generated code (no code to show up for it); + * internal traceback from pytest or its internal libraries, py and pluggy. + """ + # entry.path might sometimes return a str object when the entry + # points to dynamically generated code + # see https://bitbucket.org/pytest-dev/py/issues/71 + raw_filename = entry.frame.code.raw.co_filename + is_generated = '<' in raw_filename and '>' in raw_filename + if is_generated: + return False + # entry.path might point to an inexisting file, in which case it will + # alsso return a str object. see #1133 + p = py.path.local(entry.path) + return p != cutdir1 and not p.relto(cutdir2) and not p.relto(cutdir3) + + + +def pyobj_property(name): + def get(self): + node = self.getparent(getattr(__import__('pytest'), name)) + if node is not None: + return node.obj + doc = "python %s object this node was collected from (can be None)." 
% ( + name.lower(),) + return property(get, None, None, doc) + + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption('--fixtures', '--funcargs', + action="store_true", dest="showfixtures", default=False, + help="show available fixtures, sorted by plugin appearance") + group.addoption( + '--fixtures-per-test', + action="store_true", + dest="show_fixtures_per_test", + default=False, + help="show fixtures per test", + ) + parser.addini("usefixtures", type="args", default=[], + help="list of default fixtures to be used with this project") + parser.addini("python_files", type="args", + default=['test_*.py', '*_test.py'], + help="glob-style file patterns for Python test module discovery") + parser.addini("python_classes", type="args", default=["Test",], + help="prefixes or glob names for Python test class discovery") + parser.addini("python_functions", type="args", default=["test",], + help="prefixes or glob names for Python test function and " + "method discovery") + + group.addoption("--import-mode", default="prepend", + choices=["prepend", "append"], dest="importmode", + help="prepend/append to sys.path when importing test modules, " + "default is to prepend.") + + +def pytest_cmdline_main(config): + if config.option.showfixtures: + showfixtures(config) + return 0 + if config.option.show_fixtures_per_test: + show_fixtures_per_test(config) + return 0 + + +def pytest_generate_tests(metafunc): + # those alternative spellings are common - raise a specific error to alert + # the user + alt_spellings = ['parameterize', 'parametrise', 'parameterise'] + for attr in alt_spellings: + if hasattr(metafunc.function, attr): + msg = "{0} has '{1}', spelling should be 'parametrize'" + raise MarkerError(msg.format(metafunc.function.__name__, attr)) + try: + markers = metafunc.function.parametrize + except AttributeError: + return + for marker in markers: + metafunc.parametrize(*marker.args, **marker.kwargs) + +def pytest_configure(config): + config.addinivalue_line("markers", + "parametrize(argnames, argvalues): call a test function multiple " + "times passing in different arguments in turn. argvalues generally " + "needs to be a list of values if argnames specifies only one name " + "or a list of tuples of values if argnames specifies multiple names. " + "Example: @parametrize('arg1', [1,2]) would lead to two calls of the " + "decorated test function, one with arg1=1 and another with arg1=2." + "see http://pytest.org/latest/parametrize.html for more info and " + "examples." + ) + config.addinivalue_line("markers", + "usefixtures(fixturename1, fixturename2, ...): mark tests as needing " + "all of the specified fixtures. 
see http://pytest.org/latest/fixture.html#usefixtures "
+    )
+
+
+@hookimpl(trylast=True)
+def pytest_pyfunc_call(pyfuncitem):
+    testfunction = pyfuncitem.obj
+    if pyfuncitem._isyieldedfunction():
+        testfunction(*pyfuncitem._args)
+    else:
+        funcargs = pyfuncitem.funcargs
+        testargs = {}
+        for arg in pyfuncitem._fixtureinfo.argnames:
+            testargs[arg] = funcargs[arg]
+        testfunction(**testargs)
+    return True
+
+
+def pytest_collect_file(path, parent):
+    ext = path.ext
+    if ext == ".py":
+        if not parent.session.isinitpath(path):
+            for pat in parent.config.getini('python_files'):
+                if path.fnmatch(pat):
+                    break
+            else:
+                return
+        ihook = parent.session.gethookproxy(path)
+        return ihook.pytest_pycollect_makemodule(path=path, parent=parent)
+
+def pytest_pycollect_makemodule(path, parent):
+    return Module(path, parent)
+
+@hookimpl(hookwrapper=True)
+def pytest_pycollect_makeitem(collector, name, obj):
+    outcome = yield
+    res = outcome.get_result()
+    if res is not None:
+        return
+    # nothing was collected elsewhere, let's do it here
+    if isclass(obj):
+        if collector.istestclass(obj, name):
+            Class = collector._getcustomclass("Class")
+            outcome.force_result(Class(name, parent=collector))
+    elif collector.istestfunction(obj, name):
+        # mock seems to store unbound methods (issue473), normalize it
+        obj = getattr(obj, "__func__", obj)
+        # We need to try and unwrap the function if it's a functools.partial
+        # or a functools.wrapped.
+        # We mustn't if it's been wrapped with mock.patch (python 2 only)
+        if not (isfunction(obj) or isfunction(get_real_func(obj))):
+            collector.warn(code="C2", message=
+                "cannot collect %r because it is not a function."
+                % name, )
+        elif getattr(obj, "__test__", True):
+            if is_generator(obj):
+                res = Generator(name, parent=collector)
+            else:
+                res = list(collector._genfunctions(name, obj))
+            outcome.force_result(res)
+
+def pytest_make_parametrize_id(config, val, argname=None):
+    return None
+
+
+class PyobjContext(object):
+    module = pyobj_property("Module")
+    cls = pyobj_property("Class")
+    instance = pyobj_property("Instance")
+
+class PyobjMixin(PyobjContext):
+    def obj():
+        def fget(self):
+            obj = getattr(self, '_obj', None)
+            if obj is None:
+                self._obj = obj = self._getobj()
+            return obj
+
+        def fset(self, value):
+            self._obj = value
+
+        return property(fget, fset, None, "underlying python object")
+
+    obj = obj()
+
+    def _getobj(self):
+        return getattr(self.parent.obj, self.name)
+
+    def getmodpath(self, stopatmodule=True, includemodule=False):
+        """ return python path relative to the containing module. """
+        chain = self.listchain()
+        chain.reverse()
+        parts = []
+        for node in chain:
+            if isinstance(node, Instance):
+                continue
+            name = node.name
+            if isinstance(node, Module):
+                name = os.path.splitext(name)[0]
+                if stopatmodule:
+                    if includemodule:
+                        parts.append(name)
+                    break
+            parts.append(name)
+        parts.reverse()
+        s = ".".join(parts)
+        return s.replace(".[", "[")
+
+    def _getfslineno(self):
+        return getfslineno(self.obj)
+
+    def reportinfo(self):
+        # XXX caching?
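+        # Returns (fspath, lineno, domain-path), e.g.
+        # ("test_mod.py", 3, "TestClass.test_method"); terminal reporting
+        # uses this as the test's address.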
+ obj = self.obj + compat_co_firstlineno = getattr(obj, 'compat_co_firstlineno', None) + if isinstance(compat_co_firstlineno, int): + # nose compatibility + fspath = sys.modules[obj.__module__].__file__ + if fspath.endswith(".pyc"): + fspath = fspath[:-1] + lineno = compat_co_firstlineno + else: + fspath, lineno = getfslineno(obj) + modpath = self.getmodpath() + assert isinstance(lineno, int) + return fspath, lineno, modpath + +class PyCollector(PyobjMixin, main.Collector): + + def funcnamefilter(self, name): + return self._matches_prefix_or_glob_option('python_functions', name) + + def isnosetest(self, obj): + """ Look for the __test__ attribute, which is applied by the + @nose.tools.istest decorator + """ + # We explicitly check for "is True" here to not mistakenly treat + # classes with a custom __getattr__ returning something truthy (like a + # function) as test classes. + return safe_getattr(obj, '__test__', False) is True + + def classnamefilter(self, name): + return self._matches_prefix_or_glob_option('python_classes', name) + + def istestfunction(self, obj, name): + return ( + (self.funcnamefilter(name) or self.isnosetest(obj)) and + safe_getattr(obj, "__call__", False) and fixtures.getfixturemarker(obj) is None + ) + + def istestclass(self, obj, name): + return self.classnamefilter(name) or self.isnosetest(obj) + + def _matches_prefix_or_glob_option(self, option_name, name): + """ + checks if the given name matches the prefix or glob-pattern defined + in ini configuration. + """ + for option in self.config.getini(option_name): + if name.startswith(option): + return True + # check that name looks like a glob-string before calling fnmatch + # because this is called for every name in each collected module, + # and fnmatch is somewhat expensive to call + elif ('*' in option or '?' in option or '[' in option) and \ + fnmatch.fnmatch(name, option): + return True + return False + + def collect(self): + if not getattr(self.obj, "__test__", True): + return [] + + # NB. we avoid random getattrs and peek in the __dict__ instead + # (XXX originally introduced from a PyPy need, still true?) 
+ dicts = [getattr(self.obj, '__dict__', {})] + for basecls in inspect.getmro(self.obj.__class__): + dicts.append(basecls.__dict__) + seen = {} + l = [] + for dic in dicts: + for name, obj in list(dic.items()): + if name in seen: + continue + seen[name] = True + res = self.makeitem(name, obj) + if res is None: + continue + if not isinstance(res, list): + res = [res] + l.extend(res) + l.sort(key=lambda item: item.reportinfo()[:2]) + return l + + def makeitem(self, name, obj): + #assert self.ihook.fspath == self.fspath, self + return self.ihook.pytest_pycollect_makeitem( + collector=self, name=name, obj=obj) + + def _genfunctions(self, name, funcobj): + module = self.getparent(Module).obj + clscol = self.getparent(Class) + cls = clscol and clscol.obj or None + transfer_markers(funcobj, cls, module) + fm = self.session._fixturemanager + fixtureinfo = fm.getfixtureinfo(self, funcobj, cls) + metafunc = Metafunc(funcobj, fixtureinfo, self.config, + cls=cls, module=module) + methods = [] + if hasattr(module, "pytest_generate_tests"): + methods.append(module.pytest_generate_tests) + if hasattr(cls, "pytest_generate_tests"): + methods.append(cls().pytest_generate_tests) + if methods: + self.ihook.pytest_generate_tests.call_extra(methods, + dict(metafunc=metafunc)) + else: + self.ihook.pytest_generate_tests(metafunc=metafunc) + + Function = self._getcustomclass("Function") + if not metafunc._calls: + yield Function(name, parent=self, fixtureinfo=fixtureinfo) + else: + # add funcargs() as fixturedefs to fixtureinfo.arg2fixturedefs + fixtures.add_funcarg_pseudo_fixture_def(self, metafunc, fm) + + for callspec in metafunc._calls: + subname = "%s[%s]" % (name, callspec.id) + yield Function(name=subname, parent=self, + callspec=callspec, callobj=funcobj, + fixtureinfo=fixtureinfo, + keywords={callspec.id:True}, + originalname=name, + ) + + +def _marked(func, mark): + """ Returns True if :func: is already marked with :mark:, False otherwise. + This can happen if marker is applied to class and the test file is + invoked more than once. + """ + try: + func_mark = getattr(func, mark.name) + except AttributeError: + return False + return mark.args == func_mark.args and mark.kwargs == func_mark.kwargs + + +def transfer_markers(funcobj, cls, mod): + # XXX this should rather be code in the mark plugin or the mark + # plugin should merge with the python plugin. + for holder in (cls, mod): + try: + pytestmark = holder.pytestmark + except AttributeError: + continue + if isinstance(pytestmark, list): + for mark in pytestmark: + if not _marked(funcobj, mark): + mark(funcobj) + else: + if not _marked(funcobj, pytestmark): + pytestmark(funcobj) + + +class Module(main.File, PyCollector): + """ Collector for test classes and functions. 
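[editor's note: the prefix-or-glob matching rule implemented by _matches_prefix_or_glob_option above can be sketched as a standalone function; a minimal illustration, with hypothetical sample names and patterns:]

```python
import fnmatch

def matches_prefix_or_glob(name, patterns):
    # Mirrors _matches_prefix_or_glob_option: a pattern matches either as a
    # plain prefix or, when it contains glob characters, via fnmatch.
    for pattern in patterns:
        if name.startswith(pattern):
            return True
        # only pay the fnmatch cost when the pattern can actually be a glob
        if any(c in pattern for c in "*?[") and fnmatch.fnmatch(name, pattern):
            return True
    return False

# With python_classes = Test Check* (hypothetical ini values):
assert matches_prefix_or_glob("TestLogin", ["Test", "Check*"])
assert matches_prefix_or_glob("CheckedOutput", ["Test", "Check*"])
assert not matches_prefix_or_glob("Helper", ["Test", "Check*"])
```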
""" + + def _getobj(self): + return self._importtestmodule() + + def collect(self): + self.session._fixturemanager.parsefactories(self) + return super(Module, self).collect() + + def _importtestmodule(self): + # we assume we are only called once per module + importmode = self.config.getoption("--import-mode") + try: + mod = self.fspath.pyimport(ensuresyspath=importmode) + except SyntaxError: + raise self.CollectError( + _pytest._code.ExceptionInfo().getrepr(style="short")) + except self.fspath.ImportMismatchError: + e = sys.exc_info()[1] + raise self.CollectError( + "import file mismatch:\n" + "imported module %r has this __file__ attribute:\n" + " %s\n" + "which is not the same as the test file we want to collect:\n" + " %s\n" + "HINT: remove __pycache__ / .pyc files and/or use a " + "unique basename for your test file modules" + % e.args + ) + except ImportError: + from _pytest._code.code import ExceptionInfo + exc_info = ExceptionInfo() + if self.config.getoption('verbose') < 2: + exc_info.traceback = exc_info.traceback.filter(filter_traceback) + exc_repr = exc_info.getrepr(style='short') if exc_info.traceback else exc_info.exconly() + formatted_tb = safe_str(exc_repr) + raise self.CollectError( + "ImportError while importing test module '{fspath}'.\n" + "Hint: make sure your test modules/packages have valid Python names.\n" + "Traceback:\n" + "{traceback}".format(fspath=self.fspath, traceback=formatted_tb) + ) + except _pytest.runner.Skipped as e: + if e.allow_module_level: + raise + raise self.CollectError( + "Using pytest.skip outside of a test is not allowed. If you are " + "trying to decorate a test function, use the @pytest.mark.skip " + "or @pytest.mark.skipif decorators instead." + ) + self.config.pluginmanager.consider_module(mod) + return mod + + def setup(self): + setup_module = _get_xunit_setup_teardown(self.obj, "setUpModule") + if setup_module is None: + setup_module = _get_xunit_setup_teardown(self.obj, "setup_module") + if setup_module is not None: + setup_module() + + teardown_module = _get_xunit_setup_teardown(self.obj, 'tearDownModule') + if teardown_module is None: + teardown_module = _get_xunit_setup_teardown(self.obj, 'teardown_module') + if teardown_module is not None: + self.addfinalizer(teardown_module) + + +def _get_xunit_setup_teardown(holder, attr_name, param_obj=None): + """ + Return a callable to perform xunit-style setup or teardown if + the function exists in the ``holder`` object. + The ``param_obj`` parameter is the parameter which will be passed to the function + when the callable is called without arguments, defaults to the ``holder`` object. + Return ``None`` if a suitable callable is not found. + """ + param_obj = param_obj if param_obj is not None else holder + result = _get_xunit_func(holder, attr_name) + if result is not None: + arg_count = result.__code__.co_argcount + if inspect.ismethod(result): + arg_count -= 1 + if arg_count: + return lambda: result(param_obj) + else: + return result + + +def _get_xunit_func(obj, name): + """Return the attribute from the given object to be used as a setup/teardown + xunit-style function, but only if not marked as a fixture to + avoid calling it twice. + """ + meth = getattr(obj, name, None) + if fixtures.getfixturemarker(meth) is None: + return meth + + +class Class(PyCollector): + """ Collector for test methods. 
""" + def collect(self): + if not safe_getattr(self.obj, "__test__", True): + return [] + if hasinit(self.obj): + self.warn("C1", "cannot collect test class %r because it has a " + "__init__ constructor" % self.obj.__name__) + return [] + elif hasnew(self.obj): + self.warn("C1", "cannot collect test class %r because it has a " + "__new__ constructor" % self.obj.__name__) + return [] + return [self._getcustomclass("Instance")(name="()", parent=self)] + + def setup(self): + setup_class = _get_xunit_func(self.obj, 'setup_class') + if setup_class is not None: + setup_class = getattr(setup_class, 'im_func', setup_class) + setup_class = getattr(setup_class, '__func__', setup_class) + setup_class(self.obj) + + fin_class = getattr(self.obj, 'teardown_class', None) + if fin_class is not None: + fin_class = getattr(fin_class, 'im_func', fin_class) + fin_class = getattr(fin_class, '__func__', fin_class) + self.addfinalizer(lambda: fin_class(self.obj)) + +class Instance(PyCollector): + def _getobj(self): + return self.parent.obj() + + def collect(self): + self.session._fixturemanager.parsefactories(self) + return super(Instance, self).collect() + + def newinstance(self): + self.obj = self._getobj() + return self.obj + +class FunctionMixin(PyobjMixin): + """ mixin for the code common to Function and Generator. + """ + + def setup(self): + """ perform setup for this test function. """ + if hasattr(self, '_preservedparent'): + obj = self._preservedparent + elif isinstance(self.parent, Instance): + obj = self.parent.newinstance() + self.obj = self._getobj() + else: + obj = self.parent.obj + if inspect.ismethod(self.obj): + setup_name = 'setup_method' + teardown_name = 'teardown_method' + else: + setup_name = 'setup_function' + teardown_name = 'teardown_function' + setup_func_or_method = _get_xunit_setup_teardown(obj, setup_name, param_obj=self.obj) + if setup_func_or_method is not None: + setup_func_or_method() + teardown_func_or_method = _get_xunit_setup_teardown(obj, teardown_name, param_obj=self.obj) + if teardown_func_or_method is not None: + self.addfinalizer(teardown_func_or_method) + + def _prunetraceback(self, excinfo): + if hasattr(self, '_obj') and not self.config.option.fulltrace: + code = _pytest._code.Code(get_real_func(self.obj)) + path, firstlineno = code.path, code.firstlineno + traceback = excinfo.traceback + ntraceback = traceback.cut(path=path, firstlineno=firstlineno) + if ntraceback == traceback: + ntraceback = ntraceback.cut(path=path) + if ntraceback == traceback: + #ntraceback = ntraceback.cut(excludepath=cutdir2) + ntraceback = ntraceback.filter(filter_traceback) + if not ntraceback: + ntraceback = traceback + + excinfo.traceback = ntraceback.filter() + # issue364: mark all but first and last frames to + # only show a single-line message for each frame + if self.config.option.tbstyle == "auto": + if len(excinfo.traceback) > 2: + for entry in excinfo.traceback[1:-1]: + entry.set_repr_style('short') + + def _repr_failure_py(self, excinfo, style="long"): + if excinfo.errisinstance(fail.Exception): + if not excinfo.value.pytrace: + return py._builtin._totext(excinfo.value) + return super(FunctionMixin, self)._repr_failure_py(excinfo, + style=style) + + def repr_failure(self, excinfo, outerr=None): + assert outerr is None, "XXX outerr usage is deprecated" + style = self.config.option.tbstyle + if style == "auto": + style = "long" + return self._repr_failure_py(excinfo, style=style) + + +class Generator(FunctionMixin, PyCollector): + def collect(self): + # test generators are seen as 
collectors but they also + # invoke setup/teardown on popular request + # (induced by the common "test_*" naming shared with normal tests) + from _pytest import deprecated + self.session._setupstate.prepare(self) + # see FunctionMixin.setup and test_setupstate_is_preserved_134 + self._preservedparent = self.parent.obj + l = [] + seen = {} + for i, x in enumerate(self.obj()): + name, call, args = self.getcallargs(x) + if not callable(call): + raise TypeError("%r yielded non callable test %r" %(self.obj, call,)) + if name is None: + name = "[%d]" % i + else: + name = "['%s']" % name + if name in seen: + raise ValueError("%r generated tests with non-unique name %r" %(self, name)) + seen[name] = True + l.append(self.Function(name, self, args=args, callobj=call)) + self.config.warn('C1', deprecated.YIELD_TESTS, fslocation=self.fspath) + return l + + def getcallargs(self, obj): + if not isinstance(obj, (tuple, list)): + obj = (obj,) + # explicit naming + if isinstance(obj[0], py.builtin._basestring): + name = obj[0] + obj = obj[1:] + else: + name = None + call, args = obj[0], obj[1:] + return name, call, args + + +def hasinit(obj): + init = getattr(obj, '__init__', None) + if init: + return init != object.__init__ + + +def hasnew(obj): + new = getattr(obj, '__new__', None) + if new: + return new != object.__new__ + + +class CallSpec2(object): + def __init__(self, metafunc): + self.metafunc = metafunc + self.funcargs = {} + self._idlist = [] + self.params = {} + self._globalid = NOTSET + self._globalid_args = set() + self._globalparam = NOTSET + self._arg2scopenum = {} # used for sorting parametrized resources + self.keywords = {} + self.indices = {} + + def copy(self, metafunc): + cs = CallSpec2(self.metafunc) + cs.funcargs.update(self.funcargs) + cs.params.update(self.params) + cs.keywords.update(self.keywords) + cs.indices.update(self.indices) + cs._arg2scopenum.update(self._arg2scopenum) + cs._idlist = list(self._idlist) + cs._globalid = self._globalid + cs._globalid_args = self._globalid_args + cs._globalparam = self._globalparam + return cs + + def _checkargnotcontained(self, arg): + if arg in self.params or arg in self.funcargs: + raise ValueError("duplicate %r" %(arg,)) + + def getparam(self, name): + try: + return self.params[name] + except KeyError: + if self._globalparam is NOTSET: + raise ValueError(name) + return self._globalparam + + @property + def id(self): + return "-".join(map(str, filter(None, self._idlist))) + + def setmulti(self, valtypes, argnames, valset, id, keywords, scopenum, + param_index): + for arg,val in zip(argnames, valset): + self._checkargnotcontained(arg) + valtype_for_arg = valtypes[arg] + getattr(self, valtype_for_arg)[arg] = val + self.indices[arg] = param_index + self._arg2scopenum[arg] = scopenum + self._idlist.append(id) + self.keywords.update(keywords) + + def setall(self, funcargs, id, param): + for x in funcargs: + self._checkargnotcontained(x) + self.funcargs.update(funcargs) + if id is not NOTSET: + self._idlist.append(id) + if param is not NOTSET: + assert self._globalparam is NOTSET + self._globalparam = param + for arg in funcargs: + self._arg2scopenum[arg] = fixtures.scopenum_function + + +class Metafunc(fixtures.FuncargnamesCompatAttr): + """ + Metafunc objects are passed to the ``pytest_generate_tests`` hook. + They help to inspect a test function and to generate tests according to + test configuration or values specified in the class or module where a + test function is defined. 
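[editor's note: a minimal sketch of how a pytest_generate_tests hook typically uses the Metafunc API described here; the fixture name and values are hypothetical:]

```python
# conftest.py (sketch)
def pytest_generate_tests(metafunc):
    # metafunc.fixturenames lists what the test function requires;
    # metafunc.parametrize adds one invocation per value.
    if "backend" in metafunc.fixturenames:
        metafunc.parametrize("backend", ["sqlite", "postgres"])
```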
+ """ + def __init__(self, function, fixtureinfo, config, cls=None, module=None): + #: access to the :class:`_pytest.config.Config` object for the test session + self.config = config + + #: the module object where the test function is defined in. + self.module = module + + #: underlying python test function + self.function = function + + #: set of fixture names required by the test function + self.fixturenames = fixtureinfo.names_closure + + #: class object where the test function is defined in or ``None``. + self.cls = cls + + self._calls = [] + self._ids = py.builtin.set() + self._arg2fixturedefs = fixtureinfo.name2fixturedefs + + def parametrize(self, argnames, argvalues, indirect=False, ids=None, + scope=None): + """ Add new invocations to the underlying test function using the list + of argvalues for the given argnames. Parametrization is performed + during the collection phase. If you need to setup expensive resources + see about setting indirect to do it rather at test setup time. + + :arg argnames: a comma-separated string denoting one or more argument + names, or a list/tuple of argument strings. + + :arg argvalues: The list of argvalues determines how often a + test is invoked with different argument values. If only one + argname was specified argvalues is a list of values. If N + argnames were specified, argvalues must be a list of N-tuples, + where each tuple-element specifies a value for its respective + argname. + + :arg indirect: The list of argnames or boolean. A list of arguments' + names (subset of argnames). If True the list contains all names from + the argnames. Each argvalue corresponding to an argname in this list will + be passed as request.param to its respective argname fixture + function so that it can perform more expensive setups during the + setup phase of a test rather than at collection time. + + :arg ids: list of string ids, or a callable. + If strings, each is corresponding to the argvalues so that they are + part of the test id. If None is given as id of specific test, the + automatically generated id for that argument will be used. + If callable, it should take one argument (a single argvalue) and return + a string or return None. If None, the automatically generated id for that + argument will be used. + If no ids are provided they will be generated automatically from + the argvalues. + + :arg scope: if specified it denotes the scope of the parameters. + The scope is used for grouping tests by parameter instances. + It will also override any fixture-function defined scope, allowing + to set a dynamic scope using test context or configuration. 
+ """ + from _pytest.fixtures import scope2index + from _pytest.mark import MARK_GEN, ParameterSet + from py.io import saferepr + + if not isinstance(argnames, (tuple, list)): + argnames = [x.strip() for x in argnames.split(",") if x.strip()] + force_tuple = len(argnames) == 1 + else: + force_tuple = False + parameters = [ + ParameterSet.extract_from(x, legacy_force_tuple=force_tuple) + for x in argvalues] + del argvalues + + if not parameters: + fs, lineno = getfslineno(self.function) + reason = "got empty parameter set %r, function %s at %s:%d" % ( + argnames, self.function.__name__, fs, lineno) + mark = MARK_GEN.skip(reason=reason) + parameters.append(ParameterSet( + values=(NOTSET,) * len(argnames), + marks=[mark], + id=None, + )) + + if scope is None: + scope = _find_parametrized_scope(argnames, self._arg2fixturedefs, indirect) + + scopenum = scope2index(scope, descr='call to {0}'.format(self.parametrize)) + valtypes = {} + for arg in argnames: + if arg not in self.fixturenames: + if isinstance(indirect, (tuple, list)): + name = 'fixture' if arg in indirect else 'argument' + else: + name = 'fixture' if indirect else 'argument' + raise ValueError( + "%r uses no %s %r" % ( + self.function, name, arg)) + + if indirect is True: + valtypes = dict.fromkeys(argnames, "params") + elif indirect is False: + valtypes = dict.fromkeys(argnames, "funcargs") + elif isinstance(indirect, (tuple, list)): + valtypes = dict.fromkeys(argnames, "funcargs") + for arg in indirect: + if arg not in argnames: + raise ValueError("indirect given to %r: fixture %r doesn't exist" % ( + self.function, arg)) + valtypes[arg] = "params" + idfn = None + if callable(ids): + idfn = ids + ids = None + if ids: + if len(ids) != len(parameters): + raise ValueError('%d tests specified with %d ids' % ( + len(parameters), len(ids))) + for id_value in ids: + if id_value is not None and not isinstance(id_value, py.builtin._basestring): + msg = 'ids must be list of strings, found: %s (type: %s)' + raise ValueError(msg % (saferepr(id_value), type(id_value).__name__)) + ids = idmaker(argnames, parameters, idfn, ids, self.config) + newcalls = [] + for callspec in self._calls or [CallSpec2(self)]: + elements = zip(ids, parameters, count()) + for a_id, param, param_index in elements: + if len(param.values) != len(argnames): + raise ValueError( + 'In "parametrize" the number of values ({0}) must be ' + 'equal to the number of names ({1})'.format( + param.values, argnames)) + newcallspec = callspec.copy(self) + newcallspec.setmulti(valtypes, argnames, param.values, a_id, + param.deprecated_arg_dict, scopenum, param_index) + newcalls.append(newcallspec) + self._calls = newcalls + + def addcall(self, funcargs=None, id=NOTSET, param=NOTSET): + """ (deprecated, use parametrize) Add a new call to the underlying + test function during the collection phase of a test run. Note that + request.addcall() is called during the test collection phase prior and + independently to actual test execution. You should only use addcall() + if you need to specify multiple arguments of a test function. + + :arg funcargs: argument keyword dictionary used when invoking + the test function. + + :arg id: used for reporting and identification purposes. If you + don't supply an `id` an automatic unique id will be generated. + + :arg param: a parameter which will be exposed to a later fixture function + invocation through the ``request.param`` attribute. 
+ """ + assert funcargs is None or isinstance(funcargs, dict) + if funcargs is not None: + for name in funcargs: + if name not in self.fixturenames: + fail("funcarg %r not used in this function." % name) + else: + funcargs = {} + if id is None: + raise ValueError("id=None not allowed") + if id is NOTSET: + id = len(self._calls) + id = str(id) + if id in self._ids: + raise ValueError("duplicate id %r" % id) + self._ids.add(id) + + cs = CallSpec2(self) + cs.setall(funcargs, id, param) + self._calls.append(cs) + + +def _find_parametrized_scope(argnames, arg2fixturedefs, indirect): + """Find the most appropriate scope for a parametrized call based on its arguments. + + When there's at least one direct argument, always use "function" scope. + + When a test function is parametrized and all its arguments are indirect + (e.g. fixtures), return the most narrow scope based on the fixtures used. + + Related to issue #1832, based on code posted by @Kingdread. + """ + from _pytest.fixtures import scopes + indirect_as_list = isinstance(indirect, (list, tuple)) + all_arguments_are_fixtures = indirect is True or \ + indirect_as_list and len(indirect) == argnames + if all_arguments_are_fixtures: + fixturedefs = arg2fixturedefs or {} + used_scopes = [fixturedef[0].scope for name, fixturedef in fixturedefs.items()] + if used_scopes: + # Takes the most narrow scope from used fixtures + for scope in reversed(scopes): + if scope in used_scopes: + return scope + + return 'function' + + +def _idval(val, argname, idx, idfn, config=None): + if idfn: + s = None + try: + s = idfn(val) + except Exception: + # See issue https://github.com/pytest-dev/pytest/issues/2169 + import warnings + msg = "Raised while trying to determine id of parameter %s at position %d." % (argname, idx) + msg += '\nUpdate your code as this will raise an error in pytest-4.0.' 
+ warnings.warn(msg, DeprecationWarning) + if s: + return _escape_strings(s) + + if config: + hook_id = config.hook.pytest_make_parametrize_id( + config=config, val=val, argname=argname) + if hook_id: + return hook_id + + if isinstance(val, STRING_TYPES): + return _escape_strings(val) + elif isinstance(val, (float, int, bool, NoneType)): + return str(val) + elif isinstance(val, REGEX_TYPE): + return _escape_strings(val.pattern) + elif enum is not None and isinstance(val, enum.Enum): + return str(val) + elif isclass(val) and hasattr(val, '__name__'): + return val.__name__ + return str(argname)+str(idx) + + +def _idvalset(idx, parameterset, argnames, idfn, ids, config=None): + if parameterset.id is not None: + return parameterset.id + if ids is None or (idx >= len(ids) or ids[idx] is None): + this_id = [_idval(val, argname, idx, idfn, config) + for val, argname in zip(parameterset.values, argnames)] + return "-".join(this_id) + else: + return _escape_strings(ids[idx]) + + +def idmaker(argnames, parametersets, idfn=None, ids=None, config=None): + ids = [_idvalset(valindex, parameterset, argnames, idfn, ids, config) + for valindex, parameterset in enumerate(parametersets)] + if len(set(ids)) != len(ids): + # The ids are not unique + duplicates = [testid for testid in ids if ids.count(testid) > 1] + counters = collections.defaultdict(lambda: 0) + for index, testid in enumerate(ids): + if testid in duplicates: + ids[index] = testid + str(counters[testid]) + counters[testid] += 1 + return ids + + +def show_fixtures_per_test(config): + from _pytest.main import wrap_session + return wrap_session(config, _show_fixtures_per_test) + + +def _show_fixtures_per_test(config, session): + import _pytest.config + session.perform_collect() + curdir = py.path.local() + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + def get_best_rel(func): + loc = getlocation(func, curdir) + return curdir.bestrelpath(loc) + + def write_fixture(fixture_def): + argname = fixture_def.argname + + if verbose <= 0 and argname.startswith("_"): + return + if verbose > 0: + bestrel = get_best_rel(fixture_def.func) + funcargspec = "{0} -- {1}".format(argname, bestrel) + else: + funcargspec = argname + tw.line(funcargspec, green=True) + + INDENT = ' {0}' + fixture_doc = fixture_def.func.__doc__ + + if fixture_doc: + for line in fixture_doc.strip().split('\n'): + tw.line(INDENT.format(line.strip())) + else: + tw.line(INDENT.format('no docstring available'), red=True) + + def write_item(item): + name2fixturedefs = item._fixtureinfo.name2fixturedefs + + if not name2fixturedefs: + # The given test item does not use any fixtures + return + bestrel = get_best_rel(item.function) + + tw.line() + tw.sep('-', 'fixtures used by {0}'.format(item.name)) + tw.sep('-', '({0})'.format(bestrel)) + for argname, fixture_defs in sorted(name2fixturedefs.items()): + assert fixture_defs is not None + if not fixture_defs: + continue + # The last fixture def item in the list is expected + # to be the one used by the test item + write_fixture(fixture_defs[-1]) + + for item in session.items: + write_item(item) + + +def showfixtures(config): + from _pytest.main import wrap_session + return wrap_session(config, _showfixtures_main) + + +def _showfixtures_main(config, session): + import _pytest.config + session.perform_collect() + curdir = py.path.local() + tw = _pytest.config.create_terminal_writer(config) + verbose = config.getvalue("verbose") + + fm = session._fixturemanager + + available = [] + seen = set() + + for 
argname, fixturedefs in fm._arg2fixturedefs.items(): + assert fixturedefs is not None + if not fixturedefs: + continue + for fixturedef in fixturedefs: + loc = getlocation(fixturedef.func, curdir) + if (fixturedef.argname, loc) in seen: + continue + seen.add((fixturedef.argname, loc)) + available.append((len(fixturedef.baseid), + fixturedef.func.__module__, + curdir.bestrelpath(loc), + fixturedef.argname, fixturedef)) + + available.sort() + currentmodule = None + for baseid, module, bestrel, argname, fixturedef in available: + if currentmodule != module: + if not module.startswith("_pytest."): + tw.line() + tw.sep("-", "fixtures defined from %s" %(module,)) + currentmodule = module + if verbose <= 0 and argname[0] == "_": + continue + if verbose > 0: + funcargspec = "%s -- %s" %(argname, bestrel,) + else: + funcargspec = argname + tw.line(funcargspec, green=True) + loc = getlocation(fixturedef.func, curdir) + doc = fixturedef.func.__doc__ or "" + if doc: + for line in doc.strip().split("\n"): + tw.line(" " + line.strip()) + else: + tw.line(" %s: no docstring available" %(loc,), + red=True) + + +# builtin pytest.raises helper + +def raises(expected_exception, *args, **kwargs): + """ + Assert that a code block/function call raises ``expected_exception`` + and raise a failure exception otherwise. + + This helper produces a ``ExceptionInfo()`` object (see below). + + If using Python 2.5 or above, you may use this function as a + context manager:: + + >>> with raises(ZeroDivisionError): + ... 1/0 + + .. versionchanged:: 2.10 + + In the context manager form you may use the keyword argument + ``message`` to specify a custom failure message:: + + >>> with raises(ZeroDivisionError, message="Expecting ZeroDivisionError"): + ... pass + Traceback (most recent call last): + ... + Failed: Expecting ZeroDivisionError + + + .. note:: + + When using ``pytest.raises`` as a context manager, it's worthwhile to + note that normal context manager rules apply and that the exception + raised *must* be the final line in the scope of the context manager. + Lines of code after that, within the scope of the context manager will + not be executed. For example:: + + >>> value = 15 + >>> with raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... assert exc_info.type == ValueError # this will not execute + + Instead, the following approach must be taken (note the difference in + scope):: + + >>> with raises(ValueError) as exc_info: + ... if value > 10: + ... raise ValueError("value must be <= 10") + ... + >>> assert exc_info.type == ValueError + + Or you can use the keyword argument ``match`` to assert that the + exception matches a text or regex:: + + >>> with raises(ValueError, match='must be 0 or None'): + ... raise ValueError("value must be 0 or None") + + >>> with raises(ValueError, match=r'must be \d+$'): + ... raise ValueError("value must be 42") + + + Or you can specify a callable by passing a to-be-called lambda:: + + >>> raises(ZeroDivisionError, lambda: 1/0) + + + or you can specify an arbitrary callable with arguments:: + + >>> def f(x): return 1/x + ... + >>> raises(ZeroDivisionError, f, 0) + + >>> raises(ZeroDivisionError, f, x=0) + + + A third possibility is to use a string to be executed:: + + >>> raises(ZeroDivisionError, "f(0)") + + + .. autoclass:: _pytest._code.ExceptionInfo + :members: + + .. 
note:: + Similar to caught exception objects in Python, explicitly clearing + local references to returned ``ExceptionInfo`` objects can + help the Python interpreter speed up its garbage collection. + + Clearing those references breaks a reference cycle + (``ExceptionInfo`` --> caught exception --> frame stack raising + the exception --> current frame stack --> local variables --> + ``ExceptionInfo``) which makes Python keep all objects referenced + from that cycle (including all local variables in the current + frame) alive until the next cyclic garbage collection run. See the + official Python ``try`` statement documentation for more detailed + information. + + """ + __tracebackhide__ = True + msg = ("exceptions must be old-style classes or" + " derived from BaseException, not %s") + if isinstance(expected_exception, tuple): + for exc in expected_exception: + if not isclass(exc): + raise TypeError(msg % type(exc)) + elif not isclass(expected_exception): + raise TypeError(msg % type(expected_exception)) + + message = "DID NOT RAISE {0}".format(expected_exception) + match_expr = None + + if not args: + if "message" in kwargs: + message = kwargs.pop("message") + if "match" in kwargs: + match_expr = kwargs.pop("match") + message += " matching '{0}'".format(match_expr) + return RaisesContext(expected_exception, message, match_expr) + elif isinstance(args[0], str): + code, = args + assert isinstance(code, str) + frame = sys._getframe(1) + loc = frame.f_locals.copy() + loc.update(kwargs) + #print "raises frame scope: %r" % frame.f_locals + try: + code = _pytest._code.Source(code).compile() + py.builtin.exec_(code, frame.f_globals, loc) + # XXX didn'T mean f_globals == f_locals something special? + # this is destroyed here ... + except expected_exception: + return _pytest._code.ExceptionInfo() + else: + func = args[0] + try: + func(*args[1:], **kwargs) + except expected_exception: + return _pytest._code.ExceptionInfo() + fail(message) + + +raises.Exception = fail.Exception + + +class RaisesContext(object): + def __init__(self, expected_exception, message, match_expr): + self.expected_exception = expected_exception + self.message = message + self.match_expr = match_expr + self.excinfo = None + + def __enter__(self): + self.excinfo = object.__new__(_pytest._code.ExceptionInfo) + return self.excinfo + + def __exit__(self, *tp): + __tracebackhide__ = True + if tp[0] is None: + fail(self.message) + if sys.version_info < (2, 7): + # py26: on __exit__() exc_value often does not contain the + # exception value. + # http://bugs.python.org/issue7853 + if not isinstance(tp[1], BaseException): + exc_type, value, traceback = tp + tp = exc_type, exc_type(value), traceback + self.excinfo.__init__(tp) + suppress_exception = issubclass(self.excinfo.type, self.expected_exception) + if sys.version_info[0] == 2 and suppress_exception: + sys.exc_clear() + if self.match_expr: + self.excinfo.match(self.match_expr) + return suppress_exception + + +# builtin pytest.approx helper + +class approx(object): + """ + Assert that two numbers (or two sets of numbers) are equal to each other + within some tolerance. + + Due to the `intricacies of floating-point arithmetic`__, numbers that we + would intuitively expect to be equal are not always so:: + + >>> 0.1 + 0.2 == 0.3 + False + + __ https://docs.python.org/3/tutorial/floatingpoint.html + + This problem is commonly encountered when writing tests, e.g. when making + sure that floating-point values are what you expect them to be. 
One way to + deal with this problem is to assert that two floating-point numbers are + equal to within some appropriate tolerance:: + + >>> abs((0.1 + 0.2) - 0.3) < 1e-6 + True + + However, comparisons like this are tedious to write and difficult to + understand. Furthermore, absolute comparisons like the one above are + usually discouraged because there's no tolerance that works well for all + situations. ``1e-6`` is good for numbers around ``1``, but too small for + very big numbers and too big for very small ones. It's better to express + the tolerance as a fraction of the expected value, but relative comparisons + like that are even more difficult to write correctly and concisely. + + The ``approx`` class performs floating-point comparisons using a syntax + that's as intuitive as possible:: + + >>> from pytest import approx + >>> 0.1 + 0.2 == approx(0.3) + True + + The same syntax also works on sequences of numbers:: + + >>> (0.1 + 0.2, 0.2 + 0.4) == approx((0.3, 0.6)) + True + + By default, ``approx`` considers numbers within a relative tolerance of + ``1e-6`` (i.e. one part in a million) of its expected value to be equal. + This treatment would lead to surprising results if the expected value was + ``0.0``, because nothing but ``0.0`` itself is relatively close to ``0.0``. + To handle this case less surprisingly, ``approx`` also considers numbers + within an absolute tolerance of ``1e-12`` of its expected value to be + equal. Infinite numbers are another special case. They are only + considered equal to themselves, regardless of the relative tolerance. Both + the relative and absolute tolerances can be changed by passing arguments to + the ``approx`` constructor:: + + >>> 1.0001 == approx(1) + False + >>> 1.0001 == approx(1, rel=1e-3) + True + >>> 1.0001 == approx(1, abs=1e-3) + True + + If you specify ``abs`` but not ``rel``, the comparison will not consider + the relative tolerance at all. In other words, two numbers that are within + the default relative tolerance of ``1e-6`` will still be considered unequal + if they exceed the specified absolute tolerance. If you specify both + ``abs`` and ``rel``, the numbers will be considered equal if either + tolerance is met:: + + >>> 1 + 1e-8 == approx(1) + True + >>> 1 + 1e-8 == approx(1, abs=1e-12) + False + >>> 1 + 1e-8 == approx(1, rel=1e-6, abs=1e-12) + True + + If you're thinking about using ``approx``, then you might want to know how + it compares to other good ways of comparing floating-point numbers. All of + these algorithms are based on relative and absolute tolerances and should + agree for the most part, but they do have meaningful differences: + + - ``math.isclose(a, b, rel_tol=1e-9, abs_tol=0.0)``: True if the relative + tolerance is met w.r.t. either ``a`` or ``b`` or if the absolute + tolerance is met. Because the relative tolerance is calculated w.r.t. + both ``a`` and ``b``, this test is symmetric (i.e. neither ``a`` nor + ``b`` is a "reference value"). You have to specify an absolute tolerance + if you want to compare to ``0.0`` because there is no tolerance by + default. Only available in python>=3.5. `More information...`__ + + __ https://docs.python.org/3/library/math.html#math.isclose + + - ``numpy.isclose(a, b, rtol=1e-5, atol=1e-8)``: True if the difference + between ``a`` and ``b`` is less than the sum of the relative tolerance + w.r.t. ``b`` and the absolute tolerance. Because the relative tolerance + is only calculated w.r.t. ``b``, this test is asymmetric and you can + think of ``b`` as the reference value. 
Support for comparing sequences + is provided by ``numpy.allclose``. `More information...`__ + + __ http://docs.scipy.org/doc/numpy-1.10.0/reference/generated/numpy.isclose.html + + - ``unittest.TestCase.assertAlmostEqual(a, b)``: True if ``a`` and ``b`` + are within an absolute tolerance of ``1e-7``. No relative tolerance is + considered and the absolute tolerance cannot be changed, so this function + is not appropriate for very large or very small numbers. Also, it's only + available in subclasses of ``unittest.TestCase`` and it's ugly because it + doesn't follow PEP8. `More information...`__ + + __ https://docs.python.org/3/library/unittest.html#unittest.TestCase.assertAlmostEqual + + - ``a == pytest.approx(b, rel=1e-6, abs=1e-12)``: True if the relative + tolerance is met w.r.t. ``b`` or if the absolute tolerance is met. + Because the relative tolerance is only calculated w.r.t. ``b``, this test + is asymmetric and you can think of ``b`` as the reference value. In the + special case that you explicitly specify an absolute tolerance but not a + relative tolerance, only the absolute tolerance is considered. + """ + + def __init__(self, expected, rel=None, abs=None): + self.expected = expected + self.abs = abs + self.rel = rel + + def __repr__(self): + return ', '.join(repr(x) for x in self.expected) + + def __eq__(self, actual): + from collections import Iterable + if not isinstance(actual, Iterable): + actual = [actual] + if len(actual) != len(self.expected): + return False + return all(a == x for a, x in zip(actual, self.expected)) + + __hash__ = None + + def __ne__(self, actual): + return not (actual == self) + + @property + def expected(self): + # Regardless of whether the user-specified expected value is a number + # or a sequence of numbers, return a list of ApproxNonIterable objects + # that can be compared against. + from collections import Iterable + approx_non_iter = lambda x: ApproxNonIterable(x, self.rel, self.abs) + if isinstance(self._expected, Iterable): + return [approx_non_iter(x) for x in self._expected] + else: + return [approx_non_iter(self._expected)] + + @expected.setter + def expected(self, expected): + self._expected = expected + + +class ApproxNonIterable(object): + """ + Perform approximate comparisons for single numbers only. + + In other words, the ``expected`` attribute for objects of this class must + be some sort of number. This is in contrast to the ``approx`` class, where + the ``expected`` attribute can either be a number or a sequence of numbers. + This class is responsible for making comparisons, while ``approx`` is + responsible for abstracting the difference between numbers and sequences of + numbers. Although this class can stand on its own, it's only meant to be + used within ``approx``. + """ + + def __init__(self, expected, rel=None, abs=None): + self.expected = expected + self.abs = abs + self.rel = rel + + def __repr__(self): + if isinstance(self.expected, complex): + return str(self.expected) + + # Infinities aren't compared using tolerances, so don't show a + # tolerance. + if math.isinf(self.expected): + return str(self.expected) + + # If a sensible tolerance can't be calculated, self.tolerance will + # raise a ValueError. In this case, display '???'. + try: + vetted_tolerance = '{:.1e}'.format(self.tolerance) + except ValueError: + vetted_tolerance = '???' 
+ + if sys.version_info[0] == 2: + return '{0} +- {1}'.format(self.expected, vetted_tolerance) + else: + return u'{0} \u00b1 {1}'.format(self.expected, vetted_tolerance) + + def __eq__(self, actual): + # Short-circuit exact equality. + if actual == self.expected: + return True + + # Infinity shouldn't be approximately equal to anything but itself, but + # if there's a relative tolerance, it will be infinite and infinity + # will seem approximately equal to everything. The equal-to-itself + # case would have been short circuited above, so here we can just + # return false if the expected value is infinite. The abs() call is + # for compatibility with complex numbers. + if math.isinf(abs(self.expected)): + return False + + # Return true if the two numbers are within the tolerance. + return abs(self.expected - actual) <= self.tolerance + + __hash__ = None + + def __ne__(self, actual): + return not (actual == self) + + @property + def tolerance(self): + set_default = lambda x, default: x if x is not None else default + + # Figure out what the absolute tolerance should be. ``self.abs`` is + # either None or a value specified by the user. + absolute_tolerance = set_default(self.abs, 1e-12) + + if absolute_tolerance < 0: + raise ValueError("absolute tolerance can't be negative: {}".format(absolute_tolerance)) + if math.isnan(absolute_tolerance): + raise ValueError("absolute tolerance can't be NaN.") + + # If the user specified an absolute tolerance but not a relative one, + # just return the absolute tolerance. + if self.rel is None: + if self.abs is not None: + return absolute_tolerance + + # Figure out what the relative tolerance should be. ``self.rel`` is + # either None or a value specified by the user. This is done after + # we've made sure the user didn't ask for an absolute tolerance only, + # because we don't want to raise errors about the relative tolerance if + # we aren't even going to use it. + relative_tolerance = set_default(self.rel, 1e-6) * abs(self.expected) + + if relative_tolerance < 0: + raise ValueError("relative tolerance can't be negative: {}".format(relative_tolerance)) + if math.isnan(relative_tolerance): + raise ValueError("relative tolerance can't be NaN.") + + # Return the larger of the relative and absolute tolerances. + return max(relative_tolerance, absolute_tolerance) + + +# +# the basic pytest Function item +# + +class Function(FunctionMixin, main.Item, fixtures.FuncargnamesCompatAttr): + """ a Function Item is responsible for setting up and executing a + Python test function. + """ + _genid = None + def __init__(self, name, parent, args=None, config=None, + callspec=None, callobj=NOTSET, keywords=None, session=None, + fixtureinfo=None, originalname=None): + super(Function, self).__init__(name, parent, config=config, + session=session) + self._args = args + if callobj is not NOTSET: + self.obj = callobj + + self.keywords.update(self.obj.__dict__) + if callspec: + self.callspec = callspec + self.keywords.update(callspec.keywords) + if keywords: + self.keywords.update(keywords) + + if fixtureinfo is None: + fixtureinfo = self.session._fixturemanager.getfixtureinfo( + self.parent, self.obj, self.cls, + funcargs=not self._isyieldedfunction()) + self._fixtureinfo = fixtureinfo + self.fixturenames = fixtureinfo.names_closure + self._initrequest() + + #: original function name, without any decorations (for example + #: parametrization adds a ``"[...]"`` suffix to function names). + #: + #: .. 
versionadded:: 3.0 + self.originalname = originalname + + def _initrequest(self): + self.funcargs = {} + if self._isyieldedfunction(): + assert not hasattr(self, "callspec"), ( + "yielded functions (deprecated) cannot have funcargs") + else: + if hasattr(self, "callspec"): + callspec = self.callspec + assert not callspec.funcargs + self._genid = callspec.id + if hasattr(callspec, "param"): + self.param = callspec.param + self._request = fixtures.FixtureRequest(self) + + @property + def function(self): + "underlying python 'function' object" + return getattr(self.obj, 'im_func', self.obj) + + def _getobj(self): + name = self.name + i = name.find("[") # parametrization + if i != -1: + name = name[:i] + return getattr(self.parent.obj, name) + + @property + def _pyfuncitem(self): + "(compatonly) for code expecting pytest-2.2 style request objects" + return self + + def _isyieldedfunction(self): + return getattr(self, "_args", None) is not None + + def runtest(self): + """ execute the underlying test function. """ + self.ihook.pytest_pyfunc_call(pyfuncitem=self) + + def setup(self): + super(Function, self).setup() + fixtures.fillfixtures(self) diff --git a/venv/lib/python3.5/site-packages/_pytest/recwarn.py b/venv/lib/python3.5/site-packages/_pytest/recwarn.py new file mode 100644 index 0000000..9cc404a --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/recwarn.py @@ -0,0 +1,204 @@ +""" recording warnings during test function execution. """ +from __future__ import absolute_import, division, print_function + +import inspect + +import _pytest._code +import py +import sys +import warnings +from _pytest.fixtures import yield_fixture + + +@yield_fixture +def recwarn(): + """Return a WarningsRecorder instance that provides these methods: + + * ``pop(category=Warning)``: return the first warning matching the category. + * ``clear()``: clear the list of warnings + + See http://docs.python.org/library/warnings.html for information + on warning categories. + """ + wrec = WarningsRecorder() + with wrec: + warnings.simplefilter('default') + yield wrec + + +def deprecated_call(func=None, *args, **kwargs): + """context manager that can be used to ensure a block of code triggers a + ``DeprecationWarning`` or ``PendingDeprecationWarning``:: + + >>> import warnings + >>> def api_call_v2(): + ... warnings.warn('use v3 of this api', DeprecationWarning) + ... return 200 + + >>> with deprecated_call(): + ... assert api_call_v2() == 200 + + ``deprecated_call`` can also be used by passing a function and ``*args`` and ``**kwargs``, + in which case it will ensure calling ``func(*args, **kwargs)`` produces one of the warning + types above. 
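[editor's note: a short sketch of both calling conventions of deprecated_call; the legacy_api function is hypothetical:]

```python
import warnings
import pytest

def legacy_api():
    warnings.warn("use new_api instead", DeprecationWarning)
    return 42

# context-manager form
with pytest.deprecated_call():
    assert legacy_api() == 42

# direct-call form: the wrapped function's return value is passed through
assert pytest.deprecated_call(legacy_api) == 42
```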
+ """ + if not func: + return _DeprecatedCallContext() + else: + __tracebackhide__ = True + with _DeprecatedCallContext(): + return func(*args, **kwargs) + + +class _DeprecatedCallContext(object): + """Implements the logic to capture deprecation warnings as a context manager.""" + + def __enter__(self): + self._captured_categories = [] + self._old_warn = warnings.warn + self._old_warn_explicit = warnings.warn_explicit + warnings.warn_explicit = self._warn_explicit + warnings.warn = self._warn + + def _warn_explicit(self, message, category, *args, **kwargs): + self._captured_categories.append(category) + + def _warn(self, message, category=None, *args, **kwargs): + if isinstance(message, Warning): + self._captured_categories.append(message.__class__) + else: + self._captured_categories.append(category) + + def __exit__(self, exc_type, exc_val, exc_tb): + warnings.warn_explicit = self._old_warn_explicit + warnings.warn = self._old_warn + + if exc_type is None: + deprecation_categories = (DeprecationWarning, PendingDeprecationWarning) + if not any(issubclass(c, deprecation_categories) for c in self._captured_categories): + __tracebackhide__ = True + msg = "Did not produce DeprecationWarning or PendingDeprecationWarning" + raise AssertionError(msg) + + +def warns(expected_warning, *args, **kwargs): + """Assert that code raises a particular class of warning. + + Specifically, the input @expected_warning can be a warning class or + tuple of warning classes, and the code must return that warning + (if a single class) or one of those warnings (if a tuple). + + This helper produces a list of ``warnings.WarningMessage`` objects, + one for each warning raised. + + This function can be used as a context manager, or any of the other ways + ``pytest.raises`` can be used:: + + >>> with warns(RuntimeWarning): + ... warnings.warn("my warning", RuntimeWarning) + """ + wcheck = WarningsChecker(expected_warning) + if not args: + return wcheck + elif isinstance(args[0], str): + code, = args + assert isinstance(code, str) + frame = sys._getframe(1) + loc = frame.f_locals.copy() + loc.update(kwargs) + + with wcheck: + code = _pytest._code.Source(code).compile() + py.builtin.exec_(code, frame.f_globals, loc) + else: + func = args[0] + with wcheck: + return func(*args[1:], **kwargs) + + +class WarningsRecorder(warnings.catch_warnings): + """A context manager to record raised warnings. + + Adapted from `warnings.catch_warnings`. 
+ """ + + def __init__(self): + super(WarningsRecorder, self).__init__(record=True) + self._entered = False + self._list = [] + + @property + def list(self): + """The list of recorded warnings.""" + return self._list + + def __getitem__(self, i): + """Get a recorded warning by index.""" + return self._list[i] + + def __iter__(self): + """Iterate through the recorded warnings.""" + return iter(self._list) + + def __len__(self): + """The number of recorded warnings.""" + return len(self._list) + + def pop(self, cls=Warning): + """Pop the first recorded warning, raise exception if not exists.""" + for i, w in enumerate(self._list): + if issubclass(w.category, cls): + return self._list.pop(i) + __tracebackhide__ = True + raise AssertionError("%r not found in warning list" % cls) + + def clear(self): + """Clear the list of recorded warnings.""" + self._list[:] = [] + + def __enter__(self): + if self._entered: + __tracebackhide__ = True + raise RuntimeError("Cannot enter %r twice" % self) + self._list = super(WarningsRecorder, self).__enter__() + warnings.simplefilter('always') + return self + + def __exit__(self, *exc_info): + if not self._entered: + __tracebackhide__ = True + raise RuntimeError("Cannot exit %r without entering first" % self) + super(WarningsRecorder, self).__exit__(*exc_info) + + +class WarningsChecker(WarningsRecorder): + def __init__(self, expected_warning=None): + super(WarningsChecker, self).__init__() + + msg = ("exceptions must be old-style classes or " + "derived from Warning, not %s") + if isinstance(expected_warning, tuple): + for exc in expected_warning: + if not inspect.isclass(exc): + raise TypeError(msg % type(exc)) + elif inspect.isclass(expected_warning): + expected_warning = (expected_warning,) + elif expected_warning is not None: + raise TypeError(msg % type(expected_warning)) + + self.expected_warning = expected_warning + + def __exit__(self, *exc_info): + super(WarningsChecker, self).__exit__(*exc_info) + + # only check if we're not currently handling an exception + if all(a is None for a in exc_info): + if self.expected_warning is not None: + if not any(issubclass(r.category, self.expected_warning) + for r in self): + __tracebackhide__ = True + from _pytest.runner import fail + fail("DID NOT WARN. No warnings of type {0} was emitted. " + "The list of emitted warnings is: {1}.".format( + self.expected_warning, + [each.message for each in self])) diff --git a/venv/lib/python3.5/site-packages/_pytest/resultlog.py b/venv/lib/python3.5/site-packages/_pytest/resultlog.py new file mode 100644 index 0000000..3e4b00c --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/resultlog.py @@ -0,0 +1,108 @@ +""" log machine-parseable test session result information in a plain +text file. 
+""" +from __future__ import absolute_import, division, print_function + +import py +import os + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting", "resultlog plugin options") + group.addoption('--resultlog', '--result-log', action="store", + metavar="path", default=None, + help="DEPRECATED path for machine-readable result log.") + +def pytest_configure(config): + resultlog = config.option.resultlog + # prevent opening resultlog on slave nodes (xdist) + if resultlog and not hasattr(config, 'slaveinput'): + dirname = os.path.dirname(os.path.abspath(resultlog)) + if not os.path.isdir(dirname): + os.makedirs(dirname) + logfile = open(resultlog, 'w', 1) # line buffered + config._resultlog = ResultLog(config, logfile) + config.pluginmanager.register(config._resultlog) + + from _pytest.deprecated import RESULT_LOG + config.warn('C1', RESULT_LOG) + +def pytest_unconfigure(config): + resultlog = getattr(config, '_resultlog', None) + if resultlog: + resultlog.logfile.close() + del config._resultlog + config.pluginmanager.unregister(resultlog) + +def generic_path(item): + chain = item.listchain() + gpath = [chain[0].name] + fspath = chain[0].fspath + fspart = False + for node in chain[1:]: + newfspath = node.fspath + if newfspath == fspath: + if fspart: + gpath.append(':') + fspart = False + else: + gpath.append('.') + else: + gpath.append('/') + fspart = True + name = node.name + if name[0] in '([': + gpath.pop() + gpath.append(name) + fspath = newfspath + return ''.join(gpath) + +class ResultLog(object): + def __init__(self, config, logfile): + self.config = config + self.logfile = logfile # preferably line buffered + + def write_log_entry(self, testpath, lettercode, longrepr): + print("%s %s" % (lettercode, testpath), file=self.logfile) + for line in longrepr.splitlines(): + print(" %s" % line, file=self.logfile) + + def log_outcome(self, report, lettercode, longrepr): + testpath = getattr(report, 'nodeid', None) + if testpath is None: + testpath = report.fspath + self.write_log_entry(testpath, lettercode, longrepr) + + def pytest_runtest_logreport(self, report): + if report.when != "call" and report.passed: + return + res = self.config.hook.pytest_report_teststatus(report=report) + code = res[1] + if code == 'x': + longrepr = str(report.longrepr) + elif code == 'X': + longrepr = '' + elif report.passed: + longrepr = "" + elif report.failed: + longrepr = str(report.longrepr) + elif report.skipped: + longrepr = str(report.longrepr[2]) + self.log_outcome(report, code, longrepr) + + def pytest_collectreport(self, report): + if not report.passed: + if report.failed: + code = "F" + longrepr = str(report.longrepr) + else: + assert report.skipped + code = "S" + longrepr = "%s:%d: %s" % report.longrepr + self.log_outcome(report, code, longrepr) + + def pytest_internalerror(self, excrepr): + reprcrash = getattr(excrepr, 'reprcrash', None) + path = getattr(reprcrash, "path", None) + if path is None: + path = "cwd:%s" % py.path.local() + self.write_log_entry(path, '!', str(excrepr)) diff --git a/venv/lib/python3.5/site-packages/_pytest/runner.py b/venv/lib/python3.5/site-packages/_pytest/runner.py new file mode 100644 index 0000000..fd0b549 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/runner.py @@ -0,0 +1,580 @@ +""" basic collect and runtest protocol implementations """ +from __future__ import absolute_import, division, print_function + +import bdb +import sys +from time import time + +import py +from _pytest._code.code import TerminalRepr, ExceptionInfo + + + +# 
+# pytest plugin hooks + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting", "reporting", after="general") + group.addoption('--durations', + action="store", type=int, default=None, metavar="N", + help="show N slowest setup/test durations (N=0 for all)."), + +def pytest_terminal_summary(terminalreporter): + durations = terminalreporter.config.option.durations + if durations is None: + return + tr = terminalreporter + dlist = [] + for replist in tr.stats.values(): + for rep in replist: + if hasattr(rep, 'duration'): + dlist.append(rep) + if not dlist: + return + dlist.sort(key=lambda x: x.duration) + dlist.reverse() + if not durations: + tr.write_sep("=", "slowest test durations") + else: + tr.write_sep("=", "slowest %s test durations" % durations) + dlist = dlist[:durations] + + for rep in dlist: + nodeid = rep.nodeid.replace("::()::", "::") + tr.write_line("%02.2fs %-8s %s" % + (rep.duration, rep.when, nodeid)) + +def pytest_sessionstart(session): + session._setupstate = SetupState() +def pytest_sessionfinish(session): + session._setupstate.teardown_all() + +class NodeInfo: + def __init__(self, location): + self.location = location + +def pytest_runtest_protocol(item, nextitem): + item.ihook.pytest_runtest_logstart( + nodeid=item.nodeid, location=item.location, + ) + runtestprotocol(item, nextitem=nextitem) + return True + +def runtestprotocol(item, log=True, nextitem=None): + hasrequest = hasattr(item, "_request") + if hasrequest and not item._request: + item._initrequest() + rep = call_and_report(item, "setup", log) + reports = [rep] + if rep.passed: + if item.config.option.setupshow: + show_test_item(item) + if not item.config.option.setuponly: + reports.append(call_and_report(item, "call", log)) + reports.append(call_and_report(item, "teardown", log, + nextitem=nextitem)) + # after all teardown hooks have been called + # want funcargs and request info to go away + if hasrequest: + item._request = False + item.funcargs = None + return reports + +def show_test_item(item): + """Show test function, parameters and the fixtures of the test item.""" + tw = item.config.get_terminal_writer() + tw.line() + tw.write(' ' * 8) + tw.write(item._nodeid) + used_fixtures = sorted(item._fixtureinfo.name2fixturedefs.keys()) + if used_fixtures: + tw.write(' (fixtures used: {0})'.format(', '.join(used_fixtures))) + +def pytest_runtest_setup(item): + item.session._setupstate.prepare(item) + +def pytest_runtest_call(item): + try: + item.runtest() + except Exception: + # Store trace info to allow postmortem debugging + type, value, tb = sys.exc_info() + tb = tb.tb_next # Skip *this* frame + sys.last_type = type + sys.last_value = value + sys.last_traceback = tb + del tb # Get rid of it in this namespace + raise + +def pytest_runtest_teardown(item, nextitem): + item.session._setupstate.teardown_exact(item, nextitem) + +def pytest_report_teststatus(report): + if report.when in ("setup", "teardown"): + if report.failed: + # category, shortletter, verbose-word + return "error", "E", "ERROR" + elif report.skipped: + return "skipped", "s", "SKIPPED" + else: + return "", "", "" + + +# +# Implementation + +def call_and_report(item, when, log=True, **kwds): + call = call_runtest_hook(item, when, **kwds) + hook = item.ihook + report = hook.pytest_runtest_makereport(item=item, call=call) + if log: + hook.pytest_runtest_logreport(report=report) + if check_interactive_exception(call, report): + hook.pytest_exception_interact(node=item, call=call, report=report) + return report + +def 
check_interactive_exception(call, report): + return call.excinfo and not ( + hasattr(report, "wasxfail") or + call.excinfo.errisinstance(skip.Exception) or + call.excinfo.errisinstance(bdb.BdbQuit)) + +def call_runtest_hook(item, when, **kwds): + hookname = "pytest_runtest_" + when + ihook = getattr(item.ihook, hookname) + return CallInfo(lambda: ihook(item=item, **kwds), when=when) + +class CallInfo: + """ Result/Exception info of a function invocation. """ + #: None or ExceptionInfo object. + excinfo = None + def __init__(self, func, when): + #: context of invocation: one of "setup", "call", + #: "teardown", "memocollect" + self.when = when + self.start = time() + try: + self.result = func() + except KeyboardInterrupt: + self.stop = time() + raise + except: + self.excinfo = ExceptionInfo() + self.stop = time() + + def __repr__(self): + if self.excinfo: + status = "exception: %s" % str(self.excinfo.value) + else: + status = "result: %r" % (self.result,) + return "<CallInfo when=%r %s>" % (self.when, status) + +def getslaveinfoline(node): + try: + return node._slaveinfocache + except AttributeError: + d = node.slaveinfo + ver = "%s.%s.%s" % d['version_info'][:3] + node._slaveinfocache = s = "[%s] %s -- Python %s %s" % ( + d['id'], d['sysplatform'], ver, d['executable']) + return s + +class BaseReport(object): + + def __init__(self, **kw): + self.__dict__.update(kw) + + def toterminal(self, out): + if hasattr(self, 'node'): + out.line(getslaveinfoline(self.node)) + + longrepr = self.longrepr + if longrepr is None: + return + + if hasattr(longrepr, 'toterminal'): + longrepr.toterminal(out) + else: + try: + out.line(longrepr) + except UnicodeEncodeError: + out.line("<unprintable longrepr>") + + def get_sections(self, prefix): + for name, content in self.sections: + if name.startswith(prefix): + yield prefix, content + + @property + def longreprtext(self): + """ + Read-only property that returns the full string representation + of ``longrepr``. + + .. versionadded:: 3.0 + """ + tw = py.io.TerminalWriter(stringio=True) + tw.hasmarkup = False + self.toterminal(tw) + exc = tw.stringio.getvalue() + return exc.strip() + + @property + def capstdout(self): + """Return captured text from stdout, if capturing is enabled + + .. versionadded:: 3.0 + """ + return ''.join(content for (prefix, content) in self.get_sections('Captured stdout')) + + @property + def capstderr(self): + """Return captured text from stderr, if capturing is enabled + + ..
versionadded:: 3.0 + """ + return ''.join(content for (prefix, content) in self.get_sections('Captured stderr')) + + passed = property(lambda x: x.outcome == "passed") + failed = property(lambda x: x.outcome == "failed") + skipped = property(lambda x: x.outcome == "skipped") + + @property + def fspath(self): + return self.nodeid.split("::")[0] + +def pytest_runtest_makereport(item, call): + when = call.when + duration = call.stop-call.start + keywords = dict([(x,1) for x in item.keywords]) + excinfo = call.excinfo + sections = [] + if not call.excinfo: + outcome = "passed" + longrepr = None + else: + if not isinstance(excinfo, ExceptionInfo): + outcome = "failed" + longrepr = excinfo + elif excinfo.errisinstance(skip.Exception): + outcome = "skipped" + r = excinfo._getreprcrash() + longrepr = (str(r.path), r.lineno, r.message) + else: + outcome = "failed" + if call.when == "call": + longrepr = item.repr_failure(excinfo) + else: # exception in setup or teardown + longrepr = item._repr_failure_py(excinfo, + style=item.config.option.tbstyle) + for rwhen, key, content in item._report_sections: + sections.append(("Captured %s %s" %(key, rwhen), content)) + return TestReport(item.nodeid, item.location, + keywords, outcome, longrepr, when, + sections, duration) + +class TestReport(BaseReport): + """ Basic test report object (also used for setup and teardown calls if + they fail). + """ + def __init__(self, nodeid, location, keywords, outcome, + longrepr, when, sections=(), duration=0, **extra): + #: normalized collection node id + self.nodeid = nodeid + + #: a (filesystempath, lineno, domaininfo) tuple indicating the + #: actual location of a test item - it might be different from the + #: collected one e.g. if a method is inherited from a different module. + self.location = location + + #: a name -> value dictionary containing all keywords and + #: markers associated with a test invocation. + self.keywords = keywords + + #: test outcome, always one of "passed", "failed", "skipped". + self.outcome = outcome + + #: None or a failure representation. + self.longrepr = longrepr + + #: one of 'setup', 'call', 'teardown' to indicate runtest phase. + self.when = when + + #: list of pairs ``(str, str)`` of extra information which needs to + #: marshallable. Used by pytest to add captured text + #: from ``stdout`` and ``stderr``, but may be used by other plugins + #: to add arbitrary information to reports. 
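+ #: For example (an illustrative entry, not from the original source), the + #: capture plugin might add a pair like ("Captured stdout call", "hello\n").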
+ self.sections = list(sections) + + #: time it took to run just the test + self.duration = duration + + self.__dict__.update(extra) + + def __repr__(self): + return "<TestReport %r when=%r outcome=%r>" % ( + self.nodeid, self.when, self.outcome) + +class TeardownErrorReport(BaseReport): + outcome = "failed" + when = "teardown" + def __init__(self, longrepr, **extra): + self.longrepr = longrepr + self.sections = [] + self.__dict__.update(extra) + +def pytest_make_collect_report(collector): + call = CallInfo( + lambda: list(collector.collect()), + 'collect') + longrepr = None + if not call.excinfo: + outcome = "passed" + else: + from _pytest import nose + skip_exceptions = (Skipped,) + nose.get_skip_exceptions() + if call.excinfo.errisinstance(skip_exceptions): + outcome = "skipped" + r = collector._repr_failure_py(call.excinfo, "line").reprcrash + longrepr = (str(r.path), r.lineno, r.message) + else: + outcome = "failed" + errorinfo = collector.repr_failure(call.excinfo) + if not hasattr(errorinfo, "toterminal"): + errorinfo = CollectErrorRepr(errorinfo) + longrepr = errorinfo + rep = CollectReport(collector.nodeid, outcome, longrepr, + getattr(call, 'result', None)) + rep.call = call # see collect_one_node + return rep + + +class CollectReport(BaseReport): + def __init__(self, nodeid, outcome, longrepr, result, + sections=(), **extra): + self.nodeid = nodeid + self.outcome = outcome + self.longrepr = longrepr + self.result = result or [] + self.sections = list(sections) + self.__dict__.update(extra) + + @property + def location(self): + return (self.fspath, None, self.fspath) + + def __repr__(self): + return "<CollectReport %r lenresult=%s outcome=%r>" % ( + self.nodeid, len(self.result), self.outcome) + +class CollectErrorRepr(TerminalRepr): + def __init__(self, msg): + self.longrepr = msg + def toterminal(self, out): + out.line(self.longrepr, red=True) + +class SetupState(object): + """ shared state for setting up/tearing down test items or collectors. """ + def __init__(self): + self.stack = [] + self._finalizers = {} + + def addfinalizer(self, finalizer, colitem): + """ attach a finalizer to the given colitem. + if colitem is None, this will add a finalizer that + is called at the end of teardown_all(). + """ + assert colitem and not isinstance(colitem, tuple) + assert py.builtin.callable(finalizer) + #assert colitem in self.stack # some unit tests don't setup stack :/ + self._finalizers.setdefault(colitem, []).append(finalizer) + + def _pop_and_teardown(self): + colitem = self.stack.pop() + self._teardown_with_finalization(colitem) + + def _callfinalizers(self, colitem): + finalizers = self._finalizers.pop(colitem, None) + exc = None + while finalizers: + fin = finalizers.pop() + try: + fin() + except Exception: + # XXX Only first exception will be seen by user, + # ideally all should be reported.
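+ # e.g. (illustrative) with finalizers [f1, f2, f3], f3 runs first; if f3 + # and f1 both raise, only f3's exception is re-raised below.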
+ if exc is None: + exc = sys.exc_info() + if exc: + py.builtin._reraise(*exc) + + def _teardown_with_finalization(self, colitem): + self._callfinalizers(colitem) + if hasattr(colitem, "teardown"): + colitem.teardown() + for colitem in self._finalizers: + assert colitem is None or colitem in self.stack \ + or isinstance(colitem, tuple) + + def teardown_all(self): + while self.stack: + self._pop_and_teardown() + for key in list(self._finalizers): + self._teardown_with_finalization(key) + assert not self._finalizers + + def teardown_exact(self, item, nextitem): + needed_collectors = nextitem and nextitem.listchain() or [] + self._teardown_towards(needed_collectors) + + def _teardown_towards(self, needed_collectors): + while self.stack: + if self.stack == needed_collectors[:len(self.stack)]: + break + self._pop_and_teardown() + + def prepare(self, colitem): + """ setup objects along the collector chain to the test-method + and teardown previously setup objects.""" + needed_collectors = colitem.listchain() + self._teardown_towards(needed_collectors) + + # check if the last collection node has raised an error + for col in self.stack: + if hasattr(col, '_prepare_exc'): + py.builtin._reraise(*col._prepare_exc) + for col in needed_collectors[len(self.stack):]: + self.stack.append(col) + try: + col.setup() + except Exception: + col._prepare_exc = sys.exc_info() + raise + +def collect_one_node(collector): + ihook = collector.ihook + ihook.pytest_collectstart(collector=collector) + rep = ihook.pytest_make_collect_report(collector=collector) + call = rep.__dict__.pop("call", None) + if call and check_interactive_exception(call, rep): + ihook.pytest_exception_interact(node=collector, call=call, report=rep) + return rep + + +# ============================================================= +# Test OutcomeExceptions and helpers for creating them. + + +class OutcomeException(Exception): + """ OutcomeException and its subclass instances indicate and + contain info about test and collection outcomes. + """ + def __init__(self, msg=None, pytrace=True): + Exception.__init__(self, msg) + self.msg = msg + self.pytrace = pytrace + + def __repr__(self): + if self.msg: + val = self.msg + if isinstance(val, bytes): + val = py._builtin._totext(val, errors='replace') + return val + return "<%s instance>" %(self.__class__.__name__,) + __str__ = __repr__ + +class Skipped(OutcomeException): + # XXX hackish: on 3k we fake to live in the builtins + # in order to have Skipped exception printing shorter/nicer + __module__ = 'builtins' + + def __init__(self, msg=None, pytrace=True, allow_module_level=False): + OutcomeException.__init__(self, msg=msg, pytrace=pytrace) + self.allow_module_level = allow_module_level + + +class Failed(OutcomeException): + """ raised from an explicit call to pytest.fail() """ + __module__ = 'builtins' + + +class Exit(KeyboardInterrupt): + """ raised for immediate program exits (no tracebacks/summaries)""" + def __init__(self, msg="unknown reason"): + self.msg = msg + KeyboardInterrupt.__init__(self, msg) + +# exposed helper methods + +def exit(msg): + """ exit testing process as if KeyboardInterrupt was triggered. """ + __tracebackhide__ = True + raise Exit(msg) + + +exit.Exception = Exit + + +def skip(msg=""): + """ skip an executing test with the given message. Note: it's usually + better to use the pytest.mark.skipif marker to declare a test to be + skipped under certain conditions like mismatching platforms or + dependencies. See the pytest_skipping plugin for details. 
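+ + For example (an illustrative usage note, not in the original docstring): + calling ``pytest.skip("requires network access")`` inside a test body + aborts that test immediately and reports it as skipped.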
+ """ + __tracebackhide__ = True + raise Skipped(msg=msg) + + +skip.Exception = Skipped + + +def fail(msg="", pytrace=True): + """ explicitly fail an currently-executing test with the given Message. + + :arg pytrace: if false the msg represents the full failure information + and no python traceback will be reported. + """ + __tracebackhide__ = True + raise Failed(msg=msg, pytrace=pytrace) + + +fail.Exception = Failed + + +def importorskip(modname, minversion=None): + """ return imported module if it has at least "minversion" as its + __version__ attribute. If no minversion is specified the a skip + is only triggered if the module can not be imported. + """ + import warnings + __tracebackhide__ = True + compile(modname, '', 'eval') # to catch syntaxerrors + should_skip = False + + with warnings.catch_warnings(): + # make sure to ignore ImportWarnings that might happen because + # of existing directories with the same name we're trying to + # import but without a __init__.py file + warnings.simplefilter('ignore') + try: + __import__(modname) + except ImportError: + # Do not raise chained exception here(#1485) + should_skip = True + if should_skip: + raise Skipped("could not import %r" %(modname,), allow_module_level=True) + mod = sys.modules[modname] + if minversion is None: + return mod + verattr = getattr(mod, '__version__', None) + if minversion is not None: + try: + from pkg_resources import parse_version as pv + except ImportError: + raise Skipped("we have a required version for %r but can not import " + "pkg_resources to parse version strings." % (modname,), + allow_module_level=True) + if verattr is None or pv(verattr) < pv(minversion): + raise Skipped("module %r has __version__ %r, required is: %r" %( + modname, verattr, minversion), allow_module_level=True) + return mod diff --git a/venv/lib/python3.5/site-packages/_pytest/setuponly.py b/venv/lib/python3.5/site-packages/_pytest/setuponly.py new file mode 100644 index 0000000..15e195a --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/setuponly.py @@ -0,0 +1,74 @@ +from __future__ import absolute_import, division, print_function + +import pytest +import sys + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption('--setuponly', '--setup-only', action="store_true", + help="only setup fixtures, do not execute tests.") + group.addoption('--setupshow', '--setup-show', action="store_true", + help="show setup of fixtures while executing tests.") + + +@pytest.hookimpl(hookwrapper=True) +def pytest_fixture_setup(fixturedef, request): + yield + config = request.config + if config.option.setupshow: + if hasattr(request, 'param'): + # Save the fixture parameter so ._show_fixture_action() can + # display it now and during the teardown (in .finish()). 
+ if fixturedef.ids: + if callable(fixturedef.ids): + fixturedef.cached_param = fixturedef.ids(request.param) + else: + fixturedef.cached_param = fixturedef.ids[ + request.param_index] + else: + fixturedef.cached_param = request.param + _show_fixture_action(fixturedef, 'SETUP') + + +def pytest_fixture_post_finalizer(fixturedef): + if hasattr(fixturedef, "cached_result"): + config = fixturedef._fixturemanager.config + if config.option.setupshow: + _show_fixture_action(fixturedef, 'TEARDOWN') + if hasattr(fixturedef, "cached_param"): + del fixturedef.cached_param + + +def _show_fixture_action(fixturedef, msg): + config = fixturedef._fixturemanager.config + capman = config.pluginmanager.getplugin('capturemanager') + if capman: + out, err = capman.suspendcapture() + + tw = config.get_terminal_writer() + tw.line() + tw.write(' ' * 2 * fixturedef.scopenum) + tw.write('{step} {scope} {fixture}'.format( + step=msg.ljust(8), # align the output to TEARDOWN + scope=fixturedef.scope[0].upper(), + fixture=fixturedef.argname)) + + if msg == 'SETUP': + deps = sorted(arg for arg in fixturedef.argnames if arg != 'request') + if deps: + tw.write(' (fixtures used: {0})'.format(', '.join(deps))) + + if hasattr(fixturedef, 'cached_param'): + tw.write('[{0}]'.format(fixturedef.cached_param)) + + if capman: + capman.resumecapture() + sys.stdout.write(out) + sys.stderr.write(err) + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config): + if config.option.setuponly: + config.option.setupshow = True diff --git a/venv/lib/python3.5/site-packages/_pytest/setupplan.py b/venv/lib/python3.5/site-packages/_pytest/setupplan.py new file mode 100644 index 0000000..e11bd40 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/setupplan.py @@ -0,0 +1,25 @@ +from __future__ import absolute_import, division, print_function + +import pytest + + +def pytest_addoption(parser): + group = parser.getgroup("debugconfig") + group.addoption('--setupplan', '--setup-plan', action="store_true", + help="show what fixtures and tests would be executed but " + "don't execute anything.") + + +@pytest.hookimpl(tryfirst=True) +def pytest_fixture_setup(fixturedef, request): + # Will return a dummy fixture if the setuponly option is provided. + if request.config.option.setupplan: + fixturedef.cached_result = (None, None, None) + return fixturedef.cached_result + + +@pytest.hookimpl(tryfirst=True) +def pytest_cmdline_main(config): + if config.option.setupplan: + config.option.setuponly = True + config.option.setupshow = True diff --git a/venv/lib/python3.5/site-packages/_pytest/skipping.py b/venv/lib/python3.5/site-packages/_pytest/skipping.py new file mode 100644 index 0000000..5af1ca4 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/skipping.py @@ -0,0 +1,380 @@ +""" support for skip/xfail functions and markers. 
""" +from __future__ import absolute_import, division, print_function + +import os +import sys +import traceback + +import py +from _pytest.config import hookimpl +from _pytest.mark import MarkInfo, MarkDecorator +from _pytest.runner import fail, skip + +def pytest_addoption(parser): + group = parser.getgroup("general") + group.addoption('--runxfail', + action="store_true", dest="runxfail", default=False, + help="run tests even if they are marked xfail") + + parser.addini("xfail_strict", "default for the strict parameter of xfail " + "markers when not given explicitly (default: " + "False)", + default=False, + type="bool") + + +def pytest_configure(config): + if config.option.runxfail: + # yay a hack + import pytest + old = pytest.xfail + config._cleanup.append(lambda: setattr(pytest, "xfail", old)) + + def nop(*args, **kwargs): + pass + + nop.Exception = XFailed + setattr(pytest, "xfail", nop) + + config.addinivalue_line("markers", + "skip(reason=None): skip the given test function with an optional reason. " + "Example: skip(reason=\"no way of currently testing this\") skips the " + "test." + ) + config.addinivalue_line("markers", + "skipif(condition): skip the given test function if eval(condition) " + "results in a True value. Evaluation happens within the " + "module global context. Example: skipif('sys.platform == \"win32\"') " + "skips the test if we are on the win32 platform. see " + "http://pytest.org/latest/skipping.html" + ) + config.addinivalue_line("markers", + "xfail(condition, reason=None, run=True, raises=None, strict=False): " + "mark the test function as an expected failure if eval(condition) " + "has a True value. Optionally specify a reason for better reporting " + "and run=False if you don't even want to execute the test function. " + "If only specific exception(s) are expected, you can list them in " + "raises, and if the test fails in other ways, it will be reported as " + "a true failure. 
See http://pytest.org/latest/skipping.html" + ) + + +class XFailed(fail.Exception): + """ raised from an explicit call to pytest.xfail() """ + + +def xfail(reason=""): + """ xfail an executing test or setup functions with the given reason.""" + __tracebackhide__ = True + raise XFailed(reason) + + +xfail.Exception = XFailed + + +class MarkEvaluator: + def __init__(self, item, name): + self.item = item + self.name = name + + @property + def holder(self): + return self.item.keywords.get(self.name) + + def __bool__(self): + return bool(self.holder) + __nonzero__ = __bool__ + + def wasvalid(self): + return not hasattr(self, 'exc') + + def invalidraise(self, exc): + raises = self.get('raises') + if not raises: + return + return not isinstance(exc, raises) + + def istrue(self): + try: + return self._istrue() + except Exception: + self.exc = sys.exc_info() + if isinstance(self.exc[1], SyntaxError): + msg = [" " * (self.exc[1].offset + 4) + "^", ] + msg.append("SyntaxError: invalid syntax") + else: + msg = traceback.format_exception_only(*self.exc[:2]) + fail("Error evaluating %r expression\n" + " %s\n" + "%s" + % (self.name, self.expr, "\n".join(msg)), + pytrace=False) + + def _getglobals(self): + d = {'os': os, 'sys': sys, 'config': self.item.config} + if hasattr(self.item, 'obj'): + d.update(self.item.obj.__globals__) + return d + + def _istrue(self): + if hasattr(self, 'result'): + return self.result + if self.holder: + if self.holder.args or 'condition' in self.holder.kwargs: + self.result = False + # "holder" might be a MarkInfo or a MarkDecorator; only + # MarkInfo keeps track of all parameters it received in an + # _arglist attribute + marks = getattr(self.holder, '_marks', None) \ + or [self.holder.mark] + for _, args, kwargs in marks: + if 'condition' in kwargs: + args = (kwargs['condition'],) + for expr in args: + self.expr = expr + if isinstance(expr, py.builtin._basestring): + d = self._getglobals() + result = cached_eval(self.item.config, expr, d) + else: + if "reason" not in kwargs: + # XXX better be checked at collection time + msg = "you need to specify reason=STRING " \ + "when using booleans as conditions." 
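+ # e.g. (illustrative) @pytest.mark.skipif(sys.platform == "win32") passes + # a plain bool and lands here; add reason="..." or quote the condition.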
+ fail(msg) + result = bool(expr) + if result: + self.result = True + self.reason = kwargs.get('reason', None) + self.expr = expr + return self.result + else: + self.result = True + return getattr(self, 'result', False) + + def get(self, attr, default=None): + return self.holder.kwargs.get(attr, default) + + def getexplanation(self): + expl = getattr(self, 'reason', None) or self.get('reason', None) + if not expl: + if not hasattr(self, 'expr'): + return "" + else: + return "condition: " + str(self.expr) + return expl + + +@hookimpl(tryfirst=True) +def pytest_runtest_setup(item): + # Check if skip or skipif are specified as pytest marks + + skipif_info = item.keywords.get('skipif') + if isinstance(skipif_info, (MarkInfo, MarkDecorator)): + eval_skipif = MarkEvaluator(item, 'skipif') + if eval_skipif.istrue(): + item._evalskip = eval_skipif + skip(eval_skipif.getexplanation()) + + skip_info = item.keywords.get('skip') + if isinstance(skip_info, (MarkInfo, MarkDecorator)): + item._evalskip = True + if 'reason' in skip_info.kwargs: + skip(skip_info.kwargs['reason']) + elif skip_info.args: + skip(skip_info.args[0]) + else: + skip("unconditional skip") + + item._evalxfail = MarkEvaluator(item, 'xfail') + check_xfail_no_run(item) + + +@hookimpl(hookwrapper=True) +def pytest_pyfunc_call(pyfuncitem): + check_xfail_no_run(pyfuncitem) + outcome = yield + passed = outcome.excinfo is None + if passed: + check_strict_xfail(pyfuncitem) + + +def check_xfail_no_run(item): + """check xfail(run=False)""" + if not item.config.option.runxfail: + evalxfail = item._evalxfail + if evalxfail.istrue(): + if not evalxfail.get('run', True): + xfail("[NOTRUN] " + evalxfail.getexplanation()) + + +def check_strict_xfail(pyfuncitem): + """check xfail(strict=True) for the given PASSING test""" + evalxfail = pyfuncitem._evalxfail + if evalxfail.istrue(): + strict_default = pyfuncitem.config.getini('xfail_strict') + is_strict_xfail = evalxfail.get('strict', strict_default) + if is_strict_xfail: + del pyfuncitem._evalxfail + explanation = evalxfail.getexplanation() + fail('[XPASS(strict)] ' + explanation, pytrace=False) + + +@hookimpl(hookwrapper=True) +def pytest_runtest_makereport(item, call): + outcome = yield + rep = outcome.get_result() + evalxfail = getattr(item, '_evalxfail', None) + evalskip = getattr(item, '_evalskip', None) + # unittest special case, see setting of _unexpectedsuccess + if hasattr(item, '_unexpectedsuccess') and rep.when == "call": + from _pytest.compat import _is_unittest_unexpected_success_a_failure + if item._unexpectedsuccess: + rep.longrepr = "Unexpected success: {0}".format(item._unexpectedsuccess) + else: + rep.longrepr = "Unexpected success" + if _is_unittest_unexpected_success_a_failure(): + rep.outcome = "failed" + else: + rep.outcome = "passed" + rep.wasxfail = rep.longrepr + elif item.config.option.runxfail: + pass # don't interfere + elif call.excinfo and call.excinfo.errisinstance(xfail.Exception): + rep.wasxfail = "reason: " + call.excinfo.value.msg + rep.outcome = "skipped" + elif evalxfail and not rep.skipped and evalxfail.wasvalid() and \ + evalxfail.istrue(): + if call.excinfo: + if evalxfail.invalidraise(call.excinfo.value): + rep.outcome = "failed" + else: + rep.outcome = "skipped" + rep.wasxfail = evalxfail.getexplanation() + elif call.when == "call": + strict_default = item.config.getini('xfail_strict') + is_strict_xfail = evalxfail.get('strict', strict_default) + explanation = evalxfail.getexplanation() + if is_strict_xfail: + rep.outcome = "failed" + rep.longrepr =
"[XPASS(strict)] {0}".format(explanation) + else: + rep.outcome = "passed" + rep.wasxfail = explanation + elif evalskip is not None and rep.skipped and type(rep.longrepr) is tuple: + # skipped by mark.skipif; change the location of the failure + # to point to the item definition, otherwise it will display + # the location of where the skip exception was raised within pytest + filename, line, reason = rep.longrepr + filename, line = item.location[:2] + rep.longrepr = filename, line, reason + +# called by terminalreporter progress reporting +def pytest_report_teststatus(report): + if hasattr(report, "wasxfail"): + if report.skipped: + return "xfailed", "x", "xfail" + elif report.passed: + return "xpassed", "X", ("XPASS", {'yellow': True}) + +# called by the terminalreporter instance/plugin +def pytest_terminal_summary(terminalreporter): + tr = terminalreporter + if not tr.reportchars: + #for name in "xfailed skipped failed xpassed": + # if not tr.stats.get(name, 0): + # tr.write_line("HINT: use '-r' option to see extra " + # "summary info about tests") + # break + return + + lines = [] + for char in tr.reportchars: + if char == "x": + show_xfailed(terminalreporter, lines) + elif char == "X": + show_xpassed(terminalreporter, lines) + elif char in "fF": + show_simple(terminalreporter, lines, 'failed', "FAIL %s") + elif char in "sS": + show_skipped(terminalreporter, lines) + elif char == "E": + show_simple(terminalreporter, lines, 'error', "ERROR %s") + elif char == 'p': + show_simple(terminalreporter, lines, 'passed', "PASSED %s") + + if lines: + tr._tw.sep("=", "short test summary info") + for line in lines: + tr._tw.line(line) + + +def show_simple(terminalreporter, lines, stat, format): + failed = terminalreporter.stats.get(stat) + if failed: + for rep in failed: + pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid) + lines.append(format % (pos,)) + + +def show_xfailed(terminalreporter, lines): + xfailed = terminalreporter.stats.get("xfailed") + if xfailed: + for rep in xfailed: + pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid) + reason = rep.wasxfail + lines.append("XFAIL %s" % (pos,)) + if reason: + lines.append(" " + str(reason)) + + +def show_xpassed(terminalreporter, lines): + xpassed = terminalreporter.stats.get("xpassed") + if xpassed: + for rep in xpassed: + pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid) + reason = rep.wasxfail + lines.append("XPASS %s %s" % (pos, reason)) + + +def cached_eval(config, expr, d): + if not hasattr(config, '_evalcache'): + config._evalcache = {} + try: + return config._evalcache[expr] + except KeyError: + import _pytest._code + exprcode = _pytest._code.compile(expr, mode="eval") + config._evalcache[expr] = x = eval(exprcode, d) + return x + + +def folded_skips(skipped): + d = {} + for event in skipped: + key = event.longrepr + assert len(key) == 3, (event, key) + d.setdefault(key, []).append(event) + l = [] + for key, events in d.items(): + l.append((len(events),) + key) + return l + + +def show_skipped(terminalreporter, lines): + tr = terminalreporter + skipped = tr.stats.get('skipped', []) + if skipped: + #if not tr.hasopt('skipped'): + # tr.write_line( + # "%d skipped tests, specify -rs for more info" % + # len(skipped)) + # return + fskips = folded_skips(skipped) + if fskips: + #tr.write_sep("_", "skipped test summary") + for num, fspath, lineno, reason in fskips: + if reason.startswith("Skipped: "): + reason = reason[9:] + lines.append( + "SKIP [%d] %s:%d: %s" % + (num, fspath, lineno, reason)) diff --git 
a/venv/lib/python3.5/site-packages/_pytest/terminal.py b/venv/lib/python3.5/site-packages/_pytest/terminal.py new file mode 100644 index 0000000..af89d0f --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/terminal.py @@ -0,0 +1,627 @@ +""" terminal reporting of the full testing process. + +This is a good source for looking at the various reporting hooks. +""" +from __future__ import absolute_import, division, print_function + +import itertools +from _pytest.main import EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, \ + EXIT_USAGEERROR, EXIT_NOTESTSCOLLECTED +import pytest +import py +import sys +import time +import platform + +import _pytest._pluggy as pluggy + + +def pytest_addoption(parser): + group = parser.getgroup("terminal reporting", "reporting", after="general") + group._addoption('-v', '--verbose', action="count", + dest="verbose", default=0, help="increase verbosity."), + group._addoption('-q', '--quiet', action="count", + dest="quiet", default=0, help="decrease verbosity."), + group._addoption('-r', + action="store", dest="reportchars", default='', metavar="chars", + help="show extra test summary info as specified by chars (f)ailed, " + "(E)error, (s)skipped, (x)failed, (X)passed, " + "(p)passed, (P)passed with output, (a)all except pP. " + "Warnings are displayed at all times except when " + "--disable-warnings is set") + group._addoption('--disable-warnings', '--disable-pytest-warnings', default=False, + dest='disable_warnings', action='store_true', + help='disable warnings summary') + group._addoption('-l', '--showlocals', + action="store_true", dest="showlocals", default=False, + help="show locals in tracebacks (disabled by default).") + group._addoption('--tb', metavar="style", + action="store", dest="tbstyle", default='auto', + choices=['auto', 'long', 'short', 'no', 'line', 'native'], + help="traceback print mode (auto/long/short/line/native/no).") + group._addoption('--fulltrace', '--full-trace', + action="store_true", default=False, + help="don't cut any tracebacks (default is to cut).") + group._addoption('--color', metavar="color", + action="store", dest="color", default='auto', + choices=['yes', 'no', 'auto'], + help="color terminal output (yes/no/auto).") + +def pytest_configure(config): + config.option.verbose -= config.option.quiet + reporter = TerminalReporter(config, sys.stdout) + config.pluginmanager.register(reporter, 'terminalreporter') + if config.option.debug or config.option.traceconfig: + def mywriter(tags, args): + msg = " ".join(map(str, args)) + reporter.write_line("[traceconfig] " + msg) + config.trace.root.setprocessor("pytest:config", mywriter) + +def getreportopt(config): + reportopts = "" + reportchars = config.option.reportchars + if not config.option.disable_warnings and 'w' not in reportchars: + reportchars += 'w' + elif config.option.disable_warnings and 'w' in reportchars: + reportchars = reportchars.replace('w', '') + if reportchars: + for char in reportchars: + if char not in reportopts and char != 'a': + reportopts += char + elif char == 'a': + reportopts = 'fEsxXw' + return reportopts + +def pytest_report_teststatus(report): + if report.passed: + letter = "." + elif report.skipped: + letter = "s" + elif report.failed: + letter = "F" + if report.when != "call": + letter = "f" + return report.outcome, letter, report.outcome.upper() + + +class WarningReport: + """ + Simple structure to hold warnings information captured by ``pytest_logwarning``. 
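+ + For example (illustrative): a warning attached to a test carries its + ``nodeid``, while a config-level warning may carry only ``fslocation``; + ``get_location()`` below prefers the node id.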
+ """ + def __init__(self, code, message, nodeid=None, fslocation=None): + """ + :param code: unused + :param str message: user friendly message about the warning + :param str|None nodeid: node id that generated the warning (see ``get_location``). + :param tuple|py.path.local fslocation: + file system location of the source of the warning (see ``get_location``). + """ + self.code = code + self.message = message + self.nodeid = nodeid + self.fslocation = fslocation + + def get_location(self, config): + """ + Returns the more user-friendly information about the location + of a warning, or None. + """ + if self.nodeid: + return self.nodeid + if self.fslocation: + if isinstance(self.fslocation, tuple) and len(self.fslocation) >= 2: + filename, linenum = self.fslocation[:2] + relpath = py.path.local(filename).relto(config.invocation_dir) + return '%s:%s' % (relpath, linenum) + else: + return str(self.fslocation) + return None + + +class TerminalReporter: + def __init__(self, config, file=None): + import _pytest.config + self.config = config + self.verbosity = self.config.option.verbose + self.showheader = self.verbosity >= 0 + self.showfspath = self.verbosity >= 0 + self.showlongtestinfo = self.verbosity > 0 + self._numcollected = 0 + + self.stats = {} + self.startdir = py.path.local() + if file is None: + file = sys.stdout + self._tw = self.writer = _pytest.config.create_terminal_writer(config, + file) + self.currentfspath = None + self.reportchars = getreportopt(config) + self.hasmarkup = self._tw.hasmarkup + self.isatty = file.isatty() + + def hasopt(self, char): + char = {'xfailed': 'x', 'skipped': 's'}.get(char, char) + return char in self.reportchars + + def write_fspath_result(self, nodeid, res): + fspath = self.config.rootdir.join(nodeid.split("::")[0]) + if fspath != self.currentfspath: + self.currentfspath = fspath + fspath = self.startdir.bestrelpath(fspath) + self._tw.line() + self._tw.write(fspath + " ") + self._tw.write(res) + + def write_ensure_prefix(self, prefix, extra="", **kwargs): + if self.currentfspath != prefix: + self._tw.line() + self.currentfspath = prefix + self._tw.write(prefix) + if extra: + self._tw.write(extra, **kwargs) + self.currentfspath = -2 + + def ensure_newline(self): + if self.currentfspath: + self._tw.line() + self.currentfspath = None + + def write(self, content, **markup): + self._tw.write(content, **markup) + + def write_line(self, line, **markup): + if not py.builtin._istext(line): + line = py.builtin.text(line, errors="replace") + self.ensure_newline() + self._tw.line(line, **markup) + + def rewrite(self, line, **markup): + line = str(line) + self._tw.write("\r" + line, **markup) + + def write_sep(self, sep, title=None, **markup): + self.ensure_newline() + self._tw.sep(sep, title, **markup) + + def section(self, title, sep="=", **kw): + self._tw.sep(sep, title, **kw) + + def line(self, msg, **kw): + self._tw.line(msg, **kw) + + def pytest_internalerror(self, excrepr): + for line in py.builtin.text(excrepr).split("\n"): + self.write_line("INTERNALERROR> " + line) + return 1 + + def pytest_logwarning(self, code, fslocation, message, nodeid): + warnings = self.stats.setdefault("warnings", []) + warning = WarningReport(code=code, fslocation=fslocation, + message=message, nodeid=nodeid) + warnings.append(warning) + + def pytest_plugin_registered(self, plugin): + if self.config.option.traceconfig: + msg = "PLUGIN registered: %s" % (plugin,) + # XXX this event may happen during setup/teardown time + # which unfortunately captures our output here + # which 
garbles our output if we use self.write_line + self.write_line(msg) + + def pytest_deselected(self, items): + self.stats.setdefault('deselected', []).extend(items) + + def pytest_runtest_logstart(self, nodeid, location): + # ensure that the path is printed before the + # 1st test of a module starts running + if self.showlongtestinfo: + line = self._locationline(nodeid, *location) + self.write_ensure_prefix(line, "") + elif self.showfspath: + fsid = nodeid.split("::")[0] + self.write_fspath_result(fsid, "") + + def pytest_runtest_logreport(self, report): + rep = report + res = self.config.hook.pytest_report_teststatus(report=rep) + cat, letter, word = res + self.stats.setdefault(cat, []).append(rep) + self._tests_ran = True + if not letter and not word: + # probably passed setup/teardown + return + if self.verbosity <= 0: + if not hasattr(rep, 'node') and self.showfspath: + self.write_fspath_result(rep.nodeid, letter) + else: + self._tw.write(letter) + else: + if isinstance(word, tuple): + word, markup = word + else: + if rep.passed: + markup = {'green':True} + elif rep.failed: + markup = {'red':True} + elif rep.skipped: + markup = {'yellow':True} + line = self._locationline(rep.nodeid, *rep.location) + if not hasattr(rep, 'node'): + self.write_ensure_prefix(line, word, **markup) + #self._tw.write(word, **markup) + else: + self.ensure_newline() + if hasattr(rep, 'node'): + self._tw.write("[%s] " % rep.node.gateway.id) + self._tw.write(word, **markup) + self._tw.write(" " + line) + self.currentfspath = -2 + + def pytest_collection(self): + if not self.isatty and self.config.option.verbose >= 1: + self.write("collecting ... ", bold=True) + + def pytest_collectreport(self, report): + if report.failed: + self.stats.setdefault("error", []).append(report) + elif report.skipped: + self.stats.setdefault("skipped", []).append(report) + items = [x for x in report.result if isinstance(x, pytest.Item)] + self._numcollected += len(items) + if self.isatty: + #self.write_fspath_result(report.nodeid, 'E') + self.report_collect() + + def report_collect(self, final=False): + if self.config.option.verbose < 0: + return + + errors = len(self.stats.get('error', [])) + skipped = len(self.stats.get('skipped', [])) + if final: + line = "collected " + else: + line = "collecting " + line += str(self._numcollected) + " item" + ('' if self._numcollected == 1 else 's') + if errors: + line += " / %d errors" % errors + if skipped: + line += " / %d skipped" % skipped + if self.isatty: + if final: + line += " \n" + self.rewrite(line, bold=True) + else: + self.write_line(line) + + def pytest_collection_modifyitems(self): + self.report_collect(True) + + @pytest.hookimpl(trylast=True) + def pytest_sessionstart(self, session): + self._sessionstarttime = time.time() + if not self.showheader: + return + self.write_sep("=", "test session starts", bold=True) + verinfo = platform.python_version() + msg = "platform %s -- Python %s" % (sys.platform, verinfo) + if hasattr(sys, 'pypy_version_info'): + verinfo = ".".join(map(str, sys.pypy_version_info[:3])) + msg += "[pypy-%s-%s]" % (verinfo, sys.pypy_version_info[3]) + msg += ", pytest-%s, py-%s, pluggy-%s" % ( + pytest.__version__, py.__version__, pluggy.__version__) + if self.verbosity > 0 or self.config.option.debug or \ + getattr(self.config.option, 'pastebin', None): + msg += " -- " + str(sys.executable) + self.write_line(msg) + lines = self.config.hook.pytest_report_header( + config=self.config, startdir=self.startdir) + lines.reverse() + for line in flatten(lines): + 
self.write_line(line) + + def pytest_report_header(self, config): + inifile = "" + if config.inifile: + inifile = " " + config.rootdir.bestrelpath(config.inifile) + lines = ["rootdir: %s, inifile:%s" % (config.rootdir, inifile)] + + plugininfo = config.pluginmanager.list_plugin_distinfo() + if plugininfo: + + lines.append( + "plugins: %s" % ", ".join(_plugin_nameversions(plugininfo))) + return lines + + def pytest_collection_finish(self, session): + if self.config.option.collectonly: + self._printcollecteditems(session.items) + if self.stats.get('failed'): + self._tw.sep("!", "collection failures") + for rep in self.stats.get('failed'): + rep.toterminal(self._tw) + return 1 + return 0 + if not self.showheader: + return + #for i, testarg in enumerate(self.config.args): + # self.write_line("test path %d: %s" %(i+1, testarg)) + + def _printcollecteditems(self, items): + # to print out items and their parent collectors + # we take care to leave out Instances aka () + # because later versions are going to get rid of them anyway + if self.config.option.verbose < 0: + if self.config.option.verbose < -1: + counts = {} + for item in items: + name = item.nodeid.split('::', 1)[0] + counts[name] = counts.get(name, 0) + 1 + for name, count in sorted(counts.items()): + self._tw.line("%s: %d" % (name, count)) + else: + for item in items: + nodeid = item.nodeid + nodeid = nodeid.replace("::()::", "::") + self._tw.line(nodeid) + return + stack = [] + indent = "" + for item in items: + needed_collectors = item.listchain()[1:] # strip root node + while stack: + if stack == needed_collectors[:len(stack)]: + break + stack.pop() + for col in needed_collectors[len(stack):]: + stack.append(col) + #if col.name == "()": + # continue + indent = (len(stack) - 1) * " " + self._tw.line("%s%s" % (indent, col)) + + @pytest.hookimpl(hookwrapper=True) + def pytest_sessionfinish(self, exitstatus): + outcome = yield + outcome.get_result() + self._tw.line("") + summary_exit_codes = ( + EXIT_OK, EXIT_TESTSFAILED, EXIT_INTERRUPTED, EXIT_USAGEERROR, + EXIT_NOTESTSCOLLECTED) + if exitstatus in summary_exit_codes: + self.config.hook.pytest_terminal_summary(terminalreporter=self, + exitstatus=exitstatus) + self.summary_errors() + self.summary_failures() + self.summary_warnings() + self.summary_passes() + if exitstatus == EXIT_INTERRUPTED: + self._report_keyboardinterrupt() + del self._keyboardinterrupt_memo + self.summary_deselected() + self.summary_stats() + + def pytest_keyboard_interrupt(self, excinfo): + self._keyboardinterrupt_memo = excinfo.getrepr(funcargs=True) + + def pytest_unconfigure(self): + if hasattr(self, '_keyboardinterrupt_memo'): + self._report_keyboardinterrupt() + + def _report_keyboardinterrupt(self): + excrepr = self._keyboardinterrupt_memo + msg = excrepr.reprcrash.message + self.write_sep("!", msg) + if "KeyboardInterrupt" in msg: + if self.config.option.fulltrace: + excrepr.toterminal(self._tw) + else: + self._tw.line("to show a full traceback on KeyboardInterrupt use --fulltrace", yellow=True) + excrepr.reprcrash.toterminal(self._tw) + + def _locationline(self, nodeid, fspath, lineno, domain): + def mkrel(nodeid): + line = self.config.cwd_relative_nodeid(nodeid) + if domain and line.endswith(domain): + line = line[:-len(domain)] + l = domain.split("[") + l[0] = l[0].replace('.', '::') # don't replace '.' 
in params + line += "[".join(l) + return line + # collect_fspath comes from testid which has a "/"-normalized path + + if fspath: + res = mkrel(nodeid).replace("::()", "") # parens-normalization + if nodeid.split("::")[0] != fspath.replace("\\", "/"): + res += " <- " + self.startdir.bestrelpath(fspath) + else: + res = "[location]" + return res + " " + + def _getfailureheadline(self, rep): + if hasattr(rep, 'location'): + fspath, lineno, domain = rep.location + return domain + else: + return "test session" # XXX? + + def _getcrashline(self, rep): + try: + return str(rep.longrepr.reprcrash) + except AttributeError: + try: + return str(rep.longrepr)[:50] + except AttributeError: + return "" + + # + # summaries for sessionfinish + # + def getreports(self, name): + l = [] + for x in self.stats.get(name, []): + if not hasattr(x, '_pdbshown'): + l.append(x) + return l + + def summary_warnings(self): + if self.hasopt("w"): + all_warnings = self.stats.get("warnings") + if not all_warnings: + return + + grouped = itertools.groupby(all_warnings, key=lambda wr: wr.get_location(self.config)) + + self.write_sep("=", "warnings summary", yellow=True, bold=False) + for location, warnings in grouped: + self._tw.line(str(location) or '') + for w in warnings: + lines = w.message.splitlines() + indented = '\n'.join(' ' + x for x in lines) + self._tw.line(indented) + self._tw.line() + self._tw.line('-- Docs: http://doc.pytest.org/en/latest/warnings.html') + + def summary_passes(self): + if self.config.option.tbstyle != "no": + if self.hasopt("P"): + reports = self.getreports('passed') + if not reports: + return + self.write_sep("=", "PASSES") + for rep in reports: + msg = self._getfailureheadline(rep) + self.write_sep("_", msg) + self._outrep_summary(rep) + + def print_teardown_sections(self, rep): + for secname, content in rep.sections: + if 'teardown' in secname: + self._tw.sep('-', secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + + def summary_failures(self): + if self.config.option.tbstyle != "no": + reports = self.getreports('failed') + if not reports: + return + self.write_sep("=", "FAILURES") + for rep in reports: + if self.config.option.tbstyle == "line": + line = self._getcrashline(rep) + self.write_line(line) + else: + msg = self._getfailureheadline(rep) + markup = {'red': True, 'bold': True} + self.write_sep("_", msg, **markup) + self._outrep_summary(rep) + for report in self.getreports(''): + if report.nodeid == rep.nodeid and report.when == 'teardown': + self.print_teardown_sections(report) + + def summary_errors(self): + if self.config.option.tbstyle != "no": + reports = self.getreports('error') + if not reports: + return + self.write_sep("=", "ERRORS") + for rep in self.stats['error']: + msg = self._getfailureheadline(rep) + if not hasattr(rep, 'when'): + # collect + msg = "ERROR collecting " + msg + elif rep.when == "setup": + msg = "ERROR at setup of " + msg + elif rep.when == "teardown": + msg = "ERROR at teardown of " + msg + self.write_sep("_", msg) + self._outrep_summary(rep) + + def _outrep_summary(self, rep): + rep.toterminal(self._tw) + for secname, content in rep.sections: + self._tw.sep("-", secname) + if content[-1:] == "\n": + content = content[:-1] + self._tw.line(content) + + def summary_stats(self): + session_duration = time.time() - self._sessionstarttime + (line, color) = build_summary_stats_line(self.stats) + msg = "%s in %.2f seconds" % (line, session_duration) + markup = {color: True, 'bold': True} + + if self.verbosity >= 0: + 
self.write_sep("=", msg, **markup) + if self.verbosity == -1: + self.write_line(msg, **markup) + + def summary_deselected(self): + if 'deselected' in self.stats: + self.write_sep("=", "%d tests deselected" % ( + len(self.stats['deselected'])), bold=True) + +def repr_pythonversion(v=None): + if v is None: + v = sys.version_info + try: + return "%s.%s.%s-%s-%s" % v + except (TypeError, ValueError): + return str(v) + +def flatten(l): + for x in l: + if isinstance(x, (list, tuple)): + for y in flatten(x): + yield y + else: + yield x + +def build_summary_stats_line(stats): + keys = ("failed passed skipped deselected " + "xfailed xpassed warnings error").split() + unknown_key_seen = False + for key in stats.keys(): + if key not in keys: + if key: # setup/teardown reports have an empty key, ignore them + keys.append(key) + unknown_key_seen = True + parts = [] + for key in keys: + val = stats.get(key, None) + if val: + parts.append("%d %s" % (len(val), key)) + + if parts: + line = ", ".join(parts) + else: + line = "no tests ran" + + if 'failed' in stats or 'error' in stats: + color = 'red' + elif 'warnings' in stats or unknown_key_seen: + color = 'yellow' + elif 'passed' in stats: + color = 'green' + else: + color = 'yellow' + + return (line, color) + + +def _plugin_nameversions(plugininfo): + l = [] + for plugin, dist in plugininfo: + # gets us name and version! + name = '{dist.project_name}-{dist.version}'.format(dist=dist) + # questionable convenience, but it keeps things short + if name.startswith("pytest-"): + name = name[7:] + # we decided to print python package names + # they can have more than one plugin + if name not in l: + l.append(name) + return l diff --git a/venv/lib/python3.5/site-packages/_pytest/tmpdir.py b/venv/lib/python3.5/site-packages/_pytest/tmpdir.py new file mode 100644 index 0000000..5960140 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/tmpdir.py @@ -0,0 +1,126 @@ +""" support for providing temporary directories to test functions. """ +from __future__ import absolute_import, division, print_function + +import re + +import pytest +import py +from _pytest.monkeypatch import MonkeyPatch + + +class TempdirFactory: + """Factory for temporary directories under the common base temp directory. + + The base directory can be configured using the ``--basetemp`` option. + """ + + def __init__(self, config): + self.config = config + self.trace = config.trace.get("tmpdir") + + def ensuretemp(self, string, dir=1): + """ (deprecated) return temporary directory path with + the given string as the trailing part. It is usually + better to use the 'tmpdir' function argument which + provides an empty unique-per-test-invocation directory + and is guaranteed to be empty. + """ + #py.log._apiwarn(">1.1", "use tmpdir function argument") + return self.getbasetemp().ensure(string, dir=dir) + + def mktemp(self, basename, numbered=True): + """Create a subdirectory of the base temporary directory and return it. + If ``numbered``, ensure the directory is unique by adding a number + prefix greater than any existing one. + """ + basetemp = self.getbasetemp() + if not numbered: + p = basetemp.mkdir(basename) + else: + p = py.path.local.make_numbered_dir(prefix=basename, + keep=0, rootdir=basetemp, lock_timeout=None) + self.trace("mktemp", p) + return p + + def getbasetemp(self): + """ return base temporary directory. 
""" + try: + return self._basetemp + except AttributeError: + basetemp = self.config.option.basetemp + if basetemp: + basetemp = py.path.local(basetemp) + if basetemp.check(): + basetemp.remove() + basetemp.mkdir() + else: + temproot = py.path.local.get_temproot() + user = get_user() + if user: + # use a sub-directory in the temproot to speed-up + # make_numbered_dir() call + rootdir = temproot.join('pytest-of-%s' % user) + else: + rootdir = temproot + rootdir.ensure(dir=1) + basetemp = py.path.local.make_numbered_dir(prefix='pytest-', + rootdir=rootdir) + self._basetemp = t = basetemp.realpath() + self.trace("new basetemp", t) + return t + + def finish(self): + self.trace("finish") + + +def get_user(): + """Return the current user name, or None if getuser() does not work + in the current environment (see #1010). + """ + import getpass + try: + return getpass.getuser() + except (ImportError, KeyError): + return None + + +# backward compatibility +TempdirHandler = TempdirFactory + + +def pytest_configure(config): + """Create a TempdirFactory and attach it to the config object. + + This is to comply with existing plugins which expect the handler to be + available at pytest_configure time, but ideally should be moved entirely + to the tmpdir_factory session fixture. + """ + mp = MonkeyPatch() + t = TempdirFactory(config) + config._cleanup.extend([mp.undo, t.finish]) + mp.setattr(config, '_tmpdirhandler', t, raising=False) + mp.setattr(pytest, 'ensuretemp', t.ensuretemp, raising=False) + + +@pytest.fixture(scope='session') +def tmpdir_factory(request): + """Return a TempdirFactory instance for the test session. + """ + return request.config._tmpdirhandler + + +@pytest.fixture +def tmpdir(request, tmpdir_factory): + """Return a temporary directory path object + which is unique to each test function invocation, + created as a sub directory of the base temporary + directory. The returned object is a `py.path.local`_ + path object. + """ + name = request.node.name + name = re.sub(r"[\W]", "_", name) + MAXVAL = 30 + if len(name) > MAXVAL: + name = name[:MAXVAL] + x = tmpdir_factory.mktemp(name, numbered=True) + return x diff --git a/venv/lib/python3.5/site-packages/_pytest/unittest.py b/venv/lib/python3.5/site-packages/_pytest/unittest.py new file mode 100644 index 0000000..0cf0f17 --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/unittest.py @@ -0,0 +1,239 @@ +""" discovery and running of std-library "unittest" style tests. """ +from __future__ import absolute_import, division, print_function + +import sys +import traceback + +# for transferring markers +import _pytest._code +from _pytest.config import hookimpl +from _pytest.runner import fail, skip +from _pytest.python import transfer_markers, Class, Module, Function +from _pytest.skipping import MarkEvaluator, xfail + + +def pytest_pycollect_makeitem(collector, name, obj): + # has unittest been imported and is obj a subclass of its TestCase? 
+ try: + if not issubclass(obj, sys.modules["unittest"].TestCase): + return + except Exception: + return + # yes, so let's collect it + return UnitTestCase(name, parent=collector) + + +class UnitTestCase(Class): + # marker for fixturemanger.getfixtureinfo() + # to declare that our children do not support funcargs + nofuncargs = True + + def setup(self): + cls = self.obj + if getattr(cls, '__unittest_skip__', False): + return # skipped + setup = getattr(cls, 'setUpClass', None) + if setup is not None: + setup() + teardown = getattr(cls, 'tearDownClass', None) + if teardown is not None: + self.addfinalizer(teardown) + super(UnitTestCase, self).setup() + + def collect(self): + from unittest import TestLoader + cls = self.obj + if not getattr(cls, "__test__", True): + return + self.session._fixturemanager.parsefactories(self, unittest=True) + loader = TestLoader() + module = self.getparent(Module).obj + foundsomething = False + for name in loader.getTestCaseNames(self.obj): + x = getattr(self.obj, name) + if not getattr(x, '__test__', True): + continue + funcobj = getattr(x, 'im_func', x) + transfer_markers(funcobj, cls, module) + yield TestCaseFunction(name, parent=self) + foundsomething = True + + if not foundsomething: + runtest = getattr(self.obj, 'runTest', None) + if runtest is not None: + ut = sys.modules.get("twisted.trial.unittest", None) + if ut is None or runtest != ut.TestCase.runTest: + yield TestCaseFunction('runTest', parent=self) + + +class TestCaseFunction(Function): + _excinfo = None + + def setup(self): + self._testcase = self.parent.obj(self.name) + self._fix_unittest_skip_decorator() + self._obj = getattr(self._testcase, self.name) + if hasattr(self._testcase, 'setup_method'): + self._testcase.setup_method(self._obj) + if hasattr(self, "_request"): + self._request._fillfixtures() + + def _fix_unittest_skip_decorator(self): + """ + The @unittest.skip decorator calls functools.wraps(self._testcase) + The call to functools.wraps() fails unless self._testcase + has a __name__ attribute. This is usually automatically supplied + if the test is a function or method, but we need to add manually + here. + + See issue #1169 + """ + if sys.version_info[0] == 2: + setattr(self._testcase, "__name__", self.name) + + def teardown(self): + if hasattr(self._testcase, 'teardown_method'): + self._testcase.teardown_method(self._obj) + # Allow garbage collection on TestCase instance attributes. 
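+ # (illustrative note) a TestCase instance may hold large objects created + # in setUp(); dropping these references lets the GC reclaim them early.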
+ self._testcase = None + self._obj = None + + def startTest(self, testcase): + pass + + def _addexcinfo(self, rawexcinfo): + # unwrap potential exception info (see twisted trial support below) + rawexcinfo = getattr(rawexcinfo, '_rawexcinfo', rawexcinfo) + try: + excinfo = _pytest._code.ExceptionInfo(rawexcinfo) + except TypeError: + try: + try: + l = traceback.format_exception(*rawexcinfo) + l.insert(0, "NOTE: Incompatible Exception Representation, " + "displaying natively:\n\n") + fail("".join(l), pytrace=False) + except (fail.Exception, KeyboardInterrupt): + raise + except: + fail("ERROR: Unknown Incompatible Exception " + "representation:\n%r" % (rawexcinfo,), pytrace=False) + except KeyboardInterrupt: + raise + except fail.Exception: + excinfo = _pytest._code.ExceptionInfo() + self.__dict__.setdefault('_excinfo', []).append(excinfo) + + def addError(self, testcase, rawexcinfo): + self._addexcinfo(rawexcinfo) + + def addFailure(self, testcase, rawexcinfo): + self._addexcinfo(rawexcinfo) + + def addSkip(self, testcase, reason): + try: + skip(reason) + except skip.Exception: + self._evalskip = MarkEvaluator(self, 'SkipTest') + self._evalskip.result = True + self._addexcinfo(sys.exc_info()) + + def addExpectedFailure(self, testcase, rawexcinfo, reason=""): + try: + xfail(str(reason)) + except xfail.Exception: + self._addexcinfo(sys.exc_info()) + + def addUnexpectedSuccess(self, testcase, reason=""): + self._unexpectedsuccess = reason + + def addSuccess(self, testcase): + pass + + def stopTest(self, testcase): + pass + + def _handle_skip(self): + # implements the skipping machinery (see #2137) + # analog to pythons Lib/unittest/case.py:run + testMethod = getattr(self._testcase, self._testcase._testMethodName) + if (getattr(self._testcase.__class__, "__unittest_skip__", False) or + getattr(testMethod, "__unittest_skip__", False)): + # If the class or method was skipped. 
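+ # e.g. (illustrative) @unittest.skip("reason") sets __unittest_skip__ and + # __unittest_skip_why__ on the decorated test method or class.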
+ skip_why = (getattr(self._testcase.__class__, '__unittest_skip_why__', '') or + getattr(testMethod, '__unittest_skip_why__', '')) + try: # PY3, unittest2 on PY2 + self._testcase._addSkip(self, self._testcase, skip_why) + except TypeError: # PY2 + if sys.version_info[0] != 2: + raise + self._testcase._addSkip(self, skip_why) + return True + return False + + def runtest(self): + if self.config.pluginmanager.get_plugin("pdbinvoke") is None: + self._testcase(result=self) + else: + # disables tearDown and cleanups for post mortem debugging (see #1890) + if self._handle_skip(): + return + self._testcase.debug() + + def _prunetraceback(self, excinfo): + Function._prunetraceback(self, excinfo) + traceback = excinfo.traceback.filter( + lambda x: not x.frame.f_globals.get('__unittest')) + if traceback: + excinfo.traceback = traceback + + +@hookimpl(tryfirst=True) +def pytest_runtest_makereport(item, call): + if isinstance(item, TestCaseFunction): + if item._excinfo: + call.excinfo = item._excinfo.pop(0) + try: + del call.result + except AttributeError: + pass + +# twisted trial support + + +@hookimpl(hookwrapper=True) +def pytest_runtest_protocol(item): + if isinstance(item, TestCaseFunction) and \ + 'twisted.trial.unittest' in sys.modules: + ut = sys.modules['twisted.python.failure'] + Failure__init__ = ut.Failure.__init__ + check_testcase_implements_trial_reporter() + + def excstore(self, exc_value=None, exc_type=None, exc_tb=None, + captureVars=None): + if exc_value is None: + self._rawexcinfo = sys.exc_info() + else: + if exc_type is None: + exc_type = type(exc_value) + self._rawexcinfo = (exc_type, exc_value, exc_tb) + try: + Failure__init__(self, exc_value, exc_type, exc_tb, + captureVars=captureVars) + except TypeError: + Failure__init__(self, exc_value, exc_type, exc_tb) + + ut.Failure.__init__ = excstore + yield + ut.Failure.__init__ = Failure__init__ + else: + yield + + +def check_testcase_implements_trial_reporter(done=[]): + if done: + return + from zope.interface import classImplements + from twisted.trial.itrial import IReporter + classImplements(TestCaseFunction, IReporter) + done.append(1) diff --git a/venv/lib/python3.5/site-packages/_pytest/vendored_packages/__init__.py b/venv/lib/python3.5/site-packages/_pytest/vendored_packages/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.5/site-packages/_pytest/vendored_packages/pluggy.py b/venv/lib/python3.5/site-packages/_pytest/vendored_packages/pluggy.py new file mode 100644 index 0000000..aebddad --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/vendored_packages/pluggy.py @@ -0,0 +1,802 @@ +""" +PluginManager, basic initialization and tracing. + +pluggy is the cristallized core of plugin management as used +by some 150 plugins for pytest. + +Pluggy uses semantic versioning. Breaking changes are only foreseen for +Major releases (incremented X in "X.Y.Z"). If you want to use pluggy in +your project you should thus use a dependency restriction like +"pluggy>=0.1.0,<1.0" to avoid surprises. + +pluggy is concerned with hook specification, hook implementations and hook +calling. For any given hook specification a hook call invokes up to N implementations. +A hook implementation can influence its position and type of execution: +if attributed "tryfirst" or "trylast" it will be tried to execute +first or last. However, if attributed "hookwrapper" an implementation +can wrap all calls to non-hookwrapper implementations. 
A hookwrapper +can thus execute some code before and after the execution of other hooks. + +Hook specification is done by way of a regular Python function where +both the function name and the names of all its arguments are significant. +Each hook implementation function is verified against the original specification +function, including the names of all its arguments. To allow hook specifications +to evolve over the lifetime of a project, hook implementations may +accept fewer arguments. One can thus add new arguments and semantics to +a hook specification, typically without breaking +existing hook implementations. + +The chosen approach is meant to let a hook designer think carefully about +which objects are needed by an extension writer. By contrast, subclass-based +extension mechanisms often expose a lot more state and behaviour than needed, +thus restricting future developments. + +Pluggy currently consists of functionality for: + +- a way to register new hook specifications. Without a hook + specification no hook calling can be performed. + +- a registry of plugins which contain hook implementation functions. It + is possible to register plugins for which a hook specification is not yet + known and validate all hooks when the system is in a more referentially + consistent state. Setting an "optionalhook" attribute on a hook + implementation avoids PluginValidationErrors if a specification + is missing. This allows for optional integration between plugins. + +- a "hook" relay object from which you can launch 1:N calls to + registered hook implementation functions + +- a mechanism for ordering hook implementation functions + +- mechanisms for two different types of 1:N calls: "firstresult", where + the call stops at the first implementation that returns a non-None result, + and the (default) way of guaranteeing that all hook implementations + will be called and their non-None results collected. + +- mechanisms for "historic" extension points such that all newly + registered functions will receive all hook calls that happened + before their registration. + +- a mechanism for discovering plugin objects which are based on + setuptools entry points. + +- a simple tracing mechanism, including tracing of plugin calls and + their arguments. + +""" +import sys +import inspect + +__version__ = '0.4.0' + +__all__ = ["PluginManager", "PluginValidationError", "HookCallError", + "HookspecMarker", "HookimplMarker"] + +_py3 = sys.version_info > (3, 0) + + +class HookspecMarker: + """ Decorator helper class for marking functions as hook specifications. + + You can instantiate it with a project_name to get a decorator. + Calling PluginManager.add_hookspecs later will discover all marked functions + if the PluginManager uses the same project_name. + """ + + def __init__(self, project_name): + self.project_name = project_name + + def __call__(self, function=None, firstresult=False, historic=False): + """ if passed a function, directly sets attributes on the function + which will make it discoverable to add_hookspecs(). If passed no + function, returns a decorator which can be applied to a function + later using the attributes supplied. + + If firstresult is True the 1:N hook call (N being the number of registered + hook implementation functions) will stop at I<=N when the I'th function + returns a non-None result. + + If historic is True calls to a hook will be memorized and replayed + on later registered plugins.
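+ + For illustration only (an editorial sketch, not part of the original + file; "myproj" is a made-up project name):: + + hookspec = HookspecMarker("myproj") + + class MySpecs: + @hookspec(firstresult=True) + def myhook(self, arg): + "the first non-None implementation result is returned"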
+ + """ + def setattr_hookspec_opts(func): + if historic and firstresult: + raise ValueError("cannot have a historic firstresult hook") + setattr(func, self.project_name + "_spec", + dict(firstresult=firstresult, historic=historic)) + return func + + if function is not None: + return setattr_hookspec_opts(function) + else: + return setattr_hookspec_opts + + +class HookimplMarker: + """ Decorator helper class for marking functions as hook implementations. + + You can instantiate with a project_name to get a decorator. + Calling PluginManager.register later will discover all marked functions + if the PluginManager uses the same project_name. + """ + def __init__(self, project_name): + self.project_name = project_name + + def __call__(self, function=None, hookwrapper=False, optionalhook=False, + tryfirst=False, trylast=False): + + """ if passed a function, directly sets attributes on the function + which will make it discoverable to register(). If passed no function, + returns a decorator which can be applied to a function later using + the attributes supplied. + + If optionalhook is True a missing matching hook specification will not result + in an error (by default it is an error if no matching spec is found). + + If tryfirst is True this hook implementation will run as early as possible + in the chain of N hook implementations for a specfication. + + If trylast is True this hook implementation will run as late as possible + in the chain of N hook implementations. + + If hookwrapper is True the hook implementations needs to execute exactly + one "yield". The code before the yield is run early before any non-hookwrapper + function is run. The code after the yield is run after all non-hookwrapper + function have run. The yield receives an ``_CallOutcome`` object representing + the exception or result outcome of the inner calls (including other hookwrapper + calls). 
+ + """ + def setattr_hookimpl_opts(func): + setattr(func, self.project_name + "_impl", + dict(hookwrapper=hookwrapper, optionalhook=optionalhook, + tryfirst=tryfirst, trylast=trylast)) + return func + + if function is None: + return setattr_hookimpl_opts + else: + return setattr_hookimpl_opts(function) + + +def normalize_hookimpl_opts(opts): + opts.setdefault("tryfirst", False) + opts.setdefault("trylast", False) + opts.setdefault("hookwrapper", False) + opts.setdefault("optionalhook", False) + + +class _TagTracer: + def __init__(self): + self._tag2proc = {} + self.writer = None + self.indent = 0 + + def get(self, name): + return _TagTracerSub(self, (name,)) + + def format_message(self, tags, args): + if isinstance(args[-1], dict): + extra = args[-1] + args = args[:-1] + else: + extra = {} + + content = " ".join(map(str, args)) + indent = " " * self.indent + + lines = [ + "%s%s [%s]\n" % (indent, content, ":".join(tags)) + ] + + for name, value in extra.items(): + lines.append("%s %s: %s\n" % (indent, name, value)) + return lines + + def processmessage(self, tags, args): + if self.writer is not None and args: + lines = self.format_message(tags, args) + self.writer(''.join(lines)) + try: + self._tag2proc[tags](tags, args) + except KeyError: + pass + + def setwriter(self, writer): + self.writer = writer + + def setprocessor(self, tags, processor): + if isinstance(tags, str): + tags = tuple(tags.split(":")) + else: + assert isinstance(tags, tuple) + self._tag2proc[tags] = processor + + +class _TagTracerSub: + def __init__(self, root, tags): + self.root = root + self.tags = tags + + def __call__(self, *args): + self.root.processmessage(self.tags, args) + + def setmyprocessor(self, processor): + self.root.setprocessor(self.tags, processor) + + def get(self, name): + return self.__class__(self.root, self.tags + (name,)) + + +def _raise_wrapfail(wrap_controller, msg): + co = wrap_controller.gi_code + raise RuntimeError("wrap_controller at %r %s:%d %s" % + (co.co_name, co.co_filename, co.co_firstlineno, msg)) + + +def _wrapped_call(wrap_controller, func): + """ Wrap calling to a function with a generator which needs to yield + exactly once. The yield point will trigger calling the wrapped function + and return its _CallOutcome to the yield point. The generator then needs + to finish (raise StopIteration) in order for the wrapped call to complete. + """ + try: + next(wrap_controller) # first yield + except StopIteration: + _raise_wrapfail(wrap_controller, "did not yield") + call_outcome = _CallOutcome(func) + try: + wrap_controller.send(call_outcome) + _raise_wrapfail(wrap_controller, "has second yield") + except StopIteration: + pass + return call_outcome.get_result() + + +class _CallOutcome: + """ Outcome of a function call, either an exception or a proper result. + Calling the ``get_result`` method will return the result or reraise + the exception raised when the function was called. 
""" + excinfo = None + + def __init__(self, func): + try: + self.result = func() + except BaseException: + self.excinfo = sys.exc_info() + + def force_result(self, result): + self.result = result + self.excinfo = None + + def get_result(self): + if self.excinfo is None: + return self.result + else: + ex = self.excinfo + if _py3: + raise ex[1].with_traceback(ex[2]) + _reraise(*ex) # noqa + +if not _py3: + exec(""" +def _reraise(cls, val, tb): + raise cls, val, tb +""") + + +class _TracedHookExecution: + def __init__(self, pluginmanager, before, after): + self.pluginmanager = pluginmanager + self.before = before + self.after = after + self.oldcall = pluginmanager._inner_hookexec + assert not isinstance(self.oldcall, _TracedHookExecution) + self.pluginmanager._inner_hookexec = self + + def __call__(self, hook, hook_impls, kwargs): + self.before(hook.name, hook_impls, kwargs) + outcome = _CallOutcome(lambda: self.oldcall(hook, hook_impls, kwargs)) + self.after(outcome, hook.name, hook_impls, kwargs) + return outcome.get_result() + + def undo(self): + self.pluginmanager._inner_hookexec = self.oldcall + + +class PluginManager(object): + """ Core Pluginmanager class which manages registration + of plugin objects and 1:N hook calling. + + You can register new hooks by calling ``add_hookspec(module_or_class)``. + You can register plugin objects (which contain hooks) by calling + ``register(plugin)``. The Pluginmanager is initialized with a + prefix that is searched for in the names of the dict of registered + plugin objects. An optional excludefunc allows to blacklist names which + are not considered as hooks despite a matching prefix. + + For debugging purposes you can call ``enable_tracing()`` + which will subsequently send debug information to the trace helper. + """ + + def __init__(self, project_name, implprefix=None): + """ if implprefix is given implementation functions + will be recognized if their name matches the implprefix. """ + self.project_name = project_name + self._name2plugin = {} + self._plugin2hookcallers = {} + self._plugin_distinfo = [] + self.trace = _TagTracer().get("pluginmanage") + self.hook = _HookRelay(self.trace.root.get("hook")) + self._implprefix = implprefix + self._inner_hookexec = lambda hook, methods, kwargs: \ + _MultiCall(methods, kwargs, hook.spec_opts).execute() + + def _hookexec(self, hook, methods, kwargs): + # called from all hookcaller instances. + # enable_tracing will set its own wrapping function at self._inner_hookexec + return self._inner_hookexec(hook, methods, kwargs) + + def register(self, plugin, name=None): + """ Register a plugin and return its canonical name or None if the name + is blocked from registering. Raise a ValueError if the plugin is already + registered. 
""" + plugin_name = name or self.get_canonical_name(plugin) + + if plugin_name in self._name2plugin or plugin in self._plugin2hookcallers: + if self._name2plugin.get(plugin_name, -1) is None: + return # blocked plugin, return None to indicate no registration + raise ValueError("Plugin already registered: %s=%s\n%s" % + (plugin_name, plugin, self._name2plugin)) + + # XXX if an error happens we should make sure no state has been + # changed at point of return + self._name2plugin[plugin_name] = plugin + + # register matching hook implementations of the plugin + self._plugin2hookcallers[plugin] = hookcallers = [] + for name in dir(plugin): + hookimpl_opts = self.parse_hookimpl_opts(plugin, name) + if hookimpl_opts is not None: + normalize_hookimpl_opts(hookimpl_opts) + method = getattr(plugin, name) + hookimpl = HookImpl(plugin, plugin_name, method, hookimpl_opts) + hook = getattr(self.hook, name, None) + if hook is None: + hook = _HookCaller(name, self._hookexec) + setattr(self.hook, name, hook) + elif hook.has_spec(): + self._verify_hook(hook, hookimpl) + hook._maybe_apply_history(hookimpl) + hook._add_hookimpl(hookimpl) + hookcallers.append(hook) + return plugin_name + + def parse_hookimpl_opts(self, plugin, name): + method = getattr(plugin, name) + try: + res = getattr(method, self.project_name + "_impl", None) + except Exception: + res = {} + if res is not None and not isinstance(res, dict): + # false positive + res = None + elif res is None and self._implprefix and name.startswith(self._implprefix): + res = {} + return res + + def unregister(self, plugin=None, name=None): + """ unregister a plugin object and all its contained hook implementations + from internal data structures. """ + if name is None: + assert plugin is not None, "one of name or plugin needs to be specified" + name = self.get_name(plugin) + + if plugin is None: + plugin = self.get_plugin(name) + + # if self._name2plugin[name] == None registration was blocked: ignore + if self._name2plugin.get(name): + del self._name2plugin[name] + + for hookcaller in self._plugin2hookcallers.pop(plugin, []): + hookcaller._remove_plugin(plugin) + + return plugin + + def set_blocked(self, name): + """ block registrations of the given name, unregister if already registered. """ + self.unregister(name=name) + self._name2plugin[name] = None + + def is_blocked(self, name): + """ return True if the name blogs registering plugins of that name. """ + return name in self._name2plugin and self._name2plugin[name] is None + + def add_hookspecs(self, module_or_class): + """ add new hook specifications defined in the given module_or_class. + Functions are recognized if they have been decorated accordingly. 
""" + names = [] + for name in dir(module_or_class): + spec_opts = self.parse_hookspec_opts(module_or_class, name) + if spec_opts is not None: + hc = getattr(self.hook, name, None) + if hc is None: + hc = _HookCaller(name, self._hookexec, module_or_class, spec_opts) + setattr(self.hook, name, hc) + else: + # plugins registered this hook without knowing the spec + hc.set_specification(module_or_class, spec_opts) + for hookfunction in (hc._wrappers + hc._nonwrappers): + self._verify_hook(hc, hookfunction) + names.append(name) + + if not names: + raise ValueError("did not find any %r hooks in %r" % + (self.project_name, module_or_class)) + + def parse_hookspec_opts(self, module_or_class, name): + method = getattr(module_or_class, name) + return getattr(method, self.project_name + "_spec", None) + + def get_plugins(self): + """ return the set of registered plugins. """ + return set(self._plugin2hookcallers) + + def is_registered(self, plugin): + """ Return True if the plugin is already registered. """ + return plugin in self._plugin2hookcallers + + def get_canonical_name(self, plugin): + """ Return canonical name for a plugin object. Note that a plugin + may be registered under a different name which was specified + by the caller of register(plugin, name). To obtain the name + of an registered plugin use ``get_name(plugin)`` instead.""" + return getattr(plugin, "__name__", None) or str(id(plugin)) + + def get_plugin(self, name): + """ Return a plugin or None for the given name. """ + return self._name2plugin.get(name) + + def has_plugin(self, name): + """ Return True if a plugin with the given name is registered. """ + return self.get_plugin(name) is not None + + def get_name(self, plugin): + """ Return name for registered plugin or None if not registered. """ + for name, val in self._name2plugin.items(): + if plugin == val: + return name + + def _verify_hook(self, hook, hookimpl): + if hook.is_historic() and hookimpl.hookwrapper: + raise PluginValidationError( + "Plugin %r\nhook %r\nhistoric incompatible to hookwrapper" % + (hookimpl.plugin_name, hook.name)) + + for arg in hookimpl.argnames: + if arg not in hook.argnames: + raise PluginValidationError( + "Plugin %r\nhook %r\nargument %r not available\n" + "plugin definition: %s\n" + "available hookargs: %s" % + (hookimpl.plugin_name, hook.name, arg, + _formatdef(hookimpl.function), ", ".join(hook.argnames))) + + def check_pending(self): + """ Verify that all hooks which have not been verified against + a hook specification are optional, otherwise raise PluginValidationError""" + for name in self.hook.__dict__: + if name[0] != "_": + hook = getattr(self.hook, name) + if not hook.has_spec(): + for hookimpl in (hook._wrappers + hook._nonwrappers): + if not hookimpl.optionalhook: + raise PluginValidationError( + "unknown hook %r in plugin %r" % + (name, hookimpl.plugin)) + + def load_setuptools_entrypoints(self, entrypoint_name): + """ Load modules from querying the specified setuptools entrypoint name. + Return the number of loaded plugins. """ + from pkg_resources import (iter_entry_points, DistributionNotFound, + VersionConflict) + for ep in iter_entry_points(entrypoint_name): + # is the plugin registered or blocked? + if self.get_plugin(ep.name) or self.is_blocked(ep.name): + continue + try: + plugin = ep.load() + except DistributionNotFound: + continue + except VersionConflict as e: + raise PluginValidationError( + "Plugin %r could not be loaded: %s!" 
% (ep.name, e)) + self.register(plugin, name=ep.name) + self._plugin_distinfo.append((plugin, ep.dist)) + return len(self._plugin_distinfo) + + def list_plugin_distinfo(self): + """ return list of distinfo/plugin tuples for all setuptools registered + plugins. """ + return list(self._plugin_distinfo) + + def list_name_plugin(self): + """ return list of name/plugin pairs. """ + return list(self._name2plugin.items()) + + def get_hookcallers(self, plugin): + """ get all hook callers for the specified plugin. """ + return self._plugin2hookcallers.get(plugin) + + def add_hookcall_monitoring(self, before, after): + """ add before/after tracing functions for all hooks + and return an undo function which, when called, + will remove the added tracers. + + ``before(hook_name, hook_impls, kwargs)`` will be called ahead + of all hook calls and receive a hookcaller instance, a list + of HookImpl instances and the keyword arguments for the hook call. + + ``after(outcome, hook_name, hook_impls, kwargs)`` receives the + same arguments as ``before`` but also a :py:class:`_CallOutcome <_pytest.vendored_packages.pluggy._CallOutcome>` object + which represents the result of the overall hook call. + """ + return _TracedHookExecution(self, before, after).undo + + def enable_tracing(self): + """ enable tracing of hook calls and return an undo function. """ + hooktrace = self.hook._trace + + def before(hook_name, methods, kwargs): + hooktrace.root.indent += 1 + hooktrace(hook_name, kwargs) + + def after(outcome, hook_name, methods, kwargs): + if outcome.excinfo is None: + hooktrace("finish", hook_name, "-->", outcome.result) + hooktrace.root.indent -= 1 + + return self.add_hookcall_monitoring(before, after) + + def subset_hook_caller(self, name, remove_plugins): + """ Return a new _HookCaller instance for the named method + which manages calls to all registered plugins except the + ones from remove_plugins. """ + orig = getattr(self.hook, name) + plugins_to_remove = [plug for plug in remove_plugins if hasattr(plug, name)] + if plugins_to_remove: + hc = _HookCaller(orig.name, orig._hookexec, orig._specmodule_or_class, + orig.spec_opts) + for hookimpl in (orig._wrappers + orig._nonwrappers): + plugin = hookimpl.plugin + if plugin not in plugins_to_remove: + hc._add_hookimpl(hookimpl) + # we also keep track of this hook caller so it + # gets properly removed on plugin unregistration + self._plugin2hookcallers.setdefault(plugin, []).append(hc) + return hc + return orig + + +class _MultiCall: + """ execute a call into multiple python functions/methods. """ + + # XXX note that the __multicall__ argument is supported only + # for pytest compatibility reasons. It was never officially + # supported there and is explicitely deprecated since 2.8 + # so we can remove it soon, allowing to avoid the below recursion + # in execute() and simplify/speed up the execute loop. 
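+ + # Editorial sketch (not part of the original source) of the call + # semantics: implementations are pop()ed from the end of hook_impls, so + # _MultiCall(impls, {"arg": 1}).execute() + # returns the list of all non-None results for a default spec, while with + # specopts={"firstresult": True} it stops at, and returns, the first + # non-None result.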
+ + def __init__(self, hook_impls, kwargs, specopts={}): + self.hook_impls = hook_impls + self.kwargs = kwargs + self.kwargs["__multicall__"] = self + self.specopts = specopts + + def execute(self): + all_kwargs = self.kwargs + self.results = results = [] + firstresult = self.specopts.get("firstresult") + + while self.hook_impls: + hook_impl = self.hook_impls.pop() + try: + args = [all_kwargs[argname] for argname in hook_impl.argnames] + except KeyError: + for argname in hook_impl.argnames: + if argname not in all_kwargs: + raise HookCallError( + "hook call must provide argument %r" % (argname,)) + if hook_impl.hookwrapper: + return _wrapped_call(hook_impl.function(*args), self.execute) + res = hook_impl.function(*args) + if res is not None: + if firstresult: + return res + results.append(res) + + if not firstresult: + return results + + def __repr__(self): + status = "%d meths" % (len(self.hook_impls),) + if hasattr(self, "results"): + status = ("%d results, " % len(self.results)) + status + return "<_MultiCall %s, kwargs=%r>" % (status, self.kwargs) + + +def varnames(func, startindex=None): + """ return argument name tuple for a function, method, class or callable. + + In case of a class, its "__init__" method is considered. + For methods the "self" parameter is not included unless you are passing + an unbound method with Python3 (which has no supports for unbound methods) + """ + cache = getattr(func, "__dict__", {}) + try: + return cache["_varnames"] + except KeyError: + pass + if inspect.isclass(func): + try: + func = func.__init__ + except AttributeError: + return () + startindex = 1 + else: + if not inspect.isfunction(func) and not inspect.ismethod(func): + try: + func = getattr(func, '__call__', func) + except Exception: + return () + if startindex is None: + startindex = int(inspect.ismethod(func)) + + try: + rawcode = func.__code__ + except AttributeError: + return () + try: + x = rawcode.co_varnames[startindex:rawcode.co_argcount] + except AttributeError: + x = () + else: + defaults = func.__defaults__ + if defaults: + x = x[:-len(defaults)] + try: + cache["_varnames"] = x + except TypeError: + pass + return x + + +class _HookRelay: + """ hook holder object for performing 1:N hook calls where N is the number + of registered plugins. 
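+ + Each specified hook becomes an attribute of this relay, so a call is + spelled e.g. ``pm.hook.myhook(arg=1)`` (editorial note; the hook name + is made up).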
+ + """ + + def __init__(self, trace): + self._trace = trace + + +class _HookCaller(object): + def __init__(self, name, hook_execute, specmodule_or_class=None, spec_opts=None): + self.name = name + self._wrappers = [] + self._nonwrappers = [] + self._hookexec = hook_execute + if specmodule_or_class is not None: + assert spec_opts is not None + self.set_specification(specmodule_or_class, spec_opts) + + def has_spec(self): + return hasattr(self, "_specmodule_or_class") + + def set_specification(self, specmodule_or_class, spec_opts): + assert not self.has_spec() + self._specmodule_or_class = specmodule_or_class + specfunc = getattr(specmodule_or_class, self.name) + argnames = varnames(specfunc, startindex=inspect.isclass(specmodule_or_class)) + assert "self" not in argnames # sanity check + self.argnames = ["__multicall__"] + list(argnames) + self.spec_opts = spec_opts + if spec_opts.get("historic"): + self._call_history = [] + + def is_historic(self): + return hasattr(self, "_call_history") + + def _remove_plugin(self, plugin): + def remove(wrappers): + for i, method in enumerate(wrappers): + if method.plugin == plugin: + del wrappers[i] + return True + if remove(self._wrappers) is None: + if remove(self._nonwrappers) is None: + raise ValueError("plugin %r not found" % (plugin,)) + + def _add_hookimpl(self, hookimpl): + if hookimpl.hookwrapper: + methods = self._wrappers + else: + methods = self._nonwrappers + + if hookimpl.trylast: + methods.insert(0, hookimpl) + elif hookimpl.tryfirst: + methods.append(hookimpl) + else: + # find last non-tryfirst method + i = len(methods) - 1 + while i >= 0 and methods[i].tryfirst: + i -= 1 + methods.insert(i + 1, hookimpl) + + def __repr__(self): + return "<_HookCaller %r>" % (self.name,) + + def __call__(self, **kwargs): + assert not self.is_historic() + return self._hookexec(self, self._nonwrappers + self._wrappers, kwargs) + + def call_historic(self, proc=None, kwargs=None): + self._call_history.append((kwargs or {}, proc)) + # historizing hooks don't return results + self._hookexec(self, self._nonwrappers + self._wrappers, kwargs) + + def call_extra(self, methods, kwargs): + """ Call the hook with some additional temporarily participating + methods using the specified kwargs as call parameters. """ + old = list(self._nonwrappers), list(self._wrappers) + for method in methods: + opts = dict(hookwrapper=False, trylast=False, tryfirst=False) + hookimpl = HookImpl(None, "", method, opts) + self._add_hookimpl(hookimpl) + try: + return self(**kwargs) + finally: + self._nonwrappers, self._wrappers = old + + def _maybe_apply_history(self, method): + if self.is_historic(): + for kwargs, proc in self._call_history: + res = self._hookexec(self, [method], kwargs) + if res and proc is not None: + proc(res[0]) + + +class HookImpl: + def __init__(self, plugin, plugin_name, function, hook_impl_opts): + self.function = function + self.argnames = varnames(self.function) + self.plugin = plugin + self.opts = hook_impl_opts + self.plugin_name = plugin_name + self.__dict__.update(hook_impl_opts) + + +class PluginValidationError(Exception): + """ plugin failed validation. """ + + +class HookCallError(Exception): + """ Hook was called wrongly. 
""" + + +if hasattr(inspect, 'signature'): + def _formatdef(func): + return "%s%s" % ( + func.__name__, + str(inspect.signature(func)) + ) +else: + def _formatdef(func): + return "%s%s" % ( + func.__name__, + inspect.formatargspec(*inspect.getargspec(func)) + ) diff --git a/venv/lib/python3.5/site-packages/_pytest/warnings.py b/venv/lib/python3.5/site-packages/_pytest/warnings.py new file mode 100644 index 0000000..4fe28bd --- /dev/null +++ b/venv/lib/python3.5/site-packages/_pytest/warnings.py @@ -0,0 +1,88 @@ +from __future__ import absolute_import, division, print_function + +import warnings +from contextlib import contextmanager + +import pytest + +from _pytest import compat + + +def _setoption(wmod, arg): + """ + Copy of the warning._setoption function but does not escape arguments. + """ + parts = arg.split(':') + if len(parts) > 5: + raise wmod._OptionError("too many fields (max 5): %r" % (arg,)) + while len(parts) < 5: + parts.append('') + action, message, category, module, lineno = [s.strip() + for s in parts] + action = wmod._getaction(action) + category = wmod._getcategory(category) + if lineno: + try: + lineno = int(lineno) + if lineno < 0: + raise ValueError + except (ValueError, OverflowError): + raise wmod._OptionError("invalid lineno %r" % (lineno,)) + else: + lineno = 0 + wmod.filterwarnings(action, message, category, module, lineno) + + +def pytest_addoption(parser): + group = parser.getgroup("pytest-warnings") + group.addoption( + '-W', '--pythonwarnings', action='append', + help="set which warnings to report, see -W option of python itself.") + parser.addini("filterwarnings", type="linelist", + help="Each line specifies warning filter pattern which would be passed" + "to warnings.filterwarnings. Process after -W and --pythonwarnings.") + + +@contextmanager +def catch_warnings_for_item(item): + """ + catches the warnings generated during setup/call/teardown execution + of the given item and after it is done posts them as warnings to this + item. 
+ """ + args = item.config.getoption('pythonwarnings') or [] + inifilters = item.config.getini("filterwarnings") + with warnings.catch_warnings(record=True) as log: + for arg in args: + warnings._setoption(arg) + + for arg in inifilters: + _setoption(warnings, arg) + + yield + + for warning in log: + warn_msg = warning.message + unicode_warning = False + + if compat._PY2 and any(isinstance(m, compat.UNICODE_TYPES) for m in warn_msg.args): + new_args = [compat.safe_str(m) for m in warn_msg.args] + unicode_warning = warn_msg.args != new_args + warn_msg.args = new_args + + msg = warnings.formatwarning( + warn_msg, warning.category, + warning.filename, warning.lineno, warning.line) + item.warn("unused", msg) + + if unicode_warning: + warnings.warn( + "Warning is using unicode non convertible to ascii, " + "converting to a safe representation:\n %s" % msg, + UnicodeWarning) + + +@pytest.hookimpl(hookwrapper=True) +def pytest_runtest_protocol(item): + with catch_warnings_for_item(item): + yield diff --git a/venv/lib/python3.5/site-packages/easy_install.py b/venv/lib/python3.5/site-packages/easy_install.py new file mode 100644 index 0000000..d87e984 --- /dev/null +++ b/venv/lib/python3.5/site-packages/easy_install.py @@ -0,0 +1,5 @@ +"""Run the EasyInstall command""" + +if __name__ == '__main__': + from setuptools.command.easy_install import main + main() diff --git a/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/DESCRIPTION.rst b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..39586d2 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/DESCRIPTION.rst @@ -0,0 +1,36 @@ +pip +=== + +The `PyPA recommended +`_ +tool for installing Python packages. + +* `Installation `_ +* `Documentation `_ +* `Changelog `_ +* `Github Page `_ +* `Issue Tracking `_ +* `User mailing list `_ +* `Dev mailing list `_ +* User IRC: #pypa on Freenode. +* Dev IRC: #pypa-dev on Freenode. + + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.python.org/pypi/pip + +.. image:: https://img.shields.io/travis/pypa/pip/develop.svg + :target: http://travis-ci.org/pypa/pip + +.. image:: https://readthedocs.org/projects/pip/badge/?version=stable + :target: https://pip.pypa.io/en/stable + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/INSTALLER b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/METADATA b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/METADATA new file mode 100644 index 0000000..79657d0 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/METADATA @@ -0,0 +1,65 @@ +Metadata-Version: 2.0 +Name: pip +Version: 8.1.1 +Summary: The PyPA recommended tool for installing Python packages. 
+Home-page: https://pip.pypa.io/ +Author: The pip developers +Author-email: python-virtualenv@groups.google.com +License: MIT +Keywords: easy_install distutils setuptools egg virtualenv +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Topic :: Software Development :: Build Tools +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: Implementation :: PyPy +Provides-Extra: testing +Requires-Dist: mock; extra == 'testing' +Requires-Dist: pretend; extra == 'testing' +Requires-Dist: pytest; extra == 'testing' +Requires-Dist: scripttest (>=1.3); extra == 'testing' +Requires-Dist: virtualenv (>=1.10); extra == 'testing' + +pip +=== + +The `PyPA recommended +`_ +tool for installing Python packages. + +* `Installation `_ +* `Documentation `_ +* `Changelog `_ +* `Github Page `_ +* `Issue Tracking `_ +* `User mailing list `_ +* `Dev mailing list `_ +* User IRC: #pypa on Freenode. +* Dev IRC: #pypa-dev on Freenode. + + +.. image:: https://img.shields.io/pypi/v/pip.svg + :target: https://pypi.python.org/pypi/pip + +.. image:: https://img.shields.io/travis/pypa/pip/develop.svg + :target: http://travis-ci.org/pypa/pip + +.. image:: https://readthedocs.org/projects/pip/badge/?version=stable + :target: https://pip.pypa.io/en/stable + +Code of Conduct +--------------- + +Everyone interacting in the pip project's codebases, issue trackers, chat +rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_. + +.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/RECORD b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/RECORD new file mode 100644 index 0000000..51722f8 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/RECORD @@ -0,0 +1,117 @@ +pip/__init__.py,sha256=fFs-ytm2H4V2evGESaozmF7U0BaGIMM0drFJZ5Ifj4s,10427 +pip/__main__.py,sha256=V6Kh-IEDEFpt1cahRE6MajUF_14qJR_Qsvn4MjWZXzE,584 +pip/basecommand.py,sha256=Zlg6SE42TIjRyt1mct0LCkgNxcKKnss3xvASJyDqucE,11429 +pip/baseparser.py,sha256=Nlc7Un9gat27xtB24SnKL_3pZZOoh62gNNRdS6tDRZY,10465 +pip/cmdoptions.py,sha256=pf24iszA39rhcJ5DjFA4oD_z5vTI0NG98qUahHs3qPM,15878 +pip/download.py,sha256=oJ3sZ8I6ct9X3eoXQ9xm_Ne0e6N85G_rWaERmMCVF2k,31722 +pip/exceptions.py,sha256=GdDhHOROBj-kW2rgerLJYXsxN8ENy1BX5RUb_Vs9TXM,7980 +pip/index.py,sha256=DsxoKRxoL4oEdaqQOctou0HN1rciulGp0EBcHtnyBR4,37235 +pip/locations.py,sha256=MqUzS8YI2wDa7oFzTQw4zM4s0Hci05yubxfU_kTXXlU,5632 +pip/pep425tags.py,sha256=4PNr9hd8OsXnKYR2q2oLzfDDhF5bFBwUZA-ZQxAClSI,11318 +pip/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156 +pip/wheel.py,sha256=Ux0Ry07GGBf0jGwr2tLLVMk3e2WuIJgu5PYQfSESZvk,32080 +pip/_vendor/__init__.py,sha256=9EPZ-JLxtXMt71Fp5_pKTTe1QbJZZVlN81rsRYEvlpA,4781 +pip/commands/__init__.py,sha256=naZ1iIWRutNznOVpLj8qyn1GPE0B5rhCWCrSUOZSt4M,2145 +pip/commands/completion.py,sha256=2BEUY3jowgemiIGgUP3rpk6A9My4Eu8rTPosFxlESOE,1967 +pip/commands/download.py,sha256=dMRtH0JMBhNGlJWr1qC29vOeiBzG2K0OjOAfzdxSVgA,4804 +pip/commands/freeze.py,sha256=KmQoLf-HruqBDzc-F2-ganGVn2lboNQqppfyrMsx3SU,2774 +pip/commands/hash.py,sha256=MCt4jEFyfoce0lVeNEz1x49uaTY-VDkKiBvvxrVcHkw,1597 +pip/commands/help.py,sha256=84HWkEdnGP_AEBHnn8gJP2Te0XTXRKFoXqXopbOZTNo,982 +pip/commands/install.py,sha256=DvRVVwfUy6LV-AtNcxl9kLl7XOc7G7087ZhdD4QbP60,15628 +pip/commands/list.py,sha256=u76U5TLODQ2g53sSUA4q6WhYus7usbuWuITQJsCnP3E,7412 +pip/commands/search.py,sha256=9ClAcFzkJ_7AksTkNrUed5qzsplpBtMlJByJLqiZFqw,4777 +pip/commands/show.py,sha256=dytBbI9XV-ChpV51tsuBygZJJO-QaO2Gtz5kbLkBCZE,5815 +pip/commands/uninstall.py,sha256=tz8cXz4WdpUdnt3RvpdQwH6_SNMB50egBIZWa1dwfcc,2884 +pip/commands/wheel.py,sha256=iT92Uo8qpVILl_Yk8L7AtkFVYGmY0ep5oDeyQSpwkLs,7528 +pip/compat/__init__.py,sha256=7WN0B0XMYIldfminnT679VoEJLxNQPi9MFwCIt1_llU,4669 +pip/compat/dictconfig.py,sha256=dRrelPDWrceDSzFT51RTEVY2GuM7UDyc5Igh_tn4Fvk,23096 +pip/compat/ordereddict.py,sha256=6RQCd4PyTE4tvLUoAnsygvrreOSTV4BRDbc_4gCSkTs,4110 +pip/models/__init__.py,sha256=0Rs7_RA4DxeOkWT5Cq4CQzDrSEhvYcN3TH2cazr72PE,71 +pip/models/index.py,sha256=pUfbO__v3mD9j-2n_ClwPS8pVyx4l2wIwyvWt8GMCRA,487 +pip/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pip/operations/freeze.py,sha256=5Pcs6Z9-TybQ_e8fcxS4aYY9MCywxbjBGUSKg7qDWgc,4323 +pip/req/__init__.py,sha256=vFwZY8_Vc1WU1zFAespg1My_r_AT3n7cN0W9eX0EFqk,276 +pip/req/req_file.py,sha256=3eaVnPMUAjikLdC5i8hZUAf8aAOby2UxmAVFf94FOXY,11928 +pip/req/req_install.py,sha256=ZzZActkzRdXP8N6THxaUd_ZTwohN6uFndbObbDiXNo4,45952 +pip/req/req_set.py,sha256=dLkSHIaA9W52NXPU-HEzFAS-FtrVsN0CzKdMvUGHaV8,32320 +pip/req/req_uninstall.py,sha256=fdH2VgCjEC8NRYDS7fRu3ZJaBBUEy-N5muwxDX5MBNM,6897 +pip/utils/__init__.py,sha256=SSixMJeh2SdjNgra_50jaC0jdmXFewLkFh_-a3tw9ks,28256 +pip/utils/appdirs.py,sha256=KTpZANfjYw5K2tZ0_jNNdP_kMxQAns79qZWelwaJo0c,7896 +pip/utils/build.py,sha256=4smLRrfSCmXmjEnVnMFh2tBEpNcSLRe6J0ejZJ-wWJE,1312 
+pip/utils/deprecation.py,sha256=DR3cKqzovYu9Pif7c9bT2KmwekfW95N3BsI45_5u38I,2239 +pip/utils/encoding.py,sha256=NQxGiFS5GbeAveLZTnx92t5r0PYqvt0iRnP2u9SGG1w,971 +pip/utils/filesystem.py,sha256=ZEVBuYM3fqr2_lgOESh4Y7fPFszGD474zVm_M3Mb5Tk,899 +pip/utils/hashes.py,sha256=oMk7cd3PbJgzpSQyXq1MytMud5f6H5Oa2YY5hYuCq6I,2866 +pip/utils/logging.py,sha256=7yWu4gZw-Qclj7X80QVdpGWkdTWGKT4LiUVKcE04pro,3327 +pip/utils/outdated.py,sha256=fNwOCL5r2EftPGhgCYGMKu032HC8cV-JAr9lp0HmToM,5455 +pip/utils/setuptools_build.py,sha256=8IGop-SZ6lxUl5HMOjLRaDlORPugIH_b_b2Y67x4jQc,240 +pip/utils/ui.py,sha256=pbDkSAeumZ6jdZcOJ2yAbx8iBgeP2zfpqNnLJK1gskQ,11597 +pip/vcs/__init__.py,sha256=lnea41zMq9HqB1Qo7hxy2IjUzk5WtBvnoloCCMR6Vk4,12349 +pip/vcs/bazaar.py,sha256=tYTwc4b4off8mr0O2o8SiGejqBDJxcbDBMSMd9-ISYc,3803 +pip/vcs/git.py,sha256=u16VCiNW_a9AaYqLri2b8-f4lOZlOYwsGpHHV3uv_dQ,10218 +pip/vcs/mercurial.py,sha256=xG6rDiwHCRytJEs23SIHBXl_SwQo2jkkdD_6rVVP5h4,3472 +pip/vcs/subversion.py,sha256=mGT7sAzuVc1u-9MPoXJNyShnRzhdJpDdGNuhhzUPv6w,8687 +pip-8.1.1.dist-info/DESCRIPTION.rst,sha256=jSvW1qOjwzndvm_p_DexGCVJfwgg3rWPMJWzf6Rmsfc,1167 +pip-8.1.1.dist-info/METADATA,sha256=p_9D2tGGDX-wd8S14XVVx0K-qOjDrrwu-CmYn9Dndlc,2362 +pip-8.1.1.dist-info/RECORD,, +pip-8.1.1.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +pip-8.1.1.dist-info/entry_points.txt,sha256=GWc-Wb9WUKZ1EuVWNz-G0l3BeIpbNJLx0OJbZ61AAV0,68 +pip-8.1.1.dist-info/metadata.json,sha256=wAnzudgBGV69N0kQOAgeAXIjQSbkBZhZEs98ULrfRUE,1513 +pip-8.1.1.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +../../../bin/pip,sha256=HFK3ZGDO7d9XgI0PgAO77DsOuZ6znbPSkGv-iY8vdvM,250 +../../../bin/pip3,sha256=HFK3ZGDO7d9XgI0PgAO77DsOuZ6znbPSkGv-iY8vdvM,250 +../../../bin/pip3.5,sha256=HFK3ZGDO7d9XgI0PgAO77DsOuZ6znbPSkGv-iY8vdvM,250 +pip-8.1.1.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pip/utils/__pycache__/filesystem.cpython-35.pyc,, +pip/compat/__pycache__/ordereddict.cpython-35.pyc,, +pip/commands/__pycache__/hash.cpython-35.pyc,, +pip/commands/__pycache__/help.cpython-35.pyc,, +pip/__pycache__/baseparser.cpython-35.pyc,, +pip/operations/__pycache__/__init__.cpython-35.pyc,, +pip/utils/__pycache__/outdated.cpython-35.pyc,, +pip/operations/__pycache__/freeze.cpython-35.pyc,, +pip/commands/__pycache__/wheel.cpython-35.pyc,, +pip/__pycache__/locations.cpython-35.pyc,, +pip/req/__pycache__/req_set.cpython-35.pyc,, +pip/utils/__pycache__/encoding.cpython-35.pyc,, +pip/utils/__pycache__/__init__.cpython-35.pyc,, +pip/vcs/__pycache__/mercurial.cpython-35.pyc,, +pip/utils/__pycache__/hashes.cpython-35.pyc,, +pip/compat/__pycache__/__init__.cpython-35.pyc,, +pip/__pycache__/__init__.cpython-35.pyc,, +pip/__pycache__/download.cpython-35.pyc,, +pip/commands/__pycache__/uninstall.cpython-35.pyc,, +pip/req/__pycache__/__init__.cpython-35.pyc,, +pip/commands/__pycache__/completion.cpython-35.pyc,, +pip/vcs/__pycache__/bazaar.cpython-35.pyc,, +pip/req/__pycache__/req_install.cpython-35.pyc,, +pip/__pycache__/cmdoptions.cpython-35.pyc,, +pip/req/__pycache__/req_file.cpython-35.pyc,, +pip/commands/__pycache__/freeze.cpython-35.pyc,, +pip/commands/__pycache__/install.cpython-35.pyc,, +pip/vcs/__pycache__/subversion.cpython-35.pyc,, +pip/utils/__pycache__/appdirs.cpython-35.pyc,, +pip/commands/__pycache__/search.cpython-35.pyc,, +pip/utils/__pycache__/build.cpython-35.pyc,, +pip/commands/__pycache__/show.cpython-35.pyc,, +pip/models/__pycache__/index.cpython-35.pyc,, +pip/commands/__pycache__/__init__.cpython-35.pyc,, 
+pip/commands/__pycache__/download.cpython-35.pyc,, +pip/__pycache__/status_codes.cpython-35.pyc,, +pip/req/__pycache__/req_uninstall.cpython-35.pyc,, +pip/__pycache__/pep425tags.cpython-35.pyc,, +pip/__pycache__/index.cpython-35.pyc,, +pip/models/__pycache__/__init__.cpython-35.pyc,, +pip/utils/__pycache__/logging.cpython-35.pyc,, +pip/compat/__pycache__/dictconfig.cpython-35.pyc,, +pip/vcs/__pycache__/__init__.cpython-35.pyc,, +pip/__pycache__/basecommand.cpython-35.pyc,, +pip/_vendor/__pycache__/__init__.cpython-35.pyc,, +pip/utils/__pycache__/deprecation.cpython-35.pyc,, +pip/vcs/__pycache__/git.cpython-35.pyc,, +pip/utils/__pycache__/ui.cpython-35.pyc,, +pip/utils/__pycache__/setuptools_build.cpython-35.pyc,, +pip/__pycache__/exceptions.cpython-35.pyc,, +pip/__pycache__/__main__.cpython-35.pyc,, +pip/__pycache__/wheel.cpython-35.pyc,, +pip/commands/__pycache__/list.cpython-35.pyc,, diff --git a/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/WHEEL b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/entry_points.txt b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/entry_points.txt new file mode 100644 index 0000000..c02a8d5 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/entry_points.txt @@ -0,0 +1,5 @@ +[console_scripts] +pip = pip:main +pip3 = pip:main +pip3.5 = pip:main + diff --git a/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/metadata.json b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/metadata.json new file mode 100644 index 0000000..91434c5 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: Implementation :: PyPy"], "extensions": {"python.commands": {"wrap_console": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}, "python.details": {"contacts": [{"email": "python-virtualenv@groups.google.com", "name": "The pip developers", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip": "pip:main", "pip3": "pip:main", "pip3.5": "pip:main"}}}, "extras": ["testing"], "generator": "bdist_wheel (0.29.0)", "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "license": "MIT", "metadata_version": "2.0", "name": "pip", "run_requires": [{"extra": "testing", "requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "summary": "The PyPA recommended tool for installing Python packages.", "test_requires": [{"requires": ["mock", "pretend", "pytest", "scripttest (>=1.3)", "virtualenv (>=1.10)"]}], "version": "8.1.1"} \ No newline at end of file diff --git 
a/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/top_level.txt b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/top_level.txt new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip-8.1.1.dist-info/top_level.txt @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.5/site-packages/pip/__init__.py b/venv/lib/python3.5/site-packages/pip/__init__.py new file mode 100644 index 0000000..51e7eaf --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/__init__.py @@ -0,0 +1,315 @@ +#!/usr/bin/env python +from __future__ import absolute_import + +import locale +import logging +import os +import optparse +import warnings + +import sys +import re + +from pip.exceptions import InstallationError, CommandError, PipError +from pip.utils import get_installed_distributions, get_prog +from pip.utils import deprecation, dist_is_editable +from pip.vcs import git, mercurial, subversion, bazaar # noqa +from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip.commands import get_summaries, get_similar_commands +from pip.commands import commands_dict +from pip._vendor.requests.packages.urllib3.exceptions import ( + InsecureRequestWarning, +) + + +# assignment for flake8 to be happy + +# This fixes a peculiarity when importing via __import__ - as we are +# initialising the pip module, "from pip import cmdoptions" is recursive +# and appears not to work properly in that situation. +import pip.cmdoptions +cmdoptions = pip.cmdoptions + +# The version as used in the setup.py and the docs conf.py +__version__ = "8.1.1" + + +logger = logging.getLogger(__name__) + +# Hide the InsecureRequestWarning from urllib3 +warnings.filterwarnings("ignore", category=InsecureRequestWarning) + + +def autocomplete(): + """Command and option completion for the main option parser (and options) + and its subcommands (and options). + + Enable by sourcing one of the completion shell scripts (bash or zsh). + """ + # Don't complete if user hasn't sourced bash_completion file.
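+ # Editorial sketch (not part of the original source): the sourced + # completion script invokes pip roughly as + # PIP_AUTO_COMPLETE=1 COMP_WORDS="pip ins" COMP_CWORD=1 pip + # after which the code below prints the matching subcommands + # (here "install") and exits.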
+ if 'PIP_AUTO_COMPLETE' not in os.environ: + return + cwords = os.environ['COMP_WORDS'].split()[1:] + cword = int(os.environ['COMP_CWORD']) + try: + current = cwords[cword - 1] + except IndexError: + current = '' + + subcommands = [cmd for cmd, summary in get_summaries()] + options = [] + # subcommand + try: + subcommand_name = [w for w in cwords if w in subcommands][0] + except IndexError: + subcommand_name = None + + parser = create_main_parser() + # subcommand options + if subcommand_name: + # special case: 'help' subcommand has no options + if subcommand_name == 'help': + sys.exit(1) + # special case: list locally installed dists for uninstall command + if subcommand_name == 'uninstall' and not current.startswith('-'): + installed = [] + lc = current.lower() + for dist in get_installed_distributions(local_only=True): + if dist.key.startswith(lc) and dist.key not in cwords[1:]: + installed.append(dist.key) + # if there are no dists installed, fall back to option completion + if installed: + for dist in installed: + print(dist) + sys.exit(1) + + subcommand = commands_dict[subcommand_name]() + options += [(opt.get_opt_string(), opt.nargs) + for opt in subcommand.parser.option_list_all + if opt.help != optparse.SUPPRESS_HELP] + + # filter out previously specified options from available options + prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]] + options = [(x, v) for (x, v) in options if x not in prev_opts] + # filter options by current input + options = [(k, v) for k, v in options if k.startswith(current)] + for option in options: + opt_label = option[0] + # append '=' to options which require args + if option[1]: + opt_label += '=' + print(opt_label) + else: + # show main parser options only when necessary + if current.startswith('-') or current.startswith('--'): + opts = [i.option_list for i in parser.option_groups] + opts.append(parser.option_list) + opts = (o for it in opts for o in it) + + subcommands += [i.get_opt_string() for i in opts + if i.help != optparse.SUPPRESS_HELP] + + print(' '.join([x for x in subcommands if x.startswith(current)])) + sys.exit(1) + + +def create_main_parser(): + parser_kw = { + 'usage': '\n%prog [options]', + 'add_help_option': False, + 'formatter': UpdatingDefaultsHelpFormatter(), + 'name': 'global', + 'prog': get_prog(), + } + + parser = ConfigOptionParser(**parser_kw) + parser.disable_interspersed_args() + + pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__))) + parser.version = 'pip %s from %s (python %s)' % ( + __version__, pip_pkg_dir, sys.version[:3]) + + # add the general options + gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser) + parser.add_option_group(gen_opts) + + parser.main = True # so the help formatter knows + + # create command listing for description + command_summaries = get_summaries() + description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries] + parser.description = '\n'.join(description) + + return parser + + +def parseopts(args): + parser = create_main_parser() + + # Note: parser calls disable_interspersed_args(), so the result of this + # call is to split the initial args into the general options before the + # subcommand and everything else. 
+ # For example: + # args: ['--timeout=5', 'install', '--user', 'INITools'] + # general_options: ['--timeout==5'] + # args_else: ['install', '--user', 'INITools'] + general_options, args_else = parser.parse_args(args) + + # --version + if general_options.version: + sys.stdout.write(parser.version) + sys.stdout.write(os.linesep) + sys.exit() + + # pip || pip help -> print_help() + if not args_else or (args_else[0] == 'help' and len(args_else) == 1): + parser.print_help() + sys.exit() + + # the subcommand name + cmd_name = args_else[0] + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + # all the args without the subcommand + cmd_args = args[:] + cmd_args.remove(cmd_name) + + return cmd_name, cmd_args + + +def check_isolated(args): + isolated = False + + if "--isolated" in args: + isolated = True + + return isolated + + +def main(args=None): + if args is None: + args = sys.argv[1:] + + # Configure our deprecation warnings to be sent through loggers + deprecation.install_warning_logger() + + autocomplete() + + try: + cmd_name, cmd_args = parseopts(args) + except PipError as exc: + sys.stderr.write("ERROR: %s" % exc) + sys.stderr.write(os.linesep) + sys.exit(1) + + # Needed for locale.getpreferredencoding(False) to work + # in pip.utils.encoding.auto_decode + locale.setlocale(locale.LC_ALL, '') + command = commands_dict[cmd_name](isolated=check_isolated(cmd_args)) + return command.main(cmd_args) + + +# ########################################################### +# # Writing freeze files + +class FrozenRequirement(object): + + def __init__(self, name, req, editable, comments=()): + self.name = name + self.req = req + self.editable = editable + self.comments = comments + + _rev_re = re.compile(r'-r(\d+)$') + _date_re = re.compile(r'-(20\d\d\d\d\d\d)$') + + @classmethod + def from_dist(cls, dist, dependency_links): + location = os.path.normcase(os.path.abspath(dist.location)) + comments = [] + from pip.vcs import vcs, get_src_requirement + if dist_is_editable(dist) and vcs.get_backend_name(location): + editable = True + try: + req = get_src_requirement(dist, location) + except InstallationError as exc: + logger.warning( + "Error when trying to get requirement for VCS system %s, " + "falling back to uneditable format", exc + ) + req = None + if req is None: + logger.warning( + 'Could not determine repository location of %s', location + ) + comments.append( + '## !! 
Could not determine repository location' + ) + req = dist.as_requirement() + editable = False + else: + editable = False + req = dist.as_requirement() + specs = req.specs + assert len(specs) == 1 and specs[0][0] in ["==", "==="], \ + 'Expected 1 spec with == or ===; specs = %r; dist = %r' % \ + (specs, dist) + version = specs[0][1] + ver_match = cls._rev_re.search(version) + date_match = cls._date_re.search(version) + if ver_match or date_match: + svn_backend = vcs.get_backend('svn') + if svn_backend: + svn_location = svn_backend().get_location( + dist, + dependency_links, + ) + if not svn_location: + logger.warning( + 'Warning: cannot find svn location for %s', req) + comments.append( + '## FIXME: could not find svn URL in dependency_links ' + 'for this package:' + ) + else: + comments.append( + '# Installing as editable to satisfy requirement %s:' % + req + ) + if ver_match: + rev = ver_match.group(1) + else: + rev = '{%s}' % date_match.group(1) + editable = True + req = '%s@%s#egg=%s' % ( + svn_location, + rev, + cls.egg_name(dist) + ) + return cls(dist.project_name, req, editable, comments) + + @staticmethod + def egg_name(dist): + name = dist.egg_name() + match = re.search(r'-py\d\.\d$', name) + if match: + name = name[:match.start()] + return name + + def __str__(self): + req = self.req + if self.editable: + req = '-e %s' % req + return '\n'.join(list(self.comments) + [str(req)]) + '\n' + + +if __name__ == '__main__': + sys.exit(main()) diff --git a/venv/lib/python3.5/site-packages/pip/__main__.py b/venv/lib/python3.5/site-packages/pip/__main__.py new file mode 100644 index 0000000..5556539 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/__main__.py @@ -0,0 +1,19 @@ +from __future__ import absolute_import + +import os +import sys + +# If we are running from a wheel, add the wheel to sys.path +# This allows the usage python pip-*.whl/pip install pip-*.whl +if __package__ == '': + # __file__ is pip-*.whl/pip/__main__.py + # first dirname call strips of '/__main__.py', second strips off '/pip' + # Resulting path is the name of the wheel itself + # Add that to sys.path so we can import pip + path = os.path.dirname(os.path.dirname(__file__)) + sys.path.insert(0, path) + +import pip # noqa + +if __name__ == '__main__': + sys.exit(pip.main()) diff --git a/venv/lib/python3.5/site-packages/pip/_vendor/__init__.py b/venv/lib/python3.5/site-packages/pip/_vendor/__init__.py new file mode 100644 index 0000000..a822a5b --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/_vendor/__init__.py @@ -0,0 +1,110 @@ +""" +pip._vendor is for vendoring dependencies of pip to prevent needing pip to +depend on something external. + +Files inside of pip._vendor should be considered immutable and should only be +updated to versions from upstream. +""" +from __future__ import absolute_import + +import glob +import os.path +import sys + +# Downstream redistributors which have debundled our dependencies should also +# patch this value to be true. This will trigger the additional patching +# to cause things like "six" to be available as pip. +DEBUNDLED = True + +# By default, look in this directory for a bunch of .whl files which we will +# add to the beginning of sys.path before attempting to import anything. This +# is done to support downstream re-distributors like Debian and Fedora who +# wish to create their own Wheels for our dependencies to aid in debundling. 
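+# Editorial sketch (not part of the original source): with DEBUNDLED true, +# vendored("six") (defined below) makes "import pip._vendor.six" resolve to +# the system "six" module by aliasing the sys.modules entries when the +# vendored copy is missing.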
+WHEEL_DIR = os.path.abspath(os.path.join(sys.prefix, 'share', 'python-wheels')) + + +# Define a small helper function to alias our vendored modules to the real ones +# if the vendored ones do not exist. This idea of this was taken from +# https://github.com/kennethreitz/requests/pull/2567. +def vendored(modulename): + vendored_name = "{0}.{1}".format(__name__, modulename) + + try: + __import__(vendored_name, globals(), locals(), level=0) + except ImportError: + try: + __import__(modulename, globals(), locals(), level=0) + except ImportError: + # We can just silently allow import failures to pass here. If we + # got to this point it means that ``import pip._vendor.whatever`` + # failed and so did ``import whatever``. Since we're importing this + # upfront in an attempt to alias imports, not erroring here will + # just mean we get a regular import error whenever pip *actually* + # tries to import one of these modules to use it, which actually + # gives us a better error message than we would have otherwise + # gotten. + pass + else: + sys.modules[vendored_name] = sys.modules[modulename] + base, head = vendored_name.rsplit(".", 1) + setattr(sys.modules[base], head, sys.modules[modulename]) + + +# If we're operating in a debundled setup, then we want to go ahead and trigger +# the aliasing of our vendored libraries as well as looking for wheels to add +# to our sys.path. This will cause all of this code to be a no-op typically +# however downstream redistributors can enable it in a consistent way across +# all platforms. +if DEBUNDLED: + # Actually look inside of WHEEL_DIR to find .whl files and add them to the + # front of our sys.path. + sys.path[:] = glob.glob(os.path.join(WHEEL_DIR, "*.whl")) + sys.path + + # Actually alias all of our vendored dependencies. + vendored("cachecontrol") + vendored("colorama") + vendored("distlib") + vendored("html5lib") + vendored("lockfile") + vendored("six") + vendored("six.moves") + vendored("six.moves.urllib") + vendored("packaging") + vendored("packaging.version") + vendored("packaging.specifiers") + vendored("pkg_resources") + vendored("progress") + vendored("retrying") + vendored("requests") + vendored("requests.packages") + vendored("requests.packages.urllib3") + vendored("requests.packages.urllib3._collections") + vendored("requests.packages.urllib3.connection") + vendored("requests.packages.urllib3.connectionpool") + vendored("requests.packages.urllib3.contrib") + vendored("requests.packages.urllib3.contrib.ntlmpool") + vendored("requests.packages.urllib3.contrib.pyopenssl") + vendored("requests.packages.urllib3.exceptions") + vendored("requests.packages.urllib3.fields") + vendored("requests.packages.urllib3.filepost") + vendored("requests.packages.urllib3.packages") + try: + vendored("requests.packages.urllib3.packages.ordered_dict") + vendored("requests.packages.urllib3.packages.six") + except ImportError: + # Debian already unbundles these from requests. + pass + vendored("requests.packages.urllib3.packages.ssl_match_hostname") + vendored("requests.packages.urllib3.packages.ssl_match_hostname." 
+ "_implementation") + vendored("requests.packages.urllib3.poolmanager") + vendored("requests.packages.urllib3.request") + vendored("requests.packages.urllib3.response") + vendored("requests.packages.urllib3.util") + vendored("requests.packages.urllib3.util.connection") + vendored("requests.packages.urllib3.util.request") + vendored("requests.packages.urllib3.util.response") + vendored("requests.packages.urllib3.util.retry") + vendored("requests.packages.urllib3.util.ssl_") + vendored("requests.packages.urllib3.util.timeout") + vendored("requests.packages.urllib3.util.url") diff --git a/venv/lib/python3.5/site-packages/pip/basecommand.py b/venv/lib/python3.5/site-packages/pip/basecommand.py new file mode 100644 index 0000000..a07043a --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/basecommand.py @@ -0,0 +1,325 @@ +"""Base Command class, and related routines""" +from __future__ import absolute_import + +import logging +import os +import sys +import optparse +import warnings + +from pip import cmdoptions +from pip.index import PackageFinder +from pip.locations import running_under_virtualenv +from pip.download import PipSession +from pip.exceptions import (BadCommand, InstallationError, UninstallationError, + CommandError, PreviousBuildDirError) + +from pip.compat import logging_dictConfig +from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter +from pip.req import InstallRequirement, parse_requirements +from pip.status_codes import ( + SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND, + PREVIOUS_BUILD_DIR_ERROR, +) +from pip.utils import deprecation, get_prog, normalize_path +from pip.utils.logging import IndentingFormatter +from pip.utils.outdated import pip_version_check + + +__all__ = ['Command'] + + +logger = logging.getLogger(__name__) + + +class Command(object): + name = None + usage = None + hidden = False + log_streams = ("ext://sys.stdout", "ext://sys.stderr") + + def __init__(self, isolated=False): + parser_kw = { + 'usage': self.usage, + 'prog': '%s %s' % (get_prog(), self.name), + 'formatter': UpdatingDefaultsHelpFormatter(), + 'add_help_option': False, + 'name': self.name, + 'description': self.__doc__, + 'isolated': isolated, + } + + self.parser = ConfigOptionParser(**parser_kw) + + # Commands should add options to this option group + optgroup_name = '%s Options' % self.name.capitalize() + self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name) + + # Add the general options + gen_opts = cmdoptions.make_option_group( + cmdoptions.general_group, + self.parser, + ) + self.parser.add_option_group(gen_opts) + + def _build_session(self, options, retries=None, timeout=None): + session = PipSession( + cache=( + normalize_path(os.path.join(options.cache_dir, "http")) + if options.cache_dir else None + ), + retries=retries if retries is not None else options.retries, + insecure_hosts=options.trusted_hosts, + ) + + # Handle custom ca-bundles from the user + if options.cert: + session.verify = options.cert + + # Handle SSL client certificate + if options.client_cert: + session.cert = options.client_cert + + # Handle timeouts + if options.timeout or timeout: + session.timeout = ( + timeout if timeout is not None else options.timeout + ) + + # Handle configured proxies + if options.proxy: + session.proxies = { + "http": options.proxy, + "https": options.proxy, + } + + # Determine if we can prompt the user for authentication or not + session.auth.prompting = not options.no_input + + return session + + def parse_args(self, args): + # factored out for 
testability
+        return self.parser.parse_args(args)
+
+    def main(self, args):
+        options, args = self.parse_args(args)
+
+        if options.quiet:
+            if options.quiet == 1:
+                level = "WARNING"
+            elif options.quiet == 2:
+                level = "ERROR"
+            else:
+                level = "CRITICAL"
+        elif options.verbose:
+            level = "DEBUG"
+        else:
+            level = "INFO"
+
+        logging_dictConfig({
+            "version": 1,
+            "disable_existing_loggers": False,
+            "filters": {
+                "exclude_warnings": {
+                    "()": "pip.utils.logging.MaxLevelFilter",
+                    "level": logging.WARNING,
+                },
+            },
+            "formatters": {
+                "indent": {
+                    "()": IndentingFormatter,
+                    "format": "%(message)s",
+                },
+            },
+            "handlers": {
+                "console": {
+                    "level": level,
+                    "class": "pip.utils.logging.ColorizedStreamHandler",
+                    "stream": self.log_streams[0],
+                    "filters": ["exclude_warnings"],
+                    "formatter": "indent",
+                },
+                "console_errors": {
+                    "level": "WARNING",
+                    "class": "pip.utils.logging.ColorizedStreamHandler",
+                    "stream": self.log_streams[1],
+                    "formatter": "indent",
+                },
+                "user_log": {
+                    "level": "DEBUG",
+                    "class": "pip.utils.logging.BetterRotatingFileHandler",
+                    "filename": options.log or "/dev/null",
+                    "delay": True,
+                    "formatter": "indent",
+                },
+            },
+            "root": {
+                "level": level,
+                "handlers": list(filter(None, [
+                    "console",
+                    "console_errors",
+                    "user_log" if options.log else None,
+                ])),
+            },
+            # Disable any logging besides WARNING unless we have DEBUG level
+            # logging enabled. These use both pip._vendor and the bare names
+            # for the case where someone unbundles our libraries.
+            "loggers": dict(
+                (
+                    name,
+                    {
+                        "level": (
+                            "WARNING"
+                            if level in ["INFO", "ERROR"]
+                            else "DEBUG"
+                        ),
+                    },
+                )
+                for name in ["pip._vendor", "distlib", "requests", "urllib3"]
+            ),
+        })
+
+        if sys.version_info[:2] == (2, 6):
+            warnings.warn(
+                "Python 2.6 is no longer supported by the Python core team, "
+                "please upgrade your Python. A future version of pip will "
+                "drop support for Python 2.6",
+                deprecation.Python26DeprecationWarning
+            )
+
+        # TODO: try to get these passed down from the command
+        # without resorting to os.environ to hold them.
+
+        if options.no_input:
+            os.environ['PIP_NO_INPUT'] = '1'
+
+        if options.exists_action:
+            os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
+
+        if options.require_venv:
+            # If a venv is required, check if it can really be found
+            if not running_under_virtualenv():
+                logger.critical(
+                    'Could not find an activated virtualenv (required).'
+ ) + sys.exit(VIRTUALENV_NOT_FOUND) + + try: + status = self.run(options, args) + # FIXME: all commands should return an exit status + # and when it is done, isinstance is not needed anymore + if isinstance(status, int): + return status + except PreviousBuildDirError as exc: + logger.critical(str(exc)) + logger.debug('Exception information:', exc_info=True) + + return PREVIOUS_BUILD_DIR_ERROR + except (InstallationError, UninstallationError, BadCommand) as exc: + logger.critical(str(exc)) + logger.debug('Exception information:', exc_info=True) + + return ERROR + except CommandError as exc: + logger.critical('ERROR: %s', exc) + logger.debug('Exception information:', exc_info=True) + + return ERROR + except KeyboardInterrupt: + logger.critical('Operation cancelled by user') + logger.debug('Exception information:', exc_info=True) + + return ERROR + except: + logger.critical('Exception:', exc_info=True) + + return UNKNOWN_ERROR + finally: + # Check if we're using the latest version of pip available + if (not options.disable_pip_version_check and not + getattr(options, "no_index", False)): + with self._build_session( + options, + retries=0, + timeout=min(5, options.timeout)) as session: + pip_version_check(session) + + return SUCCESS + + +class RequirementCommand(Command): + + @staticmethod + def populate_requirement_set(requirement_set, args, options, finder, + session, name, wheel_cache): + """ + Marshal cmd line args into a requirement set. + """ + for filename in options.constraints: + for req in parse_requirements( + filename, + constraint=True, finder=finder, options=options, + session=session, wheel_cache=wheel_cache): + requirement_set.add_requirement(req) + + for req in args: + requirement_set.add_requirement( + InstallRequirement.from_line( + req, None, isolated=options.isolated_mode, + wheel_cache=wheel_cache + ) + ) + + for req in options.editables: + requirement_set.add_requirement( + InstallRequirement.from_editable( + req, + default_vcs=options.default_vcs, + isolated=options.isolated_mode, + wheel_cache=wheel_cache + ) + ) + + found_req_in_file = False + for filename in options.requirements: + for req in parse_requirements( + filename, + finder=finder, options=options, session=session, + wheel_cache=wheel_cache): + found_req_in_file = True + requirement_set.add_requirement(req) + # If --require-hashes was a line in a requirements file, tell + # RequirementSet about it: + requirement_set.require_hashes = options.require_hashes + + if not (args or options.editables or found_req_in_file): + opts = {'name': name} + if options.find_links: + msg = ('You must give at least one requirement to ' + '%(name)s (maybe you meant "pip %(name)s ' + '%(links)s"?)' % + dict(opts, links=' '.join(options.find_links))) + else: + msg = ('You must give at least one requirement ' + 'to %(name)s (see "pip help %(name)s")' % opts) + logger.warning(msg) + + def _build_package_finder(self, options, session): + """ + Create a package finder appropriate to this requirement command. 
+ """ + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.info('Ignoring indexes: %s', ','.join(index_urls)) + index_urls = [] + + return PackageFinder( + find_links=options.find_links, + format_control=options.format_control, + index_urls=index_urls, + trusted_hosts=options.trusted_hosts, + allow_all_prereleases=options.pre, + process_dependency_links=options.process_dependency_links, + session=session, + ) diff --git a/venv/lib/python3.5/site-packages/pip/baseparser.py b/venv/lib/python3.5/site-packages/pip/baseparser.py new file mode 100644 index 0000000..ccbf36b --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/baseparser.py @@ -0,0 +1,292 @@ +"""Base option parser setup""" +from __future__ import absolute_import + +import sys +import optparse +import os +import re +import textwrap +from distutils.util import strtobool + +from pip._vendor.six import string_types +from pip._vendor.six.moves import configparser +from pip.locations import ( + legacy_config_file, config_basename, running_under_virtualenv, + site_config_files +) +from pip.utils import appdirs, get_terminal_size + + +_environ_prefix_re = re.compile(r"^PIP_", re.I) + + +class PrettyHelpFormatter(optparse.IndentedHelpFormatter): + """A prettier/less verbose help formatter for optparse.""" + + def __init__(self, *args, **kwargs): + # help position must be aligned with __init__.parseopts.description + kwargs['max_help_position'] = 30 + kwargs['indent_increment'] = 1 + kwargs['width'] = get_terminal_size()[0] - 2 + optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs) + + def format_option_strings(self, option): + return self._format_option_strings(option, ' <%s>', ', ') + + def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '): + """ + Return a comma-separated list of option strings and metavars. + + :param option: tuple of (short opt, long opt), e.g: ('-f', '--format') + :param mvarfmt: metavar format string - evaluated as mvarfmt % metavar + :param optsep: separator + """ + opts = [] + + if option._short_opts: + opts.append(option._short_opts[0]) + if option._long_opts: + opts.append(option._long_opts[0]) + if len(opts) > 1: + opts.insert(1, optsep) + + if option.takes_value(): + metavar = option.metavar or option.dest.lower() + opts.append(mvarfmt % metavar.lower()) + + return ''.join(opts) + + def format_heading(self, heading): + if heading == 'Options': + return '' + return heading + ':\n' + + def format_usage(self, usage): + """ + Ensure there is only one newline between usage and the first heading + if there is no description. 
+ """ + msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ") + return msg + + def format_description(self, description): + # leave full control over description to us + if description: + if hasattr(self.parser, 'main'): + label = 'Commands' + else: + label = 'Description' + # some doc strings have initial newlines, some don't + description = description.lstrip('\n') + # some doc strings have final newlines and spaces, some don't + description = description.rstrip() + # dedent, then reindent + description = self.indent_lines(textwrap.dedent(description), " ") + description = '%s:\n%s\n' % (label, description) + return description + else: + return '' + + def format_epilog(self, epilog): + # leave full control over epilog to us + if epilog: + return epilog + else: + return '' + + def indent_lines(self, text, indent): + new_lines = [indent + line for line in text.split('\n')] + return "\n".join(new_lines) + + +class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter): + """Custom help formatter for use in ConfigOptionParser. + + This is updates the defaults before expanding them, allowing + them to show up correctly in the help listing. + """ + + def expand_default(self, option): + if self.parser is not None: + self.parser._update_defaults(self.parser.defaults) + return optparse.IndentedHelpFormatter.expand_default(self, option) + + +class CustomOptionParser(optparse.OptionParser): + def insert_option_group(self, idx, *args, **kwargs): + """Insert an OptionGroup at a given position.""" + group = self.add_option_group(*args, **kwargs) + + self.option_groups.pop() + self.option_groups.insert(idx, group) + + return group + + @property + def option_list_all(self): + """Get a list of all options, including those in option groups.""" + res = self.option_list[:] + for i in self.option_groups: + res.extend(i.option_list) + + return res + + +class ConfigOptionParser(CustomOptionParser): + """Custom option parser which updates its defaults by checking the + configuration files and environmental variables""" + + isolated = False + + def __init__(self, *args, **kwargs): + self.config = configparser.RawConfigParser() + self.name = kwargs.pop('name') + self.isolated = kwargs.pop("isolated", False) + self.files = self.get_config_files() + if self.files: + self.config.read(self.files) + assert self.name + optparse.OptionParser.__init__(self, *args, **kwargs) + + def get_config_files(self): + # the files returned by this method will be parsed in order with the + # first files listed being overridden by later files in standard + # ConfigParser fashion + config_file = os.environ.get('PIP_CONFIG_FILE', False) + if config_file == os.devnull: + return [] + + # at the base we have any site-wide configuration + files = list(site_config_files) + + # per-user configuration next + if not self.isolated: + if config_file and os.path.exists(config_file): + files.append(config_file) + else: + # This is the legacy config file, we consider it to be a lower + # priority than the new file location. + files.append(legacy_config_file) + + # This is the new config file, we consider it to be a higher + # priority than the legacy file. 
+                # This is the new config file; we consider it to be a higher
+                # priority than the legacy file.
+                files.append(
+                    os.path.join(
+                        appdirs.user_config_dir("pip"),
+                        config_basename,
+                    )
+                )
+
+        # finally, the virtualenv configuration, which trumps everything else
+        if running_under_virtualenv():
+            venv_config_file = os.path.join(
+                sys.prefix,
+                config_basename,
+            )
+            if os.path.exists(venv_config_file):
+                files.append(venv_config_file)
+
+        return files
+
+    def check_default(self, option, key, val):
+        try:
+            return option.check_value(key, val)
+        except optparse.OptionValueError as exc:
+            print("An error occurred during configuration: %s" % exc)
+            sys.exit(3)
+
+    def _update_defaults(self, defaults):
+        """Updates the given defaults with values from the config files and
+        the environ. Does a little special handling for certain types of
+        options (lists)."""
+        # Then go and look for the other sources of configuration:
+        config = {}
+        # 1. config files
+        for section in ('global', self.name):
+            config.update(
+                self.normalize_keys(self.get_config_section(section))
+            )
+        # 2. environment variables
+        if not self.isolated:
+            config.update(self.normalize_keys(self.get_environ_vars()))
+        # Accumulate complex default state.
+        self.values = optparse.Values(self.defaults)
+        late_eval = set()
+        # Then set the options with those values
+        for key, val in config.items():
+            # ignore empty values
+            if not val:
+                continue
+
+            option = self.get_option(key)
+            # Ignore options not present in this parser. E.g. non-globals put
+            # in [global] by users that want them to apply to all applicable
+            # commands.
+            if option is None:
+                continue
+
+            if option.action in ('store_true', 'store_false', 'count'):
+                val = strtobool(val)
+            elif option.action == 'append':
+                val = val.split()
+                val = [self.check_default(option, key, v) for v in val]
+            elif option.action == 'callback':
+                late_eval.add(option.dest)
+                opt_str = option.get_opt_string()
+                val = option.convert_value(opt_str, val)
+                # From take_action
+                args = option.callback_args or ()
+                kwargs = option.callback_kwargs or {}
+                option.callback(option, opt_str, val, self, *args, **kwargs)
+            else:
+                val = self.check_default(option, key, val)
+
+            defaults[option.dest] = val
+
+        for key in late_eval:
+            defaults[key] = getattr(self.values, key)
+        self.values = None
+        return defaults
+
+    def normalize_keys(self, items):
+        """Return a config dictionary with normalized keys regardless of
+        whether the keys were specified in environment variables or in config
+        files"""
+        normalized = {}
+        for key, val in items:
+            key = key.replace('_', '-')
+            if not key.startswith('--'):
+                key = '--%s' % key  # only prefer long opts
+            normalized[key] = val
+        return normalized
+
+    def get_config_section(self, name):
+        """Get a section of a configuration"""
+        if self.config.has_section(name):
+            return self.config.items(name)
+        return []
+
+    def get_environ_vars(self):
+        """Returns a generator with all environment vars with prefix PIP_"""
+        for key, val in os.environ.items():
+            if _environ_prefix_re.search(key):
+                yield (_environ_prefix_re.sub("", key).lower(), val)
+
+    def get_default_values(self):
+        """Overriding to make updating the defaults after instantiation of
+        the option parser possible; _update_defaults() does the dirty work."""
+        if not self.process_default_values:
+            # Old, pre-Optik 1.5 behaviour.
+ return optparse.Values(self.defaults) + + defaults = self._update_defaults(self.defaults.copy()) # ours + for option in self._get_all_options(): + default = defaults.get(option.dest) + if isinstance(default, string_types): + opt_str = option.get_opt_string() + defaults[option.dest] = option.check_value(opt_str, default) + return optparse.Values(defaults) + + def error(self, msg): + self.print_usage(sys.stderr) + self.exit(2, "%s\n" % msg) diff --git a/venv/lib/python3.5/site-packages/pip/cmdoptions.py b/venv/lib/python3.5/site-packages/pip/cmdoptions.py new file mode 100644 index 0000000..1fade87 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/cmdoptions.py @@ -0,0 +1,618 @@ +""" +shared options and groups + +The principle here is to define options once, but *not* instantiate them +globally. One reason being that options with action='append' can carry state +between parses. pip parses general options twice internally, and shouldn't +pass on state. To be consistent, all options will follow this design. + +""" +from __future__ import absolute_import + +from functools import partial +from optparse import OptionGroup, SUPPRESS_HELP, Option +import warnings + +from pip.index import ( + FormatControl, fmt_ctl_handle_mutual_exclude, fmt_ctl_no_binary, + fmt_ctl_no_use_wheel) +from pip.models import PyPI +from pip.locations import USER_CACHE_DIR, src_prefix +from pip.utils.hashes import STRONG_HASHES + + +def make_option_group(group, parser): + """ + Return an OptionGroup object + group -- assumed to be dict with 'name' and 'options' keys + parser -- an optparse Parser + """ + option_group = OptionGroup(parser, group['name']) + for option in group['options']: + option_group.add_option(option()) + return option_group + + +def resolve_wheel_no_use_binary(options): + if not options.use_wheel: + control = options.format_control + fmt_ctl_no_use_wheel(control) + + +def check_install_build_global(options, check_options=None): + """Disable wheels if per-setup.py call options are set. + + :param options: The OptionParser options to update. + :param check_options: The options to check, if not supplied defaults to + options. + """ + if check_options is None: + check_options = options + + def getname(n): + return getattr(check_options, n, None) + names = ["build_options", "global_options", "install_options"] + if any(map(getname, names)): + control = options.format_control + fmt_ctl_no_binary(control) + warnings.warn( + 'Disabling all use of wheels due to the use of --build-options ' + '/ --global-options / --install-options.', stacklevel=2) + + +########### +# options # +########### + +help_ = partial( + Option, + '-h', '--help', + dest='help', + action='help', + help='Show help.') + +isolated_mode = partial( + Option, + "--isolated", + dest="isolated_mode", + action="store_true", + default=False, + help=( + "Run pip in an isolated mode, ignoring environment variables and user " + "configuration." + ), +) + +require_virtualenv = partial( + Option, + # Run only if inside a virtualenv, bail if not. + '--require-virtualenv', '--require-venv', + dest='require_venv', + action='store_true', + default=False, + help=SUPPRESS_HELP) + +verbose = partial( + Option, + '-v', '--verbose', + dest='verbose', + action='count', + default=0, + help='Give more output. Option is additive, and can be used up to 3 times.' 
+) + +version = partial( + Option, + '-V', '--version', + dest='version', + action='store_true', + help='Show version and exit.') + +quiet = partial( + Option, + '-q', '--quiet', + dest='quiet', + action='count', + default=0, + help='Give less output.') + +log = partial( + Option, + "--log", "--log-file", "--local-log", + dest="log", + metavar="path", + help="Path to a verbose appending log." +) + +no_input = partial( + Option, + # Don't ask for input + '--no-input', + dest='no_input', + action='store_true', + default=False, + help=SUPPRESS_HELP) + +proxy = partial( + Option, + '--proxy', + dest='proxy', + type='str', + default='', + help="Specify a proxy in the form [user:passwd@]proxy.server:port.") + +retries = partial( + Option, + '--retries', + dest='retries', + type='int', + default=5, + help="Maximum number of retries each connection should attempt " + "(default %default times).") + +timeout = partial( + Option, + '--timeout', '--default-timeout', + metavar='sec', + dest='timeout', + type='float', + default=15, + help='Set the socket timeout (default %default seconds).') + +default_vcs = partial( + Option, + # The default version control system for editables, e.g. 'svn' + '--default-vcs', + dest='default_vcs', + type='str', + default='', + help=SUPPRESS_HELP) + +skip_requirements_regex = partial( + Option, + # A regex to be used to skip requirements + '--skip-requirements-regex', + dest='skip_requirements_regex', + type='str', + default='', + help=SUPPRESS_HELP) + + +def exists_action(): + return Option( + # Option when path already exist + '--exists-action', + dest='exists_action', + type='choice', + choices=['s', 'i', 'w', 'b'], + default=[], + action='append', + metavar='action', + help="Default action when a path already exists: " + "(s)witch, (i)gnore, (w)ipe, (b)ackup.") + + +cert = partial( + Option, + '--cert', + dest='cert', + type='str', + metavar='path', + help="Path to alternate CA bundle.") + +client_cert = partial( + Option, + '--client-cert', + dest='client_cert', + type='str', + default=None, + metavar='path', + help="Path to SSL client certificate, a single file containing the " + "private key and the certificate in PEM format.") + +index_url = partial( + Option, + '-i', '--index-url', '--pypi-url', + dest='index_url', + metavar='URL', + default=PyPI.simple_url, + help='Base URL of Python Package Index (default %default).') + + +def extra_index_url(): + return Option( + '--extra-index-url', + dest='extra_index_urls', + metavar='URL', + action='append', + default=[], + help='Extra URLs of package indexes to use in addition to --index-url.' + ) + + +no_index = partial( + Option, + '--no-index', + dest='no_index', + action='store_true', + default=False, + help='Ignore package index (only looking at --find-links URLs instead).') + + +def find_links(): + return Option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='url', + help="If a url or path to an html file, then parse for links to " + "archives. 
If a local path or file:// url that's a directory, " + "then look for archives in the directory listing.") + + +def allow_external(): + return Option( + "--allow-external", + dest="allow_external", + action="append", + default=[], + metavar="PACKAGE", + help=SUPPRESS_HELP, + ) + + +allow_all_external = partial( + Option, + "--allow-all-external", + dest="allow_all_external", + action="store_true", + default=False, + help=SUPPRESS_HELP, +) + + +def trusted_host(): + return Option( + "--trusted-host", + dest="trusted_hosts", + action="append", + metavar="HOSTNAME", + default=[], + help="Mark this host as trusted, even though it does not have valid " + "or any HTTPS.", + ) + + +# Remove after 7.0 +no_allow_external = partial( + Option, + "--no-allow-external", + dest="allow_all_external", + action="store_false", + default=False, + help=SUPPRESS_HELP, +) + + +# Remove --allow-insecure after 7.0 +def allow_unsafe(): + return Option( + "--allow-unverified", "--allow-insecure", + dest="allow_unverified", + action="append", + default=[], + metavar="PACKAGE", + help=SUPPRESS_HELP, + ) + +# Remove after 7.0 +no_allow_unsafe = partial( + Option, + "--no-allow-insecure", + dest="allow_all_insecure", + action="store_false", + default=False, + help=SUPPRESS_HELP +) + +# Remove after 1.5 +process_dependency_links = partial( + Option, + "--process-dependency-links", + dest="process_dependency_links", + action="store_true", + default=False, + help="Enable the processing of dependency links.", +) + + +def constraints(): + return Option( + '-c', '--constraint', + dest='constraints', + action='append', + default=[], + metavar='file', + help='Constrain versions using the given constraints file. ' + 'This option can be used multiple times.') + + +def requirements(): + return Option( + '-r', '--requirement', + dest='requirements', + action='append', + default=[], + metavar='file', + help='Install from the given requirements file. ' + 'This option can be used multiple times.') + + +def editable(): + return Option( + '-e', '--editable', + dest='editables', + action='append', + default=[], + metavar='path/url', + help=('Install a project in editable mode (i.e. setuptools ' + '"develop mode") from a local project path or a VCS url.'), + ) + +src = partial( + Option, + '--src', '--source', '--source-dir', '--source-directory', + dest='src_dir', + metavar='dir', + default=src_prefix, + help='Directory to check out editable projects into. ' + 'The default in a virtualenv is "/src". ' + 'The default for global installs is "/src".' +) + +# XXX: deprecated, remove in 9.0 +use_wheel = partial( + Option, + '--use-wheel', + dest='use_wheel', + action='store_true', + default=True, + help=SUPPRESS_HELP, +) + +# XXX: deprecated, remove in 9.0 +no_use_wheel = partial( + Option, + '--no-use-wheel', + dest='use_wheel', + action='store_false', + default=True, + help=('Do not Find and prefer wheel archives when searching indexes and ' + 'find-links locations. 
DEPRECATED in favour of --no-binary.'), +) + + +def _get_format_control(values, option): + """Get a format_control object.""" + return getattr(values, option.dest) + + +def _handle_no_binary(option, opt_str, value, parser): + existing = getattr(parser.values, option.dest) + fmt_ctl_handle_mutual_exclude( + value, existing.no_binary, existing.only_binary) + + +def _handle_only_binary(option, opt_str, value, parser): + existing = getattr(parser.values, option.dest) + fmt_ctl_handle_mutual_exclude( + value, existing.only_binary, existing.no_binary) + + +def no_binary(): + return Option( + "--no-binary", dest="format_control", action="callback", + callback=_handle_no_binary, type="str", + default=FormatControl(set(), set()), + help="Do not use binary packages. Can be supplied multiple times, and " + "each time adds to the existing value. Accepts either :all: to " + "disable all binary packages, :none: to empty the set, or one or " + "more package names with commas between them. Note that some " + "packages are tricky to compile and may fail to install when " + "this option is used on them.") + + +def only_binary(): + return Option( + "--only-binary", dest="format_control", action="callback", + callback=_handle_only_binary, type="str", + default=FormatControl(set(), set()), + help="Do not use source packages. Can be supplied multiple times, and " + "each time adds to the existing value. Accepts either :all: to " + "disable all source packages, :none: to empty the set, or one or " + "more package names with commas between them. Packages without " + "binary distributions will fail to install when this option is " + "used on them.") + + +cache_dir = partial( + Option, + "--cache-dir", + dest="cache_dir", + default=USER_CACHE_DIR, + metavar="dir", + help="Store the cache data in ." +) + +no_cache = partial( + Option, + "--no-cache-dir", + dest="cache_dir", + action="store_false", + help="Disable the cache.", +) + +no_deps = partial( + Option, + '--no-deps', '--no-dependencies', + dest='ignore_dependencies', + action='store_true', + default=False, + help="Don't install package dependencies.") + +build_dir = partial( + Option, + '-b', '--build', '--build-dir', '--build-directory', + dest='build_dir', + metavar='dir', + help='Directory to unpack packages into and build in.' +) + +install_options = partial( + Option, + '--install-option', + dest='install_options', + action='append', + metavar='options', + help="Extra arguments to be supplied to the setup.py install " + "command (use like --install-option=\"--install-scripts=/usr/local/" + "bin\"). Use multiple --install-option options to pass multiple " + "options to setup.py install. If you are using an option with a " + "directory path, be sure to use absolute path.") + +global_options = partial( + Option, + '--global-option', + dest='global_options', + action='append', + metavar='options', + help="Extra global options to be supplied to the setup.py " + "call before the install command.") + +no_clean = partial( + Option, + '--no-clean', + action='store_true', + default=False, + help="Don't clean up build directories.") + +pre = partial( + Option, + '--pre', + action='store_true', + default=False, + help="Include pre-release and development versions. 
By default, " + "pip only finds stable versions.") + +disable_pip_version_check = partial( + Option, + "--disable-pip-version-check", + dest="disable_pip_version_check", + action="store_true", + default=False, + help="Don't periodically check PyPI to determine whether a new version " + "of pip is available for download. Implied with --no-index.") + +# Deprecated, Remove later +always_unzip = partial( + Option, + '-Z', '--always-unzip', + dest='always_unzip', + action='store_true', + help=SUPPRESS_HELP, +) + + +def _merge_hash(option, opt_str, value, parser): + """Given a value spelled "algo:digest", append the digest to a list + pointed to in a dict by the algo name.""" + if not parser.values.hashes: + parser.values.hashes = {} + try: + algo, digest = value.split(':', 1) + except ValueError: + parser.error('Arguments to %s must be a hash name ' + 'followed by a value, like --hash=sha256:abcde...' % + opt_str) + if algo not in STRONG_HASHES: + parser.error('Allowed hash algorithms for %s are %s.' % + (opt_str, ', '.join(STRONG_HASHES))) + parser.values.hashes.setdefault(algo, []).append(digest) + + +hash = partial( + Option, + '--hash', + # Hash values eventually end up in InstallRequirement.hashes due to + # __dict__ copying in process_line(). + dest='hashes', + action='callback', + callback=_merge_hash, + type='string', + help="Verify that the package's archive matches this " + 'hash before installing. Example: --hash=sha256:abcdef...') + + +require_hashes = partial( + Option, + '--require-hashes', + dest='require_hashes', + action='store_true', + default=False, + help='Require a hash to check each requirement against, for ' + 'repeatable installs. This option is implied when any package in a ' + 'requirements file has a --hash option.') + + +########## +# groups # +########## + +general_group = { + 'name': 'General Options', + 'options': [ + help_, + isolated_mode, + require_virtualenv, + verbose, + version, + quiet, + log, + no_input, + proxy, + retries, + timeout, + default_vcs, + skip_requirements_regex, + exists_action, + trusted_host, + cert, + client_cert, + cache_dir, + no_cache, + disable_pip_version_check, + ] +} + +non_deprecated_index_group = { + 'name': 'Package Index Options', + 'options': [ + index_url, + extra_index_url, + no_index, + find_links, + process_dependency_links, + ] +} + +index_group = { + 'name': 'Package Index Options (including deprecated options)', + 'options': non_deprecated_index_group['options'] + [ + allow_external, + allow_all_external, + no_allow_external, + allow_unsafe, + no_allow_unsafe, + ] +} diff --git a/venv/lib/python3.5/site-packages/pip/commands/__init__.py b/venv/lib/python3.5/site-packages/pip/commands/__init__.py new file mode 100644 index 0000000..92b7ff5 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/commands/__init__.py @@ -0,0 +1,83 @@ +""" +Package containing all pip commands +""" +from __future__ import absolute_import + +from pip.commands.completion import CompletionCommand +from pip.commands.download import DownloadCommand +from pip.commands.freeze import FreezeCommand +from pip.commands.hash import HashCommand +from pip.commands.help import HelpCommand +from pip.commands.list import ListCommand +from pip.commands.search import SearchCommand +from pip.commands.show import ShowCommand +from pip.commands.install import InstallCommand +from pip.commands.uninstall import UninstallCommand +from pip.commands.wheel import WheelCommand + + +commands_dict = { + CompletionCommand.name: CompletionCommand, + FreezeCommand.name: 
FreezeCommand, + HashCommand.name: HashCommand, + HelpCommand.name: HelpCommand, + SearchCommand.name: SearchCommand, + ShowCommand.name: ShowCommand, + InstallCommand.name: InstallCommand, + UninstallCommand.name: UninstallCommand, + DownloadCommand.name: DownloadCommand, + ListCommand.name: ListCommand, + WheelCommand.name: WheelCommand, +} + + +commands_order = [ + InstallCommand, + DownloadCommand, + UninstallCommand, + FreezeCommand, + ListCommand, + ShowCommand, + SearchCommand, + WheelCommand, + HashCommand, + CompletionCommand, + HelpCommand, +] + + +def get_summaries(ordered=True): + """Yields sorted (command name, command summary) tuples.""" + + if ordered: + cmditems = _sort_commands(commands_dict, commands_order) + else: + cmditems = commands_dict.items() + + for name, command_class in cmditems: + yield (name, command_class.summary) + + +def get_similar_commands(name): + """Command name auto-correct.""" + from difflib import get_close_matches + + name = name.lower() + + close_commands = get_close_matches(name, commands_dict.keys()) + + if close_commands: + return close_commands[0] + else: + return False + + +def _sort_commands(cmddict, order): + def keyfn(key): + try: + return order.index(key[1]) + except ValueError: + # unordered items should come last + return 0xff + + return sorted(cmddict.items(), key=keyfn) diff --git a/venv/lib/python3.5/site-packages/pip/commands/completion.py b/venv/lib/python3.5/site-packages/pip/commands/completion.py new file mode 100644 index 0000000..dc80af3 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/commands/completion.py @@ -0,0 +1,67 @@ +from __future__ import absolute_import + +import sys +from pip.basecommand import Command + +BASE_COMPLETION = """ +# pip %(shell)s completion start%(script)s# pip %(shell)s completion end +""" + +COMPLETION_SCRIPTS = { + 'bash': """ +_pip_completion() +{ + COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\ + COMP_CWORD=$COMP_CWORD \\ + PIP_AUTO_COMPLETE=1 $1 ) ) +} +complete -o default -F _pip_completion pip +""", 'zsh': """ +function _pip_completion { + local words cword + read -Ac words + read -cn cword + reply=( $( COMP_WORDS="$words[*]" \\ + COMP_CWORD=$(( cword-1 )) \\ + PIP_AUTO_COMPLETE=1 $words[1] ) ) +} +compctl -K _pip_completion pip +"""} + + +class CompletionCommand(Command): + """A helper command to be used for command completion.""" + name = 'completion' + summary = 'A helper command used for command completion' + + def __init__(self, *args, **kw): + super(CompletionCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '--bash', '-b', + action='store_const', + const='bash', + dest='shell', + help='Emit completion code for bash') + cmd_opts.add_option( + '--zsh', '-z', + action='store_const', + const='zsh', + dest='shell', + help='Emit completion code for zsh') + + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + """Prints the completion code of the given shell""" + shells = COMPLETION_SCRIPTS.keys() + shell_options = ['--' + shell for shell in sorted(shells)] + if options.shell in shells: + script = COMPLETION_SCRIPTS.get(options.shell, '') + print(BASE_COMPLETION % {'script': script, 'shell': options.shell}) + else: + sys.stderr.write( + 'ERROR: You must pass %s\n' % ' or '.join(shell_options) + ) diff --git a/venv/lib/python3.5/site-packages/pip/commands/download.py b/venv/lib/python3.5/site-packages/pip/commands/download.py new file mode 100644 index 0000000..4155e05 --- /dev/null +++ 
b/venv/lib/python3.5/site-packages/pip/commands/download.py
@@ -0,0 +1,136 @@
+from __future__ import absolute_import
+
+import logging
+import os
+
+from pip.req import RequirementSet
+from pip.basecommand import RequirementCommand
+from pip import cmdoptions
+from pip.utils import ensure_dir, normalize_path
+from pip.utils.build import BuildDirectory
+from pip.utils.filesystem import check_path_owner
+
+
+logger = logging.getLogger(__name__)
+
+
+class DownloadCommand(RequirementCommand):
+    """
+    Download packages from:
+
+    - PyPI (and other indexes) using requirement specifiers.
+    - VCS project urls.
+    - Local project directories.
+    - Local or remote source archives.
+
+    pip also supports downloading from "requirements files", which provide
+    an easy way to specify a whole environment to be downloaded.
+    """
+    name = 'download'
+
+    usage = """
+      %prog [options] <requirement specifier> [package-index-options] ...
+      %prog [options] -r <requirements file> [package-index-options] ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    summary = 'Download packages.'
+
+    def __init__(self, *args, **kw):
+        super(DownloadCommand, self).__init__(*args, **kw)
+
+        cmd_opts = self.cmd_opts
+
+        cmd_opts.add_option(cmdoptions.constraints())
+        cmd_opts.add_option(cmdoptions.editable())
+        cmd_opts.add_option(cmdoptions.requirements())
+        cmd_opts.add_option(cmdoptions.build_dir())
+        cmd_opts.add_option(cmdoptions.no_deps())
+        cmd_opts.add_option(cmdoptions.global_options())
+        cmd_opts.add_option(cmdoptions.no_binary())
+        cmd_opts.add_option(cmdoptions.only_binary())
+        cmd_opts.add_option(cmdoptions.src())
+        cmd_opts.add_option(cmdoptions.pre())
+        cmd_opts.add_option(cmdoptions.no_clean())
+        cmd_opts.add_option(cmdoptions.require_hashes())
+
+        cmd_opts.add_option(
+            '-d', '--dest', '--destination-dir', '--destination-directory',
+            dest='download_dir',
+            metavar='dir',
+            default=os.curdir,
+            help=("Download packages into <dir>."),
+        )
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.non_deprecated_index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, cmd_opts)
+
+    def run(self, options, args):
+        options.ignore_installed = True
+        options.src_dir = os.path.abspath(options.src_dir)
+        options.download_dir = normalize_path(options.download_dir)
+
+        ensure_dir(options.download_dir)
+
+        with self._build_session(options) as session:
+
+            finder = self._build_package_finder(options, session)
+            build_delete = (not (options.no_clean or options.build_dir))
+            if options.cache_dir and not check_path_owner(options.cache_dir):
+                logger.warning(
+                    "The directory '%s' or its parent directory is not owned "
+                    "by the current user and caching wheels has been "
+                    "disabled. Check the permissions and owner of that "
+                    "directory.
If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + with BuildDirectory(options.build_dir, + delete=build_delete) as build_dir: + + requirement_set = RequirementSet( + build_dir=build_dir, + src_dir=options.src_dir, + download_dir=options.download_dir, + ignore_installed=True, + ignore_dependencies=options.ignore_dependencies, + session=session, + isolated=options.isolated_mode, + require_hashes=options.require_hashes + ) + self.populate_requirement_set( + requirement_set, + args, + options, + finder, + session, + self.name, + None + ) + + if not requirement_set.has_requirements: + return + + requirement_set.prepare_files(finder) + + downloaded = ' '.join([ + req.name for req in requirement_set.successfully_downloaded + ]) + if downloaded: + logger.info( + 'Successfully downloaded %s', downloaded + ) + + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() + + return requirement_set diff --git a/venv/lib/python3.5/site-packages/pip/commands/freeze.py b/venv/lib/python3.5/site-packages/pip/commands/freeze.py new file mode 100644 index 0000000..0485d5f --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/commands/freeze.py @@ -0,0 +1,86 @@ +from __future__ import absolute_import + +import sys + +import pip +from pip.compat import stdlib_pkgs +from pip.basecommand import Command +from pip.operations.freeze import freeze +from pip.wheel import WheelCache + + +DEV_PKGS = ('pip', 'setuptools', 'distribute', 'wheel') + + +class FreezeCommand(Command): + """ + Output installed packages in requirements format. + + packages are listed in a case-insensitive sorted order. + """ + name = 'freeze' + usage = """ + %prog [options]""" + summary = 'Output installed packages in requirements format.' 
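# --- Editorial sketch, not part of pip's source: the log_streams override
# just below points both console log handlers at stderr, which is what keeps
# `pip freeze > requirements.txt` clean: stdout carries only the requirement
# lines. The same stdout/stderr split in miniature, with illustrative names:
import logging
import sys

log = logging.getLogger("freeze_sketch")
log.addHandler(logging.StreamHandler(sys.stderr))  # diagnostics go to stderr
log.warning("some diagnostic")  # still visible when stdout is redirected
print("example-pkg==1.0")       # the redirectable payload on stdout
# --- end of sketch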
+ log_streams = ("ext://sys.stderr", "ext://sys.stderr") + + def __init__(self, *args, **kw): + super(FreezeCommand, self).__init__(*args, **kw) + + self.cmd_opts.add_option( + '-r', '--requirement', + dest='requirement', + action='store', + default=None, + metavar='file', + help="Use the order in the given requirements file and its " + "comments when generating output.") + self.cmd_opts.add_option( + '-f', '--find-links', + dest='find_links', + action='append', + default=[], + metavar='URL', + help='URL for finding packages, which will be added to the ' + 'output.') + self.cmd_opts.add_option( + '-l', '--local', + dest='local', + action='store_true', + default=False, + help='If in a virtualenv that has global access, do not output ' + 'globally-installed packages.') + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + self.cmd_opts.add_option( + '--all', + dest='freeze_all', + action='store_true', + help='Do not skip these packages in the output:' + ' %s' % ', '.join(DEV_PKGS)) + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + format_control = pip.index.FormatControl(set(), set()) + wheel_cache = WheelCache(options.cache_dir, format_control) + skip = set(stdlib_pkgs) + if not options.freeze_all: + skip.update(DEV_PKGS) + + freeze_kwargs = dict( + requirement=options.requirement, + find_links=options.find_links, + local_only=options.local, + user_only=options.user, + skip_regex=options.skip_requirements_regex, + isolated=options.isolated_mode, + wheel_cache=wheel_cache, + skip=skip) + + for line in freeze(**freeze_kwargs): + sys.stdout.write(line + '\n') diff --git a/venv/lib/python3.5/site-packages/pip/commands/hash.py b/venv/lib/python3.5/site-packages/pip/commands/hash.py new file mode 100644 index 0000000..27cca0b --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/commands/hash.py @@ -0,0 +1,57 @@ +from __future__ import absolute_import + +import hashlib +import logging +import sys + +from pip.basecommand import Command +from pip.status_codes import ERROR +from pip.utils import read_chunks +from pip.utils.hashes import FAVORITE_HASH, STRONG_HASHES + + +logger = logging.getLogger(__name__) + + +class HashCommand(Command): + """ + Compute a hash of a local package archive. + + These can be used with --hash in a requirements file to do repeatable + installs. + + """ + name = 'hash' + usage = '%prog [options] ...' + summary = 'Compute hashes of package archives.' 
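# --- Editorial sketch, not part of pip's source: the command below just
# streams a file through a named digest. An equivalent standalone
# computation (FAVORITE_HASH appears to be 'sha256' in this pip release;
# the helper name is illustrative):
import hashlib

def sha256_of_file(path, blocksize=8192):
    """Return the hex SHA-256 digest of a file, read in small chunks."""
    digest = hashlib.sha256()
    with open(path, "rb") as archive:
        for chunk in iter(lambda: archive.read(blocksize), b""):
            digest.update(chunk)
    return digest.hexdigest()

# The digest pairs with a requirements-file entry such as
#   example-pkg==1.0 --hash=sha256:<hex digest>
# which `pip install --require-hashes` then verifies before installing.
# --- end of sketch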
+ + def __init__(self, *args, **kw): + super(HashCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-a', '--algorithm', + dest='algorithm', + choices=STRONG_HASHES, + action='store', + default=FAVORITE_HASH, + help='The hash algorithm to use: one of %s' % + ', '.join(STRONG_HASHES)) + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + self.parser.print_usage(sys.stderr) + return ERROR + + algorithm = options.algorithm + for path in args: + logger.info('%s:\n--hash=%s:%s', + path, algorithm, _hash_of_file(path, algorithm)) + + +def _hash_of_file(path, algorithm): + """Return the hash digest of a file.""" + with open(path, 'rb') as archive: + hash = hashlib.new(algorithm) + for chunk in read_chunks(archive): + hash.update(chunk) + return hash.hexdigest() diff --git a/venv/lib/python3.5/site-packages/pip/commands/help.py b/venv/lib/python3.5/site-packages/pip/commands/help.py new file mode 100644 index 0000000..11722f1 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/commands/help.py @@ -0,0 +1,35 @@ +from __future__ import absolute_import + +from pip.basecommand import Command, SUCCESS +from pip.exceptions import CommandError + + +class HelpCommand(Command): + """Show help for commands""" + name = 'help' + usage = """ + %prog """ + summary = 'Show help for commands.' + + def run(self, options, args): + from pip.commands import commands_dict, get_similar_commands + + try: + # 'pip help' with no args is handled by pip.__init__.parseopt() + cmd_name = args[0] # the command we need help for + except IndexError: + return SUCCESS + + if cmd_name not in commands_dict: + guess = get_similar_commands(cmd_name) + + msg = ['unknown command "%s"' % cmd_name] + if guess: + msg.append('maybe you meant "%s"' % guess) + + raise CommandError(' - '.join(msg)) + + command = commands_dict[cmd_name]() + command.parser.print_help() + + return SUCCESS diff --git a/venv/lib/python3.5/site-packages/pip/commands/install.py b/venv/lib/python3.5/site-packages/pip/commands/install.py new file mode 100644 index 0000000..13b328f --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/commands/install.py @@ -0,0 +1,404 @@ +from __future__ import absolute_import + +import logging +import operator +import os +import tempfile +import shutil +import warnings +try: + import wheel +except ImportError: + wheel = None + +from pip.req import RequirementSet +from pip.basecommand import RequirementCommand +from pip.locations import virtualenv_no_global, distutils_scheme +from pip.exceptions import ( + InstallationError, CommandError, PreviousBuildDirError, +) +from pip import cmdoptions +from pip.utils import ensure_dir +from pip.utils.build import BuildDirectory +from pip.utils.deprecation import RemovedInPip10Warning +from pip.utils.filesystem import check_path_owner +from pip.wheel import WheelCache, WheelBuilder + +from pip.locations import running_under_virtualenv + +logger = logging.getLogger(__name__) + + +class InstallCommand(RequirementCommand): + """ + Install packages from: + + - PyPI (and other indexes) using requirement specifiers. + - VCS project urls. + - Local project directories. + - Local or remote source archives. + + pip also supports installing from "requirements files", which provide + an easy way to specify a whole environment to be installed. + """ + name = 'install' + + usage = """ + %prog [options] [package-index-options] ... + %prog [options] -r [package-index-options] ... + %prog [options] [-e] ... + %prog [options] [-e] ... 
+      %prog [options] <archive url/path> ..."""
+
+    summary = 'Install packages.'
+
+    def __init__(self, *args, **kw):
+        super(InstallCommand, self).__init__(*args, **kw)
+
+        default_user = True
+        if running_under_virtualenv():
+            default_user = False
+        if os.geteuid() == 0:
+            default_user = False
+
+        cmd_opts = self.cmd_opts
+
+        cmd_opts.add_option(cmdoptions.constraints())
+        cmd_opts.add_option(cmdoptions.editable())
+        cmd_opts.add_option(cmdoptions.requirements())
+        cmd_opts.add_option(cmdoptions.build_dir())
+
+        cmd_opts.add_option(
+            '-t', '--target',
+            dest='target_dir',
+            metavar='dir',
+            default=None,
+            help='Install packages into <dir>. '
+                 'By default this will not replace existing files/folders in '
+                 '<dir>. Use --upgrade to replace existing packages in <dir> '
+                 'with new versions.'
+        )
+
+        cmd_opts.add_option(
+            '-d', '--download', '--download-dir', '--download-directory',
+            dest='download_dir',
+            metavar='dir',
+            default=None,
+            help=("Download packages into <dir> instead of installing them, "
+                  "regardless of what's already installed."),
+        )
+
+        cmd_opts.add_option(cmdoptions.src())
+
+        cmd_opts.add_option(
+            '-U', '--upgrade',
+            dest='upgrade',
+            action='store_true',
+            help='Upgrade all specified packages to the newest available '
+                 'version. This process is recursive regardless of whether '
+                 'a dependency is already satisfied.'
+        )
+
+        cmd_opts.add_option(
+            '--force-reinstall',
+            dest='force_reinstall',
+            action='store_true',
+            help='When upgrading, reinstall all packages even if they are '
+                 'already up-to-date.')
+
+        cmd_opts.add_option(
+            '-I', '--ignore-installed',
+            dest='ignore_installed',
+            action='store_true',
+            default=default_user,
+            help='Ignore the installed packages (reinstalling instead).')
+
+        cmd_opts.add_option(cmdoptions.no_deps())
+
+        cmd_opts.add_option(cmdoptions.install_options())
+        cmd_opts.add_option(cmdoptions.global_options())
+
+        cmd_opts.add_option(
+            '--user',
+            dest='use_user_site',
+            action='store_true',
+            default=default_user,
+            help="Install to the Python user install directory for your "
                 "platform. Typically ~/.local/, or %APPDATA%\Python on "
+                 "Windows. (See the Python documentation for site.USER_BASE "
+                 "for full details.) On Debian systems, this is the "
+                 "default when running outside of a virtual environment "
+                 "and not as root.")
+
+        cmd_opts.add_option(
+            '--system',
+            dest='use_user_site',
+            action='store_false',
+            help="Install using the system scheme (overrides --user on "
+                 "Debian systems)")
+
+        cmd_opts.add_option(
+            '--egg',
+            dest='as_egg',
+            action='store_true',
+            help="Install packages as eggs, not 'flat', like pip normally "
+                 "does. This option is not about installing *from* eggs.
" + "(WARNING: Because this option overrides pip's normal install" + " logic, requirements files may not behave as expected.)") + + cmd_opts.add_option( + '--root', + dest='root_path', + metavar='dir', + default=None, + help="Install everything relative to this alternate root " + "directory.") + + cmd_opts.add_option( + '--prefix', + dest='prefix_path', + metavar='dir', + default=None, + help="Installation prefix where lib, bin and other top-level " + "folders are placed") + + cmd_opts.add_option( + "--compile", + action="store_true", + dest="compile", + default=True, + help="Compile py files to pyc", + ) + + cmd_opts.add_option( + "--no-compile", + action="store_false", + dest="compile", + help="Do not compile py files to pyc", + ) + + cmd_opts.add_option(cmdoptions.use_wheel()) + cmd_opts.add_option(cmdoptions.no_use_wheel()) + cmd_opts.add_option(cmdoptions.no_binary()) + cmd_opts.add_option(cmdoptions.only_binary()) + cmd_opts.add_option(cmdoptions.pre()) + cmd_opts.add_option(cmdoptions.no_clean()) + cmd_opts.add_option(cmdoptions.require_hashes()) + + index_opts = cmdoptions.make_option_group( + cmdoptions.index_group, + self.parser, + ) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def run(self, options, args): + cmdoptions.resolve_wheel_no_use_binary(options) + cmdoptions.check_install_build_global(options) + + if options.allow_external: + warnings.warn( + "--allow-external has been deprecated and will be removed in " + "the future. Due to changes in the repository protocol, it no " + "longer has any effect.", + RemovedInPip10Warning, + ) + + if options.allow_all_external: + warnings.warn( + "--allow-all-external has been deprecated and will be removed " + "in the future. Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + if options.allow_unverified: + warnings.warn( + "--allow-unverified has been deprecated and will be removed " + "in the future. Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + if options.download_dir: + warnings.warn( + "pip install --download has been deprecated and will be " + "removed in the future. Pip now has a download command that " + "should be used instead.", + RemovedInPip10Warning, + ) + options.ignore_installed = True + + if options.build_dir: + options.build_dir = os.path.abspath(options.build_dir) + + options.src_dir = os.path.abspath(options.src_dir) + install_options = options.install_options or [] + if options.use_user_site: + if options.prefix_path: + raise CommandError( + "Can not combine '--user' and '--prefix' as they imply " + "different installation locations" + ) + if virtualenv_no_global(): + raise InstallationError( + "Can not perform a '--user' install. User site-packages " + "are not visible in this virtualenv." + ) + install_options.append('--user') + install_options.append('--prefix=') + + temp_target_dir = None + if options.target_dir: + options.ignore_installed = True + temp_target_dir = tempfile.mkdtemp() + options.target_dir = os.path.abspath(options.target_dir) + if (os.path.exists(options.target_dir) and not + os.path.isdir(options.target_dir)): + raise CommandError( + "Target path exists but is not a directory, will not " + "continue." 
+ ) + install_options.append('--home=' + temp_target_dir) + + global_options = options.global_options or [] + + with self._build_session(options) as session: + + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + if options.cache_dir and not check_path_owner(options.cache_dir): + logger.warning( + "The directory '%s' or its parent directory is not owned " + "by the current user and caching wheels has been " + "disabled. check the permissions and owner of that " + "directory. If executing pip with sudo, you may want " + "sudo's -H flag.", + options.cache_dir, + ) + options.cache_dir = None + + with BuildDirectory(options.build_dir, + delete=build_delete) as build_dir: + requirement_set = RequirementSet( + build_dir=build_dir, + src_dir=options.src_dir, + download_dir=options.download_dir, + upgrade=options.upgrade, + as_egg=options.as_egg, + ignore_installed=options.ignore_installed, + ignore_dependencies=options.ignore_dependencies, + force_reinstall=options.force_reinstall, + use_user_site=options.use_user_site, + target_dir=temp_target_dir, + session=session, + pycompile=options.compile, + isolated=options.isolated_mode, + wheel_cache=wheel_cache, + require_hashes=options.require_hashes, + ) + + self.populate_requirement_set( + requirement_set, args, options, finder, session, self.name, + wheel_cache + ) + + if not requirement_set.has_requirements: + return + + try: + if (options.download_dir or not wheel or not + options.cache_dir): + # on -d don't do complex things like building + # wheels, and don't try to build wheels when wheel is + # not installed. + requirement_set.prepare_files(finder) + else: + # build wheels before install. + wb = WheelBuilder( + requirement_set, + finder, + build_options=[], + global_options=[], + ) + # Ignore the result: a failed wheel will be + # installed from the sdist/vcs whatever. + wb.build(autobuilding=True) + + if not options.download_dir: + requirement_set.install( + install_options, + global_options, + root=options.root_path, + prefix=options.prefix_path, + ) + reqs = sorted( + requirement_set.successfully_installed, + key=operator.attrgetter('name')) + items = [] + for req in reqs: + item = req.name + try: + if hasattr(req, 'installed_version'): + if req.installed_version: + item += '-' + req.installed_version + except Exception: + pass + items.append(item) + installed = ' '.join(items) + if installed: + logger.info('Successfully installed %s', installed) + else: + downloaded = ' '.join([ + req.name + for req in requirement_set.successfully_downloaded + ]) + if downloaded: + logger.info( + 'Successfully downloaded %s', downloaded + ) + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + # Clean up + if not options.no_clean: + requirement_set.cleanup_files() + + if options.target_dir: + ensure_dir(options.target_dir) + + lib_dir = distutils_scheme('', home=temp_target_dir)['purelib'] + + for item in os.listdir(lib_dir): + target_item_dir = os.path.join(options.target_dir, item) + if os.path.exists(target_item_dir): + if not options.upgrade: + logger.warning( + 'Target directory %s already exists. Specify ' + '--upgrade to force replacement.', + target_item_dir + ) + continue + if os.path.islink(target_item_dir): + logger.warning( + 'Target directory %s already exists and is ' + 'a link. 
Pip will not automatically replace ' + 'links, please remove if replacement is ' + 'desired.', + target_item_dir + ) + continue + if os.path.isdir(target_item_dir): + shutil.rmtree(target_item_dir) + else: + os.remove(target_item_dir) + + shutil.move( + os.path.join(lib_dir, item), + target_item_dir + ) + shutil.rmtree(temp_target_dir) + return requirement_set diff --git a/venv/lib/python3.5/site-packages/pip/commands/list.py b/venv/lib/python3.5/site-packages/pip/commands/list.py new file mode 100644 index 0000000..5346488 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/commands/list.py @@ -0,0 +1,209 @@ +from __future__ import absolute_import + +import logging +import warnings + +from pip.basecommand import Command +from pip.exceptions import CommandError +from pip.index import PackageFinder +from pip.utils import ( + get_installed_distributions, dist_is_editable) +from pip.utils.deprecation import RemovedInPip10Warning +from pip.cmdoptions import make_option_group, index_group + + +logger = logging.getLogger(__name__) + + +class ListCommand(Command): + """ + List installed packages, including editables. + + Packages are listed in a case-insensitive sorted order. + """ + name = 'list' + usage = """ + %prog [options]""" + summary = 'List installed packages.' + + def __init__(self, *args, **kw): + super(ListCommand, self).__init__(*args, **kw) + + cmd_opts = self.cmd_opts + + cmd_opts.add_option( + '-o', '--outdated', + action='store_true', + default=False, + help='List outdated packages') + cmd_opts.add_option( + '-u', '--uptodate', + action='store_true', + default=False, + help='List uptodate packages') + cmd_opts.add_option( + '-e', '--editable', + action='store_true', + default=False, + help='List editable projects.') + cmd_opts.add_option( + '-l', '--local', + action='store_true', + default=False, + help=('If in a virtualenv that has global access, do not list ' + 'globally-installed packages.'), + ) + self.cmd_opts.add_option( + '--user', + dest='user', + action='store_true', + default=False, + help='Only output packages installed in user-site.') + + cmd_opts.add_option( + '--pre', + action='store_true', + default=False, + help=("Include pre-release and development versions. By default, " + "pip only finds stable versions."), + ) + + index_opts = make_option_group(index_group, self.parser) + + self.parser.insert_option_group(0, index_opts) + self.parser.insert_option_group(0, cmd_opts) + + def _build_package_finder(self, options, index_urls, session): + """ + Create a package finder appropriate to this list command. + """ + return PackageFinder( + find_links=options.find_links, + index_urls=index_urls, + allow_all_prereleases=options.pre, + trusted_hosts=options.trusted_hosts, + process_dependency_links=options.process_dependency_links, + session=session, + ) + + def run(self, options, args): + if options.allow_external: + warnings.warn( + "--allow-external has been deprecated and will be removed in " + "the future. Due to changes in the repository protocol, it no " + "longer has any effect.", + RemovedInPip10Warning, + ) + + if options.allow_all_external: + warnings.warn( + "--allow-all-external has been deprecated and will be removed " + "in the future. Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + if options.allow_unverified: + warnings.warn( + "--allow-unverified has been deprecated and will be removed " + "in the future. 
Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + if options.outdated and options.uptodate: + raise CommandError( + "Options --outdated and --uptodate cannot be combined.") + + if options.outdated: + self.run_outdated(options) + elif options.uptodate: + self.run_uptodate(options) + else: + self.run_listing(options) + + def run_outdated(self, options): + for dist, latest_version, typ in sorted( + self.find_packages_latest_versions(options), + key=lambda p: p[0].project_name.lower()): + if latest_version > dist.parsed_version: + logger.info( + '%s - Latest: %s [%s]', + self.output_package(dist), latest_version, typ, + ) + + def find_packages_latest_versions(self, options): + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.info('Ignoring indexes: %s', ','.join(index_urls)) + index_urls = [] + + dependency_links = [] + for dist in get_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable): + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend( + dist.get_metadata_lines('dependency_links.txt'), + ) + + with self._build_session(options) as session: + finder = self._build_package_finder(options, index_urls, session) + finder.add_dependency_links(dependency_links) + + installed_packages = get_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + ) + for dist in installed_packages: + typ = 'unknown' + all_candidates = finder.find_all_candidates(dist.key) + if not options.pre: + # Remove prereleases + all_candidates = [candidate for candidate in all_candidates + if not candidate.version.is_prerelease] + + if not all_candidates: + continue + best_candidate = max(all_candidates, + key=finder._candidate_sort_key) + remote_version = best_candidate.version + if best_candidate.location.is_wheel: + typ = 'wheel' + else: + typ = 'sdist' + yield dist, remote_version, typ + + def run_listing(self, options): + installed_packages = get_installed_distributions( + local_only=options.local, + user_only=options.user, + editables_only=options.editable, + ) + self.output_package_listing(installed_packages) + + def output_package(self, dist): + if dist_is_editable(dist): + return '%s (%s, %s)' % ( + dist.project_name, + dist.version, + dist.location, + ) + else: + return '%s (%s)' % (dist.project_name, dist.version) + + def output_package_listing(self, installed_packages): + installed_packages = sorted( + installed_packages, + key=lambda dist: dist.project_name.lower(), + ) + for dist in installed_packages: + logger.info(self.output_package(dist)) + + def run_uptodate(self, options): + uptodate = [] + for dist, version, typ in self.find_packages_latest_versions(options): + if dist.parsed_version == version: + uptodate.append(dist) + self.output_package_listing(uptodate) diff --git a/venv/lib/python3.5/site-packages/pip/commands/search.py b/venv/lib/python3.5/site-packages/pip/commands/search.py new file mode 100644 index 0000000..3155e18 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/commands/search.py @@ -0,0 +1,146 @@ +from __future__ import absolute_import + +import logging +import sys +import textwrap + +from pip.basecommand import Command, SUCCESS +from pip.download import PipXmlrpcTransport +from pip.models import PyPI +from pip.utils import get_terminal_size +from pip.utils.logging import indent_log +from pip.exceptions import CommandError +from 
pip.status_codes import NO_MATCHES_FOUND
+from pip._vendor import pkg_resources
+from pip._vendor.six.moves import xmlrpc_client
+
+
+logger = logging.getLogger(__name__)
+
+
+class SearchCommand(Command):
+    """Search for PyPI packages whose name or summary contains <query>."""
+    name = 'search'
+    usage = """
+      %prog [options] <query>"""
+    summary = 'Search PyPI for packages.'
+
+    def __init__(self, *args, **kw):
+        super(SearchCommand, self).__init__(*args, **kw)
+        self.cmd_opts.add_option(
+            '--index',
+            dest='index',
+            metavar='URL',
+            default=PyPI.pypi_url,
+            help='Base URL of Python Package Index (default %default)')
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options, args):
+        if not args:
+            raise CommandError('Missing required argument (search query).')
+        query = args
+        pypi_hits = self.search(query, options)
+        hits = transform_hits(pypi_hits)
+
+        terminal_width = None
+        if sys.stdout.isatty():
+            terminal_width = get_terminal_size()[0]
+
+        print_results(hits, terminal_width=terminal_width)
+        if pypi_hits:
+            return SUCCESS
+        return NO_MATCHES_FOUND
+
+    def search(self, query, options):
+        index_url = options.index
+        with self._build_session(options) as session:
+            transport = PipXmlrpcTransport(index_url, session)
+            pypi = xmlrpc_client.ServerProxy(index_url, transport)
+            hits = pypi.search({'name': query, 'summary': query}, 'or')
+            return hits
+
+
+def transform_hits(hits):
+    """
+    The list from pypi is really a list of versions. We want a list of
+    packages with the list of versions stored inline. This converts the
+    list from pypi into one we can use.
+    """
+    packages = {}
+    for hit in hits:
+        name = hit['name']
+        summary = hit['summary']
+        version = hit['version']
+        score = hit['_pypi_ordering']
+        if score is None:
+            score = 0
+
+        if name not in packages.keys():
+            packages[name] = {
+                'name': name,
+                'summary': summary,
+                'versions': [version],
+                'score': score,
+            }
+        else:
+            packages[name]['versions'].append(version)
+
+            # if this is the highest version, replace summary and score
+            if version == highest_version(packages[name]['versions']):
+                packages[name]['summary'] = summary
+                packages[name]['score'] = score
+
+    # each record has a unique name now, so we will convert the dict into a
+    # list sorted by score
+    package_list = sorted(
+        packages.values(),
+        key=lambda x: x['score'],
+        reverse=True,
+    )
+    return package_list
+
+
+def print_results(hits, name_column_width=None, terminal_width=None):
+    if not hits:
+        return
+    if name_column_width is None:
+        name_column_width = max([
+            len(hit['name']) + len(hit.get('versions', ['-'])[-1])
+            for hit in hits
+        ]) + 4
+
+    installed_packages = [p.project_name for p in pkg_resources.working_set]
+    for hit in hits:
+        name = hit['name']
+        summary = hit['summary'] or ''
+        version = hit.get('versions', ['-'])[-1]
+        if terminal_width is not None:
+            # wrap and indent summary to fit terminal
+            summary = textwrap.wrap(
+                summary,
+                terminal_width - name_column_width - 5,
+            )
+            summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
+
+        line = '%-*s - %s' % (name_column_width,
+                              '%s (%s)' % (name, version), summary)
+        try:
+            logger.info(line)
+            if name in installed_packages:
+                dist = pkg_resources.get_distribution(name)
+                with indent_log():
+                    latest = highest_version(hit['versions'])
+                    if dist.version == latest:
+                        logger.info('INSTALLED: %s (latest)', dist.version)
+                    else:
+                        logger.info('INSTALLED: %s', dist.version)
+                        logger.info('LATEST:    %s', latest)
+        except UnicodeEncodeError:
+            pass
+
+
+def highest_version(versions):
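+    # Editor's note (added comment, not in the original source): sorting with
+    # pkg_resources.parse_version gives PEP 440 ordering, so for example
+    #   highest_version(['1.9', '1.10'])  ->  '1.10'
+    # whereas a plain string sort would pick '1.9'; the first element of the
+    # descending sort is the highest version.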
return next(iter( + sorted(versions, key=pkg_resources.parse_version, reverse=True) + )) diff --git a/venv/lib/python3.5/site-packages/pip/commands/show.py b/venv/lib/python3.5/site-packages/pip/commands/show.py new file mode 100644 index 0000000..52a673a --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/commands/show.py @@ -0,0 +1,154 @@ +from __future__ import absolute_import + +from email.parser import FeedParser +import logging +import os + +from pip.basecommand import Command +from pip.status_codes import SUCCESS, ERROR +from pip._vendor import pkg_resources + + +logger = logging.getLogger(__name__) + + +class ShowCommand(Command): + """Show information about one or more installed packages.""" + name = 'show' + usage = """ + %prog [options] ...""" + summary = 'Show information about installed packages.' + + def __init__(self, *args, **kw): + super(ShowCommand, self).__init__(*args, **kw) + self.cmd_opts.add_option( + '-f', '--files', + dest='files', + action='store_true', + default=False, + help='Show the full list of installed files for each package.') + + self.parser.insert_option_group(0, self.cmd_opts) + + def run(self, options, args): + if not args: + logger.warning('ERROR: Please provide a package name or names.') + return ERROR + query = args + + results = search_packages_info(query) + if not print_results(results, options.files): + return ERROR + return SUCCESS + + +def search_packages_info(query): + """ + Gather details from installed distributions. Print distribution name, + version, location, and installed files. Installed files requires a + pip generated 'installed-files.txt' in the distributions '.egg-info' + directory. + """ + installed = dict( + [(p.project_name.lower(), p) for p in pkg_resources.working_set]) + query_names = [name.lower() for name in query] + for dist in [installed[pkg] for pkg in query_names if pkg in installed]: + package = { + 'name': dist.project_name, + 'version': dist.version, + 'location': dist.location, + 'requires': [dep.project_name for dep in dist.requires()], + } + file_list = None + metadata = None + if isinstance(dist, pkg_resources.DistInfoDistribution): + # RECORDs should be part of .dist-info metadatas + if dist.has_metadata('RECORD'): + lines = dist.get_metadata_lines('RECORD') + paths = [l.split(',')[0] for l in lines] + paths = [os.path.join(dist.location, p) for p in paths] + file_list = [os.path.relpath(p, dist.location) for p in paths] + + if dist.has_metadata('METADATA'): + metadata = dist.get_metadata('METADATA') + else: + # Otherwise use pip's log for .egg-info's + if dist.has_metadata('installed-files.txt'): + paths = dist.get_metadata_lines('installed-files.txt') + paths = [os.path.join(dist.egg_info, p) for p in paths] + file_list = [os.path.relpath(p, dist.location) for p in paths] + + if dist.has_metadata('PKG-INFO'): + metadata = dist.get_metadata('PKG-INFO') + + if dist.has_metadata('entry_points.txt'): + entry_points = dist.get_metadata_lines('entry_points.txt') + package['entry_points'] = entry_points + + installer = None + if dist.has_metadata('INSTALLER'): + for line in dist.get_metadata_lines('INSTALLER'): + if line.strip(): + installer = line.strip() + break + package['installer'] = installer + + # @todo: Should pkg_resources.Distribution have a + # `get_pkg_info` method? 
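+        # Editor's note (added comment, hedged sketch): METADATA / PKG-INFO is
+        # an RFC 822-style header block, so email.parser.FeedParser can turn
+        # it into a mapping, roughly:
+        #   fp = FeedParser(); fp.feed('Name: pip\nSummary: installer\n')
+        #   fp.close().get('summary')  ->  'installer'   (lookups ignore case)
+        # The keys queried below are the lower-cased header names.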
+        feed_parser = FeedParser()
+        feed_parser.feed(metadata)
+        pkg_info_dict = feed_parser.close()
+        for key in ('metadata-version', 'summary',
+                    'home-page', 'author', 'author-email', 'license'):
+            package[key] = pkg_info_dict.get(key)
+
+        # It looks like FeedParser can not deal with repeated headers
+        classifiers = []
+        for line in metadata.splitlines():
+            if not line:
+                break
+            # Classifier: License :: OSI Approved :: MIT License
+            if line.startswith('Classifier: '):
+                classifiers.append(line[len('Classifier: '):])
+        package['classifiers'] = classifiers
+
+        if file_list:
+            package['files'] = sorted(file_list)
+        yield package
+
+
+def print_results(distributions, list_all_files):
+    """
+    Print the information from installed distributions found.
+    """
+    results_printed = False
+    for dist in distributions:
+        results_printed = True
+        logger.info("---")
+        logger.info("Metadata-Version: %s", dist.get('metadata-version'))
+        logger.info("Name: %s", dist['name'])
+        logger.info("Version: %s", dist['version'])
+        logger.info("Summary: %s", dist.get('summary'))
+        logger.info("Home-page: %s", dist.get('home-page'))
+        logger.info("Author: %s", dist.get('author'))
+        logger.info("Author-email: %s", dist.get('author-email'))
+        if dist['installer'] is not None:
+            logger.info("Installer: %s", dist['installer'])
+        logger.info("License: %s", dist.get('license'))
+        logger.info("Location: %s", dist['location'])
+        logger.info("Requires: %s", ', '.join(dist['requires']))
+        logger.info("Classifiers:")
+        for classifier in dist['classifiers']:
+            logger.info("  %s", classifier)
+        if list_all_files:
+            logger.info("Files:")
+            if 'files' in dist:
+                for line in dist['files']:
+                    logger.info("  %s", line.strip())
+            else:
+                logger.info("Cannot locate installed-files.txt")
+        if 'entry_points' in dist:
+            logger.info("Entry-points:")
+            for line in dist['entry_points']:
+                logger.info("  %s", line.strip())
+    return results_printed
diff --git a/venv/lib/python3.5/site-packages/pip/commands/uninstall.py b/venv/lib/python3.5/site-packages/pip/commands/uninstall.py
new file mode 100644
index 0000000..8ba1a7c
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/pip/commands/uninstall.py
@@ -0,0 +1,76 @@
+from __future__ import absolute_import
+
+import pip
+from pip.wheel import WheelCache
+from pip.req import InstallRequirement, RequirementSet, parse_requirements
+from pip.basecommand import Command
+from pip.exceptions import InstallationError
+
+
+class UninstallCommand(Command):
+    """
+    Uninstall packages.
+
+    pip is able to uninstall most installed packages. Known exceptions are:
+
+    - Pure distutils packages installed with ``python setup.py install``,
+      which leave behind no metadata to determine what files were installed.
+    - Script wrappers installed by ``python setup.py develop``.
+    """
+    name = 'uninstall'
+    usage = """
+      %prog [options] <package> ...
+      %prog [options] -r <requirements file> ..."""
+    summary = 'Uninstall packages.'
+
+    def __init__(self, *args, **kw):
+        super(UninstallCommand, self).__init__(*args, **kw)
+        self.cmd_opts.add_option(
+            '-r', '--requirement',
+            dest='requirements',
+            action='append',
+            default=[],
+            metavar='file',
+            help='Uninstall all the packages listed in the given requirements '
+                 'file. This option can be used multiple times.',
+        )
+        self.cmd_opts.add_option(
+            '-y', '--yes',
+            dest='yes',
+            action='store_true',
+            help="Don't ask for confirmation of uninstall deletions.")
+
+        self.parser.insert_option_group(0, self.cmd_opts)
+
+    def run(self, options, args):
+        with self._build_session(options) as session:
+            format_control = pip.index.FormatControl(set(), set())
+            wheel_cache = WheelCache(options.cache_dir, format_control)
+            requirement_set = RequirementSet(
+                build_dir=None,
+                src_dir=None,
+                download_dir=None,
+                isolated=options.isolated_mode,
+                session=session,
+                wheel_cache=wheel_cache,
+            )
+            for name in args:
+                requirement_set.add_requirement(
+                    InstallRequirement.from_line(
+                        name, isolated=options.isolated_mode,
+                        wheel_cache=wheel_cache
+                    )
+                )
+            for filename in options.requirements:
+                for req in parse_requirements(
+                        filename,
+                        options=options,
+                        session=session,
+                        wheel_cache=wheel_cache):
+                    requirement_set.add_requirement(req)
+            if not requirement_set.has_requirements:
+                raise InstallationError(
+                    'You must give at least one requirement to %(name)s (see '
+                    '"pip help %(name)s")' % dict(name=self.name)
+                )
+            requirement_set.uninstall(auto_confirm=options.yes)
diff --git a/venv/lib/python3.5/site-packages/pip/commands/wheel.py b/venv/lib/python3.5/site-packages/pip/commands/wheel.py
new file mode 100644
index 0000000..1d77fe6
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/pip/commands/wheel.py
@@ -0,0 +1,204 @@
+# -*- coding: utf-8 -*-
+from __future__ import absolute_import
+
+import logging
+import os
+import warnings
+
+from pip.basecommand import RequirementCommand
+from pip.exceptions import CommandError, PreviousBuildDirError
+from pip.req import RequirementSet
+from pip.utils import import_or_raise
+from pip.utils.build import BuildDirectory
+from pip.utils.deprecation import RemovedInPip10Warning
+from pip.wheel import WheelCache, WheelBuilder
+from pip import cmdoptions
+
+
+logger = logging.getLogger(__name__)
+
+
+class WheelCommand(RequirementCommand):
+    """
+    Build Wheel archives for your requirements and dependencies.
+
+    Wheel is a built-package format, and offers the advantage of not
+    recompiling your software during every install. For more details, see the
+    wheel docs: http://wheel.readthedocs.org/en/latest.
+
+    Requirements: setuptools>=0.8, and wheel.
+
+    'pip wheel' uses the bdist_wheel setuptools extension from the wheel
+    package to build individual wheels.
+
+    """
+
+    name = 'wheel'
+    usage = """
+      %prog [options] <requirement specifier> ...
+      %prog [options] -r <requirements file> ...
+      %prog [options] [-e] <vcs project url> ...
+      %prog [options] [-e] <local project path> ...
+      %prog [options] <archive url/path> ..."""
+
+    summary = 'Build wheels from your requirements.'
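+    # Editor's note (illustrative usage, not part of the original source):
+    #   pip wheel -r requirements.txt -w ./wheelhouse
+    # builds wheels for every requirement into ./wheelhouse; a later
+    #   pip install --no-index --find-links=./wheelhouse -r requirements.txt
+    # can then install entirely from the prebuilt wheels.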
+
+    def __init__(self, *args, **kw):
+        super(WheelCommand, self).__init__(*args, **kw)
+
+        cmd_opts = self.cmd_opts
+
+        cmd_opts.add_option(
+            '-w', '--wheel-dir',
+            dest='wheel_dir',
+            metavar='dir',
+            default=os.curdir,
+            help=("Build wheels into <dir>, where the default is the "
+                  "current working directory."),
+        )
+        cmd_opts.add_option(cmdoptions.use_wheel())
+        cmd_opts.add_option(cmdoptions.no_use_wheel())
+        cmd_opts.add_option(cmdoptions.no_binary())
+        cmd_opts.add_option(cmdoptions.only_binary())
+        cmd_opts.add_option(
+            '--build-option',
+            dest='build_options',
+            metavar='options',
+            action='append',
+            help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
+        cmd_opts.add_option(cmdoptions.constraints())
+        cmd_opts.add_option(cmdoptions.editable())
+        cmd_opts.add_option(cmdoptions.requirements())
+        cmd_opts.add_option(cmdoptions.src())
+        cmd_opts.add_option(cmdoptions.no_deps())
+        cmd_opts.add_option(cmdoptions.build_dir())
+
+        cmd_opts.add_option(
+            '--global-option',
+            dest='global_options',
+            action='append',
+            metavar='options',
+            help="Extra global options to be supplied to the setup.py "
+                 "call before the 'bdist_wheel' command.")
+
+        cmd_opts.add_option(
+            '--pre',
+            action='store_true',
+            default=False,
+            help=("Include pre-release and development versions. By default, "
+                  "pip only finds stable versions."),
+        )
+
+        cmd_opts.add_option(cmdoptions.no_clean())
+        cmd_opts.add_option(cmdoptions.require_hashes())
+
+        index_opts = cmdoptions.make_option_group(
+            cmdoptions.index_group,
+            self.parser,
+        )
+
+        self.parser.insert_option_group(0, index_opts)
+        self.parser.insert_option_group(0, cmd_opts)
+
+    def check_required_packages(self):
+        import_or_raise(
+            'wheel.bdist_wheel',
+            CommandError,
+            "'pip wheel' requires the 'wheel' package. To fix this, run: "
+            "pip install wheel"
+        )
+        pkg_resources = import_or_raise(
+            'pkg_resources',
+            CommandError,
+            "'pip wheel' requires setuptools >= 0.8 for dist-info support."
+            " To fix this, run: pip install --upgrade setuptools"
+        )
+        if not hasattr(pkg_resources, 'DistInfoDistribution'):
+            raise CommandError(
+                "'pip wheel' requires setuptools >= 0.8 for dist-info "
+                "support. To fix this, run: pip install --upgrade "
+                "setuptools"
+            )
+
+    def run(self, options, args):
+        self.check_required_packages()
+        cmdoptions.resolve_wheel_no_use_binary(options)
+        cmdoptions.check_install_build_global(options)
+
+        if options.allow_external:
+            warnings.warn(
+                "--allow-external has been deprecated and will be removed in "
+                "the future. Due to changes in the repository protocol, it no "
+                "longer has any effect.",
+                RemovedInPip10Warning,
+            )
+
+        if options.allow_all_external:
+            warnings.warn(
+                "--allow-all-external has been deprecated and will be removed "
+                "in the future. Due to changes in the repository protocol, it "
+                "no longer has any effect.",
+                RemovedInPip10Warning,
+            )
+
+        if options.allow_unverified:
+            warnings.warn(
+                "--allow-unverified has been deprecated and will be removed "
+                "in the future. 
Due to changes in the repository protocol, it " + "no longer has any effect.", + RemovedInPip10Warning, + ) + + index_urls = [options.index_url] + options.extra_index_urls + if options.no_index: + logger.info('Ignoring indexes: %s', ','.join(index_urls)) + index_urls = [] + + if options.build_dir: + options.build_dir = os.path.abspath(options.build_dir) + + with self._build_session(options) as session: + finder = self._build_package_finder(options, session) + build_delete = (not (options.no_clean or options.build_dir)) + wheel_cache = WheelCache(options.cache_dir, options.format_control) + with BuildDirectory(options.build_dir, + delete=build_delete) as build_dir: + requirement_set = RequirementSet( + build_dir=build_dir, + src_dir=options.src_dir, + download_dir=None, + ignore_dependencies=options.ignore_dependencies, + ignore_installed=True, + isolated=options.isolated_mode, + session=session, + wheel_cache=wheel_cache, + wheel_download_dir=options.wheel_dir, + require_hashes=options.require_hashes + ) + + self.populate_requirement_set( + requirement_set, args, options, finder, session, self.name, + wheel_cache + ) + + if not requirement_set.has_requirements: + return + + try: + # build wheels + wb = WheelBuilder( + requirement_set, + finder, + build_options=options.build_options or [], + global_options=options.global_options or [], + ) + if not wb.build(): + raise CommandError( + "Failed to build one or more wheels" + ) + except PreviousBuildDirError: + options.no_clean = True + raise + finally: + if not options.no_clean: + requirement_set.cleanup_files() diff --git a/venv/lib/python3.5/site-packages/pip/compat/__init__.py b/venv/lib/python3.5/site-packages/pip/compat/__init__.py new file mode 100644 index 0000000..703852b --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/compat/__init__.py @@ -0,0 +1,164 @@ +"""Stuff that differs in different Python versions and platform +distributions.""" +from __future__ import absolute_import, division + +import os +import sys + +from pip._vendor.six import text_type + +try: + from logging.config import dictConfig as logging_dictConfig +except ImportError: + from pip.compat.dictconfig import dictConfig as logging_dictConfig + +try: + from collections import OrderedDict +except ImportError: + from pip.compat.ordereddict import OrderedDict + +try: + import ipaddress +except ImportError: + try: + from pip._vendor import ipaddress + except ImportError: + import ipaddr as ipaddress + ipaddress.ip_address = ipaddress.IPAddress + ipaddress.ip_network = ipaddress.IPNetwork + + +try: + import sysconfig + + def get_stdlib(): + paths = [ + sysconfig.get_path("stdlib"), + sysconfig.get_path("platstdlib"), + ] + return set(filter(bool, paths)) +except ImportError: + from distutils import sysconfig + + def get_stdlib(): + paths = [ + sysconfig.get_python_lib(standard_lib=True), + sysconfig.get_python_lib(standard_lib=True, plat_specific=True), + ] + return set(filter(bool, paths)) + + +__all__ = [ + "logging_dictConfig", "ipaddress", "uses_pycache", "console_to_str", + "native_str", "get_path_uid", "stdlib_pkgs", "WINDOWS", "samefile", + "OrderedDict", +] + + +if sys.version_info >= (3, 4): + uses_pycache = True + from importlib.util import cache_from_source +else: + import imp + uses_pycache = hasattr(imp, 'cache_from_source') + if uses_pycache: + cache_from_source = imp.cache_from_source + else: + cache_from_source = None + + +if sys.version_info >= (3,): + def console_to_str(s): + try: + return s.decode(sys.__stdout__.encoding) + except 
UnicodeDecodeError: + return s.decode('utf_8') + + def native_str(s, replace=False): + if isinstance(s, bytes): + return s.decode('utf-8', 'replace' if replace else 'strict') + return s + +else: + def console_to_str(s): + return s + + def native_str(s, replace=False): + # Replace is ignored -- unicode to UTF-8 can't fail + if isinstance(s, text_type): + return s.encode('utf-8') + return s + + +def total_seconds(td): + if hasattr(td, "total_seconds"): + return td.total_seconds() + else: + val = td.microseconds + (td.seconds + td.days * 24 * 3600) * 10 ** 6 + return val / 10 ** 6 + + +def get_path_uid(path): + """ + Return path's uid. + + Does not follow symlinks: + https://github.com/pypa/pip/pull/935#discussion_r5307003 + + Placed this function in compat due to differences on AIX and + Jython, that should eventually go away. + + :raises OSError: When path is a symlink or can't be read. + """ + if hasattr(os, 'O_NOFOLLOW'): + fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW) + file_uid = os.fstat(fd).st_uid + os.close(fd) + else: # AIX and Jython + # WARNING: time of check vulnerabity, but best we can do w/o NOFOLLOW + if not os.path.islink(path): + # older versions of Jython don't have `os.fstat` + file_uid = os.stat(path).st_uid + else: + # raise OSError for parity with os.O_NOFOLLOW above + raise OSError( + "%s is a symlink; Will not return uid for symlinks" % path + ) + return file_uid + + +def expanduser(path): + """ + Expand ~ and ~user constructions. + + Includes a workaround for http://bugs.python.org/issue14768 + """ + expanded = os.path.expanduser(path) + if path.startswith('~/') and expanded.startswith('//'): + expanded = expanded[1:] + return expanded + + +# packages in the stdlib that may have installation metadata, but should not be +# considered 'installed'. this theoretically could be determined based on +# dist.location (py27:`sysconfig.get_paths()['stdlib']`, +# py26:sysconfig.get_config_vars('LIBDEST')), but fear platform variation may +# make this ineffective, so hard-coding +stdlib_pkgs = ('python', 'wsgiref') +if sys.version_info >= (2, 7): + stdlib_pkgs += ('argparse',) + + +# windows detection, covers cpython and ironpython +WINDOWS = (sys.platform.startswith("win") or + (sys.platform == 'cli' and os.name == 'nt')) + + +def samefile(file1, file2): + """Provide an alternative for os.path.samefile on Windows/Python2""" + if hasattr(os.path, 'samefile'): + return os.path.samefile(file1, file2) + else: + path1 = os.path.normcase(os.path.abspath(file1)) + path2 = os.path.normcase(os.path.abspath(file2)) + return path1 == path2 diff --git a/venv/lib/python3.5/site-packages/pip/compat/dictconfig.py b/venv/lib/python3.5/site-packages/pip/compat/dictconfig.py new file mode 100644 index 0000000..ec684aa --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/compat/dictconfig.py @@ -0,0 +1,565 @@ +# This is a copy of the Python logging.config.dictconfig module, +# reproduced with permission. It is provided here for backwards +# compatibility for Python versions prior to 2.7. +# +# Copyright 2009-2010 by Vinay Sajip. All Rights Reserved. 
+# +# Permission to use, copy, modify, and distribute this software and its +# documentation for any purpose and without fee is hereby granted, +# provided that the above copyright notice appear in all copies and that +# both that copyright notice and this permission notice appear in +# supporting documentation, and that the name of Vinay Sajip +# not be used in advertising or publicity pertaining to distribution +# of the software without specific, written prior permission. +# VINAY SAJIP DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE, INCLUDING +# ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL +# VINAY SAJIP BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR +# ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER +# IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT +# OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. +from __future__ import absolute_import + +import logging.handlers +import re +import sys +import types + +from pip._vendor import six + +# flake8: noqa + +IDENTIFIER = re.compile('^[a-z_][a-z0-9_]*$', re.I) + + +def valid_ident(s): + m = IDENTIFIER.match(s) + if not m: + raise ValueError('Not a valid Python identifier: %r' % s) + return True + +# +# This function is defined in logging only in recent versions of Python +# +try: + from logging import _checkLevel +except ImportError: + def _checkLevel(level): + if isinstance(level, int): + rv = level + elif str(level) == level: + if level not in logging._levelNames: + raise ValueError('Unknown level: %r' % level) + rv = logging._levelNames[level] + else: + raise TypeError('Level not an integer or a ' + 'valid string: %r' % level) + return rv + +# The ConvertingXXX classes are wrappers around standard Python containers, +# and they serve to convert any suitable values in the container. The +# conversion converts base dicts, lists and tuples to their wrapped +# equivalents, whereas strings which match a conversion format are converted +# appropriately. +# +# Each wrapper should have a configurator attribute holding the actual +# configurator to use for conversion. 
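+# Editor's note (added comment, hedged illustration): once a configurator is
+# attached, reads through a ConvertingDict resolve protocol-prefixed strings:
+# a value such as 'ext://sys.stderr' is imported and replaced by the actual
+# sys.stderr object, while 'cfg://handlers.console' is looked up inside the
+# configuration dict itself; ordinary values pass through unchanged.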
+
+
+class ConvertingDict(dict):
+    """A converting dictionary wrapper."""
+
+    def __getitem__(self, key):
+        value = dict.__getitem__(self, key)
+        result = self.configurator.convert(value)
+        # If the converted value is different, save for next time
+        if value is not result:
+            self[key] = result
+            if type(result) in (ConvertingDict, ConvertingList,
+                                ConvertingTuple):
+                result.parent = self
+                result.key = key
+        return result
+
+    def get(self, key, default=None):
+        value = dict.get(self, key, default)
+        result = self.configurator.convert(value)
+        # If the converted value is different, save for next time
+        if value is not result:
+            self[key] = result
+            if type(result) in (ConvertingDict, ConvertingList,
+                                ConvertingTuple):
+                result.parent = self
+                result.key = key
+        return result
+
+    def pop(self, key, default=None):
+        value = dict.pop(self, key, default)
+        result = self.configurator.convert(value)
+        if value is not result:
+            if type(result) in (ConvertingDict, ConvertingList,
+                                ConvertingTuple):
+                result.parent = self
+                result.key = key
+        return result
+
+
+class ConvertingList(list):
+    """A converting list wrapper."""
+    def __getitem__(self, key):
+        value = list.__getitem__(self, key)
+        result = self.configurator.convert(value)
+        # If the converted value is different, save for next time
+        if value is not result:
+            self[key] = result
+            if type(result) in (ConvertingDict, ConvertingList,
+                                ConvertingTuple):
+                result.parent = self
+                result.key = key
+        return result
+
+    def pop(self, idx=-1):
+        value = list.pop(self, idx)
+        result = self.configurator.convert(value)
+        if value is not result:
+            if type(result) in (ConvertingDict, ConvertingList,
+                                ConvertingTuple):
+                result.parent = self
+        return result
+
+
+class ConvertingTuple(tuple):
+    """A converting tuple wrapper."""
+    def __getitem__(self, key):
+        value = tuple.__getitem__(self, key)
+        result = self.configurator.convert(value)
+        if value is not result:
+            if type(result) in (ConvertingDict, ConvertingList,
+                                ConvertingTuple):
+                result.parent = self
+                result.key = key
+        return result
+
+
+class BaseConfigurator(object):
+    """
+    The configurator base class which defines some useful defaults.
+    """
+
+    CONVERT_PATTERN = re.compile(r'^(?P<prefix>[a-z]+)://(?P<suffix>.*)$')
+
+    WORD_PATTERN = re.compile(r'^\s*(\w+)\s*')
+    DOT_PATTERN = re.compile(r'^\.\s*(\w+)\s*')
+    INDEX_PATTERN = re.compile(r'^\[\s*(\w+)\s*\]\s*')
+    DIGIT_PATTERN = re.compile(r'^\d+$')
+
+    value_converters = {
+        'ext' : 'ext_convert',
+        'cfg' : 'cfg_convert',
+    }
+
+    # We might want to use a different one, e.g. importlib
+    importer = __import__
+
+    def __init__(self, config):
+        self.config = ConvertingDict(config)
+        self.config.configurator = self
+
+    def resolve(self, s):
+        """
+        Resolve strings to objects using standard import and attribute
+        syntax.
+        """
+        name = s.split('.')
+        used = name.pop(0)
+        try:
+            found = self.importer(used)
+            for frag in name:
+                used += '.'
+ frag + try: + found = getattr(found, frag) + except AttributeError: + self.importer(used) + found = getattr(found, frag) + return found + except ImportError: + e, tb = sys.exc_info()[1:] + v = ValueError('Cannot resolve %r: %s' % (s, e)) + v.__cause__, v.__traceback__ = e, tb + raise v + + def ext_convert(self, value): + """Default converter for the ext:// protocol.""" + return self.resolve(value) + + def cfg_convert(self, value): + """Default converter for the cfg:// protocol.""" + rest = value + m = self.WORD_PATTERN.match(rest) + if m is None: + raise ValueError("Unable to convert %r" % value) + else: + rest = rest[m.end():] + d = self.config[m.groups()[0]] + # print d, rest + while rest: + m = self.DOT_PATTERN.match(rest) + if m: + d = d[m.groups()[0]] + else: + m = self.INDEX_PATTERN.match(rest) + if m: + idx = m.groups()[0] + if not self.DIGIT_PATTERN.match(idx): + d = d[idx] + else: + try: + n = int(idx) # try as number first (most likely) + d = d[n] + except TypeError: + d = d[idx] + if m: + rest = rest[m.end():] + else: + raise ValueError('Unable to convert ' + '%r at %r' % (value, rest)) + # rest should be empty + return d + + def convert(self, value): + """ + Convert values to an appropriate type. dicts, lists and tuples are + replaced by their converting alternatives. Strings are checked to + see if they have a conversion format and are converted if they do. + """ + if not isinstance(value, ConvertingDict) and isinstance(value, dict): + value = ConvertingDict(value) + value.configurator = self + elif not isinstance(value, ConvertingList) and isinstance(value, list): + value = ConvertingList(value) + value.configurator = self + elif not isinstance(value, ConvertingTuple) and\ + isinstance(value, tuple): + value = ConvertingTuple(value) + value.configurator = self + elif isinstance(value, six.string_types): # str for py3k + m = self.CONVERT_PATTERN.match(value) + if m: + d = m.groupdict() + prefix = d['prefix'] + converter = self.value_converters.get(prefix, None) + if converter: + suffix = d['suffix'] + converter = getattr(self, converter) + value = converter(suffix) + return value + + def configure_custom(self, config): + """Configure an object with a user-supplied factory.""" + c = config.pop('()') + if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType: + c = self.resolve(c) + props = config.pop('.', None) + # Check for valid identifiers + kwargs = dict((k, config[k]) for k in config if valid_ident(k)) + result = c(**kwargs) + if props: + for name, value in props.items(): + setattr(result, name, value) + return result + + def as_tuple(self, value): + """Utility function which converts lists to tuples.""" + if isinstance(value, list): + value = tuple(value) + return value + + +class DictConfigurator(BaseConfigurator): + """ + Configure logging using a dictionary-like object to describe the + configuration. 
+ """ + + def configure(self): + """Do the configuration.""" + + config = self.config + if 'version' not in config: + raise ValueError("dictionary doesn't specify a version") + if config['version'] != 1: + raise ValueError("Unsupported version: %s" % config['version']) + incremental = config.pop('incremental', False) + EMPTY_DICT = {} + logging._acquireLock() + try: + if incremental: + handlers = config.get('handlers', EMPTY_DICT) + # incremental handler config only if handler name + # ties in to logging._handlers (Python 2.7) + if sys.version_info[:2] == (2, 7): + for name in handlers: + if name not in logging._handlers: + raise ValueError('No handler found with ' + 'name %r' % name) + else: + try: + handler = logging._handlers[name] + handler_config = handlers[name] + level = handler_config.get('level', None) + if level: + handler.setLevel(_checkLevel(level)) + except StandardError as e: + raise ValueError('Unable to configure handler ' + '%r: %s' % (name, e)) + loggers = config.get('loggers', EMPTY_DICT) + for name in loggers: + try: + self.configure_logger(name, loggers[name], True) + except StandardError as e: + raise ValueError('Unable to configure logger ' + '%r: %s' % (name, e)) + root = config.get('root', None) + if root: + try: + self.configure_root(root, True) + except StandardError as e: + raise ValueError('Unable to configure root ' + 'logger: %s' % e) + else: + disable_existing = config.pop('disable_existing_loggers', True) + + logging._handlers.clear() + del logging._handlerList[:] + + # Do formatters first - they don't refer to anything else + formatters = config.get('formatters', EMPTY_DICT) + for name in formatters: + try: + formatters[name] = self.configure_formatter( + formatters[name]) + except StandardError as e: + raise ValueError('Unable to configure ' + 'formatter %r: %s' % (name, e)) + # Next, do filters - they don't refer to anything else, either + filters = config.get('filters', EMPTY_DICT) + for name in filters: + try: + filters[name] = self.configure_filter(filters[name]) + except StandardError as e: + raise ValueError('Unable to configure ' + 'filter %r: %s' % (name, e)) + + # Next, do handlers - they refer to formatters and filters + # As handlers can refer to other handlers, sort the keys + # to allow a deterministic order of configuration + handlers = config.get('handlers', EMPTY_DICT) + for name in sorted(handlers): + try: + handler = self.configure_handler(handlers[name]) + handler.name = name + handlers[name] = handler + except StandardError as e: + raise ValueError('Unable to configure handler ' + '%r: %s' % (name, e)) + # Next, do loggers - they refer to handlers and filters + + # we don't want to lose the existing loggers, + # since other threads may have pointers to them. + # existing is set to contain all existing loggers, + # and as we go through the new configuration we + # remove any which are configured. At the end, + # what's left in existing is the set of loggers + # which were in the previous configuration but + # which are not in the new configuration. + root = logging.root + existing = list(root.manager.loggerDict) + # The list needs to be sorted so that we can + # avoid disabling child loggers of explicitly + # named loggers. With a sorted list it is easier + # to find the child loggers. + existing.sort() + # We'll keep the list of existing loggers + # which are children of named loggers here... + child_loggers = [] + # now set up the new ones... 
+ loggers = config.get('loggers', EMPTY_DICT) + for name in loggers: + if name in existing: + i = existing.index(name) + prefixed = name + "." + pflen = len(prefixed) + num_existing = len(existing) + i = i + 1 # look at the entry after name + while (i < num_existing) and\ + (existing[i][:pflen] == prefixed): + child_loggers.append(existing[i]) + i = i + 1 + existing.remove(name) + try: + self.configure_logger(name, loggers[name]) + except StandardError as e: + raise ValueError('Unable to configure logger ' + '%r: %s' % (name, e)) + + # Disable any old loggers. There's no point deleting + # them as other threads may continue to hold references + # and by disabling them, you stop them doing any logging. + # However, don't disable children of named loggers, as that's + # probably not what was intended by the user. + for log in existing: + logger = root.manager.loggerDict[log] + if log in child_loggers: + logger.level = logging.NOTSET + logger.handlers = [] + logger.propagate = True + elif disable_existing: + logger.disabled = True + + # And finally, do the root logger + root = config.get('root', None) + if root: + try: + self.configure_root(root) + except StandardError as e: + raise ValueError('Unable to configure root ' + 'logger: %s' % e) + finally: + logging._releaseLock() + + def configure_formatter(self, config): + """Configure a formatter from a dictionary.""" + if '()' in config: + factory = config['()'] # for use in exception handler + try: + result = self.configure_custom(config) + except TypeError as te: + if "'format'" not in str(te): + raise + # Name of parameter changed from fmt to format. + # Retry with old name. + # This is so that code can be used with older Python versions + #(e.g. by Django) + config['fmt'] = config.pop('format') + config['()'] = factory + result = self.configure_custom(config) + else: + fmt = config.get('format', None) + dfmt = config.get('datefmt', None) + result = logging.Formatter(fmt, dfmt) + return result + + def configure_filter(self, config): + """Configure a filter from a dictionary.""" + if '()' in config: + result = self.configure_custom(config) + else: + name = config.get('name', '') + result = logging.Filter(name) + return result + + def add_filters(self, filterer, filters): + """Add filters to a filterer from a list of names.""" + for f in filters: + try: + filterer.addFilter(self.config['filters'][f]) + except StandardError as e: + raise ValueError('Unable to add filter %r: %s' % (f, e)) + + def configure_handler(self, config): + """Configure a handler from a dictionary.""" + formatter = config.pop('formatter', None) + if formatter: + try: + formatter = self.config['formatters'][formatter] + except StandardError as e: + raise ValueError('Unable to set formatter ' + '%r: %s' % (formatter, e)) + level = config.pop('level', None) + filters = config.pop('filters', None) + if '()' in config: + c = config.pop('()') + if not hasattr(c, '__call__') and hasattr(types, 'ClassType') and type(c) != types.ClassType: + c = self.resolve(c) + factory = c + else: + klass = self.resolve(config.pop('class')) + # Special case for handler which refers to another handler + if issubclass(klass, logging.handlers.MemoryHandler) and\ + 'target' in config: + try: + config['target'] = self.config['handlers'][config['target']] + except StandardError as e: + raise ValueError('Unable to set target handler ' + '%r: %s' % (config['target'], e)) + elif issubclass(klass, logging.handlers.SMTPHandler) and\ + 'mailhost' in config: + config['mailhost'] = 
self.as_tuple(config['mailhost']) + elif issubclass(klass, logging.handlers.SysLogHandler) and\ + 'address' in config: + config['address'] = self.as_tuple(config['address']) + factory = klass + kwargs = dict((k, config[k]) for k in config if valid_ident(k)) + try: + result = factory(**kwargs) + except TypeError as te: + if "'stream'" not in str(te): + raise + # The argument name changed from strm to stream + # Retry with old name. + # This is so that code can be used with older Python versions + #(e.g. by Django) + kwargs['strm'] = kwargs.pop('stream') + result = factory(**kwargs) + if formatter: + result.setFormatter(formatter) + if level is not None: + result.setLevel(_checkLevel(level)) + if filters: + self.add_filters(result, filters) + return result + + def add_handlers(self, logger, handlers): + """Add handlers to a logger from a list of names.""" + for h in handlers: + try: + logger.addHandler(self.config['handlers'][h]) + except StandardError as e: + raise ValueError('Unable to add handler %r: %s' % (h, e)) + + def common_logger_config(self, logger, config, incremental=False): + """ + Perform configuration which is common to root and non-root loggers. + """ + level = config.get('level', None) + if level is not None: + logger.setLevel(_checkLevel(level)) + if not incremental: + # Remove any existing handlers + for h in logger.handlers[:]: + logger.removeHandler(h) + handlers = config.get('handlers', None) + if handlers: + self.add_handlers(logger, handlers) + filters = config.get('filters', None) + if filters: + self.add_filters(logger, filters) + + def configure_logger(self, name, config, incremental=False): + """Configure a non-root logger from a dictionary.""" + logger = logging.getLogger(name) + self.common_logger_config(logger, config, incremental) + propagate = config.get('propagate', None) + if propagate is not None: + logger.propagate = propagate + + def configure_root(self, config, incremental=False): + """Configure a root logger from a dictionary.""" + root = logging.getLogger() + self.common_logger_config(root, config, incremental) + +dictConfigClass = DictConfigurator + + +def dictConfig(config): + """Configure logging using a dictionary.""" + dictConfigClass(config).configure() diff --git a/venv/lib/python3.5/site-packages/pip/compat/ordereddict.py b/venv/lib/python3.5/site-packages/pip/compat/ordereddict.py new file mode 100644 index 0000000..6eb3ba4 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/compat/ordereddict.py @@ -0,0 +1,129 @@ +# Copyright (c) 2009 Raymond Hettinger +# +# Permission is hereby granted, free of charge, to any person +# obtaining a copy of this software and associated documentation files +# (the "Software"), to deal in the Software without restriction, +# including without limitation the rights to use, copy, modify, merge, +# publish, distribute, sublicense, and/or sell copies of the Software, +# and to permit persons to whom the Software is furnished to do so, +# subject to the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. 
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +# OTHER DEALINGS IN THE SOFTWARE. + +# flake8: noqa + +from UserDict import DictMixin + +class OrderedDict(dict, DictMixin): + + def __init__(self, *args, **kwds): + if len(args) > 1: + raise TypeError('expected at most 1 arguments, got %d' % len(args)) + try: + self.__end + except AttributeError: + self.clear() + self.update(*args, **kwds) + + def clear(self): + self.__end = end = [] + end += [None, end, end] # sentinel node for doubly linked list + self.__map = {} # key --> [key, prev, next] + dict.clear(self) + + def __setitem__(self, key, value): + if key not in self: + end = self.__end + curr = end[1] + curr[2] = end[1] = self.__map[key] = [key, curr, end] + dict.__setitem__(self, key, value) + + def __delitem__(self, key): + dict.__delitem__(self, key) + key, prev, next = self.__map.pop(key) + prev[2] = next + next[1] = prev + + def __iter__(self): + end = self.__end + curr = end[2] + while curr is not end: + yield curr[0] + curr = curr[2] + + def __reversed__(self): + end = self.__end + curr = end[1] + while curr is not end: + yield curr[0] + curr = curr[1] + + def popitem(self, last=True): + if not self: + raise KeyError('dictionary is empty') + if last: + key = reversed(self).next() + else: + key = iter(self).next() + value = self.pop(key) + return key, value + + def __reduce__(self): + items = [[k, self[k]] for k in self] + tmp = self.__map, self.__end + del self.__map, self.__end + inst_dict = vars(self).copy() + self.__map, self.__end = tmp + if inst_dict: + return (self.__class__, (items,), inst_dict) + return self.__class__, (items,) + + def keys(self): + return list(self) + + setdefault = DictMixin.setdefault + update = DictMixin.update + pop = DictMixin.pop + values = DictMixin.values + items = DictMixin.items + iterkeys = DictMixin.iterkeys + itervalues = DictMixin.itervalues + iteritems = DictMixin.iteritems + + def __repr__(self): + if not self: + return '%s()' % (self.__class__.__name__,) + return '%s(%r)' % (self.__class__.__name__, self.items()) + + def copy(self): + return self.__class__(self) + + @classmethod + def fromkeys(cls, iterable, value=None): + d = cls() + for key in iterable: + d[key] = value + return d + + def __eq__(self, other): + if isinstance(other, OrderedDict): + if len(self) != len(other): + return False + for p, q in zip(self.items(), other.items()): + if p != q: + return False + return True + return dict.__eq__(self, other) + + def __ne__(self, other): + return not self == other diff --git a/venv/lib/python3.5/site-packages/pip/download.py b/venv/lib/python3.5/site-packages/pip/download.py new file mode 100644 index 0000000..bbef9ea --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/download.py @@ -0,0 +1,895 @@ +from __future__ import absolute_import + +import cgi +import email.utils +import getpass +import json +import logging +import mimetypes +import os +import platform +import re +import shutil +import sys +import tempfile + +try: + import ssl # noqa + HAS_TLS = True +except ImportError: + HAS_TLS = False + +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +import pip + +from pip.exceptions import InstallationError, HashMismatch +from pip.models import PyPI +from pip.utils import (splitext, rmtree, format_size, 
display_path, + backup_dir, ask_path_exists, unpack_file, + ARCHIVE_EXTENSIONS, consume, call_subprocess) +from pip.utils.encoding import auto_decode +from pip.utils.filesystem import check_path_owner +from pip.utils.logging import indent_log +from pip.utils.setuptools_build import SETUPTOOLS_SHIM +from pip.utils.ui import DownloadProgressBar, DownloadProgressSpinner +from pip.locations import write_delete_marker_file +from pip.vcs import vcs +from pip._vendor import requests, six +from pip._vendor.requests.adapters import BaseAdapter, HTTPAdapter +from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth +from pip._vendor.requests.models import CONTENT_CHUNK_SIZE, Response +from pip._vendor.requests.structures import CaseInsensitiveDict +from pip._vendor.requests.packages import urllib3 +from pip._vendor.cachecontrol import CacheControlAdapter +from pip._vendor.cachecontrol.caches import FileCache +from pip._vendor.lockfile import LockError +from pip._vendor.six.moves import xmlrpc_client + + +__all__ = ['get_file_content', + 'is_url', 'url_to_path', 'path_to_url', + 'is_archive_file', 'unpack_vcs_link', + 'unpack_file_url', 'is_vcs_url', 'is_file_url', + 'unpack_http_url', 'unpack_url'] + + +logger = logging.getLogger(__name__) + + +def user_agent(): + """ + Return a string representing the user agent. + """ + data = { + "installer": {"name": "pip", "version": pip.__version__}, + "python": platform.python_version(), + "implementation": { + "name": platform.python_implementation(), + }, + } + + if data["implementation"]["name"] == 'CPython': + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'PyPy': + if sys.pypy_version_info.releaselevel == 'final': + pypy_version_info = sys.pypy_version_info[:3] + else: + pypy_version_info = sys.pypy_version_info + data["implementation"]["version"] = ".".join( + [str(x) for x in pypy_version_info] + ) + elif data["implementation"]["name"] == 'Jython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + elif data["implementation"]["name"] == 'IronPython': + # Complete Guess + data["implementation"]["version"] = platform.python_version() + + if sys.platform.startswith("linux"): + distro = dict(filter( + lambda x: x[1], + zip(["name", "version", "id"], platform.linux_distribution()), + )) + libc = dict(filter( + lambda x: x[1], + zip(["lib", "version"], platform.libc_ver()), + )) + if libc: + distro["libc"] = libc + if distro: + data["distro"] = distro + + if sys.platform.startswith("darwin") and platform.mac_ver()[0]: + data["distro"] = {"name": "OS X", "version": platform.mac_ver()[0]} + + if platform.system(): + data.setdefault("system", {})["name"] = platform.system() + + if platform.release(): + data.setdefault("system", {})["release"] = platform.release() + + if platform.machine(): + data["cpu"] = platform.machine() + + # Python 2.6 doesn't have ssl.OPENSSL_VERSION. 
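+    # Editor's note (illustrative, not in the original source): the value
+    # returned a few lines below looks like
+    #   pip/8.1.1 {"cpu":"x86_64","implementation":{"name":"CPython",...},...}
+    # i.e. "pip/<version>" followed by the JSON-serialized data dict.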
+ if HAS_TLS and sys.version_info[:2] > (2, 6): + data["openssl_version"] = ssl.OPENSSL_VERSION + + return "{data[installer][name]}/{data[installer][version]} {json}".format( + data=data, + json=json.dumps(data, separators=(",", ":"), sort_keys=True), + ) + + +class MultiDomainBasicAuth(AuthBase): + + def __init__(self, prompting=True): + self.prompting = prompting + self.passwords = {} + + def __call__(self, req): + parsed = urllib_parse.urlparse(req.url) + + # Get the netloc without any embedded credentials + netloc = parsed.netloc.rsplit("@", 1)[-1] + + # Set the url of the request to the url without any credentials + req.url = urllib_parse.urlunparse(parsed[:1] + (netloc,) + parsed[2:]) + + # Use any stored credentials that we have for this netloc + username, password = self.passwords.get(netloc, (None, None)) + + # Extract credentials embedded in the url if we have none stored + if username is None: + username, password = self.parse_credentials(parsed.netloc) + + if username or password: + # Store the username and password + self.passwords[netloc] = (username, password) + + # Send the basic auth with this request + req = HTTPBasicAuth(username or "", password or "")(req) + + # Attach a hook to handle 401 responses + req.register_hook("response", self.handle_401) + + return req + + def handle_401(self, resp, **kwargs): + # We only care about 401 responses, anything else we want to just + # pass through the actual response + if resp.status_code != 401: + return resp + + # We are not able to prompt the user so simple return the response + if not self.prompting: + return resp + + parsed = urllib_parse.urlparse(resp.url) + + # Prompt the user for a new username and password + username = six.moves.input("User for %s: " % parsed.netloc) + password = getpass.getpass("Password: ") + + # Store the new username and password to use for future requests + if username or password: + self.passwords[parsed.netloc] = (username, password) + + # Consume content and release the original connection to allow our new + # request to reuse the same one. + resp.content + resp.raw.release_conn() + + # Add our new username and password to the request + req = HTTPBasicAuth(username or "", password or "")(resp.request) + + # Send our new request + new_resp = resp.connection.send(req, **kwargs) + new_resp.history.append(resp) + + return new_resp + + def parse_credentials(self, netloc): + if "@" in netloc: + userinfo = netloc.rsplit("@", 1)[0] + if ":" in userinfo: + return userinfo.split(":", 1) + return userinfo, None + return None, None + + +class LocalFSAdapter(BaseAdapter): + + def send(self, request, stream=None, timeout=None, verify=None, cert=None, + proxies=None): + pathname = url_to_path(request.url) + + resp = Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + resp.status_code = 404 + resp.raw = exc + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = CaseInsensitiveDict({ + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + }) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self): + pass + + +class SafeFileCache(FileCache): + """ + A file based cache which is safe to use even when the target directory may + not be accessible or writable. 
+ """ + + def __init__(self, *args, **kwargs): + super(SafeFileCache, self).__init__(*args, **kwargs) + + # Check to ensure that the directory containing our cache directory + # is owned by the user current executing pip. If it does not exist + # we will check the parent directory until we find one that does exist. + # If it is not owned by the user executing pip then we will disable + # the cache and log a warning. + if not check_path_owner(self.directory): + logger.warning( + "The directory '%s' or its parent directory is not owned by " + "the current user and the cache has been disabled. Please " + "check the permissions and owner of that directory. If " + "executing pip with sudo, you may want sudo's -H flag.", + self.directory, + ) + + # Set our directory to None to disable the Cache + self.directory = None + + def get(self, *args, **kwargs): + # If we don't have a directory, then the cache should be a no-op. + if self.directory is None: + return + + try: + return super(SafeFileCache, self).get(*args, **kwargs) + except (LockError, OSError, IOError): + # We intentionally silence this error, if we can't access the cache + # then we can just skip caching and process the request as if + # caching wasn't enabled. + pass + + def set(self, *args, **kwargs): + # If we don't have a directory, then the cache should be a no-op. + if self.directory is None: + return + + try: + return super(SafeFileCache, self).set(*args, **kwargs) + except (LockError, OSError, IOError): + # We intentionally silence this error, if we can't access the cache + # then we can just skip caching and process the request as if + # caching wasn't enabled. + pass + + def delete(self, *args, **kwargs): + # If we don't have a directory, then the cache should be a no-op. + if self.directory is None: + return + + try: + return super(SafeFileCache, self).delete(*args, **kwargs) + except (LockError, OSError, IOError): + # We intentionally silence this error, if we can't access the cache + # then we can just skip caching and process the request as if + # caching wasn't enabled. + pass + + +class InsecureHTTPAdapter(HTTPAdapter): + + def cert_verify(self, conn, url, verify, cert): + conn.cert_reqs = 'CERT_NONE' + conn.ca_certs = None + + +class PipSession(requests.Session): + + timeout = None + + def __init__(self, *args, **kwargs): + retries = kwargs.pop("retries", 0) + cache = kwargs.pop("cache", None) + insecure_hosts = kwargs.pop("insecure_hosts", []) + + super(PipSession, self).__init__(*args, **kwargs) + + # Attach our User Agent to the request + self.headers["User-Agent"] = user_agent() + + # Attach our Authentication handler to the session + self.auth = MultiDomainBasicAuth() + + # Create our urllib3.Retry instance which will allow us to customize + # how we handle retries. + retries = urllib3.Retry( + # Set the total number of retries that a particular request can + # have. + total=retries, + + # A 503 error from PyPI typically means that the Fastly -> Origin + # connection got interupted in some way. A 503 error in general + # is typically considered a transient error so we'll go ahead and + # retry it. + status_forcelist=[503], + + # Add a small amount of back off between failed requests in + # order to prevent hammering the service. + backoff_factor=0.25, + ) + + # We want to _only_ cache responses on securely fetched origins. 
We do + # this because we can't validate the response of an insecurely fetched + # origin, and we don't want someone to be able to poison the cache and + # require manual eviction from the cache to fix it. + if cache: + secure_adapter = CacheControlAdapter( + cache=SafeFileCache(cache, use_dir_lock=True), + max_retries=retries, + ) + else: + secure_adapter = HTTPAdapter(max_retries=retries) + + # Our Insecure HTTPAdapter disables HTTPS validation. It does not + # support caching (see above) so we'll use it for all http:// URLs as + # well as any https:// host that we've marked as ignoring TLS errors + # for. + insecure_adapter = InsecureHTTPAdapter(max_retries=retries) + + self.mount("https://", secure_adapter) + self.mount("http://", insecure_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + # We want to use a non-validating adapter for any requests which are + # deemed insecure. + for host in insecure_hosts: + self.mount("https://{0}/".format(host), insecure_adapter) + + def request(self, method, url, *args, **kwargs): + # Allow setting a default timeout on a session + kwargs.setdefault("timeout", self.timeout) + + # Dispatch the actual request + return super(PipSession, self).request(method, url, *args, **kwargs) + + +def get_file_content(url, comes_from=None, session=None): + """Gets the content of a file; it may be a filename, file: URL, or + http: URL. Returns (location, content). Content is unicode.""" + if session is None: + raise TypeError( + "get_file_content() missing 1 required keyword argument: 'session'" + ) + + match = _scheme_re.search(url) + if match: + scheme = match.group(1).lower() + if (scheme == 'file' and comes_from and + comes_from.startswith('http')): + raise InstallationError( + 'Requirements file %s references URL %s, which is local' + % (comes_from, url)) + if scheme == 'file': + path = url.split(':', 1)[1] + path = path.replace('\\', '/') + match = _url_slash_drive_re.match(path) + if match: + path = match.group(1) + ':' + path.split('|', 1)[1] + path = urllib_parse.unquote(path) + if path.startswith('/'): + path = '/' + path.lstrip('/') + url = path + else: + # FIXME: catch some errors + resp = session.get(url) + resp.raise_for_status() + return resp.url, resp.text + try: + with open(url, 'rb') as f: + content = auto_decode(f.read()) + except IOError as exc: + raise InstallationError( + 'Could not open requirements file: %s' % str(exc) + ) + return url, content + + +_scheme_re = re.compile(r'^(http|https|file):', re.I) +_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I) + + +def is_url(name): + """Returns true if the name looks like a URL""" + if ':' not in name: + return False + scheme = name.split(':', 1)[0].lower() + return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes + + +def url_to_path(url): + """ + Convert a file: URL to a path. + """ + assert url.startswith('file:'), ( + "You can only turn file: urls into filenames (not %r)" % url) + + _, netloc, path, _, _ = urllib_parse.urlsplit(url) + + # if we have a UNC path, prepend UNC share notation + if netloc: + netloc = '\\\\' + netloc + + path = urllib_request.url2pathname(netloc + path) + return path + + +def path_to_url(path): + """ + Convert a path to a file: URL. The path will be made absolute and have + quoted path parts. 
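+
+    Illustrative behaviour on a POSIX system (hypothetical path)::
+
+        path_to_url('/tmp/some file.txt')
+        # -> 'file:///tmp/some%20file.txt'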
+ """ + path = os.path.normpath(os.path.abspath(path)) + url = urllib_parse.urljoin('file:', urllib_request.pathname2url(path)) + return url + + +def is_archive_file(name): + """Return True if `name` is a considered as an archive file.""" + ext = splitext(name)[1].lower() + if ext in ARCHIVE_EXTENSIONS: + return True + return False + + +def unpack_vcs_link(link, location): + vcs_backend = _get_used_vcs_backend(link) + vcs_backend.unpack(location) + + +def _get_used_vcs_backend(link): + for backend in vcs.backends: + if link.scheme in backend.schemes: + vcs_backend = backend(link.url) + return vcs_backend + + +def is_vcs_url(link): + return bool(_get_used_vcs_backend(link)) + + +def is_file_url(link): + return link.url.lower().startswith('file:') + + +def is_dir_url(link): + """Return whether a file:// Link points to a directory. + + ``link`` must not have any other scheme but file://. Call is_file_url() + first. + + """ + link_path = url_to_path(link.url_without_fragment) + return os.path.isdir(link_path) + + +def _progress_indicator(iterable, *args, **kwargs): + return iterable + + +def _download_url(resp, link, content_file, hashes): + try: + total_length = int(resp.headers['content-length']) + except (ValueError, KeyError, TypeError): + total_length = 0 + + cached_resp = getattr(resp, "from_cache", False) + + if logger.getEffectiveLevel() > logging.INFO: + show_progress = False + elif cached_resp: + show_progress = False + elif total_length > (40 * 1000): + show_progress = True + elif not total_length: + show_progress = True + else: + show_progress = False + + show_url = link.show_url + + def resp_read(chunk_size): + try: + # Special case for urllib3. + for chunk in resp.raw.stream( + chunk_size, + # We use decode_content=False here because we don't + # want urllib3 to mess with the raw bytes we get + # from the server. If we decompress inside of + # urllib3 then we cannot verify the checksum + # because the checksum will be of the compressed + # file. This breakage will only occur if the + # server adds a Content-Encoding header, which + # depends on how the server was configured: + # - Some servers will notice that the file isn't a + # compressible file and will leave the file alone + # and with an empty Content-Encoding + # - Some servers will notice that the file is + # already compressed and will leave the file + # alone and will add a Content-Encoding: gzip + # header + # - Some servers won't notice anything at all and + # will take a file that's already been compressed + # and compress it again and set the + # Content-Encoding: gzip header + # + # By setting this not to decode automatically we + # hope to eliminate problems with the second case. + decode_content=False): + yield chunk + except AttributeError: + # Standard file-like object. 
+ while True: + chunk = resp.raw.read(chunk_size) + if not chunk: + break + yield chunk + + def written_chunks(chunks): + for chunk in chunks: + content_file.write(chunk) + yield chunk + + progress_indicator = _progress_indicator + + if link.netloc == PyPI.netloc: + url = show_url + else: + url = link.url_without_fragment + + if show_progress: # We don't show progress on cached responses + if total_length: + logger.info("Downloading %s (%s)", url, format_size(total_length)) + progress_indicator = DownloadProgressBar(max=total_length).iter + else: + logger.info("Downloading %s", url) + progress_indicator = DownloadProgressSpinner().iter + elif cached_resp: + logger.info("Using cached %s", url) + else: + logger.info("Downloading %s", url) + + logger.debug('Downloading from URL %s', link) + + downloaded_chunks = written_chunks( + progress_indicator( + resp_read(CONTENT_CHUNK_SIZE), + CONTENT_CHUNK_SIZE + ) + ) + if hashes: + hashes.check_against_chunks(downloaded_chunks) + else: + consume(downloaded_chunks) + + +def _copy_file(filename, location, link): + copy = True + download_location = os.path.join(location, link.filename) + if os.path.exists(download_location): + response = ask_path_exists( + 'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' % + display_path(download_location), ('i', 'w', 'b')) + if response == 'i': + copy = False + elif response == 'w': + logger.warning('Deleting %s', display_path(download_location)) + os.remove(download_location) + elif response == 'b': + dest_file = backup_dir(download_location) + logger.warning( + 'Backing up %s to %s', + display_path(download_location), + display_path(dest_file), + ) + shutil.move(download_location, dest_file) + if copy: + shutil.copy(filename, download_location) + logger.info('Saved %s', display_path(download_location)) + + +def unpack_http_url(link, location, download_dir=None, + session=None, hashes=None): + if session is None: + raise TypeError( + "unpack_http_url() missing 1 required keyword argument: 'session'" + ) + + temp_dir = tempfile.mkdtemp('-unpack', 'pip-') + + # If a download dir is specified, is the file already downloaded there? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, + download_dir, + hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + content_type = mimetypes.guess_type(from_path)[0] + else: + # let's download to a tmp dir + from_path, content_type = _download_http_url(link, + session, + temp_dir, + hashes) + + # unpack the archive to the build dir location. even when only downloading + # archives, they have to be unpacked to parse dependencies + unpack_file(from_path, location, content_type, link) + + # a download dir is specified; let's copy the archive there + if download_dir and not already_downloaded_path: + _copy_file(from_path, download_dir, link) + + if not already_downloaded_path: + os.unlink(from_path) + rmtree(temp_dir) + + +def unpack_file_url(link, location, download_dir=None, hashes=None): + """Unpack link into location. + + If download_dir is provided and link points to a file, make a copy + of the link file inside download_dir. 
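+
+    A hypothetical call for a local sdist archive::
+
+        unpack_file_url(Link('file:///tmp/pkg-1.0.tar.gz'), '/tmp/build/pkg')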
+ """ + link_path = url_to_path(link.url_without_fragment) + + # If it's a url to a local directory + if is_dir_url(link): + if os.path.isdir(location): + rmtree(location) + shutil.copytree(link_path, location, symlinks=True) + if download_dir: + logger.info('Link is a directory, ignoring download_dir') + return + + # If --require-hashes is off, `hashes` is either empty, the + # link's embeddded hash, or MissingHashes; it is required to + # match. If --require-hashes is on, we are satisfied by any + # hash in `hashes` matching: a URL-based or an option-based + # one; no internet-sourced hash will be in `hashes`. + if hashes: + hashes.check_against_path(link_path) + + # If a download dir is specified, is the file already there and valid? + already_downloaded_path = None + if download_dir: + already_downloaded_path = _check_download_dir(link, + download_dir, + hashes) + + if already_downloaded_path: + from_path = already_downloaded_path + else: + from_path = link_path + + content_type = mimetypes.guess_type(from_path)[0] + + # unpack the archive to the build dir location. even when only downloading + # archives, they have to be unpacked to parse dependencies + unpack_file(from_path, location, content_type, link) + + # a download dir is specified and not already downloaded + if download_dir and not already_downloaded_path: + _copy_file(from_path, download_dir, link) + + +def _copy_dist_from_dir(link_path, location): + """Copy distribution files in `link_path` to `location`. + + Invoked when user requests to install a local directory. E.g.: + + pip install . + pip install ~/dev/git-repos/python-prompt-toolkit + + """ + + # Note: This is currently VERY SLOW if you have a lot of data in the + # directory, because it copies everything with `shutil.copytree`. + # What it should really do is build an sdist and install that. + # See https://github.com/pypa/pip/issues/2195 + + if os.path.isdir(location): + rmtree(location) + + # build an sdist + setup_py = 'setup.py' + sdist_args = [sys.executable] + sdist_args.append('-c') + sdist_args.append(SETUPTOOLS_SHIM % setup_py) + sdist_args.append('sdist') + sdist_args += ['--dist-dir', location] + logger.info('Running setup.py sdist for %s', link_path) + + with indent_log(): + call_subprocess(sdist_args, cwd=link_path, show_stdout=False) + + # unpack sdist into `location` + sdist = os.path.join(location, os.listdir(location)[0]) + logger.info('Unpacking sdist %s into %s', sdist, location) + unpack_file(sdist, location, content_type=None, link=None) + + +class PipXmlrpcTransport(xmlrpc_client.Transport): + """Provide a `xmlrpclib.Transport` implementation via a `PipSession` + object. 
+ """ + def __init__(self, index_url, session, use_datetime=False): + xmlrpc_client.Transport.__init__(self, use_datetime) + index_parts = urllib_parse.urlparse(index_url) + self._scheme = index_parts.scheme + self._session = session + + def request(self, host, handler, request_body, verbose=False): + parts = (self._scheme, host, handler, None, None, None) + url = urllib_parse.urlunparse(parts) + try: + headers = {'Content-Type': 'text/xml'} + response = self._session.post(url, data=request_body, + headers=headers, stream=True) + response.raise_for_status() + self.verbose = verbose + return self.parse_response(response.raw) + except requests.HTTPError as exc: + logger.critical( + "HTTP error %s while getting %s", + exc.response.status_code, url, + ) + raise + + +def unpack_url(link, location, download_dir=None, + only_download=False, session=None, hashes=None): + """Unpack link. + If link is a VCS link: + if only_download, export into download_dir and ignore location + else unpack into location + for other types of link: + - unpack into location + - if download_dir, copy the file into download_dir + - if only_download, mark location for deletion + + :param hashes: A Hashes object, one of whose embedded hashes must match, + or HashMismatch will be raised. If the Hashes is empty, no matches are + required, and unhashable types of requirements (like VCS ones, which + would ordinarily raise HashUnsupported) are allowed. + """ + # non-editable vcs urls + if is_vcs_url(link): + unpack_vcs_link(link, location) + + # file urls + elif is_file_url(link): + unpack_file_url(link, location, download_dir, hashes=hashes) + + # http urls + else: + if session is None: + session = PipSession() + + unpack_http_url( + link, + location, + download_dir, + session, + hashes=hashes + ) + if only_download: + write_delete_marker_file(location) + + +def _download_http_url(link, session, temp_dir, hashes): + """Download link url into temp_dir using provided session""" + target_url = link.url.split('#', 1)[0] + try: + resp = session.get( + target_url, + # We use Accept-Encoding: identity here because requests + # defaults to accepting compressed responses. This breaks in + # a variety of ways depending on how the server is configured. + # - Some servers will notice that the file isn't a compressible + # file and will leave the file alone and with an empty + # Content-Encoding + # - Some servers will notice that the file is already + # compressed and will leave the file alone and will add a + # Content-Encoding: gzip header + # - Some servers won't notice anything at all and will take + # a file that's already been compressed and compress it again + # and set the Content-Encoding: gzip header + # By setting this to request only the identity encoding We're + # hoping to eliminate the third case. Hopefully there does not + # exist a server which when given a file will notice it is + # already compressed and that you're not asking for a + # compressed file and will then decompress it before sending + # because if that's the case I don't think it'll ever be + # possible to make this work. 
+ headers={"Accept-Encoding": "identity"}, + stream=True, + ) + resp.raise_for_status() + except requests.HTTPError as exc: + logger.critical( + "HTTP error %s while getting %s", exc.response.status_code, link, + ) + raise + + content_type = resp.headers.get('content-type', '') + filename = link.filename # fallback + # Have a look at the Content-Disposition header for a better guess + content_disposition = resp.headers.get('content-disposition') + if content_disposition: + type, params = cgi.parse_header(content_disposition) + # We use ``or`` here because we don't want to use an "empty" value + # from the filename param. + filename = params.get('filename') or filename + ext = splitext(filename)[1] + if not ext: + ext = mimetypes.guess_extension(content_type) + if ext: + filename += ext + if not ext and link.url != resp.url: + ext = os.path.splitext(resp.url)[1] + if ext: + filename += ext + file_path = os.path.join(temp_dir, filename) + with open(file_path, 'wb') as content_file: + _download_url(resp, link, content_file, hashes) + return file_path, content_type + + +def _check_download_dir(link, download_dir, hashes): + """ Check download_dir for previously downloaded file with correct hash + If a correct file is found return its path else None + """ + download_path = os.path.join(download_dir, link.filename) + if os.path.exists(download_path): + # If already downloaded, does its hash match? + logger.info('File was already downloaded %s', download_path) + if hashes: + try: + hashes.check_against_path(download_path) + except HashMismatch: + logger.warning( + 'Previously-downloaded file %s has bad hash. ' + 'Re-downloading.', + download_path + ) + os.unlink(download_path) + return None + return download_path + return None diff --git a/venv/lib/python3.5/site-packages/pip/exceptions.py b/venv/lib/python3.5/site-packages/pip/exceptions.py new file mode 100644 index 0000000..a529e40 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/exceptions.py @@ -0,0 +1,239 @@ +"""Exceptions used throughout package""" +from __future__ import absolute_import + +from itertools import chain, groupby, repeat + +from pip._vendor.six import iteritems + + +class PipError(Exception): + """Base pip exception""" + + +class InstallationError(PipError): + """General exception during installation""" + + +class UninstallationError(PipError): + """General exception during uninstallation""" + + +class DistributionNotFound(InstallationError): + """Raised when a distribution cannot be found to satisfy a requirement""" + + +class RequirementsFileParseError(InstallationError): + """Raised when a general error occurs parsing a requirements file line.""" + + +class BestVersionAlreadyInstalled(PipError): + """Raised when the most up-to-date version of a package is already + installed.""" + + +class BadCommand(PipError): + """Raised when virtualenv or a command is not found""" + + +class CommandError(PipError): + """Raised when there is an error in command-line arguments""" + + +class PreviousBuildDirError(PipError): + """Raised when there's a previous conflicting build directory""" + + +class InvalidWheelFilename(InstallationError): + """Invalid wheel filename.""" + + +class UnsupportedWheel(InstallationError): + """Unsupported wheel.""" + + +class HashErrors(InstallationError): + """Multiple HashError instances rolled into one for reporting""" + + def __init__(self): + self.errors = [] + + def append(self, error): + self.errors.append(error) + + def __str__(self): + lines = [] + self.errors.sort(key=lambda e: e.order) + 
for cls, errors_of_cls in groupby(self.errors, lambda e: e.__class__): + lines.append(cls.head) + lines.extend(e.body() for e in errors_of_cls) + if lines: + return '\n'.join(lines) + + def __nonzero__(self): + return bool(self.errors) + + def __bool__(self): + return self.__nonzero__() + + +class HashError(InstallationError): + """ + A failure to verify a package against known-good hashes + + :cvar order: An int sorting hash exception classes by difficulty of + recovery (lower being harder), so the user doesn't bother fretting + about unpinned packages when he has deeper issues, like VCS + dependencies, to deal with. Also keeps error reports in a + deterministic order. + :cvar head: A section heading for display above potentially many + exceptions of this kind + :ivar req: The InstallRequirement that triggered this error. This is + pasted on after the exception is instantiated, because it's not + typically available earlier. + + """ + req = None + head = '' + + def body(self): + """Return a summary of me for display under the heading. + + This default implementation simply prints a description of the + triggering requirement. + + :param req: The InstallRequirement that provoked this error, with + populate_link() having already been called + + """ + return ' %s' % self._requirement_name() + + def __str__(self): + return '%s\n%s' % (self.head, self.body()) + + def _requirement_name(self): + """Return a description of the requirement that triggered me. + + This default implementation returns long description of the req, with + line numbers + + """ + return str(self.req) if self.req else 'unknown package' + + +class VcsHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 0 + head = ("Can't verify hashes for these requirements because we don't " + "have a way to hash version control repositories:") + + +class DirectoryUrlHashUnsupported(HashError): + """A hash was provided for a version-control-system-based requirement, but + we don't have a method for hashing those.""" + + order = 1 + head = ("Can't verify hashes for these file:// requirements because they " + "point to directories:") + + +class HashMissing(HashError): + """A hash was needed for a requirement but is absent.""" + + order = 2 + head = ('Hashes are required in --require-hashes mode, but they are ' + 'missing from some requirements. Here is a list of those ' + 'requirements along with the hashes their downloaded archives ' + 'actually had. Add lines like these to your requirements files to ' + 'prevent tampering. (If you did not enable --require-hashes ' + 'manually, note that it turns on automatically when any package ' + 'has a hash.)') + + def __init__(self, gotten_hash): + """ + :param gotten_hash: The hash of the (possibly malicious) archive we + just downloaded + """ + self.gotten_hash = gotten_hash + + def body(self): + from pip.utils.hashes import FAVORITE_HASH # Dodge circular import. + + package = None + if self.req: + # In the case of URL-based requirements, display the original URL + # seen in the requirements file rather than the package name, + # so the output can be directly copied into the requirements file. + package = (self.req.original_link if self.req.original_link + # In case someone feeds something downright stupid + # to InstallRequirement's constructor. 
+ else getattr(self.req, 'req', None)) + return ' %s --hash=%s:%s' % (package or 'unknown package', + FAVORITE_HASH, + self.gotten_hash) + + +class HashUnpinned(HashError): + """A requirement had a hash specified but was not pinned to a specific + version.""" + + order = 3 + head = ('In --require-hashes mode, all requirements must have their ' + 'versions pinned with ==. These do not:') + + +class HashMismatch(HashError): + """ + Distribution file hash values don't match. + + :ivar package_name: The name of the package that triggered the hash + mismatch. Feel free to write to this after the exception is raise to + improve its error message. + + """ + order = 4 + head = ('THESE PACKAGES DO NOT MATCH THE HASHES FROM THE REQUIREMENTS ' + 'FILE. If you have updated the package versions, please update ' + 'the hashes. Otherwise, examine the package contents carefully; ' + 'someone may have tampered with them.') + + def __init__(self, allowed, gots): + """ + :param allowed: A dict of algorithm names pointing to lists of allowed + hex digests + :param gots: A dict of algorithm names pointing to hashes we + actually got from the files under suspicion + """ + self.allowed = allowed + self.gots = gots + + def body(self): + return ' %s:\n%s' % (self._requirement_name(), + self._hash_comparison()) + + def _hash_comparison(self): + """ + Return a comparison of actual and expected hash values. + + Example:: + + Expected sha256 abcdeabcdeabcdeabcdeabcdeabcdeabcdeabcdeabcde + or 123451234512345123451234512345123451234512345 + Got bcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdefbcdef + + """ + def hash_then_or(hash_name): + # For now, all the decent hashes have 6-char names, so we can get + # away with hard-coding space literals. + return chain([hash_name], repeat(' or')) + + lines = [] + for hash_name, expecteds in iteritems(self.allowed): + prefix = hash_then_or(hash_name) + lines.extend((' Expected %s %s' % (next(prefix), e)) + for e in expecteds) + lines.append(' Got %s\n' % + self.gots[hash_name].hexdigest()) + prefix = ' or' + return '\n'.join(lines) diff --git a/venv/lib/python3.5/site-packages/pip/index.py b/venv/lib/python3.5/site-packages/pip/index.py new file mode 100644 index 0000000..cae4c2d --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/index.py @@ -0,0 +1,1048 @@ +"""Routines related to PyPI, indexes""" +from __future__ import absolute_import + +import logging +import cgi +from collections import namedtuple +import itertools +import sys +import os +import re +import mimetypes +import posixpath +import warnings + +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip.compat import ipaddress +from pip.utils import ( + cached_property, splitext, normalize_path, + ARCHIVE_EXTENSIONS, SUPPORTED_EXTENSIONS, +) +from pip.utils.deprecation import RemovedInPip9Warning, RemovedInPip10Warning +from pip.utils.logging import indent_log +from pip.exceptions import ( + DistributionNotFound, BestVersionAlreadyInstalled, InvalidWheelFilename, + UnsupportedWheel, +) +from pip.download import HAS_TLS, is_url, path_to_url, url_to_path +from pip.wheel import Wheel, wheel_ext +from pip.pep425tags import supported_tags +from pip._vendor import html5lib, requests, six +from pip._vendor.packaging.version import parse as parse_version +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.requests.exceptions import SSLError + + +__all__ = ['FormatControl', 'fmt_ctl_handle_mutual_exclude', 'PackageFinder'] 
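+
+# A note on the SECURE_ORIGINS tuples below: each entry is matched
+# field-by-field against a parsed (protocol, host, port) origin; "*" is a
+# wildcard, and host entries such as "127.0.0.0/8" are treated as IP
+# networks by _validate_secure_origin() further down.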
+
+
+SECURE_ORIGINS = [
+    # protocol, hostname, port
+    # Taken from Chrome's list of secure origins (See: http://bit.ly/1qrySKC)
+    ("https", "*", "*"),
+    ("*", "localhost", "*"),
+    ("*", "127.0.0.0/8", "*"),
+    ("*", "::1/128", "*"),
+    ("file", "*", None),
+    # ssh is always secure.
+    ("ssh", "*", "*"),
+]
+
+
+logger = logging.getLogger(__name__)
+
+
+class InstallationCandidate(object):
+
+    def __init__(self, project, version, location):
+        self.project = project
+        self.version = parse_version(version)
+        self.location = location
+        self._key = (self.project, self.version, self.location)
+
+    def __repr__(self):
+        return "<InstallationCandidate({0!r}, {1!r}, {2!r})>".format(
+            self.project, self.version, self.location,
+        )
+
+    def __hash__(self):
+        return hash(self._key)
+
+    def __lt__(self, other):
+        return self._compare(other, lambda s, o: s < o)
+
+    def __le__(self, other):
+        return self._compare(other, lambda s, o: s <= o)
+
+    def __eq__(self, other):
+        return self._compare(other, lambda s, o: s == o)
+
+    def __ge__(self, other):
+        return self._compare(other, lambda s, o: s >= o)
+
+    def __gt__(self, other):
+        return self._compare(other, lambda s, o: s > o)
+
+    def __ne__(self, other):
+        return self._compare(other, lambda s, o: s != o)
+
+    def _compare(self, other, method):
+        if not isinstance(other, InstallationCandidate):
+            return NotImplemented
+
+        return method(self._key, other._key)
+
+
+class PackageFinder(object):
+    """This finds packages.
+
+    This is meant to match easy_install's technique for looking for
+    packages, by reading pages and looking for appropriate links.
+    """
+
+    def __init__(self, find_links, index_urls, allow_all_prereleases=False,
+                 trusted_hosts=None, process_dependency_links=False,
+                 session=None, format_control=None):
+        """Create a PackageFinder.
+
+        :param format_control: A FormatControl object or None. Used to control
+            the selection of source packages / binary packages when consulting
+            the index and links.
+        """
+        if session is None:
+            raise TypeError(
+                "PackageFinder() missing 1 required keyword argument: "
+                "'session'"
+            )
+
+        # Build find_links. If an argument starts with ~, it may be
+        # a local file relative to a home directory. So try normalizing
+        # it and if it exists, use the normalized version.
+        # This is deliberately conservative - it might be fine just to
+        # blindly normalize anything starting with a ~...
+        self.find_links = []
+        for link in find_links:
+            if link.startswith('~'):
+                new_link = normalize_path(link)
+                if os.path.exists(new_link):
+                    link = new_link
+            self.find_links.append(link)
+
+        self.index_urls = index_urls
+        self.dependency_links = []
+
+        # These are boring links that have already been logged somehow:
+        self.logged_links = set()
+
+        self.format_control = format_control or FormatControl(set(), set())
+
+        # Domains that we won't emit warnings for when not using HTTPS
+        self.secure_origins = [
+            ("*", host, "*")
+            for host in (trusted_hosts if trusted_hosts else [])
+        ]
+
+        # Do we want to allow _all_ pre-releases?
+        self.allow_all_prereleases = allow_all_prereleases
+
+        # Do we process dependency links?
+        self.process_dependency_links = process_dependency_links
+
+        # The Session we'll use to make requests
+        self.session = session
+
+        # If we don't have TLS enabled, then WARN if anyplace we're looking
+        # relies on TLS.
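+        # (HAS_TLS is imported from pip.download, where it simply records
+        # whether the standard-library ssl module could be imported.)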
+ if not HAS_TLS: + for link in itertools.chain(self.index_urls, self.find_links): + parsed = urllib_parse.urlparse(link) + if parsed.scheme == "https": + logger.warning( + "pip is configured with locations that require " + "TLS/SSL, however the ssl module in Python is not " + "available." + ) + break + + def add_dependency_links(self, links): + # # FIXME: this shouldn't be global list this, it should only + # # apply to requirements of the package that specifies the + # # dependency_links value + # # FIXME: also, we should track comes_from (i.e., use Link) + if self.process_dependency_links: + warnings.warn( + "Dependency Links processing has been deprecated and will be " + "removed in a future release.", + RemovedInPip9Warning, + ) + self.dependency_links.extend(links) + + @staticmethod + def _sort_locations(locations, expand_dir=False): + """ + Sort locations into "files" (archives) and "urls", and return + a pair of lists (files,urls) + """ + files = [] + urls = [] + + # puts the url for the given file path into the appropriate list + def sort_path(path): + url = path_to_url(path) + if mimetypes.guess_type(url, strict=False)[0] == 'text/html': + urls.append(url) + else: + files.append(url) + + for url in locations: + + is_local_path = os.path.exists(url) + is_file_url = url.startswith('file:') + + if is_local_path or is_file_url: + if is_local_path: + path = url + else: + path = url_to_path(url) + if os.path.isdir(path): + if expand_dir: + path = os.path.realpath(path) + for item in os.listdir(path): + sort_path(os.path.join(path, item)) + elif is_file_url: + urls.append(url) + elif os.path.isfile(path): + sort_path(path) + else: + logger.warning( + "Url '%s' is ignored: it is neither a file " + "nor a directory.", url) + elif is_url(url): + # Only add url with clear scheme + urls.append(url) + else: + logger.warning( + "Url '%s' is ignored. It is either a non-existing " + "path or lacks a specific scheme.", url) + + return files, urls + + def _candidate_sort_key(self, candidate): + """ + Function used to generate link sort key for link tuples. + The greater the return value, the more preferred it is. + If not finding wheels, then sorted by version only. + If finding wheels, then the sort order is by version, then: + 1. existing installs + 2. wheels ordered via Wheel.support_index_min() + 3. source archives + Note: it was considered to embed this logic into the Link + comparison operators, but then different sdist links + with the same version, would have to be considered equal + """ + support_num = len(supported_tags) + if candidate.location.is_wheel: + # can raise InvalidWheelFilename + wheel = Wheel(candidate.location.filename) + if not wheel.supported(): + raise UnsupportedWheel( + "%s is not a supported wheel for this platform. It " + "can't be sorted." % wheel.filename + ) + pri = -(wheel.support_index_min()) + else: # sdist + pri = -(support_num) + return (candidate.version, pri) + + def _validate_secure_origin(self, logger, location): + # Determine if this url used a secure transport mechanism + parsed = urllib_parse.urlparse(str(location)) + origin = (parsed.scheme, parsed.hostname, parsed.port) + + # The protocol to use to see if the protocol matches. + # Don't count the repository type as part of the protocol: in + # cases such as "git+ssh", only use "ssh". (I.e., Only verify against + # the last scheme.) 
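+        # e.g. a "git+ssh" scheme is reduced to just "ssh" here.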
+        protocol = origin[0].rsplit('+', 1)[-1]
+
+        # Determine if our origin is a secure origin by looking through our
+        # hardcoded list of secure origins, as well as any additional ones
+        # configured on this PackageFinder instance.
+        for secure_origin in (SECURE_ORIGINS + self.secure_origins):
+            if protocol != secure_origin[0] and secure_origin[0] != "*":
+                continue
+
+            try:
+                # We need to do this decode dance to ensure that we have a
+                # unicode object, even on Python 2.x.
+                addr = ipaddress.ip_address(
+                    origin[1]
+                    if (
+                        isinstance(origin[1], six.text_type) or
+                        origin[1] is None
+                    )
+                    else origin[1].decode("utf8")
+                )
+                network = ipaddress.ip_network(
+                    secure_origin[1]
+                    if isinstance(secure_origin[1], six.text_type)
+                    else secure_origin[1].decode("utf8")
+                )
+            except ValueError:
+                # We don't have both a valid address and a valid network, so
+                # we'll check this origin against hostnames.
+                if (origin[1] and
+                        origin[1].lower() != secure_origin[1].lower() and
+                        secure_origin[1] != "*"):
+                    continue
+            else:
+                # We have a valid address and network, so see if the address
+                # is contained within the network.
+                if addr not in network:
+                    continue
+
+            # Check to see if the port matches
+            if (origin[2] != secure_origin[2] and
+                    secure_origin[2] != "*" and
+                    secure_origin[2] is not None):
+                continue
+
+            # If we've gotten here, then this origin matches the current
+            # secure origin and we should return True
+            return True
+
+        # If we've gotten to this point, then the origin isn't secure and we
+        # will not accept it as a valid location to search. We will however
+        # log a warning that we are ignoring it.
+        logger.warning(
+            "The repository located at %s is not a trusted or secure host and "
+            "is being ignored. If this repository is available via HTTPS it "
+            "is recommended to use HTTPS instead, otherwise you may silence "
+            "this warning and allow it anyways with '--trusted-host %s'.",
+            parsed.hostname,
+            parsed.hostname,
+        )
+
+        return False
+
+    def _get_index_urls_locations(self, project_name):
+        """Returns the locations found via self.index_urls
+
+        Checks the url_name on the main (first in the list) index and
+        uses this url_name to produce all locations
+        """
+
+        def mkurl_pypi_url(url):
+            loc = posixpath.join(
+                url,
+                urllib_parse.quote(canonicalize_name(project_name)))
+            # For maximum compatibility with easy_install, ensure the path
+            # ends in a trailing slash. Although this isn't in the spec
+            # (and PyPI can handle it without the slash) some other index
+            # implementations might break if they relied on easy_install's
+            # behavior.
+            if not loc.endswith('/'):
+                loc = loc + '/'
+            return loc
+
+        return [mkurl_pypi_url(url) for url in self.index_urls]
+
+    def find_all_candidates(self, project_name):
+        """Find all available InstallationCandidate for project_name
+
+        This checks index_urls, find_links and dependency_links.
+        All versions found are returned as an InstallationCandidate list.
+ + See _link_package_versions for details on which files are accepted + """ + index_locations = self._get_index_urls_locations(project_name) + index_file_loc, index_url_loc = self._sort_locations(index_locations) + fl_file_loc, fl_url_loc = self._sort_locations( + self.find_links, expand_dir=True) + dep_file_loc, dep_url_loc = self._sort_locations(self.dependency_links) + + file_locations = ( + Link(url) for url in itertools.chain( + index_file_loc, fl_file_loc, dep_file_loc) + ) + + # We trust every url that the user has given us whether it was given + # via --index-url or --find-links + # We explicitly do not trust links that came from dependency_links + # We want to filter out any thing which does not have a secure origin. + url_locations = [ + link for link in itertools.chain( + (Link(url) for url in index_url_loc), + (Link(url) for url in fl_url_loc), + (Link(url) for url in dep_url_loc), + ) + if self._validate_secure_origin(logger, link) + ] + + logger.debug('%d location(s) to search for versions of %s:', + len(url_locations), project_name) + + for location in url_locations: + logger.debug('* %s', location) + + canonical_name = canonicalize_name(project_name) + formats = fmt_ctl_formats(self.format_control, canonical_name) + search = Search(project_name, canonical_name, formats) + find_links_versions = self._package_versions( + # We trust every directly linked archive in find_links + (Link(url, '-f') for url in self.find_links), + search + ) + + page_versions = [] + for page in self._get_pages(url_locations, project_name): + logger.debug('Analyzing links from page %s', page.url) + with indent_log(): + page_versions.extend( + self._package_versions(page.links, search) + ) + + dependency_versions = self._package_versions( + (Link(url) for url in self.dependency_links), search + ) + if dependency_versions: + logger.debug( + 'dependency_links found: %s', + ', '.join([ + version.location.url for version in dependency_versions + ]) + ) + + file_versions = self._package_versions(file_locations, search) + if file_versions: + file_versions.sort(reverse=True) + logger.debug( + 'Local files found: %s', + ', '.join([ + url_to_path(candidate.location.url) + for candidate in file_versions + ]) + ) + + # This is an intentional priority ordering + return ( + file_versions + find_links_versions + page_versions + + dependency_versions + ) + + def find_requirement(self, req, upgrade): + """Try to find a Link matching req + + Expects req, an InstallRequirement and upgrade, a boolean + Returns a Link if found, + Raises DistributionNotFound or BestVersionAlreadyInstalled otherwise + """ + all_candidates = self.find_all_candidates(req.name) + + # Filter out anything which doesn't match our specifier + compatible_versions = set( + req.specifier.filter( + # We turn the version object into a str here because otherwise + # when we're debundled but setuptools isn't, Python will see + # packaging.version.Version and + # pkg_resources._vendor.packaging.version.Version as different + # types. This way we'll use a str as a common data interchange + # format. If we stop using the pkg_resources provided specifier + # and start using our own, we can drop the cast to str(). + [str(c.version) for c in all_candidates], + prereleases=( + self.allow_all_prereleases + if self.allow_all_prereleases else None + ), + ) + ) + applicable_candidates = [ + # Again, converting to str to deal with debundling. 
+            c for c in all_candidates if str(c.version) in compatible_versions
+        ]
+
+        if applicable_candidates:
+            best_candidate = max(applicable_candidates,
+                                 key=self._candidate_sort_key)
+        else:
+            best_candidate = None
+
+        if req.satisfied_by is not None:
+            installed_version = parse_version(req.satisfied_by.version)
+        else:
+            installed_version = None
+
+        if installed_version is None and best_candidate is None:
+            logger.critical(
+                'Could not find a version that satisfies the requirement %s '
+                '(from versions: %s)',
+                req,
+                ', '.join(
+                    sorted(
+                        set(str(c.version) for c in all_candidates),
+                        key=parse_version,
+                    )
+                )
+            )
+
+            raise DistributionNotFound(
+                'No matching distribution found for %s' % req
+            )
+
+        best_installed = False
+        if installed_version and (
+                best_candidate is None or
+                best_candidate.version <= installed_version):
+            best_installed = True
+
+        if not upgrade and installed_version is not None:
+            if best_installed:
+                logger.debug(
+                    'Existing installed version (%s) is most up-to-date and '
+                    'satisfies requirement',
+                    installed_version,
+                )
+            else:
+                logger.debug(
+                    'Existing installed version (%s) satisfies requirement '
+                    '(most up-to-date version is %s)',
+                    installed_version,
+                    best_candidate.version,
+                )
+            return None
+
+        if best_installed:
+            # We have an existing version, and it's the best version
+            logger.debug(
+                'Installed version (%s) is most up-to-date (past versions: '
+                '%s)',
+                installed_version,
+                ', '.join(sorted(compatible_versions, key=parse_version)) or
+                "none",
+            )
+            raise BestVersionAlreadyInstalled
+
+        logger.debug(
+            'Using version %s (newest of versions: %s)',
+            best_candidate.version,
+            ', '.join(sorted(compatible_versions, key=parse_version))
+        )
+        return best_candidate.location
+
+    def _get_pages(self, locations, project_name):
+        """
+        Yields HTMLPage objects from the given locations, skipping
+        locations that have errors.
+ """ + seen = set() + for location in locations: + if location in seen: + continue + seen.add(location) + + page = self._get_page(location) + if page is None: + continue + + yield page + + _py_version_re = re.compile(r'-py([123]\.?[0-9]?)$') + + def _sort_links(self, links): + """ + Returns elements of links in order, non-egg links first, egg links + second, while eliminating duplicates + """ + eggs, no_eggs = [], [] + seen = set() + for link in links: + if link not in seen: + seen.add(link) + if link.egg_fragment: + eggs.append(link) + else: + no_eggs.append(link) + return no_eggs + eggs + + def _package_versions(self, links, search): + result = [] + for link in self._sort_links(links): + v = self._link_package_versions(link, search) + if v is not None: + result.append(v) + return result + + def _log_skipped_link(self, link, reason): + if link not in self.logged_links: + logger.debug('Skipping link %s; %s', link, reason) + self.logged_links.add(link) + + def _link_package_versions(self, link, search): + """Return an InstallationCandidate or None""" + + version = None + if link.egg_fragment: + egg_info = link.egg_fragment + ext = link.ext + else: + egg_info, ext = link.splitext() + if not ext: + self._log_skipped_link(link, 'not a file') + return + if ext not in SUPPORTED_EXTENSIONS: + self._log_skipped_link( + link, 'unsupported archive format: %s' % ext) + return + if "binary" not in search.formats and ext == wheel_ext: + self._log_skipped_link( + link, 'No binaries permitted for %s' % search.supplied) + return + if "macosx10" in link.path and ext == '.zip': + self._log_skipped_link(link, 'macosx10 one') + return + if ext == wheel_ext: + try: + wheel = Wheel(link.filename) + except InvalidWheelFilename: + self._log_skipped_link(link, 'invalid wheel filename') + return + if canonicalize_name(wheel.name) != search.canonical: + self._log_skipped_link( + link, 'wrong project name (not %s)' % search.supplied) + return + if not wheel.supported(): + self._log_skipped_link( + link, 'it is not compatible with this Python') + return + + version = wheel.version + + # This should be up by the search.ok_binary check, but see issue 2700. + if "source" not in search.formats and ext != wheel_ext: + self._log_skipped_link( + link, 'No sources permitted for %s' % search.supplied) + return + + if not version: + version = egg_info_matches(egg_info, search.supplied, link) + if version is None: + self._log_skipped_link( + link, 'wrong project name (not %s)' % search.supplied) + return + + match = self._py_version_re.search(version) + if match: + version = version[:match.start()] + py_version = match.group(1) + if py_version != sys.version[:3]: + self._log_skipped_link( + link, 'Python version is incorrect') + return + logger.debug('Found link %s, version: %s', link, version) + + return InstallationCandidate(search.supplied, version, link) + + def _get_page(self, link): + return HTMLPage.get_page(link, session=self.session) + + +def egg_info_matches( + egg_info, search_name, link, + _egg_info_re=re.compile(r'([a-z0-9_.]+)-([a-z0-9_.!+-]+)', re.I)): + """Pull the version part out of a string. + + :param egg_info: The string to parse. E.g. foo-2.1 + :param search_name: The name of the package this belongs to. None to + infer the name. Note that this cannot unambiguously parse strings + like foo-2-2 which might be foo, 2-2 or foo-2, 2. + :param link: The link the string came from, for logging on failure. 
+ """ + match = _egg_info_re.search(egg_info) + if not match: + logger.debug('Could not parse version from link: %s', link) + return None + if search_name is None: + full_match = match.group(0) + return full_match[full_match.index('-'):] + name = match.group(0).lower() + # To match the "safe" name that pkg_resources creates: + name = name.replace('_', '-') + # project name and version must be separated by a dash + look_for = search_name.lower() + "-" + if name.startswith(look_for): + return match.group(0)[len(look_for):] + else: + return None + + +class HTMLPage(object): + """Represents one page, along with its URL""" + + def __init__(self, content, url, headers=None): + # Determine if we have any encoding information in our headers + encoding = None + if headers and "Content-Type" in headers: + content_type, params = cgi.parse_header(headers["Content-Type"]) + + if "charset" in params: + encoding = params['charset'] + + self.content = content + self.parsed = html5lib.parse( + self.content, + encoding=encoding, + namespaceHTMLElements=False, + ) + self.url = url + self.headers = headers + + def __str__(self): + return self.url + + @classmethod + def get_page(cls, link, skip_archives=True, session=None): + if session is None: + raise TypeError( + "get_page() missing 1 required keyword argument: 'session'" + ) + + url = link.url + url = url.split('#', 1)[0] + + # Check for VCS schemes that do not support lookup as web pages. + from pip.vcs import VcsSupport + for scheme in VcsSupport.schemes: + if url.lower().startswith(scheme) and url[len(scheme)] in '+:': + logger.debug('Cannot look at %s URL %s', scheme, link) + return None + + try: + if skip_archives: + filename = link.filename + for bad_ext in ARCHIVE_EXTENSIONS: + if filename.endswith(bad_ext): + content_type = cls._get_content_type( + url, session=session, + ) + if content_type.lower().startswith('text/html'): + break + else: + logger.debug( + 'Skipping page %s because of Content-Type: %s', + link, + content_type, + ) + return + + logger.debug('Getting page %s', url) + + # Tack index.html onto file:// URLs that point to directories + (scheme, netloc, path, params, query, fragment) = \ + urllib_parse.urlparse(url) + if (scheme == 'file' and + os.path.isdir(urllib_request.url2pathname(path))): + # add trailing slash if not present so urljoin doesn't trim + # final segment + if not url.endswith('/'): + url += '/' + url = urllib_parse.urljoin(url, 'index.html') + logger.debug(' file: URL is directory, getting %s', url) + + resp = session.get( + url, + headers={ + "Accept": "text/html", + "Cache-Control": "max-age=600", + }, + ) + resp.raise_for_status() + + # The check for archives above only works if the url ends with + # something that looks like an archive. However that is not a + # requirement of an url. Unless we issue a HEAD request on every + # url we cannot know ahead of time for sure if something is HTML + # or not. However we can check after we've downloaded it. 
+ content_type = resp.headers.get('Content-Type', 'unknown') + if not content_type.lower().startswith("text/html"): + logger.debug( + 'Skipping page %s because of Content-Type: %s', + link, + content_type, + ) + return + + inst = cls(resp.content, resp.url, resp.headers) + except requests.HTTPError as exc: + cls._handle_fail(link, exc, url) + except SSLError as exc: + reason = ("There was a problem confirming the ssl certificate: " + "%s" % exc) + cls._handle_fail(link, reason, url, meth=logger.info) + except requests.ConnectionError as exc: + cls._handle_fail(link, "connection error: %s" % exc, url) + except requests.Timeout: + cls._handle_fail(link, "timed out", url) + else: + return inst + + @staticmethod + def _handle_fail(link, reason, url, meth=None): + if meth is None: + meth = logger.debug + + meth("Could not fetch URL %s: %s - skipping", link, reason) + + @staticmethod + def _get_content_type(url, session): + """Get the Content-Type of the given url, using a HEAD request""" + scheme, netloc, path, query, fragment = urllib_parse.urlsplit(url) + if scheme not in ('http', 'https'): + # FIXME: some warning or something? + # assertion error? + return '' + + resp = session.head(url, allow_redirects=True) + resp.raise_for_status() + + return resp.headers.get("Content-Type", "") + + @cached_property + def base_url(self): + bases = [ + x for x in self.parsed.findall(".//base") + if x.get("href") is not None + ] + if bases and bases[0].get("href"): + return bases[0].get("href") + else: + return self.url + + @property + def links(self): + """Yields all links in the page""" + for anchor in self.parsed.findall(".//a"): + if anchor.get("href"): + href = anchor.get("href") + url = self.clean_link( + urllib_parse.urljoin(self.base_url, href) + ) + yield Link(url, self) + + _clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I) + + def clean_link(self, url): + """Makes sure a link is fully encoded. 
That is, if a ' ' shows up in
+        the link, it will be rewritten to %20 (while not over-quoting
+        % or other characters)."""
+        return self._clean_re.sub(
+            lambda match: '%%%2x' % ord(match.group(0)), url)
+
+
+class Link(object):
+
+    def __init__(self, url, comes_from=None):
+
+        # url can be a UNC windows share
+        if url.startswith('\\\\'):
+            url = path_to_url(url)
+
+        self.url = url
+        self.comes_from = comes_from
+
+    def __str__(self):
+        if self.comes_from:
+            return '%s (from %s)' % (self.url, self.comes_from)
+        else:
+            return str(self.url)
+
+    def __repr__(self):
+        return '<Link %s>' % self
+
+    def __eq__(self, other):
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url == other.url
+
+    def __ne__(self, other):
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url != other.url
+
+    def __lt__(self, other):
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url < other.url
+
+    def __le__(self, other):
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url <= other.url
+
+    def __gt__(self, other):
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url > other.url
+
+    def __ge__(self, other):
+        if not isinstance(other, Link):
+            return NotImplemented
+        return self.url >= other.url
+
+    def __hash__(self):
+        return hash(self.url)
+
+    @property
+    def filename(self):
+        _, netloc, path, _, _ = urllib_parse.urlsplit(self.url)
+        name = posixpath.basename(path.rstrip('/')) or netloc
+        name = urllib_parse.unquote(name)
+        assert name, ('URL %r produced no filename' % self.url)
+        return name
+
+    @property
+    def scheme(self):
+        return urllib_parse.urlsplit(self.url)[0]
+
+    @property
+    def netloc(self):
+        return urllib_parse.urlsplit(self.url)[1]
+
+    @property
+    def path(self):
+        return urllib_parse.unquote(urllib_parse.urlsplit(self.url)[2])
+
+    def splitext(self):
+        return splitext(posixpath.basename(self.path.rstrip('/')))
+
+    @property
+    def ext(self):
+        return self.splitext()[1]
+
+    @property
+    def url_without_fragment(self):
+        scheme, netloc, path, query, fragment = urllib_parse.urlsplit(self.url)
+        return urllib_parse.urlunsplit((scheme, netloc, path, query, None))
+
+    _egg_fragment_re = re.compile(r'[#&]egg=([^&]*)')
+
+    @property
+    def egg_fragment(self):
+        match = self._egg_fragment_re.search(self.url)
+        if not match:
+            return None
+        return match.group(1)
+
+    _subdirectory_fragment_re = re.compile(r'[#&]subdirectory=([^&]*)')
+
+    @property
+    def subdirectory_fragment(self):
+        match = self._subdirectory_fragment_re.search(self.url)
+        if not match:
+            return None
+        return match.group(1)
+
+    _hash_re = re.compile(
+        r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)'
+    )
+
+    @property
+    def hash(self):
+        match = self._hash_re.search(self.url)
+        if match:
+            return match.group(2)
+        return None
+
+    @property
+    def hash_name(self):
+        match = self._hash_re.search(self.url)
+        if match:
+            return match.group(1)
+        return None
+
+    @property
+    def show_url(self):
+        return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
+
+    @property
+    def is_wheel(self):
+        return self.ext == wheel_ext
+
+    @property
+    def is_artifact(self):
+        """
+        Determines if this points to an actual artifact (e.g. a tarball) or if
+        it points to an "abstract" thing like a path or a VCS location.
+        """
+        from pip.vcs import vcs
+
+        if self.scheme in vcs.all_schemes:
+            return False
+
+        return True
+
+
+FormatControl = namedtuple('FormatControl', 'no_binary only_binary')
+"""This object has two fields, no_binary and only_binary.
+ +If a field is falsy, it isn't set. If it is {':all:'}, it should match all +packages except those listed in the other field. Only one field can be set +to {':all:'} at a time. The rest of the time exact package name matches +are listed, with any given package only showing up in one field at a time. +""" + + +def fmt_ctl_handle_mutual_exclude(value, target, other): + new = value.split(',') + while ':all:' in new: + other.clear() + target.clear() + target.add(':all:') + del new[:new.index(':all:') + 1] + if ':none:' not in new: + # Without a none, we want to discard everything as :all: covers it + return + for name in new: + if name == ':none:': + target.clear() + continue + name = canonicalize_name(name) + other.discard(name) + target.add(name) + + +def fmt_ctl_formats(fmt_ctl, canonical_name): + result = set(["binary", "source"]) + if canonical_name in fmt_ctl.only_binary: + result.discard('source') + elif canonical_name in fmt_ctl.no_binary: + result.discard('binary') + elif ':all:' in fmt_ctl.only_binary: + result.discard('source') + elif ':all:' in fmt_ctl.no_binary: + result.discard('binary') + return frozenset(result) + + +def fmt_ctl_no_binary(fmt_ctl): + fmt_ctl_handle_mutual_exclude( + ':all:', fmt_ctl.no_binary, fmt_ctl.only_binary) + + +def fmt_ctl_no_use_wheel(fmt_ctl): + fmt_ctl_no_binary(fmt_ctl) + warnings.warn( + '--no-use-wheel is deprecated and will be removed in the future. ' + ' Please use --no-binary :all: instead.', RemovedInPip10Warning, + stacklevel=2) + + +Search = namedtuple('Search', 'supplied canonical formats') +"""Capture key aspects of a search. + +:attribute supplied: The user supplied package. +:attribute canonical: The canonical package name. +:attribute formats: The formats allowed for this package. Should be a set + with 'binary' or 'source' or both in it. +""" diff --git a/venv/lib/python3.5/site-packages/pip/locations.py b/venv/lib/python3.5/site-packages/pip/locations.py new file mode 100644 index 0000000..1bd0fae --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/locations.py @@ -0,0 +1,182 @@ +"""Locations where we look for configs, install stuff, etc""" +from __future__ import absolute_import + +import os +import os.path +import site +import sys + +from distutils import sysconfig +from distutils.command.install import install, SCHEME_KEYS # noqa + +from pip.compat import WINDOWS, expanduser +from pip.utils import appdirs + + +# Application Directories +USER_CACHE_DIR = appdirs.user_cache_dir("pip") + + +DELETE_MARKER_MESSAGE = '''\ +This file is placed here by pip to indicate the source was put +here by pip. + +Once this package is successfully installed this source code will be +deleted (unless you remove this file). +''' +PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt' + + +def write_delete_marker_file(directory): + """ + Write the pip delete marker file into this directory. + """ + filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME) + with open(filepath, 'w') as marker_fp: + marker_fp.write(DELETE_MARKER_MESSAGE) + + +def running_under_virtualenv(): + """ + Return True if we're running inside a virtualenv, False otherwise. + + """ + if hasattr(sys, 'real_prefix'): + return True + elif sys.prefix != getattr(sys, "base_prefix", sys.prefix): + return True + + return False + + +def virtualenv_no_global(): + """ + Return True if in a venv and no system site packages. 
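+
+    (Implicitly returns None, which is falsy, when the marker file is
+    absent or we are not running inside a virtualenv.)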
+ """ + # this mirrors the logic in virtualenv.py for locating the + # no-global-site-packages.txt file + site_mod_dir = os.path.dirname(os.path.abspath(site.__file__)) + no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt') + if running_under_virtualenv() and os.path.isfile(no_global_file): + return True + + +if running_under_virtualenv(): + src_prefix = os.path.join(sys.prefix, 'src') +else: + # FIXME: keep src in cwd for now (it is not a temporary folder) + try: + src_prefix = os.path.join(os.getcwd(), 'src') + except OSError: + # In case the current working directory has been renamed or deleted + sys.exit( + "The folder you are executing pip from can no longer be found." + ) + +# under Mac OS X + virtualenv sys.prefix is not properly resolved +# it is something like /path/to/python/bin/.. +# Note: using realpath due to tmp dirs on OSX being symlinks +src_prefix = os.path.abspath(src_prefix) + +# FIXME doesn't account for venv linked to global site-packages + +site_packages = sysconfig.get_python_lib() +user_site = site.USER_SITE +user_dir = expanduser('~') +if WINDOWS: + bin_py = os.path.join(sys.prefix, 'Scripts') + bin_user = os.path.join(user_site, 'Scripts') + # buildout uses 'bin' on Windows too? + if not os.path.exists(bin_py): + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') + + config_basename = 'pip.ini' + + legacy_storage_dir = os.path.join(user_dir, 'pip') + legacy_config_file = os.path.join( + legacy_storage_dir, + config_basename, + ) +else: + bin_py = os.path.join(sys.prefix, 'bin') + bin_user = os.path.join(user_site, 'bin') + + config_basename = 'pip.conf' + + legacy_storage_dir = os.path.join(user_dir, '.pip') + legacy_config_file = os.path.join( + legacy_storage_dir, + config_basename, + ) + + # Forcing to use /usr/local/bin for standard Mac OS X framework installs + # Also log to ~/Library/Logs/ for use with the Console.app log viewer + if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/': + bin_py = '/usr/local/bin' + +site_config_files = [ + os.path.join(path, config_basename) + for path in appdirs.site_config_dirs('pip') +] + + +def distutils_scheme(dist_name, user=False, home=None, root=None, + isolated=False, prefix=None): + """ + Return a distutils install scheme + """ + from distutils.dist import Distribution + + scheme = {} + + if isolated: + extra_dist_args = {"script_args": ["--no-user-cfg"]} + else: + extra_dist_args = {} + dist_args = {'name': dist_name} + dist_args.update(extra_dist_args) + + d = Distribution(dist_args) + d.parse_config_files() + i = d.get_command_obj('install', create=True) + # NOTE: setting user or home has the side-effect of creating the home dir + # or user base for installations during finalize_options() + # ideally, we'd prefer a scheme class that has no side-effects. + assert not (user and prefix), "user={0} prefix={1}".format(user, prefix) + i.user = user or i.user + if user: + i.prefix = "" + i.prefix = prefix or i.prefix + i.home = home or i.home + i.root = root or i.root + i.finalize_options() + for key in SCHEME_KEYS: + scheme[key] = getattr(i, 'install_' + key) + + # install_lib specified in setup.cfg should install *everything* + # into there (i.e. it takes precedence over both purelib and + # platlib). 
Note, i.install_lib is *always* set after + # finalize_options(); we only want to override here if the user + # has explicitly requested it hence going back to the config + if 'install_lib' in d.get_option_dict('install'): + scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib)) + + if running_under_virtualenv(): + scheme['headers'] = os.path.join( + sys.prefix, + 'include', + 'site', + 'python' + sys.version[:3], + dist_name, + ) + + if root is not None: + path_no_drive = os.path.splitdrive( + os.path.abspath(scheme["headers"]))[1] + scheme["headers"] = os.path.join( + root, + path_no_drive[1:], + ) + + return scheme diff --git a/venv/lib/python3.5/site-packages/pip/models/__init__.py b/venv/lib/python3.5/site-packages/pip/models/__init__.py new file mode 100644 index 0000000..1d727d7 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/models/__init__.py @@ -0,0 +1,4 @@ +from pip.models.index import Index, PyPI + + +__all__ = ["Index", "PyPI"] diff --git a/venv/lib/python3.5/site-packages/pip/models/index.py b/venv/lib/python3.5/site-packages/pip/models/index.py new file mode 100644 index 0000000..be99119 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/models/index.py @@ -0,0 +1,16 @@ +from pip._vendor.six.moves.urllib import parse as urllib_parse + + +class Index(object): + def __init__(self, url): + self.url = url + self.netloc = urllib_parse.urlsplit(url).netloc + self.simple_url = self.url_to_path('simple') + self.pypi_url = self.url_to_path('pypi') + self.pip_json_url = self.url_to_path('pypi/pip/json') + + def url_to_path(self, path): + return urllib_parse.urljoin(self.url, path) + + +PyPI = Index('https://pypi.python.org/') diff --git a/venv/lib/python3.5/site-packages/pip/operations/__init__.py b/venv/lib/python3.5/site-packages/pip/operations/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.5/site-packages/pip/operations/freeze.py b/venv/lib/python3.5/site-packages/pip/operations/freeze.py new file mode 100644 index 0000000..7493ced --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/operations/freeze.py @@ -0,0 +1,122 @@ +from __future__ import absolute_import + +import logging +import re + +import pip +from pip.req import InstallRequirement +from pip.utils import get_installed_distributions +from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.pkg_resources import RequirementParseError + + +logger = logging.getLogger(__name__) + + +def freeze( + requirement=None, + find_links=None, local_only=None, user_only=None, skip_regex=None, + default_vcs=None, + isolated=False, + wheel_cache=None, + skip=()): + find_links = find_links or [] + skip_match = None + + if skip_regex: + skip_match = re.compile(skip_regex).search + + dependency_links = [] + + for dist in pkg_resources.working_set: + if dist.has_metadata('dependency_links.txt'): + dependency_links.extend( + dist.get_metadata_lines('dependency_links.txt') + ) + for link in find_links: + if '#egg=' in link: + dependency_links.append(link) + for link in find_links: + yield '-f %s' % link + installations = {} + for dist in get_installed_distributions(local_only=local_only, + skip=(), + user_only=user_only): + try: + req = pip.FrozenRequirement.from_dist( + dist, + dependency_links + ) + except RequirementParseError: + logger.warning( + "Could not parse requirement: %s", + dist.project_name + ) + continue + installations[req.name] = req + + if requirement: + with open(requirement) as req_file: + for line in 
req_file: + if (not line.strip() or + line.strip().startswith('#') or + (skip_match and skip_match(line)) or + line.startswith(( + '-r', '--requirement', + '-Z', '--always-unzip', + '-f', '--find-links', + '-i', '--index-url', + '--pre', + '--trusted-host', + '--process-dependency-links', + '--extra-index-url'))): + yield line.rstrip() + continue + + if line.startswith('-e') or line.startswith('--editable'): + if line.startswith('-e'): + line = line[2:].strip() + else: + line = line[len('--editable'):].strip().lstrip('=') + line_req = InstallRequirement.from_editable( + line, + default_vcs=default_vcs, + isolated=isolated, + wheel_cache=wheel_cache, + ) + else: + line_req = InstallRequirement.from_line( + line, + isolated=isolated, + wheel_cache=wheel_cache, + ) + + if not line_req.name: + logger.info( + "Skipping line because it's not clear what it " + "would install: %s", + line.strip(), + ) + logger.info( + " (add #egg=PackageName to the URL to avoid" + " this warning)" + ) + elif line_req.name not in installations: + logger.warning( + "Requirement file contains %s, but that package is" + " not installed", + line.strip(), + ) + else: + yield str(installations[line_req.name]).rstrip() + del installations[line_req.name] + + yield( + '## The following requirements were added by ' + 'pip freeze:' + ) + for installation in sorted( + installations.values(), key=lambda x: x.name.lower()): + if canonicalize_name(installation.name) not in skip: + yield str(installation).rstrip() diff --git a/venv/lib/python3.5/site-packages/pip/pep425tags.py b/venv/lib/python3.5/site-packages/pip/pep425tags.py new file mode 100644 index 0000000..e118457 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/pep425tags.py @@ -0,0 +1,338 @@ +"""Generate and work with PEP 425 Compatibility Tags.""" +from __future__ import absolute_import + +import re +import sys +import warnings +import platform +import logging +import ctypes + +try: + import sysconfig +except ImportError: # pragma nocover + # Python < 2.7 + import distutils.sysconfig as sysconfig +import distutils.util + +from pip.compat import OrderedDict + + +logger = logging.getLogger(__name__) + +_osx_arch_pat = re.compile(r'(.+)_(\d+)_(\d+)_(.+)') + + +def get_config_var(var): + try: + return sysconfig.get_config_var(var) + except IOError as e: # Issue #1074 + warnings.warn("{0}".format(e), RuntimeWarning) + return None + + +def get_abbr_impl(): + """Return abbreviated implementation name.""" + if hasattr(sys, 'pypy_version_info'): + pyimpl = 'pp' + elif sys.platform.startswith('java'): + pyimpl = 'jy' + elif sys.platform == 'cli': + pyimpl = 'ip' + else: + pyimpl = 'cp' + return pyimpl + + +def get_impl_ver(): + """Return implementation version.""" + impl_ver = get_config_var("py_version_nodot") + if not impl_ver or get_abbr_impl() == 'pp': + impl_ver = ''.join(map(str, get_impl_version_info())) + return impl_ver + + +def get_impl_version_info(): + """Return sys.version_info-like tuple for use in decrementing the minor + version.""" + if get_abbr_impl() == 'pp': + # as per https://github.com/pypa/pip/issues/2882 + return (sys.version_info[0], sys.pypy_version_info.major, + sys.pypy_version_info.minor) + else: + return sys.version_info[0], sys.version_info[1] + + +def get_impl_tag(): + """ + Returns the Tag for this specific implementation. 
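+
+    For example, CPython 3.5 yields 'cp35'.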
+ """ + return "{0}{1}".format(get_abbr_impl(), get_impl_ver()) + + +def get_flag(var, fallback, expected=True, warn=True): + """Use a fallback method for determining SOABI flags if the needed config + var is unset or unavailable.""" + val = get_config_var(var) + if val is None: + if warn: + logger.debug("Config variable '%s' is unset, Python ABI tag may " + "be incorrect", var) + return fallback() + return val == expected + + +def get_abi_tag(): + """Return the ABI tag based on SOABI (if available) or emulate SOABI + (CPython 2, PyPy).""" + soabi = get_config_var('SOABI') + impl = get_abbr_impl() + if not soabi and impl in ('cp', 'pp') and hasattr(sys, 'maxunicode'): + d = '' + m = '' + u = '' + if get_flag('Py_DEBUG', + lambda: hasattr(sys, 'gettotalrefcount'), + warn=(impl == 'cp')): + d = 'd' + if get_flag('WITH_PYMALLOC', + lambda: impl == 'cp', + warn=(impl == 'cp')): + m = 'm' + if get_flag('Py_UNICODE_SIZE', + lambda: sys.maxunicode == 0x10ffff, + expected=4, + warn=(impl == 'cp' and + sys.version_info < (3, 3))) \ + and sys.version_info < (3, 3): + u = 'u' + abi = '%s%s%s%s%s' % (impl, get_impl_ver(), d, m, u) + elif soabi and soabi.startswith('cpython-'): + abi = 'cp' + soabi.split('-')[1] + elif soabi: + abi = soabi.replace('.', '_').replace('-', '_') + else: + abi = None + return abi + + +def _is_running_32bit(): + return sys.maxsize == 2147483647 + + +def get_platform(): + """Return our platform name 'win32', 'linux_x86_64'""" + if sys.platform == 'darwin': + # distutils.util.get_platform() returns the release based on the value + # of MACOSX_DEPLOYMENT_TARGET on which Python was built, which may + # be signficantly older than the user's current machine. + release, _, machine = platform.mac_ver() + split_ver = release.split('.') + + if machine == "x86_64" and _is_running_32bit(): + machine = "i386" + elif machine == "ppc64" and _is_running_32bit(): + machine = "ppc" + + return 'macosx_{0}_{1}_{2}'.format(split_ver[0], split_ver[1], machine) + + # XXX remove distutils dependency + result = distutils.util.get_platform().replace('.', '_').replace('-', '_') + if result == "linux_x86_64" and _is_running_32bit(): + # 32 bit Python program (running on a 64 bit Linux): pip should only + # install and run 32 bit compiled extensions in that case. + result = "linux_i686" + + return result + + +def is_manylinux1_compatible(): + # Only Linux, and only x86-64 / i686 + if get_platform() not in ("linux_x86_64", "linux_i686"): + return False + + # Check for presence of _manylinux module + try: + import _manylinux + return bool(_manylinux.manylinux1_compatible) + except (ImportError, AttributeError): + # Fall through to heuristic check below + pass + + # Check glibc version. CentOS 5 uses glibc 2.5. + return have_compatible_glibc(2, 5) + + +def have_compatible_glibc(major, minimum_minor): + # ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen + # manpage says, "If filename is NULL, then the returned handle is for the + # main program". This way we can let the linker do the work to figure out + # which libc our process is actually using. + process_namespace = ctypes.CDLL(None) + try: + gnu_get_libc_version = process_namespace.gnu_get_libc_version + except AttributeError: + # Symbol doesn't exist -> therefore, we are not linked to + # glibc. + return False + + # Call gnu_get_libc_version, which returns a string like "2.5". 
+ gnu_get_libc_version.restype = ctypes.c_char_p + version_str = gnu_get_libc_version() + # py2 / py3 compatibility: + if not isinstance(version_str, str): + version_str = version_str.decode("ascii") + + # Parse string and check against requested version. + version = [int(piece) for piece in version_str.split(".")] + if len(version) < 2: + warnings.warn("Expected glibc version with 2 components major.minor," + " got: %s" % version_str, RuntimeWarning) + return False + return version[0] == major and version[1] >= minimum_minor + + +def get_darwin_arches(major, minor, machine): + """Return a list of supported arches (including group arches) for + the given major, minor and machine architecture of an OS X machine. + """ + arches = [] + + def _supports_arch(major, minor, arch): + # Looking at the application support for OS X versions in the chart + # provided by https://en.wikipedia.org/wiki/OS_X#Versions it appears + # our timeline looks roughly like: + # + # 10.0 - Introduces ppc support. + # 10.4 - Introduces ppc64, i386, and x86_64 support, however the ppc64 + # and x86_64 support is CLI only, and cannot be used for GUI + # applications. + # 10.5 - Extends ppc64 and x86_64 support to cover GUI applications. + # 10.6 - Drops support for ppc64 + # 10.7 - Drops support for ppc + # + # Given that we do not know if we're installing a CLI or a GUI + # application, we must be conservative and assume it might be a GUI + # application and behave as if ppc64 and x86_64 support did not occur + # until 10.5. + # + # Note: The above information is taken from the "Application support" + # column in the chart not the "Processor support" since I believe + # that we care about what instruction sets an application can use + # not which processors the OS supports. + if arch == 'ppc': + return (major, minor) <= (10, 5) + if arch == 'ppc64': + return (major, minor) == (10, 5) + if arch == 'i386': + return (major, minor) >= (10, 4) + if arch == 'x86_64': + return (major, minor) >= (10, 5) + if arch in groups: + for garch in groups[arch]: + if _supports_arch(major, minor, garch): + return True + return False + + groups = OrderedDict([ + ("fat", ("i386", "ppc")), + ("intel", ("x86_64", "i386")), + ("fat64", ("x86_64", "ppc64")), + ("fat32", ("x86_64", "i386", "ppc")), + ]) + + if _supports_arch(major, minor, machine): + arches.append(machine) + + for garch in groups: + if machine in groups[garch] and _supports_arch(major, minor, garch): + arches.append(garch) + + arches.append('universal') + + return arches + + +def get_supported(versions=None, noarch=False): + """Return a list of supported tags for each version specified in + `versions`. + + :param versions: a list of string versions, of the form ["33", "32"], + or None. The first version will be assumed to support our ABI. + """ + supported = [] + + # Versions must be given with respect to the preference + if versions is None: + versions = [] + version_info = get_impl_version_info() + major = version_info[:-1] + # Support all previous minor Python versions. 
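+        # e.g. on CPython 3.5 this produces ["35", "34", "33", "32", "31",
+        # "30"], so wheels tagged for older minor versions still match.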
+ for minor in range(version_info[-1], -1, -1): + versions.append(''.join(map(str, major + (minor,)))) + + impl = get_abbr_impl() + + abis = [] + + abi = get_abi_tag() + if abi: + abis[0:0] = [abi] + + abi3s = set() + import imp + for suffix in imp.get_suffixes(): + if suffix[0].startswith('.abi'): + abi3s.add(suffix[0].split('.', 2)[1]) + + abis.extend(sorted(list(abi3s))) + + abis.append('none') + + if not noarch: + arch = get_platform() + if sys.platform == 'darwin': + # support macosx-10.6-intel on macosx-10.9-x86_64 + match = _osx_arch_pat.match(arch) + if match: + name, major, minor, actual_arch = match.groups() + tpl = '{0}_{1}_%i_%s'.format(name, major) + arches = [] + for m in reversed(range(int(minor) + 1)): + for a in get_darwin_arches(int(major), m, actual_arch): + arches.append(tpl % (m, a)) + else: + # arch pattern didn't match (?!) + arches = [arch] + elif is_manylinux1_compatible(): + arches = [arch.replace('linux', 'manylinux1'), arch] + else: + arches = [arch] + + # Current version, current API (built specifically for our Python): + for abi in abis: + for arch in arches: + supported.append(('%s%s' % (impl, versions[0]), abi, arch)) + + # Has binaries, does not use the Python API: + for arch in arches: + supported.append(('py%s' % (versions[0][0]), 'none', arch)) + + # No abi / arch, but requires our implementation: + supported.append(('%s%s' % (impl, versions[0]), 'none', 'any')) + # Tagged specifically as being cross-version compatible + # (with just the major version specified) + supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any')) + + # No abi / arch, generic Python + for i, version in enumerate(versions): + supported.append(('py%s' % (version,), 'none', 'any')) + if i == 0: + supported.append(('py%s' % (version[0]), 'none', 'any')) + + return supported + +supported_tags = get_supported() +supported_tags_noarch = get_supported(noarch=True) + +implementation_tag = get_impl_tag() diff --git a/venv/lib/python3.5/site-packages/pip/req/__init__.py b/venv/lib/python3.5/site-packages/pip/req/__init__.py new file mode 100644 index 0000000..00185a4 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/req/__init__.py @@ -0,0 +1,10 @@ +from __future__ import absolute_import + +from .req_install import InstallRequirement +from .req_set import RequirementSet, Requirements +from .req_file import parse_requirements + +__all__ = [ + "RequirementSet", "Requirements", "InstallRequirement", + "parse_requirements", +] diff --git a/venv/lib/python3.5/site-packages/pip/req/req_file.py b/venv/lib/python3.5/site-packages/pip/req/req_file.py new file mode 100644 index 0000000..2cfb479 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/req/req_file.py @@ -0,0 +1,342 @@ +""" +Requirements file parsing +""" + +from __future__ import absolute_import + +import os +import re +import shlex +import sys +import optparse +import warnings + +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves import filterfalse + +import pip +from pip.download import get_file_content +from pip.req.req_install import InstallRequirement +from pip.exceptions import (RequirementsFileParseError) +from pip.utils.deprecation import RemovedInPip10Warning +from pip import cmdoptions + +__all__ = ['parse_requirements'] + +SCHEME_RE = re.compile(r'^(http|https|file):', re.I) +COMMENT_RE = re.compile(r'(^|\s)+#.*$') + +SUPPORTED_OPTIONS = [ + cmdoptions.constraints, + cmdoptions.editable, + cmdoptions.requirements, + cmdoptions.no_index, + cmdoptions.index_url, + 
cmdoptions.find_links,
+    cmdoptions.extra_index_url,
+    cmdoptions.allow_external,
+    cmdoptions.allow_all_external,
+    cmdoptions.no_allow_external,
+    cmdoptions.allow_unsafe,
+    cmdoptions.no_allow_unsafe,
+    cmdoptions.use_wheel,
+    cmdoptions.no_use_wheel,
+    cmdoptions.always_unzip,
+    cmdoptions.no_binary,
+    cmdoptions.only_binary,
+    cmdoptions.pre,
+    cmdoptions.process_dependency_links,
+    cmdoptions.trusted_host,
+    cmdoptions.require_hashes,
+]
+
+# options to be passed to requirements
+SUPPORTED_OPTIONS_REQ = [
+    cmdoptions.install_options,
+    cmdoptions.global_options,
+    cmdoptions.hash,
+]
+
+# the 'dest' string values
+SUPPORTED_OPTIONS_REQ_DEST = [o().dest for o in SUPPORTED_OPTIONS_REQ]
+
+
+def parse_requirements(filename, finder=None, comes_from=None, options=None,
+                       session=None, constraint=False, wheel_cache=None):
+    """Parse a requirements file and yield InstallRequirement instances.
+
+    :param filename: Path or url of requirements file.
+    :param finder: Instance of pip.index.PackageFinder.
+    :param comes_from: Origin description of requirements.
+    :param options: cli options.
+    :param session: Instance of pip.download.PipSession.
+    :param constraint: If true, parsing a constraint file rather than
+        requirements file.
+    :param wheel_cache: Instance of pip.wheel.WheelCache
+    """
+    if session is None:
+        raise TypeError(
+            "parse_requirements() missing 1 required keyword argument: "
+            "'session'"
+        )
+
+    _, content = get_file_content(
+        filename, comes_from=comes_from, session=session
+    )
+
+    lines_enum = preprocess(content, options)
+
+    for line_number, line in lines_enum:
+        req_iter = process_line(line, filename, line_number, finder,
+                                comes_from, options, session, wheel_cache,
+                                constraint=constraint)
+        for req in req_iter:
+            yield req
+
+
+def preprocess(content, options):
+    """Split, filter, and join lines, and return a line iterator
+
+    :param content: the content of the requirements file
+    :param options: cli options
+    """
+    lines_enum = enumerate(content.splitlines(), start=1)
+    lines_enum = join_lines(lines_enum)
+    lines_enum = ignore_comments(lines_enum)
+    lines_enum = skip_regex(lines_enum, options)
+    return lines_enum
+
+
+def process_line(line, filename, line_number, finder=None, comes_from=None,
+                 options=None, session=None, wheel_cache=None,
+                 constraint=False):
+    """Process a single requirements line; this can result in creating or
+    yielding requirements, or updating the finder.
+
+    For lines that contain requirements, the only options that have an effect
+    are from SUPPORTED_OPTIONS_REQ, and they are scoped to the
+    requirement. Other options from SUPPORTED_OPTIONS may be present, but are
+    ignored.
+
+    For lines that do not contain requirements, the only options that have an
+    effect are from SUPPORTED_OPTIONS. Options from SUPPORTED_OPTIONS_REQ may
+    be present, but are ignored. These lines may contain multiple options
+    (although our docs imply only one is supported), and all are parsed and
+    affect the finder.
+
+    :param constraint: If True, parsing a constraints file.
+    :param options: OptionParser options that we may update
+    """
+    parser = build_parser()
+    defaults = parser.get_default_values()
+    defaults.index_url = None
+    if finder:
+        # `finder.format_control` will be updated during parsing
+        defaults.format_control = finder.format_control
+    args_str, options_str = break_args_options(line)
+    if sys.version_info < (2, 7, 3):
+        # Prior to 2.7.3, shlex cannot deal with unicode entries
+        options_str = options_str.encode('utf8')
+    opts, _ = parser.parse_args(shlex.split(options_str), defaults)
+
+    # preserve for the nested code path
+    line_comes_from = '%s %s (line %s)' % (
+        '-c' if constraint else '-r', filename, line_number)
+
+    # yield a line requirement
+    if args_str:
+        isolated = options.isolated_mode if options else False
+        if options:
+            cmdoptions.check_install_build_global(options, opts)
+        # get the options that apply to requirements
+        req_options = {}
+        for dest in SUPPORTED_OPTIONS_REQ_DEST:
+            if dest in opts.__dict__ and opts.__dict__[dest]:
+                req_options[dest] = opts.__dict__[dest]
+        yield InstallRequirement.from_line(
+            args_str, line_comes_from, constraint=constraint,
+            isolated=isolated, options=req_options, wheel_cache=wheel_cache
+        )
+
+    # yield an editable requirement
+    elif opts.editables:
+        isolated = options.isolated_mode if options else False
+        default_vcs = options.default_vcs if options else None
+        yield InstallRequirement.from_editable(
+            opts.editables[0], comes_from=line_comes_from,
+            constraint=constraint, default_vcs=default_vcs, isolated=isolated,
+            wheel_cache=wheel_cache
+        )
+
+    # parse a nested requirements file
+    elif opts.requirements or opts.constraints:
+        if opts.requirements:
+            req_path = opts.requirements[0]
+            nested_constraint = False
+        else:
+            req_path = opts.constraints[0]
+            nested_constraint = True
+        # original file is over http
+        if SCHEME_RE.search(filename):
+            # do a url join so relative paths work
+            req_path = urllib_parse.urljoin(filename, req_path)
+        # original file and nested file are paths
+        elif not SCHEME_RE.search(req_path):
+            # do a join so relative paths work
+            req_path = os.path.join(os.path.dirname(filename), req_path)
+        # TODO: Why not use `comes_from='-r {} (line {})'` here as well?
+        parser = parse_requirements(
+            req_path, finder, comes_from, options, session,
+            constraint=nested_constraint, wheel_cache=wheel_cache
+        )
+        for req in parser:
+            yield req
+
+    # percolate hash-checking option upward
+    elif opts.require_hashes:
+        options.require_hashes = opts.require_hashes
+
+    # set finder options
+    elif finder:
+        if opts.allow_external:
+            warnings.warn(
+                "--allow-external has been deprecated and will be removed in "
+                "the future. Due to changes in the repository protocol, it no "
+                "longer has any effect.",
+                RemovedInPip10Warning,
+            )
+
+        if opts.allow_all_external:
+            warnings.warn(
+                "--allow-all-external has been deprecated and will be removed "
+                "in the future. Due to changes in the repository protocol, it "
+                "no longer has any effect.",
+                RemovedInPip10Warning,
+            )
+
+        if opts.allow_unverified:
+            warnings.warn(
+                "--allow-unverified has been deprecated and will be removed "
+                "in the future. Due to changes in the repository protocol, it "
+                "no longer has any effect.",
+                RemovedInPip10Warning,
+            )
+
+        if opts.index_url:
+            finder.index_urls = [opts.index_url]
+        if opts.use_wheel is False:
+            finder.use_wheel = False
+            pip.index.fmt_ctl_no_use_wheel(finder.format_control)
+        if opts.no_index is True:
+            finder.index_urls = []
+        if opts.extra_index_urls:
+            finder.index_urls.extend(opts.extra_index_urls)
+        if opts.find_links:
+            # FIXME: it would be nice to keep track of the source
+            # of the find_links: support a find-links local path
+            # relative to a requirements file.
+            value = opts.find_links[0]
+            req_dir = os.path.dirname(os.path.abspath(filename))
+            relative_to_reqs_file = os.path.join(req_dir, value)
+            if os.path.exists(relative_to_reqs_file):
+                value = relative_to_reqs_file
+            finder.find_links.append(value)
+        if opts.pre:
+            finder.allow_all_prereleases = True
+        if opts.process_dependency_links:
+            finder.process_dependency_links = True
+        if opts.trusted_hosts:
+            finder.secure_origins.extend(
+                ("*", host, "*") for host in opts.trusted_hosts)
+
+
+def break_args_options(line):
+    """Break up the line into an args and options string. We only want to shlex
+    (and then optparse) the options, not the args. args can contain markers
+    which are corrupted by shlex.
+    """
+    tokens = line.split(' ')
+    args = []
+    options = tokens[:]
+    for token in tokens:
+        if token.startswith('-') or token.startswith('--'):
+            break
+        else:
+            args.append(token)
+            options.pop(0)
+    return ' '.join(args), ' '.join(options)
+
+
+def build_parser():
+    """
+    Return a parser for parsing requirement lines
+    """
+    parser = optparse.OptionParser(add_help_option=False)
+
+    option_factories = SUPPORTED_OPTIONS + SUPPORTED_OPTIONS_REQ
+    for option_factory in option_factories:
+        option = option_factory()
+        parser.add_option(option)
+
+    # By default optparse sys.exits on parsing errors. We want to wrap
+    # that in our own exception.
+    def parser_exit(self, msg):
+        raise RequirementsFileParseError(msg)
+    parser.exit = parser_exit
+
+    return parser
+
+
+def join_lines(lines_enum):
+    """Joins a line ending in '\' with the line that follows it (a comment
+    ending in '\' is not treated as a continuation). The joined line takes on
+    the index of the first line.
+    """
+    primary_line_number = None
+    new_line = []
+    for line_number, line in lines_enum:
+        if not line.endswith('\\') or COMMENT_RE.match(line):
+            if COMMENT_RE.match(line):
+                # this ensures comments are always matched later
+                line = ' ' + line
+            if new_line:
+                new_line.append(line)
+                yield primary_line_number, ''.join(new_line)
+                new_line = []
+            else:
+                yield line_number, line
+        else:
+            if not new_line:
+                primary_line_number = line_number
+            new_line.append(line.strip('\\'))
+
+    # last line contains \
+    if new_line:
+        yield primary_line_number, ''.join(new_line)
+
+    # TODO: handle space after '\'.
+
+
+def ignore_comments(lines_enum):
+    """
+    Strips comments and filters empty lines.
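+
+    For example, (3, 'pip  # pinned for CI') is yielded as (3, 'pip'),
+    and a line that is only a comment is dropped entirely.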
+ """ + for line_number, line in lines_enum: + line = COMMENT_RE.sub('', line) + line = line.strip() + if line: + yield line_number, line + + +def skip_regex(lines_enum, options): + """ + Skip lines that match '--skip-requirements-regex' pattern + + Note: the regex pattern is only built once + """ + skip_regex = options.skip_requirements_regex if options else None + if skip_regex: + pattern = re.compile(skip_regex) + lines_enum = filterfalse( + lambda e: pattern.search(e[1]), + lines_enum) + return lines_enum diff --git a/venv/lib/python3.5/site-packages/pip/req/req_install.py b/venv/lib/python3.5/site-packages/pip/req/req_install.py new file mode 100644 index 0000000..922173f --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/req/req_install.py @@ -0,0 +1,1190 @@ +from __future__ import absolute_import + +import logging +import os +import re +import shutil +import sys +import tempfile +import traceback +import warnings +import zipfile + +from distutils import sysconfig +from distutils.util import change_root +from email.parser import FeedParser + +from pip._vendor import pkg_resources, six +from pip._vendor.distlib.markers import interpret as markers_interpret +from pip._vendor.packaging import specifiers +from pip._vendor.packaging.requirements import InvalidRequirement, Requirement +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.packaging.version import Version +from pip._vendor.six.moves import configparser + +import pip.wheel + +from pip.compat import native_str, get_stdlib, WINDOWS +from pip.download import is_url, url_to_path, path_to_url, is_archive_file +from pip.exceptions import ( + InstallationError, UninstallationError, UnsupportedWheel, +) +from pip.locations import ( + bin_py, running_under_virtualenv, PIP_DELETE_MARKER_FILENAME, bin_user, +) +from pip.utils import ( + display_path, rmtree, ask_path_exists, backup_dir, is_installable_dir, + dist_in_usersite, dist_in_site_packages, egg_link_path, + call_subprocess, read_text_file, FakeFile, _make_build_dir, ensure_dir, + get_installed_version, normalize_path, dist_is_local, +) + +from pip.utils.hashes import Hashes +from pip.utils.deprecation import RemovedInPip9Warning, RemovedInPip10Warning +from pip.utils.logging import indent_log +from pip.utils.setuptools_build import SETUPTOOLS_SHIM +from pip.utils.ui import open_spinner +from pip.req.req_uninstall import UninstallPathSet +from pip.vcs import vcs +from pip.wheel import move_wheel_files, Wheel + + +logger = logging.getLogger(__name__) + +operators = specifiers.Specifier._operators.keys() + + +def _strip_extras(path): + m = re.match(r'^(.+)(\[[^\]]+\])$', path) + extras = None + if m: + path_no_extras = m.group(1) + extras = m.group(2) + else: + path_no_extras = path + + return path_no_extras, extras + + +class InstallRequirement(object): + + def __init__(self, req, comes_from, source_dir=None, editable=False, + link=None, as_egg=False, update=True, + pycompile=True, markers=None, isolated=False, options=None, + wheel_cache=None, constraint=False): + self.extras = () + if isinstance(req, six.string_types): + try: + req = Requirement(req) + except InvalidRequirement: + if os.path.sep in req: + add_msg = "It looks like a path. Does it exist ?" + elif '=' in req and not any(op in req for op in operators): + add_msg = "= is not a valid operator. Did you mean == ?" 
+                else:
+                    add_msg = traceback.format_exc()
+                raise InstallationError(
+                    "Invalid requirement: '%s'\n%s" % (req, add_msg))
+            self.extras = req.extras
+
+        self.req = req
+        self.comes_from = comes_from
+        self.constraint = constraint
+        self.source_dir = source_dir
+        self.editable = editable
+
+        self._wheel_cache = wheel_cache
+        self.link = self.original_link = link
+        self.as_egg = as_egg
+        self.markers = markers
+        self._egg_info_path = None
+        # This holds the pkg_resources.Distribution object if this requirement
+        # is already available:
+        self.satisfied_by = None
+        # This holds the pkg_resources.Distribution object if this requirement
+        # conflicts with another installed distribution:
+        self.conflicts_with = None
+        # Temporary build location
+        self._temp_build_dir = None
+        # Used to store the global directory where the _temp_build_dir should
+        # have been created. Cf _correct_build_location method.
+        self._ideal_build_dir = None
+        # True if the editable should be updated:
+        self.update = update
+        # Set to True after successful installation
+        self.install_succeeded = None
+        # UninstallPathSet of uninstalled distribution (for possible rollback)
+        self.uninstalled = None
+        # Set True if a legitimate do-nothing-on-uninstall has happened - e.g.
+        # system site packages, stdlib packages.
+        self.nothing_to_uninstall = False
+        self.use_user_site = False
+        self.target_dir = None
+        self.options = options if options else {}
+        self.pycompile = pycompile
+        # Set to True after successful preparation of this requirement
+        self.prepared = False
+
+        self.isolated = isolated
+
+    @classmethod
+    def from_editable(cls, editable_req, comes_from=None, default_vcs=None,
+                      isolated=False, options=None, wheel_cache=None,
+                      constraint=False):
+        from pip.index import Link
+
+        name, url, extras_override = parse_editable(
+            editable_req, default_vcs)
+        if url.startswith('file:'):
+            source_dir = url_to_path(url)
+        else:
+            source_dir = None
+
+        res = cls(name, comes_from, source_dir=source_dir,
+                  editable=True,
+                  link=Link(url),
+                  constraint=constraint,
+                  isolated=isolated,
+                  options=options if options else {},
+                  wheel_cache=wheel_cache)
+
+        if extras_override is not None:
+            res.extras = extras_override
+
+        return res
+
+    @classmethod
+    def from_line(
+            cls, name, comes_from=None, isolated=False, options=None,
+            wheel_cache=None, constraint=False):
+        """Creates an InstallRequirement from a name, which might be a
+        requirement, directory containing 'setup.py', filename, or URL.
+        """
+        from pip.index import Link
+
+        if is_url(name):
+            marker_sep = '; '
+        else:
+            marker_sep = ';'
+        if marker_sep in name:
+            name, markers = name.split(marker_sep, 1)
+            markers = markers.strip()
+            if not markers:
+                markers = None
+        else:
+            markers = None
+        name = name.strip()
+        req = None
+        path = os.path.normpath(os.path.abspath(name))
+        link = None
+        extras = None
+
+        if is_url(name):
+            link = Link(name)
+        else:
+            p, extras = _strip_extras(path)
+            if (os.path.isdir(p) and
+                    (os.path.sep in name or name.startswith('.'))):
+
+                if not is_installable_dir(p):
+                    raise InstallationError(
+                        "Directory %r is not installable. File 'setup.py' "
+                        "not found." % name
+                    )
+                link = Link(path_to_url(p))
+            elif is_archive_file(p):
+                if not os.path.isfile(p):
+                    logger.warning(
+                        'Requirement %r looks like a filename, but the '
+                        'file does not exist',
+                        name
+                    )
+                link = Link(path_to_url(p))
+
+        # it's a local file, dir, or url
+        if link:
+            # Handle relative file URLs
+            if link.scheme == 'file' and re.search(r'\.\./', link.url):
+                link = Link(
+                    path_to_url(os.path.normpath(os.path.abspath(link.path))))
+            # wheel file
+            if link.is_wheel:
+                wheel = Wheel(link.filename)  # can raise InvalidWheelFilename
+                if not wheel.supported():
+                    raise UnsupportedWheel(
+                        "%s is not a supported wheel on this platform." %
+                        wheel.filename
+                    )
+                req = "%s==%s" % (wheel.name, wheel.version)
+            else:
+                # set the req to the egg fragment. when it's not there, this
+                # will become an 'unnamed' requirement
+                req = link.egg_fragment
+
+        # a requirement specifier
+        else:
+            req = name
+
+        options = options if options else {}
+        res = cls(req, comes_from, link=link, markers=markers,
+                  isolated=isolated, options=options,
+                  wheel_cache=wheel_cache, constraint=constraint)
+
+        if extras:
+            res.extras = Requirement('placeholder' + extras).extras
+
+        return res
+
+    def __str__(self):
+        if self.req:
+            s = str(self.req)
+            if self.link:
+                s += ' from %s' % self.link.url
+        else:
+            s = self.link.url if self.link else None
+        if self.satisfied_by is not None:
+            s += ' in %s' % display_path(self.satisfied_by.location)
+        if self.comes_from:
+            if isinstance(self.comes_from, six.string_types):
+                comes_from = self.comes_from
+            else:
+                comes_from = self.comes_from.from_path()
+            if comes_from:
+                s += ' (from %s)' % comes_from
+        return s
+
+    def __repr__(self):
+        return '<%s object: %s editable=%r>' % (
+            self.__class__.__name__, str(self), self.editable)
+
+    def populate_link(self, finder, upgrade, require_hashes):
+        """Ensure that if a link can be found for this, that it is found.
+
+        Note that self.link may still be None - if upgrade is False and the
+        requirement is already installed.
+
+        If require_hashes is True, don't use the wheel cache, because cached
+        wheels, always built locally, have different hashes than the files
+        downloaded from the index server and thus throw false hash mismatches.
+        Furthermore, cached wheels at present have non-deterministic contents
+        due to file modification times.
+        """
+        if self.link is None:
+            self.link = finder.find_requirement(self, upgrade)
+        if self._wheel_cache is not None and not require_hashes:
+            old_link = self.link
+            self.link = self._wheel_cache.cached_wheel(self.link, self.name)
+            if old_link != self.link:
+                logger.debug('Using cached wheel link: %s', self.link)
+
+    @property
+    def specifier(self):
+        return self.req.specifier
+
+    @property
+    def is_pinned(self):
+        """Return whether I am pinned to an exact version.
+
+        For example, some-package==1.2 is pinned; some-package>1.2 is not.
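+        The PEP 440 arbitrary-equality operator also counts as pinned, so
+        some-package===1.2 qualifies as well.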
+ """ + specifiers = self.specifier + return (len(specifiers) == 1 and + next(iter(specifiers)).operator in ('==', '===')) + + def from_path(self): + if self.req is None: + return None + s = str(self.req) + if self.comes_from: + if isinstance(self.comes_from, six.string_types): + comes_from = self.comes_from + else: + comes_from = self.comes_from.from_path() + if comes_from: + s += '->' + comes_from + return s + + def build_location(self, build_dir): + if self._temp_build_dir is not None: + return self._temp_build_dir + if self.req is None: + # for requirement via a path to a directory: the name of the + # package is not available yet so we create a temp directory + # Once run_egg_info will have run, we'll be able + # to fix it via _correct_build_location + self._temp_build_dir = tempfile.mkdtemp('-build', 'pip-') + self._ideal_build_dir = build_dir + return self._temp_build_dir + if self.editable: + name = self.name.lower() + else: + name = self.name + # FIXME: Is there a better place to create the build_dir? (hg and bzr + # need this) + if not os.path.exists(build_dir): + logger.debug('Creating directory %s', build_dir) + _make_build_dir(build_dir) + return os.path.join(build_dir, name) + + def _correct_build_location(self): + """Move self._temp_build_dir to self._ideal_build_dir/self.req.name + + For some requirements (e.g. a path to a directory), the name of the + package is not available until we run egg_info, so the build_location + will return a temporary directory and store the _ideal_build_dir. + + This is only called by self.egg_info_path to fix the temporary build + directory. + """ + if self.source_dir is not None: + return + assert self.req is not None + assert self._temp_build_dir + assert self._ideal_build_dir + old_location = self._temp_build_dir + self._temp_build_dir = None + new_location = self.build_location(self._ideal_build_dir) + if os.path.exists(new_location): + raise InstallationError( + 'A package already exists in %s; please remove it to continue' + % display_path(new_location)) + logger.debug( + 'Moving package %s from %s to new location %s', + self, display_path(old_location), display_path(new_location), + ) + shutil.move(old_location, new_location) + self._temp_build_dir = new_location + self._ideal_build_dir = None + self.source_dir = new_location + self._egg_info_path = None + + @property + def name(self): + if self.req is None: + return None + return native_str(pkg_resources.safe_name(self.req.name)) + + @property + def setup_py_dir(self): + return os.path.join( + self.source_dir, + self.link and self.link.subdirectory_fragment or '') + + @property + def setup_py(self): + assert self.source_dir, "No source dir for %s" % self + try: + import setuptools # noqa + except ImportError: + if get_installed_version('setuptools') is None: + add_msg = "Please install setuptools." 
+ else: + add_msg = traceback.format_exc() + # Setuptools is not available + raise InstallationError( + "Could not import setuptools which is required to " + "install from a source distribution.\n%s" % add_msg + ) + + setup_py = os.path.join(self.setup_py_dir, 'setup.py') + + # Python2 __file__ should not be unicode + if six.PY2 and isinstance(setup_py, six.text_type): + setup_py = setup_py.encode(sys.getfilesystemencoding()) + + return setup_py + + def run_egg_info(self): + assert self.source_dir + if self.name: + logger.debug( + 'Running setup.py (path:%s) egg_info for package %s', + self.setup_py, self.name, + ) + else: + logger.debug( + 'Running setup.py (path:%s) egg_info for package from %s', + self.setup_py, self.link, + ) + + with indent_log(): + script = SETUPTOOLS_SHIM % self.setup_py + base_cmd = [sys.executable, '-c', script] + if self.isolated: + base_cmd += ["--no-user-cfg"] + egg_info_cmd = base_cmd + ['egg_info'] + # We can't put the .egg-info files at the root, because then the + # source code will be mistaken for an installed egg, causing + # problems + if self.editable: + egg_base_option = [] + else: + egg_info_dir = os.path.join(self.setup_py_dir, 'pip-egg-info') + ensure_dir(egg_info_dir) + egg_base_option = ['--egg-base', 'pip-egg-info'] + call_subprocess( + egg_info_cmd + egg_base_option, + cwd=self.setup_py_dir, + show_stdout=False, + command_level=logging.DEBUG, + command_desc='python setup.py egg_info') + + if not self.req: + if isinstance( + pkg_resources.parse_version(self.pkg_info()["Version"]), + Version): + op = "==" + else: + op = "===" + self.req = Requirement( + "".join([ + self.pkg_info()["Name"], + op, + self.pkg_info()["Version"], + ]) + ) + self._correct_build_location() + else: + metadata_name = canonicalize_name(self.pkg_info()["Name"]) + if canonicalize_name(self.req.name) != metadata_name: + logger.warning( + 'Running setup.py (path:%s) egg_info for package %s ' + 'produced metadata for project name %s. Fix your ' + '#egg=%s fragments.', + self.setup_py, self.name, metadata_name, self.name + ) + self.req = Requirement(metadata_name) + + def egg_info_data(self, filename): + if self.satisfied_by is not None: + if not self.satisfied_by.has_metadata(filename): + return None + return self.satisfied_by.get_metadata(filename) + assert self.source_dir + filename = self.egg_info_path(filename) + if not os.path.exists(filename): + return None + data = read_text_file(filename) + return data + + def egg_info_path(self, filename): + if self._egg_info_path is None: + if self.editable: + base = self.source_dir + else: + base = os.path.join(self.setup_py_dir, 'pip-egg-info') + filenames = os.listdir(base) + if self.editable: + filenames = [] + for root, dirs, files in os.walk(base): + for dir in vcs.dirnames: + if dir in dirs: + dirs.remove(dir) + # Iterate over a copy of ``dirs``, since mutating + # a list while iterating over it can cause trouble. + # (See https://github.com/pypa/pip/pull/462.) 
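+                    # os.walk honours in-place edits to ``dirs``, which is
+                    # what lets the removals below prune the walk.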
+ for dir in list(dirs): + # Don't search in anything that looks like a virtualenv + # environment + if ( + os.path.exists( + os.path.join(root, dir, 'bin', 'python') + ) or + os.path.exists( + os.path.join( + root, dir, 'Scripts', 'Python.exe' + ) + )): + dirs.remove(dir) + # Also don't search through tests + elif dir == 'test' or dir == 'tests': + dirs.remove(dir) + filenames.extend([os.path.join(root, dir) + for dir in dirs]) + filenames = [f for f in filenames if f.endswith('.egg-info')] + + if not filenames: + raise InstallationError( + 'No files/directories in %s (from %s)' % (base, filename) + ) + assert filenames, \ + "No files/directories in %s (from %s)" % (base, filename) + + # if we have more than one match, we pick the toplevel one. This + # can easily be the case if there is a dist folder which contains + # an extracted tarball for testing purposes. + if len(filenames) > 1: + filenames.sort( + key=lambda x: x.count(os.path.sep) + + (os.path.altsep and x.count(os.path.altsep) or 0) + ) + self._egg_info_path = os.path.join(base, filenames[0]) + return os.path.join(self._egg_info_path, filename) + + def pkg_info(self): + p = FeedParser() + data = self.egg_info_data('PKG-INFO') + if not data: + logger.warning( + 'No PKG-INFO file found in %s', + display_path(self.egg_info_path('PKG-INFO')), + ) + p.feed(data or '') + return p.close() + + _requirements_section_re = re.compile(r'\[(.*?)\]') + + @property + def installed_version(self): + return get_installed_version(self.name) + + def assert_source_matches_version(self): + assert self.source_dir + version = self.pkg_info()['version'] + if self.req.specifier and version not in self.req.specifier: + logger.warning( + 'Requested %s, but installing version %s', + self, + self.installed_version, + ) + else: + logger.debug( + 'Source in %s has version %s, which satisfies requirement %s', + display_path(self.source_dir), + version, + self, + ) + + def update_editable(self, obtain=True): + if not self.link: + logger.debug( + "Cannot update repository at %s; repository location is " + "unknown", + self.source_dir, + ) + return + assert self.editable + assert self.source_dir + if self.link.scheme == 'file': + # Static paths don't get updated + return + assert '+' in self.link.url, "bad url: %r" % self.link.url + if not self.update: + return + vc_type, url = self.link.url.split('+', 1) + backend = vcs.get_backend(vc_type) + if backend: + vcs_backend = backend(self.link.url) + if obtain: + vcs_backend.obtain(self.source_dir) + else: + vcs_backend.export(self.source_dir) + else: + assert 0, ( + 'Unexpected version control type (in %s): %s' + % (self.link, vc_type)) + + def uninstall(self, auto_confirm=False): + """ + Uninstall the distribution currently satisfying this requirement. + + Prompts before removing or modifying files unless + ``auto_confirm`` is True. + + Refuses to delete or modify files outside of ``sys.prefix`` - + thus uninstallation within a virtual environment can only + modify that virtual environment, even if the virtualenv is + linked to global site-packages. 
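+
+        Whether a distribution counts as local is decided below via
+        dist_is_local() and get_stdlib(); stdlib and out-of-environment
+        installs are logged and skipped rather than removed.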
+ + """ + if not self.check_if_exists(): + raise UninstallationError( + "Cannot uninstall requirement %s, not installed" % (self.name,) + ) + dist = self.satisfied_by or self.conflicts_with + + dist_path = normalize_path(dist.location) + if not dist_is_local(dist): + logger.info( + "Not uninstalling %s at %s, outside environment %s", + dist.key, + dist_path, + sys.prefix, + ) + self.nothing_to_uninstall = True + return + + if dist_path in get_stdlib(): + logger.info( + "Not uninstalling %s at %s, as it is in the standard library.", + dist.key, + dist_path, + ) + self.nothing_to_uninstall = True + return + + paths_to_remove = UninstallPathSet(dist) + develop_egg_link = egg_link_path(dist) + develop_egg_link_egg_info = '{0}.egg-info'.format( + pkg_resources.to_filename(dist.project_name)) + egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info) + # Special case for distutils installed package + distutils_egg_info = getattr(dist._provider, 'path', None) + + # Uninstall cases order do matter as in the case of 2 installs of the + # same package, pip needs to uninstall the currently detected version + if (egg_info_exists and dist.egg_info.endswith('.egg-info') and + not dist.egg_info.endswith(develop_egg_link_egg_info)): + # if dist.egg_info.endswith(develop_egg_link_egg_info), we + # are in fact in the develop_egg_link case + paths_to_remove.add(dist.egg_info) + if dist.has_metadata('installed-files.txt'): + for installed_file in dist.get_metadata( + 'installed-files.txt').splitlines(): + path = os.path.normpath( + os.path.join(dist.egg_info, installed_file) + ) + paths_to_remove.add(path) + # FIXME: need a test for this elif block + # occurs with --single-version-externally-managed/--record outside + # of pip + elif dist.has_metadata('top_level.txt'): + if dist.has_metadata('namespace_packages.txt'): + namespaces = dist.get_metadata('namespace_packages.txt') + else: + namespaces = [] + for top_level_pkg in [ + p for p + in dist.get_metadata('top_level.txt').splitlines() + if p and p not in namespaces]: + path = os.path.join(dist.location, top_level_pkg) + paths_to_remove.add(path) + paths_to_remove.add(path + '.py') + paths_to_remove.add(path + '.pyc') + paths_to_remove.add(path + '.pyo') + + elif distutils_egg_info: + warnings.warn( + "Uninstalling a distutils installed project ({0}) has been " + "deprecated and will be removed in a future version. This is " + "due to the fact that uninstalling a distutils project will " + "only partially uninstall the project.".format(self.name), + RemovedInPip10Warning, + ) + paths_to_remove.add(distutils_egg_info) + + elif dist.location.endswith('.egg'): + # package installed by easy_install + # We cannot match on dist.egg_name because it can slightly vary + # i.e. 
setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg + paths_to_remove.add(dist.location) + easy_install_egg = os.path.split(dist.location)[1] + easy_install_pth = os.path.join(os.path.dirname(dist.location), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg) + + elif develop_egg_link: + # develop egg + with open(develop_egg_link, 'r') as fh: + link_pointer = os.path.normcase(fh.readline().strip()) + assert (link_pointer == dist.location), ( + 'Egg-link %s does not match installed location of %s ' + '(at %s)' % (link_pointer, self.name, dist.location) + ) + paths_to_remove.add(develop_egg_link) + easy_install_pth = os.path.join(os.path.dirname(develop_egg_link), + 'easy-install.pth') + paths_to_remove.add_pth(easy_install_pth, dist.location) + + elif egg_info_exists and dist.egg_info.endswith('.dist-info'): + for path in pip.wheel.uninstallation_paths(dist): + paths_to_remove.add(path) + + else: + logger.debug( + 'Not sure how to uninstall: %s - Check: %s', + dist, dist.location) + + # find distutils scripts= scripts + if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'): + for script in dist.metadata_listdir('scripts'): + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + paths_to_remove.add(os.path.join(bin_dir, script)) + if WINDOWS: + paths_to_remove.add(os.path.join(bin_dir, script) + '.bat') + + # find console_scripts + if dist.has_metadata('entry_points.txt'): + if six.PY2: + options = {} + else: + options = {"delimiters": ('=', )} + config = configparser.SafeConfigParser(**options) + config.readfp( + FakeFile(dist.get_metadata_lines('entry_points.txt')) + ) + if config.has_section('console_scripts'): + for name, value in config.items('console_scripts'): + if dist_in_usersite(dist): + bin_dir = bin_user + else: + bin_dir = bin_py + paths_to_remove.add(os.path.join(bin_dir, name)) + if WINDOWS: + paths_to_remove.add( + os.path.join(bin_dir, name) + '.exe' + ) + paths_to_remove.add( + os.path.join(bin_dir, name) + '.exe.manifest' + ) + paths_to_remove.add( + os.path.join(bin_dir, name) + '-script.py' + ) + + paths_to_remove.remove(auto_confirm) + self.uninstalled = paths_to_remove + + def rollback_uninstall(self): + if self.uninstalled: + self.uninstalled.rollback() + else: + logger.error( + "Can't rollback %s, nothing uninstalled.", self.name, + ) + + def commit_uninstall(self): + if self.uninstalled: + self.uninstalled.commit() + elif not self.nothing_to_uninstall: + logger.error( + "Can't commit %s, nothing uninstalled.", self.name, + ) + + def archive(self, build_dir): + assert self.source_dir + create_archive = True + archive_name = '%s-%s.zip' % (self.name, self.pkg_info()["version"]) + archive_path = os.path.join(build_dir, archive_name) + if os.path.exists(archive_path): + response = ask_path_exists( + 'The file %s exists. 
(i)gnore, (w)ipe, (b)ackup ' % + display_path(archive_path), ('i', 'w', 'b')) + if response == 'i': + create_archive = False + elif response == 'w': + logger.warning('Deleting %s', display_path(archive_path)) + os.remove(archive_path) + elif response == 'b': + dest_file = backup_dir(archive_path) + logger.warning( + 'Backing up %s to %s', + display_path(archive_path), + display_path(dest_file), + ) + shutil.move(archive_path, dest_file) + if create_archive: + zip = zipfile.ZipFile( + archive_path, 'w', zipfile.ZIP_DEFLATED, + allowZip64=True + ) + dir = os.path.normcase(os.path.abspath(self.setup_py_dir)) + for dirpath, dirnames, filenames in os.walk(dir): + if 'pip-egg-info' in dirnames: + dirnames.remove('pip-egg-info') + for dirname in dirnames: + dirname = os.path.join(dirpath, dirname) + name = self._clean_zip_name(dirname, dir) + zipdir = zipfile.ZipInfo(self.name + '/' + name + '/') + zipdir.external_attr = 0x1ED << 16 # 0o755 + zip.writestr(zipdir, '') + for filename in filenames: + if filename == PIP_DELETE_MARKER_FILENAME: + continue + filename = os.path.join(dirpath, filename) + name = self._clean_zip_name(filename, dir) + zip.write(filename, self.name + '/' + name) + zip.close() + logger.info('Saved %s', display_path(archive_path)) + + def _clean_zip_name(self, name, prefix): + assert name.startswith(prefix + os.path.sep), ( + "name %r doesn't start with prefix %r" % (name, prefix) + ) + name = name[len(prefix) + 1:] + name = name.replace(os.path.sep, '/') + return name + + def match_markers(self): + if self.markers is not None: + from packaging.markers import Marker + return Marker(self.markers).evaluate() + else: + return True + + def install(self, install_options, global_options=[], root=None, + prefix=None): + if self.editable: + self.install_editable( + install_options, global_options, prefix=prefix) + return + if self.is_wheel: + version = pip.wheel.wheel_version(self.source_dir) + pip.wheel.check_compatibility(version, self.name) + + self.move_wheel_files(self.source_dir, root=root, prefix=prefix) + self.install_succeeded = True + return + + # Extend the list of global and install options passed on to + # the setup.py call with the ones from the requirements file. + # Options specified in requirements file override those + # specified on the command line, since the last option given + # to setup.py is the one that is used. 
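+        # For example (hypothetical requirement name), a requirements-file
+        # line such as
+        #   somepkg --install-option="--prefix=/opt/x"
+        # puts '--prefix=/opt/x' into self.options['install_options'], and
+        # it is appended here after the command-line options.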
+        global_options += self.options.get('global_options', [])
+        install_options += self.options.get('install_options', [])
+
+        if self.isolated:
+            global_options = list(global_options) + ["--no-user-cfg"]
+
+        temp_location = tempfile.mkdtemp('-record', 'pip-')
+        record_filename = os.path.join(temp_location, 'install-record.txt')
+        try:
+            install_args = [sys.executable, "-u"]
+            install_args.append('-c')
+            install_args.append(SETUPTOOLS_SHIM % self.setup_py)
+            install_args += list(global_options) + \
+                ['install', '--record', record_filename]
+
+            if not self.as_egg:
+                install_args += ['--single-version-externally-managed']
+
+            if root is not None:
+                install_args += ['--root', root]
+            if prefix is not None:
+                install_args += ['--prefix', prefix]
+
+            if self.pycompile:
+                install_args += ["--compile"]
+            else:
+                install_args += ["--no-compile"]
+
+            if running_under_virtualenv():
+                py_ver_str = 'python' + sysconfig.get_python_version()
+                install_args += ['--install-headers',
+                                 os.path.join(sys.prefix, 'include', 'site',
+                                              py_ver_str, self.name)]
+            msg = 'Running setup.py install for %s' % (self.name,)
+            with open_spinner(msg) as spinner:
+                with indent_log():
+                    call_subprocess(
+                        install_args + install_options,
+                        cwd=self.setup_py_dir,
+                        show_stdout=False,
+                        spinner=spinner,
+                    )
+
+            if not os.path.exists(record_filename):
+                logger.debug('Record file %s not found', record_filename)
+                return
+            self.install_succeeded = True
+            if self.as_egg:
+                # there's no --always-unzip option we can pass to install
+                # command so we're unable to save the installed-files.txt
+                return
+
+            def prepend_root(path):
+                if root is None or not os.path.isabs(path):
+                    return path
+                else:
+                    return change_root(root, path)
+
+            with open(record_filename) as f:
+                for line in f:
+                    directory = os.path.dirname(line)
+                    if directory.endswith('.egg-info'):
+                        egg_info_dir = prepend_root(directory)
+                        break
+                else:
+                    logger.warning(
+                        'Could not find .egg-info directory in install record'
+                        ' for %s',
+                        self,
+                    )
+                    # FIXME: put the record somewhere
+                    # FIXME: should this be an error?
+                    return
+            new_lines = []
+            with open(record_filename) as f:
+                for line in f:
+                    filename = line.strip()
+                    if os.path.isdir(filename):
+                        filename += os.path.sep
+                    new_lines.append(
+                        os.path.relpath(
+                            prepend_root(filename), egg_info_dir)
+                    )
+            inst_files_path = os.path.join(egg_info_dir, 'installed-files.txt')
+            with open(inst_files_path, 'w') as f:
+                f.write('\n'.join(new_lines) + '\n')
+        finally:
+            if os.path.exists(record_filename):
+                os.remove(record_filename)
+            rmtree(temp_location)
+
+    def ensure_has_source_dir(self, parent_dir):
+        """Ensure that a source_dir is set.
+
+        This will create a temporary build dir if the name of the requirement
+        isn't known yet.
+
+        :param parent_dir: The ideal pip parent_dir for the source_dir.
+            Generally src_dir for editables and build_dir for sdists.
+ :return: self.source_dir + """ + if self.source_dir is None: + self.source_dir = self.build_location(parent_dir) + return self.source_dir + + def remove_temporary_source(self): + """Remove the source files from this requirement, if they are marked + for deletion""" + if self.source_dir and os.path.exists( + os.path.join(self.source_dir, PIP_DELETE_MARKER_FILENAME)): + logger.debug('Removing source in %s', self.source_dir) + rmtree(self.source_dir) + self.source_dir = None + if self._temp_build_dir and os.path.exists(self._temp_build_dir): + rmtree(self._temp_build_dir) + self._temp_build_dir = None + + def install_editable(self, install_options, + global_options=(), prefix=None): + logger.info('Running setup.py develop for %s', self.name) + + if self.isolated: + global_options = list(global_options) + ["--no-user-cfg"] + + if prefix: + prefix_param = ['--prefix={0}'.format(prefix)] + install_options = list(install_options) + prefix_param + + with indent_log(): + # FIXME: should we do --install-headers here too? + call_subprocess( + [ + sys.executable, + '-c', + SETUPTOOLS_SHIM % self.setup_py + ] + + list(global_options) + + ['develop', '--no-deps'] + + list(install_options), + + cwd=self.setup_py_dir, + show_stdout=False) + + self.install_succeeded = True + + def check_if_exists(self): + """Find an installed distribution that satisfies or conflicts + with this requirement, and set self.satisfied_by or + self.conflicts_with appropriately. + """ + if self.req is None: + return False + try: + # get_distribution() will resolve the entire list of requirements + # anyway, and we've already determined that we need the requirement + # in question, so strip the marker so that we don't try to + # evaluate it. + from packaging.requirements import Requirement + no_marker = Requirement(str(self.req)) + no_marker.marker = None + self.satisfied_by = pkg_resources.get_distribution(str(no_marker)) + except pkg_resources.DistributionNotFound: + return False + except pkg_resources.VersionConflict: + existing_dist = pkg_resources.get_distribution( + self.req.name + ) + if self.use_user_site: + if dist_in_usersite(existing_dist): + self.conflicts_with = existing_dist + elif (running_under_virtualenv() and + dist_in_site_packages(existing_dist)): + raise InstallationError( + "Will not install to the user site because it will " + "lack sys.path precedence to %s in %s" % + (existing_dist.project_name, existing_dist.location) + ) + else: + self.conflicts_with = existing_dist + return True + + @property + def is_wheel(self): + return self.link and self.link.is_wheel + + def move_wheel_files(self, wheeldir, root=None, prefix=None): + move_wheel_files( + self.name, self.req, wheeldir, + user=self.use_user_site, + home=self.target_dir, + root=root, + prefix=prefix, + pycompile=self.pycompile, + isolated=self.isolated, + ) + + def get_dist(self): + """Return a pkg_resources.Distribution built from self.egg_info_path""" + egg_info = self.egg_info_path('').rstrip('/') + base_dir = os.path.dirname(egg_info) + metadata = pkg_resources.PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + return pkg_resources.Distribution( + os.path.dirname(egg_info), + project_name=dist_name, + metadata=metadata) + + @property + def has_hash_options(self): + """Return whether any known-good hashes are specified as options. + + These activate --require-hashes mode; hashes specified as part of a + URL do not. 
+ + """ + return bool(self.options.get('hashes', {})) + + def hashes(self, trust_internet=True): + """Return a hash-comparer that considers my option- and URL-based + hashes to be known-good. + + Hashes in URLs--ones embedded in the requirements file, not ones + downloaded from an index server--are almost peers with ones from + flags. They satisfy --require-hashes (whether it was implicitly or + explicitly activated) but do not activate it. md5 and sha224 are not + allowed in flags, which should nudge people toward good algos. We + always OR all hashes together, even ones from URLs. + + :param trust_internet: Whether to trust URL-based (#md5=...) hashes + downloaded from the internet, as by populate_link() + + """ + good_hashes = self.options.get('hashes', {}).copy() + link = self.link if trust_internet else self.original_link + if link and link.hash: + good_hashes.setdefault(link.hash_name, []).append(link.hash) + return Hashes(good_hashes) + + +def _strip_postfix(req): + """ + Strip req postfix ( -dev, 0.2, etc ) + """ + # FIXME: use package_to_requirement? + match = re.search(r'^(.*?)(?:-dev|-\d.*)$', req) + if match: + # Strip off -dev, -0.2, etc. + req = match.group(1) + return req + + +def _build_req_from_url(url): + + parts = [p for p in url.split('#', 1)[0].split('/') if p] + + req = None + if len(parts) > 2 and parts[-2] in ('tags', 'branches', 'tag', 'branch'): + req = parts[-3] + elif len(parts) > 1 and parts[-1] == 'trunk': + req = parts[-2] + if req: + warnings.warn( + 'Sniffing the requirement name from the url is deprecated and ' + 'will be removed in the future. Please specify an #egg segment ' + 'instead.', RemovedInPip9Warning, + stacklevel=2) + return req + + +def parse_editable(editable_req, default_vcs=None): + """Parses an editable requirement into: + - a requirement name + - an URL + - extras + - editable options + Accepted requirements: + svn+http://blahblah@rev#egg=Foobar[baz]&subdirectory=version_subdir + .[some_extra] + """ + + from pip.index import Link + + url = editable_req + extras = None + + # If a file path is specified with extras, strip off the extras. + m = re.match(r'^(.+)(\[[^\]]+\])$', url) + if m: + url_no_extras = m.group(1) + extras = m.group(2) + else: + url_no_extras = url + + if os.path.isdir(url_no_extras): + if not os.path.exists(os.path.join(url_no_extras, 'setup.py')): + raise InstallationError( + "Directory %r is not installable. File 'setup.py' not found." 
% + url_no_extras + ) + # Treating it as code that has already been checked out + url_no_extras = path_to_url(url_no_extras) + + if url_no_extras.lower().startswith('file:'): + package_name = Link(url_no_extras).egg_fragment + if extras: + return ( + package_name, + url_no_extras, + Requirement("placeholder" + extras).extras, + ) + else: + return package_name, url_no_extras, None + + for version_control in vcs: + if url.lower().startswith('%s:' % version_control): + url = '%s+%s' % (version_control, url) + break + + if '+' not in url: + if default_vcs: + url = default_vcs + '+' + url + else: + raise InstallationError( + '%s should either be a path to a local project or a VCS url ' + 'beginning with svn+, git+, hg+, or bzr+' % + editable_req + ) + + vc_type = url.split('+', 1)[0].lower() + + if not vcs.get_backend(vc_type): + error_message = 'For --editable=%s only ' % editable_req + \ + ', '.join([backend.name + '+URL' for backend in vcs.backends]) + \ + ' is currently supported' + raise InstallationError(error_message) + + package_name = Link(url).egg_fragment + if not package_name: + package_name = _build_req_from_url(editable_req) + if not package_name: + raise InstallationError( + '--editable=%s is not the right format; it must have ' + '#egg=Package' % editable_req + ) + return _strip_postfix(package_name), url, None diff --git a/venv/lib/python3.5/site-packages/pip/req/req_set.py b/venv/lib/python3.5/site-packages/pip/req/req_set.py new file mode 100644 index 0000000..03338a7 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/req/req_set.py @@ -0,0 +1,746 @@ +from __future__ import absolute_import + +from collections import defaultdict +from itertools import chain +import logging +import os + +from pip._vendor import pkg_resources +from pip._vendor import requests + +from pip.compat import expanduser +from pip.download import (is_file_url, is_dir_url, is_vcs_url, url_to_path, + unpack_url) +from pip.exceptions import (InstallationError, BestVersionAlreadyInstalled, + DistributionNotFound, PreviousBuildDirError, + HashError, HashErrors, HashUnpinned, + DirectoryUrlHashUnsupported, VcsHashUnsupported) +from pip.req.req_install import InstallRequirement +from pip.utils import ( + display_path, dist_in_usersite, ensure_dir, normalize_path) +from pip.utils.hashes import MissingHashes +from pip.utils.logging import indent_log +from pip.vcs import vcs + + +logger = logging.getLogger(__name__) + + +class Requirements(object): + + def __init__(self): + self._keys = [] + self._dict = {} + + def keys(self): + return self._keys + + def values(self): + return [self._dict[key] for key in self._keys] + + def __contains__(self, item): + return item in self._keys + + def __setitem__(self, key, value): + if key not in self._keys: + self._keys.append(key) + self._dict[key] = value + + def __getitem__(self, key): + return self._dict[key] + + def __repr__(self): + values = ['%s: %s' % (repr(k), repr(self[k])) for k in self.keys()] + return 'Requirements({%s})' % ', '.join(values) + + +class DistAbstraction(object): + """Abstracts out the wheel vs non-wheel prepare_files logic. + + The requirements for anything installable are as follows: + - we must be able to determine the requirement name + (or we can't correctly handle the non-upgrade case). 
+ - we must be able to generate a list of run-time dependencies + without installing any additional packages (or we would + have to either burn time by doing temporary isolated installs + or alternatively violate pips 'don't start installing unless + all requirements are available' rule - neither of which are + desirable). + - for packages with setup requirements, we must also be able + to determine their requirements without installing additional + packages (for the same reason as run-time dependencies) + - we must be able to create a Distribution object exposing the + above metadata. + """ + + def __init__(self, req_to_install): + self.req_to_install = req_to_install + + def dist(self, finder): + """Return a setuptools Dist object.""" + raise NotImplementedError(self.dist) + + def prep_for_dist(self): + """Ensure that we can get a Dist for this requirement.""" + raise NotImplementedError(self.dist) + + +def make_abstract_dist(req_to_install): + """Factory to make an abstract dist object. + + Preconditions: Either an editable req with a source_dir, or satisfied_by or + a wheel link, or a non-editable req with a source_dir. + + :return: A concrete DistAbstraction. + """ + if req_to_install.editable: + return IsSDist(req_to_install) + elif req_to_install.link and req_to_install.link.is_wheel: + return IsWheel(req_to_install) + else: + return IsSDist(req_to_install) + + +class IsWheel(DistAbstraction): + + def dist(self, finder): + return list(pkg_resources.find_distributions( + self.req_to_install.source_dir))[0] + + def prep_for_dist(self): + # FIXME:https://github.com/pypa/pip/issues/1112 + pass + + +class IsSDist(DistAbstraction): + + def dist(self, finder): + dist = self.req_to_install.get_dist() + # FIXME: shouldn't be globally added: + if dist.has_metadata('dependency_links.txt'): + finder.add_dependency_links( + dist.get_metadata_lines('dependency_links.txt') + ) + return dist + + def prep_for_dist(self): + self.req_to_install.run_egg_info() + self.req_to_install.assert_source_matches_version() + + +class Installed(DistAbstraction): + + def dist(self, finder): + return self.req_to_install.satisfied_by + + def prep_for_dist(self): + pass + + +class RequirementSet(object): + + def __init__(self, build_dir, src_dir, download_dir, upgrade=False, + ignore_installed=False, as_egg=False, target_dir=None, + ignore_dependencies=False, force_reinstall=False, + use_user_site=False, session=None, pycompile=True, + isolated=False, wheel_download_dir=None, + wheel_cache=None, require_hashes=False): + """Create a RequirementSet. + + :param wheel_download_dir: Where still-packed .whl files should be + written to. If None they are written to the download_dir parameter. + Separate to download_dir to permit only keeping wheel archives for + pip wheel. + :param download_dir: Where still packed archives should be written to. + If None they are not saved, and are deleted immediately after + unpacking. + :param wheel_cache: The pip wheel cache, for passing to + InstallRequirement. + """ + if session is None: + raise TypeError( + "RequirementSet() missing 1 required keyword argument: " + "'session'" + ) + + self.build_dir = build_dir + self.src_dir = src_dir + # XXX: download_dir and wheel_download_dir overlap semantically and may + # be combined if we're willing to have non-wheel archives present in + # the wheelhouse output by 'pip wheel'. 
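+        # Hedged example (editor's note): 'pip wheel --wheel-dir wheelhouse'
+        # passes wheel_download_dir='wheelhouse' so finished .whl files are
+        # kept there, while a plain 'pip install' leaves both directories
+        # unset and unpacked sources are deleted after the build.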
+ self.download_dir = download_dir + self.upgrade = upgrade + self.ignore_installed = ignore_installed + self.force_reinstall = force_reinstall + self.requirements = Requirements() + # Mapping of alias: real_name + self.requirement_aliases = {} + self.unnamed_requirements = [] + self.ignore_dependencies = ignore_dependencies + self.successfully_downloaded = [] + self.successfully_installed = [] + self.reqs_to_cleanup = [] + self.as_egg = as_egg + self.use_user_site = use_user_site + self.target_dir = target_dir # set from --target option + self.session = session + self.pycompile = pycompile + self.isolated = isolated + if wheel_download_dir: + wheel_download_dir = normalize_path(wheel_download_dir) + self.wheel_download_dir = wheel_download_dir + self._wheel_cache = wheel_cache + self.require_hashes = require_hashes + # Maps from install_req -> dependencies_of_install_req + self._dependencies = defaultdict(list) + + def __str__(self): + reqs = [req for req in self.requirements.values() + if not req.comes_from] + reqs.sort(key=lambda req: req.name.lower()) + return ' '.join([str(req.req) for req in reqs]) + + def __repr__(self): + reqs = [req for req in self.requirements.values()] + reqs.sort(key=lambda req: req.name.lower()) + reqs_str = ', '.join([str(req.req) for req in reqs]) + return ('<%s object; %d requirement(s): %s>' + % (self.__class__.__name__, len(reqs), reqs_str)) + + def add_requirement(self, install_req, parent_req_name=None): + """Add install_req as a requirement to install. + + :param parent_req_name: The name of the requirement that needed this + added. The name is used because when multiple unnamed requirements + resolve to the same name, we could otherwise end up with dependency + links that point outside the Requirements set. parent_req must + already be added. Note that None implies that this is a user + supplied requirement, vs an inferred one. + :return: Additional requirements to scan. That is either [] if + the requirement is not applicable, or [install_req] if the + requirement is applicable and has just been added. + """ + name = install_req.name + if not install_req.match_markers(): + logger.warning("Ignoring %s: markers %r don't match your " + "environment", install_req.name, + install_req.markers) + return [] + + install_req.as_egg = self.as_egg + install_req.use_user_site = self.use_user_site + install_req.target_dir = self.target_dir + install_req.pycompile = self.pycompile + if not name: + # url or path requirement w/o an egg fragment + self.unnamed_requirements.append(install_req) + return [install_req] + else: + try: + existing_req = self.get_requirement(name) + except KeyError: + existing_req = None + if (parent_req_name is None and existing_req and not + existing_req.constraint and + existing_req.extras == install_req.extras and not + existing_req.req.specifier == install_req.req.specifier): + raise InstallationError( + 'Double requirement given: %s (already in %s, name=%r)' + % (install_req, existing_req, name)) + if not existing_req: + # Add requirement + self.requirements[name] = install_req + # FIXME: what about other normalizations? E.g., _ vs. -? + if name.lower() != name: + self.requirement_aliases[name.lower()] = name + result = [install_req] + else: + # Assume there's no need to scan, and that we've already + # encountered this for scanning. 
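+                # Worked example (editor's note): for 'pip install requests
+                # requests' the second occurrence hits this branch, reuses
+                # existing_req, and contributes nothing new to scan.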
+ result = [] + if not install_req.constraint and existing_req.constraint: + if (install_req.link and not (existing_req.link and + install_req.link.path == existing_req.link.path)): + self.reqs_to_cleanup.append(install_req) + raise InstallationError( + "Could not satisfy constraints for '%s': " + "installation from path or url cannot be " + "constrained to a version" % name) + # If we're now installing a constraint, mark the existing + # object for real installation. + existing_req.constraint = False + existing_req.extras = tuple( + sorted(set(existing_req.extras).union( + set(install_req.extras)))) + logger.debug("Setting %s extras to: %s", + existing_req, existing_req.extras) + # And now we need to scan this. + result = [existing_req] + # Canonicalise to the already-added object for the backref + # check below. + install_req = existing_req + if parent_req_name: + parent_req = self.get_requirement(parent_req_name) + self._dependencies[parent_req].append(install_req) + return result + + def has_requirement(self, project_name): + name = project_name.lower() + if (name in self.requirements and + not self.requirements[name].constraint or + name in self.requirement_aliases and + not self.requirements[self.requirement_aliases[name]].constraint): + return True + return False + + @property + def has_requirements(self): + return list(req for req in self.requirements.values() if not + req.constraint) or self.unnamed_requirements + + @property + def is_download(self): + if self.download_dir: + self.download_dir = expanduser(self.download_dir) + if os.path.exists(self.download_dir): + return True + else: + logger.critical('Could not find download directory') + raise InstallationError( + "Could not find or access download directory '%s'" + % display_path(self.download_dir)) + return False + + def get_requirement(self, project_name): + for name in project_name, project_name.lower(): + if name in self.requirements: + return self.requirements[name] + if name in self.requirement_aliases: + return self.requirements[self.requirement_aliases[name]] + raise KeyError("No project with the name %r" % project_name) + + def uninstall(self, auto_confirm=False): + for req in self.requirements.values(): + if req.constraint: + continue + req.uninstall(auto_confirm=auto_confirm) + req.commit_uninstall() + + def prepare_files(self, finder): + """ + Prepare process. Create temp directories, download and/or unpack files. + """ + # make the wheelhouse + if self.wheel_download_dir: + ensure_dir(self.wheel_download_dir) + + # If any top-level requirement has a hash specified, enter + # hash-checking mode, which requires hashes from all. + root_reqs = self.unnamed_requirements + self.requirements.values() + require_hashes = (self.require_hashes or + any(req.has_hash_options for req in root_reqs)) + if require_hashes and self.as_egg: + raise InstallationError( + '--egg is not allowed with --require-hashes mode, since it ' + 'delegates dependency resolution to setuptools and could thus ' + 'result in installation of unhashed packages.') + + # Actually prepare the files, and collect any exceptions. Most hash + # exceptions cannot be checked ahead of time, because + # req.populate_link() needs to be called before we can make decisions + # based on link type. 
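+        # Hedged example (editor's note): one line such as
+        #     FooProject==1.2 --hash=sha256:<digest>
+        # in a requirements file turns require_hashes on for the whole set,
+        # exactly as the --require-hashes flag would.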
+        discovered_reqs = []
+        hash_errors = HashErrors()
+        for req in chain(root_reqs, discovered_reqs):
+            try:
+                discovered_reqs.extend(self._prepare_file(
+                    finder,
+                    req,
+                    require_hashes=require_hashes,
+                    ignore_dependencies=self.ignore_dependencies))
+            except HashError as exc:
+                exc.req = req
+                hash_errors.append(exc)
+
+        if hash_errors:
+            raise hash_errors
+
+    def _check_skip_installed(self, req_to_install, finder):
+        """Check if req_to_install should be skipped.
+
+        This will check if the req is installed, and whether we should upgrade
+        or reinstall it, taking into account all the relevant user options.
+
+        After calling this req_to_install will only have satisfied_by set to
+        None if the req_to_install is to be upgraded/reinstalled etc. Any
+        other value will be a dist recording the current thing installed that
+        satisfies the requirement.
+
+        Note that for vcs urls and the like we can't assess skipping in this
+        routine - we simply identify that we need to pull the thing down,
+        then later on it is pulled down and introspected to assess upgrade/
+        reinstalls etc.
+
+        :return: A text reason for why it was skipped, or None.
+        """
+        # Check whether to upgrade/reinstall this req or not.
+        req_to_install.check_if_exists()
+        if req_to_install.satisfied_by:
+            skip_reason = 'satisfied (use --upgrade to upgrade)'
+            if self.upgrade:
+                best_installed = False
+                # For link based requirements we have to pull the
+                # tree down and inspect to assess the version #, so
+                # it's handled way down.
+                if not (self.force_reinstall or req_to_install.link):
+                    try:
+                        finder.find_requirement(req_to_install, self.upgrade)
+                    except BestVersionAlreadyInstalled:
+                        skip_reason = 'up-to-date'
+                        best_installed = True
+                    except DistributionNotFound:
+                        # No distribution found, so we squash the
+                        # error - it will be raised later when we
+                        # re-try the install.
+                        # Why don't we just raise here?
+                        pass
+
+                if not best_installed:
+                    # don't uninstall conflict if user install and
+                    # conflict is not user install
+                    if not (self.use_user_site and not
+                            dist_in_usersite(req_to_install.satisfied_by)):
+                        req_to_install.conflicts_with = \
+                            req_to_install.satisfied_by
+                    req_to_install.satisfied_by = None
+            return skip_reason
+        else:
+            return None
+
+    def _prepare_file(self,
+                      finder,
+                      req_to_install,
+                      require_hashes=False,
+                      ignore_dependencies=False):
+        """Prepare a single requirement.
+
+        :return: A list of additional InstallRequirements to also install.
+        """
+        # Tell user what we are doing for this requirement:
+        # obtain (editable), skipping, processing (local url), collecting
+        # (remote url or package name)
+        if req_to_install.constraint or req_to_install.prepared:
+            return []
+
+        req_to_install.prepared = True
+
+        # ###################### #
+        # # print log messages # #
+        # ###################### #
+        if req_to_install.editable:
+            logger.info('Obtaining %s', req_to_install)
+        else:
+            # satisfied_by is only evaluated by calling _check_skip_installed,
+            # so it must be None here.
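+            # (Editor's note) e.g. for an already-installed 'requests',
+            # _check_skip_installed below stores the installed dist on
+            # satisfied_by and returns a human-readable skip reason.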
+ assert req_to_install.satisfied_by is None + if not self.ignore_installed: + skip_reason = self._check_skip_installed( + req_to_install, finder) + + if req_to_install.satisfied_by: + assert skip_reason is not None, ( + '_check_skip_installed returned None but ' + 'req_to_install.satisfied_by is set to %r' + % (req_to_install.satisfied_by,)) + logger.info( + 'Requirement already %s: %s', skip_reason, + req_to_install) + else: + if (req_to_install.link and + req_to_install.link.scheme == 'file'): + path = url_to_path(req_to_install.link.url) + logger.info('Processing %s', display_path(path)) + else: + logger.info('Collecting %s', req_to_install) + + with indent_log(): + # ################################ # + # # vcs update or unpack archive # # + # ################################ # + if req_to_install.editable: + if require_hashes: + raise InstallationError( + 'The editable requirement %s cannot be installed when ' + 'requiring hashes, because there is no single file to ' + 'hash.' % req_to_install) + req_to_install.ensure_has_source_dir(self.src_dir) + req_to_install.update_editable(not self.is_download) + abstract_dist = make_abstract_dist(req_to_install) + abstract_dist.prep_for_dist() + if self.is_download: + req_to_install.archive(self.download_dir) + elif req_to_install.satisfied_by: + if require_hashes: + logger.debug( + 'Since it is already installed, we are trusting this ' + 'package without checking its hash. To ensure a ' + 'completely repeatable environment, install into an ' + 'empty virtualenv.') + abstract_dist = Installed(req_to_install) + else: + # @@ if filesystem packages are not marked + # editable in a req, a non deterministic error + # occurs when the script attempts to unpack the + # build directory + req_to_install.ensure_has_source_dir(self.build_dir) + # If a checkout exists, it's unwise to keep going. version + # inconsistencies are logged later, but do not fail the + # installation. + # FIXME: this won't upgrade when there's an existing + # package unpacked in `req_to_install.source_dir` + if os.path.exists( + os.path.join(req_to_install.source_dir, 'setup.py')): + raise PreviousBuildDirError( + "pip can't proceed with requirements '%s' due to a" + " pre-existing build directory (%s). This is " + "likely due to a previous installation that failed" + ". pip is being responsible and not assuming it " + "can delete this. Please delete it and try again." + % (req_to_install, req_to_install.source_dir) + ) + req_to_install.populate_link( + finder, self.upgrade, require_hashes) + # We can't hit this spot and have populate_link return None. + # req_to_install.satisfied_by is None here (because we're + # guarded) and upgrade has no impact except when satisfied_by + # is not None. + # Then inside find_requirement existing_applicable -> False + # If no new versions are found, DistributionNotFound is raised, + # otherwise a result is guaranteed. + assert req_to_install.link + link = req_to_install.link + + # Now that we have the real link, we can tell what kind of + # requirements we have and raise some more informative errors + # than otherwise. (For example, we can raise VcsHashUnsupported + # for a VCS URL rather than HashMissing.) + if require_hashes: + # We could check these first 2 conditions inside + # unpack_url and save repetition of conditions, but then + # we would report less-useful error messages for + # unhashable requirements, complaining that there's no + # hash provided. 
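+                    # (Editor's note) e.g. 'git+https://host/repo#egg=Foo'
+                    # has no single hashable artifact, so it is rejected
+                    # here with VcsHashUnsupported instead of a less
+                    # helpful HashMissing later on.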
+ if is_vcs_url(link): + raise VcsHashUnsupported() + elif is_file_url(link) and is_dir_url(link): + raise DirectoryUrlHashUnsupported() + if (not req_to_install.original_link and + not req_to_install.is_pinned): + # Unpinned packages are asking for trouble when a new + # version is uploaded. This isn't a security check, but + # it saves users a surprising hash mismatch in the + # future. + # + # file:/// URLs aren't pinnable, so don't complain + # about them not being pinned. + raise HashUnpinned() + hashes = req_to_install.hashes( + trust_internet=not require_hashes) + if require_hashes and not hashes: + # Known-good hashes are missing for this requirement, so + # shim it with a facade object that will provoke hash + # computation and then raise a HashMissing exception + # showing the user what the hash should be. + hashes = MissingHashes() + + try: + download_dir = self.download_dir + # We always delete unpacked sdists after pip ran. + autodelete_unpacked = True + if req_to_install.link.is_wheel \ + and self.wheel_download_dir: + # when doing 'pip wheel` we download wheels to a + # dedicated dir. + download_dir = self.wheel_download_dir + if req_to_install.link.is_wheel: + if download_dir: + # When downloading, we only unpack wheels to get + # metadata. + autodelete_unpacked = True + else: + # When installing a wheel, we use the unpacked + # wheel. + autodelete_unpacked = False + unpack_url( + req_to_install.link, req_to_install.source_dir, + download_dir, autodelete_unpacked, + session=self.session, hashes=hashes) + except requests.HTTPError as exc: + logger.critical( + 'Could not install requirement %s because ' + 'of error %s', + req_to_install, + exc, + ) + raise InstallationError( + 'Could not install requirement %s because ' + 'of HTTP error %s for URL %s' % + (req_to_install, exc, req_to_install.link) + ) + abstract_dist = make_abstract_dist(req_to_install) + abstract_dist.prep_for_dist() + if self.is_download: + # Make a .zip of the source_dir we already created. + if req_to_install.link.scheme in vcs.all_schemes: + req_to_install.archive(self.download_dir) + # req_to_install.req is only avail after unpack for URL + # pkgs repeat check_if_exists to uninstall-on-upgrade + # (#14) + if not self.ignore_installed: + req_to_install.check_if_exists() + if req_to_install.satisfied_by: + if self.upgrade or self.ignore_installed: + # don't uninstall conflict if user install and + # conflict is not user install + if not (self.use_user_site and not + dist_in_usersite( + req_to_install.satisfied_by)): + req_to_install.conflicts_with = \ + req_to_install.satisfied_by + req_to_install.satisfied_by = None + else: + logger.info( + 'Requirement already satisfied (use ' + '--upgrade to upgrade): %s', + req_to_install, + ) + + # ###################### # + # # parse dependencies # # + # ###################### # + dist = abstract_dist.dist(finder) + more_reqs = [] + + def add_req(subreq): + sub_install_req = InstallRequirement( + str(subreq), + req_to_install, + isolated=self.isolated, + wheel_cache=self._wheel_cache, + ) + more_reqs.extend(self.add_requirement( + sub_install_req, req_to_install.name)) + + # We add req_to_install before its dependencies, so that we + # can refer to it when adding dependencies. 
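+            # (Editor's sketch) e.g. installing 'flask' registers flask
+            # first; add_req() then adds werkzeug, jinja2, etc. with
+            # parent_req_name='flask', populating self._dependencies for
+            # the topological ordering done in _to_install().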
+ if not self.has_requirement(req_to_install.name): + # 'unnamed' requirements will get added here + self.add_requirement(req_to_install, None) + + if not ignore_dependencies: + if (req_to_install.extras): + logger.debug( + "Installing extra requirements: %r", + ','.join(req_to_install.extras), + ) + missing_requested = sorted( + set(req_to_install.extras) - set(dist.extras) + ) + for missing in missing_requested: + logger.warning( + '%s does not provide the extra \'%s\'', + dist, missing + ) + + available_requested = sorted( + set(dist.extras) & set(req_to_install.extras) + ) + for subreq in dist.requires(available_requested): + add_req(subreq) + + # cleanup tmp src + self.reqs_to_cleanup.append(req_to_install) + + if not req_to_install.editable and not req_to_install.satisfied_by: + # XXX: --no-install leads this to report 'Successfully + # downloaded' for only non-editable reqs, even though we took + # action on them. + self.successfully_downloaded.append(req_to_install) + + return more_reqs + + def cleanup_files(self): + """Clean up files, remove builds.""" + logger.debug('Cleaning up...') + with indent_log(): + for req in self.reqs_to_cleanup: + req.remove_temporary_source() + + def _to_install(self): + """Create the installation order. + + The installation order is topological - requirements are installed + before the requiring thing. We break cycles at an arbitrary point, + and make no other guarantees. + """ + # The current implementation, which we may change at any point + # installs the user specified things in the order given, except when + # dependencies must come earlier to achieve topological order. + order = [] + ordered_reqs = set() + + def schedule(req): + if req.satisfied_by or req in ordered_reqs: + return + if req.constraint: + return + ordered_reqs.add(req) + for dep in self._dependencies[req]: + schedule(dep) + order.append(req) + for install_req in self.requirements.values(): + schedule(install_req) + return order + + def install(self, install_options, global_options=(), *args, **kwargs): + """ + Install everything in this set (after having downloaded and unpacked + the packages) + """ + to_install = self._to_install() + + if to_install: + logger.info( + 'Installing collected packages: %s', + ', '.join([req.name for req in to_install]), + ) + + with indent_log(): + for requirement in to_install: + if requirement.conflicts_with: + logger.info( + 'Found existing installation: %s', + requirement.conflicts_with, + ) + with indent_log(): + requirement.uninstall(auto_confirm=True) + try: + requirement.install( + install_options, + global_options, + *args, + **kwargs + ) + except: + # if install did not succeed, rollback previous uninstall + if (requirement.conflicts_with and not + requirement.install_succeeded): + requirement.rollback_uninstall() + raise + else: + if (requirement.conflicts_with and + requirement.install_succeeded): + requirement.commit_uninstall() + requirement.remove_temporary_source() + + self.successfully_installed = to_install diff --git a/venv/lib/python3.5/site-packages/pip/req/req_uninstall.py b/venv/lib/python3.5/site-packages/pip/req/req_uninstall.py new file mode 100644 index 0000000..5248430 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/req/req_uninstall.py @@ -0,0 +1,195 @@ +from __future__ import absolute_import + +import logging +import os +import tempfile + +from pip.compat import uses_pycache, WINDOWS, cache_from_source +from pip.exceptions import UninstallationError +from pip.utils import rmtree, ask, is_local, renames, 
normalize_path +from pip.utils.logging import indent_log + + +logger = logging.getLogger(__name__) + + +class UninstallPathSet(object): + """A set of file paths to be removed in the uninstallation of a + requirement.""" + def __init__(self, dist): + self.paths = set() + self._refuse = set() + self.pth = {} + self.dist = dist + self.save_dir = None + self._moved_paths = [] + + def _permitted(self, path): + """ + Return True if the given path is one we are permitted to + remove/modify, False otherwise. + + """ + return is_local(path) + + def add(self, path): + head, tail = os.path.split(path) + + # we normalize the head to resolve parent directory symlinks, but not + # the tail, since we only want to uninstall symlinks, not their targets + path = os.path.join(normalize_path(head), os.path.normcase(tail)) + + if not os.path.exists(path): + return + if self._permitted(path): + self.paths.add(path) + else: + self._refuse.add(path) + + # __pycache__ files can show up after 'installed-files.txt' is created, + # due to imports + if os.path.splitext(path)[1] == '.py' and uses_pycache: + self.add(cache_from_source(path)) + + def add_pth(self, pth_file, entry): + pth_file = normalize_path(pth_file) + if self._permitted(pth_file): + if pth_file not in self.pth: + self.pth[pth_file] = UninstallPthEntries(pth_file) + self.pth[pth_file].add(entry) + else: + self._refuse.add(pth_file) + + def compact(self, paths): + """Compact a path set to contain the minimal number of paths + necessary to contain all paths in the set. If /a/path/ and + /a/path/to/a/file.txt are both in the set, leave only the + shorter path.""" + short_paths = set() + for path in sorted(paths, key=len): + if not any([ + (path.startswith(shortpath) and + path[len(shortpath.rstrip(os.path.sep))] == os.path.sep) + for shortpath in short_paths]): + short_paths.add(path) + return short_paths + + def _stash(self, path): + return os.path.join( + self.save_dir, os.path.splitdrive(path)[1].lstrip(os.path.sep)) + + def remove(self, auto_confirm=False): + """Remove paths in ``self.paths`` with confirmation (unless + ``auto_confirm`` is True).""" + if not self.paths: + logger.info( + "Can't uninstall '%s'. No files were found to uninstall.", + self.dist.project_name, + ) + return + logger.info( + 'Uninstalling %s-%s:', + self.dist.project_name, self.dist.version + ) + + with indent_log(): + paths = sorted(self.compact(self.paths)) + + if auto_confirm: + response = 'y' + else: + for path in paths: + logger.info(path) + response = ask('Proceed (y/n)? 
', ('y', 'n')) + if self._refuse: + logger.info('Not removing or modifying (outside of prefix):') + for path in self.compact(self._refuse): + logger.info(path) + if response == 'y': + self.save_dir = tempfile.mkdtemp(suffix='-uninstall', + prefix='pip-') + for path in paths: + new_path = self._stash(path) + logger.debug('Removing file or directory %s', path) + self._moved_paths.append(path) + renames(path, new_path) + for pth in self.pth.values(): + pth.remove() + logger.info( + 'Successfully uninstalled %s-%s', + self.dist.project_name, self.dist.version + ) + + def rollback(self): + """Rollback the changes previously made by remove().""" + if self.save_dir is None: + logger.error( + "Can't roll back %s; was not uninstalled", + self.dist.project_name, + ) + return False + logger.info('Rolling back uninstall of %s', self.dist.project_name) + for path in self._moved_paths: + tmp_path = self._stash(path) + logger.debug('Replacing %s', path) + renames(tmp_path, path) + for pth in self.pth.values(): + pth.rollback() + + def commit(self): + """Remove temporary save dir: rollback will no longer be possible.""" + if self.save_dir is not None: + rmtree(self.save_dir) + self.save_dir = None + self._moved_paths = [] + + +class UninstallPthEntries(object): + def __init__(self, pth_file): + if not os.path.isfile(pth_file): + raise UninstallationError( + "Cannot remove entries from nonexistent file %s" % pth_file + ) + self.file = pth_file + self.entries = set() + self._saved_lines = None + + def add(self, entry): + entry = os.path.normcase(entry) + # On Windows, os.path.normcase converts the entry to use + # backslashes. This is correct for entries that describe absolute + # paths outside of site-packages, but all the others use forward + # slashes. + if WINDOWS and not os.path.splitdrive(entry)[0]: + entry = entry.replace('\\', '/') + self.entries.add(entry) + + def remove(self): + logger.debug('Removing pth entries from %s:', self.file) + with open(self.file, 'rb') as fh: + # windows uses '\r\n' with py3k, but uses '\n' with py2.x + lines = fh.readlines() + self._saved_lines = lines + if any(b'\r\n' in line for line in lines): + endline = '\r\n' + else: + endline = '\n' + for entry in self.entries: + try: + logger.debug('Removing entry: %s', entry) + lines.remove((entry + endline).encode("utf-8")) + except ValueError: + pass + with open(self.file, 'wb') as fh: + fh.writelines(lines) + + def rollback(self): + if self._saved_lines is None: + logger.error( + 'Cannot roll back changes to %s, none were made', self.file + ) + return False + logger.debug('Rolling %s back to previous state', self.file) + with open(self.file, 'wb') as fh: + fh.writelines(self._saved_lines) + return True diff --git a/venv/lib/python3.5/site-packages/pip/status_codes.py b/venv/lib/python3.5/site-packages/pip/status_codes.py new file mode 100644 index 0000000..275360a --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/status_codes.py @@ -0,0 +1,8 @@ +from __future__ import absolute_import + +SUCCESS = 0 +ERROR = 1 +UNKNOWN_ERROR = 2 +VIRTUALENV_NOT_FOUND = 3 +PREVIOUS_BUILD_DIR_ERROR = 4 +NO_MATCHES_FOUND = 23 diff --git a/venv/lib/python3.5/site-packages/pip/utils/__init__.py b/venv/lib/python3.5/site-packages/pip/utils/__init__.py new file mode 100644 index 0000000..8ea2e38 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/__init__.py @@ -0,0 +1,878 @@ +from __future__ import absolute_import + +from collections import deque +import contextlib +import errno +import io +import locale +# we have a submodule 
named 'logging' which would shadow this if we used the +# regular name: +import logging as std_logging +import re +import os +import posixpath +import shutil +import stat +import subprocess +import sys +import tarfile +import zipfile + +from pip.exceptions import InstallationError +from pip.compat import console_to_str, expanduser, stdlib_pkgs +from pip.locations import ( + site_packages, user_site, running_under_virtualenv, virtualenv_no_global, + write_delete_marker_file, +) +from pip._vendor import pkg_resources +from pip._vendor.six.moves import input +from pip._vendor.six import PY2 +from pip._vendor.retrying import retry + +if PY2: + from io import BytesIO as StringIO +else: + from io import StringIO + +__all__ = ['rmtree', 'display_path', 'backup_dir', + 'ask', 'splitext', + 'format_size', 'is_installable_dir', + 'is_svn_page', 'file_contents', + 'split_leading_dir', 'has_leading_dir', + 'normalize_path', + 'renames', 'get_terminal_size', 'get_prog', + 'unzip_file', 'untar_file', 'unpack_file', 'call_subprocess', + 'captured_stdout', 'remove_tracebacks', 'ensure_dir', + 'ARCHIVE_EXTENSIONS', 'SUPPORTED_EXTENSIONS', + 'get_installed_version'] + + +logger = std_logging.getLogger(__name__) + +BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') +XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz', '.tar.lz', '.tar.lzma') +ZIP_EXTENSIONS = ('.zip', '.whl') +TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') +ARCHIVE_EXTENSIONS = ( + ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS) +SUPPORTED_EXTENSIONS = ZIP_EXTENSIONS + TAR_EXTENSIONS +try: + import bz2 # noqa + SUPPORTED_EXTENSIONS += BZ2_EXTENSIONS +except ImportError: + logger.debug('bz2 module is not available') + +try: + # Only for Python 3.3+ + import lzma # noqa + SUPPORTED_EXTENSIONS += XZ_EXTENSIONS +except ImportError: + logger.debug('lzma module is not available') + + +def import_or_raise(pkg_or_module_string, ExceptionType, *args, **kwargs): + try: + return __import__(pkg_or_module_string) + except ImportError: + raise ExceptionType(*args, **kwargs) + + +def ensure_dir(path): + """os.path.makedirs without EEXIST.""" + try: + os.makedirs(path) + except OSError as e: + if e.errno != errno.EEXIST: + raise + + +def get_prog(): + try: + if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'): + return "%s -m pip" % sys.executable + except (AttributeError, TypeError, IndexError): + pass + return 'pip' + + +# Retry every half second for up to 3 seconds +@retry(stop_max_delay=3000, wait_fixed=500) +def rmtree(dir, ignore_errors=False): + shutil.rmtree(dir, ignore_errors=ignore_errors, + onerror=rmtree_errorhandler) + + +def rmtree_errorhandler(func, path, exc_info): + """On Windows, the files in .svn are read-only, so when rmtree() tries to + remove them, an exception is thrown. We catch that here, remove the + read-only attribute, and hopefully continue without problems.""" + # if file type currently read only + if os.stat(path).st_mode & stat.S_IREAD: + # convert to read/write + os.chmod(path, stat.S_IWRITE) + # use the original function to repeat the operation + func(path) + return + else: + raise + + +def display_path(path): + """Gives the display value for a given path, making it relative to cwd + if possible.""" + path = os.path.normcase(os.path.abspath(path)) + if sys.version_info[0] == 2: + path = path.decode(sys.getfilesystemencoding(), 'replace') + path = path.encode(sys.getdefaultencoding(), 'replace') + if path.startswith(os.getcwd() + os.path.sep): + path = '.' 
+ path[len(os.getcwd()):] + return path + + +def backup_dir(dir, ext='.bak'): + """Figure out the name of a directory to back up the given dir to + (adding .bak, .bak2, etc)""" + n = 1 + extension = ext + while os.path.exists(dir + extension): + n += 1 + extension = ext + str(n) + return dir + extension + + +def ask_path_exists(message, options): + for action in os.environ.get('PIP_EXISTS_ACTION', '').split(): + if action in options: + return action + return ask(message, options) + + +def ask(message, options): + """Ask the message interactively, with the given possible responses""" + while 1: + if os.environ.get('PIP_NO_INPUT'): + raise Exception( + 'No input was expected ($PIP_NO_INPUT set); question: %s' % + message + ) + response = input(message) + response = response.strip().lower() + if response not in options: + print( + 'Your response (%r) was not one of the expected responses: ' + '%s' % (response, ', '.join(options)) + ) + else: + return response + + +def format_size(bytes): + if bytes > 1000 * 1000: + return '%.1fMB' % (bytes / 1000.0 / 1000) + elif bytes > 10 * 1000: + return '%ikB' % (bytes / 1000) + elif bytes > 1000: + return '%.1fkB' % (bytes / 1000.0) + else: + return '%ibytes' % bytes + + +def is_installable_dir(path): + """Return True if `path` is a directory containing a setup.py file.""" + if not os.path.isdir(path): + return False + setup_py = os.path.join(path, 'setup.py') + if os.path.isfile(setup_py): + return True + return False + + +def is_svn_page(html): + """ + Returns true if the page appears to be the index page of an svn repository + """ + return (re.search(r'[^<]*Revision \d+:', html) and + re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I)) + + +def file_contents(filename): + with open(filename, 'rb') as fp: + return fp.read().decode('utf-8') + + +def read_chunks(file, size=io.DEFAULT_BUFFER_SIZE): + """Yield pieces of data from a file-like object until EOF.""" + while True: + chunk = file.read(size) + if not chunk: + break + yield chunk + + +def split_leading_dir(path): + path = path.lstrip('/').lstrip('\\') + if '/' in path and (('\\' in path and path.find('/') < path.find('\\')) or + '\\' not in path): + return path.split('/', 1) + elif '\\' in path: + return path.split('\\', 1) + else: + return path, '' + + +def has_leading_dir(paths): + """Returns true if all the paths have the same leading path name + (i.e., everything is in one subdirectory in an archive)""" + common_prefix = None + for path in paths: + prefix, rest = split_leading_dir(path) + if not prefix: + return False + elif common_prefix is None: + common_prefix = prefix + elif prefix != common_prefix: + return False + return True + + +def normalize_path(path, resolve_symlinks=True): + """ + Convert a path to its canonical, case-normalized, absolute version. + + """ + path = expanduser(path) + if resolve_symlinks: + path = os.path.realpath(path) + else: + path = os.path.abspath(path) + return os.path.normcase(path) + + +def splitext(path): + """Like os.path.splitext, but take off .tar too""" + base, ext = posixpath.splitext(path) + if base.lower().endswith('.tar'): + ext = base[-4:] + ext + base = base[:-4] + return base, ext + + +def renames(old, new): + """Like os.renames(), but handles renaming across devices.""" + # Implementation borrowed from os.renames(). 
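+    # Hedged example (editor's note):
+    #     renames('/tmp/pip-xyz/pkg/mod.py', '/opt/stash/pkg/mod.py')
+    # creates /opt/stash/pkg if missing, moves the file (working across
+    # devices, unlike a bare os.rename), then prunes emptied source dirs.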
+ head, tail = os.path.split(new) + if head and tail and not os.path.exists(head): + os.makedirs(head) + + shutil.move(old, new) + + head, tail = os.path.split(old) + if head and tail: + try: + os.removedirs(head) + except OSError: + pass + + +def is_local(path): + """ + Return True if this is a path pip is allowed to modify. + + If we're in a virtualenv, sys.prefix points to the virtualenv's + prefix; only sys.prefix is considered local. + + If we're not in a virtualenv, in general we can modify anything. + However, if the OS vendor has configured distutils to install + somewhere other than sys.prefix (which could be a subdirectory of + sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal + and the domain of the OS vendor. (In other words, everything _other + than_ sys.prefix is considered local.) + + """ + + path = normalize_path(path) + prefix = normalize_path(sys.prefix) + + if running_under_virtualenv(): + return path.startswith(normalize_path(sys.prefix)) + else: + from pip.locations import distutils_scheme + if path.startswith(prefix): + for local_path in distutils_scheme("").values(): + if path.startswith(normalize_path(local_path)): + return True + return False + else: + return True + + +def dist_is_local(dist): + """ + Return True if given Distribution object is installed somewhere pip + is allowed to modify. + + """ + return is_local(dist_location(dist)) + + +def dist_in_usersite(dist): + """ + Return True if given Distribution is installed in user site. + """ + norm_path = normalize_path(dist_location(dist)) + return norm_path.startswith(normalize_path(user_site)) + + +def dist_in_site_packages(dist): + """ + Return True if given Distribution is installed in + distutils.sysconfig.get_python_lib(). + """ + return normalize_path( + dist_location(dist) + ).startswith(normalize_path(site_packages)) + + +def dist_is_editable(dist): + """Is distribution an editable install?""" + for path_item in sys.path: + egg_link = os.path.join(path_item, dist.project_name + '.egg-link') + if os.path.isfile(egg_link): + return True + return False + + +def get_installed_distributions(local_only=True, + skip=stdlib_pkgs, + include_editables=True, + editables_only=False, + user_only=False): + """ + Return a list of installed Distribution objects. + + If ``local_only`` is True (default), only return installations + local to the current virtualenv, if in a virtualenv. + + ``skip`` argument is an iterable of lower-case project names to + ignore; defaults to stdlib_pkgs + + If ``editables`` is False, don't report editables. + + If ``editables_only`` is True , only report editables. + + If ``user_only`` is True , only report installations in the user + site directory. + + """ + if local_only: + local_test = dist_is_local + else: + def local_test(d): + return True + + if include_editables: + def editable_test(d): + return True + else: + def editable_test(d): + return not dist_is_editable(d) + + if editables_only: + def editables_only_test(d): + return dist_is_editable(d) + else: + def editables_only_test(d): + return True + + if user_only: + user_test = dist_in_usersite + else: + def user_test(d): + return True + + return [d for d in pkg_resources.working_set + if local_test(d) and + d.key not in skip and + editable_test(d) and + editables_only_test(d) and + user_test(d) + ] + + +def egg_link_path(dist): + """ + Return the path for the .egg-link file if it exists, otherwise, None. 
+ + There's 3 scenarios: + 1) not in a virtualenv + try to find in site.USER_SITE, then site_packages + 2) in a no-global virtualenv + try to find in site_packages + 3) in a yes-global virtualenv + try to find in site_packages, then site.USER_SITE + (don't look in global location) + + For #1 and #3, there could be odd cases, where there's an egg-link in 2 + locations. + + This method will just return the first one found. + """ + sites = [] + if running_under_virtualenv(): + if virtualenv_no_global(): + sites.append(site_packages) + else: + sites.append(site_packages) + if user_site: + sites.append(user_site) + else: + if user_site: + sites.append(user_site) + sites.append(site_packages) + + for site in sites: + egglink = os.path.join(site, dist.project_name) + '.egg-link' + if os.path.isfile(egglink): + return egglink + + +def dist_location(dist): + """ + Get the site-packages location of this distribution. Generally + this is dist.location, except in the case of develop-installed + packages, where dist.location is the source code location, and we + want to know where the egg-link file is. + + """ + egg_link = egg_link_path(dist) + if egg_link: + return egg_link + return dist.location + + +def get_terminal_size(): + """Returns a tuple (x, y) representing the width(x) and the height(x) + in characters of the terminal window.""" + def ioctl_GWINSZ(fd): + try: + import fcntl + import termios + import struct + cr = struct.unpack( + 'hh', + fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234') + ) + except: + return None + if cr == (0, 0): + return None + return cr + cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) + if not cr: + try: + fd = os.open(os.ctermid(), os.O_RDONLY) + cr = ioctl_GWINSZ(fd) + os.close(fd) + except: + pass + if not cr: + cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80)) + return int(cr[1]), int(cr[0]) + + +def current_umask(): + """Get the current umask which involves having to set it temporarily.""" + mask = os.umask(0) + os.umask(mask) + return mask + + +def unzip_file(filename, location, flatten=True): + """ + Unzip the file (with path `filename`) to the destination `location`. All + files are written based on system defaults and umask (i.e. permissions are + not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + zipfp = open(filename, 'rb') + try: + zip = zipfile.ZipFile(zipfp, allowZip64=True) + leading = has_leading_dir(zip.namelist()) and flatten + for info in zip.infolist(): + name = info.filename + data = zip.read(name) + fn = name + if leading: + fn = split_leading_dir(name)[1] + fn = os.path.join(location, fn) + dir = os.path.dirname(fn) + if fn.endswith('/') or fn.endswith('\\'): + # A directory + ensure_dir(fn) + else: + ensure_dir(dir) + fp = open(fn, 'wb') + try: + fp.write(data) + finally: + fp.close() + mode = info.external_attr >> 16 + # if mode and regular file and any execute permissions for + # user/group/world? + if mode and stat.S_ISREG(mode) and mode & 0o111: + # make dest file have execute for user/group/world + # (chmod +x) no-op on windows per python docs + os.chmod(fn, (0o777 - current_umask() | 0o111)) + finally: + zipfp.close() + + +def untar_file(filename, location): + """ + Untar the file (with path `filename`) to the destination `location`. + All files are written based on system defaults and umask (i.e. 
permissions + are not preserved), except that regular file members with any execute + permissions (user, group, or world) have "chmod +x" applied after being + written. Note that for windows, any execute changes using os.chmod are + no-ops per the python docs. + """ + ensure_dir(location) + if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'): + mode = 'r:gz' + elif filename.lower().endswith(BZ2_EXTENSIONS): + mode = 'r:bz2' + elif filename.lower().endswith(XZ_EXTENSIONS): + mode = 'r:xz' + elif filename.lower().endswith('.tar'): + mode = 'r' + else: + logger.warning( + 'Cannot determine compression type for file %s', filename, + ) + mode = 'r:*' + tar = tarfile.open(filename, mode) + try: + # note: python<=2.5 doesn't seem to know about pax headers, filter them + leading = has_leading_dir([ + member.name for member in tar.getmembers() + if member.name != 'pax_global_header' + ]) + for member in tar.getmembers(): + fn = member.name + if fn == 'pax_global_header': + continue + if leading: + fn = split_leading_dir(fn)[1] + path = os.path.join(location, fn) + if member.isdir(): + ensure_dir(path) + elif member.issym(): + try: + tar._extract_member(member, path) + except Exception as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + else: + try: + fp = tar.extractfile(member) + except (KeyError, AttributeError) as exc: + # Some corrupt tar files seem to produce this + # (specifically bad symlinks) + logger.warning( + 'In the tar file %s the member %s is invalid: %s', + filename, member.name, exc, + ) + continue + ensure_dir(os.path.dirname(path)) + with open(path, 'wb') as destfp: + shutil.copyfileobj(fp, destfp) + fp.close() + # Update the timestamp (useful for cython compiled files) + tar.utime(member, path) + # member have any execute permissions for user/group/world? + if member.mode & 0o111: + # make dest file have execute for user/group/world + # no-op on windows per python docs + os.chmod(path, (0o777 - current_umask() | 0o111)) + finally: + tar.close() + + +def unpack_file(filename, location, content_type, link): + filename = os.path.realpath(filename) + if (content_type == 'application/zip' or + filename.lower().endswith(ZIP_EXTENSIONS) or + zipfile.is_zipfile(filename)): + unzip_file( + filename, + location, + flatten=not filename.endswith('.whl') + ) + elif (content_type == 'application/x-gzip' or + tarfile.is_tarfile(filename) or + filename.lower().endswith( + TAR_EXTENSIONS + BZ2_EXTENSIONS + XZ_EXTENSIONS)): + untar_file(filename, location) + elif (content_type and content_type.startswith('text/html') and + is_svn_page(file_contents(filename))): + # We don't really care about this + from pip.vcs.subversion import Subversion + Subversion('svn+' + link.url).unpack(location) + else: + # FIXME: handle? + # FIXME: magic signatures? + logger.critical( + 'Cannot unpack file %s (downloaded from %s, content-type: %s); ' + 'cannot detect archive format', + filename, location, content_type, + ) + raise InstallationError( + 'Cannot determine archive format of %s' % location + ) + + +def remove_tracebacks(output): + pattern = (r'(?:\W+File "(?:.*)", line (?:.*)\W+(?:.*)\W+\^\W+)?' 
+ r'Syntax(?:Error|Warning): (?:.*)') + output = re.sub(pattern, '', output) + if PY2: + return output + # compileall.compile_dir() prints different messages to stdout + # in Python 3 + return re.sub(r"\*\*\* Error compiling (?:.*)", '', output) + + +def call_subprocess(cmd, show_stdout=True, cwd=None, + on_returncode='raise', + command_level=std_logging.DEBUG, command_desc=None, + extra_environ=None, spinner=None): + # This function's handling of subprocess output is confusing and I + # previously broke it terribly, so as penance I will write a long comment + # explaining things. + # + # The obvious thing that affects output is the show_stdout= + # kwarg. show_stdout=True means, let the subprocess write directly to our + # stdout. Even though it is nominally the default, it is almost never used + # inside pip (and should not be used in new code without a very good + # reason); as of 2016-02-22 it is only used in a few places inside the VCS + # wrapper code. Ideally we should get rid of it entirely, because it + # creates a lot of complexity here for a rarely used feature. + # + # Most places in pip set show_stdout=False. What this means is: + # - We connect the child stdout to a pipe, which we read. + # - By default, we hide the output but show a spinner -- unless the + # subprocess exits with an error, in which case we show the output. + # - If the --verbose option was passed (= loglevel is DEBUG), then we show + # the output unconditionally. (But in this case we don't want to show + # the output a second time if it turns out that there was an error.) + # + # stderr is always merged with stdout (even if show_stdout=True). + if show_stdout: + stdout = None + else: + stdout = subprocess.PIPE + if command_desc is None: + cmd_parts = [] + for part in cmd: + if ' ' in part or '\n' in part or '"' in part or "'" in part: + part = '"%s"' % part.replace('"', '\\"') + cmd_parts.append(part) + command_desc = ' '.join(cmd_parts) + logger.log(command_level, "Running command %s", command_desc) + env = os.environ.copy() + if extra_environ: + env.update(extra_environ) + try: + proc = subprocess.Popen( + cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout, + cwd=cwd, env=env) + except Exception as exc: + logger.critical( + "Error %s while executing command %s", exc, command_desc, + ) + raise + if stdout is not None: + all_output = [] + while True: + line = console_to_str(proc.stdout.readline()) + if not line: + break + line = line.rstrip() + all_output.append(line + '\n') + if logger.getEffectiveLevel() <= std_logging.DEBUG: + # Show the line immediately + logger.debug(line) + else: + # Update the spinner + if spinner is not None: + spinner.spin() + proc.wait() + if spinner is not None: + if proc.returncode: + spinner.finish("error") + else: + spinner.finish("done") + if proc.returncode: + if on_returncode == 'raise': + if (logger.getEffectiveLevel() > std_logging.DEBUG and + not show_stdout): + logger.info( + 'Complete output from command %s:', command_desc, + ) + logger.info( + ''.join(all_output) + + '\n----------------------------------------' + ) + raise InstallationError( + 'Command "%s" failed with error code %s in %s' + % (command_desc, proc.returncode, cwd)) + elif on_returncode == 'warn': + logger.warning( + 'Command "%s" had error code %s in %s', + command_desc, proc.returncode, cwd, + ) + elif on_returncode == 'ignore': + pass + else: + raise ValueError('Invalid value: on_returncode=%s' % + repr(on_returncode)) + if not show_stdout: + return remove_tracebacks(''.join(all_output)) + + 
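+# Illustrative usage of call_subprocess (editor's sketch, not pip source):
+# callers usually hide output behind a spinner and let failures surface,
+# as in the 'setup.py install' code path:
+#
+#     with open_spinner(msg) as spinner:
+#         with indent_log():
+#             call_subprocess(args, cwd=source_dir, show_stdout=False,
+#                             spinner=spinner)
+
+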
+def read_text_file(filename): + """Return the contents of *filename*. + + Try to decode the file contents with utf-8, the preferred system encoding + (e.g., cp1252 on some Windows machines), and latin1, in that order. + Decoding a byte string with latin1 will never raise an error. In the worst + case, the returned string will contain some garbage characters. + + """ + with open(filename, 'rb') as fp: + data = fp.read() + + encodings = ['utf-8', locale.getpreferredencoding(False), 'latin1'] + for enc in encodings: + try: + data = data.decode(enc) + except UnicodeDecodeError: + continue + break + + assert type(data) != bytes # Latin1 should have worked. + return data + + +def _make_build_dir(build_dir): + os.makedirs(build_dir) + write_delete_marker_file(build_dir) + + +class FakeFile(object): + """Wrap a list of lines in an object with readline() to make + ConfigParser happy.""" + def __init__(self, lines): + self._gen = (l for l in lines) + + def readline(self): + try: + try: + return next(self._gen) + except NameError: + return self._gen.next() + except StopIteration: + return '' + + def __iter__(self): + return self._gen + + +class StreamWrapper(StringIO): + + @classmethod + def from_stream(cls, orig_stream): + cls.orig_stream = orig_stream + return cls() + + # compileall.compile_dir() needs stdout.encoding to print to stdout + @property + def encoding(self): + return self.orig_stream.encoding + + +@contextlib.contextmanager +def captured_output(stream_name): + """Return a context manager used by captured_stdout/stdin/stderr + that temporarily replaces the sys stream *stream_name* with a StringIO. + + Taken from Lib/support/__init__.py in the CPython repo. + """ + orig_stdout = getattr(sys, stream_name) + setattr(sys, stream_name, StreamWrapper.from_stream(orig_stdout)) + try: + yield getattr(sys, stream_name) + finally: + setattr(sys, stream_name, orig_stdout) + + +def captured_stdout(): + """Capture the output of sys.stdout: + + with captured_stdout() as stdout: + print('hello') + self.assertEqual(stdout.getvalue(), 'hello\n') + + Taken from Lib/support/__init__.py in the CPython repo. + """ + return captured_output('stdout') + + +class cached_property(object): + """A property that is only computed once per instance and then replaces + itself with an ordinary attribute. Deleting the attribute resets the + property. + + Source: https://github.com/bottlepy/bottle/blob/0.11.5/bottle.py#L175 + """ + + def __init__(self, func): + self.__doc__ = getattr(func, '__doc__') + self.func = func + + def __get__(self, obj, cls): + if obj is None: + # We're being accessed from the class itself, not from an object + return self + value = obj.__dict__[self.func.__name__] = self.func(obj) + return value + + +def get_installed_version(dist_name): + """Get the installed version of dist_name avoiding pkg_resources cache""" + # Create a requirement that we'll look for inside of setuptools. + req = pkg_resources.Requirement.parse(dist_name) + + # We want to avoid having this cached, so we need to construct a new + # working set each time. + working_set = pkg_resources.WorkingSet() + + # Get the installed distribution from our working set + dist = working_set.find(req) + + # Check to see if we got an installed distribution or not, if we did + # we want to return it's version. 
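+    # (editor's note) pkg_resources.working_set is populated once at import
+    # time, so it can report a stale version after an in-process install;
+    # the throwaway WorkingSet() above re-scans sys.path on every call.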
+ return dist.version if dist else None + + +def consume(iterator): + """Consume an iterable at C speed.""" + deque(iterator, maxlen=0) diff --git a/venv/lib/python3.5/site-packages/pip/utils/appdirs.py b/venv/lib/python3.5/site-packages/pip/utils/appdirs.py new file mode 100644 index 0000000..60ae76e --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/appdirs.py @@ -0,0 +1,224 @@ +""" +This code was taken from https://github.com/ActiveState/appdirs and modified +to suit our purposes. +""" +from __future__ import absolute_import + +import os +import sys + +from pip.compat import WINDOWS, expanduser + + +def user_cache_dir(appname): + r""" + Return full path to the user-specific cache dir for this application. + + "appname" is the name of application. + + Typical user cache directories are: + Mac OS X: ~/Library/Caches/<AppName> + Unix: ~/.cache/<AppName> (XDG default) + Windows: C:\Users\<username>\AppData\Local\<AppName>\Cache + + On Windows the only suggestion in the MSDN docs is that local settings go + in the `CSIDL_LOCAL_APPDATA` directory. This is identical to the + non-roaming app data dir (the default returned by `user_data_dir`). Apps + typically put cache data somewhere *under* the given dir here. Some + examples: + ...\Mozilla\Firefox\Profiles\<ProfileName>\Cache + ...\Acme\SuperApp\Cache\1.0 + + OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value. + """ + if WINDOWS: + # Get the base path + path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA")) + + # Add our app name and Cache directory to it + path = os.path.join(path, appname, "Cache") + elif sys.platform == "darwin": + # Get the base path + path = expanduser("~/Library/Caches") + + # Add our app name to it + path = os.path.join(path, appname) + else: + # Get the base path + path = os.getenv("XDG_CACHE_HOME", expanduser("~/.cache")) + + # Add our app name to it + path = os.path.join(path, appname) + + return path + + +def user_data_dir(appname, roaming=False): + """ + Return full path to the user-specific data dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "roaming" (boolean, default False) can be set True to use the Windows + roaming appdata directory. That means that for users on a Windows + network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user data directories are: + Mac OS X: ~/Library/Application Support/<AppName> + Unix: ~/.local/share/<AppName> # or in + $XDG_DATA_HOME, if defined + Win XP (not roaming): C:\Documents and Settings\<username>\ ... + ...Application Data\<AppName> + Win XP (roaming): C:\Documents and Settings\<username>\Local ... + ...Settings\Application Data\<AppName> + Win 7 (not roaming): C:\\Users\<username>\AppData\Local\<AppName> + Win 7 (roaming): C:\\Users\<username>\AppData\Roaming\<AppName> + + For Unix, we follow the XDG spec and support $XDG_DATA_HOME. + That means, by default "~/.local/share/<AppName>". 
+ """ + if WINDOWS: + const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA" + path = os.path.join(os.path.normpath(_get_win_folder(const)), appname) + elif sys.platform == "darwin": + path = os.path.join( + expanduser('~/Library/Application Support/'), + appname, + ) + else: + path = os.path.join( + os.getenv('XDG_DATA_HOME', expanduser("~/.local/share")), + appname, + ) + + return path + + +def user_config_dir(appname, roaming=True): + """Return full path to the user-specific config dir for this application. + + "appname" is the name of application. + If None, just the system directory is returned. + "roaming" (boolean, default True) can be set False to not use the + Windows roaming appdata directory. That means that for users on a + Windows network setup for roaming profiles, this user data will be + sync'd on login. See + <http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx> + for a discussion of issues. + + Typical user data directories are: + Mac OS X: same as user_data_dir + Unix: ~/.config/<AppName> + Win *: same as user_data_dir + + For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME. + That means, by deafult "~/.config/<AppName>". + """ + if WINDOWS: + path = user_data_dir(appname, roaming=roaming) + elif sys.platform == "darwin": + path = user_data_dir(appname) + else: + path = os.getenv('XDG_CONFIG_HOME', expanduser("~/.config")) + path = os.path.join(path, appname) + + return path + + +# for the discussion regarding site_config_dirs locations +# see <https://github.com/pypa/pip/issues/1733> +def site_config_dirs(appname): + """Return a list of potential user-shared config dirs for this application. + + "appname" is the name of application. + + Typical user config directories are: + Mac OS X: /Library/Application Support/<AppName>/ + Unix: /etc or $XDG_CONFIG_DIRS[i]/<AppName>/ for each value in + $XDG_CONFIG_DIRS + Win XP: C:\Documents and Settings\All Users\Application ... + ...Data\<AppName>\ + Vista: (Fail! "C:\ProgramData" is a hidden *system* directory + on Vista.) + Win 7: Hidden, but writeable on Win 7: + C:\ProgramData\<AppName>\ + """ + if WINDOWS: + path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA")) + pathlist = [os.path.join(path, appname)] + elif sys.platform == 'darwin': + pathlist = [os.path.join('/Library/Application Support', appname)] + else: + # try looking in $XDG_CONFIG_DIRS + xdg_config_dirs = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg') + if xdg_config_dirs: + pathlist = [ + os.path.join(expanduser(x), appname) + for x in xdg_config_dirs.split(os.pathsep) + ] + else: + pathlist = [] + + # always look in /etc directly as well + pathlist.append('/etc') + + return pathlist + + +# -- Windows support functions -- + +def _get_win_folder_from_registry(csidl_name): + """ + This is a fallback technique at best. I'm not sure if using the + registry for this guarantees us the correct answer for all CSIDL_* + names. 
+ """ + import _winreg + + shell_folder_name = { + "CSIDL_APPDATA": "AppData", + "CSIDL_COMMON_APPDATA": "Common AppData", + "CSIDL_LOCAL_APPDATA": "Local AppData", + }[csidl_name] + + key = _winreg.OpenKey( + _winreg.HKEY_CURRENT_USER, + r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders" + ) + directory, _type = _winreg.QueryValueEx(key, shell_folder_name) + return directory + + +def _get_win_folder_with_ctypes(csidl_name): + csidl_const = { + "CSIDL_APPDATA": 26, + "CSIDL_COMMON_APPDATA": 35, + "CSIDL_LOCAL_APPDATA": 28, + }[csidl_name] + + buf = ctypes.create_unicode_buffer(1024) + ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf) + + # Downgrade to short path name if have highbit chars. See + # <http://bugs.activestate.com/show_bug.cgi?id=85099>. + has_high_char = False + for c in buf: + if ord(c) > 255: + has_high_char = True + break + if has_high_char: + buf2 = ctypes.create_unicode_buffer(1024) + if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024): + buf = buf2 + + return buf.value + +if WINDOWS: + try: + import ctypes + _get_win_folder = _get_win_folder_with_ctypes + except ImportError: + _get_win_folder = _get_win_folder_from_registry diff --git a/venv/lib/python3.5/site-packages/pip/utils/build.py b/venv/lib/python3.5/site-packages/pip/utils/build.py new file mode 100644 index 0000000..fc65cfa --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/build.py @@ -0,0 +1,42 @@ +from __future__ import absolute_import + +import os.path +import tempfile + +from pip.utils import rmtree + + +class BuildDirectory(object): + + def __init__(self, name=None, delete=None): + # If we were not given an explicit directory, and we were not given an + # explicit delete option, then we'll default to deleting. + if name is None and delete is None: + delete = True + + if name is None: + # We realpath here because some systems have their default tmpdir + # symlinked to another directory. This tends to confuse build + # scripts, so we canonicalize the path by traversing potential + # symlinks here. + name = os.path.realpath(tempfile.mkdtemp(prefix="pip-build-")) + # If we were not given an explicit directory, and we were not given + # an explicit delete option, then we'll default to deleting. + if delete is None: + delete = True + + self.name = name + self.delete = delete + + def __repr__(self): + return "<{} {!r}>".format(self.__class__.__name__, self.name) + + def __enter__(self): + return self.name + + def __exit__(self, exc, value, tb): + self.cleanup() + + def cleanup(self): + if self.delete: + rmtree(self.name) diff --git a/venv/lib/python3.5/site-packages/pip/utils/deprecation.py b/venv/lib/python3.5/site-packages/pip/utils/deprecation.py new file mode 100644 index 0000000..2fb1d1e --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/deprecation.py @@ -0,0 +1,76 @@ +""" +A module that implments tooling to enable easy warnings about deprecations. 
+""" +from __future__ import absolute_import + +import logging +import warnings + + +class PipDeprecationWarning(Warning): + pass + + +class Pending(object): + pass + + +class RemovedInPip9Warning(PipDeprecationWarning): + pass + + +class RemovedInPip10Warning(PipDeprecationWarning, Pending): + pass + + +class Python26DeprecationWarning(PipDeprecationWarning, Pending): + pass + + +# Warnings <-> Logging Integration + + +_warnings_showwarning = None + + +def _showwarning(message, category, filename, lineno, file=None, line=None): + if file is not None: + if _warnings_showwarning is not None: + _warnings_showwarning( + message, category, filename, lineno, file, line, + ) + else: + if issubclass(category, PipDeprecationWarning): + # We use a specially named logger which will handle all of the + # deprecation messages for pip. + logger = logging.getLogger("pip.deprecations") + + # This is purposely using the % formatter here instead of letting + # the logging module handle the interpolation. This is because we + # want it to appear as if someone typed this entire message out. + log_message = "DEPRECATION: %s" % message + + # PipDeprecationWarnings that are Pending still have at least 2 + # versions to go until they are removed so they can just be + # warnings. Otherwise, they will be removed in the very next + # version of pip. We want these to be more obvious so we use the + # ERROR logging level. + if issubclass(category, Pending): + logger.warning(log_message) + else: + logger.error(log_message) + else: + _warnings_showwarning( + message, category, filename, lineno, file, line, + ) + + +def install_warning_logger(): + # Enable our Deprecation Warnings + warnings.simplefilter("default", PipDeprecationWarning, append=True) + + global _warnings_showwarning + + if _warnings_showwarning is None: + _warnings_showwarning = warnings.showwarning + warnings.showwarning = _showwarning diff --git a/venv/lib/python3.5/site-packages/pip/utils/encoding.py b/venv/lib/python3.5/site-packages/pip/utils/encoding.py new file mode 100644 index 0000000..2483168 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/encoding.py @@ -0,0 +1,31 @@ +import codecs +import locale +import re + + +BOMS = [ + (codecs.BOM_UTF8, 'utf8'), + (codecs.BOM_UTF16, 'utf16'), + (codecs.BOM_UTF16_BE, 'utf16-be'), + (codecs.BOM_UTF16_LE, 'utf16-le'), + (codecs.BOM_UTF32, 'utf32'), + (codecs.BOM_UTF32_BE, 'utf32-be'), + (codecs.BOM_UTF32_LE, 'utf32-le'), +] + +ENCODING_RE = re.compile(b'coding[:=]\s*([-\w.]+)') + + +def auto_decode(data): + """Check a bytes string for a BOM to correctly detect the encoding + + Fallback to locale.getpreferredencoding(False) like open() on Python3""" + for bom, encoding in BOMS: + if data.startswith(bom): + return data[len(bom):].decode(encoding) + # Lets check the first two lines as in PEP263 + for line in data.split(b'\n')[:2]: + if line[0:1] == b'#' and ENCODING_RE.search(line): + encoding = ENCODING_RE.search(line).groups()[0].decode('ascii') + return data.decode(encoding) + return data.decode(locale.getpreferredencoding(False)) diff --git a/venv/lib/python3.5/site-packages/pip/utils/filesystem.py b/venv/lib/python3.5/site-packages/pip/utils/filesystem.py new file mode 100644 index 0000000..25ad516 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/filesystem.py @@ -0,0 +1,28 @@ +import os +import os.path + +from pip.compat import get_path_uid + + +def check_path_owner(path): + # If we don't have a way to check the effective uid of this process, then + # we'll just assume that we 
own the directory. + if not hasattr(os, "geteuid"): + return True + + previous = None + while path != previous: + if os.path.lexists(path): + # Check if path is writable by current user. + if os.geteuid() == 0: + # Special handling for root user in order to handle properly + # cases where users use sudo without -H flag. + try: + path_uid = get_path_uid(path) + except OSError: + return False + return path_uid == 0 + else: + return os.access(path, os.W_OK) + else: + previous, path = path, os.path.dirname(path) diff --git a/venv/lib/python3.5/site-packages/pip/utils/hashes.py b/venv/lib/python3.5/site-packages/pip/utils/hashes.py new file mode 100644 index 0000000..9602970 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/hashes.py @@ -0,0 +1,92 @@ +from __future__ import absolute_import + +import hashlib + +from pip.exceptions import HashMismatch, HashMissing, InstallationError +from pip.utils import read_chunks +from pip._vendor.six import iteritems, iterkeys, itervalues + + +# The recommended hash algo of the moment. Change this whenever the state of +# the art changes; it won't hurt backward compatibility. +FAVORITE_HASH = 'sha256' + + +# Names of hashlib algorithms allowed by the --hash option and ``pip hash`` +# Currently, those are the ones at least as collision-resistant as sha256. +STRONG_HASHES = ['sha256', 'sha384', 'sha512'] + + +class Hashes(object): + """A wrapper that builds multiple hashes at once and checks them against + known-good values + + """ + def __init__(self, hashes=None): + """ + :param hashes: A dict of algorithm names pointing to lists of allowed + hex digests + """ + self._allowed = {} if hashes is None else hashes + + def check_against_chunks(self, chunks): + """Check good hashes against ones built from iterable of chunks of + data. + + Raise HashMismatch if none match. + + """ + gots = {} + for hash_name in iterkeys(self._allowed): + try: + gots[hash_name] = hashlib.new(hash_name) + except (ValueError, TypeError): + raise InstallationError('Unknown hash name: %s' % hash_name) + + for chunk in chunks: + for hash in itervalues(gots): + hash.update(chunk) + + for hash_name, got in iteritems(gots): + if got.hexdigest() in self._allowed[hash_name]: + return + self._raise(gots) + + def _raise(self, gots): + raise HashMismatch(self._allowed, gots) + + def check_against_file(self, file): + """Check good hashes against a file-like object + + Raise HashMismatch if none match. + + """ + return self.check_against_chunks(read_chunks(file)) + + def check_against_path(self, path): + with open(path, 'rb') as file: + return self.check_against_file(file) + + def __nonzero__(self): + """Return whether I know any known-good hashes.""" + return bool(self._allowed) + + def __bool__(self): + return self.__nonzero__() + + +class MissingHashes(Hashes): + """A workalike for Hashes used when we're missing a hash for a requirement + + It computes the actual hash of the requirement and raises a HashMissing + exception showing it to the user. + + """ + def __init__(self): + """Don't offer the ``hashes`` kwarg.""" + # Pass our favorite hash in to generate a "gotten hash". With the + # empty list, it will never match, so an error will always raise. 
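+        # (editor's note) because the allowed list is empty it can never
+        # match, so check_against_chunks() is guaranteed to reach _raise();
+        # the override below reports the digest that *was* computed as a
+        # HashMissing error the user can copy into a requirements file.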
+ super(MissingHashes, self).__init__(hashes={FAVORITE_HASH: []}) + + def _raise(self, gots): + raise HashMissing(gots[FAVORITE_HASH].hexdigest()) diff --git a/venv/lib/python3.5/site-packages/pip/utils/logging.py b/venv/lib/python3.5/site-packages/pip/utils/logging.py new file mode 100644 index 0000000..1c1053a --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/logging.py @@ -0,0 +1,130 @@ +from __future__ import absolute_import + +import contextlib +import logging +import logging.handlers +import os + +try: + import threading +except ImportError: + import dummy_threading as threading + +from pip.compat import WINDOWS +from pip.utils import ensure_dir + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + + +_log_state = threading.local() +_log_state.indentation = 0 + + +@contextlib.contextmanager +def indent_log(num=2): + """ + A context manager which will cause the log output to be indented for any + log messages emitted inside it. + """ + _log_state.indentation += num + try: + yield + finally: + _log_state.indentation -= num + + +def get_indentation(): + return getattr(_log_state, 'indentation', 0) + + +class IndentingFormatter(logging.Formatter): + + def format(self, record): + """ + Calls the standard formatter, but will indent all of the log messages + by our current indentation level. + """ + formatted = logging.Formatter.format(self, record) + formatted = "".join([ + (" " * get_indentation()) + line + for line in formatted.splitlines(True) + ]) + return formatted + + +def _color_wrap(*colors): + def wrapped(inp): + return "".join(list(colors) + [inp, colorama.Style.RESET_ALL]) + return wrapped + + +class ColorizedStreamHandler(logging.StreamHandler): + + # Don't build up a list of colors if we don't have colorama + if colorama: + COLORS = [ + # This needs to be in order from highest logging level to lowest. 
+ (logging.ERROR, _color_wrap(colorama.Fore.RED)), + (logging.WARNING, _color_wrap(colorama.Fore.YELLOW)), + ] + else: + COLORS = [] + + def __init__(self, stream=None): + logging.StreamHandler.__init__(self, stream) + + if WINDOWS and colorama: + self.stream = colorama.AnsiToWin32(self.stream) + + def should_color(self): + # Don't colorize things if we do not have colorama + if not colorama: + return False + + real_stream = ( + self.stream if not isinstance(self.stream, colorama.AnsiToWin32) + else self.stream.wrapped + ) + + # If the stream is a tty we should color it + if hasattr(real_stream, "isatty") and real_stream.isatty(): + return True + + # If we have an ASNI term we should color it + if os.environ.get("TERM") == "ANSI": + return True + + # If anything else we should not color it + return False + + def format(self, record): + msg = logging.StreamHandler.format(self, record) + + if self.should_color(): + for level, color in self.COLORS: + if record.levelno >= level: + msg = color(msg) + break + + return msg + + +class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler): + + def _open(self): + ensure_dir(os.path.dirname(self.baseFilename)) + return logging.handlers.RotatingFileHandler._open(self) + + +class MaxLevelFilter(logging.Filter): + + def __init__(self, level): + self.level = level + + def filter(self, record): + return record.levelno < self.level diff --git a/venv/lib/python3.5/site-packages/pip/utils/outdated.py b/venv/lib/python3.5/site-packages/pip/utils/outdated.py new file mode 100644 index 0000000..2164cc3 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/outdated.py @@ -0,0 +1,162 @@ +from __future__ import absolute_import + +import datetime +import json +import logging +import os.path +import sys + +from pip._vendor import lockfile +from pip._vendor.packaging import version as packaging_version + +from pip.compat import total_seconds, WINDOWS +from pip.models import PyPI +from pip.locations import USER_CACHE_DIR, running_under_virtualenv +from pip.utils import ensure_dir, get_installed_version +from pip.utils.filesystem import check_path_owner + + +SELFCHECK_DATE_FMT = "%Y-%m-%dT%H:%M:%SZ" + + +logger = logging.getLogger(__name__) + + +class VirtualenvSelfCheckState(object): + def __init__(self): + self.statefile_path = os.path.join(sys.prefix, "pip-selfcheck.json") + + # Load the existing state + try: + with open(self.statefile_path) as statefile: + self.state = json.load(statefile) + except (IOError, ValueError): + self.state = {} + + def save(self, pypi_version, current_time): + # Attempt to write out our version check file + with open(self.statefile_path, "w") as statefile: + json.dump( + { + "last_check": current_time.strftime(SELFCHECK_DATE_FMT), + "pypi_version": pypi_version, + }, + statefile, + sort_keys=True, + separators=(",", ":") + ) + + +class GlobalSelfCheckState(object): + def __init__(self): + self.statefile_path = os.path.join(USER_CACHE_DIR, "selfcheck.json") + + # Load the existing state + try: + with open(self.statefile_path) as statefile: + self.state = json.load(statefile)[sys.prefix] + except (IOError, ValueError, KeyError): + self.state = {} + + def save(self, pypi_version, current_time): + # Check to make sure that we own the directory + if not check_path_owner(os.path.dirname(self.statefile_path)): + return + + # Now that we've ensured the directory is owned by this user, we'll go + # ahead and make sure that all our directories are created. 
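+        # (editor's note) the statefile maps each environment's sys.prefix
+        # to its own {"last_check", "pypi_version"} record, so all installs
+        # can share one selfcheck.json; the LockFile below stops concurrent
+        # pip processes from clobbering each other's updates.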
+ ensure_dir(os.path.dirname(self.statefile_path)) + + # Attempt to write out our version check file + with lockfile.LockFile(self.statefile_path): + if os.path.exists(self.statefile_path): + with open(self.statefile_path) as statefile: + state = json.load(statefile) + else: + state = {} + + state[sys.prefix] = { + "last_check": current_time.strftime(SELFCHECK_DATE_FMT), + "pypi_version": pypi_version, + } + + with open(self.statefile_path, "w") as statefile: + json.dump(state, statefile, sort_keys=True, + separators=(",", ":")) + + +def load_selfcheck_statefile(): + if running_under_virtualenv(): + return VirtualenvSelfCheckState() + else: + return GlobalSelfCheckState() + + +def pip_version_check(session): + """Check for an update for pip. + + Limit the frequency of checks to once per week. State is stored either in + the active virtualenv or in the user's USER_CACHE_DIR keyed off the prefix + of the pip script path. + """ + installed_version = get_installed_version("pip") + if installed_version is None: + return + + pip_version = packaging_version.parse(installed_version) + pypi_version = None + + try: + state = load_selfcheck_statefile() + + current_time = datetime.datetime.utcnow() + # Determine if we need to refresh the state + if "last_check" in state.state and "pypi_version" in state.state: + last_check = datetime.datetime.strptime( + state.state["last_check"], + SELFCHECK_DATE_FMT + ) + if total_seconds(current_time - last_check) < 7 * 24 * 60 * 60: + pypi_version = state.state["pypi_version"] + + # Refresh the version if we need to or just see if we need to warn + if pypi_version is None: + resp = session.get( + PyPI.pip_json_url, + headers={"Accept": "application/json"}, + ) + resp.raise_for_status() + pypi_version = [ + v for v in sorted( + list(resp.json()["releases"]), + key=packaging_version.parse, + ) + if not packaging_version.parse(v).is_prerelease + ][-1] + + # save that we've performed a check + state.save(pypi_version, current_time) + + remote_version = packaging_version.parse(pypi_version) + + # Determine if our pypi_version is older + if (pip_version < remote_version and + pip_version.base_version != remote_version.base_version): + # Advise "python -m pip" on Windows to avoid issues + # with overwriting pip.exe. 
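+            # (editor's note) the running pip.exe holds itself open, so an
+            # in-place upgrade through the console script fails on Windows;
+            # "python -m pip" sidesteps that file lock.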
+ if WINDOWS: + pip_cmd = "python -m pip" + else: + pip_cmd = "pip" + logger.warning( + "You are using pip version %s, however version %s is " + "available.\nYou should consider upgrading via the " + "'%s install --upgrade pip' command.", + pip_version, pypi_version, pip_cmd + ) + + except Exception: + logger.debug( + "There was an error checking the latest version of pip", + exc_info=True, + ) diff --git a/venv/lib/python3.5/site-packages/pip/utils/setuptools_build.py b/venv/lib/python3.5/site-packages/pip/utils/setuptools_build.py new file mode 100644 index 0000000..4c9095e --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/setuptools_build.py @@ -0,0 +1,6 @@ +# Shim to wrap setup.py invocation with setuptools +SETUPTOOLS_SHIM = ( + "import setuptools, tokenize;__file__=%r;" + "exec(compile(getattr(tokenize, 'open', open)(__file__).read()" + ".replace('\\r\\n', '\\n'), __file__, 'exec'))" +) diff --git a/venv/lib/python3.5/site-packages/pip/utils/ui.py b/venv/lib/python3.5/site-packages/pip/utils/ui.py new file mode 100644 index 0000000..bba73e3 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/utils/ui.py @@ -0,0 +1,344 @@ +from __future__ import absolute_import +from __future__ import division + +import itertools +import sys +from signal import signal, SIGINT, default_int_handler +import time +import contextlib +import logging + +from pip.compat import WINDOWS +from pip.utils import format_size +from pip.utils.logging import get_indentation +from pip._vendor import six +from pip._vendor.progress.bar import Bar, IncrementalBar +from pip._vendor.progress.helpers import (WritelnMixin, + HIDE_CURSOR, SHOW_CURSOR) +from pip._vendor.progress.spinner import Spinner + +try: + from pip._vendor import colorama +# Lots of different errors can come from this, including SystemError and +# ImportError. +except Exception: + colorama = None + +logger = logging.getLogger(__name__) + + +def _select_progress_class(preferred, fallback): + encoding = getattr(preferred.file, "encoding", None) + + # If we don't know what encoding this file is in, then we'll just assume + # that it doesn't support unicode and use the ASCII bar. + if not encoding: + return fallback + + # Collect all of the possible characters we want to use with the preferred + # bar. + characters = [ + getattr(preferred, "empty_fill", six.text_type()), + getattr(preferred, "fill", six.text_type()), + ] + characters += list(getattr(preferred, "phases", [])) + + # Try to decode the characters we're using for the bar using the encoding + # of the given file, if this works then we'll assume that we can use the + # fancier bar and if not we'll fall back to the plaintext bar. + try: + six.text_type().join(characters).encode(encoding) + except UnicodeEncodeError: + return fallback + else: + return preferred + + +_BaseBar = _select_progress_class(IncrementalBar, Bar) + + +class InterruptibleMixin(object): + """ + Helper to ensure that self.finish() gets called on keyboard interrupt. + + This allows downloads to be interrupted without leaving temporary state + (like hidden cursors) behind. + + This class is similar to the progress library's existing SigIntMixin + helper, but as of version 1.2, that helper has the following problems: + + 1. It calls sys.exit(). + 2. It discards the existing SIGINT handler completely. + 3. 
It leaves its own handler in place even after an uninterrupted finish, + which will have unexpected delayed effects if the user triggers an + unrelated keyboard interrupt some time after a progress-displaying + download has already completed, for example. + """ + + def __init__(self, *args, **kwargs): + """ + Save the original SIGINT handler for later. + """ + super(InterruptibleMixin, self).__init__(*args, **kwargs) + + self.original_handler = signal(SIGINT, self.handle_sigint) + + # If signal() returns None, the previous handler was not installed from + # Python, and we cannot restore it. This probably should not happen, + # but if it does, we must restore something sensible instead, at least. + # The least bad option should be Python's default SIGINT handler, which + # just raises KeyboardInterrupt. + if self.original_handler is None: + self.original_handler = default_int_handler + + def finish(self): + """ + Restore the original SIGINT handler after finishing. + + This should happen regardless of whether the progress display finishes + normally, or gets interrupted. + """ + super(InterruptibleMixin, self).finish() + signal(SIGINT, self.original_handler) + + def handle_sigint(self, signum, frame): + """ + Call self.finish() before delegating to the original SIGINT handler. + + This handler should only be in place while the progress display is + active. + """ + self.finish() + self.original_handler(signum, frame) + + +class DownloadProgressMixin(object): + + def __init__(self, *args, **kwargs): + super(DownloadProgressMixin, self).__init__(*args, **kwargs) + self.message = (" " * (get_indentation() + 2)) + self.message + + @property + def downloaded(self): + return format_size(self.index) + + @property + def download_speed(self): + # Avoid zero division errors... + if self.avg == 0.0: + return "..." + return format_size(1 / self.avg) + "/s" + + @property + def pretty_eta(self): + if self.eta: + return "eta %s" % self.eta_td + return "" + + def iter(self, it, n=1): + for x in it: + yield x + self.next(n) + self.finish() + + +class WindowsMixin(object): + + def __init__(self, *args, **kwargs): + # The Windows terminal does not support the hide/show cursor ANSI codes + # even with colorama. So we'll ensure that hide_cursor is False on + # Windows. + # This call neds to go before the super() call, so that hide_cursor + # is set in time. The base progress bar class writes the "hide cursor" + # code to the terminal in its init, so if we don't set this soon + # enough, we get a "hide" with no corresponding "show"... + if WINDOWS and self.hide_cursor: + self.hide_cursor = False + + super(WindowsMixin, self).__init__(*args, **kwargs) + + # Check if we are running on Windows and we have the colorama module, + # if we do then wrap our file with it. + if WINDOWS and colorama: + self.file = colorama.AnsiToWin32(self.file) + # The progress code expects to be able to call self.file.isatty() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. + self.file.isatty = lambda: self.file.wrapped.isatty() + # The progress code expects to be able to call self.file.flush() + # but the colorama.AnsiToWin32() object doesn't have that, so we'll + # add it. 
+ self.file.flush = lambda: self.file.wrapped.flush() + + +class DownloadProgressBar(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin, _BaseBar): + + file = sys.stdout + message = "%(percent)d%%" + suffix = "%(downloaded)s %(download_speed)s %(pretty_eta)s" + + +class DownloadProgressSpinner(WindowsMixin, InterruptibleMixin, + DownloadProgressMixin, WritelnMixin, Spinner): + + file = sys.stdout + suffix = "%(downloaded)s %(download_speed)s" + + def next_phase(self): + if not hasattr(self, "_phaser"): + self._phaser = itertools.cycle(self.phases) + return next(self._phaser) + + def update(self): + message = self.message % self + phase = self.next_phase() + suffix = self.suffix % self + line = ''.join([ + message, + " " if message else "", + phase, + " " if suffix else "", + suffix, + ]) + + self.writeln(line) + + +################################################################ +# Generic "something is happening" spinners +# +# We don't even try using progress.spinner.Spinner here because it's actually +# simpler to reimplement from scratch than to coerce their code into doing +# what we need. +################################################################ + +@contextlib.contextmanager +def hidden_cursor(file): + # The Windows terminal does not support the hide/show cursor ANSI codes, + # even via colorama. So don't even try. + if WINDOWS: + yield + # We don't want to clutter the output with control characters if we're + # writing to a file, or if the user is running with --quiet. + # See https://github.com/pypa/pip/issues/3418 + elif not file.isatty() or logger.getEffectiveLevel() > logging.INFO: + yield + else: + file.write(HIDE_CURSOR) + try: + yield + finally: + file.write(SHOW_CURSOR) + + +class RateLimiter(object): + def __init__(self, min_update_interval_seconds): + self._min_update_interval_seconds = min_update_interval_seconds + self._last_update = 0 + + def ready(self): + now = time.time() + delta = now - self._last_update + return delta >= self._min_update_interval_seconds + + def reset(self): + self._last_update = time.time() + + +class InteractiveSpinner(object): + def __init__(self, message, file=None, spin_chars="-\\|/", + # Empirically, 8 updates/second looks nice + min_update_interval_seconds=0.125): + self._message = message + if file is None: + file = sys.stdout + self._file = file + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._finished = False + + self._spin_cycle = itertools.cycle(spin_chars) + + self._file.write(" " * get_indentation() + self._message + " ... ") + self._width = 0 + + def _write(self, status): + assert not self._finished + # Erase what we wrote before by backspacing to the beginning, writing + # spaces to overwrite the old text, and then backspacing again + backup = "\b" * self._width + self._file.write(backup + " " * self._width + backup) + # Now we have a blank slate to add our status + self._file.write(status) + self._width = len(status) + self._file.flush() + self._rate_limiter.reset() + + def spin(self): + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._write(next(self._spin_cycle)) + + def finish(self, final_status): + if self._finished: + return + self._write(final_status) + self._file.write("\n") + self._file.flush() + self._finished = True + + +# Used for dumb terminals, non-interactive installs (no tty), etc. 
+# We still print updates occasionally (once every 60 seconds by default) to +# act as a keep-alive for systems like Travis-CI that take lack-of-output as +# an indication that a task has frozen. +class NonInteractiveSpinner(object): + def __init__(self, message, min_update_interval_seconds=60): + self._message = message + self._finished = False + self._rate_limiter = RateLimiter(min_update_interval_seconds) + self._update("started") + + def _update(self, status): + assert not self._finished + self._rate_limiter.reset() + logger.info("%s: %s", self._message, status) + + def spin(self): + if self._finished: + return + if not self._rate_limiter.ready(): + return + self._update("still running...") + + def finish(self, final_status): + if self._finished: + return + self._update("finished with status '%s'" % (final_status,)) + self._finished = True + + +@contextlib.contextmanager +def open_spinner(message): + # Interactive spinner goes directly to sys.stdout rather than being routed + # through the logging system, but it acts like it has level INFO, + # i.e. it's only displayed if we're at level INFO or better. + # Non-interactive spinner goes through the logging system, so it is always + # in sync with logging configuration. + if sys.stdout.isatty() and logger.getEffectiveLevel() <= logging.INFO: + spinner = InteractiveSpinner(message) + else: + spinner = NonInteractiveSpinner(message) + try: + with hidden_cursor(sys.stdout): + yield spinner + except KeyboardInterrupt: + spinner.finish("canceled") + raise + except Exception: + spinner.finish("error") + raise + else: + spinner.finish("done") diff --git a/venv/lib/python3.5/site-packages/pip/vcs/__init__.py b/venv/lib/python3.5/site-packages/pip/vcs/__init__.py new file mode 100644 index 0000000..9dc1c60 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/vcs/__init__.py @@ -0,0 +1,363 @@ +"""Handles all VCS (version control) support""" +from __future__ import absolute_import + +import errno +import logging +import os +import shutil + +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip.exceptions import BadCommand +from pip.utils import (display_path, backup_dir, call_subprocess, + rmtree, ask_path_exists) + + +__all__ = ['vcs', 'get_src_requirement'] + + +logger = logging.getLogger(__name__) + + +class VcsSupport(object): + _registry = {} + schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn'] + + def __init__(self): + # Register more schemes with urlparse for various version control + # systems + urllib_parse.uses_netloc.extend(self.schemes) + # Python >= 2.7.4, 3.3 doesn't have uses_fragment + if getattr(urllib_parse, 'uses_fragment', None): + urllib_parse.uses_fragment.extend(self.schemes) + super(VcsSupport, self).__init__() + + def __iter__(self): + return self._registry.__iter__() + + @property + def backends(self): + return list(self._registry.values()) + + @property + def dirnames(self): + return [backend.dirname for backend in self.backends] + + @property + def all_schemes(self): + schemes = [] + for backend in self.backends: + schemes.extend(backend.schemes) + return schemes + + def register(self, cls): + if not hasattr(cls, 'name'): + logger.warning('Cannot register VCS %s', cls.__name__) + return + if cls.name not in self._registry: + self._registry[cls.name] = cls + logger.debug('Registered VCS backend: %s', cls.name) + + def unregister(self, cls=None, name=None): + if name in self._registry: + del self._registry[name] + elif cls in self._registry.values(): + del self._registry[cls.name] + else: + 
logger.warning('Cannot unregister because no class or name given')
+
+    def get_backend_name(self, location):
+        """
+        Return the name of the version control backend if found at given
+        location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
+        """
+        for vc_type in self._registry.values():
+            if vc_type.controls_location(location):
+                logger.debug('Determined that %s uses VCS: %s',
+                             location, vc_type.name)
+                return vc_type.name
+        return None
+
+    def get_backend(self, name):
+        name = name.lower()
+        if name in self._registry:
+            return self._registry[name]
+
+    def get_backend_from_location(self, location):
+        vc_type = self.get_backend_name(location)
+        if vc_type:
+            return self.get_backend(vc_type)
+        return None
+
+
+vcs = VcsSupport()
+
+
+class VersionControl(object):
+    name = ''
+    dirname = ''
+    # List of supported schemes for this Version Control
+    schemes = ()
+
+    def __init__(self, url=None, *args, **kwargs):
+        self.url = url
+        super(VersionControl, self).__init__(*args, **kwargs)
+
+    def _is_local_repository(self, repo):
+        """
+        posix absolute paths start with os.path.sep,
+        win32 ones start with a drive (like c:\\folder)
+        """
+        drive, tail = os.path.splitdrive(repo)
+        return repo.startswith(os.path.sep) or drive
+
+    # See issue #1083 for why this method was introduced:
+    # https://github.com/pypa/pip/issues/1083
+    def translate_egg_surname(self, surname):
+        # For example, Django has branches of the form "stable/1.7.x".
+        return surname.replace('/', '_')
+
+    def export(self, location):
+        """
+        Export the repository at the url to the destination location
+        i.e. only download the files, without vcs information
+        """
+        raise NotImplementedError
+
+    def get_url_rev(self):
+        """
+        Returns the correct repository URL and revision by parsing the given
+        repository URL
+        """
+        error_message = (
+            "Sorry, '%s' is a malformed VCS url. "
+            "The format is <vcs>+<protocol>://<url>, "
+            "e.g. svn+http://myrepo/svn/MyApp#egg=MyApp"
+        )
+        assert '+' in self.url, error_message % self.url
+        url = self.url.split('+', 1)[1]
+        scheme, netloc, path, query, frag = urllib_parse.urlsplit(url)
+        rev = None
+        if '@' in path:
+            path, rev = path.rsplit('@', 1)
+        url = urllib_parse.urlunsplit((scheme, netloc, path, query, ''))
+        return url, rev
+
+    def get_info(self, location):
+        """
+        Returns (url, revision), where both are strings
+        """
+        assert not location.rstrip('/').endswith(self.dirname), \
+            'Bad directory: %s' % location
+        return self.get_url(location), self.get_revision(location)
+
+    def normalize_url(self, url):
+        """
+        Normalize a URL for comparison by unquoting it and removing any
+        trailing slash.
+        """
+        return urllib_parse.unquote(url).rstrip('/')
+
+    def compare_urls(self, url1, url2):
+        """
+        Compare two repo URLs for identity, ignoring incidental differences.
+        """
+        return (self.normalize_url(url1) == self.normalize_url(url2))
+
+    def obtain(self, dest):
+        """
+        Called when installing or updating an editable package, takes the
+        source path of the checkout.
+        """
+        raise NotImplementedError
+
+    def switch(self, dest, url, rev_options):
+        """
+        Switch the repo at ``dest`` to point to ``URL``.
+        """
+        raise NotImplementedError
+
+    def update(self, dest, rev_options):
+        """
+        Update an already-existing repo to the given ``rev_options``.
+        """
+        raise NotImplementedError
+
+    def check_version(self, dest, rev_options):
+        """
+        Return True if the version is identical to what exists and
+        doesn't need to be updated.
+ """ + raise NotImplementedError + + def check_destination(self, dest, url, rev_options, rev_display): + """ + Prepare a location to receive a checkout/clone. + + Return True if the location is ready for (and requires) a + checkout/clone, False otherwise. + """ + checkout = True + prompt = False + if os.path.exists(dest): + checkout = False + if os.path.exists(os.path.join(dest, self.dirname)): + existing_url = self.get_url(dest) + if self.compare_urls(existing_url, url): + logger.debug( + '%s in %s exists, and has correct URL (%s)', + self.repo_name.title(), + display_path(dest), + url, + ) + if not self.check_version(dest, rev_options): + logger.info( + 'Updating %s %s%s', + display_path(dest), + self.repo_name, + rev_display, + ) + self.update(dest, rev_options) + else: + logger.info( + 'Skipping because already up-to-date.') + else: + logger.warning( + '%s %s in %s exists with URL %s', + self.name, + self.repo_name, + display_path(dest), + existing_url, + ) + prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ', + ('s', 'i', 'w', 'b')) + else: + logger.warning( + 'Directory %s already exists, and is not a %s %s.', + dest, + self.name, + self.repo_name, + ) + prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b')) + if prompt: + logger.warning( + 'The plan is to install the %s repository %s', + self.name, + url, + ) + response = ask_path_exists('What to do? %s' % prompt[0], + prompt[1]) + + if response == 's': + logger.info( + 'Switching %s %s to %s%s', + self.repo_name, + display_path(dest), + url, + rev_display, + ) + self.switch(dest, url, rev_options) + elif response == 'i': + # do nothing + pass + elif response == 'w': + logger.warning('Deleting %s', display_path(dest)) + rmtree(dest) + checkout = True + elif response == 'b': + dest_dir = backup_dir(dest) + logger.warning( + 'Backing up %s to %s', display_path(dest), dest_dir, + ) + shutil.move(dest, dest_dir) + checkout = True + return checkout + + def unpack(self, location): + """ + Clean up current location and download the url repository + (and vcs infos) into location + """ + if os.path.exists(location): + rmtree(location) + self.obtain(location) + + def get_src_requirement(self, dist, location): + """ + Return a string representing the requirement needed to + redownload the files currently present in location, something + like: + {repository_url}@{revision}#egg={project_name}-{version_identifier} + """ + raise NotImplementedError + + def get_url(self, location): + """ + Return the url used at location + Used in get_info or check_destination + """ + raise NotImplementedError + + def get_revision(self, location): + """ + Return the current revision of the files at location + Used in get_info + """ + raise NotImplementedError + + def run_command(self, cmd, show_stdout=True, cwd=None, + on_returncode='raise', + command_level=logging.DEBUG, command_desc=None, + extra_environ=None, spinner=None): + """ + Run a VCS subcommand + This is simply a wrapper around call_subprocess that adds the VCS + command name, and checks that the VCS is available + """ + cmd = [self.name] + cmd + try: + return call_subprocess(cmd, show_stdout, cwd, + on_returncode, command_level, + command_desc, extra_environ, + spinner) + except OSError as e: + # errno.ENOENT = no such file or directory + # In other words, the VCS executable isn't available + if e.errno == errno.ENOENT: + raise BadCommand('Cannot find command %r' % self.name) + else: + raise # re-raise exception if a different error occurred + + @classmethod + def controls_location(cls, location): + 
""" + Check if a location is controlled by the vcs. + It is meant to be overridden to implement smarter detection + mechanisms for specific vcs. + """ + logger.debug('Checking in %s for %s (%s)...', + location, cls.dirname, cls.name) + path = os.path.join(location, cls.dirname) + return os.path.exists(path) + + +def get_src_requirement(dist, location): + version_control = vcs.get_backend_from_location(location) + if version_control: + try: + return version_control().get_src_requirement(dist, + location) + except BadCommand: + logger.warning( + 'cannot determine version of editable source in %s ' + '(%s command not found in path)', + location, + version_control.name, + ) + return dist.as_requirement() + logger.warning( + 'cannot determine version of editable source in %s (is not SVN ' + 'checkout, Git clone, Mercurial clone or Bazaar branch)', + location, + ) + return dist.as_requirement() diff --git a/venv/lib/python3.5/site-packages/pip/vcs/bazaar.py b/venv/lib/python3.5/site-packages/pip/vcs/bazaar.py new file mode 100644 index 0000000..0f09584 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/vcs/bazaar.py @@ -0,0 +1,116 @@ +from __future__ import absolute_import + +import logging +import os +import tempfile + +# TODO: Get this into six.moves.urllib.parse +try: + from urllib import parse as urllib_parse +except ImportError: + import urlparse as urllib_parse + +from pip.utils import rmtree, display_path +from pip.vcs import vcs, VersionControl +from pip.download import path_to_url + + +logger = logging.getLogger(__name__) + + +class Bazaar(VersionControl): + name = 'bzr' + dirname = '.bzr' + repo_name = 'branch' + schemes = ( + 'bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', + 'bzr+lp', + ) + + def __init__(self, url=None, *args, **kwargs): + super(Bazaar, self).__init__(url, *args, **kwargs) + # Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical + # Register lp but do not expose as a scheme to support bzr+lp. 
+ if getattr(urllib_parse, 'uses_fragment', None): + urllib_parse.uses_fragment.extend(['lp']) + urllib_parse.non_hierarchical.extend(['lp']) + + def export(self, location): + """ + Export the Bazaar repository at the url to the destination location + """ + temp_dir = tempfile.mkdtemp('-export', 'pip-') + self.unpack(temp_dir) + if os.path.exists(location): + # Remove the location to make sure Bazaar can export it correctly + rmtree(location) + try: + self.run_command(['export', location], cwd=temp_dir, + show_stdout=False) + finally: + rmtree(temp_dir) + + def switch(self, dest, url, rev_options): + self.run_command(['switch', url], cwd=dest) + + def update(self, dest, rev_options): + self.run_command(['pull', '-q'] + rev_options, cwd=dest) + + def obtain(self, dest): + url, rev = self.get_url_rev() + if rev: + rev_options = ['-r', rev] + rev_display = ' (to revision %s)' % rev + else: + rev_options = [] + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + self.run_command(['branch', '-q'] + rev_options + [url, dest]) + + def get_url_rev(self): + # hotfix the URL scheme after removing bzr+ from bzr+ssh:// readd it + url, rev = super(Bazaar, self).get_url_rev() + if url.startswith('ssh://'): + url = 'bzr+' + url + return url, rev + + def get_url(self, location): + urls = self.run_command(['info'], show_stdout=False, cwd=location) + for line in urls.splitlines(): + line = line.strip() + for x in ('checkout of branch: ', + 'parent branch: '): + if line.startswith(x): + repo = line.split(x)[1] + if self._is_local_repository(repo): + return path_to_url(repo) + return repo + return None + + def get_revision(self, location): + revision = self.run_command( + ['revno'], show_stdout=False, cwd=location) + return revision.splitlines()[-1] + + def get_src_requirement(self, dist, location): + repo = self.get_url(location) + if not repo: + return None + if not repo.lower().startswith('bzr:'): + repo = 'bzr+' + repo + egg_project_name = dist.egg_name().split('-', 1)[0] + current_rev = self.get_revision(location) + return '%s@%s#egg=%s' % (repo, current_rev, egg_project_name) + + def check_version(self, dest, rev_options): + """Always assume the versions don't match""" + return False + + +vcs.register(Bazaar) diff --git a/venv/lib/python3.5/site-packages/pip/vcs/git.py b/venv/lib/python3.5/site-packages/pip/vcs/git.py new file mode 100644 index 0000000..24528de --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/vcs/git.py @@ -0,0 +1,277 @@ +from __future__ import absolute_import + +import logging +import tempfile +import os.path + +from pip.compat import samefile +from pip.exceptions import BadCommand +from pip._vendor.six.moves.urllib import parse as urllib_parse +from pip._vendor.six.moves.urllib import request as urllib_request + +from pip.utils import display_path, rmtree +from pip.vcs import vcs, VersionControl + + +urlsplit = urllib_parse.urlsplit +urlunsplit = urllib_parse.urlunsplit + + +logger = logging.getLogger(__name__) + + +class Git(VersionControl): + name = 'git' + dirname = '.git' + repo_name = 'clone' + schemes = ( + 'git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file', + ) + + def __init__(self, url=None, *args, **kwargs): + + # Works around an apparent Git bug + # (see http://article.gmane.org/gmane.comp.version-control.git/146500) + if url: + scheme, netloc, path, query, fragment = urlsplit(url) + if scheme.endswith('file'): + initial_slashes 
= path[:-len(path.lstrip('/'))] + newpath = ( + initial_slashes + + urllib_request.url2pathname(path) + .replace('\\', '/').lstrip('/') + ) + url = urlunsplit((scheme, netloc, newpath, query, fragment)) + after_plus = scheme.find('+') + 1 + url = scheme[:after_plus] + urlunsplit( + (scheme[after_plus:], netloc, newpath, query, fragment), + ) + + super(Git, self).__init__(url, *args, **kwargs) + + def export(self, location): + """Export the Git repository at the url to the destination location""" + temp_dir = tempfile.mkdtemp('-export', 'pip-') + self.unpack(temp_dir) + try: + if not location.endswith('/'): + location = location + '/' + self.run_command( + ['checkout-index', '-a', '-f', '--prefix', location], + show_stdout=False, cwd=temp_dir) + finally: + rmtree(temp_dir) + + def check_rev_options(self, rev, dest, rev_options): + """Check the revision options before checkout to compensate that tags + and branches may need origin/ as a prefix. + Returns the SHA1 of the branch or tag if found. + """ + revisions = self.get_short_refs(dest) + + origin_rev = 'origin/%s' % rev + if origin_rev in revisions: + # remote branch + return [revisions[origin_rev]] + elif rev in revisions: + # a local tag or branch name + return [revisions[rev]] + else: + logger.warning( + "Could not find a tag or branch '%s', assuming commit.", rev, + ) + return rev_options + + def check_version(self, dest, rev_options): + """ + Compare the current sha to the ref. ref may be a branch or tag name, + but current rev will always point to a sha. This means that a branch + or tag will never compare as True. So this ultimately only matches + against exact shas. + """ + return self.get_revision(dest).startswith(rev_options[0]) + + def switch(self, dest, url, rev_options): + self.run_command(['config', 'remote.origin.url', url], cwd=dest) + self.run_command(['checkout', '-q'] + rev_options, cwd=dest) + + self.update_submodules(dest) + + def update(self, dest, rev_options): + # First fetch changes from the default remote + self.run_command(['fetch', '-q'], cwd=dest) + # Then reset to wanted revision (maybe even origin/master) + if rev_options: + rev_options = self.check_rev_options( + rev_options[0], dest, rev_options, + ) + self.run_command(['reset', '--hard', '-q'] + rev_options, cwd=dest) + #: update submodules + self.update_submodules(dest) + + def obtain(self, dest): + url, rev = self.get_url_rev() + if rev: + rev_options = [rev] + rev_display = ' (to %s)' % rev + else: + rev_options = ['origin/master'] + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.info( + 'Cloning %s%s to %s', url, rev_display, display_path(dest), + ) + self.run_command(['clone', '-q', url, dest]) + + if rev: + rev_options = self.check_rev_options(rev, dest, rev_options) + # Only do a checkout if rev_options differs from HEAD + if not self.check_version(dest, rev_options): + self.run_command( + ['checkout', '-q'] + rev_options, + cwd=dest, + ) + #: repo may contain submodules + self.update_submodules(dest) + + def get_url(self, location): + """Return URL of the first remote encountered.""" + remotes = self.run_command( + ['config', '--get-regexp', 'remote\..*\.url'], + show_stdout=False, cwd=location) + first_remote = remotes.splitlines()[0] + url = first_remote.split(' ')[1] + return url.strip() + + def get_revision(self, location): + current_rev = self.run_command( + ['rev-parse', 'HEAD'], show_stdout=False, cwd=location) + return current_rev.strip() + + def get_full_refs(self, location): + """Yields tuples 
of (commit, ref) for branches and tags""" + output = self.run_command(['show-ref'], + show_stdout=False, cwd=location) + for line in output.strip().splitlines(): + commit, ref = line.split(' ', 1) + yield commit.strip(), ref.strip() + + def is_ref_remote(self, ref): + return ref.startswith('refs/remotes/') + + def is_ref_branch(self, ref): + return ref.startswith('refs/heads/') + + def is_ref_tag(self, ref): + return ref.startswith('refs/tags/') + + def is_ref_commit(self, ref): + """A ref is a commit sha if it is not anything else""" + return not any(( + self.is_ref_remote(ref), + self.is_ref_branch(ref), + self.is_ref_tag(ref), + )) + + # Should deprecate `get_refs` since it's ambiguous + def get_refs(self, location): + return self.get_short_refs(location) + + def get_short_refs(self, location): + """Return map of named refs (branches or tags) to commit hashes.""" + rv = {} + for commit, ref in self.get_full_refs(location): + ref_name = None + if self.is_ref_remote(ref): + ref_name = ref[len('refs/remotes/'):] + elif self.is_ref_branch(ref): + ref_name = ref[len('refs/heads/'):] + elif self.is_ref_tag(ref): + ref_name = ref[len('refs/tags/'):] + if ref_name is not None: + rv[ref_name] = commit + return rv + + def _get_subdirectory(self, location): + """Return the relative path of setup.py to the git repo root.""" + # find the repo root + git_dir = self.run_command(['rev-parse', '--git-dir'], + show_stdout=False, cwd=location).strip() + if not os.path.isabs(git_dir): + git_dir = os.path.join(location, git_dir) + root_dir = os.path.join(git_dir, '..') + # find setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + # relative path of setup.py to repo root + if samefile(root_dir, location): + return None + return os.path.relpath(location, root_dir) + + def get_src_requirement(self, dist, location): + repo = self.get_url(location) + if not repo.lower().startswith('git:'): + repo = 'git+' + repo + egg_project_name = dist.egg_name().split('-', 1)[0] + if not repo: + return None + current_rev = self.get_revision(location) + req = '%s@%s#egg=%s' % (repo, current_rev, egg_project_name) + subdirectory = self._get_subdirectory(location) + if subdirectory: + req += '&subdirectory=' + subdirectory + return req + + def get_url_rev(self): + """ + Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'. + That's required because although they use SSH they sometimes doesn't + work with a ssh:// scheme (e.g. Github). But we need a scheme for + parsing. Hence we remove it again afterwards and return it as a stub. 
+ """ + if '://' not in self.url: + assert 'file:' not in self.url + self.url = self.url.replace('git+', 'git+ssh://') + url, rev = super(Git, self).get_url_rev() + url = url.replace('ssh://', '') + else: + url, rev = super(Git, self).get_url_rev() + + return url, rev + + def update_submodules(self, location): + if not os.path.exists(os.path.join(location, '.gitmodules')): + return + self.run_command( + ['submodule', 'update', '--init', '--recursive', '-q'], + cwd=location, + ) + + @classmethod + def controls_location(cls, location): + if super(Git, cls).controls_location(location): + return True + try: + r = cls().run_command(['rev-parse'], + cwd=location, + show_stdout=False, + on_returncode='ignore') + return not r + except BadCommand: + logger.debug("could not determine if %s is under git control " + "because git is not available", location) + return False + + +vcs.register(Git) diff --git a/venv/lib/python3.5/site-packages/pip/vcs/mercurial.py b/venv/lib/python3.5/site-packages/pip/vcs/mercurial.py new file mode 100644 index 0000000..1aa83b9 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/vcs/mercurial.py @@ -0,0 +1,103 @@ +from __future__ import absolute_import + +import logging +import os +import tempfile + +from pip.utils import display_path, rmtree +from pip.vcs import vcs, VersionControl +from pip.download import path_to_url +from pip._vendor.six.moves import configparser + + +logger = logging.getLogger(__name__) + + +class Mercurial(VersionControl): + name = 'hg' + dirname = '.hg' + repo_name = 'clone' + schemes = ('hg', 'hg+http', 'hg+https', 'hg+ssh', 'hg+static-http') + + def export(self, location): + """Export the Hg repository at the url to the destination location""" + temp_dir = tempfile.mkdtemp('-export', 'pip-') + self.unpack(temp_dir) + try: + self.run_command( + ['archive', location], show_stdout=False, cwd=temp_dir) + finally: + rmtree(temp_dir) + + def switch(self, dest, url, rev_options): + repo_config = os.path.join(dest, self.dirname, 'hgrc') + config = configparser.SafeConfigParser() + try: + config.read(repo_config) + config.set('paths', 'default', url) + with open(repo_config, 'w') as config_file: + config.write(config_file) + except (OSError, configparser.NoSectionError) as exc: + logger.warning( + 'Could not switch Mercurial repository to %s: %s', url, exc, + ) + else: + self.run_command(['update', '-q'] + rev_options, cwd=dest) + + def update(self, dest, rev_options): + self.run_command(['pull', '-q'], cwd=dest) + self.run_command(['update', '-q'] + rev_options, cwd=dest) + + def obtain(self, dest): + url, rev = self.get_url_rev() + if rev: + rev_options = [rev] + rev_display = ' (to revision %s)' % rev + else: + rev_options = [] + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.info( + 'Cloning hg %s%s to %s', + url, + rev_display, + display_path(dest), + ) + self.run_command(['clone', '--noupdate', '-q', url, dest]) + self.run_command(['update', '-q'] + rev_options, cwd=dest) + + def get_url(self, location): + url = self.run_command( + ['showconfig', 'paths.default'], + show_stdout=False, cwd=location).strip() + if self._is_local_repository(url): + url = path_to_url(url) + return url.strip() + + def get_revision(self, location): + current_revision = self.run_command( + ['parents', '--template={rev}'], + show_stdout=False, cwd=location).strip() + return current_revision + + def get_revision_hash(self, location): + current_rev_hash = self.run_command( + ['parents', '--template={node}'], + 
show_stdout=False, cwd=location).strip() + return current_rev_hash + + def get_src_requirement(self, dist, location): + repo = self.get_url(location) + if not repo.lower().startswith('hg:'): + repo = 'hg+' + repo + egg_project_name = dist.egg_name().split('-', 1)[0] + if not repo: + return None + current_rev_hash = self.get_revision_hash(location) + return '%s@%s#egg=%s' % (repo, current_rev_hash, egg_project_name) + + def check_version(self, dest, rev_options): + """Always assume the versions don't match""" + return False + +vcs.register(Mercurial) diff --git a/venv/lib/python3.5/site-packages/pip/vcs/subversion.py b/venv/lib/python3.5/site-packages/pip/vcs/subversion.py new file mode 100644 index 0000000..aa78fa6 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/vcs/subversion.py @@ -0,0 +1,249 @@ +from __future__ import absolute_import + +import logging +import os +import re + +from pip._vendor.six.moves.urllib import parse as urllib_parse + +from pip.index import Link +from pip.utils import rmtree, display_path +from pip.utils.logging import indent_log +from pip.vcs import vcs, VersionControl + +_svn_xml_url_re = re.compile('url="([^"]+)"') +_svn_rev_re = re.compile('committed-rev="(\d+)"') +_svn_url_re = re.compile(r'URL: (.+)') +_svn_revision_re = re.compile(r'Revision: (.+)') +_svn_info_xml_rev_re = re.compile(r'\s*revision="(\d+)"') +_svn_info_xml_url_re = re.compile(r'<url>(.*)</url>') + + +logger = logging.getLogger(__name__) + + +class Subversion(VersionControl): + name = 'svn' + dirname = '.svn' + repo_name = 'checkout' + schemes = ('svn', 'svn+ssh', 'svn+http', 'svn+https', 'svn+svn') + + def get_info(self, location): + """Returns (url, revision), where both are strings""" + assert not location.rstrip('/').endswith(self.dirname), \ + 'Bad directory: %s' % location + output = self.run_command( + ['info', location], + show_stdout=False, + extra_environ={'LANG': 'C'}, + ) + match = _svn_url_re.search(output) + if not match: + logger.warning( + 'Cannot determine URL of svn checkout %s', + display_path(location), + ) + logger.debug('Output that cannot be parsed: \n%s', output) + return None, None + url = match.group(1).strip() + match = _svn_revision_re.search(output) + if not match: + logger.warning( + 'Cannot determine revision of svn checkout %s', + display_path(location), + ) + logger.debug('Output that cannot be parsed: \n%s', output) + return url, None + return url, match.group(1) + + def export(self, location): + """Export the svn repository at the url to the destination location""" + url, rev = self.get_url_rev() + rev_options = get_rev_options(url, rev) + logger.info('Exporting svn repository %s to %s', url, location) + with indent_log(): + if os.path.exists(location): + # Subversion doesn't like to check out over an existing + # directory --force fixes this, but was only added in svn 1.5 + rmtree(location) + self.run_command( + ['export'] + rev_options + [url, location], + show_stdout=False) + + def switch(self, dest, url, rev_options): + self.run_command(['switch'] + rev_options + [url, dest]) + + def update(self, dest, rev_options): + self.run_command(['update'] + rev_options + [dest]) + + def obtain(self, dest): + url, rev = self.get_url_rev() + rev_options = get_rev_options(url, rev) + if rev: + rev_display = ' (to revision %s)' % rev + else: + rev_display = '' + if self.check_destination(dest, url, rev_options, rev_display): + logger.info( + 'Checking out %s%s to %s', + url, + rev_display, + display_path(dest), + ) + self.run_command(['checkout', '-q'] + 
rev_options + [url, dest]) + + def get_location(self, dist, dependency_links): + for url in dependency_links: + egg_fragment = Link(url).egg_fragment + if not egg_fragment: + continue + if '-' in egg_fragment: + # FIXME: will this work when a package has - in the name? + key = '-'.join(egg_fragment.split('-')[:-1]).lower() + else: + key = egg_fragment + if key == dist.key: + return url.split('#', 1)[0] + return None + + def get_revision(self, location): + """ + Return the maximum revision for all files under a given location + """ + # Note: taken from setuptools.command.egg_info + revision = 0 + + for base, dirs, files in os.walk(location): + if self.dirname not in dirs: + dirs[:] = [] + continue # no sense walking uncontrolled subdirs + dirs.remove(self.dirname) + entries_fn = os.path.join(base, self.dirname, 'entries') + if not os.path.exists(entries_fn): + # FIXME: should we warn? + continue + + dirurl, localrev = self._get_svn_url_rev(base) + + if base == location: + base_url = dirurl + '/' # save the root url + elif not dirurl or not dirurl.startswith(base_url): + dirs[:] = [] + continue # not part of the same svn tree, skip it + revision = max(revision, localrev) + return revision + + def get_url_rev(self): + # hotfix the URL scheme after removing svn+ from svn+ssh:// readd it + url, rev = super(Subversion, self).get_url_rev() + if url.startswith('ssh://'): + url = 'svn+' + url + return url, rev + + def get_url(self, location): + # In cases where the source is in a subdirectory, not alongside + # setup.py we have to look up in the location until we find a real + # setup.py + orig_location = location + while not os.path.exists(os.path.join(location, 'setup.py')): + last_location = location + location = os.path.dirname(location) + if location == last_location: + # We've traversed up to the root of the filesystem without + # finding setup.py + logger.warning( + "Could not find setup.py for directory %s (tried all " + "parent directories)", + orig_location, + ) + return None + + return self._get_svn_url_rev(location)[0] + + def _get_svn_url_rev(self, location): + from pip.exceptions import InstallationError + + entries_path = os.path.join(location, self.dirname, 'entries') + if os.path.exists(entries_path): + with open(entries_path) as f: + data = f.read() + else: # subversion >= 1.7 does not have the 'entries' file + data = '' + + if (data.startswith('8') or + data.startswith('9') or + data.startswith('10')): + data = list(map(str.splitlines, data.split('\n\x0c\n'))) + del data[0][0] # get rid of the '8' + url = data[0][3] + revs = [int(d[9]) for d in data if len(d) > 9 and d[9]] + [0] + elif data.startswith('<?xml'): + match = _svn_xml_url_re.search(data) + if not match: + raise ValueError('Badly formatted data: %r' % data) + url = match.group(1) # get repository URL + revs = [int(m.group(1)) for m in _svn_rev_re.finditer(data)] + [0] + else: + try: + # subversion >= 1.7 + xml = self.run_command( + ['info', '--xml', location], + show_stdout=False, + ) + url = _svn_info_xml_url_re.search(xml).group(1) + revs = [ + int(m.group(1)) for m in _svn_info_xml_rev_re.finditer(xml) + ] + except InstallationError: + url, revs = None, [] + + if revs: + rev = max(revs) + else: + rev = 0 + + return url, rev + + def get_src_requirement(self, dist, location): + repo = self.get_url(location) + if repo is None: + return None + # FIXME: why not project name? 
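+        # dist.egg_name() yields e.g. 'Foo_Bar-1.0-py3.5' (illustrative), so
+        # the text before the first '-' is the underscore-escaped project name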
+ egg_project_name = dist.egg_name().split('-', 1)[0] + rev = self.get_revision(location) + return 'svn+%s@%s#egg=%s' % (repo, rev, egg_project_name) + + def check_version(self, dest, rev_options): + """Always assume the versions don't match""" + return False + + +def get_rev_options(url, rev): + if rev: + rev_options = ['-r', rev] + else: + rev_options = [] + + r = urllib_parse.urlsplit(url) + if hasattr(r, 'username'): + # >= Python-2.5 + username, password = r.username, r.password + else: + netloc = r[1] + if '@' in netloc: + auth = netloc.split('@')[0] + if ':' in auth: + username, password = auth.split(':', 1) + else: + username, password = auth, None + else: + username, password = None, None + + if username: + rev_options += ['--username', username] + if password: + rev_options += ['--password', password] + return rev_options + + +vcs.register(Subversion) diff --git a/venv/lib/python3.5/site-packages/pip/wheel.py b/venv/lib/python3.5/site-packages/pip/wheel.py new file mode 100644 index 0000000..1d138e6 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pip/wheel.py @@ -0,0 +1,855 @@ +""" +Support for installing and building the "wheel" binary package format. +""" +from __future__ import absolute_import + +import compileall +import csv +import errno +import functools +import hashlib +import logging +import os +import os.path +import re +import shutil +import stat +import sys +import tempfile +import warnings + +from base64 import urlsafe_b64encode +from email.parser import Parser + +from pip._vendor.six import StringIO + +import pip +from pip.compat import expanduser +from pip.download import path_to_url, unpack_url +from pip.exceptions import ( + InstallationError, InvalidWheelFilename, UnsupportedWheel) +from pip.locations import distutils_scheme, PIP_DELETE_MARKER_FILENAME +from pip import pep425tags +from pip.utils import ( + call_subprocess, ensure_dir, captured_stdout, rmtree, read_chunks, +) +from pip.utils.ui import open_spinner +from pip.utils.logging import indent_log +from pip.utils.setuptools_build import SETUPTOOLS_SHIM +from pip._vendor.distlib.scripts import ScriptMaker +from pip._vendor import pkg_resources +from pip._vendor.packaging.utils import canonicalize_name +from pip._vendor.six.moves import configparser + + +wheel_ext = '.whl' + +VERSION_COMPATIBLE = (1, 0) + + +logger = logging.getLogger(__name__) + + +class WheelCache(object): + """A cache of wheels for future installs.""" + + def __init__(self, cache_dir, format_control): + """Create a wheel cache. + + :param cache_dir: The root of the cache. + :param format_control: A pip.index.FormatControl object to limit + binaries being read from the cache. + """ + self._cache_dir = expanduser(cache_dir) if cache_dir else None + self._format_control = format_control + + def cached_wheel(self, link, package_name): + return cached_wheel( + self._cache_dir, link, self._format_control, package_name) + + +def _cache_for_link(cache_dir, link): + """ + Return a directory to store cached wheels in for link. + + Because there are M wheels for any one sdist, we provide a directory + to cache them in, and then consult that directory when looking up + cache hits. + + We only insert things into the cache if they have plausible version + numbers, so that we don't contaminate the cache with things that were not + unique. E.g. ./package might have dozens of installs done for it and build + a version of 0.0...and if we built and cached a wheel, we'd end up using + the same wheel even if the source has been edited. 
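+
+    E.g. (illustrative): a key URL whose sha224 digest begins with 'ab12cd'
+    is cached under <cache_dir>/wheels/ab/12/cd/<rest of the digest>/.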
+ + :param cache_dir: The cache_dir being used by pip. + :param link: The link of the sdist for which this will cache wheels. + """ + + # We want to generate an url to use as our cache key, we don't want to just + # re-use the URL because it might have other items in the fragment and we + # don't care about those. + key_parts = [link.url_without_fragment] + if link.hash_name is not None and link.hash is not None: + key_parts.append("=".join([link.hash_name, link.hash])) + key_url = "#".join(key_parts) + + # Encode our key url with sha224, we'll use this because it has similar + # security properties to sha256, but with a shorter total output (and thus + # less secure). However the differences don't make a lot of difference for + # our use case here. + hashed = hashlib.sha224(key_url.encode()).hexdigest() + + # We want to nest the directories some to prevent having a ton of top level + # directories where we might run out of sub directories on some FS. + parts = [hashed[:2], hashed[2:4], hashed[4:6], hashed[6:]] + + # Inside of the base location for cached wheels, expand our parts and join + # them all together. + return os.path.join(cache_dir, "wheels", *parts) + + +def cached_wheel(cache_dir, link, format_control, package_name): + if not cache_dir: + return link + if not link: + return link + if link.is_wheel: + return link + if not link.is_artifact: + return link + if not package_name: + return link + canonical_name = canonicalize_name(package_name) + formats = pip.index.fmt_ctl_formats(format_control, canonical_name) + if "binary" not in formats: + return link + root = _cache_for_link(cache_dir, link) + try: + wheel_names = os.listdir(root) + except OSError as e: + if e.errno in (errno.ENOENT, errno.ENOTDIR): + return link + raise + candidates = [] + for wheel_name in wheel_names: + try: + wheel = Wheel(wheel_name) + except InvalidWheelFilename: + continue + if not wheel.supported(): + # Built for a different python/arch/etc + continue + candidates.append((wheel.support_index_min(), wheel_name)) + if not candidates: + return link + candidates.sort() + path = os.path.join(root, candidates[0][1]) + return pip.index.Link(path_to_url(path)) + + +def rehash(path, algo='sha256', blocksize=1 << 20): + """Return (hash, length) for path using hashlib.new(algo)""" + h = hashlib.new(algo) + length = 0 + with open(path, 'rb') as f: + for block in read_chunks(f, size=blocksize): + length += len(block) + h.update(block) + digest = 'sha256=' + urlsafe_b64encode( + h.digest() + ).decode('latin1').rstrip('=') + return (digest, length) + + +def open_for_csv(name, mode): + if sys.version_info[0] < 3: + nl = {} + bin = 'b' + else: + nl = {'newline': ''} + bin = '' + return open(name, mode + bin, **nl) + + +def fix_script(path): + """Replace #!python with #!/path/to/python + Return True if file was changed.""" + # XXX RECORD hashes will need to be updated + if os.path.isfile(path): + with open(path, 'rb') as script: + firstline = script.readline() + if not firstline.startswith(b'#!python'): + return False + exename = sys.executable.encode(sys.getfilesystemencoding()) + firstline = b'#!' + exename + os.linesep.encode("ascii") + rest = script.read() + with open(path, 'wb') as script: + script.write(firstline) + script.write(rest) + return True + +dist_info_re = re.compile(r"""^(?P<namever>(?P<name>.+?)(-(?P<ver>\d.+?))?) + \.dist-info$""", re.VERBOSE) + + +def root_is_purelib(name, wheeldir): + """ + Return True if the extracted wheel in wheeldir should go into purelib. 
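+
+    The decision is driven by the 'Root-Is-Purelib: true' line in the
+    extracted wheel's .dist-info/WHEEL metadata file.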
+ """ + name_folded = name.replace("-", "_") + for item in os.listdir(wheeldir): + match = dist_info_re.match(item) + if match and match.group('name') == name_folded: + with open(os.path.join(wheeldir, item, 'WHEEL')) as wheel: + for line in wheel: + line = line.lower().rstrip() + if line == "root-is-purelib: true": + return True + return False + + +def get_entrypoints(filename): + if not os.path.exists(filename): + return {}, {} + + # This is done because you can pass a string to entry_points wrappers which + # means that they may or may not be valid INI files. The attempt here is to + # strip leading and trailing whitespace in order to make them valid INI + # files. + with open(filename) as fp: + data = StringIO() + for line in fp: + data.write(line.strip()) + data.write("\n") + data.seek(0) + + cp = configparser.RawConfigParser() + cp.optionxform = lambda option: option + cp.readfp(data) + + console = {} + gui = {} + if cp.has_section('console_scripts'): + console = dict(cp.items('console_scripts')) + if cp.has_section('gui_scripts'): + gui = dict(cp.items('gui_scripts')) + return console, gui + + +def move_wheel_files(name, req, wheeldir, user=False, home=None, root=None, + pycompile=True, scheme=None, isolated=False, prefix=None): + """Install a wheel""" + + if not scheme: + scheme = distutils_scheme( + name, user=user, home=home, root=root, isolated=isolated, + prefix=prefix, + ) + + if root_is_purelib(name, wheeldir): + lib_dir = scheme['purelib'] + else: + lib_dir = scheme['platlib'] + + info_dir = [] + data_dirs = [] + source = wheeldir.rstrip(os.path.sep) + os.path.sep + + # Record details of the files moved + # installed = files copied from the wheel to the destination + # changed = files changed while installing (scripts #! line typically) + # generated = files newly generated during the install (script wrappers) + installed = {} + changed = set() + generated = [] + + # Compile all of the pyc files that we're going to be installing + if pycompile: + with captured_stdout() as stdout: + with warnings.catch_warnings(): + warnings.filterwarnings('ignore') + compileall.compile_dir(source, force=True, quiet=True) + logger.debug(stdout.getvalue()) + + def normpath(src, p): + return os.path.relpath(src, p).replace(os.path.sep, '/') + + def record_installed(srcfile, destfile, modified=False): + """Map archive RECORD paths to installation RECORD paths.""" + oldpath = normpath(srcfile, wheeldir) + newpath = normpath(destfile, lib_dir) + installed[oldpath] = newpath + if modified: + changed.add(destfile) + + def clobber(source, dest, is_base, fixer=None, filter=None): + ensure_dir(dest) # common for the 'include' path + + for dir, subdirs, files in os.walk(source): + basedir = dir[len(source):].lstrip(os.path.sep) + destdir = os.path.join(dest, basedir) + if is_base and basedir.split(os.path.sep, 1)[0].endswith('.data'): + continue + for s in subdirs: + destsubdir = os.path.join(dest, basedir, s) + if is_base and basedir == '' and destsubdir.endswith('.data'): + data_dirs.append(s) + continue + elif (is_base and + s.endswith('.dist-info') and + # is self.req.project_name case preserving? 
+ s.lower().startswith( + req.name.replace('-', '_').lower())): + assert not info_dir, 'Multiple .dist-info directories' + info_dir.append(destsubdir) + for f in files: + # Skip unwanted files + if filter and filter(f): + continue + srcfile = os.path.join(dir, f) + destfile = os.path.join(dest, basedir, f) + # directory creation is lazy and after the file filtering above + # to ensure we don't install empty dirs; empty dirs can't be + # uninstalled. + ensure_dir(destdir) + + # We use copyfile (not move, copy, or copy2) to be extra sure + # that we are not moving directories over (copyfile fails for + # directories) as well as to ensure that we are not copying + # over any metadata because we want more control over what + # metadata we actually copy over. + shutil.copyfile(srcfile, destfile) + + # Copy over the metadata for the file, currently this only + # includes the atime and mtime. + st = os.stat(srcfile) + if hasattr(os, "utime"): + os.utime(destfile, (st.st_atime, st.st_mtime)) + + # If our file is executable, then make our destination file + # executable. + if os.access(srcfile, os.X_OK): + st = os.stat(srcfile) + permissions = ( + st.st_mode | stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH + ) + os.chmod(destfile, permissions) + + changed = False + if fixer: + changed = fixer(destfile) + record_installed(srcfile, destfile, changed) + + clobber(source, lib_dir, True) + + assert info_dir, "%s .dist-info directory not found" % req + + # Get the defined entry points + ep_file = os.path.join(info_dir[0], 'entry_points.txt') + console, gui = get_entrypoints(ep_file) + + def is_entrypoint_wrapper(name): + # EP, EP.exe and EP-script.py are scripts generated for + # entry point EP by setuptools + if name.lower().endswith('.exe'): + matchname = name[:-4] + elif name.lower().endswith('-script.py'): + matchname = name[:-10] + elif name.lower().endswith(".pya"): + matchname = name[:-4] + else: + matchname = name + # Ignore setuptools-generated scripts + return (matchname in console or matchname in gui) + + for datadir in data_dirs: + fixer = None + filter = None + for subdir in os.listdir(os.path.join(wheeldir, datadir)): + fixer = None + if subdir == 'scripts': + fixer = fix_script + filter = is_entrypoint_wrapper + source = os.path.join(wheeldir, datadir, subdir) + dest = scheme[subdir] + clobber(source, dest, False, fixer=fixer, filter=filter) + + maker = ScriptMaker(None, scheme['scripts']) + + # Ensure old scripts are overwritten. + # See https://github.com/pypa/pip/issues/1800 + maker.clobber = True + + # Ensure we don't generate any variants for scripts because this is almost + # never what somebody wants. + # See https://bitbucket.org/pypa/distlib/issue/35/ + maker.variants = set(('', )) + + # This is required because otherwise distlib creates scripts that are not + # executable. + # See https://bitbucket.org/pypa/distlib/issue/32/ + maker.set_mode = True + + # Simplify the script and fix the fact that the default script swallows + # every single stack trace. + # See https://bitbucket.org/pypa/distlib/issue/34/ + # See https://bitbucket.org/pypa/distlib/issue/33/ + def _get_script_text(entry): + if entry.suffix is None: + raise InstallationError( + "Invalid script entry point: %s for req: %s - A callable " + "suffix is required. Cf https://packaging.python.org/en/" + "latest/distributing.html#console-scripts for more " + "information." 
% (entry, req)
+            )
+        return maker.script_template % {
+            "module": entry.prefix,
+            "import_name": entry.suffix.split(".")[0],
+            "func": entry.suffix,
+        }
+
+    maker._get_script_text = _get_script_text
+    maker.script_template = """# -*- coding: utf-8 -*-
+import re
+import sys
+
+from %(module)s import %(import_name)s
+
+if __name__ == '__main__':
+    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
+    sys.exit(%(func)s())
+"""
+
+    # Special case pip and setuptools to generate versioned wrappers
+    #
+    # The issue is that some projects (specifically, pip and setuptools) use
+    # code in setup.py to create "versioned" entry points - pip2.7 on Python
+    # 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
+    # the wheel metadata at build time, and so if the wheel is installed with
+    # a *different* version of Python the entry points will be wrong. The
+    # correct fix for this is to enhance the metadata to be able to describe
+    # such versioned entry points, but that won't happen till Metadata 2.0 is
+    # available.
+    # In the meantime, projects using versioned entry points will either have
+    # incorrect versioned entry points, or they will not be able to distribute
+    # "universal" wheels (i.e., they will need a wheel per Python version).
+    #
+    # Because setuptools and pip are bundled with _ensurepip and virtualenv,
+    # we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
+    # override the versioned entry points in the wheel and generate the
+    # correct ones. This code is purely a short-term measure until Metadata
+    # 2.0 is available.
+    #
+    # Adding to the level of hack in this section of code: in order to
+    # support ensurepip, this code will look for an ``ENSUREPIP_OPTIONS``
+    # environment variable which controls which versioned scripts get
+    # installed.
+    #
+    # ENSUREPIP_OPTIONS=altinstall
+    #   - Only pipX.Y and easy_install-X.Y will be generated and installed
+    # ENSUREPIP_OPTIONS=install
+    #   - pipX.Y, pipX, easy_install-X.Y will be generated and installed.
+    #     Note that this option applies whenever ENSUREPIP_OPTIONS is set to
+    #     anything other than altinstall
+    # DEFAULT
+    #   - The default behavior is to install pip, pipX, pipX.Y, easy_install
+    #     and easy_install-X.Y.
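+    # E.g. on CPython 3.5 with ENSUREPIP_OPTIONS unset, a single 'pip' entry
+    # point in the wheel metadata yields 'pip', 'pip3' and 'pip3.5' wrappers.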
+ pip_script = console.pop('pip', None) + if pip_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + spec = 'pip = ' + pip_script + generated.extend(maker.make(spec)) + + if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall": + spec = 'pip%s = %s' % (sys.version[:1], pip_script) + generated.extend(maker.make(spec)) + + spec = 'pip%s = %s' % (sys.version[:3], pip_script) + generated.extend(maker.make(spec)) + # Delete any other versioned pip entry points + pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)] + for k in pip_ep: + del console[k] + easy_install_script = console.pop('easy_install', None) + if easy_install_script: + if "ENSUREPIP_OPTIONS" not in os.environ: + spec = 'easy_install = ' + easy_install_script + generated.extend(maker.make(spec)) + + spec = 'easy_install-%s = %s' % (sys.version[:3], easy_install_script) + generated.extend(maker.make(spec)) + # Delete any other versioned easy_install entry points + easy_install_ep = [ + k for k in console if re.match(r'easy_install(-\d\.\d)?$', k) + ] + for k in easy_install_ep: + del console[k] + + # Generate the console and GUI entry points specified in the wheel + if len(console) > 0: + generated.extend( + maker.make_multiple(['%s = %s' % kv for kv in console.items()]) + ) + if len(gui) > 0: + generated.extend( + maker.make_multiple( + ['%s = %s' % kv for kv in gui.items()], + {'gui': True} + ) + ) + + # Record pip as the installer + installer = os.path.join(info_dir[0], 'INSTALLER') + temp_installer = os.path.join(info_dir[0], 'INSTALLER.pip') + with open(temp_installer, 'wb') as installer_file: + installer_file.write(b'pip\n') + shutil.move(temp_installer, installer) + generated.append(installer) + + # Record details of all files installed + record = os.path.join(info_dir[0], 'RECORD') + temp_record = os.path.join(info_dir[0], 'RECORD.pip') + with open_for_csv(record, 'r') as record_in: + with open_for_csv(temp_record, 'w+') as record_out: + reader = csv.reader(record_in) + writer = csv.writer(record_out) + for row in reader: + row[0] = installed.pop(row[0], row[0]) + if row[0] in changed: + row[1], row[2] = rehash(row[0]) + writer.writerow(row) + for f in generated: + h, l = rehash(f) + writer.writerow((normpath(f, lib_dir), h, l)) + for f in installed: + writer.writerow((installed[f], '', '')) + shutil.move(temp_record, record) + + +def _unique(fn): + @functools.wraps(fn) + def unique(*args, **kw): + seen = set() + for item in fn(*args, **kw): + if item not in seen: + seen.add(item) + yield item + return unique + + +# TODO: this goes somewhere besides the wheel module +@_unique +def uninstallation_paths(dist): + """ + Yield all the uninstallation paths for dist based on RECORD-without-.pyc + + Yield paths to all the files in RECORD. For each .py file in RECORD, add + the .pyc in the same directory. + + UninstallPathSet.add() takes care of the __pycache__ .pyc. + """ + from pip.utils import FakeFile # circular import + r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD'))) + for row in r: + path = os.path.join(dist.location, row[0]) + yield path + if path.endswith('.py'): + dn, fn = os.path.split(path) + base = fn[:-3] + path = os.path.join(dn, base + '.pyc') + yield path + + +def wheel_version(source_dir): + """ + Return the Wheel-Version of an extracted wheel, if possible. + + Otherwise, return False if we couldn't parse / extract it. 
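+
+    The version is read from the 'Wheel-Version' header of the wheel's
+    .dist-info/WHEEL file, e.g. 'Wheel-Version: 1.0' becomes (1, 0).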
+    """
+    try:
+        dist = [d for d in pkg_resources.find_on_path(None, source_dir)][0]
+
+        wheel_data = dist.get_metadata('WHEEL')
+        wheel_data = Parser().parsestr(wheel_data)
+
+        version = wheel_data['Wheel-Version'].strip()
+        version = tuple(map(int, version.split('.')))
+        return version
+    except:
+        return False
+
+
+def check_compatibility(version, name):
+    """
+    Raises errors or warns if called with an incompatible Wheel-Version.
+
+    Pip should refuse to install a Wheel-Version that's a major series
+    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
+    installing a version only minor version ahead (e.g. 1.2 > 1.1).
+
+    version: a 2-tuple representing a Wheel-Version (Major, Minor)
+    name: name of wheel or package to raise exception about
+
+    :raises UnsupportedWheel: when an incompatible Wheel-Version is given
+    """
+    if not version:
+        raise UnsupportedWheel(
+            "%s is in an unsupported or invalid wheel" % name
+        )
+    if version[0] > VERSION_COMPATIBLE[0]:
+        raise UnsupportedWheel(
+            "%s's Wheel-Version (%s) is not compatible with this version "
+            "of pip" % (name, '.'.join(map(str, version)))
+        )
+    elif version > VERSION_COMPATIBLE:
+        logger.warning(
+            'Installing from a newer Wheel-Version (%s)',
+            '.'.join(map(str, version)),
+        )
+
+
+class Wheel(object):
+    """A wheel file"""
+
+    # TODO: maybe move the install code into this class
+
+    wheel_file_re = re.compile(
+        r"""^(?P<namever>(?P<name>.+?)-(?P<ver>\d.*?))
+        ((-(?P<build>\d.*?))?-(?P<pyver>.+?)-(?P<abi>.+?)-(?P<plat>.+?)
+        \.whl|\.dist-info)$""",
+        re.VERBOSE
+    )
+
+    def __init__(self, filename):
+        """
+        :raises InvalidWheelFilename: when the filename is invalid for a wheel
+        """
+        wheel_info = self.wheel_file_re.match(filename)
+        if not wheel_info:
+            raise InvalidWheelFilename(
+                "%s is not a valid wheel filename." % filename
+            )
+        self.filename = filename
+        self.name = wheel_info.group('name').replace('_', '-')
+        # we'll assume "_" means "-" due to wheel naming scheme
+        # (https://github.com/pypa/pip/issues/1150)
+        self.version = wheel_info.group('ver').replace('_', '-')
+        self.pyversions = wheel_info.group('pyver').split('.')
+        self.abis = wheel_info.group('abi').split('.')
+        self.plats = wheel_info.group('plat').split('.')
+
+        # All the tag combinations from this file
+        self.file_tags = set(
+            (x, y, z) for x in self.pyversions
+            for y in self.abis for z in self.plats
+        )
+
+    def support_index_min(self, tags=None):
+        """
+        Return the lowest index that one of the wheel's file_tag combinations
+        achieves in the supported_tags list, e.g. if there are 8 supported
+        tags and one of the file tags is first in the list, then return 0.
+        Returns None if the wheel is not supported.
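+
+        E.g. (illustrative): with supported tags [('cp35', 'cp35m',
+        'linux_x86_64'), ('py3', 'none', 'any')], a py3-none-any wheel
+        returns 1.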
+        """
+        if tags is None:  # for mock
+            tags = pep425tags.supported_tags
+        indexes = [tags.index(c) for c in self.file_tags if c in tags]
+        return min(indexes) if indexes else None
+
+    def supported(self, tags=None):
+        """Is this wheel supported on this system?"""
+        if tags is None:  # for mock
+            tags = pep425tags.supported_tags
+        return bool(set(tags).intersection(self.file_tags))
+
+
+class WheelBuilder(object):
+    """Build wheels from a RequirementSet."""
+
+    def __init__(self, requirement_set, finder, build_options=None,
+                 global_options=None):
+        self.requirement_set = requirement_set
+        self.finder = finder
+        self._cache_root = requirement_set._wheel_cache._cache_dir
+        self._wheel_dir = requirement_set.wheel_download_dir
+        self.build_options = build_options or []
+        self.global_options = global_options or []
+
+    def _build_one(self, req, output_dir, python_tag=None):
+        """Build one wheel.
+
+        :return: The filename of the built wheel, or None if the build failed.
+        """
+        tempd = tempfile.mkdtemp('pip-wheel-')
+        try:
+            if self.__build_one(req, tempd, python_tag=python_tag):
+                try:
+                    wheel_name = os.listdir(tempd)[0]
+                    wheel_path = os.path.join(output_dir, wheel_name)
+                    shutil.move(os.path.join(tempd, wheel_name), wheel_path)
+                    logger.info('Stored in directory: %s', output_dir)
+                    return wheel_path
+                except:
+                    pass
+            # Ignore return, we can't do anything else useful.
+            self._clean_one(req)
+            return None
+        finally:
+            rmtree(tempd)
+
+    def _base_setup_args(self, req):
+        return [
+            sys.executable, "-u", '-c',
+            SETUPTOOLS_SHIM % req.setup_py
+        ] + list(self.global_options)
+
+    def __build_one(self, req, tempd, python_tag=None):
+        base_args = self._base_setup_args(req)
+
+        spin_message = 'Running setup.py bdist_wheel for %s' % (req.name,)
+        with open_spinner(spin_message) as spinner:
+            logger.debug('Destination directory: %s', tempd)
+            wheel_args = base_args + ['bdist_wheel', '-d', tempd] \
+                + self.build_options
+
+            if python_tag is not None:
+                wheel_args += ["--python-tag", python_tag]
+
+            try:
+                call_subprocess(wheel_args, cwd=req.setup_py_dir,
+                                show_stdout=False, spinner=spinner)
+                return True
+            except:
+                spinner.finish("error")
+                logger.error('Failed building wheel for %s', req.name)
+                return False
+
+    def _clean_one(self, req):
+        base_args = self._base_setup_args(req)
+
+        logger.info('Running setup.py clean for %s', req.name)
+        clean_args = base_args + ['clean', '--all']
+        try:
+            call_subprocess(clean_args, cwd=req.source_dir, show_stdout=False)
+            return True
+        except:
+            logger.error('Failed cleaning build dir for %s', req.name)
+            return False
+
+    def build(self, autobuilding=False):
+        """Build wheels.
+
+        :param autobuilding: If True, build wheels into the wheel cache and
+            replace the sdist we built from with the newly built wheel, in
+            preparation for installation.
+        :return: True if all the wheels built correctly.
+ """ + assert self._wheel_dir or (autobuilding and self._cache_root) + # unpack sdists and constructs req set + self.requirement_set.prepare_files(self.finder) + + reqset = self.requirement_set.requirements.values() + + buildset = [] + for req in reqset: + if req.constraint: + continue + if req.is_wheel: + if not autobuilding: + logger.info( + 'Skipping %s, due to already being wheel.', req.name) + elif req.editable: + if not autobuilding: + logger.info( + 'Skipping bdist_wheel for %s, due to being editable', + req.name) + elif autobuilding and req.link and not req.link.is_artifact: + pass + elif autobuilding and not req.source_dir: + pass + else: + if autobuilding: + link = req.link + base, ext = link.splitext() + if pip.index.egg_info_matches(base, None, link) is None: + # Doesn't look like a package - don't autobuild a wheel + # because we'll have no way to lookup the result sanely + continue + if "binary" not in pip.index.fmt_ctl_formats( + self.finder.format_control, + canonicalize_name(req.name)): + logger.info( + "Skipping bdist_wheel for %s, due to binaries " + "being disabled for it.", req.name) + continue + buildset.append(req) + + if not buildset: + return True + + # Build the wheels. + logger.info( + 'Building wheels for collected packages: %s', + ', '.join([req.name for req in buildset]), + ) + with indent_log(): + build_success, build_failure = [], [] + for req in buildset: + python_tag = None + if autobuilding: + python_tag = pep425tags.implementation_tag + output_dir = _cache_for_link(self._cache_root, req.link) + try: + ensure_dir(output_dir) + except OSError as e: + logger.warn("Building wheel for %s failed: %s", + req.name, e) + build_failure.append(req) + continue + else: + output_dir = self._wheel_dir + wheel_file = self._build_one( + req, output_dir, + python_tag=python_tag, + ) + if wheel_file: + build_success.append(req) + if autobuilding: + # XXX: This is mildly duplicative with prepare_files, + # but not close enough to pull out to a single common + # method. + # The code below assumes temporary source dirs - + # prevent it doing bad things. + if req.source_dir and not os.path.exists(os.path.join( + req.source_dir, PIP_DELETE_MARKER_FILENAME)): + raise AssertionError( + "bad source dir - missing marker") + # Delete the source we built the wheel from + req.remove_temporary_source() + # set the build directory again - name is known from + # the work prepare_files did. + req.source_dir = req.build_location( + self.requirement_set.build_dir) + # Update the link for this. 
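+                        # the requirement now points at the freshly built
+                        # wheel and is re-extracted from it below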
+ req.link = pip.index.Link( + path_to_url(wheel_file)) + assert req.link.is_wheel + # extract the wheel into the dir + unpack_url( + req.link, req.source_dir, None, False, + session=self.requirement_set.session) + else: + build_failure.append(req) + + # notify success/failure + if build_success: + logger.info( + 'Successfully built %s', + ' '.join([req.name for req in build_success]), + ) + if build_failure: + logger.info( + 'Failed to build %s', + ' '.join([req.name for req in build_failure]), + ) + # Return True if all builds were successful + return len(build_failure) == 0 diff --git a/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/DESCRIPTION.rst b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..e118723 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,3 @@ +UNKNOWN + + diff --git a/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/INSTALLER b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/METADATA b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/METADATA new file mode 100644 index 0000000..7a50487 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/METADATA @@ -0,0 +1,13 @@ +Metadata-Version: 2.0 +Name: pkg_resources +Version: 0.0.0 +Summary: UNKNOWN +Home-page: UNKNOWN +Author: UNKNOWN +Author-email: UNKNOWN +License: UNKNOWN +Platform: UNKNOWN + +UNKNOWN + + diff --git a/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/RECORD b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/RECORD new file mode 100644 index 0000000..3b0bcf6 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/RECORD @@ -0,0 +1,34 @@ +pkg_resources/__init__.py,sha256=5HdIlZ7QyXPbrDN2qXRsVWpMQLbeLZMgniIX5q4WKq4,101373 +pkg_resources/_vendor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +pkg_resources/_vendor/pyparsing.py,sha256=ic8qmDPiq8Li-Y0PeZcI56rEyMqevKNBK6hr6FbyVBc,160425 +pkg_resources/_vendor/six.py,sha256=A6hdJZVjI3t_geebZ9BzUvwRrIXo0lfwzQlM2LcKyas,30098 +pkg_resources/_vendor/packaging/__about__.py,sha256=zubjKyIqNuGdgnuwpl6C_ctMx-Zn54IhD6ejN0EZaU0,720 +pkg_resources/_vendor/packaging/__init__.py,sha256=_vNac5TrzwsrzbOFIbF-5cHqc_Y2aPT2D7zrIR06BOo,513 +pkg_resources/_vendor/packaging/_compat.py,sha256=Vi_A0rAQeHbU-a9X0tt1yQm9RqkgQbDSxzRw8WlU9kA,860 +pkg_resources/_vendor/packaging/_structures.py,sha256=RImECJ4c_wTlaTYYwZYLHEiebDMaAJmK1oPARhw1T5o,1416 +pkg_resources/_vendor/packaging/markers.py,sha256=eAjn0nigd-O5wN4R8Gc82OmkA9AgpQ3qBFsewqCa8yk,7573 +pkg_resources/_vendor/packaging/requirements.py,sha256=SikL2UynbsT0qtY9ltqngndha_sfo0w6XGFhAhoSoaQ,4355 +pkg_resources/_vendor/packaging/specifiers.py,sha256=SAMRerzO3fK2IkFZCaZkuwZaL_EGqHNOz4pni4vhnN0,28025 +pkg_resources/_vendor/packaging/utils.py,sha256=3m6WvPm6NNxE8rkTGmn0r75B_GZSGg7ikafxHsBN1WA,421 +pkg_resources/_vendor/packaging/version.py,sha256=OwGnxYfr2ghNzYx59qWIBkrK3SnB6n-Zfd1XaLpnnM0,11556 +pkg_resources/extern/__init__.py,sha256=rMBTxKimjNg8plSH94cB-y52pKO0zmM-AkFL30lZGfY,2474 +pkg_resources-0.0.0.dist-info/DESCRIPTION.rst,sha256=OCTuuN6LcWulhHS3d5rfjdsQtW22n7HENFRh6jC6ego,10 
+pkg_resources-0.0.0.dist-info/METADATA,sha256=FOYDX6cmnDUkWo-yhqWQYtjKIMZR2IW2G1GFZhA6gUQ,177 +pkg_resources-0.0.0.dist-info/RECORD,, +pkg_resources-0.0.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +pkg_resources-0.0.0.dist-info/metadata.json,sha256=8ZVRFU96pY_wnWouockCkvXw981Y0iDB5nQFFGq8ZiY,221 +pkg_resources-0.0.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +pkg_resources/_vendor/packaging/__pycache__/requirements.cpython-35.pyc,, +pkg_resources/_vendor/packaging/__pycache__/utils.cpython-35.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_compat.cpython-35.pyc,, +pkg_resources/_vendor/packaging/__pycache__/specifiers.cpython-35.pyc,, +pkg_resources/_vendor/packaging/__pycache__/markers.cpython-35.pyc,, +pkg_resources/__pycache__/__init__.cpython-35.pyc,, +pkg_resources/_vendor/packaging/__pycache__/__about__.cpython-35.pyc,, +pkg_resources/_vendor/packaging/__pycache__/_structures.cpython-35.pyc,, +pkg_resources/_vendor/__pycache__/pyparsing.cpython-35.pyc,, +pkg_resources/_vendor/__pycache__/__init__.cpython-35.pyc,, +pkg_resources/_vendor/__pycache__/six.cpython-35.pyc,, +pkg_resources/_vendor/packaging/__pycache__/version.cpython-35.pyc,, +pkg_resources/extern/__pycache__/__init__.cpython-35.pyc,, +pkg_resources/_vendor/packaging/__pycache__/__init__.cpython-35.pyc,, diff --git a/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/WHEEL b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/metadata.json b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/metadata.json new file mode 100644 index 0000000..f7d360a --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources-0.0.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"extensions": {"python.details": {"document_names": {"description": "DESCRIPTION.rst"}}}, "generator": "bdist_wheel (0.29.0)", "metadata_version": "2.0", "name": "pkg_resources", "summary": "UNKNOWN", "version": "0.0.0"} \ No newline at end of file diff --git a/venv/lib/python3.5/site-packages/pkg_resources/__init__.py b/venv/lib/python3.5/site-packages/pkg_resources/__init__.py new file mode 100644 index 0000000..ce4e775 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/__init__.py @@ -0,0 +1,2956 @@ +""" +Package resource API +-------------------- + +A resource is a logical file contained within a package, or a logical +subdirectory thereof. The package resource API expects resource names +to have their path parts separated with ``/``, *not* whatever the local +path separator is. Do not use os.path operations to manipulate resource +names being passed into the API. + +The package resource API is designed to work with normal filesystem packages, +.egg files, and unpacked .egg files. It can also work in a limited way with +.zip files and with custom PEP 302 loaders that support the ``get_data()`` +method. 
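+
+For example, ``resource_string(__name__, 'data/sample.txt')`` (an
+illustrative resource name) behaves the same whether the package lives on
+the filesystem or inside a zipped .egg.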
+""" + +from __future__ import absolute_import + +import sys +import os +import io +import time +import re +import types +import zipfile +import zipimport +import warnings +import stat +import functools +import pkgutil +import operator +import platform +import collections +import plistlib +import email.parser +import tempfile +import textwrap +from pkgutil import get_importer + +try: + import _imp +except ImportError: + # Python 3.2 compatibility + import imp as _imp + +from pkg_resources.extern import six +from pkg_resources.extern.six.moves import urllib, map, filter + +# capture these to bypass sandboxing +from os import utime +try: + from os import mkdir, rename, unlink + WRITE_SUPPORT = True +except ImportError: + # no write support, probably under GAE + WRITE_SUPPORT = False + +from os import open as os_open +from os.path import isdir, split + +try: + import importlib.machinery as importlib_machinery + # access attribute to force import under delayed import mechanisms. + importlib_machinery.__name__ +except ImportError: + importlib_machinery = None + +from pkg_resources.extern import packaging +__import__('pkg_resources.extern.packaging.version') +__import__('pkg_resources.extern.packaging.specifiers') +__import__('pkg_resources.extern.packaging.requirements') +__import__('pkg_resources.extern.packaging.markers') + + +if (3, 0) < sys.version_info < (3, 3): + msg = ( + "Support for Python 3.0-3.2 has been dropped. Future versions " + "will fail here." + ) + warnings.warn(msg) + +# declare some globals that will be defined later to +# satisfy the linters. +require = None +working_set = None + + +class PEP440Warning(RuntimeWarning): + """ + Used when there is an issue with a version or specifier not complying with + PEP 440. + """ + + +class _SetuptoolsVersionMixin(object): + + def __hash__(self): + return super(_SetuptoolsVersionMixin, self).__hash__() + + def __lt__(self, other): + if isinstance(other, tuple): + return tuple(self) < other + else: + return super(_SetuptoolsVersionMixin, self).__lt__(other) + + def __le__(self, other): + if isinstance(other, tuple): + return tuple(self) <= other + else: + return super(_SetuptoolsVersionMixin, self).__le__(other) + + def __eq__(self, other): + if isinstance(other, tuple): + return tuple(self) == other + else: + return super(_SetuptoolsVersionMixin, self).__eq__(other) + + def __ge__(self, other): + if isinstance(other, tuple): + return tuple(self) >= other + else: + return super(_SetuptoolsVersionMixin, self).__ge__(other) + + def __gt__(self, other): + if isinstance(other, tuple): + return tuple(self) > other + else: + return super(_SetuptoolsVersionMixin, self).__gt__(other) + + def __ne__(self, other): + if isinstance(other, tuple): + return tuple(self) != other + else: + return super(_SetuptoolsVersionMixin, self).__ne__(other) + + def __getitem__(self, key): + return tuple(self)[key] + + def __iter__(self): + component_re = re.compile(r'(\d+ | [a-z]+ | \.| -)', re.VERBOSE) + replace = { + 'pre': 'c', + 'preview': 'c', + '-': 'final-', + 'rc': 'c', + 'dev': '@', + }.get + + def _parse_version_parts(s): + for part in component_re.split(s): + part = replace(part, part) + if not part or part == '.': + continue + if part[:1] in '0123456789': + # pad for numeric comparison + yield part.zfill(8) + else: + yield '*'+part + + # ensure that alpha/beta/candidate are before final + yield '*final' + + def old_parse_version(s): + parts = [] + for part in _parse_version_parts(s.lower()): + if part.startswith('*'): + # remove '-' before a 
prerelease tag + if part < '*final': + while parts and parts[-1] == '*final-': + parts.pop() + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == '00000000': + parts.pop() + parts.append(part) + return tuple(parts) + + # Warn for use of this function + warnings.warn( + "You have iterated over the result of " + "pkg_resources.parse_version. This is a legacy behavior which is " + "inconsistent with the new version class introduced in setuptools " + "8.0. In most cases, conversion to a tuple is unnecessary. For " + "comparison of versions, sort the Version instances directly. If " + "you have another use case requiring the tuple, please file a " + "bug with the setuptools project describing that need.", + RuntimeWarning, + stacklevel=1, + ) + + for part in old_parse_version(str(self)): + yield part + + +class SetuptoolsVersion(_SetuptoolsVersionMixin, packaging.version.Version): + pass + + +class SetuptoolsLegacyVersion(_SetuptoolsVersionMixin, + packaging.version.LegacyVersion): + pass + + +def parse_version(v): + try: + return SetuptoolsVersion(v) + except packaging.version.InvalidVersion: + return SetuptoolsLegacyVersion(v) + + +_state_vars = {} + +def _declare_state(vartype, **kw): + globals().update(kw) + _state_vars.update(dict.fromkeys(kw, vartype)) + +def __getstate__(): + state = {} + g = globals() + for k, v in _state_vars.items(): + state[k] = g['_sget_'+v](g[k]) + return state + +def __setstate__(state): + g = globals() + for k, v in state.items(): + g['_sset_'+_state_vars[k]](k, g[k], v) + return state + +def _sget_dict(val): + return val.copy() + +def _sset_dict(key, ob, state): + ob.clear() + ob.update(state) + +def _sget_object(val): + return val.__getstate__() + +def _sset_object(key, ob, state): + ob.__setstate__(state) + +_sget_none = _sset_none = lambda *args: None + + +def get_supported_platform(): + """Return this platform's maximum compatible version. + + distutils.util.get_platform() normally reports the minimum version + of Mac OS X that would be required to *use* extensions produced by + distutils. But what we want when checking compatibility is to know the + version of Mac OS X that we are *running*. To allow usage of packages that + explicitly require a newer version of Mac OS X, we must also know the + current version of the OS. + + If this condition occurs for any other platform with a version in its + platform strings, this function should be extended accordingly. 
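+
+    E.g. (illustrative): where the build platform is reported as
+    'macosx-10.6-intel', this returns 'macosx-10.11-intel' when running on
+    OS X 10.11.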
+ """ + plat = get_build_platform() + m = macosVersionString.match(plat) + if m is not None and sys.platform == "darwin": + try: + plat = 'macosx-%s-%s' % ('.'.join(_macosx_vers()[:2]), m.group(3)) + except ValueError: + # not Mac OS X + pass + return plat + +__all__ = [ + # Basic resource access and distribution/entry point discovery + 'require', 'run_script', 'get_provider', 'get_distribution', + 'load_entry_point', 'get_entry_map', 'get_entry_info', + 'iter_entry_points', + 'resource_string', 'resource_stream', 'resource_filename', + 'resource_listdir', 'resource_exists', 'resource_isdir', + + # Environmental control + 'declare_namespace', 'working_set', 'add_activation_listener', + 'find_distributions', 'set_extraction_path', 'cleanup_resources', + 'get_default_cache', + + # Primary implementation classes + 'Environment', 'WorkingSet', 'ResourceManager', + 'Distribution', 'Requirement', 'EntryPoint', + + # Exceptions + 'ResolutionError', 'VersionConflict', 'DistributionNotFound', + 'UnknownExtra', 'ExtractionError', + + # Warnings + 'PEP440Warning', + + # Parsing functions and string utilities + 'parse_requirements', 'parse_version', 'safe_name', 'safe_version', + 'get_platform', 'compatible_platforms', 'yield_lines', 'split_sections', + 'safe_extra', 'to_filename', 'invalid_marker', 'evaluate_marker', + + # filesystem utilities + 'ensure_directory', 'normalize_path', + + # Distribution "precedence" constants + 'EGG_DIST', 'BINARY_DIST', 'SOURCE_DIST', 'CHECKOUT_DIST', 'DEVELOP_DIST', + + # "Provider" interfaces, implementations, and registration/lookup APIs + 'IMetadataProvider', 'IResourceProvider', 'FileMetadata', + 'PathMetadata', 'EggMetadata', 'EmptyProvider', 'empty_provider', + 'NullProvider', 'EggProvider', 'DefaultProvider', 'ZipProvider', + 'register_finder', 'register_namespace_handler', 'register_loader_type', + 'fixup_namespace_packages', 'get_importer', + + # Deprecated/backward compatibility only + 'run_main', 'AvailableDistributions', +] + +class ResolutionError(Exception): + """Abstract base for dependency resolution errors""" + def __repr__(self): + return self.__class__.__name__+repr(self.args) + + +class VersionConflict(ResolutionError): + """ + An already-installed version conflicts with the requested version. + + Should be initialized with the installed Distribution and the requested + Requirement. + """ + + _template = "{self.dist} is installed but {self.req} is required" + + @property + def dist(self): + return self.args[0] + + @property + def req(self): + return self.args[1] + + def report(self): + return self._template.format(**locals()) + + def with_context(self, required_by): + """ + If required_by is non-empty, return a version of self that is a + ContextualVersionConflict. + """ + if not required_by: + return self + args = self.args + (required_by,) + return ContextualVersionConflict(*args) + + +class ContextualVersionConflict(VersionConflict): + """ + A VersionConflict that accepts a third parameter, the set of the + requirements that required the installed Distribution. 
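+
+    Its report then reads e.g. (illustrative): "foo 1.0 is installed but
+    foo>=2.0 is required by {'bar'}".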
+ """ + + _template = VersionConflict._template + ' by {self.required_by}' + + @property + def required_by(self): + return self.args[2] + + +class DistributionNotFound(ResolutionError): + """A requested distribution was not found""" + + _template = ("The '{self.req}' distribution was not found " + "and is required by {self.requirers_str}") + + @property + def req(self): + return self.args[0] + + @property + def requirers(self): + return self.args[1] + + @property + def requirers_str(self): + if not self.requirers: + return 'the application' + return ', '.join(self.requirers) + + def report(self): + return self._template.format(**locals()) + + def __str__(self): + return self.report() + + +class UnknownExtra(ResolutionError): + """Distribution doesn't have an "extra feature" of the given name""" +_provider_factories = {} + +PY_MAJOR = sys.version[:3] +EGG_DIST = 3 +BINARY_DIST = 2 +SOURCE_DIST = 1 +CHECKOUT_DIST = 0 +DEVELOP_DIST = -1 + +def register_loader_type(loader_type, provider_factory): + """Register `provider_factory` to make providers for `loader_type` + + `loader_type` is the type or class of a PEP 302 ``module.__loader__``, + and `provider_factory` is a function that, passed a *module* object, + returns an ``IResourceProvider`` for that module. + """ + _provider_factories[loader_type] = provider_factory + +def get_provider(moduleOrReq): + """Return an IResourceProvider for the named module or requirement""" + if isinstance(moduleOrReq, Requirement): + return working_set.find(moduleOrReq) or require(str(moduleOrReq))[0] + try: + module = sys.modules[moduleOrReq] + except KeyError: + __import__(moduleOrReq) + module = sys.modules[moduleOrReq] + loader = getattr(module, '__loader__', None) + return _find_adapter(_provider_factories, loader)(module) + +def _macosx_vers(_cache=[]): + if not _cache: + version = platform.mac_ver()[0] + # fallback for MacPorts + if version == '': + plist = '/System/Library/CoreServices/SystemVersion.plist' + if os.path.exists(plist): + if hasattr(plistlib, 'readPlist'): + plist_content = plistlib.readPlist(plist) + if 'ProductVersion' in plist_content: + version = plist_content['ProductVersion'] + + _cache.append(version.split('.')) + return _cache[0] + +def _macosx_arch(machine): + return {'PowerPC': 'ppc', 'Power_Macintosh': 'ppc'}.get(machine, machine) + +def get_build_platform(): + """Return this platform's string for platform-specific distributions + + XXX Currently this is the same as ``distutils.util.get_platform()``, but it + needs some hacks for Linux and Mac OS X. + """ + try: + # Python 2.7 or >=3.2 + from sysconfig import get_platform + except ImportError: + from distutils.util import get_platform + + plat = get_platform() + if sys.platform == "darwin" and not plat.startswith('macosx-'): + try: + version = _macosx_vers() + machine = os.uname()[4].replace(" ", "_") + return "macosx-%d.%d-%s" % (int(version[0]), int(version[1]), + _macosx_arch(machine)) + except ValueError: + # if someone is running a non-Mac darwin system, this will fall + # through to the default implementation + pass + return plat + +macosVersionString = re.compile(r"macosx-(\d+)\.(\d+)-(.*)") +darwinVersionString = re.compile(r"darwin-(\d+)\.(\d+)\.(\d+)-(.*)") +# XXX backward compat +get_platform = get_build_platform + + +def compatible_platforms(provided, required): + """Can code for the `provided` platform run on the `required` platform? + + Returns true if either platform is ``None``, or the platforms are equal. 
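+
+    E.g. (illustrative): a 'macosx-10.3-ppc' egg is accepted on a
+    'macosx-10.4-ppc' platform, since OS X minor updates are backwards
+    compatible.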
+ + XXX Needs compatibility checks for Linux and other unixy OSes. + """ + if provided is None or required is None or provided==required: + # easy case + return True + + # Mac OS X special cases + reqMac = macosVersionString.match(required) + if reqMac: + provMac = macosVersionString.match(provided) + + # is this a Mac package? + if not provMac: + # this is backwards compatibility for packages built before + # setuptools 0.6. All packages built after this point will + # use the new macosx designation. + provDarwin = darwinVersionString.match(provided) + if provDarwin: + dversion = int(provDarwin.group(1)) + macosversion = "%s.%s" % (reqMac.group(1), reqMac.group(2)) + if dversion == 7 and macosversion >= "10.3" or \ + dversion == 8 and macosversion >= "10.4": + return True + # egg isn't macosx or legacy darwin + return False + + # are they the same major version and machine type? + if provMac.group(1) != reqMac.group(1) or \ + provMac.group(3) != reqMac.group(3): + return False + + # is the required OS major update >= the provided one? + if int(provMac.group(2)) > int(reqMac.group(2)): + return False + + return True + + # XXX Linux and other platforms' special cases should go here + return False + + +def run_script(dist_spec, script_name): + """Locate distribution `dist_spec` and run its `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + require(dist_spec)[0].run_script(script_name, ns) + +# backward compatibility +run_main = run_script + +def get_distribution(dist): + """Return a current distribution object for a Requirement or string""" + if isinstance(dist, six.string_types): + dist = Requirement.parse(dist) + if isinstance(dist, Requirement): + dist = get_provider(dist) + if not isinstance(dist, Distribution): + raise TypeError("Expected string, Requirement, or Distribution", dist) + return dist + +def load_entry_point(dist, group, name): + """Return `name` entry point of `group` for `dist` or raise ImportError""" + return get_distribution(dist).load_entry_point(group, name) + +def get_entry_map(dist, group=None): + """Return the entry point map for `group`, or the full entry map""" + return get_distribution(dist).get_entry_map(group) + +def get_entry_info(dist, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return get_distribution(dist).get_entry_info(group, name) + + +class IMetadataProvider: + + def has_metadata(name): + """Does the package's distribution contain the named metadata?""" + + def get_metadata(name): + """The named metadata resource as a string""" + + def get_metadata_lines(name): + """Yield named metadata resource as list of non-blank non-comment lines + + Leading and trailing whitespace is stripped from each line, and lines + with ``#`` as the first non-blank character are omitted.""" + + def metadata_isdir(name): + """Is the named metadata a directory? 
(like ``os.path.isdir()``)""" + + def metadata_listdir(name): + """List of metadata names in the directory (like ``os.listdir()``)""" + + def run_script(script_name, namespace): + """Execute the named script in the supplied namespace dictionary""" + + +class IResourceProvider(IMetadataProvider): + """An object that provides access to package resources""" + + def get_resource_filename(manager, resource_name): + """Return a true filesystem path for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_stream(manager, resource_name): + """Return a readable file-like object for `resource_name` + + `manager` must be an ``IResourceManager``""" + + def get_resource_string(manager, resource_name): + """Return a string containing the contents of `resource_name` + + `manager` must be an ``IResourceManager``""" + + def has_resource(resource_name): + """Does the package contain the named resource?""" + + def resource_isdir(resource_name): + """Is the named resource a directory? (like ``os.path.isdir()``)""" + + def resource_listdir(resource_name): + """List of resource names in the directory (like ``os.listdir()``)""" + + +class WorkingSet(object): + """A collection of active distributions on sys.path (or a similar list)""" + + def __init__(self, entries=None): + """Create working set from list of path entries (default=sys.path)""" + self.entries = [] + self.entry_keys = {} + self.by_key = {} + self.callbacks = [] + + if entries is None: + entries = sys.path + + for entry in entries: + self.add_entry(entry) + + @classmethod + def _build_master(cls): + """ + Prepare the master working set. + """ + ws = cls() + try: + from __main__ import __requires__ + except ImportError: + # The main program does not list any requirements + return ws + + # ensure the requirements are met + try: + ws.require(__requires__) + except VersionConflict: + return cls._build_from_requirements(__requires__) + + return ws + + @classmethod + def _build_from_requirements(cls, req_spec): + """ + Build a working set from a requirement spec. Rewrites sys.path. + """ + # try it without defaults already on sys.path + # by starting with an empty path + ws = cls([]) + reqs = parse_requirements(req_spec) + dists = ws.resolve(reqs, Environment()) + for dist in dists: + ws.add(dist) + + # add any missing entries from sys.path + for entry in sys.path: + if entry not in ws.entries: + ws.add_entry(entry) + + # then copy back to sys.path + sys.path[:] = ws.entries + return ws + + def add_entry(self, entry): + """Add a path item to ``.entries``, finding any distributions on it + + ``find_distributions(entry, True)`` is used to find distributions + corresponding to the path entry, and they are added. `entry` is + always appended to ``.entries``, even if it is already present. + (This is because ``sys.path`` can contain the same value more than + once, and the ``.entries`` of the ``sys.path`` WorkingSet should always + equal ``sys.path``.) + """ + self.entry_keys.setdefault(entry, []) + self.entries.append(entry) + for dist in find_distributions(entry, True): + self.add(dist, entry, False) + + def __contains__(self, dist): + """True if `dist` is the active distribution for its project""" + return self.by_key.get(dist.key) == dist + + def find(self, req): + """Find a distribution matching requirement `req` + + If there is an active distribution for the requested project, this + returns it as long as it meets the version requirement specified by + `req`. 
But, if there is an active distribution for the project and it + does *not* meet the `req` requirement, ``VersionConflict`` is raised. + If there is no active distribution for the requested project, ``None`` + is returned. + """ + dist = self.by_key.get(req.key) + if dist is not None and dist not in req: + # XXX add more info + raise VersionConflict(dist, req) + return dist + + def iter_entry_points(self, group, name=None): + """Yield entry point objects from `group` matching `name` + + If `name` is None, yields all entry points in `group` from all + distributions in the working set, otherwise only ones matching + both `group` and `name` are yielded (in distribution order). + """ + for dist in self: + entries = dist.get_entry_map(group) + if name is None: + for ep in entries.values(): + yield ep + elif name in entries: + yield entries[name] + + def run_script(self, requires, script_name): + """Locate distribution for `requires` and run `script_name` script""" + ns = sys._getframe(1).f_globals + name = ns['__name__'] + ns.clear() + ns['__name__'] = name + self.require(requires)[0].run_script(script_name, ns) + + def __iter__(self): + """Yield distributions for non-duplicate projects in the working set + + The yield order is the order in which the items' path entries were + added to the working set. + """ + seen = {} + for item in self.entries: + if item not in self.entry_keys: + # workaround a cache issue + continue + + for key in self.entry_keys[item]: + if key not in seen: + seen[key]=1 + yield self.by_key[key] + + def add(self, dist, entry=None, insert=True, replace=False): + """Add `dist` to working set, associated with `entry` + + If `entry` is unspecified, it defaults to the ``.location`` of `dist`. + On exit from this routine, `entry` is added to the end of the working + set's ``.entries`` (if it wasn't already present). + + `dist` is only added to the working set if it's for a project that + doesn't already have a distribution in the set, unless `replace=True`. + If it's added, any callbacks registered with the ``subscribe()`` method + will be called. + """ + if insert: + dist.insert_on(self.entries, entry, replace=replace) + + if entry is None: + entry = dist.location + keys = self.entry_keys.setdefault(entry,[]) + keys2 = self.entry_keys.setdefault(dist.location,[]) + if not replace and dist.key in self.by_key: + # ignore hidden distros + return + + self.by_key[dist.key] = dist + if dist.key not in keys: + keys.append(dist.key) + if dist.key not in keys2: + keys2.append(dist.key) + self._added_new(dist) + + def resolve(self, requirements, env=None, installer=None, + replace_conflicting=False): + """List all distributions needed to (recursively) meet `requirements` + + `requirements` must be a sequence of ``Requirement`` objects. `env`, + if supplied, should be an ``Environment`` instance. If + not supplied, it defaults to all distributions available within any + entry or distribution in the working set. `installer`, if supplied, + will be invoked with each requirement that cannot be met by an + already-installed distribution; it should return a ``Distribution`` or + ``None``. + + Unless `replace_conflicting=True`, raises a VersionConflict exception if + any requirements are found on the path that have the correct name but + the wrong version. Otherwise, if an `installer` is supplied it will be + invoked to obtain the correct version of the requirement and activate + it. 
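+
+        A minimal usage sketch (``"foo>=1.0"`` is a hypothetical
+        requirement; ``working_set`` is the module-level instance)::
+
+            for dist in working_set.resolve(parse_requirements("foo>=1.0")):
+                working_set.add(dist)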
+ """ + + # set up the stack + requirements = list(requirements)[::-1] + # set of processed requirements + processed = {} + # key -> dist + best = {} + to_activate = [] + + req_extras = _ReqExtras() + + # Mapping of requirement to set of distributions that required it; + # useful for reporting info about conflicts. + required_by = collections.defaultdict(set) + + while requirements: + # process dependencies breadth-first + req = requirements.pop(0) + if req in processed: + # Ignore cyclic or redundant dependencies + continue + + if not req_extras.markers_pass(req): + continue + + dist = best.get(req.key) + if dist is None: + # Find the best distribution and add it to the map + dist = self.by_key.get(req.key) + if dist is None or (dist not in req and replace_conflicting): + ws = self + if env is None: + if dist is None: + env = Environment(self.entries) + else: + # Use an empty environment and workingset to avoid + # any further conflicts with the conflicting + # distribution + env = Environment([]) + ws = WorkingSet([]) + dist = best[req.key] = env.best_match(req, ws, installer) + if dist is None: + requirers = required_by.get(req, None) + raise DistributionNotFound(req, requirers) + to_activate.append(dist) + if dist not in req: + # Oops, the "best" so far conflicts with a dependency + dependent_req = required_by[req] + raise VersionConflict(dist, req).with_context(dependent_req) + + # push the new requirements onto the stack + new_requirements = dist.requires(req.extras)[::-1] + requirements.extend(new_requirements) + + # Register the new requirements needed by req + for new_requirement in new_requirements: + required_by[new_requirement].add(req.project_name) + req_extras[new_requirement] = req.extras + + processed[req] = True + + # return list of distros to activate + return to_activate + + def find_plugins(self, plugin_env, full_env=None, installer=None, + fallback=True): + """Find all activatable distributions in `plugin_env` + + Example usage:: + + distributions, errors = working_set.find_plugins( + Environment(plugin_dirlist) + ) + # add plugins+libs to sys.path + map(working_set.add, distributions) + # display errors + print('Could not load', errors) + + The `plugin_env` should be an ``Environment`` instance that contains + only distributions that are in the project's "plugin directory" or + directories. The `full_env`, if supplied, should be an ``Environment`` + contains all currently-available distributions. If `full_env` is not + supplied, one is created automatically from the ``WorkingSet`` this + method is called on, which will typically mean that every directory on + ``sys.path`` will be scanned for distributions. + + `installer` is a standard installer callback as used by the + ``resolve()`` method. The `fallback` flag indicates whether we should + attempt to resolve older versions of a plugin if the newest version + cannot be resolved. + + This method returns a 2-tuple: (`distributions`, `error_info`), where + `distributions` is a list of the distributions found in `plugin_env` + that were loadable, along with any other distributions that are needed + to resolve their dependencies. `error_info` is a dictionary mapping + unloadable plugin distributions to an exception instance describing the + error that occurred. Usually this will be a ``DistributionNotFound`` or + ``VersionConflict`` instance. 
+ """ + + plugin_projects = list(plugin_env) + # scan project names in alphabetic order + plugin_projects.sort() + + error_info = {} + distributions = {} + + if full_env is None: + env = Environment(self.entries) + env += plugin_env + else: + env = full_env + plugin_env + + shadow_set = self.__class__([]) + # put all our entries in shadow_set + list(map(shadow_set.add, self)) + + for project_name in plugin_projects: + + for dist in plugin_env[project_name]: + + req = [dist.as_requirement()] + + try: + resolvees = shadow_set.resolve(req, env, installer) + + except ResolutionError as v: + # save error info + error_info[dist] = v + if fallback: + # try the next older version of project + continue + else: + # give up on this project, keep going + break + + else: + list(map(shadow_set.add, resolvees)) + distributions.update(dict.fromkeys(resolvees)) + + # success, no need to try any more versions of this project + break + + distributions = list(distributions) + distributions.sort() + + return distributions, error_info + + def require(self, *requirements): + """Ensure that distributions matching `requirements` are activated + + `requirements` must be a string or a (possibly-nested) sequence + thereof, specifying the distributions and versions required. The + return value is a sequence of the distributions that needed to be + activated to fulfill the requirements; all relevant distributions are + included, even if they were already activated in this working set. + """ + needed = self.resolve(parse_requirements(requirements)) + + for dist in needed: + self.add(dist) + + return needed + + def subscribe(self, callback): + """Invoke `callback` for all distributions (including existing ones)""" + if callback in self.callbacks: + return + self.callbacks.append(callback) + for dist in self: + callback(dist) + + def _added_new(self, dist): + for callback in self.callbacks: + callback(dist) + + def __getstate__(self): + return ( + self.entries[:], self.entry_keys.copy(), self.by_key.copy(), + self.callbacks[:] + ) + + def __setstate__(self, e_k_b_c): + entries, keys, by_key, callbacks = e_k_b_c + self.entries = entries[:] + self.entry_keys = keys.copy() + self.by_key = by_key.copy() + self.callbacks = callbacks[:] + + +class _ReqExtras(dict): + """ + Map each requirement to the extras that demanded it. + """ + + def markers_pass(self, req): + """ + Evaluate markers for req against each extra that + demanded it. + + Return False if the req has a marker and fails + evaluation. Otherwise, return True. + """ + extra_evals = ( + req.marker.evaluate({'extra': extra}) + for extra in self.get(req, ()) + ) + return not req.marker or any(extra_evals) or req.marker.evaluate() + + +class Environment(object): + """Searchable snapshot of distributions on a search path""" + + def __init__(self, search_path=None, platform=get_supported_platform(), + python=PY_MAJOR): + """Snapshot distributions available on a search path + + Any distributions found on `search_path` are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. + + `platform` is an optional string specifying the name of the platform + that platform-specific distributions must be compatible with. If + unspecified, it defaults to the current platform. `python` is an + optional string naming the desired version of Python (e.g. ``'3.3'``); + it defaults to the current version. 
+ + You may explicitly set `platform` (and/or `python`) to ``None`` if you + wish to map *all* distributions, not just those compatible with the + running platform or Python version. + """ + self._distmap = {} + self.platform = platform + self.python = python + self.scan(search_path) + + def can_add(self, dist): + """Is distribution `dist` acceptable for this environment? + + The distribution must match the platform and python version + requirements specified when this environment was created, or False + is returned. + """ + return (self.python is None or dist.py_version is None + or dist.py_version==self.python) \ + and compatible_platforms(dist.platform, self.platform) + + def remove(self, dist): + """Remove `dist` from the environment""" + self._distmap[dist.key].remove(dist) + + def scan(self, search_path=None): + """Scan `search_path` for distributions usable in this environment + + Any distributions found are added to the environment. + `search_path` should be a sequence of ``sys.path`` items. If not + supplied, ``sys.path`` is used. Only distributions conforming to + the platform/python version defined at initialization are added. + """ + if search_path is None: + search_path = sys.path + + for item in search_path: + for dist in find_distributions(item): + self.add(dist) + + def __getitem__(self, project_name): + """Return a newest-to-oldest list of distributions for `project_name` + + Uses case-insensitive `project_name` comparison, assuming all the + project's distributions use their project's name converted to all + lowercase as their key. + + """ + distribution_key = project_name.lower() + return self._distmap.get(distribution_key, []) + + def add(self, dist): + """Add `dist` if we ``can_add()`` it and it has not already been added + """ + if self.can_add(dist) and dist.has_version(): + dists = self._distmap.setdefault(dist.key, []) + if dist not in dists: + dists.append(dist) + dists.sort(key=operator.attrgetter('hashcmp'), reverse=True) + + def best_match(self, req, working_set, installer=None): + """Find distribution best matching `req` and usable on `working_set` + + This calls the ``find(req)`` method of the `working_set` to see if a + suitable distribution is already active. (This may raise + ``VersionConflict`` if an unsuitable version of the project is already + active in the specified `working_set`.) If a suitable distribution + isn't active, this method returns the newest distribution in the + environment that meets the ``Requirement`` in `req`. If no suitable + distribution is found, and `installer` is supplied, then the result of + calling the environment's ``obtain(req, installer)`` method will be + returned. + """ + dist = working_set.find(req) + if dist is not None: + return dist + for dist in self[req.key]: + if dist in req: + return dist + # try to download/install + return self.obtain(req, installer) + + def obtain(self, requirement, installer=None): + """Obtain a distribution matching `requirement` (e.g. via download) + + Obtain a distro that matches requirement (e.g. via download). In the + base ``Environment`` class, this routine just returns + ``installer(requirement)``, unless `installer` is None, in which case + None is returned instead. 
This method is a hook that allows subclasses + to attempt other ways of obtaining a distribution before falling back + to the `installer` argument.""" + if installer is not None: + return installer(requirement) + + def __iter__(self): + """Yield the unique project names of the available distributions""" + for key in self._distmap.keys(): + if self[key]: + yield key + + def __iadd__(self, other): + """In-place addition of a distribution or environment""" + if isinstance(other, Distribution): + self.add(other) + elif isinstance(other, Environment): + for project in other: + for dist in other[project]: + self.add(dist) + else: + raise TypeError("Can't add %r to environment" % (other,)) + return self + + def __add__(self, other): + """Add an environment or distribution to an environment""" + new = self.__class__([], platform=None, python=None) + for env in self, other: + new += env + return new + + +# XXX backward compatibility +AvailableDistributions = Environment + + +class ExtractionError(RuntimeError): + """An error occurred extracting a resource + + The following attributes are available from instances of this exception: + + manager + The resource manager that raised this exception + + cache_path + The base directory for resource extraction + + original_error + The exception instance that caused extraction to fail + """ + + +class ResourceManager: + """Manage resource extraction and packages""" + extraction_path = None + + def __init__(self): + self.cached_files = {} + + def resource_exists(self, package_or_requirement, resource_name): + """Does the named resource exist?""" + return get_provider(package_or_requirement).has_resource(resource_name) + + def resource_isdir(self, package_or_requirement, resource_name): + """Is the named resource an existing directory?""" + return get_provider(package_or_requirement).resource_isdir( + resource_name + ) + + def resource_filename(self, package_or_requirement, resource_name): + """Return a true filesystem path for specified resource""" + return get_provider(package_or_requirement).get_resource_filename( + self, resource_name + ) + + def resource_stream(self, package_or_requirement, resource_name): + """Return a readable file-like object for specified resource""" + return get_provider(package_or_requirement).get_resource_stream( + self, resource_name + ) + + def resource_string(self, package_or_requirement, resource_name): + """Return specified resource as a string""" + return get_provider(package_or_requirement).get_resource_string( + self, resource_name + ) + + def resource_listdir(self, package_or_requirement, resource_name): + """List the contents of the named resource directory""" + return get_provider(package_or_requirement).resource_listdir( + resource_name + ) + + def extraction_error(self): + """Give an error message for problems extracting file(s)""" + + old_exc = sys.exc_info()[1] + cache_path = self.extraction_path or get_default_cache() + + tmpl = textwrap.dedent(""" + Can't extract file(s) to egg cache + + The following error occurred while trying to extract file(s) to the Python egg + cache: + + {old_exc} + + The Python egg cache directory is currently set to: + + {cache_path} + + Perhaps your account does not have write access to this directory? You can + change the cache directory by setting the PYTHON_EGG_CACHE environment + variable to point to an accessible directory. 
+ """).lstrip() + err = ExtractionError(tmpl.format(**locals())) + err.manager = self + err.cache_path = cache_path + err.original_error = old_exc + raise err + + def get_cache_path(self, archive_name, names=()): + """Return absolute location in cache for `archive_name` and `names` + + The parent directory of the resulting path will be created if it does + not already exist. `archive_name` should be the base filename of the + enclosing egg (which may not be the name of the enclosing zipfile!), + including its ".egg" extension. `names`, if provided, should be a + sequence of path name parts "under" the egg's extraction location. + + This method should only be called by resource providers that need to + obtain an extraction location, and only for names they intend to + extract, as it tracks the generated names for possible cleanup later. + """ + extract_path = self.extraction_path or get_default_cache() + target_path = os.path.join(extract_path, archive_name+'-tmp', *names) + try: + _bypass_ensure_directory(target_path) + except: + self.extraction_error() + + self._warn_unsafe_extraction_path(extract_path) + + self.cached_files[target_path] = 1 + return target_path + + @staticmethod + def _warn_unsafe_extraction_path(path): + """ + If the default extraction path is overridden and set to an insecure + location, such as /tmp, it opens up an opportunity for an attacker to + replace an extracted file with an unauthorized payload. Warn the user + if a known insecure location is used. + + See Distribute #375 for more details. + """ + if os.name == 'nt' and not path.startswith(os.environ['windir']): + # On Windows, permissions are generally restrictive by default + # and temp directories are not writable by other users, so + # bypass the warning. + return + mode = os.stat(path).st_mode + if mode & stat.S_IWOTH or mode & stat.S_IWGRP: + msg = ("%s is writable by group/others and vulnerable to attack " + "when " + "used with get_resource_filename. Consider a more secure " + "location (set with .set_extraction_path or the " + "PYTHON_EGG_CACHE environment variable)." % path) + warnings.warn(msg, UserWarning) + + def postprocess(self, tempname, filename): + """Perform any platform-specific postprocessing of `tempname` + + This is where Mac header rewrites should be done; other platforms don't + have anything special they should do. + + Resource providers should call this method ONLY after successfully + extracting a compressed resource. They must NOT call it on resources + that are already in the filesystem. + + `tempname` is the current (temporary) name of the file, and `filename` + is the name it will be renamed to by the caller after this routine + returns. + """ + + if os.name == 'posix': + # Make the resource executable + mode = ((os.stat(tempname).st_mode) | 0o555) & 0o7777 + os.chmod(tempname, mode) + + def set_extraction_path(self, path): + """Set the base path where resources will be extracted to, if needed. + + If you do not call this routine before any extractions take place, the + path defaults to the return value of ``get_default_cache()``. (Which + is based on the ``PYTHON_EGG_CACHE`` environment variable, with various + platform-specific fallbacks. See that routine's documentation for more + details.) + + Resources are extracted to subdirectories of this path based upon + information given by the ``IResourceProvider``. You may set this to a + temporary directory, but then you must call ``cleanup_resources()`` to + delete the extracted files when done. 
+        There is no guarantee that
+        ``cleanup_resources()`` will be able to remove all extracted files.
+
+        (Note: you may not change the extraction path for a given resource
+        manager once resources have been extracted, unless you first call
+        ``cleanup_resources()``.)
+        """
+        if self.cached_files:
+            raise ValueError(
+                "Can't change extraction path, files already extracted"
+            )
+
+        self.extraction_path = path
+
+    def cleanup_resources(self, force=False):
+        """
+        Delete all extracted resource files and directories, returning a list
+        of the file and directory names that could not be successfully removed.
+        This function does not have any concurrency protection, so it should
+        generally only be called when the extraction path is a temporary
+        directory exclusive to a single process. This method is not
+        automatically called; you must call it explicitly or register it as an
+        ``atexit`` function if you wish to ensure cleanup of a temporary
+        directory used for extractions.
+        """
+        # XXX
+
+def get_default_cache():
+    """Determine the default cache location
+
+    This returns the ``PYTHON_EGG_CACHE`` environment variable, if set.
+    Otherwise, on Windows, it returns a "Python-Eggs" subdirectory of the
+    "Application Data" directory. On all other systems, it's "~/.python-eggs".
+    """
+    try:
+        return os.environ['PYTHON_EGG_CACHE']
+    except KeyError:
+        pass
+
+    if os.name!='nt':
+        return os.path.expanduser('~/.python-eggs')
+
+    # XXX this may be locale-specific!
+    app_data = 'Application Data'
+    app_homes = [
+        # best option, should be locale-safe
+        (('APPDATA',), None),
+        (('USERPROFILE',), app_data),
+        (('HOMEDRIVE','HOMEPATH'), app_data),
+        (('HOMEPATH',), app_data),
+        (('HOME',), None),
+        # 95/98/ME
+        (('WINDIR',), app_data),
+    ]
+
+    for keys, subdir in app_homes:
+        dirname = ''
+        for key in keys:
+            if key in os.environ:
+                dirname = os.path.join(dirname, os.environ[key])
+            else:
+                break
+        else:
+            if subdir:
+                dirname = os.path.join(dirname, subdir)
+            return os.path.join(dirname, 'Python-Eggs')
+    else:
+        raise RuntimeError(
+            "Please set the PYTHON_EGG_CACHE environment variable"
+        )
+
+def safe_name(name):
+    """Convert an arbitrary string to a standard distribution name
+
+    Any runs of non-alphanumeric/. characters are replaced with a single '-'.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '-', name)
+
+
+def safe_version(version):
+    """
+    Convert an arbitrary string to a standard version string
+    """
+    try:
+        # normalize the version
+        return str(packaging.version.Version(version))
+    except packaging.version.InvalidVersion:
+        version = version.replace(' ','.')
+        return re.sub('[^A-Za-z0-9.]+', '-', version)
+
+
+def safe_extra(extra):
+    """Convert an arbitrary string to a standard 'extra' name
+
+    Any runs of non-alphanumeric characters (other than '.') are replaced
+    with a single '_', and the result is always lowercased.
+    """
+    return re.sub('[^A-Za-z0-9.]+', '_', extra).lower()
+
+
+def to_filename(name):
+    """Convert a project or version name to its filename-escaped form
+
+    Any '-' characters are currently replaced with '_'.
+    """
+    return name.replace('-','_')
+
+
+def invalid_marker(text):
+    """
+    Validate text as a PEP 508 environment marker; return an exception
+    if invalid or False otherwise.
+    """
+    try:
+        evaluate_marker(text)
+    except SyntaxError as e:
+        e.filename = None
+        e.lineno = None
+        return e
+    return False
+
+
+def evaluate_marker(text, extra=None):
+    """
+    Evaluate a PEP 508 environment marker.
+    Return a boolean indicating the marker result in this environment.
+ Raise SyntaxError if marker is invalid. + + This implementation uses the 'pyparsing' module. + """ + try: + marker = packaging.markers.Marker(text) + return marker.evaluate() + except packaging.markers.InvalidMarker as e: + raise SyntaxError(e) + + +class NullProvider: + """Try to implement resources and metadata for arbitrary PEP 302 loaders""" + + egg_name = None + egg_info = None + loader = None + + def __init__(self, module): + self.loader = getattr(module, '__loader__', None) + self.module_path = os.path.dirname(getattr(module, '__file__', '')) + + def get_resource_filename(self, manager, resource_name): + return self._fn(self.module_path, resource_name) + + def get_resource_stream(self, manager, resource_name): + return io.BytesIO(self.get_resource_string(manager, resource_name)) + + def get_resource_string(self, manager, resource_name): + return self._get(self._fn(self.module_path, resource_name)) + + def has_resource(self, resource_name): + return self._has(self._fn(self.module_path, resource_name)) + + def has_metadata(self, name): + return self.egg_info and self._has(self._fn(self.egg_info, name)) + + if sys.version_info <= (3,): + def get_metadata(self, name): + if not self.egg_info: + return "" + return self._get(self._fn(self.egg_info, name)) + else: + def get_metadata(self, name): + if not self.egg_info: + return "" + return self._get(self._fn(self.egg_info, name)).decode("utf-8") + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + def resource_isdir(self, resource_name): + return self._isdir(self._fn(self.module_path, resource_name)) + + def metadata_isdir(self, name): + return self.egg_info and self._isdir(self._fn(self.egg_info, name)) + + def resource_listdir(self, resource_name): + return self._listdir(self._fn(self.module_path, resource_name)) + + def metadata_listdir(self, name): + if self.egg_info: + return self._listdir(self._fn(self.egg_info, name)) + return [] + + def run_script(self, script_name, namespace): + script = 'scripts/'+script_name + if not self.has_metadata(script): + raise ResolutionError("No script named %r" % script_name) + script_text = self.get_metadata(script).replace('\r\n', '\n') + script_text = script_text.replace('\r', '\n') + script_filename = self._fn(self.egg_info, script) + namespace['__file__'] = script_filename + if os.path.exists(script_filename): + source = open(script_filename).read() + code = compile(source, script_filename, 'exec') + exec(code, namespace, namespace) + else: + from linecache import cache + cache[script_filename] = ( + len(script_text), 0, script_text.split('\n'), script_filename + ) + script_code = compile(script_text, script_filename,'exec') + exec(script_code, namespace, namespace) + + def _has(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _isdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _listdir(self, path): + raise NotImplementedError( + "Can't perform this operation for unregistered loader type" + ) + + def _fn(self, base, resource_name): + if resource_name: + return os.path.join(base, *resource_name.split('/')) + return base + + def _get(self, path): + if hasattr(self.loader, 'get_data'): + return self.loader.get_data(path) + raise NotImplementedError( + "Can't perform this operation for loaders without 'get_data()'" + ) + +register_loader_type(object, NullProvider) + + +class EggProvider(NullProvider): + """Provider based on a 
virtual filesystem""" + + def __init__(self, module): + NullProvider.__init__(self, module) + self._setup_prefix() + + def _setup_prefix(self): + # we assume here that our metadata may be nested inside a "basket" + # of multiple eggs; that's why we use module_path instead of .archive + path = self.module_path + old = None + while path!=old: + if _is_unpacked_egg(path): + self.egg_name = os.path.basename(path) + self.egg_info = os.path.join(path, 'EGG-INFO') + self.egg_root = path + break + old = path + path, base = os.path.split(path) + +class DefaultProvider(EggProvider): + """Provides access to package resources in the filesystem""" + + def _has(self, path): + return os.path.exists(path) + + def _isdir(self, path): + return os.path.isdir(path) + + def _listdir(self, path): + return os.listdir(path) + + def get_resource_stream(self, manager, resource_name): + return open(self._fn(self.module_path, resource_name), 'rb') + + def _get(self, path): + with open(path, 'rb') as stream: + return stream.read() + + @classmethod + def _register(cls): + loader_cls = getattr(importlib_machinery, 'SourceFileLoader', + type(None)) + register_loader_type(loader_cls, cls) + +DefaultProvider._register() + + +class EmptyProvider(NullProvider): + """Provider that returns nothing for all requests""" + + _isdir = _has = lambda self, path: False + _get = lambda self, path: '' + _listdir = lambda self, path: [] + module_path = None + + def __init__(self): + pass + +empty_provider = EmptyProvider() + + +class ZipManifests(dict): + """ + zip manifest builder + """ + + @classmethod + def build(cls, path): + """ + Build a dictionary similar to the zipimport directory + caches, except instead of tuples, store ZipInfo objects. + + Use a platform-specific path separator (os.sep) for the path keys + for compatibility with pypy on Windows. + """ + with ContextualZipFile(path) as zfile: + items = ( + ( + name.replace('/', os.sep), + zfile.getinfo(name), + ) + for name in zfile.namelist() + ) + return dict(items) + + load = build + + +class MemoizedZipManifests(ZipManifests): + """ + Memoized zipfile manifests. + """ + manifest_mod = collections.namedtuple('manifest_mod', 'manifest mtime') + + def load(self, path): + """ + Load a manifest at path or return a suitable manifest already loaded. 
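+
+        Illustrative sketch (``some.egg`` is a placeholder path)::
+
+            manifests = MemoizedZipManifests()
+            info = manifests.load('some.egg')
+            # a second load returns the same object until the mtime changes
+            assert info is manifests.load('some.egg')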
+ """ + path = os.path.normpath(path) + mtime = os.stat(path).st_mtime + + if path not in self or self[path].mtime != mtime: + manifest = self.build(path) + self[path] = self.manifest_mod(manifest, mtime) + + return self[path].manifest + + +class ContextualZipFile(zipfile.ZipFile): + """ + Supplement ZipFile class to support context manager for Python 2.6 + """ + + def __enter__(self): + return self + + def __exit__(self, type, value, traceback): + self.close() + + def __new__(cls, *args, **kwargs): + """ + Construct a ZipFile or ContextualZipFile as appropriate + """ + if hasattr(zipfile.ZipFile, '__exit__'): + return zipfile.ZipFile(*args, **kwargs) + return super(ContextualZipFile, cls).__new__(cls) + + +class ZipProvider(EggProvider): + """Resource support for zips and eggs""" + + eagers = None + _zip_manifests = MemoizedZipManifests() + + def __init__(self, module): + EggProvider.__init__(self, module) + self.zip_pre = self.loader.archive+os.sep + + def _zipinfo_name(self, fspath): + # Convert a virtual filename (full path to file) into a zipfile subpath + # usable with the zipimport directory cache for our target archive + if fspath.startswith(self.zip_pre): + return fspath[len(self.zip_pre):] + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.zip_pre) + ) + + def _parts(self, zip_path): + # Convert a zipfile subpath into an egg-relative path part list. + # pseudo-fs path + fspath = self.zip_pre+zip_path + if fspath.startswith(self.egg_root+os.sep): + return fspath[len(self.egg_root)+1:].split(os.sep) + raise AssertionError( + "%s is not a subpath of %s" % (fspath, self.egg_root) + ) + + @property + def zipinfo(self): + return self._zip_manifests.load(self.loader.archive) + + def get_resource_filename(self, manager, resource_name): + if not self.egg_name: + raise NotImplementedError( + "resource_filename() only supported for .egg, not .zip" + ) + # no need to lock for extraction, since we use temp names + zip_path = self._resource_to_zip(resource_name) + eagers = self._get_eager_resources() + if '/'.join(self._parts(zip_path)) in eagers: + for name in eagers: + self._extract_resource(manager, self._eager_to_zip(name)) + return self._extract_resource(manager, zip_path) + + @staticmethod + def _get_date_and_size(zip_stat): + size = zip_stat.file_size + # ymdhms+wday, yday, dst + date_time = zip_stat.date_time + (0, 0, -1) + # 1980 offset already done + timestamp = time.mktime(date_time) + return timestamp, size + + def _extract_resource(self, manager, zip_path): + + if zip_path in self._index(): + for name in self._index()[zip_path]: + last = self._extract_resource( + manager, os.path.join(zip_path, name) + ) + # return the extracted directory name + return os.path.dirname(last) + + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + + if not WRITE_SUPPORT: + raise IOError('"os.rename" and "os.unlink" are not supported ' + 'on this platform') + try: + + real_path = manager.get_cache_path( + self.egg_name, self._parts(zip_path) + ) + + if self._is_current(real_path, zip_path): + return real_path + + outf, tmpnam = _mkstemp(".$extract", dir=os.path.dirname(real_path)) + os.write(outf, self.loader.get_data(zip_path)) + os.close(outf) + utime(tmpnam, (timestamp, timestamp)) + manager.postprocess(tmpnam, real_path) + + try: + rename(tmpnam, real_path) + + except os.error: + if os.path.isfile(real_path): + if self._is_current(real_path, zip_path): + # the file became current since it was checked above, + # so proceed. 
+ return real_path + # Windows, del old file and retry + elif os.name=='nt': + unlink(real_path) + rename(tmpnam, real_path) + return real_path + raise + + except os.error: + # report a user-friendly error + manager.extraction_error() + + return real_path + + def _is_current(self, file_path, zip_path): + """ + Return True if the file_path is current for this zip_path + """ + timestamp, size = self._get_date_and_size(self.zipinfo[zip_path]) + if not os.path.isfile(file_path): + return False + stat = os.stat(file_path) + if stat.st_size!=size or stat.st_mtime!=timestamp: + return False + # check that the contents match + zip_contents = self.loader.get_data(zip_path) + with open(file_path, 'rb') as f: + file_contents = f.read() + return zip_contents == file_contents + + def _get_eager_resources(self): + if self.eagers is None: + eagers = [] + for name in ('native_libs.txt', 'eager_resources.txt'): + if self.has_metadata(name): + eagers.extend(self.get_metadata_lines(name)) + self.eagers = eagers + return self.eagers + + def _index(self): + try: + return self._dirindex + except AttributeError: + ind = {} + for path in self.zipinfo: + parts = path.split(os.sep) + while parts: + parent = os.sep.join(parts[:-1]) + if parent in ind: + ind[parent].append(parts[-1]) + break + else: + ind[parent] = [parts.pop()] + self._dirindex = ind + return ind + + def _has(self, fspath): + zip_path = self._zipinfo_name(fspath) + return zip_path in self.zipinfo or zip_path in self._index() + + def _isdir(self, fspath): + return self._zipinfo_name(fspath) in self._index() + + def _listdir(self, fspath): + return list(self._index().get(self._zipinfo_name(fspath), ())) + + def _eager_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.egg_root, resource_name)) + + def _resource_to_zip(self, resource_name): + return self._zipinfo_name(self._fn(self.module_path, resource_name)) + +register_loader_type(zipimport.zipimporter, ZipProvider) + + +class FileMetadata(EmptyProvider): + """Metadata handler for standalone PKG-INFO files + + Usage:: + + metadata = FileMetadata("/path/to/PKG-INFO") + + This provider rejects all data and metadata requests except for PKG-INFO, + which is treated as existing, and will be the contents of the file at + the provided location. 
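+
+    Continuing the example above (illustrative only)::
+
+        metadata.has_metadata('PKG-INFO')      # True if the file exists
+        metadata.get_metadata('PKG-INFO')      # the file's text
+        metadata.has_metadata('requires.txt')  # always False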
+ """ + + def __init__(self, path): + self.path = path + + def has_metadata(self, name): + return name=='PKG-INFO' and os.path.isfile(self.path) + + def get_metadata(self, name): + if name=='PKG-INFO': + with io.open(self.path, encoding='utf-8') as f: + try: + metadata = f.read() + except UnicodeDecodeError as exc: + # add path context to error message + tmpl = " in {self.path}" + exc.reason += tmpl.format(self=self) + raise + return metadata + raise KeyError("No metadata except PKG-INFO is available") + + def get_metadata_lines(self, name): + return yield_lines(self.get_metadata(name)) + + +class PathMetadata(DefaultProvider): + """Metadata provider for egg directories + + Usage:: + + # Development eggs: + + egg_info = "/path/to/PackageName.egg-info" + base_dir = os.path.dirname(egg_info) + metadata = PathMetadata(base_dir, egg_info) + dist_name = os.path.splitext(os.path.basename(egg_info))[0] + dist = Distribution(basedir, project_name=dist_name, metadata=metadata) + + # Unpacked egg directories: + + egg_path = "/path/to/PackageName-ver-pyver-etc.egg" + metadata = PathMetadata(egg_path, os.path.join(egg_path,'EGG-INFO')) + dist = Distribution.from_filename(egg_path, metadata=metadata) + """ + + def __init__(self, path, egg_info): + self.module_path = path + self.egg_info = egg_info + + +class EggMetadata(ZipProvider): + """Metadata provider for .egg files""" + + def __init__(self, importer): + """Create a metadata provider from a zipimporter""" + + self.zip_pre = importer.archive+os.sep + self.loader = importer + if importer.prefix: + self.module_path = os.path.join(importer.archive, importer.prefix) + else: + self.module_path = importer.archive + self._setup_prefix() + +_declare_state('dict', _distribution_finders = {}) + +def register_finder(importer_type, distribution_finder): + """Register `distribution_finder` to find distributions in sys.path items + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `distribution_finder` is a callable that, passed a path + item and the importer instance, yields ``Distribution`` instances found on + that path item. See ``pkg_resources.find_on_path`` for an example.""" + _distribution_finders[importer_type] = distribution_finder + + +def find_distributions(path_item, only=False): + """Yield distributions accessible via `path_item`""" + importer = get_importer(path_item) + finder = _find_adapter(_distribution_finders, importer) + return finder(importer, path_item, only) + +def find_eggs_in_zip(importer, path_item, only=False): + """ + Find eggs in zip files; possibly multiple nested eggs. 
+ """ + if importer.archive.endswith('.whl'): + # wheels are not supported with this finder + # they don't have PKG-INFO metadata, and won't ever contain eggs + return + metadata = EggMetadata(importer) + if metadata.has_metadata('PKG-INFO'): + yield Distribution.from_filename(path_item, metadata=metadata) + if only: + # don't yield nested distros + return + for subitem in metadata.resource_listdir('/'): + if _is_unpacked_egg(subitem): + subpath = os.path.join(path_item, subitem) + for dist in find_eggs_in_zip(zipimport.zipimporter(subpath), subpath): + yield dist + +register_finder(zipimport.zipimporter, find_eggs_in_zip) + +def find_nothing(importer, path_item, only=False): + return () +register_finder(object, find_nothing) + +def find_on_path(importer, path_item, only=False): + """Yield distributions accessible on a sys.path directory""" + path_item = _normalize_cached(path_item) + + if os.path.isdir(path_item) and os.access(path_item, os.R_OK): + if _is_unpacked_egg(path_item): + yield Distribution.from_filename( + path_item, metadata=PathMetadata( + path_item, os.path.join(path_item,'EGG-INFO') + ) + ) + else: + # scan for .egg and .egg-info in directory + for entry in os.listdir(path_item): + lower = entry.lower() + if lower.endswith('.egg-info') or lower.endswith('.dist-info'): + fullpath = os.path.join(path_item, entry) + if os.path.isdir(fullpath): + # egg-info directory, allow getting metadata + metadata = PathMetadata(path_item, fullpath) + else: + metadata = FileMetadata(fullpath) + yield Distribution.from_location( + path_item, entry, metadata, precedence=DEVELOP_DIST + ) + elif not only and _is_unpacked_egg(entry): + dists = find_distributions(os.path.join(path_item, entry)) + for dist in dists: + yield dist + elif not only and lower.endswith('.egg-link'): + with open(os.path.join(path_item, entry)) as entry_file: + entry_lines = entry_file.readlines() + for line in entry_lines: + if not line.strip(): + continue + path = os.path.join(path_item, line.rstrip()) + dists = find_distributions(path) + for item in dists: + yield item + break +register_finder(pkgutil.ImpImporter, find_on_path) + +if hasattr(importlib_machinery, 'FileFinder'): + register_finder(importlib_machinery.FileFinder, find_on_path) + +_declare_state('dict', _namespace_handlers={}) +_declare_state('dict', _namespace_packages={}) + + +def register_namespace_handler(importer_type, namespace_handler): + """Register `namespace_handler` to declare namespace packages + + `importer_type` is the type or class of a PEP 302 "Importer" (sys.path item + handler), and `namespace_handler` is a callable like this:: + + def namespace_handler(importer, path_entry, moduleName, module): + # return a path_entry to use for child packages + + Namespace handlers are only called if the importer object has already + agreed that it can handle the relevant path item, and they should only + return a subpath if the module __path__ does not already contain an + equivalent subpath. For an example namespace handler, see + ``pkg_resources.file_ns_handler``. 
+ """ + _namespace_handlers[importer_type] = namespace_handler + +def _handle_ns(packageName, path_item): + """Ensure that named package includes a subpath of path_item (if needed)""" + + importer = get_importer(path_item) + if importer is None: + return None + loader = importer.find_module(packageName) + if loader is None: + return None + module = sys.modules.get(packageName) + if module is None: + module = sys.modules[packageName] = types.ModuleType(packageName) + module.__path__ = [] + _set_parent_ns(packageName) + elif not hasattr(module,'__path__'): + raise TypeError("Not a package:", packageName) + handler = _find_adapter(_namespace_handlers, importer) + subpath = handler(importer, path_item, packageName, module) + if subpath is not None: + path = module.__path__ + path.append(subpath) + loader.load_module(packageName) + _rebuild_mod_path(path, packageName, module) + return subpath + + +def _rebuild_mod_path(orig_path, package_name, module): + """ + Rebuild module.__path__ ensuring that all entries are ordered + corresponding to their sys.path order + """ + sys_path = [_normalize_cached(p) for p in sys.path] + def position_in_sys_path(path): + """ + Return the ordinal of the path based on its position in sys.path + """ + path_parts = path.split(os.sep) + module_parts = package_name.count('.') + 1 + parts = path_parts[:-module_parts] + return sys_path.index(_normalize_cached(os.sep.join(parts))) + + orig_path.sort(key=position_in_sys_path) + module.__path__[:] = [_normalize_cached(p) for p in orig_path] + + +def declare_namespace(packageName): + """Declare that package 'packageName' is a namespace package""" + + _imp.acquire_lock() + try: + if packageName in _namespace_packages: + return + + path, parent = sys.path, None + if '.' in packageName: + parent = '.'.join(packageName.split('.')[:-1]) + declare_namespace(parent) + if parent not in _namespace_packages: + __import__(parent) + try: + path = sys.modules[parent].__path__ + except AttributeError: + raise TypeError("Not a package:", parent) + + # Track what packages are namespaces, so when new path items are added, + # they can be updated + _namespace_packages.setdefault(parent,[]).append(packageName) + _namespace_packages.setdefault(packageName,[]) + + for path_item in path: + # Ensure all the parent's path items are reflected in the child, + # if they apply + _handle_ns(packageName, path_item) + + finally: + _imp.release_lock() + +def fixup_namespace_packages(path_item, parent=None): + """Ensure that previously-declared namespace packages include path_item""" + _imp.acquire_lock() + try: + for package in _namespace_packages.get(parent,()): + subpath = _handle_ns(package, path_item) + if subpath: + fixup_namespace_packages(subpath, package) + finally: + _imp.release_lock() + +def file_ns_handler(importer, path_item, packageName, module): + """Compute an ns-package subpath for a filesystem or zipfile importer""" + + subpath = os.path.join(path_item, packageName.split('.')[-1]) + normalized = _normalize_cached(subpath) + for item in module.__path__: + if _normalize_cached(item)==normalized: + break + else: + # Only return the path if it's not already there + return subpath + +register_namespace_handler(pkgutil.ImpImporter, file_ns_handler) +register_namespace_handler(zipimport.zipimporter, file_ns_handler) + +if hasattr(importlib_machinery, 'FileFinder'): + register_namespace_handler(importlib_machinery.FileFinder, file_ns_handler) + + +def null_ns_handler(importer, path_item, packageName, module): + return None + 
+register_namespace_handler(object, null_ns_handler) + + +def normalize_path(filename): + """Normalize a file/dir name for comparison purposes""" + return os.path.normcase(os.path.realpath(filename)) + +def _normalize_cached(filename, _cache={}): + try: + return _cache[filename] + except KeyError: + _cache[filename] = result = normalize_path(filename) + return result + +def _is_unpacked_egg(path): + """ + Determine if given path appears to be an unpacked egg. + """ + return ( + path.lower().endswith('.egg') + ) + +def _set_parent_ns(packageName): + parts = packageName.split('.') + name = parts.pop() + if parts: + parent = '.'.join(parts) + setattr(sys.modules[parent], name, sys.modules[packageName]) + + +def yield_lines(strs): + """Yield non-empty/non-comment lines of a string or sequence""" + if isinstance(strs, six.string_types): + for s in strs.splitlines(): + s = s.strip() + # skip blank lines/comments + if s and not s.startswith('#'): + yield s + else: + for ss in strs: + for s in yield_lines(ss): + yield s + +MODULE = re.compile(r"\w+(\.\w+)*$").match +EGG_NAME = re.compile( + r""" + (?P<name>[^-]+) ( + -(?P<ver>[^-]+) ( + -py(?P<pyver>[^-]+) ( + -(?P<plat>.+) + )? + )? + )? + """, + re.VERBOSE | re.IGNORECASE, +).match + + +class EntryPoint(object): + """Object representing an advertised importable object""" + + def __init__(self, name, module_name, attrs=(), extras=(), dist=None): + if not MODULE(module_name): + raise ValueError("Invalid module name", module_name) + self.name = name + self.module_name = module_name + self.attrs = tuple(attrs) + self.extras = Requirement.parse(("x[%s]" % ','.join(extras))).extras + self.dist = dist + + def __str__(self): + s = "%s = %s" % (self.name, self.module_name) + if self.attrs: + s += ':' + '.'.join(self.attrs) + if self.extras: + s += ' [%s]' % ','.join(self.extras) + return s + + def __repr__(self): + return "EntryPoint.parse(%r)" % str(self) + + def load(self, require=True, *args, **kwargs): + """ + Require packages for this EntryPoint, then resolve it. + """ + if not require or args or kwargs: + warnings.warn( + "Parameters to load are deprecated. Call .resolve and " + ".require separately.", + DeprecationWarning, + stacklevel=2, + ) + if require: + self.require(*args, **kwargs) + return self.resolve() + + def resolve(self): + """ + Resolve the entry point from its module and attrs. 
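+
+        A minimal sketch (``mypkg.cli:main`` is a hypothetical entry
+        point)::
+
+            ep = EntryPoint.parse('main = mypkg.cli:main')
+            main = ep.resolve()   # imports mypkg.cli, returns its ``main``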
+ """ + module = __import__(self.module_name, fromlist=['__name__'], level=0) + try: + return functools.reduce(getattr, self.attrs, module) + except AttributeError as exc: + raise ImportError(str(exc)) + + def require(self, env=None, installer=None): + if self.extras and not self.dist: + raise UnknownExtra("Can't require() without a distribution", self) + reqs = self.dist.requires(self.extras) + items = working_set.resolve(reqs, env, installer) + list(map(working_set.add, items)) + + pattern = re.compile( + r'\s*' + r'(?P<name>.+?)\s*' + r'=\s*' + r'(?P<module>[\w.]+)\s*' + r'(:\s*(?P<attr>[\w.]+))?\s*' + r'(?P<extras>\[.*\])?\s*$' + ) + + @classmethod + def parse(cls, src, dist=None): + """Parse a single entry point from string `src` + + Entry point syntax follows the form:: + + name = some.module:some.attr [extra1, extra2] + + The entry name and module name are required, but the ``:attrs`` and + ``[extras]`` parts are optional + """ + m = cls.pattern.match(src) + if not m: + msg = "EntryPoint must be in 'name=module:attrs [extras]' format" + raise ValueError(msg, src) + res = m.groupdict() + extras = cls._parse_extras(res['extras']) + attrs = res['attr'].split('.') if res['attr'] else () + return cls(res['name'], res['module'], attrs, extras, dist) + + @classmethod + def _parse_extras(cls, extras_spec): + if not extras_spec: + return () + req = Requirement.parse('x' + extras_spec) + if req.specs: + raise ValueError() + return req.extras + + @classmethod + def parse_group(cls, group, lines, dist=None): + """Parse an entry point group""" + if not MODULE(group): + raise ValueError("Invalid group name", group) + this = {} + for line in yield_lines(lines): + ep = cls.parse(line, dist) + if ep.name in this: + raise ValueError("Duplicate entry point", group, ep.name) + this[ep.name]=ep + return this + + @classmethod + def parse_map(cls, data, dist=None): + """Parse a map of entry point groups""" + if isinstance(data, dict): + data = data.items() + else: + data = split_sections(data) + maps = {} + for group, lines in data: + if group is None: + if not lines: + continue + raise ValueError("Entry points must be listed in groups") + group = group.strip() + if group in maps: + raise ValueError("Duplicate group name", group) + maps[group] = cls.parse_group(group, lines, dist) + return maps + + +def _remove_md5_fragment(location): + if not location: + return '' + parsed = urllib.parse.urlparse(location) + if parsed[-1].startswith('md5='): + return urllib.parse.urlunparse(parsed[:-1] + ('',)) + return location + + +def _version_from_file(lines): + """ + Given an iterable of lines from a Metadata file, return + the value of the Version field, if present, or None otherwise. 
+ """ + is_version_line = lambda line: line.lower().startswith('version:') + version_lines = filter(is_version_line, lines) + line = next(iter(version_lines), '') + _, _, value = line.partition(':') + return safe_version(value.strip()) or None + + +class Distribution(object): + """Wrap an actual or potential sys.path entry w/metadata""" + PKG_INFO = 'PKG-INFO' + + def __init__(self, location=None, metadata=None, project_name=None, + version=None, py_version=PY_MAJOR, platform=None, + precedence=EGG_DIST): + self.project_name = safe_name(project_name or 'Unknown') + if version is not None: + self._version = safe_version(version) + self.py_version = py_version + self.platform = platform + self.location = location + self.precedence = precedence + self._provider = metadata or empty_provider + + @classmethod + def from_location(cls, location, basename, metadata=None, **kw): + project_name, version, py_version, platform = [None]*4 + basename, ext = os.path.splitext(basename) + if ext.lower() in _distributionImpl: + cls = _distributionImpl[ext.lower()] + + match = EGG_NAME(basename) + if match: + project_name, version, py_version, platform = match.group( + 'name', 'ver', 'pyver', 'plat' + ) + return cls( + location, metadata, project_name=project_name, version=version, + py_version=py_version, platform=platform, **kw + )._reload_version() + + def _reload_version(self): + return self + + @property + def hashcmp(self): + return ( + self.parsed_version, + self.precedence, + self.key, + _remove_md5_fragment(self.location), + self.py_version or '', + self.platform or '', + ) + + def __hash__(self): + return hash(self.hashcmp) + + def __lt__(self, other): + return self.hashcmp < other.hashcmp + + def __le__(self, other): + return self.hashcmp <= other.hashcmp + + def __gt__(self, other): + return self.hashcmp > other.hashcmp + + def __ge__(self, other): + return self.hashcmp >= other.hashcmp + + def __eq__(self, other): + if not isinstance(other, self.__class__): + # It's not a Distribution, so they are not equal + return False + return self.hashcmp == other.hashcmp + + def __ne__(self, other): + return not self == other + + # These properties have to be lazy so that we don't have to load any + # metadata until/unless it's actually needed. (i.e., some distributions + # may not know their name or version without loading PKG-INFO) + + @property + def key(self): + try: + return self._key + except AttributeError: + self._key = key = self.project_name.lower() + return key + + @property + def parsed_version(self): + if not hasattr(self, "_parsed_version"): + self._parsed_version = parse_version(self.version) + + return self._parsed_version + + def _warn_legacy_version(self): + LV = packaging.version.LegacyVersion + is_legacy = isinstance(self._parsed_version, LV) + if not is_legacy: + return + + # While an empty version is technically a legacy version and + # is not a valid PEP 440 version, it's also unlikely to + # actually come from someone and instead it is more likely that + # it comes from setuptools attempting to parse a filename and + # including it in the list. So for that we'll gate this warning + # on if the version is anything at all or not. + if not self.version: + return + + tmpl = textwrap.dedent(""" + '{project_name} ({version})' is being parsed as a legacy, + non PEP 440, + version. You may find odd behavior and sort order. + In particular it will be sorted as less than 0.0. It + is recommended to migrate to PEP 440 compatible + versions. 
+ """).strip().replace('\n', ' ') + + warnings.warn(tmpl.format(**vars(self)), PEP440Warning) + + @property + def version(self): + try: + return self._version + except AttributeError: + version = _version_from_file(self._get_metadata(self.PKG_INFO)) + if version is None: + tmpl = "Missing 'Version:' header and/or %s file" + raise ValueError(tmpl % self.PKG_INFO, self) + return version + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + dm = self.__dep_map = {None: []} + for name in 'requires.txt', 'depends.txt': + for extra, reqs in split_sections(self._get_metadata(name)): + if extra: + if ':' in extra: + extra, marker = extra.split(':', 1) + if invalid_marker(marker): + # XXX warn + reqs=[] + elif not evaluate_marker(marker): + reqs=[] + extra = safe_extra(extra) or None + dm.setdefault(extra,[]).extend(parse_requirements(reqs)) + return dm + + def requires(self, extras=()): + """List of Requirements needed for this distro if `extras` are used""" + dm = self._dep_map + deps = [] + deps.extend(dm.get(None, ())) + for ext in extras: + try: + deps.extend(dm[safe_extra(ext)]) + except KeyError: + raise UnknownExtra( + "%s has no such extra feature %r" % (self, ext) + ) + return deps + + def _get_metadata(self, name): + if self.has_metadata(name): + for line in self.get_metadata_lines(name): + yield line + + def activate(self, path=None): + """Ensure distribution is importable on `path` (default=sys.path)""" + if path is None: + path = sys.path + self.insert_on(path, replace=True) + if path is sys.path: + fixup_namespace_packages(self.location) + for pkg in self._get_metadata('namespace_packages.txt'): + if pkg in sys.modules: + declare_namespace(pkg) + + def egg_name(self): + """Return what this distribution's standard .egg filename should be""" + filename = "%s-%s-py%s" % ( + to_filename(self.project_name), to_filename(self.version), + self.py_version or PY_MAJOR + ) + + if self.platform: + filename += '-' + self.platform + return filename + + def __repr__(self): + if self.location: + return "%s (%s)" % (self, self.location) + else: + return str(self) + + def __str__(self): + try: + version = getattr(self, 'version', None) + except ValueError: + version = None + version = version or "[unknown version]" + return "%s %s" % (self.project_name, version) + + def __getattr__(self, attr): + """Delegate all unrecognized public attributes to .metadata provider""" + if attr.startswith('_'): + raise AttributeError(attr) + return getattr(self._provider, attr) + + @classmethod + def from_filename(cls, filename, metadata=None, **kw): + return cls.from_location( + _normalize_cached(filename), os.path.basename(filename), metadata, + **kw + ) + + def as_requirement(self): + """Return a ``Requirement`` that matches this distribution exactly""" + if isinstance(self.parsed_version, packaging.version.Version): + spec = "%s==%s" % (self.project_name, self.parsed_version) + else: + spec = "%s===%s" % (self.project_name, self.parsed_version) + + return Requirement.parse(spec) + + def load_entry_point(self, group, name): + """Return the `name` entry point of `group` or raise ImportError""" + ep = self.get_entry_info(group, name) + if ep is None: + raise ImportError("Entry point %r not found" % ((group, name),)) + return ep.load() + + def get_entry_map(self, group=None): + """Return the entry point map for `group`, or the full entry map""" + try: + ep_map = self._ep_map + except AttributeError: + ep_map = self._ep_map = EntryPoint.parse_map( + 
self._get_metadata('entry_points.txt'), self + ) + if group is not None: + return ep_map.get(group,{}) + return ep_map + + def get_entry_info(self, group, name): + """Return the EntryPoint object for `group`+`name`, or ``None``""" + return self.get_entry_map(group).get(name) + + def insert_on(self, path, loc=None, replace=False): + """Insert self.location in path before its nearest parent directory""" + + loc = loc or self.location + if not loc: + return + + nloc = _normalize_cached(loc) + bdir = os.path.dirname(nloc) + npath= [(p and _normalize_cached(p) or p) for p in path] + + for p, item in enumerate(npath): + if item == nloc: + break + elif item == bdir and self.precedence == EGG_DIST: + # if it's an .egg, give it precedence over its directory + if path is sys.path: + self.check_version_conflict() + path.insert(p, loc) + npath.insert(p, nloc) + break + else: + if path is sys.path: + self.check_version_conflict() + if replace: + path.insert(0, loc) + else: + path.append(loc) + return + + # p is the spot where we found or inserted loc; now remove duplicates + while True: + try: + np = npath.index(nloc, p+1) + except ValueError: + break + else: + del npath[np], path[np] + # ha! + p = np + + return + + def check_version_conflict(self): + if self.key == 'setuptools': + # ignore the inevitable setuptools self-conflicts :( + return + + nsp = dict.fromkeys(self._get_metadata('namespace_packages.txt')) + loc = normalize_path(self.location) + for modname in self._get_metadata('top_level.txt'): + if (modname not in sys.modules or modname in nsp + or modname in _namespace_packages): + continue + if modname in ('pkg_resources', 'setuptools', 'site'): + continue + fn = getattr(sys.modules[modname], '__file__', None) + if fn and (normalize_path(fn).startswith(loc) or + fn.startswith(self.location)): + continue + issue_warning( + "Module %s was already imported from %s, but %s is being added" + " to sys.path" % (modname, fn, self.location), + ) + + def has_version(self): + try: + self.version + except ValueError: + issue_warning("Unbuilt egg for " + repr(self)) + return False + return True + + def clone(self,**kw): + """Copy this distribution, substituting in any changed keyword args""" + names = 'project_name version py_version platform location precedence' + for attr in names.split(): + kw.setdefault(attr, getattr(self, attr, None)) + kw.setdefault('metadata', self._provider) + return self.__class__(**kw) + + @property + def extras(self): + return [dep for dep in self._dep_map if dep] + + +class EggInfoDistribution(Distribution): + + def _reload_version(self): + """ + Packages installed by distutils (e.g. numpy or scipy), + which uses an old safe_version, and so + their version numbers can get mangled when + converted to filenames (e.g., 1.11.0.dev0+2329eae to + 1.11.0.dev0_2329eae). These distributions will not be + parsed properly + downstream by Distribution and safe_version, so + take an extra step and try to get the version number from + the metadata file itself instead of the filename. 
+ """ + md_version = _version_from_file(self._get_metadata(self.PKG_INFO)) + if md_version: + self._version = md_version + return self + + +class DistInfoDistribution(Distribution): + """Wrap an actual or potential sys.path entry w/metadata, .dist-info style""" + PKG_INFO = 'METADATA' + EQEQ = re.compile(r"([\(,])\s*(\d.*?)\s*([,\)])") + + @property + def _parsed_pkg_info(self): + """Parse and cache metadata""" + try: + return self._pkg_info + except AttributeError: + metadata = self.get_metadata(self.PKG_INFO) + self._pkg_info = email.parser.Parser().parsestr(metadata) + return self._pkg_info + + @property + def _dep_map(self): + try: + return self.__dep_map + except AttributeError: + self.__dep_map = self._compute_dependencies() + return self.__dep_map + + def _compute_dependencies(self): + """Recompute this distribution's dependencies.""" + dm = self.__dep_map = {None: []} + + reqs = [] + # Including any condition expressions + for req in self._parsed_pkg_info.get_all('Requires-Dist') or []: + reqs.extend(parse_requirements(req)) + + def reqs_for_extra(extra): + for req in reqs: + if not req.marker or req.marker.evaluate({'extra': extra}): + yield req + + common = frozenset(reqs_for_extra(None)) + dm[None].extend(common) + + for extra in self._parsed_pkg_info.get_all('Provides-Extra') or []: + extra = safe_extra(extra.strip()) + dm[extra] = list(frozenset(reqs_for_extra(extra)) - common) + + return dm + + +_distributionImpl = { + '.egg': Distribution, + '.egg-info': EggInfoDistribution, + '.dist-info': DistInfoDistribution, + } + + +def issue_warning(*args,**kw): + level = 1 + g = globals() + try: + # find the first stack frame that is *not* code in + # the pkg_resources module, to use for the warning + while sys._getframe(level).f_globals is g: + level += 1 + except ValueError: + pass + warnings.warn(stacklevel=level + 1, *args, **kw) + + +class RequirementParseError(ValueError): + def __str__(self): + return ' '.join(self.args) + + +def parse_requirements(strs): + """Yield ``Requirement`` objects for each specification in `strs` + + `strs` must be a string, or a (possibly-nested) iterable thereof. + """ + # create a steppable iterator, so we can handle \-continuations + lines = iter(yield_lines(strs)) + + for line in lines: + # Drop comments -- a hash without a space may be in a URL. + if ' #' in line: + line = line[:line.find(' #')] + # If there is a line continuation, drop it, and append the next line. 
+ if line.endswith('\\'): + line = line[:-2].strip() + line += next(lines) + yield Requirement(line) + + +class Requirement(packaging.requirements.Requirement): + def __init__(self, requirement_string): + """DO NOT CALL THIS UNDOCUMENTED METHOD; use Requirement.parse()!""" + try: + super(Requirement, self).__init__(requirement_string) + except packaging.requirements.InvalidRequirement as e: + raise RequirementParseError(str(e)) + self.unsafe_name = self.name + project_name = safe_name(self.name) + self.project_name, self.key = project_name, project_name.lower() + self.specs = [ + (spec.operator, spec.version) for spec in self.specifier] + self.extras = tuple(map(safe_extra, self.extras)) + self.hashCmp = ( + self.key, + self.specifier, + frozenset(self.extras), + str(self.marker) if self.marker else None, + ) + self.__hash = hash(self.hashCmp) + + def __eq__(self, other): + return ( + isinstance(other, Requirement) and + self.hashCmp == other.hashCmp + ) + + def __ne__(self, other): + return not self == other + + def __contains__(self, item): + if isinstance(item, Distribution): + if item.key != self.key: + return False + + item = item.version + + # Allow prereleases always in order to match the previous behavior of + # this method. In the future this should be smarter and follow PEP 440 + # more accurately. + return self.specifier.contains(item, prereleases=True) + + def __hash__(self): + return self.__hash + + def __repr__(self): return "Requirement.parse(%r)" % str(self) + + @staticmethod + def parse(s): + req, = parse_requirements(s) + return req + + +def _get_mro(cls): + """Get an mro for a type or classic class""" + if not isinstance(cls, type): + class cls(cls, object): pass + return cls.__mro__[1:] + return cls.__mro__ + +def _find_adapter(registry, ob): + """Return an adapter factory for `ob` from `registry`""" + for t in _get_mro(getattr(ob, '__class__', type(ob))): + if t in registry: + return registry[t] + + +def ensure_directory(path): + """Ensure that the parent directory of `path` exists""" + dirname = os.path.dirname(path) + if not os.path.isdir(dirname): + os.makedirs(dirname) + + +def _bypass_ensure_directory(path): + """Sandbox-bypassing version of ensure_directory()""" + if not WRITE_SUPPORT: + raise IOError('"os.mkdir" not supported on this platform.') + dirname, filename = split(path) + if dirname and filename and not isdir(dirname): + _bypass_ensure_directory(dirname) + mkdir(dirname, 0o755) + + +def split_sections(s): + """Split a string or iterable thereof into (section, content) pairs + + Each ``section`` is a stripped version of the section header ("[section]") + and each ``content`` is a list of stripped lines excluding blank lines and + comment-only lines. If there are any such lines before the first section + header, they're returned in a first ``section`` of ``None``. + """ + section = None + content = [] + for line in yield_lines(s): + if line.startswith("["): + if line.endswith("]"): + if section or content: + yield section, content + section = line[1:-1].strip() + content = [] + else: + raise ValueError("Invalid section heading", line) + else: + content.append(line) + + # wrap up last segment + yield section, content + +def _mkstemp(*args,**kw): + old_open = os.open + try: + # temporarily bypass sandboxing + os.open = os_open + return tempfile.mkstemp(*args,**kw) + finally: + # and then put it back + os.open = old_open + + +# Silence the PEP440Warning by default, so that end users don't get hit by it +# randomly just because they use pkg_resources. 
We want to append the rule +# because we want earlier uses of filterwarnings to take precedence over this +# one. +warnings.filterwarnings("ignore", category=PEP440Warning, append=True) + + +# from jaraco.functools 1.3 +def _call_aside(f, *args, **kwargs): + f(*args, **kwargs) + return f + + +@_call_aside +def _initialize(g=globals()): + "Set up global resource manager (deliberately not state-saved)" + manager = ResourceManager() + g['_manager'] = manager + for name in dir(manager): + if not name.startswith('_'): + g[name] = getattr(manager, name) + + +@_call_aside +def _initialize_master_working_set(): + """ + Prepare the master working set and make the ``require()`` + API available. + + This function has explicit effects on the global state + of pkg_resources. It is intended to be invoked once at + the initialization of this module. + + Invocation by other packages is unsupported and done + at their own risk. + """ + working_set = WorkingSet._build_master() + _declare_state('object', working_set=working_set) + + require = working_set.require + iter_entry_points = working_set.iter_entry_points + add_activation_listener = working_set.subscribe + run_script = working_set.run_script + # backward compatibility + run_main = run_script + # Activate all distributions already on sys.path, and ensure that + # all distributions added to the working set in the future (e.g. by + # calling ``require()``) will get activated as well. + add_activation_listener(lambda dist: dist.activate()) + working_set.entries=[] + # match order + list(map(working_set.add_entry, sys.path)) + globals().update(locals()) diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/__init__.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/__init__.py new file mode 100644 index 0000000..e69de29 diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/__about__.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/__about__.py new file mode 100644 index 0000000..baa9075 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/__about__.py @@ -0,0 +1,21 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] + +__title__ = "packaging" +__summary__ = "Core utilities for Python packages" +__uri__ = "https://github.com/pypa/packaging" + +__version__ = "16.6" + +__author__ = "Donald Stufft and individual contributors" +__email__ = "donald@stufft.io" + +__license__ = "BSD or Apache License, Version 2.0" +__copyright__ = "Copyright 2014-2016 %s" % __author__ diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/__init__.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/__init__.py new file mode 100644 index 0000000..5ee6220 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/__init__.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
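+#
+# The package intentionally exposes only its own metadata here; e.g. for
+# this vendored copy, packaging.__version__ == "16.6" (see __about__.py).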
+from __future__ import absolute_import, division, print_function + +from .__about__ import ( + __author__, __copyright__, __email__, __license__, __summary__, __title__, + __uri__, __version__ +) + +__all__ = [ + "__title__", "__summary__", "__uri__", "__version__", "__author__", + "__email__", "__license__", "__copyright__", +] diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/_compat.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/_compat.py new file mode 100644 index 0000000..210bb80 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/_compat.py @@ -0,0 +1,30 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import sys + + +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 + +# flake8: noqa + +if PY3: + string_types = str, +else: + string_types = basestring, + + +def with_metaclass(meta, *bases): + """ + Create a base class with a metaclass. + """ + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/_structures.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/_structures.py new file mode 100644 index 0000000..ccc2786 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/_structures.py @@ -0,0 +1,68 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. 
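+#
+# Infinity and NegativeInfinity (defined below) are comparison sentinels
+# used in version sort keys: Infinity compares greater than any other
+# object and NegativeInfinity less than any other, e.g. (illustrative):
+#     Infinity > (1, 2, 3)          -> True
+#     NegativeInfinity < "0.dev0"   -> True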
+from __future__ import absolute_import, division, print_function + + +class Infinity(object): + + def __repr__(self): + return "Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return False + + def __le__(self, other): + return False + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return True + + def __ge__(self, other): + return True + + def __neg__(self): + return NegativeInfinity + +Infinity = Infinity() + + +class NegativeInfinity(object): + + def __repr__(self): + return "-Infinity" + + def __hash__(self): + return hash(repr(self)) + + def __lt__(self, other): + return True + + def __le__(self, other): + return True + + def __eq__(self, other): + return isinstance(other, self.__class__) + + def __ne__(self, other): + return not isinstance(other, self.__class__) + + def __gt__(self, other): + return False + + def __ge__(self, other): + return False + + def __neg__(self): + return Infinity + +NegativeInfinity = NegativeInfinity() diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/markers.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/markers.py new file mode 100644 index 0000000..bac852d --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/markers.py @@ -0,0 +1,278 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import operator +import os +import platform +import sys + +from pkg_resources.extern.pyparsing import ParseException, ParseResults, stringStart, stringEnd +from pkg_resources.extern.pyparsing import ZeroOrMore, Group, Forward, QuotedString +from pkg_resources.extern.pyparsing import Literal as L # noqa + +from ._compat import string_types +from .specifiers import Specifier, InvalidSpecifier + + +__all__ = [ + "InvalidMarker", "UndefinedComparison", "UndefinedEnvironmentName", + "Marker", "default_environment", +] + + +class InvalidMarker(ValueError): + """ + An invalid marker was found, users should refer to PEP 508. + """ + + +class UndefinedComparison(ValueError): + """ + An invalid operation was attempted on a value that doesn't support it. + """ + + +class UndefinedEnvironmentName(ValueError): + """ + A name was attempted to be used that does not exist inside of the + environment. 
+ """ + + +class Node(object): + + def __init__(self, value): + self.value = value + + def __str__(self): + return str(self.value) + + def __repr__(self): + return "<{0}({1!r})>".format(self.__class__.__name__, str(self)) + + +class Variable(Node): + pass + + +class Value(Node): + pass + + +VARIABLE = ( + L("implementation_version") | + L("platform_python_implementation") | + L("implementation_name") | + L("python_full_version") | + L("platform_release") | + L("platform_version") | + L("platform_machine") | + L("platform_system") | + L("python_version") | + L("sys_platform") | + L("os_name") | + L("os.name") | # PEP-345 + L("sys.platform") | # PEP-345 + L("platform.version") | # PEP-345 + L("platform.machine") | # PEP-345 + L("platform.python_implementation") | # PEP-345 + L("extra") +) +VARIABLE.setParseAction(lambda s, l, t: Variable(t[0].replace('.', '_'))) + +VERSION_CMP = ( + L("===") | + L("==") | + L(">=") | + L("<=") | + L("!=") | + L("~=") | + L(">") | + L("<") +) + +MARKER_OP = VERSION_CMP | L("not in") | L("in") + +MARKER_VALUE = QuotedString("'") | QuotedString('"') +MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0])) + +BOOLOP = L("and") | L("or") + +MARKER_VAR = VARIABLE | MARKER_VALUE + +MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR) +MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0])) + +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() + +MARKER_EXPR = Forward() +MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN) +MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR) + +MARKER = stringStart + MARKER_EXPR + stringEnd + + +def _coerce_parse_result(results): + if isinstance(results, ParseResults): + return [_coerce_parse_result(i) for i in results] + else: + return results + + +def _format_marker(marker, first=True): + assert isinstance(marker, (list, tuple, string_types)) + + # Sometimes we have a structure like [[...]] which is a single item list + # where the single item is itself it's own list. In that case we want skip + # the rest of this function so that we don't get extraneous () on the + # outside. 
+ if (isinstance(marker, list) and len(marker) == 1 and + isinstance(marker[0], (list, tuple))): + return _format_marker(marker[0]) + + if isinstance(marker, list): + inner = (_format_marker(m, first=False) for m in marker) + if first: + return " ".join(inner) + else: + return "(" + " ".join(inner) + ")" + elif isinstance(marker, tuple): + return '{0} {1} "{2}"'.format(*marker) + else: + return marker + + +_operators = { + "in": lambda lhs, rhs: lhs in rhs, + "not in": lambda lhs, rhs: lhs not in rhs, + "<": operator.lt, + "<=": operator.le, + "==": operator.eq, + "!=": operator.ne, + ">=": operator.ge, + ">": operator.gt, +} + + +def _eval_op(lhs, op, rhs): + try: + spec = Specifier("".join([op, rhs])) + except InvalidSpecifier: + pass + else: + return spec.contains(lhs) + + oper = _operators.get(op) + if oper is None: + raise UndefinedComparison( + "Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs) + ) + + return oper(lhs, rhs) + + +_undefined = object() + + +def _get_env(environment, name): + value = environment.get(name, _undefined) + + if value is _undefined: + raise UndefinedEnvironmentName( + "{0!r} does not exist in evaluation environment.".format(name) + ) + + return value + + +def _evaluate_markers(markers, environment): + groups = [[]] + + for marker in markers: + assert isinstance(marker, (list, tuple, string_types)) + + if isinstance(marker, list): + groups[-1].append(_evaluate_markers(marker, environment)) + elif isinstance(marker, tuple): + lhs, op, rhs = marker + + if isinstance(lhs, Variable): + lhs_value = _get_env(environment, lhs.value) + rhs_value = rhs.value + else: + lhs_value = lhs.value + rhs_value = _get_env(environment, rhs.value) + + groups[-1].append(_eval_op(lhs_value, op, rhs_value)) + else: + assert marker in ["and", "or"] + if marker == "or": + groups.append([]) + + return any(all(item) for item in groups) + + +def format_full_version(info): + version = '{0.major}.{0.minor}.{0.micro}'.format(info) + kind = info.releaselevel + if kind != 'final': + version += kind[0] + str(info.serial) + return version + + +def default_environment(): + if hasattr(sys, 'implementation'): + iver = format_full_version(sys.implementation.version) + implementation_name = sys.implementation.name + else: + iver = '0' + implementation_name = '' + + return { + "implementation_name": implementation_name, + "implementation_version": iver, + "os_name": os.name, + "platform_machine": platform.machine(), + "platform_release": platform.release(), + "platform_system": platform.system(), + "platform_version": platform.version(), + "python_full_version": platform.python_version(), + "platform_python_implementation": platform.python_implementation(), + "python_version": platform.python_version()[:3], + "sys_platform": sys.platform, + } + + +class Marker(object): + + def __init__(self, marker): + try: + self._markers = _coerce_parse_result(MARKER.parseString(marker)) + except ParseException as e: + err_str = "Invalid marker: {0!r}, parse error at {1!r}".format( + marker, marker[e.loc:e.loc + 8]) + raise InvalidMarker(err_str) + + def __str__(self): + return _format_marker(self._markers) + + def __repr__(self): + return "<Marker({0!r})>".format(str(self)) + + def evaluate(self, environment=None): + """Evaluate a marker. + + Return the boolean from evaluating the given marker against the + environment. environment is an optional argument to override all or + part of the determined environment. + + The environment is determined from the current Python process. 
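+
+        A doctest-style sketch (the explicit override makes the result
+        deterministic across platforms)::
+
+            >>> Marker("os_name == 'posix'").evaluate({"os_name": "posix"})
+            True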
+ """ + current_environment = default_environment() + if environment is not None: + current_environment.update(environment) + + return _evaluate_markers(self._markers, current_environment) diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/requirements.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/requirements.py new file mode 100644 index 0000000..0c8c4a3 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/requirements.py @@ -0,0 +1,127 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import string +import re + +from pkg_resources.extern.pyparsing import stringStart, stringEnd, originalTextFor, ParseException +from pkg_resources.extern.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine +from pkg_resources.extern.pyparsing import Literal as L # noqa +from pkg_resources.extern.six.moves.urllib import parse as urlparse + +from .markers import MARKER_EXPR, Marker +from .specifiers import LegacySpecifier, Specifier, SpecifierSet + + +class InvalidRequirement(ValueError): + """ + An invalid requirement was found, users should refer to PEP 508. + """ + + +ALPHANUM = Word(string.ascii_letters + string.digits) + +LBRACKET = L("[").suppress() +RBRACKET = L("]").suppress() +LPAREN = L("(").suppress() +RPAREN = L(")").suppress() +COMMA = L(",").suppress() +SEMICOLON = L(";").suppress() +AT = L("@").suppress() + +PUNCTUATION = Word("-_.") +IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM) +IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END)) + +NAME = IDENTIFIER("name") +EXTRA = IDENTIFIER + +URI = Regex(r'[^ ]+')("url") +URL = (AT + URI) + +EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA) +EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras") + +VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE) +VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE) + +VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY +VERSION_MANY = Combine(VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), + joinString=",", adjacent=False)("_raw_spec") +_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY)) +_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or '') + +VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier") +VERSION_SPEC.setParseAction(lambda s, l, t: t[1]) + +MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker") +MARKER_EXPR.setParseAction( + lambda s, l, t: Marker(s[t._original_start:t._original_end]) +) +MARKER_SEPERATOR = SEMICOLON +MARKER = MARKER_SEPERATOR + MARKER_EXPR + +VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER) +URL_AND_MARKER = URL + Optional(MARKER) + +NAMED_REQUIREMENT = \ + NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER) + +REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd + + +class Requirement(object): + """Parse a requirement. + + Parse a given requirement string into its parts, such as name, specifier, + URL, and extras. Raises InvalidRequirement on a badly-formed requirement + string. + """ + + # TODO: Can we test whether something is contained within a requirement? + # If so how do we do that? Do we need to test against the _name_ of + # the thing as well as the version? What about the markers? + # TODO: Can we normalize the name and extra name? 
+ + def __init__(self, requirement_string): + try: + req = REQUIREMENT.parseString(requirement_string) + except ParseException as e: + raise InvalidRequirement( + "Invalid requirement, parse error at \"{0!r}\"".format( + requirement_string[e.loc:e.loc + 8])) + + self.name = req.name + if req.url: + parsed_url = urlparse.urlparse(req.url) + if not (parsed_url.scheme and parsed_url.netloc) or ( + not parsed_url.scheme and not parsed_url.netloc): + raise InvalidRequirement("Invalid URL given") + self.url = req.url + else: + self.url = None + self.extras = set(req.extras.asList() if req.extras else []) + self.specifier = SpecifierSet(req.specifier) + self.marker = req.marker if req.marker else None + + def __str__(self): + parts = [self.name] + + if self.extras: + parts.append("[{0}]".format(",".join(sorted(self.extras)))) + + if self.specifier: + parts.append(str(self.specifier)) + + if self.url: + parts.append("@ {0}".format(self.url)) + + if self.marker: + parts.append("; {0}".format(self.marker)) + + return "".join(parts) + + def __repr__(self): + return "<Requirement({0!r})>".format(str(self)) diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/specifiers.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/specifiers.py new file mode 100644 index 0000000..7f5a76c --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/specifiers.py @@ -0,0 +1,774 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import abc +import functools +import itertools +import re + +from ._compat import string_types, with_metaclass +from .version import Version, LegacyVersion, parse + + +class InvalidSpecifier(ValueError): + """ + An invalid specifier was found, users should refer to PEP 440. + """ + + +class BaseSpecifier(with_metaclass(abc.ABCMeta, object)): + + @abc.abstractmethod + def __str__(self): + """ + Returns the str representation of this Specifier like object. This + should be representative of the Specifier itself. + """ + + @abc.abstractmethod + def __hash__(self): + """ + Returns a hash value for this Specifier like object. + """ + + @abc.abstractmethod + def __eq__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are equal. + """ + + @abc.abstractmethod + def __ne__(self, other): + """ + Returns a boolean representing whether or not the two Specifier like + objects are not equal. + """ + + @abc.abstractproperty + def prereleases(self): + """ + Returns whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @prereleases.setter + def prereleases(self, value): + """ + Sets whether or not pre-releases as a whole are allowed by this + specifier. + """ + + @abc.abstractmethod + def contains(self, item, prereleases=None): + """ + Determines if the given item is contained within this specifier. + """ + + @abc.abstractmethod + def filter(self, iterable, prereleases=None): + """ + Takes an iterable of items and filters them so that only items which + are contained within this specifier are allowed in it. 
+ """ + + +class _IndividualSpecifier(BaseSpecifier): + + _operators = {} + + def __init__(self, spec="", prereleases=None): + match = self._regex.search(spec) + if not match: + raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec)) + + self._spec = ( + match.group("operator").strip(), + match.group("version").strip(), + ) + + # Store whether or not this Specifier should accept prereleases + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<{0}({1!r}{2})>".format( + self.__class__.__name__, + str(self), + pre, + ) + + def __str__(self): + return "{0}{1}".format(*self._spec) + + def __hash__(self): + return hash(self._spec) + + def __eq__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec == other._spec + + def __ne__(self, other): + if isinstance(other, string_types): + try: + other = self.__class__(other) + except InvalidSpecifier: + return NotImplemented + elif not isinstance(other, self.__class__): + return NotImplemented + + return self._spec != other._spec + + def _get_operator(self, op): + return getattr(self, "_compare_{0}".format(self._operators[op])) + + def _coerce_version(self, version): + if not isinstance(version, (LegacyVersion, Version)): + version = parse(version) + return version + + @property + def operator(self): + return self._spec[0] + + @property + def version(self): + return self._spec[1] + + @property + def prereleases(self): + return self._prereleases + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Determine if prereleases are to be allowed or not. + if prereleases is None: + prereleases = self.prereleases + + # Normalize item to a Version or LegacyVersion, this allows us to have + # a shortcut for ``"2.0" in Specifier(">=2") + item = self._coerce_version(item) + + # Determine if we should be supporting prereleases in this specifier + # or not, if we do not support prereleases than we can short circuit + # logic if this version is a prereleases. + if item.is_prerelease and not prereleases: + return False + + # Actually do the comparison to determine if this item is contained + # within this Specifier or not. + return self._get_operator(self.operator)(item, self.version) + + def filter(self, iterable, prereleases=None): + yielded = False + found_prereleases = [] + + kw = {"prereleases": prereleases if prereleases is not None else True} + + # Attempt to iterate over all the values in the iterable and if any of + # them match, yield them. + for version in iterable: + parsed_version = self._coerce_version(version) + + if self.contains(parsed_version, **kw): + # If our version is a prerelease, and we were not set to allow + # prereleases, then we'll store it for later incase nothing + # else matches this specifier. + if (parsed_version.is_prerelease and not + (prereleases or self.prereleases)): + found_prereleases.append(version) + # Either this is not a prerelease, or we should have been + # accepting prereleases from the begining. 
+ else: + yielded = True + yield version + + # Now that we've iterated over everything, determine if we've yielded + # any values, and if we have not and we have any prereleases stored up + # then we will go ahead and yield the prereleases. + if not yielded and found_prereleases: + for version in found_prereleases: + yield version + + +class LegacySpecifier(_IndividualSpecifier): + + _regex_str = ( + r""" + (?P<operator>(==|!=|<=|>=|<|>)) + \s* + (?P<version> + [^,;\s)]* # Since this is a "legacy" specifier, and the version + # string can be just about anything, we match everything + # except for whitespace, a semi-colon for marker support, + # a closing paren since versions can be enclosed in + # them, and a comma since it's a version separator. + ) + """ + ) + + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + } + + def _coerce_version(self, version): + if not isinstance(version, LegacyVersion): + version = LegacyVersion(str(version)) + return version + + def _compare_equal(self, prospective, spec): + return prospective == self._coerce_version(spec) + + def _compare_not_equal(self, prospective, spec): + return prospective != self._coerce_version(spec) + + def _compare_less_than_equal(self, prospective, spec): + return prospective <= self._coerce_version(spec) + + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= self._coerce_version(spec) + + def _compare_less_than(self, prospective, spec): + return prospective < self._coerce_version(spec) + + def _compare_greater_than(self, prospective, spec): + return prospective > self._coerce_version(spec) + + +def _require_version_compare(fn): + @functools.wraps(fn) + def wrapped(self, prospective, spec): + if not isinstance(prospective, Version): + return False + return fn(self, prospective, spec) + return wrapped + + +class Specifier(_IndividualSpecifier): + + _regex_str = ( + r""" + (?P<operator>(~=|==|!=|<=|>=|<|>|===)) + (?P<version> + (?: + # The identity operators allow for an escape hatch that will + # do an exact string match of the version you wish to install. + # This will not be parsed by PEP 440 and we cannot determine + # any semantic meaning from it. This operator is discouraged + # but included entirely as an escape hatch. + (?<====) # Only match for the identity operator + \s* + [^\s]* # We just match everything, except for whitespace + # since we are only testing for strict identity. + ) + | + (?: + # The (non)equality operators allow for wild card and local + # versions to be specified so we have to define these two + # operators separately to enable that. + (?<===|!=) # Only match for equals and not equals + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + + # You cannot use a wild card and a dev or local version + # together so group them with a | and make them optional. + (?: + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + (?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local + | + \.\* # Wild card syntax of .* + )? + ) + | + (?: + # The compatible operator requires at least two digits in the + # release segment. + (?<=~=) # Only match for the compatible operator + + \s* + v? + (?:[0-9]+!)? 
# epoch + [0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *) + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + | + (?: + # All other operators only allow a sub set of what the + # (non)equality operators do. Specifically they do not allow + # local versions to be specified nor do they allow the prefix + # matching wild cards. + (?<!==|!=|~=) # We have special cases for these + # operators so we want to make sure they + # don't match here. + + \s* + v? + (?:[0-9]+!)? # epoch + [0-9]+(?:\.[0-9]+)* # release + (?: # pre release + [-_\.]? + (a|b|c|rc|alpha|beta|pre|preview) + [-_\.]? + [0-9]* + )? + (?: # post release + (?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*) + )? + (?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release + ) + ) + """ + ) + + _regex = re.compile( + r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE) + + _operators = { + "~=": "compatible", + "==": "equal", + "!=": "not_equal", + "<=": "less_than_equal", + ">=": "greater_than_equal", + "<": "less_than", + ">": "greater_than", + "===": "arbitrary", + } + + @_require_version_compare + def _compare_compatible(self, prospective, spec): + # Compatible releases have an equivalent combination of >= and ==. That + # is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to + # implement this in terms of the other specifiers instead of + # implementing it ourselves. The only thing we need to do is construct + # the other specifiers. + + # We want everything but the last item in the version, but we want to + # ignore post and dev releases and we want to treat the pre-release as + # it's own separate segment. + prefix = ".".join( + list( + itertools.takewhile( + lambda x: (not x.startswith("post") and not + x.startswith("dev")), + _version_split(spec), + ) + )[:-1] + ) + + # Add the prefix notation to the end of our string + prefix += ".*" + + return (self._get_operator(">=")(prospective, spec) and + self._get_operator("==")(prospective, prefix)) + + @_require_version_compare + def _compare_equal(self, prospective, spec): + # We need special logic to handle prefix matching + if spec.endswith(".*"): + # In the case of prefix matching we want to ignore local segment. + prospective = Version(prospective.public) + # Split the spec out by dots, and pretend that there is an implicit + # dot in between a release segment and a pre-release segment. + spec = _version_split(spec[:-2]) # Remove the trailing .* + + # Split the prospective version out by dots, and pretend that there + # is an implicit dot in between a release segment and a pre-release + # segment. + prospective = _version_split(str(prospective)) + + # Shorten the prospective version to be the same length as the spec + # so that we can determine if the specifier is a prefix of the + # prospective version or not. + prospective = prospective[:len(spec)] + + # Pad out our two sides with zeros so that they both equal the same + # length. + spec, prospective = _pad_version(spec, prospective) + else: + # Convert our spec string into a Version + spec = Version(spec) + + # If the specifier does not have a local segment, then we want to + # act as if the prospective version also does not have a local + # segment. 
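+            # e.g. "==1.0" accepts the candidate "1.0+anything", while
+            # "==1.0+other" only matches that exact local version.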
+ if not spec.local: + prospective = Version(prospective.public) + + return prospective == spec + + @_require_version_compare + def _compare_not_equal(self, prospective, spec): + return not self._compare_equal(prospective, spec) + + @_require_version_compare + def _compare_less_than_equal(self, prospective, spec): + return prospective <= Version(spec) + + @_require_version_compare + def _compare_greater_than_equal(self, prospective, spec): + return prospective >= Version(spec) + + @_require_version_compare + def _compare_less_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is less than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective < spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a pre-release version, that we do not accept pre-release + # versions for the version mentioned in the specifier (e.g. <3.1 should + # not match 3.1.dev0, but should match 3.0.dev0). + if not spec.is_prerelease and prospective.is_prerelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # less than the spec version *and* it's not a pre-release of the same + # version in the spec. + return True + + @_require_version_compare + def _compare_greater_than(self, prospective, spec): + # Convert our spec to a Version instance, since we'll want to work with + # it as a version. + spec = Version(spec) + + # Check to see if the prospective version is greater than the spec + # version. If it's not we can short circuit and just return False now + # instead of doing extra unneeded work. + if not prospective > spec: + return False + + # This special case is here so that, unless the specifier itself + # includes is a post-release version, that we do not accept + # post-release versions for the version mentioned in the specifier + # (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0). + if not spec.is_postrelease and prospective.is_postrelease: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # Ensure that we do not allow a local version of the version mentioned + # in the specifier, which is techincally greater than, to match. + if prospective.local is not None: + if Version(prospective.base_version) == Version(spec.base_version): + return False + + # If we've gotten to here, it means that prospective version is both + # greater than the spec version *and* it's not a pre-release of the + # same version in the spec. + return True + + def _compare_arbitrary(self, prospective, spec): + return str(prospective).lower() == str(spec).lower() + + @property + def prereleases(self): + # If there is an explicit prereleases set for this, then we'll just + # blindly use that. + if self._prereleases is not None: + return self._prereleases + + # Look at all of our specifiers and determine if they are inclusive + # operators, and if they are if they are including an explicit + # prerelease. + operator, version = self._spec + if operator in ["==", ">=", "<=", "~=", "==="]: + # The == specifier can include a trailing .*, if it does we + # want to remove before parsing. 
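+            # e.g. "==2.2.*" is inspected as "2.2" (not a pre-release),
+            # while ">=1.0a1" makes this specifier accept pre-releases.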
+ if operator == "==" and version.endswith(".*"): + version = version[:-2] + + # Parse the version, and if it is a pre-release than this + # specifier allows pre-releases. + if parse(version).is_prerelease: + return True + + return False + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + +_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$") + + +def _version_split(version): + result = [] + for item in version.split("."): + match = _prefix_regex.search(item) + if match: + result.extend(match.groups()) + else: + result.append(item) + return result + + +def _pad_version(left, right): + left_split, right_split = [], [] + + # Get the release segment of our versions + left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left))) + right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right))) + + # Get the rest of our versions + left_split.append(left[len(left_split[0]):]) + right_split.append(right[len(right_split[0]):]) + + # Insert our padding + left_split.insert( + 1, + ["0"] * max(0, len(right_split[0]) - len(left_split[0])), + ) + right_split.insert( + 1, + ["0"] * max(0, len(left_split[0]) - len(right_split[0])), + ) + + return ( + list(itertools.chain(*left_split)), + list(itertools.chain(*right_split)), + ) + + +class SpecifierSet(BaseSpecifier): + + def __init__(self, specifiers="", prereleases=None): + # Split on , to break each indidivual specifier into it's own item, and + # strip each item to remove leading/trailing whitespace. + specifiers = [s.strip() for s in specifiers.split(",") if s.strip()] + + # Parsed each individual specifier, attempting first to make it a + # Specifier and falling back to a LegacySpecifier. + parsed = set() + for specifier in specifiers: + try: + parsed.add(Specifier(specifier)) + except InvalidSpecifier: + parsed.add(LegacySpecifier(specifier)) + + # Turn our parsed specifiers into a frozen set and save them for later. + self._specs = frozenset(parsed) + + # Store our prereleases value so we can use it later to determine if + # we accept prereleases or not. + self._prereleases = prereleases + + def __repr__(self): + pre = ( + ", prereleases={0!r}".format(self.prereleases) + if self._prereleases is not None + else "" + ) + + return "<SpecifierSet({0!r}{1})>".format(str(self), pre) + + def __str__(self): + return ",".join(sorted(str(s) for s in self._specs)) + + def __hash__(self): + return hash(self._specs) + + def __and__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + specifier = SpecifierSet() + specifier._specs = frozenset(self._specs | other._specs) + + if self._prereleases is None and other._prereleases is not None: + specifier._prereleases = other._prereleases + elif self._prereleases is not None and other._prereleases is None: + specifier._prereleases = self._prereleases + elif self._prereleases == other._prereleases: + specifier._prereleases = self._prereleases + else: + raise ValueError( + "Cannot combine SpecifierSets with True and False prerelease " + "overrides." 
+ ) + + return specifier + + def __eq__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs == other._specs + + def __ne__(self, other): + if isinstance(other, string_types): + other = SpecifierSet(other) + elif isinstance(other, _IndividualSpecifier): + other = SpecifierSet(str(other)) + elif not isinstance(other, SpecifierSet): + return NotImplemented + + return self._specs != other._specs + + def __len__(self): + return len(self._specs) + + def __iter__(self): + return iter(self._specs) + + @property + def prereleases(self): + # If we have been given an explicit prerelease modifier, then we'll + # pass that through here. + if self._prereleases is not None: + return self._prereleases + + # If we don't have any specifiers, and we don't have a forced value, + # then we'll just return None since we don't know if this should have + # pre-releases or not. + if not self._specs: + return None + + # Otherwise we'll see if any of the given specifiers accept + # prereleases, if any of them do we'll return True, otherwise False. + return any(s.prereleases for s in self._specs) + + @prereleases.setter + def prereleases(self, value): + self._prereleases = value + + def __contains__(self, item): + return self.contains(item) + + def contains(self, item, prereleases=None): + # Ensure that our item is a Version or LegacyVersion instance. + if not isinstance(item, (LegacyVersion, Version)): + item = parse(item) + + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # We can determine if we're going to allow pre-releases by looking to + # see if any of the underlying items supports them. If none of them do + # and this item is a pre-release then we do not allow it and we can + # short circuit that here. + # Note: This means that 1.0.dev1 would not be contained in something + # like >=1.0.devabc however it would be in >=1.0.debabc,>0.0.dev0 + if not prereleases and item.is_prerelease: + return False + + # We simply dispatch to the underlying specs here to make sure that the + # given version is contained within all of them. + # Note: This use of all() here means that an empty set of specifiers + # will always return True, this is an explicit design decision. + return all( + s.contains(item, prereleases=prereleases) + for s in self._specs + ) + + def filter(self, iterable, prereleases=None): + # Determine if we're forcing a prerelease or not, if we're not forcing + # one for this particular filter call, then we'll use whatever the + # SpecifierSet thinks for whether or not we should support prereleases. + if prereleases is None: + prereleases = self.prereleases + + # If we have any specifiers, then we want to wrap our iterable in the + # filter method for each one, this will act as a logical AND amongst + # each specifier. + if self._specs: + for spec in self._specs: + iterable = spec.filter(iterable, prereleases=bool(prereleases)) + return iterable + # If we do not have any specifiers, then we need to have a rough filter + # which will filter out any pre-releases, unless there are no final + # releases, and which will filter out LegacyVersion in general. 
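+        # e.g. (illustrative) SpecifierSet("").filter(["1.0", "2.0a1"])
+        # yields only "1.0", while SpecifierSet("").filter(["1.0a1"]) keeps
+        # the lone pre-release rather than returning nothing.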
+ else: + filtered = [] + found_prereleases = [] + + for item in iterable: + # Ensure that we some kind of Version class for this item. + if not isinstance(item, (LegacyVersion, Version)): + parsed_version = parse(item) + else: + parsed_version = item + + # Filter out any item which is parsed as a LegacyVersion + if isinstance(parsed_version, LegacyVersion): + continue + + # Store any item which is a pre-release for later unless we've + # already found a final version or we are accepting prereleases + if parsed_version.is_prerelease and not prereleases: + if not filtered: + found_prereleases.append(item) + else: + filtered.append(item) + + # If we've found no items except for pre-releases, then we'll go + # ahead and use the pre-releases + if not filtered and found_prereleases and prereleases is None: + return found_prereleases + + return filtered diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/utils.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/utils.py new file mode 100644 index 0000000..942387c --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/utils.py @@ -0,0 +1,14 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import re + + +_canonicalize_regex = re.compile(r"[-_.]+") + + +def canonicalize_name(name): + # This is taken from PEP 503. + return _canonicalize_regex.sub("-", name).lower() diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/version.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/version.py new file mode 100644 index 0000000..83b5ee8 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/packaging/version.py @@ -0,0 +1,393 @@ +# This file is dual licensed under the terms of the Apache License, Version +# 2.0, and the BSD License. See the LICENSE file in the root of this repository +# for complete details. +from __future__ import absolute_import, division, print_function + +import collections +import itertools +import re + +from ._structures import Infinity + + +__all__ = [ + "parse", "Version", "LegacyVersion", "InvalidVersion", "VERSION_PATTERN" +] + + +_Version = collections.namedtuple( + "_Version", + ["epoch", "release", "dev", "pre", "post", "local"], +) + + +def parse(version): + """ + Parse the given version string and return either a :class:`Version` object + or a :class:`LegacyVersion` object depending on if the given version is + a valid PEP 440 version or a legacy version. + """ + try: + return Version(version) + except InvalidVersion: + return LegacyVersion(version) + + +class InvalidVersion(ValueError): + """ + An invalid version was found, users should refer to PEP 440. 
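+
+    For example, ``Version("french toast")`` raises this error, whereas
+    :func:`parse` falls back to :class:`LegacyVersion` instead of raising.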
+ """ + + +class _BaseVersion(object): + + def __hash__(self): + return hash(self._key) + + def __lt__(self, other): + return self._compare(other, lambda s, o: s < o) + + def __le__(self, other): + return self._compare(other, lambda s, o: s <= o) + + def __eq__(self, other): + return self._compare(other, lambda s, o: s == o) + + def __ge__(self, other): + return self._compare(other, lambda s, o: s >= o) + + def __gt__(self, other): + return self._compare(other, lambda s, o: s > o) + + def __ne__(self, other): + return self._compare(other, lambda s, o: s != o) + + def _compare(self, other, method): + if not isinstance(other, _BaseVersion): + return NotImplemented + + return method(self._key, other._key) + + +class LegacyVersion(_BaseVersion): + + def __init__(self, version): + self._version = str(version) + self._key = _legacy_cmpkey(self._version) + + def __str__(self): + return self._version + + def __repr__(self): + return "<LegacyVersion({0})>".format(repr(str(self))) + + @property + def public(self): + return self._version + + @property + def base_version(self): + return self._version + + @property + def local(self): + return None + + @property + def is_prerelease(self): + return False + + @property + def is_postrelease(self): + return False + + +_legacy_version_component_re = re.compile( + r"(\d+ | [a-z]+ | \.| -)", re.VERBOSE, +) + +_legacy_version_replacement_map = { + "pre": "c", "preview": "c", "-": "final-", "rc": "c", "dev": "@", +} + + +def _parse_version_parts(s): + for part in _legacy_version_component_re.split(s): + part = _legacy_version_replacement_map.get(part, part) + + if not part or part == ".": + continue + + if part[:1] in "0123456789": + # pad for numeric comparison + yield part.zfill(8) + else: + yield "*" + part + + # ensure that alpha/beta/candidate are before final + yield "*final" + + +def _legacy_cmpkey(version): + # We hardcode an epoch of -1 here. A PEP 440 version can only have a epoch + # greater than or equal to 0. This will effectively put the LegacyVersion, + # which uses the defacto standard originally implemented by setuptools, + # as before all PEP 440 versions. + epoch = -1 + + # This scheme is taken from pkg_resources.parse_version setuptools prior to + # it's adoption of the packaging library. + parts = [] + for part in _parse_version_parts(version.lower()): + if part.startswith("*"): + # remove "-" before a prerelease tag + if part < "*final": + while parts and parts[-1] == "*final-": + parts.pop() + + # remove trailing zeros from each series of numeric parts + while parts and parts[-1] == "00000000": + parts.pop() + + parts.append(part) + parts = tuple(parts) + + return epoch, parts + +# Deliberately not anchored to the start and end of the string, to make it +# easier for 3rd party code to reuse +VERSION_PATTERN = r""" + v? + (?: + (?:(?P<epoch>[0-9]+)!)? # epoch + (?P<release>[0-9]+(?:\.[0-9]+)*) # release segment + (?P<pre> # pre-release + [-_\.]? + (?P<pre_l>(a|b|c|rc|alpha|beta|pre|preview)) + [-_\.]? + (?P<pre_n>[0-9]+)? + )? + (?P<post> # post release + (?:-(?P<post_n1>[0-9]+)) + | + (?: + [-_\.]? + (?P<post_l>post|rev|r) + [-_\.]? + (?P<post_n2>[0-9]+)? + ) + )? + (?P<dev> # dev release + [-_\.]? + (?P<dev_l>dev) + [-_\.]? + (?P<dev_n>[0-9]+)? + )? + ) + (?:\+(?P<local>[a-z0-9]+(?:[-_\.][a-z0-9]+)*))? 
# local version +""" + + +class Version(_BaseVersion): + + _regex = re.compile( + r"^\s*" + VERSION_PATTERN + r"\s*$", + re.VERBOSE | re.IGNORECASE, + ) + + def __init__(self, version): + # Validate the version and parse it into pieces + match = self._regex.search(version) + if not match: + raise InvalidVersion("Invalid version: '{0}'".format(version)) + + # Store the parsed out pieces of the version + self._version = _Version( + epoch=int(match.group("epoch")) if match.group("epoch") else 0, + release=tuple(int(i) for i in match.group("release").split(".")), + pre=_parse_letter_version( + match.group("pre_l"), + match.group("pre_n"), + ), + post=_parse_letter_version( + match.group("post_l"), + match.group("post_n1") or match.group("post_n2"), + ), + dev=_parse_letter_version( + match.group("dev_l"), + match.group("dev_n"), + ), + local=_parse_local_version(match.group("local")), + ) + + # Generate a key which will be used for sorting + self._key = _cmpkey( + self._version.epoch, + self._version.release, + self._version.pre, + self._version.post, + self._version.dev, + self._version.local, + ) + + def __repr__(self): + return "<Version({0})>".format(repr(str(self))) + + def __str__(self): + parts = [] + + # Epoch + if self._version.epoch != 0: + parts.append("{0}!".format(self._version.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self._version.release)) + + # Pre-release + if self._version.pre is not None: + parts.append("".join(str(x) for x in self._version.pre)) + + # Post-release + if self._version.post is not None: + parts.append(".post{0}".format(self._version.post[1])) + + # Development release + if self._version.dev is not None: + parts.append(".dev{0}".format(self._version.dev[1])) + + # Local version segment + if self._version.local is not None: + parts.append( + "+{0}".format(".".join(str(x) for x in self._version.local)) + ) + + return "".join(parts) + + @property + def public(self): + return str(self).split("+", 1)[0] + + @property + def base_version(self): + parts = [] + + # Epoch + if self._version.epoch != 0: + parts.append("{0}!".format(self._version.epoch)) + + # Release segment + parts.append(".".join(str(x) for x in self._version.release)) + + return "".join(parts) + + @property + def local(self): + version_string = str(self) + if "+" in version_string: + return version_string.split("+", 1)[1] + + @property + def is_prerelease(self): + return bool(self._version.dev or self._version.pre) + + @property + def is_postrelease(self): + return bool(self._version.post) + + +def _parse_letter_version(letter, number): + if letter: + # We consider there to be an implicit 0 in a pre-release if there is + # not a numeral associated with it. + if number is None: + number = 0 + + # We normalize any letters to their lower case form + letter = letter.lower() + + # We consider some words to be alternate spellings of other words and + # in those cases we want to normalize the spellings to our preferred + # spelling. + if letter == "alpha": + letter = "a" + elif letter == "beta": + letter = "b" + elif letter in ["c", "pre", "preview"]: + letter = "rc" + elif letter in ["rev", "r"]: + letter = "post" + + return letter, int(number) + if not letter and number: + # We assume if we are given a number, but we are not given a letter + # then this is using the implicit post release syntax (e.g. 
1.0-1) + letter = "post" + + return letter, int(number) + + +_local_version_seperators = re.compile(r"[\._-]") + + +def _parse_local_version(local): + """ + Takes a string like abc.1.twelve and turns it into ("abc", 1, "twelve"). + """ + if local is not None: + return tuple( + part.lower() if not part.isdigit() else int(part) + for part in _local_version_seperators.split(local) + ) + + +def _cmpkey(epoch, release, pre, post, dev, local): + # When we compare a release version, we want to compare it with all of the + # trailing zeros removed. So we'll use a reverse the list, drop all the now + # leading zeros until we come to something non zero, then take the rest + # re-reverse it back into the correct order and make it a tuple and use + # that for our sorting key. + release = tuple( + reversed(list( + itertools.dropwhile( + lambda x: x == 0, + reversed(release), + ) + )) + ) + + # We need to "trick" the sorting algorithm to put 1.0.dev0 before 1.0a0. + # We'll do this by abusing the pre segment, but we _only_ want to do this + # if there is not a pre or a post segment. If we have one of those then + # the normal sorting rules will handle this case correctly. + if pre is None and post is None and dev is not None: + pre = -Infinity + # Versions without a pre-release (except as noted above) should sort after + # those with one. + elif pre is None: + pre = Infinity + + # Versions without a post segment should sort before those with one. + if post is None: + post = -Infinity + + # Versions without a development segment should sort after those with one. + if dev is None: + dev = Infinity + + if local is None: + # Versions without a local segment should sort before those with one. + local = -Infinity + else: + # Versions with a local segment need that segment parsed to implement + # the sorting rules in PEP440. + # - Alpha numeric segments sort before numeric segments + # - Alpha numeric segments sort lexicographically + # - Numeric segments sort numerically + # - Shorter versions sort before longer versions when the prefixes + # match exactly + local = tuple( + (i, "") if isinstance(i, int) else (-Infinity, i) + for i in local + ) + + return epoch, release, pre, post, dev, local diff --git a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/pyparsing.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/pyparsing.py new file mode 100644 index 0000000..3e02dbe --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/pyparsing.py @@ -0,0 +1,3805 @@ +# module pyparsing.py +# +# Copyright (c) 2003-2015 Paul T. McGuire +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
+# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +# + +__doc__ = \ +""" +pyparsing module - Classes and methods to define and execute parsing grammars + +The pyparsing module is an alternative approach to creating and executing simple grammars, +vs. the traditional lex/yacc approach, or the use of regular expressions. With pyparsing, you +don't need to learn a new syntax for defining grammars or matching expressions - the parsing module +provides a library of classes that you use to construct the grammar directly in Python. + +Here is a program to parse "Hello, World!" (or any greeting of the form C{"<salutation>, <addressee>!"}):: + + from pyparsing import Word, alphas + + # define grammar of a greeting + greet = Word( alphas ) + "," + Word( alphas ) + "!" + + hello = "Hello, World!" + print (hello, "->", greet.parseString( hello )) + +The program outputs the following:: + + Hello, World! -> ['Hello', ',', 'World', '!'] + +The Python representation of the grammar is quite readable, owing to the self-explanatory +class names, and the use of '+', '|' and '^' operators. + +The parsed results returned from C{parseString()} can be accessed as a nested list, a dictionary, or an +object with named attributes. + +The pyparsing module handles some of the problems that are typically vexing when writing text parsers: + - extra or missing whitespace (the above program will also handle "Hello,World!", "Hello , World !", etc.) + - quoted strings + - embedded comments +""" + +__version__ = "2.0.6" +__versionTime__ = "9 Nov 2015 19:03" +__author__ = "Paul McGuire <ptmcg@users.sourceforge.net>" + +import string +from weakref import ref as wkref +import copy +import sys +import warnings +import re +import sre_constants +import collections +import pprint +import functools +import itertools + +#~ sys.stderr.write( "testing pyparsing module, version %s, %s\n" % (__version__,__versionTime__ ) ) + +__all__ = [ +'And', 'CaselessKeyword', 'CaselessLiteral', 'CharsNotIn', 'Combine', 'Dict', 'Each', 'Empty', +'FollowedBy', 'Forward', 'GoToColumn', 'Group', 'Keyword', 'LineEnd', 'LineStart', 'Literal', +'MatchFirst', 'NoMatch', 'NotAny', 'OneOrMore', 'OnlyOnce', 'Optional', 'Or', +'ParseBaseException', 'ParseElementEnhance', 'ParseException', 'ParseExpression', 'ParseFatalException', +'ParseResults', 'ParseSyntaxException', 'ParserElement', 'QuotedString', 'RecursiveGrammarException', +'Regex', 'SkipTo', 'StringEnd', 'StringStart', 'Suppress', 'Token', 'TokenConverter', 'Upcase', +'White', 'Word', 'WordEnd', 'WordStart', 'ZeroOrMore', +'alphanums', 'alphas', 'alphas8bit', 'anyCloseTag', 'anyOpenTag', 'cStyleComment', 'col', +'commaSeparatedList', 'commonHTMLEntity', 'countedArray', 'cppStyleComment', 'dblQuotedString', +'dblSlashComment', 'delimitedList', 'dictOf', 'downcaseTokens', 'empty', 'hexnums', +'htmlComment', 'javaStyleComment', 'keepOriginalText', 'line', 'lineEnd', 'lineStart', 'lineno', +'makeHTMLTags', 'makeXMLTags', 'matchOnlyAtCol', 'matchPreviousExpr', 'matchPreviousLiteral', +'nestedExpr', 'nullDebugAction', 'nums', 'oneOf', 'opAssoc', 'operatorPrecedence', 'printables', +'punc8bit', 'pythonStyleComment', 'quotedString', 'removeQuotes', 'replaceHTMLEntity', +'replaceWith', 'restOfLine', 'sglQuotedString', 'srange', 'stringEnd', +'stringStart', 'traceParseAction', 'unicodeString', 
'upcaseTokens', 'withAttribute', +'indentedBlock', 'originalTextFor', 'ungroup', 'infixNotation','locatedExpr', 'withClass', +] + +PY_3 = sys.version.startswith('3') +if PY_3: + _MAX_INT = sys.maxsize + basestring = str + unichr = chr + _ustr = str + + # build list of single arg builtins, that can be used as parse actions + singleArgBuiltins = [sum, len, sorted, reversed, list, tuple, set, any, all, min, max] + +else: + _MAX_INT = sys.maxint + range = xrange + + def _ustr(obj): + """Drop-in replacement for str(obj) that tries to be Unicode friendly. It first tries + str(obj). If that fails with a UnicodeEncodeError, then it tries unicode(obj). It + then < returns the unicode object | encodes it with the default encoding | ... >. + """ + if isinstance(obj,unicode): + return obj + + try: + # If this works, then _ustr(obj) has the same behaviour as str(obj), so + # it won't break any existing code. + return str(obj) + + except UnicodeEncodeError: + # The Python docs (http://docs.python.org/ref/customization.html#l2h-182) + # state that "The return value must be a string object". However, does a + # unicode object (being a subclass of basestring) count as a "string + # object"? + # If so, then return a unicode object: + return unicode(obj) + # Else encode it... but how? There are many choices... :) + # Replace unprintables with escape codes? + #return unicode(obj).encode(sys.getdefaultencoding(), 'backslashreplace_errors') + # Replace unprintables with question marks? + #return unicode(obj).encode(sys.getdefaultencoding(), 'replace') + # ... + + # build list of single arg builtins, tolerant of Python version, that can be used as parse actions + singleArgBuiltins = [] + import __builtin__ + for fname in "sum len sorted reversed list tuple set any all min max".split(): + try: + singleArgBuiltins.append(getattr(__builtin__,fname)) + except AttributeError: + continue + +_generatorType = type((y for y in range(1))) + +def _xml_escape(data): + """Escape &, <, >, ", ', etc. 
in a string of data.""" + + # ampersand must be replaced first + from_symbols = '&><"\'' + to_symbols = ('&'+s+';' for s in "amp gt lt quot apos".split()) + for from_,to_ in zip(from_symbols, to_symbols): + data = data.replace(from_, to_) + return data + +class _Constants(object): + pass + +alphas = string.ascii_lowercase + string.ascii_uppercase +nums = "0123456789" +hexnums = nums + "ABCDEFabcdef" +alphanums = alphas + nums +_bslash = chr(92) +printables = "".join(c for c in string.printable if c not in string.whitespace) + +class ParseBaseException(Exception): + """base exception class for all parsing runtime exceptions""" + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, pstr, loc=0, msg=None, elem=None ): + self.loc = loc + if msg is None: + self.msg = pstr + self.pstr = "" + else: + self.msg = msg + self.pstr = pstr + self.parserElement = elem + + def __getattr__( self, aname ): + """supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + if( aname == "lineno" ): + return lineno( self.loc, self.pstr ) + elif( aname in ("col", "column") ): + return col( self.loc, self.pstr ) + elif( aname == "line" ): + return line( self.loc, self.pstr ) + else: + raise AttributeError(aname) + + def __str__( self ): + return "%s (at char %d), (line:%d, col:%d)" % \ + ( self.msg, self.loc, self.lineno, self.column ) + def __repr__( self ): + return _ustr(self) + def markInputline( self, markerString = ">!<" ): + """Extracts the exception line from the input string, and marks + the location of the exception with a special symbol. + """ + line_str = self.line + line_column = self.column - 1 + if markerString: + line_str = "".join((line_str[:line_column], + markerString, line_str[line_column:])) + return line_str.strip() + def __dir__(self): + return "loc msg pstr parserElement lineno col line " \ + "markInputline __str__ __repr__".split() + +class ParseException(ParseBaseException): + """exception thrown when parse expressions don't match class; + supported attributes by name are: + - lineno - returns the line number of the exception text + - col - returns the column number of the exception text + - line - returns the line containing the exception text + """ + pass + +class ParseFatalException(ParseBaseException): + """user-throwable exception thrown when inconsistent parse content + is found; stops all parsing immediately""" + pass + +class ParseSyntaxException(ParseFatalException): + """just like C{L{ParseFatalException}}, but thrown internally when an + C{L{ErrorStop<And._ErrorStop>}} ('-' operator) indicates that parsing is to stop immediately because + an unbacktrackable syntax error has been found""" + def __init__(self, pe): + super(ParseSyntaxException, self).__init__( + pe.pstr, pe.loc, pe.msg, pe.parserElement) + +#~ class ReparseException(ParseBaseException): + #~ """Experimental class - parse actions can raise this exception to cause + #~ pyparsing to reparse the input string: + #~ - with a modified input string, and/or + #~ - with a modified start location + #~ Set the values of the ReparseException in the constructor, and raise the + #~ exception in a parse action to cause pyparsing to use the new string/location. + #~ Setting the values as None causes no change to be made. 
+ #~ """ + #~ def __init_( self, newstring, restartLoc ): + #~ self.newParseText = newstring + #~ self.reparseLoc = restartLoc + +class RecursiveGrammarException(Exception): + """exception thrown by C{validate()} if the grammar could be improperly recursive""" + def __init__( self, parseElementList ): + self.parseElementTrace = parseElementList + + def __str__( self ): + return "RecursiveGrammarException: %s" % self.parseElementTrace + +class _ParseResultsWithOffset(object): + def __init__(self,p1,p2): + self.tup = (p1,p2) + def __getitem__(self,i): + return self.tup[i] + def __repr__(self): + return repr(self.tup) + def setOffset(self,i): + self.tup = (self.tup[0],i) + +class ParseResults(object): + """Structured parse results, to provide multiple means of access to the parsed data: + - as a list (C{len(results)}) + - by list index (C{results[0], results[1]}, etc.) + - by attribute (C{results.<resultsName>}) + """ + def __new__(cls, toklist, name=None, asList=True, modal=True ): + if isinstance(toklist, cls): + return toklist + retobj = object.__new__(cls) + retobj.__doinit = True + return retobj + + # Performance tuning: we construct a *lot* of these, so keep this + # constructor as small and fast as possible + def __init__( self, toklist, name=None, asList=True, modal=True, isinstance=isinstance ): + if self.__doinit: + self.__doinit = False + self.__name = None + self.__parent = None + self.__accumNames = {} + if isinstance(toklist, list): + self.__toklist = toklist[:] + elif isinstance(toklist, _generatorType): + self.__toklist = list(toklist) + else: + self.__toklist = [toklist] + self.__tokdict = dict() + + if name is not None and name: + if not modal: + self.__accumNames[name] = 0 + if isinstance(name,int): + name = _ustr(name) # will always return a str, but use _ustr for consistency + self.__name = name + if not (isinstance(toklist, (type(None), basestring, list)) and toklist in (None,'',[])): + if isinstance(toklist,basestring): + toklist = [ toklist ] + if asList: + if isinstance(toklist,ParseResults): + self[name] = _ParseResultsWithOffset(toklist.copy(),0) + else: + self[name] = _ParseResultsWithOffset(ParseResults(toklist[0]),0) + self[name].__name = name + else: + try: + self[name] = toklist[0] + except (KeyError,TypeError,IndexError): + self[name] = toklist + + def __getitem__( self, i ): + if isinstance( i, (int,slice) ): + return self.__toklist[i] + else: + if i not in self.__accumNames: + return self.__tokdict[i][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[i] ]) + + def __setitem__( self, k, v, isinstance=isinstance ): + if isinstance(v,_ParseResultsWithOffset): + self.__tokdict[k] = self.__tokdict.get(k,list()) + [v] + sub = v[0] + elif isinstance(k,int): + self.__toklist[k] = v + sub = v + else: + self.__tokdict[k] = self.__tokdict.get(k,list()) + [_ParseResultsWithOffset(v,0)] + sub = v + if isinstance(sub,ParseResults): + sub.__parent = wkref(self) + + def __delitem__( self, i ): + if isinstance(i,(int,slice)): + mylen = len( self.__toklist ) + del self.__toklist[i] + + # convert int to slice + if isinstance(i, int): + if i < 0: + i += mylen + i = slice(i, i+1) + # get removed indices + removed = list(range(*i.indices(mylen))) + removed.reverse() + # fixup indices in token dictionary + #~ for name in self.__tokdict: + #~ occurrences = self.__tokdict[name] + #~ for j in removed: + #~ for k, (value, position) in enumerate(occurrences): + #~ occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) + for name,occurrences in 
self.__tokdict.items(): + for j in removed: + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position - (position > j)) + else: + del self.__tokdict[i] + + def __contains__( self, k ): + return k in self.__tokdict + + def __len__( self ): return len( self.__toklist ) + def __bool__(self): return len( self.__toklist ) > 0 + __nonzero__ = __bool__ + def __iter__( self ): return iter( self.__toklist ) + def __reversed__( self ): return iter( self.__toklist[::-1] ) + def iterkeys( self ): + """Returns all named result keys.""" + if hasattr(self.__tokdict, "iterkeys"): + return self.__tokdict.iterkeys() + else: + return iter(self.__tokdict) + + def itervalues( self ): + """Returns all named result values.""" + return (self[k] for k in self.iterkeys()) + + def iteritems( self ): + return ((k, self[k]) for k in self.iterkeys()) + + if PY_3: + keys = iterkeys + values = itervalues + items = iteritems + else: + def keys( self ): + """Returns all named result keys.""" + return list(self.iterkeys()) + + def values( self ): + """Returns all named result values.""" + return list(self.itervalues()) + + def items( self ): + """Returns all named result keys and values as a list of tuples.""" + return list(self.iteritems()) + + def haskeys( self ): + """Since keys() returns an iterator, this method is helpful in bypassing + code that looks for the existence of any defined results names.""" + return bool(self.__tokdict) + + def pop( self, *args, **kwargs): + """Removes and returns item at specified index (default=last). + Supports both list and dict semantics for pop(). If passed no + argument or an integer argument, it will use list semantics + and pop tokens from the list of parsed tokens. If passed a + non-integer argument (most likely a string), it will use dict + semantics and pop the corresponding value from any defined + results names. 
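The list/dict duality that ParseResults implements here is easiest to see with a tiny grammar; the names and input below are invented for illustration:

    from pyparsing import Word, alphas, nums

    person = Word(alphas)("first") + Word(alphas)("last") + Word(nums)("age")
    result = person.parseString("Jane Doe 42")

    assert result[0] == "Jane"        # list-style access by index
    assert result["age"] == "42"      # dict-style access by results name
    assert result.age == "42"         # attribute access via __getattr__
    assert result.pop("age") == "42"  # dict-semantics pop(), as described above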
A second default return value argument is + supported, just as in dict.pop().""" + if not args: + args = [-1] + for k,v in kwargs.items(): + if k == 'default': + args = (args[0], v) + else: + raise TypeError("pop() got an unexpected keyword argument '%s'" % k) + if (isinstance(args[0], int) or + len(args) == 1 or + args[0] in self): + index = args[0] + ret = self[index] + del self[index] + return ret + else: + defaultvalue = args[1] + return defaultvalue + + def get(self, key, defaultValue=None): + """Returns named result matching the given key, or if there is no + such name, then returns the given C{defaultValue} or C{None} if no + C{defaultValue} is specified.""" + if key in self: + return self[key] + else: + return defaultValue + + def insert( self, index, insStr ): + """Inserts new element at location index in the list of parsed tokens.""" + self.__toklist.insert(index, insStr) + # fixup indices in token dictionary + #~ for name in self.__tokdict: + #~ occurrences = self.__tokdict[name] + #~ for k, (value, position) in enumerate(occurrences): + #~ occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) + for name,occurrences in self.__tokdict.items(): + for k, (value, position) in enumerate(occurrences): + occurrences[k] = _ParseResultsWithOffset(value, position + (position > index)) + + def append( self, item ): + """Add single element to end of ParseResults list of elements.""" + self.__toklist.append(item) + + def extend( self, itemseq ): + """Add sequence of elements to end of ParseResults list of elements.""" + if isinstance(itemseq, ParseResults): + self += itemseq + else: + self.__toklist.extend(itemseq) + + def clear( self ): + """Clear all elements and results names.""" + del self.__toklist[:] + self.__tokdict.clear() + + def __getattr__( self, name ): + try: + return self[name] + except KeyError: + return "" + + if name in self.__tokdict: + if name not in self.__accumNames: + return self.__tokdict[name][-1][0] + else: + return ParseResults([ v[0] for v in self.__tokdict[name] ]) + else: + return "" + + def __add__( self, other ): + ret = self.copy() + ret += other + return ret + + def __iadd__( self, other ): + if other.__tokdict: + offset = len(self.__toklist) + addoffset = lambda a: offset if a<0 else a+offset + otheritems = other.__tokdict.items() + otherdictitems = [(k, _ParseResultsWithOffset(v[0],addoffset(v[1])) ) + for (k,vlist) in otheritems for v in vlist] + for k,v in otherdictitems: + self[k] = v + if isinstance(v[0],ParseResults): + v[0].__parent = wkref(self) + + self.__toklist += other.__toklist + self.__accumNames.update( other.__accumNames ) + return self + + def __radd__(self, other): + if isinstance(other,int) and other == 0: + return self.copy() + + def __repr__( self ): + return "(%s, %s)" % ( repr( self.__toklist ), repr( self.__tokdict ) ) + + def __str__( self ): + return '[' + ', '.join(_ustr(i) if isinstance(i, ParseResults) else repr(i) for i in self.__toklist) + ']' + + def _asStringList( self, sep='' ): + out = [] + for item in self.__toklist: + if out and sep: + out.append(sep) + if isinstance( item, ParseResults ): + out += item._asStringList() + else: + out.append( _ustr(item) ) + return out + + def asList( self ): + """Returns the parse results as a nested list of matching tokens, all converted to strings.""" + return [res.asList() if isinstance(res,ParseResults) else res for res in self.__toklist] + + def asDict( self ): + """Returns the named parse results as dictionary.""" + if PY_3: + return dict( self.items() ) + 
else: + return dict( self.iteritems() ) + + def copy( self ): + """Returns a new copy of a C{ParseResults} object.""" + ret = ParseResults( self.__toklist ) + ret.__tokdict = self.__tokdict.copy() + ret.__parent = self.__parent + ret.__accumNames.update( self.__accumNames ) + ret.__name = self.__name + return ret + + def asXML( self, doctag=None, namedItemsOnly=False, indent="", formatted=True ): + """Returns the parse results as XML. Tags are created for tokens and lists that have defined results names.""" + nl = "\n" + out = [] + namedItems = dict((v[1],k) for (k,vlist) in self.__tokdict.items() + for v in vlist) + nextLevelIndent = indent + " " + + # collapse out indents if formatting is not desired + if not formatted: + indent = "" + nextLevelIndent = "" + nl = "" + + selfTag = None + if doctag is not None: + selfTag = doctag + else: + if self.__name: + selfTag = self.__name + + if not selfTag: + if namedItemsOnly: + return "" + else: + selfTag = "ITEM" + + out += [ nl, indent, "<", selfTag, ">" ] + + for i,res in enumerate(self.__toklist): + if isinstance(res,ParseResults): + if i in namedItems: + out += [ res.asXML(namedItems[i], + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + out += [ res.asXML(None, + namedItemsOnly and doctag is None, + nextLevelIndent, + formatted)] + else: + # individual token, see if there is a name for it + resTag = None + if i in namedItems: + resTag = namedItems[i] + if not resTag: + if namedItemsOnly: + continue + else: + resTag = "ITEM" + xmlBodyText = _xml_escape(_ustr(res)) + out += [ nl, nextLevelIndent, "<", resTag, ">", + xmlBodyText, + "</", resTag, ">" ] + + out += [ nl, indent, "</", selfTag, ">" ] + return "".join(out) + + def __lookup(self,sub): + for k,vlist in self.__tokdict.items(): + for v,loc in vlist: + if sub is v: + return k + return None + + def getName(self): + """Returns the results name for this token expression.""" + if self.__name: + return self.__name + elif self.__parent: + par = self.__parent() + if par: + return par.__lookup(self) + else: + return None + elif (len(self) == 1 and + len(self.__tokdict) == 1 and + self.__tokdict.values()[0][0][1] in (0,-1)): + return self.__tokdict.keys()[0] + else: + return None + + def dump(self,indent='',depth=0): + """Diagnostic method for listing out the contents of a C{ParseResults}. + Accepts an optional C{indent} argument so that this string can be embedded + in a nested display of other data.""" + out = [] + NL = '\n' + out.append( indent+_ustr(self.asList()) ) + if self.haskeys(): + items = sorted(self.items()) + for k,v in items: + if out: + out.append(NL) + out.append( "%s%s- %s: " % (indent,(' '*depth), k) ) + if isinstance(v,ParseResults): + if v: + out.append( v.dump(indent,depth+1) ) + else: + out.append(_ustr(v)) + else: + out.append(_ustr(v)) + elif any(isinstance(vv,ParseResults) for vv in self): + v = self + for i,vv in enumerate(v): + if isinstance(vv,ParseResults): + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),vv.dump(indent,depth+1) )) + else: + out.append("\n%s%s[%d]:\n%s%s%s" % (indent,(' '*(depth)),i,indent,(' '*(depth+1)),_ustr(vv))) + + return "".join(out) + + def pprint(self, *args, **kwargs): + """Pretty-printer for parsed results as a list, using the C{pprint} module. + Accepts additional positional or keyword args as defined for the + C{pprint.pprint} method. 
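dump() is the quickest way to see both views at once; with the person grammar sketched earlier it prints something like:

    print(person.parseString("Jane Doe 42").dump())
    # ['Jane', 'Doe', '42']
    # - age: 42
    # - first: Jane
    # - last: Doe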
(U{http://docs.python.org/3/library/pprint.html#pprint.pprint})""" + pprint.pprint(self.asList(), *args, **kwargs) + + # add support for pickle protocol + def __getstate__(self): + return ( self.__toklist, + ( self.__tokdict.copy(), + self.__parent is not None and self.__parent() or None, + self.__accumNames, + self.__name ) ) + + def __setstate__(self,state): + self.__toklist = state[0] + (self.__tokdict, + par, + inAccumNames, + self.__name) = state[1] + self.__accumNames = {} + self.__accumNames.update(inAccumNames) + if par is not None: + self.__parent = wkref(par) + else: + self.__parent = None + + def __dir__(self): + return dir(super(ParseResults,self)) + list(self.keys()) + +collections.MutableMapping.register(ParseResults) + +def col (loc,strg): + """Returns current column within a string, counting newlines as line separators. + The first column is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + s = strg + return 1 if loc<len(s) and s[loc] == '\n' else loc - s.rfind("\n", 0, loc) + +def lineno(loc,strg): + """Returns current line number within a string, counting newlines as line separators. + The first line is number 1. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{ParserElement.parseString}<ParserElement.parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + return strg.count("\n",0,loc) + 1 + +def line( loc, strg ): + """Returns the line of text containing loc within a string, counting newlines as line separators. 
+ """ + lastCR = strg.rfind("\n", 0, loc) + nextCR = strg.find("\n", loc) + if nextCR >= 0: + return strg[lastCR+1:nextCR] + else: + return strg[lastCR+1:] + +def _defaultStartDebugAction( instring, loc, expr ): + print (("Match " + _ustr(expr) + " at loc " + _ustr(loc) + "(%d,%d)" % ( lineno(loc,instring), col(loc,instring) ))) + +def _defaultSuccessDebugAction( instring, startloc, endloc, expr, toks ): + print ("Matched " + _ustr(expr) + " -> " + str(toks.asList())) + +def _defaultExceptionDebugAction( instring, loc, expr, exc ): + print ("Exception raised:" + _ustr(exc)) + +def nullDebugAction(*args): + """'Do-nothing' debug action, to suppress debugging output during parsing.""" + pass + +# Only works on Python 3.x - nonlocal is toxic to Python 2 installs +#~ 'decorator to trim function calls to match the arity of the target' +#~ def _trim_arity(func, maxargs=3): + #~ if func in singleArgBuiltins: + #~ return lambda s,l,t: func(t) + #~ limit = 0 + #~ foundArity = False + #~ def wrapper(*args): + #~ nonlocal limit,foundArity + #~ while 1: + #~ try: + #~ ret = func(*args[limit:]) + #~ foundArity = True + #~ return ret + #~ except TypeError: + #~ if limit == maxargs or foundArity: + #~ raise + #~ limit += 1 + #~ continue + #~ return wrapper + +# this version is Python 2.x-3.x cross-compatible +'decorator to trim function calls to match the arity of the target' +def _trim_arity(func, maxargs=2): + if func in singleArgBuiltins: + return lambda s,l,t: func(t) + limit = [0] + foundArity = [False] + def wrapper(*args): + while 1: + try: + ret = func(*args[limit[0]:]) + foundArity[0] = True + return ret + except TypeError: + if limit[0] <= maxargs and not foundArity[0]: + limit[0] += 1 + continue + raise + return wrapper + +class ParserElement(object): + """Abstract base level parser element class.""" + DEFAULT_WHITE_CHARS = " \n\t\r" + verbose_stacktrace = False + + @staticmethod + def setDefaultWhitespaceChars( chars ): + """Overrides the default whitespace chars + """ + ParserElement.DEFAULT_WHITE_CHARS = chars + + @staticmethod + def inlineLiteralsUsing(cls): + """ + Set class to be used for inclusion of string literals into a parser. + """ + ParserElement.literalStringClass = cls + + def __init__( self, savelist=False ): + self.parseAction = list() + self.failAction = None + #~ self.name = "<unknown>" # don't define self.name, let subclasses try/except upcall + self.strRepr = None + self.resultsName = None + self.saveAsList = savelist + self.skipWhitespace = True + self.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + self.copyDefaultWhiteChars = True + self.mayReturnEmpty = False # used when checking for left-recursion + self.keepTabs = False + self.ignoreExprs = list() + self.debug = False + self.streamlined = False + self.mayIndexError = True # used to optimize exception handling for subclasses that don't advance parse index + self.errmsg = "" + self.modalResults = True # used to mark results names as modal (report only last) or cumulative (list all) + self.debugActions = ( None, None, None ) #custom debug actions + self.re = None + self.callPreparse = True # used to avoid redundant calls to preParse + self.callDuringTry = False + + def copy( self ): + """Make a copy of this C{ParserElement}. 
Useful for defining different parse actions + for the same parsing pattern, using copies of the original parse element.""" + cpy = copy.copy( self ) + cpy.parseAction = self.parseAction[:] + cpy.ignoreExprs = self.ignoreExprs[:] + if self.copyDefaultWhiteChars: + cpy.whiteChars = ParserElement.DEFAULT_WHITE_CHARS + return cpy + + def setName( self, name ): + """Define name for this expression, for use in debugging.""" + self.name = name + self.errmsg = "Expected " + self.name + if hasattr(self,"exception"): + self.exception.msg = self.errmsg + return self + + def setResultsName( self, name, listAllMatches=False ): + """Define name for referencing matching tokens as a nested attribute + of the returned parse results. + NOTE: this returns a *copy* of the original C{ParserElement} object; + this is so that the client can define a basic element, such as an + integer, and reference it in multiple places with different names. + + You can also set results names using the abbreviated syntax, + C{expr("name")} in place of C{expr.setResultsName("name")} - + see L{I{__call__}<__call__>}. + """ + newself = self.copy() + if name.endswith("*"): + name = name[:-1] + listAllMatches=True + newself.resultsName = name + newself.modalResults = not listAllMatches + return newself + + def setBreak(self,breakFlag = True): + """Method to invoke the Python pdb debugger when this element is + about to be parsed. Set C{breakFlag} to True to enable, False to + disable. + """ + if breakFlag: + _parseMethod = self._parse + def breaker(instring, loc, doActions=True, callPreParse=True): + import pdb + pdb.set_trace() + return _parseMethod( instring, loc, doActions, callPreParse ) + breaker._originalParseMethod = _parseMethod + self._parse = breaker + else: + if hasattr(self._parse,"_originalParseMethod"): + self._parse = self._parse._originalParseMethod + return self + + def setParseAction( self, *fns, **kwargs ): + """Define action to perform when successfully matching parse element definition. + Parse action fn is a callable method with 0-3 arguments, called as C{fn(s,loc,toks)}, + C{fn(loc,toks)}, C{fn(toks)}, or just C{fn()}, where: + - s = the original string being parsed (see note below) + - loc = the location of the matching substring + - toks = a list of the matched tokens, packaged as a C{L{ParseResults}} object + If the functions in fns modify the tokens, they can return them as the return + value from fn, and the modified list of tokens will replace the original. + Otherwise, fn does not need to return any value. + + Note: the default parsing behavior is to expand tabs in the input string + before starting the parsing process. See L{I{parseString}<parseString>} for more information + on parsing strings containing C{<TAB>}s, and suggested methods to maintain a + consistent view of the parsed string, the parse location, and line and column + positions within the parsed string. + """ + self.parseAction = list(map(_trim_arity, list(fns))) + self.callDuringTry = kwargs.get("callDuringTry", False) + return self + + def addParseAction( self, *fns, **kwargs ): + """Add parse action to expression's list of parse actions. See L{I{setParseAction}<setParseAction>}.""" + self.parseAction += list(map(_trim_arity, list(fns))) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def addCondition(self, *fns, **kwargs): + """Add a boolean predicate function to expression's list of parse actions. See + L{I{setParseAction}<setParseAction>}. 
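Taken together, setParseAction and addCondition let a grammar both convert and vet tokens in one place. A short sketch (values invented; note the condition sees the already-converted tokens because actions run in order):

    from pyparsing import Word, nums, ParseException

    integer = Word(nums)
    integer.setParseAction(lambda toks: int(toks[0]))   # convert matched text
    integer.addCondition(lambda toks: toks[0] <= 255,
                         message="value must fit in a byte")

    assert integer.parseString("42")[0] == 42
    # integer.parseString("999") raises a ParseException carrying the custom message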
Optional keyword argument C{message} can + be used to define a custom message to be used in the raised exception.""" + msg = kwargs.get("message") or "failed user-defined condition" + for fn in fns: + def pa(s,l,t): + if not bool(_trim_arity(fn)(s,l,t)): + raise ParseException(s,l,msg) + return t + self.parseAction.append(pa) + self.callDuringTry = self.callDuringTry or kwargs.get("callDuringTry", False) + return self + + def setFailAction( self, fn ): + """Define action to perform if parsing fails at this expression. + Fail acton fn is a callable function that takes the arguments + C{fn(s,loc,expr,err)} where: + - s = string being parsed + - loc = location where expression match was attempted and failed + - expr = the parse expression that failed + - err = the exception thrown + The function returns no value. It may throw C{L{ParseFatalException}} + if it is desired to stop parsing immediately.""" + self.failAction = fn + return self + + def _skipIgnorables( self, instring, loc ): + exprsFound = True + while exprsFound: + exprsFound = False + for e in self.ignoreExprs: + try: + while 1: + loc,dummy = e._parse( instring, loc ) + exprsFound = True + except ParseException: + pass + return loc + + def preParse( self, instring, loc ): + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + + if self.skipWhitespace: + wt = self.whiteChars + instrlen = len(instring) + while loc < instrlen and instring[loc] in wt: + loc += 1 + + return loc + + def parseImpl( self, instring, loc, doActions=True ): + return loc, [] + + def postParse( self, instring, loc, tokenlist ): + return tokenlist + + #~ @profile + def _parseNoCache( self, instring, loc, doActions=True, callPreParse=True ): + debugging = ( self.debug ) #and doActions ) + + if debugging or self.failAction: + #~ print ("Match",self,"at loc",loc,"(%d,%d)" % ( lineno(loc,instring), col(loc,instring) )) + if (self.debugActions[0] ): + self.debugActions[0]( instring, loc, self ) + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + try: + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + except ParseBaseException as err: + #~ print ("Exception raised:", err) + if self.debugActions[2]: + self.debugActions[2]( instring, tokensStart, self, err ) + if self.failAction: + self.failAction( instring, tokensStart, self, err ) + raise + else: + if callPreParse and self.callPreparse: + preloc = self.preParse( instring, loc ) + else: + preloc = loc + tokensStart = preloc + if self.mayIndexError or loc >= len(instring): + try: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + except IndexError: + raise ParseException( instring, len(instring), self.errmsg, self ) + else: + loc,tokens = self.parseImpl( instring, preloc, doActions ) + + tokens = self.postParse( instring, loc, tokens ) + + retTokens = ParseResults( tokens, self.resultsName, asList=self.saveAsList, modal=self.modalResults ) + if self.parseAction and (doActions or self.callDuringTry): + if debugging: + try: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + except ParseBaseException as err: + #~ print "Exception raised in user parse action:", err + if (self.debugActions[2] ): + 
self.debugActions[2]( instring, tokensStart, self, err ) + raise + else: + for fn in self.parseAction: + tokens = fn( instring, tokensStart, retTokens ) + if tokens is not None: + retTokens = ParseResults( tokens, + self.resultsName, + asList=self.saveAsList and isinstance(tokens,(ParseResults,list)), + modal=self.modalResults ) + + if debugging: + #~ print ("Matched",self,"->",retTokens.asList()) + if (self.debugActions[1] ): + self.debugActions[1]( instring, tokensStart, loc, self, retTokens ) + + return loc, retTokens + + def tryParse( self, instring, loc ): + try: + return self._parse( instring, loc, doActions=False )[0] + except ParseFatalException: + raise ParseException( instring, loc, self.errmsg, self) + + # this method gets repeatedly called during backtracking with the same arguments - + # we can cache these arguments and save ourselves the trouble of re-parsing the contained expression + def _parseCache( self, instring, loc, doActions=True, callPreParse=True ): + lookup = (self,instring,loc,callPreParse,doActions) + if lookup in ParserElement._exprArgCache: + value = ParserElement._exprArgCache[ lookup ] + if isinstance(value, Exception): + raise value + return (value[0],value[1].copy()) + else: + try: + value = self._parseNoCache( instring, loc, doActions, callPreParse ) + ParserElement._exprArgCache[ lookup ] = (value[0],value[1].copy()) + return value + except ParseBaseException as pe: + pe.__traceback__ = None + ParserElement._exprArgCache[ lookup ] = pe + raise + + _parse = _parseNoCache + + # argument cache for optimizing repeated calls when backtracking through recursive expressions + _exprArgCache = {} + @staticmethod + def resetCache(): + ParserElement._exprArgCache.clear() + + _packratEnabled = False + @staticmethod + def enablePackrat(): + """Enables "packrat" parsing, which adds memoizing to the parsing logic. + Repeated parse attempts at the same string location (which happens + often in many complex grammars) can immediately return a cached value, + instead of re-executing parsing/validating code. Memoizing is done of + both valid results and parsing exceptions. + + This speedup may break existing programs that use parse actions that + have side-effects. For this reason, packrat parsing is disabled when + you first import pyparsing. To activate the packrat feature, your + program must call the class method C{ParserElement.enablePackrat()}. If + your program uses C{psyco} to "compile as you go", you must call + C{enablePackrat} before calling C{psyco.full()}. If you do not do this, + Python will crash. For best results, call C{enablePackrat()} immediately + after importing pyparsing. + """ + if not ParserElement._packratEnabled: + ParserElement._packratEnabled = True + ParserElement._parse = ParserElement._parseCache + + def parseString( self, instring, parseAll=False ): + """Execute the parse expression with the given string. + This is the main interface to the client code, once the complete + expression has been built. + + If you want the grammar to require that the entire input string be + successfully parsed, then set C{parseAll} to True (equivalent to ending + the grammar with C{L{StringEnd()}}). + + Note: C{parseString} implicitly calls C{expandtabs()} on the input string, + in order to report proper column numbers in parse actions. 
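Because of the side-effect caveat spelled out in enablePackrat's docstring, memoization stays off by default; switching it on is a single call made once, right after import:

    import pyparsing

    pyparsing.ParserElement.enablePackrat()  # memoize repeated parse attempts globally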
+ If the input string contains tabs and + the grammar uses parse actions that use the C{loc} argument to index into the + string being parsed, you can ensure you have a consistent view of the input + string by: + - calling C{parseWithTabs} on your grammar before calling C{parseString} + (see L{I{parseWithTabs}<parseWithTabs>}) + - define your parse action using the full C{(s,loc,toks)} signature, and + reference the input string using the parse action's C{s} argument + - explictly expand the tabs in your input string before calling + C{parseString} + """ + ParserElement.resetCache() + if not self.streamlined: + self.streamline() + #~ self.saveAsList = True + for e in self.ignoreExprs: + e.streamline() + if not self.keepTabs: + instring = instring.expandtabs() + try: + loc, tokens = self._parse( instring, 0 ) + if parseAll: + loc = self.preParse( instring, loc ) + se = Empty() + StringEnd() + se._parse( instring, loc ) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + else: + return tokens + + def scanString( self, instring, maxMatches=_MAX_INT, overlap=False ): + """Scan the input string for expression matches. Each match will return the + matching tokens, start location, and end location. May be called with optional + C{maxMatches} argument, to clip scanning after 'n' matches are found. If + C{overlap} is specified, then overlapping matches will be reported. + + Note that the start and end locations are reported relative to the string + being parsed. See L{I{parseString}<parseString>} for more information on parsing + strings with embedded tabs.""" + if not self.streamlined: + self.streamline() + for e in self.ignoreExprs: + e.streamline() + + if not self.keepTabs: + instring = _ustr(instring).expandtabs() + instrlen = len(instring) + loc = 0 + preparseFn = self.preParse + parseFn = self._parse + ParserElement.resetCache() + matches = 0 + try: + while loc <= instrlen and matches < maxMatches: + try: + preloc = preparseFn( instring, loc ) + nextLoc,tokens = parseFn( instring, preloc, callPreParse=False ) + except ParseException: + loc = preloc+1 + else: + if nextLoc > loc: + matches += 1 + yield tokens, preloc, nextLoc + if overlap: + nextloc = preparseFn( instring, loc ) + if nextloc > loc: + loc = nextLoc + else: + loc += 1 + else: + loc = nextLoc + else: + loc = preloc+1 + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def transformString( self, instring ): + """Extension to C{L{scanString}}, to modify matching text with modified tokens that may + be returned from a parse action. To use C{transformString}, define a grammar and + attach a parse action to it that modifies the returned token list. + Invoking C{transformString()} on a target string will then scan for matches, + and replace the matched text patterns according to the logic in the parse + action. 
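A concrete sketch of that transform pattern (grammar and input invented for illustration):

    from pyparsing import Word, alphas

    word = Word(alphas)
    word.setParseAction(lambda toks: toks[0].upper())
    print(word.transformString("shout this, please"))  # -> SHOUT THIS, PLEASE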
C{transformString()} returns the resulting transformed string.""" + out = [] + lastE = 0 + # force preservation of <TAB>s, to minimize unwanted transformation of string, and to + # keep string locs straight between transformString and scanString + self.keepTabs = True + try: + for t,s,e in self.scanString( instring ): + out.append( instring[lastE:s] ) + if t: + if isinstance(t,ParseResults): + out += t.asList() + elif isinstance(t,list): + out += t + else: + out.append(t) + lastE = e + out.append(instring[lastE:]) + out = [o for o in out if o] + return "".join(map(_ustr,_flatten(out))) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def searchString( self, instring, maxMatches=_MAX_INT ): + """Another extension to C{L{scanString}}, simplifying the access to the tokens found + to match the given parse expression. May be called with optional + C{maxMatches} argument, to clip searching after 'n' matches are found. + """ + try: + return ParseResults([ t for t,s,e in self.scanString( instring, maxMatches ) ]) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def __add__(self, other ): + """Implementation of + operator - returns C{L{And}}""" + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return And( [ self, other ] ) + + def __radd__(self, other ): + """Implementation of + operator when left operand is not a C{L{ParserElement}}""" + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other + self + + def __sub__(self, other): + """Implementation of - operator, returns C{L{And}} with error stop""" + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return And( [ self, And._ErrorStop(), other ] ) + + def __rsub__(self, other ): + """Implementation of - operator when left operand is not a C{L{ParserElement}}""" + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other - self + + def __mul__(self,other): + """Implementation of * operator, allows use of C{expr * 3} in place of + C{expr + expr + expr}. Expressions may also me multiplied by a 2-integer + tuple, similar to C{{min,max}} multipliers in regular expressions. 
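Two invented examples of the multiplication shorthand described here:

    from pyparsing import Word, nums

    digits = Word(nums)
    exactly_three = digits * 3      # same as digits + digits + digits
    one_to_three = digits * (1, 3)  # between one and three occurrences

    print(exactly_three.parseString("1 2 3"))  # ['1', '2', '3']
    print(one_to_three.parseString("7 8"))     # ['7', '8']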
Tuples + may also include C{None} as in: + - C{expr*(n,None)} or C{expr*(n,)} is equivalent + to C{expr*n + L{ZeroOrMore}(expr)} + (read as "at least n instances of C{expr}") + - C{expr*(None,n)} is equivalent to C{expr*(0,n)} + (read as "0 to n instances of C{expr}") + - C{expr*(None,None)} is equivalent to C{L{ZeroOrMore}(expr)} + - C{expr*(1,None)} is equivalent to C{L{OneOrMore}(expr)} + + Note that C{expr*(None,n)} does not raise an exception if + more than n exprs exist in the input stream; that is, + C{expr*(None,n)} does not enforce a maximum number of expr + occurrences. If this behavior is desired, then write + C{expr*(None,n) + ~expr} + + """ + if isinstance(other,int): + minElements, optElements = other,0 + elif isinstance(other,tuple): + other = (other + (None, None))[:2] + if other[0] is None: + other = (0, other[1]) + if isinstance(other[0],int) and other[1] is None: + if other[0] == 0: + return ZeroOrMore(self) + if other[0] == 1: + return OneOrMore(self) + else: + return self*other[0] + ZeroOrMore(self) + elif isinstance(other[0],int) and isinstance(other[1],int): + minElements, optElements = other + optElements -= minElements + else: + raise TypeError("cannot multiply 'ParserElement' and ('%s','%s') objects", type(other[0]),type(other[1])) + else: + raise TypeError("cannot multiply 'ParserElement' and '%s' objects", type(other)) + + if minElements < 0: + raise ValueError("cannot multiply ParserElement by negative value") + if optElements < 0: + raise ValueError("second tuple value must be greater or equal to first tuple value") + if minElements == optElements == 0: + raise ValueError("cannot multiply ParserElement by 0 or (0,0)") + + if (optElements): + def makeOptionalList(n): + if n>1: + return Optional(self + makeOptionalList(n-1)) + else: + return Optional(self) + if minElements: + if minElements == 1: + ret = self + makeOptionalList(optElements) + else: + ret = And([self]*minElements) + makeOptionalList(optElements) + else: + ret = makeOptionalList(optElements) + else: + if minElements == 1: + ret = self + else: + ret = And([self]*minElements) + return ret + + def __rmul__(self, other): + return self.__mul__(other) + + def __or__(self, other ): + """Implementation of | operator - returns C{L{MatchFirst}}""" + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return MatchFirst( [ self, other ] ) + + def __ror__(self, other ): + """Implementation of | operator when left operand is not a C{L{ParserElement}}""" + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other | self + + def __xor__(self, other ): + """Implementation of ^ operator - returns C{L{Or}}""" + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Or( [ self, other ] ) + + def __rxor__(self, other ): + """Implementation of ^ operator when left operand is not a C{L{ParserElement}}""" + if isinstance( other, basestring ): + other = 
ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other ^ self + + def __and__(self, other ): + """Implementation of & operator - returns C{L{Each}}""" + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return Each( [ self, other ] ) + + def __rand__(self, other ): + """Implementation of & operator when left operand is not a C{L{ParserElement}}""" + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + if not isinstance( other, ParserElement ): + warnings.warn("Cannot combine element of type %s with ParserElement" % type(other), + SyntaxWarning, stacklevel=2) + return None + return other & self + + def __invert__( self ): + """Implementation of ~ operator - returns C{L{NotAny}}""" + return NotAny( self ) + + def __call__(self, name=None): + """Shortcut for C{L{setResultsName}}, with C{listAllMatches=default}:: + userdata = Word(alphas).setResultsName("name") + Word(nums+"-").setResultsName("socsecno") + could be written as:: + userdata = Word(alphas)("name") + Word(nums+"-")("socsecno") + + If C{name} is given with a trailing C{'*'} character, then C{listAllMatches} will be + passed as C{True}. + + If C{name} is omitted, same as calling C{L{copy}}. + """ + if name is not None: + return self.setResultsName(name) + else: + return self.copy() + + def suppress( self ): + """Suppresses the output of this C{ParserElement}; useful to keep punctuation from + cluttering up returned output. + """ + return Suppress( self ) + + def leaveWhitespace( self ): + """Disables the skipping of whitespace before matching the characters in the + C{ParserElement}'s defined pattern. This is normally only used internally by + the pyparsing module, but may be needed in some whitespace-sensitive grammars. + """ + self.skipWhitespace = False + return self + + def setWhitespaceChars( self, chars ): + """Overrides the default whitespace chars + """ + self.skipWhitespace = True + self.whiteChars = chars + self.copyDefaultWhiteChars = False + return self + + def parseWithTabs( self ): + """Overrides default behavior to expand C{<TAB>}s to spaces before parsing the input string. + Must be called before C{parseString} when the input grammar contains elements that + match C{<TAB>} characters.""" + self.keepTabs = True + return self + + def ignore( self, other ): + """Define expression to be ignored (e.g., comments) while doing pattern + matching; may be called repeatedly, to define multiple comment or other + ignorable patterns. + """ + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + self.ignoreExprs.append( other.copy() ) + else: + self.ignoreExprs.append( Suppress( other.copy() ) ) + return self + + def setDebugActions( self, startAction, successAction, exceptionAction ): + """Enable display of debugging messages while doing pattern matching.""" + self.debugActions = (startAction or _defaultStartDebugAction, + successAction or _defaultSuccessDebugAction, + exceptionAction or _defaultExceptionDebugAction) + self.debug = True + return self + + def setDebug( self, flag=True ): + """Enable display of debugging messages while doing pattern matching. 
+ Set C{flag} to True to enable, False to disable.""" + if flag: + self.setDebugActions( _defaultStartDebugAction, _defaultSuccessDebugAction, _defaultExceptionDebugAction ) + else: + self.debug = False + return self + + def __str__( self ): + return self.name + + def __repr__( self ): + return _ustr(self) + + def streamline( self ): + self.streamlined = True + self.strRepr = None + return self + + def checkRecursion( self, parseElementList ): + pass + + def validate( self, validateTrace=[] ): + """Check defined expressions for valid structure, check for infinite recursive definitions.""" + self.checkRecursion( [] ) + + def parseFile( self, file_or_filename, parseAll=False ): + """Execute the parse expression on the given file or filename. + If a filename is specified (instead of a file object), + the entire file is opened, read, and closed before parsing. + """ + try: + file_contents = file_or_filename.read() + except AttributeError: + f = open(file_or_filename, "r") + file_contents = f.read() + f.close() + try: + return self.parseString(file_contents, parseAll) + except ParseBaseException as exc: + if ParserElement.verbose_stacktrace: + raise + else: + # catch and re-raise exception from here, clears out pyparsing internal stack trace + raise exc + + def __eq__(self,other): + if isinstance(other, ParserElement): + return self is other or self.__dict__ == other.__dict__ + elif isinstance(other, basestring): + try: + self.parseString(_ustr(other), parseAll=True) + return True + except ParseBaseException: + return False + else: + return super(ParserElement,self)==other + + def __ne__(self,other): + return not (self == other) + + def __hash__(self): + return hash(id(self)) + + def __req__(self,other): + return self == other + + def __rne__(self,other): + return not (self == other) + + def runTests(self, tests, parseAll=False): + """Execute the parse expression on a series of test strings, showing each + test, the parsed results or where the parse failed. Quick and easy way to + run a parse expression against a list of sample strings. 
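+
+        A minimal usage sketch (the name C{integer} is illustrative)::
+            integer = Word(nums)
+            integer.runTests(["123", "abc"])   # second test fails; the error location is printed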
+ + Parameters: + - tests - a list of separate test strings, or a multiline string of test strings + - parseAll - (default=False) - flag to pass to C{L{parseString}} when running tests + """ + if isinstance(tests, basestring): + tests = map(str.strip, tests.splitlines()) + for t in tests: + out = [t] + try: + out.append(self.parseString(t, parseAll=parseAll).dump()) + except ParseException as pe: + if '\n' in t: + out.append(line(pe.loc, t)) + out.append(' '*(col(pe.loc,t)-1) + '^') + else: + out.append(' '*pe.loc + '^') + out.append(str(pe)) + out.append('') + print('\n'.join(out)) + + +class Token(ParserElement): + """Abstract C{ParserElement} subclass, for defining atomic matching patterns.""" + def __init__( self ): + super(Token,self).__init__( savelist=False ) + + +class Empty(Token): + """An empty token, will always match.""" + def __init__( self ): + super(Empty,self).__init__() + self.name = "Empty" + self.mayReturnEmpty = True + self.mayIndexError = False + + +class NoMatch(Token): + """A token that will never match.""" + def __init__( self ): + super(NoMatch,self).__init__() + self.name = "NoMatch" + self.mayReturnEmpty = True + self.mayIndexError = False + self.errmsg = "Unmatchable token" + + def parseImpl( self, instring, loc, doActions=True ): + raise ParseException(instring, loc, self.errmsg, self) + + +class Literal(Token): + """Token to exactly match a specified string.""" + def __init__( self, matchString ): + super(Literal,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Literal; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.__class__ = Empty + self.name = '"%s"' % _ustr(self.match) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + + # Performance tuning: this routine gets called a *lot* + # if this is a single character match string and the first character matches, + # short-circuit as quickly as possible, and avoid calling startswith + #~ @profile + def parseImpl( self, instring, loc, doActions=True ): + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) +_L = Literal +ParserElement.literalStringClass = Literal + +class Keyword(Token): + """Token to exactly match a specified string as a keyword, that is, it must be + immediately followed by a non-keyword character. Compare with C{L{Literal}}:: + Literal("if") will match the leading C{'if'} in C{'ifAndOnlyIf'}. + Keyword("if") will not; it will only match the leading C{'if'} in C{'if x=1'}, or C{'if(y==2)'} + Accepts two optional constructor arguments in addition to the keyword string: + C{identChars} is a string of characters that would be valid identifier characters, + defaulting to all alphanumerics + "_" and "$"; C{caseless} allows case-insensitive + matching, default is C{False}. 
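+
+       For example (an illustrative sketch)::
+           if_kwd = Keyword("if")
+           if_kwd.parseString("if(y==2)")    # -> ['if']
+           # if_kwd.parseString("ifAndOnlyIf") would raise ParseException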
+ """ + DEFAULT_KEYWORD_CHARS = alphanums+"_$" + + def __init__( self, matchString, identChars=DEFAULT_KEYWORD_CHARS, caseless=False ): + super(Keyword,self).__init__() + self.match = matchString + self.matchLen = len(matchString) + try: + self.firstMatchChar = matchString[0] + except IndexError: + warnings.warn("null string passed to Keyword; use Empty() instead", + SyntaxWarning, stacklevel=2) + self.name = '"%s"' % self.match + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = False + self.mayIndexError = False + self.caseless = caseless + if caseless: + self.caselessmatch = matchString.upper() + identChars = identChars.upper() + self.identChars = set(identChars) + + def parseImpl( self, instring, loc, doActions=True ): + if self.caseless: + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) and + (loc == 0 or instring[loc-1].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + else: + if (instring[loc] == self.firstMatchChar and + (self.matchLen==1 or instring.startswith(self.match,loc)) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen] not in self.identChars) and + (loc == 0 or instring[loc-1] not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + + def copy(self): + c = super(Keyword,self).copy() + c.identChars = Keyword.DEFAULT_KEYWORD_CHARS + return c + + @staticmethod + def setDefaultKeywordChars( chars ): + """Overrides the default Keyword chars + """ + Keyword.DEFAULT_KEYWORD_CHARS = chars + +class CaselessLiteral(Literal): + """Token to match a specified string, ignoring case of letters. + Note: the matched results will always be in the case of the given + match string, NOT the case of the input text. + """ + def __init__( self, matchString ): + super(CaselessLiteral,self).__init__( matchString.upper() ) + # Preserve the defining literal. + self.returnString = matchString + self.name = "'%s'" % self.returnString + self.errmsg = "Expected " + self.name + + def parseImpl( self, instring, loc, doActions=True ): + if instring[ loc:loc+self.matchLen ].upper() == self.match: + return loc+self.matchLen, self.returnString + raise ParseException(instring, loc, self.errmsg, self) + +class CaselessKeyword(Keyword): + def __init__( self, matchString, identChars=Keyword.DEFAULT_KEYWORD_CHARS ): + super(CaselessKeyword,self).__init__( matchString, identChars, caseless=True ) + + def parseImpl( self, instring, loc, doActions=True ): + if ( (instring[ loc:loc+self.matchLen ].upper() == self.caselessmatch) and + (loc >= len(instring)-self.matchLen or instring[loc+self.matchLen].upper() not in self.identChars) ): + return loc+self.matchLen, self.match + raise ParseException(instring, loc, self.errmsg, self) + +class Word(Token): + """Token for matching words composed of allowed character sets. + Defined with string containing all allowed initial characters, + an optional string containing allowed body characters (if omitted, + defaults to the initial character set), and an optional minimum, + maximum, and/or exact length. The default value for C{min} is 1 (a + minimum value < 1 is not valid); the default values for C{max} and C{exact} + are 0, meaning no maximum or exact length restriction. 
An optional + C{exclude} parameter can list characters that might be found in + the input C{bodyChars} string; useful to define a word of all printables + except for one or two characters, for instance. + """ + def __init__( self, initChars, bodyChars=None, min=1, max=0, exact=0, asKeyword=False, excludeChars=None ): + super(Word,self).__init__() + if excludeChars: + initChars = ''.join(c for c in initChars if c not in excludeChars) + if bodyChars: + bodyChars = ''.join(c for c in bodyChars if c not in excludeChars) + self.initCharsOrig = initChars + self.initChars = set(initChars) + if bodyChars : + self.bodyCharsOrig = bodyChars + self.bodyChars = set(bodyChars) + else: + self.bodyCharsOrig = initChars + self.bodyChars = set(initChars) + + self.maxSpecified = max > 0 + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(Word()) if zero-length word is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.asKeyword = asKeyword + + if ' ' not in self.initCharsOrig+self.bodyCharsOrig and (min==1 and max==0 and exact==0): + if self.bodyCharsOrig == self.initCharsOrig: + self.reString = "[%s]+" % _escapeRegexRangeChars(self.initCharsOrig) + elif len(self.initCharsOrig) == 1: + self.reString = "%s[%s]*" % \ + (re.escape(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + else: + self.reString = "[%s][%s]*" % \ + (_escapeRegexRangeChars(self.initCharsOrig), + _escapeRegexRangeChars(self.bodyCharsOrig),) + if self.asKeyword: + self.reString = r"\b"+self.reString+r"\b" + try: + self.re = re.compile( self.reString ) + except: + self.re = None + + def parseImpl( self, instring, loc, doActions=True ): + if self.re: + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + return loc, result.group() + + if not(instring[ loc ] in self.initChars): + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + instrlen = len(instring) + bodychars = self.bodyChars + maxloc = start + self.maxLen + maxloc = min( maxloc, instrlen ) + while loc < maxloc and instring[loc] in bodychars: + loc += 1 + + throwException = False + if loc - start < self.minLen: + throwException = True + if self.maxSpecified and loc < instrlen and instring[loc] in bodychars: + throwException = True + if self.asKeyword: + if (start>0 and instring[start-1] in bodychars) or (loc<instrlen and instring[loc] in bodychars): + throwException = True + + if throwException: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + def __str__( self ): + try: + return super(Word,self).__str__() + except: + pass + + + if self.strRepr is None: + + def charsAsStr(s): + if len(s)>4: + return s[:4]+"..." + else: + return s + + if ( self.initCharsOrig != self.bodyCharsOrig ): + self.strRepr = "W:(%s,%s)" % ( charsAsStr(self.initCharsOrig), charsAsStr(self.bodyCharsOrig) ) + else: + self.strRepr = "W:(%s)" % charsAsStr(self.initCharsOrig) + + return self.strRepr + + +class Regex(Token): + """Token for matching strings that match a given regular expression. + Defined with string specifying the regular expression in a form recognized by the inbuilt Python re module. 
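+       For example (an illustrative sketch)::
+           realnum = Regex(r"[+-]?\d+\.\d*")
+           realnum.parseString("3.14159")   # -> ['3.14159']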
+ """ + compiledREtype = type(re.compile("[A-Z]")) + def __init__( self, pattern, flags=0): + """The parameters C{pattern} and C{flags} are passed to the C{re.compile()} function as-is. See the Python C{re} module for an explanation of the acceptable patterns and flags.""" + super(Regex,self).__init__() + + if isinstance(pattern, basestring): + if len(pattern) == 0: + warnings.warn("null string passed to Regex; use Empty() instead", + SyntaxWarning, stacklevel=2) + + self.pattern = pattern + self.flags = flags + + try: + self.re = re.compile(self.pattern, self.flags) + self.reString = self.pattern + except sre_constants.error: + warnings.warn("invalid pattern (%s) passed to Regex" % pattern, + SyntaxWarning, stacklevel=2) + raise + + elif isinstance(pattern, Regex.compiledREtype): + self.re = pattern + self.pattern = \ + self.reString = str(pattern) + self.flags = flags + + else: + raise ValueError("Regex may only be constructed with a string or a compiled RE object") + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayIndexError = False + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + result = self.re.match(instring,loc) + if not result: + raise ParseException(instring, loc, self.errmsg, self) + + loc = result.end() + d = result.groupdict() + ret = ParseResults(result.group()) + if d: + for k in d: + ret[k] = d[k] + return loc,ret + + def __str__( self ): + try: + return super(Regex,self).__str__() + except: + pass + + if self.strRepr is None: + self.strRepr = "Re:(%s)" % repr(self.pattern) + + return self.strRepr + + +class QuotedString(Token): + """Token for matching strings that are delimited by quoting characters. + """ + def __init__( self, quoteChar, escChar=None, escQuote=None, multiline=False, unquoteResults=True, endQuoteChar=None): + """ + Defined with the following parameters: + - quoteChar - string of one or more characters defining the quote delimiting string + - escChar - character to escape quotes, typically backslash (default=None) + - escQuote - special quote sequence to escape an embedded quote string (such as SQL's "" to escape an embedded ") (default=None) + - multiline - boolean indicating whether quotes can span multiple lines (default=C{False}) + - unquoteResults - boolean indicating whether the matched text should be unquoted (default=C{True}) + - endQuoteChar - string of one or more characters defining the end of the quote delimited string (default=C{None} => same as quoteChar) + """ + super(QuotedString,self).__init__() + + # remove white space from quote chars - wont work anyway + quoteChar = quoteChar.strip() + if len(quoteChar) == 0: + warnings.warn("quoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + if endQuoteChar is None: + endQuoteChar = quoteChar + else: + endQuoteChar = endQuoteChar.strip() + if len(endQuoteChar) == 0: + warnings.warn("endQuoteChar cannot be the empty string",SyntaxWarning,stacklevel=2) + raise SyntaxError() + + self.quoteChar = quoteChar + self.quoteCharLen = len(quoteChar) + self.firstQuoteChar = quoteChar[0] + self.endQuoteChar = endQuoteChar + self.endQuoteCharLen = len(endQuoteChar) + self.escChar = escChar + self.escQuote = escQuote + self.unquoteResults = unquoteResults + + if multiline: + self.flags = re.MULTILINE | re.DOTALL + self.pattern = r'%s(?:[^%s%s]' % \ + ( re.escape(self.quoteChar), + _escapeRegexRangeChars(self.endQuoteChar[0]), + (escChar is not None and _escapeRegexRangeChars(escChar) or '') ) + else: + 
self.flags = 0
+            self.pattern = r'%s(?:[^%s\n\r%s]' % \
+                ( re.escape(self.quoteChar),
+                  _escapeRegexRangeChars(self.endQuoteChar[0]),
+                  (escChar is not None and _escapeRegexRangeChars(escChar) or '') )
+        if len(self.endQuoteChar) > 1:
+            self.pattern += (
+                '|(?:' + ')|(?:'.join("%s[^%s]" % (re.escape(self.endQuoteChar[:i]),
+                    _escapeRegexRangeChars(self.endQuoteChar[i]))
+                    for i in range(len(self.endQuoteChar)-1,0,-1)) + ')'
+                )
+        if escQuote:
+            self.pattern += (r'|(?:%s)' % re.escape(escQuote))
+        if escChar:
+            self.pattern += (r'|(?:%s.)' % re.escape(escChar))
+            self.escCharReplacePattern = re.escape(self.escChar)+"(.)"
+        self.pattern += (r')*%s' % re.escape(self.endQuoteChar))
+
+        try:
+            self.re = re.compile(self.pattern, self.flags)
+            self.reString = self.pattern
+        except sre_constants.error:
+            warnings.warn("invalid pattern (%s) passed to Regex" % self.pattern,
+                SyntaxWarning, stacklevel=2)
+            raise
+
+        self.name = _ustr(self)
+        self.errmsg = "Expected " + self.name
+        self.mayIndexError = False
+        self.mayReturnEmpty = True
+
+    def parseImpl( self, instring, loc, doActions=True ):
+        result = instring[loc] == self.firstQuoteChar and self.re.match(instring,loc) or None
+        if not result:
+            raise ParseException(instring, loc, self.errmsg, self)
+
+        loc = result.end()
+        ret = result.group()
+
+        if self.unquoteResults:
+
+            # strip off quotes
+            ret = ret[self.quoteCharLen:-self.endQuoteCharLen]
+
+            if isinstance(ret,basestring):
+                # replace escaped characters (raw string, so \g is not treated
+                # as an invalid string escape)
+                if self.escChar:
+                    ret = re.sub(self.escCharReplacePattern, r"\g<1>", ret)
+
+                # replace escaped quotes
+                if self.escQuote:
+                    ret = ret.replace(self.escQuote, self.endQuoteChar)
+
+        return loc, ret
+
+    def __str__( self ):
+        try:
+            return super(QuotedString,self).__str__()
+        except:
+            pass
+
+        if self.strRepr is None:
+            self.strRepr = "quoted string, starting with %s ending with %s" % (self.quoteChar, self.endQuoteChar)
+
+        return self.strRepr
+
+
+class CharsNotIn(Token):
+    """Token for matching words composed of characters *not* in a given set.
+       Defined with string containing all disallowed characters, and an optional
+       minimum, maximum, and/or exact length.  The default value for C{min} is 1 (a
+       minimum value < 1 is not valid); the default values for C{max} and C{exact}
+       are 0, meaning no maximum or exact length restriction.
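+       For example (an illustrative sketch)::
+           csv_field = CharsNotIn(",")
+           csv_field.parseString("abc,def")   # -> ['abc']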
+ """ + def __init__( self, notChars, min=1, max=0, exact=0 ): + super(CharsNotIn,self).__init__() + self.skipWhitespace = False + self.notChars = notChars + + if min < 1: + raise ValueError("cannot specify a minimum length < 1; use Optional(CharsNotIn()) if zero-length char group is permitted") + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + self.name = _ustr(self) + self.errmsg = "Expected " + self.name + self.mayReturnEmpty = ( self.minLen == 0 ) + self.mayIndexError = False + + def parseImpl( self, instring, loc, doActions=True ): + if instring[loc] in self.notChars: + raise ParseException(instring, loc, self.errmsg, self) + + start = loc + loc += 1 + notchars = self.notChars + maxlen = min( start+self.maxLen, len(instring) ) + while loc < maxlen and \ + (instring[loc] not in notchars): + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + def __str__( self ): + try: + return super(CharsNotIn, self).__str__() + except: + pass + + if self.strRepr is None: + if len(self.notChars) > 4: + self.strRepr = "!W:(%s...)" % self.notChars[:4] + else: + self.strRepr = "!W:(%s)" % self.notChars + + return self.strRepr + +class White(Token): + """Special matching class for matching whitespace. Normally, whitespace is ignored + by pyparsing grammars. This class is included when some whitespace structures + are significant. Define with a string containing the whitespace characters to be + matched; default is C{" \\t\\r\\n"}. Also takes optional C{min}, C{max}, and C{exact} arguments, + as defined for the C{L{Word}} class.""" + whiteStrs = { + " " : "<SPC>", + "\t": "<TAB>", + "\n": "<LF>", + "\r": "<CR>", + "\f": "<FF>", + } + def __init__(self, ws=" \t\r\n", min=1, max=0, exact=0): + super(White,self).__init__() + self.matchWhite = ws + self.setWhitespaceChars( "".join(c for c in self.whiteChars if c not in self.matchWhite) ) + #~ self.leaveWhitespace() + self.name = ("".join(White.whiteStrs[c] for c in self.matchWhite)) + self.mayReturnEmpty = True + self.errmsg = "Expected " + self.name + + self.minLen = min + + if max > 0: + self.maxLen = max + else: + self.maxLen = _MAX_INT + + if exact > 0: + self.maxLen = exact + self.minLen = exact + + def parseImpl( self, instring, loc, doActions=True ): + if not(instring[ loc ] in self.matchWhite): + raise ParseException(instring, loc, self.errmsg, self) + start = loc + loc += 1 + maxloc = start + self.maxLen + maxloc = min( maxloc, len(instring) ) + while loc < maxloc and instring[loc] in self.matchWhite: + loc += 1 + + if loc - start < self.minLen: + raise ParseException(instring, loc, self.errmsg, self) + + return loc, instring[start:loc] + + +class _PositionToken(Token): + def __init__( self ): + super(_PositionToken,self).__init__() + self.name=self.__class__.__name__ + self.mayReturnEmpty = True + self.mayIndexError = False + +class GoToColumn(_PositionToken): + """Token to advance to a specific column of input text; useful for tabular report scraping.""" + def __init__( self, colno ): + super(GoToColumn,self).__init__() + self.col = colno + + def preParse( self, instring, loc ): + if col(loc,instring) != self.col: + instrlen = len(instring) + if self.ignoreExprs: + loc = self._skipIgnorables( instring, loc ) + while loc < instrlen and instring[loc].isspace() and col( loc, instring ) != self.col : + loc += 1 + return loc + + def parseImpl( self, instring, 
loc, doActions=True ): + thiscol = col( loc, instring ) + if thiscol > self.col: + raise ParseException( instring, loc, "Text not in expected column", self ) + newloc = loc + self.col - thiscol + ret = instring[ loc: newloc ] + return newloc, ret + +class LineStart(_PositionToken): + """Matches if current position is at the beginning of a line within the parse string""" + def __init__( self ): + super(LineStart,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected start of line" + + def preParse( self, instring, loc ): + preloc = super(LineStart,self).preParse(instring,loc) + if instring[preloc] == "\n": + loc += 1 + return loc + + def parseImpl( self, instring, loc, doActions=True ): + if not( loc==0 or + (loc == self.preParse( instring, 0 )) or + (instring[loc-1] == "\n") ): #col(loc, instring) != 1: + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class LineEnd(_PositionToken): + """Matches if current position is at the end of a line within the parse string""" + def __init__( self ): + super(LineEnd,self).__init__() + self.setWhitespaceChars( ParserElement.DEFAULT_WHITE_CHARS.replace("\n","") ) + self.errmsg = "Expected end of line" + + def parseImpl( self, instring, loc, doActions=True ): + if loc<len(instring): + if instring[loc] == "\n": + return loc+1, "\n" + else: + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class StringStart(_PositionToken): + """Matches if current position is at the beginning of the parse string""" + def __init__( self ): + super(StringStart,self).__init__() + self.errmsg = "Expected start of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc != 0: + # see if entire string up to here is just whitespace and ignoreables + if loc != self.preParse( instring, 0 ): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class StringEnd(_PositionToken): + """Matches if current position is at the end of the parse string""" + def __init__( self ): + super(StringEnd,self).__init__() + self.errmsg = "Expected end of text" + + def parseImpl( self, instring, loc, doActions=True ): + if loc < len(instring): + raise ParseException(instring, loc, self.errmsg, self) + elif loc == len(instring): + return loc+1, [] + elif loc > len(instring): + return loc, [] + else: + raise ParseException(instring, loc, self.errmsg, self) + +class WordStart(_PositionToken): + """Matches if the current position is at the beginning of a Word, and + is not preceded by any character in a given set of C{wordChars} + (default=C{printables}). To emulate the C{\b} behavior of regular expressions, + use C{WordStart(alphanums)}. C{WordStart} will also match at the beginning of + the string being parsed, or at the beginning of a line. + """ + def __init__(self, wordChars = printables): + super(WordStart,self).__init__() + self.wordChars = set(wordChars) + self.errmsg = "Not at the start of a word" + + def parseImpl(self, instring, loc, doActions=True ): + if loc != 0: + if (instring[loc-1] in self.wordChars or + instring[loc] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + +class WordEnd(_PositionToken): + """Matches if the current position is at the end of a Word, and + is not followed by any character in a given set of C{wordChars} + (default=C{printables}). 
To emulate the C{\b} behavior of regular expressions, + use C{WordEnd(alphanums)}. C{WordEnd} will also match at the end of + the string being parsed, or at the end of a line. + """ + def __init__(self, wordChars = printables): + super(WordEnd,self).__init__() + self.wordChars = set(wordChars) + self.skipWhitespace = False + self.errmsg = "Not at the end of a word" + + def parseImpl(self, instring, loc, doActions=True ): + instrlen = len(instring) + if instrlen>0 and loc<instrlen: + if (instring[loc] in self.wordChars or + instring[loc-1] not in self.wordChars): + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + +class ParseExpression(ParserElement): + """Abstract subclass of ParserElement, for combining and post-processing parsed tokens.""" + def __init__( self, exprs, savelist = False ): + super(ParseExpression,self).__init__(savelist) + if isinstance( exprs, _generatorType ): + exprs = list(exprs) + + if isinstance( exprs, basestring ): + self.exprs = [ Literal( exprs ) ] + elif isinstance( exprs, collections.Sequence ): + # if sequence of strings provided, wrap with Literal + if all(isinstance(expr, basestring) for expr in exprs): + exprs = map(Literal, exprs) + self.exprs = list(exprs) + else: + try: + self.exprs = list( exprs ) + except TypeError: + self.exprs = [ exprs ] + self.callPreparse = False + + def __getitem__( self, i ): + return self.exprs[i] + + def append( self, other ): + self.exprs.append( other ) + self.strRepr = None + return self + + def leaveWhitespace( self ): + """Extends C{leaveWhitespace} defined in base class, and also invokes C{leaveWhitespace} on + all contained expressions.""" + self.skipWhitespace = False + self.exprs = [ e.copy() for e in self.exprs ] + for e in self.exprs: + e.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + else: + super( ParseExpression, self).ignore( other ) + for e in self.exprs: + e.ignore( self.ignoreExprs[-1] ) + return self + + def __str__( self ): + try: + return super(ParseExpression,self).__str__() + except: + pass + + if self.strRepr is None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.exprs) ) + return self.strRepr + + def streamline( self ): + super(ParseExpression,self).streamline() + + for e in self.exprs: + e.streamline() + + # collapse nested And's of the form And( And( And( a,b), c), d) to And( a,b,c,d ) + # but only if there are no parse actions or resultsNames on the nested And's + # (likewise for Or's and MatchFirst's) + if ( len(self.exprs) == 2 ): + other = self.exprs[0] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = other.exprs[:] + [ self.exprs[1] ] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + other = self.exprs[-1] + if ( isinstance( other, self.__class__ ) and + not(other.parseAction) and + other.resultsName is None and + not other.debug ): + self.exprs = self.exprs[:-1] + other.exprs[:] + self.strRepr = None + self.mayReturnEmpty |= other.mayReturnEmpty + self.mayIndexError |= other.mayIndexError + + self.errmsg = "Expected " + str(self) + + return self + + def setResultsName( self, name, listAllMatches=False ): + ret = super(ParseExpression,self).setResultsName(name,listAllMatches) + return ret + + def 
validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + for e in self.exprs: + e.validate(tmp) + self.checkRecursion( [] ) + + def copy(self): + ret = super(ParseExpression,self).copy() + ret.exprs = [e.copy() for e in self.exprs] + return ret + +class And(ParseExpression): + """Requires all given C{ParseExpression}s to be found in the given order. + Expressions may be separated by whitespace. + May be constructed using the C{'+'} operator. + """ + + class _ErrorStop(Empty): + def __init__(self, *args, **kwargs): + super(And._ErrorStop,self).__init__(*args, **kwargs) + self.name = '-' + self.leaveWhitespace() + + def __init__( self, exprs, savelist = True ): + super(And,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.setWhitespaceChars( self.exprs[0].whiteChars ) + self.skipWhitespace = self.exprs[0].skipWhitespace + self.callPreparse = True + + def parseImpl( self, instring, loc, doActions=True ): + # pass False as last arg to _parse for first element, since we already + # pre-parsed the string as part of our And pre-parsing + loc, resultlist = self.exprs[0]._parse( instring, loc, doActions, callPreParse=False ) + errorStop = False + for e in self.exprs[1:]: + if isinstance(e, And._ErrorStop): + errorStop = True + continue + if errorStop: + try: + loc, exprtokens = e._parse( instring, loc, doActions ) + except ParseSyntaxException: + raise + except ParseBaseException as pe: + pe.__traceback__ = None + raise ParseSyntaxException(pe) + except IndexError: + raise ParseSyntaxException( ParseException(instring, len(instring), self.errmsg, self) ) + else: + loc, exprtokens = e._parse( instring, loc, doActions ) + if exprtokens or exprtokens.haskeys(): + resultlist += exprtokens + return loc, resultlist + + def __iadd__(self, other ): + if isinstance( other, basestring ): + other = Literal( other ) + return self.append( other ) #And( [ self, other ] ) + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + if not e.mayReturnEmpty: + break + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + +class Or(ParseExpression): + """Requires that at least one C{ParseExpression} is found. + If two expressions match, the expression that matches the longest string will be used. + May be constructed using the C{'^'} operator. 
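+       For example (an illustrative sketch)::
+           number = Word(nums) ^ Combine(Word(nums) + "." + Word(nums))
+           number.parseString("3.1416")   # longest alternative wins -> ['3.1416']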
+ """ + def __init__( self, exprs, savelist = False ): + super(Or,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + matches = [] + for e in self.exprs: + try: + loc2 = e.tryParse( instring, loc ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + else: + # save match among all matches, to retry longest to shortest + matches.append((loc2, e)) + + if matches: + matches.sort(key=lambda x: -x[0]) + for _,e in matches: + try: + return e._parse( instring, loc, doActions ) + except ParseException as err: + err.__traceback__ = None + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + + def __ixor__(self, other ): + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + return self.append( other ) #Or( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " ^ ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class MatchFirst(ParseExpression): + """Requires that at least one C{ParseExpression} is found. + If two expressions match, the first one listed is the one that will match. + May be constructed using the C{'|'} operator. + """ + def __init__( self, exprs, savelist = False ): + super(MatchFirst,self).__init__(exprs, savelist) + if self.exprs: + self.mayReturnEmpty = any(e.mayReturnEmpty for e in self.exprs) + else: + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + maxExcLoc = -1 + maxException = None + for e in self.exprs: + try: + ret = e._parse( instring, loc, doActions ) + return ret + except ParseException as err: + if err.loc > maxExcLoc: + maxException = err + maxExcLoc = err.loc + except IndexError: + if len(instring) > maxExcLoc: + maxException = ParseException(instring,len(instring),e.errmsg,self) + maxExcLoc = len(instring) + + # only got here if no expression matched, raise exception for match that made it the furthest + else: + if maxException is not None: + maxException.msg = self.errmsg + raise maxException + else: + raise ParseException(instring, loc, "no defined alternatives to match", self) + + def __ior__(self, other ): + if isinstance( other, basestring ): + other = ParserElement.literalStringClass( other ) + return self.append( other ) #MatchFirst( [ self, other ] ) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " | ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class Each(ParseExpression): + """Requires all given C{ParseExpression}s to be found, but in any order. 
+ Expressions may be separated by whitespace. + May be constructed using the C{'&'} operator. + """ + def __init__( self, exprs, savelist = True ): + super(Each,self).__init__(exprs, savelist) + self.mayReturnEmpty = all(e.mayReturnEmpty for e in self.exprs) + self.skipWhitespace = True + self.initExprGroups = True + + def parseImpl( self, instring, loc, doActions=True ): + if self.initExprGroups: + self.opt1map = dict((id(e.expr),e) for e in self.exprs if isinstance(e,Optional)) + opt1 = [ e.expr for e in self.exprs if isinstance(e,Optional) ] + opt2 = [ e for e in self.exprs if e.mayReturnEmpty and not isinstance(e,Optional)] + self.optionals = opt1 + opt2 + self.multioptionals = [ e.expr for e in self.exprs if isinstance(e,ZeroOrMore) ] + self.multirequired = [ e.expr for e in self.exprs if isinstance(e,OneOrMore) ] + self.required = [ e for e in self.exprs if not isinstance(e,(Optional,ZeroOrMore,OneOrMore)) ] + self.required += self.multirequired + self.initExprGroups = False + tmpLoc = loc + tmpReqd = self.required[:] + tmpOpt = self.optionals[:] + matchOrder = [] + + keepMatching = True + while keepMatching: + tmpExprs = tmpReqd + tmpOpt + self.multioptionals + self.multirequired + failed = [] + for e in tmpExprs: + try: + tmpLoc = e.tryParse( instring, tmpLoc ) + except ParseException: + failed.append(e) + else: + matchOrder.append(self.opt1map.get(id(e),e)) + if e in tmpReqd: + tmpReqd.remove(e) + elif e in tmpOpt: + tmpOpt.remove(e) + if len(failed) == len(tmpExprs): + keepMatching = False + + if tmpReqd: + missing = ", ".join(_ustr(e) for e in tmpReqd) + raise ParseException(instring,loc,"Missing one or more required elements (%s)" % missing ) + + # add any unmatched Optionals, in case they have default values defined + matchOrder += [e for e in self.exprs if isinstance(e,Optional) and e.expr in tmpOpt] + + resultlist = [] + for e in matchOrder: + loc,results = e._parse(instring,loc,doActions) + resultlist.append(results) + + finalResults = ParseResults([]) + for r in resultlist: + dups = {} + for k in r.keys(): + if k in finalResults: + tmp = ParseResults(finalResults[k]) + tmp += ParseResults(r[k]) + dups[k] = tmp + finalResults += ParseResults(r) + for k,v in dups.items(): + finalResults[k] = v + return loc, finalResults + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + " & ".join(_ustr(e) for e in self.exprs) + "}" + + return self.strRepr + + def checkRecursion( self, parseElementList ): + subRecCheckList = parseElementList[:] + [ self ] + for e in self.exprs: + e.checkRecursion( subRecCheckList ) + + +class ParseElementEnhance(ParserElement): + """Abstract subclass of C{ParserElement}, for combining and post-processing parsed tokens.""" + def __init__( self, expr, savelist=False ): + super(ParseElementEnhance,self).__init__(savelist) + if isinstance( expr, basestring ): + expr = Literal(expr) + self.expr = expr + self.strRepr = None + if expr is not None: + self.mayIndexError = expr.mayIndexError + self.mayReturnEmpty = expr.mayReturnEmpty + self.setWhitespaceChars( expr.whiteChars ) + self.skipWhitespace = expr.skipWhitespace + self.saveAsList = expr.saveAsList + self.callPreparse = expr.callPreparse + self.ignoreExprs.extend(expr.ignoreExprs) + + def parseImpl( self, instring, loc, doActions=True ): + if self.expr is not None: + return self.expr._parse( instring, loc, doActions, callPreParse=False ) + else: + raise ParseException("",loc,self.errmsg,self) + + def leaveWhitespace( self ): + 
self.skipWhitespace = False + self.expr = self.expr.copy() + if self.expr is not None: + self.expr.leaveWhitespace() + return self + + def ignore( self, other ): + if isinstance( other, Suppress ): + if other not in self.ignoreExprs: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + else: + super( ParseElementEnhance, self).ignore( other ) + if self.expr is not None: + self.expr.ignore( self.ignoreExprs[-1] ) + return self + + def streamline( self ): + super(ParseElementEnhance,self).streamline() + if self.expr is not None: + self.expr.streamline() + return self + + def checkRecursion( self, parseElementList ): + if self in parseElementList: + raise RecursiveGrammarException( parseElementList+[self] ) + subRecCheckList = parseElementList[:] + [ self ] + if self.expr is not None: + self.expr.checkRecursion( subRecCheckList ) + + def validate( self, validateTrace=[] ): + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion( [] ) + + def __str__( self ): + try: + return super(ParseElementEnhance,self).__str__() + except: + pass + + if self.strRepr is None and self.expr is not None: + self.strRepr = "%s:(%s)" % ( self.__class__.__name__, _ustr(self.expr) ) + return self.strRepr + + +class FollowedBy(ParseElementEnhance): + """Lookahead matching of the given parse expression. C{FollowedBy} + does *not* advance the parsing position within the input string, it only + verifies that the specified parse expression matches at the current + position. C{FollowedBy} always returns a null token list.""" + def __init__( self, expr ): + super(FollowedBy,self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + self.expr.tryParse( instring, loc ) + return loc, [] + + +class NotAny(ParseElementEnhance): + """Lookahead to disallow matching with the given parse expression. C{NotAny} + does *not* advance the parsing position within the input string, it only + verifies that the specified parse expression does *not* match at the current + position. Also, C{NotAny} does *not* skip over leading whitespace. C{NotAny} + always returns a null token list. 
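+       For example (an illustrative sketch)::
+           keyword = Keyword("if") | Keyword("else")
+           identifier = ~keyword + Word(alphas)   # any word except the keywords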
May be constructed using the '~' operator.""" + def __init__( self, expr ): + super(NotAny,self).__init__(expr) + #~ self.leaveWhitespace() + self.skipWhitespace = False # do NOT use self.leaveWhitespace(), don't want to propagate to exprs + self.mayReturnEmpty = True + self.errmsg = "Found unwanted token, "+_ustr(self.expr) + + def parseImpl( self, instring, loc, doActions=True ): + try: + self.expr.tryParse( instring, loc ) + except (ParseException,IndexError): + pass + else: + raise ParseException(instring, loc, self.errmsg, self) + return loc, [] + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "~{" + _ustr(self.expr) + "}" + + return self.strRepr + + +class ZeroOrMore(ParseElementEnhance): + """Optional repetition of zero or more of the given expression.""" + def __init__( self, expr ): + super(ZeroOrMore,self).__init__(expr) + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + tokens = [] + try: + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + hasIgnoreExprs = ( len(self.ignoreExprs) > 0 ) + while 1: + if hasIgnoreExprs: + preloc = self._skipIgnorables( instring, loc ) + else: + preloc = loc + loc, tmptokens = self.expr._parse( instring, preloc, doActions ) + if tmptokens or tmptokens.haskeys(): + tokens += tmptokens + except (ParseException,IndexError): + pass + + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]..." + + return self.strRepr + + def setResultsName( self, name, listAllMatches=False ): + ret = super(ZeroOrMore,self).setResultsName(name,listAllMatches) + ret.saveAsList = True + return ret + + +class OneOrMore(ParseElementEnhance): + """Repetition of one or more of the given expression.""" + def parseImpl( self, instring, loc, doActions=True ): + # must be at least one + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + try: + hasIgnoreExprs = ( len(self.ignoreExprs) > 0 ) + while 1: + if hasIgnoreExprs: + preloc = self._skipIgnorables( instring, loc ) + else: + preloc = loc + loc, tmptokens = self.expr._parse( instring, preloc, doActions ) + if tmptokens or tmptokens.haskeys(): + tokens += tmptokens + except (ParseException,IndexError): + pass + + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "{" + _ustr(self.expr) + "}..." + + return self.strRepr + + def setResultsName( self, name, listAllMatches=False ): + ret = super(OneOrMore,self).setResultsName(name,listAllMatches) + ret.saveAsList = True + return ret + +class _NullToken(object): + def __bool__(self): + return False + __nonzero__ = __bool__ + def __str__(self): + return "" + +_optionalNotMatched = _NullToken() +class Optional(ParseElementEnhance): + """Optional matching of the given expression. + A default return string can also be specified, if the optional expression + is not found. 
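+       For example (an illustrative sketch)::
+           zip_code = Word(nums, exact=5) + Optional("-" + Word(nums, exact=4))
+           zip_code.parseString("12345-6789")   # -> ['12345', '-', '6789']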
+ """ + def __init__( self, expr, default=_optionalNotMatched ): + super(Optional,self).__init__( expr, savelist=False ) + self.defaultValue = default + self.mayReturnEmpty = True + + def parseImpl( self, instring, loc, doActions=True ): + try: + loc, tokens = self.expr._parse( instring, loc, doActions, callPreParse=False ) + except (ParseException,IndexError): + if self.defaultValue is not _optionalNotMatched: + if self.expr.resultsName: + tokens = ParseResults([ self.defaultValue ]) + tokens[self.expr.resultsName] = self.defaultValue + else: + tokens = [ self.defaultValue ] + else: + tokens = [] + return loc, tokens + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + if self.strRepr is None: + self.strRepr = "[" + _ustr(self.expr) + "]" + + return self.strRepr + + +class SkipTo(ParseElementEnhance): + """Token for skipping over all undefined text until the matched expression is found. + If C{include} is set to true, the matched expression is also parsed (the skipped text + and matched expression are returned as a 2-element list). The C{ignore} + argument is used to define grammars (typically quoted strings and comments) that + might contain false matches. + """ + def __init__( self, other, include=False, ignore=None, failOn=None ): + super( SkipTo, self ).__init__( other ) + self.ignoreExpr = ignore + self.mayReturnEmpty = True + self.mayIndexError = False + self.includeMatch = include + self.asList = False + if failOn is not None and isinstance(failOn, basestring): + self.failOn = Literal(failOn) + else: + self.failOn = failOn + self.errmsg = "No match found for "+_ustr(self.expr) + + def parseImpl( self, instring, loc, doActions=True ): + startLoc = loc + instrlen = len(instring) + expr = self.expr + failParse = False + while loc <= instrlen: + try: + if self.failOn: + try: + self.failOn.tryParse(instring, loc) + except ParseBaseException: + pass + else: + failParse = True + raise ParseException(instring, loc, "Found expression " + str(self.failOn)) + failParse = False + if self.ignoreExpr is not None: + while 1: + try: + loc = self.ignoreExpr.tryParse(instring,loc) + # print("found ignoreExpr, advance to", loc) + except ParseBaseException: + break + expr._parse( instring, loc, doActions=False, callPreParse=False ) + skipText = instring[startLoc:loc] + if self.includeMatch: + loc,mat = expr._parse(instring,loc,doActions,callPreParse=False) + if mat: + skipRes = ParseResults( skipText ) + skipRes += mat + return loc, [ skipRes ] + else: + return loc, [ skipText ] + else: + return loc, [ skipText ] + except (ParseException,IndexError): + if failParse: + raise + else: + loc += 1 + raise ParseException(instring, loc, self.errmsg, self) + +class Forward(ParseElementEnhance): + """Forward declaration of an expression to be defined later - + used for recursive grammars, such as algebraic infix notation. + When the expression is known, it is assigned to the C{Forward} variable using the '<<' operator. + + Note: take care when assigning to C{Forward} not to overlook precedence of operators. + Specifically, '|' has a lower precedence than '<<', so that:: + fwdExpr << a | b | c + will actually be evaluated as:: + (fwdExpr << a) | b | c + thereby leaving b and c out as parseable alternatives. It is recommended that you + explicitly group the values inserted into the C{Forward}:: + fwdExpr << (a | b | c) + Converting to use the '<<=' operator instead will avoid this problem. 
+ """ + def __init__( self, other=None ): + super(Forward,self).__init__( other, savelist=False ) + + def __lshift__( self, other ): + if isinstance( other, basestring ): + other = ParserElement.literalStringClass(other) + self.expr = other + self.mayReturnEmpty = other.mayReturnEmpty + self.strRepr = None + self.mayIndexError = self.expr.mayIndexError + self.mayReturnEmpty = self.expr.mayReturnEmpty + self.setWhitespaceChars( self.expr.whiteChars ) + self.skipWhitespace = self.expr.skipWhitespace + self.saveAsList = self.expr.saveAsList + self.ignoreExprs.extend(self.expr.ignoreExprs) + return self + + def __ilshift__(self, other): + return self << other + + def leaveWhitespace( self ): + self.skipWhitespace = False + return self + + def streamline( self ): + if not self.streamlined: + self.streamlined = True + if self.expr is not None: + self.expr.streamline() + return self + + def validate( self, validateTrace=[] ): + if self not in validateTrace: + tmp = validateTrace[:]+[self] + if self.expr is not None: + self.expr.validate(tmp) + self.checkRecursion([]) + + def __str__( self ): + if hasattr(self,"name"): + return self.name + + self._revertClass = self.__class__ + self.__class__ = _ForwardNoRecurse + try: + if self.expr is not None: + retString = _ustr(self.expr) + else: + retString = "None" + finally: + self.__class__ = self._revertClass + return self.__class__.__name__ + ": " + retString + + def copy(self): + if self.expr is not None: + return super(Forward,self).copy() + else: + ret = Forward() + ret <<= self + return ret + +class _ForwardNoRecurse(Forward): + def __str__( self ): + return "..." + +class TokenConverter(ParseElementEnhance): + """Abstract subclass of C{ParseExpression}, for converting parsed results.""" + def __init__( self, expr, savelist=False ): + super(TokenConverter,self).__init__( expr )#, savelist ) + self.saveAsList = False + +class Upcase(TokenConverter): + """Converter to upper case all matching tokens.""" + def __init__(self, *args): + super(Upcase,self).__init__(*args) + warnings.warn("Upcase class is deprecated, use upcaseTokens parse action instead", + DeprecationWarning,stacklevel=2) + + def postParse( self, instring, loc, tokenlist ): + return list(map( str.upper, tokenlist )) + + +class Combine(TokenConverter): + """Converter to concatenate all matching tokens to a single string. + By default, the matching patterns must also be contiguous in the input string; + this can be disabled by specifying C{'adjacent=False'} in the constructor. 
+ """ + def __init__( self, expr, joinString="", adjacent=True ): + super(Combine,self).__init__( expr ) + # suppress whitespace-stripping in contained parse expressions, but re-enable it on the Combine itself + if adjacent: + self.leaveWhitespace() + self.adjacent = adjacent + self.skipWhitespace = True + self.joinString = joinString + self.callPreparse = True + + def ignore( self, other ): + if self.adjacent: + ParserElement.ignore(self, other) + else: + super( Combine, self).ignore( other ) + return self + + def postParse( self, instring, loc, tokenlist ): + retToks = tokenlist.copy() + del retToks[:] + retToks += ParseResults([ "".join(tokenlist._asStringList(self.joinString)) ], modal=self.modalResults) + + if self.resultsName and retToks.haskeys(): + return [ retToks ] + else: + return retToks + +class Group(TokenConverter): + """Converter to return the matched tokens as a list - useful for returning tokens of C{L{ZeroOrMore}} and C{L{OneOrMore}} expressions.""" + def __init__( self, expr ): + super(Group,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + return [ tokenlist ] + +class Dict(TokenConverter): + """Converter to return a repetitive expression as a list, but also as a dictionary. + Each element can also be referenced using the first token in the expression as its key. + Useful for tabular report scraping when the first column can be used as a item key. + """ + def __init__( self, expr ): + super(Dict,self).__init__( expr ) + self.saveAsList = True + + def postParse( self, instring, loc, tokenlist ): + for i,tok in enumerate(tokenlist): + if len(tok) == 0: + continue + ikey = tok[0] + if isinstance(ikey,int): + ikey = _ustr(tok[0]).strip() + if len(tok)==1: + tokenlist[ikey] = _ParseResultsWithOffset("",i) + elif len(tok)==2 and not isinstance(tok[1],ParseResults): + tokenlist[ikey] = _ParseResultsWithOffset(tok[1],i) + else: + dictvalue = tok.copy() #ParseResults(i) + del dictvalue[0] + if len(dictvalue)!= 1 or (isinstance(dictvalue,ParseResults) and dictvalue.haskeys()): + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue,i) + else: + tokenlist[ikey] = _ParseResultsWithOffset(dictvalue[0],i) + + if self.resultsName: + return [ tokenlist ] + else: + return tokenlist + + +class Suppress(TokenConverter): + """Converter for ignoring the results of a parsed expression.""" + def postParse( self, instring, loc, tokenlist ): + return [] + + def suppress( self ): + return self + + +class OnlyOnce(object): + """Wrapper for parse actions, to ensure they are only called once.""" + def __init__(self, methodCall): + self.callable = _trim_arity(methodCall) + self.called = False + def __call__(self,s,l,t): + if not self.called: + results = self.callable(s,l,t) + self.called = True + return results + raise ParseException(s,l,"") + def reset(self): + self.called = False + +def traceParseAction(f): + """Decorator for debugging parse actions.""" + f = _trim_arity(f) + def z(*paArgs): + thisFunc = f.func_name + s,l,t = paArgs[-3:] + if len(paArgs)>3: + thisFunc = paArgs[0].__class__.__name__ + '.' 
+ thisFunc + sys.stderr.write( ">>entering %s(line: '%s', %d, %s)\n" % (thisFunc,line(l,s),l,t) ) + try: + ret = f(*paArgs) + except Exception as exc: + sys.stderr.write( "<<leaving %s (exception: %s)\n" % (thisFunc,exc) ) + raise + sys.stderr.write( "<<leaving %s (ret: %s)\n" % (thisFunc,ret) ) + return ret + try: + z.__name__ = f.__name__ + except AttributeError: + pass + return z + +# +# global helpers +# +def delimitedList( expr, delim=",", combine=False ): + """Helper to define a delimited list of expressions - the delimiter defaults to ','. + By default, the list elements and delimiters can have intervening whitespace, and + comments, but this can be overridden by passing C{combine=True} in the constructor. + If C{combine} is set to C{True}, the matching tokens are returned as a single token + string, with the delimiters included; otherwise, the matching tokens are returned + as a list of tokens, with the delimiters suppressed. + """ + dlName = _ustr(expr)+" ["+_ustr(delim)+" "+_ustr(expr)+"]..." + if combine: + return Combine( expr + ZeroOrMore( delim + expr ) ).setName(dlName) + else: + return ( expr + ZeroOrMore( Suppress( delim ) + expr ) ).setName(dlName) + +def countedArray( expr, intExpr=None ): + """Helper to define a counted list of expressions. + This helper defines a pattern of the form:: + integer expr expr expr... + where the leading integer tells how many expr expressions follow. + The matched tokens returns the array of expr tokens as a list - the leading count token is suppressed. + """ + arrayExpr = Forward() + def countFieldParseAction(s,l,t): + n = t[0] + arrayExpr << (n and Group(And([expr]*n)) or Group(empty)) + return [] + if intExpr is None: + intExpr = Word(nums).setParseAction(lambda t:int(t[0])) + else: + intExpr = intExpr.copy() + intExpr.setName("arrayLen") + intExpr.addParseAction(countFieldParseAction, callDuringTry=True) + return ( intExpr + arrayExpr ) + +def _flatten(L): + ret = [] + for i in L: + if isinstance(i,list): + ret.extend(_flatten(i)) + else: + ret.append(i) + return ret + +def matchPreviousLiteral(expr): + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousLiteral(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. Because this matches a + previous literal, will also match the leading C{"1:1"} in C{"1:10"}. + If this is not desired, use C{matchPreviousExpr}. + Do *not* use with packrat parsing enabled. + """ + rep = Forward() + def copyTokenToRepeater(s,l,t): + if t: + if len(t) == 1: + rep << t[0] + else: + # flatten t tokens + tflat = _flatten(t.asList()) + rep << And( [ Literal(tt) for tt in tflat ] ) + else: + rep << Empty() + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + return rep + +def matchPreviousExpr(expr): + """Helper to define an expression that is indirectly defined from + the tokens matched in a previous expression, that is, it looks + for a 'repeat' of a previous expression. For example:: + first = Word(nums) + second = matchPreviousExpr(first) + matchExpr = first + ":" + second + will match C{"1:1"}, but not C{"1:2"}. Because this matches by + expressions, will *not* match the leading C{"1:1"} in C{"1:10"}; + the expressions are evaluated first, and then compared, so + C{"1"} is compared with C{"10"}. + Do *not* use with packrat parsing enabled. 
+ """ + rep = Forward() + e2 = expr.copy() + rep <<= e2 + def copyTokenToRepeater(s,l,t): + matchTokens = _flatten(t.asList()) + def mustMatchTheseTokens(s,l,t): + theseTokens = _flatten(t.asList()) + if theseTokens != matchTokens: + raise ParseException("",0,"") + rep.setParseAction( mustMatchTheseTokens, callDuringTry=True ) + expr.addParseAction(copyTokenToRepeater, callDuringTry=True) + return rep + +def _escapeRegexRangeChars(s): + #~ escape these chars: ^-] + for c in r"\^-]": + s = s.replace(c,_bslash+c) + s = s.replace("\n",r"\n") + s = s.replace("\t",r"\t") + return _ustr(s) + +def oneOf( strs, caseless=False, useRegex=True ): + """Helper to quickly define a set of alternative Literals, and makes sure to do + longest-first testing when there is a conflict, regardless of the input order, + but returns a C{L{MatchFirst}} for best performance. + + Parameters: + - strs - a string of space-delimited literals, or a list of string literals + - caseless - (default=False) - treat all literals as caseless + - useRegex - (default=True) - as an optimization, will generate a Regex + object; otherwise, will generate a C{MatchFirst} object (if C{caseless=True}, or + if creating a C{Regex} raises an exception) + """ + if caseless: + isequal = ( lambda a,b: a.upper() == b.upper() ) + masks = ( lambda a,b: b.upper().startswith(a.upper()) ) + parseElementClass = CaselessLiteral + else: + isequal = ( lambda a,b: a == b ) + masks = ( lambda a,b: b.startswith(a) ) + parseElementClass = Literal + + symbols = [] + if isinstance(strs,basestring): + symbols = strs.split() + elif isinstance(strs, collections.Sequence): + symbols = list(strs[:]) + elif isinstance(strs, _generatorType): + symbols = list(strs) + else: + warnings.warn("Invalid argument to oneOf, expected string or list", + SyntaxWarning, stacklevel=2) + if not symbols: + return NoMatch() + + i = 0 + while i < len(symbols)-1: + cur = symbols[i] + for j,other in enumerate(symbols[i+1:]): + if ( isequal(other, cur) ): + del symbols[i+j+1] + break + elif ( masks(cur, other) ): + del symbols[i+j+1] + symbols.insert(i,other) + cur = other + break + else: + i += 1 + + if not caseless and useRegex: + #~ print (strs,"->", "|".join( [ _escapeRegexChars(sym) for sym in symbols] )) + try: + if len(symbols)==len("".join(symbols)): + return Regex( "[%s]" % "".join(_escapeRegexRangeChars(sym) for sym in symbols) ) + else: + return Regex( "|".join(re.escape(sym) for sym in symbols) ) + except: + warnings.warn("Exception creating Regex for oneOf, building MatchFirst", + SyntaxWarning, stacklevel=2) + + + # last resort, just use MatchFirst + return MatchFirst( [ parseElementClass(sym) for sym in symbols ] ) + +def dictOf( key, value ): + """Helper to easily and clearly define a dictionary by specifying the respective patterns + for the key and value. Takes care of defining the C{L{Dict}}, C{L{ZeroOrMore}}, and C{L{Group}} tokens + in the proper order. The key pattern can include delimiting markers or punctuation, + as long as they are suppressed, thereby leaving the significant key text. The value + pattern can include named results, so that the C{Dict} results can include named token + fields. + """ + return Dict( ZeroOrMore( Group ( key + value ) ) ) + +def originalTextFor(expr, asString=True): + """Helper to return the original, untokenized text for a given expression. Useful to + restore the parsed fields of an HTML start tag into the raw tag text itself, or to + revert separate tokens with intervening whitespace back to the original matching + input text. 
Simpler to use than the parse action C{L{keepOriginalText}}, and does not + require the inspect module to chase up the call stack. By default, returns a + string containing the original parsed text. + + If the optional C{asString} argument is passed as C{False}, then the return value is a + C{L{ParseResults}} containing any results names that were originally matched, and a + single token containing the original matched text from the input string. So if + the expression passed to C{L{originalTextFor}} contains expressions with defined + results names, you must set C{asString} to C{False} if you want to preserve those + results name values.""" + locMarker = Empty().setParseAction(lambda s,loc,t: loc) + endlocMarker = locMarker.copy() + endlocMarker.callPreparse = False + matchExpr = locMarker("_original_start") + expr + endlocMarker("_original_end") + if asString: + extractText = lambda s,l,t: s[t._original_start:t._original_end] + else: + def extractText(s,l,t): + del t[:] + t.insert(0, s[t._original_start:t._original_end]) + del t["_original_start"] + del t["_original_end"] + matchExpr.setParseAction(extractText) + return matchExpr + +def ungroup(expr): + """Helper to undo pyparsing's default grouping of And expressions, even + if all but one are non-empty.""" + return TokenConverter(expr).setParseAction(lambda t:t[0]) + +def locatedExpr(expr): + """Helper to decorate a returned token with its starting and ending locations in the input string. + This helper adds the following results names: + - locn_start = location where matched expression begins + - locn_end = location where matched expression ends + - value = the actual parsed results + + Be careful if the input text contains C{<TAB>} characters, you may want to call + C{L{ParserElement.parseWithTabs}} + """ + locator = Empty().setParseAction(lambda s,l,t: l) + return Group(locator("locn_start") + expr("value") + locator.copy().leaveWhitespace()("locn_end")) + + +# convenience constants for positional expressions +empty = Empty().setName("empty") +lineStart = LineStart().setName("lineStart") +lineEnd = LineEnd().setName("lineEnd") +stringStart = StringStart().setName("stringStart") +stringEnd = StringEnd().setName("stringEnd") + +_escapedPunc = Word( _bslash, r"\[]-*.$+^?()~ ", exact=2 ).setParseAction(lambda s,l,t:t[0][1]) +_escapedHexChar = Regex(r"\\0?[xX][0-9a-fA-F]+").setParseAction(lambda s,l,t:unichr(int(t[0].lstrip(r'\0x'),16))) +_escapedOctChar = Regex(r"\\0[0-7]+").setParseAction(lambda s,l,t:unichr(int(t[0][1:],8))) +_singleChar = _escapedPunc | _escapedHexChar | _escapedOctChar | Word(printables, excludeChars=r'\]', exact=1) | Regex(r"\w", re.UNICODE) +_charRange = Group(_singleChar + Suppress("-") + _singleChar) +_reBracketExpr = Literal("[") + Optional("^").setResultsName("negate") + Group( OneOrMore( _charRange | _singleChar ) ).setResultsName("body") + "]" + +def srange(s): + r"""Helper to easily define string ranges for use in Word construction. Borrows + syntax from regexp '[]' string range definitions:: + srange("[0-9]") -> "0123456789" + srange("[a-z]") -> "abcdefghijklmnopqrstuvwxyz" + srange("[a-z$_]") -> "abcdefghijklmnopqrstuvwxyz$_" + The input string must be enclosed in []'s, and the returned string is the expanded + character set joined into a single string. + The values enclosed in the []'s may be:: + a single character + an escaped character with a leading backslash (such as \- or \]) + an escaped hex character with a leading '\x' (\x21, which is a '!' 
character) + (\0x## is also supported for backwards compatibility) + an escaped octal character with a leading '\0' (\041, which is a '!' character) + a range of any of the above, separated by a dash ('a-z', etc.) + any combination of the above ('aeiouy', 'a-zA-Z0-9_$', etc.) + """ + _expanded = lambda p: p if not isinstance(p,ParseResults) else ''.join(unichr(c) for c in range(ord(p[0]),ord(p[1])+1)) + try: + return "".join(_expanded(part) for part in _reBracketExpr.parseString(s).body) + except: + return "" + +def matchOnlyAtCol(n): + """Helper method for defining parse actions that require matching at a specific + column in the input text. + """ + def verifyCol(strg,locn,toks): + if col(locn,strg) != n: + raise ParseException(strg,locn,"matched token not at column %d" % n) + return verifyCol + +def replaceWith(replStr): + """Helper method for common parse actions that simply return a literal value. Especially + useful when used with C{L{transformString<ParserElement.transformString>}()}. + """ + #def _replFunc(*args): + # return [replStr] + #return _replFunc + return functools.partial(next, itertools.repeat([replStr])) + +def removeQuotes(s,l,t): + """Helper parse action for removing quotation marks from parsed quoted strings. + To use, add this parse action to quoted string using:: + quotedString.setParseAction( removeQuotes ) + """ + return t[0][1:-1] + +def upcaseTokens(s,l,t): + """Helper parse action to convert tokens to upper case.""" + return [ tt.upper() for tt in map(_ustr,t) ] + +def downcaseTokens(s,l,t): + """Helper parse action to convert tokens to lower case.""" + return [ tt.lower() for tt in map(_ustr,t) ] + +def keepOriginalText(s,startLoc,t): + """DEPRECATED - use new helper method C{L{originalTextFor}}. + Helper parse action to preserve original parsed text, + overriding any nested parse actions.""" + try: + endloc = getTokensEndLoc() + except ParseException: + raise ParseFatalException("incorrect usage of keepOriginalText - may only be called as a parse action") + del t[:] + t += ParseResults(s[startLoc:endloc]) + return t + +def getTokensEndLoc(): + """Method to be called from within a parse action to determine the end + location of the parsed tokens.""" + import inspect + fstack = inspect.stack() + try: + # search up the stack (through intervening argument normalizers) for correct calling routine + for f in fstack[2:]: + if f[3] == "_parseNoCache": + endloc = f[0].f_locals["loc"] + return endloc + else: + raise ParseFatalException("incorrect usage of getTokensEndLoc - may only be called from within a parse action") + finally: + del fstack + +def _makeTags(tagStr, xml): + """Internal helper to construct opening and closing tag expressions, given a tag name""" + if isinstance(tagStr,basestring): + resname = tagStr + tagStr = Keyword(tagStr, caseless=not xml) + else: + resname = tagStr.name + + tagAttrName = Word(alphas,alphanums+"_-:") + if (xml): + tagAttrValue = dblQuotedString.copy().setParseAction( removeQuotes ) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName + Suppress("=") + tagAttrValue ))) + \ + Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">") + else: + printablesLessRAbrack = "".join(c for c in printables if c not in ">") + tagAttrValue = quotedString.copy().setParseAction( removeQuotes ) | Word(printablesLessRAbrack) + openTag = Suppress("<") + tagStr("tag") + \ + Dict(ZeroOrMore(Group( tagAttrName.setParseAction(downcaseTokens) + \ + Optional( Suppress("=") + 
tagAttrValue ) ))) + \
+                Optional("/",default=[False]).setResultsName("empty").setParseAction(lambda s,l,t:t[0]=='/') + Suppress(">")
+    closeTag = Combine(_L("</") + tagStr + ">")
+
+    openTag = openTag.setResultsName("start"+"".join(resname.replace(":"," ").title().split())).setName("<%s>" % tagStr)
+    closeTag = closeTag.setResultsName("end"+"".join(resname.replace(":"," ").title().split())).setName("</%s>" % tagStr)
+    openTag.tag = resname
+    closeTag.tag = resname
+    return openTag, closeTag
+
+def makeHTMLTags(tagStr):
+    """Helper to construct opening and closing tag expressions for HTML, given a tag name"""
+    return _makeTags( tagStr, False )
+
+def makeXMLTags(tagStr):
+    """Helper to construct opening and closing tag expressions for XML, given a tag name"""
+    return _makeTags( tagStr, True )
+
+def withAttribute(*args,**attrDict):
+    """Helper to create a validating parse action to be used with start tags created
+       with C{L{makeXMLTags}} or C{L{makeHTMLTags}}. Use C{withAttribute} to qualify a starting tag
+       with a required attribute value, to avoid false matches on common tags such as
+       C{<TD>} or C{<DIV>}.
+
+       Call C{withAttribute} with a series of attribute names and values. Specify the list
+       of filter attribute names and values as:
+        - keyword arguments, as in C{(align="right")}, or
+        - as an explicit dict with C{**} operator, when an attribute name is also a Python
+          reserved word, as in C{**{"class":"Customer", "align":"right"}}
+        - a list of name-value tuples, as in ( ("ns1:class", "Customer"), ("ns2:align","right") )
+       For attribute names with a namespace prefix, you must use the second form. Attribute
+       names are matched insensitive to upper/lower case.
+
+       If just testing for C{class} (with or without a namespace), use C{L{withClass}}.
+
+       To verify that the attribute exists, but without specifying a value, pass
+       C{withAttribute.ANY_VALUE} as the value.
+    """
+    if args:
+        attrs = args[:]
+    else:
+        attrs = attrDict.items()
+    attrs = [(k,v) for k,v in attrs]
+    def pa(s,l,tokens):
+        for attrName,attrValue in attrs:
+            if attrName not in tokens:
+                raise ParseException(s,l,"no matching attribute " + attrName)
+            if attrValue != withAttribute.ANY_VALUE and tokens[attrName] != attrValue:
+                raise ParseException(s,l,"attribute '%s' has value '%s', must be '%s'" %
+                                        (attrName, tokens[attrName], attrValue))
+    return pa
+withAttribute.ANY_VALUE = object()
+
+def withClass(classname, namespace=''):
+    """Simplified version of C{L{withAttribute}} when matching on a div class - made
+       difficult because C{class} is a reserved word in Python.
+    """
+    classattr = "%s:class" % namespace if namespace else "class"
+    return withAttribute(**{classattr : classname})
+
+opAssoc = _Constants()
+opAssoc.LEFT = object()
+opAssoc.RIGHT = object()
+
+def infixNotation( baseExpr, opList, lpar=Suppress('('), rpar=Suppress(')') ):
+    """Helper method for constructing grammars of expressions made up of
+       operators working in a precedence hierarchy. Operators may be unary or
+       binary, left- or right-associative. Parse actions can also be attached
+       to operator expressions.
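+
+       A minimal arithmetic sketch (the C{integer} operand and the two
+       precedence levels are hypothetical; results group by precedence)::
+           integer = Word(nums).setParseAction(lambda t: int(t[0]))
+           arith = infixNotation(integer, [
+               (oneOf("* /"), 2, opAssoc.LEFT),
+               (oneOf("+ -"), 2, opAssoc.LEFT),
+               ])
+           arith.parseString("1 + 2 * 3") # -> [[1, '+', [2, '*', 3]]]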
+ + Parameters: + - baseExpr - expression representing the most basic element for the nested + - opList - list of tuples, one for each operator precedence level in the + expression grammar; each tuple is of the form + (opExpr, numTerms, rightLeftAssoc, parseAction), where: + - opExpr is the pyparsing expression for the operator; + may also be a string, which will be converted to a Literal; + if numTerms is 3, opExpr is a tuple of two expressions, for the + two operators separating the 3 terms + - numTerms is the number of terms for this operator (must + be 1, 2, or 3) + - rightLeftAssoc is the indicator whether the operator is + right or left associative, using the pyparsing-defined + constants C{opAssoc.RIGHT} and C{opAssoc.LEFT}. + - parseAction is the parse action to be associated with + expressions matching this operator expression (the + parse action tuple member may be omitted) + - lpar - expression for matching left-parentheses (default=Suppress('(')) + - rpar - expression for matching right-parentheses (default=Suppress(')')) + """ + ret = Forward() + lastExpr = baseExpr | ( lpar + ret + rpar ) + for i,operDef in enumerate(opList): + opExpr,arity,rightLeftAssoc,pa = (operDef + (None,))[:4] + if arity == 3: + if opExpr is None or len(opExpr) != 2: + raise ValueError("if numterms=3, opExpr must be a tuple or list of two expressions") + opExpr1, opExpr2 = opExpr + thisExpr = Forward()#.setName("expr%d" % i) + if rightLeftAssoc == opAssoc.LEFT: + if arity == 1: + matchExpr = FollowedBy(lastExpr + opExpr) + Group( lastExpr + OneOrMore( opExpr ) ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + lastExpr) + Group( lastExpr + OneOrMore( opExpr + lastExpr ) ) + else: + matchExpr = FollowedBy(lastExpr+lastExpr) + Group( lastExpr + OneOrMore(lastExpr) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr) + \ + Group( lastExpr + opExpr1 + lastExpr + opExpr2 + lastExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + elif rightLeftAssoc == opAssoc.RIGHT: + if arity == 1: + # try to avoid LR with this extra test + if not isinstance(opExpr, Optional): + opExpr = Optional(opExpr) + matchExpr = FollowedBy(opExpr.expr + thisExpr) + Group( opExpr + thisExpr ) + elif arity == 2: + if opExpr is not None: + matchExpr = FollowedBy(lastExpr + opExpr + thisExpr) + Group( lastExpr + OneOrMore( opExpr + thisExpr ) ) + else: + matchExpr = FollowedBy(lastExpr + thisExpr) + Group( lastExpr + OneOrMore( thisExpr ) ) + elif arity == 3: + matchExpr = FollowedBy(lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr) + \ + Group( lastExpr + opExpr1 + thisExpr + opExpr2 + thisExpr ) + else: + raise ValueError("operator must be unary (1), binary (2), or ternary (3)") + else: + raise ValueError("operator must indicate right or left associativity") + if pa: + matchExpr.setParseAction( pa ) + thisExpr <<= ( matchExpr | lastExpr ) + lastExpr = thisExpr + ret <<= lastExpr + return ret +operatorPrecedence = infixNotation + +dblQuotedString = Regex(r'"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*"').setName("string enclosed in double quotes") +sglQuotedString = Regex(r"'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*'").setName("string enclosed in single quotes") +quotedString = Regex(r'''(?:"(?:[^"\n\r\\]|(?:"")|(?:\\x[0-9a-fA-F]+)|(?:\\.))*")|(?:'(?:[^'\n\r\\]|(?:'')|(?:\\x[0-9a-fA-F]+)|(?:\\.))*')''').setName("quotedString using single or double quotes") +unicodeString = Combine(_L('u') + 
quotedString.copy())
+
+def nestedExpr(opener="(", closer=")", content=None, ignoreExpr=quotedString.copy()):
+    """Helper method for defining nested lists enclosed in opening and closing
+       delimiters ("(" and ")" are the default).
+
+       Parameters:
+        - opener - opening character for a nested list (default="("); can also be a pyparsing expression
+        - closer - closing character for a nested list (default=")"); can also be a pyparsing expression
+        - content - expression for items within the nested lists (default=None)
+        - ignoreExpr - expression for ignoring opening and closing delimiters (default=quotedString)
+
+       If an expression is not provided for the content argument, the nested
+       expression will capture all whitespace-delimited content between delimiters
+       as a list of separate values.
+
+       Use the C{ignoreExpr} argument to define expressions that may contain
+       opening or closing characters that should not be treated as opening
+       or closing characters for nesting, such as quotedString or a comment
+       expression. Specify multiple expressions using an C{L{Or}} or C{L{MatchFirst}}.
+       The default is L{quotedString}, but if no expressions are to be ignored,
+       then pass C{None} for this argument.
+    """
+    if opener == closer:
+        raise ValueError("opening and closing strings cannot be the same")
+    if content is None:
+        if isinstance(opener,basestring) and isinstance(closer,basestring):
+            if len(opener) == 1 and len(closer)==1:
+                if ignoreExpr is not None:
+                    content = (Combine(OneOrMore(~ignoreExpr +
+                                    CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+                                ).setParseAction(lambda t:t[0].strip()))
+                else:
+                    content = (empty.copy()+CharsNotIn(opener+closer+ParserElement.DEFAULT_WHITE_CHARS
+                                ).setParseAction(lambda t:t[0].strip()))
+            else:
+                if ignoreExpr is not None:
+                    content = (Combine(OneOrMore(~ignoreExpr +
+                                    ~Literal(opener) + ~Literal(closer) +
+                                    CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+                                ).setParseAction(lambda t:t[0].strip()))
+                else:
+                    content = (Combine(OneOrMore(~Literal(opener) + ~Literal(closer) +
+                                    CharsNotIn(ParserElement.DEFAULT_WHITE_CHARS,exact=1))
+                                ).setParseAction(lambda t:t[0].strip()))
+        else:
+            raise ValueError("opening and closing arguments must be strings if no content expression is given")
+    ret = Forward()
+    if ignoreExpr is not None:
+        ret <<= Group( Suppress(opener) + ZeroOrMore( ignoreExpr | ret | content ) + Suppress(closer) )
+    else:
+        ret <<= Group( Suppress(opener) + ZeroOrMore( ret | content ) + Suppress(closer) )
+    return ret
+
+def indentedBlock(blockStatementExpr, indentStack, indent=True):
+    """Helper method for defining space-delimited indentation blocks, such as
+       those used to define block statements in Python source code.
+
+       Parameters:
+        - blockStatementExpr - expression defining syntax of statement that
+          is repeated within the indented block
+        - indentStack - list created by caller to manage indentation stack
+          (multiple statementWithIndentedBlock expressions within a single grammar
+          should share a common indentStack)
+        - indent - boolean indicating whether block must be indented beyond the
+          current level; set to False for block of left-most statements
+          (default=True)
+
+       A valid block must contain at least one C{blockStatement}.
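+
+       A rough usage sketch (the grammar names C{stmt}, C{suite}, and
+       C{funcDef} are hypothetical, not part of this helper)::
+           indentStack = [1]
+           stmt = Forward()
+           suite = indentedBlock(stmt, indentStack)
+           funcDef = Group( Keyword("def") + Word(alphas) + "(" + ")" + ":" + suite )
+           stmt <<= ( funcDef | Word(alphas) )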
+ """ + def checkPeerIndent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if curCol != indentStack[-1]: + if curCol > indentStack[-1]: + raise ParseFatalException(s,l,"illegal nesting") + raise ParseException(s,l,"not a peer entry") + + def checkSubIndent(s,l,t): + curCol = col(l,s) + if curCol > indentStack[-1]: + indentStack.append( curCol ) + else: + raise ParseException(s,l,"not a subentry") + + def checkUnindent(s,l,t): + if l >= len(s): return + curCol = col(l,s) + if not(indentStack and curCol < indentStack[-1] and curCol <= indentStack[-2]): + raise ParseException(s,l,"not an unindent") + indentStack.pop() + + NL = OneOrMore(LineEnd().setWhitespaceChars("\t ").suppress()) + INDENT = Empty() + Empty().setParseAction(checkSubIndent) + PEER = Empty().setParseAction(checkPeerIndent) + UNDENT = Empty().setParseAction(checkUnindent) + if indent: + smExpr = Group( Optional(NL) + + #~ FollowedBy(blockStatementExpr) + + INDENT + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) + UNDENT) + else: + smExpr = Group( Optional(NL) + + (OneOrMore( PEER + Group(blockStatementExpr) + Optional(NL) )) ) + blockStatementExpr.ignore(_bslash + LineEnd()) + return smExpr + +alphas8bit = srange(r"[\0xc0-\0xd6\0xd8-\0xf6\0xf8-\0xff]") +punc8bit = srange(r"[\0xa1-\0xbf\0xd7\0xf7]") + +anyOpenTag,anyCloseTag = makeHTMLTags(Word(alphas,alphanums+"_:")) +commonHTMLEntity = Combine(_L("&") + oneOf("gt lt amp nbsp quot").setResultsName("entity") +";").streamline() +_htmlEntityMap = dict(zip("gt lt amp nbsp quot".split(),'><& "')) +replaceHTMLEntity = lambda t : t.entity in _htmlEntityMap and _htmlEntityMap[t.entity] or None + +# it's easy to get these comment structures wrong - they're very common, so may as well make them available +cStyleComment = Regex(r"/\*(?:[^*]*\*+)+?/").setName("C style comment") + +htmlComment = Regex(r"<!--[\s\S]*?-->") +restOfLine = Regex(r".*").leaveWhitespace() +dblSlashComment = Regex(r"\/\/(\\\n|.)*").setName("// comment") +cppStyleComment = Regex(r"/(?:\*(?:[^*]*\*+)+?/|/[^\n]*(?:\n[^\n]*)*?(?:(?<!\\)|\Z))").setName("C++ style comment") + +javaStyleComment = cppStyleComment +pythonStyleComment = Regex(r"#.*").setName("Python style comment") +_commasepitem = Combine(OneOrMore(Word(printables, excludeChars=',') + + Optional( Word(" \t") + + ~Literal(",") + ~LineEnd() ) ) ).streamline().setName("commaItem") +commaSeparatedList = delimitedList( Optional( quotedString.copy() | _commasepitem, default="") ).setName("commaSeparatedList") + + +if __name__ == "__main__": + + selectToken = CaselessLiteral( "select" ) + fromToken = CaselessLiteral( "from" ) + + ident = Word( alphas, alphanums + "_$" ) + columnName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens ) + columnNameList = Group( delimitedList( columnName ) ).setName("columns") + tableName = delimitedList( ident, ".", combine=True ).setParseAction( upcaseTokens ) + tableNameList = Group( delimitedList( tableName ) ).setName("tables") + simpleSQL = ( selectToken + \ + ( '*' | columnNameList ).setResultsName( "columns" ) + \ + fromToken + \ + tableNameList.setResultsName( "tables" ) ) + + simpleSQL.runTests("""\ + SELECT * from XYZZY, ABC + select * from SYS.XYZZY + Select A from Sys.dual + Select AA,BB,CC from Sys.dual + Select A, B, C from Sys.dual + Select A, B, C from Sys.dual + Xelect A, B, C from Sys.dual + Select A, B, C frox Sys.dual + Select + Select ^^^ frox Sys.dual + Select A, B, C from Sys.dual, Table2""") + diff --git 
a/venv/lib/python3.5/site-packages/pkg_resources/_vendor/six.py b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/six.py new file mode 100644 index 0000000..190c023 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/_vendor/six.py @@ -0,0 +1,868 @@ +"""Utilities for writing code that runs on Python 2 and 3""" + +# Copyright (c) 2010-2015 Benjamin Peterson +# +# Permission is hereby granted, free of charge, to any person obtaining a copy +# of this software and associated documentation files (the "Software"), to deal +# in the Software without restriction, including without limitation the rights +# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +# copies of the Software, and to permit persons to whom the Software is +# furnished to do so, subject to the following conditions: +# +# The above copyright notice and this permission notice shall be included in all +# copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +# SOFTWARE. + +from __future__ import absolute_import + +import functools +import itertools +import operator +import sys +import types + +__author__ = "Benjamin Peterson <benjamin@python.org>" +__version__ = "1.10.0" + + +# Useful for very coarse version differentiation. +PY2 = sys.version_info[0] == 2 +PY3 = sys.version_info[0] == 3 +PY34 = sys.version_info[0:2] >= (3, 4) + +if PY3: + string_types = str, + integer_types = int, + class_types = type, + text_type = str + binary_type = bytes + + MAXSIZE = sys.maxsize +else: + string_types = basestring, + integer_types = (int, long) + class_types = (type, types.ClassType) + text_type = unicode + binary_type = str + + if sys.platform.startswith("java"): + # Jython always uses 32 bits. + MAXSIZE = int((1 << 31) - 1) + else: + # It's possible to have sizeof(long) != sizeof(Py_ssize_t). + class X(object): + + def __len__(self): + return 1 << 31 + try: + len(X()) + except OverflowError: + # 32-bit + MAXSIZE = int((1 << 31) - 1) + else: + # 64-bit + MAXSIZE = int((1 << 63) - 1) + del X + + +def _add_doc(func, doc): + """Add documentation to a function.""" + func.__doc__ = doc + + +def _import_module(name): + """Import module, returning the module after the last dot.""" + __import__(name) + return sys.modules[name] + + +class _LazyDescr(object): + + def __init__(self, name): + self.name = name + + def __get__(self, obj, tp): + result = self._resolve() + setattr(obj, self.name, result) # Invokes __set__. + try: + # This is a bit ugly, but it avoids running this again by + # removing this descriptor. 
+ delattr(obj.__class__, self.name) + except AttributeError: + pass + return result + + +class MovedModule(_LazyDescr): + + def __init__(self, name, old, new=None): + super(MovedModule, self).__init__(name) + if PY3: + if new is None: + new = name + self.mod = new + else: + self.mod = old + + def _resolve(self): + return _import_module(self.mod) + + def __getattr__(self, attr): + _module = self._resolve() + value = getattr(_module, attr) + setattr(self, attr, value) + return value + + +class _LazyModule(types.ModuleType): + + def __init__(self, name): + super(_LazyModule, self).__init__(name) + self.__doc__ = self.__class__.__doc__ + + def __dir__(self): + attrs = ["__doc__", "__name__"] + attrs += [attr.name for attr in self._moved_attributes] + return attrs + + # Subclasses should override this + _moved_attributes = [] + + +class MovedAttribute(_LazyDescr): + + def __init__(self, name, old_mod, new_mod, old_attr=None, new_attr=None): + super(MovedAttribute, self).__init__(name) + if PY3: + if new_mod is None: + new_mod = name + self.mod = new_mod + if new_attr is None: + if old_attr is None: + new_attr = name + else: + new_attr = old_attr + self.attr = new_attr + else: + self.mod = old_mod + if old_attr is None: + old_attr = name + self.attr = old_attr + + def _resolve(self): + module = _import_module(self.mod) + return getattr(module, self.attr) + + +class _SixMetaPathImporter(object): + + """ + A meta path importer to import six.moves and its submodules. + + This class implements a PEP302 finder and loader. It should be compatible + with Python 2.5 and all existing versions of Python3 + """ + + def __init__(self, six_module_name): + self.name = six_module_name + self.known_modules = {} + + def _add_module(self, mod, *fullnames): + for fullname in fullnames: + self.known_modules[self.name + "." + fullname] = mod + + def _get_module(self, fullname): + return self.known_modules[self.name + "." + fullname] + + def find_module(self, fullname, path=None): + if fullname in self.known_modules: + return self + return None + + def __get_module(self, fullname): + try: + return self.known_modules[fullname] + except KeyError: + raise ImportError("This loader does not know module " + fullname) + + def load_module(self, fullname): + try: + # in case of a reload + return sys.modules[fullname] + except KeyError: + pass + mod = self.__get_module(fullname) + if isinstance(mod, MovedModule): + mod = mod._resolve() + else: + mod.__loader__ = self + sys.modules[fullname] = mod + return mod + + def is_package(self, fullname): + """ + Return true, if the named module is a package. 
+ + We need this method to get correct spec objects with + Python 3.4 (see PEP451) + """ + return hasattr(self.__get_module(fullname), "__path__") + + def get_code(self, fullname): + """Return None + + Required, if is_package is implemented""" + self.__get_module(fullname) # eventually raises ImportError + return None + get_source = get_code # same as get_code + +_importer = _SixMetaPathImporter(__name__) + + +class _MovedItems(_LazyModule): + + """Lazy loading of moved objects""" + __path__ = [] # mark as package + + +_moved_attributes = [ + MovedAttribute("cStringIO", "cStringIO", "io", "StringIO"), + MovedAttribute("filter", "itertools", "builtins", "ifilter", "filter"), + MovedAttribute("filterfalse", "itertools", "itertools", "ifilterfalse", "filterfalse"), + MovedAttribute("input", "__builtin__", "builtins", "raw_input", "input"), + MovedAttribute("intern", "__builtin__", "sys"), + MovedAttribute("map", "itertools", "builtins", "imap", "map"), + MovedAttribute("getcwd", "os", "os", "getcwdu", "getcwd"), + MovedAttribute("getcwdb", "os", "os", "getcwd", "getcwdb"), + MovedAttribute("range", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("reload_module", "__builtin__", "importlib" if PY34 else "imp", "reload"), + MovedAttribute("reduce", "__builtin__", "functools"), + MovedAttribute("shlex_quote", "pipes", "shlex", "quote"), + MovedAttribute("StringIO", "StringIO", "io"), + MovedAttribute("UserDict", "UserDict", "collections"), + MovedAttribute("UserList", "UserList", "collections"), + MovedAttribute("UserString", "UserString", "collections"), + MovedAttribute("xrange", "__builtin__", "builtins", "xrange", "range"), + MovedAttribute("zip", "itertools", "builtins", "izip", "zip"), + MovedAttribute("zip_longest", "itertools", "itertools", "izip_longest", "zip_longest"), + MovedModule("builtins", "__builtin__"), + MovedModule("configparser", "ConfigParser"), + MovedModule("copyreg", "copy_reg"), + MovedModule("dbm_gnu", "gdbm", "dbm.gnu"), + MovedModule("_dummy_thread", "dummy_thread", "_dummy_thread"), + MovedModule("http_cookiejar", "cookielib", "http.cookiejar"), + MovedModule("http_cookies", "Cookie", "http.cookies"), + MovedModule("html_entities", "htmlentitydefs", "html.entities"), + MovedModule("html_parser", "HTMLParser", "html.parser"), + MovedModule("http_client", "httplib", "http.client"), + MovedModule("email_mime_multipart", "email.MIMEMultipart", "email.mime.multipart"), + MovedModule("email_mime_nonmultipart", "email.MIMENonMultipart", "email.mime.nonmultipart"), + MovedModule("email_mime_text", "email.MIMEText", "email.mime.text"), + MovedModule("email_mime_base", "email.MIMEBase", "email.mime.base"), + MovedModule("BaseHTTPServer", "BaseHTTPServer", "http.server"), + MovedModule("CGIHTTPServer", "CGIHTTPServer", "http.server"), + MovedModule("SimpleHTTPServer", "SimpleHTTPServer", "http.server"), + MovedModule("cPickle", "cPickle", "pickle"), + MovedModule("queue", "Queue"), + MovedModule("reprlib", "repr"), + MovedModule("socketserver", "SocketServer"), + MovedModule("_thread", "thread", "_thread"), + MovedModule("tkinter", "Tkinter"), + MovedModule("tkinter_dialog", "Dialog", "tkinter.dialog"), + MovedModule("tkinter_filedialog", "FileDialog", "tkinter.filedialog"), + MovedModule("tkinter_scrolledtext", "ScrolledText", "tkinter.scrolledtext"), + MovedModule("tkinter_simpledialog", "SimpleDialog", "tkinter.simpledialog"), + MovedModule("tkinter_tix", "Tix", "tkinter.tix"), + MovedModule("tkinter_ttk", "ttk", "tkinter.ttk"), + 
MovedModule("tkinter_constants", "Tkconstants", "tkinter.constants"), + MovedModule("tkinter_dnd", "Tkdnd", "tkinter.dnd"), + MovedModule("tkinter_colorchooser", "tkColorChooser", + "tkinter.colorchooser"), + MovedModule("tkinter_commondialog", "tkCommonDialog", + "tkinter.commondialog"), + MovedModule("tkinter_tkfiledialog", "tkFileDialog", "tkinter.filedialog"), + MovedModule("tkinter_font", "tkFont", "tkinter.font"), + MovedModule("tkinter_messagebox", "tkMessageBox", "tkinter.messagebox"), + MovedModule("tkinter_tksimpledialog", "tkSimpleDialog", + "tkinter.simpledialog"), + MovedModule("urllib_parse", __name__ + ".moves.urllib_parse", "urllib.parse"), + MovedModule("urllib_error", __name__ + ".moves.urllib_error", "urllib.error"), + MovedModule("urllib", __name__ + ".moves.urllib", __name__ + ".moves.urllib"), + MovedModule("urllib_robotparser", "robotparser", "urllib.robotparser"), + MovedModule("xmlrpc_client", "xmlrpclib", "xmlrpc.client"), + MovedModule("xmlrpc_server", "SimpleXMLRPCServer", "xmlrpc.server"), +] +# Add windows specific modules. +if sys.platform == "win32": + _moved_attributes += [ + MovedModule("winreg", "_winreg"), + ] + +for attr in _moved_attributes: + setattr(_MovedItems, attr.name, attr) + if isinstance(attr, MovedModule): + _importer._add_module(attr, "moves." + attr.name) +del attr + +_MovedItems._moved_attributes = _moved_attributes + +moves = _MovedItems(__name__ + ".moves") +_importer._add_module(moves, "moves") + + +class Module_six_moves_urllib_parse(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_parse""" + + +_urllib_parse_moved_attributes = [ + MovedAttribute("ParseResult", "urlparse", "urllib.parse"), + MovedAttribute("SplitResult", "urlparse", "urllib.parse"), + MovedAttribute("parse_qs", "urlparse", "urllib.parse"), + MovedAttribute("parse_qsl", "urlparse", "urllib.parse"), + MovedAttribute("urldefrag", "urlparse", "urllib.parse"), + MovedAttribute("urljoin", "urlparse", "urllib.parse"), + MovedAttribute("urlparse", "urlparse", "urllib.parse"), + MovedAttribute("urlsplit", "urlparse", "urllib.parse"), + MovedAttribute("urlunparse", "urlparse", "urllib.parse"), + MovedAttribute("urlunsplit", "urlparse", "urllib.parse"), + MovedAttribute("quote", "urllib", "urllib.parse"), + MovedAttribute("quote_plus", "urllib", "urllib.parse"), + MovedAttribute("unquote", "urllib", "urllib.parse"), + MovedAttribute("unquote_plus", "urllib", "urllib.parse"), + MovedAttribute("urlencode", "urllib", "urllib.parse"), + MovedAttribute("splitquery", "urllib", "urllib.parse"), + MovedAttribute("splittag", "urllib", "urllib.parse"), + MovedAttribute("splituser", "urllib", "urllib.parse"), + MovedAttribute("uses_fragment", "urlparse", "urllib.parse"), + MovedAttribute("uses_netloc", "urlparse", "urllib.parse"), + MovedAttribute("uses_params", "urlparse", "urllib.parse"), + MovedAttribute("uses_query", "urlparse", "urllib.parse"), + MovedAttribute("uses_relative", "urlparse", "urllib.parse"), +] +for attr in _urllib_parse_moved_attributes: + setattr(Module_six_moves_urllib_parse, attr.name, attr) +del attr + +Module_six_moves_urllib_parse._moved_attributes = _urllib_parse_moved_attributes + +_importer._add_module(Module_six_moves_urllib_parse(__name__ + ".moves.urllib_parse"), + "moves.urllib_parse", "moves.urllib.parse") + + +class Module_six_moves_urllib_error(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_error""" + + +_urllib_error_moved_attributes = [ + MovedAttribute("URLError", "urllib2", "urllib.error"), + 
MovedAttribute("HTTPError", "urllib2", "urllib.error"), + MovedAttribute("ContentTooShortError", "urllib", "urllib.error"), +] +for attr in _urllib_error_moved_attributes: + setattr(Module_six_moves_urllib_error, attr.name, attr) +del attr + +Module_six_moves_urllib_error._moved_attributes = _urllib_error_moved_attributes + +_importer._add_module(Module_six_moves_urllib_error(__name__ + ".moves.urllib.error"), + "moves.urllib_error", "moves.urllib.error") + + +class Module_six_moves_urllib_request(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_request""" + + +_urllib_request_moved_attributes = [ + MovedAttribute("urlopen", "urllib2", "urllib.request"), + MovedAttribute("install_opener", "urllib2", "urllib.request"), + MovedAttribute("build_opener", "urllib2", "urllib.request"), + MovedAttribute("pathname2url", "urllib", "urllib.request"), + MovedAttribute("url2pathname", "urllib", "urllib.request"), + MovedAttribute("getproxies", "urllib", "urllib.request"), + MovedAttribute("Request", "urllib2", "urllib.request"), + MovedAttribute("OpenerDirector", "urllib2", "urllib.request"), + MovedAttribute("HTTPDefaultErrorHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPRedirectHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPCookieProcessor", "urllib2", "urllib.request"), + MovedAttribute("ProxyHandler", "urllib2", "urllib.request"), + MovedAttribute("BaseHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgr", "urllib2", "urllib.request"), + MovedAttribute("HTTPPasswordMgrWithDefaultRealm", "urllib2", "urllib.request"), + MovedAttribute("AbstractBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyBasicAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("AbstractDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("ProxyDigestAuthHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPSHandler", "urllib2", "urllib.request"), + MovedAttribute("FileHandler", "urllib2", "urllib.request"), + MovedAttribute("FTPHandler", "urllib2", "urllib.request"), + MovedAttribute("CacheFTPHandler", "urllib2", "urllib.request"), + MovedAttribute("UnknownHandler", "urllib2", "urllib.request"), + MovedAttribute("HTTPErrorProcessor", "urllib2", "urllib.request"), + MovedAttribute("urlretrieve", "urllib", "urllib.request"), + MovedAttribute("urlcleanup", "urllib", "urllib.request"), + MovedAttribute("URLopener", "urllib", "urllib.request"), + MovedAttribute("FancyURLopener", "urllib", "urllib.request"), + MovedAttribute("proxy_bypass", "urllib", "urllib.request"), +] +for attr in _urllib_request_moved_attributes: + setattr(Module_six_moves_urllib_request, attr.name, attr) +del attr + +Module_six_moves_urllib_request._moved_attributes = _urllib_request_moved_attributes + +_importer._add_module(Module_six_moves_urllib_request(__name__ + ".moves.urllib.request"), + "moves.urllib_request", "moves.urllib.request") + + +class Module_six_moves_urllib_response(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_response""" + + +_urllib_response_moved_attributes = [ + MovedAttribute("addbase", "urllib", "urllib.response"), + MovedAttribute("addclosehook", "urllib", "urllib.response"), + MovedAttribute("addinfo", "urllib", "urllib.response"), + MovedAttribute("addinfourl", "urllib", 
"urllib.response"), +] +for attr in _urllib_response_moved_attributes: + setattr(Module_six_moves_urllib_response, attr.name, attr) +del attr + +Module_six_moves_urllib_response._moved_attributes = _urllib_response_moved_attributes + +_importer._add_module(Module_six_moves_urllib_response(__name__ + ".moves.urllib.response"), + "moves.urllib_response", "moves.urllib.response") + + +class Module_six_moves_urllib_robotparser(_LazyModule): + + """Lazy loading of moved objects in six.moves.urllib_robotparser""" + + +_urllib_robotparser_moved_attributes = [ + MovedAttribute("RobotFileParser", "robotparser", "urllib.robotparser"), +] +for attr in _urllib_robotparser_moved_attributes: + setattr(Module_six_moves_urllib_robotparser, attr.name, attr) +del attr + +Module_six_moves_urllib_robotparser._moved_attributes = _urllib_robotparser_moved_attributes + +_importer._add_module(Module_six_moves_urllib_robotparser(__name__ + ".moves.urllib.robotparser"), + "moves.urllib_robotparser", "moves.urllib.robotparser") + + +class Module_six_moves_urllib(types.ModuleType): + + """Create a six.moves.urllib namespace that resembles the Python 3 namespace""" + __path__ = [] # mark as package + parse = _importer._get_module("moves.urllib_parse") + error = _importer._get_module("moves.urllib_error") + request = _importer._get_module("moves.urllib_request") + response = _importer._get_module("moves.urllib_response") + robotparser = _importer._get_module("moves.urllib_robotparser") + + def __dir__(self): + return ['parse', 'error', 'request', 'response', 'robotparser'] + +_importer._add_module(Module_six_moves_urllib(__name__ + ".moves.urllib"), + "moves.urllib") + + +def add_move(move): + """Add an item to six.moves.""" + setattr(_MovedItems, move.name, move) + + +def remove_move(name): + """Remove item from six.moves.""" + try: + delattr(_MovedItems, name) + except AttributeError: + try: + del moves.__dict__[name] + except KeyError: + raise AttributeError("no such move, %r" % (name,)) + + +if PY3: + _meth_func = "__func__" + _meth_self = "__self__" + + _func_closure = "__closure__" + _func_code = "__code__" + _func_defaults = "__defaults__" + _func_globals = "__globals__" +else: + _meth_func = "im_func" + _meth_self = "im_self" + + _func_closure = "func_closure" + _func_code = "func_code" + _func_defaults = "func_defaults" + _func_globals = "func_globals" + + +try: + advance_iterator = next +except NameError: + def advance_iterator(it): + return it.next() +next = advance_iterator + + +try: + callable = callable +except NameError: + def callable(obj): + return any("__call__" in klass.__dict__ for klass in type(obj).__mro__) + + +if PY3: + def get_unbound_function(unbound): + return unbound + + create_bound_method = types.MethodType + + def create_unbound_method(func, cls): + return func + + Iterator = object +else: + def get_unbound_function(unbound): + return unbound.im_func + + def create_bound_method(func, obj): + return types.MethodType(func, obj, obj.__class__) + + def create_unbound_method(func, cls): + return types.MethodType(func, None, cls) + + class Iterator(object): + + def next(self): + return type(self).__next__(self) + + callable = callable +_add_doc(get_unbound_function, + """Get the function out of a possibly unbound function""") + + +get_method_function = operator.attrgetter(_meth_func) +get_method_self = operator.attrgetter(_meth_self) +get_function_closure = operator.attrgetter(_func_closure) +get_function_code = operator.attrgetter(_func_code) +get_function_defaults = 
operator.attrgetter(_func_defaults) +get_function_globals = operator.attrgetter(_func_globals) + + +if PY3: + def iterkeys(d, **kw): + return iter(d.keys(**kw)) + + def itervalues(d, **kw): + return iter(d.values(**kw)) + + def iteritems(d, **kw): + return iter(d.items(**kw)) + + def iterlists(d, **kw): + return iter(d.lists(**kw)) + + viewkeys = operator.methodcaller("keys") + + viewvalues = operator.methodcaller("values") + + viewitems = operator.methodcaller("items") +else: + def iterkeys(d, **kw): + return d.iterkeys(**kw) + + def itervalues(d, **kw): + return d.itervalues(**kw) + + def iteritems(d, **kw): + return d.iteritems(**kw) + + def iterlists(d, **kw): + return d.iterlists(**kw) + + viewkeys = operator.methodcaller("viewkeys") + + viewvalues = operator.methodcaller("viewvalues") + + viewitems = operator.methodcaller("viewitems") + +_add_doc(iterkeys, "Return an iterator over the keys of a dictionary.") +_add_doc(itervalues, "Return an iterator over the values of a dictionary.") +_add_doc(iteritems, + "Return an iterator over the (key, value) pairs of a dictionary.") +_add_doc(iterlists, + "Return an iterator over the (key, [values]) pairs of a dictionary.") + + +if PY3: + def b(s): + return s.encode("latin-1") + + def u(s): + return s + unichr = chr + import struct + int2byte = struct.Struct(">B").pack + del struct + byte2int = operator.itemgetter(0) + indexbytes = operator.getitem + iterbytes = iter + import io + StringIO = io.StringIO + BytesIO = io.BytesIO + _assertCountEqual = "assertCountEqual" + if sys.version_info[1] <= 1: + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" + else: + _assertRaisesRegex = "assertRaisesRegex" + _assertRegex = "assertRegex" +else: + def b(s): + return s + # Workaround for standalone backslash + + def u(s): + return unicode(s.replace(r'\\', r'\\\\'), "unicode_escape") + unichr = unichr + int2byte = chr + + def byte2int(bs): + return ord(bs[0]) + + def indexbytes(buf, i): + return ord(buf[i]) + iterbytes = functools.partial(itertools.imap, ord) + import StringIO + StringIO = BytesIO = StringIO.StringIO + _assertCountEqual = "assertItemsEqual" + _assertRaisesRegex = "assertRaisesRegexp" + _assertRegex = "assertRegexpMatches" +_add_doc(b, """Byte literal""") +_add_doc(u, """Text literal""") + + +def assertCountEqual(self, *args, **kwargs): + return getattr(self, _assertCountEqual)(*args, **kwargs) + + +def assertRaisesRegex(self, *args, **kwargs): + return getattr(self, _assertRaisesRegex)(*args, **kwargs) + + +def assertRegex(self, *args, **kwargs): + return getattr(self, _assertRegex)(*args, **kwargs) + + +if PY3: + exec_ = getattr(moves.builtins, "exec") + + def reraise(tp, value, tb=None): + if value is None: + value = tp() + if value.__traceback__ is not tb: + raise value.with_traceback(tb) + raise value + +else: + def exec_(_code_, _globs_=None, _locs_=None): + """Execute code in a namespace.""" + if _globs_ is None: + frame = sys._getframe(1) + _globs_ = frame.f_globals + if _locs_ is None: + _locs_ = frame.f_locals + del frame + elif _locs_ is None: + _locs_ = _globs_ + exec("""exec _code_ in _globs_, _locs_""") + + exec_("""def reraise(tp, value, tb=None): + raise tp, value, tb +""") + + +if sys.version_info[:2] == (3, 2): + exec_("""def raise_from(value, from_value): + if from_value is None: + raise value + raise value from from_value +""") +elif sys.version_info[:2] > (3, 2): + exec_("""def raise_from(value, from_value): + raise value from from_value +""") +else: + def raise_from(value, from_value): + 
raise value + + +print_ = getattr(moves.builtins, "print", None) +if print_ is None: + def print_(*args, **kwargs): + """The new-style print function for Python 2.4 and 2.5.""" + fp = kwargs.pop("file", sys.stdout) + if fp is None: + return + + def write(data): + if not isinstance(data, basestring): + data = str(data) + # If the file has an encoding, encode unicode with it. + if (isinstance(fp, file) and + isinstance(data, unicode) and + fp.encoding is not None): + errors = getattr(fp, "errors", None) + if errors is None: + errors = "strict" + data = data.encode(fp.encoding, errors) + fp.write(data) + want_unicode = False + sep = kwargs.pop("sep", None) + if sep is not None: + if isinstance(sep, unicode): + want_unicode = True + elif not isinstance(sep, str): + raise TypeError("sep must be None or a string") + end = kwargs.pop("end", None) + if end is not None: + if isinstance(end, unicode): + want_unicode = True + elif not isinstance(end, str): + raise TypeError("end must be None or a string") + if kwargs: + raise TypeError("invalid keyword arguments to print()") + if not want_unicode: + for arg in args: + if isinstance(arg, unicode): + want_unicode = True + break + if want_unicode: + newline = unicode("\n") + space = unicode(" ") + else: + newline = "\n" + space = " " + if sep is None: + sep = space + if end is None: + end = newline + for i, arg in enumerate(args): + if i: + write(sep) + write(arg) + write(end) +if sys.version_info[:2] < (3, 3): + _print = print_ + + def print_(*args, **kwargs): + fp = kwargs.get("file", sys.stdout) + flush = kwargs.pop("flush", False) + _print(*args, **kwargs) + if flush and fp is not None: + fp.flush() + +_add_doc(reraise, """Reraise an exception.""") + +if sys.version_info[0:2] < (3, 4): + def wraps(wrapped, assigned=functools.WRAPPER_ASSIGNMENTS, + updated=functools.WRAPPER_UPDATES): + def wrapper(f): + f = functools.wraps(wrapped, assigned, updated)(f) + f.__wrapped__ = wrapped + return f + return wrapper +else: + wraps = functools.wraps + + +def with_metaclass(meta, *bases): + """Create a base class with a metaclass.""" + # This requires a bit of explanation: the basic idea is to make a dummy + # metaclass for one level of class instantiation that replaces itself with + # the actual metaclass. + class metaclass(meta): + + def __new__(cls, name, this_bases, d): + return meta(name, bases, d) + return type.__new__(metaclass, 'temporary_class', (), {}) + + +def add_metaclass(metaclass): + """Class decorator for creating a class with a metaclass.""" + def wrapper(cls): + orig_vars = cls.__dict__.copy() + slots = orig_vars.get('__slots__') + if slots is not None: + if isinstance(slots, str): + slots = [slots] + for slots_var in slots: + orig_vars.pop(slots_var) + orig_vars.pop('__dict__', None) + orig_vars.pop('__weakref__', None) + return metaclass(cls.__name__, cls.__bases__, orig_vars) + return wrapper + + +def python_2_unicode_compatible(klass): + """ + A decorator that defines __unicode__ and __str__ methods under Python 2. + Under Python 3 it does nothing. + + To support Python 2 and 3 with a single code base, define a __str__ method + returning text and apply this decorator to the class. + """ + if PY2: + if '__str__' not in klass.__dict__: + raise ValueError("@python_2_unicode_compatible cannot be applied " + "to %s because it doesn't define __str__()." % + klass.__name__) + klass.__unicode__ = klass.__str__ + klass.__str__ = lambda self: self.__unicode__().encode('utf-8') + return klass + + +# Complete the moves implementation. 
+# This code is at the end of this module to speed up module loading. +# Turn this module into a package. +__path__ = [] # required for PEP 302 and PEP 451 +__package__ = __name__ # see PEP 366 @ReservedAssignment +if globals().get("__spec__") is not None: + __spec__.submodule_search_locations = [] # PEP 451 @UndefinedVariable +# Remove other six meta path importers, since they cause problems. This can +# happen if six is removed from sys.modules and then reloaded. (Setuptools does +# this for some reason.) +if sys.meta_path: + for i, importer in enumerate(sys.meta_path): + # Here's some real nastiness: Another "instance" of the six module might + # be floating around. Therefore, we can't use isinstance() to check for + # the six meta path importer, since the other six instance will have + # inserted an importer with different class. + if (type(importer).__name__ == "_SixMetaPathImporter" and + importer.name == __name__): + del sys.meta_path[i] + break + del i, importer +# Finally, add the importer to the meta path import hook. +sys.meta_path.append(_importer) diff --git a/venv/lib/python3.5/site-packages/pkg_resources/extern/__init__.py b/venv/lib/python3.5/site-packages/pkg_resources/extern/__init__.py new file mode 100644 index 0000000..6758d36 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pkg_resources/extern/__init__.py @@ -0,0 +1,71 @@ +import sys + + +class VendorImporter: + """ + A PEP 302 meta path importer for finding optionally-vendored + or otherwise naturally-installed packages from root_name. + """ + def __init__(self, root_name, vendored_names=(), vendor_pkg=None): + self.root_name = root_name + self.vendored_names = set(vendored_names) + self.vendor_pkg = vendor_pkg or root_name.replace('extern', '_vendor') + + @property + def search_path(self): + """ + Search first the vendor package then as a natural package. + """ + yield self.vendor_pkg + '.' + yield '' + + def find_module(self, fullname, path=None): + """ + Return self when fullname starts with root_name and the + target module is one vendored through this importer. + """ + root, base, target = fullname.partition(self.root_name + '.') + if root: + return + if not any(map(target.startswith, self.vendored_names)): + return + return self + + def load_module(self, fullname): + """ + Iterate over the search path to locate and load fullname. + """ + root, base, target = fullname.partition(self.root_name + '.') + for prefix in self.search_path: + try: + extant = prefix + target + __import__(extant) + mod = sys.modules[extant] + sys.modules[fullname] = mod + # mysterious hack: + # Remove the reference to the extant package/module + # on later Python versions to cause relative imports + # in the vendor package to resolve the same modules + # as those going through this importer. + if sys.version_info > (3, 3): + del sys.modules[extant] + return mod + except ImportError: + pass + else: + raise ImportError( + "The '{target}' package is required; " + "normally this is bundled with this package so if you get " + "this warning, consult the packager of your " + "distribution.".format(**locals()) + ) + + def install(self): + """ + Install this importer into sys.meta_path if not already present. 
+ """ + if self not in sys.meta_path: + sys.meta_path.append(self) + +names = 'packaging', 'pyparsing', 'six' +VendorImporter(__name__, names).install() diff --git a/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/DESCRIPTION.rst b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..d2a2562 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/DESCRIPTION.rst @@ -0,0 +1,23 @@ +.. image:: https://img.shields.io/pypi/pyversions/pytest.svg + :target: https://pypi.org/project/py +.. image:: https://img.shields.io/travis/pytest-dev/py.svg + :target: https://travis-ci.org/pytest-dev/py + +The py lib is a Python development support library featuring +the following tools and modules: + +* ``py.path``: uniform local and svn path objects +* ``py.apipkg``: explicit API control and lazy-importing +* ``py.iniconfig``: easy parsing of .ini files +* ``py.code``: dynamic code generation and introspection + +NOTE: prior to the 1.4 release this distribution used to +contain py.test which is now its own package, see http://pytest.org + +For questions and more information please visit http://py.readthedocs.org + +Bugs and issues: https://github.com/pytest-dev/py + +Authors: Holger Krekel and others, 2004-2017 + + diff --git a/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/INSTALLER b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/LICENSE.txt b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/LICENSE.txt new file mode 100644 index 0000000..790c261 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/LICENSE.txt @@ -0,0 +1,19 @@ + + Permission is hereby granted, free of charge, to any person obtaining a copy + of this software and associated documentation files (the "Software"), to deal + in the Software without restriction, including without limitation the rights + to use, copy, modify, merge, publish, distribute, sublicense, and/or sell + copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + + The above copyright notice and this permission notice shall be included in all + copies or substantial portions of the Software. + + THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, + OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE + SOFTWARE. 
+ diff --git a/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/METADATA b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/METADATA new file mode 100644 index 0000000..6bb4c3a --- /dev/null +++ b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/METADATA @@ -0,0 +1,48 @@ +Metadata-Version: 2.0 +Name: py +Version: 1.4.34 +Summary: library with cross-python path, ini-parsing, io, code, log facilities +Home-page: http://py.readthedocs.io/ +Author: holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others +Author-email: pytest-dev@python.org +License: MIT license +Platform: unix +Platform: linux +Platform: osx +Platform: cygwin +Platform: win32 +Classifier: Development Status :: 6 - Mature +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: POSIX +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Topic :: Software Development :: Testing +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Classifier: Programming Language :: Python +Classifier: Programming Language :: Python :: 3 + +.. image:: https://img.shields.io/pypi/pyversions/pytest.svg + :target: https://pypi.org/project/py +.. image:: https://img.shields.io/travis/pytest-dev/py.svg + :target: https://travis-ci.org/pytest-dev/py + +The py lib is a Python development support library featuring +the following tools and modules: + +* ``py.path``: uniform local and svn path objects +* ``py.apipkg``: explicit API control and lazy-importing +* ``py.iniconfig``: easy parsing of .ini files +* ``py.code``: dynamic code generation and introspection + +NOTE: prior to the 1.4 release this distribution used to +contain py.test which is now its own package, see http://pytest.org + +For questions and more information please visit http://py.readthedocs.org + +Bugs and issues: https://github.com/pytest-dev/py + +Authors: Holger Krekel and others, 2004-2017 + + diff --git a/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/RECORD b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/RECORD new file mode 100644 index 0000000..627ccca --- /dev/null +++ b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/RECORD @@ -0,0 +1,74 @@ +py/__init__.py,sha256=5J0eNhSXu47etaQMA46OQGCT8txvP2UZirz92LoZgRQ,6080 +py/__metainfo.py,sha256=pFMfAGHL6CyKiBq8B04_cWYsy3NTT0Zni0zkEDKCcI4,57 +py/_apipkg.py,sha256=Ei3D8e5ejPQWBfxjHNFX95D7B1ZGug1zL8W42FR1-_E,6036 +py/_builtin.py,sha256=7o0W5kXt4_61w7OWYx_-iUmm9xSnFAnzaoMOp7QXZEc,6769 +py/_error.py,sha256=e5KUTfCaRJNP_g4jqR1X4Q3oS8FBFI-QpcyErV9j4eI,2928 +py/_iniconfig.py,sha256=1nQ-98BJOBOHGkp-mIGGABWVXVg_jYQ4Lz-_WQRlt_Q,5355 +py/_std.py,sha256=un9XhDfAgY3bYbnuYwK-L2iC0lLZ95Qqh-ss_e7Cvm4,433 +py/_xmlgen.py,sha256=5qTsQ8g531nIl3lOA4q80YbEZcCLVWQOCsezRplYlOs,8619 +py/test.py,sha256=q-TVyj_8W-ZB6d1gp_CRjZyqvSYV2foxQRbcUoLjJJw,232 +py/_code/__init__.py,sha256=DQ0hvCWtOfRLxmV6FnAWycsvnoY6dnfwEFMy73S8kYg,47 +py/_code/_assertionnew.py,sha256=1Ix0P-QhrmmK8-qqBChgiJqFLlI_4Ekgv29vxkljHdw,12723 +py/_code/_assertionold.py,sha256=5wmmUaJtZ43sM1_8VT0un2At2NsXKT24bOXh92zErg4,18418 +py/_code/_py2traceback.py,sha256=vb-2kfq-StMaG2w6bfw3mPifcmF7pn4uRl1rgENgn0I,2844 +py/_code/assertion.py,sha256=SwhL9xhOf7FspLTHRht6AJp2NIC8gzg1pvE90RjyXIY,3381 +py/_code/code.py,sha256=tJ_KbpwhNMN9Z5AQNm6JzfsUbo0YJiFmDFLFv7MQVVw,28228 +py/_code/source.py,sha256=HbcZkCBYFhIBw1rIShtuYrQ6kSBcc4JVFStGTzk7Ifc,14525 
+py/_io/__init__.py,sha256=TdnZK_eUMWsEURzXWVmTnOMixTHimK6yHuUS9On88DM,30 +py/_io/capture.py,sha256=6qIK9HoVRrfI6VJS61280FVNezNhFrtzp_tPhk20HjE,12011 +py/_io/saferepr.py,sha256=L37QomdlZnXqE208xRCi1yiJ_vsT0POoY-IyYg5c1a4,2554 +py/_io/terminalwriter.py,sha256=YP7FFA4f8jdosCt0INiKNHVer17S1eByV4mhD6AZCLw,13106 +py/_log/__init__.py,sha256=rxgcYPMS-fOXRaOQIJRDqEO3ziyW-91NhFokzdDkb_M,76 +py/_log/log.py,sha256=PnEto-RGnNYZJ9QdOThG_fxhtnQOqjI3KkH2xt95A9E,6081 +py/_log/warning.py,sha256=7VeifGAU2GgcLbU9lQwO7yu7JHA-JepFXaJx656MMcY,2618 +py/_path/__init__.py,sha256=1dMOH9zuZQV4_Q2kMOHtjA4heUqM2gVDDSOJ_7vTfNQ,33 +py/_path/cacheutil.py,sha256=gWwxwfUCQWGgFPT9tsMaz-uomjiRparhxO84HzSv6ok,3447 +py/_path/common.py,sha256=CPZMuH3ftV8WDHvGJU_HBMly9-3upWmKB6RgRg3e8RU,14844 +py/_path/local.py,sha256=howBO3uiN37OIYNChvtRZf1XGobggKtxA4460WC0FZI,33568 +py/_path/svnurl.py,sha256=w_hTOYIgOEIKe7k4EfKTrT6ns0mQROm0jbtEb8FvHpo,15095 +py/_path/svnwc.py,sha256=_YN4XmbFeox5PqiASV45iHTToOvjLpBjU9A7_r76NRo,45089 +py/_process/__init__.py,sha256=7wR_PgdLZx16epAJB09JOhWw-RAZBlOaSy_OctdsD4c,41 +py/_process/cmdexec.py,sha256=unIPyZXQcGF_wHZBZncmQhSSR-aaiOWESYsxWOD35qU,1863 +py/_process/forkedfunc.py,sha256=SaLmnA3z3eKftQ4vrNUkGUZJb3fY29OJvB3pB_I-z8c,3812 +py/_process/killproc.py,sha256=vNcOjh7eTRKZMd5BzwGJGb6eqrk8HgzXLDB13SZm05c,671 +py-1.4.34.dist-info/DESCRIPTION.rst,sha256=6tYfJwotn446mSdFV5pnM2hF4e01sxroGXloz0-a3sg,817 +py-1.4.34.dist-info/LICENSE.txt,sha256=lzT2iwmQMhJkdHxOoJR_hS0kgPQRX2RJzjv7_aF32OM,1080 +py-1.4.34.dist-info/METADATA,sha256=5iQQQWCijdAETfcmqbu471UixH7vrx1F5Vij97daKos,1698 +py-1.4.34.dist-info/RECORD,, +py-1.4.34.dist-info/WHEEL,sha256=5f-Lb0dq-Ei5P75pgK065AnrFl2dK_ZqskZBpCbWERA,116 +py-1.4.34.dist-info/metadata.json,sha256=feWylPGwn_KxDvtVimq51RAQd8uH9Go2cE7GoavlQDI,971 +py-1.4.34.dist-info/top_level.txt,sha256=rwh8_ukTaGscjyhGkBVcsGOMdc-Cfdz2QH7BKGENv-4,3 +py-1.4.34.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +py/__pycache__/_builtin.cpython-35.pyc,, +py/_io/__pycache__/capture.cpython-35.pyc,, +py/_path/__pycache__/common.cpython-35.pyc,, +py/__pycache__/_std.cpython-35.pyc,, +py/_io/__pycache__/terminalwriter.cpython-35.pyc,, +py/_log/__pycache__/__init__.cpython-35.pyc,, +py/_path/__pycache__/cacheutil.cpython-35.pyc,, +py/_code/__pycache__/assertion.cpython-35.pyc,, +py/__pycache__/_iniconfig.cpython-35.pyc,, +py/_log/__pycache__/log.cpython-35.pyc,, +py/_code/__pycache__/__init__.cpython-35.pyc,, +py/_log/__pycache__/warning.cpython-35.pyc,, +py/__pycache__/test.cpython-35.pyc,, +py/_process/__pycache__/killproc.cpython-35.pyc,, +py/_process/__pycache__/__init__.cpython-35.pyc,, +py/__pycache__/_error.cpython-35.pyc,, +py/__pycache__/__metainfo.cpython-35.pyc,, +py/_code/__pycache__/source.cpython-35.pyc,, +py/_path/__pycache__/local.cpython-35.pyc,, +py/_code/__pycache__/_assertionnew.cpython-35.pyc,, +py/_process/__pycache__/forkedfunc.cpython-35.pyc,, +py/_path/__pycache__/svnurl.cpython-35.pyc,, +py/__pycache__/_apipkg.cpython-35.pyc,, +py/_code/__pycache__/_py2traceback.cpython-35.pyc,, +py/_code/__pycache__/_assertionold.cpython-35.pyc,, +py/__pycache__/_xmlgen.cpython-35.pyc,, +py/_io/__pycache__/__init__.cpython-35.pyc,, +py/_code/__pycache__/code.cpython-35.pyc,, +py/_io/__pycache__/saferepr.cpython-35.pyc,, +py/_path/__pycache__/__init__.cpython-35.pyc,, +py/_process/__pycache__/cmdexec.cpython-35.pyc,, +py/__pycache__/__init__.cpython-35.pyc,, +py/_path/__pycache__/svnwc.cpython-35.pyc,, diff --git a/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/WHEEL 
b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/WHEEL new file mode 100644 index 0000000..4d519f1 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.26.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/metadata.json b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/metadata.json new file mode 100644 index 0000000..cd3b8a8 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/metadata.json @@ -0,0 +1 @@ +{"generator": "bdist_wheel (0.26.0)", "summary": "library with cross-python path, ini-parsing, io, code, log facilities", "classifiers": ["Development Status :: 6 - Mature", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Testing", "Topic :: Software Development :: Libraries", "Topic :: Utilities", "Programming Language :: Python", "Programming Language :: Python :: 3"], "extensions": {"python.details": {"project_urls": {"Home": "http://py.readthedocs.io/"}, "contacts": [{"email": "pytest-dev@python.org", "name": "holger krekel, Ronny Pfannschmidt, Benjamin Peterson and others", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}}}, "license": "MIT license", "metadata_version": "2.0", "name": "py", "platform": "unix", "version": "1.4.34"} \ No newline at end of file diff --git a/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/top_level.txt b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/top_level.txt new file mode 100644 index 0000000..edfce78 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py-1.4.34.dist-info/top_level.txt @@ -0,0 +1 @@ +py diff --git a/venv/lib/python3.5/site-packages/py/__init__.py b/venv/lib/python3.5/site-packages/py/__init__.py new file mode 100644 index 0000000..4a2b6fa --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/__init__.py @@ -0,0 +1,152 @@ +""" +pylib: rapid testing and development utils + +this module uses apipkg.py for lazy-loading sub modules +and classes. The initpkg-dictionary below specifies +name->value mappings where value can be another namespace +dictionary or an import path. 
+ +(c) Holger Krekel and others, 2004-2014 +""" +__version__ = '1.4.34' + +from py import _apipkg + +# so that py.error.* instances are picklable +import sys +sys.modules['py.error'] = _apipkg.AliasModule("py.error", "py._error", 'error') +import py.error # "Dereference" it now just to be safe (issue110) + + +_apipkg.initpkg(__name__, attr={'_apipkg': _apipkg}, exportdefs={ + # access to all standard lib modules + 'std': '._std:std', + # access to all posix errno's as classes + 'error': '._error:error', + + '_pydir' : '.__metainfo:pydir', + 'version': 'py:__version__', # backward compatibility + + # pytest-2.0 has a flat namespace, we use alias modules + # to keep old references compatible + 'test' : 'pytest', + 'test.collect' : 'pytest', + 'test.cmdline' : 'pytest', + + # hook into the top-level standard library + 'process' : { + '__doc__' : '._process:__doc__', + 'cmdexec' : '._process.cmdexec:cmdexec', + 'kill' : '._process.killproc:kill', + 'ForkedFunc' : '._process.forkedfunc:ForkedFunc', + }, + + 'apipkg' : { + 'initpkg' : '._apipkg:initpkg', + 'ApiModule' : '._apipkg:ApiModule', + }, + + 'iniconfig' : { + 'IniConfig' : '._iniconfig:IniConfig', + 'ParseError' : '._iniconfig:ParseError', + }, + + 'path' : { + '__doc__' : '._path:__doc__', + 'svnwc' : '._path.svnwc:SvnWCCommandPath', + 'svnurl' : '._path.svnurl:SvnCommandPath', + 'local' : '._path.local:LocalPath', + 'SvnAuth' : '._path.svnwc:SvnAuth', + }, + + # python inspection/code-generation API + 'code' : { + '__doc__' : '._code:__doc__', + 'compile' : '._code.source:compile_', + 'Source' : '._code.source:Source', + 'Code' : '._code.code:Code', + 'Frame' : '._code.code:Frame', + 'ExceptionInfo' : '._code.code:ExceptionInfo', + 'Traceback' : '._code.code:Traceback', + 'getfslineno' : '._code.source:getfslineno', + 'getrawcode' : '._code.code:getrawcode', + 'patch_builtins' : '._code.code:patch_builtins', + 'unpatch_builtins' : '._code.code:unpatch_builtins', + '_AssertionError' : '._code.assertion:AssertionError', + '_reinterpret_old' : '._code.assertion:reinterpret_old', + '_reinterpret' : '._code.assertion:reinterpret', + '_reprcompare' : '._code.assertion:_reprcompare', + '_format_explanation' : '._code.assertion:_format_explanation', + }, + + # backports and additions of builtins + 'builtin' : { + '__doc__' : '._builtin:__doc__', + 'enumerate' : '._builtin:enumerate', + 'reversed' : '._builtin:reversed', + 'sorted' : '._builtin:sorted', + 'any' : '._builtin:any', + 'all' : '._builtin:all', + 'set' : '._builtin:set', + 'frozenset' : '._builtin:frozenset', + 'BaseException' : '._builtin:BaseException', + 'GeneratorExit' : '._builtin:GeneratorExit', + '_sysex' : '._builtin:_sysex', + 'print_' : '._builtin:print_', + '_reraise' : '._builtin:_reraise', + '_tryimport' : '._builtin:_tryimport', + 'exec_' : '._builtin:exec_', + '_basestring' : '._builtin:_basestring', + '_totext' : '._builtin:_totext', + '_isbytes' : '._builtin:_isbytes', + '_istext' : '._builtin:_istext', + '_getimself' : '._builtin:_getimself', + '_getfuncdict' : '._builtin:_getfuncdict', + '_getcode' : '._builtin:_getcode', + 'builtins' : '._builtin:builtins', + 'execfile' : '._builtin:execfile', + 'callable' : '._builtin:callable', + 'bytes' : '._builtin:bytes', + 'text' : '._builtin:text', + }, + + # input-output helping + 'io' : { + '__doc__' : '._io:__doc__', + 'dupfile' : '._io.capture:dupfile', + 'TextIO' : '._io.capture:TextIO', + 'BytesIO' : '._io.capture:BytesIO', + 'FDCapture' : '._io.capture:FDCapture', + 'StdCapture' : '._io.capture:StdCapture', + 
'StdCaptureFD' : '._io.capture:StdCaptureFD', + 'TerminalWriter' : '._io.terminalwriter:TerminalWriter', + 'ansi_print' : '._io.terminalwriter:ansi_print', + 'get_terminal_width' : '._io.terminalwriter:get_terminal_width', + 'saferepr' : '._io.saferepr:saferepr', + }, + + # small and mean xml/html generation + 'xml' : { + '__doc__' : '._xmlgen:__doc__', + 'html' : '._xmlgen:html', + 'Tag' : '._xmlgen:Tag', + 'raw' : '._xmlgen:raw', + 'Namespace' : '._xmlgen:Namespace', + 'escape' : '._xmlgen:escape', + }, + + 'log' : { + # logging API ('producers' and 'consumers' connected via keywords) + '__doc__' : '._log:__doc__', + '_apiwarn' : '._log.warning:_apiwarn', + 'Producer' : '._log.log:Producer', + 'setconsumer' : '._log.log:setconsumer', + '_setstate' : '._log.log:setstate', + '_getstate' : '._log.log:getstate', + 'Path' : '._log.log:Path', + 'STDOUT' : '._log.log:STDOUT', + 'STDERR' : '._log.log:STDERR', + 'Syslog' : '._log.log:Syslog', + }, + +}) diff --git a/venv/lib/python3.5/site-packages/py/__metainfo.py b/venv/lib/python3.5/site-packages/py/__metainfo.py new file mode 100644 index 0000000..067806c --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/__metainfo.py @@ -0,0 +1,2 @@ +import py +pydir = py.path.local(py.__file__).dirpath() diff --git a/venv/lib/python3.5/site-packages/py/_apipkg.py b/venv/lib/python3.5/site-packages/py/_apipkg.py new file mode 100644 index 0000000..42bd29b --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_apipkg.py @@ -0,0 +1,181 @@ +""" +apipkg: control the exported namespace of a python package. + +see http://pypi.python.org/pypi/apipkg + +(c) holger krekel, 2009 - MIT license +""" +import os +import sys +from types import ModuleType + +__version__ = '1.3.dev' + +def _py_abspath(path): + """ + special version of abspath + that will leave paths from jython jars alone + """ + if path.startswith('__pyclasspath__'): + + return path + else: + return os.path.abspath(path) + +def initpkg(pkgname, exportdefs, attr=dict()): + """ initialize given package from the export definitions. 
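The exportdefs table in py/__init__.py above maps dotted names to '<module>:<attr>' implementation paths, and _apipkg resolves each entry lazily on first attribute access. A minimal sketch of wiring a package the same way; mypkg and mypkg._impl are invented names:

# mypkg/__init__.py -- hypothetical package using the same mechanism
from py import _apipkg

_apipkg.initpkg(__name__, exportdefs={
    # mypkg.helper imports mypkg._impl and fetches 'helper' on first access
    'helper': '._impl:helper',
    # a nested dict becomes a nested lazy namespace: mypkg.sub.Thing
    'sub': {'Thing': '._impl:Thing'},
})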
""" + oldmod = sys.modules.get(pkgname) + d = {} + f = getattr(oldmod, '__file__', None) + if f: + f = _py_abspath(f) + d['__file__'] = f + if hasattr(oldmod, '__version__'): + d['__version__'] = oldmod.__version__ + if hasattr(oldmod, '__loader__'): + d['__loader__'] = oldmod.__loader__ + if hasattr(oldmod, '__path__'): + d['__path__'] = [_py_abspath(p) for p in oldmod.__path__] + if '__doc__' not in exportdefs and getattr(oldmod, '__doc__', None): + d['__doc__'] = oldmod.__doc__ + d.update(attr) + if hasattr(oldmod, "__dict__"): + oldmod.__dict__.update(d) + mod = ApiModule(pkgname, exportdefs, implprefix=pkgname, attr=d) + sys.modules[pkgname] = mod + +def importobj(modpath, attrname): + module = __import__(modpath, None, None, ['__doc__']) + if not attrname: + return module + + retval = module + names = attrname.split(".") + for x in names: + retval = getattr(retval, x) + return retval + +class ApiModule(ModuleType): + def __docget(self): + try: + return self.__doc + except AttributeError: + if '__doc__' in self.__map__: + return self.__makeattr('__doc__') + def __docset(self, value): + self.__doc = value + __doc__ = property(__docget, __docset) + + def __init__(self, name, importspec, implprefix=None, attr=None): + self.__name__ = name + self.__all__ = [x for x in importspec if x != '__onfirstaccess__'] + self.__map__ = {} + self.__implprefix__ = implprefix or name + if attr: + for name, val in attr.items(): + # print "setting", self.__name__, name, val + setattr(self, name, val) + for name, importspec in importspec.items(): + if isinstance(importspec, dict): + subname = '%s.%s' % (self.__name__, name) + apimod = ApiModule(subname, importspec, implprefix) + sys.modules[subname] = apimod + setattr(self, name, apimod) + else: + parts = importspec.split(':') + modpath = parts.pop(0) + attrname = parts and parts[0] or "" + if modpath[0] == '.': + modpath = implprefix + modpath + + if not attrname: + subname = '%s.%s' % (self.__name__, name) + apimod = AliasModule(subname, modpath) + sys.modules[subname] = apimod + if '.' 
not in name: + setattr(self, name, apimod) + else: + self.__map__[name] = (modpath, attrname) + + def __repr__(self): + l = [] + if hasattr(self, '__version__'): + l.append("version=" + repr(self.__version__)) + if hasattr(self, '__file__'): + l.append('from ' + repr(self.__file__)) + if l: + return '<ApiModule %r %s>' % (self.__name__, " ".join(l)) + return '<ApiModule %r>' % (self.__name__,) + + def __makeattr(self, name): + """lazily compute value for name or raise AttributeError if unknown.""" + # print "makeattr", self.__name__, name + target = None + if '__onfirstaccess__' in self.__map__: + target = self.__map__.pop('__onfirstaccess__') + importobj(*target)() + try: + modpath, attrname = self.__map__[name] + except KeyError: + if target is not None and name != '__onfirstaccess__': + # retry, onfirstaccess might have set attrs + return getattr(self, name) + raise AttributeError(name) + else: + result = importobj(modpath, attrname) + setattr(self, name, result) + try: + del self.__map__[name] + except KeyError: + pass # in a recursive-import situation a double-del can happen + return result + + __getattr__ = __makeattr + + def __dict__(self): + # force all the content of the module to be loaded when __dict__ is read + dictdescr = ModuleType.__dict__['__dict__'] + dict = dictdescr.__get__(self) + if dict is not None: + hasattr(self, 'some') + for name in self.__all__: + try: + self.__makeattr(name) + except AttributeError: + pass + return dict + __dict__ = property(__dict__) + + +def AliasModule(modname, modpath, attrname=None): + mod = [] + + def getmod(): + if not mod: + x = importobj(modpath, None) + if attrname is not None: + x = getattr(x, attrname) + mod.append(x) + return mod[0] + + class AliasModule(ModuleType): + + def __repr__(self): + x = modpath + if attrname: + x += "." 
+ attrname + return '<AliasModule %r for %r>' % (modname, x) + + def __getattribute__(self, name): + try: + return getattr(getmod(), name) + except ImportError: + return None + + def __setattr__(self, name, value): + setattr(getmod(), name, value) + + def __delattr__(self, name): + delattr(getmod(), name) + + return AliasModule(str(modname)) diff --git a/venv/lib/python3.5/site-packages/py/_builtin.py b/venv/lib/python3.5/site-packages/py/_builtin.py new file mode 100644 index 0000000..6199afa --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_builtin.py @@ -0,0 +1,248 @@ +import sys + +try: + reversed = reversed +except NameError: + def reversed(sequence): + """reversed(sequence) -> reverse iterator over values of the sequence + + Return a reverse iterator + """ + if hasattr(sequence, '__reversed__'): + return sequence.__reversed__() + if not hasattr(sequence, '__getitem__'): + raise TypeError("argument to reversed() must be a sequence") + return reversed_iterator(sequence) + + class reversed_iterator(object): + + def __init__(self, seq): + self.seq = seq + self.remaining = len(seq) + + def __iter__(self): + return self + + def next(self): + i = self.remaining + if i > 0: + i -= 1 + item = self.seq[i] + self.remaining = i + return item + raise StopIteration + + def __length_hint__(self): + return self.remaining + +try: + any = any +except NameError: + def any(iterable): + for x in iterable: + if x: + return True + return False + +try: + all = all +except NameError: + def all(iterable): + for x in iterable: + if not x: + return False + return True + +try: + sorted = sorted +except NameError: + builtin_cmp = cmp # need to use cmp as keyword arg + + def sorted(iterable, cmp=None, key=None, reverse=0): + use_cmp = None + if key is not None: + if cmp is None: + def use_cmp(x, y): + return builtin_cmp(x[0], y[0]) + else: + def use_cmp(x, y): + return cmp(x[0], y[0]) + l = [(key(element), element) for element in iterable] + else: + if cmp is not None: + use_cmp = cmp + l = list(iterable) + if use_cmp is not None: + l.sort(use_cmp) + else: + l.sort() + if reverse: + l.reverse() + if key is not None: + return [element for (_, element) in l] + return l + +try: + set, frozenset = set, frozenset +except NameError: + from sets import set, frozenset + +# pass through +enumerate = enumerate + +try: + BaseException = BaseException +except NameError: + BaseException = Exception + +try: + GeneratorExit = GeneratorExit +except NameError: + class GeneratorExit(Exception): + """ This exception is never raised, it is there to make it possible to + write code compatible with CPython 2.5 even in lower CPython + versions.""" + pass + GeneratorExit.__module__ = 'exceptions' + +_sysex = (KeyboardInterrupt, SystemExit, MemoryError, GeneratorExit) + +try: + callable = callable +except NameError: + def callable(obj): + return hasattr(obj, "__call__") + +if sys.version_info >= (3, 0): + exec ("print_ = print ; exec_=exec") + import builtins + + # some backward compatibility helpers + _basestring = str + def _totext(obj, encoding=None, errors=None): + if isinstance(obj, bytes): + if errors is None: + obj = obj.decode(encoding) + else: + obj = obj.decode(encoding, errors) + elif not isinstance(obj, str): + obj = str(obj) + return obj + + def _isbytes(x): + return isinstance(x, bytes) + def _istext(x): + return isinstance(x, str) + + text = str + bytes = bytes + + + def _getimself(function): + return getattr(function, '__self__', None) + + def _getfuncdict(function): + return getattr(function, "__dict__", None) + + 
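AliasModule above (used near the top of py/__init__.py to make py.error picklable) builds a stand-in module whose attribute accesses are all delegated to a lazily imported target. A small demonstration; fake_os is an invented alias name:

import sys
from py import _apipkg

sys.modules["fake_os"] = _apipkg.AliasModule("fake_os", "os")

import fake_os
print(fake_os.getcwd())   # resolved against the real os module at access time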
def _getcode(function): + return getattr(function, "__code__", None) + + def execfile(fn, globs=None, locs=None): + if globs is None: + back = sys._getframe(1) + globs = back.f_globals + locs = back.f_locals + del back + elif locs is None: + locs = globs + fp = open(fn, "r") + try: + source = fp.read() + finally: + fp.close() + co = compile(source, fn, "exec", dont_inherit=True) + exec_(co, globs, locs) + +else: + import __builtin__ as builtins + _totext = unicode + _basestring = basestring + text = unicode + bytes = str + execfile = execfile + callable = callable + def _isbytes(x): + return isinstance(x, str) + def _istext(x): + return isinstance(x, unicode) + + def _getimself(function): + return getattr(function, 'im_self', None) + + def _getfuncdict(function): + return getattr(function, "__dict__", None) + + def _getcode(function): + try: + return getattr(function, "__code__") + except AttributeError: + return getattr(function, "func_code", None) + + def print_(*args, **kwargs): + """ minimal backport of py3k print statement. """ + sep = ' ' + if 'sep' in kwargs: + sep = kwargs.pop('sep') + end = '\n' + if 'end' in kwargs: + end = kwargs.pop('end') + file = 'file' in kwargs and kwargs.pop('file') or sys.stdout + if kwargs: + args = ", ".join([str(x) for x in kwargs]) + raise TypeError("invalid keyword arguments: %s" % args) + at_start = True + for x in args: + if not at_start: + file.write(sep) + file.write(str(x)) + at_start = False + file.write(end) + + def exec_(obj, globals=None, locals=None): + """ minimal backport of py3k exec statement. """ + __tracebackhide__ = True + if globals is None: + frame = sys._getframe(1) + globals = frame.f_globals + if locals is None: + locals = frame.f_locals + elif locals is None: + locals = globals + exec2(obj, globals, locals) + +if sys.version_info >= (3, 0): + def _reraise(cls, val, tb): + __tracebackhide__ = True + assert hasattr(val, '__traceback__') + raise cls.with_traceback(val, tb) +else: + exec (""" +def _reraise(cls, val, tb): + __tracebackhide__ = True + raise cls, val, tb +def exec2(obj, globals, locals): + __tracebackhide__ = True + exec obj in globals, locals +""") + +def _tryimport(*names): + """ return the first successfully imported module. """ + assert names + for name in names: + try: + __import__(name) + except ImportError: + excinfo = sys.exc_info() + else: + return sys.modules[name] + _reraise(*excinfo) diff --git a/venv/lib/python3.5/site-packages/py/_code/__init__.py b/venv/lib/python3.5/site-packages/py/_code/__init__.py new file mode 100644 index 0000000..152ff58 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_code/__init__.py @@ -0,0 +1 @@ +""" python inspection/code generation API """ diff --git a/venv/lib/python3.5/site-packages/py/_code/_assertionnew.py b/venv/lib/python3.5/site-packages/py/_code/_assertionnew.py new file mode 100644 index 0000000..e9b7050 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_code/_assertionnew.py @@ -0,0 +1,339 @@ +""" +Find intermediate evalutation results in assert statements through builtin AST. +This should replace _assertionold.py eventually. 
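The _builtin module above gives the same source a function-call spelling of print/exec plus small import helpers on both Python 2 and 3, for example:

import sys
import py

# return the first importable module; lets callers prefer optional speedups
json = py.builtin._tryimport("simplejson", "json")

py.builtin.print_("hello", "world", sep=", ", file=sys.stdout)
py.builtin.exec_("x = 1 + 1", globals())
assert x == 2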
+""" + +import sys +import ast + +import py +from py._code.assertion import _format_explanation, BuiltinAssertionError + + +if sys.platform.startswith("java") and sys.version_info < (2, 5, 2): + # See http://bugs.jython.org/issue1497 + _exprs = ("BoolOp", "BinOp", "UnaryOp", "Lambda", "IfExp", "Dict", + "ListComp", "GeneratorExp", "Yield", "Compare", "Call", + "Repr", "Num", "Str", "Attribute", "Subscript", "Name", + "List", "Tuple") + _stmts = ("FunctionDef", "ClassDef", "Return", "Delete", "Assign", + "AugAssign", "Print", "For", "While", "If", "With", "Raise", + "TryExcept", "TryFinally", "Assert", "Import", "ImportFrom", + "Exec", "Global", "Expr", "Pass", "Break", "Continue") + _expr_nodes = set(getattr(ast, name) for name in _exprs) + _stmt_nodes = set(getattr(ast, name) for name in _stmts) + def _is_ast_expr(node): + return node.__class__ in _expr_nodes + def _is_ast_stmt(node): + return node.__class__ in _stmt_nodes +else: + def _is_ast_expr(node): + return isinstance(node, ast.expr) + def _is_ast_stmt(node): + return isinstance(node, ast.stmt) + + +class Failure(Exception): + """Error found while interpreting AST.""" + + def __init__(self, explanation=""): + self.cause = sys.exc_info() + self.explanation = explanation + + +def interpret(source, frame, should_fail=False): + mod = ast.parse(source) + visitor = DebugInterpreter(frame) + try: + visitor.visit(mod) + except Failure: + failure = sys.exc_info()[1] + return getfailure(failure) + if should_fail: + return ("(assertion failed, but when it was re-run for " + "printing intermediate values, it did not fail. Suggestions: " + "compute assert expression before the assert or use --no-assert)") + +def run(offending_line, frame=None): + if frame is None: + frame = py.code.Frame(sys._getframe(1)) + return interpret(offending_line, frame) + +def getfailure(failure): + explanation = _format_explanation(failure.explanation) + value = failure.cause[1] + if str(value): + lines = explanation.splitlines() + if not lines: + lines.append("") + lines[0] += " << %s" % (value,) + explanation = "\n".join(lines) + text = "%s: %s" % (failure.cause[0].__name__, explanation) + if text.startswith("AssertionError: assert "): + text = text[16:] + return text + + +operator_map = { + ast.BitOr : "|", + ast.BitXor : "^", + ast.BitAnd : "&", + ast.LShift : "<<", + ast.RShift : ">>", + ast.Add : "+", + ast.Sub : "-", + ast.Mult : "*", + ast.Div : "/", + ast.FloorDiv : "//", + ast.Mod : "%", + ast.Eq : "==", + ast.NotEq : "!=", + ast.Lt : "<", + ast.LtE : "<=", + ast.Gt : ">", + ast.GtE : ">=", + ast.Pow : "**", + ast.Is : "is", + ast.IsNot : "is not", + ast.In : "in", + ast.NotIn : "not in" +} + +unary_map = { + ast.Not : "not %s", + ast.Invert : "~%s", + ast.USub : "-%s", + ast.UAdd : "+%s" +} + + +class DebugInterpreter(ast.NodeVisitor): + """Interpret AST nodes to gleam useful debugging information. """ + + def __init__(self, frame): + self.frame = frame + + def generic_visit(self, node): + # Fallback when we don't have a special implementation. 
+ if _is_ast_expr(node): + mod = ast.Expression(node) + co = self._compile(mod) + try: + result = self.frame.eval(co) + except Exception: + raise Failure() + explanation = self.frame.repr(result) + return explanation, result + elif _is_ast_stmt(node): + mod = ast.Module([node]) + co = self._compile(mod, "exec") + try: + self.frame.exec_(co) + except Exception: + raise Failure() + return None, None + else: + raise AssertionError("can't handle %s" %(node,)) + + def _compile(self, source, mode="eval"): + return compile(source, "<assertion interpretation>", mode) + + def visit_Expr(self, expr): + return self.visit(expr.value) + + def visit_Module(self, mod): + for stmt in mod.body: + self.visit(stmt) + + def visit_Name(self, name): + explanation, result = self.generic_visit(name) + # See if the name is local. + source = "%r in locals() is not globals()" % (name.id,) + co = self._compile(source) + try: + local = self.frame.eval(co) + except Exception: + # have to assume it isn't + local = False + if not local: + return name.id, result + return explanation, result + + def visit_Compare(self, comp): + left = comp.left + left_explanation, left_result = self.visit(left) + for op, next_op in zip(comp.ops, comp.comparators): + next_explanation, next_result = self.visit(next_op) + op_symbol = operator_map[op.__class__] + explanation = "%s %s %s" % (left_explanation, op_symbol, + next_explanation) + source = "__exprinfo_left %s __exprinfo_right" % (op_symbol,) + co = self._compile(source) + try: + result = self.frame.eval(co, __exprinfo_left=left_result, + __exprinfo_right=next_result) + except Exception: + raise Failure(explanation) + try: + if not result: + break + except KeyboardInterrupt: + raise + except: + break + left_explanation, left_result = next_explanation, next_result + + rcomp = py.code._reprcompare + if rcomp: + res = rcomp(op_symbol, left_result, next_result) + if res: + explanation = res + return explanation, result + + def visit_BoolOp(self, boolop): + is_or = isinstance(boolop.op, ast.Or) + explanations = [] + for operand in boolop.values: + explanation, result = self.visit(operand) + explanations.append(explanation) + if result == is_or: + break + name = is_or and " or " or " and " + explanation = "(" + name.join(explanations) + ")" + return explanation, result + + def visit_UnaryOp(self, unary): + pattern = unary_map[unary.op.__class__] + operand_explanation, operand_result = self.visit(unary.operand) + explanation = pattern % (operand_explanation,) + co = self._compile(pattern % ("__exprinfo_expr",)) + try: + result = self.frame.eval(co, __exprinfo_expr=operand_result) + except Exception: + raise Failure(explanation) + return explanation, result + + def visit_BinOp(self, binop): + left_explanation, left_result = self.visit(binop.left) + right_explanation, right_result = self.visit(binop.right) + symbol = operator_map[binop.op.__class__] + explanation = "(%s %s %s)" % (left_explanation, symbol, + right_explanation) + source = "__exprinfo_left %s __exprinfo_right" % (symbol,) + co = self._compile(source) + try: + result = self.frame.eval(co, __exprinfo_left=left_result, + __exprinfo_right=right_result) + except Exception: + raise Failure(explanation) + return explanation, result + + def visit_Call(self, call): + func_explanation, func = self.visit(call.func) + arg_explanations = [] + ns = {"__exprinfo_func" : func} + arguments = [] + for arg in call.args: + arg_explanation, arg_result = self.visit(arg) + arg_name = "__exprinfo_%s" % (len(ns),) + ns[arg_name] = arg_result + 
arguments.append(arg_name) + arg_explanations.append(arg_explanation) + for keyword in call.keywords: + arg_explanation, arg_result = self.visit(keyword.value) + arg_name = "__exprinfo_%s" % (len(ns),) + ns[arg_name] = arg_result + keyword_source = "%s=%%s" % (keyword.arg) + arguments.append(keyword_source % (arg_name,)) + arg_explanations.append(keyword_source % (arg_explanation,)) + if call.starargs: + arg_explanation, arg_result = self.visit(call.starargs) + arg_name = "__exprinfo_star" + ns[arg_name] = arg_result + arguments.append("*%s" % (arg_name,)) + arg_explanations.append("*%s" % (arg_explanation,)) + if call.kwargs: + arg_explanation, arg_result = self.visit(call.kwargs) + arg_name = "__exprinfo_kwds" + ns[arg_name] = arg_result + arguments.append("**%s" % (arg_name,)) + arg_explanations.append("**%s" % (arg_explanation,)) + args_explained = ", ".join(arg_explanations) + explanation = "%s(%s)" % (func_explanation, args_explained) + args = ", ".join(arguments) + source = "__exprinfo_func(%s)" % (args,) + co = self._compile(source) + try: + result = self.frame.eval(co, **ns) + except Exception: + raise Failure(explanation) + pattern = "%s\n{%s = %s\n}" + rep = self.frame.repr(result) + explanation = pattern % (rep, rep, explanation) + return explanation, result + + def _is_builtin_name(self, name): + pattern = "%r not in globals() and %r not in locals()" + source = pattern % (name.id, name.id) + co = self._compile(source) + try: + return self.frame.eval(co) + except Exception: + return False + + def visit_Attribute(self, attr): + if not isinstance(attr.ctx, ast.Load): + return self.generic_visit(attr) + source_explanation, source_result = self.visit(attr.value) + explanation = "%s.%s" % (source_explanation, attr.attr) + source = "__exprinfo_expr.%s" % (attr.attr,) + co = self._compile(source) + try: + result = self.frame.eval(co, __exprinfo_expr=source_result) + except Exception: + raise Failure(explanation) + explanation = "%s\n{%s = %s.%s\n}" % (self.frame.repr(result), + self.frame.repr(result), + source_explanation, attr.attr) + # Check if the attr is from an instance. + source = "%r in getattr(__exprinfo_expr, '__dict__', {})" + source = source % (attr.attr,) + co = self._compile(source) + try: + from_instance = self.frame.eval(co, __exprinfo_expr=source_result) + except Exception: + from_instance = True + if from_instance: + rep = self.frame.repr(result) + pattern = "%s\n{%s = %s\n}" + explanation = pattern % (rep, rep, explanation) + return explanation, result + + def visit_Assert(self, assrt): + test_explanation, test_result = self.visit(assrt.test) + if test_explanation.startswith("False\n{False =") and \ + test_explanation.endswith("\n"): + test_explanation = test_explanation[15:-2] + explanation = "assert %s" % (test_explanation,) + if not test_result: + try: + raise BuiltinAssertionError + except Exception: + raise Failure(explanation) + return explanation, test_result + + def visit_Assign(self, assign): + value_explanation, value_result = self.visit(assign.value) + explanation = "... 
= %s" % (value_explanation,) + name = ast.Name("__exprinfo_expr", ast.Load(), + lineno=assign.value.lineno, + col_offset=assign.value.col_offset) + new_assign = ast.Assign(assign.targets, name, lineno=assign.lineno, + col_offset=assign.col_offset) + mod = ast.Module([new_assign]) + co = self._compile(mod, "exec") + try: + self.frame.exec_(co, __exprinfo_expr=value_result) + except Exception: + raise Failure(explanation) + return explanation, value_result diff --git a/venv/lib/python3.5/site-packages/py/_code/_assertionold.py b/venv/lib/python3.5/site-packages/py/_code/_assertionold.py new file mode 100644 index 0000000..98786e4 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_code/_assertionold.py @@ -0,0 +1,555 @@ +import py +import sys, inspect +from compiler import parse, ast, pycodegen +from py._code.assertion import BuiltinAssertionError, _format_explanation + +passthroughex = py.builtin._sysex + +class Failure: + def __init__(self, node): + self.exc, self.value, self.tb = sys.exc_info() + self.node = node + +class View(object): + """View base class. + + If C is a subclass of View, then C(x) creates a proxy object around + the object x. The actual class of the proxy is not C in general, + but a *subclass* of C determined by the rules below. To avoid confusion + we call view class the class of the proxy (a subclass of C, so of View) + and object class the class of x. + + Attributes and methods not found in the proxy are automatically read on x. + Other operations like setting attributes are performed on the proxy, as + determined by its view class. The object x is available from the proxy + as its __obj__ attribute. + + The view class selection is determined by the __view__ tuples and the + optional __viewkey__ method. By default, the selected view class is the + most specific subclass of C whose __view__ mentions the class of x. + If no such subclass is found, the search proceeds with the parent + object classes. For example, C(True) will first look for a subclass + of C with __view__ = (..., bool, ...) and only if it doesn't find any + look for one with __view__ = (..., int, ...), and then ..., object,... + If everything fails the class C itself is considered to be the default. + + Alternatively, the view class selection can be driven by another aspect + of the object x, instead of the class of x, by overriding __viewkey__. + See last example at the end of this module. 
+ """ + + _viewcache = {} + __view__ = () + + def __new__(rootclass, obj, *args, **kwds): + self = object.__new__(rootclass) + self.__obj__ = obj + self.__rootclass__ = rootclass + key = self.__viewkey__() + try: + self.__class__ = self._viewcache[key] + except KeyError: + self.__class__ = self._selectsubclass(key) + return self + + def __getattr__(self, attr): + # attributes not found in the normal hierarchy rooted on View + # are looked up in the object's real class + return getattr(self.__obj__, attr) + + def __viewkey__(self): + return self.__obj__.__class__ + + def __matchkey__(self, key, subclasses): + if inspect.isclass(key): + keys = inspect.getmro(key) + else: + keys = [key] + for key in keys: + result = [C for C in subclasses if key in C.__view__] + if result: + return result + return [] + + def _selectsubclass(self, key): + subclasses = list(enumsubclasses(self.__rootclass__)) + for C in subclasses: + if not isinstance(C.__view__, tuple): + C.__view__ = (C.__view__,) + choices = self.__matchkey__(key, subclasses) + if not choices: + return self.__rootclass__ + elif len(choices) == 1: + return choices[0] + else: + # combine the multiple choices + return type('?', tuple(choices), {}) + + def __repr__(self): + return '%s(%r)' % (self.__rootclass__.__name__, self.__obj__) + + +def enumsubclasses(cls): + for subcls in cls.__subclasses__(): + for subsubclass in enumsubclasses(subcls): + yield subsubclass + yield cls + + +class Interpretable(View): + """A parse tree node with a few extra methods.""" + explanation = None + + def is_builtin(self, frame): + return False + + def eval(self, frame): + # fall-back for unknown expression nodes + try: + expr = ast.Expression(self.__obj__) + expr.filename = '<eval>' + self.__obj__.filename = '<eval>' + co = pycodegen.ExpressionCodeGenerator(expr).getCode() + result = frame.eval(co) + except passthroughex: + raise + except: + raise Failure(self) + self.result = result + self.explanation = self.explanation or frame.repr(self.result) + + def run(self, frame): + # fall-back for unknown statement nodes + try: + expr = ast.Module(None, ast.Stmt([self.__obj__])) + expr.filename = '<run>' + co = pycodegen.ModuleCodeGenerator(expr).getCode() + frame.exec_(co) + except passthroughex: + raise + except: + raise Failure(self) + + def nice_explanation(self): + return _format_explanation(self.explanation) + + +class Name(Interpretable): + __view__ = ast.Name + + def is_local(self, frame): + source = '%r in locals() is not globals()' % self.name + try: + return frame.is_true(frame.eval(source)) + except passthroughex: + raise + except: + return False + + def is_global(self, frame): + source = '%r in globals()' % self.name + try: + return frame.is_true(frame.eval(source)) + except passthroughex: + raise + except: + return False + + def is_builtin(self, frame): + source = '%r not in locals() and %r not in globals()' % ( + self.name, self.name) + try: + return frame.is_true(frame.eval(source)) + except passthroughex: + raise + except: + return False + + def eval(self, frame): + super(Name, self).eval(frame) + if not self.is_local(frame): + self.explanation = self.name + +class Compare(Interpretable): + __view__ = ast.Compare + + def eval(self, frame): + expr = Interpretable(self.expr) + expr.eval(frame) + for operation, expr2 in self.ops: + if hasattr(self, 'result'): + # shortcutting in chained expressions + if not frame.is_true(self.result): + break + expr2 = Interpretable(expr2) + expr2.eval(frame) + self.explanation = "%s %s %s" % ( + expr.explanation, 
operation, expr2.explanation) + source = "__exprinfo_left %s __exprinfo_right" % operation + try: + self.result = frame.eval(source, + __exprinfo_left=expr.result, + __exprinfo_right=expr2.result) + except passthroughex: + raise + except: + raise Failure(self) + expr = expr2 + +class And(Interpretable): + __view__ = ast.And + + def eval(self, frame): + explanations = [] + for expr in self.nodes: + expr = Interpretable(expr) + expr.eval(frame) + explanations.append(expr.explanation) + self.result = expr.result + if not frame.is_true(expr.result): + break + self.explanation = '(' + ' and '.join(explanations) + ')' + +class Or(Interpretable): + __view__ = ast.Or + + def eval(self, frame): + explanations = [] + for expr in self.nodes: + expr = Interpretable(expr) + expr.eval(frame) + explanations.append(expr.explanation) + self.result = expr.result + if frame.is_true(expr.result): + break + self.explanation = '(' + ' or '.join(explanations) + ')' + + +# == Unary operations == +keepalive = [] +for astclass, astpattern in { + ast.Not : 'not __exprinfo_expr', + ast.Invert : '(~__exprinfo_expr)', + }.items(): + + class UnaryArith(Interpretable): + __view__ = astclass + + def eval(self, frame, astpattern=astpattern): + expr = Interpretable(self.expr) + expr.eval(frame) + self.explanation = astpattern.replace('__exprinfo_expr', + expr.explanation) + try: + self.result = frame.eval(astpattern, + __exprinfo_expr=expr.result) + except passthroughex: + raise + except: + raise Failure(self) + + keepalive.append(UnaryArith) + +# == Binary operations == +for astclass, astpattern in { + ast.Add : '(__exprinfo_left + __exprinfo_right)', + ast.Sub : '(__exprinfo_left - __exprinfo_right)', + ast.Mul : '(__exprinfo_left * __exprinfo_right)', + ast.Div : '(__exprinfo_left / __exprinfo_right)', + ast.Mod : '(__exprinfo_left % __exprinfo_right)', + ast.Power : '(__exprinfo_left ** __exprinfo_right)', + }.items(): + + class BinaryArith(Interpretable): + __view__ = astclass + + def eval(self, frame, astpattern=astpattern): + left = Interpretable(self.left) + left.eval(frame) + right = Interpretable(self.right) + right.eval(frame) + self.explanation = (astpattern + .replace('__exprinfo_left', left .explanation) + .replace('__exprinfo_right', right.explanation)) + try: + self.result = frame.eval(astpattern, + __exprinfo_left=left.result, + __exprinfo_right=right.result) + except passthroughex: + raise + except: + raise Failure(self) + + keepalive.append(BinaryArith) + + +class CallFunc(Interpretable): + __view__ = ast.CallFunc + + def is_bool(self, frame): + source = 'isinstance(__exprinfo_value, bool)' + try: + return frame.is_true(frame.eval(source, + __exprinfo_value=self.result)) + except passthroughex: + raise + except: + return False + + def eval(self, frame): + node = Interpretable(self.node) + node.eval(frame) + explanations = [] + vars = {'__exprinfo_fn': node.result} + source = '__exprinfo_fn(' + for a in self.args: + if isinstance(a, ast.Keyword): + keyword = a.name + a = a.expr + else: + keyword = None + a = Interpretable(a) + a.eval(frame) + argname = '__exprinfo_%d' % len(vars) + vars[argname] = a.result + if keyword is None: + source += argname + ',' + explanations.append(a.explanation) + else: + source += '%s=%s,' % (keyword, argname) + explanations.append('%s=%s' % (keyword, a.explanation)) + if self.star_args: + star_args = Interpretable(self.star_args) + star_args.eval(frame) + argname = '__exprinfo_star' + vars[argname] = star_args.result + source += '*' + argname + ',' + explanations.append('*' + 
star_args.explanation) + if self.dstar_args: + dstar_args = Interpretable(self.dstar_args) + dstar_args.eval(frame) + argname = '__exprinfo_kwds' + vars[argname] = dstar_args.result + source += '**' + argname + ',' + explanations.append('**' + dstar_args.explanation) + self.explanation = "%s(%s)" % ( + node.explanation, ', '.join(explanations)) + if source.endswith(','): + source = source[:-1] + source += ')' + try: + self.result = frame.eval(source, **vars) + except passthroughex: + raise + except: + raise Failure(self) + if not node.is_builtin(frame) or not self.is_bool(frame): + r = frame.repr(self.result) + self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation) + +class Getattr(Interpretable): + __view__ = ast.Getattr + + def eval(self, frame): + expr = Interpretable(self.expr) + expr.eval(frame) + source = '__exprinfo_expr.%s' % self.attrname + try: + self.result = frame.eval(source, __exprinfo_expr=expr.result) + except passthroughex: + raise + except: + raise Failure(self) + self.explanation = '%s.%s' % (expr.explanation, self.attrname) + # if the attribute comes from the instance, its value is interesting + source = ('hasattr(__exprinfo_expr, "__dict__") and ' + '%r in __exprinfo_expr.__dict__' % self.attrname) + try: + from_instance = frame.is_true( + frame.eval(source, __exprinfo_expr=expr.result)) + except passthroughex: + raise + except: + from_instance = True + if from_instance: + r = frame.repr(self.result) + self.explanation = '%s\n{%s = %s\n}' % (r, r, self.explanation) + +# == Re-interpretation of full statements == + +class Assert(Interpretable): + __view__ = ast.Assert + + def run(self, frame): + test = Interpretable(self.test) + test.eval(frame) + # simplify 'assert False where False = ...' + if (test.explanation.startswith('False\n{False = ') and + test.explanation.endswith('\n}')): + test.explanation = test.explanation[15:-2] + # print the result as 'assert <explanation>' + self.result = test.result + self.explanation = 'assert ' + test.explanation + if not frame.is_true(test.result): + try: + raise BuiltinAssertionError + except passthroughex: + raise + except: + raise Failure(self) + +class Assign(Interpretable): + __view__ = ast.Assign + + def run(self, frame): + expr = Interpretable(self.expr) + expr.eval(frame) + self.result = expr.result + self.explanation = '... 
= ' + expr.explanation + # fall-back-run the rest of the assignment + ass = ast.Assign(self.nodes, ast.Name('__exprinfo_expr')) + mod = ast.Module(None, ast.Stmt([ass])) + mod.filename = '<run>' + co = pycodegen.ModuleCodeGenerator(mod).getCode() + try: + frame.exec_(co, __exprinfo_expr=expr.result) + except passthroughex: + raise + except: + raise Failure(self) + +class Discard(Interpretable): + __view__ = ast.Discard + + def run(self, frame): + expr = Interpretable(self.expr) + expr.eval(frame) + self.result = expr.result + self.explanation = expr.explanation + +class Stmt(Interpretable): + __view__ = ast.Stmt + + def run(self, frame): + for stmt in self.nodes: + stmt = Interpretable(stmt) + stmt.run(frame) + + +def report_failure(e): + explanation = e.node.nice_explanation() + if explanation: + explanation = ", in: " + explanation + else: + explanation = "" + sys.stdout.write("%s: %s%s\n" % (e.exc.__name__, e.value, explanation)) + +def check(s, frame=None): + if frame is None: + frame = sys._getframe(1) + frame = py.code.Frame(frame) + expr = parse(s, 'eval') + assert isinstance(expr, ast.Expression) + node = Interpretable(expr.node) + try: + node.eval(frame) + except passthroughex: + raise + except Failure: + e = sys.exc_info()[1] + report_failure(e) + else: + if not frame.is_true(node.result): + sys.stderr.write("assertion failed: %s\n" % node.nice_explanation()) + + +########################################################### +# API / Entry points +# ######################################################### + +def interpret(source, frame, should_fail=False): + module = Interpretable(parse(source, 'exec').node) + #print "got module", module + if isinstance(frame, py.std.types.FrameType): + frame = py.code.Frame(frame) + try: + module.run(frame) + except Failure: + e = sys.exc_info()[1] + return getfailure(e) + except passthroughex: + raise + except: + import traceback + traceback.print_exc() + if should_fail: + return ("(assertion failed, but when it was re-run for " + "printing intermediate values, it did not fail. 
Suggestions: " + "compute assert expression before the assert or use --nomagic)") + else: + return None + +def getmsg(excinfo): + if isinstance(excinfo, tuple): + excinfo = py.code.ExceptionInfo(excinfo) + #frame, line = gettbline(tb) + #frame = py.code.Frame(frame) + #return interpret(line, frame) + + tb = excinfo.traceback[-1] + source = str(tb.statement).strip() + x = interpret(source, tb.frame, should_fail=True) + if not isinstance(x, str): + raise TypeError("interpret returned non-string %r" % (x,)) + return x + +def getfailure(e): + explanation = e.node.nice_explanation() + if str(e.value): + lines = explanation.split('\n') + lines[0] += " << %s" % (e.value,) + explanation = '\n'.join(lines) + text = "%s: %s" % (e.exc.__name__, explanation) + if text.startswith('AssertionError: assert '): + text = text[16:] + return text + +def run(s, frame=None): + if frame is None: + frame = sys._getframe(1) + frame = py.code.Frame(frame) + module = Interpretable(parse(s, 'exec').node) + try: + module.run(frame) + except Failure: + e = sys.exc_info()[1] + report_failure(e) + + +if __name__ == '__main__': + # example: + def f(): + return 5 + def g(): + return 3 + def h(x): + return 'never' + check("f() * g() == 5") + check("not f()") + check("not (f() and g() or 0)") + check("f() == g()") + i = 4 + check("i == f()") + check("len(f()) == 0") + check("isinstance(2+3+4, float)") + + run("x = i") + check("x == 5") + + run("assert not f(), 'oops'") + run("a, b, c = 1, 2") + run("a, b, c = f()") + + check("max([f(),g()]) == 4") + check("'hello'[g()] == 'h'") + run("'guk%d' % h(f())") diff --git a/venv/lib/python3.5/site-packages/py/_code/_py2traceback.py b/venv/lib/python3.5/site-packages/py/_code/_py2traceback.py new file mode 100644 index 0000000..227cb96 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_code/_py2traceback.py @@ -0,0 +1,79 @@ +# copied from python-2.7.3's traceback.py +# CHANGES: +# - some_str is replaced, trying to create unicode strings +# +import types + +def format_exception_only(etype, value): + """Format the exception part of a traceback. + + The arguments are the exception type and value such as given by + sys.last_type and sys.last_value. The return value is a list of + strings, each ending in a newline. + + Normally, the list contains a single string; however, for + SyntaxError exceptions, it contains several lines that (when + printed) display detailed information about where the syntax + error occurred. + + The message indicating which exception occurred is always the last + string in the list. + + """ + + # An instance should not have a meaningful value parameter, but + # sometimes does, particularly for string exceptions, such as + # >>> raise string1, string2 # deprecated + # + # Clear these out first because issubtype(string1, SyntaxError) + # would throw another exception and mask the original problem. + if (isinstance(etype, BaseException) or + isinstance(etype, types.InstanceType) or + etype is None or type(etype) is str): + return [_format_final_exc_line(etype, value)] + + stype = etype.__name__ + + if not issubclass(etype, SyntaxError): + return [_format_final_exc_line(stype, value)] + + # It was a syntax error; show exactly where the problem was found. 
+ lines = [] + try: + msg, (filename, lineno, offset, badline) = value.args + except Exception: + pass + else: + filename = filename or "<string>" + lines.append(' File "%s", line %d\n' % (filename, lineno)) + if badline is not None: + lines.append(' %s\n' % badline.strip()) + if offset is not None: + caretspace = badline.rstrip('\n')[:offset].lstrip() + # non-space whitespace (likes tabs) must be kept for alignment + caretspace = ((c.isspace() and c or ' ') for c in caretspace) + # only three spaces to account for offset1 == pos 0 + lines.append(' %s^\n' % ''.join(caretspace)) + value = msg + + lines.append(_format_final_exc_line(stype, value)) + return lines + +def _format_final_exc_line(etype, value): + """Return a list of a single line -- normal case for format_exception_only""" + valuestr = _some_str(value) + if value is None or not valuestr: + line = "%s\n" % etype + else: + line = "%s: %s\n" % (etype, valuestr) + return line + +def _some_str(value): + try: + return unicode(value) + except Exception: + try: + return str(value) + except Exception: + pass + return '<unprintable %s object>' % type(value).__name__ diff --git a/venv/lib/python3.5/site-packages/py/_code/assertion.py b/venv/lib/python3.5/site-packages/py/_code/assertion.py new file mode 100644 index 0000000..3bcb8cd --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_code/assertion.py @@ -0,0 +1,94 @@ +import sys +import py + +BuiltinAssertionError = py.builtin.builtins.AssertionError + +_reprcompare = None # if set, will be called by assert reinterp for comparison ops + +def _format_explanation(explanation): + """This formats an explanation + + Normally all embedded newlines are escaped, however there are + three exceptions: \n{, \n} and \n~. The first two are intended + cover nested explanations, see function and attribute explanations + for examples (.visit_Call(), visit_Attribute()). The last one is + for when one explanation needs to span multiple lines, e.g. when + displaying diffs. + """ + raw_lines = (explanation or '').split('\n') + # escape newlines not followed by {, } and ~ + lines = [raw_lines[0]] + for l in raw_lines[1:]: + if l.startswith('{') or l.startswith('}') or l.startswith('~'): + lines.append(l) + else: + lines[-1] += '\\n' + l + + result = lines[:1] + stack = [0] + stackcnt = [0] + for line in lines[1:]: + if line.startswith('{'): + if stackcnt[-1]: + s = 'and ' + else: + s = 'where ' + stack.append(len(result)) + stackcnt[-1] += 1 + stackcnt.append(0) + result.append(' +' + ' '*(len(stack)-1) + s + line[1:]) + elif line.startswith('}'): + assert line.startswith('}') + stack.pop() + stackcnt.pop() + result[stack[-1]] += line[1:] + else: + assert line.startswith('~') + result.append(' '*len(stack) + line[1:]) + assert len(stack) == 1 + return '\n'.join(result) + + +class AssertionError(BuiltinAssertionError): + def __init__(self, *args): + BuiltinAssertionError.__init__(self, *args) + if args: + try: + self.msg = str(args[0]) + except py.builtin._sysex: + raise + except: + self.msg = "<[broken __repr__] %s at %0xd>" %( + args[0].__class__, id(args[0])) + else: + f = py.code.Frame(sys._getframe(1)) + try: + source = f.code.fullsource + if source is not None: + try: + source = source.getstatement(f.lineno, assertion=True) + except IndexError: + source = None + else: + source = str(source.deindent()).strip() + except py.error.ENOENT: + source = None + # this can also occur during reinterpretation, when the + # co_filename is set to "<run>". 
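_format_explanation above rewrites the \n{ ... \n} nesting markers that the reinterpreters emit into indented 'where'/'and' clauses. Roughly as follows; the exact output spacing is derived from a reading of the code, so verify before relying on it:

from py._code.assertion import _format_explanation

# an explanation in the shape visit_Call/visit_Attribute produce
print(_format_explanation("False\n{False = g(2)\n}"))
# False
#  + where False = g(2)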
+ if source: + self.msg = reinterpret(source, f, should_fail=True) + else: + self.msg = "<could not determine information>" + if not self.args: + self.args = (self.msg,) + +if sys.version_info > (3, 0): + AssertionError.__module__ = "builtins" + reinterpret_old = "old reinterpretation not available for py3" +else: + from py._code._assertionold import interpret as reinterpret_old +if sys.version_info >= (2, 6) or (sys.platform.startswith("java")): + from py._code._assertionnew import interpret as reinterpret +else: + reinterpret = reinterpret_old + diff --git a/venv/lib/python3.5/site-packages/py/_code/code.py b/venv/lib/python3.5/site-packages/py/_code/code.py new file mode 100644 index 0000000..822cf4b --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_code/code.py @@ -0,0 +1,787 @@ +import py +import sys +from inspect import CO_VARARGS, CO_VARKEYWORDS + +builtin_repr = repr + +reprlib = py.builtin._tryimport('repr', 'reprlib') + +if sys.version_info[0] >= 3: + from traceback import format_exception_only +else: + from py._code._py2traceback import format_exception_only + +class Code(object): + """ wrapper around Python code objects """ + def __init__(self, rawcode): + if not hasattr(rawcode, "co_filename"): + rawcode = py.code.getrawcode(rawcode) + try: + self.filename = rawcode.co_filename + self.firstlineno = rawcode.co_firstlineno - 1 + self.name = rawcode.co_name + except AttributeError: + raise TypeError("not a code object: %r" %(rawcode,)) + self.raw = rawcode + + def __eq__(self, other): + return self.raw == other.raw + + def __ne__(self, other): + return not self == other + + @property + def path(self): + """ return a path object pointing to source code (note that it + might not point to an actually existing file). """ + p = py.path.local(self.raw.co_filename) + # maybe don't try this checking + if not p.check(): + # XXX maybe try harder like the weird logic + # in the standard lib [linecache.updatecache] does? 
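py.code.Code above wraps a raw code object (or anything getrawcode can extract one from) and exposes its location and arguments. A quick sketch; getargs is defined just below in this file:

import py

def f(a, b):
    return a + b

code = py.code.Code(f)    # getrawcode extracts f.__code__
print(code.name)          # 'f'
print(code.getargs())     # ('a', 'b')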
+ p = self.raw.co_filename + return p + + @property + def fullsource(self): + """ return a py.code.Source object for the full source file of the code + """ + from py._code import source + full, _ = source.findsource(self.raw) + return full + + def source(self): + """ return a py.code.Source object for the code object's source only + """ + # return source only for that part of code + return py.code.Source(self.raw) + + def getargs(self, var=False): + """ return a tuple with the argument names for the code object + + if 'var' is set True also return the names of the variable and + keyword arguments when present + """ + # handfull shortcut for getting args + raw = self.raw + argcount = raw.co_argcount + if var: + argcount += raw.co_flags & CO_VARARGS + argcount += raw.co_flags & CO_VARKEYWORDS + return raw.co_varnames[:argcount] + +class Frame(object): + """Wrapper around a Python frame holding f_locals and f_globals + in which expressions can be evaluated.""" + + def __init__(self, frame): + self.lineno = frame.f_lineno - 1 + self.f_globals = frame.f_globals + self.f_locals = frame.f_locals + self.raw = frame + self.code = py.code.Code(frame.f_code) + + @property + def statement(self): + """ statement this frame is at """ + if self.code.fullsource is None: + return py.code.Source("") + return self.code.fullsource.getstatement(self.lineno) + + def eval(self, code, **vars): + """ evaluate 'code' in the frame + + 'vars' are optional additional local variables + + returns the result of the evaluation + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + return eval(code, self.f_globals, f_locals) + + def exec_(self, code, **vars): + """ exec 'code' in the frame + + 'vars' are optiona; additional local variables + """ + f_locals = self.f_locals.copy() + f_locals.update(vars) + py.builtin.exec_(code, self.f_globals, f_locals ) + + def repr(self, object): + """ return a 'safe' (non-recursive, one-line) string repr for 'object' + """ + return py.io.saferepr(object) + + def is_true(self, object): + return object + + def getargs(self, var=False): + """ return a list of tuples (name, value) for all arguments + + if 'var' is set True also include the variable and keyword + arguments when present + """ + retval = [] + for arg in self.code.getargs(var): + try: + retval.append((arg, self.f_locals[arg])) + except KeyError: + pass # this can occur when using Psyco + return retval + +class TracebackEntry(object): + """ a single entry in a traceback """ + + _repr_style = None + exprinfo = None + + def __init__(self, rawentry): + self._rawentry = rawentry + self.lineno = rawentry.tb_lineno - 1 + + def set_repr_style(self, mode): + assert mode in ("short", "long") + self._repr_style = mode + + @property + def frame(self): + return py.code.Frame(self._rawentry.tb_frame) + + @property + def relline(self): + return self.lineno - self.frame.code.firstlineno + + def __repr__(self): + return "<TracebackEntry %s:%d>" %(self.frame.code.path, self.lineno+1) + + @property + def statement(self): + """ py.code.Source object for the current statement """ + source = self.frame.code.fullsource + return source.getstatement(self.lineno) + + @property + def path(self): + """ path to the source code """ + return self.frame.code.path + + def getlocals(self): + return self.frame.f_locals + locals = property(getlocals, None, None, "locals of underlaying frame") + + def reinterpret(self): + """Reinterpret the failing statement and returns a detailed information + about what operations are performed.""" + if self.exprinfo 
+        if self.exprinfo is None:
+            source = str(self.statement).strip()
+            x = py.code._reinterpret(source, self.frame, should_fail=True)
+            if not isinstance(x, str):
+                raise TypeError("interpret returned non-string %r" % (x,))
+            self.exprinfo = x
+        return self.exprinfo
+
+    def getfirstlinesource(self):
+        # on Jython this firstlineno can be -1 apparently
+        return max(self.frame.code.firstlineno, 0)
+
+    def getsource(self, astcache=None):
+        """ return failing source code. """
+        # we use the passed in astcache to not reparse asttrees
+        # within exception info printing
+        from py._code.source import getstatementrange_ast
+        source = self.frame.code.fullsource
+        if source is None:
+            return None
+        key = astnode = None
+        if astcache is not None:
+            key = self.frame.code.path
+            if key is not None:
+                astnode = astcache.get(key, None)
+        start = self.getfirstlinesource()
+        try:
+            astnode, _, end = getstatementrange_ast(self.lineno, source,
+                                                    astnode=astnode)
+        except SyntaxError:
+            end = self.lineno + 1
+        else:
+            if key is not None:
+                astcache[key] = astnode
+        return source[start:end]
+
+    source = property(getsource)
+
+    def ishidden(self):
+        """ return True if the current frame has a var __tracebackhide__
+            resolving to True
+
+            mostly for internal use
+        """
+        try:
+            return self.frame.f_locals['__tracebackhide__']
+        except KeyError:
+            try:
+                return self.frame.f_globals['__tracebackhide__']
+            except KeyError:
+                return False
+
+    def __str__(self):
+        try:
+            fn = str(self.path)
+        except py.error.Error:
+            fn = '???'
+        name = self.frame.code.name
+        try:
+            line = str(self.statement).lstrip()
+        except KeyboardInterrupt:
+            raise
+        except:
+            line = "???"
+        return "  File %r:%d in %s\n  %s\n" %(fn, self.lineno+1, name, line)
+
+    def name(self):
+        return self.frame.code.raw.co_name
+    name = property(name, None, None, "co_name of underlying code")
+
+class Traceback(list):
+    """ Traceback objects encapsulate and offer higher level
+        access to Traceback entries.
+    """
+    Entry = TracebackEntry
+    def __init__(self, tb):
+        """ initialize from given python traceback object. """
+        if hasattr(tb, 'tb_next'):
+            def f(cur):
+                while cur is not None:
+                    yield self.Entry(cur)
+                    cur = cur.tb_next
+            list.__init__(self, f(tb))
+        else:
+            list.__init__(self, tb)
+
+    def cut(self, path=None, lineno=None, firstlineno=None, excludepath=None):
+        """ return a Traceback instance wrapping part of this Traceback
+
+            by providing any combination of path, lineno and firstlineno, the
+            first frame of the to-be-returned traceback is determined
+
+            this allows cutting the first part of a Traceback instance e.g.
+            for formatting reasons (removing some uninteresting bits that deal
+            with handling of the exception/traceback)
+        """
+        for x in self:
+            code = x.frame.code
+            codepath = code.path
+            if ((path is None or codepath == path) and
+                (excludepath is None or not hasattr(codepath, 'relto') or
+                 not codepath.relto(excludepath)) and
+                (lineno is None or x.lineno == lineno) and
+                (firstlineno is None or x.frame.code.firstlineno == firstlineno)):
+                return Traceback(x._rawentry)
+        return self
+
+    def __getitem__(self, key):
+        val = super(Traceback, self).__getitem__(key)
+        if isinstance(key, type(slice(0))):
+            val = self.__class__(val)
+        return val
+
+    def filter(self, fn=lambda x: not x.ishidden()):
+        """ return a Traceback instance with certain items removed
+
+            fn is a function that gets a single argument, a TracebackEntry
+            instance, and should return True when the item should be added
+            to the Traceback, False when not
+
+            by default this removes all the TracebackEntries which are hidden
+            (see ishidden() above)
+        """
+        return Traceback(filter(fn, self))
+
+    def getcrashentry(self):
+        """ return the last non-hidden traceback entry that led
+            to the exception of a traceback.
+        """
+        for i in range(-1, -len(self)-1, -1):
+            entry = self[i]
+            if not entry.ishidden():
+                return entry
+        return self[-1]
+
+    def recursionindex(self):
+        """ return the index of the frame/TracebackEntry where recursion
+            originates if appropriate, None if no recursion occurred
+        """
+        cache = {}
+        for i, entry in enumerate(self):
+            # id for the code.raw is needed to work around
+            # the strange metaprogramming in the decorator lib from pypi
+            # which generates code objects that have hash/value equality
+            #XXX needs a test
+            key = entry.frame.code.path, id(entry.frame.code.raw), entry.lineno
+            #print "checking for recursion at", key
+            l = cache.setdefault(key, [])
+            if l:
+                f = entry.frame
+                loc = f.f_locals
+                for otherloc in l:
+                    if f.is_true(f.eval(co_equal,
+                        __recursioncache_locals_1=loc,
+                        __recursioncache_locals_2=otherloc)):
+                        return i
+            l.append(entry.frame.f_locals)
+        return None
+
+co_equal = compile('__recursioncache_locals_1 == __recursioncache_locals_2',
+                   '?', 'eval')
+
+class ExceptionInfo(object):
+    """ wraps sys.exc_info() objects and offers
+        help for navigating the traceback.
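+
+        Illustrative use inside an except block (sketch, not exercised by
+        the library itself):
+
+            excinfo = py.code.ExceptionInfo()   # wraps sys.exc_info()
+            excinfo.typename                    # e.g. 'ZeroDivisionError'
+            excinfo.exconly()                   # one-line exception text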
+ """ + _striptext = '' + def __init__(self, tup=None, exprinfo=None): + if tup is None: + tup = sys.exc_info() + if exprinfo is None and isinstance(tup[1], AssertionError): + exprinfo = getattr(tup[1], 'msg', None) + if exprinfo is None: + exprinfo = str(tup[1]) + if exprinfo and exprinfo.startswith('assert '): + self._striptext = 'AssertionError: ' + self._excinfo = tup + #: the exception class + self.type = tup[0] + #: the exception instance + self.value = tup[1] + #: the exception raw traceback + self.tb = tup[2] + #: the exception type name + self.typename = self.type.__name__ + #: the exception traceback (py.code.Traceback instance) + self.traceback = py.code.Traceback(self.tb) + + def __repr__(self): + return "<ExceptionInfo %s tblen=%d>" % (self.typename, len(self.traceback)) + + def exconly(self, tryshort=False): + """ return the exception as a string + + when 'tryshort' resolves to True, and the exception is a + py.code._AssertionError, only the actual exception part of + the exception representation is returned (so 'AssertionError: ' is + removed from the beginning) + """ + lines = format_exception_only(self.type, self.value) + text = ''.join(lines) + text = text.rstrip() + if tryshort: + if text.startswith(self._striptext): + text = text[len(self._striptext):] + return text + + def errisinstance(self, exc): + """ return True if the exception is an instance of exc """ + return isinstance(self.value, exc) + + def _getreprcrash(self): + exconly = self.exconly(tryshort=True) + entry = self.traceback.getcrashentry() + path, lineno = entry.frame.code.raw.co_filename, entry.lineno + return ReprFileLocation(path, lineno+1, exconly) + + def getrepr(self, showlocals=False, style="long", + abspath=False, tbfilter=True, funcargs=False): + """ return str()able representation of this exception info. + showlocals: show locals per traceback entry + style: long|short|no|native traceback style + tbfilter: hide entries (where __tracebackhide__ is true) + + in case of style==native, tbfilter and showlocals is ignored. + """ + if style == 'native': + return ReprExceptionInfo(ReprTracebackNative( + py.std.traceback.format_exception( + self.type, + self.value, + self.traceback[0]._rawentry, + )), self._getreprcrash()) + + fmt = FormattedExcinfo(showlocals=showlocals, style=style, + abspath=abspath, tbfilter=tbfilter, funcargs=funcargs) + return fmt.repr_excinfo(self) + + def __str__(self): + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return str(loc) + + def __unicode__(self): + entry = self.traceback[-1] + loc = ReprFileLocation(entry.path, entry.lineno + 1, self.exconly()) + return loc.__unicode__() + + +class FormattedExcinfo(object): + """ presenting information about failing Functions and Generators. 
""" + # for traceback entries + flow_marker = ">" + fail_marker = "E" + + def __init__(self, showlocals=False, style="long", abspath=True, tbfilter=True, funcargs=False): + self.showlocals = showlocals + self.style = style + self.tbfilter = tbfilter + self.funcargs = funcargs + self.abspath = abspath + self.astcache = {} + + def _getindent(self, source): + # figure out indent for given source + try: + s = str(source.getstatement(len(source)-1)) + except KeyboardInterrupt: + raise + except: + try: + s = str(source[-1]) + except KeyboardInterrupt: + raise + except: + return 0 + return 4 + (len(s) - len(s.lstrip())) + + def _getentrysource(self, entry): + source = entry.getsource(self.astcache) + if source is not None: + source = source.deindent() + return source + + def _saferepr(self, obj): + return py.io.saferepr(obj) + + def repr_args(self, entry): + if self.funcargs: + args = [] + for argname, argvalue in entry.frame.getargs(var=True): + args.append((argname, self._saferepr(argvalue))) + return ReprFuncArgs(args) + + def get_source(self, source, line_index=-1, excinfo=None, short=False): + """ return formatted and marked up source lines. """ + lines = [] + if source is None or line_index >= len(source.lines): + source = py.code.Source("???") + line_index = 0 + if line_index < 0: + line_index += len(source) + space_prefix = " " + if short: + lines.append(space_prefix + source.lines[line_index].strip()) + else: + for line in source.lines[:line_index]: + lines.append(space_prefix + line) + lines.append(self.flow_marker + " " + source.lines[line_index]) + for line in source.lines[line_index+1:]: + lines.append(space_prefix + line) + if excinfo is not None: + indent = 4 if short else self._getindent(source) + lines.extend(self.get_exconly(excinfo, indent=indent, markall=True)) + return lines + + def get_exconly(self, excinfo, indent=4, markall=False): + lines = [] + indent = " " * indent + # get the real exception information out + exlines = excinfo.exconly(tryshort=True).split('\n') + failindent = self.fail_marker + indent[1:] + for line in exlines: + lines.append(failindent + line) + if not markall: + failindent = indent + return lines + + def repr_locals(self, locals): + if self.showlocals: + lines = [] + keys = [loc for loc in locals if loc[0] != "@"] + keys.sort() + for name in keys: + value = locals[name] + if name == '__builtins__': + lines.append("__builtins__ = <builtins>") + else: + # This formatting could all be handled by the + # _repr() function, which is only reprlib.Repr in + # disguise, so is very configurable. 
+ str_repr = self._saferepr(value) + #if len(str_repr) < 70 or not isinstance(value, + # (list, tuple, dict)): + lines.append("%-10s = %s" %(name, str_repr)) + #else: + # self._line("%-10s =\\" % (name,)) + # # XXX + # py.std.pprint.pprint(value, stream=self.excinfowriter) + return ReprLocals(lines) + + def repr_traceback_entry(self, entry, excinfo=None): + source = self._getentrysource(entry) + if source is None: + source = py.code.Source("???") + line_index = 0 + else: + # entry.getfirstlinesource() can be -1, should be 0 on jython + line_index = entry.lineno - max(entry.getfirstlinesource(), 0) + + lines = [] + style = entry._repr_style + if style is None: + style = self.style + if style in ("short", "long"): + short = style == "short" + reprargs = self.repr_args(entry) if not short else None + s = self.get_source(source, line_index, excinfo, short=short) + lines.extend(s) + if short: + message = "in %s" %(entry.name) + else: + message = excinfo and excinfo.typename or "" + path = self._makepath(entry.path) + filelocrepr = ReprFileLocation(path, entry.lineno+1, message) + localsrepr = None + if not short: + localsrepr = self.repr_locals(entry.locals) + return ReprEntry(lines, reprargs, localsrepr, filelocrepr, style) + if excinfo: + lines.extend(self.get_exconly(excinfo, indent=4)) + return ReprEntry(lines, None, None, None, style) + + def _makepath(self, path): + if not self.abspath: + try: + np = py.path.local().bestrelpath(path) + except OSError: + return path + if len(np) < len(str(path)): + path = np + return path + + def repr_traceback(self, excinfo): + traceback = excinfo.traceback + if self.tbfilter: + traceback = traceback.filter() + recursionindex = None + if excinfo.errisinstance(RuntimeError): + if "maximum recursion depth exceeded" in str(excinfo.value): + recursionindex = traceback.recursionindex() + last = traceback[-1] + entries = [] + extraline = None + for index, entry in enumerate(traceback): + einfo = (last == entry) and excinfo or None + reprentry = self.repr_traceback_entry(entry, einfo) + entries.append(reprentry) + if index == recursionindex: + extraline = "!!! Recursion detected (same locals & position)" + break + return ReprTraceback(entries, extraline, style=self.style) + + def repr_excinfo(self, excinfo): + reprtraceback = self.repr_traceback(excinfo) + reprcrash = excinfo._getreprcrash() + return ReprExceptionInfo(reprtraceback, reprcrash) + +class TerminalRepr: + def __str__(self): + s = self.__unicode__() + if sys.version_info[0] < 3: + s = s.encode('utf-8') + return s + + def __unicode__(self): + # FYI this is called from pytest-xdist's serialization of exception + # information. 
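+        # render through an in-memory TerminalWriter and return the
+        # accumulated, stripped text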
+        io = py.io.TextIO()
+        tw = py.io.TerminalWriter(file=io)
+        self.toterminal(tw)
+        return io.getvalue().strip()
+
+    def __repr__(self):
+        return "<%s instance at %0x>" %(self.__class__, id(self))
+
+
+class ReprExceptionInfo(TerminalRepr):
+    def __init__(self, reprtraceback, reprcrash):
+        self.reprtraceback = reprtraceback
+        self.reprcrash = reprcrash
+        self.sections = []
+
+    def addsection(self, name, content, sep="-"):
+        self.sections.append((name, content, sep))
+
+    def toterminal(self, tw):
+        self.reprtraceback.toterminal(tw)
+        for name, content, sep in self.sections:
+            tw.sep(sep, name)
+            tw.line(content)
+
+class ReprTraceback(TerminalRepr):
+    entrysep = "_ "
+
+    def __init__(self, reprentries, extraline, style):
+        self.reprentries = reprentries
+        self.extraline = extraline
+        self.style = style
+
+    def toterminal(self, tw):
+        # the entries might have different styles
+        last_style = None
+        for i, entry in enumerate(self.reprentries):
+            if entry.style == "long":
+                tw.line("")
+            entry.toterminal(tw)
+            if i < len(self.reprentries) - 1:
+                next_entry = self.reprentries[i+1]
+                if entry.style == "long" or \
+                   entry.style == "short" and next_entry.style == "long":
+                    tw.sep(self.entrysep)
+
+        if self.extraline:
+            tw.line(self.extraline)
+
+class ReprTracebackNative(ReprTraceback):
+    def __init__(self, tblines):
+        self.style = "native"
+        self.reprentries = [ReprEntryNative(tblines)]
+        self.extraline = None
+
+class ReprEntryNative(TerminalRepr):
+    style = "native"
+
+    def __init__(self, tblines):
+        self.lines = tblines
+
+    def toterminal(self, tw):
+        tw.write("".join(self.lines))
+
+class ReprEntry(TerminalRepr):
+    localssep = "_ "
+
+    def __init__(self, lines, reprfuncargs, reprlocals, filelocrepr, style):
+        self.lines = lines
+        self.reprfuncargs = reprfuncargs
+        self.reprlocals = reprlocals
+        self.reprfileloc = filelocrepr
+        self.style = style
+
+    def toterminal(self, tw):
+        if self.style == "short":
+            self.reprfileloc.toterminal(tw)
+            for line in self.lines:
+                red = line.startswith("E   ")
+                tw.line(line, bold=True, red=red)
+            #tw.line("")
+            return
+        if self.reprfuncargs:
+            self.reprfuncargs.toterminal(tw)
+        for line in self.lines:
+            red = line.startswith("E   ")
+            tw.line(line, bold=True, red=red)
+        if self.reprlocals:
+            #tw.sep(self.localssep, "Locals")
+            tw.line("")
+            self.reprlocals.toterminal(tw)
+        if self.reprfileloc:
+            if self.lines:
+                tw.line("")
+            self.reprfileloc.toterminal(tw)
+
+    def __str__(self):
+        return "%s\n%s\n%s" % ("\n".join(self.lines),
+                               self.reprlocals,
+                               self.reprfileloc)
+
+class ReprFileLocation(TerminalRepr):
+    def __init__(self, path, lineno, message):
+        self.path = str(path)
+        self.lineno = lineno
+        self.message = message
+
+    def toterminal(self, tw):
+        # filename and lineno output for each entry,
+        # using an output format that most editors understand
+        msg = self.message
+        i = msg.find("\n")
+        if i != -1:
+            msg = msg[:i]
+        tw.line("%s:%s: %s" %(self.path, self.lineno, msg))
+
+class ReprLocals(TerminalRepr):
+    def __init__(self, lines):
+        self.lines = lines
+
+    def toterminal(self, tw):
+        for line in self.lines:
+            tw.line(line)
+
+class ReprFuncArgs(TerminalRepr):
+    def __init__(self, args):
+        self.args = args
+
+    def toterminal(self, tw):
+        if self.args:
+            linesofar = ""
+            for name, value in self.args:
+                ns = "%s = %s" %(name, value)
+                if len(ns) + len(linesofar) + 2 > tw.fullwidth:
+                    if linesofar:
+                        tw.line(linesofar)
+                    linesofar = ns
+                else:
+                    if linesofar:
+                        linesofar += ", " + ns
+                    else:
+                        linesofar = ns
+            if linesofar:
+                tw.line(linesofar)
+            tw.line("")
+
+
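+# NOTE: illustrative sketch of how the classes above combine (not used by
+# the library itself); inside an except block one might write:
+#
+#     excinfo = py.code.ExceptionInfo()
+#     tw = py.io.TerminalWriter()
+#     excinfo.getrepr(style="long", showlocals=True).toterminal(tw)
+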
+oldbuiltins = {}
+
+def patch_builtins(assertion=True, compile=True):
+    """ put compile and AssertionError builtins into Python's builtins. """
+    if assertion:
+        from py._code import assertion
+        l = oldbuiltins.setdefault('AssertionError', [])
+        l.append(py.builtin.builtins.AssertionError)
+        py.builtin.builtins.AssertionError = assertion.AssertionError
+    if compile:
+        l = oldbuiltins.setdefault('compile', [])
+        l.append(py.builtin.builtins.compile)
+        py.builtin.builtins.compile = py.code.compile
+
+def unpatch_builtins(assertion=True, compile=True):
+    """ remove compile and AssertionError builtins from Python builtins. """
+    if assertion:
+        py.builtin.builtins.AssertionError = oldbuiltins['AssertionError'].pop()
+    if compile:
+        py.builtin.builtins.compile = oldbuiltins['compile'].pop()
+
+def getrawcode(obj, trycall=True):
+    """ return code object for given function. """
+    try:
+        return obj.__code__
+    except AttributeError:
+        obj = getattr(obj, 'im_func', obj)
+        obj = getattr(obj, 'func_code', obj)
+        obj = getattr(obj, 'f_code', obj)
+        obj = getattr(obj, '__code__', obj)
+        if trycall and not hasattr(obj, 'co_firstlineno'):
+            if hasattr(obj, '__call__') and not py.std.inspect.isclass(obj):
+                x = getrawcode(obj.__call__, trycall=False)
+                if hasattr(x, 'co_firstlineno'):
+                    return x
+        return obj
+
diff --git a/venv/lib/python3.5/site-packages/py/_code/source.py b/venv/lib/python3.5/site-packages/py/_code/source.py
new file mode 100644
index 0000000..59281b5
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/py/_code/source.py
@@ -0,0 +1,411 @@
+from __future__ import generators
+
+from bisect import bisect_right
+import sys
+import inspect, tokenize
+import py
+from types import ModuleType
+cpy_compile = compile
+
+try:
+    import _ast
+    from _ast import PyCF_ONLY_AST as _AST_FLAG
+except ImportError:
+    _AST_FLAG = 0
+    _ast = None
+
+
+class Source(object):
+    """ an immutable object holding a source code fragment,
+        possibly deindenting it.
+    """
+    _compilecounter = 0
+    def __init__(self, *parts, **kwargs):
+        self.lines = lines = []
+        de = kwargs.get('deindent', True)
+        rstrip = kwargs.get('rstrip', True)
+        for part in parts:
+            if not part:
+                partlines = []
+            if isinstance(part, Source):
+                partlines = part.lines
+            elif isinstance(part, (tuple, list)):
+                partlines = [x.rstrip("\n") for x in part]
+            elif isinstance(part, py.builtin._basestring):
+                partlines = part.split('\n')
+                if rstrip:
+                    while partlines:
+                        if partlines[-1].strip():
+                            break
+                        partlines.pop()
+            else:
+                partlines = getsource(part, deindent=de).lines
+            if de:
+                partlines = deindent(partlines)
+            lines.extend(partlines)
+
+    def __eq__(self, other):
+        try:
+            return self.lines == other.lines
+        except AttributeError:
+            if isinstance(other, str):
+                return str(self) == other
+            return False
+
+    def __getitem__(self, key):
+        if isinstance(key, int):
+            return self.lines[key]
+        else:
+            if key.step not in (None, 1):
+                raise IndexError("cannot slice a Source with a step")
+            return self.__getslice__(key.start, key.stop)
+
+    def __len__(self):
+        return len(self.lines)
+
+    def __getslice__(self, start, end):
+        newsource = Source()
+        newsource.lines = self.lines[start:end]
+        return newsource
+
+    def strip(self):
+        """ return new source object with trailing
+            and leading blank lines removed.
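+
+            e.g. (illustrative):
+
+                Source("\n\nx = 1\n\n").strip().lines == ["x = 1"]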
+ """ + start, end = 0, len(self) + while start < end and not self.lines[start].strip(): + start += 1 + while end > start and not self.lines[end-1].strip(): + end -= 1 + source = Source() + source.lines[:] = self.lines[start:end] + return source + + def putaround(self, before='', after='', indent=' ' * 4): + """ return a copy of the source object with + 'before' and 'after' wrapped around it. + """ + before = Source(before) + after = Source(after) + newsource = Source() + lines = [ (indent + line) for line in self.lines] + newsource.lines = before.lines + lines + after.lines + return newsource + + def indent(self, indent=' ' * 4): + """ return a copy of the source object with + all lines indented by the given indent-string. + """ + newsource = Source() + newsource.lines = [(indent+line) for line in self.lines] + return newsource + + def getstatement(self, lineno, assertion=False): + """ return Source statement which contains the + given linenumber (counted from 0). + """ + start, end = self.getstatementrange(lineno, assertion) + return self[start:end] + + def getstatementrange(self, lineno, assertion=False): + """ return (start, end) tuple which spans the minimal + statement region which containing the given lineno. + """ + if not (0 <= lineno < len(self)): + raise IndexError("lineno out of range") + ast, start, end = getstatementrange_ast(lineno, self) + return start, end + + def deindent(self, offset=None): + """ return a new source object deindented by offset. + If offset is None then guess an indentation offset from + the first non-blank line. Subsequent lines which have a + lower indentation offset will be copied verbatim as + they are assumed to be part of multilines. + """ + # XXX maybe use the tokenizer to properly handle multiline + # strings etc.pp? + newsource = Source() + newsource.lines[:] = deindent(self.lines, offset) + return newsource + + def isparseable(self, deindent=True): + """ return True if source is parseable, heuristically + deindenting it by default. + """ + try: + import parser + except ImportError: + syntax_checker = lambda x: compile(x, 'asd', 'exec') + else: + syntax_checker = parser.suite + + if deindent: + source = str(self.deindent()) + else: + source = str(self) + try: + #compile(source+'\n', "x", "exec") + syntax_checker(source+'\n') + except KeyboardInterrupt: + raise + except Exception: + return False + else: + return True + + def __str__(self): + return "\n".join(self.lines) + + def compile(self, filename=None, mode='exec', + flag=generators.compiler_flag, + dont_inherit=0, _genframe=None): + """ return compiled code object. if filename is None + invent an artificial filename which displays + the source/line position of the caller frame. 
+ """ + if not filename or py.path.local(filename).check(file=0): + if _genframe is None: + _genframe = sys._getframe(1) # the caller + fn,lineno = _genframe.f_code.co_filename, _genframe.f_lineno + base = "<%d-codegen " % self._compilecounter + self.__class__._compilecounter += 1 + if not filename: + filename = base + '%s:%d>' % (fn, lineno) + else: + filename = base + '%r %s:%d>' % (filename, fn, lineno) + source = "\n".join(self.lines) + '\n' + try: + co = cpy_compile(source, filename, mode, flag) + except SyntaxError: + ex = sys.exc_info()[1] + # re-represent syntax errors from parsing python strings + msglines = self.lines[:ex.lineno] + if ex.offset: + msglines.append(" "*ex.offset + '^') + msglines.append("(code was compiled probably from here: %s)" % filename) + newex = SyntaxError('\n'.join(msglines)) + newex.offset = ex.offset + newex.lineno = ex.lineno + newex.text = ex.text + raise newex + else: + if flag & _AST_FLAG: + return co + lines = [(x + "\n") for x in self.lines] + py.std.linecache.cache[filename] = (1, None, lines, filename) + return co + +# +# public API shortcut functions +# + +def compile_(source, filename=None, mode='exec', flags= + generators.compiler_flag, dont_inherit=0): + """ compile the given source to a raw code object, + and maintain an internal cache which allows later + retrieval of the source code for the code object + and any recursively created code objects. + """ + if _ast is not None and isinstance(source, _ast.AST): + # XXX should Source support having AST? + return cpy_compile(source, filename, mode, flags, dont_inherit) + _genframe = sys._getframe(1) # the caller + s = Source(source) + co = s.compile(filename, mode, flags, _genframe=_genframe) + return co + + +def getfslineno(obj): + """ Return source location (path, lineno) for the given object. 
+ If the source cannot be determined return ("", -1) + """ + try: + code = py.code.Code(obj) + except TypeError: + try: + fn = (py.std.inspect.getsourcefile(obj) or + py.std.inspect.getfile(obj)) + except TypeError: + return "", -1 + + fspath = fn and py.path.local(fn) or None + lineno = -1 + if fspath: + try: + _, lineno = findsource(obj) + except IOError: + pass + else: + fspath = code.path + lineno = code.firstlineno + assert isinstance(lineno, int) + return fspath, lineno + +# +# helper functions +# + +def findsource(obj): + try: + sourcelines, lineno = py.std.inspect.findsource(obj) + except py.builtin._sysex: + raise + except: + return None, -1 + source = Source() + source.lines = [line.rstrip() for line in sourcelines] + return source, lineno + +def getsource(obj, **kwargs): + obj = py.code.getrawcode(obj) + try: + strsrc = inspect.getsource(obj) + except IndentationError: + strsrc = "\"Buggy python version consider upgrading, cannot get source\"" + assert isinstance(strsrc, str) + return Source(strsrc, **kwargs) + +def deindent(lines, offset=None): + if offset is None: + for line in lines: + line = line.expandtabs() + s = line.lstrip() + if s: + offset = len(line)-len(s) + break + else: + offset = 0 + if offset == 0: + return list(lines) + newlines = [] + def readline_generator(lines): + for line in lines: + yield line + '\n' + while True: + yield '' + + it = readline_generator(lines) + + try: + for _, _, (sline, _), (eline, _), _ in tokenize.generate_tokens(lambda: next(it)): + if sline > len(lines): + break # End of input reached + if sline > len(newlines): + line = lines[sline - 1].expandtabs() + if line.lstrip() and line[:offset].isspace(): + line = line[offset:] # Deindent + newlines.append(line) + + for i in range(sline, eline): + # Don't deindent continuing lines of + # multiline tokens (i.e. multiline strings) + newlines.append(lines[i]) + except (IndentationError, tokenize.TokenError): + pass + # Add any lines we didn't see. E.g. if an exception was raised. 
+    newlines.extend(lines[len(newlines):])
+    return newlines
+
+
+def get_statement_startend2(lineno, node):
+    import ast
+    # flatten all statements and except handlers into one lineno-list
+    # AST's line numbers start indexing at 1
+    l = []
+    for x in ast.walk(node):
+        if isinstance(x, _ast.stmt) or isinstance(x, _ast.ExceptHandler):
+            l.append(x.lineno - 1)
+            for name in "finalbody", "orelse":
+                val = getattr(x, name, None)
+                if val:
+                    # treat the finally/orelse part as its own statement
+                    l.append(val[0].lineno - 1 - 1)
+    l.sort()
+    insert_index = bisect_right(l, lineno)
+    start = l[insert_index - 1]
+    if insert_index >= len(l):
+        end = None
+    else:
+        end = l[insert_index]
+    return start, end
+
+
+def getstatementrange_ast(lineno, source, assertion=False, astnode=None):
+    if astnode is None:
+        content = str(source)
+        if sys.version_info < (2,7):
+            content += "\n"
+        try:
+            astnode = compile(content, "source", "exec", 1024)  # 1024 for AST
+        except ValueError:
+            start, end = getstatementrange_old(lineno, source, assertion)
+            return None, start, end
+    start, end = get_statement_startend2(lineno, astnode)
+    # we need to correct the end:
+    # - ast-parsing strips comments
+    # - there might be empty lines
+    # - we might have lesser indented code blocks at the end
+    if end is None:
+        end = len(source.lines)
+
+    if end > start + 1:
+        # make sure we don't span differently indented code blocks
+        # by using the BlockFinder helper which inspect.getsource() itself uses
+        block_finder = inspect.BlockFinder()
+        # if we start with an indented line, put blockfinder to "started" mode
+        block_finder.started = source.lines[start][0].isspace()
+        it = ((x + "\n") for x in source.lines[start:end])
+        try:
+            for tok in tokenize.generate_tokens(lambda: next(it)):
+                block_finder.tokeneater(*tok)
+        except (inspect.EndOfBlock, IndentationError):
+            end = block_finder.last + start
+        except Exception:
+            pass
+
+    # the end might still point to a comment or empty line, correct it
+    while end:
+        line = source.lines[end - 1].lstrip()
+        if line.startswith("#") or not line:
+            end -= 1
+        else:
+            break
+    return astnode, start, end
+
+
+def getstatementrange_old(lineno, source, assertion=False):
+    """ return (start, end) tuple which spans the minimal
+        statement region containing the given lineno.
+        raise an IndexError if no such statementrange can be found.
+    """
+    # XXX this logic is only used on python2.4 and below
+    # 1. find the start of the statement
+    from codeop import compile_command
+    for start in range(lineno, -1, -1):
+        if assertion:
+            line = source.lines[start]
+            # the following lines are not fully tested, change with care
+            if 'super' in line and 'self' in line and '__init__' in line:
+                raise IndexError("likely a subclass")
+            if "assert" not in line and "raise" not in line:
+                continue
+        trylines = source.lines[start:lineno+1]
+        # quick hack to prepare parsing an indented line with
+        # compile_command() (which errors on "return" outside defs)
+        trylines.insert(0, 'def xxx():')
+        trysource = '\n '.join(trylines)
+        #              ^ space here
+        try:
+            compile_command(trysource)
+        except (SyntaxError, OverflowError, ValueError):
+            continue
+
+        # 2. find the end of the statement
+        for end in range(lineno+1, len(source)+1):
+            trysource = source[start:end]
+            if trysource.isparseable():
+                return start, end
+    raise SyntaxError("no valid source range around line %d " % (lineno,))
+
+
diff --git a/venv/lib/python3.5/site-packages/py/_error.py b/venv/lib/python3.5/site-packages/py/_error.py
new file mode 100644
index 0000000..228de58
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/py/_error.py
@@ -0,0 +1,89 @@
+"""
+create errno-specific classes for IO or os calls.
+
+"""
+import sys, os, errno
+
+class Error(EnvironmentError):
+    def __repr__(self):
+        return "%s.%s %r: %s " %(self.__class__.__module__,
+                               self.__class__.__name__,
+                               self.__class__.__doc__,
+                               " ".join(map(str, self.args)),
+                               #repr(self.args)
+                                )
+
+    def __str__(self):
+        s = "[%s]: %s" %(self.__class__.__doc__,
+                          " ".join(map(str, self.args)),
+                          )
+        return s
+
+_winerrnomap = {
+    2: errno.ENOENT,
+    3: errno.ENOENT,
+    17: errno.EEXIST,
+    18: errno.EXDEV,
+    13: errno.EBUSY, # empty cd drive, but ENOMEDIUM seems unavailable
+    22: errno.ENOTDIR,
+    20: errno.ENOTDIR,
+    267: errno.ENOTDIR,
+    5: errno.EACCES,  # anything better?
+}
+
+class ErrorMaker(object):
+    """ lazily provides Exception classes for each possible POSIX errno
+        (as defined per the 'errno' module).  All such instances
+        subclass EnvironmentError.
+    """
+    Error = Error
+    _errno2class = {}
+
+    def __getattr__(self, name):
+        if name[0] == "_":
+            raise AttributeError(name)
+        eno = getattr(errno, name)
+        cls = self._geterrnoclass(eno)
+        setattr(self, name, cls)
+        return cls
+
+    def _geterrnoclass(self, eno):
+        try:
+            return self._errno2class[eno]
+        except KeyError:
+            clsname = errno.errorcode.get(eno, "UnknownErrno%d" %(eno,))
+            errorcls = type(Error)(clsname, (Error,),
+                    {'__module__':'py.error',
+                     '__doc__': os.strerror(eno)})
+            self._errno2class[eno] = errorcls
+            return errorcls
+
+    def checked_call(self, func, *args, **kwargs):
+        """ call a function and raise an errno-exception if applicable. """
+        __tracebackhide__ = True
+        try:
+            return func(*args, **kwargs)
+        except self.Error:
+            raise
+        except (OSError, EnvironmentError):
+            cls, value, tb = sys.exc_info()
+            if not hasattr(value, 'errno'):
+                raise
+            __tracebackhide__ = False
+            errno = value.errno
+            try:
+                if not isinstance(value, WindowsError):
+                    raise NameError
+            except NameError:
+                # we are not on Windows, or we got a proper OSError
+                cls = self._geterrnoclass(errno)
+            else:
+                try:
+                    cls = self._geterrnoclass(_winerrnomap[errno])
+                except KeyError:
+                    raise value
+            raise cls("%s%r" % (func.__name__, args))
+        __tracebackhide__ = True
+
+
+error = ErrorMaker()
diff --git a/venv/lib/python3.5/site-packages/py/_iniconfig.py b/venv/lib/python3.5/site-packages/py/_iniconfig.py
new file mode 100644
index 0000000..8e46404
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/py/_iniconfig.py
@@ -0,0 +1,162 @@
+""" brain-dead simple parser for ini-style files.
+(C) Ronny Pfannschmidt, Holger Krekel -- MIT licensed +""" +__version__ = "0.2.dev2" + +__all__ = ['IniConfig', 'ParseError'] + +COMMENTCHARS = "#;" + +class ParseError(Exception): + def __init__(self, path, lineno, msg): + Exception.__init__(self, path, lineno, msg) + self.path = path + self.lineno = lineno + self.msg = msg + + def __str__(self): + return "%s:%s: %s" %(self.path, self.lineno+1, self.msg) + +class SectionWrapper(object): + def __init__(self, config, name): + self.config = config + self.name = name + + def lineof(self, name): + return self.config.lineof(self.name, name) + + def get(self, key, default=None, convert=str): + return self.config.get(self.name, key, convert=convert, default=default) + + def __getitem__(self, key): + return self.config.sections[self.name][key] + + def __iter__(self): + section = self.config.sections.get(self.name, []) + def lineof(key): + return self.config.lineof(self.name, key) + for name in sorted(section, key=lineof): + yield name + + def items(self): + for name in self: + yield name, self[name] + + +class IniConfig(object): + def __init__(self, path, data=None): + self.path = str(path) # convenience + if data is None: + f = open(self.path) + try: + tokens = self._parse(iter(f)) + finally: + f.close() + else: + tokens = self._parse(data.splitlines(True)) + + self._sources = {} + self.sections = {} + + for lineno, section, name, value in tokens: + if section is None: + self._raise(lineno, 'no section header defined') + self._sources[section, name] = lineno + if name is None: + if section in self.sections: + self._raise(lineno, 'duplicate section %r'%(section, )) + self.sections[section] = {} + else: + if name in self.sections[section]: + self._raise(lineno, 'duplicate name %r'%(name, )) + self.sections[section][name] = value + + def _raise(self, lineno, msg): + raise ParseError(self.path, lineno, msg) + + def _parse(self, line_iter): + result = [] + section = None + for lineno, line in enumerate(line_iter): + name, data = self._parseline(line, lineno) + # new value + if name is not None and data is not None: + result.append((lineno, section, name, data)) + # new section + elif name is not None and data is None: + if not name: + self._raise(lineno, 'empty section name') + section = name + result.append((lineno, section, None, None)) + # continuation + elif name is None and data is not None: + if not result: + self._raise(lineno, 'unexpected value continuation') + last = result.pop() + last_name, last_data = last[-2:] + if last_name is None: + self._raise(lineno, 'unexpected value continuation') + + if last_data: + data = '%s\n%s' % (last_data, data) + result.append(last[:-1] + (data,)) + return result + + def _parseline(self, line, lineno): + # blank lines + if iscommentline(line): + line = "" + else: + line = line.rstrip() + if not line: + return None, None + # section + if line[0] == '[': + realline = line + for c in COMMENTCHARS: + line = line.split(c)[0].rstrip() + if line[-1] == "]": + return line[1:-1], None + return None, realline.strip() + # value + elif not line[0].isspace(): + try: + name, value = line.split('=', 1) + if ":" in name: + raise ValueError() + except ValueError: + try: + name, value = line.split(":", 1) + except ValueError: + self._raise(lineno, 'unexpected line: %r' % line) + return name.strip(), value.strip() + # continuation + else: + return None, line.strip() + + def lineof(self, section, name=None): + lineno = self._sources.get((section, name)) + if lineno is not None: + return lineno + 1 + + def get(self, section, 
name, default=None, convert=str):
+        try:
+            return convert(self.sections[section][name])
+        except KeyError:
+            return default
+
+    def __getitem__(self, name):
+        if name not in self.sections:
+            raise KeyError(name)
+        return SectionWrapper(self, name)
+
+    def __iter__(self):
+        for name in sorted(self.sections, key=self.lineof):
+            yield SectionWrapper(self, name)
+
+    def __contains__(self, arg):
+        return arg in self.sections
+
+def iscommentline(line):
+    c = line.lstrip()[:1]
+    return c in COMMENTCHARS
diff --git a/venv/lib/python3.5/site-packages/py/_io/__init__.py b/venv/lib/python3.5/site-packages/py/_io/__init__.py
new file mode 100644
index 0000000..f1a6d63
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/py/_io/__init__.py
@@ -0,0 +1 @@
+""" input/output helping """
diff --git a/venv/lib/python3.5/site-packages/py/_io/capture.py b/venv/lib/python3.5/site-packages/py/_io/capture.py
new file mode 100644
index 0000000..3ce2259
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/py/_io/capture.py
@@ -0,0 +1,371 @@
+import os
+import sys
+import py
+import tempfile
+
+try:
+    from io import StringIO
+except ImportError:
+    from StringIO import StringIO
+
+if sys.version_info < (3,0):
+    class TextIO(StringIO):
+        def write(self, data):
+            if not isinstance(data, unicode):
+                data = unicode(data, getattr(self, '_encoding', 'UTF-8'), 'replace')
+            StringIO.write(self, data)
+else:
+    TextIO = StringIO
+
+try:
+    from io import BytesIO
+except ImportError:
+    class BytesIO(StringIO):
+        def write(self, data):
+            if isinstance(data, unicode):
+                raise TypeError("not a byte value: %r" %(data,))
+            StringIO.write(self, data)
+
+patchsysdict = {0: 'stdin', 1: 'stdout', 2: 'stderr'}
+
+class FDCapture:
+    """ Capture IO to/from a given os-level filedescriptor. """
+
+    def __init__(self, targetfd, tmpfile=None, now=True, patchsys=False):
+        """ save targetfd descriptor, and open a new
+            temporary file there.  If no tmpfile is
+            specified a tempfile.TemporaryFile() will be opened
+            in text mode.
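+
+            Illustrative use (fd 1 is the os-level stdout):
+
+                cap = FDCapture(1)     # starts capturing right away (now=True)
+                os.write(1, b"hi")
+                f = cap.done()         # restores fd 1; f.read() == "hi"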
+ """ + self.targetfd = targetfd + if tmpfile is None and targetfd != 0: + f = tempfile.TemporaryFile('wb+') + tmpfile = dupfile(f, encoding="UTF-8") + f.close() + self.tmpfile = tmpfile + self._savefd = os.dup(self.targetfd) + if patchsys: + self._oldsys = getattr(sys, patchsysdict[targetfd]) + if now: + self.start() + + def start(self): + try: + os.fstat(self._savefd) + except OSError: + raise ValueError("saved filedescriptor not valid, " + "did you call start() twice?") + if self.targetfd == 0 and not self.tmpfile: + fd = os.open(devnullpath, os.O_RDONLY) + os.dup2(fd, 0) + os.close(fd) + if hasattr(self, '_oldsys'): + setattr(sys, patchsysdict[self.targetfd], DontReadFromInput()) + else: + os.dup2(self.tmpfile.fileno(), self.targetfd) + if hasattr(self, '_oldsys'): + setattr(sys, patchsysdict[self.targetfd], self.tmpfile) + + def done(self): + """ unpatch and clean up, returns the self.tmpfile (file object) + """ + os.dup2(self._savefd, self.targetfd) + os.close(self._savefd) + if self.targetfd != 0: + self.tmpfile.seek(0) + if hasattr(self, '_oldsys'): + setattr(sys, patchsysdict[self.targetfd], self._oldsys) + return self.tmpfile + + def writeorg(self, data): + """ write a string to the original file descriptor + """ + tempfp = tempfile.TemporaryFile() + try: + os.dup2(self._savefd, tempfp.fileno()) + tempfp.write(data) + finally: + tempfp.close() + + +def dupfile(f, mode=None, buffering=0, raising=False, encoding=None): + """ return a new open file object that's a duplicate of f + + mode is duplicated if not given, 'buffering' controls + buffer size (defaulting to no buffering) and 'raising' + defines whether an exception is raised when an incompatible + file object is passed in (if raising is False, the file + object itself will be returned) + """ + try: + fd = f.fileno() + mode = mode or f.mode + except AttributeError: + if raising: + raise + return f + newfd = os.dup(fd) + if sys.version_info >= (3,0): + if encoding is not None: + mode = mode.replace("b", "") + buffering = True + return os.fdopen(newfd, mode, buffering, encoding, closefd=True) + else: + f = os.fdopen(newfd, mode, buffering) + if encoding is not None: + return EncodedFile(f, encoding) + return f + +class EncodedFile(object): + def __init__(self, _stream, encoding): + self._stream = _stream + self.encoding = encoding + + def write(self, obj): + if isinstance(obj, unicode): + obj = obj.encode(self.encoding) + elif isinstance(obj, str): + pass + else: + obj = str(obj) + self._stream.write(obj) + + def writelines(self, linelist): + data = ''.join(linelist) + self.write(data) + + def __getattr__(self, name): + return getattr(self._stream, name) + +class Capture(object): + def call(cls, func, *args, **kwargs): + """ return a (res, out, err) tuple where + out and err represent the output/error output + during function execution. + call the given function with args/kwargs + and capture output/error during its execution. + """ + so = cls() + try: + res = func(*args, **kwargs) + finally: + out, err = so.reset() + return res, out, err + call = classmethod(call) + + def reset(self): + """ reset sys.stdout/stderr and return captured output as strings. 
""" + if hasattr(self, '_reset'): + raise ValueError("was already reset") + self._reset = True + outfile, errfile = self.done(save=False) + out, err = "", "" + if outfile and not outfile.closed: + out = outfile.read() + outfile.close() + if errfile and errfile != outfile and not errfile.closed: + err = errfile.read() + errfile.close() + return out, err + + def suspend(self): + """ return current snapshot captures, memorize tempfiles. """ + outerr = self.readouterr() + outfile, errfile = self.done() + return outerr + + +class StdCaptureFD(Capture): + """ This class allows to capture writes to FD1 and FD2 + and may connect a NULL file to FD0 (and prevent + reads from sys.stdin). If any of the 0,1,2 file descriptors + is invalid it will not be captured. + """ + def __init__(self, out=True, err=True, mixed=False, + in_=True, patchsys=True, now=True): + self._options = { + "out": out, + "err": err, + "mixed": mixed, + "in_": in_, + "patchsys": patchsys, + "now": now, + } + self._save() + if now: + self.startall() + + def _save(self): + in_ = self._options['in_'] + out = self._options['out'] + err = self._options['err'] + mixed = self._options['mixed'] + patchsys = self._options['patchsys'] + if in_: + try: + self.in_ = FDCapture(0, tmpfile=None, now=False, + patchsys=patchsys) + except OSError: + pass + if out: + tmpfile = None + if hasattr(out, 'write'): + tmpfile = out + try: + self.out = FDCapture(1, tmpfile=tmpfile, + now=False, patchsys=patchsys) + self._options['out'] = self.out.tmpfile + except OSError: + pass + if err: + if out and mixed: + tmpfile = self.out.tmpfile + elif hasattr(err, 'write'): + tmpfile = err + else: + tmpfile = None + try: + self.err = FDCapture(2, tmpfile=tmpfile, + now=False, patchsys=patchsys) + self._options['err'] = self.err.tmpfile + except OSError: + pass + + def startall(self): + if hasattr(self, 'in_'): + self.in_.start() + if hasattr(self, 'out'): + self.out.start() + if hasattr(self, 'err'): + self.err.start() + + def resume(self): + """ resume capturing with original temp files. """ + self.startall() + + def done(self, save=True): + """ return (outfile, errfile) and stop capturing. """ + outfile = errfile = None + if hasattr(self, 'out') and not self.out.tmpfile.closed: + outfile = self.out.done() + if hasattr(self, 'err') and not self.err.tmpfile.closed: + errfile = self.err.done() + if hasattr(self, 'in_'): + tmpfile = self.in_.done() + if save: + self._save() + return outfile, errfile + + def readouterr(self): + """ return snapshot value of stdout/stderr capturings. """ + if hasattr(self, "out"): + out = self._readsnapshot(self.out.tmpfile) + else: + out = "" + if hasattr(self, "err"): + err = self._readsnapshot(self.err.tmpfile) + else: + err = "" + return [out, err] + + def _readsnapshot(self, f): + f.seek(0) + res = f.read() + enc = getattr(f, "encoding", None) + if enc: + res = py.builtin._totext(res, enc, "replace") + f.truncate(0) + f.seek(0) + return res + + +class StdCapture(Capture): + """ This class allows to capture writes to sys.stdout|stderr "in-memory" + and will raise errors on tries to read from sys.stdin. It only + modifies sys.stdout|stderr|stdin attributes and does not + touch underlying File Descriptors (use StdCaptureFD for that). 
+ """ + def __init__(self, out=True, err=True, in_=True, mixed=False, now=True): + self._oldout = sys.stdout + self._olderr = sys.stderr + self._oldin = sys.stdin + if out and not hasattr(out, 'file'): + out = TextIO() + self.out = out + if err: + if mixed: + err = out + elif not hasattr(err, 'write'): + err = TextIO() + self.err = err + self.in_ = in_ + if now: + self.startall() + + def startall(self): + if self.out: + sys.stdout = self.out + if self.err: + sys.stderr = self.err + if self.in_: + sys.stdin = self.in_ = DontReadFromInput() + + def done(self, save=True): + """ return (outfile, errfile) and stop capturing. """ + outfile = errfile = None + if self.out and not self.out.closed: + sys.stdout = self._oldout + outfile = self.out + outfile.seek(0) + if self.err and not self.err.closed: + sys.stderr = self._olderr + errfile = self.err + errfile.seek(0) + if self.in_: + sys.stdin = self._oldin + return outfile, errfile + + def resume(self): + """ resume capturing with original temp files. """ + self.startall() + + def readouterr(self): + """ return snapshot value of stdout/stderr capturings. """ + out = err = "" + if self.out: + out = self.out.getvalue() + self.out.truncate(0) + self.out.seek(0) + if self.err: + err = self.err.getvalue() + self.err.truncate(0) + self.err.seek(0) + return out, err + +class DontReadFromInput: + """Temporary stub class. Ideally when stdin is accessed, the + capturing should be turned off, with possibly all data captured + so far sent to the screen. This should be configurable, though, + because in automated test runs it is better to crash than + hang indefinitely. + """ + def read(self, *args): + raise IOError("reading from stdin while output is captured") + readline = read + readlines = read + __iter__ = read + + def fileno(self): + raise ValueError("redirected Stdin is pseudofile, has no fileno()") + def isatty(self): + return False + def close(self): + pass + +try: + devnullpath = os.devnull +except AttributeError: + if os.name == 'nt': + devnullpath = 'NUL' + else: + devnullpath = '/dev/null' diff --git a/venv/lib/python3.5/site-packages/py/_io/saferepr.py b/venv/lib/python3.5/site-packages/py/_io/saferepr.py new file mode 100644 index 0000000..e86f18f --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_io/saferepr.py @@ -0,0 +1,71 @@ +import py +import sys + +builtin_repr = repr + +reprlib = py.builtin._tryimport('repr', 'reprlib') + +class SafeRepr(reprlib.Repr): + """ subclass of repr.Repr that limits the resulting size of repr() + and includes information on exceptions raised during the call. + """ + def repr(self, x): + return self._callhelper(reprlib.Repr.repr, self, x) + + def repr_unicode(self, x, level): + # Strictly speaking wrong on narrow builds + def repr(u): + if "'" not in u: + return py.builtin._totext("'%s'") % u + elif '"' not in u: + return py.builtin._totext('"%s"') % u + else: + return py.builtin._totext("'%s'") % u.replace("'", r"\'") + s = repr(x[:self.maxstring]) + if len(s) > self.maxstring: + i = max(0, (self.maxstring-3)//2) + j = max(0, self.maxstring-3-i) + s = repr(x[:i] + x[len(x)-j:]) + s = s[:i] + '...' 
+ s[len(s)-j:] + return s + + def repr_instance(self, x, level): + return self._callhelper(builtin_repr, x) + + def _callhelper(self, call, x, *args): + try: + # Try the vanilla repr and make sure that the result is a string + s = call(x, *args) + except py.builtin._sysex: + raise + except: + cls, e, tb = sys.exc_info() + exc_name = getattr(cls, '__name__', 'unknown') + try: + exc_info = str(e) + except py.builtin._sysex: + raise + except: + exc_info = 'unknown' + return '<[%s("%s") raised in repr()] %s object at 0x%x>' % ( + exc_name, exc_info, x.__class__.__name__, id(x)) + else: + if len(s) > self.maxsize: + i = max(0, (self.maxsize-3)//2) + j = max(0, self.maxsize-3-i) + s = s[:i] + '...' + s[len(s)-j:] + return s + +def saferepr(obj, maxsize=240): + """ return a size-limited safe repr-string for the given object. + Failing __repr__ functions of user instances will be represented + with a short exception info and 'saferepr' generally takes + care to never raise exceptions itself. This function is a wrapper + around the Repr/reprlib functionality of the standard 2.6 lib. + """ + # review exception handling + srepr = SafeRepr() + srepr.maxstring = maxsize + srepr.maxsize = maxsize + srepr.maxother = 160 + return srepr.repr(obj) diff --git a/venv/lib/python3.5/site-packages/py/_io/terminalwriter.py b/venv/lib/python3.5/site-packages/py/_io/terminalwriter.py new file mode 100644 index 0000000..715547d --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_io/terminalwriter.py @@ -0,0 +1,357 @@ +""" + +Helper functions for writing to terminals and files. + +""" + + +import sys, os +import py +py3k = sys.version_info[0] >= 3 +from py.builtin import text, bytes + +win32_and_ctypes = False +colorama = None +if sys.platform == "win32": + try: + import colorama + except ImportError: + try: + import ctypes + win32_and_ctypes = True + except ImportError: + pass + + +def _getdimensions(): + import termios,fcntl,struct + call = fcntl.ioctl(1,termios.TIOCGWINSZ,"\000"*8) + height,width = struct.unpack( "hhhh", call ) [:2] + return height, width + + +def get_terminal_width(): + height = width = 0 + try: + height, width = _getdimensions() + except py.builtin._sysex: + raise + except: + # pass to fallback below + pass + + if width == 0: + # FALLBACK: + # * some exception happened + # * or this is emacs terminal which reports (0,0) + width = int(os.environ.get('COLUMNS', 80)) + + # XXX the windows getdimensions may be bogus, let's sanify a bit + if width < 40: + width = 80 + return width + +terminal_width = get_terminal_width() + +# XXX unify with _escaped func below +def ansi_print(text, esc, file=None, newline=True, flush=False): + if file is None: + file = sys.stderr + text = text.rstrip() + if esc and not isinstance(esc, tuple): + esc = (esc,) + if esc and sys.platform != "win32" and file.isatty(): + text = (''.join(['\x1b[%sm' % cod for cod in esc]) + + text + + '\x1b[0m') # ANSI color code "reset" + if newline: + text += '\n' + + if esc and win32_and_ctypes and file.isatty(): + if 1 in esc: + bold = True + esc = tuple([x for x in esc if x != 1]) + else: + bold = False + esctable = {() : FOREGROUND_WHITE, # normal + (31,): FOREGROUND_RED, # red + (32,): FOREGROUND_GREEN, # green + (33,): FOREGROUND_GREEN|FOREGROUND_RED, # yellow + (34,): FOREGROUND_BLUE, # blue + (35,): FOREGROUND_BLUE|FOREGROUND_RED, # purple + (36,): FOREGROUND_BLUE|FOREGROUND_GREEN, # cyan + (37,): FOREGROUND_WHITE, # white + (39,): FOREGROUND_WHITE, # reset + } + attr = esctable.get(esc, FOREGROUND_WHITE) + if bold: + attr |= 
FOREGROUND_INTENSITY + STD_OUTPUT_HANDLE = -11 + STD_ERROR_HANDLE = -12 + if file is sys.stderr: + handle = GetStdHandle(STD_ERROR_HANDLE) + else: + handle = GetStdHandle(STD_OUTPUT_HANDLE) + oldcolors = GetConsoleInfo(handle).wAttributes + attr |= (oldcolors & 0x0f0) + SetConsoleTextAttribute(handle, attr) + while len(text) > 32768: + file.write(text[:32768]) + text = text[32768:] + if text: + file.write(text) + SetConsoleTextAttribute(handle, oldcolors) + else: + file.write(text) + + if flush: + file.flush() + +def should_do_markup(file): + if os.environ.get('PY_COLORS') == '1': + return True + if os.environ.get('PY_COLORS') == '0': + return False + return hasattr(file, 'isatty') and file.isatty() \ + and os.environ.get('TERM') != 'dumb' \ + and not (sys.platform.startswith('java') and os._name == 'nt') + +class TerminalWriter(object): + _esctable = dict(black=30, red=31, green=32, yellow=33, + blue=34, purple=35, cyan=36, white=37, + Black=40, Red=41, Green=42, Yellow=43, + Blue=44, Purple=45, Cyan=46, White=47, + bold=1, light=2, blink=5, invert=7) + + # XXX deprecate stringio argument + def __init__(self, file=None, stringio=False, encoding=None): + if file is None: + if stringio: + self.stringio = file = py.io.TextIO() + else: + file = py.std.sys.stdout + elif py.builtin.callable(file) and not ( + hasattr(file, "write") and hasattr(file, "flush")): + file = WriteFile(file, encoding=encoding) + if hasattr(file, "isatty") and file.isatty() and colorama: + file = colorama.AnsiToWin32(file).stream + self.encoding = encoding or getattr(file, 'encoding', "utf-8") + self._file = file + self.hasmarkup = should_do_markup(file) + self._lastlen = 0 + + @property + def fullwidth(self): + if hasattr(self, '_terminal_width'): + return self._terminal_width + return get_terminal_width() + + @fullwidth.setter + def fullwidth(self, value): + self._terminal_width = value + + def _escaped(self, text, esc): + if esc and self.hasmarkup: + text = (''.join(['\x1b[%sm' % cod for cod in esc]) + + text +'\x1b[0m') + return text + + def markup(self, text, **kw): + esc = [] + for name in kw: + if name not in self._esctable: + raise ValueError("unknown markup: %r" %(name,)) + if kw[name]: + esc.append(self._esctable[name]) + return self._escaped(text, tuple(esc)) + + def sep(self, sepchar, title=None, fullwidth=None, **kw): + if fullwidth is None: + fullwidth = self.fullwidth + # the goal is to have the line be as long as possible + # under the condition that len(line) <= fullwidth + if sys.platform == "win32": + # if we print in the last column on windows we are on a + # new line but there is no way to verify/neutralize this + # (we may not know the exact line width) + # so let's be defensive to avoid empty lines in the output + fullwidth -= 1 + if title is not None: + # we want 2 + 2*len(fill) + len(title) <= fullwidth + # i.e. 2 + 2*len(sepchar)*N + len(title) <= fullwidth + # 2*len(sepchar)*N <= fullwidth - len(title) - 2 + # N <= (fullwidth - len(title) - 2) // (2*len(sepchar)) + N = (fullwidth - len(title) - 2) // (2*len(sepchar)) + fill = sepchar * N + line = "%s %s %s" % (fill, title, fill) + else: + # we want len(sepchar)*N <= fullwidth + # i.e. 
N <= fullwidth // len(sepchar) + line = sepchar * (fullwidth // len(sepchar)) + # in some situations there is room for an extra sepchar at the right, + # in particular if we consider that with a sepchar like "_ " the + # trailing space is not important at the end of the line + if len(line) + len(sepchar.rstrip()) <= fullwidth: + line += sepchar.rstrip() + + self.line(line, **kw) + + def write(self, msg, **kw): + if msg: + if not isinstance(msg, (bytes, text)): + msg = text(msg) + if self.hasmarkup and kw: + markupmsg = self.markup(msg, **kw) + else: + markupmsg = msg + write_out(self._file, markupmsg) + + def line(self, s='', **kw): + self.write(s, **kw) + self._checkfill(s) + self.write('\n') + + def reline(self, line, **kw): + if not self.hasmarkup: + raise ValueError("cannot use rewrite-line without terminal") + self.write(line, **kw) + self._checkfill(line) + self.write('\r') + self._lastlen = len(line) + + def _checkfill(self, line): + diff2last = self._lastlen - len(line) + if diff2last > 0: + self.write(" " * diff2last) + +class Win32ConsoleWriter(TerminalWriter): + def write(self, msg, **kw): + if msg: + if not isinstance(msg, (bytes, text)): + msg = text(msg) + oldcolors = None + if self.hasmarkup and kw: + handle = GetStdHandle(STD_OUTPUT_HANDLE) + oldcolors = GetConsoleInfo(handle).wAttributes + default_bg = oldcolors & 0x00F0 + attr = default_bg + if kw.pop('bold', False): + attr |= FOREGROUND_INTENSITY + + if kw.pop('red', False): + attr |= FOREGROUND_RED + elif kw.pop('blue', False): + attr |= FOREGROUND_BLUE + elif kw.pop('green', False): + attr |= FOREGROUND_GREEN + elif kw.pop('yellow', False): + attr |= FOREGROUND_GREEN|FOREGROUND_RED + else: + attr |= oldcolors & 0x0007 + + SetConsoleTextAttribute(handle, attr) + write_out(self._file, msg) + if oldcolors: + SetConsoleTextAttribute(handle, oldcolors) + +class WriteFile(object): + def __init__(self, writemethod, encoding=None): + self.encoding = encoding + self._writemethod = writemethod + + def write(self, data): + if self.encoding: + data = data.encode(self.encoding, "replace") + self._writemethod(data) + + def flush(self): + return + + +if win32_and_ctypes: + TerminalWriter = Win32ConsoleWriter + import ctypes + from ctypes import wintypes + + # ctypes access to the Windows console + STD_OUTPUT_HANDLE = -11 + STD_ERROR_HANDLE = -12 + FOREGROUND_BLACK = 0x0000 # black text + FOREGROUND_BLUE = 0x0001 # text color contains blue. + FOREGROUND_GREEN = 0x0002 # text color contains green. + FOREGROUND_RED = 0x0004 # text color contains red. + FOREGROUND_WHITE = 0x0007 + FOREGROUND_INTENSITY = 0x0008 # text color is intensified. + BACKGROUND_BLACK = 0x0000 # background color black + BACKGROUND_BLUE = 0x0010 # background color contains blue. + BACKGROUND_GREEN = 0x0020 # background color contains green. + BACKGROUND_RED = 0x0040 # background color contains red. + BACKGROUND_WHITE = 0x0070 + BACKGROUND_INTENSITY = 0x0080 # background color is intensified. 
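+    # The attribute flags below combine bitwise; e.g.
+    # FOREGROUND_RED | FOREGROUND_GREEN (0x0004 | 0x0002 == 0x0006) gives
+    # yellow text, mirroring the ANSI (33,) entry in ansi_print's esctable.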
+ + SHORT = ctypes.c_short + class COORD(ctypes.Structure): + _fields_ = [('X', SHORT), + ('Y', SHORT)] + class SMALL_RECT(ctypes.Structure): + _fields_ = [('Left', SHORT), + ('Top', SHORT), + ('Right', SHORT), + ('Bottom', SHORT)] + class CONSOLE_SCREEN_BUFFER_INFO(ctypes.Structure): + _fields_ = [('dwSize', COORD), + ('dwCursorPosition', COORD), + ('wAttributes', wintypes.WORD), + ('srWindow', SMALL_RECT), + ('dwMaximumWindowSize', COORD)] + + _GetStdHandle = ctypes.windll.kernel32.GetStdHandle + _GetStdHandle.argtypes = [wintypes.DWORD] + _GetStdHandle.restype = wintypes.HANDLE + def GetStdHandle(kind): + return _GetStdHandle(kind) + + SetConsoleTextAttribute = ctypes.windll.kernel32.SetConsoleTextAttribute + SetConsoleTextAttribute.argtypes = [wintypes.HANDLE, wintypes.WORD] + SetConsoleTextAttribute.restype = wintypes.BOOL + + _GetConsoleScreenBufferInfo = \ + ctypes.windll.kernel32.GetConsoleScreenBufferInfo + _GetConsoleScreenBufferInfo.argtypes = [wintypes.HANDLE, + ctypes.POINTER(CONSOLE_SCREEN_BUFFER_INFO)] + _GetConsoleScreenBufferInfo.restype = wintypes.BOOL + def GetConsoleInfo(handle): + info = CONSOLE_SCREEN_BUFFER_INFO() + _GetConsoleScreenBufferInfo(handle, ctypes.byref(info)) + return info + + def _getdimensions(): + handle = GetStdHandle(STD_OUTPUT_HANDLE) + info = GetConsoleInfo(handle) + # Substract one from the width, otherwise the cursor wraps + # and the ending \n causes an empty line to display. + return info.dwSize.Y, info.dwSize.X - 1 + +def write_out(fil, msg): + # XXX sometimes "msg" is of type bytes, sometimes text which + # complicates the situation. Should we try to enforce unicode? + try: + # on py27 and above writing out to sys.stdout with an encoding + # should usually work for unicode messages (if the encoding is + # capable of it) + fil.write(msg) + except UnicodeEncodeError: + # on py26 it might not work because stdout expects bytes + if fil.encoding: + try: + fil.write(msg.encode(fil.encoding)) + except UnicodeEncodeError: + # it might still fail if the encoding is not capable + pass + else: + fil.flush() + return + # fallback: escape all unicode characters + msg = msg.encode("unicode-escape").decode("ascii") + fil.write(msg) + fil.flush() diff --git a/venv/lib/python3.5/site-packages/py/_log/__init__.py b/venv/lib/python3.5/site-packages/py/_log/__init__.py new file mode 100644 index 0000000..b211e2d --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_log/__init__.py @@ -0,0 +1,2 @@ +""" logging API ('producers' and 'consumers' connected via keywords) """ + diff --git a/venv/lib/python3.5/site-packages/py/_log/log.py b/venv/lib/python3.5/site-packages/py/_log/log.py new file mode 100644 index 0000000..86205fe --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_log/log.py @@ -0,0 +1,186 @@ +""" +basic logging functionality based on a producer/consumer scheme. + +XXX implement this API: (maybe put it into slogger.py?) 
+ + log = Logger( + info=py.log.STDOUT, + debug=py.log.STDOUT, + command=None) + log.info("hello", "world") + log.command("hello", "world") + + log = Logger(info=Logger(something=...), + debug=py.log.STDOUT, + command=None) +""" +import py, sys + +class Message(object): + def __init__(self, keywords, args): + self.keywords = keywords + self.args = args + + def content(self): + return " ".join(map(str, self.args)) + + def prefix(self): + return "[%s] " % (":".join(self.keywords)) + + def __str__(self): + return self.prefix() + self.content() + + +class Producer(object): + """ (deprecated) Log producer API which sends messages to be logged + to a 'consumer' object, which then prints them to stdout, + stderr, files, etc. Used extensively by PyPy-1.1. + """ + + Message = Message # to allow later customization + keywords2consumer = {} + + def __init__(self, keywords, keywordmapper=None, **kw): + if hasattr(keywords, 'split'): + keywords = tuple(keywords.split()) + self._keywords = keywords + if keywordmapper is None: + keywordmapper = default_keywordmapper + self._keywordmapper = keywordmapper + + def __repr__(self): + return "<py.log.Producer %s>" % ":".join(self._keywords) + + def __getattr__(self, name): + if '_' in name: + raise AttributeError(name) + producer = self.__class__(self._keywords + (name,)) + setattr(self, name, producer) + return producer + + def __call__(self, *args): + """ write a message to the appropriate consumer(s) """ + func = self._keywordmapper.getconsumer(self._keywords) + if func is not None: + func(self.Message(self._keywords, args)) + +class KeywordMapper: + def __init__(self): + self.keywords2consumer = {} + + def getstate(self): + return self.keywords2consumer.copy() + def setstate(self, state): + self.keywords2consumer.clear() + self.keywords2consumer.update(state) + + def getconsumer(self, keywords): + """ return a consumer matching the given keywords. + + tries to find the most suitable consumer by walking, starting from + the back, the list of keywords, the first consumer matching a + keyword is returned (falling back to py.log.default) + """ + for i in range(len(keywords), 0, -1): + try: + return self.keywords2consumer[keywords[:i]] + except KeyError: + continue + return self.keywords2consumer.get('default', default_consumer) + + def setconsumer(self, keywords, consumer): + """ set a consumer for a set of keywords. 
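+
+        A short usage sketch (hypothetical keywords)::
+
+            mapper = KeywordMapper()
+            mapper.setconsumer("myapp debug", STDOUT)  # stored under ("myapp", "debug")
+            mapper.setconsumer("myapp", None)          # silence all other myapp messages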
""" + # normalize to tuples + if isinstance(keywords, str): + keywords = tuple(filter(None, keywords.split())) + elif hasattr(keywords, '_keywords'): + keywords = keywords._keywords + elif not isinstance(keywords, tuple): + raise TypeError("key %r is not a string or tuple" % (keywords,)) + if consumer is not None and not py.builtin.callable(consumer): + if not hasattr(consumer, 'write'): + raise TypeError( + "%r should be None, callable or file-like" % (consumer,)) + consumer = File(consumer) + self.keywords2consumer[keywords] = consumer + +def default_consumer(msg): + """ the default consumer, prints the message to stdout (using 'print') """ + sys.stderr.write(str(msg)+"\n") + +default_keywordmapper = KeywordMapper() + +def setconsumer(keywords, consumer): + default_keywordmapper.setconsumer(keywords, consumer) + +def setstate(state): + default_keywordmapper.setstate(state) +def getstate(): + return default_keywordmapper.getstate() + +# +# Consumers +# + +class File(object): + """ log consumer wrapping a file(-like) object """ + def __init__(self, f): + assert hasattr(f, 'write') + #assert isinstance(f, file) or not hasattr(f, 'open') + self._file = f + + def __call__(self, msg): + """ write a message to the log """ + self._file.write(str(msg) + "\n") + if hasattr(self._file, 'flush'): + self._file.flush() + +class Path(object): + """ log consumer that opens and writes to a Path """ + def __init__(self, filename, append=False, + delayed_create=False, buffering=False): + self._append = append + self._filename = str(filename) + self._buffering = buffering + if not delayed_create: + self._openfile() + + def _openfile(self): + mode = self._append and 'a' or 'w' + f = open(self._filename, mode) + self._file = f + + def __call__(self, msg): + """ write a message to the log """ + if not hasattr(self, "_file"): + self._openfile() + self._file.write(str(msg) + "\n") + if not self._buffering: + self._file.flush() + +def STDOUT(msg): + """ consumer that writes to sys.stdout """ + sys.stdout.write(str(msg)+"\n") + +def STDERR(msg): + """ consumer that writes to sys.stderr """ + sys.stderr.write(str(msg)+"\n") + +class Syslog: + """ consumer that writes to the syslog daemon """ + + def __init__(self, priority = None): + if priority is None: + priority = self.LOG_INFO + self.priority = priority + + def __call__(self, msg): + """ write a message to the log """ + py.std.syslog.syslog(self.priority, str(msg)) + +for _prio in "EMERG ALERT CRIT ERR WARNING NOTICE INFO DEBUG".split(): + _prio = "LOG_" + _prio + try: + setattr(Syslog, _prio, getattr(py.std.syslog, _prio)) + except AttributeError: + pass diff --git a/venv/lib/python3.5/site-packages/py/_log/warning.py b/venv/lib/python3.5/site-packages/py/_log/warning.py new file mode 100644 index 0000000..a137fe8 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_log/warning.py @@ -0,0 +1,76 @@ +import py, sys + +class DeprecationWarning(DeprecationWarning): + def __init__(self, msg, path, lineno): + self.msg = msg + self.path = path + self.lineno = lineno + def __repr__(self): + return "%s:%d: %s" %(self.path, self.lineno+1, self.msg) + def __str__(self): + return self.msg + +def _apiwarn(startversion, msg, stacklevel=2, function=None): + # below is mostly COPIED from python2.4/warnings.py's def warn() + # Get context information + if isinstance(stacklevel, str): + frame = sys._getframe(1) + level = 1 + found = frame.f_code.co_filename.find(stacklevel) != -1 + while frame: + co = frame.f_code + if co.co_filename.find(stacklevel) == -1: + if found: + 
stacklevel = level + break + else: + found = True + level += 1 + frame = frame.f_back + else: + stacklevel = 1 + msg = "%s (since version %s)" %(msg, startversion) + warn(msg, stacklevel=stacklevel+1, function=function) + +def warn(msg, stacklevel=1, function=None): + if function is not None: + filename = py.std.inspect.getfile(function) + lineno = py.code.getrawcode(function).co_firstlineno + else: + try: + caller = sys._getframe(stacklevel) + except ValueError: + globals = sys.__dict__ + lineno = 1 + else: + globals = caller.f_globals + lineno = caller.f_lineno + if '__name__' in globals: + module = globals['__name__'] + else: + module = "<string>" + filename = globals.get('__file__') + if filename: + fnl = filename.lower() + if fnl.endswith(".pyc") or fnl.endswith(".pyo"): + filename = filename[:-1] + elif fnl.endswith("$py.class"): + filename = filename.replace('$py.class', '.py') + else: + if module == "__main__": + try: + filename = sys.argv[0] + except AttributeError: + # embedded interpreters don't have sys.argv, see bug #839151 + filename = '__main__' + if not filename: + filename = module + path = py.path.local(filename) + warning = DeprecationWarning(msg, path, lineno) + py.std.warnings.warn_explicit(warning, category=Warning, + filename=str(warning.path), + lineno=warning.lineno, + registry=py.std.warnings.__dict__.setdefault( + "__warningsregistry__", {}) + ) + diff --git a/venv/lib/python3.5/site-packages/py/_path/__init__.py b/venv/lib/python3.5/site-packages/py/_path/__init__.py new file mode 100644 index 0000000..7b1ea8a --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_path/__init__.py @@ -0,0 +1 @@ +""" unified file system api """ diff --git a/venv/lib/python3.5/site-packages/py/_path/cacheutil.py b/venv/lib/python3.5/site-packages/py/_path/cacheutil.py new file mode 100644 index 0000000..89ea90b --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_path/cacheutil.py @@ -0,0 +1,114 @@ +""" +This module contains multithread-safe cache implementations. + +All Caches have + + getorbuild(key, builder) + delentry(key) + +methods and allow configuration when instantiating the cache class. +""" +from time import time as gettime + +class BasicCache(object): + def __init__(self, maxentries=128): + self.maxentries = maxentries + self.prunenum = int(maxentries - maxentries/8) + self._dict = {} + + def clear(self): + self._dict.clear() + + def _getentry(self, key): + return self._dict[key] + + def _putentry(self, key, entry): + self._prunelowestweight() + self._dict[key] = entry + + def delentry(self, key, raising=False): + try: + del self._dict[key] + except KeyError: + if raising: + raise + + def getorbuild(self, key, builder): + try: + entry = self._getentry(key) + except KeyError: + entry = self._build(key, builder) + self._putentry(key, entry) + return entry.value + + def _prunelowestweight(self): + """ prune out entries with lowest weight. """ + numentries = len(self._dict) + if numentries >= self.maxentries: + # evict according to entry's weight + items = [(entry.weight, key) + for key, entry in self._dict.items()] + items.sort() + index = numentries - self.prunenum + if index > 0: + for weight, key in items[:index]: + # in MT situations the element might be gone + self.delentry(key, raising=False) + +class BuildcostAccessCache(BasicCache): + """ A BuildTime/Access-counting cache implementation. 
+ the weight of a value is computed as the product of + + num-accesses-of-a-value * time-to-build-the-value + + The values with the least such weights are evicted + if the cache maxentries threshold is superceded. + For implementation flexibility more than one object + might be evicted at a time. + """ + # time function to use for measuring build-times + + def _build(self, key, builder): + start = gettime() + val = builder() + end = gettime() + return WeightedCountingEntry(val, end-start) + + +class WeightedCountingEntry(object): + def __init__(self, value, oneweight): + self._value = value + self.weight = self._oneweight = oneweight + + def value(self): + self.weight += self._oneweight + return self._value + value = property(value) + +class AgingCache(BasicCache): + """ This cache prunes out cache entries that are too old. + """ + def __init__(self, maxentries=128, maxseconds=10.0): + super(AgingCache, self).__init__(maxentries) + self.maxseconds = maxseconds + + def _getentry(self, key): + entry = self._dict[key] + if entry.isexpired(): + self.delentry(key) + raise KeyError(key) + return entry + + def _build(self, key, builder): + val = builder() + entry = AgingEntry(val, gettime() + self.maxseconds) + return entry + +class AgingEntry(object): + def __init__(self, value, expirationtime): + self.value = value + self.weight = expirationtime + + def isexpired(self): + t = gettime() + return t >= self.weight diff --git a/venv/lib/python3.5/site-packages/py/_path/common.py b/venv/lib/python3.5/site-packages/py/_path/common.py new file mode 100644 index 0000000..4bc292c --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_path/common.py @@ -0,0 +1,445 @@ +""" +""" +import os, sys, posixpath +import fnmatch +import py + +# Moved from local.py. +iswin32 = sys.platform == "win32" or (getattr(os, '_name', False) == 'nt') + +try: + from os import fspath +except ImportError: + def fspath(path): + """ + Return the string representation of the path. + If str or bytes is passed in, it is returned unchanged. + This code comes from PEP 519, modified to support earlier versions of + python. + + This is required for python < 3.6. + """ + if isinstance(path, (py.builtin.text, py.builtin.bytes)): + return path + + # Work from the object's type to match method resolution of other magic + # methods. + path_type = type(path) + try: + return path_type.__fspath__(path) + except AttributeError: + if hasattr(path_type, '__fspath__'): + raise + try: + import pathlib + except ImportError: + pass + else: + if isinstance(path, pathlib.PurePath): + return py.builtin.text(path) + + raise TypeError("expected str, bytes or os.PathLike object, not " + + path_type.__name__) + +class Checkers: + _depend_on_existence = 'exists', 'link', 'dir', 'file' + + def __init__(self, path): + self.path = path + + def dir(self): + raise NotImplementedError + + def file(self): + raise NotImplementedError + + def dotfile(self): + return self.path.basename.startswith('.') + + def ext(self, arg): + if not arg.startswith('.'): + arg = '.' 
+ arg + return self.path.ext == arg + + def exists(self): + raise NotImplementedError + + def basename(self, arg): + return self.path.basename == arg + + def basestarts(self, arg): + return self.path.basename.startswith(arg) + + def relto(self, arg): + return self.path.relto(arg) + + def fnmatch(self, arg): + return self.path.fnmatch(arg) + + def endswith(self, arg): + return str(self.path).endswith(arg) + + def _evaluate(self, kw): + for name, value in kw.items(): + invert = False + meth = None + try: + meth = getattr(self, name) + except AttributeError: + if name[:3] == 'not': + invert = True + try: + meth = getattr(self, name[3:]) + except AttributeError: + pass + if meth is None: + raise TypeError( + "no %r checker available for %r" % (name, self.path)) + try: + if py.code.getrawcode(meth).co_argcount > 1: + if (not meth(value)) ^ invert: + return False + else: + if bool(value) ^ bool(meth()) ^ invert: + return False + except (py.error.ENOENT, py.error.ENOTDIR, py.error.EBUSY): + # EBUSY feels not entirely correct, + # but its kind of necessary since ENOMEDIUM + # is not accessible in python + for name in self._depend_on_existence: + if name in kw: + if kw.get(name): + return False + name = 'not' + name + if name in kw: + if not kw.get(name): + return False + return True + +class NeverRaised(Exception): + pass + +class PathBase(object): + """ shared implementation for filesystem path objects.""" + Checkers = Checkers + + def __div__(self, other): + return self.join(fspath(other)) + __truediv__ = __div__ # py3k + + def basename(self): + """ basename part of path. """ + return self._getbyspec('basename')[0] + basename = property(basename, None, None, basename.__doc__) + + def dirname(self): + """ dirname part of path. """ + return self._getbyspec('dirname')[0] + dirname = property(dirname, None, None, dirname.__doc__) + + def purebasename(self): + """ pure base name of the path.""" + return self._getbyspec('purebasename')[0] + purebasename = property(purebasename, None, None, purebasename.__doc__) + + def ext(self): + """ extension of the path (including the '.').""" + return self._getbyspec('ext')[0] + ext = property(ext, None, None, ext.__doc__) + + def dirpath(self, *args, **kwargs): + """ return the directory path joined with any given path arguments. """ + return self.new(basename='').join(*args, **kwargs) + + def read_binary(self): + """ read and return a bytestring from reading the path. """ + with self.open('rb') as f: + return f.read() + + def read_text(self, encoding): + """ read and return a Unicode string from reading the path. """ + with self.open("r", encoding=encoding) as f: + return f.read() + + + def read(self, mode='r'): + """ read and return a bytestring from reading the path. """ + with self.open(mode) as f: + return f.read() + + def readlines(self, cr=1): + """ read and return a list of lines from the path. if cr is False, the +newline will be removed from the end of each line. """ + if sys.version_info < (3, ): + mode = 'rU' + else: # python 3 deprecates mode "U" in favor of "newline" option + mode = 'r' + + if not cr: + content = self.read(mode) + return content.split('\n') + else: + f = self.open(mode) + try: + return f.readlines() + finally: + f.close() + + def load(self): + """ (deprecated) return object unpickled from self.read() """ + f = self.open('rb') + try: + return py.error.checked_call(py.std.pickle.load, f) + finally: + f.close() + + def move(self, target): + """ move this path to target. 
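+
+        A minimal sketch (hypothetical paths)::
+
+            py.path.local('/tmp/src.txt').move(py.path.local('/tmp/dst.txt'))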
""" + if target.relto(self): + raise py.error.EINVAL(target, + "cannot move path into a subdirectory of itself") + try: + self.rename(target) + except py.error.EXDEV: # invalid cross-device link + self.copy(target) + self.remove() + + def __repr__(self): + """ return a string representation of this path. """ + return repr(str(self)) + + def check(self, **kw): + """ check a path for existence and properties. + + Without arguments, return True if the path exists, otherwise False. + + valid checkers:: + + file=1 # is a file + file=0 # is not a file (may not even exist) + dir=1 # is a dir + link=1 # is a link + exists=1 # exists + + You can specify multiple checker definitions, for example:: + + path.check(file=1, link=1) # a link pointing to a file + """ + if not kw: + kw = {'exists' : 1} + return self.Checkers(self)._evaluate(kw) + + def fnmatch(self, pattern): + """return true if the basename/fullname matches the glob-'pattern'. + + valid pattern characters:: + + * matches everything + ? matches any single character + [seq] matches any character in seq + [!seq] matches any char not in seq + + If the pattern contains a path-separator then the full path + is used for pattern matching and a '*' is prepended to the + pattern. + + if the pattern doesn't contain a path-separator the pattern + is only matched against the basename. + """ + return FNMatcher(pattern)(self) + + def relto(self, relpath): + """ return a string which is the relative part of the path + to the given 'relpath'. + """ + if not isinstance(relpath, (str, PathBase)): + raise TypeError("%r: not a string or path object" %(relpath,)) + strrelpath = str(relpath) + if strrelpath and strrelpath[-1] != self.sep: + strrelpath += self.sep + #assert strrelpath[-1] == self.sep + #assert strrelpath[-2] != self.sep + strself = self.strpath + if sys.platform == "win32" or getattr(os, '_name', None) == 'nt': + if os.path.normcase(strself).startswith( + os.path.normcase(strrelpath)): + return strself[len(strrelpath):] + elif strself.startswith(strrelpath): + return strself[len(strrelpath):] + return "" + + def ensure_dir(self, *args): + """ ensure the path joined with args is a directory. """ + return self.ensure(*args, **{"dir": True}) + + def bestrelpath(self, dest): + """ return a string which is a relative path from self + (assumed to be a directory) to dest such that + self.join(bestrelpath) == dest and if not such + path can be determined return dest. + """ + try: + if self == dest: + return os.curdir + base = self.common(dest) + if not base: # can be the case on windows + return str(dest) + self2base = self.relto(base) + reldest = dest.relto(base) + if self2base: + n = self2base.count(self.sep) + 1 + else: + n = 0 + l = [os.pardir] * n + if reldest: + l.append(reldest) + target = dest.sep.join(l) + return target + except AttributeError: + return str(dest) + + def exists(self): + return self.check() + + def isdir(self): + return self.check(dir=1) + + def isfile(self): + return self.check(file=1) + + def parts(self, reverse=False): + """ return a root-first list of all ancestor directories + plus the path itself. + """ + current = self + l = [self] + while 1: + last = current + current = current.dirpath() + if last == current: + break + l.append(current) + if not reverse: + l.reverse() + return l + + def common(self, other): + """ return the common part shared with the other path + or None if there is no common part. 
+ """ + last = None + for x, y in zip(self.parts(), other.parts()): + if x != y: + return last + last = x + return last + + def __add__(self, other): + """ return new path object with 'other' added to the basename""" + return self.new(basename=self.basename+str(other)) + + def __cmp__(self, other): + """ return sort value (-1, 0, +1). """ + try: + return cmp(self.strpath, other.strpath) + except AttributeError: + return cmp(str(self), str(other)) # self.path, other.path) + + def __lt__(self, other): + try: + return self.strpath < other.strpath + except AttributeError: + return str(self) < str(other) + + def visit(self, fil=None, rec=None, ignore=NeverRaised, bf=False, sort=False): + """ yields all paths below the current one + + fil is a filter (glob pattern or callable), if not matching the + path will not be yielded, defaulting to None (everything is + returned) + + rec is a filter (glob pattern or callable) that controls whether + a node is descended, defaulting to None + + ignore is an Exception class that is ignoredwhen calling dirlist() + on any of the paths (by default, all exceptions are reported) + + bf if True will cause a breadthfirst search instead of the + default depthfirst. Default: False + + sort if True will sort entries within each directory level. + """ + for x in Visitor(fil, rec, ignore, bf, sort).gen(self): + yield x + + def _sortlist(self, res, sort): + if sort: + if hasattr(sort, '__call__'): + res.sort(sort) + else: + res.sort() + + def samefile(self, other): + """ return True if other refers to the same stat object as self. """ + return self.strpath == str(other) + + def __fspath__(self): + return self.strpath + +class Visitor: + def __init__(self, fil, rec, ignore, bf, sort): + if isinstance(fil, py.builtin._basestring): + fil = FNMatcher(fil) + if isinstance(rec, py.builtin._basestring): + self.rec = FNMatcher(rec) + elif not hasattr(rec, '__call__') and rec: + self.rec = lambda path: True + else: + self.rec = rec + self.fil = fil + self.ignore = ignore + self.breadthfirst = bf + self.optsort = sort and sorted or (lambda x: x) + + def gen(self, path): + try: + entries = path.listdir() + except self.ignore: + return + rec = self.rec + dirs = self.optsort([p for p in entries + if p.check(dir=1) and (rec is None or rec(p))]) + if not self.breadthfirst: + for subdir in dirs: + for p in self.gen(subdir): + yield p + for p in self.optsort(entries): + if self.fil is None or self.fil(p): + yield p + if self.breadthfirst: + for subdir in dirs: + for p in self.gen(subdir): + yield p + +class FNMatcher: + def __init__(self, pattern): + self.pattern = pattern + + def __call__(self, path): + pattern = self.pattern + + if (pattern.find(path.sep) == -1 and + iswin32 and + pattern.find(posixpath.sep) != -1): + # Running on Windows, the pattern has no Windows path separators, + # and the pattern has one or more Posix path separators. Replace + # the Posix path separators with the Windows path separator. + pattern = pattern.replace(posixpath.sep, path.sep) + + if pattern.find(path.sep) == -1: + name = path.basename + else: + name = str(path) # path.strpath # XXX svn? + if not os.path.isabs(pattern): + pattern = '*' + path.sep + pattern + return fnmatch.fnmatch(name, pattern) diff --git a/venv/lib/python3.5/site-packages/py/_path/local.py b/venv/lib/python3.5/site-packages/py/_path/local.py new file mode 100644 index 0000000..264f115 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_path/local.py @@ -0,0 +1,930 @@ +""" +local path implementation. 
+""" +from __future__ import with_statement + +from contextlib import contextmanager +import sys, os, re, atexit, io +import py +from py._path import common +from py._path.common import iswin32, fspath +from stat import S_ISLNK, S_ISDIR, S_ISREG + +from os.path import abspath, normcase, normpath, isabs, exists, isdir, isfile, islink, dirname + +if sys.version_info > (3,0): + def map_as_list(func, iter): + return list(map(func, iter)) +else: + map_as_list = map + +class Stat(object): + def __getattr__(self, name): + return getattr(self._osstatresult, "st_" + name) + + def __init__(self, path, osstatresult): + self.path = path + self._osstatresult = osstatresult + + @property + def owner(self): + if iswin32: + raise NotImplementedError("XXX win32") + import pwd + entry = py.error.checked_call(pwd.getpwuid, self.uid) + return entry[0] + + @property + def group(self): + """ return group name of file. """ + if iswin32: + raise NotImplementedError("XXX win32") + import grp + entry = py.error.checked_call(grp.getgrgid, self.gid) + return entry[0] + + def isdir(self): + return S_ISDIR(self._osstatresult.st_mode) + + def isfile(self): + return S_ISREG(self._osstatresult.st_mode) + + def islink(self): + st = self.path.lstat() + return S_ISLNK(self._osstatresult.st_mode) + +class PosixPath(common.PathBase): + def chown(self, user, group, rec=0): + """ change ownership to the given user and group. + user and group may be specified by a number or + by a name. if rec is True change ownership + recursively. + """ + uid = getuserid(user) + gid = getgroupid(group) + if rec: + for x in self.visit(rec=lambda x: x.check(link=0)): + if x.check(link=0): + py.error.checked_call(os.chown, str(x), uid, gid) + py.error.checked_call(os.chown, str(self), uid, gid) + + def readlink(self): + """ return value of a symbolic link. """ + return py.error.checked_call(os.readlink, self.strpath) + + def mklinkto(self, oldname): + """ posix style hard link to another name. """ + py.error.checked_call(os.link, str(oldname), str(self)) + + def mksymlinkto(self, value, absolute=1): + """ create a symbolic link with the given value (pointing to another name). """ + if absolute: + py.error.checked_call(os.symlink, str(value), self.strpath) + else: + base = self.common(value) + # with posix local paths '/' is always a common base + relsource = self.__class__(value).relto(base) + reldest = self.relto(base) + n = reldest.count(self.sep) + target = self.sep.join(('..', )*n + (relsource, )) + py.error.checked_call(os.symlink, target, self.strpath) + +def getuserid(user): + import pwd + if not isinstance(user, int): + user = pwd.getpwnam(user)[2] + return user + +def getgroupid(group): + import grp + if not isinstance(group, int): + group = grp.getgrnam(group)[2] + return group + +FSBase = not iswin32 and PosixPath or common.PathBase + +class LocalPath(FSBase): + """ object oriented interface to os.path and other local filesystem + related information. 
+ """ + class ImportMismatchError(ImportError): + """ raised on pyimport() if there is a mismatch of __file__'s""" + + sep = os.sep + class Checkers(common.Checkers): + def _stat(self): + try: + return self._statcache + except AttributeError: + try: + self._statcache = self.path.stat() + except py.error.ELOOP: + self._statcache = self.path.lstat() + return self._statcache + + def dir(self): + return S_ISDIR(self._stat().mode) + + def file(self): + return S_ISREG(self._stat().mode) + + def exists(self): + return self._stat() + + def link(self): + st = self.path.lstat() + return S_ISLNK(st.mode) + + def __init__(self, path=None, expanduser=False): + """ Initialize and return a local Path instance. + + Path can be relative to the current directory. + If path is None it defaults to the current working directory. + If expanduser is True, tilde-expansion is performed. + Note that Path instances always carry an absolute path. + Note also that passing in a local path object will simply return + the exact same path object. Use new() to get a new copy. + """ + if path is None: + self.strpath = py.error.checked_call(os.getcwd) + else: + try: + path = fspath(path) + except TypeError: + raise ValueError("can only pass None, Path instances " + "or non-empty strings to LocalPath") + if expanduser: + path = os.path.expanduser(path) + self.strpath = abspath(path) + + def __hash__(self): + return hash(self.strpath) + + def __eq__(self, other): + s1 = fspath(self) + try: + s2 = fspath(other) + except TypeError: + return False + if iswin32: + s1 = s1.lower() + try: + s2 = s2.lower() + except AttributeError: + return False + return s1 == s2 + + def __ne__(self, other): + return not (self == other) + + def __lt__(self, other): + return fspath(self) < fspath(other) + + def __gt__(self, other): + return fspath(self) > fspath(other) + + def samefile(self, other): + """ return True if 'other' references the same file as 'self'. + """ + other = fspath(other) + if not isabs(other): + other = abspath(other) + if self == other: + return True + if iswin32: + return False # there is no samefile + return py.error.checked_call( + os.path.samefile, self.strpath, other) + + def remove(self, rec=1, ignore_errors=False): + """ remove a file or directory (or a directory tree if rec=1). + if ignore_errors is True, errors while removing directories will + be ignored. + """ + if self.check(dir=1, link=0): + if rec: + # force remove of readonly files on windows + if iswin32: + self.chmod(0o700, rec=1) + py.error.checked_call(py.std.shutil.rmtree, self.strpath, + ignore_errors=ignore_errors) + else: + py.error.checked_call(os.rmdir, self.strpath) + else: + if iswin32: + self.chmod(0o700) + py.error.checked_call(os.remove, self.strpath) + + def computehash(self, hashtype="md5", chunksize=524288): + """ return hexdigest of hashvalue for this file. """ + try: + try: + import hashlib as mod + except ImportError: + if hashtype == "sha1": + hashtype = "sha" + mod = __import__(hashtype) + hash = getattr(mod, hashtype)() + except (AttributeError, ImportError): + raise ValueError("Don't know how to compute %r hash" %(hashtype,)) + f = self.open('rb') + try: + while 1: + buf = f.read(chunksize) + if not buf: + return hash.hexdigest() + hash.update(buf) + finally: + f.close() + + def new(self, **kw): + """ create a modified version of this path. 
+ the following keyword arguments modify various path parts:: + + a:/some/path/to/a/file.ext + xx drive + xxxxxxxxxxxxxxxxx dirname + xxxxxxxx basename + xxxx purebasename + xxx ext + """ + obj = object.__new__(self.__class__) + if not kw: + obj.strpath = self.strpath + return obj + drive, dirname, basename, purebasename,ext = self._getbyspec( + "drive,dirname,basename,purebasename,ext") + if 'basename' in kw: + if 'purebasename' in kw or 'ext' in kw: + raise ValueError("invalid specification %r" % kw) + else: + pb = kw.setdefault('purebasename', purebasename) + try: + ext = kw['ext'] + except KeyError: + pass + else: + if ext and not ext.startswith('.'): + ext = '.' + ext + kw['basename'] = pb + ext + + if ('dirname' in kw and not kw['dirname']): + kw['dirname'] = drive + else: + kw.setdefault('dirname', dirname) + kw.setdefault('sep', self.sep) + obj.strpath = normpath( + "%(dirname)s%(sep)s%(basename)s" % kw) + return obj + + def _getbyspec(self, spec): + """ see new for what 'spec' can be. """ + res = [] + parts = self.strpath.split(self.sep) + + args = filter(None, spec.split(',') ) + append = res.append + for name in args: + if name == 'drive': + append(parts[0]) + elif name == 'dirname': + append(self.sep.join(parts[:-1])) + else: + basename = parts[-1] + if name == 'basename': + append(basename) + else: + i = basename.rfind('.') + if i == -1: + purebasename, ext = basename, '' + else: + purebasename, ext = basename[:i], basename[i:] + if name == 'purebasename': + append(purebasename) + elif name == 'ext': + append(ext) + else: + raise ValueError("invalid part specification %r" % name) + return res + + def dirpath(self, *args, **kwargs): + """ return the directory path joined with any given path arguments. """ + if not kwargs: + path = object.__new__(self.__class__) + path.strpath = dirname(self.strpath) + if args: + path = path.join(*args) + return path + return super(LocalPath, self).dirpath(*args, **kwargs) + + def join(self, *args, **kwargs): + """ return a new path by appending all 'args' as path + components. if abs=1 is used restart from root if any + of the args is an absolute path. + """ + sep = self.sep + strargs = [fspath(arg) for arg in args] + strpath = self.strpath + if kwargs.get('abs'): + newargs = [] + for arg in reversed(strargs): + if isabs(arg): + strpath = arg + strargs = newargs + break + newargs.insert(0, arg) + for arg in strargs: + arg = arg.strip(sep) + if iswin32: + # allow unix style paths even on windows. + arg = arg.strip('/') + arg = arg.replace('/', sep) + strpath = strpath + sep + arg + obj = object.__new__(self.__class__) + obj.strpath = normpath(strpath) + return obj + + def open(self, mode='r', ensure=False, encoding=None): + """ return an opened file with the given mode. + + If ensure is True, create parent directories if needed. 
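+
+        Sketch (hypothetical path)::
+
+            with p.open('w', ensure=True) as f:
+                f.write('data')   # parent directories created first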
+ """ + if ensure: + self.dirpath().ensure(dir=1) + if encoding: + return py.error.checked_call(io.open, self.strpath, mode, encoding=encoding) + return py.error.checked_call(open, self.strpath, mode) + + def _fastjoin(self, name): + child = object.__new__(self.__class__) + child.strpath = self.strpath + self.sep + name + return child + + def islink(self): + return islink(self.strpath) + + def check(self, **kw): + if not kw: + return exists(self.strpath) + if len(kw) == 1: + if "dir" in kw: + return not kw["dir"] ^ isdir(self.strpath) + if "file" in kw: + return not kw["file"] ^ isfile(self.strpath) + return super(LocalPath, self).check(**kw) + + _patternchars = set("*?[" + os.path.sep) + def listdir(self, fil=None, sort=None): + """ list directory contents, possibly filter by the given fil func + and possibly sorted. + """ + if fil is None and sort is None: + names = py.error.checked_call(os.listdir, self.strpath) + return map_as_list(self._fastjoin, names) + if isinstance(fil, py.builtin._basestring): + if not self._patternchars.intersection(fil): + child = self._fastjoin(fil) + if exists(child.strpath): + return [child] + return [] + fil = common.FNMatcher(fil) + names = py.error.checked_call(os.listdir, self.strpath) + res = [] + for name in names: + child = self._fastjoin(name) + if fil is None or fil(child): + res.append(child) + self._sortlist(res, sort) + return res + + def size(self): + """ return size of the underlying file object """ + return self.stat().size + + def mtime(self): + """ return last modification time of the path. """ + return self.stat().mtime + + def copy(self, target, mode=False, stat=False): + """ copy path to target. + + If mode is True, will copy copy permission from path to target. + If stat is True, copy permission, last modification + time, last access time, and flags from path to target. + """ + if self.check(file=1): + if target.check(dir=1): + target = target.join(self.basename) + assert self!=target + copychunked(self, target) + if mode: + copymode(self.strpath, target.strpath) + if stat: + copystat(self, target) + else: + def rec(p): + return p.check(link=0) + for x in self.visit(rec=rec): + relpath = x.relto(self) + newx = target.join(relpath) + newx.dirpath().ensure(dir=1) + if x.check(link=1): + newx.mksymlinkto(x.readlink()) + continue + elif x.check(file=1): + copychunked(x, newx) + elif x.check(dir=1): + newx.ensure(dir=1) + if mode: + copymode(x.strpath, newx.strpath) + if stat: + copystat(x, newx) + + def rename(self, target): + """ rename this path to target. """ + target = fspath(target) + return py.error.checked_call(os.rename, self.strpath, target) + + def dump(self, obj, bin=1): + """ pickle object into path location""" + f = self.open('wb') + try: + py.error.checked_call(py.std.pickle.dump, obj, f, bin) + finally: + f.close() + + def mkdir(self, *args): + """ create & return the directory joined with args. """ + p = self.join(*args) + py.error.checked_call(os.mkdir, fspath(p)) + return p + + def write_binary(self, data, ensure=False): + """ write binary data into path. If ensure is True create + missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open('wb') as f: + f.write(data) + + def write_text(self, data, encoding, ensure=False): + """ write text data into path using the specified encoding. + If ensure is True create missing parent directories. 
+ """ + if ensure: + self.dirpath().ensure(dir=1) + with self.open('w', encoding=encoding) as f: + f.write(data) + + def write(self, data, mode='w', ensure=False): + """ write data into path. If ensure is True create + missing parent directories. + """ + if ensure: + self.dirpath().ensure(dir=1) + if 'b' in mode: + if not py.builtin._isbytes(data): + raise ValueError("can only process bytes") + else: + if not py.builtin._istext(data): + if not py.builtin._isbytes(data): + data = str(data) + else: + data = py.builtin._totext(data, sys.getdefaultencoding()) + f = self.open(mode) + try: + f.write(data) + finally: + f.close() + + def _ensuredirs(self): + parent = self.dirpath() + if parent == self: + return self + if parent.check(dir=0): + parent._ensuredirs() + if self.check(dir=0): + try: + self.mkdir() + except py.error.EEXIST: + # race condition: file/dir created by another thread/process. + # complain if it is not a dir + if self.check(dir=0): + raise + return self + + def ensure(self, *args, **kwargs): + """ ensure that an args-joined path exists (by default as + a file). if you specify a keyword argument 'dir=True' + then the path is forced to be a directory path. + """ + p = self.join(*args) + if kwargs.get('dir', 0): + return p._ensuredirs() + else: + p.dirpath()._ensuredirs() + if not p.check(file=1): + p.open('w').close() + return p + + def stat(self, raising=True): + """ Return an os.stat() tuple. """ + if raising == True: + return Stat(self, py.error.checked_call(os.stat, self.strpath)) + try: + return Stat(self, os.stat(self.strpath)) + except KeyboardInterrupt: + raise + except Exception: + return None + + def lstat(self): + """ Return an os.lstat() tuple. """ + return Stat(self, py.error.checked_call(os.lstat, self.strpath)) + + def setmtime(self, mtime=None): + """ set modification time for the given path. if 'mtime' is None + (the default) then the file's mtime is set to current time. + + Note that the resolution for 'mtime' is platform dependent. + """ + if mtime is None: + return py.error.checked_call(os.utime, self.strpath, mtime) + try: + return py.error.checked_call(os.utime, self.strpath, (-1, mtime)) + except py.error.EINVAL: + return py.error.checked_call(os.utime, self.strpath, (self.atime(), mtime)) + + def chdir(self): + """ change directory to self and return old current directory """ + try: + old = self.__class__() + except py.error.ENOENT: + old = None + py.error.checked_call(os.chdir, self.strpath) + return old + + + @contextmanager + def as_cwd(self): + """ return context manager which changes to current dir during the + managed "with" context. On __enter__ it returns the old dir. + """ + old = self.chdir() + try: + yield old + finally: + old.chdir() + + def realpath(self): + """ return a new path which contains no symbolic links.""" + return self.__class__(os.path.realpath(self.strpath)) + + def atime(self): + """ return last access time of the path. """ + return self.stat().atime + + def __repr__(self): + return 'local(%r)' % self.strpath + + def __str__(self): + """ return string representation of the Path. """ + return self.strpath + + def chmod(self, mode, rec=0): + """ change permissions to the given mode. If mode is an + integer it directly encodes the os-specific modes. + if rec is True perform recursively. 
+ """ + if not isinstance(mode, int): + raise TypeError("mode %r must be an integer" % (mode,)) + if rec: + for x in self.visit(rec=rec): + py.error.checked_call(os.chmod, str(x), mode) + py.error.checked_call(os.chmod, self.strpath, mode) + + def pypkgpath(self): + """ return the Python package path by looking for the last + directory upwards which still contains an __init__.py. + Return None if a pkgpath can not be determined. + """ + pkgpath = None + for parent in self.parts(reverse=True): + if parent.isdir(): + if not parent.join('__init__.py').exists(): + break + if not isimportable(parent.basename): + break + pkgpath = parent + return pkgpath + + def _ensuresyspath(self, ensuremode, path): + if ensuremode: + s = str(path) + if ensuremode == "append": + if s not in sys.path: + sys.path.append(s) + else: + if s != sys.path[0]: + sys.path.insert(0, s) + + def pyimport(self, modname=None, ensuresyspath=True): + """ return path as an imported python module. + + If modname is None, look for the containing package + and construct an according module name. + The module will be put/looked up in sys.modules. + if ensuresyspath is True then the root dir for importing + the file (taking __init__.py files into account) will + be prepended to sys.path if it isn't there already. + If ensuresyspath=="append" the root dir will be appended + if it isn't already contained in sys.path. + if ensuresyspath is False no modification of syspath happens. + """ + if not self.check(): + raise py.error.ENOENT(self) + + pkgpath = None + if modname is None: + pkgpath = self.pypkgpath() + if pkgpath is not None: + pkgroot = pkgpath.dirpath() + names = self.new(ext="").relto(pkgroot).split(self.sep) + if names[-1] == "__init__": + names.pop() + modname = ".".join(names) + else: + pkgroot = self.dirpath() + modname = self.purebasename + + self._ensuresyspath(ensuresyspath, pkgroot) + __import__(modname) + mod = sys.modules[modname] + if self.basename == "__init__.py": + return mod # we don't check anything as we might + # we in a namespace package ... too icky to check + modfile = mod.__file__ + if modfile[-4:] in ('.pyc', '.pyo'): + modfile = modfile[:-1] + elif modfile.endswith('$py.class'): + modfile = modfile[:-9] + '.py' + if modfile.endswith(os.path.sep + "__init__.py"): + if self.basename != "__init__.py": + modfile = modfile[:-12] + try: + issame = self.samefile(modfile) + except py.error.ENOENT: + issame = False + if not issame: + raise self.ImportMismatchError(modname, modfile, self) + return mod + else: + try: + return sys.modules[modname] + except KeyError: + # we have a custom modname, do a pseudo-import + mod = py.std.types.ModuleType(modname) + mod.__file__ = str(self) + sys.modules[modname] = mod + try: + py.builtin.execfile(str(self), mod.__dict__) + except: + del sys.modules[modname] + raise + return mod + + def sysexec(self, *argv, **popen_opts): + """ return stdout text from executing a system child process, + where the 'self' path points to executable. + The process is directly invoked and not through a system shell. 
+ """ + from subprocess import Popen, PIPE + argv = map_as_list(str, argv) + popen_opts['stdout'] = popen_opts['stderr'] = PIPE + proc = Popen([str(self)] + argv, **popen_opts) + stdout, stderr = proc.communicate() + ret = proc.wait() + if py.builtin._isbytes(stdout): + stdout = py.builtin._totext(stdout, sys.getdefaultencoding()) + if ret != 0: + if py.builtin._isbytes(stderr): + stderr = py.builtin._totext(stderr, sys.getdefaultencoding()) + raise py.process.cmdexec.Error(ret, ret, str(self), + stdout, stderr,) + return stdout + + def sysfind(cls, name, checker=None, paths=None): + """ return a path object found by looking at the systems + underlying PATH specification. If the checker is not None + it will be invoked to filter matching paths. If a binary + cannot be found, None is returned + Note: This is probably not working on plain win32 systems + but may work on cygwin. + """ + if isabs(name): + p = py.path.local(name) + if p.check(file=1): + return p + else: + if paths is None: + if iswin32: + paths = py.std.os.environ['Path'].split(';') + if '' not in paths and '.' not in paths: + paths.append('.') + try: + systemroot = os.environ['SYSTEMROOT'] + except KeyError: + pass + else: + paths = [re.sub('%SystemRoot%', systemroot, path) + for path in paths] + else: + paths = py.std.os.environ['PATH'].split(':') + tryadd = [] + if iswin32: + tryadd += os.environ['PATHEXT'].split(os.pathsep) + tryadd.append("") + + for x in paths: + for addext in tryadd: + p = py.path.local(x).join(name, abs=True) + addext + try: + if p.check(file=1): + if checker: + if not checker(p): + continue + return p + except py.error.EACCES: + pass + return None + sysfind = classmethod(sysfind) + + def _gethomedir(cls): + try: + x = os.environ['HOME'] + except KeyError: + try: + x = os.environ["HOMEDRIVE"] + os.environ['HOMEPATH'] + except KeyError: + return None + return cls(x) + _gethomedir = classmethod(_gethomedir) + + #""" + #special class constructors for local filesystem paths + #""" + def get_temproot(cls): + """ return the system's temporary directory + (where tempfiles are usually created in) + """ + return py.path.local(py.std.tempfile.gettempdir()) + get_temproot = classmethod(get_temproot) + + def mkdtemp(cls, rootdir=None): + """ return a Path object pointing to a fresh new temporary directory + (which we created ourself). + """ + import tempfile + if rootdir is None: + rootdir = cls.get_temproot() + return cls(py.error.checked_call(tempfile.mkdtemp, dir=str(rootdir))) + mkdtemp = classmethod(mkdtemp) + + def make_numbered_dir(cls, prefix='session-', rootdir=None, keep=3, + lock_timeout = 172800): # two days + """ return unique directory with a number greater than the current + maximum one. The number is assumed to start directly after prefix. + if keep is true directories with a number less than (maxnum-keep) + will be removed. 
+ """ + if rootdir is None: + rootdir = cls.get_temproot() + + nprefix = normcase(prefix) + def parse_num(path): + """ parse the number out of a path (if it matches the prefix) """ + nbasename = normcase(path.basename) + if nbasename.startswith(nprefix): + try: + return int(nbasename[len(nprefix):]) + except ValueError: + pass + + # compute the maximum number currently in use with the + # prefix + lastmax = None + while True: + maxnum = -1 + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None: + maxnum = max(maxnum, num) + + # make the new directory + try: + udir = rootdir.mkdir(prefix + str(maxnum+1)) + except py.error.EEXIST: + # race condition: another thread/process created the dir + # in the meantime. Try counting again + if lastmax == maxnum: + raise + lastmax = maxnum + continue + break + + # put a .lock file in the new directory that will be removed at + # process exit + if lock_timeout: + lockfile = udir.join('.lock') + mypid = os.getpid() + if hasattr(lockfile, 'mksymlinkto'): + lockfile.mksymlinkto(str(mypid)) + else: + lockfile.write(str(mypid)) + def try_remove_lockfile(): + # in a fork() situation, only the last process should + # remove the .lock, otherwise the other processes run the + # risk of seeing their temporary dir disappear. For now + # we remove the .lock in the parent only (i.e. we assume + # that the children finish before the parent). + if os.getpid() != mypid: + return + try: + lockfile.remove() + except py.error.Error: + pass + atexit.register(try_remove_lockfile) + + # prune old directories + if keep: + for path in rootdir.listdir(): + num = parse_num(path) + if num is not None and num <= (maxnum - keep): + lf = path.join('.lock') + try: + t1 = lf.lstat().mtime + t2 = lockfile.lstat().mtime + if not lock_timeout or abs(t2-t1) < lock_timeout: + continue # skip directories still locked + except py.error.Error: + pass # assume that it means that there is no 'lf' + try: + path.remove(rec=1) + except KeyboardInterrupt: + raise + except: # this might be py.error.Error, WindowsError ... + pass + + # make link... + try: + username = os.environ['USER'] #linux, et al + except KeyError: + try: + username = os.environ['USERNAME'] #windows + except KeyError: + username = 'current' + + src = str(udir) + dest = src[:src.rfind('-')] + '-' + username + try: + os.unlink(dest) + except OSError: + pass + try: + os.symlink(src, dest) + except (OSError, AttributeError, NotImplementedError): + pass + + return udir + make_numbered_dir = classmethod(make_numbered_dir) + + +def copymode(src, dest): + """ copy permission from src to dst. 
""" + py.std.shutil.copymode(src, dest) + +def copystat(src, dest): + """ copy permission, last modification time, last access time, and flags from src to dst.""" + py.std.shutil.copystat(str(src), str(dest)) + +def copychunked(src, dest): + chunksize = 524288 # half a meg of bytes + fsrc = src.open('rb') + try: + fdest = dest.open('wb') + try: + while 1: + buf = fsrc.read(chunksize) + if not buf: + break + fdest.write(buf) + finally: + fdest.close() + finally: + fsrc.close() + +def isimportable(name): + if name and (name[0].isalpha() or name[0] == '_'): + name = name.replace("_", '') + return not name or name.isalnum() diff --git a/venv/lib/python3.5/site-packages/py/_path/svnurl.py b/venv/lib/python3.5/site-packages/py/_path/svnurl.py new file mode 100644 index 0000000..5f45fa2 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_path/svnurl.py @@ -0,0 +1,380 @@ +""" +module defining a subversion path object based on the external +command 'svn'. This modules aims to work with svn 1.3 and higher +but might also interact well with earlier versions. +""" + +import os, sys, time, re +import py +from py import path, process +from py._path import common +from py._path import svnwc as svncommon +from py._path.cacheutil import BuildcostAccessCache, AgingCache + +DEBUG=False + +class SvnCommandPath(svncommon.SvnPathBase): + """ path implementation that offers access to (possibly remote) subversion + repositories. """ + + _lsrevcache = BuildcostAccessCache(maxentries=128) + _lsnorevcache = AgingCache(maxentries=1000, maxseconds=60.0) + + def __new__(cls, path, rev=None, auth=None): + self = object.__new__(cls) + if isinstance(path, cls): + rev = path.rev + auth = path.auth + path = path.strpath + svncommon.checkbadchars(path) + path = path.rstrip('/') + self.strpath = path + self.rev = rev + self.auth = auth + return self + + def __repr__(self): + if self.rev == -1: + return 'svnurl(%r)' % self.strpath + else: + return 'svnurl(%r, %r)' % (self.strpath, self.rev) + + def _svnwithrev(self, cmd, *args): + """ execute an svn command, append our own url and revision """ + if self.rev is None: + return self._svnwrite(cmd, *args) + else: + args = ['-r', self.rev] + list(args) + return self._svnwrite(cmd, *args) + + def _svnwrite(self, cmd, *args): + """ execute an svn command, append our own url """ + l = ['svn %s' % cmd] + args = ['"%s"' % self._escape(item) for item in args] + l.extend(args) + l.append('"%s"' % self._encodedurl()) + # fixing the locale because we can't otherwise parse + string = " ".join(l) + if DEBUG: + print("execing %s" % string) + out = self._svncmdexecauth(string) + return out + + def _svncmdexecauth(self, cmd): + """ execute an svn command 'as is' """ + cmd = svncommon.fixlocale() + cmd + if self.auth is not None: + cmd += ' ' + self.auth.makecmdoptions() + return self._cmdexec(cmd) + + def _cmdexec(self, cmd): + try: + out = process.cmdexec(cmd) + except py.process.cmdexec.Error: + e = sys.exc_info()[1] + if (e.err.find('File Exists') != -1 or + e.err.find('File already exists') != -1): + raise py.error.EEXIST(self) + raise + return out + + def _svnpopenauth(self, cmd): + """ execute an svn command, return a pipe for reading stdin """ + cmd = svncommon.fixlocale() + cmd + if self.auth is not None: + cmd += ' ' + self.auth.makecmdoptions() + return self._popen(cmd) + + def _popen(self, cmd): + return os.popen(cmd) + + def _encodedurl(self): + return self._escape(self.strpath) + + def _norev_delentry(self, path): + auth = self.auth and self.auth.makecmdoptions() or None + 
self._lsnorevcache.delentry((str(path), auth)) + + def open(self, mode='r'): + """ return an opened file with the given mode. """ + if mode not in ("r", "rU",): + raise ValueError("mode %r not supported" % (mode,)) + assert self.check(file=1) # svn cat returns an empty file otherwise + if self.rev is None: + return self._svnpopenauth('svn cat "%s"' % ( + self._escape(self.strpath), )) + else: + return self._svnpopenauth('svn cat -r %s "%s"' % ( + self.rev, self._escape(self.strpath))) + + def dirpath(self, *args, **kwargs): + """ return the directory path of the current path joined + with any given path arguments. + """ + l = self.strpath.split(self.sep) + if len(l) < 4: + raise py.error.EINVAL(self, "base is not valid") + elif len(l) == 4: + return self.join(*args, **kwargs) + else: + return self.new(basename='').join(*args, **kwargs) + + # modifying methods (cache must be invalidated) + def mkdir(self, *args, **kwargs): + """ create & return the directory joined with args. + pass a 'msg' keyword argument to set the commit message. + """ + commit_msg = kwargs.get('msg', "mkdir by py lib invocation") + createpath = self.join(*args) + createpath._svnwrite('mkdir', '-m', commit_msg) + self._norev_delentry(createpath.dirpath()) + return createpath + + def copy(self, target, msg='copied by py lib invocation'): + """ copy path to target with checkin message msg.""" + if getattr(target, 'rev', None) is not None: + raise py.error.EINVAL(target, "revisions are immutable") + self._svncmdexecauth('svn copy -m "%s" "%s" "%s"' %(msg, + self._escape(self), self._escape(target))) + self._norev_delentry(target.dirpath()) + + def rename(self, target, msg="renamed by py lib invocation"): + """ rename this path to target with checkin message msg. """ + if getattr(self, 'rev', None) is not None: + raise py.error.EINVAL(self, "revisions are immutable") + self._svncmdexecauth('svn move -m "%s" --force "%s" "%s"' %( + msg, self._escape(self), self._escape(target))) + self._norev_delentry(self.dirpath()) + self._norev_delentry(self) + + def remove(self, rec=1, msg='removed by py lib invocation'): + """ remove a file or directory (or a directory tree if rec=1) with +checkin message msg.""" + if self.rev is not None: + raise py.error.EINVAL(self, "revisions are immutable") + self._svncmdexecauth('svn rm -m "%s" "%s"' %(msg, self._escape(self))) + self._norev_delentry(self.dirpath()) + + def export(self, topath): + """ export to a local path + + topath should not exist prior to calling this, returns a + py.path.local instance + """ + topath = py.path.local(topath) + args = ['"%s"' % (self._escape(self),), + '"%s"' % (self._escape(topath),)] + if self.rev is not None: + args = ['-r', str(self.rev)] + args + self._svncmdexecauth('svn export %s' % (' '.join(args),)) + return topath + + def ensure(self, *args, **kwargs): + """ ensure that an args-joined path exists (by default as + a file). If you specify a keyword argument 'dir=True' + then the path is forced to be a directory path. 
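+
+        Sketch (hypothetical repository URL)::
+
+            url = py.path.svnurl('http://host/repo/trunk')
+            url.ensure('docs', 'index.txt')   # svn-imports the missing parts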
+ """ + if getattr(self, 'rev', None) is not None: + raise py.error.EINVAL(self, "revisions are immutable") + target = self.join(*args) + dir = kwargs.get('dir', 0) + for x in target.parts(reverse=True): + if x.check(): + break + else: + raise py.error.ENOENT(target, "has not any valid base!") + if x == target: + if not x.check(dir=dir): + raise dir and py.error.ENOTDIR(x) or py.error.EISDIR(x) + return x + tocreate = target.relto(x) + basename = tocreate.split(self.sep, 1)[0] + tempdir = py.path.local.mkdtemp() + try: + tempdir.ensure(tocreate, dir=dir) + cmd = 'svn import -m "%s" "%s" "%s"' % ( + "ensure %s" % self._escape(tocreate), + self._escape(tempdir.join(basename)), + x.join(basename)._encodedurl()) + self._svncmdexecauth(cmd) + self._norev_delentry(x) + finally: + tempdir.remove() + return target + + # end of modifying methods + def _propget(self, name): + res = self._svnwithrev('propget', name) + return res[:-1] # strip trailing newline + + def _proplist(self): + res = self._svnwithrev('proplist') + lines = res.split('\n') + lines = [x.strip() for x in lines[1:]] + return svncommon.PropListDict(self, lines) + + def info(self): + """ return an Info structure with svn-provided information. """ + parent = self.dirpath() + nameinfo_seq = parent._listdir_nameinfo() + bn = self.basename + for name, info in nameinfo_seq: + if name == bn: + return info + raise py.error.ENOENT(self) + + + def _listdir_nameinfo(self): + """ return sequence of name-info directory entries of self """ + def builder(): + try: + res = self._svnwithrev('ls', '-v') + except process.cmdexec.Error: + e = sys.exc_info()[1] + if e.err.find('non-existent in that revision') != -1: + raise py.error.ENOENT(self, e.err) + elif e.err.find("E200009:") != -1: + raise py.error.ENOENT(self, e.err) + elif e.err.find('File not found') != -1: + raise py.error.ENOENT(self, e.err) + elif e.err.find('not part of a repository')!=-1: + raise py.error.ENOENT(self, e.err) + elif e.err.find('Unable to open')!=-1: + raise py.error.ENOENT(self, e.err) + elif e.err.lower().find('method not allowed')!=-1: + raise py.error.EACCES(self, e.err) + raise py.error.Error(e.err) + lines = res.split('\n') + nameinfo_seq = [] + for lsline in lines: + if lsline: + info = InfoSvnCommand(lsline) + if info._name != '.': # svn 1.5 produces '.' dirs, + nameinfo_seq.append((info._name, info)) + nameinfo_seq.sort() + return nameinfo_seq + auth = self.auth and self.auth.makecmdoptions() or None + if self.rev is not None: + return self._lsrevcache.getorbuild((self.strpath, self.rev, auth), + builder) + else: + return self._lsnorevcache.getorbuild((self.strpath, auth), + builder) + + def listdir(self, fil=None, sort=None): + """ list directory contents, possibly filter by the given fil func + and possibly sorted. + """ + if isinstance(fil, str): + fil = common.FNMatcher(fil) + nameinfo_seq = self._listdir_nameinfo() + if len(nameinfo_seq) == 1: + name, info = nameinfo_seq[0] + if name == self.basename and info.kind == 'file': + #if not self.check(dir=1): + raise py.error.ENOTDIR(self) + paths = [self.join(name) for (name, info) in nameinfo_seq] + if fil: + paths = [x for x in paths if fil(x)] + self._sortlist(paths, sort) + return paths + + + def log(self, rev_start=None, rev_end=1, verbose=False): + """ return a list of LogEntry instances for this path. +rev_start is the starting revision (defaulting to the first one). +rev_end is the last revision (defaulting to HEAD). +if verbose is True, then the LogEntry instances also know which files changed. 
+""" + assert self.check() #make it simpler for the pipe + rev_start = rev_start is None and "HEAD" or rev_start + rev_end = rev_end is None and "HEAD" or rev_end + + if rev_start == "HEAD" and rev_end == 1: + rev_opt = "" + else: + rev_opt = "-r %s:%s" % (rev_start, rev_end) + verbose_opt = verbose and "-v" or "" + xmlpipe = self._svnpopenauth('svn log --xml %s %s "%s"' % + (rev_opt, verbose_opt, self.strpath)) + from xml.dom import minidom + tree = minidom.parse(xmlpipe) + result = [] + for logentry in filter(None, tree.firstChild.childNodes): + if logentry.nodeType == logentry.ELEMENT_NODE: + result.append(svncommon.LogEntry(logentry)) + return result + +#01234567890123456789012345678901234567890123467 +# 2256 hpk 165 Nov 24 17:55 __init__.py +# XXX spotted by Guido, SVN 1.3.0 has different aligning, breaks the code!!! +# 1312 johnny 1627 May 05 14:32 test_decorators.py +# +class InfoSvnCommand: + # the '0?' part in the middle is an indication of whether the resource is + # locked, see 'svn help ls' + lspattern = re.compile( + r'^ *(?P<rev>\d+) +(?P<author>.+?) +(0? *(?P<size>\d+))? ' + r'*(?P<date>\w+ +\d{2} +[\d:]+) +(?P<file>.*)$') + def __init__(self, line): + # this is a typical line from 'svn ls http://...' + #_ 1127 jum 0 Jul 13 15:28 branch/ + match = self.lspattern.match(line) + data = match.groupdict() + self._name = data['file'] + if self._name[-1] == '/': + self._name = self._name[:-1] + self.kind = 'dir' + else: + self.kind = 'file' + #self.has_props = l.pop(0) == 'P' + self.created_rev = int(data['rev']) + self.last_author = data['author'] + self.size = data['size'] and int(data['size']) or 0 + self.mtime = parse_time_with_missing_year(data['date']) + self.time = self.mtime * 1000000 + + def __eq__(self, other): + return self.__dict__ == other.__dict__ + + +#____________________________________________________ +# +# helper functions +#____________________________________________________ +def parse_time_with_missing_year(timestr): + """ analyze the time part from a single line of "svn ls -v" + the svn output doesn't show the year makes the 'timestr' + ambigous. + """ + import calendar + t_now = time.gmtime() + + tparts = timestr.split() + month = time.strptime(tparts.pop(0), '%b')[1] + day = time.strptime(tparts.pop(0), '%d')[2] + last = tparts.pop(0) # year or hour:minute + try: + if ":" in last: + raise ValueError() + year = time.strptime(last, '%Y')[0] + hour = minute = 0 + except ValueError: + hour, minute = time.strptime(last, '%H:%M')[3:5] + year = t_now[0] + + t_result = (year, month, day, hour, minute, 0,0,0,0) + if t_result > t_now: + year -= 1 + t_result = (year, month, day, hour, minute, 0,0,0,0) + return calendar.timegm(t_result) + +class PathEntry: + def __init__(self, ppart): + self.strpath = ppart.firstChild.nodeValue.encode('UTF-8') + self.action = ppart.getAttribute('action').encode('UTF-8') + if self.action == 'A': + self.copyfrom_path = ppart.getAttribute('copyfrom-path').encode('UTF-8') + if self.copyfrom_path: + self.copyfrom_rev = int(ppart.getAttribute('copyfrom-rev')) + diff --git a/venv/lib/python3.5/site-packages/py/_path/svnwc.py b/venv/lib/python3.5/site-packages/py/_path/svnwc.py new file mode 100644 index 0000000..07233b0 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_path/svnwc.py @@ -0,0 +1,1240 @@ +""" +svn-Command based Implementation of a Subversion WorkingCopy Path. + + SvnWCCommandPath is the main class. 
+ +""" + +import os, sys, time, re, calendar +import py +import subprocess +from py._path import common + +#----------------------------------------------------------- +# Caching latest repository revision and repo-paths +# (getting them is slow with the current implementations) +# +# XXX make mt-safe +#----------------------------------------------------------- + +class cache: + proplist = {} + info = {} + entries = {} + prop = {} + +class RepoEntry: + def __init__(self, url, rev, timestamp): + self.url = url + self.rev = rev + self.timestamp = timestamp + + def __str__(self): + return "repo: %s;%s %s" %(self.url, self.rev, self.timestamp) + +class RepoCache: + """ The Repocache manages discovered repository paths + and their revisions. If inside a timeout the cache + will even return the revision of the root. + """ + timeout = 20 # seconds after which we forget that we know the last revision + + def __init__(self): + self.repos = [] + + def clear(self): + self.repos = [] + + def put(self, url, rev, timestamp=None): + if rev is None: + return + if timestamp is None: + timestamp = time.time() + + for entry in self.repos: + if url == entry.url: + entry.timestamp = timestamp + entry.rev = rev + #print "set repo", entry + break + else: + entry = RepoEntry(url, rev, timestamp) + self.repos.append(entry) + #print "appended repo", entry + + def get(self, url): + now = time.time() + for entry in self.repos: + if url.startswith(entry.url): + if now < entry.timestamp + self.timeout: + #print "returning immediate Etrny", entry + return entry.url, entry.rev + return entry.url, -1 + return url, -1 + +repositories = RepoCache() + + +# svn support code + +ALLOWED_CHARS = "_ -/\\=$.~+%" #add characters as necessary when tested +if sys.platform == "win32": + ALLOWED_CHARS += ":" +ALLOWED_CHARS_HOST = ALLOWED_CHARS + '@:' + +def _getsvnversion(ver=[]): + try: + return ver[0] + except IndexError: + v = py.process.cmdexec("svn -q --version") + v.strip() + v = '.'.join(v.split('.')[:2]) + ver.append(v) + return v + +def _escape_helper(text): + text = str(text) + if py.std.sys.platform != 'win32': + text = str(text).replace('$', '\\$') + return text + +def _check_for_bad_chars(text, allowed_chars=ALLOWED_CHARS): + for c in str(text): + if c.isalnum(): + continue + if c in allowed_chars: + continue + return True + return False + +def checkbadchars(url): + # (hpk) not quite sure about the exact purpose, guido w.? + proto, uri = url.split("://", 1) + if proto != "file": + host, uripath = uri.split('/', 1) + # only check for bad chars in the non-protocol parts + if (_check_for_bad_chars(host, ALLOWED_CHARS_HOST) \ + or _check_for_bad_chars(uripath, ALLOWED_CHARS)): + raise ValueError("bad char in %r" % (url, )) + + +#_______________________________________________________________ + +class SvnPathBase(common.PathBase): + """ Base implementation for SvnPath implementations. """ + sep = '/' + + def _geturl(self): + return self.strpath + url = property(_geturl, None, None, "url of this svn-path.") + + def __str__(self): + """ return a string representation (including rev-number) """ + return self.strpath + + def __hash__(self): + return hash(self.strpath) + + def new(self, **kw): + """ create a modified version of this path. A 'rev' argument + indicates a new revision. 
+ the following keyword arguments modify various path parts:: + + http://host.com/repo/path/file.ext + |-----------------------| dirname + |------| basename + |--| purebasename + |--| ext + """ + obj = object.__new__(self.__class__) + obj.rev = kw.get('rev', self.rev) + obj.auth = kw.get('auth', self.auth) + dirname, basename, purebasename, ext = self._getbyspec( + "dirname,basename,purebasename,ext") + if 'basename' in kw: + if 'purebasename' in kw or 'ext' in kw: + raise ValueError("invalid specification %r" % kw) + else: + pb = kw.setdefault('purebasename', purebasename) + ext = kw.setdefault('ext', ext) + if ext and not ext.startswith('.'): + ext = '.' + ext + kw['basename'] = pb + ext + + kw.setdefault('dirname', dirname) + kw.setdefault('sep', self.sep) + if kw['basename']: + obj.strpath = "%(dirname)s%(sep)s%(basename)s" % kw + else: + obj.strpath = "%(dirname)s" % kw + return obj + + def _getbyspec(self, spec): + """ get specified parts of the path. 'arg' is a string + with comma separated path parts. The parts are returned + in exactly the order of the specification. + + you may specify the following parts: + + http://host.com/repo/path/file.ext + |-----------------------| dirname + |------| basename + |--| purebasename + |--| ext + """ + res = [] + parts = self.strpath.split(self.sep) + for name in spec.split(','): + name = name.strip() + if name == 'dirname': + res.append(self.sep.join(parts[:-1])) + elif name == 'basename': + res.append(parts[-1]) + else: + basename = parts[-1] + i = basename.rfind('.') + if i == -1: + purebasename, ext = basename, '' + else: + purebasename, ext = basename[:i], basename[i:] + if name == 'purebasename': + res.append(purebasename) + elif name == 'ext': + res.append(ext) + else: + raise NameError("Don't know part %r" % name) + return res + + def __eq__(self, other): + """ return true if path and rev attributes each match """ + return (str(self) == str(other) and + (self.rev == other.rev or self.rev == other.rev)) + + def __ne__(self, other): + return not self == other + + def join(self, *args): + """ return a new Path (with the same revision) which is composed + of the self Path followed by 'args' path components. + """ + if not args: + return self + + args = tuple([arg.strip(self.sep) for arg in args]) + parts = (self.strpath, ) + args + newpath = self.__class__(self.sep.join(parts), self.rev, self.auth) + return newpath + + def propget(self, name): + """ return the content of the given property. """ + value = self._propget(name) + return value + + def proplist(self): + """ list all property names. """ + content = self._proplist() + return content + + def size(self): + """ Return the size of the file content of the Path. """ + return self.info().size + + def mtime(self): + """ Return the last modification time of the file. """ + return self.info().mtime + + # shared help methods + + def _escape(self, cmd): + return _escape_helper(cmd) + + + #def _childmaxrev(self): + # """ return maximum revision number of childs (or self.rev if no childs) """ + # rev = self.rev + # for name, info in self._listdir_nameinfo(): + # rev = max(rev, info.created_rev) + # return rev + + #def _getlatestrevision(self): + # """ return latest repo-revision for this path. 
""" + # url = self.strpath + # path = self.__class__(url, None) + # + # # we need a long walk to find the root-repo and revision + # while 1: + # try: + # rev = max(rev, path._childmaxrev()) + # previous = path + # path = path.dirpath() + # except (IOError, process.cmdexec.Error): + # break + # if rev is None: + # raise IOError, "could not determine newest repo revision for %s" % self + # return rev + + class Checkers(common.Checkers): + def dir(self): + try: + return self.path.info().kind == 'dir' + except py.error.Error: + return self._listdirworks() + + def _listdirworks(self): + try: + self.path.listdir() + except py.error.ENOENT: + return False + else: + return True + + def file(self): + try: + return self.path.info().kind == 'file' + except py.error.ENOENT: + return False + + def exists(self): + try: + return self.path.info() + except py.error.ENOENT: + return self._listdirworks() + +def parse_apr_time(timestr): + i = timestr.rfind('.') + if i == -1: + raise ValueError("could not parse %s" % timestr) + timestr = timestr[:i] + parsedtime = time.strptime(timestr, "%Y-%m-%dT%H:%M:%S") + return time.mktime(parsedtime) + +class PropListDict(dict): + """ a Dictionary which fetches values (InfoSvnCommand instances) lazily""" + def __init__(self, path, keynames): + dict.__init__(self, [(x, None) for x in keynames]) + self.path = path + + def __getitem__(self, key): + value = dict.__getitem__(self, key) + if value is None: + value = self.path.propget(key) + dict.__setitem__(self, key, value) + return value + +def fixlocale(): + if sys.platform != 'win32': + return 'LC_ALL=C ' + return '' + +# some nasty chunk of code to solve path and url conversion and quoting issues +ILLEGAL_CHARS = '* | \\ / : < > ? \t \n \x0b \x0c \r'.split(' ') +if os.sep in ILLEGAL_CHARS: + ILLEGAL_CHARS.remove(os.sep) +ISWINDOWS = sys.platform == 'win32' +_reg_allow_disk = re.compile(r'^([a-z]\:\\)?[^:]+$', re.I) +def _check_path(path): + illegal = ILLEGAL_CHARS[:] + sp = path.strpath + if ISWINDOWS: + illegal.remove(':') + if not _reg_allow_disk.match(sp): + raise ValueError('path may not contain a colon (:)') + for char in sp: + if char not in string.printable or char in illegal: + raise ValueError('illegal character %r in path' % (char,)) + +def path_to_fspath(path, addat=True): + _check_path(path) + sp = path.strpath + if addat and path.rev != -1: + sp = '%s@%s' % (sp, path.rev) + elif addat: + sp = '%s@HEAD' % (sp,) + return sp + +def url_from_path(path): + fspath = path_to_fspath(path, False) + quote = py.std.urllib.quote + if ISWINDOWS: + match = _reg_allow_disk.match(fspath) + fspath = fspath.replace('\\', '/') + if match.group(1): + fspath = '/%s%s' % (match.group(1).replace('\\', '/'), + quote(fspath[len(match.group(1)):])) + else: + fspath = quote(fspath) + else: + fspath = quote(fspath) + if path.rev != -1: + fspath = '%s@%s' % (fspath, path.rev) + else: + fspath = '%s@HEAD' % (fspath,) + return 'file://%s' % (fspath,) + +class SvnAuth(object): + """ container for auth information for Subversion """ + def __init__(self, username, password, cache_auth=True, interactive=True): + self.username = username + self.password = password + self.cache_auth = cache_auth + self.interactive = interactive + + def makecmdoptions(self): + uname = self.username.replace('"', '\\"') + passwd = self.password.replace('"', '\\"') + ret = [] + if uname: + ret.append('--username="%s"' % (uname,)) + if passwd: + ret.append('--password="%s"' % (passwd,)) + if not self.cache_auth: + ret.append('--no-auth-cache') + if not 
self.interactive: + ret.append('--non-interactive') + return ' '.join(ret) + + def __str__(self): + return "<SvnAuth username=%s ...>" %(self.username,) + +rex_blame = re.compile(r'\s*(\d+)\s*(\S+) (.*)') + +class SvnWCCommandPath(common.PathBase): + """ path implementation offering access/modification to svn working copies. + It has methods similar to the functions in os.path and similar to the + commands of the svn client. + """ + sep = os.sep + + def __new__(cls, wcpath=None, auth=None): + self = object.__new__(cls) + if isinstance(wcpath, cls): + if wcpath.__class__ == cls: + return wcpath + wcpath = wcpath.localpath + if _check_for_bad_chars(str(wcpath), + ALLOWED_CHARS): + raise ValueError("bad char in wcpath %s" % (wcpath, )) + self.localpath = py.path.local(wcpath) + self.auth = auth + return self + + strpath = property(lambda x: str(x.localpath), None, None, "string path") + rev = property(lambda x: x.info(usecache=0).rev, None, None, "revision") + + def __eq__(self, other): + return self.localpath == getattr(other, 'localpath', None) + + def _geturl(self): + if getattr(self, '_url', None) is None: + info = self.info() + self._url = info.url #SvnPath(info.url, info.rev) + assert isinstance(self._url, py.builtin._basestring) + return self._url + + url = property(_geturl, None, None, "url of this WC item") + + def _escape(self, cmd): + return _escape_helper(cmd) + + def dump(self, obj): + """ pickle object into path location""" + return self.localpath.dump(obj) + + def svnurl(self): + """ return current SvnPath for this WC-item. """ + info = self.info() + return py.path.svnurl(info.url) + + def __repr__(self): + return "svnwc(%r)" % (self.strpath) # , self._url) + + def __str__(self): + return str(self.localpath) + + def _makeauthoptions(self): + if self.auth is None: + return '' + return self.auth.makecmdoptions() + + def _authsvn(self, cmd, args=None): + args = args and list(args) or [] + args.append(self._makeauthoptions()) + return self._svn(cmd, *args) + + def _svn(self, cmd, *args): + l = ['svn %s' % cmd] + args = [self._escape(item) for item in args] + l.extend(args) + l.append('"%s"' % self._escape(self.strpath)) + # try fixing the locale because we can't otherwise parse + string = fixlocale() + " ".join(l) + try: + try: + key = 'LC_MESSAGES' + hold = os.environ.get(key) + os.environ[key] = 'C' + out = py.process.cmdexec(string) + finally: + if hold: + os.environ[key] = hold + else: + del os.environ[key] + except py.process.cmdexec.Error: + e = sys.exc_info()[1] + strerr = e.err.lower() + if strerr.find('not found') != -1: + raise py.error.ENOENT(self) + elif strerr.find("E200009:") != -1: + raise py.error.ENOENT(self) + if (strerr.find('file exists') != -1 or + strerr.find('file already exists') != -1 or + strerr.find('w150002:') != -1 or + strerr.find("can't create directory") != -1): + raise py.error.EEXIST(strerr) #self) + raise + return out + + def switch(self, url): + """ switch to given URL. """ + self._authsvn('switch', [url]) + + def checkout(self, url=None, rev=None): + """ checkout from url to local wcpath. """ + args = [] + if url is None: + url = self.url + if rev is None or rev == -1: + if (py.std.sys.platform != 'win32' and + _getsvnversion() == '1.3'): + url += "@HEAD" + else: + if _getsvnversion() == '1.3': + url += "@%d" % rev + else: + args.append('-r' + str(rev)) + args.append(url) + self._authsvn('co', args) + + def update(self, rev='HEAD', interactive=True): + """ update working copy item to given revision. (None -> HEAD). 
""" + opts = ['-r', rev] + if not interactive: + opts.append("--non-interactive") + self._authsvn('up', opts) + + def write(self, content, mode='w'): + """ write content into local filesystem wc. """ + self.localpath.write(content, mode) + + def dirpath(self, *args): + """ return the directory Path of the current Path. """ + return self.__class__(self.localpath.dirpath(*args), auth=self.auth) + + def _ensuredirs(self): + parent = self.dirpath() + if parent.check(dir=0): + parent._ensuredirs() + if self.check(dir=0): + self.mkdir() + return self + + def ensure(self, *args, **kwargs): + """ ensure that an args-joined path exists (by default as + a file). if you specify a keyword argument 'directory=True' + then the path is forced to be a directory path. + """ + p = self.join(*args) + if p.check(): + if p.check(versioned=False): + p.add() + return p + if kwargs.get('dir', 0): + return p._ensuredirs() + parent = p.dirpath() + parent._ensuredirs() + p.write("") + p.add() + return p + + def mkdir(self, *args): + """ create & return the directory joined with args. """ + if args: + return self.join(*args).mkdir() + else: + self._svn('mkdir') + return self + + def add(self): + """ add ourself to svn """ + self._svn('add') + + def remove(self, rec=1, force=1): + """ remove a file or a directory tree. 'rec'ursive is + ignored and considered always true (because of + underlying svn semantics. + """ + assert rec, "svn cannot remove non-recursively" + if not self.check(versioned=True): + # not added to svn (anymore?), just remove + py.path.local(self).remove() + return + flags = [] + if force: + flags.append('--force') + self._svn('remove', *flags) + + def copy(self, target): + """ copy path to target.""" + py.process.cmdexec("svn copy %s %s" %(str(self), str(target))) + + def rename(self, target): + """ rename this path to target. """ + py.process.cmdexec("svn move --force %s %s" %(str(self), str(target))) + + def lock(self): + """ set a lock (exclusive) on the resource """ + out = self._authsvn('lock').strip() + if not out: + # warning or error, raise exception + raise ValueError("unknown error in svn lock command") + + def unlock(self): + """ unset a previously set lock """ + out = self._authsvn('unlock').strip() + if out.startswith('svn:'): + # warning or error, raise exception + raise Exception(out[4:]) + + def cleanup(self): + """ remove any locks from the resource """ + # XXX should be fixed properly!!! + try: + self.unlock() + except: + pass + + def status(self, updates=0, rec=0, externals=0): + """ return (collective) Status object for this file. 
""" + # http://svnbook.red-bean.com/book.html#svn-ch-3-sect-4.3.1 + # 2201 2192 jum test + # XXX + if externals: + raise ValueError("XXX cannot perform status() " + "on external items yet") + else: + #1.2 supports: externals = '--ignore-externals' + externals = '' + if rec: + rec= '' + else: + rec = '--non-recursive' + + # XXX does not work on all subversion versions + #if not externals: + # externals = '--ignore-externals' + + if updates: + updates = '-u' + else: + updates = '' + + try: + cmd = 'status -v --xml --no-ignore %s %s %s' % ( + updates, rec, externals) + out = self._authsvn(cmd) + except py.process.cmdexec.Error: + cmd = 'status -v --no-ignore %s %s %s' % ( + updates, rec, externals) + out = self._authsvn(cmd) + rootstatus = WCStatus(self).fromstring(out, self) + else: + rootstatus = XMLWCStatus(self).fromstring(out, self) + return rootstatus + + def diff(self, rev=None): + """ return a diff of the current path against revision rev (defaulting + to the last one). + """ + args = [] + if rev is not None: + args.append("-r %d" % rev) + out = self._authsvn('diff', args) + return out + + def blame(self): + """ return a list of tuples of three elements: + (revision, commiter, line) + """ + out = self._svn('blame') + result = [] + blamelines = out.splitlines() + reallines = py.path.svnurl(self.url).readlines() + for i, (blameline, line) in enumerate( + zip(blamelines, reallines)): + m = rex_blame.match(blameline) + if not m: + raise ValueError("output line %r of svn blame does not match " + "expected format" % (line, )) + rev, name, _ = m.groups() + result.append((int(rev), name, line)) + return result + + _rex_commit = re.compile(r'.*Committed revision (\d+)\.$', re.DOTALL) + def commit(self, msg='', rec=1): + """ commit with support for non-recursive commits """ + # XXX i guess escaping should be done better here?!? + cmd = 'commit -m "%s" --force-log' % (msg.replace('"', '\\"'),) + if not rec: + cmd += ' -N' + out = self._authsvn(cmd) + try: + del cache.info[self] + except KeyError: + pass + if out: + m = self._rex_commit.match(out) + return int(m.group(1)) + + def propset(self, name, value, *args): + """ set property name to value on this path. """ + d = py.path.local.mkdtemp() + try: + p = d.join('value') + p.write(value) + self._svn('propset', name, '--file', str(p), *args) + finally: + d.remove() + + def propget(self, name): + """ get property name on this path. """ + res = self._svn('propget', name) + return res[:-1] # strip trailing newline + + def propdel(self, name): + """ delete property name on this path. """ + res = self._svn('propdel', name) + return res[:-1] # strip trailing newline + + def proplist(self, rec=0): + """ return a mapping of property names to property values. +If rec is True, then return a dictionary mapping sub-paths to such mappings. +""" + if rec: + res = self._svn('proplist -R') + return make_recursive_propdict(self, res) + else: + res = self._svn('proplist') + lines = res.split('\n') + lines = [x.strip() for x in lines[1:]] + return PropListDict(self, lines) + + def revert(self, rec=0): + """ revert the local changes of this path. if rec is True, do so +recursively. """ + if rec: + result = self._svn('revert -R') + else: + result = self._svn('revert') + return result + + def new(self, **kw): + """ create a modified version of this path. A 'rev' argument + indicates a new revision. 
+ the following keyword arguments modify various path parts: + + http://host.com/repo/path/file.ext + |-----------------------| dirname + |------| basename + |--| purebasename + |--| ext + """ + if kw: + localpath = self.localpath.new(**kw) + else: + localpath = self.localpath + return self.__class__(localpath, auth=self.auth) + + def join(self, *args, **kwargs): + """ return a new Path (with the same revision) which is composed + of the self Path followed by 'args' path components. + """ + if not args: + return self + localpath = self.localpath.join(*args, **kwargs) + return self.__class__(localpath, auth=self.auth) + + def info(self, usecache=1): + """ return an Info structure with svn-provided information. """ + info = usecache and cache.info.get(self) + if not info: + try: + output = self._svn('info') + except py.process.cmdexec.Error: + e = sys.exc_info()[1] + if e.err.find('Path is not a working copy directory') != -1: + raise py.error.ENOENT(self, e.err) + elif e.err.find("is not under version control") != -1: + raise py.error.ENOENT(self, e.err) + raise + # XXX SVN 1.3 has output on stderr instead of stdout (while it does + # return 0!), so a bit nasty, but we assume no output is output + # to stderr... + if (output.strip() == '' or + output.lower().find('not a versioned resource') != -1): + raise py.error.ENOENT(self, output) + info = InfoSvnWCCommand(output) + + # Can't reliably compare on Windows without access to win32api + if py.std.sys.platform != 'win32': + if info.path != self.localpath: + raise py.error.ENOENT(self, "not a versioned resource:" + + " %s != %s" % (info.path, self.localpath)) + cache.info[self] = info + return info + + def listdir(self, fil=None, sort=None): + """ return a sequence of Paths. + + listdir will return either a tuple or a list of paths + depending on implementation choices. + """ + if isinstance(fil, str): + fil = common.FNMatcher(fil) + # XXX unify argument naming with LocalPath.listdir + def notsvn(path): + return path.basename != '.svn' + + paths = [] + for localpath in self.localpath.listdir(notsvn): + p = self.__class__(localpath, auth=self.auth) + if notsvn(p) and (not fil or fil(p)): + paths.append(p) + self._sortlist(paths, sort) + return paths + + def open(self, mode='r'): + """ return an opened file with the given mode. """ + return open(self.strpath, mode) + + def _getbyspec(self, spec): + return self.localpath._getbyspec(spec) + + class Checkers(py.path.local.Checkers): + def __init__(self, path): + self.svnwcpath = path + self.path = path.localpath + def versioned(self): + try: + s = self.svnwcpath.info() + except (py.error.ENOENT, py.error.EEXIST): + return False + except py.process.cmdexec.Error: + e = sys.exc_info()[1] + if e.err.find('is not a working copy')!=-1: + return False + if e.err.lower().find('not a versioned resource') != -1: + return False + raise + else: + return True + + def log(self, rev_start=None, rev_end=1, verbose=False): + """ return a list of LogEntry instances for this path. +rev_start is the starting revision (defaulting to the first one). +rev_end is the last revision (defaulting to HEAD). +if verbose is True, then the LogEntry instances also know which files changed. 
+""" + assert self.check() # make it simpler for the pipe + rev_start = rev_start is None and "HEAD" or rev_start + rev_end = rev_end is None and "HEAD" or rev_end + if rev_start == "HEAD" and rev_end == 1: + rev_opt = "" + else: + rev_opt = "-r %s:%s" % (rev_start, rev_end) + verbose_opt = verbose and "-v" or "" + locale_env = fixlocale() + # some blather on stderr + auth_opt = self._makeauthoptions() + #stdin, stdout, stderr = os.popen3(locale_env + + # 'svn log --xml %s %s %s "%s"' % ( + # rev_opt, verbose_opt, auth_opt, + # self.strpath)) + cmd = locale_env + 'svn log --xml %s %s %s "%s"' % ( + rev_opt, verbose_opt, auth_opt, self.strpath) + + popen = subprocess.Popen(cmd, + stdout=subprocess.PIPE, + stderr=subprocess.PIPE, + shell=True, + ) + stdout, stderr = popen.communicate() + stdout = py.builtin._totext(stdout, sys.getdefaultencoding()) + minidom,ExpatError = importxml() + try: + tree = minidom.parseString(stdout) + except ExpatError: + raise ValueError('no such revision') + result = [] + for logentry in filter(None, tree.firstChild.childNodes): + if logentry.nodeType == logentry.ELEMENT_NODE: + result.append(LogEntry(logentry)) + return result + + def size(self): + """ Return the size of the file content of the Path. """ + return self.info().size + + def mtime(self): + """ Return the last modification time of the file. """ + return self.info().mtime + + def __hash__(self): + return hash((self.strpath, self.__class__, self.auth)) + + +class WCStatus: + attrnames = ('modified','added', 'conflict', 'unchanged', 'external', + 'deleted', 'prop_modified', 'unknown', 'update_available', + 'incomplete', 'kindmismatch', 'ignored', 'locked', 'replaced' + ) + + def __init__(self, wcpath, rev=None, modrev=None, author=None): + self.wcpath = wcpath + self.rev = rev + self.modrev = modrev + self.author = author + + for name in self.attrnames: + setattr(self, name, []) + + def allpath(self, sort=True, **kw): + d = {} + for name in self.attrnames: + if name not in kw or kw[name]: + for path in getattr(self, name): + d[path] = 1 + l = d.keys() + if sort: + l.sort() + return l + + # XXX a bit scary to assume there's always 2 spaces between username and + # path, however with win32 allowing spaces in user names there doesn't + # seem to be a more solid approach :( + _rex_status = re.compile(r'\s+(\d+|-)\s+(\S+)\s+(.+?)\s{2,}(.*)') + + def fromstring(data, rootwcpath, rev=None, modrev=None, author=None): + """ return a new WCStatus object from data 's' + """ + rootstatus = WCStatus(rootwcpath, rev, modrev, author) + update_rev = None + for line in data.split('\n'): + if not line.strip(): + continue + #print "processing %r" % line + flags, rest = line[:8], line[8:] + # first column + c0,c1,c2,c3,c4,c5,x6,c7 = flags + #if '*' in line: + # print "flags", repr(flags), "rest", repr(rest) + + if c0 in '?XI': + fn = line.split(None, 1)[1] + if c0 == '?': + wcpath = rootwcpath.join(fn, abs=1) + rootstatus.unknown.append(wcpath) + elif c0 == 'X': + wcpath = rootwcpath.__class__( + rootwcpath.localpath.join(fn, abs=1), + auth=rootwcpath.auth) + rootstatus.external.append(wcpath) + elif c0 == 'I': + wcpath = rootwcpath.join(fn, abs=1) + rootstatus.ignored.append(wcpath) + + continue + + #elif c0 in '~!' 
or c4 == 'S': + # raise NotImplementedError("received flag %r" % c0) + + m = WCStatus._rex_status.match(rest) + if not m: + if c7 == '*': + fn = rest.strip() + wcpath = rootwcpath.join(fn, abs=1) + rootstatus.update_available.append(wcpath) + continue + if line.lower().find('against revision:')!=-1: + update_rev = int(rest.split(':')[1].strip()) + continue + if line.lower().find('status on external') > -1: + # XXX not sure what to do here... perhaps we want to + # store some state instead of just continuing, as right + # now it makes the top-level external get added twice + # (once as external, once as 'normal' unchanged item) + # because of the way SVN presents external items + continue + # keep trying + raise ValueError("could not parse line %r" % line) + else: + rev, modrev, author, fn = m.groups() + wcpath = rootwcpath.join(fn, abs=1) + #assert wcpath.check() + if c0 == 'M': + assert wcpath.check(file=1), "didn't expect a directory with changed content here" + rootstatus.modified.append(wcpath) + elif c0 == 'A' or c3 == '+' : + rootstatus.added.append(wcpath) + elif c0 == 'D': + rootstatus.deleted.append(wcpath) + elif c0 == 'C': + rootstatus.conflict.append(wcpath) + elif c0 == '~': + rootstatus.kindmismatch.append(wcpath) + elif c0 == '!': + rootstatus.incomplete.append(wcpath) + elif c0 == 'R': + rootstatus.replaced.append(wcpath) + elif not c0.strip(): + rootstatus.unchanged.append(wcpath) + else: + raise NotImplementedError("received flag %r" % c0) + + if c1 == 'M': + rootstatus.prop_modified.append(wcpath) + # XXX do we cover all client versions here? + if c2 == 'L' or c5 == 'K': + rootstatus.locked.append(wcpath) + if c7 == '*': + rootstatus.update_available.append(wcpath) + + if wcpath == rootwcpath: + rootstatus.rev = rev + rootstatus.modrev = modrev + rootstatus.author = author + if update_rev: + rootstatus.update_rev = update_rev + continue + return rootstatus + fromstring = staticmethod(fromstring) + +class XMLWCStatus(WCStatus): + def fromstring(data, rootwcpath, rev=None, modrev=None, author=None): + """ parse 'data' (XML string as outputted by svn st) into a status obj + """ + # XXX for externals, the path is shown twice: once + # with external information, and once with full info as if + # the item was a normal non-external... 
the current way of + # dealing with this issue is by ignoring it - this does make + # externals appear as external items as well as 'normal', + # unchanged ones in the status object so this is far from ideal + rootstatus = WCStatus(rootwcpath, rev, modrev, author) + update_rev = None + minidom, ExpatError = importxml() + try: + doc = minidom.parseString(data) + except ExpatError: + e = sys.exc_info()[1] + raise ValueError(str(e)) + urevels = doc.getElementsByTagName('against') + if urevels: + rootstatus.update_rev = urevels[-1].getAttribute('revision') + for entryel in doc.getElementsByTagName('entry'): + path = entryel.getAttribute('path') + statusel = entryel.getElementsByTagName('wc-status')[0] + itemstatus = statusel.getAttribute('item') + + if itemstatus == 'unversioned': + wcpath = rootwcpath.join(path, abs=1) + rootstatus.unknown.append(wcpath) + continue + elif itemstatus == 'external': + wcpath = rootwcpath.__class__( + rootwcpath.localpath.join(path, abs=1), + auth=rootwcpath.auth) + rootstatus.external.append(wcpath) + continue + elif itemstatus == 'ignored': + wcpath = rootwcpath.join(path, abs=1) + rootstatus.ignored.append(wcpath) + continue + elif itemstatus == 'incomplete': + wcpath = rootwcpath.join(path, abs=1) + rootstatus.incomplete.append(wcpath) + continue + + rev = statusel.getAttribute('revision') + if itemstatus == 'added' or itemstatus == 'none': + rev = '0' + modrev = '?' + author = '?' + date = '' + elif itemstatus == "replaced": + pass + else: + #print entryel.toxml() + commitel = entryel.getElementsByTagName('commit')[0] + if commitel: + modrev = commitel.getAttribute('revision') + author = '' + author_els = commitel.getElementsByTagName('author') + if author_els: + for c in author_els[0].childNodes: + author += c.nodeValue + date = '' + for c in commitel.getElementsByTagName('date')[0]\ + .childNodes: + date += c.nodeValue + + wcpath = rootwcpath.join(path, abs=1) + + assert itemstatus != 'modified' or wcpath.check(file=1), ( + 'did\'t expect a directory with changed content here') + + itemattrname = { + 'normal': 'unchanged', + 'unversioned': 'unknown', + 'conflicted': 'conflict', + 'none': 'added', + }.get(itemstatus, itemstatus) + + attr = getattr(rootstatus, itemattrname) + attr.append(wcpath) + + propsstatus = statusel.getAttribute('props') + if propsstatus not in ('none', 'normal'): + rootstatus.prop_modified.append(wcpath) + + if wcpath == rootwcpath: + rootstatus.rev = rev + rootstatus.modrev = modrev + rootstatus.author = author + rootstatus.date = date + + # handle repos-status element (remote info) + rstatusels = entryel.getElementsByTagName('repos-status') + if rstatusels: + rstatusel = rstatusels[0] + ritemstatus = rstatusel.getAttribute('item') + if ritemstatus in ('added', 'modified'): + rootstatus.update_available.append(wcpath) + + lockels = entryel.getElementsByTagName('lock') + if len(lockels): + rootstatus.locked.append(wcpath) + + return rootstatus + fromstring = staticmethod(fromstring) + +class InfoSvnWCCommand: + def __init__(self, output): + # Path: test + # URL: http://codespeak.net/svn/std.path/trunk/dist/std.path/test + # Repository UUID: fd0d7bf2-dfb6-0310-8d31-b7ecfe96aada + # Revision: 2151 + # Node Kind: directory + # Schedule: normal + # Last Changed Author: hpk + # Last Changed Rev: 2100 + # Last Changed Date: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003) + # Properties Last Updated: 2003-11-03 14:47:48 +0100 (Mon, 03 Nov 2003) + + d = {} + for line in output.split('\n'): + if not line.strip(): + continue + key, value = 
line.split(':', 1) + key = key.lower().replace(' ', '') + value = value.strip() + d[key] = value + try: + self.url = d['url'] + except KeyError: + raise ValueError("Not a versioned resource") + #raise ValueError, "Not a versioned resource %r" % path + self.kind = d['nodekind'] == 'directory' and 'dir' or d['nodekind'] + try: + self.rev = int(d['revision']) + except KeyError: + self.rev = None + + self.path = py.path.local(d['path']) + self.size = self.path.size() + if 'lastchangedrev' in d: + self.created_rev = int(d['lastchangedrev']) + if 'lastchangedauthor' in d: + self.last_author = d['lastchangedauthor'] + if 'lastchangeddate' in d: + self.mtime = parse_wcinfotime(d['lastchangeddate']) + self.time = self.mtime * 1000000 + + def __eq__(self, other): + return self.__dict__ == other.__dict__ + +def parse_wcinfotime(timestr): + """ Returns seconds since epoch, UTC. """ + # example: 2003-10-27 20:43:14 +0100 (Mon, 27 Oct 2003) + m = re.match(r'(\d+-\d+-\d+ \d+:\d+:\d+) ([+-]\d+) .*', timestr) + if not m: + raise ValueError("timestring %r does not match" % timestr) + timestr, timezone = m.groups() + # do not handle timezone specially, return value should be UTC + parsedtime = time.strptime(timestr, "%Y-%m-%d %H:%M:%S") + return calendar.timegm(parsedtime) + +def make_recursive_propdict(wcroot, + output, + rex = re.compile("Properties on '(.*)':")): + """ Return a dictionary of path->PropListDict mappings. """ + lines = [x for x in output.split('\n') if x] + pdict = {} + while lines: + line = lines.pop(0) + m = rex.match(line) + if not m: + raise ValueError("could not parse propget-line: %r" % line) + path = m.groups()[0] + wcpath = wcroot.join(path, abs=1) + propnames = [] + while lines and lines[0].startswith(' '): + propname = lines.pop(0).strip() + propnames.append(propname) + assert propnames, "must have found properties!" 
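+ # editor's note (illustrative, not upstream code): the 'svn proplist -R'
+ # output parsed above looks like
+ #
+ # Properties on 'some/path':
+ # svn:ignore
+ # svn:eol-style
+ #
+ # each "Properties on ..." header opens a new path entry and the
+ # indented lines beneath it are the property names collected above.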
+ pdict[wcpath] = PropListDict(wcpath, propnames) + return pdict + + +def importxml(cache=[]): + if cache: + return cache + from xml.dom import minidom + from xml.parsers.expat import ExpatError + cache.extend([minidom, ExpatError]) + return cache + +class LogEntry: + def __init__(self, logentry): + self.rev = int(logentry.getAttribute('revision')) + for lpart in filter(None, logentry.childNodes): + if lpart.nodeType == lpart.ELEMENT_NODE: + if lpart.nodeName == 'author': + self.author = lpart.firstChild.nodeValue + elif lpart.nodeName == 'msg': + if lpart.firstChild: + self.msg = lpart.firstChild.nodeValue + else: + self.msg = '' + elif lpart.nodeName == 'date': + #2003-07-29T20:05:11.598637Z + timestr = lpart.firstChild.nodeValue + self.date = parse_apr_time(timestr) + elif lpart.nodeName == 'paths': + self.strpaths = [] + for ppart in filter(None, lpart.childNodes): + if ppart.nodeType == ppart.ELEMENT_NODE: + self.strpaths.append(PathEntry(ppart)) + def __repr__(self): + return '<Logentry rev=%d author=%s date=%s>' % ( + self.rev, self.author, self.date) + + diff --git a/venv/lib/python3.5/site-packages/py/_process/__init__.py b/venv/lib/python3.5/site-packages/py/_process/__init__.py new file mode 100644 index 0000000..3186a2d --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_process/__init__.py @@ -0,0 +1 @@ +""" high-level sub-process handling """ diff --git a/venv/lib/python3.5/site-packages/py/_process/cmdexec.py b/venv/lib/python3.5/site-packages/py/_process/cmdexec.py new file mode 100644 index 0000000..22a1a82 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_process/cmdexec.py @@ -0,0 +1,49 @@ +import sys +import subprocess +import py +from subprocess import Popen, PIPE + +def cmdexec(cmd): + """ return unicode output of executing 'cmd' in a separate process. + + raise a cmdexec.Error exception if the command failed. + the exception will provide an 'err' attribute containing + the error-output from the command. + if the subprocess module does not provide proper encoding/unicode strings, + sys.getdefaultencoding() will be used; if that does not exist, 'UTF-8'.
+ """ + process = subprocess.Popen(cmd, shell=True, + universal_newlines=True, + stdout=subprocess.PIPE, stderr=subprocess.PIPE) + out, err = process.communicate() + if sys.version_info[0] < 3: # on py3 we get unicode strings, on py2 not + try: + default_encoding = sys.getdefaultencoding() # jython may not have it + except AttributeError: + default_encoding = sys.stdout.encoding or 'UTF-8' + out = unicode(out, process.stdout.encoding or default_encoding) + err = unicode(err, process.stderr.encoding or default_encoding) + status = process.poll() + if status: + raise ExecutionFailed(status, status, cmd, out, err) + return out + +class ExecutionFailed(py.error.Error): + def __init__(self, status, systemstatus, cmd, out, err): + Exception.__init__(self) + self.status = status + self.systemstatus = systemstatus + self.cmd = cmd + self.err = err + self.out = out + + def __str__(self): + return "ExecutionFailed: %d %s\n%s" %(self.status, self.cmd, self.err) + +# export the exception under the name 'py.process.cmdexec.Error' +cmdexec.Error = ExecutionFailed +try: + ExecutionFailed.__module__ = 'py.process.cmdexec' + ExecutionFailed.__name__ = 'Error' +except (AttributeError, TypeError): + pass diff --git a/venv/lib/python3.5/site-packages/py/_process/forkedfunc.py b/venv/lib/python3.5/site-packages/py/_process/forkedfunc.py new file mode 100644 index 0000000..d23e653 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_process/forkedfunc.py @@ -0,0 +1,120 @@ + +""" + ForkedFunc provides a way to run a function in a forked process + and get at its return value, stdout and stderr output as well + as signals and exitstatusus. +""" + +import py +import os +import sys +import marshal + + +def get_unbuffered_io(fd, filename): + f = open(str(filename), "w") + if fd != f.fileno(): + os.dup2(f.fileno(), fd) + class AutoFlush: + def write(self, data): + f.write(data) + f.flush() + def __getattr__(self, name): + return getattr(f, name) + return AutoFlush() + + +class ForkedFunc: + EXITSTATUS_EXCEPTION = 3 + + + def __init__(self, fun, args=None, kwargs=None, nice_level=0, + child_on_start=None, child_on_exit=None): + if args is None: + args = [] + if kwargs is None: + kwargs = {} + self.fun = fun + self.args = args + self.kwargs = kwargs + self.tempdir = tempdir = py.path.local.mkdtemp() + self.RETVAL = tempdir.ensure('retval') + self.STDOUT = tempdir.ensure('stdout') + self.STDERR = tempdir.ensure('stderr') + + pid = os.fork() + if pid: # in parent process + self.pid = pid + else: # in child process + self.pid = None + self._child(nice_level, child_on_start, child_on_exit) + + def _child(self, nice_level, child_on_start, child_on_exit): + # right now we need to call a function, but first we need to + # map all IO that might happen + sys.stdout = stdout = get_unbuffered_io(1, self.STDOUT) + sys.stderr = stderr = get_unbuffered_io(2, self.STDERR) + retvalf = self.RETVAL.open("wb") + EXITSTATUS = 0 + try: + if nice_level: + os.nice(nice_level) + try: + if child_on_start is not None: + child_on_start() + retval = self.fun(*self.args, **self.kwargs) + retvalf.write(marshal.dumps(retval)) + if child_on_exit is not None: + child_on_exit() + except: + excinfo = py.code.ExceptionInfo() + stderr.write(str(excinfo._getreprcrash())) + EXITSTATUS = self.EXITSTATUS_EXCEPTION + finally: + stdout.close() + stderr.close() + retvalf.close() + os.close(1) + os.close(2) + os._exit(EXITSTATUS) + + def waitfinish(self, waiter=os.waitpid): + pid, systemstatus = waiter(self.pid, 0) + if systemstatus: + if 
os.WIFSIGNALED(systemstatus): + exitstatus = os.WTERMSIG(systemstatus) + 128 + else: + exitstatus = os.WEXITSTATUS(systemstatus) + else: + exitstatus = 0 + signal = systemstatus & 0x7f + if not exitstatus and not signal: + retval = self.RETVAL.open('rb') + try: + retval_data = retval.read() + finally: + retval.close() + retval = marshal.loads(retval_data) + else: + retval = None + stdout = self.STDOUT.read() + stderr = self.STDERR.read() + self._removetemp() + return Result(exitstatus, signal, retval, stdout, stderr) + + def _removetemp(self): + if self.tempdir.check(): + self.tempdir.remove() + + def __del__(self): + if self.pid is not None: # only clean up in main process + self._removetemp() + + +class Result(object): + def __init__(self, exitstatus, signal, retval, stdout, stderr): + self.exitstatus = exitstatus + self.signal = signal + self.retval = retval + self.out = stdout + self.err = stderr diff --git a/venv/lib/python3.5/site-packages/py/_process/killproc.py b/venv/lib/python3.5/site-packages/py/_process/killproc.py new file mode 100644 index 0000000..5b6032c --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_process/killproc.py @@ -0,0 +1,23 @@ +import py +import os, sys + +if sys.platform == "win32" or getattr(os, '_name', '') == 'nt': + try: + import ctypes + except ImportError: + def dokill(pid): + py.process.cmdexec("taskkill /F /PID %d" %(pid,)) + else: + def dokill(pid): + PROCESS_TERMINATE = 1 + handle = ctypes.windll.kernel32.OpenProcess( + PROCESS_TERMINATE, False, pid) + ctypes.windll.kernel32.TerminateProcess(handle, -1) + ctypes.windll.kernel32.CloseHandle(handle) +else: + def dokill(pid): + os.kill(pid, 15) + +def kill(pid): + """ kill process by id. """ + dokill(pid) diff --git a/venv/lib/python3.5/site-packages/py/_std.py b/venv/lib/python3.5/site-packages/py/_std.py new file mode 100644 index 0000000..e016bc8 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_std.py @@ -0,0 +1,18 @@ +import sys + +class Std(object): + """ makes top-level python modules available as an attribute, + importing them on first access. + """ + + def __init__(self): + self.__dict__ = sys.modules + + def __getattr__(self, name): + try: + m = __import__(name) + except ImportError: + raise AttributeError("py.std: could not import %s" % name) + return m + +std = Std() diff --git a/venv/lib/python3.5/site-packages/py/_xmlgen.py b/venv/lib/python3.5/site-packages/py/_xmlgen.py new file mode 100644 index 0000000..f4d4116 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/_xmlgen.py @@ -0,0 +1,255 @@ +""" +module for generating and serializing xml and html structures +by using simple python objects. + +(c) holger krekel, holger at merlinux eu. 
2009 +""" +import sys, re + +if sys.version_info >= (3,0): + def u(s): + return s + def unicode(x, errors=None): + if hasattr(x, '__unicode__'): + return x.__unicode__() + return str(x) +else: + def u(s): + return unicode(s) + unicode = unicode + + +class NamespaceMetaclass(type): + def __getattr__(self, name): + if name[:1] == '_': + raise AttributeError(name) + if self == Namespace: + raise ValueError("Namespace class is abstract") + tagspec = self.__tagspec__ + if tagspec is not None and name not in tagspec: + raise AttributeError(name) + classattr = {} + if self.__stickyname__: + classattr['xmlname'] = name + cls = type(name, (self.__tagclass__,), classattr) + setattr(self, name, cls) + return cls + +class Tag(list): + class Attr(object): + def __init__(self, **kwargs): + self.__dict__.update(kwargs) + + def __init__(self, *args, **kwargs): + super(Tag, self).__init__(args) + self.attr = self.Attr(**kwargs) + + def __unicode__(self): + return self.unicode(indent=0) + __str__ = __unicode__ + + def unicode(self, indent=2): + l = [] + SimpleUnicodeVisitor(l.append, indent).visit(self) + return u("").join(l) + + def __repr__(self): + name = self.__class__.__name__ + return "<%r tag object %d>" % (name, id(self)) + +Namespace = NamespaceMetaclass('Namespace', (object, ), { + '__tagspec__': None, + '__tagclass__': Tag, + '__stickyname__': False, +}) + +class HtmlTag(Tag): + def unicode(self, indent=2): + l = [] + HtmlVisitor(l.append, indent, shortempty=False).visit(self) + return u("").join(l) + +# exported plain html namespace +class html(Namespace): + __tagclass__ = HtmlTag + __stickyname__ = True + __tagspec__ = dict([(x,1) for x in ( + 'a,abbr,acronym,address,applet,area,article,aside,audio,b,' + 'base,basefont,bdi,bdo,big,blink,blockquote,body,br,button,' + 'canvas,caption,center,cite,code,col,colgroup,command,comment,' + 'datalist,dd,del,details,dfn,dir,div,dl,dt,em,embed,' + 'fieldset,figcaption,figure,footer,font,form,frame,frameset,h1,' + 'h2,h3,h4,h5,h6,head,header,hgroup,hr,html,i,iframe,img,input,' + 'ins,isindex,kbd,keygen,label,legend,li,link,listing,map,mark,' + 'marquee,menu,meta,meter,multicol,nav,nobr,noembed,noframes,' + 'noscript,object,ol,optgroup,option,output,p,param,pre,progress,' + 'q,rp,rt,ruby,s,samp,script,section,select,small,source,span,' + 'strike,strong,style,sub,summary,sup,table,tbody,td,textarea,' + 'tfoot,th,thead,time,title,tr,track,tt,u,ul,xmp,var,video,wbr' + ).split(',') if x]) + + class Style(object): + def __init__(self, **kw): + for x, y in kw.items(): + x = x.replace('_', '-') + setattr(self, x, y) + + +class raw(object): + """just a box that can contain a unicode string that will be + included directly in the output""" + def __init__(self, uniobj): + self.uniobj = uniobj + +class SimpleUnicodeVisitor(object): + """ recursive visitor to write unicode. """ + def __init__(self, write, indent=0, curindent=0, shortempty=True): + self.write = write + self.cache = {} + self.visited = {} # for detection of recursion + self.indent = indent + self.curindent = curindent + self.parents = [] + self.shortempty = shortempty # short empty tags or not + + def visit(self, node): + """ dispatcher on node's class/bases name. 
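+
+ (editor's illustration, not original text: e.g. visiting
+ py.xml.html.div("hi") dispatches along the class mro to the Tag
+ handler and renders '<div>hi</div>'; objects without a handler fall
+ back to the escaped-unicode default below.)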
""" + cls = node.__class__ + try: + visitmethod = self.cache[cls] + except KeyError: + for subclass in cls.__mro__: + visitmethod = getattr(self, subclass.__name__, None) + if visitmethod is not None: + break + else: + visitmethod = self.__object + self.cache[cls] = visitmethod + visitmethod(node) + + # the default fallback handler is marked private + # to avoid clashes with the tag name object + def __object(self, obj): + #self.write(obj) + self.write(escape(unicode(obj))) + + def raw(self, obj): + self.write(obj.uniobj) + + def list(self, obj): + assert id(obj) not in self.visited + self.visited[id(obj)] = 1 + for elem in obj: + self.visit(elem) + + def Tag(self, tag): + assert id(tag) not in self.visited + try: + tag.parent = self.parents[-1] + except IndexError: + tag.parent = None + self.visited[id(tag)] = 1 + tagname = getattr(tag, 'xmlname', tag.__class__.__name__) + if self.curindent and not self._isinline(tagname): + self.write("\n" + u(' ') * self.curindent) + if tag: + self.curindent += self.indent + self.write(u('<%s%s>') % (tagname, self.attributes(tag))) + self.parents.append(tag) + for x in tag: + self.visit(x) + self.parents.pop() + self.write(u('</%s>') % tagname) + self.curindent -= self.indent + else: + nameattr = tagname+self.attributes(tag) + if self._issingleton(tagname): + self.write(u('<%s/>') % (nameattr,)) + else: + self.write(u('<%s></%s>') % (nameattr, tagname)) + + def attributes(self, tag): + # serialize attributes + attrlist = dir(tag.attr) + attrlist.sort() + l = [] + for name in attrlist: + res = self.repr_attribute(tag.attr, name) + if res is not None: + l.append(res) + l.extend(self.getstyle(tag)) + return u("").join(l) + + def repr_attribute(self, attrs, name): + if name[:2] != '__': + value = getattr(attrs, name) + if name.endswith('_'): + name = name[:-1] + if isinstance(value, raw): + insert = value.uniobj + else: + insert = escape(unicode(value)) + return ' %s="%s"' % (name, insert) + + def getstyle(self, tag): + """ return attribute list suitable for styling. """ + try: + styledict = tag.style.__dict__ + except AttributeError: + return [] + else: + stylelist = [x+': ' + y for x,y in styledict.items()] + return [u(' style="%s"') % u('; ').join(stylelist)] + + def _issingleton(self, tagname): + """can (and will) be overridden in subclasses""" + return self.shortempty + + def _isinline(self, tagname): + """can (and will) be overridden in subclasses""" + return False + +class HtmlVisitor(SimpleUnicodeVisitor): + + single = dict([(x, 1) for x in + ('br,img,area,param,col,hr,meta,link,base,' + 'input,frame').split(',')]) + inline = dict([(x, 1) for x in + ('a abbr acronym b basefont bdo big br cite code dfn em font ' + 'i img input kbd label q s samp select small span strike ' + 'strong sub sup textarea tt u var'.split(' '))]) + + def repr_attribute(self, attrs, name): + if name == 'class_': + value = getattr(attrs, name) + if value is None: + return + return super(HtmlVisitor, self).repr_attribute(attrs, name) + + def _issingleton(self, tagname): + return tagname in self.single + + def _isinline(self, tagname): + return tagname in self.inline + + +class _escape: + def __init__(self): + self.escape = { + u('"') : u('"'), u('<') : u('<'), u('>') : u('>'), + u('&') : u('&'), u("'") : u('''), + } + self.charef_rex = re.compile(u("|").join(self.escape.keys())) + + def _replacer(self, match): + return self.escape[match.group(0)] + + def __call__(self, ustring): + """ xml-escape the given unicode string. 
""" + try: + ustring = unicode(ustring) + except UnicodeDecodeError: + ustring = unicode(ustring, 'utf-8', errors='replace') + return self.charef_rex.sub(self._replacer, ustring) + +escape = _escape() diff --git a/venv/lib/python3.5/site-packages/py/test.py b/venv/lib/python3.5/site-packages/py/test.py new file mode 100644 index 0000000..84e8881 --- /dev/null +++ b/venv/lib/python3.5/site-packages/py/test.py @@ -0,0 +1,10 @@ +import sys +if __name__ == '__main__': + import pytest + sys.exit(pytest.main()) +else: + import sys, pytest + sys.modules['py.test'] = pytest + +# for more API entry points see the 'tests' definition +# in __init__.py diff --git a/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/DESCRIPTION.rst b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..387164f --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/DESCRIPTION.rst @@ -0,0 +1,111 @@ +.. image:: http://docs.pytest.org/en/latest/_static/pytest1.png + :target: http://docs.pytest.org + :align: center + :alt: pytest + +------ + +.. image:: https://img.shields.io/pypi/v/pytest.svg + :target: https://pypi.python.org/pypi/pytest + +.. image:: https://anaconda.org/conda-forge/pytest/badges/version.svg + :target: https://anaconda.org/conda-forge/pytest + +.. image:: https://img.shields.io/pypi/pyversions/pytest.svg + :target: https://pypi.python.org/pypi/pytest + +.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg + :target: https://coveralls.io/r/pytest-dev/pytest + +.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master + :target: https://travis-ci.org/pytest-dev/pytest + +.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true + :target: https://ci.appveyor.com/project/pytestbot/pytest + +The ``pytest`` framework makes it easy to write small tests, yet +scales to support complex functional testing for applications and libraries. + +An example of a simple test: + +.. code-block:: python + + # content of test_sample.py + def inc(x): + return x + 1 + + def test_answer(): + assert inc(3) == 5 + + +To execute it:: + + $ pytest + ============================= test session starts ============================= + collected 1 items + + test_sample.py F + + ================================== FAILURES =================================== + _________________________________ test_answer _________________________________ + + def test_answer(): + > assert inc(3) == 5 + E assert 4 == 5 + E + where 4 = inc(3) + + test_sample.py:5: AssertionError + ========================== 1 failed in 0.04 seconds =========================== + + +Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. See `getting-started <http://docs.pytest.org/en/latest/getting-started.html#our-first-test-run>`_ for more examples. 
+ + +Features +-------- + +- Detailed info on failing `assert statements <http://docs.pytest.org/en/latest/assert.html>`_ (no need to remember ``self.assert*`` names); + +- `Auto-discovery + <http://docs.pytest.org/en/latest/goodpractices.html#python-test-discovery>`_ + of test modules and functions; + +- `Modular fixtures <http://docs.pytest.org/en/latest/fixture.html>`_ for + managing small or parametrized long-lived test resources; + +- Can run `unittest <http://docs.pytest.org/en/latest/unittest.html>`_ (or trial), + `nose <http://docs.pytest.org/en/latest/nose.html>`_ test suites out of the box; + +- Python2.6+, Python3.3+, PyPy-2.3, Jython-2.5 (untested); + +- Rich plugin architecture, with over 150+ `external plugins <http://docs.pytest.org/en/latest/plugins.html#installing-external-plugins-searching>`_ and thriving community; + + +Documentation +------------- + +For full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org. + + +Bugs/Requests +------------- + +Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features. + + +Changelog +--------- + +Consult the `Changelog <http://docs.pytest.org/en/latest/changelog.html>`__ page for fixes and enhancements of each version. + + +License +------- + +Copyright Holger Krekel and others, 2004-2017. + +Distributed under the terms of the `MIT`_ license, pytest is free and open source software. + +.. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE + + diff --git a/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/INSTALLER b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/LICENSE.txt b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/LICENSE.txt new file mode 100644 index 0000000..629df45 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/LICENSE.txt @@ -0,0 +1,21 @@ +The MIT License (MIT) + +Copyright (c) 2004-2017 Holger Krekel and others + +Permission is hereby granted, free of charge, to any person obtaining a copy of +this software and associated documentation files (the "Software"), to deal in +the Software without restriction, including without limitation the rights to +use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies +of the Software, and to permit persons to whom the Software is furnished to do +so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
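For the "modular fixtures" feature listed in the packaged description
above, a minimal sketch (editor's illustration; the fixture and test
names are invented, not taken from the packaged files):

    import pytest

    @pytest.fixture
    def numbers():
        # a small, reusable test resource, set up for each test
        return [1, 2, 3]

    def test_sum(numbers):
        # pytest injects the fixture by matching the argument name
        assert sum(numbers) == 6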
diff --git a/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/METADATA b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/METADATA new file mode 100644 index 0000000..bba1b96 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/METADATA @@ -0,0 +1,147 @@ +Metadata-Version: 2.0 +Name: pytest +Version: 3.1.3 +Summary: pytest: simple powerful testing with Python +Home-page: http://pytest.org +Author: Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others +Author-email: UNKNOWN +License: MIT license +Keywords: test unittest +Platform: unix +Platform: linux +Platform: osx +Platform: cygwin +Platform: win32 +Classifier: Development Status :: 6 - Mature +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: POSIX +Classifier: Operating System :: Microsoft :: Windows +Classifier: Operating System :: MacOS :: MacOS X +Classifier: Topic :: Software Development :: Testing +Classifier: Topic :: Software Development :: Libraries +Classifier: Topic :: Utilities +Classifier: Programming Language :: Python :: 2 +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Programming Language :: Python :: 3.6 +Requires-Dist: py (>=1.4.33) +Requires-Dist: setuptools +Requires-Dist: argparse; python_version=="2.6" +Requires-Dist: colorama; sys_platform=="win32" + +.. image:: http://docs.pytest.org/en/latest/_static/pytest1.png + :target: http://docs.pytest.org + :align: center + :alt: pytest + +------ + +.. image:: https://img.shields.io/pypi/v/pytest.svg + :target: https://pypi.python.org/pypi/pytest + +.. image:: https://anaconda.org/conda-forge/pytest/badges/version.svg + :target: https://anaconda.org/conda-forge/pytest + +.. image:: https://img.shields.io/pypi/pyversions/pytest.svg + :target: https://pypi.python.org/pypi/pytest + +.. image:: https://img.shields.io/coveralls/pytest-dev/pytest/master.svg + :target: https://coveralls.io/r/pytest-dev/pytest + +.. image:: https://travis-ci.org/pytest-dev/pytest.svg?branch=master + :target: https://travis-ci.org/pytest-dev/pytest + +.. image:: https://ci.appveyor.com/api/projects/status/mrgbjaua7t33pg6b?svg=true + :target: https://ci.appveyor.com/project/pytestbot/pytest + +The ``pytest`` framework makes it easy to write small tests, yet +scales to support complex functional testing for applications and libraries. + +An example of a simple test: + +.. code-block:: python + + # content of test_sample.py + def inc(x): + return x + 1 + + def test_answer(): + assert inc(3) == 5 + + +To execute it:: + + $ pytest + ============================= test session starts ============================= + collected 1 items + + test_sample.py F + + ================================== FAILURES =================================== + _________________________________ test_answer _________________________________ + + def test_answer(): + > assert inc(3) == 5 + E assert 4 == 5 + E + where 4 = inc(3) + + test_sample.py:5: AssertionError + ========================== 1 failed in 0.04 seconds =========================== + + +Due to ``pytest``'s detailed assertion introspection, only plain ``assert`` statements are used. 
See `getting-started <http://docs.pytest.org/en/latest/getting-started.html#our-first-test-run>`_ for more examples. + + +Features +-------- + +- Detailed info on failing `assert statements <http://docs.pytest.org/en/latest/assert.html>`_ (no need to remember ``self.assert*`` names); + +- `Auto-discovery + <http://docs.pytest.org/en/latest/goodpractices.html#python-test-discovery>`_ + of test modules and functions; + +- `Modular fixtures <http://docs.pytest.org/en/latest/fixture.html>`_ for + managing small or parametrized long-lived test resources; + +- Can run `unittest <http://docs.pytest.org/en/latest/unittest.html>`_ (or trial), + `nose <http://docs.pytest.org/en/latest/nose.html>`_ test suites out of the box; + +- Python2.6+, Python3.3+, PyPy-2.3, Jython-2.5 (untested); + +- Rich plugin architecture, with 150+ `external plugins <http://docs.pytest.org/en/latest/plugins.html#installing-external-plugins-searching>`_ and a thriving community; + + +Documentation +------------- + +For full documentation, including installation, tutorials and PDF documents, please see http://docs.pytest.org. + + +Bugs/Requests +------------- + +Please use the `GitHub issue tracker <https://github.com/pytest-dev/pytest/issues>`_ to submit bugs or request features. + + +Changelog +--------- + +Consult the `Changelog <http://docs.pytest.org/en/latest/changelog.html>`__ page for fixes and enhancements of each version. + + +License +------- + +Copyright Holger Krekel and others, 2004-2017. + +Distributed under the terms of the `MIT`_ license, pytest is free and open source software. + +.. _`MIT`: https://github.com/pytest-dev/pytest/blob/master/LICENSE + + diff --git a/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/RECORD b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/RECORD new file mode 100644 index 0000000..fc41779 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/RECORD @@ -0,0 +1,99 @@ +pytest.py,sha256=h94kggJr6sOBzLs3f5I03sXbHcdkFvWujEYp1pRGKiU,1803 +_pytest/__init__.py,sha256=tnN4b4FVL2drauWlNtu0BWXVgHN0Zi9nuTVfj4rQ3VU,239 +_pytest/_argcomplete.py,sha256=wQLVQKsi2hyc1BC04GMwojLcNVbcK9V_pN6dANxBduw,3688 +_pytest/_pluggy.py,sha256=_RXK7wPtdCW9S9cnEOK_CSV0jJwkwYaIlXeN4nsmVy8,426 +_pytest/_version.py,sha256=WyMlz3XJ-2tbmAfcmKaMgYzg8GYvwRXgKjwswhsmd8M,116 +_pytest/cacheprovider.py,sha256=tKfmAZCvJgIJxqMPv4j5tZc2Ywwv0pVmbeg2wcRUzFQ,9009 +_pytest/capture.py,sha256=KAYlzq7q91FEqkAcpfJ3yqErwId-uYxArB2n2O2chHE,17102 +_pytest/compat.py,sha256=qAuK9I-DkGkvC8TmjC2oDS93E-Q9PZsupg8F-uOGNeo,9117 +_pytest/config.py,sha256=3jFfaiqXTFBiB9BQ69NOQrSxnbuzAa8okk55fqi8n7A,52819 +_pytest/debugging.py,sha256=k8kC5XkMcDYI7eBm9sN8fny28HWYGVc5cSu8uwUZkds,4000 +_pytest/deprecated.py,sha256=50pCgBIMjjppF9N0vB3oMi24q8f1zmqFIMUAnLQ05h8,1048 +_pytest/doctest.py,sha256=rWj3godu2EArE-AtaFwoYMkYhBOpyFt5PKBQJ9Hlzm4,12960 +_pytest/fixtures.py,sha256=d0x25P7YlnAeVwOQx9XNuQkc8m-J5UyLydJrhMwN_RA,45027 +_pytest/freeze_support.py,sha256=AUcO6JGsLgRmhd_pDT4ICJFyXVZCdtitK_yPNuii4OE,1195 +_pytest/helpconfig.py,sha256=mq5PVrAU_rTzU3FcqZ4AzQ0Wpqtuqfxy3Oqm0oEVoFk,6486 +_pytest/hookspec.py,sha256=V-d7guBoDCEiV03mbn-KHi7B0L7Ive4823PmAKIHcV0,13228 +_pytest/junitxml.py,sha256=8bmGyMb2wAvKUGVuf9_N9AmQJE1g_Y6P8oeMAkf-ypE,15667 +_pytest/main.py,sha256=9AHR7kYXkCvMqzPcYNIE9i8aG21MysBepZGoUbSYF2I,27960 +_pytest/mark.py,sha256=gvHylqTuCuAGdFBOfEsSdXXloTyB7kCv2wjtPcpXoFM,13118 +_pytest/monkeypatch.py,sha256=wucFrEdH6kD30PazY5AU6OvAVEkC-4c0RjmDT9ojWek,9051
+_pytest/nose.py,sha256=zFEuaNGz8zy0L8f8_xu9y_DUwmtPjEoaw0fLHLQRe1g,2616 +_pytest/pastebin.py,sha256=-GRAii-D3XAkfiCvUTEAqH6Wyjg7rtCO5vUW_bCNI7U,3555 +_pytest/pytester.py,sha256=2MsQhBtXnhrksSYj4dMV7ScsC4SKG9cSIsn1i5y2TXs,40057 +_pytest/python.py,sha256=WIEkcLeu44kcsAX_A553cGG1Gpy3PdUuiCigmyN-z9c,61662 +_pytest/recwarn.py,sha256=5W9cNVbI-4Vx0Mj3zEd67DOnLeoP4n_m71eoZjC0QaQ,6995 +_pytest/resultlog.py,sha256=6BzQwDqnOEhnreyDNAAM4FxJDYB6IsfFYMatR-oUOUs,3677 +_pytest/runner.py,sha256=fsg-52OuX9k-3wh_D6CtffzBZyLwncmXht__GvHp_UM,19302 +_pytest/setuponly.py,sha256=Oif-L4JQdNrB7RJxf6-gK9_ilYANbFO65U8yAS01kfY,2560 +_pytest/setupplan.py,sha256=RO4GbGY7CtfTzFLBF162INxg7RabhXrUTUrmAL-PFvc,816 +_pytest/skipping.py,sha256=dQtYNHRQgOj9R08aLjWiQZSqDjbCTnTYSUCD3fqOw9s,13618 +_pytest/terminal.py,sha256=tGh2OMX21zzobxtj1PPDWj7DoOcGfAiNAOVnMrekrWU,23432 +_pytest/tmpdir.py,sha256=XKkZ32soWaFhej4zGIRQD5Tlc9MHk1qoa_hzUThcpes,4191 +_pytest/unittest.py,sha256=OciNcH7KaGBBf3mplwOVLJ4y0o2PJUWt5gi8BZIainQ,8437 +_pytest/warnings.py,sha256=cqA6bM47p5u4pO6jObuugVXH2-wg4I95DpmqzDfsfCY,2921 +_pytest/_code/__init__.py,sha256=xc4AtbMH64T0uj93rM06vh-wVInRayAbXpmamDyWWiQ,410 +_pytest/_code/_py2traceback.py,sha256=8NDEHOVaSlxdhewqx7xSRhPpIVCIvN0kZGdWHP-U618,2952 +_pytest/_code/code.py,sha256=aHnAmRXxxaE8-Xy0pPCJyt3irMo764GOOXMRQzzwnoE,31740 +_pytest/_code/source.py,sha256=OJum5Udz9vGmkJYKj8_c9gqrd6g3nv_1SwqaDXmCYGM,14136 +_pytest/assertion/__init__.py,sha256=uipwI8Dd7RmJsBArfbEbyjVtVkYPvMrHrVaL6N8Q81A,5248 +_pytest/assertion/rewrite.py,sha256=PYhOoZsryIVSmNwhg9ufETViJ6e6NOU2RxoBvY5JV3c,36227 +_pytest/assertion/truncate.py,sha256=GD9M8y0ZDhTvy7wnQXy7wJHJFIu0mi8lQDy_JFjB65s,3367 +_pytest/assertion/util.py,sha256=AY88RujtEkw40qDBefA6sDlxUXJbk-TEQFtc1geVecU,10769 +_pytest/vendored_packages/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0 +_pytest/vendored_packages/pluggy.py,sha256=yNWiA33HWoA5COc2__KdR1Oh7RRYFc22oETzYWZFFiw,30788 +pytest-3.1.3.dist-info/DESCRIPTION.rst,sha256=d9ltTXUsqAIufcSxb3WD7HvBUwXRtzI6eDgQHT9LmrM,3479 +pytest-3.1.3.dist-info/LICENSE.txt,sha256=oevOFa_Htc-Yx8beUS0ZWdS_YduMa_LxEShtSDtAqZc,1096 +pytest-3.1.3.dist-info/METADATA,sha256=O3KZOjsI4edrTiCR_q3Rt2pG_oMuXRA2O4aUuI2lxp4,4842 +pytest-3.1.3.dist-info/RECORD,, +pytest-3.1.3.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +pytest-3.1.3.dist-info/entry_points.txt,sha256=axdK_VT5P2oD8QJv5r7o8lo9DC45aJ2uvB3vpTP-qSE,62 +pytest-3.1.3.dist-info/metadata.json,sha256=FXVC1UUlZ8HVdh7GqS7X-hoRxmLiWE1HKUdGElxAq_E,1641 +pytest-3.1.3.dist-info/top_level.txt,sha256=ENE0IeZV1I1R61DOt8gs5KmSXwitaq2zstF0az5f9PA,15 +../../../bin/py.test,sha256=U2UBdj1zgvz3nzbf3YWXZ3ucDbEeuHwnRE2idKmN2iM,253 +../../../bin/pytest,sha256=U2UBdj1zgvz3nzbf3YWXZ3ucDbEeuHwnRE2idKmN2iM,253 +pytest-3.1.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +_pytest/__pycache__/unittest.cpython-35.pyc,, +_pytest/__pycache__/recwarn.cpython-35.pyc,, +_pytest/__pycache__/_version.cpython-35.pyc,, +_pytest/__pycache__/main.cpython-35.pyc,, +_pytest/__pycache__/skipping.cpython-35.pyc,, +_pytest/__pycache__/mark.cpython-35.pyc,, +_pytest/assertion/__pycache__/truncate.cpython-35.pyc,, +_pytest/__pycache__/hookspec.cpython-35.pyc,, +__pycache__/pytest.cpython-35.pyc,, +_pytest/__pycache__/python.cpython-35.pyc,, +_pytest/assertion/__pycache__/__init__.cpython-35.pyc,, +_pytest/__pycache__/pastebin.cpython-35.pyc,, +_pytest/__pycache__/freeze_support.cpython-35.pyc,, +_pytest/__pycache__/setuponly.cpython-35.pyc,, 
+_pytest/__pycache__/debugging.cpython-35.pyc,, +_pytest/vendored_packages/__pycache__/__init__.cpython-35.pyc,, +_pytest/_code/__pycache__/source.cpython-35.pyc,, +_pytest/assertion/__pycache__/rewrite.cpython-35.pyc,, +_pytest/__pycache__/warnings.cpython-35.pyc,, +_pytest/__pycache__/runner.cpython-35.pyc,, +_pytest/__pycache__/resultlog.cpython-35.pyc,, +_pytest/__pycache__/tmpdir.cpython-35.pyc,, +_pytest/__pycache__/fixtures.cpython-35.pyc,, +_pytest/__pycache__/_pluggy.cpython-35.pyc,, +_pytest/_code/__pycache__/__init__.cpython-35.pyc,, +_pytest/__pycache__/capture.cpython-35.pyc,, +_pytest/__pycache__/compat.cpython-35.pyc,, +_pytest/_code/__pycache__/code.cpython-35.pyc,, +_pytest/__pycache__/doctest.cpython-35.pyc,, +_pytest/vendored_packages/__pycache__/pluggy.cpython-35.pyc,, +_pytest/_code/__pycache__/_py2traceback.cpython-35.pyc,, +_pytest/__pycache__/helpconfig.cpython-35.pyc,, +_pytest/__pycache__/pytester.cpython-35.pyc,, +_pytest/__pycache__/setupplan.cpython-35.pyc,, +_pytest/__pycache__/_argcomplete.cpython-35.pyc,, +_pytest/__pycache__/__init__.cpython-35.pyc,, +_pytest/__pycache__/cacheprovider.cpython-35.pyc,, +_pytest/__pycache__/terminal.cpython-35.pyc,, +_pytest/__pycache__/monkeypatch.cpython-35.pyc,, +_pytest/__pycache__/deprecated.cpython-35.pyc,, +_pytest/__pycache__/config.cpython-35.pyc,, +_pytest/assertion/__pycache__/util.cpython-35.pyc,, +_pytest/__pycache__/junitxml.cpython-35.pyc,, +_pytest/__pycache__/nose.cpython-35.pyc,, diff --git a/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/WHEEL b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/entry_points.txt b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/entry_points.txt new file mode 100644 index 0000000..d8e4fd2 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/entry_points.txt @@ -0,0 +1,4 @@ +[console_scripts] +py.test = pytest:main +pytest = pytest:main + diff --git a/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/metadata.json b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/metadata.json new file mode 100644 index 0000000..c58d0b0 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 6 - Mature", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: POSIX", "Operating System :: Microsoft :: Windows", "Operating System :: MacOS :: MacOS X", "Topic :: Software Development :: Testing", "Topic :: Software Development :: Libraries", "Topic :: Utilities", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Programming Language :: Python :: 3.6"], "extensions": {"python.commands": {"wrap_console": {"py.test": "pytest:main", "pytest": "pytest:main"}}, "python.details": {"contacts": [{"name": "Holger Krekel, Bruno Oliveira, Ronny Pfannschmidt, Floris Bruynooghe, Brianna Laugher, Florian Bruhin and others", "role": "author"}], "document_names": 
{"description": "DESCRIPTION.rst", "license": "LICENSE.txt"}, "project_urls": {"Home": "http://pytest.org"}}, "python.exports": {"console_scripts": {"py.test": "pytest:main", "pytest": "pytest:main"}}}, "extras": [], "generator": "bdist_wheel (0.29.0)", "keywords": ["test", "unittest"], "license": "MIT license", "metadata_version": "2.0", "name": "pytest", "platform": "unix", "run_requires": [{"requires": ["py (>=1.4.33)", "setuptools"]}, {"environment": "python_version==\"2.6\"", "requires": ["argparse"]}, {"environment": "sys_platform==\"win32\"", "requires": ["colorama"]}], "summary": "pytest: simple powerful testing with Python", "version": "3.1.3"} \ No newline at end of file diff --git a/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/top_level.txt b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/top_level.txt new file mode 100644 index 0000000..e94857a --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest-3.1.3.dist-info/top_level.txt @@ -0,0 +1,2 @@ +_pytest +pytest diff --git a/venv/lib/python3.5/site-packages/pytest.py b/venv/lib/python3.5/site-packages/pytest.py new file mode 100644 index 0000000..4e4ccb3 --- /dev/null +++ b/venv/lib/python3.5/site-packages/pytest.py @@ -0,0 +1,78 @@ +# PYTHON_ARGCOMPLETE_OK +""" +pytest: unit and functional testing with Python. +""" + + +# else we are imported + +from _pytest.config import ( + main, UsageError, _preloadplugins, cmdline, + hookspec, hookimpl +) +from _pytest.fixtures import fixture, yield_fixture +from _pytest.assertion import register_assert_rewrite +from _pytest.freeze_support import freeze_includes +from _pytest import __version__ +from _pytest.debugging import pytestPDB as __pytestPDB +from _pytest.recwarn import warns, deprecated_call +from _pytest.runner import fail, skip, importorskip, exit +from _pytest.mark import MARK_GEN as mark, param +from _pytest.skipping import xfail +from _pytest.main import Item, Collector, File, Session +from _pytest.fixtures import fillfixtures as _fillfuncargs +from _pytest.python import ( + raises, approx, + Module, Class, Instance, Function, Generator, +) + +set_trace = __pytestPDB.set_trace + +__all__ = [ + 'main', + 'UsageError', + 'cmdline', + 'hookspec', + 'hookimpl', + '__version__', + 'register_assert_rewrite', + 'freeze_includes', + 'set_trace', + 'warns', + 'deprecated_call', + 'fixture', + 'yield_fixture', + 'fail', + 'skip', + 'xfail', + 'importorskip', + 'exit', + 'mark', + 'param', + 'approx', + '_fillfuncargs', + + 'Item', + 'File', + 'Collector', + 'Session', + 'Module', + 'Class', + 'Instance', + 'Function', + 'Generator', + 'raises', + + +] + +if __name__ == '__main__': + # if run as a script or by 'python -m pytest' + # we trigger the below "else" condition by the following import + import pytest + raise SystemExit(pytest.main()) +else: + + from _pytest.compat import _setup_collect_fakemodule + _preloadplugins() # to populate pytest.* namespace so help(pytest) works + _setup_collect_fakemodule() diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/DESCRIPTION.rst b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/DESCRIPTION.rst new file mode 100644 index 0000000..15e8d25 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/DESCRIPTION.rst @@ -0,0 +1,238 @@ +=============================== +Installing and Using Setuptools +=============================== + +.. contents:: **Table of Contents** + + +`Change History <https://pythonhosted.org/setuptools/history.html>`_. 
+ +------------------------- +Installation Instructions +------------------------- + +The recommended way to bootstrap setuptools on any system is to download +`ez_setup.py`_ and run it using the target Python environment. Different +operating systems have different recommended techniques to accomplish this +basic routine, so below are some examples to get you started. + +Setuptools requires Python 2.6 or later. To install setuptools +on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x +<https://raw.githubusercontent.com/pypa/setuptools/bootstrap-py24/ez_setup.py>`_. + +The link provided to ez_setup.py is a bookmark to the bootstrap script for the +latest known stable release. + +.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py + +Windows (PowerShell 3 or later) +=============================== + +For best results, uninstall previous versions FIRST (see `Uninstalling`_). + +Using Windows 8 (which includes PowerShell 3) or earlier versions of Windows +with PowerShell 3 installed, it's possible to install with one simple +PowerShell command. Start up PowerShell and paste this command:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - + +You must start PowerShell with administrative privileges, or you may choose +to perform a user-local installation:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user + +If you have Python 3.3 or later, you can use the ``py`` command to install to +different Python versions. For example, to install to Python 3.3 if you have +Python 2.7 installed:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 - + +The recommended way to install setuptools on Windows is to download +`ez_setup.py`_ and run it. The script will download the appropriate +distribution file and install it for you. + +Once installation is complete, you will find an ``easy_install`` program in +your Python ``Scripts`` subdirectory. For simple invocation and best results, +add this directory to your ``PATH`` environment variable, if it is not already +present. If you did a user-local install, the ``Scripts`` subdirectory is +``$env:APPDATA\Python\Scripts``. + + +Windows (simplified) +==================== + +For Windows without PowerShell 3 or for installation without a command-line, +download `ez_setup.py`_ using your preferred web browser or other technique +and "run" that file. + + +Unix (wget) +=========== + +Most Linux distributions come with wget. + +Download `ez_setup.py`_ and run it using the target Python version. The script +will download the appropriate version and install it for you:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python + +Note that you may need to invoke the command with superuser privileges to +install to the system Python:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python + +Alternatively, Setuptools may be installed to a user-local path:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user + +Note that on some older systems (noted on Debian 6 and CentOS 5 installations), +`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net` +does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using +`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the +host name `pypi.python.org`.
Those are known issues, related to a bug in older versions of `wget` +(see `Issue 59 <https://bitbucket.org/pypa/pypi/issue/59#comment-5881915>`_). If you happen to encounter them, +install Setuptools as follows:: + + > wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py + > python ez_setup.py --insecure + + +Unix including Mac OS X (curl) +============================== + +If your system has curl installed, follow the ``wget`` instructions but +replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example:: + + > curl https://bootstrap.pypa.io/ez_setup.py -o - | python + + +Advanced Installation +===================== + +For more advanced installation options, such as installing to custom +locations or prefixes, download and extract the source +tarball from `Setuptools on PyPI <https://pypi.python.org/pypi/setuptools>`_ +and run setup.py with any supported distutils and Setuptools options. +For example:: + + setuptools-x.x$ python setup.py install --prefix=/opt/setuptools + +Use ``--help`` to get a full options list, but we recommend consulting +the `EasyInstall manual`_ for detailed instructions, especially `the section +on custom installation locations`_. + +.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall +.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations + + +Downloads +========= + +All setuptools downloads can be found at `the project's home page in the Python +Package Index`_. Scroll to the very bottom of the page to find the links. + +.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools + +In addition to the PyPI downloads, the development version of ``setuptools`` +is available from the `Bitbucket repo`_, and in-development versions of the +`0.6 branch`_ are available as well. + +.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev +.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 + +Uninstalling +============ + +On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` +installer, simply use the uninstall feature of "Add/Remove Programs" in the +Control Panel. + +Otherwise, to uninstall Setuptools or Distribute, regardless of the Python +version, delete all ``setuptools*`` and ``distribute*`` files and +directories from your system's ``site-packages`` directory +(and any other ``sys.path`` directories) FIRST. + +If you are upgrading or otherwise plan to re-install Setuptools or Distribute, +nothing further needs to be done. If you want to completely remove Setuptools, +you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts +and associated executables installed to the Python scripts directory. + +-------------------------------- +Using Setuptools and EasyInstall +-------------------------------- + +Here are some of the available manuals, tutorials, and other resources for +learning about Setuptools, Python Eggs, and EasyInstall: + +* `The EasyInstall user's guide and reference manual`_ +* `The setuptools Developer's Guide`_ +* `The pkg_resources API reference`_ +* `The Internal Structure of Python Eggs`_ + +Questions, comments, and bug reports should be directed to the `distutils-sig +mailing list`_. If you have written (or know of) any tutorials, documentation, +plug-ins, or other resources for setuptools users, please let us know about +them there, so this reference list can be updated.
If you have working, +*tested* patches to correct problems or add features, you may submit them to +the `setuptools bug tracker`_. + +.. _setuptools bug tracker: https://github.com/pypa/setuptools/issues +.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html +.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html +.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html +.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html +.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ + + +------- +Credits +------- + +* The original design for the ``.egg`` format and the ``pkg_resources`` API was + co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first + version of ``pkg_resources``, and supplied the OS X operating system version + compatibility algorithm. + +* Ian Bicking implemented many early "creature comfort" features of + easy_install, including support for downloading via Sourceforge and + Subversion repositories. Ian's comments on the Web-SIG about WSGI + application deployment also inspired the concept of "entry points" in eggs, + and he has given talks at PyCon and elsewhere to inform and educate the + community about eggs and setuptools. + +* Jim Fulton contributed time and effort to build automated tests of various + aspects of ``easy_install``, and supplied the doctests for the command-line + ``.exe`` wrappers on Windows. + +* Phillip J. Eby is the seminal author of setuptools, and + first proposed the idea of an importable binary distribution format for + Python application plug-ins. + +* Significant parts of the implementation of setuptools were funded by the Open + Source Applications Foundation, to provide a plug-in infrastructure for the + Chandler PIM application. In addition, many OSAF staffers (such as Mike + "Code Bear" Taylor) contributed their time and stress as guinea pigs for the + use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) + +* Tarek Ziadé is the principal author of the Distribute fork, which + re-invigorated the community on the project, encouraged renewed innovation, + and addressed many defects. + +* Since the merge with Distribute, Jason R. Coombs is the + maintainer of setuptools. The project is maintained in coordination with + the Python Packaging Authority (PyPA) and the larger Python community. + +.. _files: + + +--------------- +Code of Conduct +--------------- + +Everyone interacting in the setuptools project's codebases, issue trackers, +chat rooms, and mailing lists is expected to follow the +`PyPA Code of Conduct`_. + +.. 
_PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/INSTALLER b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/INSTALLER new file mode 100644 index 0000000..a1b589e --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/INSTALLER @@ -0,0 +1 @@ +pip diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/METADATA b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/METADATA new file mode 100644 index 0000000..de4691c --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/METADATA @@ -0,0 +1,263 @@ +Metadata-Version: 2.0 +Name: setuptools +Version: 20.7.0 +Summary: Easily download, build, install, upgrade, and uninstall Python packages +Home-page: https://github.com/pypa/setuptools +Author: Python Packaging Authority +Author-email: distutils-sig@python.org +License: UNKNOWN +Keywords: CPAN PyPI distutils eggs package management +Platform: UNKNOWN +Classifier: Development Status :: 5 - Production/Stable +Classifier: Intended Audience :: Developers +Classifier: License :: OSI Approved :: MIT License +Classifier: Operating System :: OS Independent +Classifier: Programming Language :: Python :: 2.6 +Classifier: Programming Language :: Python :: 2.7 +Classifier: Programming Language :: Python :: 3 +Classifier: Programming Language :: Python :: 3.3 +Classifier: Programming Language :: Python :: 3.4 +Classifier: Programming Language :: Python :: 3.5 +Classifier: Topic :: Software Development :: Libraries :: Python Modules +Classifier: Topic :: System :: Archiving :: Packaging +Classifier: Topic :: System :: Systems Administration +Classifier: Topic :: Utilities + +=============================== +Installing and Using Setuptools +=============================== + +.. contents:: **Table of Contents** + + +`Change History <https://pythonhosted.org/setuptools/history.html>`_. + +------------------------- +Installation Instructions +------------------------- + +The recommended way to bootstrap setuptools on any system is to download +`ez_setup.py`_ and run it using the target Python environment. Different +operating systems have different recommended techniques to accomplish this +basic routine, so below are some examples to get you started. + +Setuptools requires Python 2.6 or later. To install setuptools +on Python 2.4 or Python 2.5, use the `bootstrap script for Setuptools 1.x +<https://raw.githubusercontent.com/pypa/setuptools/bootstrap-py24/ez_setup.py>`_. + +The link provided to ez_setup.py is a bookmark to the bootstrap script for the +latest known stable release. + +.. _ez_setup.py: https://bootstrap.pypa.io/ez_setup.py + +Windows (PowerShell 3 or later) +=============================== + +For best results, uninstall previous versions FIRST (see `Uninstalling`_). + +Using Windows 8 (which includes PowerShell 3) or earlier versions of Windows +with PowerShell 3 installed, it's possible to install with one simple +PowerShell command. Start up PowerShell and paste this command:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - + +You must start PowerShell with administrative privileges, or you may choose +to perform a user-local installation:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | python - --user + +If you have Python 3.3 or later, you can use the ``py`` command to install to +different Python versions.
For example, to install to Python 3.3 if you have +Python 2.7 installed:: + + > (Invoke-WebRequest https://bootstrap.pypa.io/ez_setup.py).Content | py -3 - + +The recommended way to install setuptools on Windows is to download +`ez_setup.py`_ and run it. The script will download the appropriate +distribution file and install it for you. + +Once installation is complete, you will find an ``easy_install`` program in +your Python ``Scripts`` subdirectory. For simple invocation and best results, +add this directory to your ``PATH`` environment variable, if it is not already +present. If you did a user-local install, the ``Scripts`` subdirectory is +``$env:APPDATA\Python\Scripts``. + + +Windows (simplified) +==================== + +For Windows without PowerShell 3 or for installation without a command-line, +download `ez_setup.py`_ using your preferred web browser or other technique +and "run" that file. + + +Unix (wget) +=========== + +Most Linux distributions come with wget. + +Download `ez_setup.py`_ and run it using the target Python version. The script +will download the appropriate version and install it for you:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python + +Note that you may need to invoke the command with superuser privileges to +install to the system Python:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | sudo python + +Alternatively, Setuptools may be installed to a user-local path:: + + > wget https://bootstrap.pypa.io/ez_setup.py -O - | python - --user + +Note that on some older systems (noted on Debian 6 and CentOS 5 installations), +`wget` may refuse to download `ez_setup.py`, complaining that the certificate common name `*.c.ssl.fastly.net` +does not match the host name `bootstrap.pypa.io`. In addition, the `ez_setup.py` script may then encounter similar problems using +`wget` internally to download `setuptools-x.y.zip`, complaining that the certificate common name of `www.python.org` does not match the +host name `pypi.python.org`. Those are known issues, related to a bug in older versions of `wget` +(see `Issue 59 <https://bitbucket.org/pypa/pypi/issue/59#comment-5881915>`_). If you happen to encounter them, +install Setuptools as follows:: + + > wget --no-check-certificate https://bootstrap.pypa.io/ez_setup.py + > python ez_setup.py --insecure + + +Unix including Mac OS X (curl) +============================== + +If your system has curl installed, follow the ``wget`` instructions but +replace ``wget`` with ``curl`` and ``-O`` with ``-o``. For example:: + + > curl https://bootstrap.pypa.io/ez_setup.py -o - | python + + +Advanced Installation +===================== + +For more advanced installation options, such as installing to custom +locations or prefixes, download and extract the source +tarball from `Setuptools on PyPI <https://pypi.python.org/pypi/setuptools>`_ +and run setup.py with any supported distutils and Setuptools options. +For example:: + + setuptools-x.x$ python setup.py install --prefix=/opt/setuptools + +Use ``--help`` to get a full options list, but we recommend consulting +the `EasyInstall manual`_ for detailed instructions, especially `the section +on custom installation locations`_. + +.. _EasyInstall manual: https://pythonhosted.org/setuptools/EasyInstall +.. _the section on custom installation locations: https://pythonhosted.org/setuptools/EasyInstall#custom-installation-locations + + +Downloads +========= + +All setuptools downloads can be found at `the project's home page in the Python +Package Index`_.
Scroll to the very bottom of the page to find the links. + +.. _the project's home page in the Python Package Index: https://pypi.python.org/pypi/setuptools + +In addition to the PyPI downloads, the development version of ``setuptools`` +is available from the `Bitbucket repo`_, and in-development versions of the +`0.6 branch`_ are available as well. + +.. _Bitbucket repo: https://bitbucket.org/pypa/setuptools/get/default.tar.gz#egg=setuptools-dev +.. _0.6 branch: http://svn.python.org/projects/sandbox/branches/setuptools-0.6/#egg=setuptools-dev06 + +Uninstalling +============ + +On Windows, if Setuptools was installed using an ``.exe`` or ``.msi`` +installer, simply use the uninstall feature of "Add/Remove Programs" in the +Control Panel. + +Otherwise, to uninstall Setuptools or Distribute, regardless of the Python +version, delete all ``setuptools*`` and ``distribute*`` files and +directories from your system's ``site-packages`` directory +(and any other ``sys.path`` directories) FIRST. + +If you are upgrading or otherwise plan to re-install Setuptools or Distribute, +nothing further needs to be done. If you want to completely remove Setuptools, +you may also want to remove the 'easy_install' and 'easy_install-x.x' scripts +and associated executables installed to the Python scripts directory. + +-------------------------------- +Using Setuptools and EasyInstall +-------------------------------- + +Here are some of the available manuals, tutorials, and other resources for +learning about Setuptools, Python Eggs, and EasyInstall: + +* `The EasyInstall user's guide and reference manual`_ +* `The setuptools Developer's Guide`_ +* `The pkg_resources API reference`_ +* `The Internal Structure of Python Eggs`_ + +Questions, comments, and bug reports should be directed to the `distutils-sig +mailing list`_. If you have written (or know of) any tutorials, documentation, +plug-ins, or other resources for setuptools users, please let us know about +them there, so this reference list can be updated. If you have working, +*tested* patches to correct problems or add features, you may submit them to +the `setuptools bug tracker`_. + +.. _setuptools bug tracker: https://github.com/pypa/setuptools/issues +.. _The Internal Structure of Python Eggs: https://pythonhosted.org/setuptools/formats.html +.. _The setuptools Developer's Guide: https://pythonhosted.org/setuptools/setuptools.html +.. _The pkg_resources API reference: https://pythonhosted.org/setuptools/pkg_resources.html +.. _The EasyInstall user's guide and reference manual: https://pythonhosted.org/setuptools/easy_install.html +.. _distutils-sig mailing list: http://mail.python.org/pipermail/distutils-sig/ + + +------- +Credits +------- + +* The original design for the ``.egg`` format and the ``pkg_resources`` API was + co-created by Phillip Eby and Bob Ippolito. Bob also implemented the first + version of ``pkg_resources``, and supplied the OS X operating system version + compatibility algorithm. + +* Ian Bicking implemented many early "creature comfort" features of + easy_install, including support for downloading via Sourceforge and + Subversion repositories. Ian's comments on the Web-SIG about WSGI + application deployment also inspired the concept of "entry points" in eggs, + and he has given talks at PyCon and elsewhere to inform and educate the + community about eggs and setuptools. 
+ +* Jim Fulton contributed time and effort to build automated tests of various + aspects of ``easy_install``, and supplied the doctests for the command-line + ``.exe`` wrappers on Windows. + +* Phillip J. Eby is the seminal author of setuptools, and + first proposed the idea of an importable binary distribution format for + Python application plug-ins. + +* Significant parts of the implementation of setuptools were funded by the Open + Source Applications Foundation, to provide a plug-in infrastructure for the + Chandler PIM application. In addition, many OSAF staffers (such as Mike + "Code Bear" Taylor) contributed their time and stress as guinea pigs for the + use of eggs and setuptools, even before eggs were "cool". (Thanks, guys!) + +* Tarek Ziadé is the principal author of the Distribute fork, which + re-invigorated the community on the project, encouraged renewed innovation, + and addressed many defects. + +* Since the merge with Distribute, Jason R. Coombs is the + maintainer of setuptools. The project is maintained in coordination with + the Python Packaging Authority (PyPA) and the larger Python community. + +.. _files: + + +--------------- +Code of Conduct +--------------- + +Everyone interacting in the setuptools project's codebases, issue trackers, +chat rooms, and mailing lists is expected to follow the +`PyPA Code of Conduct`_. + +.. _PyPA Code of Conduct: https://www.pypa.io/en/latest/code-of-conduct/ + + diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/RECORD b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/RECORD new file mode 100644 index 0000000..833cb10 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/RECORD @@ -0,0 +1,100 @@ +easy_install.py,sha256=MDC9vt5AxDsXX5qcKlBz2TnW6Tpuv_AobnfhCJ9X3PM,126 +setuptools/__init__.py,sha256=WEGb6BRGN2dz3eJTbNRUfInUAhb6_OZJyYAndPGJm6w,5440 +setuptools/archive_util.py,sha256=N30WE5ZQjkytzhAodAXw4FkK-9J5AP1ChrClHnZthOA,6609 +setuptools/depends.py,sha256=WyJIhjIX7D5-JpGSnMAPHEoDcVPQxaO0405keTQT6jM,6418 +setuptools/dist.py,sha256=liJ6Ob7BoAdC8IkpomV08kFBeVD3B9f7hE-XCHTwbMw,35704 +setuptools/extension.py,sha256=YvsyGHWVWzhNOXMHU239FR14wxw2WwdMLLzWsRP6_IY,1694 +setuptools/launch.py,sha256=hP3qZxDNu5Hf9C-VAkEP4IC_YYfR1XfxMTj6EguxxCg,730 +setuptools/lib2to3_ex.py,sha256=6jPF9sJuHiz0cyg4cwIBLl2VMAxcl3GYSZwWAOuJplU,1998 +setuptools/msvc9_support.py,sha256=ALzUH_kzYcMFOsj8r0t275PEYouMUOrrZO6wXSSdoKs,2185 +setuptools/package_index.py,sha256=T6tZGPHApup6Gl3kz1sCLtY7kmMUXLBKweSAORYS2Qc,39490 +setuptools/py26compat.py,sha256=1Vvuf-hj5bTM3OAXv6vgJQImulne12ann053caOgikU,481 +setuptools/py27compat.py,sha256=CGj-jZcFgHUkrEdLvArkxHj96tAaMbG2-yJtUVU7QVI,306 +setuptools/py31compat.py,sha256=cqYSVBd2pxvKl75185z40htfEr6EKC29KvSBiSoqHOA,1636 +setuptools/sandbox.py,sha256=tuMRu_8R0_w6Qer9VqDiOTqKy1qr_GjHi-2QAg7TMz0,14210 +setuptools/script (dev).tmpl,sha256=f7MR17dTkzaqkCMSVseyOCMVrPVSMdmTQsaB8cZzfuI,201 +setuptools/script.tmpl,sha256=WGTt5piezO27c-Dbx6l5Q4T3Ff20A5z7872hv3aAhYY,138 +setuptools/site-patch.py,sha256=K-0-cAx36mX_PG-qPZwosG9ZLCliRjquKQ4nHiJvvzg,2389 +setuptools/ssl_support.py,sha256=tAFeeyFPVle_GgarPkNrdfnCJgP9PyN_QYGXTgypoyc,8119 +setuptools/unicode_utils.py,sha256=8zVyrL_MFc6P5AmErs21rr7z-3N1pZ_NkOcDC7BPElU,995 +setuptools/utils.py,sha256=08Z7mt-9mvrx-XvmS5EyKoRn2lxNTlgFsUwBU3Eq9JQ,293 +setuptools/version.py,sha256=khyqulaxZr_XgWpQpVu3JKz9h0H0KFPVcKajnhTfCxc,132 +setuptools/windows_support.py,sha256=5GrfqSP2-dLGJoZTq2g6dCKkyQxxa2n5IQiXlJCoYEE,714 
+setuptools/command/__init__.py,sha256=1AM3hv_zCixE7kTXA-onWfK_2KF8GC8fUw3WSxzi5Fg,564 +setuptools/command/alias.py,sha256=KjpE0sz_SDIHv3fpZcIQK-sCkJz-SrC6Gmug6b9Nkc8,2426 +setuptools/command/bdist_egg.py,sha256=Km4CsGbevhvej6kKEfvTYxfkPoQijUyXmImNifrO4Tg,17184 +setuptools/command/bdist_rpm.py,sha256=B7l0TnzCGb-0nLlm6rS00jWLkojASwVmdhW2w5Qz_Ak,1508 +setuptools/command/bdist_wininst.py,sha256=_6dz3lpB1tY200LxKPLM7qgwTCceOMgaWFF-jW2-pm0,637 +setuptools/command/build_ext.py,sha256=pkQ8xp3YPVGGLkGv-SvfxC_GqFpboph1AFEoMFOgQMo,11964 +setuptools/command/build_py.py,sha256=HvJ88JuougDccaowYlfMV12kYtd0GLahg2DR2vQRqL4,7983 +setuptools/command/develop.py,sha256=VxSYbpM2jQqtRBn5klIjPVBo3sWKNZMlSbHHiRLUlZo,7383 +setuptools/command/easy_install.py,sha256=jqLpcIhP5z1dXaoL5y1NS0074b8VdtMtH8RWKlYZu04,88268 +setuptools/command/egg_info.py,sha256=0_8eI8hgLAlGt8Xk5kiodY_d9lxG6_RSescJISKBJgA,16890 +setuptools/command/install.py,sha256=a0EZpL_A866KEdhicTGbuyD_TYl1sykfzdrri-zazT4,4683 +setuptools/command/install_egg_info.py,sha256=fEqU1EplTs_vUjAzwiEB7LrtdZBQ3BefwuUZLZBDEQ0,5027 +setuptools/command/install_lib.py,sha256=5IZM251t4DzOdZAXCezdROr3X0SeeE41eyV059RNgZ4,5011 +setuptools/command/install_scripts.py,sha256=vX2JC6v7l090N7CrTfihWBklNbPvfNKAY2LRtukM9XE,2231 +setuptools/command/register.py,sha256=bHlMm1qmBbSdahTOT8w6UhA-EgeQIz7p6cD-qOauaiI,270 +setuptools/command/rotate.py,sha256=QGZS2t4CmBl7t79KQijNCjRMU50lu3nRhu4FXWB5LIE,2038 +setuptools/command/saveopts.py,sha256=za7QCBcQimKKriWcoCcbhxPjUz30gSB74zuTL47xpP4,658 +setuptools/command/sdist.py,sha256=kQetnPMw6ao3nurWGJZgS4HkOH4AknzMOSvqbVA6jGA,7050 +setuptools/command/setopt.py,sha256=cygJaJWJmiVhR0e_Uh_0_fWyCxMJIqK-Bu6K0LyYUtU,5086 +setuptools/command/test.py,sha256=N2f5RwxkjwU3YQzFYHtzHr636-pdX9XJDuPg5Y92kSo,6888 +setuptools/command/upload.py,sha256=OjAryq4ZoARZiaTN_MpuG1X8Pu9CJNCKmmbMg-gab5I,649 +setuptools/command/upload_docs.py,sha256=htXpASci5gKP0RIrGZRRmbll7RnTRuwvKWZkYsBlDMM,6815 +setuptools/extern/__init__.py,sha256=mTrrj4yLMdFeEwwnqKnSuvZM5RM-HPZ1iXLgaYDlB9o,132 +setuptools-20.7.0.dist-info/DESCRIPTION.rst,sha256=s4bDJYqySAMbCdv6Fqr4vEUMG_vRSpGo0N0z5Im_1UM,9945 +setuptools-20.7.0.dist-info/METADATA,sha256=ZjaJZJLbxKSGaCMoLPHs2OApsNeqCk8vgnibr2sCPro,10999 +setuptools-20.7.0.dist-info/RECORD,, +setuptools-20.7.0.dist-info/WHEEL,sha256=o2k-Qa-RMNIJmUdIc7KU6VWR_ErNRbWNlxDIpl7lm34,110 +setuptools-20.7.0.dist-info/dependency_links.txt,sha256=oUNXJEArClXFiSSvfFwUKY8TYjeIXhuFfCpXn5K0DCE,226 +setuptools-20.7.0.dist-info/entry_points.txt,sha256=S6yRfyEABPIKq4cNMNO_7LHXzFVZW-exLSrKSI6kgNU,2779 +setuptools-20.7.0.dist-info/metadata.json,sha256=owxzpSorzMYFIWuy_rmAOseCtAdF59Lf98DD9h-lenw,4239 +setuptools-20.7.0.dist-info/top_level.txt,sha256=2HUXVVwA4Pff1xgTFr3GsTXXKaPaO6vlG6oNJ_4u4Tg,38 +setuptools-20.7.0.dist-info/zip-safe,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1 +../../../bin/easy_install,sha256=hx8dJQDFewDm3i-k0hClfOY_VX3MMPQfFb_cSgEWJgs,278 +../../../bin/easy_install-3.5,sha256=hx8dJQDFewDm3i-k0hClfOY_VX3MMPQfFb_cSgEWJgs,278 +setuptools-20.7.0.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4 +setuptools/__pycache__/msvc9_support.cpython-35.pyc,, +setuptools/__pycache__/windows_support.cpython-35.pyc,, +setuptools/command/__pycache__/easy_install.cpython-35.pyc,, +setuptools/command/__pycache__/bdist_wininst.cpython-35.pyc,, +setuptools/command/__pycache__/bdist_egg.cpython-35.pyc,, +setuptools/__pycache__/archive_util.cpython-35.pyc,, +setuptools/command/__pycache__/install.cpython-35.pyc,, 
+setuptools/command/__pycache__/upload.cpython-35.pyc,, +__pycache__/easy_install.cpython-35.pyc,, +setuptools/command/__pycache__/develop.cpython-35.pyc,, +setuptools/__pycache__/sandbox.cpython-35.pyc,, +setuptools/command/__pycache__/egg_info.cpython-35.pyc,, +setuptools/__pycache__/py27compat.cpython-35.pyc,, +setuptools/__pycache__/launch.cpython-35.pyc,, +setuptools/__pycache__/ssl_support.cpython-35.pyc,, +setuptools/__pycache__/lib2to3_ex.cpython-35.pyc,, +setuptools/command/__pycache__/sdist.cpython-35.pyc,, +setuptools/command/__pycache__/alias.cpython-35.pyc,, +setuptools/__pycache__/package_index.cpython-35.pyc,, +setuptools/command/__pycache__/build_py.cpython-35.pyc,, +setuptools/__pycache__/__init__.cpython-35.pyc,, +setuptools/__pycache__/version.cpython-35.pyc,, +setuptools/command/__pycache__/build_ext.cpython-35.pyc,, +setuptools/command/__pycache__/upload_docs.cpython-35.pyc,, +setuptools/command/__pycache__/saveopts.cpython-35.pyc,, +setuptools/__pycache__/py26compat.cpython-35.pyc,, +setuptools/command/__pycache__/install_lib.cpython-35.pyc,, +setuptools/__pycache__/extension.cpython-35.pyc,, +setuptools/command/__pycache__/rotate.cpython-35.pyc,, +setuptools/__pycache__/utils.cpython-35.pyc,, +setuptools/command/__pycache__/bdist_rpm.cpython-35.pyc,, +setuptools/__pycache__/site-patch.cpython-35.pyc,, +setuptools/command/__pycache__/register.cpython-35.pyc,, +setuptools/command/__pycache__/install_egg_info.cpython-35.pyc,, +setuptools/extern/__pycache__/__init__.cpython-35.pyc,, +setuptools/__pycache__/depends.cpython-35.pyc,, +setuptools/__pycache__/unicode_utils.cpython-35.pyc,, +setuptools/command/__pycache__/test.cpython-35.pyc,, +setuptools/command/__pycache__/install_scripts.cpython-35.pyc,, +setuptools/command/__pycache__/setopt.cpython-35.pyc,, +setuptools/__pycache__/dist.cpython-35.pyc,, +setuptools/command/__pycache__/__init__.cpython-35.pyc,, +setuptools/__pycache__/py31compat.cpython-35.pyc,, diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/WHEEL b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/WHEEL new file mode 100644 index 0000000..8b6dd1b --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/WHEEL @@ -0,0 +1,6 @@ +Wheel-Version: 1.0 +Generator: bdist_wheel (0.29.0) +Root-Is-Purelib: true +Tag: py2-none-any +Tag: py3-none-any + diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/dependency_links.txt b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/dependency_links.txt new file mode 100644 index 0000000..47d1e81 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/dependency_links.txt @@ -0,0 +1,2 @@ +https://pypi.python.org/packages/source/c/certifi/certifi-2015.11.20.tar.gz#md5=25134646672c695c1ff1593c2dd75d08 +https://pypi.python.org/packages/source/w/wincertstore/wincertstore-0.2.zip#md5=ae728f2f007185648d0c7a8679b361e2 diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/entry_points.txt b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/entry_points.txt new file mode 100644 index 0000000..5270e4a --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/entry_points.txt @@ -0,0 +1,61 @@ +[console_scripts] +easy_install = setuptools.command.easy_install:main + +[distutils.commands] +alias = setuptools.command.alias:alias +bdist_egg = setuptools.command.bdist_egg:bdist_egg +bdist_rpm = setuptools.command.bdist_rpm:bdist_rpm +bdist_wininst = 
setuptools.command.bdist_wininst:bdist_wininst +build_ext = setuptools.command.build_ext:build_ext +build_py = setuptools.command.build_py:build_py +develop = setuptools.command.develop:develop +easy_install = setuptools.command.easy_install:easy_install +egg_info = setuptools.command.egg_info:egg_info +install = setuptools.command.install:install +install_egg_info = setuptools.command.install_egg_info:install_egg_info +install_lib = setuptools.command.install_lib:install_lib +install_scripts = setuptools.command.install_scripts:install_scripts +register = setuptools.command.register:register +rotate = setuptools.command.rotate:rotate +saveopts = setuptools.command.saveopts:saveopts +sdist = setuptools.command.sdist:sdist +setopt = setuptools.command.setopt:setopt +test = setuptools.command.test:test +upload = setuptools.command.upload:upload +upload_docs = setuptools.command.upload_docs:upload_docs + +[distutils.setup_keywords] +convert_2to3_doctests = setuptools.dist:assert_string_list +dependency_links = setuptools.dist:assert_string_list +eager_resources = setuptools.dist:assert_string_list +entry_points = setuptools.dist:check_entry_points +exclude_package_data = setuptools.dist:check_package_data +extras_require = setuptools.dist:check_extras +include_package_data = setuptools.dist:assert_bool +install_requires = setuptools.dist:check_requirements +namespace_packages = setuptools.dist:check_nsp +package_data = setuptools.dist:check_package_data +packages = setuptools.dist:check_packages +setup_requires = setuptools.dist:check_requirements +test_loader = setuptools.dist:check_importable +test_runner = setuptools.dist:check_importable +test_suite = setuptools.dist:check_test_suite +tests_require = setuptools.dist:check_requirements +use_2to3 = setuptools.dist:assert_bool +use_2to3_exclude_fixers = setuptools.dist:assert_string_list +use_2to3_fixers = setuptools.dist:assert_string_list +zip_safe = setuptools.dist:assert_bool + +[egg_info.writers] +PKG-INFO = setuptools.command.egg_info:write_pkg_info +dependency_links.txt = setuptools.command.egg_info:overwrite_arg +depends.txt = setuptools.command.egg_info:warn_depends_obsolete +eager_resources.txt = setuptools.command.egg_info:overwrite_arg +entry_points.txt = setuptools.command.egg_info:write_entries +namespace_packages.txt = setuptools.command.egg_info:overwrite_arg +requires.txt = setuptools.command.egg_info:write_requirements +top_level.txt = setuptools.command.egg_info:write_toplevel_names + +[setuptools.installation] +eggsecutable = setuptools.command.easy_install:bootstrap + diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/metadata.json b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/metadata.json new file mode 100644 index 0000000..a5427bf --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/metadata.json @@ -0,0 +1 @@ +{"classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Operating System :: OS Independent", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.3", "Programming Language :: Python :: 3.4", "Programming Language :: Python :: 3.5", "Topic :: Software Development :: Libraries :: Python Modules", "Topic :: System :: Archiving :: Packaging", "Topic :: System :: Systems Administration", "Topic :: Utilities"], "extensions": {"python.commands": {"wrap_console": 
{"easy_install": "setuptools.command.easy_install:main"}}, "python.details": {"contacts": [{"email": "distutils-sig@python.org", "name": "Python Packaging Authority", "role": "author"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://github.com/pypa/setuptools"}}, "python.exports": {"console_scripts": {"easy_install": "setuptools.command.easy_install:main"}, "distutils.commands": {"alias": "setuptools.command.alias:alias", "bdist_egg": "setuptools.command.bdist_egg:bdist_egg", "bdist_rpm": "setuptools.command.bdist_rpm:bdist_rpm", "bdist_wininst": "setuptools.command.bdist_wininst:bdist_wininst", "build_ext": "setuptools.command.build_ext:build_ext", "build_py": "setuptools.command.build_py:build_py", "develop": "setuptools.command.develop:develop", "easy_install": "setuptools.command.easy_install:easy_install", "egg_info": "setuptools.command.egg_info:egg_info", "install": "setuptools.command.install:install", "install_egg_info": "setuptools.command.install_egg_info:install_egg_info", "install_lib": "setuptools.command.install_lib:install_lib", "install_scripts": "setuptools.command.install_scripts:install_scripts", "register": "setuptools.command.register:register", "rotate": "setuptools.command.rotate:rotate", "saveopts": "setuptools.command.saveopts:saveopts", "sdist": "setuptools.command.sdist:sdist", "setopt": "setuptools.command.setopt:setopt", "test": "setuptools.command.test:test", "upload": "setuptools.command.upload:upload", "upload_docs": "setuptools.command.upload_docs:upload_docs"}, "distutils.setup_keywords": {"convert_2to3_doctests": "setuptools.dist:assert_string_list", "dependency_links": "setuptools.dist:assert_string_list", "eager_resources": "setuptools.dist:assert_string_list", "entry_points": "setuptools.dist:check_entry_points", "exclude_package_data": "setuptools.dist:check_package_data", "extras_require": "setuptools.dist:check_extras", "include_package_data": "setuptools.dist:assert_bool", "install_requires": "setuptools.dist:check_requirements", "namespace_packages": "setuptools.dist:check_nsp", "package_data": "setuptools.dist:check_package_data", "packages": "setuptools.dist:check_packages", "setup_requires": "setuptools.dist:check_requirements", "test_loader": "setuptools.dist:check_importable", "test_runner": "setuptools.dist:check_importable", "test_suite": "setuptools.dist:check_test_suite", "tests_require": "setuptools.dist:check_requirements", "use_2to3": "setuptools.dist:assert_bool", "use_2to3_exclude_fixers": "setuptools.dist:assert_string_list", "use_2to3_fixers": "setuptools.dist:assert_string_list", "zip_safe": "setuptools.dist:assert_bool"}, "egg_info.writers": {"PKG-INFO": "setuptools.command.egg_info:write_pkg_info", "dependency_links.txt": "setuptools.command.egg_info:overwrite_arg", "depends.txt": "setuptools.command.egg_info:warn_depends_obsolete", "eager_resources.txt": "setuptools.command.egg_info:overwrite_arg", "entry_points.txt": "setuptools.command.egg_info:write_entries", "namespace_packages.txt": "setuptools.command.egg_info:overwrite_arg", "requires.txt": "setuptools.command.egg_info:write_requirements", "top_level.txt": "setuptools.command.egg_info:write_toplevel_names"}, "setuptools.installation": {"eggsecutable": "setuptools.command.easy_install:bootstrap"}}}, "generator": "bdist_wheel (0.29.0)", "keywords": ["CPAN", "PyPI", "distutils", "eggs", "package", "management"], "metadata_version": "2.0", "name": "setuptools", "summary": "Easily download, build, install, upgrade, and uninstall 
Python packages", "version": "20.7.0"} \ No newline at end of file diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/top_level.txt b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/top_level.txt new file mode 100644 index 0000000..4577c6a --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/top_level.txt @@ -0,0 +1,3 @@ +easy_install +pkg_resources +setuptools diff --git a/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/zip-safe b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/zip-safe new file mode 100644 index 0000000..8b13789 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools-20.7.0.dist-info/zip-safe @@ -0,0 +1 @@ + diff --git a/venv/lib/python3.5/site-packages/setuptools/__init__.py b/venv/lib/python3.5/site-packages/setuptools/__init__.py new file mode 100644 index 0000000..67b57e4 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/__init__.py @@ -0,0 +1,169 @@ +"""Extensions to the 'distutils' for large or complex distributions""" + +import os +import functools +import distutils.core +import distutils.filelist +from distutils.core import Command as _Command +from distutils.util import convert_path +from fnmatch import fnmatchcase + +from setuptools.extern.six.moves import filterfalse, map + +import setuptools.version +from setuptools.extension import Extension +from setuptools.dist import Distribution, Feature, _get_unpatched +from setuptools.depends import Require + +__all__ = [ + 'setup', 'Distribution', 'Feature', 'Command', 'Extension', 'Require', + 'find_packages' +] + +__version__ = setuptools.version.__version__ + +bootstrap_install_from = None + +# If we run 2to3 on .py files, should we also convert docstrings? +# Default: yes; assume that we can detect doctests reliably +run_2to3_on_doctests = True +# Standard package names for fixer packages +lib2to3_fixer_packages = ['lib2to3.fixes'] + + +class PackageFinder(object): + @classmethod + def find(cls, where='.', exclude=(), include=('*',)): + """Return a list all Python packages found within directory 'where' + + 'where' should be supplied as a "cross-platform" (i.e. URL-style) + path; it will be converted to the appropriate local path syntax. + 'exclude' is a sequence of package names to exclude; '*' can be used + as a wildcard in the names, such that 'foo.*' will exclude all + subpackages of 'foo' (but not 'foo' itself). + + 'include' is a sequence of package names to include. If it's + specified, only the named packages will be included. If it's not + specified, all found packages will be included. 'include' can contain + shell style wildcard patterns just like 'exclude'. + + The list of included packages is built up first and then any + explicitly excluded packages are removed from it. + """ + out = cls._find_packages_iter(convert_path(where)) + out = cls.require_parents(out) + includes = cls._build_filter(*include) + excludes = cls._build_filter('ez_setup', '*__pycache__', *exclude) + out = filter(includes, out) + out = filterfalse(excludes, out) + return list(out) + + @staticmethod + def require_parents(packages): + """ + Exclude any apparent package that apparently doesn't include its + parent. + + For example, exclude 'foo.bar' if 'foo' is not present. 
+ """ + found = [] + for pkg in packages: + base, sep, child = pkg.rpartition('.') + if base and base not in found: + continue + found.append(pkg) + yield pkg + + @staticmethod + def _candidate_dirs(base_path): + """ + Return all dirs in base_path that might be packages. + """ + has_dot = lambda name: '.' in name + for root, dirs, files in os.walk(base_path, followlinks=True): + # Exclude directories that contain a period, as they cannot be + # packages. Mutate the list to avoid traversal. + dirs[:] = filterfalse(has_dot, dirs) + for dir in dirs: + yield os.path.relpath(os.path.join(root, dir), base_path) + + @classmethod + def _find_packages_iter(cls, base_path): + candidates = cls._candidate_dirs(base_path) + return ( + path.replace(os.path.sep, '.') + for path in candidates + if cls._looks_like_package(os.path.join(base_path, path)) + ) + + @staticmethod + def _looks_like_package(path): + return os.path.isfile(os.path.join(path, '__init__.py')) + + @staticmethod + def _build_filter(*patterns): + """ + Given a list of patterns, return a callable that will be true only if + the input matches one of the patterns. + """ + return lambda name: any(fnmatchcase(name, pat=pat) for pat in patterns) + +class PEP420PackageFinder(PackageFinder): + @staticmethod + def _looks_like_package(path): + return True + +find_packages = PackageFinder.find + +setup = distutils.core.setup + +_Command = _get_unpatched(_Command) + +class Command(_Command): + __doc__ = _Command.__doc__ + + command_consumes_arguments = False + + def __init__(self, dist, **kw): + """ + Construct the command for dist, updating + vars(self) with any keyword parameters. + """ + _Command.__init__(self, dist) + vars(self).update(kw) + + def reinitialize_command(self, command, reinit_subcommands=0, **kw): + cmd = _Command.reinitialize_command(self, command, reinit_subcommands) + vars(cmd).update(kw) + return cmd + +# we can't patch distutils.cmd, alas +distutils.core.Command = Command + + +def _find_all_simple(path): + """ + Find all files under 'path' + """ + results = ( + os.path.join(base, file) + for base, dirs, files in os.walk(path, followlinks=True) + for file in files + ) + return filter(os.path.isfile, results) + + +def findall(dir=os.curdir): + """ + Find all files under 'dir' and return the list of full filenames. + Unless dir is '.', return full filenames with dir prepended. 
+ """ + files = _find_all_simple(dir) + if dir == os.curdir: + make_rel = functools.partial(os.path.relpath, start=dir) + files = map(make_rel, files) + return list(files) + + +# fix findall bug in distutils (http://bugs.python.org/issue12885) +distutils.filelist.findall = findall diff --git a/venv/lib/python3.5/site-packages/setuptools/archive_util.py b/venv/lib/python3.5/site-packages/setuptools/archive_util.py new file mode 100644 index 0000000..b3c9fa5 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/archive_util.py @@ -0,0 +1,170 @@ +"""Utilities for extracting common archive formats""" + + +__all__ = [ + "unpack_archive", "unpack_zipfile", "unpack_tarfile", "default_filter", + "UnrecognizedFormat", "extraction_drivers", "unpack_directory", +] + +import zipfile +import tarfile +import os +import shutil +import posixpath +import contextlib +from pkg_resources import ensure_directory, ContextualZipFile +from distutils.errors import DistutilsError + +class UnrecognizedFormat(DistutilsError): + """Couldn't recognize the archive type""" + +def default_filter(src,dst): + """The default progress/filter callback; returns True for all files""" + return dst + + +def unpack_archive(filename, extract_dir, progress_filter=default_filter, + drivers=None): + """Unpack `filename` to `extract_dir`, or raise ``UnrecognizedFormat`` + + `progress_filter` is a function taking two arguments: a source path + internal to the archive ('/'-separated), and a filesystem path where it + will be extracted. The callback must return the desired extract path + (which may be the same as the one passed in), or else ``None`` to skip + that file or directory. The callback can thus be used to report on the + progress of the extraction, as well as to filter the items extracted or + alter their extraction paths. + + `drivers`, if supplied, must be a non-empty sequence of functions with the + same signature as this function (minus the `drivers` argument), that raise + ``UnrecognizedFormat`` if they do not support extracting the designated + archive type. The `drivers` are tried in sequence until one is found that + does not raise an error, or until all are exhausted (in which case + ``UnrecognizedFormat`` is raised). If you do not supply a sequence of + drivers, the module's ``extraction_drivers`` constant will be used, which + means that ``unpack_zipfile`` and ``unpack_tarfile`` will be tried, in that + order. 
+ """ + for driver in drivers or extraction_drivers: + try: + driver(filename, extract_dir, progress_filter) + except UnrecognizedFormat: + continue + else: + return + else: + raise UnrecognizedFormat( + "Not a recognized archive type: %s" % filename + ) + + +def unpack_directory(filename, extract_dir, progress_filter=default_filter): + """"Unpack" a directory, using the same interface as for archives + + Raises ``UnrecognizedFormat`` if `filename` is not a directory + """ + if not os.path.isdir(filename): + raise UnrecognizedFormat("%s is not a directory" % filename) + + paths = { + filename: ('', extract_dir), + } + for base, dirs, files in os.walk(filename): + src, dst = paths[base] + for d in dirs: + paths[os.path.join(base, d)] = src + d + '/', os.path.join(dst, d) + for f in files: + target = os.path.join(dst, f) + target = progress_filter(src + f, target) + if not target: + # skip non-files + continue + ensure_directory(target) + f = os.path.join(base, f) + shutil.copyfile(f, target) + shutil.copystat(f, target) + + +def unpack_zipfile(filename, extract_dir, progress_filter=default_filter): + """Unpack zip `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a zipfile (as determined + by ``zipfile.is_zipfile()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + + if not zipfile.is_zipfile(filename): + raise UnrecognizedFormat("%s is not a zip file" % (filename,)) + + with ContextualZipFile(filename) as z: + for info in z.infolist(): + name = info.filename + + # don't extract absolute paths or ones with .. in them + if name.startswith('/') or '..' in name.split('/'): + continue + + target = os.path.join(extract_dir, *name.split('/')) + target = progress_filter(name, target) + if not target: + continue + if name.endswith('/'): + # directory + ensure_directory(target) + else: + # file + ensure_directory(target) + data = z.read(info.filename) + with open(target, 'wb') as f: + f.write(data) + unix_attributes = info.external_attr >> 16 + if unix_attributes: + os.chmod(target, unix_attributes) + + +def unpack_tarfile(filename, extract_dir, progress_filter=default_filter): + """Unpack tar/tar.gz/tar.bz2 `filename` to `extract_dir` + + Raises ``UnrecognizedFormat`` if `filename` is not a tarfile (as determined + by ``tarfile.open()``). See ``unpack_archive()`` for an explanation + of the `progress_filter` argument. + """ + try: + tarobj = tarfile.open(filename) + except tarfile.TarError: + raise UnrecognizedFormat( + "%s is not a compressed or uncompressed tar file" % (filename,) + ) + with contextlib.closing(tarobj): + # don't do any chowning! + tarobj.chown = lambda *args: None + for member in tarobj: + name = member.name + # don't extract absolute paths or ones with .. in them + if not name.startswith('/') and '..' 
not in name.split('/'): + prelim_dst = os.path.join(extract_dir, *name.split('/')) + + # resolve any links and extract the link targets as normal + # files + while member is not None and (member.islnk() or member.issym()): + linkpath = member.linkname + if member.issym(): + base = posixpath.dirname(member.name) + linkpath = posixpath.join(base, linkpath) + linkpath = posixpath.normpath(linkpath) + member = tarobj._getmember(linkpath) + + if member is not None and (member.isfile() or member.isdir()): + final_dst = progress_filter(name, prelim_dst) + if final_dst: + if final_dst.endswith(os.sep): + final_dst = final_dst[:-1] + try: + # XXX Ugh + tarobj._extract_member(member, final_dst) + except tarfile.ExtractError: + # chown/chmod/mkfifo/mknode/makedev failed + pass + return True + +extraction_drivers = unpack_directory, unpack_zipfile, unpack_tarfile diff --git a/venv/lib/python3.5/site-packages/setuptools/command/__init__.py b/venv/lib/python3.5/site-packages/setuptools/command/__init__.py new file mode 100644 index 0000000..3fb2f6d --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/__init__.py @@ -0,0 +1,18 @@ +__all__ = [ + 'alias', 'bdist_egg', 'bdist_rpm', 'build_ext', 'build_py', 'develop', + 'easy_install', 'egg_info', 'install', 'install_lib', 'rotate', 'saveopts', + 'sdist', 'setopt', 'test', 'install_egg_info', 'install_scripts', + 'register', 'bdist_wininst', 'upload_docs', 'upload', +] + +from distutils.command.bdist import bdist +import sys + +from setuptools.command import install_scripts + + +if 'egg' not in bdist.format_commands: + bdist.format_command['egg'] = ('bdist_egg', "Python .egg file") + bdist.format_commands.append('egg') + +del bdist, sys diff --git a/venv/lib/python3.5/site-packages/setuptools/command/alias.py b/venv/lib/python3.5/site-packages/setuptools/command/alias.py new file mode 100644 index 0000000..4532b1c --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/alias.py @@ -0,0 +1,80 @@ +from distutils.errors import DistutilsOptionError + +from setuptools.extern.six.moves import map + +from setuptools.command.setopt import edit_config, option_base, config_file + + +def shquote(arg): + """Quote an argument for later parsing by shlex.split()""" + for c in '"', "'", "\\", "#": + if c in arg: + return repr(arg) + if arg.split() != [arg]: + return repr(arg) + return arg + + +class alias(option_base): + """Define a shortcut that invokes one or more commands""" + + description = "define a shortcut to invoke one or more commands" + command_consumes_arguments = True + + user_options = [ + ('remove', 'r', 'remove (unset) the alias'), + ] + option_base.user_options + + boolean_options = option_base.boolean_options + ['remove'] + + def initialize_options(self): + option_base.initialize_options(self) + self.args = None + self.remove = None + + def finalize_options(self): + option_base.finalize_options(self) + if self.remove and len(self.args) != 1: + raise DistutilsOptionError( + "Must specify exactly one argument (the alias name) when " + "using --remove" + ) + + def run(self): + aliases = self.distribution.get_option_dict('aliases') + + if not self.args: + print("Command Aliases") + print("---------------") + for alias in aliases: + print("setup.py alias", format_alias(alias, aliases)) + return + + elif len(self.args) == 1: + alias, = self.args + if self.remove: + command = None + elif alias in aliases: + print("setup.py alias", format_alias(alias, aliases)) + return + else: + print("No alias definition found for %r" %
alias) + return + else: + alias = self.args[0] + command = ' '.join(map(shquote, self.args[1:])) + + edit_config(self.filename, {'aliases': {alias: command}}, self.dry_run) + + +def format_alias(name, aliases): + source, command = aliases[name] + if source == config_file('global'): + source = '--global-config ' + elif source == config_file('user'): + source = '--user-config ' + elif source == config_file('local'): + source = '' + else: + source = '--filename=%r' % source + return source + name + ' ' + command diff --git a/venv/lib/python3.5/site-packages/setuptools/command/bdist_egg.py b/venv/lib/python3.5/site-packages/setuptools/command/bdist_egg.py new file mode 100644 index 0000000..9cebd7f --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/bdist_egg.py @@ -0,0 +1,471 @@ +"""setuptools.command.bdist_egg + +Build .egg distributions""" + +from distutils.errors import DistutilsSetupError +from distutils.dir_util import remove_tree, mkpath +from distutils import log +from types import CodeType +import sys +import os +import marshal +import textwrap + +from setuptools.extern import six + +from pkg_resources import get_build_platform, Distribution, ensure_directory +from pkg_resources import EntryPoint +from setuptools.extension import Library +from setuptools import Command + +try: + # Python 2.7 or >=3.2 + from sysconfig import get_path, get_python_version + + def _get_purelib(): + return get_path("purelib") +except ImportError: + from distutils.sysconfig import get_python_lib, get_python_version + + def _get_purelib(): + return get_python_lib(False) + + +def strip_module(filename): + if '.' in filename: + filename = os.path.splitext(filename)[0] + if filename.endswith('module'): + filename = filename[:-6] + return filename + + +def write_stub(resource, pyfile): + _stub_template = textwrap.dedent(""" + def __bootstrap__(): + global __bootstrap__, __loader__, __file__ + import sys, pkg_resources, imp + __file__ = pkg_resources.resource_filename(__name__, %r) + __loader__ = None; del __bootstrap__, __loader__ + imp.load_dynamic(__name__,__file__) + __bootstrap__() + """).lstrip() + with open(pyfile, 'w') as f: + f.write(_stub_template % resource) + + +class bdist_egg(Command): + description = "create an \"egg\" distribution" + + user_options = [ + ('bdist-dir=', 'b', + "temporary directory for creating the distribution"), + ('plat-name=', 'p', "platform name to embed in generated filenames " + "(default: %s)" % get_build_platform()), + ('exclude-source-files', None, + "remove all .py files from the generated egg"), + ('keep-temp', 'k', + "keep the pseudo-installation tree around after " + + "creating the distribution archive"), + ('dist-dir=', 'd', + "directory to put final built distributions in"), + ('skip-build', None, + "skip rebuilding everything (for testing/debugging)"), + ] + + boolean_options = [ + 'keep-temp', 'skip-build', 'exclude-source-files' + ] + + def initialize_options(self): + self.bdist_dir = None + self.plat_name = None + self.keep_temp = 0 + self.dist_dir = None + self.skip_build = 0 + self.egg_output = None + self.exclude_source_files = None + + def finalize_options(self): + ei_cmd = self.ei_cmd = self.get_finalized_command("egg_info") + self.egg_info = ei_cmd.egg_info + + if self.bdist_dir is None: + bdist_base = self.get_finalized_command('bdist').bdist_base + self.bdist_dir = os.path.join(bdist_base, 'egg') + + if self.plat_name is None: + self.plat_name = get_build_platform() + + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + + 
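finalize_options leaves egg_output unset until the name can be computed; the hunk that follows derives it with pkg_resources.Distribution.egg_name. A re-trace with assumed project values:

    from pkg_resources import Distribution

    basename = Distribution(
        None, None, 'mypkg', '1.0',   # location, metadata, name, version (assumed)
        '3.5', None,                  # py_version; None platform = pure Python
    ).egg_name()
    print(basename + '.egg')          # -> mypkg-1.0-py3.5.egg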
if self.egg_output is None: + + # Compute filename of the output egg + basename = Distribution( + None, None, ei_cmd.egg_name, ei_cmd.egg_version, + get_python_version(), + self.distribution.has_ext_modules() and self.plat_name + ).egg_name() + + self.egg_output = os.path.join(self.dist_dir, basename + '.egg') + + def do_install_data(self): + # Hack for packages that install data to install's --install-lib + self.get_finalized_command('install').install_lib = self.bdist_dir + + site_packages = os.path.normcase(os.path.realpath(_get_purelib())) + old, self.distribution.data_files = self.distribution.data_files, [] + + for item in old: + if isinstance(item, tuple) and len(item) == 2: + if os.path.isabs(item[0]): + realpath = os.path.realpath(item[0]) + normalized = os.path.normcase(realpath) + if normalized == site_packages or normalized.startswith( + site_packages + os.sep + ): + item = realpath[len(site_packages) + 1:], item[1] + # XXX else: raise ??? + self.distribution.data_files.append(item) + + try: + log.info("installing package data to %s" % self.bdist_dir) + self.call_command('install_data', force=0, root=None) + finally: + self.distribution.data_files = old + + def get_outputs(self): + return [self.egg_output] + + def call_command(self, cmdname, **kw): + """Invoke reinitialized command `cmdname` with keyword args""" + for dirname in INSTALL_DIRECTORY_ATTRS: + kw.setdefault(dirname, self.bdist_dir) + kw.setdefault('skip_build', self.skip_build) + kw.setdefault('dry_run', self.dry_run) + cmd = self.reinitialize_command(cmdname, **kw) + self.run_command(cmdname) + return cmd + + def run(self): + # Generate metadata first + self.run_command("egg_info") + # We run install_lib before install_data, because some data hacks + # pull their data path from the install_lib command. 
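do_install_data above rewrites absolute data paths that fall inside site-packages into egg-relative ones. A standalone re-trace of that branch (all paths assumed, POSIX layout):

    import os

    # Both paths below are assumed for illustration.
    site_packages = os.path.normcase('/usr/lib/python3.5/site-packages')
    entry = ('/usr/lib/python3.5/site-packages/mypkg/data', ['cfg.ini'])

    realpath = os.path.normcase(os.path.realpath(entry[0]))
    if realpath.startswith(site_packages + os.sep):
        entry = (realpath[len(site_packages) + 1:], entry[1])
    print(entry)  # -> ('mypkg/data', ['cfg.ini']), now relative to the egg root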
+ log.info("installing library code to %s" % self.bdist_dir) + instcmd = self.get_finalized_command('install') + old_root = instcmd.root + instcmd.root = None + if self.distribution.has_c_libraries() and not self.skip_build: + self.run_command('build_clib') + cmd = self.call_command('install_lib', warn_dir=0) + instcmd.root = old_root + + all_outputs, ext_outputs = self.get_ext_outputs() + self.stubs = [] + to_compile = [] + for (p, ext_name) in enumerate(ext_outputs): + filename, ext = os.path.splitext(ext_name) + pyfile = os.path.join(self.bdist_dir, strip_module(filename) + + '.py') + self.stubs.append(pyfile) + log.info("creating stub loader for %s" % ext_name) + if not self.dry_run: + write_stub(os.path.basename(ext_name), pyfile) + to_compile.append(pyfile) + ext_outputs[p] = ext_name.replace(os.sep, '/') + + if to_compile: + cmd.byte_compile(to_compile) + if self.distribution.data_files: + self.do_install_data() + + # Make the EGG-INFO directory + archive_root = self.bdist_dir + egg_info = os.path.join(archive_root, 'EGG-INFO') + self.mkpath(egg_info) + if self.distribution.scripts: + script_dir = os.path.join(egg_info, 'scripts') + log.info("installing scripts to %s" % script_dir) + self.call_command('install_scripts', install_dir=script_dir, + no_ep=1) + + self.copy_metadata_to(egg_info) + native_libs = os.path.join(egg_info, "native_libs.txt") + if all_outputs: + log.info("writing %s" % native_libs) + if not self.dry_run: + ensure_directory(native_libs) + libs_file = open(native_libs, 'wt') + libs_file.write('\n'.join(all_outputs)) + libs_file.write('\n') + libs_file.close() + elif os.path.isfile(native_libs): + log.info("removing %s" % native_libs) + if not self.dry_run: + os.unlink(native_libs) + + write_safety_flag( + os.path.join(archive_root, 'EGG-INFO'), self.zip_safe() + ) + + if os.path.exists(os.path.join(self.egg_info, 'depends.txt')): + log.warn( + "WARNING: 'depends.txt' will not be used by setuptools 0.6!\n" + "Use the install_requires/extras_require setup() args instead." + ) + + if self.exclude_source_files: + self.zap_pyfiles() + + # Make the archive + make_zipfile(self.egg_output, archive_root, verbose=self.verbose, + dry_run=self.dry_run, mode=self.gen_header()) + if not self.keep_temp: + remove_tree(self.bdist_dir, dry_run=self.dry_run) + + # Add to 'Distribution.dist_files' so that the "upload" command works + getattr(self.distribution, 'dist_files', []).append( + ('bdist_egg', get_python_version(), self.egg_output)) + + def zap_pyfiles(self): + log.info("Removing .py files from temporary directory") + for base, dirs, files in walk_egg(self.bdist_dir): + for name in files: + if name.endswith('.py'): + path = os.path.join(base, name) + log.debug("Deleting %s", path) + os.unlink(path) + + def zip_safe(self): + safe = getattr(self.distribution, 'zip_safe', None) + if safe is not None: + return safe + log.warn("zip_safe flag not set; analyzing archive contents...") + return analyze_egg(self.bdist_dir, self.stubs) + + def gen_header(self): + epm = EntryPoint.parse_map(self.distribution.entry_points or '') + ep = epm.get('setuptools.installation', {}).get('eggsecutable') + if ep is None: + return 'w' # not an eggsecutable, do it the usual way. 
+ + if not ep.attrs or ep.extras: + raise DistutilsSetupError( + "eggsecutable entry point (%r) cannot have 'extras' " + "or refer to a module" % (ep,) + ) + + pyver = sys.version[:3] + pkg = ep.module_name + full = '.'.join(ep.attrs) + base = ep.attrs[0] + basename = os.path.basename(self.egg_output) + + header = ( + "#!/bin/sh\n" + 'if [ `basename $0` = "%(basename)s" ]\n' + 'then exec python%(pyver)s -c "' + "import sys, os; sys.path.insert(0, os.path.abspath('$0')); " + "from %(pkg)s import %(base)s; sys.exit(%(full)s())" + '" "$@"\n' + 'else\n' + ' echo $0 is not the correct name for this egg file.\n' + ' echo Please rename it back to %(basename)s and try again.\n' + ' exec false\n' + 'fi\n' + ) % locals() + + if not self.dry_run: + mkpath(os.path.dirname(self.egg_output), dry_run=self.dry_run) + f = open(self.egg_output, 'w') + f.write(header) + f.close() + return 'a' + + def copy_metadata_to(self, target_dir): + "Copy metadata (egg info) to the target_dir" + # normalize the path (so that a forward-slash in egg_info will + # match using startswith below) + norm_egg_info = os.path.normpath(self.egg_info) + prefix = os.path.join(norm_egg_info, '') + for path in self.ei_cmd.filelist.files: + if path.startswith(prefix): + target = os.path.join(target_dir, path[len(prefix):]) + ensure_directory(target) + self.copy_file(path, target) + + def get_ext_outputs(self): + """Get a list of relative paths to C extensions in the output distro""" + + all_outputs = [] + ext_outputs = [] + + paths = {self.bdist_dir: ''} + for base, dirs, files in os.walk(self.bdist_dir): + for filename in files: + if os.path.splitext(filename)[1].lower() in NATIVE_EXTENSIONS: + all_outputs.append(paths[base] + filename) + for filename in dirs: + paths[os.path.join(base, filename)] = (paths[base] + + filename + '/') + + if self.distribution.has_ext_modules(): + build_cmd = self.get_finalized_command('build_ext') + for ext in build_cmd.extensions: + if isinstance(ext, Library): + continue + fullname = build_cmd.get_ext_fullname(ext.name) + filename = build_cmd.get_ext_filename(fullname) + if not os.path.basename(filename).startswith('dl-'): + if os.path.exists(os.path.join(self.bdist_dir, filename)): + ext_outputs.append(filename) + + return all_outputs, ext_outputs + + +NATIVE_EXTENSIONS = dict.fromkeys('.dll .so .dylib .pyd'.split()) + + +def walk_egg(egg_dir): + """Walk an unpacked egg's contents, skipping the metadata directory""" + walker = os.walk(egg_dir) + base, dirs, files = next(walker) + if 'EGG-INFO' in dirs: + dirs.remove('EGG-INFO') + yield base, dirs, files + for bdf in walker: + yield bdf + + +def analyze_egg(egg_dir, stubs): + # check for existing flag in EGG-INFO + for flag, fn in safety_flags.items(): + if os.path.exists(os.path.join(egg_dir, 'EGG-INFO', fn)): + return flag + if not can_scan(): + return False + safe = True + for base, dirs, files in walk_egg(egg_dir): + for name in files: + if name.endswith('.py') or name.endswith('.pyw'): + continue + elif name.endswith('.pyc') or name.endswith('.pyo'): + # always scan, even if we already know we're not safe + safe = scan_module(egg_dir, base, name, stubs) and safe + return safe + + +def write_safety_flag(egg_dir, safe): + # Write or remove zip safety flag file(s) + for flag, fn in safety_flags.items(): + fn = os.path.join(egg_dir, fn) + if os.path.exists(fn): + if safe is None or bool(safe) != flag: + os.unlink(fn) + elif safe is not None and bool(safe) == flag: + f = open(fn, 'wt') + f.write('\n') + f.close() + + +safety_flags = { + True: 
'zip-safe', + False: 'not-zip-safe', +} + + +def scan_module(egg_dir, base, name, stubs): + """Check whether module possibly uses unsafe-for-zipfile stuff""" + + filename = os.path.join(base, name) + if filename[:-1] in stubs: + return True # Extension module + pkg = base[len(egg_dir) + 1:].replace(os.sep, '.') + module = pkg + (pkg and '.' or '') + os.path.splitext(name)[0] + if sys.version_info < (3, 3): + skip = 8 # skip magic & date + else: + skip = 12 # skip magic & date & file size + f = open(filename, 'rb') + f.read(skip) + code = marshal.load(f) + f.close() + safe = True + symbols = dict.fromkeys(iter_symbols(code)) + for bad in ['__file__', '__path__']: + if bad in symbols: + log.warn("%s: module references %s", module, bad) + safe = False + if 'inspect' in symbols: + for bad in [ + 'getsource', 'getabsfile', 'getsourcefile', 'getfile', + 'getsourcelines', 'findsource', 'getcomments', 'getframeinfo', + 'getinnerframes', 'getouterframes', 'stack', 'trace' + ]: + if bad in symbols: + log.warn("%s: module MAY be using inspect.%s", module, bad) + safe = False + return safe + + +def iter_symbols(code): + """Yield names and strings used by `code` and its nested code objects""" + for name in code.co_names: + yield name + for const in code.co_consts: + if isinstance(const, six.string_types): + yield const + elif isinstance(const, CodeType): + for name in iter_symbols(const): + yield name + + +def can_scan(): + if not sys.platform.startswith('java') and sys.platform != 'cli': + # CPython, PyPy, etc. + return True + log.warn("Unable to analyze compiled code on this platform.") + log.warn("Please ask the author to include a 'zip_safe'" + " setting (either True or False) in the package's setup.py") + +# Attribute names of options for commands that might need to be convinced to +# install to the egg build directory + +INSTALL_DIRECTORY_ATTRS = [ + 'install_lib', 'install_dir', 'install_data', 'install_base' +] + + +def make_zipfile(zip_filename, base_dir, verbose=0, dry_run=0, compress=True, + mode='w'): + """Create a zip file from all the files under 'base_dir'; the output zip + file will be named 'zip_filename'. Uses the "zipfile" Python module. + Returns the name of the output zip file.
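The zip-safety scan above reduces to spotting telltale names in compiled bytecode. A self-contained illustration of what iter_symbols reports, re-implemented with the same traversal as a Python 3 sketch; the compiled snippet is made up:

    from types import CodeType

    def iter_symbols(code):
        # Same traversal as above: co_names plus string consts, recursively.
        for name in code.co_names:
            yield name
        for const in code.co_consts:
            if isinstance(const, str):
                yield const
            elif isinstance(const, CodeType):
                yield from iter_symbols(const)

    # Made-up module body that touches both red flags:
    code = compile("import inspect\nprint(__file__)", '<demo>', 'exec')
    symbols = set(iter_symbols(code))
    print('__file__' in symbols, 'inspect' in symbols)  # True True -> not zip-safe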
+ """ + import zipfile + + mkpath(os.path.dirname(zip_filename), dry_run=dry_run) + log.info("creating '%s' and adding '%s' to it", zip_filename, base_dir) + + def visit(z, dirname, names): + for name in names: + path = os.path.normpath(os.path.join(dirname, name)) + if os.path.isfile(path): + p = path[len(base_dir) + 1:] + if not dry_run: + z.write(path, p) + log.debug("adding '%s'" % p) + + compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED + if not dry_run: + z = zipfile.ZipFile(zip_filename, mode, compression=compression) + for dirname, dirs, files in os.walk(base_dir): + visit(z, dirname, files) + z.close() + else: + for dirname, dirs, files in os.walk(base_dir): + visit(None, dirname, files) + return zip_filename diff --git a/venv/lib/python3.5/site-packages/setuptools/command/bdist_rpm.py b/venv/lib/python3.5/site-packages/setuptools/command/bdist_rpm.py new file mode 100644 index 0000000..7073092 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/bdist_rpm.py @@ -0,0 +1,43 @@ +import distutils.command.bdist_rpm as orig + + +class bdist_rpm(orig.bdist_rpm): + """ + Override the default bdist_rpm behavior to do the following: + + 1. Run egg_info to ensure the name and version are properly calculated. + 2. Always run 'install' using --single-version-externally-managed to + disable eggs in RPM distributions. + 3. Replace dash with underscore in the version numbers for better RPM + compatibility. + """ + + def run(self): + # ensure distro name is up-to-date + self.run_command('egg_info') + + orig.bdist_rpm.run(self) + + def _make_spec_file(self): + version = self.distribution.get_version() + rpmversion = version.replace('-', '_') + spec = orig.bdist_rpm._make_spec_file(self) + line23 = '%define version ' + version + line24 = '%define version ' + rpmversion + spec = [ + line.replace( + "Source0: %{name}-%{version}.tar", + "Source0: %{name}-%{unmangled_version}.tar" + ).replace( + "setup.py install ", + "setup.py install --single-version-externally-managed " + ).replace( + "%setup", + "%setup -n %{name}-%{unmangled_version}" + ).replace(line23, line24) + for line in spec + ] + insert_loc = spec.index(line24) + 1 + unmangled_version = "%define unmangled_version " + version + spec.insert(insert_loc, unmangled_version) + return spec diff --git a/venv/lib/python3.5/site-packages/setuptools/command/bdist_wininst.py b/venv/lib/python3.5/site-packages/setuptools/command/bdist_wininst.py new file mode 100644 index 0000000..073de97 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/bdist_wininst.py @@ -0,0 +1,21 @@ +import distutils.command.bdist_wininst as orig + + +class bdist_wininst(orig.bdist_wininst): + def reinitialize_command(self, command, reinit_subcommands=0): + """ + Supplement reinitialize_command to work around + http://bugs.python.org/issue20819 + """ + cmd = self.distribution.reinitialize_command( + command, reinit_subcommands) + if command in ('install', 'install_lib'): + cmd.install_lib = None + return cmd + + def run(self): + self._is_running = True + try: + orig.bdist_wininst.run(self) + finally: + self._is_running = False diff --git a/venv/lib/python3.5/site-packages/setuptools/command/build_ext.py b/venv/lib/python3.5/site-packages/setuptools/command/build_ext.py new file mode 100644 index 0000000..92e4a18 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/build_ext.py @@ -0,0 +1,296 @@ +from distutils.command.build_ext import build_ext as _du_build_ext +from distutils.file_util import 
copy_file +from distutils.ccompiler import new_compiler +from distutils.sysconfig import customize_compiler +from distutils.errors import DistutilsError +from distutils import log +import os +import sys +import itertools + +from setuptools.extension import Library + +try: + # Attempt to use Cython for building extensions, if available + from Cython.Distutils.build_ext import build_ext as _build_ext +except ImportError: + _build_ext = _du_build_ext + +try: + # Python 2.7 or >=3.2 + from sysconfig import _CONFIG_VARS +except ImportError: + from distutils.sysconfig import get_config_var + + get_config_var("LDSHARED") # make sure _config_vars is initialized + del get_config_var + from distutils.sysconfig import _config_vars as _CONFIG_VARS + +have_rtld = False +use_stubs = False +libtype = 'shared' + +if sys.platform == "darwin": + use_stubs = True +elif os.name != 'nt': + try: + import dl + use_stubs = have_rtld = hasattr(dl, 'RTLD_NOW') + except ImportError: + pass + + +if_dl = lambda s: s if have_rtld else '' + +class build_ext(_build_ext): + def run(self): + """Build extensions in build directory, then copy if --inplace""" + old_inplace, self.inplace = self.inplace, 0 + _build_ext.run(self) + self.inplace = old_inplace + if old_inplace: + self.copy_extensions_to_source() + + def copy_extensions_to_source(self): + build_py = self.get_finalized_command('build_py') + for ext in self.extensions: + fullname = self.get_ext_fullname(ext.name) + filename = self.get_ext_filename(fullname) + modpath = fullname.split('.') + package = '.'.join(modpath[:-1]) + package_dir = build_py.get_package_dir(package) + dest_filename = os.path.join(package_dir, + os.path.basename(filename)) + src_filename = os.path.join(self.build_lib, filename) + + # Always copy, even if source is older than destination, to ensure + # that the right extensions for the current Python/platform are + # used. + copy_file( + src_filename, dest_filename, verbose=self.verbose, + dry_run=self.dry_run + ) + if ext._needs_stub: + self.write_stub(package_dir or os.curdir, ext, True) + + def get_ext_filename(self, fullname): + filename = _build_ext.get_ext_filename(self, fullname) + if fullname in self.ext_map: + ext = self.ext_map[fullname] + if isinstance(ext, Library): + fn, ext = os.path.splitext(filename) + return self.shlib_compiler.library_filename(fn, libtype) + elif use_stubs and ext._links_to_dynamic: + d, fn = os.path.split(filename) + return os.path.join(d, 'dl-' + fn) + return filename + + def initialize_options(self): + _build_ext.initialize_options(self) + self.shlib_compiler = None + self.shlibs = [] + self.ext_map = {} + + def finalize_options(self): + _build_ext.finalize_options(self) + self.extensions = self.extensions or [] + self.check_extensions_list(self.extensions) + self.shlibs = [ext for ext in self.extensions + if isinstance(ext, Library)] + if self.shlibs: + self.setup_shlib_compiler() + for ext in self.extensions: + ext._full_name = self.get_ext_fullname(ext.name) + for ext in self.extensions: + fullname = ext._full_name + self.ext_map[fullname] = ext + + # distutils 3.1 will also ask for module names + # XXX what to do with conflicts? 
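run() and copy_extensions_to_source above implement --inplace as "build out of tree, then copy back into the source package". A re-trace of the destination computation; the layout and ABI tag are assumed:

    import os

    build_lib = 'build/lib'
    package_dir = 'src/mypkg'   # e.g. what build_py.get_package_dir('mypkg') returns
    filename = os.path.join('mypkg', 'fast.cpython-35m-x86_64-linux-gnu.so')  # assumed

    src_filename = os.path.join(build_lib, filename)
    dest_filename = os.path.join(package_dir, os.path.basename(filename))
    print(src_filename, '->', dest_filename)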
+ self.ext_map[fullname.split('.')[-1]] = ext + + ltd = self.shlibs and self.links_to_dynamic(ext) or False + ns = ltd and use_stubs and not isinstance(ext, Library) + ext._links_to_dynamic = ltd + ext._needs_stub = ns + filename = ext._file_name = self.get_ext_filename(fullname) + libdir = os.path.dirname(os.path.join(self.build_lib, filename)) + if ltd and libdir not in ext.library_dirs: + ext.library_dirs.append(libdir) + if ltd and use_stubs and os.curdir not in ext.runtime_library_dirs: + ext.runtime_library_dirs.append(os.curdir) + + def setup_shlib_compiler(self): + compiler = self.shlib_compiler = new_compiler( + compiler=self.compiler, dry_run=self.dry_run, force=self.force + ) + if sys.platform == "darwin": + tmp = _CONFIG_VARS.copy() + try: + # XXX Help! I don't have any idea whether these are right... + _CONFIG_VARS['LDSHARED'] = ( + "gcc -Wl,-x -dynamiclib -undefined dynamic_lookup") + _CONFIG_VARS['CCSHARED'] = " -dynamiclib" + _CONFIG_VARS['SO'] = ".dylib" + customize_compiler(compiler) + finally: + _CONFIG_VARS.clear() + _CONFIG_VARS.update(tmp) + else: + customize_compiler(compiler) + + if self.include_dirs is not None: + compiler.set_include_dirs(self.include_dirs) + if self.define is not None: + # 'define' option is a list of (name,value) tuples + for (name, value) in self.define: + compiler.define_macro(name, value) + if self.undef is not None: + for macro in self.undef: + compiler.undefine_macro(macro) + if self.libraries is not None: + compiler.set_libraries(self.libraries) + if self.library_dirs is not None: + compiler.set_library_dirs(self.library_dirs) + if self.rpath is not None: + compiler.set_runtime_library_dirs(self.rpath) + if self.link_objects is not None: + compiler.set_link_objects(self.link_objects) + + # hack so distutils' build_extension() builds a library instead + compiler.link_shared_object = link_shared_object.__get__(compiler) + + def get_export_symbols(self, ext): + if isinstance(ext, Library): + return ext.export_symbols + return _build_ext.get_export_symbols(self, ext) + + def build_extension(self, ext): + ext._convert_pyx_sources_to_lang() + _compiler = self.compiler + try: + if isinstance(ext, Library): + self.compiler = self.shlib_compiler + _build_ext.build_extension(self, ext) + if ext._needs_stub: + cmd = self.get_finalized_command('build_py').build_lib + self.write_stub(cmd, ext) + finally: + self.compiler = _compiler + + def links_to_dynamic(self, ext): + """Return true if 'ext' links to a dynamic lib in the same package""" + # XXX this should check to ensure the lib is actually being built + # XXX as dynamic, and not just using a locally-found version or a + # XXX static-compiled version + libnames = dict.fromkeys([lib._full_name for lib in self.shlibs]) + pkg = '.'.join(ext._full_name.split('.')[:-1] + ['']) + return any(pkg + libname in libnames for libname in ext.libraries) + + def get_outputs(self): + return _build_ext.get_outputs(self) + self.__get_stubs_outputs() + + def __get_stubs_outputs(self): + # assemble the base name for each extension that needs a stub + ns_ext_bases = ( + os.path.join(self.build_lib, *ext._full_name.split('.')) + for ext in self.extensions + if ext._needs_stub + ) + # pair each base with the extension + pairs = itertools.product(ns_ext_bases, self.__get_output_extensions()) + return list(base + fnext for base, fnext in pairs) + + def __get_output_extensions(self): + yield '.py' + yield '.pyc' + if self.get_finalized_command('build_py').optimize: + yield '.pyo' + + def write_stub(self, output_dir, ext, 
compile=False): + log.info("writing stub loader for %s to %s", ext._full_name, + output_dir) + stub_file = (os.path.join(output_dir, *ext._full_name.split('.')) + + '.py') + if compile and os.path.exists(stub_file): + raise DistutilsError(stub_file + " already exists! Please delete.") + if not self.dry_run: + f = open(stub_file, 'w') + f.write( + '\n'.join([ + "def __bootstrap__():", + " global __bootstrap__, __file__, __loader__", + " import sys, os, pkg_resources, imp" + if_dl(", dl"), + " __file__ = pkg_resources.resource_filename" + "(__name__,%r)" + % os.path.basename(ext._file_name), + " del __bootstrap__", + " if '__loader__' in globals():", + " del __loader__", + if_dl(" old_flags = sys.getdlopenflags()"), + " old_dir = os.getcwd()", + " try:", + " os.chdir(os.path.dirname(__file__))", + if_dl(" sys.setdlopenflags(dl.RTLD_NOW)"), + " imp.load_dynamic(__name__,__file__)", + " finally:", + if_dl(" sys.setdlopenflags(old_flags)"), + " os.chdir(old_dir)", + "__bootstrap__()", + "" # terminal \n + ]) + ) + f.close() + if compile: + from distutils.util import byte_compile + + byte_compile([stub_file], optimize=0, + force=True, dry_run=self.dry_run) + optimize = self.get_finalized_command('install_lib').optimize + if optimize > 0: + byte_compile([stub_file], optimize=optimize, + force=True, dry_run=self.dry_run) + if os.path.exists(stub_file) and not self.dry_run: + os.unlink(stub_file) + + +if use_stubs or os.name == 'nt': + # Build shared libraries + # + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + self.link( + self.SHARED_LIBRARY, objects, output_libname, + output_dir, libraries, library_dirs, runtime_library_dirs, + export_symbols, debug, extra_preargs, extra_postargs, + build_temp, target_lang + ) +else: + # Build static libraries everywhere else + libtype = 'static' + + def link_shared_object( + self, objects, output_libname, output_dir=None, libraries=None, + library_dirs=None, runtime_library_dirs=None, export_symbols=None, + debug=0, extra_preargs=None, extra_postargs=None, build_temp=None, + target_lang=None): + # XXX we need to either disallow these attrs on Library instances, + # or warn/abort here if set, or something... 
+ # libraries=None, library_dirs=None, runtime_library_dirs=None, + # export_symbols=None, extra_preargs=None, extra_postargs=None, + # build_temp=None + + assert output_dir is None # distutils build_ext doesn't pass this + output_dir, filename = os.path.split(output_libname) + basename, ext = os.path.splitext(filename) + if self.library_filename("x").startswith('lib'): + # strip 'lib' prefix; this is kludgy if some platform uses + # a different prefix + basename = basename[3:] + + self.create_static_lib( + objects, basename, output_dir, debug, target_lang + ) diff --git a/venv/lib/python3.5/site-packages/setuptools/command/build_py.py b/venv/lib/python3.5/site-packages/setuptools/command/build_py.py new file mode 100644 index 0000000..8623c77 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/build_py.py @@ -0,0 +1,222 @@ +from glob import glob +from distutils.util import convert_path +import distutils.command.build_py as orig +import os +import fnmatch +import textwrap +import io +import distutils.errors +import collections +import itertools + +from setuptools.extern.six.moves import map + +try: + from setuptools.lib2to3_ex import Mixin2to3 +except ImportError: + class Mixin2to3: + def run_2to3(self, files, doctests=True): + "do nothing" + + +class build_py(orig.build_py, Mixin2to3): + """Enhanced 'build_py' command that includes data files with packages + + The data files are specified via a 'package_data' argument to 'setup()'. + See 'setuptools.dist.Distribution' for more details. + + Also, this version of the 'build_py' command allows you to specify both + 'py_modules' and 'packages' in the same setup operation. + """ + + def finalize_options(self): + orig.build_py.finalize_options(self) + self.package_data = self.distribution.package_data + self.exclude_package_data = (self.distribution.exclude_package_data or + {}) + if 'data_files' in self.__dict__: + del self.__dict__['data_files'] + self.__updated_files = [] + self.__doctests_2to3 = [] + + def run(self): + """Build modules, packages, and copy data files to build directory""" + if not self.py_modules and not self.packages: + return + + if self.py_modules: + self.build_modules() + + if self.packages: + self.build_packages() + self.build_package_data() + + self.run_2to3(self.__updated_files, False) + self.run_2to3(self.__updated_files, True) + self.run_2to3(self.__doctests_2to3, True) + + # Only compile actual .py files, using our base class' idea of what our + # output files are. 
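build_py's data handling above is driven entirely by setup() arguments. A hypothetical configuration exercising both the global and per-package globs plus exclusion; all names and patterns are assumed:

    from setuptools import setup

    setup(
        name='mypkg',
        packages=['mypkg'],
        package_data={
            '': ['*.txt'],                    # applies to every package
            'mypkg': ['templates/*.html'],    # package-specific globs (assumed)
        },
        exclude_package_data={'mypkg': ['templates/draft_*.html']},
    )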
+ self.byte_compile(orig.build_py.get_outputs(self, include_bytecode=0)) + + def __getattr__(self, attr): + "lazily compute data files" + if attr == 'data_files': + self.data_files = self._get_data_files() + return self.data_files + return orig.build_py.__getattr__(self, attr) + + def build_module(self, module, module_file, package): + outfile, copied = orig.build_py.build_module(self, module, module_file, + package) + if copied: + self.__updated_files.append(outfile) + return outfile, copied + + def _get_data_files(self): + """Generate list of '(package,src_dir,build_dir,filenames)' tuples""" + self.analyze_manifest() + return list(map(self._get_pkg_data_files, self.packages or ())) + + def _get_pkg_data_files(self, package): + # Locate package source directory + src_dir = self.get_package_dir(package) + + # Compute package build directory + build_dir = os.path.join(*([self.build_lib] + package.split('.'))) + + # Strip directory from globbed filenames + filenames = [ + os.path.relpath(file, src_dir) + for file in self.find_data_files(package, src_dir) + ] + return package, src_dir, build_dir, filenames + + def find_data_files(self, package, src_dir): + """Return filenames for package's data files in 'src_dir'""" + globs = (self.package_data.get('', []) + + self.package_data.get(package, [])) + files = self.manifest_files.get(package, [])[:] + for pattern in globs: + # Each pattern has to be converted to a platform-specific path + files.extend(glob(os.path.join(src_dir, convert_path(pattern)))) + return self.exclude_data_files(package, src_dir, files) + + def build_package_data(self): + """Copy data files into build directory""" + for package, src_dir, build_dir, filenames in self.data_files: + for filename in filenames: + target = os.path.join(build_dir, filename) + self.mkpath(os.path.dirname(target)) + srcfile = os.path.join(src_dir, filename) + outf, copied = self.copy_file(srcfile, target) + srcfile = os.path.abspath(srcfile) + if (copied and + srcfile in self.distribution.convert_2to3_doctests): + self.__doctests_2to3.append(outf) + + def analyze_manifest(self): + self.manifest_files = mf = {} + if not self.distribution.include_package_data: + return + src_dirs = {} + for package in self.packages or (): + # Locate package source directory + src_dirs[assert_relative(self.get_package_dir(package))] = package + + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + for path in ei_cmd.filelist.files: + d, f = os.path.split(assert_relative(path)) + prev = None + oldf = f + while d and d != prev and d not in src_dirs: + prev = d + d, df = os.path.split(d) + f = os.path.join(df, f) + if d in src_dirs: + if path.endswith('.py') and f == oldf: + continue # it's a module, not data + mf.setdefault(src_dirs[d], []).append(path) + + def get_data_files(self): + pass # Lazily compute data files in _get_data_files() function. 
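analyze_manifest above climbs each manifest path upward until it reaches a known package source directory. A standalone re-trace for one file; all paths are assumed:

    import os

    # Assumed mapping of source dir -> package name, and one tracked file:
    src_dirs = {os.path.join('src', 'mypkg'): 'mypkg'}
    path = os.path.join('src', 'mypkg', 'templates', 'page.html')

    d, f = os.path.split(path)
    while d and d not in src_dirs:     # walk upward, re-joining the tail
        d, df = os.path.split(d)
        f = os.path.join(df, f)
    print({src_dirs[d]: [path]})       # -> {'mypkg': ['src/mypkg/templates/page.html']}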
+ + def check_package(self, package, package_dir): + """Check namespace packages' __init__ for declare_namespace""" + try: + return self.packages_checked[package] + except KeyError: + pass + + init_py = orig.build_py.check_package(self, package, package_dir) + self.packages_checked[package] = init_py + + if not init_py or not self.distribution.namespace_packages: + return init_py + + for pkg in self.distribution.namespace_packages: + if pkg == package or pkg.startswith(package + '.'): + break + else: + return init_py + + with io.open(init_py, 'rb') as f: + contents = f.read() + if b'declare_namespace' not in contents: + raise distutils.errors.DistutilsError( + "Namespace package problem: %s is a namespace package, but " + "its\n__init__.py does not call declare_namespace()! Please " + 'fix it.\n(See the setuptools manual under ' + '"Namespace Packages" for details.)\n"' % (package,) + ) + return init_py + + def initialize_options(self): + self.packages_checked = {} + orig.build_py.initialize_options(self) + + def get_package_dir(self, package): + res = orig.build_py.get_package_dir(self, package) + if self.distribution.src_root is not None: + return os.path.join(self.distribution.src_root, res) + return res + + def exclude_data_files(self, package, src_dir, files): + """Filter filenames for package's data files in 'src_dir'""" + globs = ( + self.exclude_package_data.get('', []) + + self.exclude_package_data.get(package, []) + ) + bad = set( + item + for pattern in globs + for item in fnmatch.filter( + files, + os.path.join(src_dir, convert_path(pattern)), + ) + ) + seen = collections.defaultdict(itertools.count) + return [ + fn + for fn in files + if fn not in bad + # ditch dupes + and not next(seen[fn]) + ] + + +def assert_relative(path): + if not os.path.isabs(path): + return path + from distutils.errors import DistutilsSetupError + + msg = textwrap.dedent(""" + Error: setup script specifies an absolute path: + + %s + + setup() arguments must *always* be /-separated paths relative to the + setup.py directory, *never* absolute paths. + """).lstrip() % path + raise DistutilsSetupError(msg) diff --git a/venv/lib/python3.5/site-packages/setuptools/command/develop.py b/venv/lib/python3.5/site-packages/setuptools/command/develop.py new file mode 100644 index 0000000..11b5df1 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/develop.py @@ -0,0 +1,196 @@ +from distutils.util import convert_path +from distutils import log +from distutils.errors import DistutilsError, DistutilsOptionError +import os +import glob +import io + +from setuptools.extern import six + +from pkg_resources import Distribution, PathMetadata, normalize_path +from setuptools.command.easy_install import easy_install +import setuptools + + +class develop(easy_install): + """Set up package for development""" + + description = "install package in 'development mode'" + + user_options = easy_install.user_options + [ + ("uninstall", "u", "Uninstall this source package"), + ("egg-path=", None, "Set the path to be used in the .egg-link file"), + ] + + boolean_options = easy_install.boolean_options + ['uninstall'] + + command_consumes_arguments = False # override base + + def run(self): + if self.uninstall: + self.multi_version = True + self.uninstall_link() + else: + self.install_for_development() + self.warn_deprecated_options() + + def initialize_options(self): + self.uninstall = None + self.egg_path = None + easy_install.initialize_options(self) + self.setup_path = None + self.always_copy_from = '.' 
# always copy eggs installed in curdir + + def finalize_options(self): + ei = self.get_finalized_command("egg_info") + if ei.broken_egg_info: + template = "Please rename %r to %r before using 'develop'" + args = ei.egg_info, ei.broken_egg_info + raise DistutilsError(template % args) + self.args = [ei.egg_name] + + easy_install.finalize_options(self) + self.expand_basedirs() + self.expand_dirs() + # pick up setup-dir .egg files only: no .egg-info + self.package_index.scan(glob.glob('*.egg')) + + egg_link_fn = ei.egg_name + '.egg-link' + self.egg_link = os.path.join(self.install_dir, egg_link_fn) + self.egg_base = ei.egg_base + if self.egg_path is None: + self.egg_path = os.path.abspath(ei.egg_base) + + target = normalize_path(self.egg_base) + egg_path = normalize_path(os.path.join(self.install_dir, + self.egg_path)) + if egg_path != target: + raise DistutilsOptionError( + "--egg-path must be a relative path from the install" + " directory to " + target + ) + + # Make a distribution for the package's source + self.dist = Distribution( + target, + PathMetadata(target, os.path.abspath(ei.egg_info)), + project_name=ei.egg_name + ) + + p = self.egg_base.replace(os.sep, '/') + if p != os.curdir: + p = '../' * (p.count('/') + 1) + self.setup_path = p + p = normalize_path(os.path.join(self.install_dir, self.egg_path, p)) + if p != normalize_path(os.curdir): + raise DistutilsOptionError( + "Can't get a consistent path to setup script from" + " installation directory", p, normalize_path(os.curdir)) + + def install_for_development(self): + if six.PY3 and getattr(self.distribution, 'use_2to3', False): + # If we run 2to3 we can not do this inplace: + + # Ensure metadata is up-to-date + self.reinitialize_command('build_py', inplace=0) + self.run_command('build_py') + bpy_cmd = self.get_finalized_command("build_py") + build_path = normalize_path(bpy_cmd.build_lib) + + # Build extensions + self.reinitialize_command('egg_info', egg_base=build_path) + self.run_command('egg_info') + + self.reinitialize_command('build_ext', inplace=0) + self.run_command('build_ext') + + # Fixup egg-link and easy-install.pth + ei_cmd = self.get_finalized_command("egg_info") + self.egg_path = build_path + self.dist.location = build_path + # XXX + self.dist._provider = PathMetadata(build_path, ei_cmd.egg_info) + else: + # Without 2to3 inplace works fine: + self.run_command('egg_info') + + # Build extensions in-place + self.reinitialize_command('build_ext', inplace=1) + self.run_command('build_ext') + + self.install_site_py() # ensure that target dir is site-safe + if setuptools.bootstrap_install_from: + self.easy_install(setuptools.bootstrap_install_from) + setuptools.bootstrap_install_from = None + + # create an .egg-link in the installation dir, pointing to our egg + log.info("Creating %s (link to %s)", self.egg_link, self.egg_base) + if not self.dry_run: + with open(self.egg_link, "w") as f: + f.write(self.egg_path + "\n" + self.setup_path) + # postprocess the installed distro, fixing up .pth, installing scripts, + # and handling requirements + self.process_distribution(None, self.dist, not self.no_deps) + + def uninstall_link(self): + if os.path.exists(self.egg_link): + log.info("Removing %s (link to %s)", self.egg_link, self.egg_base) + egg_link_file = open(self.egg_link) + contents = [line.rstrip() for line in egg_link_file] + egg_link_file.close() + if contents not in ([self.egg_path], + [self.egg_path, self.setup_path]): + log.warn("Link points to %s: uninstall aborted", contents) + return + if not self.dry_run: + 
os.unlink(self.egg_link) + if not self.dry_run: + self.update_pth(self.dist) # remove any .pth link to us + if self.distribution.scripts: + # XXX should also check for entry point scripts! + log.warn("Note: you must uninstall or replace scripts manually!") + + def install_egg_scripts(self, dist): + if dist is not self.dist: + # Installing a dependency, so fall back to normal behavior + return easy_install.install_egg_scripts(self, dist) + + # create wrapper scripts in the script dir, pointing to dist.scripts + + # new-style... + self.install_wrapper_scripts(dist) + + # ...and old-style + for script_name in self.distribution.scripts or []: + script_path = os.path.abspath(convert_path(script_name)) + script_name = os.path.basename(script_path) + with io.open(script_path) as strm: + script_text = strm.read() + self.install_script(dist, script_name, script_text, script_path) + + def install_wrapper_scripts(self, dist): + dist = VersionlessRequirement(dist) + return easy_install.install_wrapper_scripts(self, dist) + + +class VersionlessRequirement(object): + """ + Adapt a pkg_resources.Distribution to simply return the project + name as the 'requirement' so that scripts will work across + multiple versions. + + >>> dist = Distribution(project_name='foo', version='1.0') + >>> str(dist.as_requirement()) + 'foo==1.0' + >>> adapted_dist = VersionlessRequirement(dist) + >>> str(adapted_dist.as_requirement()) + 'foo' + """ + def __init__(self, dist): + self.__dist = dist + + def __getattr__(self, name): + return getattr(self.__dist, name) + + def as_requirement(self): + return self.project_name diff --git a/venv/lib/python3.5/site-packages/setuptools/command/easy_install.py b/venv/lib/python3.5/site-packages/setuptools/command/easy_install.py new file mode 100644 index 0000000..7683e3b --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/easy_install.py @@ -0,0 +1,2315 @@ +#!/usr/bin/env python + +""" +Easy Install +------------ + +A tool for doing automatic download/extract/build of distutils-based Python +packages. For detailed documentation, see the accompanying EasyInstall.txt +file, or visit the `EasyInstall home page`__. 
+ +__ https://pythonhosted.org/setuptools/easy_install.html + +""" + +from glob import glob +from distutils.util import get_platform +from distutils.util import convert_path, subst_vars +from distutils.errors import DistutilsArgError, DistutilsOptionError, \ + DistutilsError, DistutilsPlatformError +from distutils.command.install import INSTALL_SCHEMES, SCHEME_KEYS +from distutils import log, dir_util +from distutils.command.build_scripts import first_line_re +from distutils.spawn import find_executable +import sys +import os +import zipimport +import shutil +import tempfile +import zipfile +import re +import stat +import random +import platform +import textwrap +import warnings +import site +import struct +import contextlib +import subprocess +import shlex +import io + +from setuptools.extern import six +from setuptools.extern.six.moves import configparser, map + +from setuptools import Command +from setuptools.sandbox import run_setup +from setuptools.py31compat import get_path, get_config_vars +from setuptools.command import setopt +from setuptools.archive_util import unpack_archive +from setuptools.package_index import PackageIndex +from setuptools.package_index import URL_SCHEME +from setuptools.command import bdist_egg, egg_info +from pkg_resources import ( + yield_lines, normalize_path, resource_string, ensure_directory, + get_distribution, find_distributions, Environment, Requirement, + Distribution, PathMetadata, EggMetadata, WorkingSet, DistributionNotFound, + VersionConflict, DEVELOP_DIST, +) +import pkg_resources + +# Turn on PEP440Warnings +warnings.filterwarnings("default", category=pkg_resources.PEP440Warning) + + +__all__ = [ + 'samefile', 'easy_install', 'PthDistributions', 'extract_wininst_cfg', + 'main', 'get_exe_prefixes', +] + + +def is_64bit(): + return struct.calcsize("P") == 8 + + +def samefile(p1, p2): + both_exist = os.path.exists(p1) and os.path.exists(p2) + use_samefile = hasattr(os.path, 'samefile') and both_exist + if use_samefile: + return os.path.samefile(p1, p2) + norm_p1 = os.path.normpath(os.path.normcase(p1)) + norm_p2 = os.path.normpath(os.path.normcase(p2)) + return norm_p1 == norm_p2 + + +if six.PY2: + def _to_ascii(s): + return s + + def isascii(s): + try: + six.text_type(s, 'ascii') + return True + except UnicodeError: + return False +else: + def _to_ascii(s): + return s.encode('ascii') + + def isascii(s): + try: + s.encode('ascii') + return True + except UnicodeError: + return False + + +class easy_install(Command): + """Manage a download/build/install process""" + description = "Find/get/install Python packages" + command_consumes_arguments = True + + user_options = [ + ('prefix=', None, "installation prefix"), + ("zip-ok", "z", "install package as a zipfile"), + ("multi-version", "m", "make apps have to require() a version"), + ("upgrade", "U", "force upgrade (searches PyPI for latest versions)"), + ("install-dir=", "d", "install package to DIR"), + ("script-dir=", "s", "install scripts to DIR"), + ("exclude-scripts", "x", "Don't install scripts"), + ("always-copy", "a", "Copy all needed packages to install dir"), + ("index-url=", "i", "base URL of Python Package Index"), + ("find-links=", "f", "additional URL(s) to search for packages"), + ("build-directory=", "b", + "download/extract/build in DIR; keep the results"), + ('optimize=', 'O', + "also compile with optimization: -O1 for \"python -O\", " + "-O2 for \"python -OO\", and -O0 to disable [default: -O0]"), + ('record=', None, + "filename in which to record list of installed files"), + 
('always-unzip', 'Z', "don't install as a zipfile, no matter what"), + ('site-dirs=', 'S', "list of directories where .pth files work"), + ('editable', 'e', "Install specified packages in editable form"), + ('no-deps', 'N', "don't install dependencies"), + ('allow-hosts=', 'H', "pattern(s) that hostnames must match"), + ('local-snapshots-ok', 'l', + "allow building eggs from local checkouts"), + ('version', None, "print version information and exit"), + ('install-layout=', None, "installation layout to choose (known values: deb)"), + ('force-installation-into-system-dir', '0', "force installation into /usr"), + ('no-find-links', None, + "Don't load find-links defined in packages being installed") + ] + boolean_options = [ + 'zip-ok', 'multi-version', 'exclude-scripts', 'upgrade', 'always-copy', + 'editable', + 'no-deps', 'local-snapshots-ok', 'version', 'force-installation-into-system-dir' + ] + + if site.ENABLE_USER_SITE: + help_msg = "install in user site-package '%s'" % site.USER_SITE + user_options.append(('user', None, help_msg)) + boolean_options.append('user') + + negative_opt = {'always-unzip': 'zip-ok'} + create_index = PackageIndex + + def initialize_options(self): + # the --user option seems to be an opt-in one, + # so the default should be False. + self.user = 0 + self.zip_ok = self.local_snapshots_ok = None + self.install_dir = self.script_dir = self.exclude_scripts = None + self.index_url = None + self.find_links = None + self.build_directory = None + self.args = None + self.optimize = self.record = None + self.upgrade = self.always_copy = self.multi_version = None + self.editable = self.no_deps = self.allow_hosts = None + self.root = self.prefix = self.no_report = None + self.version = None + self.install_purelib = None # for pure module distributions + self.install_platlib = None # non-pure (dists w/ extensions) + self.install_headers = None # for C/C++ headers + self.install_lib = None # set to either purelib or platlib + self.install_scripts = None + self.install_data = None + self.install_base = None + self.install_platbase = None + if site.ENABLE_USER_SITE: + self.install_userbase = site.USER_BASE + self.install_usersite = site.USER_SITE + else: + self.install_userbase = None + self.install_usersite = None + self.no_find_links = None + + # Options not specifiable via command line + self.package_index = None + self.pth_file = self.always_copy_from = None + self.site_dirs = None + self.installed_projects = {} + self.sitepy_installed = False + # enable custom installation, known values: deb + self.install_layout = None + self.force_installation_into_system_dir = None + self.multiarch = None + + # Always read easy_install options, even if we are subclassed, or have + # an independent instance created. This ensures that defaults will + # always come from the standard configuration file(s)' "easy_install" + # section, even if this is a "develop" or "install" command, or some + # other embedding. 
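The comment above refers to distutils' configuration cascade: an [easy_install] section in setup.cfg or a per-user pydistutils.cfg seeds these options before command-line parsing. A sketch of such a section, parsed the way the defaults machinery would see it; all values are assumed:

    import configparser

    # Assumed config text; in practice this lives in setup.cfg.
    SETUP_CFG = "[easy_install]\nindex-url = https://pypi.example.org/simple\nzip-ok = 1\n"

    cfg = configparser.ConfigParser()
    cfg.read_string(SETUP_CFG)
    print(dict(cfg['easy_install']))   # the option names easy_install defaults from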
+ self._dry_run = None + self.verbose = self.distribution.verbose + self.distribution._set_command_options( + self, self.distribution.get_option_dict('easy_install') + ) + + def delete_blockers(self, blockers): + extant_blockers = ( + filename for filename in blockers + if os.path.exists(filename) or os.path.islink(filename) + ) + list(map(self._delete_path, extant_blockers)) + + def _delete_path(self, path): + log.info("Deleting %s", path) + if self.dry_run: + return + + is_tree = os.path.isdir(path) and not os.path.islink(path) + remover = rmtree if is_tree else os.unlink + remover(path) + + @staticmethod + def _render_version(): + """ + Render the Setuptools version and installation details, then exit. + """ + ver = sys.version[:3] + dist = get_distribution('setuptools') + tmpl = 'setuptools {dist.version} from {dist.location} (Python {ver})' + print(tmpl.format(**locals())) + raise SystemExit() + + def finalize_options(self): + self.version and self._render_version() + + py_version = sys.version.split()[0] + prefix, exec_prefix = get_config_vars('prefix', 'exec_prefix') + + self.config_vars = { + 'dist_name': self.distribution.get_name(), + 'dist_version': self.distribution.get_version(), + 'dist_fullname': self.distribution.get_fullname(), + 'py_version': py_version, + 'py_version_short': py_version[0:3], + 'py_version_nodot': py_version[0] + py_version[2], + 'sys_prefix': prefix, + 'prefix': prefix, + 'sys_exec_prefix': exec_prefix, + 'exec_prefix': exec_prefix, + # Only python 3.2+ has abiflags + 'abiflags': getattr(sys, 'abiflags', ''), + } + + if site.ENABLE_USER_SITE: + self.config_vars['userbase'] = self.install_userbase + self.config_vars['usersite'] = self.install_usersite + + self._fix_install_dir_for_user_site() + + self.expand_basedirs() + self.expand_dirs() + + if self.install_layout: + if not self.install_layout.lower() in ['deb']: + raise DistutilsOptionError("unknown value for --install-layout") + self.install_layout = self.install_layout.lower() + import sysconfig + if sys.version_info[:2] >= (3, 3): + self.multiarch = sysconfig.get_config_var('MULTIARCH') + self._expand('install_dir', 'script_dir', 'build_directory', + 'site_dirs') + # If a non-default installation directory was specified, default the + # script directory to match it. + if self.script_dir is None: + self.script_dir = self.install_dir + + if self.no_find_links is None: + self.no_find_links = False + + # Let install_dir get set by install_lib command, which in turn + # gets its info from the install command, and takes into account + # --prefix and --home and all that other crud. + self.set_undefined_options( + 'install_lib', ('install_dir', 'install_dir') + ) + # Likewise, set default script_dir from 'install_scripts.install_dir' + self.set_undefined_options( + 'install_scripts', ('install_dir', 'script_dir') + ) + + if self.user and self.install_purelib: + self.install_dir = self.install_purelib + self.script_dir = self.install_scripts + + if self.prefix == '/usr' and not self.force_installation_into_system_dir: + raise DistutilsOptionError("""installation into /usr + +Trying to install into the system managed parts of the file system. Please +consider to install to another location, or use the option +--force-installation-into-system-dir to overwrite this warning. +""") + + # default --record from the install command + self.set_undefined_options('install', ('record', 'record')) + # Should this be moved to the if statement below? 
It's not used
+        # elsewhere
+        # (list() matters on Python 3: a bare map object is an iterator
+        # and would be exhausted after the first membership test below)
+        normpath = list(map(normalize_path, sys.path))
+        self.all_site_dirs = get_site_dirs()
+        if self.site_dirs is not None:
+            site_dirs = [
+                os.path.expanduser(s.strip()) for s in
+                self.site_dirs.split(',')
+            ]
+            for d in site_dirs:
+                if not os.path.isdir(d):
+                    log.warn("%s (in --site-dirs) does not exist", d)
+                elif normalize_path(d) not in normpath:
+                    raise DistutilsOptionError(
+                        d + " (in --site-dirs) is not on sys.path"
+                    )
+                else:
+                    self.all_site_dirs.append(normalize_path(d))
+        if not self.editable:
+            self.check_site_dir()
+        self.index_url = self.index_url or "https://pypi.python.org/simple"
+        self.shadow_path = self.all_site_dirs[:]
+        for path_item in self.install_dir, normalize_path(self.script_dir):
+            if path_item not in self.shadow_path:
+                self.shadow_path.insert(0, path_item)
+
+        if self.allow_hosts is not None:
+            hosts = [s.strip() for s in self.allow_hosts.split(',')]
+        else:
+            hosts = ['*']
+        if self.package_index is None:
+            self.package_index = self.create_index(
+                self.index_url, search_path=self.shadow_path, hosts=hosts,
+            )
+        self.local_index = Environment(self.shadow_path + sys.path)
+
+        if self.find_links is not None:
+            if isinstance(self.find_links, six.string_types):
+                self.find_links = self.find_links.split()
+        else:
+            self.find_links = []
+        if self.local_snapshots_ok:
+            self.package_index.scan_egg_links(self.shadow_path + sys.path)
+        if not self.no_find_links:
+            self.package_index.add_find_links(self.find_links)
+        self.set_undefined_options('install_lib', ('optimize', 'optimize'))
+        if not isinstance(self.optimize, int):
+            try:
+                self.optimize = int(self.optimize)
+                if not (0 <= self.optimize <= 2):
+                    raise ValueError
+            except ValueError:
+                raise DistutilsOptionError("--optimize must be 0, 1, or 2")
+
+        if self.editable and not self.build_directory:
+            raise DistutilsArgError(
+                "Must specify a build directory (-b) when using --editable"
+            )
+        if not self.args:
+            raise DistutilsArgError(
+                "No urls, filenames, or requirements specified (see --help)")
+
+        self.outputs = []
+
+    def _fix_install_dir_for_user_site(self):
+        """
+        Fix the install_dir if "--user" was used.
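+
+        On posix this means switching to the 'unix_user' install scheme
+        rooted at site.USER_BASE (typically ~/.local), e.g. pure modules
+        then land in ~/.local/lib/pythonX.Y/site-packages.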
+ """ + if not self.user or not site.ENABLE_USER_SITE: + return + + self.create_home_path() + if self.install_userbase is None: + msg = "User base directory is not specified" + raise DistutilsPlatformError(msg) + self.install_base = self.install_platbase = self.install_userbase + scheme_name = os.name.replace('posix', 'unix') + '_user' + self.select_scheme(scheme_name) + + def _expand_attrs(self, attrs): + for attr in attrs: + val = getattr(self, attr) + if val is not None: + if os.name == 'posix' or os.name == 'nt': + val = os.path.expanduser(val) + val = subst_vars(val, self.config_vars) + setattr(self, attr, val) + + def expand_basedirs(self): + """Calls `os.path.expanduser` on install_base, install_platbase and + root.""" + self._expand_attrs(['install_base', 'install_platbase', 'root']) + + def expand_dirs(self): + """Calls `os.path.expanduser` on install dirs.""" + self._expand_attrs(['install_purelib', 'install_platlib', + 'install_lib', 'install_headers', + 'install_scripts', 'install_data', ]) + + def run(self): + if self.verbose != self.distribution.verbose: + log.set_verbosity(self.verbose) + try: + for spec in self.args: + self.easy_install(spec, not self.no_deps) + if self.record: + outputs = self.outputs + if self.root: # strip any package prefix + root_len = len(self.root) + for counter in range(len(outputs)): + outputs[counter] = outputs[counter][root_len:] + from distutils import file_util + + self.execute( + file_util.write_file, (self.record, outputs), + "writing list of installed files to '%s'" % + self.record + ) + self.warn_deprecated_options() + finally: + log.set_verbosity(self.distribution.verbose) + + def pseudo_tempname(self): + """Return a pseudo-tempname base in the install directory. + This code is intentionally naive; if a malicious party can write to + the target directory you're already in deep doodoo. + """ + try: + pid = os.getpid() + except: + pid = random.randint(0, sys.maxsize) + return os.path.join(self.install_dir, "test-easy-install-%s" % pid) + + def warn_deprecated_options(self): + pass + + def check_site_dir(self): + """Verify that self.install_dir is .pth-capable dir, if needed""" + + instdir = normalize_path(self.install_dir) + pth_file = os.path.join(instdir, 'easy-install.pth') + + # Is it a configured, PYTHONPATH, implicit, or explicit site dir? + is_site_dir = instdir in self.all_site_dirs + + if not is_site_dir and not self.multi_version: + # No? 
Then directly test whether it does .pth file processing + is_site_dir = self.check_pth_processing() + else: + # make sure we can write to target dir + testfile = self.pseudo_tempname() + '.write-test' + test_exists = os.path.exists(testfile) + try: + if test_exists: + os.unlink(testfile) + open(testfile, 'w').close() + os.unlink(testfile) + except (OSError, IOError): + self.cant_write_to_target() + + if not is_site_dir and not self.multi_version: + # Can't install non-multi to non-site dir + raise DistutilsError(self.no_default_version_msg()) + + if is_site_dir: + if self.pth_file is None: + self.pth_file = PthDistributions(pth_file, self.all_site_dirs) + else: + self.pth_file = None + + PYTHONPATH = os.environ.get('PYTHONPATH', '').split(os.pathsep) + if instdir not in map(normalize_path, filter(None, PYTHONPATH)): + # only PYTHONPATH dirs need a site.py, so pretend it's there + self.sitepy_installed = True + elif self.multi_version and not os.path.exists(pth_file): + self.sitepy_installed = True # don't need site.py in this case + self.pth_file = None # and don't create a .pth file + self.install_dir = instdir + + __cant_write_msg = textwrap.dedent(""" + can't create or remove files in install directory + + The following error occurred while trying to add or remove files in the + installation directory: + + %s + + The installation directory you specified (via --install-dir, --prefix, or + the distutils default setting) was: + + %s + """).lstrip() + + __not_exists_id = textwrap.dedent(""" + This directory does not currently exist. Please create it and try again, or + choose a different installation directory (using the -d or --install-dir + option). + """).lstrip() + + __access_msg = textwrap.dedent(""" + Perhaps your account does not have write access to this directory? If the + installation directory is a system-owned directory, you may need to sign in + as the administrator or "root" account. If you do not have administrative + access to this machine, you may wish to choose a different installation + directory, preferably one that is listed in your PYTHONPATH environment + variable. + + For information on other options, you may wish to consult the + documentation at: + + https://pythonhosted.org/setuptools/easy_install.html + + Please make the appropriate changes for your system and try again. + """).lstrip() + + def cant_write_to_target(self): + msg = self.__cant_write_msg % (sys.exc_info()[1], self.install_dir,) + + if not os.path.exists(self.install_dir): + msg += '\n' + self.__not_exists_id + else: + msg += '\n' + self.__access_msg + raise DistutilsError(msg) + + def check_pth_processing(self): + """Empirically verify whether .pth files are supported in inst. 
dir""" + instdir = self.install_dir + log.info("Checking .pth file support in %s", instdir) + pth_file = self.pseudo_tempname() + ".pth" + ok_file = pth_file + '.ok' + ok_exists = os.path.exists(ok_file) + try: + if ok_exists: + os.unlink(ok_file) + dirname = os.path.dirname(ok_file) + if not os.path.exists(dirname): + os.makedirs(dirname) + f = open(pth_file, 'w') + except (OSError, IOError): + self.cant_write_to_target() + else: + try: + f.write("import os; f = open(%r, 'w'); f.write('OK'); " + "f.close()\n" % (ok_file,)) + f.close() + f = None + executable = sys.executable + if os.name == 'nt': + dirname, basename = os.path.split(executable) + alt = os.path.join(dirname, 'pythonw.exe') + if (basename.lower() == 'python.exe' and + os.path.exists(alt)): + # use pythonw.exe to avoid opening a console window + executable = alt + + from distutils.spawn import spawn + + spawn([executable, '-E', '-c', 'pass'], 0) + + if os.path.exists(ok_file): + log.info( + "TEST PASSED: %s appears to support .pth files", + instdir + ) + return True + finally: + if f: + f.close() + if os.path.exists(ok_file): + os.unlink(ok_file) + if os.path.exists(pth_file): + os.unlink(pth_file) + if not self.multi_version: + log.warn("TEST FAILED: %s does NOT support .pth files", instdir) + return False + + def install_egg_scripts(self, dist): + """Write all the scripts for `dist`, unless scripts are excluded""" + if not self.exclude_scripts and dist.metadata_isdir('scripts'): + for script_name in dist.metadata_listdir('scripts'): + if dist.metadata_isdir('scripts/' + script_name): + # The "script" is a directory, likely a Python 3 + # __pycache__ directory, so skip it. + continue + self.install_script( + dist, script_name, + dist.get_metadata('scripts/' + script_name) + ) + self.install_wrapper_scripts(dist) + + def add_output(self, path): + if os.path.isdir(path): + for base, dirs, files in os.walk(path): + for filename in files: + self.outputs.append(os.path.join(base, filename)) + else: + self.outputs.append(path) + + def not_editable(self, spec): + if self.editable: + raise DistutilsArgError( + "Invalid argument %r: you can't use filenames or URLs " + "with --editable (except via the --find-links option)." 
+ % (spec,) + ) + + def check_editable(self, spec): + if not self.editable: + return + + if os.path.exists(os.path.join(self.build_directory, spec.key)): + raise DistutilsArgError( + "%r already exists in %s; can't do a checkout there" % + (spec.key, self.build_directory) + ) + + def easy_install(self, spec, deps=False): + tmpdir = tempfile.mkdtemp(prefix="easy_install-") + download = None + if not self.editable: + self.install_site_py() + + try: + if not isinstance(spec, Requirement): + if URL_SCHEME(spec): + # It's a url, download it to tmpdir and process + self.not_editable(spec) + download = self.package_index.download(spec, tmpdir) + return self.install_item(None, download, tmpdir, deps, + True) + + elif os.path.exists(spec): + # Existing file or directory, just process it directly + self.not_editable(spec) + return self.install_item(None, spec, tmpdir, deps, True) + else: + spec = parse_requirement_arg(spec) + + self.check_editable(spec) + dist = self.package_index.fetch_distribution( + spec, tmpdir, self.upgrade, self.editable, + not self.always_copy, self.local_index + ) + if dist is None: + msg = "Could not find suitable distribution for %r" % spec + if self.always_copy: + msg += " (--always-copy skips system and development eggs)" + raise DistutilsError(msg) + elif dist.precedence == DEVELOP_DIST: + # .egg-info dists don't need installing, just process deps + self.process_distribution(spec, dist, deps, "Using") + return dist + else: + return self.install_item(spec, dist.location, tmpdir, deps) + + finally: + if os.path.exists(tmpdir): + rmtree(tmpdir) + + def install_item(self, spec, download, tmpdir, deps, install_needed=False): + + # Installation is also needed if file in tmpdir or is not an egg + install_needed = install_needed or self.always_copy + install_needed = install_needed or os.path.dirname(download) == tmpdir + install_needed = install_needed or not download.endswith('.egg') + install_needed = install_needed or ( + self.always_copy_from is not None and + os.path.dirname(normalize_path(download)) == + normalize_path(self.always_copy_from) + ) + + if spec and not install_needed: + # at this point, we know it's a local .egg, we just don't know if + # it's already installed. + for dist in self.local_index[spec.project_name]: + if dist.location == download: + break + else: + install_needed = True # it's not in the local index + + log.info("Processing %s", os.path.basename(download)) + + if install_needed: + dists = self.install_eggs(spec, download, tmpdir) + for dist in dists: + self.process_distribution(spec, dist, deps) + else: + dists = [self.egg_distribution(download)] + self.process_distribution(spec, dists[0], deps, "Using") + + if spec is not None: + for dist in dists: + if dist in spec: + return dist + + def select_scheme(self, name): + """Sets the install directories by applying the install schemes.""" + # it's the caller's problem if they supply a bad name! 
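+        # e.g. select_scheme('unix_user') fills any still-unset
+        # install_purelib/platlib/headers/scripts/data attributes from
+        # distutils' INSTALL_SCHEMES['unix_user'] entry.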
+ scheme = INSTALL_SCHEMES[name] + for key in SCHEME_KEYS: + attrname = 'install_' + key + if getattr(self, attrname) is None: + setattr(self, attrname, scheme[key]) + + def process_distribution(self, requirement, dist, deps=True, *info): + self.update_pth(dist) + self.package_index.add(dist) + if dist in self.local_index[dist.key]: + self.local_index.remove(dist) + self.local_index.add(dist) + self.install_egg_scripts(dist) + self.installed_projects[dist.key] = dist + log.info(self.installation_report(requirement, dist, *info)) + if (dist.has_metadata('dependency_links.txt') and + not self.no_find_links): + self.package_index.add_find_links( + dist.get_metadata_lines('dependency_links.txt') + ) + if not deps and not self.always_copy: + return + elif requirement is not None and dist.key != requirement.key: + log.warn("Skipping dependencies for %s", dist) + return # XXX this is not the distribution we were looking for + elif requirement is None or dist not in requirement: + # if we wound up with a different version, resolve what we've got + distreq = dist.as_requirement() + requirement = Requirement(str(distreq)) + log.info("Processing dependencies for %s", requirement) + try: + distros = WorkingSet([]).resolve( + [requirement], self.local_index, self.easy_install + ) + except DistributionNotFound as e: + raise DistutilsError(str(e)) + except VersionConflict as e: + raise DistutilsError(e.report()) + if self.always_copy or self.always_copy_from: + # Force all the relevant distros to be copied or activated + for dist in distros: + if dist.key not in self.installed_projects: + self.easy_install(dist.as_requirement()) + log.info("Finished processing dependencies for %s", requirement) + + def should_unzip(self, dist): + if self.zip_ok is not None: + return not self.zip_ok + if dist.has_metadata('not-zip-safe'): + return True + if not dist.has_metadata('zip-safe'): + return True + return False + + def maybe_move(self, spec, dist_filename, setup_base): + dst = os.path.join(self.build_directory, spec.key) + if os.path.exists(dst): + msg = ("%r already exists in %s; build directory %s will not be " + "kept") + log.warn(msg, spec.key, self.build_directory, setup_base) + return setup_base + if os.path.isdir(dist_filename): + setup_base = dist_filename + else: + if os.path.dirname(dist_filename) == setup_base: + os.unlink(dist_filename) # get it out of the tmp dir + contents = os.listdir(setup_base) + if len(contents) == 1: + dist_filename = os.path.join(setup_base, contents[0]) + if os.path.isdir(dist_filename): + # if the only thing there is a directory, move it instead + setup_base = dist_filename + ensure_directory(dst) + shutil.move(setup_base, dst) + return dst + + def install_wrapper_scripts(self, dist): + if self.exclude_scripts: + return + for args in ScriptWriter.best().get_args(dist): + self.write_script(*args) + + def install_script(self, dist, script_name, script_text, dev_path=None): + """Generate a legacy script wrapper and install it""" + spec = str(dist.as_requirement()) + is_script = is_python_script(script_text, script_name) + + if is_script: + body = self._load_template(dev_path) % locals() + script_text = ScriptWriter.get_header(script_text) + body + self.write_script(script_name, _to_ascii(script_text), 'b') + + @staticmethod + def _load_template(dev_path): + """ + There are a couple of template scripts in the package. This + function loads one of them and prepares it for use. 
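+
+        e.g. _load_template(None) returns the text of 'script.tmpl', while
+        a truthy dev_path selects 'script (dev).tmpl'; either template is
+        read from the setuptools package data and decoded as UTF-8.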
+ """ + # See https://github.com/pypa/setuptools/issues/134 for info + # on script file naming and downstream issues with SVR4 + name = 'script.tmpl' + if dev_path: + name = name.replace('.tmpl', ' (dev).tmpl') + + raw_bytes = resource_string('setuptools', name) + return raw_bytes.decode('utf-8') + + def write_script(self, script_name, contents, mode="t", blockers=()): + """Write an executable file to the scripts directory""" + self.delete_blockers( # clean up old .py/.pyw w/o a script + [os.path.join(self.script_dir, x) for x in blockers] + ) + log.info("Installing %s script to %s", script_name, self.script_dir) + target = os.path.join(self.script_dir, script_name) + self.add_output(target) + + mask = current_umask() + if not self.dry_run: + ensure_directory(target) + if os.path.exists(target): + os.unlink(target) + with open(target, "w" + mode) as f: + f.write(contents) + chmod(target, 0o777 - mask) + + def install_eggs(self, spec, dist_filename, tmpdir): + # .egg dirs or files are already built, so just return them + if dist_filename.lower().endswith('.egg'): + return [self.install_egg(dist_filename, tmpdir)] + elif dist_filename.lower().endswith('.exe'): + return [self.install_exe(dist_filename, tmpdir)] + + # Anything else, try to extract and build + setup_base = tmpdir + if os.path.isfile(dist_filename) and not dist_filename.endswith('.py'): + unpack_archive(dist_filename, tmpdir, self.unpack_progress) + elif os.path.isdir(dist_filename): + setup_base = os.path.abspath(dist_filename) + + if (setup_base.startswith(tmpdir) # something we downloaded + and self.build_directory and spec is not None): + setup_base = self.maybe_move(spec, dist_filename, setup_base) + + # Find the setup.py file + setup_script = os.path.join(setup_base, 'setup.py') + + if not os.path.exists(setup_script): + setups = glob(os.path.join(setup_base, '*', 'setup.py')) + if not setups: + raise DistutilsError( + "Couldn't find a setup script in %s" % + os.path.abspath(dist_filename) + ) + if len(setups) > 1: + raise DistutilsError( + "Multiple setup scripts in %s" % + os.path.abspath(dist_filename) + ) + setup_script = setups[0] + + # Now run it, and return the result + if self.editable: + log.info(self.report_editable(spec, setup_script)) + return [] + else: + return self.build_and_install(setup_script, setup_base) + + def egg_distribution(self, egg_path): + if os.path.isdir(egg_path): + metadata = PathMetadata(egg_path, os.path.join(egg_path, + 'EGG-INFO')) + else: + metadata = EggMetadata(zipimport.zipimporter(egg_path)) + return Distribution.from_filename(egg_path, metadata=metadata) + + def install_egg(self, egg_path, tmpdir): + destination = os.path.join(self.install_dir, + os.path.basename(egg_path)) + destination = os.path.abspath(destination) + if not self.dry_run: + ensure_directory(destination) + + dist = self.egg_distribution(egg_path) + if not samefile(egg_path, destination): + if os.path.isdir(destination) and not os.path.islink(destination): + dir_util.remove_tree(destination, dry_run=self.dry_run) + elif os.path.exists(destination): + self.execute(os.unlink, (destination,), "Removing " + + destination) + try: + new_dist_is_zipped = False + if os.path.isdir(egg_path): + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = shutil.copytree, "Copying" + elif self.should_unzip(dist): + self.mkpath(destination) + f, m = self.unpack_and_compile, "Extracting" + else: + new_dist_is_zipped = True + if egg_path.startswith(tmpdir): + f, m = shutil.move, "Moving" + else: + f, m = 
shutil.copy2, "Copying" + self.execute(f, (egg_path, destination), + (m + " %s to %s") % + (os.path.basename(egg_path), + os.path.dirname(destination))) + update_dist_caches(destination, + fix_zipimporter_caches=new_dist_is_zipped) + except: + update_dist_caches(destination, fix_zipimporter_caches=False) + raise + + self.add_output(destination) + return self.egg_distribution(destination) + + def install_exe(self, dist_filename, tmpdir): + # See if it's valid, get data + cfg = extract_wininst_cfg(dist_filename) + if cfg is None: + raise DistutilsError( + "%s is not a valid distutils Windows .exe" % dist_filename + ) + # Create a dummy distribution object until we build the real distro + dist = Distribution( + None, + project_name=cfg.get('metadata', 'name'), + version=cfg.get('metadata', 'version'), platform=get_platform(), + ) + + # Convert the .exe to an unpacked egg + egg_path = dist.location = os.path.join(tmpdir, dist.egg_name() + + '.egg') + egg_tmp = egg_path + '.tmp' + _egg_info = os.path.join(egg_tmp, 'EGG-INFO') + pkg_inf = os.path.join(_egg_info, 'PKG-INFO') + ensure_directory(pkg_inf) # make sure EGG-INFO dir exists + dist._provider = PathMetadata(egg_tmp, _egg_info) # XXX + self.exe_to_egg(dist_filename, egg_tmp) + + # Write EGG-INFO/PKG-INFO + if not os.path.exists(pkg_inf): + f = open(pkg_inf, 'w') + f.write('Metadata-Version: 1.0\n') + for k, v in cfg.items('metadata'): + if k != 'target_version': + f.write('%s: %s\n' % (k.replace('_', '-').title(), v)) + f.close() + script_dir = os.path.join(_egg_info, 'scripts') + # delete entry-point scripts to avoid duping + self.delete_blockers( + [os.path.join(script_dir, args[0]) for args in + ScriptWriter.get_args(dist)] + ) + # Build .egg file from tmpdir + bdist_egg.make_zipfile( + egg_path, egg_tmp, verbose=self.verbose, dry_run=self.dry_run + ) + # install the .egg + return self.install_egg(egg_path, tmpdir) + + def exe_to_egg(self, dist_filename, egg_tmp): + """Extract a bdist_wininst to the directories an egg would use""" + # Check for .pth file and set up prefix translations + prefixes = get_exe_prefixes(dist_filename) + to_compile = [] + native_libs = [] + top_level = {} + + def process(src, dst): + s = src.lower() + for old, new in prefixes: + if s.startswith(old): + src = new + src[len(old):] + parts = src.split('/') + dst = os.path.join(egg_tmp, *parts) + dl = dst.lower() + if dl.endswith('.pyd') or dl.endswith('.dll'): + parts[-1] = bdist_egg.strip_module(parts[-1]) + top_level[os.path.splitext(parts[0])[0]] = 1 + native_libs.append(src) + elif dl.endswith('.py') and old != 'SCRIPTS/': + top_level[os.path.splitext(parts[0])[0]] = 1 + to_compile.append(dst) + return dst + if not src.endswith('.pth'): + log.warn("WARNING: can't process %s", src) + return None + + # extract, tracking .pyd/.dll->native_libs and .py -> to_compile + unpack_archive(dist_filename, egg_tmp, process) + stubs = [] + for res in native_libs: + if res.lower().endswith('.pyd'): # create stubs for .pyd's + parts = res.split('/') + resource = parts[-1] + parts[-1] = bdist_egg.strip_module(parts[-1]) + '.py' + pyfile = os.path.join(egg_tmp, *parts) + to_compile.append(pyfile) + stubs.append(pyfile) + bdist_egg.write_stub(resource, pyfile) + self.byte_compile(to_compile) # compile .py's + bdist_egg.write_safety_flag( + os.path.join(egg_tmp, 'EGG-INFO'), + bdist_egg.analyze_egg(egg_tmp, stubs)) # write zip-safety flag + + for name in 'top_level', 'native_libs': + if locals()[name]: + txt = os.path.join(egg_tmp, 'EGG-INFO', name + '.txt') + if not 
os.path.exists(txt): + f = open(txt, 'w') + f.write('\n'.join(locals()[name]) + '\n') + f.close() + + __mv_warning = textwrap.dedent(""" + Because this distribution was installed --multi-version, before you can + import modules from this package in an application, you will need to + 'import pkg_resources' and then use a 'require()' call similar to one of + these examples, in order to select the desired version: + + pkg_resources.require("%(name)s") # latest installed version + pkg_resources.require("%(name)s==%(version)s") # this exact version + pkg_resources.require("%(name)s>=%(version)s") # this version or higher + """).lstrip() + + __id_warning = textwrap.dedent(""" + Note also that the installation directory must be on sys.path at runtime for + this to work. (e.g. by being the application's script directory, by being on + PYTHONPATH, or by being added to sys.path by your code.) + """) + + def installation_report(self, req, dist, what="Installed"): + """Helpful installation message for display to package users""" + msg = "\n%(what)s %(eggloc)s%(extras)s" + if self.multi_version and not self.no_report: + msg += '\n' + self.__mv_warning + if self.install_dir not in map(normalize_path, sys.path): + msg += '\n' + self.__id_warning + + eggloc = dist.location + name = dist.project_name + version = dist.version + extras = '' # TODO: self.report_extras(req, dist) + return msg % locals() + + __editable_msg = textwrap.dedent(""" + Extracted editable version of %(spec)s to %(dirname)s + + If it uses setuptools in its setup script, you can activate it in + "development" mode by going to that directory and running:: + + %(python)s setup.py develop + + See the setuptools documentation for the "develop" command for more info. + """).lstrip() + + def report_editable(self, spec, setup_script): + dirname = os.path.dirname(setup_script) + python = sys.executable + return '\n' + self.__editable_msg % locals() + + def run_setup(self, setup_script, setup_base, args): + sys.modules.setdefault('distutils.command.bdist_egg', bdist_egg) + sys.modules.setdefault('distutils.command.egg_info', egg_info) + + args = list(args) + if self.verbose > 2: + v = 'v' * (self.verbose - 1) + args.insert(0, '-' + v) + elif self.verbose < 2: + args.insert(0, '-q') + if self.dry_run: + args.insert(0, '-n') + log.info( + "Running %s %s", setup_script[len(setup_base) + 1:], ' '.join(args) + ) + try: + run_setup(setup_script, args) + except SystemExit as v: + raise DistutilsError("Setup script exited with %s" % (v.args[0],)) + + def build_and_install(self, setup_script, setup_base): + args = ['bdist_egg', '--dist-dir'] + + dist_dir = tempfile.mkdtemp( + prefix='egg-dist-tmp-', dir=os.path.dirname(setup_script) + ) + try: + self._set_fetcher_options(os.path.dirname(setup_script)) + args.append(dist_dir) + + self.run_setup(setup_script, setup_base, args) + all_eggs = Environment([dist_dir]) + eggs = [] + for key in all_eggs: + for dist in all_eggs[key]: + eggs.append(self.install_egg(dist.location, setup_base)) + if not eggs and not self.dry_run: + log.warn("No eggs found in %s (setup script problem?)", + dist_dir) + return eggs + finally: + rmtree(dist_dir) + log.set_verbosity(self.verbose) # restore our log verbosity + + def _set_fetcher_options(self, base): + """ + When easy_install is about to run bdist_egg on a source dist, that + source dist might have 'setup_requires' directives, requiring + additional fetching. Ensure the fetcher options given to easy_install + are available to that command as well. 
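+
+        Concretely: any find_links, site_dirs, index_url, optimize or
+        allow_hosts values given to easy_install are written to a
+        setup.cfg beside the source dist's setup.py, so the nested
+        bdist_egg run resolves its setup_requires from the same sources.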
+ """ + # find the fetch options from easy_install and write them out + # to the setup.cfg file. + ei_opts = self.distribution.get_option_dict('easy_install').copy() + fetch_directives = ( + 'find_links', 'site_dirs', 'index_url', 'optimize', + 'site_dirs', 'allow_hosts', + ) + fetch_options = {} + for key, val in ei_opts.items(): + if key not in fetch_directives: + continue + fetch_options[key.replace('_', '-')] = val[1] + # create a settings dictionary suitable for `edit_config` + settings = dict(easy_install=fetch_options) + cfg_filename = os.path.join(base, 'setup.cfg') + setopt.edit_config(cfg_filename, settings) + + def update_pth(self, dist): + if self.pth_file is None: + return + + for d in self.pth_file[dist.key]: # drop old entries + if self.multi_version or d.location != dist.location: + log.info("Removing %s from easy-install.pth file", d) + self.pth_file.remove(d) + if d.location in self.shadow_path: + self.shadow_path.remove(d.location) + + if not self.multi_version: + if dist.location in self.pth_file.paths: + log.info( + "%s is already the active version in easy-install.pth", + dist + ) + else: + log.info("Adding %s to easy-install.pth file", dist) + self.pth_file.add(dist) # add new entry + if dist.location not in self.shadow_path: + self.shadow_path.append(dist.location) + + if not self.dry_run: + + self.pth_file.save() + + if dist.key == 'setuptools': + # Ensure that setuptools itself never becomes unavailable! + # XXX should this check for latest version? + filename = os.path.join(self.install_dir, 'setuptools.pth') + if os.path.islink(filename): + os.unlink(filename) + f = open(filename, 'wt') + f.write(self.pth_file.make_relative(dist.location) + '\n') + f.close() + + def unpack_progress(self, src, dst): + # Progress filter for unpacking + log.debug("Unpacking %s to %s", src, dst) + return dst # only unpack-and-compile skips files for dry run + + def unpack_and_compile(self, egg_path, destination): + to_compile = [] + to_chmod = [] + + def pf(src, dst): + if dst.endswith('.py') and not src.startswith('EGG-INFO/'): + to_compile.append(dst) + elif dst.endswith('.dll') or dst.endswith('.so'): + to_chmod.append(dst) + self.unpack_progress(src, dst) + return not self.dry_run and dst or None + + unpack_archive(egg_path, destination, pf) + self.byte_compile(to_compile) + if not self.dry_run: + for f in to_chmod: + mode = ((os.stat(f)[stat.ST_MODE]) | 0o555) & 0o7755 + chmod(f, mode) + + def byte_compile(self, to_compile): + if sys.dont_write_bytecode: + self.warn('byte-compiling is disabled, skipping.') + return + + from distutils.util import byte_compile + + try: + # try to make the byte compile messages quieter + log.set_verbosity(self.verbose - 1) + + byte_compile(to_compile, optimize=0, force=1, dry_run=self.dry_run) + if self.optimize: + byte_compile( + to_compile, optimize=self.optimize, force=1, + dry_run=self.dry_run + ) + finally: + log.set_verbosity(self.verbose) # restore original verbosity + + __no_default_msg = textwrap.dedent(""" + bad install directory or PYTHONPATH + + You are attempting to install a package to a directory that is not + on PYTHONPATH and which Python does not read ".pth" files from. 
The + installation directory you specified (via --install-dir, --prefix, or + the distutils default setting) was: + + %s + + and your PYTHONPATH environment variable currently contains: + + %r + + Here are some of your options for correcting the problem: + + * You can choose a different installation directory, i.e., one that is + on PYTHONPATH or supports .pth files + + * You can add the installation directory to the PYTHONPATH environment + variable. (It must then also be on PYTHONPATH whenever you run + Python and want to use the package(s) you are installing.) + + * You can set up the installation directory to support ".pth" files by + using one of the approaches described here: + + https://pythonhosted.org/setuptools/easy_install.html#custom-installation-locations + + Please make the appropriate changes for your system and try again.""").lstrip() + + def no_default_version_msg(self): + template = self.__no_default_msg + return template % (self.install_dir, os.environ.get('PYTHONPATH', '')) + + def install_site_py(self): + """Make sure there's a site.py in the target dir, if needed""" + + if self.sitepy_installed: + return # already did it, or don't need to + + sitepy = os.path.join(self.install_dir, "site.py") + source = resource_string("setuptools", "site-patch.py") + source = source.decode('utf-8') + current = "" + + if os.path.exists(sitepy): + log.debug("Checking existing site.py in %s", self.install_dir) + with io.open(sitepy) as strm: + current = strm.read() + + if not current.startswith('def __boot():'): + raise DistutilsError( + "%s is not a setuptools-generated site.py; please" + " remove it." % sitepy + ) + + if current != source: + log.info("Creating %s", sitepy) + if not self.dry_run: + ensure_directory(sitepy) + with io.open(sitepy, 'w', encoding='utf-8') as strm: + strm.write(source) + self.byte_compile([sitepy]) + + self.sitepy_installed = True + + def create_home_path(self): + """Create directories under ~.""" + if not self.user: + return + home = convert_path(os.path.expanduser("~")) + for name, path in six.iteritems(self.config_vars): + if path.startswith(home) and not os.path.isdir(path): + self.debug_print("os.makedirs('%s', 0o700)" % path) + os.makedirs(path, 0o700) + + if sys.version[:3] in ('2.3', '2.4', '2.5') or 'real_prefix' in sys.__dict__: + sitedir_name = 'site-packages' + else: + sitedir_name = 'dist-packages' + + INSTALL_SCHEMES = dict( + posix=dict( + install_dir='$base/lib/python$py_version_short/site-packages', + script_dir='$base/bin', + ), + unix_local = dict( + install_dir = '$base/local/lib/python$py_version_short/%s' % sitedir_name, + script_dir = '$base/local/bin', + ), + posix_local = dict( + install_dir = '$base/local/lib/python$py_version_short/%s' % sitedir_name, + script_dir = '$base/local/bin', + ), + deb_system = dict( + install_dir = '$base/lib/python3/%s' % sitedir_name, + script_dir = '$base/bin', + ), + ) + + DEFAULT_SCHEME = dict( + install_dir='$base/Lib/site-packages', + script_dir='$base/Scripts', + ) + + def _expand(self, *attrs): + config_vars = self.get_finalized_command('install').config_vars + + if self.prefix or self.install_layout: + if self.install_layout and self.install_layout in ['deb']: + scheme_name = "deb_system" + self.prefix = '/usr' + elif self.prefix or 'real_prefix' in sys.__dict__: + scheme_name = os.name + else: + scheme_name = "posix_local" + # Set default install_dir/scripts from --prefix + config_vars = config_vars.copy() + config_vars['base'] = self.prefix + scheme = 
self.INSTALL_SCHEMES.get(scheme_name,self.DEFAULT_SCHEME) + for attr, val in scheme.items(): + if getattr(self, attr, None) is None: + setattr(self, attr, val) + + from distutils.util import subst_vars + + for attr in attrs: + val = getattr(self, attr) + if val is not None: + val = subst_vars(val, config_vars) + if os.name == 'posix': + val = os.path.expanduser(val) + setattr(self, attr, val) + + +def get_site_dirs(): + # return a list of 'site' dirs + sitedirs = [_f for _f in os.environ.get('PYTHONPATH', + '').split(os.pathsep) if _f] + prefixes = [sys.prefix] + if sys.exec_prefix != sys.prefix: + prefixes.append(sys.exec_prefix) + for prefix in prefixes: + if prefix: + if sys.platform in ('os2emx', 'riscos'): + sitedirs.append(os.path.join(prefix, "Lib", "site-packages")) + elif os.sep == '/': + sitedirs.extend([os.path.join(prefix, + "lib", + "python" + sys.version[:3], + "site-packages"), + os.path.join(prefix, "lib", "site-python")]) + else: + if sys.version[:3] in ('2.3', '2.4', '2.5'): + sdir = "site-packages" + else: + sdir = "dist-packages" + sitedirs.extend( + [os.path.join(prefix, "local/lib", "python" + sys.version[:3], sdir), + os.path.join(prefix, "lib", "python" + sys.version[:3], sdir)] + ) + if sys.platform == 'darwin': + # for framework builds *only* we add the standard Apple + # locations. Currently only per-user, but /Library and + # /Network/Library could be added too + if 'Python.framework' in prefix: + home = os.environ.get('HOME') + if home: + sitedirs.append( + os.path.join(home, + 'Library', + 'Python', + sys.version[:3], + 'site-packages')) + lib_paths = get_path('purelib'), get_path('platlib') + for site_lib in lib_paths: + if site_lib not in sitedirs: + sitedirs.append(site_lib) + + if site.ENABLE_USER_SITE: + sitedirs.append(site.USER_SITE) + + sitedirs = list(map(normalize_path, sitedirs)) + + return sitedirs + + +def expand_paths(inputs): + """Yield sys.path directories that might contain "old-style" packages""" + + seen = {} + + for dirname in inputs: + dirname = normalize_path(dirname) + if dirname in seen: + continue + + seen[dirname] = 1 + if not os.path.isdir(dirname): + continue + + files = os.listdir(dirname) + yield dirname, files + + for name in files: + if not name.endswith('.pth'): + # We only care about the .pth files + continue + if name in ('easy-install.pth', 'setuptools.pth'): + # Ignore .pth files that we control + continue + + # Read the .pth file + f = open(os.path.join(dirname, name)) + lines = list(yield_lines(f)) + f.close() + + # Yield existing non-dupe, non-import directory lines from it + for line in lines: + if not line.startswith("import"): + line = normalize_path(line.rstrip()) + if line not in seen: + seen[line] = 1 + if not os.path.isdir(line): + continue + yield line, os.listdir(line) + + +def extract_wininst_cfg(dist_filename): + """Extract configuration data from a bdist_wininst .exe + + Returns a configparser.RawConfigParser, or None + """ + f = open(dist_filename, 'rb') + try: + endrec = zipfile._EndRecData(f) + if endrec is None: + return None + + prepended = (endrec[9] - endrec[5]) - endrec[6] + if prepended < 12: # no wininst data here + return None + f.seek(prepended - 12) + + tag, cfglen, bmlen = struct.unpack("<iii", f.read(12)) + if tag not in (0x1234567A, 0x1234567B): + return None # not a valid tag + + f.seek(prepended - (12 + cfglen)) + cfg = configparser.RawConfigParser( + {'version': '', 'target_version': ''}) + try: + part = f.read(cfglen) + # Read up to the first null byte. 
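+            # (the config block is read as raw bytes and may carry
+            # trailing NUL padding; only the text before the first NUL
+            # byte is the ini payload we hand to the parser)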
+ config = part.split(b'\0', 1)[0] + # Now the config is in bytes, but for RawConfigParser, it should + # be text, so decode it. + config = config.decode(sys.getfilesystemencoding()) + cfg.readfp(six.StringIO(config)) + except configparser.Error: + return None + if not cfg.has_section('metadata') or not cfg.has_section('Setup'): + return None + return cfg + + finally: + f.close() + + +def get_exe_prefixes(exe_filename): + """Get exe->egg path translations for a given .exe file""" + + prefixes = [ + ('PURELIB/', ''), ('PLATLIB/pywin32_system32', ''), + ('PLATLIB/', ''), + ('SCRIPTS/', 'EGG-INFO/scripts/'), + ('DATA/lib/site-packages', ''), + ] + z = zipfile.ZipFile(exe_filename) + try: + for info in z.infolist(): + name = info.filename + parts = name.split('/') + if len(parts) == 3 and parts[2] == 'PKG-INFO': + if parts[1].endswith('.egg-info'): + prefixes.insert(0, ('/'.join(parts[:2]), 'EGG-INFO/')) + break + if len(parts) != 2 or not name.endswith('.pth'): + continue + if name.endswith('-nspkg.pth'): + continue + if parts[0].upper() in ('PURELIB', 'PLATLIB'): + contents = z.read(name) + if six.PY3: + contents = contents.decode() + for pth in yield_lines(contents): + pth = pth.strip().replace('\\', '/') + if not pth.startswith('import'): + prefixes.append((('%s/%s/' % (parts[0], pth)), '')) + finally: + z.close() + prefixes = [(x.lower(), y) for x, y in prefixes] + prefixes.sort() + prefixes.reverse() + return prefixes + + +def parse_requirement_arg(spec): + try: + return Requirement.parse(spec) + except ValueError: + raise DistutilsError( + "Not a URL, existing file, or requirement spec: %r" % (spec,) + ) + + +class PthDistributions(Environment): + """A .pth file with Distribution paths in it""" + + dirty = False + + def __init__(self, filename, sitedirs=()): + self.filename = filename + self.sitedirs = list(map(normalize_path, sitedirs)) + self.basedir = normalize_path(os.path.dirname(self.filename)) + self._load() + Environment.__init__(self, [], None, None) + for path in yield_lines(self.paths): + list(map(self.add, find_distributions(path, True))) + + def _load(self): + self.paths = [] + saw_import = False + seen = dict.fromkeys(self.sitedirs) + if os.path.isfile(self.filename): + f = open(self.filename, 'rt') + for line in f: + if line.startswith('import'): + saw_import = True + continue + path = line.rstrip() + self.paths.append(path) + if not path.strip() or path.strip().startswith('#'): + continue + # skip non-existent paths, in case somebody deleted a package + # manually, and duplicate paths as well + path = self.paths[-1] = normalize_path( + os.path.join(self.basedir, path) + ) + if not os.path.exists(path) or path in seen: + self.paths.pop() # skip it + self.dirty = True # we cleaned up, so we're dirty now :) + continue + seen[path] = 1 + f.close() + + if self.paths and not saw_import: + self.dirty = True # ensure anything we touch has import wrappers + while self.paths and not self.paths[-1].strip(): + self.paths.pop() + + def save(self): + """Write changed .pth file back to disk""" + if not self.dirty: + return + + rel_paths = list(map(self.make_relative, self.paths)) + if rel_paths: + log.debug("Saving %s", self.filename) + lines = self._wrap_lines(rel_paths) + data = '\n'.join(lines) + '\n' + + if os.path.islink(self.filename): + os.unlink(self.filename) + with open(self.filename, 'wt') as f: + f.write(data) + + elif os.path.exists(self.filename): + log.debug("Deleting empty %s", self.filename) + os.unlink(self.filename) + + self.dirty = False + + @staticmethod + def 
_wrap_lines(lines): + return lines + + def add(self, dist): + """Add `dist` to the distribution map""" + new_path = ( + dist.location not in self.paths and ( + dist.location not in self.sitedirs or + # account for '.' being in PYTHONPATH + dist.location == os.getcwd() + ) + ) + if new_path: + self.paths.append(dist.location) + self.dirty = True + Environment.add(self, dist) + + def remove(self, dist): + """Remove `dist` from the distribution map""" + while dist.location in self.paths: + self.paths.remove(dist.location) + self.dirty = True + Environment.remove(self, dist) + + def make_relative(self, path): + npath, last = os.path.split(normalize_path(path)) + baselen = len(self.basedir) + parts = [last] + sep = os.altsep == '/' and '/' or os.sep + while len(npath) >= baselen: + if npath == self.basedir: + parts.append(os.curdir) + parts.reverse() + return sep.join(parts) + npath, last = os.path.split(npath) + parts.append(last) + else: + return path + + +class RewritePthDistributions(PthDistributions): + + @classmethod + def _wrap_lines(cls, lines): + yield cls.prelude + for line in lines: + yield line + yield cls.postlude + + _inline = lambda text: textwrap.dedent(text).strip().replace('\n', '; ') + prelude = _inline(""" + import sys + sys.__plen = len(sys.path) + """) + postlude = _inline(""" + import sys + new = sys.path[sys.__plen:] + del sys.path[sys.__plen:] + p = getattr(sys, '__egginsert', 0) + sys.path[p:p] = new + sys.__egginsert = p + len(new) + """) + + +if os.environ.get('SETUPTOOLS_SYS_PATH_TECHNIQUE', 'rewrite') == 'rewrite': + PthDistributions = RewritePthDistributions + + +def _first_line_re(): + """ + Return a regular expression based on first_line_re suitable for matching + strings. + """ + if isinstance(first_line_re.pattern, str): + return first_line_re + + # first_line_re in Python >=3.1.4 and >=3.2.1 is a bytes pattern. + return re.compile(first_line_re.pattern.decode()) + + +def auto_chmod(func, arg, exc): + if func is os.remove and os.name == 'nt': + chmod(arg, stat.S_IWRITE) + return func(arg) + et, ev, _ = sys.exc_info() + six.reraise(et, (ev[0], ev[1] + (" %s %s" % (func, arg)))) + + +def update_dist_caches(dist_path, fix_zipimporter_caches): + """ + Fix any globally cached `dist_path` related data + + `dist_path` should be a path of a newly installed egg distribution (zipped + or unzipped). + + sys.path_importer_cache contains finder objects that have been cached when + importing data from the original distribution. Any such finders need to be + cleared since the replacement distribution might be packaged differently, + e.g. a zipped egg distribution might get replaced with an unzipped egg + folder or vice versa. Having the old finders cached may then cause Python + to attempt loading modules from the replacement distribution using an + incorrect loader. + + zipimport.zipimporter objects are Python loaders charged with importing + data packaged inside zip archives. If stale loaders referencing the + original distribution, are left behind, they can fail to load modules from + the replacement distribution. E.g. if an old zipimport.zipimporter instance + is used to load data from a new zipped egg archive, it may cause the + operation to attempt to locate the requested data in the wrong location - + one indicated by the original distribution's zip archive directory + information. Such an operation may then fail outright, e.g. report having + read a 'bad local file header', or even worse, it may fail silently & + return invalid data. 
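+
+    A typical call site (illustrative) is install_egg() above::
+
+        update_dist_caches('/path/to/site-packages/Foo-1.0-py3.5.egg',
+                           fix_zipimporter_caches=True)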
+ + zipimport._zip_directory_cache contains cached zip archive directory + information for all existing zipimport.zipimporter instances and all such + instances connected to the same archive share the same cached directory + information. + + If asked, and the underlying Python implementation allows it, we can fix + all existing zipimport.zipimporter instances instead of having to track + them down and remove them one by one, by updating their shared cached zip + archive directory information. This, of course, assumes that the + replacement distribution is packaged as a zipped egg. + + If not asked to fix existing zipimport.zipimporter instances, we still do + our best to clear any remaining zipimport.zipimporter related cached data + that might somehow later get used when attempting to load data from the new + distribution and thus cause such load operations to fail. Note that when + tracking down such remaining stale data, we can not catch every conceivable + usage from here, and we clear only those that we know of and have found to + cause problems if left alive. Any remaining caches should be updated by + whomever is in charge of maintaining them, i.e. they should be ready to + handle us replacing their zip archives with new distributions at runtime. + + """ + # There are several other known sources of stale zipimport.zipimporter + # instances that we do not clear here, but might if ever given a reason to + # do so: + # * Global setuptools pkg_resources.working_set (a.k.a. 'master working + # set') may contain distributions which may in turn contain their + # zipimport.zipimporter loaders. + # * Several zipimport.zipimporter loaders held by local variables further + # up the function call stack when running the setuptools installation. + # * Already loaded modules may have their __loader__ attribute set to the + # exact loader instance used when importing them. Python 3.4 docs state + # that this information is intended mostly for introspection and so is + # not expected to cause us problems. + normalized_path = normalize_path(dist_path) + _uncache(normalized_path, sys.path_importer_cache) + if fix_zipimporter_caches: + _replace_zip_directory_cache_data(normalized_path) + else: + # Here, even though we do not want to fix existing and now stale + # zipimporter cache information, we still want to remove it. Related to + # Python's zip archive directory information cache, we clear each of + # its stale entries in two phases: + # 1. Clear the entry so attempting to access zip archive information + # via any existing stale zipimport.zipimporter instances fails. + # 2. Remove the entry from the cache so any newly constructed + # zipimport.zipimporter instances do not end up using old stale + # zip archive directory information. + # This whole stale data removal step does not seem strictly necessary, + # but has been left in because it was done before we started replacing + # the zip archive directory information cache content if possible, and + # there are no relevant unit tests that we can depend on to tell us if + # this is really needed. + _remove_and_clear_zip_directory_cache_data(normalized_path) + + +def _collect_zipimporter_cache_entries(normalized_path, cache): + """ + Return zipimporter cache entry keys related to a given normalized path. + + Alternative path spellings (e.g. those using different character case or + those using alternative path separators) related to the same path are + included. Any sub-path entries are included as well, i.e. 
those + corresponding to zip archives embedded in other zip archives. + + """ + result = [] + prefix_len = len(normalized_path) + for p in cache: + np = normalize_path(p) + if (np.startswith(normalized_path) and + np[prefix_len:prefix_len + 1] in (os.sep, '')): + result.append(p) + return result + + +def _update_zipimporter_cache(normalized_path, cache, updater=None): + """ + Update zipimporter cache data for a given normalized path. + + Any sub-path entries are processed as well, i.e. those corresponding to zip + archives embedded in other zip archives. + + Given updater is a callable taking a cache entry key and the original entry + (after already removing the entry from the cache), and expected to update + the entry and possibly return a new one to be inserted in its place. + Returning None indicates that the entry should not be replaced with a new + one. If no updater is given, the cache entries are simply removed without + any additional processing, the same as if the updater simply returned None. + + """ + for p in _collect_zipimporter_cache_entries(normalized_path, cache): + # N.B. pypy's custom zipimport._zip_directory_cache implementation does + # not support the complete dict interface: + # * Does not support item assignment, thus not allowing this function + # to be used only for removing existing cache entries. + # * Does not support the dict.pop() method, forcing us to use the + # get/del patterns instead. For more detailed information see the + # following links: + # https://github.com/pypa/setuptools/issues/202#issuecomment-202913420 + # https://bitbucket.org/pypy/pypy/src/dd07756a34a41f674c0cacfbc8ae1d4cc9ea2ae4/pypy/module/zipimport/interp_zipimport.py#cl-99 + old_entry = cache[p] + del cache[p] + new_entry = updater and updater(p, old_entry) + if new_entry is not None: + cache[p] = new_entry + + +def _uncache(normalized_path, cache): + _update_zipimporter_cache(normalized_path, cache) + + +def _remove_and_clear_zip_directory_cache_data(normalized_path): + def clear_and_remove_cached_zip_archive_directory_data(path, old_entry): + old_entry.clear() + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=clear_and_remove_cached_zip_archive_directory_data) + +# PyPy Python implementation does not allow directly writing to the +# zipimport._zip_directory_cache and so prevents us from attempting to correct +# its content. The best we can do there is clear the problematic cache content +# and have PyPy repopulate it as needed. The downside is that if there are any +# stale zipimport.zipimporter instances laying around, attempting to use them +# will fail due to not having its zip archive directory information available +# instead of being automatically corrected to use the new correct zip archive +# directory information. +if '__pypy__' in sys.builtin_module_names: + _replace_zip_directory_cache_data = \ + _remove_and_clear_zip_directory_cache_data +else: + def _replace_zip_directory_cache_data(normalized_path): + def replace_cached_zip_archive_directory_data(path, old_entry): + # N.B. In theory, we could load the zip directory information just + # once for all updated path spellings, and then copy it locally and + # update its contained path strings to contain the correct + # spelling, but that seems like a way too invasive move (this cache + # structure is not officially documented anywhere and could in + # theory change with new Python releases) for no significant + # benefit. 
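+            # Instead, clear the stale directory data and let the fresh
+            # zipimporter constructed below repopulate
+            # zipimport._zip_directory_cache[path] with correct content.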
+ old_entry.clear() + zipimport.zipimporter(path) + old_entry.update(zipimport._zip_directory_cache[path]) + return old_entry + + _update_zipimporter_cache( + normalized_path, zipimport._zip_directory_cache, + updater=replace_cached_zip_archive_directory_data) + + +def is_python(text, filename='<string>'): + "Is this string a valid Python script?" + try: + compile(text, filename, 'exec') + except (SyntaxError, TypeError): + return False + else: + return True + + +def is_sh(executable): + """Determine if the specified executable is a .sh (contains a #! line)""" + try: + with io.open(executable, encoding='latin-1') as fp: + magic = fp.read(2) + except (OSError, IOError): + return executable + return magic == '#!' + + +def nt_quote_arg(arg): + """Quote a command line argument according to Windows parsing rules""" + return subprocess.list2cmdline([arg]) + + +def is_python_script(script_text, filename): + """Is this text, as a whole, a Python script? (as opposed to shell/bat/etc. + """ + if filename.endswith('.py') or filename.endswith('.pyw'): + return True # extension says it's Python + if is_python(script_text, filename): + return True # it's syntactically valid Python + if script_text.startswith('#!'): + # It begins with a '#!' line, so check if 'python' is in it somewhere + return 'python' in script_text.splitlines()[0].lower() + + return False # Not any Python I can recognize + + +try: + from os import chmod as _chmod +except ImportError: + # Jython compatibility + def _chmod(*args): + pass + + +def chmod(path, mode): + log.debug("changing mode of %s to %o", path, mode) + try: + _chmod(path, mode) + except os.error as e: + log.debug("chmod failed: %s", e) + + +class CommandSpec(list): + """ + A command spec for a #! header, specified as a list of arguments akin to + those passed to Popen. + """ + + options = [] + split_args = dict() + + @classmethod + def best(cls): + """ + Choose the best CommandSpec class based on environmental conditions. + """ + return cls + + @classmethod + def _sys_executable(cls): + _default = os.path.normpath(sys.executable) + return os.environ.get('__PYVENV_LAUNCHER__', _default) + + @classmethod + def from_param(cls, param): + """ + Construct a CommandSpec from a parameter to build_scripts, which may + be None. + """ + if isinstance(param, cls): + return param + if isinstance(param, list): + return cls(param) + if param is None: + return cls.from_environment() + # otherwise, assume it's a string. + return cls.from_string(param) + + @classmethod + def from_environment(cls): + return cls([cls._sys_executable()]) + + @classmethod + def from_string(cls, string): + """ + Construct a command spec from a simple string representing a command + line parseable by shlex.split. + """ + items = shlex.split(string, **cls.split_args) + return cls(items) + + def install_options(self, script_text): + self.options = shlex.split(self._extract_options(script_text)) + cmdline = subprocess.list2cmdline(self) + if not isascii(cmdline): + self.options[:0] = ['-x'] + + @staticmethod + def _extract_options(orig_script): + """ + Extract any options from the first line of the script. + """ + first = (orig_script + '\n').splitlines()[0] + match = _first_line_re().match(first) + options = match.group(1) or '' if match else '' + return options.strip() + + def as_header(self): + return self._render(self + list(self.options)) + + @staticmethod + def _render(items): + cmdline = subprocess.list2cmdline(items) + return '#!' + cmdline + '\n' + +# For pbr compat; will be removed in a future version. 
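+# (kept because tools such as older pbr releases import this module-level
+# name directly)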
+# For pbr compat; will be removed in a future version.
+sys_executable = CommandSpec._sys_executable()
+
+
+class WindowsCommandSpec(CommandSpec):
+    split_args = dict(posix=False)
+
+
+class ScriptWriter(object):
+    """
+    Encapsulates behavior around writing entry point scripts for console and
+    gui apps.
+    """
+
+    template = textwrap.dedent("""
+        # EASY-INSTALL-ENTRY-SCRIPT: %(spec)r,%(group)r,%(name)r
+        __requires__ = %(spec)r
+        import sys
+        from pkg_resources import load_entry_point
+
+        if __name__ == '__main__':
+            sys.exit(
+                load_entry_point(%(spec)r, %(group)r, %(name)r)()
+            )
+    """).lstrip()
+
+    command_spec_class = CommandSpec
+
+    @classmethod
+    def get_script_args(cls, dist, executable=None, wininst=False):
+        # for backward compatibility
+        warnings.warn("Use get_args", DeprecationWarning)
+        writer = (WindowsScriptWriter if wininst else ScriptWriter).best()
+        header = cls.get_script_header("", executable, wininst)
+        return writer.get_args(dist, header)
+
+    @classmethod
+    def get_script_header(cls, script_text, executable=None, wininst=False):
+        # for backward compatibility
+        warnings.warn("Use get_header", DeprecationWarning)
+        if wininst:
+            executable = "python.exe"
+        cmd = cls.command_spec_class.best().from_param(executable)
+        cmd.install_options(script_text)
+        return cmd.as_header()
+
+    @classmethod
+    def get_args(cls, dist, header=None):
+        """
+        Yield write_script() argument tuples for a distribution's
+        console_scripts and gui_scripts entry points.
+        """
+        if header is None:
+            header = cls.get_header()
+        spec = str(dist.as_requirement())
+        for type_ in 'console', 'gui':
+            group = type_ + '_scripts'
+            for name, ep in dist.get_entry_map(group).items():
+                cls._ensure_safe_name(name)
+                script_text = cls.template % locals()
+                args = cls._get_script_args(type_, name, header, script_text)
+                for res in args:
+                    yield res
+
+    @staticmethod
+    def _ensure_safe_name(name):
+        """
+        Prevent paths in *_scripts entry point names.
+        """
+        has_path_sep = re.search(r'[\\/]', name)
+        if has_path_sep:
+            raise ValueError("Path separators not allowed in script names")
+
+    @classmethod
+    def get_writer(cls, force_windows):
+        # for backward compatibility
+        warnings.warn("Use best", DeprecationWarning)
+        return WindowsScriptWriter.best() if force_windows else cls.best()
+
+    @classmethod
+    def best(cls):
+        """
+        Select the best ScriptWriter for this environment.
+        """
+        if sys.platform == 'win32' or (os.name == 'java' and os._name == 'nt'):
+            return WindowsScriptWriter.best()
+        else:
+            return cls
+
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        # Simply write the stub with no extension.
+        yield (name, header + script_text)
+
+    @classmethod
+    def get_header(cls, script_text="", executable=None):
+        """Create a #! line, getting options (if any) from script_text"""
+        cmd = cls.command_spec_class.best().from_param(executable)
+        cmd.install_options(script_text)
+        return cmd.as_header()
+
+
+class WindowsScriptWriter(ScriptWriter):
+    command_spec_class = WindowsCommandSpec
+
+    @classmethod
+    def get_writer(cls):
+        # for backward compatibility
+        warnings.warn("Use best", DeprecationWarning)
+        return cls.best()
+
+    @classmethod
+    def best(cls):
+        """
+        Select the best ScriptWriter suitable for Windows
+        """
+        writer_lookup = dict(
+            executable=WindowsExecutableLauncherWriter,
+            natural=cls,
+        )
+        # for compatibility, use the executable launcher by default
+        launcher = os.environ.get('SETUPTOOLS_LAUNCHER', 'executable')
+        return writer_lookup[launcher]
+
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        "For Windows, add a .py extension"
+        ext = dict(console='.pya', gui='.pyw')[type_]
+        if ext not in os.environ['PATHEXT'].lower().split(';'):
+            warnings.warn("%s not listed in PATHEXT; scripts will not be "
+                          "recognized as executables." % ext, UserWarning)
+        old = ['.pya', '.py', '-script.py', '.pyc', '.pyo', '.pyw', '.exe']
+        old.remove(ext)
+        header = cls._adjust_header(type_, header)
+        blockers = [name + x for x in old]
+        yield name + ext, header + script_text, 't', blockers
+
+    @classmethod
+    def _adjust_header(cls, type_, orig_header):
+        """
+        Make sure 'pythonw' is used for gui and 'python' is used for
+        console (regardless of what sys.executable is).
+        """
+        pattern = 'pythonw.exe'
+        repl = 'python.exe'
+        if type_ == 'gui':
+            pattern, repl = repl, pattern
+        pattern_ob = re.compile(re.escape(pattern), re.IGNORECASE)
+        new_header = pattern_ob.sub(string=orig_header, repl=repl)
+        return new_header if cls._use_header(new_header) else orig_header
+
+    @staticmethod
+    def _use_header(new_header):
+        """
+        Should _adjust_header use the replaced header?
+
+        On non-windows systems, always use. On
+        Windows systems, only use the replaced header if it resolves
+        to an executable on the system.
+        """
+        clean_header = new_header[2:-1].strip('"')
+        return sys.platform != 'win32' or find_executable(clean_header)
+
+
+class WindowsExecutableLauncherWriter(WindowsScriptWriter):
+    @classmethod
+    def _get_script_args(cls, type_, name, header, script_text):
+        """
+        For Windows, add a .py extension and an .exe launcher
+        """
+        if type_ == 'gui':
+            launcher_type = 'gui'
+            ext = '-script.pyw'
+            old = ['.pyw']
+        else:
+            launcher_type = 'cli'
+            ext = '-script.py'
+            old = ['.py', '.pyc', '.pyo']
+        hdr = cls._adjust_header(type_, header)
+        blockers = [name + x for x in old]
+        yield (name + ext, hdr + script_text, 't', blockers)
+        yield (
+            name + '.exe', get_win_launcher(launcher_type),
+            'b'  # write in binary mode
+        )
+        if not is_64bit():
+            # install a manifest for the launcher to prevent Windows
+            # from detecting it as an installer (which it will for
+            # launchers like easy_install.exe). Consider only
+            # adding a manifest for launchers detected as installers.
+            # See Distribute #143 for details.
+            m_name = name + '.exe.manifest'
+            yield (m_name, load_launcher_manifest(name), 't')
+
+
+# for backward-compatibility
+get_script_args = ScriptWriter.get_script_args
+get_script_header = ScriptWriter.get_script_header
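+
+
+# Illustrative sketch (not upstream code) of how a caller might generate
+# script stubs for a distribution's entry points; ``dist`` is a hypothetical
+# pkg_resources.Distribution object:
+#
+#     writer = ScriptWriter.best()   # WindowsScriptWriter on win32
+#     header = writer.get_header()   # e.g. '#!/usr/bin/python3\n'
+#     for args in writer.get_args(dist, header):
+#         name, contents = args[0], args[1]  # plus mode/blockers on Windows
+
+
+def get_win_launcher(type):
+    """
+    Load the Windows launcher (executable) suitable for launching a script.
+
+    `type` should be either 'cli' or 'gui'
+
+    Returns the executable as a byte string.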
+    """
+    launcher_fn = '%s.exe' % type
+    if platform.machine().lower() == 'arm':
+        launcher_fn = launcher_fn.replace(".", "-arm.")
+    if is_64bit():
+        launcher_fn = launcher_fn.replace(".", "-64.")
+    else:
+        launcher_fn = launcher_fn.replace(".", "-32.")
+    return resource_string('setuptools', launcher_fn)
+
+
+def load_launcher_manifest(name):
+    manifest = pkg_resources.resource_string(__name__, 'launcher manifest.xml')
+    if six.PY2:
+        return manifest % vars()
+    else:
+        return manifest.decode('utf-8') % vars()
+
+
+def rmtree(path, ignore_errors=False, onerror=auto_chmod):
+    """Recursively delete a directory tree.
+
+    This code is taken from the Python 2.4 version of 'shutil', because
+    the 2.3 version doesn't really work right.
+    """
+    if ignore_errors:
+        def onerror(*args):
+            pass
+    elif onerror is None:
+        def onerror(*args):
+            raise
+    names = []
+    try:
+        names = os.listdir(path)
+    except os.error:
+        onerror(os.listdir, path, sys.exc_info())
+    for name in names:
+        fullname = os.path.join(path, name)
+        try:
+            mode = os.lstat(fullname).st_mode
+        except os.error:
+            mode = 0
+        if stat.S_ISDIR(mode):
+            rmtree(fullname, ignore_errors, onerror)
+        else:
+            try:
+                os.remove(fullname)
+            except os.error:
+                onerror(os.remove, fullname, sys.exc_info())
+    try:
+        os.rmdir(path)
+    except os.error:
+        onerror(os.rmdir, path, sys.exc_info())
+
+
+def current_umask():
+    tmp = os.umask(0o022)
+    os.umask(tmp)
+    return tmp
+
+
+def bootstrap():
+    # This function is called when setuptools*.egg is run using /bin/sh
+    import setuptools
+
+    argv0 = os.path.dirname(setuptools.__path__[0])
+    sys.argv[0] = argv0
+    sys.argv.append(argv0)
+    main()
+
+
+def main(argv=None, **kw):
+    from setuptools import setup
+    from setuptools.dist import Distribution
+
+    class DistributionWithoutHelpCommands(Distribution):
+        common_usage = ""
+
+        def _show_help(self, *args, **kw):
+            with _patch_usage():
+                Distribution._show_help(self, *args, **kw)
+
+    if argv is None:
+        argv = sys.argv[1:]
+
+    with _patch_usage():
+        setup(
+            script_args=['-q', 'easy_install', '-v'] + argv,
+            script_name=sys.argv[0] or 'easy_install',
+            distclass=DistributionWithoutHelpCommands, **kw
+        )
+
+
+@contextlib.contextmanager
+def _patch_usage():
+    import distutils.core
+    USAGE = textwrap.dedent("""
+        usage: %(script)s [options] requirement_or_url ...
+           or: %(script)s --help
+        """).lstrip()
+
+    def gen_usage(script_name):
+        return USAGE % dict(
+            script=os.path.basename(script_name),
+        )
+
+    saved = distutils.core.gen_usage
+    distutils.core.gen_usage = gen_usage
+    try:
+        yield
+    finally:
+        distutils.core.gen_usage = saved
diff --git a/venv/lib/python3.5/site-packages/setuptools/command/egg_info.py b/venv/lib/python3.5/site-packages/setuptools/command/egg_info.py
new file mode 100644
index 0000000..d1bd9b0
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/setuptools/command/egg_info.py
@@ -0,0 +1,482 @@
+"""setuptools.command.egg_info
+
+Create a distribution's .egg-info directory and contents"""
+
+from distutils.filelist import FileList as _FileList
+from distutils.util import convert_path
+from distutils import log
+import distutils.errors
+import distutils.filelist
+import os
+import re
+import sys
+import io
+import warnings
+import time
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import map
+
+from setuptools import Command
+from setuptools.command.sdist import sdist
+from setuptools.command.sdist import walk_revctrl
+from setuptools.command.setopt import edit_config
+from setuptools.command import bdist_egg
+from pkg_resources import (
+    parse_requirements, safe_name, parse_version,
+    safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
+import setuptools.unicode_utils as unicode_utils
+
+from pkg_resources.extern import packaging
+
+try:
+    from setuptools_svn import svn_utils
+except ImportError:
+    pass
+
+
+class egg_info(Command):
+    description = "create a distribution's .egg-info directory"
+
+    user_options = [
+        ('egg-base=', 'e', "directory containing .egg-info directories"
+                           " (default: top of the source tree)"),
+        ('tag-svn-revision', 'r',
+         "Add subversion revision ID to version number"),
+        ('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
+        ('tag-build=', 'b', "Specify explicit tag to add to version number"),
+        ('no-svn-revision', 'R',
+         "Don't add subversion revision ID [default]"),
+        ('no-date', 'D', "Don't include date stamp [default]"),
+    ]
+
+    boolean_options = ['tag-date', 'tag-svn-revision']
+    negative_opt = {'no-svn-revision': 'tag-svn-revision',
+                    'no-date': 'tag-date'}
+
+    def initialize_options(self):
+        self.egg_name = None
+        self.egg_version = None
+        self.egg_base = None
+        self.egg_info = None
+        self.tag_build = None
+        self.tag_svn_revision = 0
+        self.tag_date = 0
+        self.broken_egg_info = False
+        self.vtags = None
+
+    def save_version_info(self, filename):
+        values = dict(
+            egg_info=dict(
+                tag_svn_revision=0,
+                tag_date=0,
+                tag_build=self.tags(),
+            )
+        )
+        edit_config(filename, values)
+
+    def finalize_options(self):
+        self.egg_name = safe_name(self.distribution.get_name())
+        self.vtags = self.tags()
+        self.egg_version = self.tagged_version()
+
+        parsed_version = parse_version(self.egg_version)
+
+        try:
+            is_version = isinstance(parsed_version, packaging.version.Version)
+            spec = (
+                "%s==%s" if is_version else "%s===%s"
+            )
+            list(
+                parse_requirements(spec % (self.egg_name, self.egg_version))
+            )
+        except ValueError:
+            raise distutils.errors.DistutilsOptionError(
+                "Invalid distribution name or version syntax: %s-%s" %
+                (self.egg_name, self.egg_version)
+            )
+
+        if self.egg_base is None:
+            dirs = self.distribution.package_dir
+            self.egg_base = (dirs or {}).get('', os.curdir)
+
+        self.ensure_dirname('egg_base')
+        self.egg_info = to_filename(self.egg_name) + '.egg-info'
+        if self.egg_base != os.curdir:
+            self.egg_info = os.path.join(self.egg_base, self.egg_info)
+        if '-' in self.egg_name:
+            self.check_broken_egg_info()
+
+        # Set package version for the benefit of dumber commands
+        # (e.g. sdist, bdist_wininst, etc.)
+        #
+        self.distribution.metadata.version = self.egg_version
+
+        # If we bootstrapped around the lack of a PKG-INFO, as might be the
+        # case in a fresh checkout, make sure that any special tags get added
+        # to the version info
+        #
+        pd = self.distribution._patched_dist
+        if pd is not None and pd.key == self.egg_name.lower():
+            pd._version = self.egg_version
+            pd._parsed_version = parse_version(self.egg_version)
+            self.distribution._patched_dist = None
+
+    def write_or_delete_file(self, what, filename, data, force=False):
+        """Write `data` to `filename` or delete if empty
+
+        If `data` is non-empty, this routine is the same as ``write_file()``.
+        If `data` is empty but not ``None``, this is the same as calling
+        ``delete_file(filename)``. If `data` is ``None``, then this is a no-op
+        unless `filename` exists, in which case a warning is issued about the
+        orphaned file (if `force` is false), or deleted (if `force` is true).
+        """
+        if data:
+            self.write_file(what, filename, data)
+        elif os.path.exists(filename):
+            if data is None and not force:
+                log.warn(
+                    "%s not set in setup(), but %s exists", what, filename
+                )
+                return
+            else:
+                self.delete_file(filename)
+
+    def write_file(self, what, filename, data):
+        """Write `data` to `filename` (if not a dry run) after announcing it
+
+        `what` is used in a log message to identify what is being written
+        to the file.
+        """
+        log.info("writing %s to %s", what, filename)
+        if six.PY3:
+            data = data.encode("utf-8")
+        if not self.dry_run:
+            f = open(filename, 'wb')
+            f.write(data)
+            f.close()
+
+    def delete_file(self, filename):
+        """Delete `filename` (if not a dry run) after announcing it"""
+        log.info("deleting %s", filename)
+        if not self.dry_run:
+            os.unlink(filename)
+
+    def tagged_version(self):
+        version = self.distribution.get_version()
+        # egg_info may be called more than once for a distribution,
+        # in which case the version string already contains all tags.
+        if self.vtags and version.endswith(self.vtags):
+            return safe_version(version)
+        return safe_version(version + self.vtags)
+
+    def run(self):
+        self.mkpath(self.egg_info)
+        installer = self.distribution.fetch_build_egg
+        for ep in iter_entry_points('egg_info.writers'):
+            ep.require(installer=installer)
+            writer = ep.resolve()
+            writer(self, ep.name, os.path.join(self.egg_info, ep.name))
+
+        # Get rid of native_libs.txt if it was put there by older bdist_egg
+        nl = os.path.join(self.egg_info, "native_libs.txt")
+        if os.path.exists(nl):
+            self.delete_file(nl)
+
+        self.find_sources()
+
+    def tags(self):
+        version = ''
+        if self.tag_build:
+            version += self.tag_build
+        if self.tag_svn_revision:
+            version += '-r%s' % self.get_svn_revision()
+        if self.tag_date:
+            version += time.strftime("-%Y%m%d")
+        return version
+
+    @staticmethod
+    def get_svn_revision():
+        if 'svn_utils' not in globals():
+            return "0"
+        return str(svn_utils.SvnInfo.load(os.curdir).get_revision())
+
+    def find_sources(self):
+        """Generate SOURCES.txt manifest file"""
+        manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
+        mm = manifest_maker(self.distribution)
+        mm.manifest = manifest_filename
+        mm.run()
+        self.filelist = mm.filelist
+
+    def check_broken_egg_info(self):
+        bei = self.egg_name + '.egg-info'
+        if self.egg_base != os.curdir:
+            bei = os.path.join(self.egg_base, bei)
+        if os.path.exists(bei):
+            log.warn(
+                "-" * 78 + '\n'
+                "Note: Your current .egg-info directory has a '-' in its name;"
+                '\nthis will not work correctly with "setup.py develop".\n\n'
+                'Please rename %s to %s to correct this problem.\n' + '-' * 78,
+                bei, self.egg_info
+            )
+            self.broken_egg_info = self.egg_info
+            self.egg_info = bei  # make it work for now
+
+
+class FileList(_FileList):
+    """File list that accepts only existing, platform-independent paths"""
+
+    def append(self, item):
+        if item.endswith('\r'):  # Fix older sdists built on Windows
+            item = item[:-1]
+        path = convert_path(item)
+
+        if self._safe_path(path):
+            self.files.append(path)
+
+    def extend(self, paths):
+        self.files.extend(filter(self._safe_path, paths))
+
+    def _repair(self):
+        """
+        Replace self.files with only safe paths
+
+        Because some owners of FileList manipulate the underlying
+        ``files`` attribute directly, this method must be called to
+        repair those paths.
+        """
+        self.files = list(filter(self._safe_path, self.files))
+
+    def _safe_path(self, path):
+        enc_warn = "'%s' not %s encodable -- skipping"
+
+        # To avoid accidental transcoding errors, decode to unicode first
+        u_path = unicode_utils.filesys_decode(path)
+        if u_path is None:
+            log.warn("'%s' in unexpected encoding -- skipping" % path)
+            return False
+
+        # Must ensure utf-8 encodability
+        utf8_path = unicode_utils.try_encode(u_path, "utf-8")
+        if utf8_path is None:
+            log.warn(enc_warn, path, 'utf-8')
+            return False
+
+        try:
+            # accept the path if either form of it exists
+            if os.path.exists(u_path) or os.path.exists(utf8_path):
+                return True
+            # this will catch any encode errors decoding u_path
+        except UnicodeEncodeError:
+            log.warn(enc_warn, path, sys.getfilesystemencoding())
+
+
+class manifest_maker(sdist):
+    template = "MANIFEST.in"
+
+    def initialize_options(self):
+        self.use_defaults = 1
+        self.prune = 1
+        self.manifest_only = 1
+        self.force_manifest = 1
+
+    def finalize_options(self):
+        pass
+
+    def run(self):
+        self.filelist = FileList()
+        if not os.path.exists(self.manifest):
+            self.write_manifest()  # it must exist so it'll get in the list
+        self.filelist.findall()
+        self.add_defaults()
+        if os.path.exists(self.template):
+            self.read_template()
+        self.prune_file_list()
+        self.filelist.sort()
+        self.filelist.remove_duplicates()
+        self.write_manifest()
+
+    def _manifest_normalize(self, path):
+        path = unicode_utils.filesys_decode(path)
+        return path.replace(os.sep, '/')
+
+    def write_manifest(self):
+        """
+        Write the file list in 'self.filelist' to the manifest file
+        named by 'self.manifest'.
+        """
+        self.filelist._repair()
+
+        # _repair() ensures the paths are encodable, but not that they are
+        # unicode; _manifest_normalize takes care of the decoding
+        files = [self._manifest_normalize(f) for f in self.filelist.files]
+        msg = "writing manifest file '%s'" % self.manifest
+        self.execute(write_file, (self.manifest, files), msg)
+
+    def warn(self, msg):  # suppress missing-file warnings from sdist
+        if not msg.startswith("standard file not found:"):
+            sdist.warn(self, msg)
+
+    def add_defaults(self):
+        sdist.add_defaults(self)
+        self.filelist.append(self.template)
+        self.filelist.append(self.manifest)
+        rcfiles = list(walk_revctrl())
+        if rcfiles:
+            self.filelist.extend(rcfiles)
+        elif os.path.exists(self.manifest):
+            self.read_manifest()
+        ei_cmd = self.get_finalized_command('egg_info')
+        self._add_egg_info(cmd=ei_cmd)
+        self.filelist.include_pattern("*", prefix=ei_cmd.egg_info)
+
+    def _add_egg_info(self, cmd):
+        """
+        Add paths for egg-info files for an external egg-base.
+
+        The egg-info files are written to egg-base. If egg-base is
+        outside the current working directory, this method
+        searches the egg-base directory for files to include
+        in the manifest. Uses distutils.filelist.findall (which is
+        really the version monkeypatched in by setuptools/__init__.py)
+        to perform the search.
+
+        Since findall records relative paths, prefix the returned
+        paths with cmd.egg_base, so add_defaults' include_pattern call
+        (which is looking for the absolute cmd.egg_info) will match
+        them.
+        """
+        if cmd.egg_base == os.curdir:
+            # egg-info files were already added by something else
+            return
+
+        discovered = distutils.filelist.findall(cmd.egg_base)
+        resolved = (os.path.join(cmd.egg_base, path) for path in discovered)
+        self.filelist.allfiles.extend(resolved)
+
+    def prune_file_list(self):
+        build = self.get_finalized_command('build')
+        base_dir = self.distribution.get_fullname()
+        self.filelist.exclude_pattern(None, prefix=build.build_base)
+        self.filelist.exclude_pattern(None, prefix=base_dir)
+        sep = re.escape(os.sep)
+        self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
+                                      is_regex=1)
+
+
+def write_file(filename, contents):
+    """Create a file with the specified name and write 'contents' (a
+    sequence of strings without line terminators) to it.
+    """
+    contents = "\n".join(contents)
+
+    # assuming the contents have been vetted for utf-8 encoding
+    contents = contents.encode("utf-8")
+
+    with open(filename, "wb") as f:  # always write POSIX-style manifest
+        f.write(contents)
+
+
+def write_pkg_info(cmd, basename, filename):
+    log.info("writing %s", filename)
+    if not cmd.dry_run:
+        metadata = cmd.distribution.metadata
+        metadata.version, oldver = cmd.egg_version, metadata.version
+        metadata.name, oldname = cmd.egg_name, metadata.name
+        try:
+            # write unescaped data to PKG-INFO, so older pkg_resources
+            # can still parse it
+            metadata.write_pkg_info(cmd.egg_info)
+        finally:
+            metadata.name, metadata.version = oldname, oldver
+
+    safe = getattr(cmd.distribution, 'zip_safe', None)
+
+    bdist_egg.write_safety_flag(cmd.egg_info, safe)
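+
+
+# The writer functions in this module (write_pkg_info, write_requirements,
+# write_entries, ...) are registered under the 'egg_info.writers' entry-point
+# group and are invoked by egg_info.run() as writer(cmd, basename, filename).
+# A minimal sketch of a custom writer, assuming a hypothetical 'foo' setup()
+# attribute and 'foo.txt' metadata file:
+#
+#     def write_foo(cmd, basename, filename):
+#         value = getattr(cmd.distribution, 'foo', None)
+#         cmd.write_or_delete_file("foo", filename, value)
+
+
+def warn_depends_obsolete(cmd, basename, filename):
+    if os.path.exists(filename):
+        log.warn(
+            "WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
+            "Use the install_requires/extras_require setup() args instead."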
+        )
+
+
+def _write_requirements(stream, reqs):
+    lines = yield_lines(reqs or ())
+    append_cr = lambda line: line + '\n'
+    lines = map(append_cr, lines)
+    stream.writelines(lines)
+
+
+def write_requirements(cmd, basename, filename):
+    dist = cmd.distribution
+    data = six.StringIO()
+    _write_requirements(data, dist.install_requires)
+    extras_require = dist.extras_require or {}
+    for extra in sorted(extras_require):
+        data.write('\n[{extra}]\n'.format(**vars()))
+        _write_requirements(data, extras_require[extra])
+    cmd.write_or_delete_file("requirements", filename, data.getvalue())
+
+
+def write_setup_requirements(cmd, basename, filename):
+    # six.StringIO here; a bare StringIO is not defined in this module
+    data = six.StringIO()
+    _write_requirements(data, cmd.distribution.setup_requires)
+    cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
+
+
+def write_toplevel_names(cmd, basename, filename):
+    pkgs = dict.fromkeys(
+        [
+            k.split('.', 1)[0]
+            for k in cmd.distribution.iter_distribution_names()
+        ]
+    )
+    cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
+
+
+def overwrite_arg(cmd, basename, filename):
+    write_arg(cmd, basename, filename, True)
+
+
+def write_arg(cmd, basename, filename, force=False):
+    argname = os.path.splitext(basename)[0]
+    value = getattr(cmd.distribution, argname, None)
+    if value is not None:
+        value = '\n'.join(value) + '\n'
+    cmd.write_or_delete_file(argname, filename, value, force)
+
+
+def write_entries(cmd, basename, filename):
+    ep = cmd.distribution.entry_points
+
+    if isinstance(ep, six.string_types) or ep is None:
+        data = ep
+    else:
+        data = []
+        for section, contents in sorted(ep.items()):
+            if not isinstance(contents, six.string_types):
+                contents = EntryPoint.parse_group(section, contents)
+                contents = '\n'.join(sorted(map(str, contents.values())))
+            data.append('[%s]\n%s\n\n' % (section, contents))
+        data = ''.join(data)
+
+    cmd.write_or_delete_file('entry points', filename, data, True)
+
+
+def get_pkg_info_revision():
+    """
+    Get a -r### off of PKG-INFO Version in case this is an sdist of
+    a subversion revision.
+    """
+    warnings.warn("get_pkg_info_revision is deprecated.", DeprecationWarning)
+    if os.path.exists('PKG-INFO'):
+        with io.open('PKG-INFO') as f:
+            for line in f:
+                match = re.match(r"Version:.*-r(\d+)\s*$", line)
+                if match:
+                    return int(match.group(1))
+    return 0
diff --git a/venv/lib/python3.5/site-packages/setuptools/command/install.py b/venv/lib/python3.5/site-packages/setuptools/command/install.py
new file mode 100644
index 0000000..31a5ddb
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/setuptools/command/install.py
@@ -0,0 +1,125 @@
+from distutils.errors import DistutilsArgError
+import inspect
+import glob
+import warnings
+import platform
+import distutils.command.install as orig
+
+import setuptools
+
+# Prior to numpy 1.9, NumPy relies on the '_install' name, so provide it for
+# now.  See https://github.com/pypa/setuptools/issues/199/
+_install = orig.install
+
+
+class install(orig.install):
+    """Use easy_install to install the package, w/dependencies"""
+
+    user_options = orig.install.user_options + [
+        ('old-and-unmanageable', None, "Try not to use this!"),
+        ('single-version-externally-managed', None,
+         "used by system package builders to create 'flat' eggs"),
+    ]
+    boolean_options = orig.install.boolean_options + [
+        'old-and-unmanageable', 'single-version-externally-managed',
+    ]
+    new_commands = [
+        ('install_egg_info', lambda self: True),
+        ('install_scripts', lambda self: True),
+    ]
+    _nc = dict(new_commands)
+
+    def initialize_options(self):
+        orig.install.initialize_options(self)
+        self.old_and_unmanageable = None
+        self.single_version_externally_managed = None
+
+    def finalize_options(self):
+        orig.install.finalize_options(self)
+        if self.root:
+            self.single_version_externally_managed = True
+        elif self.single_version_externally_managed:
+            if not self.root and not self.record:
+                raise DistutilsArgError(
+                    "You must specify --record or --root when building system"
+                    " packages"
+                )
+
+    def handle_extra_path(self):
+        if self.root or self.single_version_externally_managed:
+            # explicit backward-compatibility mode, allow extra_path to work
+            return orig.install.handle_extra_path(self)
+
+        # Ignore extra_path when installing an egg (or being run by another
+        # command without --root or --single-version-externally-managed)
+        self.path_file = None
+        self.extra_dirs = ''
+
+    def run(self):
+        # Explicit request for old-style install?  Just do it
+        if self.old_and_unmanageable or self.single_version_externally_managed:
+            return orig.install.run(self)
+
+        if not self._called_from_setup(inspect.currentframe()):
+            # Run in backward-compatibility mode to support bdist_* commands.
+            orig.install.run(self)
+        else:
+            self.do_egg_install()
+
+    @staticmethod
+    def _called_from_setup(run_frame):
+        """
+        Attempt to detect whether run() was called from setup() or by another
+        command.  If called by setup(), the parent caller will be the
+        'run_command' method in 'distutils.dist', and *its* caller will be
+        the 'run_commands' method.  If called any other way, the
+        immediate caller *might* be 'run_command', but it won't have been
+        called by 'run_commands'.  Return True in that case or if a call stack
+        is unavailable.  Return False otherwise.
+        """
+        if run_frame is None:
+            msg = "Call stack not available. bdist_* commands may fail."
+            warnings.warn(msg)
+            if platform.python_implementation() == 'IronPython':
+                msg = "For best results, pass -X:Frames to enable call stack."
+                warnings.warn(msg)
+            return True
+        res = inspect.getouterframes(run_frame)[2]
+        caller, = res[:1]
+        info = inspect.getframeinfo(caller)
+        caller_module = caller.f_globals.get('__name__', '')
+        return (
+            caller_module == 'distutils.dist'
+            and info.function == 'run_commands'
+        )
+
+    def do_egg_install(self):
+
+        easy_install = self.distribution.get_command_class('easy_install')
+
+        cmd = easy_install(
+            self.distribution, args="x", root=self.root, record=self.record,
+        )
+        cmd.ensure_finalized()  # finalize before bdist_egg munges install cmd
+        cmd.always_copy_from = '.'  # make sure local-dir eggs get installed
+
+        # pick up setup-dir .egg files only: no .egg-info
+        cmd.package_index.scan(glob.glob('*.egg'))
+
+        self.run_command('bdist_egg')
+        args = [self.distribution.get_command_obj('bdist_egg').egg_output]
+
+        if setuptools.bootstrap_install_from:
+            # Bootstrap self-installation of setuptools
+            args.insert(0, setuptools.bootstrap_install_from)
+
+        cmd.args = args
+        cmd.run()
+        setuptools.bootstrap_install_from = None
+
+
+# XXX Python 3.1 doesn't see _nc if this is inside the class
+install.sub_commands = (
+    [cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
+    install.new_commands
+)
diff --git a/venv/lib/python3.5/site-packages/setuptools/command/install_egg_info.py b/venv/lib/python3.5/site-packages/setuptools/command/install_egg_info.py
new file mode 100644
index 0000000..ae0325d
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/setuptools/command/install_egg_info.py
@@ -0,0 +1,138 @@
+from distutils import log, dir_util
+from distutils.errors import DistutilsOptionError
+import os, sys
+
+from setuptools.extern.six.moves import map
+
+from setuptools import Command
+from setuptools.archive_util import unpack_archive
+import pkg_resources
+
+
+class install_egg_info(Command):
+    """Install an .egg-info directory for the package"""
+
+    description = "Install an .egg-info directory for the package"
+
+    user_options = [
+        ('install-dir=', 'd', "directory to install to"),
+    ]
+
+    def initialize_options(self):
+        self.install_dir = None
+        self.install_layout = None
+        self.prefix_option = None
+
+    def finalize_options(self):
+        self.set_undefined_options('install_lib',
+                                   ('install_dir', 'install_dir'))
+        self.set_undefined_options('install',
+                                   ('install_layout', 'install_layout'))
+        if sys.hexversion > 0x2060000:
+            self.set_undefined_options('install',
+                                       ('prefix_option', 'prefix_option'))
+        ei_cmd = self.get_finalized_command("egg_info")
+        basename = pkg_resources.Distribution(
+            None, None, ei_cmd.egg_name, ei_cmd.egg_version
+        ).egg_name() + '.egg-info'
+
+        if self.install_layout:
+            if self.install_layout.lower() not in ['deb']:
+                raise DistutilsOptionError(
+                    "unknown value for --install-layout")
+            self.install_layout = self.install_layout.lower()
+            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
+        elif self.prefix_option or 'real_prefix' in sys.__dict__:
+            # don't modify for virtualenv
+            pass
+        else:
+            basename = basename.replace('-py%s' % pkg_resources.PY_MAJOR, '')
+
+        self.source = ei_cmd.egg_info
+        self.target = os.path.join(self.install_dir, basename)
+        self.outputs = []
+
+    def run(self):
+        self.run_command('egg_info')
+        if os.path.isdir(self.target) and not os.path.islink(self.target):
+            dir_util.remove_tree(self.target, dry_run=self.dry_run)
+        elif os.path.exists(self.target):
+            self.execute(os.unlink, (self.target,), "Removing " + self.target)
+        if not self.dry_run:
+            pkg_resources.ensure_directory(self.target)
+        self.execute(
+            self.copytree, (), "Copying %s to %s" % (self.source, self.target)
+        )
+        self.install_namespaces()
+
+    def get_outputs(self):
+        return self.outputs
+
+    def copytree(self):
+        # Copy the .egg-info tree to site-packages
+        def skimmer(src, dst):
+            # filter out source-control directories; note that 'src' is always
+            # a '/'-separated path, regardless of platform.  'dst' is a
+            # platform-specific path.
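+            # For example (illustrative only), src='.svn/entries' or
+            # 'foo/CVS/Entries' would be dropped; other files copy through.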
+            for skip in '.svn/', 'CVS/':
+                if src.startswith(skip) or '/' + skip in src:
+                    return None
+            if self.install_layout and self.install_layout in ['deb'] and src.startswith('SOURCES.txt'):
+                log.info("Skipping SOURCES.txt")
+                return None
+            self.outputs.append(dst)
+            log.debug("Copying %s to %s", src, dst)
+            return dst
+
+        unpack_archive(self.source, self.target, skimmer)
+
+    def install_namespaces(self):
+        nsp = self._get_all_ns_packages()
+        if not nsp:
+            return
+        filename, ext = os.path.splitext(self.target)
+        filename += '-nspkg.pth'
+        self.outputs.append(filename)
+        log.info("Installing %s", filename)
+        lines = map(self._gen_nspkg_line, nsp)
+
+        if self.dry_run:
+            # always generate the lines, even in dry run
+            list(lines)
+            return
+
+        with open(filename, 'wt') as f:
+            f.writelines(lines)
+
+    _nspkg_tmpl = (
+        "import sys, types, os",
+        "p = os.path.join(sys._getframe(1).f_locals['sitedir'], *%(pth)r)",
+        "ie = os.path.exists(os.path.join(p,'__init__.py'))",
+        "m = not ie and "
+        "sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))",
+        "mp = (m or []) and m.__dict__.setdefault('__path__',[])",
+        "(p not in mp) and mp.append(p)",
+    )
+    "lines for the namespace installer"
+
+    _nspkg_tmpl_multi = (
+        'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
+    )
+    "additional line(s) when a parent package is indicated"
+
+    @classmethod
+    def _gen_nspkg_line(cls, pkg):
+        # ensure pkg is not a unicode string under Python 2.7
+        pkg = str(pkg)
+        pth = tuple(pkg.split('.'))
+        tmpl_lines = cls._nspkg_tmpl
+        parent, sep, child = pkg.rpartition('.')
+        if parent:
+            tmpl_lines += cls._nspkg_tmpl_multi
+        return ';'.join(tmpl_lines) % locals() + '\n'
+
+    def _get_all_ns_packages(self):
+        """Return sorted list of all package namespaces"""
+        nsp = set()
+        for pkg in self.distribution.namespace_packages or []:
+            pkg = pkg.split('.')
+            while pkg:
+                nsp.add('.'.join(pkg))
+                pkg.pop()
+        return sorted(nsp)
diff --git a/venv/lib/python3.5/site-packages/setuptools/command/install_lib.py b/venv/lib/python3.5/site-packages/setuptools/command/install_lib.py
new file mode 100644
index 0000000..696b776
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/setuptools/command/install_lib.py
@@ -0,0 +1,147 @@
+import os
+import sys
+import imp
+from itertools import product, starmap
+import distutils.command.install_lib as orig
+
+
+class install_lib(orig.install_lib):
+    """Don't add compiled flags to filenames of non-Python files"""
+
+    def initialize_options(self):
+        orig.install_lib.initialize_options(self)
+        self.multiarch = None
+        self.install_layout = None
+
+    def finalize_options(self):
+        orig.install_lib.finalize_options(self)
+        self.set_undefined_options('install',
+                                   ('install_layout', 'install_layout'))
+        if self.install_layout == 'deb' and sys.version_info[:2] >= (3, 3):
+            import sysconfig
+            self.multiarch = sysconfig.get_config_var('MULTIARCH')
+
+    def run(self):
+        self.build()
+        outfiles = self.install()
+        if outfiles is not None:
+            # always compile, in case we have any extension stubs to deal with
+            self.byte_compile(outfiles)
+
+    def get_exclusions(self):
+        """
+        Return a collections.Sized collections.Container of paths to be
+        excluded for single_version_externally_managed installations.
+        """
+        all_packages = (
+            pkg
+            for ns_pkg in self._get_SVEM_NSPs()
+            for pkg in self._all_packages(ns_pkg)
+        )
+
+        excl_specs = product(all_packages, self._gen_exclusion_paths())
+        return set(starmap(self._exclude_pkg_path, excl_specs))
+
+    def _exclude_pkg_path(self, pkg, exclusion_path):
+        """
+        Given a package name and exclusion path within that package,
+        compute the full exclusion path.
+        """
+        parts = pkg.split('.') + [exclusion_path]
+        return os.path.join(self.install_dir, *parts)
+
+    @staticmethod
+    def _all_packages(pkg_name):
+        """
+        >>> list(install_lib._all_packages('foo.bar.baz'))
+        ['foo.bar.baz', 'foo.bar', 'foo']
+        """
+        while pkg_name:
+            yield pkg_name
+            pkg_name, sep, child = pkg_name.rpartition('.')
+
+    def _get_SVEM_NSPs(self):
+        """
+        Get namespace packages (list) but only for
+        single_version_externally_managed installations and empty otherwise.
+        """
+        # TODO: is it necessary to short-circuit here? i.e. what's the cost
+        # if get_finalized_command is called even when namespace_packages is
+        # False?
+        if not self.distribution.namespace_packages:
+            return []
+
+        install_cmd = self.get_finalized_command('install')
+        svem = install_cmd.single_version_externally_managed
+
+        return self.distribution.namespace_packages if svem else []
+
+    @staticmethod
+    def _gen_exclusion_paths():
+        """
+        Generate file paths to be excluded for namespace packages (bytecode
+        cache files).
+        """
+        # always exclude the package module itself
+        yield '__init__.py'
+
+        yield '__init__.pyc'
+        yield '__init__.pyo'
+
+        if not hasattr(imp, 'get_tag'):
+            return
+
+        base = os.path.join('__pycache__', '__init__.' + imp.get_tag())
+        yield base + '.pyc'
+        yield base + '.pyo'
+        yield base + '.opt-1.pyc'
+        yield base + '.opt-2.pyc'
+
+    def copy_tree(
+            self, infile, outfile,
+            preserve_mode=1, preserve_times=1, preserve_symlinks=0, level=1
+    ):
+        assert preserve_mode and preserve_times and not preserve_symlinks
+        exclude = self.get_exclusions()
+
+        if not exclude:
+            import distutils.dir_util
+            distutils.dir_util._multiarch = self.multiarch
+            return orig.install_lib.copy_tree(self, infile, outfile)
+
+        # Exclude namespace package __init__.py* files from the output
+
+        from setuptools.archive_util import unpack_directory
+        from distutils import log
+
+        outfiles = []
+
+        if self.multiarch:
+            import sysconfig
+            ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
+            if ext_suffix.endswith(self.multiarch + ext_suffix[-3:]):
+                new_suffix = None
+            else:
+                new_suffix = "%s-%s%s" % (ext_suffix[:-3], self.multiarch, ext_suffix[-3:])
+
+        def pf(src, dst):
+            if dst in exclude:
+                log.warn("Skipping installation of %s (namespace package)",
+                         dst)
+                return False
+
+            if self.multiarch and new_suffix and dst.endswith(ext_suffix) and not dst.endswith(new_suffix):
+                dst = dst.replace(ext_suffix, new_suffix)
+                log.info("renaming extension to %s", os.path.basename(dst))
+
+            log.info("copying %s -> %s", src, os.path.dirname(dst))
+            outfiles.append(dst)
+            return dst
+
+        unpack_directory(infile, outfile, pf)
+        return outfiles
+
+    def get_outputs(self):
+        outputs = orig.install_lib.get_outputs(self)
+        exclude = self.get_exclusions()
+        if exclude:
+            return [f for f in outputs if f not in exclude]
+        return outputs
diff --git a/venv/lib/python3.5/site-packages/setuptools/command/install_scripts.py b/venv/lib/python3.5/site-packages/setuptools/command/install_scripts.py
new file mode 100644
index 0000000..be66cb2
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/setuptools/command/install_scripts.py
@@ -0,0 +1,60 @@
+from distutils import log
+import distutils.command.install_scripts as orig
+import os
+
+from pkg_resources import Distribution, PathMetadata, ensure_directory
+
+
+class install_scripts(orig.install_scripts):
+    """Do normal script install, plus any egg_info wrapper scripts"""
+
+    def initialize_options(self):
+        orig.install_scripts.initialize_options(self)
+        self.no_ep = False
+
+    def run(self):
+        import setuptools.command.easy_install as ei
+
+        self.run_command("egg_info")
+        if self.distribution.scripts:
+            orig.install_scripts.run(self)  # run first to set up self.outfiles
+        else:
+            self.outfiles = []
+        if self.no_ep:
+            # don't install entry point scripts into .egg file!
+            return
+
+        ei_cmd = self.get_finalized_command("egg_info")
+        dist = Distribution(
+            ei_cmd.egg_base, PathMetadata(ei_cmd.egg_base, ei_cmd.egg_info),
+            ei_cmd.egg_name, ei_cmd.egg_version,
+        )
+        bs_cmd = self.get_finalized_command('build_scripts')
+        exec_param = getattr(bs_cmd, 'executable', None)
+        bw_cmd = self.get_finalized_command("bdist_wininst")
+        is_wininst = getattr(bw_cmd, '_is_running', False)
+        writer = ei.ScriptWriter
+        if is_wininst:
+            exec_param = "python.exe"
+            writer = ei.WindowsScriptWriter
+        # resolve the writer to the environment
+        writer = writer.best()
+        cmd = writer.command_spec_class.best().from_param(exec_param)
+        for args in writer.get_args(dist, cmd.as_header()):
+            self.write_script(*args)
+
+    def write_script(self, script_name, contents, mode="t", *ignored):
+        """Write an executable file to the scripts directory"""
+        from setuptools.command.easy_install import chmod, current_umask
+
+        log.info("Installing %s script to %s", script_name, self.install_dir)
+        target = os.path.join(self.install_dir, script_name)
+        self.outfiles.append(target)
+
+        mask = current_umask()
+        if not self.dry_run:
+            ensure_directory(target)
+            f = open(target, "w" + mode)
+            f.write(contents)
+            f.close()
+            chmod(target, 0o777 - mask)
diff --git a/venv/lib/python3.5/site-packages/setuptools/command/register.py b/venv/lib/python3.5/site-packages/setuptools/command/register.py
new file mode 100644
index 0000000..8d6336a
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/setuptools/command/register.py
@@ -0,0 +1,10 @@
+import distutils.command.register as orig
+
+
+class register(orig.register):
+    __doc__ = orig.register.__doc__
+
+    def run(self):
+        # Make sure that we are using valid current name/version info
+        self.run_command('egg_info')
+        orig.register.run(self)
diff --git a/venv/lib/python3.5/site-packages/setuptools/command/rotate.py b/venv/lib/python3.5/site-packages/setuptools/command/rotate.py
new file mode 100644
index 0000000..804f962
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/setuptools/command/rotate.py
@@ -0,0 +1,62 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import os
+
+from setuptools.extern import six
+
+from setuptools import Command
+
+
+class rotate(Command):
+    """Delete older distributions"""
+
+    description = "delete older distributions, keeping N newest files"
+    user_options = [
+        ('match=', 'm', "patterns to match (required)"),
+        ('dist-dir=', 'd', "directory where the distributions are"),
+        ('keep=', 'k', "number of matching distributions to keep"),
+    ]
+
+    boolean_options = []
+
+    def initialize_options(self):
+        self.match = None
+        self.dist_dir = None
+        self.keep = None
+
+    def finalize_options(self):
+        if self.match is None:
+            raise DistutilsOptionError(
+                "Must specify one or more (comma-separated) match patterns "
" + "(e.g. '.zip' or '.egg')" + ) + if self.keep is None: + raise DistutilsOptionError("Must specify number of files to keep") + try: + self.keep = int(self.keep) + except ValueError: + raise DistutilsOptionError("--keep must be an integer") + if isinstance(self.match, six.string_types): + self.match = [ + convert_path(p.strip()) for p in self.match.split(',') + ] + self.set_undefined_options('bdist', ('dist_dir', 'dist_dir')) + + def run(self): + self.run_command("egg_info") + from glob import glob + + for pattern in self.match: + pattern = self.distribution.get_name() + '*' + pattern + files = glob(os.path.join(self.dist_dir, pattern)) + files = [(os.path.getmtime(f), f) for f in files] + files.sort() + files.reverse() + + log.info("%d file(s) matching %s", len(files), pattern) + files = files[self.keep:] + for (t, f) in files: + log.info("Deleting %s", f) + if not self.dry_run: + os.unlink(f) diff --git a/venv/lib/python3.5/site-packages/setuptools/command/saveopts.py b/venv/lib/python3.5/site-packages/setuptools/command/saveopts.py new file mode 100644 index 0000000..611cec5 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/saveopts.py @@ -0,0 +1,22 @@ +from setuptools.command.setopt import edit_config, option_base + + +class saveopts(option_base): + """Save command-line options to a file""" + + description = "save supplied options to setup.cfg or other config file" + + def run(self): + dist = self.distribution + settings = {} + + for cmd in dist.command_options: + + if cmd == 'saveopts': + continue # don't save our own options! + + for opt, (src, val) in dist.get_option_dict(cmd).items(): + if src == "command line": + settings.setdefault(cmd, {})[opt] = val + + edit_config(self.filename, settings, self.dry_run) diff --git a/venv/lib/python3.5/site-packages/setuptools/command/sdist.py b/venv/lib/python3.5/site-packages/setuptools/command/sdist.py new file mode 100644 index 0000000..6640d4e --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/sdist.py @@ -0,0 +1,196 @@ +from glob import glob +from distutils import log +import distutils.command.sdist as orig +import os +import sys +import io + +from setuptools.extern import six + +from setuptools.utils import cs_path_exists + +import pkg_resources + +READMES = 'README', 'README.rst', 'README.txt' + +_default_revctrl = list + +def walk_revctrl(dirname=''): + """Find all files under revision control""" + for ep in pkg_resources.iter_entry_points('setuptools.file_finders'): + for item in ep.load()(dirname): + yield item + + +class sdist(orig.sdist): + """Smart sdist that finds anything supported by revision control""" + + user_options = [ + ('formats=', None, + "formats for source distribution (comma-separated list)"), + ('keep-temp', 'k', + "keep the distribution tree around after creating " + + "archive file(s)"), + ('dist-dir=', 'd', + "directory to put the source distribution archive(s) in " + "[default: dist]"), + ] + + negative_opt = {} + + def run(self): + self.run_command('egg_info') + ei_cmd = self.get_finalized_command('egg_info') + self.filelist = ei_cmd.filelist + self.filelist.append(os.path.join(ei_cmd.egg_info, 'SOURCES.txt')) + self.check_readme() + + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + # Call check_metadata only if no 'check' command + # (distutils <= 2.6) + import distutils.command + + if 'check' not in distutils.command.__all__: + self.check_metadata() + + self.make_distribution() + + dist_files = getattr(self.distribution, 
+        dist_files = getattr(self.distribution, 'dist_files', [])
+        for file in self.archive_files:
+            data = ('sdist', '', file)
+            if data not in dist_files:
+                dist_files.append(data)
+
+    def __read_template_hack(self):
+        # This grody hack closes the template file (MANIFEST.in) if an
+        # exception occurs during read_template.
+        # Doing so prevents an error when easy_install attempts to delete the
+        # file.
+        try:
+            orig.sdist.read_template(self)
+        except:
+            _, _, tb = sys.exc_info()
+            tb.tb_next.tb_frame.f_locals['template'].close()
+            raise
+
+    # Beginning with Python 2.7.2, 3.1.4, and 3.2.1, this leaky file handle
+    # has been fixed, so only override the method if we're using an earlier
+    # Python.
+    has_leaky_handle = (
+        sys.version_info < (2, 7, 2)
+        or (3, 0) <= sys.version_info < (3, 1, 4)
+        or (3, 2) <= sys.version_info < (3, 2, 1)
+    )
+    if has_leaky_handle:
+        read_template = __read_template_hack
+
+    def add_defaults(self):
+        standards = [READMES,
+                     self.distribution.script_name]
+        for fn in standards:
+            if isinstance(fn, tuple):
+                alts = fn
+                got_it = 0
+                for fn in alts:
+                    if cs_path_exists(fn):
+                        got_it = 1
+                        self.filelist.append(fn)
+                        break
+
+                if not got_it:
+                    self.warn("standard file not found: should have one of " +
+                              ', '.join(alts))
+            else:
+                if cs_path_exists(fn):
+                    self.filelist.append(fn)
+                else:
+                    self.warn("standard file '%s' not found" % fn)
+
+        optional = ['test/test*.py', 'setup.cfg']
+        for pattern in optional:
+            files = list(filter(cs_path_exists, glob(pattern)))
+            if files:
+                self.filelist.extend(files)
+
+        # getting python files
+        if self.distribution.has_pure_modules():
+            build_py = self.get_finalized_command('build_py')
+            self.filelist.extend(build_py.get_source_files())
+            # This functionality is incompatible with include_package_data, and
+            # will in fact create an infinite recursion if include_package_data
+            # is True.  Use of include_package_data will imply that
+            # distutils-style automatic handling of package_data is disabled
+            if not self.distribution.include_package_data:
+                for _, src_dir, _, filenames in build_py.data_files:
+                    self.filelist.extend([os.path.join(src_dir, filename)
+                                          for filename in filenames])
+
+        if self.distribution.has_ext_modules():
+            build_ext = self.get_finalized_command('build_ext')
+            self.filelist.extend(build_ext.get_source_files())
+
+        if self.distribution.has_c_libraries():
+            build_clib = self.get_finalized_command('build_clib')
+            self.filelist.extend(build_clib.get_source_files())
+
+        if self.distribution.has_scripts():
+            build_scripts = self.get_finalized_command('build_scripts')
+            self.filelist.extend(build_scripts.get_source_files())
+
+    def check_readme(self):
+        for f in READMES:
+            if os.path.exists(f):
+                return
+        else:
+            self.warn(
+                "standard file not found: should have one of " +
+                ', '.join(READMES)
+            )
+
+    def make_release_tree(self, base_dir, files):
+        orig.sdist.make_release_tree(self, base_dir, files)
+
+        # Save any egg_info command line options used to create this sdist
+        dest = os.path.join(base_dir, 'setup.cfg')
+        if hasattr(os, 'link') and os.path.exists(dest):
+            # unlink and re-copy, since it might be hard-linked, and
+            # we don't want to change the source version
+            os.unlink(dest)
+            self.copy_file('setup.cfg', dest)
+
+        self.get_finalized_command('egg_info').save_version_info(dest)
+
+    def _manifest_is_not_generated(self):
+        # check for special comment used in 2.7.1 and higher
+        if not os.path.isfile(self.manifest):
+            return False
+
+        with io.open(self.manifest, 'rb') as fp:
+            first_line = fp.readline()
+        return (first_line !=
+                '# file GENERATED by distutils, do NOT edit\n'.encode())
+
+    def read_manifest(self):
+        """Read the manifest file (named by 'self.manifest') and use it to
+        fill in 'self.filelist', the list of files to include in the source
+        distribution.
+        """
+        log.info("reading manifest file '%s'", self.manifest)
+        manifest = open(self.manifest, 'rbU')
+        for line in manifest:
+            # The manifest must contain UTF-8. See #303.
+            if six.PY3:
+                try:
+                    line = line.decode('UTF-8')
+                except UnicodeDecodeError:
+                    log.warn("%r not UTF-8 decodable -- skipping" % line)
+                    continue
+            # ignore comments and blank lines
+            line = line.strip()
+            if line.startswith('#') or not line:
+                continue
+            self.filelist.append(line)
+        manifest.close()
diff --git a/venv/lib/python3.5/site-packages/setuptools/command/setopt.py b/venv/lib/python3.5/site-packages/setuptools/command/setopt.py
new file mode 100644
index 0000000..7f332be
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/setuptools/command/setopt.py
@@ -0,0 +1,150 @@
+from distutils.util import convert_path
+from distutils import log
+from distutils.errors import DistutilsOptionError
+import distutils
+import os
+
+from setuptools.extern.six.moves import configparser
+
+from setuptools import Command
+
+
+__all__ = ['config_file', 'edit_config', 'option_base', 'setopt']
+
+
+def config_file(kind="local"):
+    """Get the filename of the distutils, local, global, or per-user config
+
+    `kind` must be one of "local", "global", or "user"
+    """
+    if kind == 'local':
+        return 'setup.cfg'
+    if kind == 'global':
+        return os.path.join(
+            os.path.dirname(distutils.__file__), 'distutils.cfg'
+        )
+    if kind == 'user':
+        dot = os.name == 'posix' and '.' or ''
+        return os.path.expanduser(convert_path("~/%spydistutils.cfg" % dot))
+    raise ValueError(
+        "config_file() type must be 'local', 'global', or 'user'", kind
+    )
+
+
+def edit_config(filename, settings, dry_run=False):
+    """Edit a configuration file to include `settings`
+
+    `settings` is a dictionary of dictionaries or ``None`` values, keyed by
+    command/section name.  A ``None`` value means to delete the entire section,
+    while a dictionary lists settings to be changed or deleted in that section.
+    A setting of ``None`` means to delete that setting.
+    """
+    log.debug("Reading configuration from %s", filename)
+    opts = configparser.RawConfigParser()
+    opts.read([filename])
+    for section, options in settings.items():
+        if options is None:
+            log.info("Deleting section [%s] from %s", section, filename)
+            opts.remove_section(section)
+        else:
+            if not opts.has_section(section):
+                log.debug("Adding new section [%s] to %s", section, filename)
+                opts.add_section(section)
+            for option, value in options.items():
+                if value is None:
+                    log.debug(
+                        "Deleting %s.%s from %s",
+                        section, option, filename
+                    )
+                    opts.remove_option(section, option)
+                    if not opts.options(section):
+                        log.info("Deleting empty [%s] section from %s",
+                                 section, filename)
+                        opts.remove_section(section)
+                else:
+                    log.debug(
+                        "Setting %s.%s to %r in %s",
+                        section, option, value, filename
+                    )
+                    opts.set(section, option, value)
+
+    log.info("Writing %s", filename)
+    if not dry_run:
+        with open(filename, 'w') as f:
+            opts.write(f)
+
+
+class option_base(Command):
+    """Abstract base class for commands that mess with config files"""
+
+    user_options = [
+        ('global-config', 'g',
+         "save options to the site-wide distutils.cfg file"),
+        ('user-config', 'u',
+         "save options to the current user's pydistutils.cfg file"),
+        ('filename=', 'f',
+         "configuration file to use (default=setup.cfg)"),
+    ]
+
+    boolean_options = [
+        'global-config', 'user-config',
+    ]
+
+    def initialize_options(self):
+        self.global_config = None
+        self.user_config = None
+        self.filename = None
+
+    def finalize_options(self):
+        filenames = []
+        if self.global_config:
+            filenames.append(config_file('global'))
+        if self.user_config:
+            filenames.append(config_file('user'))
+        if self.filename is not None:
+            filenames.append(self.filename)
+        if not filenames:
+            filenames.append(config_file('local'))
+        if len(filenames) > 1:
+            raise DistutilsOptionError(
+                "Must specify only one configuration file option",
+                filenames
+            )
+        self.filename, = filenames
+
+
+class setopt(option_base):
+    """Save command-line options to a file"""
+
+    description = "set an option in setup.cfg or another config file"
+
+    user_options = [
+        ('command=', 'c', 'command to set an option for'),
+        ('option=', 'o', 'option to set'),
+        ('set-value=', 's', 'value of the option'),
+        ('remove', 'r', 'remove (unset) the value'),
+    ] + option_base.user_options
+
+    boolean_options = option_base.boolean_options + ['remove']
+
+    def initialize_options(self):
+        option_base.initialize_options(self)
+        self.command = None
+        self.option = None
+        self.set_value = None
+        self.remove = None
+
+    def finalize_options(self):
+        option_base.finalize_options(self)
+        if self.command is None or self.option is None:
+            raise DistutilsOptionError("Must specify --command *and* --option")
+        if self.set_value is None and not self.remove:
+            raise DistutilsOptionError("Must specify --set-value or --remove")
+
+    def run(self):
+        edit_config(
+            self.filename, {
+                self.command: {self.option.replace('-', '_'): self.set_value}
+            },
+            self.dry_run
+        )
diff --git a/venv/lib/python3.5/site-packages/setuptools/command/test.py b/venv/lib/python3.5/site-packages/setuptools/command/test.py
new file mode 100644
index 0000000..371e913
--- /dev/null
+++ b/venv/lib/python3.5/site-packages/setuptools/command/test.py
@@ -0,0 +1,196 @@
+from distutils.errors import DistutilsOptionError
+from unittest import TestLoader
+import sys
+
+from setuptools.extern import six
+from setuptools.extern.six.moves import map
+
+from pkg_resources import (resource_listdir, resource_exists, normalize_path,
+                           working_set, _namespace_packages,
+                           add_activation_listener, require, EntryPoint)
+from setuptools import Command
+from setuptools.py31compat import unittest_main
+
+
+class ScanningLoader(TestLoader):
+    def loadTestsFromModule(self, module, pattern=None):
+        """Return a suite of all tests cases contained in the given module
+
+        If the module is a package, load tests from all the modules in it.
+        If the module has an ``additional_tests`` function, call it and add
+        the return value to the tests.
+        """
+        tests = []
+        tests.append(TestLoader.loadTestsFromModule(self, module))
+
+        if hasattr(module, "additional_tests"):
+            tests.append(module.additional_tests())
+
+        if hasattr(module, '__path__'):
+            for file in resource_listdir(module.__name__, ''):
+                if file.endswith('.py') and file != '__init__.py':
+                    submodule = module.__name__ + '.' + file[:-3]
+                else:
+                    if resource_exists(module.__name__, file + '/__init__.py'):
+                        submodule = module.__name__ + '.' + file
+                    else:
+                        continue
+                tests.append(self.loadTestsFromName(submodule))
+
+        if len(tests) != 1:
+            return self.suiteClass(tests)
+        else:
+            return tests[0]  # don't create a nested suite for only one return
+
+
+# adapted from jaraco.classes.properties:NonDataProperty
+class NonDataProperty(object):
+    def __init__(self, fget):
+        self.fget = fget
+
+    def __get__(self, obj, objtype=None):
+        if obj is None:
+            return self
+        return self.fget(obj)
+
+
+class test(Command):
+    """Command to run unit tests after in-place build"""
+
+    description = "run unit tests after in-place build"
+
+    user_options = [
+        ('test-module=', 'm', "Run 'test_suite' in specified module"),
+        ('test-suite=', 's',
+         "Test suite to run (e.g. 'some_module.test_suite')"),
+        ('test-runner=', 'r', "Test runner to use"),
+    ]
+
+    def initialize_options(self):
+        self.test_suite = None
+        self.test_module = None
+        self.test_loader = None
+        self.test_runner = None
+
+    def finalize_options(self):
+
+        if self.test_suite and self.test_module:
+            msg = "You may specify a module or a suite, but not both"
+            raise DistutilsOptionError(msg)
+
+        if self.test_suite is None:
+            if self.test_module is None:
+                self.test_suite = self.distribution.test_suite
+            else:
+                self.test_suite = self.test_module + ".test_suite"
+
+        if self.test_loader is None:
+            self.test_loader = getattr(self.distribution, 'test_loader', None)
+        if self.test_loader is None:
+            self.test_loader = "setuptools.command.test:ScanningLoader"
+        if self.test_runner is None:
+            self.test_runner = getattr(self.distribution, 'test_runner', None)
+
+    @NonDataProperty
+    def test_args(self):
+        return list(self._test_args())
+
+    def _test_args(self):
+        if self.verbose:
+            yield '--verbose'
+        if self.test_suite:
+            yield self.test_suite
+
+    def with_project_on_sys_path(self, func):
+        with_2to3 = six.PY3 and getattr(self.distribution, 'use_2to3', False)
+
+        if with_2to3:
+            # If we run 2to3 we can not do this inplace:
+
+            # Ensure metadata is up-to-date
+            self.reinitialize_command('build_py', inplace=0)
+            self.run_command('build_py')
+            bpy_cmd = self.get_finalized_command("build_py")
+            build_path = normalize_path(bpy_cmd.build_lib)
+
+            # Build extensions
+            self.reinitialize_command('egg_info', egg_base=build_path)
+            self.run_command('egg_info')
+
+            self.reinitialize_command('build_ext', inplace=0)
+            self.run_command('build_ext')
+        else:
+            # Without 2to3 inplace works fine:
+            self.run_command('egg_info')
+
+            # Build extensions in-place
+            self.reinitialize_command('build_ext', inplace=1)
+            self.run_command('build_ext')
+
+        ei_cmd = self.get_finalized_command("egg_info")
+
+        old_path = sys.path[:]
+        old_modules = sys.modules.copy()
+
+        try:
+            sys.path.insert(0, normalize_path(ei_cmd.egg_base))
+            working_set.__init__()
+            add_activation_listener(lambda dist: dist.activate())
+            require('%s==%s' % (ei_cmd.egg_name, ei_cmd.egg_version))
+            func()
+        finally:
+            sys.path[:] = old_path
+            sys.modules.clear()
+            sys.modules.update(old_modules)
+            working_set.__init__()
+
+    def run(self):
+        if self.distribution.install_requires:
+            self.distribution.fetch_build_eggs(
+                self.distribution.install_requires)
+        if self.distribution.tests_require:
+            self.distribution.fetch_build_eggs(self.distribution.tests_require)
+
+        cmd = ' '.join(self._argv)
+        if self.dry_run:
+            self.announce('skipping "%s" (dry run)' % cmd)
+        else:
+            self.announce('running "%s"' % cmd)
+            self.with_project_on_sys_path(self.run_tests)
+
+    def run_tests(self):
+        # Purge modules under test from sys.modules. The test loader will
+        # re-import them from the build location. Required when 2to3 is used
+        # with namespace packages.
+        if six.PY3 and getattr(self.distribution, 'use_2to3', False):
+            module = self.test_suite.split('.')[0]
+            if module in _namespace_packages:
+                del_modules = []
+                if module in sys.modules:
+                    del_modules.append(module)
+                module += '.'
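+                # also purge every submodule of the namespace package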
+ for name in sys.modules: + if name.startswith(module): + del_modules.append(name) + list(map(sys.modules.__delitem__, del_modules)) + + unittest_main( + None, None, self._argv, + testLoader=self._resolve_as_ep(self.test_loader), + testRunner=self._resolve_as_ep(self.test_runner), + ) + + @property + def _argv(self): + return ['unittest'] + self.test_args + + @staticmethod + def _resolve_as_ep(val): + """ + Load the indicated attribute value, called, as if it were + specified as an entry point. + """ + if val is None: + return + parsed = EntryPoint.parse("x=" + val) + return parsed.resolve()() diff --git a/venv/lib/python3.5/site-packages/setuptools/command/upload.py b/venv/lib/python3.5/site-packages/setuptools/command/upload.py new file mode 100644 index 0000000..08c20ba --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/upload.py @@ -0,0 +1,23 @@ +from distutils.command import upload as orig + + +class upload(orig.upload): + """ + Override default upload behavior to look up password + in the keyring if available. + """ + + def finalize_options(self): + orig.upload.finalize_options(self) + self.password or self._load_password_from_keyring() + + def _load_password_from_keyring(self): + """ + Attempt to load password from keyring. Suppress Exceptions. + """ + try: + keyring = __import__('keyring') + self.password = keyring.get_password(self.repository, + self.username) + except Exception: + pass diff --git a/venv/lib/python3.5/site-packages/setuptools/command/upload_docs.py b/venv/lib/python3.5/site-packages/setuptools/command/upload_docs.py new file mode 100644 index 0000000..f887b47 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/command/upload_docs.py @@ -0,0 +1,191 @@ +# -*- coding: utf-8 -*- +"""upload_docs + +Implements a Distutils 'upload_docs' subcommand (upload documentation to +PyPI's pythonhosted.org).
+""" + +from base64 import standard_b64encode +from distutils import log +from distutils.errors import DistutilsOptionError +import os +import socket +import zipfile +import tempfile +import shutil + +from setuptools.extern import six +from setuptools.extern.six.moves import http_client, urllib + +from pkg_resources import iter_entry_points +from .upload import upload + + +errors = 'surrogateescape' if six.PY3 else 'strict' + + +# This is not just a replacement for byte literals +# but works as a general purpose encoder +def b(s, encoding='utf-8'): + if isinstance(s, six.text_type): + return s.encode(encoding, errors) + return s + + +class upload_docs(upload): + description = 'Upload documentation to PyPI' + + user_options = [ + ('repository=', 'r', + "url of repository [default: %s]" % upload.DEFAULT_REPOSITORY), + ('show-response', None, + 'display full response text from server'), + ('upload-dir=', None, 'directory to upload'), + ] + boolean_options = upload.boolean_options + + def has_sphinx(self): + if self.upload_dir is None: + for ep in iter_entry_points('distutils.commands', 'build_sphinx'): + return True + + sub_commands = [('build_sphinx', has_sphinx)] + + def initialize_options(self): + upload.initialize_options(self) + self.upload_dir = None + self.target_dir = None + + def finalize_options(self): + upload.finalize_options(self) + if self.upload_dir is None: + if self.has_sphinx(): + build_sphinx = self.get_finalized_command('build_sphinx') + self.target_dir = build_sphinx.builder_target_dir + else: + build = self.get_finalized_command('build') + self.target_dir = os.path.join(build.build_base, 'docs') + else: + self.ensure_dirname('upload_dir') + self.target_dir = self.upload_dir + self.announce('Using upload directory %s' % self.target_dir) + + def create_zipfile(self, filename): + zip_file = zipfile.ZipFile(filename, "w") + try: + self.mkpath(self.target_dir) # just in case + for root, dirs, files in os.walk(self.target_dir): + if root == self.target_dir and not files: + raise DistutilsOptionError( + "no files found in upload directory '%s'" + % self.target_dir) + for name in files: + full = os.path.join(root, name) + relative = root[len(self.target_dir):].lstrip(os.path.sep) + dest = os.path.join(relative, name) + zip_file.write(full, dest) + finally: + zip_file.close() + + def run(self): + # Run sub commands + for cmd_name in self.get_sub_commands(): + self.run_command(cmd_name) + + tmp_dir = tempfile.mkdtemp() + name = self.distribution.metadata.get_name() + zip_file = os.path.join(tmp_dir, "%s.zip" % name) + try: + self.create_zipfile(zip_file) + self.upload_file(zip_file) + finally: + shutil.rmtree(tmp_dir) + + def upload_file(self, filename): + f = open(filename, 'rb') + content = f.read() + f.close() + meta = self.distribution.metadata + data = { + ':action': 'doc_upload', + 'name': meta.get_name(), + 'content': (os.path.basename(filename), content), + } + # set up the authentication + credentials = b(self.username + ':' + self.password) + credentials = standard_b64encode(credentials) + if six.PY3: + credentials = credentials.decode('ascii') + auth = "Basic " + credentials + + # Build up the MIME payload for the POST data + boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254' + sep_boundary = b('\n--') + b(boundary) + end_boundary = sep_boundary + b('--') + body = [] + for key, values in six.iteritems(data): + title = '\nContent-Disposition: form-data; name="%s"' % key + # handle multiple entries for the same name + if not isinstance(values, list): + 
values = [values] + for value in values: + if type(value) is tuple: + title += '; filename="%s"' % value[0] + value = value[1] + else: + value = b(value) + body.append(sep_boundary) + body.append(b(title)) + body.append(b("\n\n")) + body.append(value) + if value and value[-1:] == b('\r'): + body.append(b('\n')) # write an extra newline (lurve Macs) + body.append(end_boundary) + body.append(b("\n")) + body = b('').join(body) + + self.announce("Submitting documentation to %s" % (self.repository), + log.INFO) + + # build the Request + # We can't use urllib2 since we need to send the Basic + # auth right with the first request + schema, netloc, url, params, query, fragments = \ + urllib.parse.urlparse(self.repository) + assert not params and not query and not fragments + if schema == 'http': + conn = http_client.HTTPConnection(netloc) + elif schema == 'https': + conn = http_client.HTTPSConnection(netloc) + else: + raise AssertionError("unsupported schema " + schema) + + data = '' + try: + conn.connect() + conn.putrequest("POST", url) + content_type = 'multipart/form-data; boundary=%s' % boundary + conn.putheader('Content-type', content_type) + conn.putheader('Content-length', str(len(body))) + conn.putheader('Authorization', auth) + conn.endheaders() + conn.send(body) + except socket.error as e: + self.announce(str(e), log.ERROR) + return + + r = conn.getresponse() + if r.status == 200: + self.announce('Server response (%s): %s' % (r.status, r.reason), + log.INFO) + elif r.status == 301: + location = r.getheader('Location') + if location is None: + location = 'https://pythonhosted.org/%s/' % meta.get_name() + self.announce('Upload successful. Visit %s' % location, + log.INFO) + else: + self.announce('Upload failed (%s): %s' % (r.status, r.reason), + log.ERROR) + if self.show_response: + print('-' * 75, r.read(), '-' * 75) diff --git a/venv/lib/python3.5/site-packages/setuptools/depends.py b/venv/lib/python3.5/site-packages/setuptools/depends.py new file mode 100644 index 0000000..9f7c9a3 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/depends.py @@ -0,0 +1,217 @@ +import sys +import imp +import marshal +from imp import PKG_DIRECTORY, PY_COMPILED, PY_SOURCE, PY_FROZEN +from distutils.version import StrictVersion + +from setuptools.extern import six + +__all__ = [ + 'Require', 'find_module', 'get_module_constant', 'extract_constant' +] + +class Require: + """A prerequisite to building or installing a distribution""" + + def __init__(self, name, requested_version, module, homepage='', + attribute=None, format=None): + + if format is None and requested_version is not None: + format = StrictVersion + + if format is not None: + requested_version = format(requested_version) + if attribute is None: + attribute = '__version__' + + self.__dict__.update(locals()) + del self.self + + def full_name(self): + """Return full package/distribution name, w/version""" + if self.requested_version is not None: + return '%s-%s' % (self.name,self.requested_version) + return self.name + + def version_ok(self, version): + """Is 'version' sufficiently up-to-date?""" + return self.attribute is None or self.format is None or \ + str(version) != "unknown" and version >= self.requested_version + + def get_version(self, paths=None, default="unknown"): + + """Get version number of installed module, 'None', or 'default' + + Search 'paths' for module. If not found, return 'None'. 
If found, + return the extracted version attribute, or 'default' if no version + attribute was specified, or the value cannot be determined without + importing the module. The version is formatted according to the + requirement's version format (if any), unless it is 'None' or the + supplied 'default'. + """ + + if self.attribute is None: + try: + f,p,i = find_module(self.module,paths) + if f: f.close() + return default + except ImportError: + return None + + v = get_module_constant(self.module, self.attribute, default, paths) + + if v is not None and v is not default and self.format is not None: + return self.format(v) + + return v + + def is_present(self, paths=None): + """Return true if dependency is present on 'paths'""" + return self.get_version(paths) is not None + + def is_current(self, paths=None): + """Return true if dependency is present and up-to-date on 'paths'""" + version = self.get_version(paths) + if version is None: + return False + return self.version_ok(version) + + +def _iter_code(code): + + """Yield '(op,arg)' pair for each operation in code object 'code'""" + + from array import array + from dis import HAVE_ARGUMENT, EXTENDED_ARG + + bytes = array('b',code.co_code) + eof = len(code.co_code) + + ptr = 0 + extended_arg = 0 + + while ptr<eof: + + op = bytes[ptr] + + if op>=HAVE_ARGUMENT: + + arg = bytes[ptr+1] + bytes[ptr+2]*256 + extended_arg + ptr += 3 + + if op==EXTENDED_ARG: + long_type = six.integer_types[-1] + extended_arg = arg * long_type(65536) + continue + + else: + arg = None + ptr += 1 + + yield op,arg + + +def find_module(module, paths=None): + """Just like 'imp.find_module()', but with package support""" + + parts = module.split('.') + + while parts: + part = parts.pop(0) + f, path, (suffix,mode,kind) = info = imp.find_module(part, paths) + + if kind==PKG_DIRECTORY: + parts = parts or ['__init__'] + paths = [path] + + elif parts: + raise ImportError("Can't find %r in %s" % (parts,module)) + + return info + + +def get_module_constant(module, symbol, default=-1, paths=None): + + """Find 'module' by searching 'paths', and extract 'symbol' + + Return 'None' if 'module' does not exist on 'paths', or it does not define + 'symbol'. If the module defines 'symbol' as a constant, return the + constant. Otherwise, return 'default'.""" + + try: + f, path, (suffix, mode, kind) = find_module(module, paths) + except ImportError: + # Module doesn't exist + return None + + try: + if kind==PY_COMPILED: + f.read(8) # skip magic & date + code = marshal.load(f) + elif kind==PY_FROZEN: + code = imp.get_frozen_object(module) + elif kind==PY_SOURCE: + code = compile(f.read(), path, 'exec') + else: + # Not something we can parse; we'll have to import it. :( + if module not in sys.modules: + imp.load_module(module, f, path, (suffix, mode, kind)) + return getattr(sys.modules[module], symbol, None) + + finally: + if f: + f.close() + + return extract_constant(code, symbol, default) + + +def extract_constant(code, symbol, default=-1): + """Extract the constant value of 'symbol' from 'code' + + If the name 'symbol' is bound to a constant value by the Python code + object 'code', return that value. If 'symbol' is bound to an expression, + return 'default'. Otherwise, return 'None'. + + Return value is based on the first assignment to 'symbol'. 'symbol' must + be a global, or at least a non-"fast" local in the code block. That is, + only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol' + must be present in 'code.co_names'. 
+ """ + + if symbol not in code.co_names: + # name's not there, can't possibly be an assigment + return None + + name_idx = list(code.co_names).index(symbol) + + STORE_NAME = 90 + STORE_GLOBAL = 97 + LOAD_CONST = 100 + + const = default + + for op, arg in _iter_code(code): + + if op==LOAD_CONST: + const = code.co_consts[arg] + elif arg==name_idx and (op==STORE_NAME or op==STORE_GLOBAL): + return const + else: + const = default + + +def _update_globals(): + """ + Patch the globals to remove the objects not available on some platforms. + + XXX it'd be better to test assertions about bytecode instead. + """ + + if not sys.platform.startswith('java') and sys.platform != 'cli': + return + incompatible = 'extract_constant', 'get_module_constant' + for name in incompatible: + del globals()[name] + __all__.remove(name) + +_update_globals() diff --git a/venv/lib/python3.5/site-packages/setuptools/dist.py b/venv/lib/python3.5/site-packages/setuptools/dist.py new file mode 100644 index 0000000..086e0a5 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/dist.py @@ -0,0 +1,872 @@ +__all__ = ['Distribution'] + +import re +import os +import sys +import warnings +import numbers +import distutils.log +import distutils.core +import distutils.cmd +import distutils.dist +from distutils.core import Distribution as _Distribution +from distutils.errors import (DistutilsOptionError, DistutilsPlatformError, + DistutilsSetupError) + +from setuptools.extern import six +from setuptools.extern.six.moves import map +from pkg_resources.extern import packaging + +from setuptools.depends import Require +from setuptools import windows_support +import pkg_resources + + +def _get_unpatched(cls): + """Protect against re-patching the distutils if reloaded + + Also ensures that no other distutils extension monkeypatched the distutils + first. + """ + while cls.__module__.startswith('setuptools'): + cls, = cls.__bases__ + if not cls.__module__.startswith('distutils'): + raise AssertionError( + "distutils has already been patched by %r" % cls + ) + return cls + +_Distribution = _get_unpatched(_Distribution) + +def _patch_distribution_metadata_write_pkg_info(): + """ + Workaround issue #197 - Python 3 prior to 3.2.2 uses an environment-local + encoding to save the pkg_info. Monkey-patch its write_pkg_info method to + correct this undesirable behavior. + """ + environment_local = (3,) <= sys.version_info[:3] < (3, 2, 2) + if not environment_local: + return + + # from Python 3.4 + def write_pkg_info(self, base_dir): + """Write the PKG-INFO file into the release tree. 
+ """ + with open(os.path.join(base_dir, 'PKG-INFO'), 'w', + encoding='UTF-8') as pkg_info: + self.write_pkg_file(pkg_info) + + distutils.dist.DistributionMetadata.write_pkg_info = write_pkg_info +_patch_distribution_metadata_write_pkg_info() + +sequence = tuple, list + +def check_importable(dist, attr, value): + try: + ep = pkg_resources.EntryPoint.parse('x='+value) + assert not ep.extras + except (TypeError,ValueError,AttributeError,AssertionError): + raise DistutilsSetupError( + "%r must be importable 'module:attrs' string (got %r)" + % (attr,value) + ) + + +def assert_string_list(dist, attr, value): + """Verify that value is a string list or None""" + try: + assert ''.join(value)!=value + except (TypeError,ValueError,AttributeError,AssertionError): + raise DistutilsSetupError( + "%r must be a list of strings (got %r)" % (attr,value) + ) +def check_nsp(dist, attr, value): + """Verify that namespace packages are valid""" + assert_string_list(dist,attr,value) + for nsp in value: + if not dist.has_contents_for(nsp): + raise DistutilsSetupError( + "Distribution contains no modules or packages for " + + "namespace package %r" % nsp + ) + if '.' in nsp: + parent = '.'.join(nsp.split('.')[:-1]) + if parent not in value: + distutils.log.warn( + "WARNING: %r is declared as a package namespace, but %r" + " is not: please correct this in setup.py", nsp, parent + ) + +def check_extras(dist, attr, value): + """Verify that extras_require mapping is valid""" + try: + for k,v in value.items(): + if ':' in k: + k,m = k.split(':',1) + if pkg_resources.invalid_marker(m): + raise DistutilsSetupError("Invalid environment marker: "+m) + list(pkg_resources.parse_requirements(v)) + except (TypeError,ValueError,AttributeError): + raise DistutilsSetupError( + "'extras_require' must be a dictionary whose values are " + "strings or lists of strings containing valid project/version " + "requirement specifiers." 
+ ) + +def assert_bool(dist, attr, value): + """Verify that value is True, False, 0, or 1""" + if bool(value) != value: + tmpl = "{attr!r} must be a boolean value (got {value!r})" + raise DistutilsSetupError(tmpl.format(attr=attr, value=value)) + + +def check_requirements(dist, attr, value): + """Verify that install_requires is a valid requirements list""" + try: + list(pkg_resources.parse_requirements(value)) + except (TypeError, ValueError) as error: + tmpl = ( + "{attr!r} must be a string or list of strings " + "containing valid project/version requirement specifiers; {error}" + ) + raise DistutilsSetupError(tmpl.format(attr=attr, error=error)) + +def check_entry_points(dist, attr, value): + """Verify that entry_points map is parseable""" + try: + pkg_resources.EntryPoint.parse_map(value) + except ValueError as e: + raise DistutilsSetupError(e) + +def check_test_suite(dist, attr, value): + if not isinstance(value, six.string_types): + raise DistutilsSetupError("test_suite must be a string") + +def check_package_data(dist, attr, value): + """Verify that value is a dictionary of package names to glob lists""" + if isinstance(value,dict): + for k,v in value.items(): + if not isinstance(k,str): break + try: iter(v) + except TypeError: + break + else: + return + raise DistutilsSetupError( + attr+" must be a dictionary mapping package names to lists of " + "wildcard patterns" + ) + +def check_packages(dist, attr, value): + for pkgname in value: + if not re.match(r'\w+(\.\w+)*', pkgname): + distutils.log.warn( + "WARNING: %r not a valid package name; please use only " + ".-separated package names in setup.py", pkgname + ) + + +class Distribution(_Distribution): + """Distribution with support for features, tests, and package data + + This is an enhanced version of 'distutils.dist.Distribution' that + effectively adds the following new optional keyword arguments to 'setup()': + + 'install_requires' -- a string or sequence of strings specifying project + versions that the distribution requires when installed, in the format + used by 'pkg_resources.require()'. They will be installed + automatically when the package is installed. If you wish to use + packages that are not available in PyPI, or want to give your users an + alternate download location, you can add a 'find_links' option to the + '[easy_install]' section of your project's 'setup.cfg' file, and then + setuptools will scan the listed web pages for links that satisfy the + requirements. + + 'extras_require' -- a dictionary mapping names of optional "extras" to the + additional requirement(s) that using those extras incurs. For example, + this:: + + extras_require = dict(reST = ["docutils>=0.3", "reSTedit"]) + + indicates that the distribution can optionally provide an extra + capability called "reST", but it can only be used if docutils and + reSTedit are installed. If the user installs your package using + EasyInstall and requests one of your extras, the corresponding + additional requirements will be installed if needed. + + 'features' **deprecated** -- a dictionary mapping option names to + 'setuptools.Feature' + objects. Features are a portion of the distribution that can be + included or excluded based on user options, inter-feature dependencies, + and availability on the current system. Excluded features are omitted + from all setup commands, including source and binary distributions, so + you can create multiple distributions from the same source tree. 
+ Feature names should be valid Python identifiers, except that they may + contain the '-' (minus) sign. Features can be included or excluded + via the command line options '--with-X' and '--without-X', where 'X' is + the name of the feature. Whether a feature is included by default, and + whether you are allowed to control this from the command line, is + determined by the Feature object. See the 'Feature' class for more + information. + + 'test_suite' -- the name of a test suite to run for the 'test' command. + If the user runs 'python setup.py test', the package will be installed, + and the named test suite will be run. The format is the same as + would be used on a 'unittest.py' command line. That is, it is the + dotted name of an object to import and call to generate a test suite. + + 'package_data' -- a dictionary mapping package names to lists of filenames + or globs to use to find data files contained in the named packages. + If the dictionary has filenames or globs listed under '""' (the empty + string), those names will be searched for in every package, in addition + to any names for the specific package. Data files found using these + names/globs will be installed along with the package, in the same + location as the package. Note that globs are allowed to reference + the contents of non-package subdirectories, as long as you use '/' as + a path separator. (Globs are automatically converted to + platform-specific paths at runtime.) + + In addition to these new keywords, this class also has several new methods + for manipulating the distribution's contents. For example, the 'include()' + and 'exclude()' methods can be thought of as in-place add and subtract + commands that add or remove packages, modules, extensions, and so on from + the distribution. They are used by the feature subsystem to configure the + distribution for the included and excluded features. + """ + + _patched_dist = None + + def patch_missing_pkg_info(self, attrs): + # Fake up a replacement for the data that would normally come from + # PKG-INFO, but which might not yet be built if this is a fresh + # checkout. 
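+ # (the matching working_set entry has its version overridden below)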
+ # + if not attrs or 'name' not in attrs or 'version' not in attrs: + return + key = pkg_resources.safe_name(str(attrs['name'])).lower() + dist = pkg_resources.working_set.by_key.get(key) + if dist is not None and not dist.has_metadata('PKG-INFO'): + dist._version = pkg_resources.safe_version(str(attrs['version'])) + self._patched_dist = dist + + def __init__(self, attrs=None): + have_package_data = hasattr(self, "package_data") + if not have_package_data: + self.package_data = {} + _attrs_dict = attrs or {} + if 'features' in _attrs_dict or 'require_features' in _attrs_dict: + Feature.warn_deprecated() + self.require_features = [] + self.features = {} + self.dist_files = [] + self.src_root = attrs and attrs.pop("src_root", None) + self.patch_missing_pkg_info(attrs) + # Make sure we have any eggs needed to interpret 'attrs' + if attrs is not None: + self.dependency_links = attrs.pop('dependency_links', []) + assert_string_list(self,'dependency_links',self.dependency_links) + if attrs and 'setup_requires' in attrs: + self.fetch_build_eggs(attrs['setup_requires']) + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + vars(self).setdefault(ep.name, None) + _Distribution.__init__(self,attrs) + if isinstance(self.metadata.version, numbers.Number): + # Some people apparently take "version number" too literally :) + self.metadata.version = str(self.metadata.version) + + if self.metadata.version is not None: + try: + ver = packaging.version.Version(self.metadata.version) + normalized_version = str(ver) + if self.metadata.version != normalized_version: + warnings.warn( + "Normalizing '%s' to '%s'" % ( + self.metadata.version, + normalized_version, + ) + ) + self.metadata.version = normalized_version + except (packaging.version.InvalidVersion, TypeError): + warnings.warn( + "The version specified (%r) is an invalid version, this " + "may not work as expected with newer versions of " + "setuptools, pip, and PyPI. Please see PEP 440 for more " + "details." 
% self.metadata.version + ) + + def parse_command_line(self): + """Process features after parsing command line options""" + result = _Distribution.parse_command_line(self) + if self.features: + self._finalize_features() + return result + + def _feature_attrname(self,name): + """Convert feature name to corresponding option attribute name""" + return 'with_'+name.replace('-','_') + + def fetch_build_eggs(self, requires): + """Resolve pre-setup requirements""" + resolved_dists = pkg_resources.working_set.resolve( + pkg_resources.parse_requirements(requires), + installer=self.fetch_build_egg, + replace_conflicting=True, + ) + for dist in resolved_dists: + pkg_resources.working_set.add(dist, replace=True) + + def finalize_options(self): + _Distribution.finalize_options(self) + if self.features: + self._set_global_opts_from_features() + + for ep in pkg_resources.iter_entry_points('distutils.setup_keywords'): + value = getattr(self,ep.name,None) + if value is not None: + ep.require(installer=self.fetch_build_egg) + ep.load()(self, ep.name, value) + if getattr(self, 'convert_2to3_doctests', None): + # XXX may convert to set here when we can rely on set being builtin + self.convert_2to3_doctests = [os.path.abspath(p) for p in self.convert_2to3_doctests] + else: + self.convert_2to3_doctests = [] + + def get_egg_cache_dir(self): + egg_cache_dir = os.path.join(os.curdir, '.eggs') + if not os.path.exists(egg_cache_dir): + os.mkdir(egg_cache_dir) + windows_support.hide_file(egg_cache_dir) + readme_txt_filename = os.path.join(egg_cache_dir, 'README.txt') + with open(readme_txt_filename, 'w') as f: + f.write('This directory contains eggs that were downloaded ' + 'by setuptools to build, test, and run plug-ins.\n\n') + f.write('This directory caches those eggs to prevent ' + 'repeated downloads.\n\n') + f.write('However, it is safe to delete this directory.\n\n') + + return egg_cache_dir + + def fetch_build_egg(self, req): + """Fetch an egg needed for building""" + + try: + cmd = self._egg_fetcher + cmd.package_index.to_scan = [] + except AttributeError: + from setuptools.command.easy_install import easy_install + dist = self.__class__({'script_args':['easy_install']}) + dist.parse_config_files() + opts = dist.get_option_dict('easy_install') + keep = ( + 'find_links', 'site_dirs', 'index_url', 'optimize', + 'site_dirs', 'allow_hosts' + ) + for key in list(opts): + if key not in keep: + del opts[key] # don't use any other settings + if self.dependency_links: + links = self.dependency_links[:] + if 'find_links' in opts: + links = opts['find_links'][1].split() + links + opts['find_links'] = ('setup', links) + install_dir = self.get_egg_cache_dir() + cmd = easy_install( + dist, args=["x"], install_dir=install_dir, exclude_scripts=True, + always_copy=False, build_directory=None, editable=False, + upgrade=False, multi_version=True, no_report=True, user=False + ) + cmd.ensure_finalized() + self._egg_fetcher = cmd + return cmd.easy_install(req) + + def _set_global_opts_from_features(self): + """Add --with-X/--without-X options based on optional features""" + + go = [] + no = self.negative_opt.copy() + + for name,feature in self.features.items(): + self._set_feature(name,None) + feature.validate(self) + + if feature.optional: + descr = feature.description + incdef = ' (default)' + excdef='' + if not feature.include_by_default(): + excdef, incdef = incdef, excdef + + go.append(('with-'+name, None, 'include '+descr+incdef)) + go.append(('without-'+name, None, 'exclude '+descr+excdef)) + no['without-'+name] = 
'with-'+name + + self.global_options = self.feature_options = go + self.global_options + self.negative_opt = self.feature_negopt = no + + def _finalize_features(self): + """Add/remove features and resolve dependencies between them""" + + # First, flag all the enabled items (and thus their dependencies) + for name,feature in self.features.items(): + enabled = self.feature_is_included(name) + if enabled or (enabled is None and feature.include_by_default()): + feature.include_in(self) + self._set_feature(name,1) + + # Then disable the rest, so that off-by-default features don't + # get flagged as errors when they're required by an enabled feature + for name,feature in self.features.items(): + if not self.feature_is_included(name): + feature.exclude_from(self) + self._set_feature(name,0) + + def get_command_class(self, command): + """Pluggable version of get_command_class()""" + if command in self.cmdclass: + return self.cmdclass[command] + + for ep in pkg_resources.iter_entry_points('distutils.commands',command): + ep.require(installer=self.fetch_build_egg) + self.cmdclass[command] = cmdclass = ep.load() + return cmdclass + else: + return _Distribution.get_command_class(self, command) + + def print_commands(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() + self.cmdclass[ep.name] = cmdclass + return _Distribution.print_commands(self) + + def get_command_list(self): + for ep in pkg_resources.iter_entry_points('distutils.commands'): + if ep.name not in self.cmdclass: + # don't require extras as the commands won't be invoked + cmdclass = ep.resolve() + self.cmdclass[ep.name] = cmdclass + return _Distribution.get_command_list(self) + + def _set_feature(self,name,status): + """Set feature's inclusion status""" + setattr(self,self._feature_attrname(name),status) + + def feature_is_included(self,name): + """Return 1 if feature is included, 0 if excluded, 'None' if unknown""" + return getattr(self,self._feature_attrname(name)) + + def include_feature(self,name): + """Request inclusion of feature named 'name'""" + + if self.feature_is_included(name)==0: + descr = self.features[name].description + raise DistutilsOptionError( + descr + " is required, but was excluded or is not available" + ) + self.features[name].include_in(self) + self._set_feature(name,1) + + def include(self,**attrs): + """Add items to distribution that are named in keyword arguments + + For example, 'dist.include(py_modules=["x"])' would add 'x' to + the distribution's 'py_modules' attribute, if it was not already + there. + + Currently, this method only supports inclusion for attributes that are + lists or tuples. If you need to add support for adding to other + attributes in this or a subclass, you can add an '_include_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'include()'. So, 'dist.include(foo={"bar":"baz"})' + will try to call 'dist._include_foo({"bar":"baz"})', which can then + handle whatever special inclusion logic is needed. + """ + for k,v in attrs.items(): + include = getattr(self, '_include_'+k, None) + if include: + include(v) + else: + self._include_misc(k,v) + + def exclude_package(self,package): + """Remove packages, modules, and extensions in named package""" + + pfx = package+'.'
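+ # filter out the package itself and anything under its dotted prefix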
+ if self.packages: + self.packages = [ + p for p in self.packages + if p != package and not p.startswith(pfx) + ] + + if self.py_modules: + self.py_modules = [ + p for p in self.py_modules + if p != package and not p.startswith(pfx) + ] + + if self.ext_modules: + self.ext_modules = [ + p for p in self.ext_modules + if p.name != package and not p.name.startswith(pfx) + ] + + def has_contents_for(self,package): + """Return true if 'exclude_package(package)' would do something""" + + pfx = package+'.' + + for p in self.iter_distribution_names(): + if p==package or p.startswith(pfx): + return True + + def _exclude_misc(self,name,value): + """Handle 'exclude()' for list/tuple attrs without a special handler""" + if not isinstance(value,sequence): + raise DistutilsSetupError( + "%s: setting must be a list or tuple (%r)" % (name, value) + ) + try: + old = getattr(self,name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is not None and not isinstance(old,sequence): + raise DistutilsSetupError( + name+": this setting cannot be changed via include/exclude" + ) + elif old: + setattr(self,name,[item for item in old if item not in value]) + + def _include_misc(self,name,value): + """Handle 'include()' for list/tuple attrs without a special handler""" + + if not isinstance(value,sequence): + raise DistutilsSetupError( + "%s: setting must be a list (%r)" % (name, value) + ) + try: + old = getattr(self,name) + except AttributeError: + raise DistutilsSetupError( + "%s: No such distribution setting" % name + ) + if old is None: + setattr(self,name,value) + elif not isinstance(old,sequence): + raise DistutilsSetupError( + name+": this setting cannot be changed via include/exclude" + ) + else: + setattr(self,name,old+[item for item in value if item not in old]) + + def exclude(self,**attrs): + """Remove items from distribution that are named in keyword arguments + + For example, 'dist.exclude(py_modules=["x"])' would remove 'x' from + the distribution's 'py_modules' attribute. Excluding packages uses + the 'exclude_package()' method, so all of the package's contained + packages, modules, and extensions are also excluded. + + Currently, this method only supports exclusion from attributes that are + lists or tuples. If you need to add support for excluding from other + attributes in this or a subclass, you can add an '_exclude_X' method, + where 'X' is the name of the attribute. The method will be called with + the value passed to 'exclude()'. So, 'dist.exclude(foo={"bar":"baz"})' + will try to call 'dist._exclude_foo({"bar":"baz"})', which can then + handle whatever special exclusion logic is needed. + """ + for k,v in attrs.items(): + exclude = getattr(self, '_exclude_'+k, None) + if exclude: + exclude(v) + else: + self._exclude_misc(k,v) + + def _exclude_packages(self,packages): + if not isinstance(packages,sequence): + raise DistutilsSetupError( + "packages: setting must be a list or tuple (%r)" % (packages,) + ) + list(map(self.exclude_package, packages)) + + def _parse_command_opts(self, parser, args): + # Remove --with-X/--without-X options when processing command args + self.global_options = self.__class__.global_options + self.negative_opt = self.__class__.negative_opt + + # First, expand any aliases + command = args[0] + aliases = self.get_option_dict('aliases') + while command in aliases: + src,alias = aliases[command] + del aliases[command] # ensure each alias can expand only once! 
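+ # aliases are tokenized with shell-style quoting rules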
+ import shlex + args[:1] = shlex.split(alias,True) + command = args[0] + + nargs = _Distribution._parse_command_opts(self, parser, args) + + # Handle commands that want to consume all remaining arguments + cmd_class = self.get_command_class(command) + if getattr(cmd_class,'command_consumes_arguments',None): + self.get_option_dict(command)['args'] = ("command line", nargs) + if nargs is not None: + return [] + + return nargs + + def get_cmdline_options(self): + """Return a '{cmd: {opt:val}}' map of all command-line options + + Option names are all long, but do not include the leading '--', and + contain dashes rather than underscores. If the option doesn't take + an argument (e.g. '--quiet'), the 'val' is 'None'. + + Note that options provided by config files are intentionally excluded. + """ + + d = {} + + for cmd,opts in self.command_options.items(): + + for opt,(src,val) in opts.items(): + + if src != "command line": + continue + + opt = opt.replace('_','-') + + if val==0: + cmdobj = self.get_command_obj(cmd) + neg_opt = self.negative_opt.copy() + neg_opt.update(getattr(cmdobj,'negative_opt',{})) + for neg,pos in neg_opt.items(): + if pos==opt: + opt=neg + val=None + break + else: + raise AssertionError("Shouldn't be able to get here") + + elif val==1: + val = None + + d.setdefault(cmd,{})[opt] = val + + return d + + def iter_distribution_names(self): + """Yield all packages, modules, and extension names in distribution""" + + for pkg in self.packages or (): + yield pkg + + for module in self.py_modules or (): + yield module + + for ext in self.ext_modules or (): + if isinstance(ext,tuple): + name, buildinfo = ext + else: + name = ext.name + if name.endswith('module'): + name = name[:-6] + yield name + + def handle_display_options(self, option_order): + """If there were any non-global "display-only" options + (--help-commands or the metadata display options) on the command + line, display the requested info and return true; else return + false. + """ + import sys + + if six.PY2 or self.help_commands: + return _Distribution.handle_display_options(self, option_order) + + # Stdout may be StringIO (e.g. in tests) + import io + if not isinstance(sys.stdout, io.TextIOWrapper): + return _Distribution.handle_display_options(self, option_order) + + # Don't wrap stdout if utf-8 is already the encoding. Provides + # workaround for #334. + if sys.stdout.encoding.lower() in ('utf-8', 'utf8'): + return _Distribution.handle_display_options(self, option_order) + + # Print metadata in UTF-8 no matter the platform + encoding = sys.stdout.encoding + errors = sys.stdout.errors + newline = sys.platform != 'win32' and '\n' or None + line_buffering = sys.stdout.line_buffering + + sys.stdout = io.TextIOWrapper( + sys.stdout.detach(), 'utf-8', errors, newline, line_buffering) + try: + return _Distribution.handle_display_options(self, option_order) + finally: + sys.stdout = io.TextIOWrapper( + sys.stdout.detach(), encoding, errors, newline, line_buffering) + + +# Install it throughout the distutils +for module in distutils.dist, distutils.core, distutils.cmd: + module.Distribution = Distribution + + +class Feature: + """ + **deprecated** -- The `Feature` facility was never completely implemented + or supported, `has reported issues + <https://github.com/pypa/setuptools/issues/58>`_ and will be removed in + a future version. 
+ + A subset of the distribution that can be excluded if unneeded/wanted + + Features are created using these keyword arguments: + + 'description' -- a short, human readable description of the feature, to + be used in error messages, and option help messages. + + 'standard' -- if true, the feature is included by default if it is + available on the current system. Otherwise, the feature is only + included if requested via a command line '--with-X' option, or if + another included feature requires it. The default setting is 'False'. + + 'available' -- if true, the feature is available for installation on the + current system. The default setting is 'True'. + + 'optional' -- if true, the feature's inclusion can be controlled from the + command line, using the '--with-X' or '--without-X' options. If + false, the feature's inclusion status is determined automatically, + based on 'available', 'standard', and whether any other feature + requires it. The default setting is 'True'. + + 'require_features' -- a string or sequence of strings naming features + that should also be included if this feature is included. Defaults to + empty list. May also contain 'Require' objects that should be + added/removed from the distribution. + + 'remove' -- a string or list of strings naming packages to be removed + from the distribution if this feature is *not* included. If the + feature *is* included, this argument is ignored. This argument exists + to support removing features that "crosscut" a distribution, such as + defining a 'tests' feature that removes all the 'tests' subpackages + provided by other features. The default for this argument is an empty + list. (Note: the named package(s) or modules must exist in the base + distribution when the 'setup()' function is initially called.) + + other keywords -- any other keyword arguments are saved, and passed to + the distribution's 'include()' and 'exclude()' methods when the + feature is included or excluded, respectively. So, for example, you + could pass 'packages=["a","b"]' to cause packages 'a' and 'b' to be + added or removed from the distribution as appropriate. + + A feature must include at least one 'requires', 'remove', or other + keyword argument. Otherwise, it can't affect the distribution in any way. + Note also that you can subclass 'Feature' to create your own specialized + feature types that modify the distribution in other ways when included or + excluded. See the docstrings for the various methods here for more detail. + Aside from the methods, the only feature attributes that distributions look + at are 'description' and 'optional'. + """ + + @staticmethod + def warn_deprecated(): + warnings.warn( + "Features are deprecated and will be removed in a future " + "version.
See https://github.com/pypa/setuptools/issues/65.", + DeprecationWarning, + stacklevel=3, + ) + + def __init__(self, description, standard=False, available=True, + optional=True, require_features=(), remove=(), **extras): + self.warn_deprecated() + + self.description = description + self.standard = standard + self.available = available + self.optional = optional + if isinstance(require_features,(str,Require)): + require_features = require_features, + + self.require_features = [ + r for r in require_features if isinstance(r,str) + ] + er = [r for r in require_features if not isinstance(r,str)] + if er: extras['require_features'] = er + + if isinstance(remove,str): + remove = remove, + self.remove = remove + self.extras = extras + + if not remove and not require_features and not extras: + raise DistutilsSetupError( + "Feature %s: must define 'require_features', 'remove', or at least one" + " of 'packages', 'py_modules', etc." % self.description + ) + + def include_by_default(self): + """Should this feature be included by default?""" + return self.available and self.standard + + def include_in(self,dist): + + """Ensure feature and its requirements are included in distribution + + You may override this in a subclass to perform additional operations on + the distribution. Note that this method may be called more than once + per feature, and so should be idempotent. + + """ + + if not self.available: + raise DistutilsPlatformError( + self.description+" is required, " + "but is not available on this platform" + ) + + dist.include(**self.extras) + + for f in self.require_features: + dist.include_feature(f) + + def exclude_from(self,dist): + + """Ensure feature is excluded from distribution + + You may override this in a subclass to perform additional operations on + the distribution. This method will be called at most once per + feature, and only after all included features have been asked to + include themselves. + """ + + dist.exclude(**self.extras) + + if self.remove: + for item in self.remove: + dist.exclude_package(item) + + def validate(self,dist): + + """Verify that feature makes sense in context of distribution + + This method is called by the distribution just before it parses its + command line. It checks to ensure that the 'remove' attribute, if any, + contains only valid package/module names that are present in the base + distribution when 'setup()' is called. You may override it in a + subclass to perform any other required validation of the feature + against a target distribution. + """ + + for item in self.remove: + if not dist.has_contents_for(item): + raise DistutilsSetupError( + "%s wants to be able to remove %s, but the distribution" + " doesn't contain any packages or modules under %s" + % (self.description, item, item) + ) diff --git a/venv/lib/python3.5/site-packages/setuptools/extension.py b/venv/lib/python3.5/site-packages/setuptools/extension.py new file mode 100644 index 0000000..d10609b --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/extension.py @@ -0,0 +1,57 @@ +import sys +import re +import functools +import distutils.core +import distutils.errors +import distutils.extension + +from setuptools.extern.six.moves import map + +from .dist import _get_unpatched +from . import msvc9_support + +_Extension = _get_unpatched(distutils.core.Extension) + +msvc9_support.patch_for_specialized_compiler() + +def _have_cython(): + """ + Return True if Cython can be imported.
+ """ + cython_impl = 'Cython.Distutils.build_ext', + try: + # from (cython_impl) import build_ext + __import__(cython_impl, fromlist=['build_ext']).build_ext + return True + except Exception: + pass + return False + +# for compatibility +have_pyrex = _have_cython + + +class Extension(_Extension): + """Extension that uses '.c' files in place of '.pyx' files""" + + def _convert_pyx_sources_to_lang(self): + """ + Replace sources with .pyx extensions to sources with the target + language extension. This mechanism allows language authors to supply + pre-converted sources but to prefer the .pyx sources. + """ + if _have_cython(): + # the build has Cython, so allow it to compile the .pyx files + return + lang = self.language or '' + target_ext = '.cpp' if lang.lower() == 'c++' else '.c' + sub = functools.partial(re.sub, '.pyx$', target_ext) + self.sources = list(map(sub, self.sources)) + +class Library(Extension): + """Just like a regular Extension, but built as a library instead""" + +distutils.core.Extension = Extension +distutils.extension.Extension = Extension +if 'distutils.command.build_ext' in sys.modules: + sys.modules['distutils.command.build_ext'].Extension = Extension diff --git a/venv/lib/python3.5/site-packages/setuptools/extern/__init__.py b/venv/lib/python3.5/site-packages/setuptools/extern/__init__.py new file mode 100644 index 0000000..6859aa5 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/extern/__init__.py @@ -0,0 +1,5 @@ +from pkg_resources.extern import VendorImporter + + +names = 'six', +VendorImporter(__name__, names, 'pkg_resources._vendor').install() diff --git a/venv/lib/python3.5/site-packages/setuptools/launch.py b/venv/lib/python3.5/site-packages/setuptools/launch.py new file mode 100644 index 0000000..06e15e1 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/launch.py @@ -0,0 +1,35 @@ +""" +Launch the Python script on the command line after +setuptools is bootstrapped via import. +""" + +# Note that setuptools gets imported implicitly by the +# invocation of this script using python -m setuptools.launch + +import tokenize +import sys + + +def run(): + """ + Run the script in sys.argv[1] as if it had + been invoked naturally. + """ + __builtins__ + script_name = sys.argv[1] + namespace = dict( + __file__ = script_name, + __name__ = '__main__', + __doc__ = None, + ) + sys.argv[:] = sys.argv[1:] + + open_ = getattr(tokenize, 'open', open) + script = open_(script_name).read() + norm_script = script.replace('\\r\\n', '\\n') + code = compile(norm_script, script_name, 'exec') + exec(code, namespace) + + +if __name__ == '__main__': + run() diff --git a/venv/lib/python3.5/site-packages/setuptools/lib2to3_ex.py b/venv/lib/python3.5/site-packages/setuptools/lib2to3_ex.py new file mode 100644 index 0000000..feef591 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/lib2to3_ex.py @@ -0,0 +1,58 @@ +""" +Customized Mixin2to3 support: + + - adds support for converting doctests + + +This module raises an ImportError on Python 2. 
+""" + +from distutils.util import Mixin2to3 as _Mixin2to3 +from distutils import log +from lib2to3.refactor import RefactoringTool, get_fixers_from_package +import setuptools + +class DistutilsRefactoringTool(RefactoringTool): + def log_error(self, msg, *args, **kw): + log.error(msg, *args) + + def log_message(self, msg, *args): + log.info(msg, *args) + + def log_debug(self, msg, *args): + log.debug(msg, *args) + +class Mixin2to3(_Mixin2to3): + def run_2to3(self, files, doctests = False): + # See of the distribution option has been set, otherwise check the + # setuptools default. + if self.distribution.use_2to3 is not True: + return + if not files: + return + log.info("Fixing "+" ".join(files)) + self.__build_fixer_names() + self.__exclude_fixers() + if doctests: + if setuptools.run_2to3_on_doctests: + r = DistutilsRefactoringTool(self.fixer_names) + r.refactor(files, write=True, doctests_only=True) + else: + _Mixin2to3.run_2to3(self, files) + + def __build_fixer_names(self): + if self.fixer_names: return + self.fixer_names = [] + for p in setuptools.lib2to3_fixer_packages: + self.fixer_names.extend(get_fixers_from_package(p)) + if self.distribution.use_2to3_fixers is not None: + for p in self.distribution.use_2to3_fixers: + self.fixer_names.extend(get_fixers_from_package(p)) + + def __exclude_fixers(self): + excluded_fixers = getattr(self, 'exclude_fixers', []) + if self.distribution.use_2to3_exclude_fixers is not None: + excluded_fixers.extend(self.distribution.use_2to3_exclude_fixers) + for fixer_name in excluded_fixers: + if fixer_name in self.fixer_names: + self.fixer_names.remove(fixer_name) diff --git a/venv/lib/python3.5/site-packages/setuptools/msvc9_support.py b/venv/lib/python3.5/site-packages/setuptools/msvc9_support.py new file mode 100644 index 0000000..9d86958 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/msvc9_support.py @@ -0,0 +1,63 @@ +try: + import distutils.msvc9compiler +except Exception: + pass + +unpatched = dict() + +def patch_for_specialized_compiler(): + """ + Patch functions in distutils.msvc9compiler to use the standalone compiler + build for Python (Windows only). Fall back to original behavior when the + standalone compiler is not available. + """ + if 'distutils' not in globals(): + # The module isn't available to be patched + return + + if unpatched: + # Already patched + return + + unpatched.update(vars(distutils.msvc9compiler)) + + distutils.msvc9compiler.find_vcvarsall = find_vcvarsall + distutils.msvc9compiler.query_vcvarsall = query_vcvarsall + +def find_vcvarsall(version): + Reg = distutils.msvc9compiler.Reg + VC_BASE = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f' + key = VC_BASE % ('', version) + try: + # Per-user installs register the compiler path here + productdir = Reg.get_value(key, "installdir") + except KeyError: + try: + # All-user installs on a 64-bit system register here + key = VC_BASE % ('Wow6432Node\\', version) + productdir = Reg.get_value(key, "installdir") + except KeyError: + productdir = None + + if productdir: + import os + vcvarsall = os.path.join(productdir, "vcvarsall.bat") + if os.path.isfile(vcvarsall): + return vcvarsall + + return unpatched['find_vcvarsall'](version) + +def query_vcvarsall(version, *args, **kwargs): + try: + return unpatched['query_vcvarsall'](version, *args, **kwargs) + except distutils.errors.DistutilsPlatformError as exc: + if exc and "vcvarsall.bat" in exc.args[0]: + message = 'Microsoft Visual C++ %0.1f is required (%s).' 
% (version, exc.args[0]) + if int(version) == 9: + # This redirection link is maintained by Microsoft. + # Contact vspython@microsoft.com if it needs updating. + raise distutils.errors.DistutilsPlatformError( + message + ' Get it from http://aka.ms/vcpython27' + ) + raise distutils.errors.DistutilsPlatformError(message) + raise diff --git a/venv/lib/python3.5/site-packages/setuptools/package_index.py b/venv/lib/python3.5/site-packages/setuptools/package_index.py new file mode 100644 index 0000000..c53343e --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/package_index.py @@ -0,0 +1,1069 @@ +"""PyPI and direct package downloading""" +import sys +import os +import re +import shutil +import socket +import base64 +import hashlib +import itertools +from functools import wraps + +try: + from urllib.parse import splituser +except ImportError: + from urllib2 import splituser + +from setuptools.extern import six +from setuptools.extern.six.moves import urllib, http_client, configparser, map + +from pkg_resources import ( + CHECKOUT_DIST, Distribution, BINARY_DIST, normalize_path, SOURCE_DIST, + require, Environment, find_distributions, safe_name, safe_version, + to_filename, Requirement, DEVELOP_DIST, +) +from setuptools import ssl_support +from distutils import log +from distutils.errors import DistutilsError +from fnmatch import translate +from setuptools.py26compat import strip_fragment +from setuptools.py27compat import get_all_headers + +EGG_FRAGMENT = re.compile(r'^egg=([-A-Za-z0-9_.]+)$') +HREF = re.compile("""href\\s*=\\s*['"]?([^'"> ]+)""", re.I) +# this is here to fix emacs' cruddy broken syntax highlighting +PYPI_MD5 = re.compile( + '<a href="([^"#]+)">([^<]+)</a>\n\s+\\(<a (?:title="MD5 hash"\n\s+)' + 'href="[^?]+\?:action=show_md5&digest=([0-9a-f]{32})">md5</a>\\)' +) +URL_SCHEME = re.compile('([-+.a-z0-9]{2,}):',re.I).match +EXTENSIONS = ".tar.gz .tar.bz2 .tar .zip .tgz".split() + +__all__ = [ + 'PackageIndex', 'distros_for_url', 'parse_bdist_wininst', + 'interpret_distro_name', +] + +_SOCKET_TIMEOUT = 15 + +def parse_bdist_wininst(name): + """Return (base,pyversion) or (None,None) for possible .exe name""" + + lower = name.lower() + base, py_ver, plat = None, None, None + + if lower.endswith('.exe'): + if lower.endswith('.win32.exe'): + base = name[:-10] + plat = 'win32' + elif lower.startswith('.win32-py',-16): + py_ver = name[-7:-4] + base = name[:-16] + plat = 'win32' + elif lower.endswith('.win-amd64.exe'): + base = name[:-14] + plat = 'win-amd64' + elif lower.startswith('.win-amd64-py',-20): + py_ver = name[-7:-4] + base = name[:-20] + plat = 'win-amd64' + return base,py_ver,plat + + +def egg_info_for_url(url): + parts = urllib.parse.urlparse(url) + scheme, server, path, parameters, query, fragment = parts + base = urllib.parse.unquote(path.split('/')[-1]) + if server=='sourceforge.net' and base=='download': # XXX Yuck + base = urllib.parse.unquote(path.split('/')[-2]) + if '#' in base: base, fragment = base.split('#',1) + return base,fragment + +def distros_for_url(url, metadata=None): + """Yield egg or source distribution objects that might be found at a URL""" + base, fragment = egg_info_for_url(url) + for dist in distros_for_location(url, base, metadata): yield dist + if fragment: + match = EGG_FRAGMENT.match(fragment) + if match: + for dist in interpret_distro_name( + url, match.group(1), metadata, precedence = CHECKOUT_DIST + ): + yield dist + +def distros_for_location(location, basename, metadata=None): + """Yield egg or source distribution objects based on 
basename""" + if basename.endswith('.egg.zip'): + basename = basename[:-4] # strip the .zip + if basename.endswith('.egg') and '-' in basename: + # only one, unambiguous interpretation + return [Distribution.from_location(location, basename, metadata)] + if basename.endswith('.exe'): + win_base, py_ver, platform = parse_bdist_wininst(basename) + if win_base is not None: + return interpret_distro_name( + location, win_base, metadata, py_ver, BINARY_DIST, platform + ) + # Try source distro extensions (.zip, .tgz, etc.) + # + for ext in EXTENSIONS: + if basename.endswith(ext): + basename = basename[:-len(ext)] + return interpret_distro_name(location, basename, metadata) + return [] # no extension matched + +def distros_for_filename(filename, metadata=None): + """Yield possible egg or source distribution objects based on a filename""" + return distros_for_location( + normalize_path(filename), os.path.basename(filename), metadata + ) + + +def interpret_distro_name( + location, basename, metadata, py_version=None, precedence=SOURCE_DIST, + platform=None + ): + """Generate alternative interpretations of a source distro name + + Note: if `location` is a filesystem filename, you should call + ``pkg_resources.normalize_path()`` on it before passing it to this + routine! + """ + # Generate alternative interpretations of a source distro name + # Because some packages are ambiguous as to name/versions split + # e.g. "adns-python-1.1.0", "egenix-mx-commercial", etc. + # So, we generate each possible interepretation (e.g. "adns, python-1.1.0" + # "adns-python, 1.1.0", and "adns-python-1.1.0, no version"). In practice, + # the spurious interpretations should be ignored, because in the event + # there's also an "adns" package, the spurious "python-1.1.0" version will + # compare lower than any numeric version number, and is therefore unlikely + # to match a request for it. It's still a potential problem, though, and + # in the long run PyPI and the distutils should go for "safe" names and + # versions in distribution archive names (sdist and bdist). + + parts = basename.split('-') + if not py_version and any(re.match('py\d\.\d$', p) for p in parts[2:]): + # it is a bdist_dumb, not an sdist -- bail out + return + + for p in range(1,len(parts)+1): + yield Distribution( + location, metadata, '-'.join(parts[:p]), '-'.join(parts[p:]), + py_version=py_version, precedence = precedence, + platform = platform + ) + +# From Python 2.7 docs +def unique_everseen(iterable, key=None): + "List unique elements, preserving order. Remember all elements ever seen." + # unique_everseen('AAAABBBCCDAABBB') --> A B C D + # unique_everseen('ABBCcAD', str.lower) --> A B C D + seen = set() + seen_add = seen.add + if key is None: + for element in six.moves.filterfalse(seen.__contains__, iterable): + seen_add(element) + yield element + else: + for element in iterable: + k = key(element) + if k not in seen: + seen_add(k) + yield element + +def unique_values(func): + """ + Wrap a function returning an iterable such that the resulting iterable + only ever yields unique items. 
+ """ + @wraps(func) + def wrapper(*args, **kwargs): + return unique_everseen(func(*args, **kwargs)) + return wrapper + +REL = re.compile("""<([^>]*\srel\s*=\s*['"]?([^'">]+)[^>]*)>""", re.I) +# this line is here to fix emacs' cruddy broken syntax highlighting + +@unique_values +def find_external_links(url, page): + """Find rel="homepage" and rel="download" links in `page`, yielding URLs""" + + for match in REL.finditer(page): + tag, rel = match.groups() + rels = set(map(str.strip, rel.lower().split(','))) + if 'homepage' in rels or 'download' in rels: + for match in HREF.finditer(tag): + yield urllib.parse.urljoin(url, htmldecode(match.group(1))) + + for tag in ("<th>Home Page", "<th>Download URL"): + pos = page.find(tag) + if pos!=-1: + match = HREF.search(page,pos) + if match: + yield urllib.parse.urljoin(url, htmldecode(match.group(1))) + +user_agent = "Python-urllib/%s setuptools/%s" % ( + sys.version[:3], require('setuptools')[0].version +) + +class ContentChecker(object): + """ + A null content checker that defines the interface for checking content + """ + def feed(self, block): + """ + Feed a block of data to the hash. + """ + return + + def is_valid(self): + """ + Check the hash. Return False if validation fails. + """ + return True + + def report(self, reporter, template): + """ + Call reporter with information about the checker (hash name) + substituted into the template. + """ + return + +class HashChecker(ContentChecker): + pattern = re.compile( + r'(?P<hash_name>sha1|sha224|sha384|sha256|sha512|md5)=' + r'(?P<expected>[a-f0-9]+)' + ) + + def __init__(self, hash_name, expected): + self.hash_name = hash_name + self.hash = hashlib.new(hash_name) + self.expected = expected + + @classmethod + def from_url(cls, url): + "Construct a (possibly null) ContentChecker from a URL" + fragment = urllib.parse.urlparse(url)[-1] + if not fragment: + return ContentChecker() + match = cls.pattern.search(fragment) + if not match: + return ContentChecker() + return cls(**match.groupdict()) + + def feed(self, block): + self.hash.update(block) + + def is_valid(self): + return self.hash.hexdigest() == self.expected + + def report(self, reporter, template): + msg = template % self.hash_name + return reporter(msg) + + +class PackageIndex(Environment): + """A distribution index that scans web pages for download URLs""" + + def __init__( + self, index_url="https://pypi.python.org/simple", hosts=('*',), + ca_bundle=None, verify_ssl=True, *args, **kw + ): + Environment.__init__(self,*args,**kw) + self.index_url = index_url + "/"[:not index_url.endswith('/')] + self.scanned_urls = {} + self.fetched_urls = {} + self.package_pages = {} + self.allows = re.compile('|'.join(map(translate,hosts))).match + self.to_scan = [] + if verify_ssl and ssl_support.is_available and (ca_bundle or ssl_support.find_ca_bundle()): + self.opener = ssl_support.opener_for(ca_bundle) + else: self.opener = urllib.request.urlopen + + def process_url(self, url, retrieve=False): + """Evaluate a URL as a possible download, and maybe retrieve it""" + if url in self.scanned_urls and not retrieve: + return + self.scanned_urls[url] = True + if not URL_SCHEME(url): + self.process_filename(url) + return + else: + dists = list(distros_for_url(url)) + if dists: + if not self.url_ok(url): + return + self.debug("Found link: %s", url) + + if dists or not retrieve or url in self.fetched_urls: + list(map(self.add, dists)) + return # don't need the actual page + + if not self.url_ok(url): + self.fetched_urls[url] = True + return + + 
self.info("Reading %s", url) + self.fetched_urls[url] = True # prevent multiple fetch attempts + f = self.open_url(url, "Download error on %s: %%s -- Some packages may not be found!" % url) + if f is None: return + self.fetched_urls[f.url] = True + if 'html' not in f.headers.get('content-type', '').lower(): + f.close() # not html, we can't process it + return + + base = f.url # handle redirects + page = f.read() + if not isinstance(page, str): # We are in Python 3 and got bytes. We want str. + if isinstance(f, urllib.error.HTTPError): + # Errors have no charset, assume latin1: + charset = 'latin-1' + else: + charset = f.headers.get_param('charset') or 'latin-1' + page = page.decode(charset, "ignore") + f.close() + for match in HREF.finditer(page): + link = urllib.parse.urljoin(base, htmldecode(match.group(1))) + self.process_url(link) + if url.startswith(self.index_url) and getattr(f,'code',None)!=404: + page = self.process_index(url, page) + + def process_filename(self, fn, nested=False): + # process filenames or directories + if not os.path.exists(fn): + self.warn("Not found: %s", fn) + return + + if os.path.isdir(fn) and not nested: + path = os.path.realpath(fn) + for item in os.listdir(path): + self.process_filename(os.path.join(path,item), True) + + dists = distros_for_filename(fn) + if dists: + self.debug("Found: %s", fn) + list(map(self.add, dists)) + + def url_ok(self, url, fatal=False): + s = URL_SCHEME(url) + if (s and s.group(1).lower()=='file') or self.allows(urllib.parse.urlparse(url)[1]): + return True + msg = ("\nNote: Bypassing %s (disallowed host; see " + "http://bit.ly/1dg9ijs for details).\n") + if fatal: + raise DistutilsError(msg % url) + else: + self.warn(msg, url) + + def scan_egg_links(self, search_path): + dirs = filter(os.path.isdir, search_path) + egg_links = ( + (path, entry) + for path in dirs + for entry in os.listdir(path) + if entry.endswith('.egg-link') + ) + list(itertools.starmap(self.scan_egg_link, egg_links)) + + def scan_egg_link(self, path, entry): + with open(os.path.join(path, entry)) as raw_lines: + # filter non-empty lines + lines = list(filter(None, map(str.strip, raw_lines))) + + if len(lines) != 2: + # format is not recognized; punt + return + + egg_path, setup_path = lines + + for dist in find_distributions(os.path.join(path, egg_path)): + dist.location = os.path.join(path, *lines) + dist.precedence = SOURCE_DIST + self.add(dist) + + def process_index(self,url,page): + """Process the contents of a PyPI page""" + def scan(link): + # Process a URL to see if it's for a package page + if link.startswith(self.index_url): + parts = list(map( + urllib.parse.unquote, link[len(self.index_url):].split('/') + )) + if len(parts)==2 and '#' not in parts[1]: + # it's a package page, sanitize and index it + pkg = safe_name(parts[0]) + ver = safe_version(parts[1]) + self.package_pages.setdefault(pkg.lower(),{})[link] = True + return to_filename(pkg), to_filename(ver) + return None, None + + # process an index page into the package-page index + for match in HREF.finditer(page): + try: + scan(urllib.parse.urljoin(url, htmldecode(match.group(1)))) + except ValueError: + pass + + pkg, ver = scan(url) # ensure this page is in the page index + if pkg: + # process individual package page + for new_url in find_external_links(url, page): + # Process the found URL + base, frag = egg_info_for_url(new_url) + if base.endswith('.py') and not frag: + if ver: + new_url+='#egg=%s-%s' % (pkg,ver) + else: + self.need_version_info(url) + self.scan_url(new_url) + + return 
PYPI_MD5.sub( + lambda m: '<a href="%s#md5=%s">%s</a>' % m.group(1,3,2), page + ) + else: + return "" # no sense double-scanning non-package pages + + def need_version_info(self, url): + self.scan_all( + "Page at %s links to .py file(s) without version info; an index " + "scan is required.", url + ) + + def scan_all(self, msg=None, *args): + if self.index_url not in self.fetched_urls: + if msg: self.warn(msg,*args) + self.info( + "Scanning index of all packages (this may take a while)" + ) + self.scan_url(self.index_url) + + def find_packages(self, requirement): + self.scan_url(self.index_url + requirement.unsafe_name+'/') + + if not self.package_pages.get(requirement.key): + # Fall back to safe version of the name + self.scan_url(self.index_url + requirement.project_name+'/') + + if not self.package_pages.get(requirement.key): + # We couldn't find the target package, so search the index page too + self.not_found_in_index(requirement) + + for url in list(self.package_pages.get(requirement.key,())): + # scan each page that might be related to the desired package + self.scan_url(url) + + def obtain(self, requirement, installer=None): + self.prescan() + self.find_packages(requirement) + for dist in self[requirement.key]: + if dist in requirement: + return dist + self.debug("%s does not match %s", requirement, dist) + return super(PackageIndex, self).obtain(requirement,installer) + + def check_hash(self, checker, filename, tfp): + """ + checker is a ContentChecker + """ + checker.report(self.debug, + "Validating %%s checksum for %s" % filename) + if not checker.is_valid(): + tfp.close() + os.unlink(filename) + raise DistutilsError( + "%s validation failed for %s; " + "possible download problem?" % ( + checker.hash.name, os.path.basename(filename)) + ) + + def add_find_links(self, urls): + """Add `urls` to the list that will be prescanned for searches""" + for url in urls: + if ( + self.to_scan is None # if we have already "gone online" + or not URL_SCHEME(url) # or it's a local file/directory + or url.startswith('file:') + or list(distros_for_url(url)) # or a direct package link + ): + # then go ahead and process it now + self.scan_url(url) + else: + # otherwise, defer retrieval till later + self.to_scan.append(url) + + def prescan(self): + """Scan urls scheduled for prescanning (e.g. --find-links)""" + if self.to_scan: + list(map(self.scan_url, self.to_scan)) + self.to_scan = None # from now on, go ahead and process immediately + + def not_found_in_index(self, requirement): + if self[requirement.key]: # we've seen at least one distro + meth, msg = self.info, "Couldn't retrieve index page for %r" + else: # no distros seen for this name, might be misspelled + meth, msg = (self.warn, + "Couldn't find index page for %r (maybe misspelled?)") + meth(msg, requirement.unsafe_name) + self.scan_all() + + def download(self, spec, tmpdir): + """Locate and/or download `spec` to `tmpdir`, returning a local path + + `spec` may be a ``Requirement`` object, or a string containing a URL, + an existing local filename, or a project/version requirement spec + (i.e. the string form of a ``Requirement`` object). If it is the URL + of a .py file with an unambiguous ``#egg=name-version`` tag (i.e., one + that escapes ``-`` as ``_`` throughout), a trivial ``setup.py`` is + automatically created alongside the downloaded file. 
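+
+        A minimal illustrative call (a sketch; the instance and directory
+        names here are placeholders, not taken from this module):
+
+            pi = PackageIndex()
+            local = pi.download('example_pkg==1.0', '/tmp/build')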
+ + If `spec` is a ``Requirement`` object or a string containing a + project/version requirement spec, this method returns the location of + a matching distribution (possibly after downloading it to `tmpdir`). + If `spec` is a locally existing file or directory name, it is simply + returned unchanged. If `spec` is a URL, it is downloaded to a subpath + of `tmpdir`, and the local filename is returned. Various errors may be + raised if a problem occurs during downloading. + """ + if not isinstance(spec,Requirement): + scheme = URL_SCHEME(spec) + if scheme: + # It's a url, download it to tmpdir + found = self._download_url(scheme.group(1), spec, tmpdir) + base, fragment = egg_info_for_url(spec) + if base.endswith('.py'): + found = self.gen_setup(found,fragment,tmpdir) + return found + elif os.path.exists(spec): + # Existing file or directory, just return it + return spec + else: + try: + spec = Requirement.parse(spec) + except ValueError: + raise DistutilsError( + "Not a URL, existing file, or requirement spec: %r" % + (spec,) + ) + return getattr(self.fetch_distribution(spec, tmpdir),'location',None) + + def fetch_distribution( + self, requirement, tmpdir, force_scan=False, source=False, + develop_ok=False, local_index=None + ): + """Obtain a distribution suitable for fulfilling `requirement` + + `requirement` must be a ``pkg_resources.Requirement`` instance. + If necessary, or if the `force_scan` flag is set, the requirement is + searched for in the (online) package index as well as the locally + installed packages. If a distribution matching `requirement` is found, + the returned distribution's ``location`` is the value you would have + gotten from calling the ``download()`` method with the matching + distribution's URL or filename. If no matching distribution is found, + ``None`` is returned. + + If the `source` flag is set, only source distributions and source + checkout links will be considered. Unless the `develop_ok` flag is + set, development and system eggs (i.e., those using the ``.egg-info`` + format) will be ignored. + """ + # process a Requirement + self.info("Searching for %s", requirement) + skipped = {} + dist = None + + def find(req, env=None): + if env is None: + env = self + # Find a matching distribution; may be called more than once + + for dist in env[req.key]: + + if dist.precedence==DEVELOP_DIST and not develop_ok: + if dist not in skipped: + self.warn("Skipping development or system egg: %s",dist) + skipped[dist] = 1 + continue + + if dist in req and (dist.precedence<=SOURCE_DIST or not source): + return dist + + if force_scan: + self.prescan() + self.find_packages(requirement) + dist = find(requirement) + + if local_index is not None: + dist = dist or find(requirement, local_index) + + if dist is None: + if self.to_scan is not None: + self.prescan() + dist = find(requirement) + + if dist is None and not force_scan: + self.find_packages(requirement) + dist = find(requirement) + + if dist is None: + self.warn( + "No local packages or download links found for %s%s", + (source and "a source distribution of " or ""), + requirement, + ) + else: + self.info("Best match: %s", dist) + return dist.clone(location=self.download(dist.location, tmpdir)) + + def fetch(self, requirement, tmpdir, force_scan=False, source=False): + """Obtain a file suitable for fulfilling `requirement` + + DEPRECATED; use the ``fetch_distribution()`` method now instead. 
For + backward compatibility, this routine is identical but returns the + ``location`` of the downloaded distribution instead of a distribution + object. + """ + dist = self.fetch_distribution(requirement,tmpdir,force_scan,source) + if dist is not None: + return dist.location + return None + + def gen_setup(self, filename, fragment, tmpdir): + match = EGG_FRAGMENT.match(fragment) + dists = match and [ + d for d in + interpret_distro_name(filename, match.group(1), None) if d.version + ] or [] + + if len(dists)==1: # unambiguous ``#egg`` fragment + basename = os.path.basename(filename) + + # Make sure the file has been downloaded to the temp dir. + if os.path.dirname(filename) != tmpdir: + dst = os.path.join(tmpdir, basename) + from setuptools.command.easy_install import samefile + if not samefile(filename, dst): + shutil.copy2(filename, dst) + filename=dst + + with open(os.path.join(tmpdir, 'setup.py'), 'w') as file: + file.write( + "from setuptools import setup\n" + "setup(name=%r, version=%r, py_modules=[%r])\n" + % ( + dists[0].project_name, dists[0].version, + os.path.splitext(basename)[0] + ) + ) + return filename + + elif match: + raise DistutilsError( + "Can't unambiguously interpret project/version identifier %r; " + "any dashes in the name or version should be escaped using " + "underscores. %r" % (fragment,dists) + ) + else: + raise DistutilsError( + "Can't process plain .py files without an '#egg=name-version'" + " suffix to enable automatic setup script generation." + ) + + dl_blocksize = 8192 + def _download_to(self, url, filename): + self.info("Downloading %s", url) + # Download the file + fp, info = None, None + try: + checker = HashChecker.from_url(url) + fp = self.open_url(strip_fragment(url)) + if isinstance(fp, urllib.error.HTTPError): + raise DistutilsError( + "Can't download %s: %s %s" % (url, fp.code,fp.msg) + ) + headers = fp.info() + blocknum = 0 + bs = self.dl_blocksize + size = -1 + if "content-length" in headers: + # Some servers return multiple Content-Length headers :( + sizes = get_all_headers(headers, 'Content-Length') + size = max(map(int, sizes)) + self.reporthook(url, filename, blocknum, bs, size) + with open(filename,'wb') as tfp: + while True: + block = fp.read(bs) + if block: + checker.feed(block) + tfp.write(block) + blocknum += 1 + self.reporthook(url, filename, blocknum, bs, size) + else: + break + self.check_hash(checker, filename, tfp) + return headers + finally: + if fp: fp.close() + + def reporthook(self, url, filename, blocknum, blksize, size): + pass # no-op + + def open_url(self, url, warning=None): + if url.startswith('file:'): + return local_open(url) + try: + return open_with_auth(url, self.opener) + except (ValueError, http_client.InvalidURL) as v: + msg = ' '.join([str(arg) for arg in v.args]) + if warning: + self.warn(warning, msg) + else: + raise DistutilsError('%s %s' % (url, msg)) + except urllib.error.HTTPError as v: + return v + except urllib.error.URLError as v: + if warning: + self.warn(warning, v.reason) + else: + raise DistutilsError("Download error for %s: %s" + % (url, v.reason)) + except http_client.BadStatusLine as v: + if warning: + self.warn(warning, v.line) + else: + raise DistutilsError( + '%s returned a bad status line. 
The server might be ' + 'down, %s' % + (url, v.line) + ) + except http_client.HTTPException as v: + if warning: + self.warn(warning, v) + else: + raise DistutilsError("Download error for %s: %s" + % (url, v)) + + def _download_url(self, scheme, url, tmpdir): + # Determine download filename + # + name, fragment = egg_info_for_url(url) + if name: + while '..' in name: + name = name.replace('..','.').replace('\\','_') + else: + name = "__downloaded__" # default if URL has no path contents + + if name.endswith('.egg.zip'): + name = name[:-4] # strip the extra .zip before download + + filename = os.path.join(tmpdir,name) + + # Download the file + # + if scheme=='svn' or scheme.startswith('svn+'): + return self._download_svn(url, filename) + elif scheme=='git' or scheme.startswith('git+'): + return self._download_git(url, filename) + elif scheme.startswith('hg+'): + return self._download_hg(url, filename) + elif scheme=='file': + return urllib.request.url2pathname(urllib.parse.urlparse(url)[2]) + else: + self.url_ok(url, True) # raises error if not allowed + return self._attempt_download(url, filename) + + def scan_url(self, url): + self.process_url(url, True) + + def _attempt_download(self, url, filename): + headers = self._download_to(url, filename) + if 'html' in headers.get('content-type','').lower(): + return self._download_html(url, headers, filename) + else: + return filename + + def _download_html(self, url, headers, filename): + file = open(filename) + for line in file: + if line.strip(): + # Check for a subversion index page + if re.search(r'<title>([^- ]+ - )?Revision \d+:', line): + # it's a subversion index page: + file.close() + os.unlink(filename) + return self._download_svn(url, filename) + break # not an index page + file.close() + os.unlink(filename) + raise DistutilsError("Unexpected HTML page found at "+url) + + def _download_svn(self, url, filename): + url = url.split('#',1)[0] # remove any fragment for svn's sake + creds = '' + if url.lower().startswith('svn:') and '@' in url: + scheme, netloc, path, p, q, f = urllib.parse.urlparse(url) + if not netloc and path.startswith('//') and '/' in path[2:]: + netloc, path = path[2:].split('/',1) + auth, host = splituser(netloc) + if auth: + if ':' in auth: + user, pw = auth.split(':',1) + creds = " --username=%s --password=%s" % (user, pw) + else: + creds = " --username="+auth + netloc = host + parts = scheme, netloc, url, p, q, f + url = urllib.parse.urlunparse(parts) + self.info("Doing subversion checkout from %s to %s", url, filename) + os.system("svn checkout%s -q %s %s" % (creds, url, filename)) + return filename + + @staticmethod + def _vcs_split_rev_from_url(url, pop_prefix=False): + scheme, netloc, path, query, frag = urllib.parse.urlsplit(url) + + scheme = scheme.split('+', 1)[-1] + + # Some fragment identification fails + path = path.split('#',1)[0] + + rev = None + if '@' in path: + path, rev = path.rsplit('@', 1) + + # Also, discard fragment + url = urllib.parse.urlunsplit((scheme, netloc, path, query, '')) + + return url, rev + + def _download_git(self, url, filename): + filename = filename.split('#',1)[0] + url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) + + self.info("Doing git clone from %s to %s", url, filename) + os.system("git clone --quiet %s %s" % (url, filename)) + + if rev is not None: + self.info("Checking out %s", rev) + os.system("(cd %s && git checkout --quiet %s)" % ( + filename, + rev, + )) + + return filename + + def _download_hg(self, url, filename): + filename = filename.split('#',1)[0] 
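+        # the fragment (e.g. "#egg=name") was just stripped, so it cannot
+        # leak into the hg clone target path below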
+ url, rev = self._vcs_split_rev_from_url(url, pop_prefix=True) + + self.info("Doing hg clone from %s to %s", url, filename) + os.system("hg clone --quiet %s %s" % (url, filename)) + + if rev is not None: + self.info("Updating to %s", rev) + os.system("(cd %s && hg up -C -r %s >&-)" % ( + filename, + rev, + )) + + return filename + + def debug(self, msg, *args): + log.debug(msg, *args) + + def info(self, msg, *args): + log.info(msg, *args) + + def warn(self, msg, *args): + log.warn(msg, *args) + +# This pattern matches a character entity reference (a decimal numeric +# references, a hexadecimal numeric reference, or a named reference). +entity_sub = re.compile(r'&(#(\d+|x[\da-fA-F]+)|[\w.:-]+);?').sub + +def uchr(c): + if not isinstance(c, int): + return c + if c>255: return six.unichr(c) + return chr(c) + +def decode_entity(match): + what = match.group(1) + if what.startswith('#x'): + what = int(what[2:], 16) + elif what.startswith('#'): + what = int(what[1:]) + else: + what = six.moves.html_entities.name2codepoint.get(what, match.group(0)) + return uchr(what) + +def htmldecode(text): + """Decode HTML entities in the given text.""" + return entity_sub(decode_entity, text) + +def socket_timeout(timeout=15): + def _socket_timeout(func): + def _socket_timeout(*args, **kwargs): + old_timeout = socket.getdefaulttimeout() + socket.setdefaulttimeout(timeout) + try: + return func(*args, **kwargs) + finally: + socket.setdefaulttimeout(old_timeout) + return _socket_timeout + return _socket_timeout + +def _encode_auth(auth): + """ + A function compatible with Python 2.3-3.3 that will encode + auth from a URL suitable for an HTTP header. + >>> str(_encode_auth('username%3Apassword')) + 'dXNlcm5hbWU6cGFzc3dvcmQ=' + + Long auth strings should not cause a newline to be inserted. + >>> long_auth = 'username:' + 'password'*10 + >>> chr(10) in str(_encode_auth(long_auth)) + False + """ + auth_s = urllib.parse.unquote(auth) + # convert to bytes + auth_bytes = auth_s.encode() + # use the legacy interface for Python 2.3 support + encoded_bytes = base64.encodestring(auth_bytes) + # convert back to a string + encoded = encoded_bytes.decode() + # strip the trailing carriage return + return encoded.replace('\n','') + +class Credential(object): + """ + A username/password pair. Use like a namedtuple. + """ + def __init__(self, username, password): + self.username = username + self.password = password + + def __iter__(self): + yield self.username + yield self.password + + def __str__(self): + return '%(username)s:%(password)s' % vars(self) + +class PyPIConfig(configparser.RawConfigParser): + + def __init__(self): + """ + Load from ~/.pypirc + """ + defaults = dict.fromkeys(['username', 'password', 'repository'], '') + configparser.RawConfigParser.__init__(self, defaults) + + rc = os.path.join(os.path.expanduser('~'), '.pypirc') + if os.path.exists(rc): + self.read(rc) + + @property + def creds_by_repository(self): + sections_with_repositories = [ + section for section in self.sections() + if self.get(section, 'repository').strip() + ] + + return dict(map(self._get_repo_cred, sections_with_repositories)) + + def _get_repo_cred(self, section): + repo = self.get(section, 'repository').strip() + return repo, Credential( + self.get(section, 'username').strip(), + self.get(section, 'password').strip(), + ) + + def find_credential(self, url): + """ + If the URL indicated appears to be a repository defined in this + config, return the credential for that repository. 
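+
+        Illustrative use (a sketch; assumes ``~/.pypirc`` has a section
+        whose ``repository`` value is a prefix of ``url``):
+
+            cred = PyPIConfig().find_credential(url)
+            if cred:
+                log.info('authenticating as %s', cred.username)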
+ """ + for repository, cred in self.creds_by_repository.items(): + if url.startswith(repository): + return cred + + +def open_with_auth(url, opener=urllib.request.urlopen): + """Open a urllib2 request, handling HTTP authentication""" + + scheme, netloc, path, params, query, frag = urllib.parse.urlparse(url) + + # Double scheme does not raise on Mac OS X as revealed by a + # failing test. We would expect "nonnumeric port". Refs #20. + if netloc.endswith(':'): + raise http_client.InvalidURL("nonnumeric port: ''") + + if scheme in ('http', 'https'): + auth, host = splituser(netloc) + else: + auth = None + + if not auth: + cred = PyPIConfig().find_credential(url) + if cred: + auth = str(cred) + info = cred.username, url + log.info('Authenticating as %s for %s (from .pypirc)' % info) + + if auth: + auth = "Basic " + _encode_auth(auth) + parts = scheme, host, path, params, query, frag + new_url = urllib.parse.urlunparse(parts) + request = urllib.request.Request(new_url) + request.add_header("Authorization", auth) + else: + request = urllib.request.Request(url) + + request.add_header('User-Agent', user_agent) + fp = opener(request) + + if auth: + # Put authentication info back into request URL if same host, + # so that links found on the page will work + s2, h2, path2, param2, query2, frag2 = urllib.parse.urlparse(fp.url) + if s2==scheme and h2==host: + parts = s2, netloc, path2, param2, query2, frag2 + fp.url = urllib.parse.urlunparse(parts) + + return fp + +# adding a timeout to avoid freezing package_index +open_with_auth = socket_timeout(_SOCKET_TIMEOUT)(open_with_auth) + + +def fix_sf_url(url): + return url # backward compatibility + +def local_open(url): + """Read a local path, with special support for directories""" + scheme, server, path, param, query, frag = urllib.parse.urlparse(url) + filename = urllib.request.url2pathname(path) + if os.path.isfile(filename): + return urllib.request.urlopen(url) + elif path.endswith('/') and os.path.isdir(filename): + files = [] + for f in os.listdir(filename): + filepath = os.path.join(filename, f) + if f == 'index.html': + with open(filepath, 'r') as fp: + body = fp.read() + break + elif os.path.isdir(filepath): + f += '/' + files.append('<a href="{name}">{name}</a>'.format(name=f)) + else: + tmpl = ("<html><head><title>{url}" + "{files}") + body = tmpl.format(url=url, files='\n'.join(files)) + status, message = 200, "OK" + else: + status, message, body = 404, "Path not found", "Not found" + + headers = {'content-type': 'text/html'} + body_stream = six.StringIO(body) + return urllib.error.HTTPError(url, status, message, headers, body_stream) diff --git a/venv/lib/python3.5/site-packages/setuptools/py26compat.py b/venv/lib/python3.5/site-packages/setuptools/py26compat.py new file mode 100644 index 0000000..e52bd85 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/py26compat.py @@ -0,0 +1,22 @@ +""" +Compatibility Support for Python 2.6 and earlier +""" + +import sys + +try: + from urllib.parse import splittag +except ImportError: + from urllib import splittag + +def strip_fragment(url): + """ + In `Python 8280 `_, Python 2.7 and + later was patched to disregard the fragment when making URL requests. + Do the same for Python 2.6 and earlier. 
+ """ + url, fragment = splittag(url) + return url + +if sys.version_info >= (2,7): + strip_fragment = lambda x: x diff --git a/venv/lib/python3.5/site-packages/setuptools/py27compat.py b/venv/lib/python3.5/site-packages/setuptools/py27compat.py new file mode 100644 index 0000000..9d2886d --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/py27compat.py @@ -0,0 +1,15 @@ +""" +Compatibility Support for Python 2.7 and earlier +""" + +import sys + +def get_all_headers(message, key): + """ + Given an HTTPMessage, return all headers matching a given key. + """ + return message.get_all(key) + +if sys.version_info < (3,): + def get_all_headers(message, key): + return message.getheaders(key) diff --git a/venv/lib/python3.5/site-packages/setuptools/py31compat.py b/venv/lib/python3.5/site-packages/setuptools/py31compat.py new file mode 100644 index 0000000..8fe6dd9 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/py31compat.py @@ -0,0 +1,52 @@ +import sys +import unittest + +__all__ = ['get_config_vars', 'get_path'] + +try: + # Python 2.7 or >=3.2 + from sysconfig import get_config_vars, get_path +except ImportError: + from distutils.sysconfig import get_config_vars, get_python_lib + def get_path(name): + if name not in ('platlib', 'purelib'): + raise ValueError("Name must be purelib or platlib") + return get_python_lib(name=='platlib') + +try: + # Python >=3.2 + from tempfile import TemporaryDirectory +except ImportError: + import shutil + import tempfile + class TemporaryDirectory(object): + """ + Very simple temporary directory context manager. + Will try to delete afterward, but will also ignore OS and similar + errors on deletion. + """ + def __init__(self): + self.name = None # Handle mkdtemp raising an exception + self.name = tempfile.mkdtemp() + + def __enter__(self): + return self.name + + def __exit__(self, exctype, excvalue, exctrace): + try: + shutil.rmtree(self.name, True) + except OSError: #removal errors are not the only possible + pass + self.name = None + + +unittest_main = unittest.main + +_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2) +if _PY31: + # on Python 3.1, translate testRunner==None to TextTestRunner + # for compatibility with Python 2.6, 2.7, and 3.2+ + def unittest_main(*args, **kwargs): + if 'testRunner' in kwargs and kwargs['testRunner'] is None: + kwargs['testRunner'] = unittest.TextTestRunner + return unittest.main(*args, **kwargs) diff --git a/venv/lib/python3.5/site-packages/setuptools/sandbox.py b/venv/lib/python3.5/site-packages/setuptools/sandbox.py new file mode 100644 index 0000000..23e296b --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/sandbox.py @@ -0,0 +1,496 @@ +import os +import sys +import tempfile +import operator +import functools +import itertools +import re +import contextlib +import pickle + +from setuptools.extern import six +from setuptools.extern.six.moves import builtins, map + +import pkg_resources + +if sys.platform.startswith('java'): + import org.python.modules.posix.PosixModule as _os +else: + _os = sys.modules[os.name] +try: + _file = file +except NameError: + _file = None +_open = open +from distutils.errors import DistutilsError +from pkg_resources import working_set + +__all__ = [ + "AbstractSandbox", "DirectorySandbox", "SandboxViolation", "run_setup", +] + +def _execfile(filename, globals, locals=None): + """ + Python 3 implementation of execfile. 
+ """ + mode = 'rb' + with open(filename, mode) as stream: + script = stream.read() + # compile() function in Python 2.6 and 3.1 requires LF line endings. + if sys.version_info[:2] < (2, 7) or sys.version_info[:2] >= (3, 0) and sys.version_info[:2] < (3, 2): + script = script.replace(b'\r\n', b'\n') + script = script.replace(b'\r', b'\n') + if locals is None: + locals = globals + code = compile(script, filename, 'exec') + exec(code, globals, locals) + + +@contextlib.contextmanager +def save_argv(repl=None): + saved = sys.argv[:] + if repl is not None: + sys.argv[:] = repl + try: + yield saved + finally: + sys.argv[:] = saved + + +@contextlib.contextmanager +def save_path(): + saved = sys.path[:] + try: + yield saved + finally: + sys.path[:] = saved + + +@contextlib.contextmanager +def override_temp(replacement): + """ + Monkey-patch tempfile.tempdir with replacement, ensuring it exists + """ + if not os.path.isdir(replacement): + os.makedirs(replacement) + + saved = tempfile.tempdir + + tempfile.tempdir = replacement + + try: + yield + finally: + tempfile.tempdir = saved + + +@contextlib.contextmanager +def pushd(target): + saved = os.getcwd() + os.chdir(target) + try: + yield saved + finally: + os.chdir(saved) + + +class UnpickleableException(Exception): + """ + An exception representing another Exception that could not be pickled. + """ + @staticmethod + def dump(type, exc): + """ + Always return a dumped (pickled) type and exc. If exc can't be pickled, + wrap it in UnpickleableException first. + """ + try: + return pickle.dumps(type), pickle.dumps(exc) + except Exception: + # get UnpickleableException inside the sandbox + from setuptools.sandbox import UnpickleableException as cls + return cls.dump(cls, cls(repr(exc))) + + +class ExceptionSaver: + """ + A Context Manager that will save an exception, serialized, and restore it + later. + """ + def __enter__(self): + return self + + def __exit__(self, type, exc, tb): + if not exc: + return + + # dump the exception + self._saved = UnpickleableException.dump(type, exc) + self._tb = tb + + # suppress the exception + return True + + def resume(self): + "restore and re-raise any exception" + + if '_saved' not in vars(self): + return + + type, exc = map(pickle.loads, self._saved) + six.reraise(type, exc, self._tb) + + +@contextlib.contextmanager +def save_modules(): + """ + Context in which imported modules are saved. + + Translates exceptions internal to the context into the equivalent exception + outside the context. + """ + saved = sys.modules.copy() + with ExceptionSaver() as saved_exc: + yield saved + + sys.modules.update(saved) + # remove any modules imported since + del_modules = ( + mod_name for mod_name in sys.modules + if mod_name not in saved + # exclude any encodings modules. 
See #285 + and not mod_name.startswith('encodings.') + ) + _clear_modules(del_modules) + + saved_exc.resume() + + +def _clear_modules(module_names): + for mod_name in list(module_names): + del sys.modules[mod_name] + + +@contextlib.contextmanager +def save_pkg_resources_state(): + saved = pkg_resources.__getstate__() + try: + yield saved + finally: + pkg_resources.__setstate__(saved) + + +@contextlib.contextmanager +def setup_context(setup_dir): + temp_dir = os.path.join(setup_dir, 'temp') + with save_pkg_resources_state(): + with save_modules(): + hide_setuptools() + with save_path(): + with save_argv(): + with override_temp(temp_dir): + with pushd(setup_dir): + # ensure setuptools commands are available + __import__('setuptools') + yield + + +def _needs_hiding(mod_name): + """ + >>> _needs_hiding('setuptools') + True + >>> _needs_hiding('pkg_resources') + True + >>> _needs_hiding('setuptools_plugin') + False + >>> _needs_hiding('setuptools.__init__') + True + >>> _needs_hiding('distutils') + True + >>> _needs_hiding('os') + False + >>> _needs_hiding('Cython') + True + """ + pattern = re.compile('(setuptools|pkg_resources|distutils|Cython)(\.|$)') + return bool(pattern.match(mod_name)) + + +def hide_setuptools(): + """ + Remove references to setuptools' modules from sys.modules to allow the + invocation to import the most appropriate setuptools. This technique is + necessary to avoid issues such as #315 where setuptools upgrading itself + would fail to find a function declared in the metadata. + """ + modules = filter(_needs_hiding, sys.modules) + _clear_modules(modules) + + +def run_setup(setup_script, args): + """Run a distutils setup script, sandboxed in its directory""" + setup_dir = os.path.abspath(os.path.dirname(setup_script)) + with setup_context(setup_dir): + try: + sys.argv[:] = [setup_script]+list(args) + sys.path.insert(0, setup_dir) + # reset to include setup dir, w/clean callback list + working_set.__init__() + working_set.callbacks.append(lambda dist:dist.activate()) + def runner(): + ns = dict(__file__=setup_script, __name__='__main__') + _execfile(setup_script, ns) + DirectorySandbox(setup_dir).run(runner) + except SystemExit as v: + if v.args and v.args[0]: + raise + # Normal exit, just return + + +class AbstractSandbox: + """Wrap 'os' module and 'open()' builtin for virtualizing setup scripts""" + + _active = False + + def __init__(self): + self._attrs = [ + name for name in dir(_os) + if not name.startswith('_') and hasattr(self,name) + ] + + def _copy(self, source): + for name in self._attrs: + setattr(os, name, getattr(source,name)) + + def run(self, func): + """Run 'func' under os sandboxing""" + try: + self._copy(self) + if _file: + builtins.file = self._file + builtins.open = self._open + self._active = True + return func() + finally: + self._active = False + if _file: + builtins.file = _file + builtins.open = _open + self._copy(_os) + + def _mk_dual_path_wrapper(name): + original = getattr(_os,name) + def wrap(self,src,dst,*args,**kw): + if self._active: + src,dst = self._remap_pair(name,src,dst,*args,**kw) + return original(src,dst,*args,**kw) + return wrap + + for name in ["rename", "link", "symlink"]: + if hasattr(_os,name): locals()[name] = _mk_dual_path_wrapper(name) + + def _mk_single_path_wrapper(name, original=None): + original = original or getattr(_os,name) + def wrap(self,path,*args,**kw): + if self._active: + path = self._remap_input(name,path,*args,**kw) + return original(path,*args,**kw) + return wrap + + if _file: + _file = 
_mk_single_path_wrapper('file', _file) + _open = _mk_single_path_wrapper('open', _open) + for name in [ + "stat", "listdir", "chdir", "open", "chmod", "chown", "mkdir", + "remove", "unlink", "rmdir", "utime", "lchown", "chroot", "lstat", + "startfile", "mkfifo", "mknod", "pathconf", "access" + ]: + if hasattr(_os,name): locals()[name] = _mk_single_path_wrapper(name) + + def _mk_single_with_return(name): + original = getattr(_os,name) + def wrap(self,path,*args,**kw): + if self._active: + path = self._remap_input(name,path,*args,**kw) + return self._remap_output(name, original(path,*args,**kw)) + return original(path,*args,**kw) + return wrap + + for name in ['readlink', 'tempnam']: + if hasattr(_os,name): locals()[name] = _mk_single_with_return(name) + + def _mk_query(name): + original = getattr(_os,name) + def wrap(self,*args,**kw): + retval = original(*args,**kw) + if self._active: + return self._remap_output(name, retval) + return retval + return wrap + + for name in ['getcwd', 'tmpnam']: + if hasattr(_os,name): locals()[name] = _mk_query(name) + + def _validate_path(self,path): + """Called to remap or validate any path, whether input or output""" + return path + + def _remap_input(self,operation,path,*args,**kw): + """Called for path inputs""" + return self._validate_path(path) + + def _remap_output(self,operation,path): + """Called for path outputs""" + return self._validate_path(path) + + def _remap_pair(self,operation,src,dst,*args,**kw): + """Called for path pairs like rename, link, and symlink operations""" + return ( + self._remap_input(operation+'-from',src,*args,**kw), + self._remap_input(operation+'-to',dst,*args,**kw) + ) + + +if hasattr(os, 'devnull'): + _EXCEPTIONS = [os.devnull,] +else: + _EXCEPTIONS = [] + +try: + from win32com.client.gencache import GetGeneratePath + _EXCEPTIONS.append(GetGeneratePath()) + del GetGeneratePath +except ImportError: + # it appears pywin32 is not installed, so no need to exclude. 
+ pass + +class DirectorySandbox(AbstractSandbox): + """Restrict operations to a single subdirectory - pseudo-chroot""" + + write_ops = dict.fromkeys([ + "open", "chmod", "chown", "mkdir", "remove", "unlink", "rmdir", + "utime", "lchown", "chroot", "mkfifo", "mknod", "tempnam", + ]) + + _exception_patterns = [ + # Allow lib2to3 to attempt to save a pickled grammar object (#121) + '.*lib2to3.*\.pickle$', + ] + "exempt writing to paths that match the pattern" + + def __init__(self, sandbox, exceptions=_EXCEPTIONS): + self._sandbox = os.path.normcase(os.path.realpath(sandbox)) + self._prefix = os.path.join(self._sandbox,'') + self._exceptions = [ + os.path.normcase(os.path.realpath(path)) + for path in exceptions + ] + AbstractSandbox.__init__(self) + + def _violation(self, operation, *args, **kw): + from setuptools.sandbox import SandboxViolation + raise SandboxViolation(operation, args, kw) + + if _file: + def _file(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("file", path, mode, *args, **kw) + return _file(path,mode,*args,**kw) + + def _open(self, path, mode='r', *args, **kw): + if mode not in ('r', 'rt', 'rb', 'rU', 'U') and not self._ok(path): + self._violation("open", path, mode, *args, **kw) + return _open(path,mode,*args,**kw) + + def tmpnam(self): + self._violation("tmpnam") + + def _ok(self, path): + active = self._active + try: + self._active = False + realpath = os.path.normcase(os.path.realpath(path)) + return ( + self._exempted(realpath) + or realpath == self._sandbox + or realpath.startswith(self._prefix) + ) + finally: + self._active = active + + def _exempted(self, filepath): + start_matches = ( + filepath.startswith(exception) + for exception in self._exceptions + ) + pattern_matches = ( + re.match(pattern, filepath) + for pattern in self._exception_patterns + ) + candidates = itertools.chain(start_matches, pattern_matches) + return any(candidates) + + def _remap_input(self, operation, path, *args, **kw): + """Called for path inputs""" + if operation in self.write_ops and not self._ok(path): + self._violation(operation, os.path.realpath(path), *args, **kw) + return path + + def _remap_pair(self, operation, src, dst, *args, **kw): + """Called for path pairs like rename, link, and symlink operations""" + if not self._ok(src) or not self._ok(dst): + self._violation(operation, src, dst, *args, **kw) + return (src,dst) + + def open(self, file, flags, mode=0o777, *args, **kw): + """Called for low-level os.open()""" + if flags & WRITE_FLAGS and not self._ok(file): + self._violation("os.open", file, flags, mode, *args, **kw) + return _os.open(file,flags,mode, *args, **kw) + +WRITE_FLAGS = functools.reduce( + operator.or_, [getattr(_os, a, 0) for a in + "O_WRONLY O_RDWR O_APPEND O_CREAT O_TRUNC O_TEMPORARY".split()] +) + +class SandboxViolation(DistutilsError): + """A setup script attempted to modify the filesystem outside the sandbox""" + + def __str__(self): + return """SandboxViolation: %s%r %s + +The package setup script has attempted to modify files on your system +that are not within the EasyInstall build area, and has been aborted. + +This package cannot be safely installed by EasyInstall, and may not +support alternate installation locations even if you run its setup +script by hand. 
Please inform the package's author and the EasyInstall +maintainers to find out if a fix or workaround is available.""" % self.args + + + + + + + + + + + + + + + + + + + + + + + + + + + +# diff --git a/venv/lib/python3.5/site-packages/setuptools/script (dev).tmpl b/venv/lib/python3.5/site-packages/setuptools/script (dev).tmpl new file mode 100644 index 0000000..d58b1bb --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/script (dev).tmpl @@ -0,0 +1,5 @@ +# EASY-INSTALL-DEV-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').require(%(spec)r) +__file__ = %(dev_path)r +exec(compile(open(__file__).read(), __file__, 'exec')) diff --git a/venv/lib/python3.5/site-packages/setuptools/script.tmpl b/venv/lib/python3.5/site-packages/setuptools/script.tmpl new file mode 100644 index 0000000..ff5efbc --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/script.tmpl @@ -0,0 +1,3 @@ +# EASY-INSTALL-SCRIPT: %(spec)r,%(script_name)r +__requires__ = %(spec)r +__import__('pkg_resources').run_script(%(spec)r, %(script_name)r) diff --git a/venv/lib/python3.5/site-packages/setuptools/site-patch.py b/venv/lib/python3.5/site-packages/setuptools/site-patch.py new file mode 100644 index 0000000..c216801 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/site-patch.py @@ -0,0 +1,76 @@ +def __boot(): + import sys + import os + PYTHONPATH = os.environ.get('PYTHONPATH') + if PYTHONPATH is None or (sys.platform=='win32' and not PYTHONPATH): + PYTHONPATH = [] + else: + PYTHONPATH = PYTHONPATH.split(os.pathsep) + + pic = getattr(sys,'path_importer_cache',{}) + stdpath = sys.path[len(PYTHONPATH):] + mydir = os.path.dirname(__file__) + #print "searching",stdpath,sys.path + + for item in stdpath: + if item==mydir or not item: + continue # skip if current dir. 
on Windows, or my own directory + importer = pic.get(item) + if importer is not None: + loader = importer.find_module('site') + if loader is not None: + # This should actually reload the current module + loader.load_module('site') + break + else: + try: + import imp # Avoid import loop in Python >= 3.3 + stream, path, descr = imp.find_module('site',[item]) + except ImportError: + continue + if stream is None: + continue + try: + # This should actually reload the current module + imp.load_module('site',stream,path,descr) + finally: + stream.close() + break + else: + raise ImportError("Couldn't find the real 'site' module") + + #print "loaded", __file__ + + known_paths = dict([(makepath(item)[1],1) for item in sys.path]) # 2.2 comp + + oldpos = getattr(sys,'__egginsert',0) # save old insertion position + sys.__egginsert = 0 # and reset the current one + + for item in PYTHONPATH: + addsitedir(item) + + sys.__egginsert += oldpos # restore effective old position + + d, nd = makepath(stdpath[0]) + insert_at = None + new_path = [] + + for item in sys.path: + p, np = makepath(item) + + if np==nd and insert_at is None: + # We've hit the first 'system' path entry, so added entries go here + insert_at = len(new_path) + + if np in known_paths or insert_at is None: + new_path.append(item) + else: + # new path after the insert point, back-insert it + new_path.insert(insert_at, item) + insert_at += 1 + + sys.path[:] = new_path + +if __name__=='site': + __boot() + del __boot diff --git a/venv/lib/python3.5/site-packages/setuptools/ssl_support.py b/venv/lib/python3.5/site-packages/setuptools/ssl_support.py new file mode 100644 index 0000000..657197c --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/ssl_support.py @@ -0,0 +1,243 @@ +import os +import socket +import atexit +import re + +from setuptools.extern.six.moves import urllib, http_client, map + +import pkg_resources +from pkg_resources import ResolutionError, ExtractionError + +try: + import ssl +except ImportError: + ssl = None + +__all__ = [ + 'VerifyingHTTPSHandler', 'find_ca_bundle', 'is_available', 'cert_paths', + 'opener_for' +] + +cert_paths = """ +/etc/pki/tls/certs/ca-bundle.crt +/etc/ssl/certs/ca-certificates.crt +/usr/share/ssl/certs/ca-bundle.crt +/usr/local/share/certs/ca-root.crt +/etc/ssl/cert.pem +/System/Library/OpenSSL/certs/cert.pem +/usr/local/share/certs/ca-root-nss.crt +""".strip().split() + + +try: + HTTPSHandler = urllib.request.HTTPSHandler + HTTPSConnection = http_client.HTTPSConnection +except AttributeError: + HTTPSHandler = HTTPSConnection = object + +is_available = ssl is not None and object not in (HTTPSHandler, HTTPSConnection) + + +try: + from ssl import CertificateError, match_hostname +except ImportError: + try: + from backports.ssl_match_hostname import CertificateError + from backports.ssl_match_hostname import match_hostname + except ImportError: + CertificateError = None + match_hostname = None + +if not CertificateError: + class CertificateError(ValueError): + pass + +if not match_hostname: + def _dnsname_match(dn, hostname, max_wildcards=1): + """Matching according to RFC 6125, section 6.4.3 + + http://tools.ietf.org/html/rfc6125#section-6.4.3 + """ + pats = [] + if not dn: + return False + + # Ported from python3-syntax: + # leftmost, *remainder = dn.split(r'.') + parts = dn.split(r'.') + leftmost = parts[0] + remainder = parts[1:] + + wildcards = leftmost.count('*') + if wildcards > max_wildcards: + # Issue #17980: avoid denials of service by refusing more + # than one wildcard per fragment. 
A survey of established + # policy among SSL implementations showed it to be a + # reasonable choice. + raise CertificateError( + "too many wildcards in certificate DNS name: " + repr(dn)) + + # speed up common case w/o wildcards + if not wildcards: + return dn.lower() == hostname.lower() + + # RFC 6125, section 6.4.3, subitem 1. + # The client SHOULD NOT attempt to match a presented identifier in which + # the wildcard character comprises a label other than the left-most label. + if leftmost == '*': + # When '*' is a fragment by itself, it matches a non-empty dotless + # fragment. + pats.append('[^.]+') + elif leftmost.startswith('xn--') or hostname.startswith('xn--'): + # RFC 6125, section 6.4.3, subitem 3. + # The client SHOULD NOT attempt to match a presented identifier + # where the wildcard character is embedded within an A-label or + # U-label of an internationalized domain name. + pats.append(re.escape(leftmost)) + else: + # Otherwise, '*' matches any dotless string, e.g. www* + pats.append(re.escape(leftmost).replace(r'\*', '[^.]*')) + + # add the remaining fragments, ignore any wildcards + for frag in remainder: + pats.append(re.escape(frag)) + + pat = re.compile(r'\A' + r'\.'.join(pats) + r'\Z', re.IGNORECASE) + return pat.match(hostname) + + def match_hostname(cert, hostname): + """Verify that *cert* (in decoded format as returned by + SSLSocket.getpeercert()) matches the *hostname*. RFC 2818 and RFC 6125 + rules are followed, but IP addresses are not accepted for *hostname*. + + CertificateError is raised on failure. On success, the function + returns nothing. + """ + if not cert: + raise ValueError("empty or no certificate") + dnsnames = [] + san = cert.get('subjectAltName', ()) + for key, value in san: + if key == 'DNS': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if not dnsnames: + # The subject is only checked when there is no dNSName entry + # in subjectAltName + for sub in cert.get('subject', ()): + for key, value in sub: + # XXX according to RFC 2818, the most specific Common Name + # must be used. 
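+                # note: each commonName entry is simply tried in turn;
+                # the "most specific" preference is not enforced here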
+ if key == 'commonName': + if _dnsname_match(value, hostname): + return + dnsnames.append(value) + if len(dnsnames) > 1: + raise CertificateError("hostname %r " + "doesn't match either of %s" + % (hostname, ', '.join(map(repr, dnsnames)))) + elif len(dnsnames) == 1: + raise CertificateError("hostname %r " + "doesn't match %r" + % (hostname, dnsnames[0])) + else: + raise CertificateError("no appropriate commonName or " + "subjectAltName fields were found") + + +class VerifyingHTTPSHandler(HTTPSHandler): + """Simple verifying handler: no auth, subclasses, timeouts, etc.""" + + def __init__(self, ca_bundle): + self.ca_bundle = ca_bundle + HTTPSHandler.__init__(self) + + def https_open(self, req): + return self.do_open( + lambda host, **kw: VerifyingHTTPSConn(host, self.ca_bundle, **kw), req + ) + + +class VerifyingHTTPSConn(HTTPSConnection): + """Simple verifying connection: no auth, subclasses, timeouts, etc.""" + def __init__(self, host, ca_bundle, **kw): + HTTPSConnection.__init__(self, host, **kw) + self.ca_bundle = ca_bundle + + def connect(self): + sock = socket.create_connection( + (self.host, self.port), getattr(self, 'source_address', None) + ) + + # Handle the socket if a (proxy) tunnel is present + if hasattr(self, '_tunnel') and getattr(self, '_tunnel_host', None): + self.sock = sock + self._tunnel() + # http://bugs.python.org/issue7776: Python>=3.4.1 and >=2.7.7 + # change self.host to mean the proxy server host when tunneling is + # being used. Adapt, since we are interested in the destination + # host for the match_hostname() comparison. + actual_host = self._tunnel_host + else: + actual_host = self.host + + self.sock = ssl.wrap_socket( + sock, cert_reqs=ssl.CERT_REQUIRED, ca_certs=self.ca_bundle + ) + try: + match_hostname(self.sock.getpeercert(), actual_host) + except CertificateError: + self.sock.shutdown(socket.SHUT_RDWR) + self.sock.close() + raise + +def opener_for(ca_bundle=None): + """Get a urlopen() replacement that uses ca_bundle for verification""" + return urllib.request.build_opener( + VerifyingHTTPSHandler(ca_bundle or find_ca_bundle()) + ).open + + +_wincerts = None + +def get_win_certfile(): + global _wincerts + if _wincerts is not None: + return _wincerts.name + + try: + from wincertstore import CertFile + except ImportError: + return None + + class MyCertFile(CertFile): + def __init__(self, stores=(), certs=()): + CertFile.__init__(self) + for store in stores: + self.addstore(store) + self.addcerts(certs) + atexit.register(self.close) + + def close(self): + try: + super(MyCertFile, self).close() + except OSError: + pass + + _wincerts = MyCertFile(stores=['CA', 'ROOT']) + return _wincerts.name + + +def find_ca_bundle(): + """Return an existing CA bundle path, or None""" + if os.name=='nt': + return get_win_certfile() + else: + for cert_path in cert_paths: + if os.path.isfile(cert_path): + return cert_path + try: + return pkg_resources.resource_filename('certifi', 'cacert.pem') + except (ImportError, ResolutionError, ExtractionError): + return None diff --git a/venv/lib/python3.5/site-packages/setuptools/unicode_utils.py b/venv/lib/python3.5/site-packages/setuptools/unicode_utils.py new file mode 100644 index 0000000..ffab3e2 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/unicode_utils.py @@ -0,0 +1,43 @@ +import unicodedata +import sys + +from setuptools.extern import six + +# HFS Plus uses decomposed UTF-8 +def decompose(path): + if isinstance(path, six.text_type): + return unicodedata.normalize('NFD', path) + try: + path = 
path.decode('utf-8') + path = unicodedata.normalize('NFD', path) + path = path.encode('utf-8') + except UnicodeError: + pass # Not UTF-8 + return path + + +def filesys_decode(path): + """ + Ensure that the given path is decoded, + NONE when no expected encoding works + """ + + if isinstance(path, six.text_type): + return path + + fs_enc = sys.getfilesystemencoding() or 'utf-8' + candidates = fs_enc, 'utf-8' + + for enc in candidates: + try: + return path.decode(enc) + except UnicodeDecodeError: + continue + + +def try_encode(string, enc): + "turn unicode encoding into a functional routine" + try: + return string.encode(enc) + except UnicodeEncodeError: + return None diff --git a/venv/lib/python3.5/site-packages/setuptools/utils.py b/venv/lib/python3.5/site-packages/setuptools/utils.py new file mode 100644 index 0000000..91e4b87 --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/utils.py @@ -0,0 +1,11 @@ +import os +import os.path + + +def cs_path_exists(fspath): + if not os.path.exists(fspath): + return False + # make absolute so we always have a directory + abspath = os.path.abspath(fspath) + directory, filename = os.path.split(abspath) + return filename in os.listdir(directory) \ No newline at end of file diff --git a/venv/lib/python3.5/site-packages/setuptools/version.py b/venv/lib/python3.5/site-packages/setuptools/version.py new file mode 100644 index 0000000..049e7fe --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/version.py @@ -0,0 +1,6 @@ +import pkg_resources + +try: + __version__ = pkg_resources.require('setuptools')[0].version +except Exception: + __version__ = 'unknown' diff --git a/venv/lib/python3.5/site-packages/setuptools/windows_support.py b/venv/lib/python3.5/site-packages/setuptools/windows_support.py new file mode 100644 index 0000000..cb977cf --- /dev/null +++ b/venv/lib/python3.5/site-packages/setuptools/windows_support.py @@ -0,0 +1,29 @@ +import platform +import ctypes + + +def windows_only(func): + if platform.system() != 'Windows': + return lambda *args, **kwargs: None + return func + + +@windows_only +def hide_file(path): + """ + Set the hidden attribute on a file or directory. + + From http://stackoverflow.com/questions/19622133/ + + `path` must be text. 
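+
+    Illustrative call (a sketch; the path is hypothetical):
+
+        hide_file(u'C:\\build\\cache')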
+ """ + __import__('ctypes.wintypes') + SetFileAttributes = ctypes.windll.kernel32.SetFileAttributesW + SetFileAttributes.argtypes = ctypes.wintypes.LPWSTR, ctypes.wintypes.DWORD + SetFileAttributes.restype = ctypes.wintypes.BOOL + + FILE_ATTRIBUTE_HIDDEN = 0x02 + + ret = SetFileAttributes(path, FILE_ATTRIBUTE_HIDDEN) + if not ret: + raise ctypes.WinError() diff --git a/venv/lib64 b/venv/lib64 new file mode 120000 index 0000000..7951405 --- /dev/null +++ b/venv/lib64 @@ -0,0 +1 @@ +lib \ No newline at end of file diff --git a/venv/pip-selfcheck.json b/venv/pip-selfcheck.json new file mode 100644 index 0000000..ecfa635 --- /dev/null +++ b/venv/pip-selfcheck.json @@ -0,0 +1 @@ +{"last_check":"2017-07-04T18:09:56Z","pypi_version":"9.0.1"} \ No newline at end of file diff --git a/venv/pyvenv.cfg b/venv/pyvenv.cfg new file mode 100644 index 0000000..20ac621 --- /dev/null +++ b/venv/pyvenv.cfg @@ -0,0 +1,3 @@ +home = /usr/bin +include-system-site-packages = false +version = 3.5.2 diff --git a/venv/share/python-wheels/CacheControl-0.11.5-py2.py3-none-any.whl b/venv/share/python-wheels/CacheControl-0.11.5-py2.py3-none-any.whl new file mode 100644 index 0000000..12b34d6 Binary files /dev/null and b/venv/share/python-wheels/CacheControl-0.11.5-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/chardet-2.3.0-py2.py3-none-any.whl b/venv/share/python-wheels/chardet-2.3.0-py2.py3-none-any.whl new file mode 100644 index 0000000..daa9a12 Binary files /dev/null and b/venv/share/python-wheels/chardet-2.3.0-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/colorama-0.3.7-py2.py3-none-any.whl b/venv/share/python-wheels/colorama-0.3.7-py2.py3-none-any.whl new file mode 100644 index 0000000..65328bf Binary files /dev/null and b/venv/share/python-wheels/colorama-0.3.7-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/distlib-0.2.2-py2.py3-none-any.whl b/venv/share/python-wheels/distlib-0.2.2-py2.py3-none-any.whl new file mode 100644 index 0000000..827bd27 Binary files /dev/null and b/venv/share/python-wheels/distlib-0.2.2-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/html5lib-0.999-py2.py3-none-any.whl b/venv/share/python-wheels/html5lib-0.999-py2.py3-none-any.whl new file mode 100644 index 0000000..0f920d5 Binary files /dev/null and b/venv/share/python-wheels/html5lib-0.999-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/ipaddress-0.0.0-py2.py3-none-any.whl b/venv/share/python-wheels/ipaddress-0.0.0-py2.py3-none-any.whl new file mode 100644 index 0000000..9ffe578 Binary files /dev/null and b/venv/share/python-wheels/ipaddress-0.0.0-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/lockfile-0.12.2-py2.py3-none-any.whl b/venv/share/python-wheels/lockfile-0.12.2-py2.py3-none-any.whl new file mode 100644 index 0000000..852e2bd Binary files /dev/null and b/venv/share/python-wheels/lockfile-0.12.2-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/packaging-16.6-py2.py3-none-any.whl b/venv/share/python-wheels/packaging-16.6-py2.py3-none-any.whl new file mode 100644 index 0000000..06f5b14 Binary files /dev/null and b/venv/share/python-wheels/packaging-16.6-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/pip-8.1.1-py2.py3-none-any.whl b/venv/share/python-wheels/pip-8.1.1-py2.py3-none-any.whl new file mode 100644 index 0000000..2c4bff4 Binary files /dev/null and b/venv/share/python-wheels/pip-8.1.1-py2.py3-none-any.whl differ diff --git 
a/venv/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl b/venv/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl new file mode 100644 index 0000000..0a85026 Binary files /dev/null and b/venv/share/python-wheels/pkg_resources-0.0.0-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/progress-1.2-py2.py3-none-any.whl b/venv/share/python-wheels/progress-1.2-py2.py3-none-any.whl new file mode 100644 index 0000000..892afeb Binary files /dev/null and b/venv/share/python-wheels/progress-1.2-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/pyparsing-2.0.3-py2.py3-none-any.whl b/venv/share/python-wheels/pyparsing-2.0.3-py2.py3-none-any.whl new file mode 100644 index 0000000..cd81204 Binary files /dev/null and b/venv/share/python-wheels/pyparsing-2.0.3-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/requests-2.9.1-py2.py3-none-any.whl b/venv/share/python-wheels/requests-2.9.1-py2.py3-none-any.whl new file mode 100644 index 0000000..091c1ec Binary files /dev/null and b/venv/share/python-wheels/requests-2.9.1-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/retrying-1.3.3-py2.py3-none-any.whl b/venv/share/python-wheels/retrying-1.3.3-py2.py3-none-any.whl new file mode 100644 index 0000000..b5992ae Binary files /dev/null and b/venv/share/python-wheels/retrying-1.3.3-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/setuptools-20.7.0-py2.py3-none-any.whl b/venv/share/python-wheels/setuptools-20.7.0-py2.py3-none-any.whl new file mode 100644 index 0000000..10c5ea9 Binary files /dev/null and b/venv/share/python-wheels/setuptools-20.7.0-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/six-1.10.0-py2.py3-none-any.whl b/venv/share/python-wheels/six-1.10.0-py2.py3-none-any.whl new file mode 100644 index 0000000..7895a74 Binary files /dev/null and b/venv/share/python-wheels/six-1.10.0-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/urllib3-1.13.1-py2.py3-none-any.whl b/venv/share/python-wheels/urllib3-1.13.1-py2.py3-none-any.whl new file mode 100644 index 0000000..7b0cd26 Binary files /dev/null and b/venv/share/python-wheels/urllib3-1.13.1-py2.py3-none-any.whl differ diff --git a/venv/share/python-wheels/wheel-0.29.0-py2.py3-none-any.whl b/venv/share/python-wheels/wheel-0.29.0-py2.py3-none-any.whl new file mode 100644 index 0000000..71f3a5c Binary files /dev/null and b/venv/share/python-wheels/wheel-0.29.0-py2.py3-none-any.whl differ