add a virtual environment

Andreas Zweili 2017-03-11 12:29:56 +01:00
parent 45aab6dc05
commit c1c5701775
235 changed files with 30295 additions and 0 deletions

bin/activate (Normal file)

@@ -0,0 +1,76 @@
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly

deactivate () {
    # reset old environment variables
    if [ -n "$_OLD_VIRTUAL_PATH" ] ; then
        PATH="$_OLD_VIRTUAL_PATH"
        export PATH
        unset _OLD_VIRTUAL_PATH
    fi
    if [ -n "$_OLD_VIRTUAL_PYTHONHOME" ] ; then
        PYTHONHOME="$_OLD_VIRTUAL_PYTHONHOME"
        export PYTHONHOME
        unset _OLD_VIRTUAL_PYTHONHOME
    fi

    # This should detect bash and zsh, which have a hash command that must
    # be called to get it to forget past commands. Without forgetting
    # past commands the $PATH changes we made may not be respected
    if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
        hash -r
    fi

    if [ -n "$_OLD_VIRTUAL_PS1" ] ; then
        PS1="$_OLD_VIRTUAL_PS1"
        export PS1
        unset _OLD_VIRTUAL_PS1
    fi

    unset VIRTUAL_ENV
    if [ ! "$1" = "nondestructive" ] ; then
        # Self destruct!
        unset -f deactivate
    fi
}

# unset irrelevant variables
deactivate nondestructive

VIRTUAL_ENV="/home/andreas/git_repos/price_checker"
export VIRTUAL_ENV

_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/bin:$PATH"
export PATH

# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "$PYTHONHOME" ] ; then
    _OLD_VIRTUAL_PYTHONHOME="$PYTHONHOME"
    unset PYTHONHOME
fi

if [ -z "$VIRTUAL_ENV_DISABLE_PROMPT" ] ; then
    _OLD_VIRTUAL_PS1="$PS1"
    if [ "x(price_checker) " != x ] ; then
        PS1="(price_checker) $PS1"
    else
        if [ "`basename \"$VIRTUAL_ENV\"`" = "__" ] ; then
            # special case for Aspen magic directories
            # see http://www.zetadev.com/software/aspen/
            PS1="[`basename \`dirname \"$VIRTUAL_ENV\"\``] $PS1"
        else
            PS1="(`basename \"$VIRTUAL_ENV\"`)$PS1"
        fi
    fi
    export PS1
fi

# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "$BASH" -o -n "$ZSH_VERSION" ] ; then
    hash -r
fi

bin/activate.csh (Normal file)

@@ -0,0 +1,37 @@
# This file must be used with "source bin/activate.csh" *from csh*.
# You cannot run it directly.
# Created by Davide Di Blasi <davidedb@gmail.com>.
# Ported to Python 3.3 venv by Andrew Svetlov <andrew.svetlov@gmail.com>

alias deactivate 'test $?_OLD_VIRTUAL_PATH != 0 && setenv PATH "$_OLD_VIRTUAL_PATH" && unset _OLD_VIRTUAL_PATH; rehash; test $?_OLD_VIRTUAL_PROMPT != 0 && set prompt="$_OLD_VIRTUAL_PROMPT" && unset _OLD_VIRTUAL_PROMPT; unsetenv VIRTUAL_ENV; test "\!:*" != "nondestructive" && unalias deactivate'

# Unset irrelevant variables.
deactivate nondestructive

setenv VIRTUAL_ENV "/home/andreas/git_repos/price_checker"

set _OLD_VIRTUAL_PATH="$PATH"
setenv PATH "$VIRTUAL_ENV/bin:$PATH"

set _OLD_VIRTUAL_PROMPT="$prompt"

if (! "$?VIRTUAL_ENV_DISABLE_PROMPT") then
    if ("price_checker" != "") then
        set env_name = "price_checker"
    else
        if (`basename "$VIRTUAL_ENV"` == "__") then
            # special case for Aspen magic directories
            # see http://www.zetadev.com/software/aspen/
            set env_name = `basename \`dirname "$VIRTUAL_ENV"\``
        else
            set env_name = `basename "$VIRTUAL_ENV"`
        endif
    endif
    set prompt = "[$env_name] $prompt"
    unset env_name
endif

alias pydoc python -m pydoc

rehash

bin/activate.fish (Normal file)

@@ -0,0 +1,74 @@
# This file must be used with ". bin/activate.fish" *from fish* (http://fishshell.org)
# you cannot run it directly

function deactivate -d "Exit virtualenv and return to normal shell environment"
    # reset old environment variables
    if test -n "$_OLD_VIRTUAL_PATH"
        set -gx PATH $_OLD_VIRTUAL_PATH
        set -e _OLD_VIRTUAL_PATH
    end
    if test -n "$_OLD_VIRTUAL_PYTHONHOME"
        set -gx PYTHONHOME $_OLD_VIRTUAL_PYTHONHOME
        set -e _OLD_VIRTUAL_PYTHONHOME
    end

    if test -n "$_OLD_FISH_PROMPT_OVERRIDE"
        functions -e fish_prompt
        set -e _OLD_FISH_PROMPT_OVERRIDE
        . ( begin
                printf "function fish_prompt\n\t#"
                functions _old_fish_prompt
            end | psub )
        functions -e _old_fish_prompt
    end

    set -e VIRTUAL_ENV
    if test "$argv[1]" != "nondestructive"
        # Self destruct!
        functions -e deactivate
    end
end

# unset irrelevant variables
deactivate nondestructive

set -gx VIRTUAL_ENV "/home/andreas/git_repos/price_checker"

set -gx _OLD_VIRTUAL_PATH $PATH
set -gx PATH "$VIRTUAL_ENV/bin" $PATH

# unset PYTHONHOME if set
if set -q PYTHONHOME
    set -gx _OLD_VIRTUAL_PYTHONHOME $PYTHONHOME
    set -e PYTHONHOME
end

if test -z "$VIRTUAL_ENV_DISABLE_PROMPT"
    # fish uses a function instead of an env var to generate the prompt.

    # save the current fish_prompt function as the function _old_fish_prompt
    . ( begin
            printf "function _old_fish_prompt\n\t#"
            functions fish_prompt
        end | psub )

    # with the original prompt function renamed, we can override with our own.
    function fish_prompt
        # Prompt override?
        if test -n "(price_checker) "
            printf "%s%s%s" "(price_checker) " (set_color normal) (_old_fish_prompt)
            return
        end
        # ...Otherwise, prepend env
        set -l _checkbase (basename "$VIRTUAL_ENV")
        if test $_checkbase = "__"
            # special case for Aspen magic directories
            # see http://www.zetadev.com/software/aspen/
            printf "%s[%s]%s %s" (set_color -b blue white) (basename (dirname "$VIRTUAL_ENV")) (set_color normal) (_old_fish_prompt)
        else
            printf "%s(%s)%s%s" (set_color -b blue white) (basename "$VIRTUAL_ENV") (set_color normal) (_old_fish_prompt)
        end
    end

    set -gx _OLD_FISH_PROMPT_OVERRIDE "$VIRTUAL_ENV"
end

bin/easy_install (Executable file)

@@ -0,0 +1,11 @@
#!/home/andreas/git_repos/price_checker/bin/python3.4
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
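
The re.sub above strips an optional Windows-style wrapper suffix from sys.argv[0] so the name handed to setuptools matches the registered console script. A small self-contained sketch of that normalization (the strip_suffix helper is illustrative only, not part of the wrapper):

import re

def strip_suffix(argv0):
    # same pattern as the wrapper above: drop a trailing "-script.pyw" or ".exe"
    return re.sub(r'(-script\.pyw|\.exe)?$', '', argv0)

print(strip_suffix('pip-script.pyw'))  # -> pip
print(strip_suffix('pip.exe'))         # -> pip
print(strip_suffix('pip'))             # -> pip (unchanged)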

bin/easy_install-3.4 (Executable file)

@@ -0,0 +1,11 @@
#!/home/andreas/git_repos/price_checker/bin/python3.4
# -*- coding: utf-8 -*-
import re
import sys
from setuptools.command.easy_install import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

bin/pip (Executable file)

@@ -0,0 +1,11 @@
#!/home/andreas/git_repos/price_checker/bin/python3.4
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

bin/pip3 (Executable file)

@@ -0,0 +1,11 @@
#!/home/andreas/git_repos/price_checker/bin/python3.4
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

bin/pip3.4 (Executable file)

@@ -0,0 +1,11 @@
#!/home/andreas/git_repos/price_checker/bin/python3.4
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())

bin/python (Symbolic link)

@@ -0,0 +1 @@
/home/andreas/git_repos/price_checker/bin/python3.4

bin/python3 (Symbolic link)

@@ -0,0 +1 @@
/home/andreas/git_repos/price_checker/bin/python3.4

bin/python3.4 (Symbolic link)

@@ -0,0 +1 @@
/usr/bin/python3.4

7 binary files not shown.

@@ -0,0 +1,16 @@
try:
    import ast
    from _markerlib.markers import default_environment, compile, interpret
except ImportError:
    if 'ast' in globals():
        raise

    def default_environment():
        return {}

    def compile(marker):
        def marker_fn(environment=None, override=None):
            # 'empty markers are True' heuristic won't install extra deps.
            return not marker.strip()
        marker_fn.__doc__ = marker
        return marker_fn

    def interpret(marker, environment=None, override=None):
        return compile(marker)()
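
A minimal sketch of what this fallback means in practice: without a working ast module, only an empty marker evaluates to true, so conditional (extra) dependencies are skipped rather than mis-evaluated:

# Stand-alone restatement of the fallback above, for illustration.
def compile(marker):
    def marker_fn(environment=None, override=None):
        return not marker.strip()
    marker_fn.__doc__ = marker
    return marker_fn

def interpret(marker, environment=None, override=None):
    return compile(marker)()

print(interpret(''))                    # True: unconditional dependency
print(interpret("extra == 'testing'"))  # False: the extra is skipped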

@@ -0,0 +1,119 @@
# -*- coding: utf-8 -*-
"""Interpret PEP 345 environment markers.

EXPR [in|==|!=|not in] EXPR [or|and] ...

where EXPR belongs to any of those:

    python_version = '%s.%s' % (sys.version_info[0], sys.version_info[1])
    python_full_version = sys.version.split()[0]
    os.name = os.name
    sys.platform = sys.platform
    platform.version = platform.version()
    platform.machine = platform.machine()
    platform.python_implementation = platform.python_implementation()
    a free string, like '2.6', or 'win32'
"""

__all__ = ['default_environment', 'compile', 'interpret']

import ast
import os
import platform
import sys
import weakref

_builtin_compile = compile

try:
    from platform import python_implementation
except ImportError:
    if os.name == "java":
        # Jython 2.5 has ast module, but not platform.python_implementation() function.
        def python_implementation():
            return "Jython"
    else:
        raise


# restricted set of variables
_VARS = {'sys.platform': sys.platform,
         'python_version': '%s.%s' % sys.version_info[:2],
         # FIXME parsing sys.platform is not reliable, but there is no other
         # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
         'python_full_version': sys.version.split(' ', 1)[0],
         'os.name': os.name,
         'platform.version': platform.version(),
         'platform.machine': platform.machine(),
         'platform.python_implementation': python_implementation(),
         'extra': None  # wheel extension
         }

for var in list(_VARS.keys()):
    if '.' in var:
        _VARS[var.replace('.', '_')] = _VARS[var]


def default_environment():
    """Return copy of default PEP 345 globals dictionary."""
    return dict(_VARS)


class ASTWhitelist(ast.NodeTransformer):
    def __init__(self, statement):
        self.statement = statement  # for error messages

    ALLOWED = (ast.Compare, ast.BoolOp, ast.Attribute, ast.Name, ast.Load, ast.Str)
    # Bool operations
    ALLOWED += (ast.And, ast.Or)
    # Comparison operations
    ALLOWED += (ast.Eq, ast.Gt, ast.GtE, ast.In, ast.Is, ast.IsNot, ast.Lt, ast.LtE, ast.NotEq, ast.NotIn)

    def visit(self, node):
        """Ensure statement only contains allowed nodes."""
        if not isinstance(node, self.ALLOWED):
            raise SyntaxError('Not allowed in environment markers.\n%s\n%s' %
                              (self.statement,
                               (' ' * node.col_offset) + '^'))
        return ast.NodeTransformer.visit(self, node)

    def visit_Attribute(self, node):
        """Flatten one level of attribute access."""
        new_node = ast.Name("%s.%s" % (node.value.id, node.attr), node.ctx)
        return ast.copy_location(new_node, node)


def parse_marker(marker):
    tree = ast.parse(marker, mode='eval')
    new_tree = ASTWhitelist(marker).generic_visit(tree)
    return new_tree


def compile_marker(parsed_marker):
    return _builtin_compile(parsed_marker, '<environment marker>', 'eval',
                            dont_inherit=True)


_cache = weakref.WeakValueDictionary()


def compile(marker):
    """Return compiled marker as a function accepting an environment dict."""
    try:
        return _cache[marker]
    except KeyError:
        pass
    if not marker.strip():
        def marker_fn(environment=None, override=None):
            """"""
            return True
    else:
        compiled_marker = compile_marker(parse_marker(marker))

        def marker_fn(environment=None, override=None):
            """override updates environment"""
            if override is None:
                override = {}
            if environment is None:
                environment = default_environment()
            environment.update(override)
            return eval(compiled_marker, environment)
    marker_fn.__doc__ = marker
    _cache[marker] = marker_fn
    return _cache[marker]


def interpret(marker, environment=None):
    return compile(marker)(environment)
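
A short usage sketch, assuming this _markerlib package is on sys.path; overriding the 'sys.platform' key in the environment dict shows how interpret() can evaluate a marker for a platform other than the running one:

from _markerlib import default_environment, interpret

print(interpret("python_version >= '2.6'"))  # True on this interpreter

env = default_environment()
env['sys.platform'] = 'win32'                # pretend we are on Windows
print(interpret("sys.platform == 'win32'", env))  # True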

@@ -0,0 +1,5 @@
"""Run the EasyInstall command"""
if __name__ == '__main__':
from setuptools.command.easy_install import main
main()

@@ -0,0 +1,71 @@
Project Info
============

* Project Page: https://github.com/pypa/pip
* Install howto: https://pip.pypa.io/en/latest/installing.html
* Changelog: https://pip.pypa.io/en/latest/news.html
* Bug Tracking: https://github.com/pypa/pip/issues
* Mailing list: http://groups.google.com/group/python-virtualenv
* Docs: https://pip.pypa.io/
* User IRC: #pypa on Freenode.
* Dev IRC: #pypa-dev on Freenode.

Quickstart
==========

First, :doc:`Install pip <installing>`.

Install a package from `PyPI`_:

::

    $ pip install SomePackage
    [...]
    Successfully installed SomePackage

Show what files were installed:

::

    $ pip show --files SomePackage
    Name: SomePackage
    Version: 1.0
    Location: /my/env/lib/pythonx.x/site-packages
    Files:
     ../somepackage/__init__.py
    [...]

List what packages are outdated:

::

    $ pip list --outdated
    SomePackage (Current: 1.0 Latest: 2.0)

Upgrade a package:

::

    $ pip install --upgrade SomePackage
    [...]
    Found existing installation: SomePackage 1.0
    Uninstalling SomePackage:
      Successfully uninstalled SomePackage
    Running setup.py install for SomePackage
    Successfully installed SomePackage

Uninstall a package:

::

    $ pip uninstall SomePackage
    Uninstalling SomePackage:
      /my/env/lib/pythonx.x/site-packages/somepackage
    Proceed (y/n)? y
    Successfully uninstalled SomePackage

.. _PyPI: http://pypi.python.org/pypi/

@@ -0,0 +1,97 @@
Metadata-Version: 2.0
Name: pip
Version: 1.5.6
Summary: A tool for installing and managing Python packages.
Home-page: https://pip.pypa.io/
Author: The pip developers
Author-email: python-virtualenv@groups.google.com
License: MIT
Keywords: easy_install distutils setuptools egg virtualenv
Platform: UNKNOWN
Classifier: Development Status :: 5 - Production/Stable
Classifier: Intended Audience :: Developers
Classifier: License :: OSI Approved :: MIT License
Classifier: Topic :: Software Development :: Build Tools
Classifier: Programming Language :: Python :: 2
Classifier: Programming Language :: Python :: 2.6
Classifier: Programming Language :: Python :: 2.7
Classifier: Programming Language :: Python :: 3
Classifier: Programming Language :: Python :: 3.1
Classifier: Programming Language :: Python :: 3.2
Classifier: Programming Language :: Python :: 3.3
Provides-Extra: testing
Requires-Dist: pytest; extra == 'testing'
Requires-Dist: scripttest (>=1.3); extra == 'testing'
Requires-Dist: mock; extra == 'testing'
Project Info
============

* Project Page: https://github.com/pypa/pip
* Install howto: https://pip.pypa.io/en/latest/installing.html
* Changelog: https://pip.pypa.io/en/latest/news.html
* Bug Tracking: https://github.com/pypa/pip/issues
* Mailing list: http://groups.google.com/group/python-virtualenv
* Docs: https://pip.pypa.io/
* User IRC: #pypa on Freenode.
* Dev IRC: #pypa-dev on Freenode.

Quickstart
==========

First, :doc:`Install pip <installing>`.

Install a package from `PyPI`_:

::

    $ pip install SomePackage
    [...]
    Successfully installed SomePackage

Show what files were installed:

::

    $ pip show --files SomePackage
    Name: SomePackage
    Version: 1.0
    Location: /my/env/lib/pythonx.x/site-packages
    Files:
     ../somepackage/__init__.py
    [...]

List what packages are outdated:

::

    $ pip list --outdated
    SomePackage (Current: 1.0 Latest: 2.0)

Upgrade a package:

::

    $ pip install --upgrade SomePackage
    [...]
    Found existing installation: SomePackage 1.0
    Uninstalling SomePackage:
      Successfully uninstalled SomePackage
    Running setup.py install for SomePackage
    Successfully installed SomePackage

Uninstall a package:

::

    $ pip uninstall SomePackage
    Uninstalling SomePackage:
      /my/env/lib/pythonx.x/site-packages/somepackage
    Proceed (y/n)? y
    Successfully uninstalled SomePackage

.. _PyPI: http://pypi.python.org/pypi/

@@ -0,0 +1,80 @@
pip-1.5.6.dist-info/entry_points.txt,sha256=1-e4WB_Fe8mWHrMi1YQo_s5knbh0lu_uRmd8Wb6MJfY,68
pip-1.5.6.dist-info/WHEEL,sha256=AvR0WeTpDaxT645bl5FQxUK6NPsTls2ttpcGJg3j1Xg,110
pip-1.5.6.dist-info/metadata.json,sha256=FTHiqyD_sCWIrHxPzd_PkxOksmFyFLqpUwnT20gr9Gc,1379
pip-1.5.6.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
pip-1.5.6.dist-info/DESCRIPTION.rst,sha256=n5sT7bxCOnG9ej7TtEjjARQZ_n2ECqWFDiJK88BM0u0,1422
pip-1.5.6.dist-info/RECORD,,
pip-1.5.6.dist-info/METADATA,sha256=lzWjAnNXrB51bQ7xdjWidesBvidhIm0QMK5qdjl_KT4,2444
pip/pep425tags.py,sha256=jb5Rq395Gz_Uv8kn3L9Im1HX7EhEj8nqyYX0nXulzWo,2969
pip/basecommand.py,sha256=N_nE7BCcoMA7t2nRNTiJB8T__1XqI74SJI2G72VaM2E,6578
pip/index.py,sha256=STYPJFaliyW7bv3PqZZRN_BNBdPXYqdRs19VT__TzCM,40374
pip/wheel.py,sha256=kLXOu5T_PBIJ1vona020Tsb_S_qzcw_I2XrZ5gdRdjE,20551
pip/status_codes.py,sha256=sEFHUaUJbqv8iArL3HAtcztWZmGOFX01hTesSytDEh0,116
pip/__init__.py,sha256=KSoPZT91fVHfh_z_XkRFfYrupZIMzN35PsQWXpg-Ul4,12070
pip/locations.py,sha256=YyFyCLYADKgT5x-Ctj_LeZl5bEzkbBXuR2Iv8IbVqDA,6202
pip/__main__.py,sha256=9JBJhprGRLUy1fEvAdufs0tsjKFAvFAY_nTde6GDkHk,116
pip/cmdoptions.py,sha256=2E4AK_UNbyvJE6bmvlvsWhkrttE3jrwM_hNYpIecLfE,9330
pip/runner.py,sha256=VkcZKNikprpDOSl2Y3m0FaQbdGuYsoHkxdhjtL0N3oA,431
pip/log.py,sha256=YysOwj7x7qMnv8E8H5scLAqDUM2xrwXTkUj94EmdAwo,9438
pip/baseparser.py,sha256=GHPwVFcD9HVgQSNnmW5s7_tNa-lyLJbl5iQH9WnWBBo,8124
pip/exceptions.py,sha256=wAoboA4PdhGN7xH-ayf_dcDFPYZe9XAivAlZJbOgCN4,1086
pip/req.py,sha256=519qbgmVY_l_ymGNOiuI8Ya4ueyb3d4WJdaz6kLep0o,83782
pip/download.py,sha256=UWtMRDLPa0ONxw-LMmcCZ_3Hh9NiVnOQA4tHRUNACu8,22491
pip/util.py,sha256=y83oY2wa5OysB9JBCQt6EFM-2jdJ3T7xNe9Jl35o0kY,25816
pip/vcs/subversion.py,sha256=P31K7o83JdcipIyuEVlnpSp5KZqakb4OJ1PKT-FB7C8,10640
pip/vcs/__init__.py,sha256=kS31hLmJ6BgKnBu8kvXKQlJEwoj1MxYE7wfRuFL-daM,8748
pip/vcs/git.py,sha256=ib3TqDwJyfjBnSRFKVe_HhNdwkmfcOZfJHbqt2RUOVg,7898
pip/vcs/mercurial.py,sha256=71ESfgxotPPPZjiH6sMTBWcj5TS8kjgJxVnWrRb3bwo,5820
pip/vcs/bazaar.py,sha256=qUIuIqDJqwZ_nP6WR52YwvYVy1lvIUmvaT-IdxDYUHo,4943
pip/commands/show.py,sha256=klFQt5fhN5zydPnoe-IugUoWwTOK5WcT9GmjyWB04Wo,2750
pip/commands/bundle.py,sha256=tK8LU3Khjkrz65y3brNP71QOBkQCb9mlv9x8s1W02T4,1787
pip/commands/help.py,sha256=ETLg8xfv8uFwS3KvxmsCE-I56S15jUTvfkwaPAA18pE,927
pip/commands/install.py,sha256=JIorvRJM2a-O_UFqE8_2l7obIgrKx07Wzk7rG1tiRZY,13396
pip/commands/wheel.py,sha256=8CLXrGKgkAiBiZ56vBHlQa8rxWTDf-qzHv5hmAyRWPw,7891
pip/commands/list.py,sha256=FHf7H35AajbCuymiG2z8xAGNSx8W5CNZKj6Hh2QGo38,6814
pip/commands/__init__.py,sha256=N_4io-oGcWF9-raDN5TYXbGlJFsx5po36HZmwgLso6I,2236
pip/commands/search.py,sha256=RPaWhZ49vZnk0hiuNCjziniYZ_Ln5lNIbtamTC633rM,4719
pip/commands/uninstall.py,sha256=MF4zSLfMxnH3E8T673ORNWz0Bsc4C6LEI5KImpAQrck,2203
pip/commands/completion.py,sha256=LnJgUrpGGO4x2Y8VdwhKda4kGZWMFO28P4jYzYT5Q8k,1838
pip/commands/freeze.py,sha256=PFvq9h0joZOpnKv4-ohLIQ3dLhtyEIlebzhqKFiKvYo,4647
pip/commands/unzip.py,sha256=_PeTWKOd_iRxPt_7njQ8jGFpjX006vobn593tcIyeUc,185
pip/commands/zip.py,sha256=KECCb3oCHxJqDT3kUEnlf0udp31Ckoe8oyEKdS7EKNQ,14821
pip/backwardcompat/__init__.py,sha256=AcP5dr3nL-4AGxSwsFIEUcf9ki0ROUFwfc0IrIeHaJI,3756
/home/andreas/git_repos/price_checker/bin/pip,sha256=s0jidKRsV8InXj2au3WIUlOtBiEBlwaeJYsUlftKdLc,240
/home/andreas/git_repos/price_checker/bin/pip3,sha256=s0jidKRsV8InXj2au3WIUlOtBiEBlwaeJYsUlftKdLc,240
/home/andreas/git_repos/price_checker/bin/pip3.4,sha256=s0jidKRsV8InXj2au3WIUlOtBiEBlwaeJYsUlftKdLc,240
pip/__pycache__/__init__.cpython-34.pyc,,
pip/__pycache__/__main__.cpython-34.pyc,,
pip/commands/__pycache__/unzip.cpython-34.pyc,,
pip/backwardcompat/__pycache__/__init__.cpython-34.pyc,,
pip/__pycache__/wheel.cpython-34.pyc,,
pip/__pycache__/status_codes.cpython-34.pyc,,
pip/vcs/__pycache__/bazaar.cpython-34.pyc,,
pip/__pycache__/pep425tags.cpython-34.pyc,,
pip/commands/__pycache__/help.cpython-34.pyc,,
pip/commands/__pycache__/__init__.cpython-34.pyc,,
pip/__pycache__/runner.cpython-34.pyc,,
pip/commands/__pycache__/bundle.cpython-34.pyc,,
pip/commands/__pycache__/list.cpython-34.pyc,,
pip/commands/__pycache__/search.cpython-34.pyc,,
pip/commands/__pycache__/uninstall.cpython-34.pyc,,
pip/commands/__pycache__/install.cpython-34.pyc,,
pip/vcs/__pycache__/mercurial.cpython-34.pyc,,
pip/__pycache__/exceptions.cpython-34.pyc,,
pip/__pycache__/index.cpython-34.pyc,,
pip/vcs/__pycache__/git.cpython-34.pyc,,
pip/commands/__pycache__/freeze.cpython-34.pyc,,
pip/__pycache__/locations.cpython-34.pyc,,
pip/__pycache__/req.cpython-34.pyc,,
pip/__pycache__/log.cpython-34.pyc,,
pip/commands/__pycache__/show.cpython-34.pyc,,
pip/__pycache__/baseparser.cpython-34.pyc,,
pip/vcs/__pycache__/subversion.cpython-34.pyc,,
pip/vcs/__pycache__/__init__.cpython-34.pyc,,
pip/commands/__pycache__/wheel.cpython-34.pyc,,
pip/commands/__pycache__/zip.cpython-34.pyc,,
pip/commands/__pycache__/completion.cpython-34.pyc,,
pip/__pycache__/download.cpython-34.pyc,,
pip/__pycache__/basecommand.cpython-34.pyc,,
pip/__pycache__/cmdoptions.cpython-34.pyc,,
pip/__pycache__/util.cpython-34.pyc,,
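
Each row above is path, hash, size, where the hash is "sha256=" followed by an unpadded urlsafe-base64 digest (the wheel RECORD format). A sketch of verifying such rows; check_record_row is illustrative and assumes it runs from the directory the relative paths are rooted at:

import base64
import csv
import hashlib

def check_record_row(row):
    path, hash_spec, size = row
    if not hash_spec:  # e.g. RECORD itself and .pyc entries carry no hash
        return True
    algo, _, expected = hash_spec.partition('=')
    with open(path, 'rb') as f:
        data = f.read()
    digest = base64.urlsafe_b64encode(getattr(hashlib, algo)(data).digest())
    return digest.rstrip(b'=').decode('ascii') == expected and len(data) == int(size)

with open('pip-1.5.6.dist-info/RECORD') as f:
    for row in csv.reader(f):
        if not check_record_row(row):
            print('mismatch: %s' % row[0])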

@@ -0,0 +1,6 @@
Wheel-Version: 1.0
Generator: bdist_wheel (0.24.0)
Root-Is-Purelib: true
Tag: py2-none-any
Tag: py3-none-any

@@ -0,0 +1,5 @@
[console_scripts]
pip = pip:main
pip3 = pip:main
pip3.4 = pip:main
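
A sketch of how such a [console_scripts] line is resolved at runtime, assuming setuptools' pkg_resources is importable and pip 1.5.6 is the installed distribution; the bin/pip wrapper above simply hard-codes the same "pip = pip:main" mapping:

import pkg_resources

main = pkg_resources.load_entry_point('pip==1.5.6', 'console_scripts', 'pip')
# equivalent, given the entry points above, to: from pip import main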

@@ -0,0 +1 @@
{"license": "MIT", "metadata_version": "2.0", "extras": ["testing"], "version": "1.5.6", "test_requires": [{"requires": ["pytest", "scripttest (>=1.3)", "mock"]}], "keywords": ["easy_install", "distutils", "setuptools", "egg", "virtualenv"], "run_requires": [{"requires": ["pytest", "scripttest (>=1.3)", "mock"], "extra": "testing"}], "generator": "bdist_wheel (0.24.0)", "extensions": {"python.details": {"contacts": [{"role": "author", "email": "python-virtualenv@groups.google.com", "name": "The pip developers"}], "document_names": {"description": "DESCRIPTION.rst"}, "project_urls": {"Home": "https://pip.pypa.io/"}}, "python.exports": {"console_scripts": {"pip3": "pip:main", "pip": "pip:main", "pip3.4": "pip:main"}}, "python.commands": {"wrap_console": {"pip3": "pip:main", "pip": "pip:main", "pip3.4": "pip:main"}}}, "classifiers": ["Development Status :: 5 - Production/Stable", "Intended Audience :: Developers", "License :: OSI Approved :: MIT License", "Topic :: Software Development :: Build Tools", "Programming Language :: Python :: 2", "Programming Language :: Python :: 2.6", "Programming Language :: Python :: 2.7", "Programming Language :: Python :: 3", "Programming Language :: Python :: 3.1", "Programming Language :: Python :: 3.2", "Programming Language :: Python :: 3.3"], "summary": "A tool for installing and managing Python packages.", "name": "pip"}

@@ -0,0 +1 @@
pip

@@ -0,0 +1,340 @@
#!/usr/bin/env python
import os
import optparse

import sys
import re
import errno

# Upstream pip vendorizes a bunch of its dependencies. Debian de-vendorizes
# (unbundles) these dependencies to be compliant with Debian policy. Instead,
# these dependencies are packaged as wheel (.whl) files in a known location.
# When pip itself executes, we have to arrange for these wheels to show up
# earlier on sys.path than any other version of these packages, otherwise
# things can break. See for example Bug #744145.
#
# The location of the wheels differs depending on whether we're inside or
# outside a virtual environment, regardless of whether that venv was created
# with virtualenv or pyvenv. The first thing we have to do is figure out if
# we're inside or outside a venv, then search the appropriate wheel directory
# and add all the .whls found there to the front of sys.path. As per Debian
# Python Policy, only the wheels needed to support this de-vendorization will
# be present, so it's safe to add them all.
#
# venv determination is a bit of a black art, but this algorithm should work
# in both Python 2 (virtualenv-only) and Python 3 (pyvenv and virtualenv). -
# updated by barry@debian.org 2015-02-25

base_prefix = getattr(sys, 'base_prefix', None)
real_prefix = getattr(sys, 'real_prefix', None)
if base_prefix is None:
    # Python 2 has no base_prefix at all. It also has no pyvenv. Fall back
    # to checking real_prefix.
    if real_prefix is None:
        # We are not in a venv.
        in_venv = False
    else:
        # We're in a Python 2 virtualenv created venv, but real_prefix should
        # never be the same as sys.prefix.
        assert sys.prefix != real_prefix
        in_venv = True
elif sys.prefix != base_prefix:
    # We're in a Python 3, pyvenv created venv.
    in_venv = True
elif real_prefix is None:
    # We're in Python 3, outside a venv, but base better equal prefix.
    assert sys.prefix == base_prefix
    in_venv = False
else:
    # We're in a Python 3, virtualenv created venv.
    assert real_prefix != sys.prefix
    in_venv = True

if in_venv:
    wheel_dir = os.path.join(sys.prefix, 'lib', 'python-wheels')
else:
    wheel_dir = '/usr/share/python-wheels'

# We'll add all the wheels we find to the front of sys.path so that they're
# found first, even if the same dependencies are available in site-packages.
try:
    for filename in os.listdir(wheel_dir):
        if os.path.splitext(filename)[1] == '.whl':
            sys.path.insert(0, os.path.join(wheel_dir, filename))
# FileNotFoundError doesn't exist in Python 2, but ignore it anyway.
except OSError as error:
    if error.errno != errno.ENOENT:
        raise

from pip.exceptions import InstallationError, CommandError, PipError
from pip.log import logger
from pip.util import get_installed_distributions, get_prog
from pip.vcs import git, mercurial, subversion, bazaar  # noqa
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.commands import commands, get_summaries, get_similar_commands

# This fixes a peculiarity when importing via __import__ - as we are
# initialising the pip module, "from pip import cmdoptions" is recursive
# and appears not to work properly in that situation.
import pip.cmdoptions
cmdoptions = pip.cmdoptions

# The version as used in the setup.py and the docs conf.py
__version__ = "1.5.6"


def autocomplete():
    """Command and option completion for the main option parser (and options)
    and its subcommands (and options).

    Enable by sourcing one of the completion shell scripts (bash or zsh).
    """
    # Don't complete if user hasn't sourced bash_completion file.
    if 'PIP_AUTO_COMPLETE' not in os.environ:
        return
    cwords = os.environ['COMP_WORDS'].split()[1:]
    cword = int(os.environ['COMP_CWORD'])
    try:
        current = cwords[cword - 1]
    except IndexError:
        current = ''

    subcommands = [cmd for cmd, summary in get_summaries()]
    options = []
    # subcommand
    try:
        subcommand_name = [w for w in cwords if w in subcommands][0]
    except IndexError:
        subcommand_name = None

    parser = create_main_parser()
    # subcommand options
    if subcommand_name:
        # special case: 'help' subcommand has no options
        if subcommand_name == 'help':
            sys.exit(1)
        # special case: list locally installed dists for uninstall command
        if subcommand_name == 'uninstall' and not current.startswith('-'):
            installed = []
            lc = current.lower()
            for dist in get_installed_distributions(local_only=True):
                if dist.key.startswith(lc) and dist.key not in cwords[1:]:
                    installed.append(dist.key)
            # if there are no dists installed, fall back to option completion
            if installed:
                for dist in installed:
                    print(dist)
                sys.exit(1)

        subcommand = commands[subcommand_name]()
        options += [(opt.get_opt_string(), opt.nargs)
                    for opt in subcommand.parser.option_list_all
                    if opt.help != optparse.SUPPRESS_HELP]

        # filter out previously specified options from available options
        prev_opts = [x.split('=')[0] for x in cwords[1:cword - 1]]
        options = [(x, v) for (x, v) in options if x not in prev_opts]
        # filter options by current input
        options = [(k, v) for k, v in options if k.startswith(current)]
        for option in options:
            opt_label = option[0]
            # append '=' to options which require args
            if option[1]:
                opt_label += '='
            print(opt_label)
    else:
        # show main parser options only when necessary
        if current.startswith('-') or current.startswith('--'):
            opts = [i.option_list for i in parser.option_groups]
            opts.append(parser.option_list)
            opts = (o for it in opts for o in it)

            subcommands += [i.get_opt_string() for i in opts
                            if i.help != optparse.SUPPRESS_HELP]

        print(' '.join([x for x in subcommands if x.startswith(current)]))
    sys.exit(1)


def create_main_parser():
    parser_kw = {
        'usage': '\n%prog <command> [options]',
        'add_help_option': False,
        'formatter': UpdatingDefaultsHelpFormatter(),
        'name': 'global',
        'prog': get_prog(),
    }

    parser = ConfigOptionParser(**parser_kw)
    parser.disable_interspersed_args()

    pip_pkg_dir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
    parser.version = 'pip %s from %s (python %s)' % (
        __version__, pip_pkg_dir, sys.version[:3])

    # add the general options
    gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, parser)
    parser.add_option_group(gen_opts)

    parser.main = True  # so the help formatter knows

    # create command listing for description
    command_summaries = get_summaries()
    description = [''] + ['%-27s %s' % (i, j) for i, j in command_summaries]
    parser.description = '\n'.join(description)

    return parser


def parseopts(args):
    parser = create_main_parser()

    # Note: parser calls disable_interspersed_args(), so the result of this call
    # is to split the initial args into the general options before the
    # subcommand and everything else.
    # For example:
    #  args: ['--timeout=5', 'install', '--user', 'INITools']
    #  general_options: ['--timeout=5']
    #  args_else: ['install', '--user', 'INITools']
    general_options, args_else = parser.parse_args(args)

    # --version
    if general_options.version:
        sys.stdout.write(parser.version)
        sys.stdout.write(os.linesep)
        sys.exit()

    # pip || pip help -> print_help()
    if not args_else or (args_else[0] == 'help' and len(args_else) == 1):
        parser.print_help()
        sys.exit()

    # the subcommand name
    cmd_name = args_else[0].lower()

    # all the args without the subcommand
    cmd_args = args[:]
    cmd_args.remove(args_else[0].lower())

    if cmd_name not in commands:
        guess = get_similar_commands(cmd_name)

        msg = ['unknown command "%s"' % cmd_name]
        if guess:
            msg.append('maybe you meant "%s"' % guess)

        raise CommandError(' - '.join(msg))

    return cmd_name, cmd_args


def main(initial_args=None):
    if initial_args is None:
        initial_args = sys.argv[1:]

    autocomplete()

    try:
        cmd_name, cmd_args = parseopts(initial_args)
    except PipError:
        e = sys.exc_info()[1]
        sys.stderr.write("ERROR: %s" % e)
        sys.stderr.write(os.linesep)
        sys.exit(1)

    command = commands[cmd_name]()
    return command.main(cmd_args)


def bootstrap():
    """
    Bootstrapping function to be called from install-pip.py script.
    """
    pkgs = ['pip']
    try:
        import setuptools
    except ImportError:
        pkgs.append('setuptools')
    return main(['install', '--upgrade'] + pkgs + sys.argv[1:])

############################################################
## Writing freeze files


class FrozenRequirement(object):

    def __init__(self, name, req, editable, comments=()):
        self.name = name
        self.req = req
        self.editable = editable
        self.comments = comments

    _rev_re = re.compile(r'-r(\d+)$')
    _date_re = re.compile(r'-(20\d\d\d\d\d\d)$')

    @classmethod
    def from_dist(cls, dist, dependency_links, find_tags=False):
        location = os.path.normcase(os.path.abspath(dist.location))
        comments = []
        from pip.vcs import vcs, get_src_requirement
        if vcs.get_backend_name(location):
            editable = True
            try:
                req = get_src_requirement(dist, location, find_tags)
            except InstallationError:
                ex = sys.exc_info()[1]
                logger.warn("Error when trying to get requirement for VCS system %s, falling back to uneditable format" % ex)
                req = None
            if req is None:
                logger.warn('Could not determine repository location of %s' % location)
                comments.append('## !! Could not determine repository location')
                req = dist.as_requirement()
                editable = False
        else:
            editable = False
            req = dist.as_requirement()
            specs = req.specs
            assert len(specs) == 1 and specs[0][0] == '=='
            version = specs[0][1]
            ver_match = cls._rev_re.search(version)
            date_match = cls._date_re.search(version)
            if ver_match or date_match:
                svn_backend = vcs.get_backend('svn')
                if svn_backend:
                    svn_location = svn_backend(
                    ).get_location(dist, dependency_links)
                if not svn_location:
                    logger.warn(
                        'Warning: cannot find svn location for %s' % req)
                    comments.append('## FIXME: could not find svn URL in dependency_links for this package:')
                else:
                    comments.append('# Installing as editable to satisfy requirement %s:' % req)
                    if ver_match:
                        rev = ver_match.group(1)
                    else:
                        rev = '{%s}' % date_match.group(1)
                    editable = True
                    req = '%s@%s#egg=%s' % (svn_location, rev, cls.egg_name(dist))
        return cls(dist.project_name, req, editable, comments)

    @staticmethod
    def egg_name(dist):
        name = dist.egg_name()
        match = re.search(r'-py\d\.\d$', name)
        if match:
            name = name[:match.start()]
        return name

    def __str__(self):
        req = self.req
        if self.editable:
            req = '-e %s' % req
        return '\n'.join(list(self.comments) + [str(req)]) + '\n'


if __name__ == '__main__':
    exit = main()
    if exit:
        sys.exit(exit)
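
Because main() accepts the argv tail and returns the command's exit status, this pip can also be driven in-process; a small sketch:

import pip

status = pip.main(['list'])  # same effect as running "pip list"
if status:
    raise SystemExit(status)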

@@ -0,0 +1,7 @@
import sys

from .runner import run

if __name__ == '__main__':
    exit = run()
    if exit:
        sys.exit(exit)

@@ -0,0 +1,138 @@
"""Stuff that differs in different Python versions and platform
distributions."""
import os
import imp
import sys
import site
__all__ = ['WindowsError']
uses_pycache = hasattr(imp, 'cache_from_source')
class NeverUsedException(Exception):
"""this exception should never be raised"""
try:
WindowsError = WindowsError
except NameError:
WindowsError = NeverUsedException
try:
#new in Python 3.3
PermissionError = PermissionError
except NameError:
PermissionError = NeverUsedException
console_encoding = sys.__stdout__.encoding
if sys.version_info >= (3,):
from io import StringIO, BytesIO
from functools import reduce
from urllib.error import URLError, HTTPError
from queue import Queue, Empty
from urllib.request import url2pathname, urlretrieve, pathname2url
from email import message as emailmessage
import urllib.parse as urllib
import urllib.request as urllib2
import configparser as ConfigParser
import xmlrpc.client as xmlrpclib
import urllib.parse as urlparse
import http.client as httplib
def cmp(a, b):
return (a > b) - (a < b)
def b(s):
return s.encode('utf-8')
def u(s):
return s.decode('utf-8')
def console_to_str(s):
try:
return s.decode(console_encoding)
except UnicodeDecodeError:
return s.decode('utf_8')
def get_http_message_param(http_message, param, default_value):
return http_message.get_param(param, default_value)
bytes = bytes
string_types = (str,)
raw_input = input
else:
from cStringIO import StringIO
from urllib2 import URLError, HTTPError
from Queue import Queue, Empty
from urllib import url2pathname, urlretrieve, pathname2url
from email import Message as emailmessage
import urllib
import urllib2
import urlparse
import ConfigParser
import xmlrpclib
import httplib
def b(s):
return s
def u(s):
return s
def console_to_str(s):
return s
def get_http_message_param(http_message, param, default_value):
result = http_message.getparam(param)
return result or default_value
bytes = str
string_types = (basestring,)
reduce = reduce
cmp = cmp
raw_input = raw_input
BytesIO = StringIO
from distutils.sysconfig import get_python_lib, get_python_version
#site.USER_SITE was created in py2.6
user_site = getattr(site, 'USER_SITE', None)
def product(*args, **kwds):
# product('ABCD', 'xy') --> Ax Ay Bx By Cx Cy Dx Dy
# product(range(2), repeat=3) --> 000 001 010 011 100 101 110 111
pools = list(map(tuple, args)) * kwds.get('repeat', 1)
result = [[]]
for pool in pools:
result = [x + [y] for x in result for y in pool]
for prod in result:
yield tuple(prod)
def get_path_uid(path):
"""
Return path's uid.
Does not follow symlinks: https://github.com/pypa/pip/pull/935#discussion_r5307003
Placed this function in backwardcompat due to differences on AIX and Jython,
that should eventually go away.
:raises OSError: When path is a symlink or can't be read.
"""
if hasattr(os, 'O_NOFOLLOW'):
fd = os.open(path, os.O_RDONLY | os.O_NOFOLLOW)
file_uid = os.fstat(fd).st_uid
os.close(fd)
else: # AIX and Jython
# WARNING: time of check vulnerabity, but best we can do w/o NOFOLLOW
if not os.path.islink(path):
# older versions of Jython don't have `os.fstat`
file_uid = os.stat(path).st_uid
else:
# raise OSError for parity with os.O_NOFOLLOW above
raise OSError("%s is a symlink; Will not return uid for symlinks" % path)
return file_uid
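
A usage sketch for get_path_uid on POSIX (os.getuid does not exist on Windows): decide whether the current user owns a path, without following symlinks:

import os
from pip.backwardcompat import get_path_uid

path = os.getcwd()
try:
    owned_by_me = get_path_uid(path) == os.getuid()
except OSError:
    owned_by_me = False  # symlink or unreadable path
print(owned_by_me)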

@@ -0,0 +1,201 @@
"""Base Command class, and related routines"""
import os
import sys
import tempfile
import traceback
import time
import optparse
from pip import cmdoptions
from pip.locations import running_under_virtualenv
from pip.log import logger
from pip.download import PipSession
from pip.exceptions import (BadCommand, InstallationError, UninstallationError,
CommandError, PreviousBuildDirError)
from pip.backwardcompat import StringIO
from pip.baseparser import ConfigOptionParser, UpdatingDefaultsHelpFormatter
from pip.status_codes import (SUCCESS, ERROR, UNKNOWN_ERROR, VIRTUALENV_NOT_FOUND,
PREVIOUS_BUILD_DIR_ERROR)
from pip.util import get_prog
__all__ = ['Command']
class Command(object):
name = None
usage = None
hidden = False
def __init__(self):
parser_kw = {
'usage': self.usage,
'prog': '%s %s' % (get_prog(), self.name),
'formatter': UpdatingDefaultsHelpFormatter(),
'add_help_option': False,
'name': self.name,
'description': self.__doc__,
}
self.parser = ConfigOptionParser(**parser_kw)
# Commands should add options to this option group
optgroup_name = '%s Options' % self.name.capitalize()
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
# Add the general options
gen_opts = cmdoptions.make_option_group(cmdoptions.general_group, self.parser)
self.parser.add_option_group(gen_opts)
def _build_session(self, options):
session = PipSession()
# Handle custom ca-bundles from the user
if options.cert:
session.verify = options.cert
# Handle timeouts
if options.timeout:
session.timeout = options.timeout
# Handle configured proxies
if options.proxy:
session.proxies = {
"http": options.proxy,
"https": options.proxy,
}
# Determine if we can prompt the user for authentication or not
session.auth.prompting = not options.no_input
return session
def setup_logging(self):
pass
def parse_args(self, args):
# factored out for testability
return self.parser.parse_args(args)
def main(self, args):
options, args = self.parse_args(args)
level = 1 # Notify
level += options.verbose
level -= options.quiet
level = logger.level_for_integer(4 - level)
complete_log = []
logger.add_consumers(
(level, sys.stdout),
(logger.DEBUG, complete_log.append),
)
if options.log_explicit_levels:
logger.explicit_levels = True
self.setup_logging()
#TODO: try to get these passing down from the command?
# without resorting to os.environ to hold these.
if options.no_input:
os.environ['PIP_NO_INPUT'] = '1'
if options.exists_action:
os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
if options.require_venv:
# If a venv is required check if it can really be found
if not running_under_virtualenv():
logger.fatal('Could not find an activated virtualenv (required).')
sys.exit(VIRTUALENV_NOT_FOUND)
if options.log:
log_fp = open_logfile(options.log, 'a')
logger.add_consumers((logger.DEBUG, log_fp))
else:
log_fp = None
exit = SUCCESS
store_log = False
try:
status = self.run(options, args)
# FIXME: all commands should return an exit status
# and when it is done, isinstance is not needed anymore
if isinstance(status, int):
exit = status
except PreviousBuildDirError:
e = sys.exc_info()[1]
logger.fatal(str(e))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = PREVIOUS_BUILD_DIR_ERROR
except (InstallationError, UninstallationError):
e = sys.exc_info()[1]
logger.fatal(str(e))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except BadCommand:
e = sys.exc_info()[1]
logger.fatal(str(e))
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except CommandError:
e = sys.exc_info()[1]
logger.fatal('ERROR: %s' % e)
logger.info('Exception information:\n%s' % format_exc())
exit = ERROR
except KeyboardInterrupt:
logger.fatal('Operation cancelled by user')
logger.info('Exception information:\n%s' % format_exc())
store_log = True
exit = ERROR
except:
logger.fatal('Exception:\n%s' % format_exc())
store_log = True
exit = UNKNOWN_ERROR
if store_log:
log_file_fn = options.log_file
text = '\n'.join(complete_log)
try:
log_file_fp = open_logfile(log_file_fn, 'w')
except IOError:
temp = tempfile.NamedTemporaryFile(delete=False)
log_file_fn = temp.name
log_file_fp = open_logfile(log_file_fn, 'w')
logger.fatal('Storing debug log for failure in %s' % log_file_fn)
log_file_fp.write(text)
log_file_fp.close()
if log_fp is not None:
log_fp.close()
return exit
def format_exc(exc_info=None):
if exc_info is None:
exc_info = sys.exc_info()
out = StringIO()
traceback.print_exception(*exc_info, **dict(file=out))
return out.getvalue()
def open_logfile(filename, mode='a'):
"""Open the named log file in append mode.
If the file already exists, a separator will also be printed to
the file to separate past activity from current activity.
"""
filename = os.path.expanduser(filename)
filename = os.path.abspath(filename)
dirname = os.path.dirname(filename)
if not os.path.exists(dirname):
os.makedirs(dirname)
exists = os.path.exists(filename)
log_fp = open(filename, mode)
if exists:
log_fp.write('%s\n' % ('-' * 60))
log_fp.write('%s run on %s\n' % (sys.argv[0], time.strftime('%c')))
return log_fp
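
A sketch of how a subcommand plugs into the Command base class above; EchoCommand is hypothetical and would additionally need an entry in pip.commands.commands to become reachable from the command line:

from pip.basecommand import Command
from pip.status_codes import SUCCESS

class EchoCommand(Command):
    """Print the arguments given to it."""
    name = 'echo'
    usage = '%prog [options] <args>'
    summary = 'Print arguments (demo).'

    def run(self, options, args):
        print(' '.join(args))
        return SUCCESS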

@@ -0,0 +1,223 @@
"""Base option parser setup"""
import sys
import optparse
import os
import textwrap
from distutils.util import strtobool
from pip.backwardcompat import ConfigParser, string_types
from pip.locations import default_config_file
from pip.util import get_terminal_size, get_prog
class PrettyHelpFormatter(optparse.IndentedHelpFormatter):
"""A prettier/less verbose help formatter for optparse."""
def __init__(self, *args, **kwargs):
# help position must be aligned with __init__.parseopts.description
kwargs['max_help_position'] = 30
kwargs['indent_increment'] = 1
kwargs['width'] = get_terminal_size()[0] - 2
optparse.IndentedHelpFormatter.__init__(self, *args, **kwargs)
def format_option_strings(self, option):
return self._format_option_strings(option, ' <%s>', ', ')
def _format_option_strings(self, option, mvarfmt=' <%s>', optsep=', '):
"""
Return a comma-separated list of option strings and metavars.
:param option: tuple of (short opt, long opt), e.g: ('-f', '--format')
:param mvarfmt: metavar format string - evaluated as mvarfmt % metavar
:param optsep: separator
"""
opts = []
if option._short_opts:
opts.append(option._short_opts[0])
if option._long_opts:
opts.append(option._long_opts[0])
if len(opts) > 1:
opts.insert(1, optsep)
if option.takes_value():
metavar = option.metavar or option.dest.lower()
opts.append(mvarfmt % metavar.lower())
return ''.join(opts)
def format_heading(self, heading):
if heading == 'Options':
return ''
return heading + ':\n'
def format_usage(self, usage):
"""
Ensure there is only one newline between usage and the first heading
if there is no description.
"""
msg = '\nUsage: %s\n' % self.indent_lines(textwrap.dedent(usage), " ")
return msg
def format_description(self, description):
# leave full control over description to us
if description:
if hasattr(self.parser, 'main'):
label = 'Commands'
else:
label = 'Description'
#some doc strings have inital newlines, some don't
description = description.lstrip('\n')
#some doc strings have final newlines and spaces, some don't
description = description.rstrip()
#dedent, then reindent
description = self.indent_lines(textwrap.dedent(description), " ")
description = '%s:\n%s\n' % (label, description)
return description
else:
return ''
def format_epilog(self, epilog):
# leave full control over epilog to us
if epilog:
return epilog
else:
return ''
def indent_lines(self, text, indent):
new_lines = [indent + line for line in text.split('\n')]
return "\n".join(new_lines)
class UpdatingDefaultsHelpFormatter(PrettyHelpFormatter):
"""Custom help formatter for use in ConfigOptionParser that updates
the defaults before expanding them, allowing them to show up correctly
in the help listing"""
def expand_default(self, option):
if self.parser is not None:
self.parser.update_defaults(self.parser.defaults)
return optparse.IndentedHelpFormatter.expand_default(self, option)
class CustomOptionParser(optparse.OptionParser):
def insert_option_group(self, idx, *args, **kwargs):
"""Insert an OptionGroup at a given position."""
group = self.add_option_group(*args, **kwargs)
self.option_groups.pop()
self.option_groups.insert(idx, group)
return group
@property
def option_list_all(self):
"""Get a list of all options, including those in option groups."""
res = self.option_list[:]
for i in self.option_groups:
res.extend(i.option_list)
return res
class ConfigOptionParser(CustomOptionParser):
"""Custom option parser which updates its defaults by checking the
configuration files and environmental variables"""
def __init__(self, *args, **kwargs):
self.config = ConfigParser.RawConfigParser()
self.name = kwargs.pop('name')
self.files = self.get_config_files()
if self.files:
self.config.read(self.files)
assert self.name
optparse.OptionParser.__init__(self, *args, **kwargs)
def get_config_files(self):
config_file = os.environ.get('PIP_CONFIG_FILE', False)
if config_file == os.devnull:
return []
if config_file and os.path.exists(config_file):
return [config_file]
return [default_config_file]
def check_default(self, option, key, val):
try:
return option.check_value(key, val)
except optparse.OptionValueError:
e = sys.exc_info()[1]
print("An error occurred during configuration: %s" % e)
sys.exit(3)
def update_defaults(self, defaults):
"""Updates the given defaults with values from the config files and
the environ. Does a little special handling for certain types of
options (lists)."""
# Then go and look for the other sources of configuration:
config = {}
# 1. config files
for section in ('global', self.name):
config.update(self.normalize_keys(self.get_config_section(section)))
# 2. environmental variables
config.update(self.normalize_keys(self.get_environ_vars()))
# Then set the options with those values
for key, val in config.items():
option = self.get_option(key)
if option is not None:
# ignore empty values
if not val:
continue
if option.action in ('store_true', 'store_false', 'count'):
val = strtobool(val)
if option.action == 'append':
val = val.split()
val = [self.check_default(option, key, v) for v in val]
else:
val = self.check_default(option, key, val)
defaults[option.dest] = val
return defaults
def normalize_keys(self, items):
"""Return a config dictionary with normalized keys regardless of
whether the keys were specified in environment variables or in config
files"""
normalized = {}
for key, val in items:
key = key.replace('_', '-')
if not key.startswith('--'):
key = '--%s' % key # only prefer long opts
normalized[key] = val
return normalized
def get_config_section(self, name):
"""Get a section of a configuration"""
if self.config.has_section(name):
return self.config.items(name)
return []
def get_environ_vars(self, prefix='PIP_'):
"""Returns a generator with all environmental vars with prefix PIP_"""
for key, val in os.environ.items():
if key.startswith(prefix):
yield (key.replace(prefix, '').lower(), val)
def get_default_values(self):
"""Overridding to make updating the defaults after instantiation of
the option parser possible, update_defaults() does the dirty work."""
if not self.process_default_values:
# Old, pre-Optik 1.5 behaviour.
return optparse.Values(self.defaults)
defaults = self.update_defaults(self.defaults.copy()) # ours
for option in self._get_all_options():
default = defaults.get(option.dest)
if isinstance(default, string_types):
opt_str = option.get_opt_string()
defaults[option.dest] = option.check_value(opt_str, default)
return optparse.Values(defaults)
def error(self, msg):
self.print_usage(sys.stderr)
self.exit(2, "%s\n" % msg)
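
get_environ_vars() plus normalize_keys() is what turns PIP_* environment variables into option defaults inside update_defaults(). A stand-alone sketch mirroring that mapping:

import os

def env_defaults(prefix='PIP_'):
    # mirrors get_environ_vars() + normalize_keys() above
    normalized = {}
    for key, val in os.environ.items():
        if key.startswith(prefix):
            key = key[len(prefix):].lower().replace('_', '-')
            normalized['--%s' % key] = val
    return normalized

os.environ['PIP_DEFAULT_TIMEOUT'] = '60'
print(env_defaults())  # includes {'--default-timeout': '60'}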

@@ -0,0 +1,369 @@
"""
shared options and groups
The principle here is to define options once, but *not* instantiate them globally.
One reason being that options with action='append' can carry state between parses.
pip parse's general options twice internally, and shouldn't pass on state.
To be consistent, all options will follow this design.
"""
import copy
from optparse import OptionGroup, SUPPRESS_HELP, Option
from pip.locations import default_log_file
def make_option_group(group, parser):
"""
Return an OptionGroup object
group -- assumed to be dict with 'name' and 'options' keys
parser -- an optparse Parser
"""
option_group = OptionGroup(parser, group['name'])
for option in group['options']:
option_group.add_option(option.make())
return option_group
class OptionMaker(object):
"""Class that stores the args/kwargs that would be used to make an Option,
for making them later, and uses deepcopy's to reset state."""
def __init__(self, *args, **kwargs):
self.args = args
self.kwargs = kwargs
def make(self):
args_copy = copy.deepcopy(self.args)
kwargs_copy = copy.deepcopy(self.kwargs)
return Option(*args_copy, **kwargs_copy)
###########
# options #
###########
help_ = OptionMaker(
'-h', '--help',
dest='help',
action='help',
help='Show help.')
require_virtualenv = OptionMaker(
# Run only if inside a virtualenv, bail if not.
'--require-virtualenv', '--require-venv',
dest='require_venv',
action='store_true',
default=False,
help=SUPPRESS_HELP)
verbose = OptionMaker(
'-v', '--verbose',
dest='verbose',
action='count',
default=0,
help='Give more output. Option is additive, and can be used up to 3 times.')
version = OptionMaker(
'-V', '--version',
dest='version',
action='store_true',
help='Show version and exit.')
quiet = OptionMaker(
'-q', '--quiet',
dest='quiet',
action='count',
default=0,
help='Give less output.')
log = OptionMaker(
'--log',
dest='log',
metavar='path',
help='Path to a verbose appending log. This log is inactive by default.')
log_explicit_levels = OptionMaker(
# Writes the log levels explicitely to the log'
'--log-explicit-levels',
dest='log_explicit_levels',
action='store_true',
default=False,
help=SUPPRESS_HELP)
log_file = OptionMaker(
# The default log file
'--log-file', '--local-log',
dest='log_file',
metavar='path',
default=default_log_file,
help='Path to a verbose non-appending log, that only logs failures. This log is active by default at %default.')
no_input = OptionMaker(
# Don't ask for input
'--no-input',
dest='no_input',
action='store_true',
default=False,
help=SUPPRESS_HELP)
proxy = OptionMaker(
'--proxy',
dest='proxy',
type='str',
default='',
help="Specify a proxy in the form [user:passwd@]proxy.server:port.")
timeout = OptionMaker(
'--timeout', '--default-timeout',
metavar='sec',
dest='timeout',
type='float',
default=15,
help='Set the socket timeout (default %default seconds).')
default_vcs = OptionMaker(
# The default version control system for editables, e.g. 'svn'
'--default-vcs',
dest='default_vcs',
type='str',
default='',
help=SUPPRESS_HELP)
skip_requirements_regex = OptionMaker(
# A regex to be used to skip requirements
'--skip-requirements-regex',
dest='skip_requirements_regex',
type='str',
default='',
help=SUPPRESS_HELP)
exists_action = OptionMaker(
# Option when path already exist
'--exists-action',
dest='exists_action',
type='choice',
choices=['s', 'i', 'w', 'b'],
default=[],
action='append',
metavar='action',
help="Default action when a path already exists: "
"(s)witch, (i)gnore, (w)ipe, (b)ackup.")
cert = OptionMaker(
'--cert',
dest='cert',
type='str',
default='',
metavar='path',
help = "Path to alternate CA bundle.")
index_url = OptionMaker(
'-i', '--index-url', '--pypi-url',
dest='index_url',
metavar='URL',
default='https://pypi.python.org/simple/',
help='Base URL of Python Package Index (default %default).')
extra_index_url = OptionMaker(
'--extra-index-url',
dest='extra_index_urls',
metavar='URL',
action='append',
default=[],
help='Extra URLs of package indexes to use in addition to --index-url.')
no_index = OptionMaker(
'--no-index',
dest='no_index',
action='store_true',
default=False,
help='Ignore package index (only looking at --find-links URLs instead).')
find_links = OptionMaker(
'-f', '--find-links',
dest='find_links',
action='append',
default=[],
metavar='url',
help="If a url or path to an html file, then parse for links to archives. If a local path or file:// url that's a directory, then look for archives in the directory listing.")
# TODO: Remove after 1.6
use_mirrors = OptionMaker(
'-M', '--use-mirrors',
dest='use_mirrors',
action='store_true',
default=False,
help=SUPPRESS_HELP)
# TODO: Remove after 1.6
mirrors = OptionMaker(
'--mirrors',
dest='mirrors',
metavar='URL',
action='append',
default=[],
help=SUPPRESS_HELP)
allow_external = OptionMaker(
"--allow-external",
dest="allow_external",
action="append",
default=[],
metavar="PACKAGE",
help="Allow the installation of externally hosted files",
)
allow_all_external = OptionMaker(
"--allow-all-external",
dest="allow_all_external",
action="store_true",
default=False,
help="Allow the installation of all externally hosted files",
)
# Remove after 1.7
no_allow_external = OptionMaker(
"--no-allow-external",
dest="allow_all_external",
action="store_false",
default=False,
help=SUPPRESS_HELP,
)
# Remove --allow-insecure after 1.7
allow_unsafe = OptionMaker(
"--allow-unverified", "--allow-insecure",
dest="allow_unverified",
action="append",
default=[],
metavar="PACKAGE",
help="Allow the installation of insecure and unverifiable files",
)
# Remove after 1.7
no_allow_unsafe = OptionMaker(
"--no-allow-insecure",
dest="allow_all_insecure",
action="store_false",
default=False,
help=SUPPRESS_HELP
)
# Remove after 1.5
process_dependency_links = OptionMaker(
"--process-dependency-links",
dest="process_dependency_links",
action="store_true",
default=False,
help="Enable the processing of dependency links.",
)
requirements = OptionMaker(
'-r', '--requirement',
dest='requirements',
action='append',
default=[],
metavar='file',
help='Install from the given requirements file. '
'This option can be used multiple times.')
use_wheel = OptionMaker(
'--use-wheel',
dest='use_wheel',
action='store_true',
help=SUPPRESS_HELP,
)
no_use_wheel = OptionMaker(
'--no-use-wheel',
dest='use_wheel',
action='store_false',
default=True,
help=('Do not Find and prefer wheel archives when searching indexes and '
'find-links locations.'),
)
download_cache = OptionMaker(
'--download-cache',
dest='download_cache',
metavar='dir',
default=None,
help='Cache downloaded packages in <dir>.')
no_deps = OptionMaker(
'--no-deps', '--no-dependencies',
dest='ignore_dependencies',
action='store_true',
default=False,
help="Don't install package dependencies.")
build_dir = OptionMaker(
'-b', '--build', '--build-dir', '--build-directory',
dest='build_dir',
metavar='dir',
help='Directory to unpack packages into and build in.',
)
install_options = OptionMaker(
'--install-option',
dest='install_options',
action='append',
metavar='options',
help="Extra arguments to be supplied to the setup.py install "
"command (use like --install-option=\"--install-scripts=/usr/local/bin\"). "
"Use multiple --install-option options to pass multiple options to setup.py install. "
"If you are using an option with a directory path, be sure to use absolute path.")
global_options = OptionMaker(
'--global-option',
dest='global_options',
action='append',
metavar='options',
help="Extra global options to be supplied to the setup.py "
"call before the install command.")
no_clean = OptionMaker(
'--no-clean',
action='store_true',
default=False,
help="Don't clean up build directories.")
##########
# groups #
##########
general_group = {
'name': 'General Options',
'options': [
help_,
require_virtualenv,
verbose,
version,
quiet,
log_file,
log,
log_explicit_levels,
no_input,
proxy,
timeout,
default_vcs,
skip_requirements_regex,
exists_action,
cert,
]
}
index_group = {
'name': 'Package Index Options',
'options': [
index_url,
extra_index_url,
no_index,
find_links,
use_mirrors,
mirrors,
allow_external,
allow_all_external,
no_allow_external,
allow_unsafe,
no_allow_unsafe,
process_dependency_links,
]
}
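
A sketch of the deferred construction OptionMaker provides, assuming this pip.cmdoptions module is importable: each make() call returns a fresh optparse.Option, so mutable 'append' defaults cannot leak state between parses:

from pip.cmdoptions import extra_index_url

opt_a = extra_index_url.make()
opt_b = extra_index_url.make()
assert opt_a is not opt_b                  # independent Option objects
assert opt_a.default == []
assert opt_a.default is not opt_b.default  # each has its own empty list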

@@ -0,0 +1,88 @@
"""
Package containing all pip commands
"""
from pip.commands.bundle import BundleCommand
from pip.commands.completion import CompletionCommand
from pip.commands.freeze import FreezeCommand
from pip.commands.help import HelpCommand
from pip.commands.list import ListCommand
from pip.commands.search import SearchCommand
from pip.commands.show import ShowCommand
from pip.commands.install import InstallCommand
from pip.commands.uninstall import UninstallCommand
from pip.commands.unzip import UnzipCommand
from pip.commands.zip import ZipCommand
from pip.commands.wheel import WheelCommand
commands = {
BundleCommand.name: BundleCommand,
CompletionCommand.name: CompletionCommand,
FreezeCommand.name: FreezeCommand,
HelpCommand.name: HelpCommand,
SearchCommand.name: SearchCommand,
ShowCommand.name: ShowCommand,
InstallCommand.name: InstallCommand,
UninstallCommand.name: UninstallCommand,
UnzipCommand.name: UnzipCommand,
ZipCommand.name: ZipCommand,
ListCommand.name: ListCommand,
WheelCommand.name: WheelCommand,
}
commands_order = [
InstallCommand,
UninstallCommand,
FreezeCommand,
ListCommand,
ShowCommand,
SearchCommand,
WheelCommand,
ZipCommand,
UnzipCommand,
BundleCommand,
HelpCommand,
]
def get_summaries(ignore_hidden=True, ordered=True):
"""Yields sorted (command name, command summary) tuples."""
if ordered:
cmditems = _sort_commands(commands, commands_order)
else:
cmditems = commands.items()
for name, command_class in cmditems:
if ignore_hidden and command_class.hidden:
continue
yield (name, command_class.summary)
def get_similar_commands(name):
"""Command name auto-correct."""
from difflib import get_close_matches
close_commands = get_close_matches(name, commands.keys())
if close_commands:
guess = close_commands[0]
else:
guess = False
return guess
def _sort_commands(cmddict, order):
def keyfn(key):
try:
return order.index(key[1])
except ValueError:
# unordered items should come last
return 0xff
return sorted(cmddict.items(), key=keyfn)
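# Behaviour sketch for the helpers above (doctest-style, illustrative):
#
#     >>> get_similar_commands('instal')        # a close match exists
#     'install'
#     >>> get_similar_commands('frobnicate')    # no close match
#     False
#     >>> [name for name, summary in get_summaries()][:3]
#     ['install', 'uninstall', 'freeze']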


@ -0,0 +1,42 @@
import textwrap
from pip.locations import build_prefix, src_prefix
from pip.util import display_path, backup_dir
from pip.log import logger
from pip.exceptions import InstallationError
from pip.commands.install import InstallCommand
class BundleCommand(InstallCommand):
"""Create pybundles (archives containing multiple packages)."""
name = 'bundle'
usage = """
%prog [options] <bundle name>.pybundle <package>..."""
summary = 'DEPRECATED. Create pybundles.'
bundle = True
def __init__(self, *args, **kw):
super(BundleCommand, self).__init__(*args, **kw)
# bundle uses different default source and build dirs
build_opt = self.parser.get_option("--build")
build_opt.default = backup_dir(build_prefix, '-bundle')
src_opt = self.parser.get_option("--src")
src_opt.default = backup_dir(src_prefix, '-bundle')
self.parser.set_defaults(**{
src_opt.dest: src_opt.default,
build_opt.dest: build_opt.default,
})
def run(self, options, args):
logger.deprecated('1.6', "DEPRECATION: 'pip bundle' and support for installing from *.pybundle files is deprecated. "
"See https://github.com/pypa/pip/pull/1046")
if not args:
raise InstallationError('You must give a bundle filename')
# We have to get everything when creating a bundle:
options.ignore_installed = True
logger.notify('Putting temporary build files in %s and source/develop files in %s'
% (display_path(options.build_dir), display_path(options.src_dir)))
self.bundle_filename = args.pop(0)
requirement_set = super(BundleCommand, self).run(options, args)
return requirement_set
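# Illustrative invocation of this (deprecated) command; the bundle file
# name and package are hypothetical:
#
#     $ pip bundle mybundle.pybundle requests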


@ -0,0 +1,59 @@
import sys
from pip.basecommand import Command
BASE_COMPLETION = """
# pip %(shell)s completion start%(script)s# pip %(shell)s completion end
"""
COMPLETION_SCRIPTS = {
'bash': """
_pip_completion()
{
COMPREPLY=( $( COMP_WORDS="${COMP_WORDS[*]}" \\
COMP_CWORD=$COMP_CWORD \\
PIP_AUTO_COMPLETE=1 $1 ) )
}
complete -o default -F _pip_completion pip
""", 'zsh': """
function _pip_completion {
local words cword
read -Ac words
read -cn cword
reply=( $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$(( cword-1 )) \\
PIP_AUTO_COMPLETE=1 $words[1] ) )
}
compctl -K _pip_completion pip
"""}
class CompletionCommand(Command):
"""A helper command to be used for command completion."""
name = 'completion'
summary = 'A helper command to be used for command completion'
hidden = True
def __init__(self, *args, **kw):
super(CompletionCommand, self).__init__(*args, **kw)
self.parser.add_option(
'--bash', '-b',
action='store_const',
const='bash',
dest='shell',
help='Emit completion code for bash')
self.parser.add_option(
'--zsh', '-z',
action='store_const',
const='zsh',
dest='shell',
help='Emit completion code for zsh')
def run(self, options, args):
"""Prints the completion code of the given shell"""
shells = COMPLETION_SCRIPTS.keys()
shell_options = ['--' + shell for shell in sorted(shells)]
if options.shell in shells:
script = COMPLETION_SCRIPTS.get(options.shell, '')
print(BASE_COMPLETION % {'script': script, 'shell': options.shell})
else:
sys.stderr.write('ERROR: You must pass %s\n' % ' or '.join(shell_options))
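# Typical usage (from a shell): the emitted script is appended to the
# shell's startup file and sourced into new sessions:
#
#     $ pip completion --bash >> ~/.bashrc
#     $ pip completion --zsh >> ~/.zshrc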


@ -0,0 +1,114 @@
import re
import sys
import pip
from pip.req import InstallRequirement
from pip.log import logger
from pip.basecommand import Command
from pip.util import get_installed_distributions
import pkg_resources
class FreezeCommand(Command):
"""Output installed packages in requirements format."""
name = 'freeze'
usage = """
%prog [options]"""
summary = 'Output installed packages in requirements format.'
def __init__(self, *args, **kw):
super(FreezeCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'-r', '--requirement',
dest='requirement',
action='store',
default=None,
metavar='file',
help="Use the order in the given requirements file and it's comments when generating output.")
self.cmd_opts.add_option(
'-f', '--find-links',
dest='find_links',
action='append',
default=[],
metavar='URL',
help='URL for finding packages, which will be added to the output.')
self.cmd_opts.add_option(
'-l', '--local',
dest='local',
action='store_true',
default=False,
help='If in a virtualenv that has global access, do not output globally-installed packages.')
self.parser.insert_option_group(0, self.cmd_opts)
def setup_logging(self):
logger.move_stdout_to_stderr()
def run(self, options, args):
requirement = options.requirement
find_links = options.find_links or []
local_only = options.local
## FIXME: Obviously this should be settable:
find_tags = False
skip_match = None
skip_regex = options.skip_requirements_regex
if skip_regex:
skip_match = re.compile(skip_regex)
dependency_links = []
f = sys.stdout
for dist in pkg_resources.working_set:
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(dist.get_metadata_lines('dependency_links.txt'))
for link in find_links:
if '#egg=' in link:
dependency_links.append(link)
for link in find_links:
f.write('-f %s\n' % link)
installations = {}
for dist in get_installed_distributions(local_only=local_only):
req = pip.FrozenRequirement.from_dist(dist, dependency_links, find_tags=find_tags)
installations[req.name] = req
if requirement:
req_f = open(requirement)
for line in req_f:
if not line.strip() or line.strip().startswith('#'):
f.write(line)
continue
if skip_match and skip_match.search(line):
f.write(line)
continue
elif line.startswith('-e') or line.startswith('--editable'):
if line.startswith('-e'):
line = line[2:].strip()
else:
line = line[len('--editable'):].strip().lstrip('=')
line_req = InstallRequirement.from_editable(line, default_vcs=options.default_vcs)
elif (line.startswith('-r') or line.startswith('--requirement')
or line.startswith('-Z') or line.startswith('--always-unzip')
or line.startswith('-f') or line.startswith('-i')
or line.startswith('--extra-index-url')
or line.startswith('--find-links')
or line.startswith('--index-url')):
f.write(line)
continue
else:
line_req = InstallRequirement.from_line(line)
if not line_req.name:
logger.notify("Skipping line because it's not clear what it would install: %s"
% line.strip())
logger.notify(" (add #egg=PackageName to the URL to avoid this warning)")
continue
if line_req.name not in installations:
logger.warn("Requirement file contains %s, but that package is not installed"
% line.strip())
continue
f.write(str(installations[line_req.name]))
del installations[line_req.name]
f.write('## The following requirements were added by pip --freeze:\n')
for installation in sorted(installations.values(), key=lambda x: x.name):
f.write(str(installation))
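# Illustrative run (file contents and versions are hypothetical). With -r,
# lines from the given requirements file keep their order and comments, and
# installed packages not listed there are appended at the end:
#
#     $ pip freeze -r requirements.txt
#     # our pinned dependencies
#     Jinja2==2.7.2
#     ## The following requirements were added by pip --freeze:
#     MarkupSafe==0.18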


@ -0,0 +1,33 @@
from pip.basecommand import Command, SUCCESS
from pip.exceptions import CommandError
class HelpCommand(Command):
"""Show help for commands"""
name = 'help'
usage = """
%prog <command>"""
summary = 'Show help for commands.'
def run(self, options, args):
from pip.commands import commands, get_similar_commands
try:
# 'pip help' with no args is handled by pip.__init__.parseopt()
cmd_name = args[0] # the command we need help for
except IndexError:
return SUCCESS
if cmd_name not in commands:
guess = get_similar_commands(cmd_name)
msg = ['unknown command "%s"' % cmd_name]
if guess:
msg.append('maybe you meant "%s"' % guess)
raise CommandError(' - '.join(msg))
command = commands[cmd_name]()
command.parser.print_help()
return SUCCESS
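# Behaviour sketch: an unknown command name triggers the auto-correct
# above, so the error roughly reads:
#
#     $ pip help instal
#     ERROR: unknown command "instal" - maybe you meant "install"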


@ -0,0 +1,326 @@
import os
import sys
import tempfile
import shutil
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.log import logger
from pip.locations import (src_prefix, virtualenv_no_global, distutils_scheme,
build_prefix)
from pip.basecommand import Command
from pip.index import PackageFinder
from pip.exceptions import InstallationError, CommandError, PreviousBuildDirError
from pip import cmdoptions
from pip.util import BuildDirectory
class InstallCommand(Command):
"""
Install packages from:
- PyPI (and other indexes) using requirement specifiers.
- VCS project urls.
- Local project directories.
- Local or remote source archives.
pip also supports installing from "requirements files", which provide
an easy way to specify a whole environment to be installed.
"""
name = 'install'
usage = """
%prog [options] <requirement specifier> ...
%prog [options] -r <requirements file> ...
%prog [options] [-e] <vcs project url> ...
%prog [options] [-e] <local project path> ...
%prog [options] <archive url/path> ..."""
summary = 'Install packages.'
bundle = False
def __init__(self, *args, **kw):
super(InstallCommand, self).__init__(*args, **kw)
cmd_opts = self.cmd_opts
cmd_opts.add_option(
'-e', '--editable',
dest='editables',
action='append',
default=[],
metavar='path/url',
help='Install a project in editable mode (i.e. setuptools "develop mode") from a local project path or a VCS url.')
cmd_opts.add_option(cmdoptions.requirements.make())
cmd_opts.add_option(cmdoptions.build_dir.make())
cmd_opts.add_option(
'-t', '--target',
dest='target_dir',
metavar='dir',
default=None,
help='Install packages into <dir>.')
cmd_opts.add_option(
'-d', '--download', '--download-dir', '--download-directory',
dest='download_dir',
metavar='dir',
default=None,
help="Download packages into <dir> instead of installing them, regardless of what's already installed.")
cmd_opts.add_option(cmdoptions.download_cache.make())
cmd_opts.add_option(
'--src', '--source', '--source-dir', '--source-directory',
dest='src_dir',
metavar='dir',
default=src_prefix,
help='Directory to check out editable projects into. '
'The default in a virtualenv is "<venv path>/src". '
'The default for global installs is "<current dir>/src".')
cmd_opts.add_option(
'-U', '--upgrade',
dest='upgrade',
action='store_true',
help='Upgrade all packages to the newest available version. '
'This process is recursive regardless of whether a dependency is already satisfied.')
cmd_opts.add_option(
'--force-reinstall',
dest='force_reinstall',
action='store_true',
help='When upgrading, reinstall all packages even if they are '
'already up-to-date.')
cmd_opts.add_option(
'-I', '--ignore-installed',
dest='ignore_installed',
action='store_true',
help='Ignore the installed packages (reinstalling instead).')
cmd_opts.add_option(cmdoptions.no_deps.make())
cmd_opts.add_option(
'--no-install',
dest='no_install',
action='store_true',
help="DEPRECATED. Download and unpack all packages, but don't actually install them.")
cmd_opts.add_option(
'--no-download',
dest='no_download',
action="store_true",
help="DEPRECATED. Don't download any packages, just install the ones already downloaded "
"(completes an install run with --no-install).")
cmd_opts.add_option(cmdoptions.install_options.make())
cmd_opts.add_option(cmdoptions.global_options.make())
cmd_opts.add_option(
'--user',
dest='use_user_site',
action='store_true',
help='Install using the user scheme.')
cmd_opts.add_option(
'--egg',
dest='as_egg',
action='store_true',
help="Install packages as eggs, not 'flat', like pip normally does. This option is not about installing *from* eggs. (WARNING: Because this option overrides pip's normal install logic, requirements files may not behave as expected.)")
cmd_opts.add_option(
'--root',
dest='root_path',
metavar='dir',
default=None,
help="Install everything relative to this alternate root directory.")
cmd_opts.add_option(
"--compile",
action="store_true",
dest="compile",
default=True,
help="Compile py files to pyc",
)
cmd_opts.add_option(
"--no-compile",
action="store_false",
dest="compile",
help="Do not compile py files to pyc",
)
cmd_opts.add_option(cmdoptions.use_wheel.make())
cmd_opts.add_option(cmdoptions.no_use_wheel.make())
cmd_opts.add_option(
'--pre',
action='store_true',
default=False,
help="Include pre-release and development versions. By default, pip only finds stable versions.")
cmd_opts.add_option(cmdoptions.no_clean.make())
index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, cmd_opts)
def _build_package_finder(self, options, index_urls, session):
"""
Create a package finder appropriate to this install command.
This method is meant to be overridden by subclasses, not
called directly.
"""
return PackageFinder(find_links=options.find_links,
index_urls=index_urls,
use_wheel=options.use_wheel,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)
def run(self, options, args):
if (
options.no_install or
options.no_download or
options.build_dir or
options.no_clean
):
logger.deprecated('1.7', 'DEPRECATION: --no-install, --no-download, --build, '
'and --no-clean are deprecated. See https://github.com/pypa/pip/issues/906.')
if options.download_dir:
options.no_install = True
options.ignore_installed = True
# If we have --no-install or --no-download and no --build we use the
# legacy static build dir
if (options.build_dir is None
and (options.no_install or options.no_download)):
options.build_dir = build_prefix
if options.build_dir:
options.build_dir = os.path.abspath(options.build_dir)
options.src_dir = os.path.abspath(options.src_dir)
install_options = options.install_options or []
if options.use_user_site:
if virtualenv_no_global():
raise InstallationError("Can not perform a '--user' install. User site-packages are not visible in this virtualenv.")
install_options.append('--user')
temp_target_dir = None
if options.target_dir:
options.ignore_installed = True
temp_target_dir = tempfile.mkdtemp()
options.target_dir = os.path.abspath(options.target_dir)
if os.path.exists(options.target_dir) and not os.path.isdir(options.target_dir):
raise CommandError("Target path exists but is not a directory, will not continue.")
install_options.append('--home=' + temp_target_dir)
global_options = options.global_options or []
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
index_urls = []
if options.use_mirrors:
logger.deprecated("1.7",
"--use-mirrors has been deprecated and will be removed"
" in the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested.")
if options.mirrors:
logger.deprecated("1.7",
"--mirrors has been deprecated and will be removed in "
" the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested.")
index_urls += options.mirrors
session = self._build_session(options)
finder = self._build_package_finder(options, index_urls, session)
build_delete = (not (options.no_clean or options.build_dir))
with BuildDirectory(options.build_dir, delete=build_delete) as build_dir:
requirement_set = RequirementSet(
build_dir=build_dir,
src_dir=options.src_dir,
download_dir=options.download_dir,
download_cache=options.download_cache,
upgrade=options.upgrade,
as_egg=options.as_egg,
ignore_installed=options.ignore_installed,
ignore_dependencies=options.ignore_dependencies,
force_reinstall=options.force_reinstall,
use_user_site=options.use_user_site,
target_dir=temp_target_dir,
session=session,
pycompile=options.compile,
)
for name in args:
requirement_set.add_requirement(
InstallRequirement.from_line(name, None))
for name in options.editables:
requirement_set.add_requirement(
InstallRequirement.from_editable(name, default_vcs=options.default_vcs))
for filename in options.requirements:
for req in parse_requirements(filename, finder=finder, options=options, session=session):
requirement_set.add_requirement(req)
if not requirement_set.has_requirements:
opts = {'name': self.name}
if options.find_links:
msg = ('You must give at least one requirement to %(name)s '
'(maybe you meant "pip %(name)s %(links)s"?)' %
dict(opts, links=' '.join(options.find_links)))
else:
msg = ('You must give at least one requirement '
'to %(name)s (see "pip help %(name)s")' % opts)
logger.warn(msg)
return
try:
if not options.no_download:
requirement_set.prepare_files(finder, force_root_egg_info=self.bundle, bundle=self.bundle)
else:
requirement_set.locate_files()
if not options.no_install and not self.bundle:
requirement_set.install(install_options, global_options, root=options.root_path)
installed = ' '.join([req.name for req in
requirement_set.successfully_installed])
if installed:
logger.notify('Successfully installed %s' % installed)
elif not self.bundle:
downloaded = ' '.join([req.name for req in
requirement_set.successfully_downloaded])
if downloaded:
logger.notify('Successfully downloaded %s' % downloaded)
elif self.bundle:
requirement_set.create_bundle(self.bundle_filename)
logger.notify('Created bundle in %s' % self.bundle_filename)
except PreviousBuildDirError:
options.no_clean = True
raise
finally:
# Clean up
if (not options.no_clean) and ((not options.no_install) or options.download_dir):
requirement_set.cleanup_files(bundle=self.bundle)
if options.target_dir:
if not os.path.exists(options.target_dir):
os.makedirs(options.target_dir)
lib_dir = distutils_scheme('', home=temp_target_dir)['purelib']
for item in os.listdir(lib_dir):
shutil.move(
os.path.join(lib_dir, item),
os.path.join(options.target_dir, item)
)
shutil.rmtree(temp_target_dir)
return requirement_set
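# Illustrative invocations covered by the usage string above (URLs and
# names are hypothetical):
#
#     $ pip install requests
#     $ pip install -r requirements.txt
#     $ pip install -e git+https://github.com/example/project.git#egg=project
#     $ pip install --target ./vendor somepackage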


@ -0,0 +1,162 @@
from pip.basecommand import Command
from pip.exceptions import DistributionNotFound, BestVersionAlreadyInstalled
from pip.index import PackageFinder
from pip.log import logger
from pip.req import InstallRequirement
from pip.util import get_installed_distributions, dist_is_editable
from pip.cmdoptions import make_option_group, index_group
class ListCommand(Command):
"""List installed packages, including editables."""
name = 'list'
usage = """
%prog [options]"""
summary = 'List installed packages.'
# distributions to skip (python itself is reported by pkg_resources.working_set)
skip = ['python']
def __init__(self, *args, **kw):
super(ListCommand, self).__init__(*args, **kw)
cmd_opts = self.cmd_opts
cmd_opts.add_option(
'-o', '--outdated',
action='store_true',
default=False,
help='List outdated packages (excluding editables)')
cmd_opts.add_option(
'-u', '--uptodate',
action='store_true',
default=False,
help='List uptodate packages (excluding editables)')
cmd_opts.add_option(
'-e', '--editable',
action='store_true',
default=False,
help='List editable projects.')
cmd_opts.add_option(
'-l', '--local',
action='store_true',
default=False,
help='If in a virtualenv that has global access, do not list globally-installed packages.')
cmd_opts.add_option(
'--pre',
action='store_true',
default=False,
help="Include pre-release and development versions. By default, pip only finds stable versions.")
index_opts = make_option_group(index_group, self.parser)
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, cmd_opts)
def _build_package_finder(self, options, index_urls, session):
"""
Create a package finder appropriate to this list command.
"""
return PackageFinder(find_links=options.find_links,
index_urls=index_urls,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)
def run(self, options, args):
if options.outdated:
self.run_outdated(options)
elif options.uptodate:
self.run_uptodate(options)
elif options.editable:
self.run_editables(options)
else:
self.run_listing(options)
def run_outdated(self, options):
for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options):
if remote_version_parsed > dist.parsed_version:
logger.notify('%s (Current: %s Latest: %s)' % (dist.project_name,
dist.version, remote_version_raw))
def find_packages_latests_versions(self, options):
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
index_urls = []
if options.use_mirrors:
logger.deprecated("1.7",
"--use-mirrors has been deprecated and will be removed"
" in the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested.")
if options.mirrors:
logger.deprecated("1.7",
"--mirrors has been deprecated and will be removed in "
" the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested.")
index_urls += options.mirrors
dependency_links = []
for dist in get_installed_distributions(local_only=options.local, skip=self.skip):
if dist.has_metadata('dependency_links.txt'):
dependency_links.extend(
dist.get_metadata_lines('dependency_links.txt'),
)
session = self._build_session(options)
finder = self._build_package_finder(options, index_urls, session)
finder.add_dependency_links(dependency_links)
installed_packages = get_installed_distributions(local_only=options.local, include_editables=False, skip=self.skip)
for dist in installed_packages:
req = InstallRequirement.from_line(dist.key, None)
try:
link = finder.find_requirement(req, True)
# If link is None, the installed version is the most up-to-date
if link is None:
continue
except DistributionNotFound:
continue
except BestVersionAlreadyInstalled:
remote_version = req.installed_version
else:
# It might be a good idea for link or finder to have a public method
# that returned the version
remote_version = finder._link_package_versions(link, req.name)[0]
remote_version_raw = remote_version[2]
remote_version_parsed = remote_version[0]
yield dist, remote_version_raw, remote_version_parsed
def run_listing(self, options):
installed_packages = get_installed_distributions(local_only=options.local, skip=self.skip)
self.output_package_listing(installed_packages)
def run_editables(self, options):
installed_packages = get_installed_distributions(local_only=options.local, editables_only=True)
self.output_package_listing(installed_packages)
def output_package_listing(self, installed_packages):
installed_packages = sorted(installed_packages, key=lambda dist: dist.project_name.lower())
for dist in installed_packages:
if dist_is_editable(dist):
line = '%s (%s, %s)' % (dist.project_name, dist.version, dist.location)
else:
line = '%s (%s)' % (dist.project_name, dist.version)
logger.notify(line)
def run_uptodate(self, options):
uptodate = []
for dist, remote_version_raw, remote_version_parsed in self.find_packages_latests_versions(options):
if dist.parsed_version == remote_version_parsed:
uptodate.append(dist)
self.output_package_listing(uptodate)
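# Illustrative output (versions hypothetical), matching the formats used
# in run_outdated and output_package_listing above:
#
#     $ pip list
#     requests (2.0.0)
#     $ pip list --outdated
#     requests (Current: 2.0.0 Latest: 2.2.1)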


@ -0,0 +1,132 @@
import sys
import textwrap
import pip.download
from pip.basecommand import Command, SUCCESS
from pip.util import get_terminal_size
from pip.log import logger
from pip.backwardcompat import xmlrpclib, reduce, cmp
from pip.exceptions import CommandError
from pip.status_codes import NO_MATCHES_FOUND
import pkg_resources
from distutils.version import StrictVersion, LooseVersion
class SearchCommand(Command):
"""Search for PyPI packages whose name or summary contains <query>."""
name = 'search'
usage = """
%prog [options] <query>"""
summary = 'Search PyPI for packages.'
def __init__(self, *args, **kw):
super(SearchCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'--index',
dest='index',
metavar='URL',
default='https://pypi.python.org/pypi',
help='Base URL of Python Package Index (default %default)')
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
if not args:
raise CommandError('Missing required argument (search query).')
query = args
index_url = options.index
pypi_hits = self.search(query, index_url)
hits = transform_hits(pypi_hits)
terminal_width = None
if sys.stdout.isatty():
terminal_width = get_terminal_size()[0]
print_results(hits, terminal_width=terminal_width)
if pypi_hits:
return SUCCESS
return NO_MATCHES_FOUND
def search(self, query, index_url):
pypi = xmlrpclib.ServerProxy(index_url)
hits = pypi.search({'name': query, 'summary': query}, 'or')
return hits
def transform_hits(hits):
"""
The list from pypi is really a list of versions. We want a list of
packages with the list of versions stored inline. This converts the
list from pypi into one we can use.
"""
packages = {}
for hit in hits:
name = hit['name']
summary = hit['summary']
version = hit['version']
score = hit['_pypi_ordering']
if score is None:
score = 0
if name not in packages.keys():
packages[name] = {'name': name, 'summary': summary, 'versions': [version], 'score': score}
else:
packages[name]['versions'].append(version)
# if this is the highest version, replace summary and score
if version == highest_version(packages[name]['versions']):
packages[name]['summary'] = summary
packages[name]['score'] = score
# each record has a unique name now, so we will convert the dict into a list sorted by score
package_list = sorted(packages.values(), key=lambda x: x['score'], reverse=True)
return package_list
def print_results(hits, name_column_width=25, terminal_width=None):
installed_packages = [p.project_name for p in pkg_resources.working_set]
for hit in hits:
name = hit['name']
summary = hit['summary'] or ''
if terminal_width is not None:
# wrap and indent summary to fit terminal
summary = textwrap.wrap(summary, terminal_width - name_column_width - 5)
summary = ('\n' + ' ' * (name_column_width + 3)).join(summary)
line = '%s - %s' % (name.ljust(name_column_width), summary)
try:
logger.notify(line)
if name in installed_packages:
dist = pkg_resources.get_distribution(name)
logger.indent += 2
try:
latest = highest_version(hit['versions'])
if dist.version == latest:
logger.notify('INSTALLED: %s (latest)' % dist.version)
else:
logger.notify('INSTALLED: %s' % dist.version)
logger.notify('LATEST: %s' % latest)
finally:
logger.indent -= 2
except UnicodeEncodeError:
pass
def compare_versions(version1, version2):
try:
return cmp(StrictVersion(version1), StrictVersion(version2))
# in case of abnormal version number, fall back to LooseVersion
except ValueError:
pass
try:
return cmp(LooseVersion(version1), LooseVersion(version2))
except TypeError:
# certain LooseVersion comparisons raise due to unorderable types,
# fall back to string comparison
return cmp([str(v) for v in LooseVersion(version1).version],
[str(v) for v in LooseVersion(version2).version])
def highest_version(versions):
return reduce((lambda v1, v2: compare_versions(v1, v2) == 1 and v1 or v2), versions)
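# Behaviour sketch for the version helpers above (doctest-style):
#
#     >>> compare_versions('1.10', '1.2')    # StrictVersion orders numerically
#     1
#     >>> highest_version(['0.9', '1.10', '1.2'])
#     '1.10'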


@ -0,0 +1,80 @@
import os
from pip.basecommand import Command
from pip.log import logger
import pkg_resources
class ShowCommand(Command):
"""Show information about one or more installed packages."""
name = 'show'
usage = """
%prog [options] <package> ..."""
summary = 'Show information about installed packages.'
def __init__(self, *args, **kw):
super(ShowCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'-f', '--files',
dest='files',
action='store_true',
default=False,
help='Show the full list of installed files for each package.')
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
if not args:
logger.warn('ERROR: Please provide a package name or names.')
return
query = args
results = search_packages_info(query)
print_results(results, options.files)
def search_packages_info(query):
"""
Gather details from installed distributions. Print distribution name,
version, location, and installed files. Listing installed files requires a
pip-generated 'installed-files.txt' in the distribution's '.egg-info'
directory.
"""
installed_packages = dict(
[(p.project_name.lower(), p) for p in pkg_resources.working_set])
for name in query:
normalized_name = name.lower()
if normalized_name in installed_packages:
dist = installed_packages[normalized_name]
package = {
'name': dist.project_name,
'version': dist.version,
'location': dist.location,
'requires': [dep.project_name for dep in dist.requires()],
}
filelist = os.path.join(
dist.location,
dist.egg_name() + '.egg-info',
'installed-files.txt')
if os.path.isfile(filelist):
package['files'] = filelist
yield package
def print_results(distributions, list_all_files):
"""
Print the information from the installed distributions found.
"""
for dist in distributions:
logger.notify("---")
logger.notify("Name: %s" % dist['name'])
logger.notify("Version: %s" % dist['version'])
logger.notify("Location: %s" % dist['location'])
logger.notify("Requires: %s" % ', '.join(dist['requires']))
if list_all_files:
logger.notify("Files:")
if 'files' in dist:
for line in open(dist['files']):
logger.notify(" %s" % line.strip())
else:
logger.notify("Cannot locate installed-files.txt")


@ -0,0 +1,59 @@
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.basecommand import Command
from pip.exceptions import InstallationError
class UninstallCommand(Command):
"""
Uninstall packages.
pip is able to uninstall most installed packages. Known exceptions are:
- Pure distutils packages installed with ``python setup.py install``, which
leave behind no metadata to determine what files were installed.
- Script wrappers installed by ``python setup.py develop``.
"""
name = 'uninstall'
usage = """
%prog [options] <package> ...
%prog [options] -r <requirements file> ..."""
summary = 'Uninstall packages.'
def __init__(self, *args, **kw):
super(UninstallCommand, self).__init__(*args, **kw)
self.cmd_opts.add_option(
'-r', '--requirement',
dest='requirements',
action='append',
default=[],
metavar='file',
help='Uninstall all the packages listed in the given requirements file. '
'This option can be used multiple times.')
self.cmd_opts.add_option(
'-y', '--yes',
dest='yes',
action='store_true',
help="Don't ask for confirmation of uninstall deletions.")
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
session = self._build_session(options)
requirement_set = RequirementSet(
build_dir=None,
src_dir=None,
download_dir=None,
session=session,
)
for name in args:
requirement_set.add_requirement(
InstallRequirement.from_line(name))
for filename in options.requirements:
for req in parse_requirements(filename,
options=options, session=session):
requirement_set.add_requirement(req)
if not requirement_set.has_requirements:
raise InstallationError('You must give at least one requirement '
'to %(name)s (see "pip help %(name)s")' % dict(name=self.name))
requirement_set.uninstall(auto_confirm=options.yes)
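# Typical usage; -y skips the confirmation prompt handled by
# requirement_set.uninstall above:
#
#     $ pip uninstall -y requests
#     $ pip uninstall -r requirements.txt -y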


@ -0,0 +1,7 @@
from pip.commands.zip import ZipCommand
class UnzipCommand(ZipCommand):
"""Unzip individual packages."""
name = 'unzip'
summary = 'DEPRECATED. Unzip individual packages.'


@ -0,0 +1,203 @@
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import os
import sys
from pip.basecommand import Command
from pip.index import PackageFinder
from pip.log import logger
from pip.exceptions import CommandError, PreviousBuildDirError
from pip.req import InstallRequirement, RequirementSet, parse_requirements
from pip.util import BuildDirectory, normalize_path
from pip.wheel import WheelBuilder
from pip import cmdoptions
DEFAULT_WHEEL_DIR = os.path.join(normalize_path(os.curdir), 'wheelhouse')
class WheelCommand(Command):
"""
Build Wheel archives for your requirements and dependencies.
Wheel is a built-package format, and offers the advantage of not recompiling your software during every install.
For more details, see the wheel docs: http://wheel.readthedocs.org/en/latest.
Requirements: setuptools>=0.8, and wheel.
'pip wheel' uses the bdist_wheel setuptools extension from the wheel package to build individual wheels.
"""
name = 'wheel'
usage = """
%prog [options] <requirement specifier> ...
%prog [options] -r <requirements file> ...
%prog [options] <vcs project url> ...
%prog [options] <local project path> ...
%prog [options] <archive url/path> ..."""
summary = 'Build wheels from your requirements.'
def __init__(self, *args, **kw):
super(WheelCommand, self).__init__(*args, **kw)
cmd_opts = self.cmd_opts
cmd_opts.add_option(
'-w', '--wheel-dir',
dest='wheel_dir',
metavar='dir',
default=DEFAULT_WHEEL_DIR,
help="Build wheels into <dir>, where the default is '<cwd>/wheelhouse'.")
cmd_opts.add_option(cmdoptions.use_wheel.make())
cmd_opts.add_option(cmdoptions.no_use_wheel.make())
cmd_opts.add_option(
'--build-option',
dest='build_options',
metavar='options',
action='append',
help="Extra arguments to be supplied to 'setup.py bdist_wheel'.")
cmd_opts.add_option(cmdoptions.requirements.make())
cmd_opts.add_option(cmdoptions.download_cache.make())
cmd_opts.add_option(cmdoptions.no_deps.make())
cmd_opts.add_option(cmdoptions.build_dir.make())
cmd_opts.add_option(
'--global-option',
dest='global_options',
action='append',
metavar='options',
help="Extra global options to be supplied to the setup.py "
"call before the 'bdist_wheel' command.")
cmd_opts.add_option(
'--pre',
action='store_true',
default=False,
help="Include pre-release and development versions. By default, pip only finds stable versions.")
cmd_opts.add_option(cmdoptions.no_clean.make())
index_opts = cmdoptions.make_option_group(cmdoptions.index_group, self.parser)
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, cmd_opts)
def run(self, options, args):
# confirm requirements
try:
import wheel.bdist_wheel
except ImportError:
if sys.version_info < (3,):
debian_package = 'python-wheel'
else:
debian_package = 'python3-wheel'
raise CommandError("'pip wheel' requires the 'wheel' package. To fix this, run: sudo apt-get install %s" % debian_package)
try:
import pkg_resources
except ImportError:
raise CommandError(
"'pip wheel' requires setuptools >= 0.8 for dist-info support."
" To fix this, run: pip install --upgrade setuptools"
)
else:
if not hasattr(pkg_resources, 'DistInfoDistribution'):
raise CommandError(
"'pip wheel' requires setuptools >= 0.8 for dist-info "
"support. To fix this, run: pip install --upgrade "
"setuptools"
)
index_urls = [options.index_url] + options.extra_index_urls
if options.no_index:
logger.notify('Ignoring indexes: %s' % ','.join(index_urls))
index_urls = []
if options.use_mirrors:
logger.deprecated("1.7",
"--use-mirrors has been deprecated and will be removed"
" in the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested.")
if options.mirrors:
logger.deprecated("1.7",
"--mirrors has been deprecated and will be removed in "
" the future. Explicit uses of --index-url and/or "
"--extra-index-url is suggested.")
index_urls += options.mirrors
if options.build_dir:
options.build_dir = os.path.abspath(options.build_dir)
session = self._build_session(options)
finder = PackageFinder(find_links=options.find_links,
index_urls=index_urls,
use_wheel=options.use_wheel,
allow_external=options.allow_external,
allow_unverified=options.allow_unverified,
allow_all_external=options.allow_all_external,
allow_all_prereleases=options.pre,
process_dependency_links=options.process_dependency_links,
session=session,
)
build_delete = (not (options.no_clean or options.build_dir))
with BuildDirectory(options.build_dir, delete=build_delete) as build_dir:
requirement_set = RequirementSet(
build_dir=build_dir,
src_dir=None,
download_dir=None,
download_cache=options.download_cache,
ignore_dependencies=options.ignore_dependencies,
ignore_installed=True,
session=session,
wheel_download_dir=options.wheel_dir
)
# make the wheelhouse
if not os.path.exists(options.wheel_dir):
os.makedirs(options.wheel_dir)
# parse args and/or requirements files
for name in args:
requirement_set.add_requirement(
InstallRequirement.from_line(name, None))
for filename in options.requirements:
for req in parse_requirements(
filename,
finder=finder,
options=options,
session=session):
if req.editable:
logger.notify("ignoring %s" % req.url)
continue
requirement_set.add_requirement(req)
# fail if no requirements
if not requirement_set.has_requirements:
opts = {'name': self.name}
msg = ('You must give at least one requirement '
'to %(name)s (see "pip help %(name)s")' % opts)
logger.error(msg)
return
try:
# build wheels
wb = WheelBuilder(
requirement_set,
finder,
options.wheel_dir,
build_options=options.build_options or [],
global_options=options.global_options or []
)
wb.build()
except PreviousBuildDirError:
options.no_clean = True
raise
finally:
if not options.no_clean:
requirement_set.cleanup_files()
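# Typical usage; built wheels land in ./wheelhouse unless --wheel-dir is
# given (package names hypothetical):
#
#     $ pip wheel -r requirements.txt
#     $ pip wheel --wheel-dir=/tmp/wheels requests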


@ -0,0 +1,351 @@
import sys
import re
import fnmatch
import os
import shutil
import zipfile
from pip.util import display_path, backup_dir, rmtree
from pip.log import logger
from pip.exceptions import InstallationError
from pip.basecommand import Command
class ZipCommand(Command):
"""Zip individual packages."""
name = 'zip'
usage = """
%prog [options] <package> ..."""
summary = 'DEPRECATED. Zip individual packages.'
def __init__(self, *args, **kw):
super(ZipCommand, self).__init__(*args, **kw)
if self.name == 'zip':
self.cmd_opts.add_option(
'--unzip',
action='store_true',
dest='unzip',
help='Unzip (rather than zip) a package.')
else:
self.cmd_opts.add_option(
'--zip',
action='store_false',
dest='unzip',
default=True,
help='Zip (rather than unzip) a package.')
self.cmd_opts.add_option(
'--no-pyc',
action='store_true',
dest='no_pyc',
help='Do not include .pyc files in zip files (useful on Google App Engine).')
self.cmd_opts.add_option(
'-l', '--list',
action='store_true',
dest='list',
help='List the packages available, and their zip status.')
self.cmd_opts.add_option(
'--sort-files',
action='store_true',
dest='sort_files',
help='With --list, sort packages according to how many files they contain.')
self.cmd_opts.add_option(
'--path',
action='append',
dest='paths',
help='Restrict operations to the given paths (may include wildcards).')
self.cmd_opts.add_option(
'-n', '--simulate',
action='store_true',
help='Do not actually perform the zip/unzip operation.')
self.parser.insert_option_group(0, self.cmd_opts)
def paths(self):
"""All the entries of sys.path, possibly restricted by --path"""
if not self.select_paths:
return sys.path
result = []
match_any = set()
for path in sys.path:
path = os.path.normcase(os.path.abspath(path))
for match in self.select_paths:
match = os.path.normcase(os.path.abspath(match))
if '*' in match:
if re.search(fnmatch.translate(match + '*'), path):
result.append(path)
match_any.add(match)
break
else:
if path.startswith(match):
result.append(path)
match_any.add(match)
break
else:
logger.debug("Skipping path %s because it doesn't match %s"
% (path, ', '.join(self.select_paths)))
for match in self.select_paths:
if match not in match_any and '*' not in match:
result.append(match)
logger.debug("Adding path %s because it doesn't match "
"anything already on sys.path" % match)
return result
def run(self, options, args):
logger.deprecated('1.7', "DEPRECATION: 'pip zip' and 'pip unzip' are deprecated, and will be removed in a future release.")
self.select_paths = options.paths
self.simulate = options.simulate
if options.list:
return self.list(options, args)
if not args:
raise InstallationError(
'You must give at least one package to zip or unzip')
packages = []
for arg in args:
module_name, filename = self.find_package(arg)
if options.unzip and os.path.isdir(filename):
raise InstallationError(
'The module %s (in %s) is not a zip file; cannot be unzipped'
% (module_name, filename))
elif not options.unzip and not os.path.isdir(filename):
raise InstallationError(
'The module %s (in %s) is not a directory; cannot be zipped'
% (module_name, filename))
packages.append((module_name, filename))
last_status = None
for module_name, filename in packages:
if options.unzip:
last_status = self.unzip_package(module_name, filename)
else:
last_status = self.zip_package(module_name, filename, options.no_pyc)
return last_status
def unzip_package(self, module_name, filename):
zip_filename = os.path.dirname(filename)
if not os.path.isfile(zip_filename) and zipfile.is_zipfile(zip_filename):
raise InstallationError(
'Module %s (in %s) isn\'t located in a zip file in %s'
% (module_name, filename, zip_filename))
package_path = os.path.dirname(zip_filename)
if package_path not in self.paths():
logger.warn(
'Unpacking %s into %s, but %s is not on sys.path'
% (display_path(zip_filename), display_path(package_path),
display_path(package_path)))
logger.notify('Unzipping %s (in %s)' % (module_name, display_path(zip_filename)))
if self.simulate:
logger.notify('Skipping remaining operations because of --simulate')
return
logger.indent += 2
try:
## FIXME: this should be undoable:
zip = zipfile.ZipFile(zip_filename)
to_save = []
for info in zip.infolist():
name = info.filename
if name.startswith(module_name + os.path.sep):
content = zip.read(name)
dest = os.path.join(package_path, name)
if not os.path.exists(os.path.dirname(dest)):
os.makedirs(os.path.dirname(dest))
if not content and dest.endswith(os.path.sep):
if not os.path.exists(dest):
os.makedirs(dest)
else:
f = open(dest, 'wb')
f.write(content)
f.close()
else:
to_save.append((name, zip.read(name)))
zip.close()
if not to_save:
logger.info('Removing now-empty zip file %s' % display_path(zip_filename))
os.unlink(zip_filename)
self.remove_filename_from_pth(zip_filename)
else:
logger.info('Removing entries in %s/ from zip file %s' % (module_name, display_path(zip_filename)))
zip = zipfile.ZipFile(zip_filename, 'w')
for name, content in to_save:
zip.writestr(name, content)
zip.close()
finally:
logger.indent -= 2
def zip_package(self, module_name, filename, no_pyc):
orig_filename = filename
logger.notify('Zip %s (in %s)' % (module_name, display_path(filename)))
logger.indent += 2
if filename.endswith('.egg'):
dest_filename = filename
else:
dest_filename = filename + '.zip'
try:
## FIXME: I think this needs to be undoable:
if filename == dest_filename:
filename = backup_dir(orig_filename)
logger.notify('Moving %s aside to %s' % (orig_filename, filename))
if not self.simulate:
shutil.move(orig_filename, filename)
try:
logger.info('Creating zip file in %s' % display_path(dest_filename))
if not self.simulate:
zip = zipfile.ZipFile(dest_filename, 'w')
zip.writestr(module_name + '/', '')
for dirpath, dirnames, filenames in os.walk(filename):
if no_pyc:
filenames = [f for f in filenames
if not f.lower().endswith('.pyc')]
for fns, is_dir in [(dirnames, True), (filenames, False)]:
for fn in fns:
full = os.path.join(dirpath, fn)
dest = os.path.join(module_name, dirpath[len(filename):].lstrip(os.path.sep), fn)
if is_dir:
zip.writestr(dest + '/', '')
else:
zip.write(full, dest)
zip.close()
logger.info('Removing old directory %s' % display_path(filename))
if not self.simulate:
rmtree(filename)
except:
## FIXME: need to do an undo here
raise
## FIXME: should also be undone:
self.add_filename_to_pth(dest_filename)
finally:
logger.indent -= 2
def remove_filename_from_pth(self, filename):
for pth in self.pth_files():
f = open(pth, 'r')
lines = f.readlines()
f.close()
new_lines = [
l for l in lines if l.strip() != filename]
if lines != new_lines:
logger.info('Removing reference to %s from .pth file %s'
% (display_path(filename), display_path(pth)))
if not [line for line in new_lines if line]:
logger.info('%s file would be empty: deleting' % display_path(pth))
if not self.simulate:
os.unlink(pth)
else:
if not self.simulate:
f = open(pth, 'wb')
f.writelines(new_lines)
f.close()
return
logger.warn('Cannot find a reference to %s in any .pth file' % display_path(filename))
def add_filename_to_pth(self, filename):
path = os.path.dirname(filename)
dest = filename + '.pth'
if path not in self.paths():
logger.warn('Adding .pth file %s, but it is not on sys.path' % display_path(dest))
if not self.simulate:
if os.path.exists(dest):
f = open(dest)
lines = f.readlines()
f.close()
if lines and not lines[-1].endswith('\n'):
lines[-1] += '\n'
lines.append(filename + '\n')
else:
lines = [filename + '\n']
f = open(dest, 'wb')
f.writelines(lines)
f.close()
def pth_files(self):
for path in self.paths():
if not os.path.exists(path) or not os.path.isdir(path):
continue
for filename in os.listdir(path):
if filename.endswith('.pth'):
yield os.path.join(path, filename)
def find_package(self, package):
for path in self.paths():
full = os.path.join(path, package)
if os.path.exists(full):
return package, full
if not os.path.isdir(path) and zipfile.is_zipfile(path):
zip = zipfile.ZipFile(path, 'r')
try:
zip.read(os.path.join(package, '__init__.py'))
except KeyError:
pass
else:
zip.close()
return package, full
zip.close()
## FIXME: need special error for package.py case:
raise InstallationError(
'No package with the name %s found' % package)
def list(self, options, args):
if args:
raise InstallationError(
'You cannot give an argument with --list')
for path in sorted(self.paths()):
if not os.path.exists(path):
continue
basename = os.path.basename(path.rstrip(os.path.sep))
if os.path.isfile(path) and zipfile.is_zipfile(path):
if os.path.dirname(path) not in self.paths():
logger.notify('Zipped egg: %s' % display_path(path))
continue
if (basename != 'site-packages' and basename != 'dist-packages'
and not path.replace('\\', '/').endswith('lib/python')):
continue
logger.notify('In %s:' % display_path(path))
logger.indent += 2
zipped = []
unzipped = []
try:
for filename in sorted(os.listdir(path)):
ext = os.path.splitext(filename)[1].lower()
if ext in ('.pth', '.egg-info', '.egg-link'):
continue
if ext == '.py':
logger.info('Not displaying %s: not a package' % display_path(filename))
continue
full = os.path.join(path, filename)
if os.path.isdir(full):
unzipped.append((filename, self.count_package(full)))
elif zipfile.is_zipfile(full):
zipped.append(filename)
else:
logger.info('Unknown file: %s' % display_path(filename))
if zipped:
logger.notify('Zipped packages:')
logger.indent += 2
try:
for filename in zipped:
logger.notify(filename)
finally:
logger.indent -= 2
else:
logger.notify('No zipped packages.')
if unzipped:
if options.sort_files:
unzipped.sort(key=lambda x: -x[1])
logger.notify('Unzipped packages:')
logger.indent += 2
try:
for filename, count in unzipped:
logger.notify('%s (%i files)' % (filename, count))
finally:
logger.indent -= 2
else:
logger.notify('No unzipped packages.')
finally:
logger.indent -= 2
def count_package(self, path):
total = 0
for dirpath, dirnames, filenames in os.walk(path):
filenames = [f for f in filenames
if not f.lower().endswith('.pyc')]
total += len(filenames)
return total
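# Illustrative usage of this (deprecated) command; the package name is
# hypothetical:
#
#     $ pip zip --list
#     $ pip unzip somepackage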


@ -0,0 +1,644 @@
import cgi
import email.utils
import hashlib
import getpass
import mimetypes
import os
import platform
import re
import shutil
import sys
import tempfile
import pip
from pip.backwardcompat import urllib, urlparse, raw_input
from pip.exceptions import InstallationError, HashMismatch
from pip.util import (splitext, rmtree, format_size, display_path,
backup_dir, ask_path_exists, unpack_file,
create_download_cache_folder, cache_download)
from pip.vcs import vcs
from pip.log import logger
import requests, six
from requests.adapters import BaseAdapter
from requests.auth import AuthBase, HTTPBasicAuth
from requests.compat import IncompleteRead
from requests.exceptions import InvalidURL, ChunkedEncodingError
from requests.models import Response
from requests.structures import CaseInsensitiveDict
__all__ = ['get_file_content',
'is_url', 'url_to_path', 'path_to_url',
'is_archive_file', 'unpack_vcs_link',
'unpack_file_url', 'is_vcs_url', 'is_file_url', 'unpack_http_url']
def user_agent():
"""Return a string representing the user agent."""
_implementation = platform.python_implementation()
if _implementation == 'CPython':
_implementation_version = platform.python_version()
elif _implementation == 'PyPy':
_implementation_version = '%s.%s.%s' % (sys.pypy_version_info.major,
sys.pypy_version_info.minor,
sys.pypy_version_info.micro)
if sys.pypy_version_info.releaselevel != 'final':
_implementation_version = ''.join([
_implementation_version,
sys.pypy_version_info.releaselevel,
])
elif _implementation == 'Jython':
_implementation_version = platform.python_version() # Complete Guess
elif _implementation == 'IronPython':
_implementation_version = platform.python_version() # Complete Guess
else:
_implementation_version = 'Unknown'
try:
p_system = platform.system()
p_release = platform.release()
except IOError:
p_system = 'Unknown'
p_release = 'Unknown'
return " ".join(['pip/%s' % pip.__version__,
'%s/%s' % (_implementation, _implementation_version),
'%s/%s' % (p_system, p_release)])
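# Example of the string this produces; the exact values depend on the pip
# version, interpreter, and OS (these are illustrative):
#
#     pip/1.5.4 CPython/2.7.6 Linux/3.13.0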
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True):
self.prompting = prompting
self.passwords = {}
def __call__(self, req):
parsed = urlparse.urlparse(req.url)
# Get the netloc without any embedded credentials
netloc = parsed.netloc.split("@", 1)[-1]
# Set the url of the request to the url without any credentials
req.url = urlparse.urlunparse(parsed[:1] + (netloc,) + parsed[2:])
# Use any stored credentials that we have for this netloc
username, password = self.passwords.get(netloc, (None, None))
# Extract credentials embedded in the url if we have none stored
if username is None:
username, password = self.parse_credentials(parsed.netloc)
if username or password:
# Store the username and password
self.passwords[netloc] = (username, password)
# Send the basic auth with this request
req = HTTPBasicAuth(username or "", password or "")(req)
# Attach a hook to handle 401 responses
req.register_hook("response", self.handle_401)
return req
def handle_401(self, resp, **kwargs):
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
# We are not able to prompt the user, so simply return the response
if not self.prompting:
return resp
parsed = urlparse.urlparse(resp.url)
# Prompt the user for a new username and password
username = raw_input("User for %s: " % parsed.netloc)
password = getpass.getpass("Password: ")
# Store the new username and password to use for future requests
if username or password:
self.passwords[parsed.netloc] = (username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def parse_credentials(self, netloc):
if "@" in netloc:
userinfo = netloc.rsplit("@", 1)[0]
if ":" in userinfo:
return userinfo.split(":", 1)
return userinfo, None
return None, None
class LocalFSResponse(object):
def __init__(self, fileobj):
self.fileobj = fileobj
def __getattr__(self, name):
return getattr(self.fileobj, name)
def read(self, amt=None, decode_content=None, cache_content=False):
return self.fileobj.read(amt)
# Insert Hacks to Make Cookie Jar work w/ Requests
@property
def _original_response(self):
class FakeMessage(object):
def getheaders(self, header):
return []
def get_all(self, header, default):
return []
class FakeResponse(object):
@property
def msg(self):
return FakeMessage()
return FakeResponse()
class LocalFSAdapter(BaseAdapter):
def send(self, request, stream=None, timeout=None, verify=None, cert=None,
proxies=None):
parsed_url = urlparse.urlparse(request.url)
# We only work for requests with a host of localhost
if parsed_url.netloc.lower() != "localhost":
raise InvalidURL("Invalid URL %r: Only localhost is allowed" %
request.url)
real_url = urlparse.urlunparse(parsed_url[:1] + ("",) + parsed_url[2:])
pathname = url_to_path(real_url)
resp = Response()
resp.status_code = 200
resp.url = real_url
stats = os.stat(pathname)
modified = email.utils.formatdate(stats.st_mtime, usegmt=True)
resp.headers = CaseInsensitiveDict({
"Content-Type": mimetypes.guess_type(pathname)[0] or "text/plain",
"Content-Length": stats.st_size,
"Last-Modified": modified,
})
resp.raw = LocalFSResponse(open(pathname, "rb"))
resp.close = resp.raw.close
return resp
def close(self):
pass
class PipSession(requests.Session):
timeout = None
def __init__(self, *args, **kwargs):
super(PipSession, self).__init__(*args, **kwargs)
# Attach our User Agent to the request
self.headers["User-Agent"] = user_agent()
# Attach our Authentication handler to the session
self.auth = MultiDomainBasicAuth()
# Enable file:// urls
self.mount("file://", LocalFSAdapter())
def request(self, method, url, *args, **kwargs):
# Make file:// urls not fail due to lack of a hostname
parsed = urlparse.urlparse(url)
if parsed.scheme == "file":
url = urlparse.urlunparse(parsed[:1] + ("localhost",) + parsed[2:])
# Allow setting a default timeout on a session
kwargs.setdefault("timeout", self.timeout)
# Dispatch the actual request
return super(PipSession, self).request(method, url, *args, **kwargs)
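# A minimal usage sketch (illustrative; the path is hypothetical and must
# exist). Mirrors how _build_session wires this up in the commands above:
#
#     session = PipSession()
#     session.timeout = 15   # default timeout applied to every request
#     resp = session.get('file:///tmp/requirements.txt')  # file:// is mounted
#     resp.raise_for_status()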
def get_file_content(url, comes_from=None, session=None):
"""Gets the content of a file; it may be a filename, file: URL, or
http: URL. Returns (location, content). Content is unicode."""
if session is None:
session = PipSession()
match = _scheme_re.search(url)
if match:
scheme = match.group(1).lower()
if (scheme == 'file' and comes_from
and comes_from.startswith('http')):
raise InstallationError(
'Requirements file %s references URL %s, which is local'
% (comes_from, url))
if scheme == 'file':
path = url.split(':', 1)[1]
path = path.replace('\\', '/')
match = _url_slash_drive_re.match(path)
if match:
path = match.group(1) + ':' + path.split('|', 1)[1]
path = urllib.unquote(path)
if path.startswith('/'):
path = '/' + path.lstrip('/')
url = path
else:
## FIXME: catch some errors
resp = session.get(url)
resp.raise_for_status()
if six.PY3:
return resp.url, resp.text
else:
return resp.url, resp.content
try:
f = open(url)
content = f.read()
except IOError:
e = sys.exc_info()[1]
raise InstallationError('Could not open requirements file: %s' % str(e))
else:
f.close()
return url, content
_scheme_re = re.compile(r'^(http|https|file):', re.I)
_url_slash_drive_re = re.compile(r'/*([a-z])\|', re.I)
def is_url(name):
"""Returns true if the name looks like a URL"""
if ':' not in name:
return False
scheme = name.split(':', 1)[0].lower()
return scheme in ['http', 'https', 'file', 'ftp'] + vcs.all_schemes
def url_to_path(url):
"""
Convert a file: URL to a path.
"""
assert url.startswith('file:'), (
"You can only turn file: urls into filenames (not %r)" % url)
path = url[len('file:'):].lstrip('/')
path = urllib.unquote(path)
if _url_drive_re.match(path):
path = path[0] + ':' + path[2:]
else:
path = '/' + path
return path
_drive_re = re.compile('^([a-z]):', re.I)
_url_drive_re = re.compile('^([a-z])[:|]', re.I)
def path_to_url(path):
"""
Convert a path to a file: URL. The path will be made absolute and have
quoted path parts.
"""
path = os.path.normpath(os.path.abspath(path))
drive, path = os.path.splitdrive(path)
filepath = path.split(os.path.sep)
url = '/'.join([urllib.quote(part) for part in filepath])
if not drive:
url = url.lstrip('/')
return 'file:///' + drive + url
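# Round-trip sketch for the two helpers above (POSIX paths, doctest-style):
#
#     >>> path_to_url('/tmp/some file.txt')
#     'file:///tmp/some%20file.txt'
#     >>> url_to_path('file:///tmp/some%20file.txt')
#     '/tmp/some file.txt'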
def is_archive_file(name):
"""Return True if `name` is a considered as an archive file."""
archives = ('.zip', '.tar.gz', '.tar.bz2', '.tgz', '.tar', '.pybundle',
'.whl')
ext = splitext(name)[1].lower()
if ext in archives:
return True
return False
def unpack_vcs_link(link, location, only_download=False):
vcs_backend = _get_used_vcs_backend(link)
if only_download:
vcs_backend.export(location)
else:
vcs_backend.unpack(location)
def _get_used_vcs_backend(link):
for backend in vcs.backends:
if link.scheme in backend.schemes:
vcs_backend = backend(link.url)
return vcs_backend
def is_vcs_url(link):
return bool(_get_used_vcs_backend(link))
def is_file_url(link):
return link.url.lower().startswith('file:')
def _check_hash(download_hash, link):
if download_hash.digest_size != hashlib.new(link.hash_name).digest_size:
logger.fatal("Hash digest size of the package %d (%s) doesn't match the expected hash name %s!"
% (download_hash.digest_size, link, link.hash_name))
raise HashMismatch('Hash name mismatch for package %s' % link)
if download_hash.hexdigest() != link.hash:
logger.fatal("Hash of the package %s (%s) doesn't match the expected hash %s!"
% (link, download_hash.hexdigest(), link.hash))
raise HashMismatch('Bad %s hash for package %s' % (link.hash_name, link))
def _get_hash_from_file(target_file, link):
try:
download_hash = hashlib.new(link.hash_name)
except (ValueError, TypeError):
logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
return None
fp = open(target_file, 'rb')
while True:
chunk = fp.read(4096)
if not chunk:
break
download_hash.update(chunk)
fp.close()
return download_hash
def _download_url(resp, link, temp_location):
fp = open(temp_location, 'wb')
download_hash = None
if link.hash and link.hash_name:
try:
download_hash = hashlib.new(link.hash_name)
except ValueError:
logger.warn("Unsupported hash name %s for package %s" % (link.hash_name, link))
try:
total_length = int(resp.headers['content-length'])
except (ValueError, KeyError, TypeError):
total_length = 0
downloaded = 0
show_progress = total_length > 40 * 1000 or not total_length
show_url = link.show_url
try:
if show_progress:
## FIXME: the URL can get really long in this message:
if total_length:
logger.start_progress('Downloading %s (%s): ' % (show_url, format_size(total_length)))
else:
logger.start_progress('Downloading %s (unknown size): ' % show_url)
else:
logger.notify('Downloading %s' % show_url)
logger.info('Downloading from URL %s' % link)
def resp_read(chunk_size):
try:
# Special case for urllib3.
try:
for chunk in resp.raw.stream(
chunk_size, decode_content=False):
yield chunk
except IncompleteRead as e:
raise ChunkedEncodingError(e)
except AttributeError:
# Standard file-like object.
while True:
chunk = resp.raw.read(chunk_size)
if not chunk:
break
yield chunk
for chunk in resp_read(4096):
downloaded += len(chunk)
if show_progress:
if not total_length:
logger.show_progress('%s' % format_size(downloaded))
else:
logger.show_progress('%3i%% %s' % (100 * downloaded / total_length, format_size(downloaded)))
if download_hash is not None:
download_hash.update(chunk)
fp.write(chunk)
fp.close()
finally:
if show_progress:
logger.end_progress('%s downloaded' % format_size(downloaded))
return download_hash
def _copy_file(filename, location, content_type, link):
copy = True
download_location = os.path.join(location, link.filename)
if os.path.exists(download_location):
response = ask_path_exists(
'The file %s exists. (i)gnore, (w)ipe, (b)ackup ' %
display_path(download_location), ('i', 'w', 'b'))
if response == 'i':
copy = False
elif response == 'w':
logger.warn('Deleting %s' % display_path(download_location))
os.remove(download_location)
elif response == 'b':
dest_file = backup_dir(download_location)
logger.warn('Backing up %s to %s'
% (display_path(download_location), display_path(dest_file)))
shutil.move(download_location, dest_file)
if copy:
shutil.copy(filename, download_location)
logger.notify('Saved %s' % display_path(download_location))
def unpack_http_url(link, location, download_cache, download_dir=None,
session=None):
if session is None:
session = PipSession()
temp_dir = tempfile.mkdtemp('-unpack', 'pip-')
temp_location = None
target_url = link.url.split('#', 1)[0]
already_cached = False
cache_file = None
cache_content_type_file = None
download_hash = None
# If a download cache is specified, is the file cached there?
if download_cache:
cache_file = os.path.join(download_cache,
urllib.quote(target_url, ''))
cache_content_type_file = cache_file + '.content-type'
already_cached = (
os.path.exists(cache_file) and
os.path.exists(cache_content_type_file)
)
if not os.path.isdir(download_cache):
create_download_cache_folder(download_cache)
# If a download dir is specified, is the file already downloaded there?
already_downloaded = None
if download_dir:
already_downloaded = os.path.join(download_dir, link.filename)
if not os.path.exists(already_downloaded):
already_downloaded = None
# If already downloaded, does its hash match?
if already_downloaded:
temp_location = already_downloaded
content_type = mimetypes.guess_type(already_downloaded)[0]
logger.notify('File was already downloaded %s' % already_downloaded)
if link.hash:
download_hash = _get_hash_from_file(temp_location, link)
try:
_check_hash(download_hash, link)
except HashMismatch:
logger.warn(
'Previously-downloaded file %s has bad hash, '
're-downloading.' % temp_location
)
temp_location = None
os.unlink(already_downloaded)
already_downloaded = None
# If not a valid download, let's confirm the cached file is valid
if already_cached and not temp_location:
with open(cache_content_type_file) as fp:
content_type = fp.read().strip()
temp_location = cache_file
logger.notify('Using download cache from %s' % cache_file)
if link.hash and link.hash_name:
download_hash = _get_hash_from_file(cache_file, link)
try:
_check_hash(download_hash, link)
except HashMismatch:
logger.warn(
'Cached file %s has bad hash, '
're-downloading.' % temp_location
)
temp_location = None
os.unlink(cache_file)
os.unlink(cache_content_type_file)
already_cached = False
# We don't have either a cached or a downloaded copy
# let's download to a tmp dir
if not temp_location:
try:
resp = session.get(target_url, stream=True)
resp.raise_for_status()
except requests.HTTPError as exc:
logger.fatal("HTTP error %s while getting %s" %
(exc.response.status_code, link))
raise
content_type = resp.headers.get('content-type', '')
filename = link.filename # fallback
# Have a look at the Content-Disposition header for a better guess
content_disposition = resp.headers.get('content-disposition')
if content_disposition:
type, params = cgi.parse_header(content_disposition)
# We use ``or`` here because we don't want to use an "empty" value
# from the filename param.
filename = params.get('filename') or filename
ext = splitext(filename)[1]
if not ext:
ext = mimetypes.guess_extension(content_type)
if ext:
filename += ext
if not ext and link.url != resp.url:
ext = os.path.splitext(resp.url)[1]
if ext:
filename += ext
temp_location = os.path.join(temp_dir, filename)
download_hash = _download_url(resp, link, temp_location)
if link.hash and link.hash_name:
_check_hash(download_hash, link)
# a download dir is specified; let's copy the archive there
if download_dir and not already_downloaded:
_copy_file(temp_location, download_dir, content_type, link)
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(temp_location, location, content_type, link)
# if using a download cache, cache it, if needed
if cache_file and not already_cached:
cache_download(cache_file, temp_location, content_type)
if not (already_cached or already_downloaded):
os.unlink(temp_location)
os.rmdir(temp_dir)
def unpack_file_url(link, location, download_dir=None):
link_path = url_to_path(link.url_without_fragment)
already_downloaded = False
# If it's a url to a local directory
if os.path.isdir(link_path):
if os.path.isdir(location):
rmtree(location)
shutil.copytree(link_path, location, symlinks=True)
return
# if link has a hash, let's confirm it matches
if link.hash:
link_path_hash = _get_hash_from_file(link_path, link)
_check_hash(link_path_hash, link)
# If a download dir is specified, is the file already there and valid?
if download_dir:
download_path = os.path.join(download_dir, link.filename)
if os.path.exists(download_path):
content_type = mimetypes.guess_type(download_path)[0]
logger.notify('File was already downloaded %s' % download_path)
if link.hash:
download_hash = _get_hash_from_file(download_path, link)
try:
_check_hash(download_hash, link)
already_downloaded = True
except HashMismatch:
logger.warn(
'Previously-downloaded file %s has bad hash, '
're-downloading.' % link_path
)
os.unlink(download_path)
else:
already_downloaded = True
if already_downloaded:
from_path = download_path
else:
from_path = link_path
content_type = mimetypes.guess_type(from_path)[0]
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies
unpack_file(from_path, location, content_type, link)
# a download dir is specified and not already downloaded
if download_dir and not already_downloaded:
_copy_file(from_path, download_dir, content_type, link)


@ -0,0 +1,46 @@
"""Exceptions used throughout package"""
class PipError(Exception):
"""Base pip exception"""
class InstallationError(PipError):
"""General exception during installation"""
class UninstallationError(PipError):
"""General exception during uninstallation"""
class DistributionNotFound(InstallationError):
"""Raised when a distribution cannot be found to satisfy a requirement"""
class BestVersionAlreadyInstalled(PipError):
"""Raised when the most up-to-date version of a package is already
installed. """
class BadCommand(PipError):
"""Raised when virtualenv or a command is not found"""
class CommandError(PipError):
"""Raised when there is an error in command-line arguments"""
class PreviousBuildDirError(PipError):
"""Raised when there's a previous conflicting build directory"""
class HashMismatch(InstallationError):
"""Distribution file hash values don't match."""
class InvalidWheelFilename(InstallationError):
"""Invalid wheel filename."""
class UnsupportedWheel(InstallationError):
"""Unsupported wheel."""


@ -0,0 +1,990 @@
"""Routines related to PyPI, indexes"""
import sys
import os
import re
import mimetypes
import posixpath
from pip.log import logger
from pip.util import Inf, normalize_name, splitext, is_prerelease
from pip.exceptions import (DistributionNotFound, BestVersionAlreadyInstalled,
InstallationError, InvalidWheelFilename, UnsupportedWheel)
from pip.backwardcompat import urlparse, url2pathname
from pip.download import PipSession, url_to_path, path_to_url
from pip.wheel import Wheel, wheel_ext
from pip.pep425tags import supported_tags, supported_tags_noarch, get_platform
import html5lib, requests, pkg_resources
from requests.exceptions import SSLError
__all__ = ['PackageFinder']
DEFAULT_MIRROR_HOSTNAME = "last.pypi.python.org"
INSECURE_SCHEMES = {
"http": ["https"],
}
class PackageFinder(object):
"""This finds packages.
This is meant to match easy_install's technique for looking for
packages, by reading pages and looking for appropriate links
"""
def __init__(self, find_links, index_urls,
use_wheel=True, allow_external=[], allow_unverified=[],
allow_all_external=False, allow_all_prereleases=False,
process_dependency_links=False, session=None):
self.find_links = find_links
self.index_urls = index_urls
self.dependency_links = []
self.cache = PageCache()
# These are boring links that have already been logged somehow:
self.logged_links = set()
self.use_wheel = use_wheel
# Do we allow (safe and verifiable) externally hosted files?
self.allow_external = set(normalize_name(n) for n in allow_external)
# Which names are allowed to install insecure and unverifiable files?
self.allow_unverified = set(
normalize_name(n) for n in allow_unverified
)
# Anything that is allowed unverified is also allowed external
self.allow_external |= self.allow_unverified
# Do we allow all (safe and verifiable) externally hosted files?
self.allow_all_external = allow_all_external
# Stores if we ignored any external links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_external = False
# Stores if we ignored any unsafe links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_unverified = False
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases
# Do we process dependency links?
self.process_dependency_links = process_dependency_links
self._have_warned_dependency_links = False
# The Session we'll use to make requests
self.session = session or PipSession()
def add_dependency_links(self, links):
## FIXME: this shouldn't be global list this, it should only
## apply to requirements of the package that specifies the
## dependency_links value
## FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
if not self._have_warned_dependency_links:
logger.deprecated(
"1.6",
"Dependency Links processing has been deprecated with an "
"accelerated time schedule and will be removed in pip 1.6",
)
self._have_warned_dependency_links = True
self.dependency_links.extend(links)
def _sort_locations(self, locations):
"""
Sort locations into "files" (archives) and "urls", and return
a pair of lists (files, urls)
"""
files = []
urls = []
# puts the url for the given file path into the appropriate list
def sort_path(path):
url = path_to_url(path)
if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
urls.append(url)
else:
files.append(url)
for url in locations:
is_local_path = os.path.exists(url)
is_file_url = url.startswith('file:')
is_find_link = url in self.find_links
if is_local_path or is_file_url:
if is_local_path:
path = url
else:
path = url_to_path(url)
if is_find_link and os.path.isdir(path):
path = os.path.realpath(path)
for item in os.listdir(path):
sort_path(os.path.join(path, item))
elif is_file_url and os.path.isdir(path):
urls.append(url)
elif os.path.isfile(path):
sort_path(path)
else:
urls.append(url)
return files, urls
def _link_sort_key(self, link_tuple):
"""
Function used to generate link sort key for link tuples.
The greater the return value, the more preferred it is.
If not finding wheels, then sorted by version only.
If finding wheels, then the sort order is by version, then:
1. existing installs
2. wheels ordered via Wheel.support_index_min()
3. source archives
Note: it was considered to embed this logic into the Link
comparison operators, but then different sdist links
with the same version, would have to be considered equal
"""
parsed_version, link, _ = link_tuple
if self.use_wheel:
support_num = len(supported_tags)
if link == INSTALLED_VERSION:
pri = 1
elif link.ext == wheel_ext:
wheel = Wheel(link.filename) # can raise InvalidWheelFilename
if not wheel.supported():
raise UnsupportedWheel("%s is not a supported wheel for this platform. It can't be sorted." % wheel.filename)
pri = -(wheel.support_index_min())
else: # sdist
pri = -(support_num)
return (parsed_version, pri)
else:
return parsed_version
def _sort_versions(self, applicable_versions):
"""
Bring the latest version (and wheels) to the front, but maintain the existing ordering as secondary.
See the docstring for `_link_sort_key` for details.
This function is isolated for easier unit testing.
"""
return sorted(applicable_versions, key=self._link_sort_key, reverse=True)
def find_requirement(self, req, upgrade):
def mkurl_pypi_url(url):
loc = posixpath.join(url, url_name)
# For maximum compatibility with easy_install, ensure the path
# ends in a trailing slash. Although this isn't in the spec
# (and PyPI can handle it without the slash) some other index
# implementations might break if they relied on easy_install's behavior.
if not loc.endswith('/'):
loc = loc + '/'
return loc
url_name = req.url_name
# Only check main index if index URL is given:
main_index_url = None
if self.index_urls:
# Check that we have the url_name correctly spelled:
main_index_url = Link(mkurl_pypi_url(self.index_urls[0]), trusted=True)
# This will also cache the page, so it's okay that we get it again later:
page = self._get_page(main_index_url, req)
if page is None:
url_name = self._find_url_name(Link(self.index_urls[0], trusted=True), url_name, req) or req.url_name
if url_name is not None:
locations = [
mkurl_pypi_url(url)
for url in self.index_urls] + self.find_links
else:
locations = list(self.find_links)
for version in req.absolute_versions:
if url_name is not None and main_index_url is not None:
locations = [
posixpath.join(main_index_url.url, version)] + locations
file_locations, url_locations = self._sort_locations(locations)
_flocations, _ulocations = self._sort_locations(self.dependency_links)
file_locations.extend(_flocations)
# We trust every url that the user has given us whether it was given
# via --index-url or --find-links
locations = [Link(url, trusted=True) for url in url_locations]
# We explicitly do not trust links that came from dependency_links
locations.extend([Link(url) for url in _ulocations])
logger.debug('URLs to search for versions for %s:' % req)
for location in locations:
logger.debug('* %s' % location)
# Determine if this url used a secure transport mechanism
parsed = urlparse.urlparse(str(location))
if parsed.scheme in INSECURE_SCHEMES:
secure_schemes = INSECURE_SCHEMES[parsed.scheme]
if len(secure_schemes) == 1:
ctx = (location, parsed.scheme, secure_schemes[0],
parsed.netloc)
logger.warn("%s uses an insecure transport scheme (%s). "
"Consider using %s if %s has it available" %
ctx)
elif len(secure_schemes) > 1:
ctx = (location, parsed.scheme, ", ".join(secure_schemes),
parsed.netloc)
logger.warn("%s uses an insecure transport scheme (%s). "
"Consider using one of %s if %s has any of "
"them available" % ctx)
else:
ctx = (location, parsed.scheme)
logger.warn("%s uses an insecure transport scheme (%s)." %
ctx)
found_versions = []
found_versions.extend(
self._package_versions(
# We trust every directly linked archive in find_links
[Link(url, '-f', trusted=True) for url in self.find_links], req.name.lower()))
page_versions = []
for page in self._get_pages(locations, req):
logger.debug('Analyzing links from page %s' % page.url)
logger.indent += 2
try:
page_versions.extend(self._package_versions(page.links, req.name.lower()))
finally:
logger.indent -= 2
dependency_versions = list(self._package_versions(
[Link(url) for url in self.dependency_links], req.name.lower()))
if dependency_versions:
logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
file_versions = list(self._package_versions(
[Link(url) for url in file_locations], req.name.lower()))
if not found_versions and not page_versions and not dependency_versions and not file_versions:
logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
if self.need_warn_external:
logger.warn("Some externally hosted files were ignored (use "
"--allow-external %s to allow)." % req.name)
if self.need_warn_unverified:
logger.warn("Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow)." %
req.name)
raise DistributionNotFound('No distributions at all found for %s' % req)
installed_version = []
if req.satisfied_by is not None:
installed_version = [(req.satisfied_by.parsed_version, INSTALLED_VERSION, req.satisfied_by.version)]
if file_versions:
file_versions.sort(reverse=True)
logger.info('Local files found: %s' % ', '.join([url_to_path(link.url) for parsed, link, version in file_versions]))
#this is an intentional priority ordering
all_versions = installed_version + file_versions + found_versions + page_versions + dependency_versions
applicable_versions = []
for (parsed_version, link, version) in all_versions:
if version not in req.req:
logger.info("Ignoring link %s, version %s doesn't match %s"
% (link, version, ','.join([''.join(s) for s in req.req.specs])))
continue
elif is_prerelease(version) and not (self.allow_all_prereleases or req.prereleases):
# If this version isn't the already installed one, then
# ignore it if it's a pre-release.
if link is not INSTALLED_VERSION:
logger.info("Ignoring link %s, version %s is a pre-release (use --pre to allow)." % (link, version))
continue
applicable_versions.append((parsed_version, link, version))
applicable_versions = self._sort_versions(applicable_versions)
existing_applicable = bool([link for parsed_version, link, version in applicable_versions if link is INSTALLED_VERSION])
if not upgrade and existing_applicable:
if applicable_versions[0][1] is INSTALLED_VERSION:
logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
% req.satisfied_by.version)
else:
logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
% (req.satisfied_by.version, applicable_versions[0][2]))
return None
if not applicable_versions:
logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
% (req, ', '.join([version for parsed_version, link, version in all_versions])))
if self.need_warn_external:
logger.warn("Some externally hosted files were ignored (use "
"--allow-external to allow).")
if self.need_warn_unverified:
logger.warn("Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow)." %
req.name)
raise DistributionNotFound('No distributions matching the version for %s' % req)
if applicable_versions[0][1] is INSTALLED_VERSION:
# We have an existing version, and it's the best version
logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
% (req.satisfied_by.version, ', '.join([version for parsed_version, link, version in applicable_versions[1:]]) or 'none'))
raise BestVersionAlreadyInstalled
if len(applicable_versions) > 1:
logger.info('Using version %s (newest of versions: %s)' %
(applicable_versions[0][2], ', '.join([version for parsed_version, link, version in applicable_versions])))
selected_version = applicable_versions[0][1]
if (selected_version.internal is not None
and not selected_version.internal):
logger.warn("%s an externally hosted file and may be "
"unreliable" % req.name)
if (selected_version.verifiable is not None
and not selected_version.verifiable):
logger.warn("%s is potentially insecure and "
"unverifiable." % req.name)
if selected_version._deprecated_regex:
logger.deprecated(
"1.7",
"%s discovered using a deprecated method of parsing, "
"in the future it will no longer be discovered" % req.name
)
return selected_version
def _find_url_name(self, index_url, url_name, req):
"""Finds the true URL name of a package, when the given name isn't quite correct.
This is usually used to implement case-insensitivity."""
if not index_url.url.endswith('/'):
# Vaguely part of the PyPI API... weird but true.
## FIXME: bad to modify this?
index_url.url += '/'
page = self._get_page(index_url, req)
if page is None:
logger.fatal('Cannot fetch index base URL %s' % index_url)
return
norm_name = normalize_name(req.url_name)
for link in page.links:
base = posixpath.basename(link.path.rstrip('/'))
if norm_name == normalize_name(base):
logger.notify('Real name of requirement %s is %s' % (url_name, base))
return base
return None
def _get_pages(self, locations, req):
"""
Yields (page, page_url) from the given locations, skipping
locations that have errors, and adding download/homepage links
"""
all_locations = list(locations)
seen = set()
while all_locations:
location = all_locations.pop(0)
if location in seen:
continue
seen.add(location)
page = self._get_page(location, req)
if page is None:
continue
yield page
for link in page.rel_links():
normalized = normalize_name(req.name).lower()
if (not normalized in self.allow_external
and not self.allow_all_external):
self.need_warn_external = True
logger.debug("Not searching %s for files because external "
"urls are disallowed." % link)
continue
if (link.trusted is not None
and not link.trusted
and not normalized in self.allow_unverified):
logger.debug("Not searching %s for urls, it is an "
"untrusted link and cannot produce safe or "
"verifiable files." % link)
self.need_warn_unverified = True
continue
all_locations.append(link)
_egg_fragment_re = re.compile(r'#egg=([^&]*)')
_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
def _sort_links(self, links):
"Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates"
eggs, no_eggs = [], []
seen = set()
for link in links:
if link not in seen:
seen.add(link)
if link.egg_fragment:
eggs.append(link)
else:
no_eggs.append(link)
return no_eggs + eggs
def _package_versions(self, links, search_name):
for link in self._sort_links(links):
for v in self._link_package_versions(link, search_name):
yield v
def _known_extensions(self):
extensions = ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip')
if self.use_wheel:
return extensions + (wheel_ext,)
return extensions
def _link_package_versions(self, link, search_name):
"""
Return an iterable of triples (pkg_resources_version_key,
link, python_version) that can be extracted from the given
link.
Meant to be overridden by subclasses, not called by clients.
"""
platform = get_platform()
version = None
if link.egg_fragment:
egg_info = link.egg_fragment
else:
egg_info, ext = link.splitext()
if not ext:
if link not in self.logged_links:
logger.debug('Skipping link %s; not a file' % link)
self.logged_links.add(link)
return []
if egg_info.endswith('.tar'):
# Special double-extension case:
egg_info = egg_info[:-4]
ext = '.tar' + ext
if ext not in self._known_extensions():
if link not in self.logged_links:
logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
self.logged_links.add(link)
return []
if "macosx10" in link.path and ext == '.zip':
if link not in self.logged_links:
logger.debug('Skipping link %s; macosx10 one' % (link))
self.logged_links.add(link)
return []
if ext == wheel_ext:
try:
wheel = Wheel(link.filename)
except InvalidWheelFilename:
logger.debug('Skipping %s because the wheel filename is invalid' % link)
return []
if wheel.name.lower() != search_name.lower():
logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
return []
if not wheel.supported():
logger.debug('Skipping %s because it is not compatible with this Python' % link)
return []
# This is a dirty hack to prevent installing Binary Wheels from
# PyPI unless it is a Windows or Mac Binary Wheel. This is
# paired with a change to PyPI disabling uploads for the
# same. Once we have a mechanism for enabling support for binary
# wheels on linux that deals with the inherent problems of
# binary distribution this can be removed.
comes_from = getattr(link, "comes_from", None)
if ((
not platform.startswith('win')
and not platform.startswith('macosx')
)
and comes_from is not None
and urlparse.urlparse(comes_from.url).netloc.endswith(
"pypi.python.org")):
if not wheel.supported(tags=supported_tags_noarch):
logger.debug(
"Skipping %s because it is a pypi-hosted binary "
"Wheel on an unsupported platform" % link
)
return []
version = wheel.version
if not version:
version = self._egg_info_matches(egg_info, search_name, link)
if version is None:
logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
return []
if (link.internal is not None
and not link.internal
and not normalize_name(search_name).lower() in self.allow_external
and not self.allow_all_external):
# We have a link that we are sure is external, so we should skip
# it unless we are allowing externals
logger.debug("Skipping %s because it is externally hosted." % link)
self.need_warn_external = True
return []
if (link.verifiable is not None
and not link.verifiable
and not (normalize_name(search_name).lower()
in self.allow_unverified)):
# We are sure we cannot verify this link's integrity,
# so we should skip it unless we are allowing unsafe installs
# for this requirement.
logger.debug("Skipping %s because it is an insecure and "
"unverifiable file." % link)
self.need_warn_unverified = True
return []
match = self._py_version_re.search(version)
if match:
version = version[:match.start()]
py_version = match.group(1)
if py_version != sys.version[:3]:
logger.debug('Skipping %s because Python version is incorrect' % link)
return []
logger.debug('Found link %s, version: %s' % (link, version))
return [(pkg_resources.parse_version(version),
link,
version)]
def _egg_info_matches(self, egg_info, search_name, link):
match = self._egg_info_re.search(egg_info)
if not match:
logger.debug('Could not parse version from link: %s' % link)
return None
name = match.group(0).lower()
# To match the "safe" name that pkg_resources creates:
name = name.replace('_', '-')
# project name and version must be separated by a dash
look_for = search_name.lower() + "-"
if name.startswith(look_for):
return match.group(0)[len(look_for):]
else:
return None
def _get_page(self, link, req):
return HTMLPage.get_page(link, req,
cache=self.cache,
session=self.session,
)
class PageCache(object):
"""Cache of HTML pages"""
failure_limit = 3
def __init__(self):
self._failures = {}
self._pages = {}
self._archives = {}
def too_many_failures(self, url):
return self._failures.get(url, 0) >= self.failure_limit
def get_page(self, url):
return self._pages.get(url)
def is_archive(self, url):
return self._archives.get(url, False)
def set_is_archive(self, url, value=True):
self._archives[url] = value
def add_page_failure(self, url, level):
self._failures[url] = self._failures.get(url, 0)+level
def add_page(self, urls, page):
for url in urls:
self._pages[url] = page
class HTMLPage(object):
"""Represents one page, along with its URL"""
## FIXME: these regexes are horrible hacks:
_homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
_download_re = re.compile(r'<th>\s*download\s+url', re.I)
_href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
def __init__(self, content, url, headers=None, trusted=None):
self.content = content
self.parsed = html5lib.parse(self.content, namespaceHTMLElements=False)
self.url = url
self.headers = headers
self.trusted = trusted
def __str__(self):
return self.url
@classmethod
def get_page(cls, link, req, cache=None, skip_archives=True, session=None):
if session is None:
session = PipSession()
url = link.url
url = url.split('#', 1)[0]
if cache.too_many_failures(url):
return None
# Check for VCS schemes that do not support lookup as web pages.
from pip.vcs import VcsSupport
for scheme in VcsSupport.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
logger.debug('Cannot look at %(scheme)s URL %(link)s' % locals())
return None
if cache is not None:
inst = cache.get_page(url)
if inst is not None:
return inst
try:
if skip_archives:
if cache is not None:
if cache.is_archive(url):
return None
filename = link.filename
for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
if filename.endswith(bad_ext):
content_type = cls._get_content_type(url,
session=session,
)
if content_type.lower().startswith('text/html'):
break
else:
logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
if cache is not None:
cache.set_is_archive(url)
return None
logger.debug('Getting page %s' % url)
# Tack index.html onto file:// URLs that point to directories
(scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
if scheme == 'file' and os.path.isdir(url2pathname(path)):
# add trailing slash if not present so urljoin doesn't trim final segment
if not url.endswith('/'):
url += '/'
url = urlparse.urljoin(url, 'index.html')
logger.debug(' file: URL is directory, getting %s' % url)
resp = session.get(url, headers={"Accept": "text/html"})
resp.raise_for_status()
# The check for archives above only works if the url ends with
# something that looks like an archive. However that is not a
# requirement. For instance http://sourceforge.net/projects/docutils/files/docutils/0.8.1/docutils-0.8.1.tar.gz/download
# redirects to http://superb-dca3.dl.sourceforge.net/project/docutils/docutils/0.8.1/docutils-0.8.1.tar.gz
# Unless we issue a HEAD request on every url we cannot know
# ahead of time for sure if something is HTML or not. However we
# can check after we've downloaded it.
content_type = resp.headers.get('Content-Type', 'unknown')
if not content_type.lower().startswith("text/html"):
logger.debug('Skipping page %s because of Content-Type: %s' %
(link, content_type))
if cache is not None:
cache.set_is_archive(url)
return None
inst = cls(resp.text, resp.url, resp.headers, trusted=link.trusted)
except requests.HTTPError as exc:
level = 2 if exc.response.status_code == 404 else 1
cls._handle_fail(req, link, exc, url, cache=cache, level=level)
except requests.ConnectionError as exc:
cls._handle_fail(
req, link, "connection error: %s" % exc, url,
cache=cache,
)
except requests.Timeout:
cls._handle_fail(req, link, "timed out", url, cache=cache)
except SSLError as exc:
reason = ("There was a problem confirming the ssl certificate: "
"%s" % exc)
cls._handle_fail(req, link, reason, url,
cache=cache,
level=2,
meth=logger.notify,
)
else:
if cache is not None:
cache.add_page([url, resp.url], inst)
return inst
@staticmethod
def _handle_fail(req, link, reason, url, cache=None, level=1, meth=None):
if meth is None:
meth = logger.info
meth("Could not fetch URL %s: %s", link, reason)
meth("Will skip URL %s when looking for download links for %s" %
(link.url, req))
if cache is not None:
cache.add_page_failure(url, level)
@staticmethod
def _get_content_type(url, session=None):
"""Get the Content-Type of the given url, using a HEAD request"""
if session is None:
session = PipSession()
scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
if not scheme in ('http', 'https', 'ftp', 'ftps'):
## FIXME: some warning or something?
## assertion error?
return ''
resp = session.head(url, allow_redirects=True)
resp.raise_for_status()
return resp.headers.get("Content-Type", "")
@property
def api_version(self):
if not hasattr(self, "_api_version"):
_api_version = None
metas = [x for x in self.parsed.findall(".//meta")
if x.get("name", "").lower() == "api-version"]
if metas:
try:
_api_version = int(metas[0].get("value", None))
except (TypeError, ValueError):
_api_version = None
self._api_version = _api_version
return self._api_version
@property
def base_url(self):
if not hasattr(self, "_base_url"):
base = self.parsed.find(".//base")
if base is not None and base.get("href"):
self._base_url = base.get("href")
else:
self._base_url = self.url
return self._base_url
@property
def links(self):
"""Yields all links in the page"""
for anchor in self.parsed.findall(".//a"):
if anchor.get("href"):
href = anchor.get("href")
url = self.clean_link(urlparse.urljoin(self.base_url, href))
# Determine if this link is internal. If that distinction
# doesn't make sense in this context, then we don't make
# any distinction.
internal = None
if self.api_version and self.api_version >= 2:
# Only api_versions >= 2 have a distinction between
# external and internal links
internal = bool(anchor.get("rel")
and "internal" in anchor.get("rel").split())
yield Link(url, self, internal=internal)
def rel_links(self):
for url in self.explicit_rel_links():
yield url
for url in self.scraped_rel_links():
yield url
def explicit_rel_links(self, rels=('homepage', 'download')):
"""Yields all links with the given relations"""
rels = set(rels)
for anchor in self.parsed.findall(".//a"):
if anchor.get("rel") and anchor.get("href"):
found_rels = set(anchor.get("rel").split())
# Determine the intersection between what rels were found and
# what rels were being looked for
if found_rels & rels:
href = anchor.get("href")
url = self.clean_link(urlparse.urljoin(self.base_url, href))
yield Link(url, self, trusted=False)
def scraped_rel_links(self):
# Can we get rid of this horrible horrible method?
for regex in (self._homepage_re, self._download_re):
match = regex.search(self.content)
if not match:
continue
href_match = self._href_re.search(self.content, pos=match.end())
if not href_match:
continue
url = href_match.group(1) or href_match.group(2) or href_match.group(3)
if not url:
continue
url = self.clean_link(urlparse.urljoin(self.base_url, url))
yield Link(url, self, trusted=False, _deprecated_regex=True)
_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
def clean_link(self, url):
"""Makes sure a link is fully encoded. That is, if a ' ' shows up in
the link, it will be rewritten to %20 (while not over-quoting
% or other characters)."""
return self._clean_re.sub(
lambda match: '%%%2x' % ord(match.group(0)), url)
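# Editor's example (page being any HTMLPage instance):
# page.clean_link('http://host/My Package') yields 'http://host/My%20Package'.
# Only characters outside the allowed set are rewritten, so existing '%'
# escapes are left untouched.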
class Link(object):
def __init__(self, url, comes_from=None, internal=None, trusted=None,
_deprecated_regex=False):
self.url = url
self.comes_from = comes_from
self.internal = internal
self.trusted = trusted
self._deprecated_regex = _deprecated_regex
def __str__(self):
if self.comes_from:
return '%s (from %s)' % (self.url, self.comes_from)
else:
return str(self.url)
def __repr__(self):
return '<Link %s>' % self
def __eq__(self, other):
return self.url == other.url
def __ne__(self, other):
return self.url != other.url
def __lt__(self, other):
return self.url < other.url
def __le__(self, other):
return self.url <= other.url
def __gt__(self, other):
return self.url > other.url
def __ge__(self, other):
return self.url >= other.url
def __hash__(self):
return hash(self.url)
@property
def filename(self):
_, netloc, path, _, _ = urlparse.urlsplit(self.url)
name = posixpath.basename(path.rstrip('/')) or netloc
assert name, ('URL %r produced no filename' % self.url)
return name
@property
def scheme(self):
return urlparse.urlsplit(self.url)[0]
@property
def path(self):
return urlparse.urlsplit(self.url)[2]
def splitext(self):
return splitext(posixpath.basename(self.path.rstrip('/')))
@property
def ext(self):
return self.splitext()[1]
@property
def url_without_fragment(self):
scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url)
return urlparse.urlunsplit((scheme, netloc, path, query, None))
_egg_fragment_re = re.compile(r'#egg=([^&]*)')
@property
def egg_fragment(self):
match = self._egg_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)')
@property
def hash(self):
match = self._hash_re.search(self.url)
if match:
return match.group(2)
return None
@property
def hash_name(self):
match = self._hash_re.search(self.url)
if match:
return match.group(1)
return None
@property
def show_url(self):
return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
@property
def verifiable(self):
"""
Returns True if this link can be verified after download, False if it
cannot, and None if we cannot determine.
"""
trusted = self.trusted or getattr(self.comes_from, "trusted", None)
if trusted is not None and trusted:
# This link came from a trusted source. It *may* be verifiable but
# first we need to see if this page is operating under the new
# API version.
try:
api_version = getattr(self.comes_from, "api_version", None)
api_version = int(api_version)
except (ValueError, TypeError):
api_version = None
if api_version is None or api_version <= 1:
# This link is either trusted, or it came from a trusted source;
# however, it is not operating under API version 2, so we can't
# make any claims about whether it's safe or not
return
if self.hash:
# This link came from a trusted source and it has a hash, so we
# can consider it safe.
return True
else:
# This link came from a trusted source, using the new API
# version, and it does not have a hash. It is NOT verifiable
return False
elif trusted is not None:
# This link came from an untrusted source and we cannot trust it
return False
# An object to represent the "link" for the installed version of a requirement.
# Using Inf as the url makes it sort higher.
INSTALLED_VERSION = Link(Inf)
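# Editor's illustrative checks (hypothetical URL): fragments of the form
# #<hashname>=<hexdigest> are picked apart by the Link properties above.
_example_link = Link('http://example.com/Foo-1.0.tar.gz#md5=abc123')
assert _example_link.filename == 'Foo-1.0.tar.gz'
assert _example_link.hash_name == 'md5'
assert _example_link.hash == 'abc123'
del _example_link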
def get_requirement_from_url(url):
"""Get a requirement from the URL, if possible. This looks for #egg
in the URL"""
link = Link(url)
egg_info = link.egg_fragment
if not egg_info:
egg_info = splitext(link.filename)[0]
return package_to_requirement(egg_info)
def package_to_requirement(package_name):
"""Translate a name like Foo-1.2 to Foo==1.3"""
match = re.search(r'^(.*?)-(dev|\d.*)', package_name)
if match:
name = match.group(1)
version = match.group(2)
else:
name = package_name
version = ''
if version:
return '%s==%s' % (name, version)
else:
return name
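# Editor's illustrative checks: the name/version split happens at the first
# dash that is followed by a digit (or by "dev").
assert package_to_requirement('Foo-1.2') == 'Foo==1.2'
assert package_to_requirement('Foo') == 'Foo'
assert get_requirement_from_url('http://example.com/Foo-1.2.tar.gz') == 'Foo==1.2'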


@ -0,0 +1,172 @@
"""Locations where we look for configs, install stuff, etc"""
import sys
import site
import os
import tempfile
from distutils.command.install import install, SCHEME_KEYS
import getpass
from pip.backwardcompat import get_python_lib, get_path_uid, user_site
import pip.exceptions
DELETE_MARKER_MESSAGE = '''\
This file is placed here by pip to indicate the source was put
here by pip.
Once this package is successfully installed this source code will be
deleted (unless you remove this file).
'''
PIP_DELETE_MARKER_FILENAME = 'pip-delete-this-directory.txt'
def write_delete_marker_file(directory):
"""
Write the pip delete marker file into this directory.
"""
filepath = os.path.join(directory, PIP_DELETE_MARKER_FILENAME)
marker_fp = open(filepath, 'w')
marker_fp.write(DELETE_MARKER_MESSAGE)
marker_fp.close()
def running_under_virtualenv():
"""
Return True if we're running inside a virtualenv, False otherwise.
"""
if hasattr(sys, 'real_prefix'):
return True
elif sys.prefix != getattr(sys, "base_prefix", sys.prefix):
return True
return False
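# Editor's note: virtualenv sets sys.real_prefix when it creates an
# environment, while PEP 405 venvs make sys.base_prefix differ from
# sys.prefix; either signal means we are running inside a virtual environment.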
def virtualenv_no_global():
"""
Return True if in a venv and no system site packages.
"""
#this mirrors the logic in virtualenv.py for locating the no-global-site-packages.txt file
site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
no_global_file = os.path.join(site_mod_dir, 'no-global-site-packages.txt')
if running_under_virtualenv() and os.path.isfile(no_global_file):
return True
def __get_username():
""" Returns the effective username of the current process. """
if sys.platform == 'win32':
return getpass.getuser()
import pwd
return pwd.getpwuid(os.geteuid()).pw_name
def _get_build_prefix():
""" Returns a safe build_prefix """
path = os.path.join(tempfile.gettempdir(), 'pip_build_%s' %
__get_username())
if sys.platform == 'win32':
""" on windows(tested on 7) temp dirs are isolated """
return path
try:
os.mkdir(path)
write_delete_marker_file(path)
except OSError:
file_uid = None
try:
# raises OSError for symlinks
# https://github.com/pypa/pip/pull/935#discussion_r5307003
file_uid = get_path_uid(path)
except OSError:
file_uid = None
if file_uid != os.geteuid():
msg = "The temporary folder for building (%s) is either not owned by you, or is a symlink." \
% path
print(msg)
print("pip will not work until the temporary folder is " + \
"either deleted or is a real directory owned by your user account.")
raise pip.exceptions.InstallationError(msg)
return path
if running_under_virtualenv():
build_prefix = os.path.join(sys.prefix, 'build')
src_prefix = os.path.join(sys.prefix, 'src')
else:
# Note: intentionally NOT using mkdtemp
# See https://github.com/pypa/pip/issues/906 for plan to move to mkdtemp
build_prefix = _get_build_prefix()
## FIXME: keep src in cwd for now (it is not a temporary folder)
try:
src_prefix = os.path.join(os.getcwd(), 'src')
except OSError:
# In case the current working directory has been renamed or deleted
sys.exit("The folder you are executing pip from can no longer be found.")
# under Mac OS X + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
# Note: using realpath due to tmp dirs on OSX being symlinks
build_prefix = os.path.abspath(os.path.realpath(build_prefix))
src_prefix = os.path.abspath(src_prefix)
# FIXME doesn't account for venv linked to global site-packages
site_packages = get_python_lib()
user_dir = os.path.expanduser('~')
if sys.platform == 'win32':
bin_py = os.path.join(sys.prefix, 'Scripts')
bin_user = os.path.join(user_site, 'Scripts') if user_site else None
# buildout uses 'bin' on Windows too?
if not os.path.exists(bin_py):
bin_py = os.path.join(sys.prefix, 'bin')
bin_user = os.path.join(user_site, 'bin') if user_site else None
default_storage_dir = os.path.join(user_dir, 'pip')
default_config_file = os.path.join(default_storage_dir, 'pip.ini')
default_log_file = os.path.join(default_storage_dir, 'pip.log')
else:
bin_py = os.path.join(sys.prefix, 'bin')
bin_user = os.path.join(user_site, 'bin') if user_site else None
default_storage_dir = os.path.join(user_dir, '.pip')
default_config_file = os.path.join(default_storage_dir, 'pip.conf')
default_log_file = os.path.join(default_storage_dir, 'pip.log')
# Forcing to use /usr/local/bin for standard Mac OS X framework installs
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
bin_py = '/usr/local/bin'
default_log_file = os.path.join(user_dir, 'Library/Logs/pip.log')
def distutils_scheme(dist_name, user=False, home=None, root=None):
"""
Return a distutils install scheme
"""
from distutils.dist import Distribution
scheme = {}
d = Distribution({'name': dist_name})
d.parse_config_files()
i = d.get_command_obj('install', create=True)
# NOTE: setting user or home has the side-effect of creating the home dir or
# user base for installations during finalize_options()
# ideally, we'd prefer a scheme class that has no side-effects.
i.user = user or i.user
i.home = home or i.home
i.root = root or i.root
i.finalize_options()
for key in SCHEME_KEYS:
scheme[key] = getattr(i, 'install_'+key)
if running_under_virtualenv():
scheme['headers'] = os.path.join(sys.prefix,
'include',
'site',
'python' + sys.version[:3],
dist_name)
if root is not None:
scheme["headers"] = os.path.join(
root,
os.path.abspath(scheme["headers"])[1:],
)
return scheme
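# Editor's sketch: distutils_scheme('example') returns a dict keyed by
# SCHEME_KEYS ('purelib', 'platlib', 'headers', 'scripts', 'data'); for
# instance scheme['scripts'] is where console scripts would be installed.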


@ -0,0 +1,276 @@
"""Logging
"""
import sys
import os
import logging
from pip import backwardcompat
import colorama, pkg_resources
def _color_wrap(*colors):
def wrapped(inp):
return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
return wrapped
def should_color(consumer, environ, std=(sys.stdout, sys.stderr)):
real_consumer = (consumer if not isinstance(consumer, colorama.AnsiToWin32)
else consumer.wrapped)
# If consumer isn't stdout or stderr we shouldn't colorize it
if real_consumer not in std:
return False
# If consumer is a tty we should color it
if hasattr(real_consumer, "isatty") and real_consumer.isatty():
return True
# If we have an ANSI term we should color it
if environ.get("TERM") == "ANSI":
return True
# If anything else we should not color it
return False
def should_warn(current_version, removal_version):
# Our Significant digits on versions is 2, so remove everything but the
# first two places.
current_version = ".".join(current_version.split(".")[:2])
removal_version = ".".join(removal_version.split(".")[:2])
# Our warning threshold is one minor version before removal, so we
# decrement the minor version by one
major, minor = removal_version.split(".")
minor = str(int(minor) - 1)
warn_version = ".".join([major, minor])
# Test if our current_version should be a warn
return (pkg_resources.parse_version(current_version)
< pkg_resources.parse_version(warn_version))
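# Editor's illustrative checks: with removal_version '1.7' the warning
# threshold is '1.6', so a 1.5.x pip warns while 1.6.x escalates to error.
assert should_warn('1.5.6', '1.7')
assert not should_warn('1.6', '1.7')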
class Logger(object):
"""
Logging object for use in command-line script. Allows ranges of
levels, to avoid some redundancy of displayed information.
"""
VERBOSE_DEBUG = logging.DEBUG - 1
DEBUG = logging.DEBUG
INFO = logging.INFO
NOTIFY = (logging.INFO + logging.WARN) / 2
WARN = WARNING = logging.WARN
ERROR = logging.ERROR
FATAL = logging.FATAL
LEVELS = [VERBOSE_DEBUG, DEBUG, INFO, NOTIFY, WARN, ERROR, FATAL]
COLORS = {
WARN: _color_wrap(colorama.Fore.YELLOW),
ERROR: _color_wrap(colorama.Fore.RED),
FATAL: _color_wrap(colorama.Fore.RED),
}
def __init__(self):
self.consumers = []
self.indent = 0
self.explicit_levels = False
self.in_progress = None
self.in_progress_hanging = False
def add_consumers(self, *consumers):
if sys.platform.startswith("win"):
for level, consumer in consumers:
if hasattr(consumer, "write"):
self.consumers.append(
(level, colorama.AnsiToWin32(consumer)),
)
else:
self.consumers.append((level, consumer))
else:
self.consumers.extend(consumers)
def debug(self, msg, *args, **kw):
self.log(self.DEBUG, msg, *args, **kw)
def info(self, msg, *args, **kw):
self.log(self.INFO, msg, *args, **kw)
def notify(self, msg, *args, **kw):
self.log(self.NOTIFY, msg, *args, **kw)
def warn(self, msg, *args, **kw):
self.log(self.WARN, msg, *args, **kw)
def error(self, msg, *args, **kw):
self.log(self.ERROR, msg, *args, **kw)
def fatal(self, msg, *args, **kw):
self.log(self.FATAL, msg, *args, **kw)
def deprecated(self, removal_version, msg, *args, **kwargs):
"""
Logs deprecation message which is log level WARN if the
``removal_version`` is > 1 minor release away and log level ERROR
otherwise.
removal_version should be the version that the deprecated feature is
expected to be removed in, so something that will not exist in
version 1.7, but will in 1.6 would have a removal_version of 1.7.
"""
from pip import __version__
if should_warn(__version__, removal_version):
self.warn(msg, *args, **kwargs)
else:
self.error(msg, *args, **kwargs)
def log(self, level, msg, *args, **kw):
if args:
if kw:
raise TypeError(
"You may give positional or keyword arguments, not both")
args = args or kw
# render
if args:
rendered = msg % args
else:
rendered = msg
rendered = ' ' * self.indent + rendered
if self.explicit_levels:
## FIXME: should this be a name, not a level number?
rendered = '%02i %s' % (level, rendered)
for consumer_level, consumer in self.consumers:
if self.level_matches(level, consumer_level):
if (self.in_progress_hanging
and consumer in (sys.stdout, sys.stderr)):
self.in_progress_hanging = False
sys.stdout.write('\n')
sys.stdout.flush()
if hasattr(consumer, 'write'):
write_content = rendered + '\n'
if should_color(consumer, os.environ):
# We are printing to stdout or stderr and it supports
# colors so render our text colored
colorizer = self.COLORS.get(level, lambda x: x)
write_content = colorizer(write_content)
consumer.write(write_content)
if hasattr(consumer, 'flush'):
consumer.flush()
else:
consumer(rendered)
def _show_progress(self):
"""Should we display download progress?"""
return (self.stdout_level_matches(self.NOTIFY) and sys.stdout.isatty())
def start_progress(self, msg):
assert not self.in_progress, (
"Tried to start_progress(%r) while in_progress %r"
% (msg, self.in_progress))
if self._show_progress():
sys.stdout.write(' ' * self.indent + msg)
sys.stdout.flush()
self.in_progress_hanging = True
else:
self.in_progress_hanging = False
self.in_progress = msg
self.last_message = None
def end_progress(self, msg='done.'):
assert self.in_progress, (
"Tried to end_progress without start_progress")
if self._show_progress():
if not self.in_progress_hanging:
# Some message has been printed out since start_progress
sys.stdout.write('...' + self.in_progress + msg + '\n')
sys.stdout.flush()
else:
# These erase any messages shown with show_progress (besides .'s)
logger.show_progress('')
logger.show_progress('')
sys.stdout.write(msg + '\n')
sys.stdout.flush()
self.in_progress = None
self.in_progress_hanging = False
def show_progress(self, message=None):
"""If we are in a progress scope, and no log messages have been
shown, write out another '.'"""
if self.in_progress_hanging:
if message is None:
sys.stdout.write('.')
sys.stdout.flush()
else:
if self.last_message:
padding = ' ' * max(0, len(self.last_message) - len(message))
else:
padding = ''
sys.stdout.write('\r%s%s%s%s' %
(' ' * self.indent, self.in_progress, message, padding))
sys.stdout.flush()
self.last_message = message
def stdout_level_matches(self, level):
"""Returns true if a message at this level will go to stdout"""
return self.level_matches(level, self._stdout_level())
def _stdout_level(self):
"""Returns the level that stdout runs at"""
for level, consumer in self.consumers:
if consumer is sys.stdout:
return level
return self.FATAL
def level_matches(self, level, consumer_level):
"""
>>> l = Logger()
>>> l.level_matches(3, 4)
False
>>> l.level_matches(3, 2)
True
>>> l.level_matches(slice(None, 3), 3)
False
>>> l.level_matches(slice(None, 3), 2)
True
>>> l.level_matches(slice(1, 3), 1)
True
>>> l.level_matches(slice(2, 3), 1)
False
"""
if isinstance(level, slice):
start, stop = level.start, level.stop
if start is not None and start > consumer_level:
return False
if stop is not None and stop <= consumer_level:
return False
return True
else:
return level >= consumer_level
@classmethod
def level_for_integer(cls, level):
levels = cls.LEVELS
if level < 0:
return levels[0]
if level >= len(levels):
return levels[-1]
return levels[level]
def move_stdout_to_stderr(self):
to_remove = []
to_add = []
for consumer_level, consumer in self.consumers:
if consumer == sys.stdout:
to_remove.append((consumer_level, consumer))
to_add.append((consumer_level, sys.stderr))
for item in to_remove:
self.consumers.remove(item)
self.consumers.extend(to_add)
logger = Logger()


@ -0,0 +1,102 @@
"""Generate and work with PEP 425 Compatibility Tags."""
import sys
import warnings
try:
import sysconfig
except ImportError: # pragma nocover
# Python < 2.7
import distutils.sysconfig as sysconfig
import distutils.util
def get_abbr_impl():
"""Return abbreviated implementation name."""
if hasattr(sys, 'pypy_version_info'):
pyimpl = 'pp'
elif sys.platform.startswith('java'):
pyimpl = 'jy'
elif sys.platform == 'cli':
pyimpl = 'ip'
else:
pyimpl = 'cp'
return pyimpl
def get_impl_ver():
"""Return implementation version."""
return ''.join(map(str, sys.version_info[:2]))
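# Editor's note: combining the two helpers, CPython 2.7 is abbreviated
# 'cp' + '27' == 'cp27', the implementation prefix used in wheel tags.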
def get_platform():
"""Return our platform name 'win32', 'linux_x86_64'"""
# XXX remove distutils dependency
return distutils.util.get_platform().replace('.', '_').replace('-', '_')
def get_supported(versions=None, noarch=False):
"""Return a list of supported tags for each version specified in
`versions`.
:param versions: a list of string versions, of the form ["33", "32"],
or None. The first version will be assumed to support our ABI.
"""
supported = []
# Versions must be given with respect to the preference
if versions is None:
versions = []
major = sys.version_info[0]
# Support all previous minor Python versions.
for minor in range(sys.version_info[1], -1, -1):
versions.append(''.join(map(str, (major, minor))))
impl = get_abbr_impl()
abis = []
try:
soabi = sysconfig.get_config_var('SOABI')
except IOError as e: # Issue #1074
warnings.warn("{0}".format(e), RuntimeWarning)
soabi = None
if soabi and soabi.startswith('cpython-'):
abis[0:0] = ['cp' + soabi.split('-', 1)[-1]]
abi3s = set()
import imp
for suffix in imp.get_suffixes():
if suffix[0].startswith('.abi'):
abi3s.add(suffix[0].split('.', 2)[1])
abis.extend(sorted(list(abi3s)))
abis.append('none')
if not noarch:
arch = get_platform()
# Current version, current API (built specifically for our Python):
for abi in abis:
supported.append(('%s%s' % (impl, versions[0]), abi, arch))
# No abi / arch, but requires our implementation:
for i, version in enumerate(versions):
supported.append(('%s%s' % (impl, version), 'none', 'any'))
if i == 0:
# Tagged specifically as being cross-version compatible
# (with just the major version specified)
supported.append(('%s%s' % (impl, versions[0][0]), 'none', 'any'))
# No abi / arch, generic Python
for i, version in enumerate(versions):
supported.append(('py%s' % (version,), 'none', 'any'))
if i == 0:
supported.append(('py%s' % (version[0]), 'none', 'any'))
return supported
supported_tags = get_supported()
supported_tags_noarch = get_supported(noarch=True)
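# Editor's sketch (exact values vary by build): on CPython 3.3 for
# linux_x86_64 the list starts near ('cp33', 'cp33m', 'linux_x86_64') and
# falls back through ('cp33', 'none', 'any') toward generic entries such as
# ('py33', 'none', 'any') and ('py3', 'none', 'any').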

File diff suppressed because it is too large


@ -0,0 +1,18 @@
import sys
import os
def run():
base = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
## FIXME: this is kind of crude; if we could create a fake pip
## module, then exec into it and update pip.__path__ properly, we
## wouldn't have to update sys.path:
sys.path.insert(0, base)
import pip
return pip.main()
if __name__ == '__main__':
exit = run()
if exit:
sys.exit(exit)


@ -0,0 +1,6 @@
SUCCESS = 0
ERROR = 1
UNKNOWN_ERROR = 2
VIRTUALENV_NOT_FOUND = 3
PREVIOUS_BUILD_DIR_ERROR = 4
NO_MATCHES_FOUND = 23


@ -0,0 +1,771 @@
import sys
import shutil
import os
import stat
import re
import posixpath
import zipfile
import tarfile
import subprocess
import textwrap
import tempfile
from pip.exceptions import InstallationError, BadCommand, PipError
from pip.backwardcompat import (WindowsError, string_types, raw_input,
console_to_str, user_site, PermissionError)
from pip.locations import (site_packages, running_under_virtualenv, virtualenv_no_global,
distutils_scheme)
from pip.log import logger
import pkg_resources
from distlib import version
__all__ = ['rmtree', 'display_path', 'backup_dir',
'find_command', 'ask', 'Inf',
'normalize_name', 'splitext',
'format_size', 'is_installable_dir',
'is_svn_page', 'file_contents',
'split_leading_dir', 'has_leading_dir',
'make_path_relative', 'normalize_path',
'renames', 'get_terminal_size', 'get_prog',
'unzip_file', 'untar_file', 'create_download_cache_folder',
'cache_download', 'unpack_file', 'call_subprocess']
def get_prog():
try:
if os.path.basename(sys.argv[0]) in ('__main__.py', '-c'):
return "%s -m pip" % sys.executable
except (AttributeError, TypeError, IndexError):
pass
return 'pip'
def rmtree(dir, ignore_errors=False):
shutil.rmtree(dir, ignore_errors=ignore_errors,
onerror=rmtree_errorhandler)
def rmtree_errorhandler(func, path, exc_info):
"""On Windows, the files in .svn are read-only, so when rmtree() tries to
remove them, an exception is thrown. We catch that here, remove the
read-only attribute, and hopefully continue without problems."""
exctype, value = exc_info[:2]
if not ((exctype is WindowsError and value.args[0] == 5) or #others
(exctype is OSError and value.args[0] == 13) or #python2.4
(exctype is PermissionError and value.args[3] == 5) #python3.3
):
raise
# file type should currently be read only
if ((os.stat(path).st_mode & stat.S_IREAD) != stat.S_IREAD):
raise
# convert to read/write
os.chmod(path, stat.S_IWRITE)
# use the original function to repeat the operation
func(path)
def display_path(path):
"""Gives the display value for a given path, making it relative to cwd
if possible."""
path = os.path.normcase(os.path.abspath(path))
if path.startswith(os.getcwd() + os.path.sep):
path = '.' + path[len(os.getcwd()):]
return path
def backup_dir(dir, ext='.bak'):
"""Figure out the name of a directory to back up the given dir to
(adding .bak, .bak2, etc)"""
n = 1
extension = ext
while os.path.exists(dir + extension):
n += 1
extension = ext + str(n)
return dir + extension
def find_command(cmd, paths=None, pathext=None):
"""Searches the PATH for the given command and returns its path"""
if paths is None:
paths = os.environ.get('PATH', '').split(os.pathsep)
if isinstance(paths, string_types):
paths = [paths]
# check if there are funny path extensions for executables, e.g. Windows
if pathext is None:
pathext = get_pathext()
pathext = [ext for ext in pathext.lower().split(os.pathsep) if len(ext)]
# don't use extensions if the command ends with one of them
if os.path.splitext(cmd)[1].lower() in pathext:
pathext = ['']
# check if we find the command on PATH
for path in paths:
# try without extension first
cmd_path = os.path.join(path, cmd)
for ext in pathext:
# then including the extension
cmd_path_ext = cmd_path + ext
if os.path.isfile(cmd_path_ext):
return cmd_path_ext
if os.path.isfile(cmd_path):
return cmd_path
raise BadCommand('Cannot find command %r' % cmd)
def get_pathext(default_pathext=None):
"""Returns the path extensions from environment or a default"""
if default_pathext is None:
default_pathext = os.pathsep.join(['.COM', '.EXE', '.BAT', '.CMD'])
pathext = os.environ.get('PATHEXT', default_pathext)
return pathext
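# Illustrative (not part of the original file):
#   find_command('git')     -> e.g. '/usr/bin/git' on a typical Linux box
#   find_command('git.exe') -> the extension list is skipped and the
#                              literal name is matched instead
# A command that cannot be found raises BadCommand.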
def ask_path_exists(message, options):
for action in os.environ.get('PIP_EXISTS_ACTION', '').split():
if action in options:
return action
return ask(message, options)
def ask(message, options):
"""Ask the message interactively, with the given possible responses"""
while 1:
if os.environ.get('PIP_NO_INPUT'):
raise Exception('No input was expected ($PIP_NO_INPUT set); question: %s' % message)
response = raw_input(message)
response = response.strip().lower()
if response not in options:
print('Your response (%r) was not one of the expected responses: %s' % (
response, ', '.join(options)))
else:
return response
class _Inf(object):
"""I am bigger than everything!"""
def __eq__(self, other):
if self is other:
return True
else:
return False
def __ne__(self, other):
return not self.__eq__(other)
def __lt__(self, other):
return False
def __le__(self, other):
return False
def __gt__(self, other):
return True
def __ge__(self, other):
return True
def __repr__(self):
return 'Inf'
Inf = _Inf()  # this object is not currently used as a sortable in our code
del _Inf
_normalize_re = re.compile(r'[^a-z]', re.I)
def normalize_name(name):
return _normalize_re.sub('-', name.lower())
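# Illustrative: every character outside a-z/A-Z becomes '-', e.g.
#   normalize_name('Zope.Interface') -> 'zope-interface'
#   normalize_name('Twisted_Web')    -> 'twisted-web'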
def format_size(bytes):
if bytes > 1000*1000:
return '%.1fMB' % (bytes/1000.0/1000)
elif bytes > 10*1000:
return '%ikB' % (bytes/1000)
elif bytes > 1000:
return '%.1fkB' % (bytes/1000.0)
else:
return '%ibytes' % bytes
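# Illustrative: format_size(2500000) -> '2.5MB', format_size(15000) -> '15kB',
# format_size(1500) -> '1.5kB', format_size(500) -> '500bytes'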
def is_installable_dir(path):
"""Return True if `path` is a directory containing a setup.py file."""
if not os.path.isdir(path):
return False
setup_py = os.path.join(path, 'setup.py')
if os.path.isfile(setup_py):
return True
return False
def is_svn_page(html):
"""Returns true if the page appears to be the index page of an svn repository"""
return (re.search(r'<title>[^<]*Revision \d+:', html)
and re.search(r'Powered by (?:<a[^>]*?>)?Subversion', html, re.I))
def file_contents(filename):
fp = open(filename, 'rb')
try:
return fp.read().decode('utf-8')
finally:
fp.close()
def split_leading_dir(path):
path = str(path)
path = path.lstrip('/').lstrip('\\')
if '/' in path and (('\\' in path and path.find('/') < path.find('\\'))
or '\\' not in path):
return path.split('/', 1)
elif '\\' in path:
return path.split('\\', 1)
else:
return path, ''
def has_leading_dir(paths):
"""Returns true if all the paths have the same leading path name
(i.e., everything is in one subdirectory in an archive)"""
common_prefix = None
for path in paths:
prefix, rest = split_leading_dir(path)
if not prefix:
return False
elif common_prefix is None:
common_prefix = prefix
elif prefix != common_prefix:
return False
return True
def make_path_relative(path, rel_to):
"""
Make a filename relative, where the filename path, and it is
relative to rel_to
>>> make_relative_path('/usr/share/something/a-file.pth',
... '/usr/share/another-place/src/Directory')
'../../../something/a-file.pth'
>>> make_relative_path('/usr/share/something/a-file.pth',
... '/home/user/src/Directory')
'../../../usr/share/something/a-file.pth'
>>> make_relative_path('/usr/share/a-file.pth', '/usr/share/')
'a-file.pth'
"""
path_filename = os.path.basename(path)
path = os.path.dirname(path)
path = os.path.normpath(os.path.abspath(path))
rel_to = os.path.normpath(os.path.abspath(rel_to))
path_parts = path.strip(os.path.sep).split(os.path.sep)
rel_to_parts = rel_to.strip(os.path.sep).split(os.path.sep)
while path_parts and rel_to_parts and path_parts[0] == rel_to_parts[0]:
path_parts.pop(0)
rel_to_parts.pop(0)
full_parts = ['..']*len(rel_to_parts) + path_parts + [path_filename]
if full_parts == ['']:
return '.' + os.path.sep
return os.path.sep.join(full_parts)
def normalize_path(path):
"""
Convert a path to its canonical, case-normalized, absolute version.
"""
return os.path.normcase(os.path.realpath(os.path.expanduser(path)))
def splitext(path):
"""Like os.path.splitext, but take off .tar too"""
base, ext = posixpath.splitext(path)
if base.lower().endswith('.tar'):
ext = base[-4:] + ext
base = base[:-4]
return base, ext
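# Illustrative: splitext('pkg-1.0.tar.gz') -> ('pkg-1.0', '.tar.gz'),
# whereas os.path.splitext would give ('pkg-1.0.tar', '.gz').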
def renames(old, new):
"""Like os.renames(), but handles renaming across devices."""
# Implementation borrowed from os.renames().
head, tail = os.path.split(new)
if head and tail and not os.path.exists(head):
os.makedirs(head)
shutil.move(old, new)
head, tail = os.path.split(old)
if head and tail:
try:
os.removedirs(head)
except OSError:
pass
def is_local(path):
"""
Return True if this is a path pip is allowed to modify.
If we're in a virtualenv, sys.prefix points to the virtualenv's
prefix; only sys.prefix is considered local.
If we're not in a virtualenv, in general we can modify anything.
However, if the OS vendor has configured distutils to install
somewhere other than sys.prefix (which could be a subdirectory of
sys.prefix, e.g. /usr/local), we consider sys.prefix itself nonlocal
and the domain of the OS vendor. (In other words, everything _other
than_ sys.prefix is considered local.)
"""
path = normalize_path(path)
prefix = normalize_path(sys.prefix)
if running_under_virtualenv():
return path.startswith(normalize_path(sys.prefix))
else:
if path.startswith(prefix):
for local_path in distutils_scheme("").values():
if path.startswith(normalize_path(local_path)):
return True
return False
else:
return True
def dist_is_local(dist):
"""
Return True if given Distribution object is installed somewhere pip
is allowed to modify.
"""
return is_local(dist_location(dist))
def dist_in_usersite(dist):
"""
Return True if given Distribution is installed in user site.
"""
if user_site:
return normalize_path(dist_location(dist)).startswith(normalize_path(user_site))
else:
return False
def dist_in_site_packages(dist):
"""
Return True if given Distribution is installed in distutils.sysconfig.get_python_lib().
"""
return normalize_path(dist_location(dist)).startswith(normalize_path(site_packages))
def dist_is_editable(dist):
"""Is distribution an editable install?"""
    #TODO: factor out determining editableness from FrozenRequirement
from pip import FrozenRequirement
req = FrozenRequirement.from_dist(dist, [])
return req.editable
def get_installed_distributions(local_only=True,
skip=('setuptools', 'pip', 'python', 'distribute'),
include_editables=True,
editables_only=False):
"""
Return a list of installed Distribution objects.
If ``local_only`` is True (default), only return installations
local to the current virtualenv, if in a virtualenv.
``skip`` argument is an iterable of lower-case project names to
ignore; defaults to ('setuptools', 'pip', 'python'). [FIXME also
skip virtualenv?]
If ``editables`` is False, don't report editables.
If ``editables_only`` is True , only report editables.
"""
if local_only:
local_test = dist_is_local
else:
local_test = lambda d: True
if include_editables:
editable_test = lambda d: True
else:
editable_test = lambda d: not dist_is_editable(d)
if editables_only:
editables_only_test = lambda d: dist_is_editable(d)
else:
editables_only_test = lambda d: True
return [d for d in pkg_resources.working_set
if local_test(d)
and d.key not in skip
and editable_test(d)
and editables_only_test(d)
]
def egg_link_path(dist):
"""
Return the path for the .egg-link file if it exists, otherwise, None.
    There are three scenarios:
1) not in a virtualenv
try to find in site.USER_SITE, then site_packages
2) in a no-global virtualenv
try to find in site_packages
3) in a yes-global virtualenv
try to find in site_packages, then site.USER_SITE (don't look in global location)
For #1 and #3, there could be odd cases, where there's an egg-link in 2 locations.
This method will just return the first one found.
"""
sites = []
if running_under_virtualenv():
if virtualenv_no_global():
sites.append(site_packages)
else:
sites.append(site_packages)
if user_site:
sites.append(user_site)
else:
if user_site:
sites.append(user_site)
sites.append(site_packages)
for site in sites:
egglink = os.path.join(site, dist.project_name) + '.egg-link'
if os.path.isfile(egglink):
return egglink
def dist_location(dist):
"""
Get the site-packages location of this distribution. Generally
this is dist.location, except in the case of develop-installed
packages, where dist.location is the source code location, and we
want to know where the egg-link file is.
"""
egg_link = egg_link_path(dist)
if egg_link:
return egg_link
return dist.location
def get_terminal_size():
"""Returns a tuple (x, y) representing the width(x) and the height(x)
in characters of the terminal window."""
def ioctl_GWINSZ(fd):
try:
import fcntl
import termios
import struct
cr = struct.unpack('hh', fcntl.ioctl(fd, termios.TIOCGWINSZ,
'1234'))
except:
return None
        if cr == (0, 0):
            return None
return cr
cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2)
if not cr:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cr = ioctl_GWINSZ(fd)
os.close(fd)
except:
pass
if not cr:
cr = (os.environ.get('LINES', 25), os.environ.get('COLUMNS', 80))
return int(cr[1]), int(cr[0])
def current_umask():
"""Get the current umask which involves having to set it temporarily."""
mask = os.umask(0)
os.umask(mask)
return mask
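# Illustrative: with a common umask of 0o022 the expression used by the
# unpack helpers below, 0o777 - current_umask() | 0o111, evaluates to
# 0o755 (rwxr-xr-x), since '-' binds tighter than '|'.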
def unzip_file(filename, location, flatten=True):
"""
Unzip the file (with path `filename`) to the destination `location`. All
files are written based on system defaults and umask (i.e. permissions are
not preserved), except that regular file members with any execute
permissions (user, group, or world) have "chmod +x" applied after being
written. Note that for windows, any execute changes using os.chmod are
no-ops per the python docs.
"""
if not os.path.exists(location):
os.makedirs(location)
zipfp = open(filename, 'rb')
try:
zip = zipfile.ZipFile(zipfp)
leading = has_leading_dir(zip.namelist()) and flatten
for info in zip.infolist():
name = info.filename
data = zip.read(name)
fn = name
if leading:
fn = split_leading_dir(name)[1]
fn = os.path.join(location, fn)
dir = os.path.dirname(fn)
if not os.path.exists(dir):
os.makedirs(dir)
if fn.endswith('/') or fn.endswith('\\'):
# A directory
if not os.path.exists(fn):
os.makedirs(fn)
else:
fp = open(fn, 'wb')
try:
fp.write(data)
finally:
fp.close()
mode = info.external_attr >> 16
# if mode and regular file and any execute permissions for user/group/world?
if mode and stat.S_ISREG(mode) and mode & 0o111:
# make dest file have execute for user/group/world (chmod +x)
# no-op on windows per python docs
os.chmod(fn, (0o777-current_umask() | 0o111))
finally:
zipfp.close()
def untar_file(filename, location):
"""
Untar the file (with path `filename`) to the destination `location`.
All files are written based on system defaults and umask (i.e. permissions
are not preserved), except that regular file members with any execute
permissions (user, group, or world) have "chmod +x" applied after being
written. Note that for windows, any execute changes using os.chmod are
no-ops per the python docs.
"""
if not os.path.exists(location):
os.makedirs(location)
if filename.lower().endswith('.gz') or filename.lower().endswith('.tgz'):
mode = 'r:gz'
elif filename.lower().endswith('.bz2') or filename.lower().endswith('.tbz'):
mode = 'r:bz2'
elif filename.lower().endswith('.tar'):
mode = 'r'
else:
logger.warn('Cannot determine compression type for file %s' % filename)
mode = 'r:*'
tar = tarfile.open(filename, mode)
try:
        # note: python<=2.5 doesn't seem to know about pax headers, filter them
leading = has_leading_dir([
member.name for member in tar.getmembers()
if member.name != 'pax_global_header'
])
for member in tar.getmembers():
fn = member.name
if fn == 'pax_global_header':
continue
if leading:
fn = split_leading_dir(fn)[1]
path = os.path.join(location, fn)
if member.isdir():
if not os.path.exists(path):
os.makedirs(path)
elif member.issym():
try:
tar._extract_member(member, path)
except:
e = sys.exc_info()[1]
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warn(
'In the tar file %s the member %s is invalid: %s'
% (filename, member.name, e))
continue
else:
try:
fp = tar.extractfile(member)
except (KeyError, AttributeError):
e = sys.exc_info()[1]
# Some corrupt tar files seem to produce this
# (specifically bad symlinks)
logger.warn(
'In the tar file %s the member %s is invalid: %s'
% (filename, member.name, e))
continue
if not os.path.exists(os.path.dirname(path)):
os.makedirs(os.path.dirname(path))
destfp = open(path, 'wb')
try:
shutil.copyfileobj(fp, destfp)
finally:
destfp.close()
fp.close()
# member have any execute permissions for user/group/world?
if member.mode & 0o111:
# make dest file have execute for user/group/world
# no-op on windows per python docs
os.chmod(path, (0o777-current_umask() | 0o111))
finally:
tar.close()
def create_download_cache_folder(folder):
logger.indent -= 2
logger.notify('Creating supposed download cache at %s' % folder)
logger.indent += 2
os.makedirs(folder)
def cache_download(target_file, temp_location, content_type):
logger.notify('Storing download in cache at %s' % display_path(target_file))
shutil.copyfile(temp_location, target_file)
fp = open(target_file+'.content-type', 'w')
fp.write(content_type)
fp.close()
def unpack_file(filename, location, content_type, link):
filename = os.path.realpath(filename)
if (content_type == 'application/zip'
or filename.endswith('.zip')
or filename.endswith('.pybundle')
or filename.endswith('.whl')
or zipfile.is_zipfile(filename)):
unzip_file(filename, location, flatten=not filename.endswith(('.pybundle', '.whl')))
elif (content_type == 'application/x-gzip'
or tarfile.is_tarfile(filename)
or splitext(filename)[1].lower() in ('.tar', '.tar.gz', '.tar.bz2', '.tgz', '.tbz')):
untar_file(filename, location)
elif (content_type and content_type.startswith('text/html')
and is_svn_page(file_contents(filename))):
# We don't really care about this
from pip.vcs.subversion import Subversion
Subversion('svn+' + link.url).unpack(location)
else:
## FIXME: handle?
## FIXME: magic signatures?
logger.fatal('Cannot unpack file %s (downloaded from %s, content-type: %s); cannot detect archive format'
% (filename, location, content_type))
raise InstallationError('Cannot determine archive format of %s' % location)
def call_subprocess(cmd, show_stdout=True,
filter_stdout=None, cwd=None,
raise_on_returncode=True,
command_level=logger.DEBUG, command_desc=None,
extra_environ=None):
if command_desc is None:
cmd_parts = []
for part in cmd:
if ' ' in part or '\n' in part or '"' in part or "'" in part:
part = '"%s"' % part.replace('"', '\\"')
cmd_parts.append(part)
command_desc = ' '.join(cmd_parts)
if show_stdout:
stdout = None
else:
stdout = subprocess.PIPE
logger.log(command_level, "Running command %s" % command_desc)
env = os.environ.copy()
if extra_environ:
env.update(extra_environ)
try:
proc = subprocess.Popen(
cmd, stderr=subprocess.STDOUT, stdin=None, stdout=stdout,
cwd=cwd, env=env)
except Exception:
e = sys.exc_info()[1]
logger.fatal(
"Error %s while executing command %s" % (e, command_desc))
raise
all_output = []
if stdout is not None:
stdout = proc.stdout
while 1:
line = console_to_str(stdout.readline())
if not line:
break
line = line.rstrip()
all_output.append(line + '\n')
if filter_stdout:
level = filter_stdout(line)
if isinstance(level, tuple):
level, line = level
logger.log(level, line)
if not logger.stdout_level_matches(level):
logger.show_progress()
else:
logger.info(line)
else:
returned_stdout, returned_stderr = proc.communicate()
all_output = [returned_stdout or '']
proc.wait()
if proc.returncode:
if raise_on_returncode:
if all_output:
logger.notify('Complete output from command %s:' % command_desc)
logger.notify('\n'.join(all_output) + '\n----------------------------------------')
raise InstallationError(
"Command %s failed with error code %s in %s"
% (command_desc, proc.returncode, cwd))
else:
logger.warn(
"Command %s had error code %s in %s"
% (command_desc, proc.returncode, cwd))
if stdout is not None:
return ''.join(all_output)
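# Hedged usage sketch (not part of the original file): with
# show_stdout=False the combined output is captured and returned, so a
# caller can do e.g.
#   sha = call_subprocess(['git', 'rev-parse', 'HEAD'],
#                         show_stdout=False, cwd=repo_dir).strip()
# With show_stdout=True the output streams through and None is returned.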
def is_prerelease(vers):
"""
Attempt to determine if this is a pre-release using PEP386/PEP426 rules.
Will return True if it is a pre-release and False if not. Versions are
assumed to be a pre-release if they cannot be parsed.
"""
normalized = version._suggest_normalized_version(vers)
if normalized is None:
# Cannot normalize, assume it is a pre-release
return True
parsed = version._normalized_key(normalized)
return any([any([y in set(["a", "b", "c", "rc", "dev"]) for y in x]) for x in parsed])
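# Roughly (per the PEP 386 helpers above): is_prerelease('1.0') -> False,
# is_prerelease('1.0b1') -> True, and anything unparseable -> True.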
class BuildDirectory(object):
def __init__(self, name=None, delete=None):
# If we were not given an explicit directory, and we were not given an
# explicit delete option, then we'll default to deleting.
if name is None and delete is None:
delete = True
if name is None:
name = tempfile.mkdtemp(prefix="pip-build-")
        # If delete still wasn't decided by the explicit arguments above,
        # default to deleting.
        if delete is None:
            delete = True
self.name = name
self.delete = delete
def __repr__(self):
return "<{} {!r}>".format(self.__class__.__name__, self.name)
def __enter__(self):
return self.name
def __exit__(self, exc, value, tb):
self.cleanup()
def cleanup(self):
if self.delete:
rmtree(self.name)

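BuildDirectory is a small context manager: with no arguments it creates a pip-build-* temporary directory and removes it on exit. A usage sketch (illustrative, not from this commit):

from pip.util import BuildDirectory

with BuildDirectory() as build_dir:
    # build_dir is a str path such as '/tmp/pip-build-XXXXXX'; everything
    # written under it is rmtree'd when the block exits.
    print('building in', build_dir)
# Pass delete=False to keep the directory for inspection.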

@ -0,0 +1,251 @@
"""Handles all VCS (version control) support"""
import os
import shutil
from pip.backwardcompat import urlparse, urllib
from pip.log import logger
from pip.util import (display_path, backup_dir, find_command,
rmtree, ask_path_exists)
__all__ = ['vcs', 'get_src_requirement']
class VcsSupport(object):
_registry = {}
schemes = ['ssh', 'git', 'hg', 'bzr', 'sftp', 'svn']
def __init__(self):
# Register more schemes with urlparse for various version control systems
urlparse.uses_netloc.extend(self.schemes)
# Python >= 2.7.4, 3.3 doesn't have uses_fragment
if getattr(urlparse, 'uses_fragment', None):
urlparse.uses_fragment.extend(self.schemes)
super(VcsSupport, self).__init__()
def __iter__(self):
return self._registry.__iter__()
@property
def backends(self):
return list(self._registry.values())
@property
def dirnames(self):
return [backend.dirname for backend in self.backends]
@property
def all_schemes(self):
schemes = []
for backend in self.backends:
schemes.extend(backend.schemes)
return schemes
def register(self, cls):
if not hasattr(cls, 'name'):
logger.warn('Cannot register VCS %s' % cls.__name__)
return
if cls.name not in self._registry:
self._registry[cls.name] = cls
def unregister(self, cls=None, name=None):
if name in self._registry:
del self._registry[name]
elif cls in self._registry.values():
del self._registry[cls.name]
else:
logger.warn('Cannot unregister because no class or name given')
def get_backend_name(self, location):
"""
Return the name of the version control backend if found at given
location, e.g. vcs.get_backend_name('/path/to/vcs/checkout')
"""
for vc_type in self._registry.values():
path = os.path.join(location, vc_type.dirname)
if os.path.exists(path):
return vc_type.name
return None
def get_backend(self, name):
name = name.lower()
if name in self._registry:
return self._registry[name]
def get_backend_from_location(self, location):
vc_type = self.get_backend_name(location)
if vc_type:
return self.get_backend(vc_type)
return None
vcs = VcsSupport()
class VersionControl(object):
name = ''
dirname = ''
def __init__(self, url=None, *args, **kwargs):
self.url = url
self._cmd = None
super(VersionControl, self).__init__(*args, **kwargs)
def _filter(self, line):
return (logger.INFO, line)
def _is_local_repository(self, repo):
"""
posix absolute paths start with os.path.sep,
        win32 ones start with a drive letter (like c:\\folder)
"""
drive, tail = os.path.splitdrive(repo)
return repo.startswith(os.path.sep) or drive
@property
def cmd(self):
if self._cmd is not None:
return self._cmd
command = find_command(self.name)
logger.info('Found command %r at %r' % (self.name, command))
self._cmd = command
return command
def get_url_rev(self):
"""
Returns the correct repository URL and revision by parsing the given
repository URL
"""
error_message = (
"Sorry, '%s' is a malformed VCS url. "
"The format is <vcs>+<protocol>://<url>, "
"e.g. svn+http://myrepo/svn/MyApp#egg=MyApp")
assert '+' in self.url, error_message % self.url
url = self.url.split('+', 1)[1]
scheme, netloc, path, query, frag = urlparse.urlsplit(url)
rev = None
if '@' in path:
path, rev = path.rsplit('@', 1)
url = urlparse.urlunsplit((scheme, netloc, path, query, ''))
return url, rev
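    # Illustrative: for self.url = 'git+https://host/repo.git@v1.0' this
    # returns ('https://host/repo.git', 'v1.0'); without an '@rev' suffix
    # the revision comes back as None.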
def get_info(self, location):
"""
Returns (url, revision), where both are strings
"""
assert not location.rstrip('/').endswith(self.dirname), 'Bad directory: %s' % location
return self.get_url(location), self.get_revision(location)
def normalize_url(self, url):
"""
Normalize a URL for comparison by unquoting it and removing any trailing slash.
"""
return urllib.unquote(url).rstrip('/')
def compare_urls(self, url1, url2):
"""
Compare two repo URLs for identity, ignoring incidental differences.
"""
return (self.normalize_url(url1) == self.normalize_url(url2))
def parse_vcs_bundle_file(self, content):
"""
Takes the contents of the bundled text file that explains how to revert
the stripped off version control data of the given package and returns
the URL and revision of it.
"""
raise NotImplementedError
def obtain(self, dest):
"""
Called when installing or updating an editable package, takes the
source path of the checkout.
"""
raise NotImplementedError
def switch(self, dest, url, rev_options):
"""
Switch the repo at ``dest`` to point to ``URL``.
"""
        raise NotImplementedError
def update(self, dest, rev_options):
"""
Update an already-existing repo to the given ``rev_options``.
"""
raise NotImplementedError
def check_destination(self, dest, url, rev_options, rev_display):
"""
Prepare a location to receive a checkout/clone.
Return True if the location is ready for (and requires) a
checkout/clone, False otherwise.
"""
checkout = True
prompt = False
if os.path.exists(dest):
checkout = False
if os.path.exists(os.path.join(dest, self.dirname)):
existing_url = self.get_url(dest)
if self.compare_urls(existing_url, url):
logger.info('%s in %s exists, and has correct URL (%s)' %
(self.repo_name.title(), display_path(dest),
url))
logger.notify('Updating %s %s%s' %
(display_path(dest), self.repo_name,
rev_display))
self.update(dest, rev_options)
else:
logger.warn('%s %s in %s exists with URL %s' %
(self.name, self.repo_name,
display_path(dest), existing_url))
prompt = ('(s)witch, (i)gnore, (w)ipe, (b)ackup ',
('s', 'i', 'w', 'b'))
else:
logger.warn('Directory %s already exists, '
'and is not a %s %s.' %
(dest, self.name, self.repo_name))
prompt = ('(i)gnore, (w)ipe, (b)ackup ', ('i', 'w', 'b'))
if prompt:
logger.warn('The plan is to install the %s repository %s' %
(self.name, url))
response = ask_path_exists('What to do? %s' % prompt[0],
prompt[1])
if response == 's':
logger.notify('Switching %s %s to %s%s' %
(self.repo_name, display_path(dest), url,
rev_display))
self.switch(dest, url, rev_options)
elif response == 'i':
# do nothing
pass
elif response == 'w':
logger.warn('Deleting %s' % display_path(dest))
rmtree(dest)
checkout = True
elif response == 'b':
dest_dir = backup_dir(dest)
logger.warn('Backing up %s to %s'
% (display_path(dest), dest_dir))
shutil.move(dest, dest_dir)
checkout = True
return checkout
def unpack(self, location):
if os.path.exists(location):
rmtree(location)
self.obtain(location)
def get_src_requirement(self, dist, location, find_tags=False):
raise NotImplementedError
def get_src_requirement(dist, location, find_tags):
version_control = vcs.get_backend_from_location(location)
if version_control:
return version_control().get_src_requirement(dist, location, find_tags)
    logger.warn('cannot determine version of editable source in %s (it is not an SVN checkout, Git clone, Mercurial clone or Bazaar branch)' % location)
return dist.as_requirement()

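New backends hook in by subclassing VersionControl and calling vcs.register(), exactly as the concrete modules below do. A skeletal sketch (the Fossil backend here is purely illustrative, not part of this commit):

from pip.vcs import vcs, VersionControl

class Fossil(VersionControl):
    name = 'fossil'
    dirname = '.fslckout'
    repo_name = 'checkout'
    schemes = ('fossil', 'fossil+http')
    # A real backend would implement obtain(), switch(), update(),
    # get_url(), get_revision() and get_src_requirement().

vcs.register(Fossil)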

@ -0,0 +1,131 @@
import os
import tempfile
import re
from pip.backwardcompat import urlparse
from pip.log import logger
from pip.util import rmtree, display_path, call_subprocess
from pip.vcs import vcs, VersionControl
from pip.download import path_to_url
class Bazaar(VersionControl):
name = 'bzr'
dirname = '.bzr'
repo_name = 'branch'
bundle_file = 'bzr-branch.txt'
schemes = ('bzr', 'bzr+http', 'bzr+https', 'bzr+ssh', 'bzr+sftp', 'bzr+ftp', 'bzr+lp')
guide = ('# This was a Bazaar branch; to make it a branch again run:\n'
'bzr branch -r %(rev)s %(url)s .\n')
def __init__(self, url=None, *args, **kwargs):
super(Bazaar, self).__init__(url, *args, **kwargs)
# Python >= 2.7.4, 3.3 doesn't have uses_fragment or non_hierarchical
# Register lp but do not expose as a scheme to support bzr+lp.
if getattr(urlparse, 'uses_fragment', None):
urlparse.uses_fragment.extend(['lp'])
urlparse.non_hierarchical.extend(['lp'])
def parse_vcs_bundle_file(self, content):
url = rev = None
for line in content.splitlines():
if not line.strip() or line.strip().startswith('#'):
continue
match = re.search(r'^bzr\s*branch\s*-r\s*(\d*)', line)
if match:
rev = match.group(1).strip()
url = line[match.end():].strip().split(None, 1)[0]
if url and rev:
return url, rev
return None, None
def export(self, location):
"""Export the Bazaar repository at the url to the destination location"""
temp_dir = tempfile.mkdtemp('-export', 'pip-')
self.unpack(temp_dir)
if os.path.exists(location):
# Remove the location to make sure Bazaar can export it correctly
rmtree(location)
try:
call_subprocess([self.cmd, 'export', location], cwd=temp_dir,
filter_stdout=self._filter, show_stdout=False)
finally:
rmtree(temp_dir)
def switch(self, dest, url, rev_options):
call_subprocess([self.cmd, 'switch', url], cwd=dest)
def update(self, dest, rev_options):
call_subprocess(
[self.cmd, 'pull', '-q'] + rev_options, cwd=dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
if rev:
rev_options = ['-r', rev]
rev_display = ' (to revision %s)' % rev
else:
rev_options = []
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.notify('Checking out %s%s to %s'
% (url, rev_display, display_path(dest)))
call_subprocess(
[self.cmd, 'branch', '-q'] + rev_options + [url, dest])
def get_url_rev(self):
        # hotfix the URL scheme after removing bzr+ from bzr+ssh://; re-add it
url, rev = super(Bazaar, self).get_url_rev()
if url.startswith('ssh://'):
url = 'bzr+' + url
return url, rev
def get_url(self, location):
urls = call_subprocess(
[self.cmd, 'info'], show_stdout=False, cwd=location)
for line in urls.splitlines():
line = line.strip()
for x in ('checkout of branch: ',
'parent branch: '):
if line.startswith(x):
repo = line.split(x)[1]
if self._is_local_repository(repo):
return path_to_url(repo)
return repo
return None
def get_revision(self, location):
revision = call_subprocess(
[self.cmd, 'revno'], show_stdout=False, cwd=location)
return revision.splitlines()[-1]
def get_tag_revs(self, location):
tags = call_subprocess(
[self.cmd, 'tags'], show_stdout=False, cwd=location)
tag_revs = []
for line in tags.splitlines():
tags_match = re.search(r'([.\w-]+)\s*(.*)$', line)
if tags_match:
tag = tags_match.group(1)
rev = tags_match.group(2)
tag_revs.append((rev.strip(), tag.strip()))
return dict(tag_revs)
def get_src_requirement(self, dist, location, find_tags):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('bzr:'):
            repo = 'bzr+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
current_rev = self.get_revision(location)
tag_revs = self.get_tag_revs(location)
if current_rev in tag_revs:
# It's a tag
full_egg_name = '%s-%s' % (egg_project_name, tag_revs[current_rev])
else:
full_egg_name = '%s-dev_r%s' % (dist.egg_name(), current_rev)
return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
vcs.register(Bazaar)

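The net effect of Bazaar.get_src_requirement is an editable requirement string pip can later re-fetch. Illustrative values only:

# Assuming dist.egg_name() == 'proj-1.0' and a checkout of
# http://example.com/proj at (untagged) revision 42, the method above
# yields:
#   'bzr+http://example.com/proj@42#egg=proj-1.0-dev_r42'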

@ -0,0 +1,194 @@
import tempfile
import re
import os.path
from pip.util import call_subprocess
from pip.util import display_path, rmtree
from pip.vcs import vcs, VersionControl
from pip.log import logger
from pip.backwardcompat import url2pathname, urlparse
urlsplit = urlparse.urlsplit
urlunsplit = urlparse.urlunsplit
class Git(VersionControl):
name = 'git'
dirname = '.git'
repo_name = 'clone'
schemes = ('git', 'git+http', 'git+https', 'git+ssh', 'git+git', 'git+file')
bundle_file = 'git-clone.txt'
guide = ('# This was a Git repo; to make it a repo again run:\n'
'git init\ngit remote add origin %(url)s -f\ngit checkout %(rev)s\n')
def __init__(self, url=None, *args, **kwargs):
# Works around an apparent Git bug
# (see http://article.gmane.org/gmane.comp.version-control.git/146500)
if url:
scheme, netloc, path, query, fragment = urlsplit(url)
if scheme.endswith('file'):
initial_slashes = path[:-len(path.lstrip('/'))]
newpath = initial_slashes + url2pathname(path).replace('\\', '/').lstrip('/')
                after_plus = scheme.find('+') + 1
                url = scheme[:after_plus] + urlunsplit((scheme[after_plus:], netloc, newpath, query, fragment))
super(Git, self).__init__(url, *args, **kwargs)
def parse_vcs_bundle_file(self, content):
url = rev = None
for line in content.splitlines():
if not line.strip() or line.strip().startswith('#'):
continue
url_match = re.search(r'git\s*remote\s*add\s*origin(.*)\s*-f', line)
if url_match:
url = url_match.group(1).strip()
rev_match = re.search(r'^git\s*checkout\s*-q\s*(.*)\s*', line)
if rev_match:
rev = rev_match.group(1).strip()
if url and rev:
return url, rev
return None, None
def export(self, location):
"""Export the Git repository at the url to the destination location"""
temp_dir = tempfile.mkdtemp('-export', 'pip-')
self.unpack(temp_dir)
try:
if not location.endswith('/'):
location = location + '/'
call_subprocess(
[self.cmd, 'checkout-index', '-a', '-f', '--prefix', location],
filter_stdout=self._filter, show_stdout=False, cwd=temp_dir)
finally:
rmtree(temp_dir)
def check_rev_options(self, rev, dest, rev_options):
"""Check the revision options before checkout to compensate that tags
and branches may need origin/ as a prefix.
Returns the SHA1 of the branch or tag if found.
"""
revisions = self.get_refs(dest)
origin_rev = 'origin/%s' % rev
if origin_rev in revisions:
# remote branch
return [revisions[origin_rev]]
elif rev in revisions:
# a local tag or branch name
return [revisions[rev]]
else:
logger.warn("Could not find a tag or branch '%s', assuming commit." % rev)
return rev_options
def switch(self, dest, url, rev_options):
call_subprocess(
[self.cmd, 'config', 'remote.origin.url', url], cwd=dest)
call_subprocess(
[self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
self.update_submodules(dest)
def update(self, dest, rev_options):
# First fetch changes from the default remote
call_subprocess([self.cmd, 'fetch', '-q'], cwd=dest)
        # Then reset to wanted revision (maybe even origin/master)
if rev_options:
rev_options = self.check_rev_options(rev_options[0], dest, rev_options)
call_subprocess([self.cmd, 'reset', '--hard', '-q'] + rev_options, cwd=dest)
#: update submodules
self.update_submodules(dest)
def obtain(self, dest):
url, rev = self.get_url_rev()
if rev:
rev_options = [rev]
rev_display = ' (to %s)' % rev
else:
rev_options = ['origin/master']
rev_display = ''
if self.check_destination(dest, url, rev_options, rev_display):
logger.notify('Cloning %s%s to %s' % (url, rev_display, display_path(dest)))
call_subprocess([self.cmd, 'clone', '-q', url, dest])
#: repo may contain submodules
self.update_submodules(dest)
if rev:
rev_options = self.check_rev_options(rev, dest, rev_options)
# Only do a checkout if rev_options differs from HEAD
if not self.get_revision(dest).startswith(rev_options[0]):
call_subprocess([self.cmd, 'checkout', '-q'] + rev_options, cwd=dest)
def get_url(self, location):
url = call_subprocess(
[self.cmd, 'config', 'remote.origin.url'],
show_stdout=False, cwd=location)
return url.strip()
def get_revision(self, location):
current_rev = call_subprocess(
[self.cmd, 'rev-parse', 'HEAD'], show_stdout=False, cwd=location)
return current_rev.strip()
def get_refs(self, location):
"""Return map of named refs (branches or tags) to commit hashes."""
output = call_subprocess([self.cmd, 'show-ref'],
show_stdout=False, cwd=location)
rv = {}
for line in output.strip().splitlines():
commit, ref = line.split(' ', 1)
ref = ref.strip()
ref_name = None
if ref.startswith('refs/remotes/'):
ref_name = ref[len('refs/remotes/'):]
elif ref.startswith('refs/heads/'):
ref_name = ref[len('refs/heads/'):]
elif ref.startswith('refs/tags/'):
ref_name = ref[len('refs/tags/'):]
if ref_name is not None:
rv[ref_name] = commit.strip()
return rv
def get_src_requirement(self, dist, location, find_tags):
        repo = self.get_url(location)
        if not repo:
            return None
        if not repo.lower().startswith('git:'):
            repo = 'git+' + repo
        egg_project_name = dist.egg_name().split('-', 1)[0]
current_rev = self.get_revision(location)
refs = self.get_refs(location)
# refs maps names to commit hashes; we need the inverse
# if multiple names map to a single commit, this arbitrarily picks one
names_by_commit = dict((commit, ref) for ref, commit in refs.items())
if current_rev in names_by_commit:
# It's a tag
full_egg_name = '%s-%s' % (egg_project_name, names_by_commit[current_rev])
else:
full_egg_name = '%s-dev' % egg_project_name
return '%s@%s#egg=%s' % (repo, current_rev, full_egg_name)
def get_url_rev(self):
"""
Prefixes stub URLs like 'user@hostname:user/repo.git' with 'ssh://'.
        That's required because although they use SSH, they sometimes don't
        work with an ssh:// scheme (e.g. GitHub). But we need a scheme for
        parsing. Hence we remove it again afterwards and return it as a stub.
"""
        if '://' not in self.url:
            assert 'file:' not in self.url
            self.url = self.url.replace('git+', 'git+ssh://')
url, rev = super(Git, self).get_url_rev()
url = url.replace('ssh://', '')
else:
url, rev = super(Git, self).get_url_rev()
return url, rev
def update_submodules(self, location):
if not os.path.exists(os.path.join(location, '.gitmodules')):
return
call_subprocess([self.cmd, 'submodule', 'update', '--init', '--recursive', '-q'],
cwd=location)
vcs.register(Git)

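Git.get_src_requirement follows the same pattern, pinning to the exact commit hash. Illustrative values only:

# For a clone of https://github.com/user/proj.git sitting on an untagged
# commit, with egg project name 'proj', the method above yields roughly:
#   'git+https://github.com/user/proj.git@<sha>#egg=proj-dev'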
Some files were not shown because too many files have changed in this diff