Modalities: Text | Formats: JSON | Tags: code | Libraries: Datasets, pandas
Dataset columns (types and observed value ranges, as reported by the dataset viewer):

| Column            | Type         | Observed range      |
|-------------------|--------------|---------------------|
| repo              | string       | 856 distinct values |
| pull_number       | int64        | 3 to 127k           |
| instance_id       | string       | 12 to 58 chars      |
| issue_numbers     | sequence     | 1 to 5 items        |
| base_commit       | string       | 40 chars            |
| patch             | string       | 67 to 1.54M chars   |
| test_patch        | string       | 0 to 107M chars     |
| problem_statement | string       | 3 to 307k chars     |
| hints_text        | string       | 0 to 908k chars     |
| created_at        | timestamp[s] |                     |
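Given this schema, a row can be pulled straight into Python with the `datasets` library named above. A hedged sketch: `"<dataset-id>"` is a placeholder, since the dataset's Hub id is not shown on this page, and the split name is an assumption.

```python
# Hedged loading sketch. "<dataset-id>" is a placeholder for the dataset's
# Hub id (not shown on this page); the "train" split is an assumption.
from datasets import load_dataset

ds = load_dataset("<dataset-id>", split="train")

row = ds[0]
print(row["repo"], row["pull_number"], row["instance_id"])
print(row["problem_statement"][:200])  # issue title followed by its body
```

Example rows follow, with each field labeled by its column name.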
repo: conda/conda
pull_number: 620
instance_id: conda__conda-620
issue_numbers: ["599"]
base_commit: c453be49e865297bf12858548a6b3e7891a8cb43
patch:
diff --git a/conda/resolve.py b/conda/resolve.py --- a/conda/resolve.py +++ b/conda/resolve.py @@ -30,6 +30,11 @@ def normalized_version(version): return version +class NoPackagesFound(RuntimeError): + def __init__(self, msg, pkg): + super(NoPackagesFound, self).__init__(msg) + self.pkg = pkg + const_pat = re.compile(r'([=<>!]{1,2})(\S+)$') def ver_eval(version, constraint): """ @@ -243,7 +248,7 @@ def track_features(self, fn): def get_pkgs(self, ms, max_only=False): pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)] if not pkgs: - raise RuntimeError("No packages found matching: %s" % ms) + raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec) if max_only: maxpkg = max(pkgs) ret = [] @@ -262,7 +267,7 @@ def get_pkgs(self, ms, max_only=False): def get_max_dists(self, ms): pkgs = self.get_pkgs(ms, max_only=True) if not pkgs: - raise RuntimeError("No packages found matching: %s" % ms) + raise NoPackagesFound("No packages found matching: %s" % ms, ms.spec) for pkg in pkgs: yield pkg.fn @@ -371,11 +376,22 @@ def generate_version_eq(self, v, dists, include0=False): def get_dists(self, specs, max_only=False): dists = {} for spec in specs: + found = False + notfound = [] for pkg in self.get_pkgs(MatchSpec(spec), max_only=max_only): if pkg.fn in dists: + found = True continue - dists.update(self.all_deps(pkg.fn, max_only=max_only)) - dists[pkg.fn] = pkg + try: + dists.update(self.all_deps(pkg.fn, max_only=max_only)) + except NoPackagesFound as e: + # Ignore any package that has nonexisting dependencies. + notfound.append(e.pkg) + else: + dists[pkg.fn] = pkg + found = True + if not found: + raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None) return dists @@ -387,25 +403,31 @@ def solve2(self, specs, features, guess=True, alg='sorter', returnall=False): # complicated cases that the pseudo-boolean solver does, but it's also # much faster when it does work. - dists = self.get_dists(specs, max_only=True) - - v = {} # map fn to variable number - w = {} # map variable number to fn - i = -1 # in case the loop doesn't run - for i, fn in enumerate(sorted(dists)): - v[fn] = i + 1 - w[i + 1] = fn - m = i + 1 - - dotlog.debug("Solving using max dists only") - clauses = self.gen_clauses(v, dists, specs, features) - solutions = min_sat(clauses) - - if len(solutions) == 1: - ret = [w[lit] for lit in solutions.pop(0) if 0 < lit] - if returnall: - return [ret] - return ret + try: + dists = self.get_dists(specs, max_only=True) + except NoPackagesFound: + # Handle packages that are not included because some dependencies + # couldn't be found. + pass + else: + v = {} # map fn to variable number + w = {} # map variable number to fn + i = -1 # in case the loop doesn't run + for i, fn in enumerate(sorted(dists)): + v[fn] = i + 1 + w[i + 1] = fn + m = i + 1 + + dotlog.debug("Solving using max dists only") + clauses = self.gen_clauses(v, dists, specs, features) + solutions = min_sat(clauses) + + + if len(solutions) == 1: + ret = [w[lit] for lit in solutions.pop(0) if 0 < lit] + if returnall: + return [ret] + return ret dists = self.get_dists(specs)
diff --git a/tests/test_resolve.py b/tests/test_resolve.py --- a/tests/test_resolve.py +++ b/tests/test_resolve.py @@ -3,13 +3,15 @@ import unittest from os.path import dirname, join -from conda.resolve import ver_eval, VersionSpec, MatchSpec, Package, Resolve +from conda.resolve import ver_eval, VersionSpec, MatchSpec, Package, Resolve, NoPackagesFound from .helpers import raises with open(join(dirname(__file__), 'index.json')) as fi: - r = Resolve(json.load(fi)) + index = json.load(fi) + +r = Resolve(index) f_mkl = set(['mkl']) @@ -672,9 +674,183 @@ def test_unsat(): def test_nonexistent(): r.msd_cache = {} - assert raises(RuntimeError, lambda: r.solve(['notarealpackage 2.0*']), 'No packages found') + assert raises(NoPackagesFound, lambda: r.solve(['notarealpackage 2.0*']), 'No packages found') # This exact version of NumPy does not exist - assert raises(RuntimeError, lambda: r.solve(['numpy 1.5']), 'No packages found') + assert raises(NoPackagesFound, lambda: r.solve(['numpy 1.5']), 'No packages found') + +def test_nonexistent_deps(): + index2 = index.copy() + index2['mypackage-1.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'], + 'name': 'mypackage', + 'requires': ['nose 1.2.1', 'python 3.3'], + 'version': '1.0', + } + index2['mypackage-1.1-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'python 3.3*'], + 'name': 'mypackage', + 'requires': ['nose 1.2.1', 'python 3.3'], + 'version': '1.1', + } + r = Resolve(index2) + + assert set(r.find_matches(MatchSpec('mypackage'))) == { + 'mypackage-1.0-py33_0.tar.bz2', + 'mypackage-1.1-py33_0.tar.bz2', + } + assert set(r.get_dists(['mypackage']).keys()) == { + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.1.2-py26_0.tar.bz2', + 'nose-1.1.2-py27_0.tar.bz2', + 'nose-1.1.2-py33_0.tar.bz2', + 'nose-1.2.1-py26_0.tar.bz2', + 'nose-1.2.1-py27_0.tar.bz2', + 'nose-1.2.1-py33_0.tar.bz2', + 'nose-1.3.0-py26_0.tar.bz2', + 'nose-1.3.0-py27_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-2.6.8-1.tar.bz2', + 'python-2.6.8-2.tar.bz2', + 'python-2.6.8-3.tar.bz2', + 'python-2.6.8-4.tar.bz2', + 'python-2.6.8-5.tar.bz2', + 'python-2.6.8-6.tar.bz2', + 'python-2.7.3-2.tar.bz2', + 'python-2.7.3-3.tar.bz2', + 'python-2.7.3-4.tar.bz2', + 'python-2.7.3-5.tar.bz2', + 'python-2.7.3-6.tar.bz2', + 'python-2.7.3-7.tar.bz2', + 'python-2.7.4-0.tar.bz2', + 'python-2.7.5-0.tar.bz2', + 'python-3.3.0-2.tar.bz2', + 'python-3.3.0-3.tar.bz2', + 'python-3.3.0-4.tar.bz2', + 'python-3.3.0-pro0.tar.bz2', + 'python-3.3.0-pro1.tar.bz2', + 'python-3.3.1-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + } + + assert set(r.get_dists(['mypackage'], max_only=True).keys()) == { + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.3.0-py26_0.tar.bz2', + 'nose-1.3.0-py27_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-2.6.8-6.tar.bz2', + 'python-2.7.5-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + } + + assert r.solve(['mypackage']) == r.solve(['mypackage 1.1']) == [ + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 
'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + assert raises(RuntimeError, lambda: r.solve(['mypackage 1.0'])) + + # This time, the latest version is messed up + index3 = index.copy() + index3['mypackage-1.1-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'python 3.3*', 'notarealpackage 2.0*'], + 'name': 'mypackage', + 'requires': ['nose 1.2.1', 'python 3.3'], + 'version': '1.1', + } + index3['mypackage-1.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'python 3.3*'], + 'name': 'mypackage', + 'requires': ['nose 1.2.1', 'python 3.3'], + 'version': '1.0', + } + r = Resolve(index3) + + assert set(r.find_matches(MatchSpec('mypackage'))) == { + 'mypackage-1.0-py33_0.tar.bz2', + 'mypackage-1.1-py33_0.tar.bz2', + } + assert set(r.get_dists(['mypackage']).keys()) == { + 'mypackage-1.0-py33_0.tar.bz2', + 'nose-1.1.2-py26_0.tar.bz2', + 'nose-1.1.2-py27_0.tar.bz2', + 'nose-1.1.2-py33_0.tar.bz2', + 'nose-1.2.1-py26_0.tar.bz2', + 'nose-1.2.1-py27_0.tar.bz2', + 'nose-1.2.1-py33_0.tar.bz2', + 'nose-1.3.0-py26_0.tar.bz2', + 'nose-1.3.0-py27_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-2.6.8-1.tar.bz2', + 'python-2.6.8-2.tar.bz2', + 'python-2.6.8-3.tar.bz2', + 'python-2.6.8-4.tar.bz2', + 'python-2.6.8-5.tar.bz2', + 'python-2.6.8-6.tar.bz2', + 'python-2.7.3-2.tar.bz2', + 'python-2.7.3-3.tar.bz2', + 'python-2.7.3-4.tar.bz2', + 'python-2.7.3-5.tar.bz2', + 'python-2.7.3-6.tar.bz2', + 'python-2.7.3-7.tar.bz2', + 'python-2.7.4-0.tar.bz2', + 'python-2.7.5-0.tar.bz2', + 'python-3.3.0-2.tar.bz2', + 'python-3.3.0-3.tar.bz2', + 'python-3.3.0-4.tar.bz2', + 'python-3.3.0-pro0.tar.bz2', + 'python-3.3.0-pro1.tar.bz2', + 'python-3.3.1-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + } + + assert raises(RuntimeError, lambda: r.get_dists(['mypackage'], max_only=True)) + + assert r.solve(['mypackage']) == r.solve(['mypackage 1.0']) == [ + 'mypackage-1.0-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.1'])) def test_package_ordering(): sympy_071 = Package('sympy-0.7.1-py27_0.tar.bz2', r.index['sympy-0.7.1-py27_0.tar.bz2'])
problem_statement:
Don't bail when a dependency can't be found

When a dependency for a package can't be found, conda currently bails out completely, but this can happen, e.g., for just some old builds of a package. We should instead exclude any such package from the solver.

hints_text:
It can also happen if someone has a package on their Binstar but not all of its dependencies.

created_at: 2014-03-24T18:23:13
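The patch above introduces NoPackagesFound, an exception that remembers which spec could not be satisfied, and then skips any candidate whose dependency closure raises it. Below is a minimal, self-contained sketch of that pattern; `find_candidates` and `dependency_closure` are hypothetical stand-ins for conda's `get_pkgs` and `all_deps`, not its real API.

```python
# Sketch of the error-carrying-exception pattern from the patch.
# `find_candidates` and `dependency_closure` are illustrative callables,
# not conda's real API.

class NoPackagesFound(RuntimeError):
    """RuntimeError that also records which spec could not be satisfied."""
    def __init__(self, msg, pkg):
        super(NoPackagesFound, self).__init__(msg)
        self.pkg = pkg

def resolve(spec, find_candidates, dependency_closure):
    """Return candidates for `spec` whose dependencies all exist.

    Candidates with unsatisfiable dependencies are silently skipped;
    only if *every* candidate is broken do we raise, naming the specs
    that could not be found.
    """
    usable = []
    notfound = []
    for pkg in find_candidates(spec):
        try:
            dependency_closure(pkg)   # raises NoPackagesFound for bad deps
        except NoPackagesFound as e:
            notfound.append(e.pkg)    # remember what was missing, keep going
        else:
            usable.append(pkg)
    if not usable:
        raise NoPackagesFound(
            "Could not find some dependencies for %s: %s"
            % (spec, ", ".join(notfound)), spec)
    return usable
```

This is the same shape as the patched get_dists: drop broken candidates quietly, and surface the accumulated `notfound` list only when nothing usable is left.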
repo: conda/conda
pull_number: 662
instance_id: conda__conda-662
issue_numbers: ["464"]
base_commit: 3d4118668fca738984cce13d9235e0fc11a79df4
patch:
diff --git a/conda/cli/main_remove.py b/conda/cli/main_remove.py --- a/conda/cli/main_remove.py +++ b/conda/cli/main_remove.py @@ -63,6 +63,7 @@ def execute(args, parser): from conda.api import get_index from conda.cli import pscheck from conda.install import rm_rf, linked + from conda import config if not (args.all or args.package_names): sys.exit('Error: no package names supplied,\n' @@ -71,12 +72,11 @@ def execute(args, parser): prefix = common.get_prefix(args) common.check_write('remove', prefix) - index = None + common.ensure_override_channels_requires_channel(args) + channel_urls = args.channel or () + index = get_index(channel_urls=channel_urls, + prepend=not args.override_channels) if args.features: - common.ensure_override_channels_requires_channel(args) - channel_urls = args.channel or () - index = get_index(channel_urls=channel_urls, - prepend=not args.override_channels) features = set(args.package_names) actions = plan.remove_features_actions(prefix, index, features) diff --git a/conda/plan.py b/conda/plan.py --- a/conda/plan.py +++ b/conda/plan.py @@ -20,7 +20,7 @@ from conda import install from conda.fetch import fetch_pkg from conda.history import History -from conda.resolve import MatchSpec, Resolve +from conda.resolve import MatchSpec, Resolve, Package from conda.utils import md5_file, human_bytes log = getLogger(__name__) @@ -60,7 +60,7 @@ def split_linkarg(arg): linktype = install.LINK_HARD return dist, pkgs_dir, int(linktype) -def display_actions(actions, index=None): +def display_actions(actions, index): if actions.get(FETCH): print("\nThe following packages will be downloaded:\n") @@ -79,19 +79,113 @@ def display_actions(actions, index=None): print(" " * 43 + "Total: %14s" % human_bytes(sum(index[dist + '.tar.bz2']['size'] for dist in actions[FETCH]))) - if actions.get(UNLINK): - print("\nThe following packages will be UN-linked:\n") - print_dists([ - (dist, None) - for dist in actions[UNLINK]]) - if actions.get(LINK): - print("\nThe following packages will be linked:\n") - lst = [] - for arg in actions[LINK]: - dist, pkgs_dir, lt = split_linkarg(arg) - extra = ' %s' % install.link_name_map.get(lt) - lst.append((dist, extra)) - print_dists(lst) + + # package -> [oldver-oldbuild, newver-newbuild] + packages = defaultdict(lambda: list(('', ''))) + features = defaultdict(lambda: list(('', ''))) + + # This assumes each package will appear in LINK no more than once. + Packages = {} + linktypes = {} + for arg in actions.get(LINK, []): + dist, pkgs_dir, lt = split_linkarg(arg) + pkg, ver, build = dist.rsplit('-', 2) + packages[pkg][1] = ver + '-' + build + Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2']) + linktypes[pkg] = lt + features[pkg][1] = index[dist + '.tar.bz2'].get('features', '') + for arg in actions.get(UNLINK, []): + dist, pkgs_dir, lt = split_linkarg(arg) + pkg, ver, build = dist.rsplit('-', 2) + packages[pkg][0] = ver + '-' + build + Packages[dist] = Package(dist + '.tar.bz2', index[dist + '.tar.bz2']) + features[pkg][0] = index[dist + '.tar.bz2'].get('features', '') + + # Put a minimum length here---. 
.--For the : + # v v + maxpkg = max(len(max(packages or [''], key=len)), 0) + 1 + maxoldver = len(max(packages.values() or [['']], key=lambda i: len(i[0]))[0]) + maxnewver = len(max(packages.values() or [['', '']], key=lambda i: len(i[1]))[1]) + maxoldfeatures = len(max(features.values() or [['']], key=lambda i: len(i[0]))[0]) + maxnewfeatures = len(max(features.values() or [['', '']], key=lambda i: len(i[1]))[1]) + maxoldchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' + + packages[pkg][0]].channel) for pkg in packages if packages[pkg][0]] or + [''], key=len)) + maxnewchannel = len(max([config.canonical_channel_name(Packages[pkg + '-' + + packages[pkg][1]].channel) for pkg in packages if packages[pkg][1]] or + [''], key=len)) + new = {pkg for pkg in packages if not packages[pkg][0]} + removed = {pkg for pkg in packages if not packages[pkg][1]} + updated = set() + downgraded = set() + oldfmt = {} + newfmt = {} + for pkg in packages: + # That's right. I'm using old-style string formatting to generate a + # string with new-style string formatting. + oldfmt[pkg] = '{pkg:<%s} {vers[0]:<%s}' % (maxpkg, maxoldver) + if config.show_channel_urls: + oldfmt[pkg] += ' {channel[0]:<%s}' % maxoldchannel + if packages[pkg][0]: + newfmt[pkg] = '{vers[1]:<%s}' % maxnewver + else: + newfmt[pkg] = '{pkg:<%s} {vers[1]:<%s}' % (maxpkg, maxnewver) + if config.show_channel_urls: + newfmt[pkg] += ' {channel[1]:<%s}' % maxnewchannel + # TODO: Should we also care about the old package's link type? + if pkg in linktypes and linktypes[pkg] != install.LINK_HARD: + newfmt[pkg] += ' (%s)' % install.link_name_map[linktypes[pkg]] + + if features[pkg][0]: + oldfmt[pkg] += ' [{features[0]:<%s}]' % maxoldfeatures + if features[pkg][1]: + newfmt[pkg] += ' [{features[1]:<%s}]' % maxnewfeatures + + if pkg in new or pkg in removed: + continue + P0 = Packages[pkg + '-' + packages[pkg][0]] + P1 = Packages[pkg + '-' + packages[pkg][1]] + try: + # <= here means that unchanged packages will be put in updated + newer = (P0.name, P0.norm_version, P0.build_number) <= (P1.name, P1.norm_version, P1.build_number) + except TypeError: + newer = (P0.name, P0.version, P0.build_number) <= (P1.name, P1.version, P1.build_number) + if newer: + updated.add(pkg) + else: + downgraded.add(pkg) + + arrow = ' --> ' + lead = ' '*4 + + def format(s, pkg): + channel = ['', ''] + for i in range(2): + if packages[pkg][i]: + channel[i] = config.canonical_channel_name(Packages[pkg + '-' + packages[pkg][i]].channel) + return lead + s.format(pkg=pkg+':', vers=packages[pkg], + channel=channel, features=features[pkg]) + + if new: + print("\nThe following NEW packages will be INSTALLED:\n") + for pkg in sorted(new): + print(format(newfmt[pkg], pkg)) + + if removed: + print("\nThe following packages will be REMOVED:\n") + for pkg in sorted(removed): + print(format(oldfmt[pkg], pkg)) + + if updated: + print("\nThe following packages will be UPDATED:\n") + for pkg in sorted(updated): + print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg)) + + if downgraded: + print("\nThe following packages will be DOWNGRADED:\n") + for pkg in sorted(downgraded): + print(format(oldfmt[pkg] + arrow + newfmt[pkg], pkg)) + print() # the order matters here, don't change it
diff --git a/tests/helpers.py b/tests/helpers.py --- a/tests/helpers.py +++ b/tests/helpers.py @@ -5,6 +5,8 @@ import sys import os +from contextlib import contextmanager + def raises(exception, func, string=None): try: a = func() @@ -35,3 +37,31 @@ def run_conda_command(*args): stdout, stderr = p.communicate() return (stdout.decode('utf-8').replace('\r\n', '\n'), stderr.decode('utf-8').replace('\r\n', '\n')) + +class CapturedText(object): + pass + +@contextmanager +def captured(): + """ + Context manager to capture the printed output of the code in the with block + + Bind the context manager to a variable using `as` and the result will be + in the stdout property. + + >>> from tests.helpers import capture + >>> with captured() as c: + ... print('hello world!') + ... + >>> c.stdout + 'hello world!\n' + """ + from conda.compat import StringIO + import sys + + stdout = sys.stdout + sys.stdout = file = StringIO() + c = CapturedText() + yield c + c.stdout = file.getvalue() + sys.stdout = stdout diff --git a/tests/test_plan.py b/tests/test_plan.py --- a/tests/test_plan.py +++ b/tests/test_plan.py @@ -1,15 +1,20 @@ import json import unittest from os.path import dirname, join +from collections import defaultdict from conda.config import default_python, pkgs_dirs +import conda.config from conda.install import LINK_HARD import conda.plan as plan +from conda.plan import display_actions from conda.resolve import Resolve +from tests.helpers import captured with open(join(dirname(__file__), 'index.json')) as fi: - r = Resolve(json.load(fi)) + index = json.load(fi) + r = Resolve(index) def solve(specs): return [fn[:-8] for fn in r.solve(specs)] @@ -77,3 +82,685 @@ def test_4(self): (['anaconda', 'python 3*'], []), ]: self.check(specs, added) + +def test_display_actions(): + conda.config.show_channel_urls = False + actions = defaultdict(list, {"FETCH": ['sympy-0.7.2-py27_0', + "numpy-1.7.1-py27_0"]}) + # The older test index doesn't have the size metadata + index['sympy-0.7.2-py27_0.tar.bz2']['size'] = 4374752 + index["numpy-1.7.1-py27_0.tar.bz2"]['size'] = 5994338 + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be downloaded: + + package | build + ---------------------------|----------------- + sympy-0.7.2 | py27_0 4.2 MB + numpy-1.7.1 | py27_0 5.7 MB + ------------------------------------------------------------ + Total: 9.9 MB + +""" + + actions = defaultdict(list, {'PREFIX': + '/Users/aaronmeurer/anaconda/envs/test', 'SYMLINK_CONDA': + ['/Users/aaronmeurer/anaconda'], 'LINK': ['python-3.3.2-0', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + python: 3.3.2-0 \n\ + readline: 6.2-0 \n\ + sqlite: 3.7.13-0 + tk: 8.5.13-0 + zlib: 1.2.7-0 \n\ + +""" + + actions['UNLINK'] = actions['LINK'] + actions['LINK'] = [] + + with captured() as c: + display_actions(actions, index) + + + assert c.stdout == """ +The following packages will be REMOVED: + + python: 3.3.2-0 \n\ + readline: 6.2-0 \n\ + sqlite: 3.7.13-0 + tk: 8.5.13-0 + zlib: 1.2.7-0 \n\ + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0'], 'UNLINK': + ['cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The 
following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0', + 'dateutil-1.5-py33_0', 'numpy-1.7.1-py33_0'], 'UNLINK': + ['cython-0.19-py33_0', 'dateutil-2.1-py33_1', 'pip-1.3.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + numpy: 1.7.1-py33_0 \n\ + +The following packages will be REMOVED: + + pip: 1.3.1-py33_1 + +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 + +The following packages will be DOWNGRADED: + + dateutil: 2.1-py33_1 --> 1.5-py33_0 \n\ + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 + dateutil: 1.5-py33_0 --> 2.1-py33_1 \n\ + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 + dateutil: 2.1-py33_1 --> 1.5-py33_0 \n\ + +""" + + + +def test_display_actions_show_channel_urls(): + conda.config.show_channel_urls = True + actions = defaultdict(list, {"FETCH": ['sympy-0.7.2-py27_0', + "numpy-1.7.1-py27_0"]}) + # The older test index doesn't have the size metadata + index['sympy-0.7.2-py27_0.tar.bz2']['size'] = 4374752 + index["numpy-1.7.1-py27_0.tar.bz2"]['size'] = 5994338 + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be downloaded: + + package | build + ---------------------------|----------------- + sympy-0.7.2 | py27_0 4.2 MB <unknown> + numpy-1.7.1 | py27_0 5.7 MB <unknown> + ------------------------------------------------------------ + Total: 9.9 MB + +""" + + + actions = defaultdict(list, {'PREFIX': + '/Users/aaronmeurer/anaconda/envs/test', 'SYMLINK_CONDA': + ['/Users/aaronmeurer/anaconda'], 'LINK': ['python-3.3.2-0', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + python: 3.3.2-0 <unknown> + readline: 6.2-0 <unknown> + sqlite: 3.7.13-0 <unknown> + tk: 8.5.13-0 <unknown> + zlib: 1.2.7-0 <unknown> + +""" + + actions['UNLINK'] = actions['LINK'] + actions['LINK'] = [] + + with captured() as c: + display_actions(actions, index) + + + assert c.stdout == """ +The following packages will be REMOVED: + + python: 3.3.2-0 <unknown> + readline: 6.2-0 <unknown> + sqlite: 3.7.13-0 <unknown> + tk: 8.5.13-0 <unknown> + zlib: 1.2.7-0 <unknown> + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0'], 'UNLINK': + ['cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be 
UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown> + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown> + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0', + 'dateutil-1.5-py33_0', 'numpy-1.7.1-py33_0'], 'UNLINK': + ['cython-0.19-py33_0', 'dateutil-2.1-py33_1', 'pip-1.3.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + numpy: 1.7.1-py33_0 <unknown> + +The following packages will be REMOVED: + + pip: 1.3.1-py33_1 <unknown> + +The following packages will be UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown> + +The following packages will be DOWNGRADED: + + dateutil: 2.1-py33_1 <unknown> --> 1.5-py33_0 <unknown> + +""" + + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 <unknown> + dateutil: 1.5-py33_0 <unknown> --> 2.1-py33_1 <unknown> + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 <unknown> --> 0.19-py33_0 <unknown> + dateutil: 2.1-py33_1 <unknown> --> 1.5-py33_0 <unknown> + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + index['cython-0.19.1-py33_0.tar.bz2']['channel'] = 'my_channel' + index['dateutil-1.5-py33_0.tar.bz2']['channel'] = 'my_channel' + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 my_channel + dateutil: 1.5-py33_0 my_channel --> 2.1-py33_1 <unknown> \n\ + +""" + + actions['LINK'], actions['UNLINK'] = actions['UNLINK'], actions['LINK'] + + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 my_channel --> 0.19-py33_0 <unknown> \n\ + dateutil: 2.1-py33_1 <unknown> --> 1.5-py33_0 my_channel + +""" + + +def test_display_actions_link_type(): + conda.config.show_channel_urls = False + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', + 'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', 'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 2', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 2', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 2', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 2', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 2']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19.1-py33_0 (soft-link) + dateutil: 1.5-py33_0 (soft-link) + numpy: 1.7.1-py33_0 (soft-link) + python: 3.3.2-0 (soft-link) + readline: 6.2-0 (soft-link) + sqlite: 3.7.13-0 (soft-link) + tk: 8.5.13-0 (soft-link) + zlib: 1.2.7-0 (soft-link) + +""" + + actions = 
defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', + 'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 2'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 (soft-link) + dateutil: 1.5-py33_0 --> 2.1-py33_1 (soft-link) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 2', + 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 2'], 'UNLINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 (soft-link) + dateutil: 2.1-py33_1 --> 1.5-py33_0 (soft-link) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', + 'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', 'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 1', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 1', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 1', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19.1-py33_0 + dateutil: 1.5-py33_0 \n\ + numpy: 1.7.1-py33_0 \n\ + python: 3.3.2-0 \n\ + readline: 6.2-0 \n\ + sqlite: 3.7.13-0 \n\ + tk: 8.5.13-0 \n\ + zlib: 1.2.7-0 \n\ + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', + 'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 1'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 + dateutil: 1.5-py33_0 --> 2.1-py33_1 \n\ + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 1', + 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 1'], 'UNLINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 + dateutil: 2.1-py33_1 --> 1.5-py33_0 \n\ + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', 'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 3', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 3', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 3', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 3', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19.1-py33_0 (copy) + dateutil: 1.5-py33_0 (copy) + numpy: 1.7.1-py33_0 (copy) + python: 3.3.2-0 (copy) + readline: 6.2-0 (copy) + sqlite: 3.7.13-0 (copy) + tk: 8.5.13-0 (copy) + zlib: 1.2.7-0 (copy) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 
'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 --> 0.19.1-py33_0 (copy) + dateutil: 1.5-py33_0 --> 2.1-py33_1 (copy) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 --> 0.19-py33_0 (copy) + dateutil: 2.1-py33_1 --> 1.5-py33_0 (copy) + +""" + + conda.config.show_channel_urls = True + + index['cython-0.19.1-py33_0.tar.bz2']['channel'] = 'my_channel' + index['dateutil-1.5-py33_0.tar.bz2']['channel'] = 'my_channel' + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'numpy-1.7.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', 'python-3.3.2-0 /Users/aaronmeurer/anaconda/pkgs 3', 'readline-6.2-0 /Users/aaronmeurer/anaconda/pkgs 3', 'sqlite-3.7.13-0 /Users/aaronmeurer/anaconda/pkgs 3', 'tk-8.5.13-0 /Users/aaronmeurer/anaconda/pkgs 3', 'zlib-1.2.7-0 /Users/aaronmeurer/anaconda/pkgs 3']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19.1-py33_0 my_channel (copy) + dateutil: 1.5-py33_0 my_channel (copy) + numpy: 1.7.1-py33_0 <unknown> (copy) + python: 3.3.2-0 <unknown> (copy) + readline: 6.2-0 <unknown> (copy) + sqlite: 3.7.13-0 <unknown> (copy) + tk: 8.5.13-0 <unknown> (copy) + zlib: 1.2.7-0 <unknown> (copy) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19.1-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'dateutil-2.1-py33_1 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK': ['cython-0.19-py33_0', + 'dateutil-1.5-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + cython: 0.19-py33_0 <unknown> --> 0.19.1-py33_0 my_channel (copy) + dateutil: 1.5-py33_0 my_channel --> 2.1-py33_1 <unknown> (copy) + +""" + + actions = defaultdict(list, {'LINK': ['cython-0.19-py33_0 /Users/aaronmeurer/anaconda/pkgs 3', + 'dateutil-1.5-py33_0 /Users/aaronmeurer/anaconda/pkgs 3'], 'UNLINK': ['cython-0.19.1-py33_0', + 'dateutil-2.1-py33_1']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + cython: 0.19.1-py33_0 my_channel --> 0.19-py33_0 <unknown> (copy) + dateutil: 2.1-py33_1 <unknown> --> 1.5-py33_0 my_channel (copy) + +""" + +def test_display_actions_features(): + conda.config.show_channel_urls = False + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19-py33_0 \n\ + numpy: 1.7.1-py33_p0 [mkl] + +""" + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be REMOVED: + + cython: 0.19-py33_0 \n\ + numpy: 1.7.1-py33_p0 [mkl] + +""" + + actions = 
defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.0-py33_p0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + numpy: 1.7.1-py33_p0 [mkl] --> 1.7.0-py33_p0 [mkl] + +""" + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.0-py33_p0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.0-py33_p0 [mkl] --> 1.7.1-py33_p0 [mkl] + +""" + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.1-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + # NB: Packages whose version do not changed are put in UPDATED + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.1-py33_0 --> 1.7.1-py33_p0 [mkl] + +""" + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.1-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.1-py33_p0 [mkl] --> 1.7.1-py33_0 + +""" + + conda.config.show_channel_urls = True + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following NEW packages will be INSTALLED: + + cython: 0.19-py33_0 <unknown> + numpy: 1.7.1-py33_p0 <unknown> [mkl] + +""" + + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0', 'cython-0.19-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be REMOVED: + + cython: 0.19-py33_0 <unknown> + numpy: 1.7.1-py33_p0 <unknown> [mkl] + +""" + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.0-py33_p0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be DOWNGRADED: + + numpy: 1.7.1-py33_p0 <unknown> [mkl] --> 1.7.0-py33_p0 <unknown> [mkl] + +""" + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.0-py33_p0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.0-py33_p0 <unknown> [mkl] --> 1.7.1-py33_p0 <unknown> [mkl] + +""" + + + actions = defaultdict(list, {'LINK': ['numpy-1.7.1-py33_p0'], 'UNLINK': ['numpy-1.7.1-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + # NB: Packages whose version do not changed are put in UPDATED + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.1-py33_0 <unknown> --> 1.7.1-py33_p0 <unknown> [mkl] + +""" + + actions = defaultdict(list, {'UNLINK': ['numpy-1.7.1-py33_p0'], 'LINK': ['numpy-1.7.1-py33_0']}) + + with captured() as c: + display_actions(actions, index) + + assert c.stdout == """ +The following packages will be UPDATED: + + numpy: 1.7.1-py33_p0 <unknown> [mkl] --> 1.7.1-py33_0 <unknown> + +"""
problem_statement:
Make the conda install table easier to read

The table of which packages will be installed and removed is hard to read. For one thing, it's hard to tell at a glance which packages are not being removed but merely upgraded or downgraded. Also, the "link" terminology is confusing. A suggestion by @jklowden:

```
$ conda update conda
Updating Anaconda environment at /usr/local/anaconda

The following packages will be downloaded:

    conda-2.2.3-py27_0.tar.bz2  [http://repo.continuum.io/pkgs/free/osx-64/]

The following packages will be upgraded:

    Old version               Replace with
    ------------------------- -------------------------
    conda-1.4.4               conda-2.2.3
```

> or, if you really want the build (I don't, it's not meaningful to the user)

```
    package      Old version        New version
    ------------ ------------------ ------------------
    conda        1.4.4, py27_0      2.2.3, py27_0
```

I think the build is meaningful: it tells you which Python version is being used, and whether you are using MKL. Some people might also use the build string to carry other information that is useful to users.

created_at: 2014-04-11T20:19:51
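The heart of the rewritten display_actions is the alignment logic: measure the widest package name and version on each side, then render every "old --> new" transition through a format string built with %-formatting (as the patch's own comment notes). A self-contained sketch of that trick, with invented sample data:

```python
# Alignment sketch for the "old --> new" display. The package data below
# is invented sample input, not taken from a real index.

packages = {
    "cython":   ("0.19-py33_0", "0.19.1-py33_0"),
    "dateutil": ("1.5-py33_0", "2.1-py33_1"),
}

maxpkg = max(len(p) for p in packages) + 1  # room for the trailing colon
maxold = max(len(old) for old, _ in packages.values())
maxnew = max(len(new) for _, new in packages.values())

# As in the patch: old-style %-formatting generates a new-style format string.
fmt = "{pkg:<%d} {old:<%d} --> {new:<%d}" % (maxpkg, maxold, maxnew)

print("The following packages will be UPDATED:\n")
for pkg in sorted(packages):
    old, new = packages[pkg]
    print("    " + fmt.format(pkg=pkg + ":", old=old, new=new))
```

Running this prints the two transitions with the arrows vertically aligned, which is exactly the readability property the expected test output above asserts.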
repo: conda/conda
pull_number: 667
instance_id: conda__conda-667
issue_numbers: ["666"]
base_commit: f2934aea3f32ac94907b742a800d82c1e08757fe
patch:
diff --git a/conda/resolve.py b/conda/resolve.py --- a/conda/resolve.py +++ b/conda/resolve.py @@ -278,12 +278,25 @@ def all_deps(self, root_fn, max_only=False): def add_dependents(fn1, max_only=False): for ms in self.ms_depends(fn1): + found = False + notfound = [] for pkg2 in self.get_pkgs(ms, max_only=max_only): if pkg2.fn in res: + found = True continue - res[pkg2.fn] = pkg2 - if ms.strictness < 3: - add_dependents(pkg2.fn, max_only=max_only) + try: + if ms.strictness < 3: + add_dependents(pkg2.fn, max_only=max_only) + except NoPackagesFound as e: + if e.pkg not in notfound: + notfound.append(e.pkg) + else: + found = True + res[pkg2.fn] = pkg2 + + if not found: + raise NoPackagesFound("Could not find some dependencies " + "for %s: %s" % (ms, ', '.join(notfound)), str(ms)) add_dependents(root_fn, max_only=max_only) return res @@ -394,7 +407,7 @@ def get_dists(self, specs, max_only=False): dists[pkg.fn] = pkg found = True if not found: - raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), None) + raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), spec) return dists
diff --git a/tests/test_resolve.py b/tests/test_resolve.py --- a/tests/test_resolve.py +++ b/tests/test_resolve.py @@ -696,6 +696,22 @@ def test_nonexistent_deps(): 'requires': ['nose 1.2.1', 'python 3.3'], 'version': '1.1', } + index2['anotherpackage-1.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'mypackage 1.1'], + 'name': 'anotherpackage', + 'requires': ['nose', 'mypackage 1.1'], + 'version': '1.0', + } + index2['anotherpackage-2.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'mypackage'], + 'name': 'anotherpackage', + 'requires': ['nose', 'mypackage'], + 'version': '2.0', + } r = Resolve(index2) assert set(r.find_matches(MatchSpec('mypackage'))) == { @@ -772,6 +788,32 @@ def test_nonexistent_deps(): ] assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.0'])) + assert r.solve(['anotherpackage 1.0']) == [ + 'anotherpackage-1.0-py33_0.tar.bz2', + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + + assert r.solve(['anotherpackage']) == [ + 'anotherpackage-2.0-py33_0.tar.bz2', + 'mypackage-1.1-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + # This time, the latest version is messed up index3 = index.copy() index3['mypackage-1.1-py33_0.tar.bz2'] = { @@ -790,6 +832,22 @@ def test_nonexistent_deps(): 'requires': ['nose 1.2.1', 'python 3.3'], 'version': '1.0', } + index3['anotherpackage-1.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'mypackage 1.0'], + 'name': 'anotherpackage', + 'requires': ['nose', 'mypackage 1.0'], + 'version': '1.0', + } + index3['anotherpackage-2.0-py33_0.tar.bz2'] = { + 'build': 'py33_0', + 'build_number': 0, + 'depends': ['nose', 'mypackage'], + 'name': 'anotherpackage', + 'requires': ['nose', 'mypackage'], + 'version': '2.0', + } r = Resolve(index3) assert set(r.find_matches(MatchSpec('mypackage'))) == { @@ -852,6 +910,35 @@ def test_nonexistent_deps(): ] assert raises(NoPackagesFound, lambda: r.solve(['mypackage 1.1'])) + + assert r.solve(['anotherpackage 1.0']) == [ + 'anotherpackage-1.0-py33_0.tar.bz2', + 'mypackage-1.0-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + + # If recursive checking is working correctly, this will give + # anotherpackage 2.0, not anotherpackage 1.0 + assert r.solve(['anotherpackage']) == [ + 'anotherpackage-2.0-py33_0.tar.bz2', + 'mypackage-1.0-py33_0.tar.bz2', + 'nose-1.3.0-py33_0.tar.bz2', + 'openssl-1.0.1c-0.tar.bz2', + 'python-3.3.2-0.tar.bz2', + 'readline-6.2-0.tar.bz2', + 'sqlite-3.7.13-0.tar.bz2', + 'system-5.8-1.tar.bz2', + 'tk-8.5.13-0.tar.bz2', + 'zlib-1.2.7-0.tar.bz2', + ] + def test_package_ordering(): sympy_071 = Package('sympy-0.7.1-py27_0.tar.bz2', r.index['sympy-0.7.1-py27_0.tar.bz2']) sympy_072 = Package('sympy-0.7.2-py27_0.tar.bz2', r.index['sympy-0.7.2-py27_0.tar.bz2'])
problem_statement:
NoPackagesFound does not work correctly for missing recursive dependencies

created_at: 2014-04-14T22:05:18
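This follow-up makes the missing-dependency handling recursive: a candidate is dropped only if its own dependency closure fails, and a spec fails only once every one of its candidates has been dropped. A hedged sketch of that recursion, where `candidates_for` and `depends_of` are hypothetical helpers rather than conda's API:

```python
# Recursive variant of the skip-broken-candidates pattern, mirroring the
# patched all_deps/add_dependents. `candidates_for` and `depends_of` are
# hypothetical helpers, not conda's real API.

class NoPackagesFound(RuntimeError):
    """Carries the spec that could not be satisfied."""
    def __init__(self, msg, pkg):
        super(NoPackagesFound, self).__init__(msg)
        self.pkg = pkg

def all_deps(root, candidates_for, depends_of):
    """Collect the dependency closure of `root`, skipping candidates whose
    own (recursive) dependencies are missing. Raises only when every
    candidate for some spec turned out to be broken."""
    res = {}

    def add_dependents(pkg):
        for spec in depends_of(pkg):
            found, notfound = False, []
            for cand in candidates_for(spec):
                if cand in res:
                    found = True
                    continue
                try:
                    add_dependents(cand)        # recurse *before* accepting
                except NoPackagesFound as e:
                    if e.pkg not in notfound:
                        notfound.append(e.pkg)  # remember, but keep trying
                else:
                    res[cand] = True
                    found = True
            if not found:
                raise NoPackagesFound(
                    "Could not find some dependencies for %s: %s"
                    % (spec, ", ".join(notfound)), str(spec))

    add_dependents(root)
    return res
```

Recursing before accepting a candidate is what lets the new anotherpackage tests pass: anotherpackage 2.0 is kept only because a working mypackage exists somewhere in its closure.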
repo: conda/conda
pull_number: 682
instance_id: conda__conda-682
issue_numbers: ["400"]
base_commit: 102471a0fe64749a94ea0c1c9ddc45d17fd4f2d4
patch:
diff --git a/conda/connection.py b/conda/connection.py --- a/conda/connection.py +++ b/conda/connection.py @@ -7,106 +7,368 @@ from __future__ import print_function, division, absolute_import from logging import getLogger +import re +import mimetypes +import os +import email +import base64 +import ftplib +import cgi +from io import BytesIO -from conda.compat import PY3, string_types -from conda.compat import iteritems, input +from conda.compat import urlparse, StringIO from conda.config import get_proxy_servers -if PY3: - # Python 3.x - import urllib.request as urllib2 - from urllib import parse as urlparse -else: - # Python 2.x - import urllib2 - import urlparse +import requests +RETRIES = 3 log = getLogger(__name__) -# 1. get proxies if needed. a proxy for each protocol -# 2. handle authentication -# basic, digest, and nltm (windows) authentications should be handled. -# 3. handle any protocol -# typically http, https, ftp - -# 1. get the proxies list -# urllib can only get proxies on windows and mac. so on linux or if the user -# wants to specify the proxy there has to be a way to do that. TODO get proxies -# from condarc and overrwrite any system proxies -# the proxies are in a dict {'http':'http://proxy:8080'} -# protocol:proxyserver -proxies_dict = get_proxy_servers() or urllib2.getproxies() - -#2. handle authentication - -proxypwdmgr = urllib2.HTTPPasswordMgrWithDefaultRealm() - - -def get_userandpass(proxytype='', realm=''): - """a function to get username and password from terminal. - can be replaced with anything like some gui""" - import getpass - - uname = input(proxytype + ' proxy username:') - pword = getpass.getpass() - return uname, pword - - -# a procedure that needs to be executed with changes to handlers -def installopener(): - opener = urllib2.build_opener( - urllib2.ProxyHandler(proxies_dict), - urllib2.ProxyBasicAuthHandler(proxypwdmgr), - urllib2.ProxyDigestAuthHandler(proxypwdmgr), - urllib2.HTTPHandler, - ) - # digest auth may not work with all proxies - # http://bugs.python.org/issue16095 - # could add windows/nltm authentication here - #opener=urllib2.build_opener(urllib2.ProxyHandler(proxies_dict), urllib2.HTTPHandler) - - urllib2.install_opener(opener) - - -firstconnection = True -#i made this func so i wouldn't alter the original code much -def connectionhandled_urlopen(request): - """handles aspects of establishing the connection with the remote""" - - installopener() - - if isinstance(request, string_types): - request = urllib2.Request(request) - - try: - return urllib2.urlopen(request) - - except urllib2.HTTPError as HTTPErrorinst: - if HTTPErrorinst.code in (407, 401): - # proxy authentication error - # ...(need to auth) or supplied creds failed - if HTTPErrorinst.code == 401: - log.debug('proxy authentication failed') - #authenticate and retry - uname, pword = get_userandpass() - #assign same user+pwd to all protocols (a reasonable assumption) to - #decrease user input. 
otherwise you'd need to assign a user/pwd to - #each proxy type - if firstconnection == True: - for aprotocol, aproxy in iteritems(proxies_dict): - proxypwdmgr.add_password(None, aproxy, uname, pword) - firstconnection == False - else: #...assign a uname pwd for the specific protocol proxy type - assert(firstconnection == False) - protocol = urlparse.urlparse(request.get_full_url()).scheme - proxypwdmgr.add_password(None, proxies_dict[protocol], - uname, pword) - installopener() - # i'm uncomfortable with this - # but i just want to exec to start from the top again - return connectionhandled_urlopen(request) - raise - - except: - raise +# Modified from code in pip/download.py: + +# Copyright (c) 2008-2014 The pip developers (see AUTHORS.txt file) +# +# Permission is hereby granted, free of charge, to any person obtaining +# a copy of this software and associated documentation files (the +# "Software"), to deal in the Software without restriction, including +# without limitation the rights to use, copy, modify, merge, publish, +# distribute, sublicense, and/or sell copies of the Software, and to +# permit persons to whom the Software is furnished to do so, subject to +# the following conditions: +# +# The above copyright notice and this permission notice shall be +# included in all copies or substantial portions of the Software. +# +# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE +# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION +# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION +# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. + +class CondaSession(requests.Session): + + timeout = None + + def __init__(self, *args, **kwargs): + retries = kwargs.pop('retries', RETRIES) + + super(CondaSession, self).__init__(*args, **kwargs) + + self.proxies = get_proxy_servers() + + # Configure retries + if retries: + http_adapter = requests.adapters.HTTPAdapter(max_retries=retries) + self.mount("http://", http_adapter) + self.mount("https://", http_adapter) + + # Enable file:// urls + self.mount("file://", LocalFSAdapter()) + + # Enable ftp:// urls + self.mount("ftp://", FTPAdapter()) + +class LocalFSAdapter(requests.adapters.BaseAdapter): + + def send(self, request, stream=None, timeout=None, verify=None, cert=None, + proxies=None): + pathname = url_to_path(request.url) + + resp = requests.models.Response() + resp.status_code = 200 + resp.url = request.url + + try: + stats = os.stat(pathname) + except OSError as exc: + resp.status_code = 404 + resp.raw = exc + else: + modified = email.utils.formatdate(stats.st_mtime, usegmt=True) + content_type = mimetypes.guess_type(pathname)[0] or "text/plain" + resp.headers = requests.structures.CaseInsensitiveDict({ + "Content-Type": content_type, + "Content-Length": stats.st_size, + "Last-Modified": modified, + }) + + resp.raw = open(pathname, "rb") + resp.close = resp.raw.close + + return resp + + def close(self): + pass + +def url_to_path(url): + """ + Convert a file: URL to a path. 
+ """ + assert url.startswith('file:'), ( + "You can only turn file: urls into filenames (not %r)" % url) + path = url[len('file:'):].lstrip('/') + path = urlparse.unquote(path) + if _url_drive_re.match(path): + path = path[0] + ':' + path[2:] + else: + path = '/' + path + return path + +_url_drive_re = re.compile('^([a-z])[:|]', re.I) + +# Taken from requests-ftp +# (https://github.com/Lukasa/requests-ftp/blob/master/requests_ftp/ftp.py) + +# Copyright 2012 Cory Benfield + +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. + +class FTPAdapter(requests.adapters.BaseAdapter): + '''A Requests Transport Adapter that handles FTP urls.''' + def __init__(self): + super(FTPAdapter, self).__init__() + + # Build a dictionary keyed off the methods we support in upper case. + # The values of this dictionary should be the functions we use to + # send the specific queries. + self.func_table = {'LIST': self.list, + 'RETR': self.retr, + 'STOR': self.stor, + 'NLST': self.nlst, + 'GET': self.retr,} + + def send(self, request, **kwargs): + '''Sends a PreparedRequest object over FTP. Returns a response object. + ''' + # Get the authentication from the prepared request, if any. + auth = self.get_username_password_from_header(request) + + # Next, get the host and the path. + host, port, path = self.get_host_and_path_from_url(request) + + # Sort out the timeout. + timeout = kwargs.get('timeout', None) + + # Establish the connection and login if needed. + self.conn = ftplib.FTP() + self.conn.connect(host, port, timeout) + + if auth is not None: + self.conn.login(auth[0], auth[1]) + else: + self.conn.login() + + # Get the method and attempt to find the function to call. + resp = self.func_table[request.method](path, request) + + # Return the response. + return resp + + def close(self): + '''Dispose of any internal state.''' + # Currently this is a no-op. + pass + + def list(self, path, request): + '''Executes the FTP LIST command on the given path.''' + data = StringIO() + + # To ensure the StringIO gets cleaned up, we need to alias its close + # method to the release_conn() method. This is a dirty hack, but there + # you go. + data.release_conn = data.close + + self.conn.cwd(path) + code = self.conn.retrbinary('LIST', data_callback_factory(data)) + + # When that call has finished executing, we'll have all our data. + response = build_text_response(request, data, code) + + # Close the connection. + self.conn.close() + + return response + + def retr(self, path, request): + '''Executes the FTP RETR command on the given path.''' + data = BytesIO() + + # To ensure the BytesIO gets cleaned up, we need to alias its close + # method. See self.list(). + data.release_conn = data.close + + code = self.conn.retrbinary('RETR ' + path, data_callback_factory(data)) + + response = build_binary_response(request, data, code) + + # Close the connection. + self.conn.close() + + return response + + def stor(self, path, request): + '''Executes the FTP STOR command on the given path.''' + + # First, get the file handle. 
We assume (bravely) + # that there is only one file to be sent to a given URL. We also + # assume that the filename is sent as part of the URL, not as part of + # the files argument. Both of these assumptions are rarely correct, + # but they are easy. + data = parse_multipart_files(request) + + # Split into the path and the filename. + path, filename = os.path.split(path) + + # Switch directories and upload the data. + self.conn.cwd(path) + code = self.conn.storbinary('STOR ' + filename, data) + + # Close the connection and build the response. + self.conn.close() + + response = build_binary_response(request, BytesIO(), code) + + return response + + def nlst(self, path, request): + '''Executes the FTP NLST command on the given path.''' + data = StringIO() + + # Alias the close method. + data.release_conn = data.close + + self.conn.cwd(path) + code = self.conn.retrbinary('NLST', data_callback_factory(data)) + + # When that call has finished executing, we'll have all our data. + response = build_text_response(request, data, code) + + # Close the connection. + self.conn.close() + + return response + + def get_username_password_from_header(self, request): + '''Given a PreparedRequest object, reverse the process of adding HTTP + Basic auth to obtain the username and password. Allows the FTP adapter + to piggyback on the basic auth notation without changing the control + flow.''' + auth_header = request.headers.get('Authorization') + + if auth_header: + # The basic auth header is of the form 'Basic xyz'. We want the + # second part. Check that we have the right kind of auth though. + encoded_components = auth_header.split()[:2] + if encoded_components[0] != 'Basic': + raise AuthError('Invalid form of Authentication used.') + else: + encoded = encoded_components[1] + + # Decode the base64 encoded string. + decoded = base64.b64decode(encoded) + + # The string is of the form 'username:password'. Split on the + # colon. + components = decoded.split(':') + username = components[0] + password = components[1] + return (username, password) + else: + # No auth header. Return None. + return None + + def get_host_and_path_from_url(self, request): + '''Given a PreparedRequest object, split the URL in such a manner as to + determine the host and the path. This is a separate method to wrap some + of urlparse's craziness.''' + url = request.url + # scheme, netloc, path, params, query, fragment = urlparse(url) + parsed = urlparse.urlparse(url) + path = parsed.path + + # If there is a slash on the front of the path, chuck it. + if path[0] == '/': + path = path[1:] + + host = parsed.hostname + port = parsed.port or 0 + + return (host, port, path) + +def data_callback_factory(variable): + '''Returns a callback suitable for use by the FTP library. This callback + will repeatedly save data into the variable provided to this function. 
This + variable should be a file-like structure.''' + def callback(data): + variable.write(data) + return + + return callback + +class AuthError(Exception): + '''Denotes an error with authentication.''' + pass + +def build_text_response(request, data, code): + '''Build a response for textual data.''' + return build_response(request, data, code, 'ascii') + +def build_binary_response(request, data, code): + '''Build a response for data whose encoding is unknown.''' + return build_response(request, data, code, None) + +def build_response(request, data, code, encoding): + '''Builds a response object from the data returned by ftplib, using the + specified encoding.''' + response = requests.Response() + + response.encoding = encoding + + # Fill in some useful fields. + response.raw = data + response.url = request.url + response.request = request + response.status_code = code.split()[0] + + # Make sure to seek the file-like raw object back to the start. + response.raw.seek(0) + + # Run the response hook. + response = requests.hooks.dispatch_hook('response', request.hooks, response) + return response + +def parse_multipart_files(request): + '''Given a prepared reqest, return a file-like object containing the + original data. This is pretty hacky.''' + # Start by grabbing the pdict. + _, pdict = cgi.parse_header(request.headers['Content-Type']) + + # Now, wrap the multipart data in a BytesIO buffer. This is annoying. + buf = BytesIO() + buf.write(request.body) + buf.seek(0) + + # Parse the data. Simply take the first file. + data = cgi.parse_multipart(buf, pdict) + _, filedata = data.popitem() + buf.close() + + # Get a BytesIO now, and write the file into it. + buf = BytesIO() + buf.write(''.join(filedata)) + buf.seek(0) + + return buf diff --git a/conda/fetch.py b/conda/fetch.py --- a/conda/fetch.py +++ b/conda/fetch.py @@ -8,7 +8,6 @@ import os import bz2 -import sys import json import shutil import hashlib @@ -18,22 +17,18 @@ from conda import config from conda.utils import memoized -from conda.connection import connectionhandled_urlopen -from conda.compat import PY3, itervalues, get_http_value +from conda.connection import CondaSession +from conda.compat import itervalues, get_http_value from conda.lock import Locked -if PY3: - import urllib.request as urllib2 -else: - import urllib2 - +import requests log = getLogger(__name__) dotlog = getLogger('dotupdate') stdoutlog = getLogger('stdoutlog') +stderrlog = getLogger('stderrlog') fail_unknown_host = False -retries = 3 def create_cache_dir(): @@ -55,9 +50,11 @@ def add_http_value_to_dict(u, http_key, d, dict_key): d[dict_key] = value -def fetch_repodata(url, cache_dir=None, use_cache=False): +def fetch_repodata(url, cache_dir=None, use_cache=False, session=None): dotlog.debug("fetching repodata: %s ..." 
% url) + session = session or CondaSession() + cache_path = join(cache_dir or create_cache_dir(), cache_fn_url(url)) try: cache = json.load(open(cache_path)) @@ -67,33 +64,32 @@ def fetch_repodata(url, cache_dir=None, use_cache=False): if use_cache: return cache - request = urllib2.Request(url + 'repodata.json.bz2') - if '_etag' in cache: - request.add_header('If-None-Match', cache['_etag']) - if '_mod' in cache: - request.add_header('If-Modified-Since', cache['_mod']) + headers = {} + if "_tag" in cache: + headers["If-None-Match"] = cache["_etag"] + if "_mod" in cache: + headers["If-Modified-Since"] = cache["_mod"] try: - u = connectionhandled_urlopen(request) - data = u.read() - u.close() - cache = json.loads(bz2.decompress(data).decode('utf-8')) - add_http_value_to_dict(u, 'Etag', cache, '_etag') - add_http_value_to_dict(u, 'Last-Modified', cache, '_mod') + resp = session.get(url + 'repodata.json.bz2', headers=headers) + resp.raise_for_status() + if resp.status_code != 304: + cache = json.loads(bz2.decompress(resp.content).decode('utf-8')) except ValueError: raise RuntimeError("Invalid index file: %srepodata.json.bz2" % url) - except urllib2.HTTPError as e: - msg = "HTTPError: %d %s %s\n" % (e.code, e.msg, url) + except requests.exceptions.HTTPError as e: + msg = "HTTPError: %s: %s\n" % (e, url) log.debug(msg) - if e.code != 304: - raise RuntimeError(msg) + raise RuntimeError(msg) - except urllib2.URLError as e: - sys.stderr.write("Error: unknown host: %s (%r)\n" % (url, e)) + except requests.exceptions.ConnectionError as e: + msg = "Connection error: %s: %s\n" % (e, url) + stderrlog.info('Could not connect to %s\n' % url) + log.debug(msg) if fail_unknown_host: - sys.exit(1) + raise RuntimeError(msg) cache['_url'] = url try: @@ -104,16 +100,16 @@ def fetch_repodata(url, cache_dir=None, use_cache=False): return cache or None - @memoized def fetch_index(channel_urls, use_cache=False, unknown=False): log.debug('channel_urls=' + repr(channel_urls)) index = {} stdoutlog.info("Fetching package metadata: ") + session = CondaSession() for url in reversed(channel_urls): if config.allowed_channels and url not in config.allowed_channels: sys.exit("\nError: URL '%s' not in allowed channels" % url) - repodata = fetch_repodata(url, use_cache=use_cache) + repodata = fetch_repodata(url, use_cache=use_cache, session=session) if repodata is None: continue new_index = repodata['packages'] @@ -141,107 +137,80 @@ def fetch_index(channel_urls, use_cache=False, unknown=False): return index - -def fetch_pkg(info, dst_dir=None): +def fetch_pkg(info, dst_dir=None, session=None): ''' fetch a package given by `info` and store it into `dst_dir` ''' if dst_dir is None: dst_dir = config.pkgs_dirs[0] + session = session or CondaSession() + fn = '%(name)s-%(version)s-%(build)s.tar.bz2' % info url = info['channel'] + fn log.debug("url=%r" % url) path = join(dst_dir, fn) - pp = path + '.part' + + download(url, path, session=session, md5=info['md5'], urlstxt=True) + +def download(url, dst_path, session=None, md5=None, urlstxt=False): + pp = dst_path + '.part' + dst_dir = os.path.split(dst_path)[0] + session = session or CondaSession() with Locked(dst_dir): - for x in range(retries): - try: - fi = connectionhandled_urlopen(url) - except IOError: - log.debug("attempt %d failed at urlopen" % x) - continue - if fi is None: - log.debug("could not fetch (urlopen returned None)") - continue - n = 0 - h = hashlib.new('md5') - getLogger('fetch.start').info((fn, info['size'])) - need_retry = False - try: - fo = open(pp, 'wb') - 
except IOError: - raise RuntimeError("Could not open %r for writing. " - "Permissions problem or missing directory?" % pp) - while True: - try: - chunk = fi.read(16384) - except IOError: - need_retry = True - break - if not chunk: - break - try: - fo.write(chunk) - except IOError: - raise RuntimeError("Failed to write to %r." % pp) - h.update(chunk) - n += len(chunk) - getLogger('fetch.update').info(n) + try: + resp = session.get(url, stream=True) + except IOError: + raise RuntimeError("Could not open '%s'" % url) + except requests.exceptions.HTTPError as e: + msg = "HTTPError: %s: %s\n" % (e, url) + log.debug(msg) + raise RuntimeError(msg) - fo.close() - if need_retry: - continue + size = resp.headers.get('Content-Length') + if size: + size = int(size) + fn = basename(dst_path) + getLogger('fetch.start').info((fn[:14], size)) + + n = 0 + if md5: + h = hashlib.new('md5') + try: + with open(pp, 'wb') as fo: + for chunk in resp.iter_content(2**14): + try: + fo.write(chunk) + except IOError: + raise RuntimeError("Failed to write to %r." % pp) + if md5: + h.update(chunk) + n += len(chunk) + if size: + getLogger('fetch.update').info(n) + except IOError: + raise RuntimeError("Could not open %r for writing. " + "Permissions problem or missing directory?" % pp) - fi.close() + if size: getLogger('fetch.stop').info(None) - if h.hexdigest() != info['md5']: - raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)" % (fn, h.hexdigest(), info['md5'])) - try: - os.rename(pp, path) - except OSError: - raise RuntimeError("Could not rename %r to %r." % (pp, path)) + + if md5 and h.hexdigest() != md5: + raise RuntimeError("MD5 sums mismatch for download: %s (%s != %s)" % (url, h.hexdigest(), md5)) + + try: + os.rename(pp, dst_path) + except OSError as e: + raise RuntimeError("Could not rename %r to %r: %r" % (pp, + dst_path, e)) + + if urlstxt: try: with open(join(dst_dir, 'urls.txt'), 'a') as fa: fa.write('%s\n' % url) except IOError: pass - return - - raise RuntimeError("Could not locate '%s'" % url) - - -def download(url, dst_path): - try: - u = connectionhandled_urlopen(url) - except IOError: - raise RuntimeError("Could not open '%s'" % url) - except ValueError as e: - raise RuntimeError(e) - - size = get_http_value(u, 'Content-Length') - if size: - size = int(size) - fn = basename(dst_path) - getLogger('fetch.start').info((fn[:14], size)) - - n = 0 - fo = open(dst_path, 'wb') - while True: - chunk = u.read(16384) - if not chunk: - break - fo.write(chunk) - n += len(chunk) - if size: - getLogger('fetch.update').info(n) - fo.close() - - u.close() - if size: - getLogger('fetch.stop').info(None) - class TmpDownload(object): """
TLS does not appear to be verified As far as I can tell, conda is just using urllib2, which doesn't verify SSL at all in Python 2.x, and in Python 3.x it doesn't do so by default. This means that even for recipes which use an https link without an md5 hash, there is a simple MITM code execution attack.
How can we fix it? Do we need to add several more dependencies to conda? How can it be enabled in Python 3? And why doesn't it do it by default?

So there are a few options here: the lowest impact but easiest to get wrong is to backport the ssl stuff from Python 3 into Python 2 and include your own root certificates. This is what pip did in version 1.3-1.4 and it was done incorrectly. My recommendation would be to either use urllib3 directly or to use requests. Pip went the requests route, which allows the entire Python community to sort of centralize its cross-platform TLS verification. Python 3 doesn't do it by default because it doesn't have any root certificates to verify against; I believe that Christian Heimes is hoping to fix this for 3.4 (or has already...).

OK. Hopefully SNI is not needed here, as I found that it required three additional packages and a patch to requests.

SNI has pretty bad support outside of browsers. It is a nice-to-have but not a requirement. The difference between SNI and not SNI is that HTTPS urls will require a dedicated IP address per certificate (but a certificate can contain multiple host names), whereas SNI allows you to have multiple certificates attached to one IP address. The other thing about missing SNI is that it'll "fail closed" instead of "fail open", so if someone has a repo or url that needs SNI they'll get an error message at any attempt to use that repo or url instead of it being silently insecure.

FWIW it appears that the default Continuum repos are also HTTP only, which should change as well: they should be available via HTTPS and conda should default to using HTTPS for them. Otherwise I can inject arbitrary package contents to people installing from those repositories if I have a successful MITM.

I agree with @dstufft that standardising on requests is a good idea - the main reason is that the inclusion of the cert bundle addresses the fact that Python prior to 3.4 can't consistently access the system certs, and the Python Security Response Team will be keeping a close eye on that cert bundle now that it has a security impact on CPython itself (due to the pip bootstrapping in PEP 453).

@asmeurer asked me on Twitter if urllib3 was fine to solve this. I took a look and urllib3 does _not_ ship its own certificates. I would recommend that Conda does _not_ try to get into the business of shipping certificates. You have to make sure you're on top of whenever they get updated, parsing them can be difficult to do properly, etc. It's a tricky business to get correct. I would recommend, if you can, that you switch to requests and try to keep it updated so that you get any new certificates automatically. However if you do want to use urllib3, I recommend using the certifi package to handle certs.

Let's just use requests.

Will hot-swapping `urllib2` with `requests.packages.urllib3` work? I know requests is better API-wise, but we already have the code there for urllib, so this would again be a one-line fix.

`requests.packages.urllib3` is equivalent to just using `urllib3`, so all of the above still applies. requests just includes an unmodified copy of urllib3 as part of its source.

OK, I'm looking at requests. What is the best way to do caching with requests? Will it be easy to port https://github.com/conda/conda/blob/master/conda/fetch.py#L71? If not, is there an easy way to do it without adding an additional dependency like https://github.com/Lukasa/httpcache or https://github.com/ionrock/cachecontrol? Or is using one of those the best way to go?
Oh hey, this is an opportune time for that question, as I'm just looking at ripping out pip's custom download cache and replacing it with a generic HTTP cache. I've recently submitted a few pull requests to CacheControl and I would suggest using it if you can. It's a good library, and with the PRs I've submitted I was able to "drop it" into pip's requests-based code without any trouble. However if you want to keep maintaining your existing cache, I did a quick once-over of a port of that code to requests: http://bpaste.net/show/Y5IJFRpQEwjZxswCPGwR/ I removed the error handling just because I'm lazy.

Pip ticket in case you're wondering: pypa/pip#1732

Thanks. That will save me a lot of time I'm sure. We only use this to cache the repo metadata. Packages themselves are cached manually, using the md5 which is stored in the repodata. I'll probably just work off your modification for now. A good motivation for moving to CacheControl would probably be if it ends up being faster than the current method somehow. I'm not sure if that's possible, but for me, with about 10 channels, it takes several seconds to load the repo metadata with every command. @srossross may also have ideas about this.

There is more than this. We do need proper error handling (we don't want users to ever see a traceback from conda, unless there is a bug), and it also needs to be able to handle proxies (I'm hoping this is easy with requests), because quite a few users of conda are stuck behind corporate firewalls.

Yea, I just left out error handling because I'm lazy :) I'm not sure if CacheControl would be faster than the current code; it'd likely mostly just be shifting logic around so you don't have to handle it yourself. Requests does make proxies very easy! In fact, by default it will respect the `HTTP_PROXY` and `HTTPS_PROXY` environment variables, so you get proxies for "free" through that. In pip we also have a command-line flag that lets people do it that way too: http://docs.python-requests.org/en/latest/user/advanced/#proxies. Although if those corporate firewalls are ISA-based, I think you have to install some client or something to make it work. You'll most likely find your HTTP handling code shrinks because requests handles a lot of that stuff out of the box.

One thing that's odd about requests is that `requests.get('http://repo.continuum.io/pkgs/free/osx-64/repodata.json.bz2')` returns a `str` rather than `bytes` in Python 3. Do you know any way to fix this?

Yeah, I'm pretty sure the real way to improve speed is to have some API on the binstar side that conda could use to get the merged repodata for several channels with one request. That's something that @srossross and I have discussed.

Sorry, that's `resp.text`. Maybe there is some other attribute of the Response object I should be using.

It should return a Response object, which has a `text` attribute, which will be the content of the request decoded with whatever encoding it detects the text of the response is in, and a `content` attribute, which is just the (mostly raw) bytes from the wire. I say mostly raw because it'll automatically do gzip decoding even for `resp.content` if the server has the `Content-Encoding: gzip` header.

Oh, I see, it's `resp.content`. I guess it makes sense that "text" would be a str, even if it isn't really text :) Is there a similar header for bz2?
Also if you want to do a progress bar, you can do:

``` python
resp = requests.get("https:///", stream=True)

downloaded = b""
for i, chunk in enumerate(resp.iter_content(4096)):
    print("Chunk #{}".format(i))
    downloaded += chunk
```

The above will open a connection, but _not_ download the entire response body; it will instead download a 4096-byte chunk on each loop iteration.

OK, I haven't gotten to the pkg downloading code yet. That's next. We hardly need a progress bar for fetching the repodata. Thanks for the advice so far.

Not by default; HTTP connections typically only use deflate or gzip (both of those will be decoded automatically). Anything else won't be. Although if you wanted it handled automatically, you could probably make requests do it by making your own transport adapter that passed one into the urllib3 HTTPResponse, I think.

Also if you're doing multiple connections to the same host, you'll want to use a requests session.

``` python
import requests

session = requests.session()
session.get("https://....")
```

The benefit is that you'll get connection pooling and keep-alive connections between HTTP requests (so you won't have to do the TLS handshake for each one, things will generally be much faster, etc). You can also use requests sessions to share some settings to prevent you from having to pass them into each function call. (See http://docs.python-requests.org/en/latest/api/#sessionapi)

I didn't notice a speed difference using session, but I'll use it anyway.
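Pulling those threads together, here is a minimal sketch of what the repodata fetch could look like on requests: one shared session, an ETag-based conditional GET, and `raise_for_status` for errors. This is an illustration of the approach discussed above, not conda's actual code, and the in-memory `cache` dict stands in for the on-disk cache file:

``` python
import bz2
import json

import requests

session = requests.Session()  # reused across channels for connection pooling
cache = {}  # stand-in for cached repodata that would be loaded from disk

def fetch_repodata(url):
    headers = {}
    if '_etag' in cache:
        headers['If-None-Match'] = cache['_etag']
    resp = session.get(url + 'repodata.json.bz2', headers=headers)
    resp.raise_for_status()
    if resp.status_code == 304:  # server says our cached copy is current
        return cache
    repodata = json.loads(bz2.decompress(resp.content).decode('utf-8'))
    if 'Etag' in resp.headers:   # headers are case-insensitive in requests
        repodata['_etag'] = resp.headers['Etag']
    return repodata
```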
2014-04-23T22:43:45
conda/conda
707
conda__conda-707
[ "670" ]
ad6c5ffe86bb2eac4add2c4be7b8987f0f14c453
diff --git a/conda/lock.py b/conda/lock.py --- a/conda/lock.py +++ b/conda/lock.py @@ -19,7 +19,7 @@ import os from os.path import join import glob - +from time import sleep LOCKFN = '.conda_lock' @@ -36,15 +36,28 @@ def __init__(self, path): self.remove = True def __enter__(self): - files = glob.glob(self.pattern) - if files and not files[0].endswith(self.end): - # Keep the string "LOCKERROR" in this string so that external - # programs can look for it. - raise RuntimeError("""\ -LOCKERROR: It looks like conda is already doing something. -The lock %s was found. Wait for it to finish before continuing. -If you are sure that conda is not running, remove it and try again. -You can also use: $ conda clean --lock""" % self.lock_path) + retries = 10 + # Keep the string "LOCKERROR" in this string so that external + # programs can look for it. + lockstr = ("""\ + LOCKERROR: It looks like conda is already doing something. + The lock %s was found. Wait for it to finish before continuing. + If you are sure that conda is not running, remove it and try again. + You can also use: $ conda clean --lock""" % self.lock_path) + sleeptime = 1 + while retries: + files = glob.glob(self.pattern) + if files and not files[0].endswith(self.end): + print(lockstr) + print("Sleeping for %s seconds" % sleeptime) + sleep(sleeptime) + sleeptime *= 2 + retries -= 1 + else: + break + else: + print("Exceeded max retries, giving up") + raise RuntimeError(lockstr) if not files: try:
Add ability to keep retrying with a lock error The yum installer (IIRC) has a nice feature that it will keep trying every 10 seconds or so if there is a lock error. This could be useful for conda.
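A minimal sketch of the requested behavior, roughly what the patch above implements (the lock-file pattern and retry counts are illustrative):

``` python
import glob
from time import sleep

def wait_for_lock(pattern, retries=10):
    sleeptime = 1
    while retries:
        if not glob.glob(pattern):  # lock file gone, safe to proceed
            return
        print("Lock found, sleeping for %s seconds" % sleeptime)
        sleep(sleeptime)
        sleeptime *= 2  # exponential backoff instead of a fixed interval
        retries -= 1
    raise RuntimeError("Exceeded max retries, giving up")
```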
2014-05-02T16:20:55
conda/conda
739
conda__conda-739
[ "731" ]
27441fe05630c37e7225f275ee041411f995aae5
diff --git a/conda/config.py b/conda/config.py --- a/conda/config.py +++ b/conda/config.py @@ -244,7 +244,10 @@ def get_allowed_channels(): def get_proxy_servers(): res = rc.get('proxy_servers') - if res is None or isinstance(res, dict): + if res is None: + import requests + return requests.utils.getproxies() + if isinstance(res, dict): return res sys.exit("Error: proxy_servers setting not a mapping") diff --git a/conda/connection.py b/conda/connection.py --- a/conda/connection.py +++ b/conda/connection.py @@ -372,3 +372,51 @@ def parse_multipart_files(request): buf.seek(0) return buf + +# Taken from urllib3 (actually +# https://github.com/shazow/urllib3/pull/394). Once it is fully upstreamed to +# requests.packages.urllib3 we can just use that. + + +def unparse_url(U): + """ + Convert a :class:`.Url` into a url + + The input can be any iterable that gives ['scheme', 'auth', 'host', + 'port', 'path', 'query', 'fragment']. Unused items should be None. + + This function should more or less round-trip with :func:`.parse_url`. The + returned url may not be exactly the same as the url inputted to + :func:`.parse_url`, but it should be equivalent by the RFC (e.g., urls + with a blank port). + + + Example: :: + + >>> Url = parse_url('http://google.com/mail/') + >>> unparse_url(Url) + 'http://google.com/mail/' + >>> unparse_url(['http', 'username:password', 'host.com', 80, + ... '/path', 'query', 'fragment']) + 'http://username:[email protected]:80/path?query#fragment' + """ + scheme, auth, host, port, path, query, fragment = U + url = '' + + # We use "is not None" we want things to happen with empty strings (or 0 port) + if scheme is not None: + url = scheme + '://' + if auth is not None: + url += auth + '@' + if host is not None: + url += host + if port is not None: + url += ':' + str(port) + if path is not None: + url += path + if query is not None: + url += '?' 
+ query + if fragment is not None: + url += '#' + fragment + + return url diff --git a/conda/fetch.py b/conda/fetch.py --- a/conda/fetch.py +++ b/conda/fetch.py @@ -15,12 +15,13 @@ from logging import getLogger from os.path import basename, isdir, join import sys -from multiprocessing.pool import ThreadPool +import getpass +# from multiprocessing.pool import ThreadPool from conda import config from conda.utils import memoized -from conda.connection import CondaSession -from conda.compat import itervalues, get_http_value +from conda.connection import CondaSession, unparse_url +from conda.compat import itervalues, get_http_value, input from conda.lock import Locked import requests @@ -73,20 +74,34 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None): headers["If-Modified-Since"] = cache["_mod"] try: - resp = session.get(url + 'repodata.json.bz2', headers=headers) + resp = session.get(url + 'repodata.json.bz2', headers=headers, proxies=session.proxies) resp.raise_for_status() if resp.status_code != 304: cache = json.loads(bz2.decompress(resp.content).decode('utf-8')) - except ValueError: - raise RuntimeError("Invalid index file: %srepodata.json.bz2" % url) + except ValueError as e: + raise RuntimeError("Invalid index file: %srepodata.json.bz2: %s" % + (url, e)) except requests.exceptions.HTTPError as e: + if e.response.status_code == 407: # Proxy Authentication Required + handle_proxy_407(url, session) + # Try again + return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session) msg = "HTTPError: %s: %s\n" % (e, url) log.debug(msg) raise RuntimeError(msg) except requests.exceptions.ConnectionError as e: + # requests isn't so nice here. For whatever reason, https gives this + # error and http gives the above error. Also, there is no status_code + # attribute here. We have to just check if it looks like 407. See + # https://github.com/kennethreitz/requests/issues/2061. + if "407" in str(e): # Proxy Authentication Required + handle_proxy_407(url, session) + # Try again + return fetch_repodata(url, cache_dir=cache_dir, use_cache=use_cache, session=session) + msg = "Connection error: %s: %s\n" % (e, url) stderrlog.info('Could not connect to %s\n' % url) log.debug(msg) @@ -102,10 +117,31 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None): return cache or None +def handle_proxy_407(url, session): + """ + Prompts the user for the proxy username and password and modifies the + proxy in the session object to include it. + """ + # We could also use HTTPProxyAuth, but this does not work with https + # proxies (see https://github.com/kennethreitz/requests/issues/2061). 
+ scheme = requests.packages.urllib3.util.url.parse_url(url).scheme + username, passwd = get_proxy_username_and_pass(scheme) + session.proxies[scheme] = add_username_and_pass_to_url(session.proxies[scheme], username, passwd) + +def add_username_and_pass_to_url(url, username, passwd): + urlparts = list(requests.packages.urllib3.util.url.parse_url(url)) + urlparts[1] = username + ':' + passwd + return unparse_url(urlparts) + +def get_proxy_username_and_pass(scheme): + username = input("\n%s proxy username: " % scheme) + passwd = getpass.getpass("Password:") + return username, passwd + @memoized def fetch_index(channel_urls, use_cache=False, unknown=False): log.debug('channel_urls=' + repr(channel_urls)) - pool = ThreadPool(5) + # pool = ThreadPool(5) index = {} stdoutlog.info("Fetching package metadata: ") session = CondaSession() @@ -171,15 +207,29 @@ def download(url, dst_path, session=None, md5=None, urlstxt=False): with Locked(dst_dir): try: - resp = session.get(url, stream=True) + resp = session.get(url, stream=True, proxies=session.proxies) resp.raise_for_status() - except IOError: - raise RuntimeError("Could not open '%s'" % url) except requests.exceptions.HTTPError as e: + if e.response.status_code == 407: # Proxy Authentication Required + handle_proxy_407(url, session) + # Try again + return download(url, dst_path, session=session, md5=md5, urlstxt=urlstxt) msg = "HTTPError: %s: %s\n" % (e, url) log.debug(msg) raise RuntimeError(msg) + except requests.exceptions.ConnectionError as e: + # requests isn't so nice here. For whatever reason, https gives this + # error and http gives the above error. Also, there is no status_code + # attribute here. We have to just check if it looks like 407. See + # https://github.com/kennethreitz/requests/issues/2061. + if "407" in str(e): # Proxy Authentication Required + handle_proxy_407(url, session) + # Try again + return download(url, dst_path, session=session, md5=md5, urlstxt=urlstxt) + except IOError as e: + raise RuntimeError("Could not open '%s': %s" % (url, e)) + size = resp.headers.get('Content-Length') if size: size = int(size)
conda does not prompt for proxy username and password ``` [ COMPLETE ] |#################################################| 100% The batch file cannot be found. C:\Code>conda update conda Fetching package metadata: .Error: HTTPError: 407 Client Error: Proxy Authentication Required: http://repo.continuum.io/pkgs/pro/win-64/ ```
Do you have proxy settings set in your `.condarc` or using the `HTTP_PROXY` environment variable? no. I used to have HTTP_PROXY set but it was automatically removed by a company pushed OS update. To avoid reliance on it I enter the proxy id and pw at the prompt. OK. I guess the new requests code regressed in that it no longer asks for it at a prompt. The workaround for now is to just add it to your .condarc (http://conda.pydata.org/docs/config.html), or to set that environment variable. I'm trying to understand how the code worked before. Did you always have the proxy server (and port) itself set, and only the username and password was prompted? I honestly don't recall entering the proxy info anywhere. The only thing I was ever required to enter was user and pw. > On May 20, 2014, at 5:39 PM, Aaron Meurer [email protected] wrote: > > I'm trying to understand how the code worked before. Did you always have the proxy server (and port) itself set, and only the username and password was prompted? > > \ > Reply to this email directly or view it on GitHub. I'm still getting this error after entering the proxy info in condarc The issue really seems to be with the repo. Why am I seeing the "pro" repo? I am using the free distribution of Anaconda. Could this be the problem? ``` C:\>conda update conda Fetching package metadata: .Error: HTTPError: 407 Client Error: Proxy Authentication Required: http://repo.continuum.io/pkgs/pro/win-64/ ``` No, the pro repos are there for everyone, and do not require any authentication on the open internet. Ok. I am completely lost since I don't know much about conda. If there is anything that can be tested to provide more information please let me know. Well I am a little surprised that adding the proxy to .condarc didn't work. Are you sure you used the exact format `http://[username]:[password]@[server]:[port]` (note that the `http://` part is required)? Also, does it work if you use the `HTTP_PROXY` environment variable, like ``` HTTP_PROXY=yourproxy conda update conda ``` Oh you're on Windows. I don't know if that syntax works on Windows. You may need to ``` set HTTP_PROXY=yourproxy conda update conda ``` setting HTTP_PROXY works. OK, but setting the http proxy in .condarc to the exact same thing does not work? That's definitely a bug then. exact same thing.
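For reference, a sketch of how per-scheme proxy credentials can be attached to a requests session; requests accepts them embedded in the proxy URL itself (the server, port, and credentials below are placeholders):

``` python
import requests

session = requests.Session()
session.proxies = {
    'http': 'http://username:[email protected]:8080',
    'https': 'http://username:[email protected]:8080',
}
# every request made through this session now goes through the proxy
resp = session.get('http://repo.continuum.io/pkgs/free/win-64/repodata.json.bz2')
```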
2014-05-23T15:50:51
conda/conda
804
conda__conda-804
[ "802" ]
037a783f85eb5edaf5ea59bdb5e456bed92587b3
diff --git a/conda/cli/install.py b/conda/cli/install.py --- a/conda/cli/install.py +++ b/conda/cli/install.py @@ -143,10 +143,10 @@ def install(args, parser, command='install'): common.ensure_override_channels_requires_channel(args) channel_urls = args.channel or () + specs = [] if args.file: - specs = common.specs_from_url(args.file) + specs.extend(common.specs_from_url(args.file)) elif getattr(args, 'all', False): - specs = [] linked = ci.linked(prefix) for pkg in linked: name, ver, build = pkg.rsplit('-', 2) @@ -155,8 +155,7 @@ def install(args, parser, command='install'): specs.append('%s >=%s,<3' % (name, ver)) else: specs.append('%s >=%s' % (name, ver)) - else: - specs = common.specs_from_args(args.packages) + specs.extend(common.specs_from_args(args.packages)) if command == 'install' and args.revision: get_revision(args.revision)
`conda create --file deps.txt pkg1 pkg2 ... pkgn` doesn't work ``` $ echo "scipy" > deps.txt $ conda create -n test08 --file deps.txt sympy Fetching package metadata: .. Solving package specifications: . Package plan for installation in environment /home/mateusz/py/envs/test08: The following packages will be linked: package | build ---------------------------|----------------- numpy-1.8.1 | py27_0 hard-link openssl-1.0.1h | 0 hard-link python-2.7.8 | 0 hard-link readline-6.2 | 2 hard-link scipy-0.14.0 | np18py27_0 hard-link sqlite-3.8.4.1 | 0 hard-link system-5.8 | 1 hard-link tk-8.5.15 | 0 hard-link zlib-1.2.7 | 0 hard-link Proceed ([y]/n)? n ```
2014-07-10T14:40:10
conda/conda
834
conda__conda-834
[ "803", "803" ]
997b70c012fc5be64cc9df8cdabfd860a12c8230
diff --git a/conda/cli/main_run.py b/conda/cli/main_run.py --- a/conda/cli/main_run.py +++ b/conda/cli/main_run.py @@ -7,6 +7,7 @@ from __future__ import print_function, division, absolute_import import sys +import logging from conda.cli import common @@ -17,6 +18,7 @@ def configure_parser(sub_parsers): description = descr, help = descr) common.add_parser_prefix(p) + common.add_parser_quiet(p) common.add_parser_json(p) p.add_argument( 'package', @@ -47,6 +49,9 @@ def execute(args, parser): prefix = common.get_prefix(args) + if args.quiet: + logging.disable(logging.CRITICAL) + if args.package.endswith('.tar.bz2'): if app_is_installed(args.package, prefixes=[prefix]): fn = args.package @@ -65,20 +70,38 @@ def execute(args, parser): if name == args.package: installed = [conda.resolve.Package(pkg + '.tar.bz2', conda.install.is_linked(prefix, pkg))] + break - if not installed: - error_message = "App {} not installed.".format(args.package) - common.error_and_exit(error_message, json=args.json, - error_type="AppNotInstalled") + if installed: + package = max(installed) + fn = package.fn - package = max(installed) - fn = package.fn + try: + subprocess = launch(fn, prefix=prefix, + additional_args=args.arguments, + background=args.json) + if args.json: + common.stdout_json(dict(fn=fn, pid=subprocess.pid)) + elif not args.quiet: + print("Started app. Some apps may take a while to finish loading.") + except TypeError: + execute_command(args.package, prefix, args.arguments, args.json) + except Exception as e: + common.exception_and_exit(e, json=args.json) + else: + # Try interpreting it as a command + execute_command(args.package, prefix, args.arguments, args.json) +def execute_command(cmd, prefix, additional_args, json=False): + from conda.misc import execute_in_environment try: - subprocess = launch(fn, prefix=prefix, additional_args=args.arguments) - if args.json: - common.stdout_json(dict(fn=fn, pid=subprocess.pid)) + process = execute_in_environment( + cmd, prefix=prefix, additional_args=additional_args, inherit=not json) + if not json: + sys.exit(process.wait()) else: - print("Started app. 
Some apps may take a while to finish loading.") - except Exception as e: - common.exception_and_exit(e, json=args.json) + common.stdout_json(dict(cmd=cmd, pid=process.pid)) + except OSError: + error_message = "App {} not installed.".format(cmd) + common.error_and_exit(error_message, json=json, + error_type="AppNotInstalled") diff --git a/conda/misc.py b/conda/misc.py --- a/conda/misc.py +++ b/conda/misc.py @@ -199,21 +199,29 @@ def install_local_packages(prefix, paths, verbose=False): execute_actions(actions, verbose=verbose) -def launch(fn, prefix=config.root_dir, additional_args=None): +def environment_for_conda_environment(prefix=config.root_dir): + # prepend the bin directory to the path + fmt = r'%s\Scripts' if sys.platform == 'win32' else '%s/bin' + binpath = fmt % abspath(prefix) + path = r'%s;%s' if sys.platform == 'win32' else '%s:%s' + path = path % (binpath, os.getenv('PATH')) + env = {'PATH': path} + # copy existing environment variables, but not anything with PATH in it + for k, v in iteritems(os.environ): + if k != 'PATH': + env[k] = v + return binpath, env + + +def launch(fn, prefix=config.root_dir, additional_args=None, background=False): info = install.is_linked(prefix, fn[:-8]) if info is None: return None if not info.get('type') == 'app': - raise Exception('Not an application: %s' % fn) + raise TypeError('Not an application: %s' % fn) - # prepend the bin directory to the path - fmt = r'%s\Scripts;%s' if sys.platform == 'win32' else '%s/bin:%s' - env = {'PATH': fmt % (abspath(prefix), os.getenv('PATH'))} - # copy existing environment variables, but not anything with PATH in it - for k, v in iteritems(os.environ): - if k != 'PATH': - env[k] = v + binpath, env = environment_for_conda_environment(prefix) # allow updating environment variables from metadata if 'app_env' in info: env.update(info['app_env']) @@ -229,7 +237,44 @@ def launch(fn, prefix=config.root_dir, additional_args=None): cwd = abspath(expanduser('~')) if additional_args: args.extend(additional_args) - return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False) + if sys.platform == 'win32' and background: + return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False, + creationflags=subprocess.CREATE_NEW_CONSOLE) + else: + return subprocess.Popen(args, cwd=cwd, env=env, close_fds=False) + + +def execute_in_environment(cmd, prefix=config.root_dir, additional_args=None, + inherit=True): + """Runs ``cmd`` in the specified environment. + + ``inherit`` specifies whether the child inherits stdio handles (for JSON + output, we don't want to trample this process's stdout). + """ + binpath, env = environment_for_conda_environment(prefix) + + if sys.platform == 'win32' and cmd == 'python': + # python is located one directory up on Windows + cmd = join(binpath, '..', cmd) + else: + cmd = join(binpath, cmd) + + args = [cmd] + if additional_args: + args.extend(additional_args) + + if inherit: + stdin, stdout, stderr = None, None, None + else: + stdin, stdout, stderr = subprocess.PIPE, subprocess.PIPE, subprocess.PIPE + + if sys.platform == 'win32' and not inherit: + return subprocess.Popen(args, env=env, close_fds=False, + stdin=stdin, stdout=stdout, stderr=stderr, + creationflags=subprocess.CREATE_NEW_CONSOLE) + else: + return subprocess.Popen(args, env=env, close_fds=False, + stdin=stdin, stdout=stdout, stderr=stderr) def make_icon_url(info):
conda command-line tool provides a convenience command to run the Python executable from a specified conda environment I often want to run the Python interpreter from a specific conda environment while knowing only the name of that environment. I know that `conda -e` gives the path to each conda environment, from which I can derive the path of an environment-specific Python interpreter or the `activate` shell script, but this is an inconvenient extra step. It would be convenient to have a conda command like `conda interpreter -n ${environment} -- [args]` that invokes the environment-specific Python interpreter, inherits STDIN and command-line arguments, and returns Python's exit code. Ideally, it would be a drop-in replacement for directly running the Python interpreter, as documented at https://docs.python.org/2/tutorial/interpreter.html. My shell-fu is weak, but I think that something like `exec /path/to/environment/bin/python "$@"` might work.
Maybe the new conda run command could be used to do this.
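For what it's worth, a minimal sketch of the requested convenience as a standalone helper, assuming the usual envs directory layout on Linux (the helper name and default path are illustrative, not conda's API):

``` python
import os
import subprocess
import sys

def run_env_python(env_name, args, envs_dir=os.path.expanduser('~/anaconda/envs')):
    # on Windows, python.exe would live at the env root rather than bin/
    python = os.path.join(envs_dir, env_name, 'bin', 'python')
    # inherit stdin/stdout/stderr and propagate the interpreter's exit code
    sys.exit(subprocess.call([python] + list(args)))
```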
2014-07-28T20:50:17
conda/conda
909
conda__conda-909
[ "907" ]
e082781cad83e0bc6a41a2870b605f4ee08bbd4d
diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -155,11 +155,20 @@ def rm_rf(path, max_retries=5): shutil.rmtree(path) return except OSError as e: - log.debug("Unable to delete %s (%s): retrying after %s " - "seconds" % (path, e, i)) + msg = "Unable to delete %s\n%s\n" % (path, e) + if on_win and e.args[0] == 5: + try: + subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path]) + return + except subprocess.CalledProcessError as e1: + msg += '%s\n' % e1 + log.debug(msg + "Retrying after %s seconds..." % i) time.sleep(i) # Final time. pass exceptions to caller. - shutil.rmtree(path) + if on_win and e.args[0] == 5: + subprocess.check_call(['cmd', '/c', 'rd', '/s', '/q', path]) + else: + shutil.rmtree(path) def rm_empty_dir(path): """
Use rmtree workaround for write-protected files on Windows See https://stackoverflow.com/questions/1889597/deleting-directory-in-python/1889686#1889686. Alternatively, we can use `rd /s`.
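The workaround from the linked Stack Overflow answer, as a sketch: clear the read-only bit in an `onerror` handler and retry the operation that failed (`'some_dir'` is a placeholder):

``` python
import os
import shutil
import stat

def handle_remove_readonly(func, path, exc_info):
    os.chmod(path, stat.S_IWRITE)  # drop the write protection
    func(path)                     # retry the os.remove/os.rmdir that failed

shutil.rmtree('some_dir', onerror=handle_remove_readonly)
```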
2014-09-11T17:31:02
conda/conda
1,138
conda__conda-1138
[ "897" ]
7305bf1990c6f6d47fc7f6f7b3f7844ec1948388
diff --git a/conda/cli/install.py b/conda/cli/install.py --- a/conda/cli/install.py +++ b/conda/cli/install.py @@ -191,13 +191,13 @@ def install(args, parser, command='install'): "prefix %s" % prefix) for pkg in linked: name, ver, build = pkg.rsplit('-', 2) - if name in getattr(args, '_skip', []): + if name in getattr(args, '_skip', ['anaconda']): continue if name == 'python' and ver.startswith('2'): # Oh Python 2... specs.append('%s >=%s,<3' % (name, ver)) else: - specs.append('%s >=%s' % (name, ver)) + specs.append('%s' % name) specs.extend(common.specs_from_args(args.packages, json=args.json)) if command == 'install' and args.revision: @@ -345,7 +345,7 @@ def install(args, parser, command='install'): else: # Not sure what to do here pass - args._skip = getattr(args, '_skip', []) + args._skip = getattr(args, '_skip', ['anaconda']) args._skip.extend([i.split()[0] for i in e.pkgs]) return install(args, parser, command=command) else:
Only try updating outdated packages with update --all conda update --all tends to fail a lot because it requires that the whole environment become satisfiable without downgrading any packages. Perhaps a better solution would be to only try installing those packages that are known to be outdated. Another idea would be to relax the downgrade restriction and just have it essentially "reinstall" the environment. This could lead to some surprises when it does downgrade things, but it would also reduce the number of unsatisfiable-package issues, as those usually seem to come from the version specifications.
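A sketch of the relaxation the patch above applies: ask the solver for bare package names (keeping only the Python 2/3 pin) instead of '>=current-version' constraints, so the solver is free to move versions in either direction (the `linked` list is illustrative):

``` python
linked = ['numpy-1.8.1-py27_0', 'python-2.7.9-1']  # example linked packages

specs = []
for pkg in linked:
    name, ver, build = pkg.rsplit('-', 2)
    if name == 'python' and ver.startswith('2'):
        specs.append('%s >=%s,<3' % (name, ver))  # don't jump to Python 3
    else:
        specs.append(name)  # any version is fair game, downgrades included
```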
2015-02-09T21:39:08
conda/conda
1,231
conda__conda-1231
[ "1230" ]
50000e443ff4d60904faf59c1ea04cf269ec42c3
diff --git a/conda/cli/main_clean.py b/conda/cli/main_clean.py --- a/conda/cli/main_clean.py +++ b/conda/cli/main_clean.py @@ -8,6 +8,7 @@ from argparse import RawDescriptionHelpFormatter import os import sys +from collections import defaultdict from os.path import join, getsize, isdir from os import lstat, walk, listdir @@ -102,29 +103,29 @@ def rm_lock(locks, verbose=True): def find_tarballs(): - pkgs_dir = config.pkgs_dirs[0] - - rmlist = [] - for fn in os.listdir(pkgs_dir): - if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'): - rmlist.append(fn) - - if not rmlist: - return pkgs_dir, rmlist, 0 + pkgs_dirs = defaultdict(list) + for pkgs_dir in config.pkgs_dirs: + if not isdir(pkgs_dir): + continue + for fn in os.listdir(pkgs_dir): + if fn.endswith('.tar.bz2') or fn.endswith('.tar.bz2.part'): + pkgs_dirs[pkgs_dir].append(fn) totalsize = 0 - for fn in rmlist: - size = getsize(join(pkgs_dir, fn)) - totalsize += size + for pkgs_dir in pkgs_dirs: + for fn in pkgs_dirs[pkgs_dir]: + size = getsize(join(pkgs_dir, fn)) + totalsize += size - return pkgs_dir, rmlist, totalsize + return pkgs_dirs, totalsize -def rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=True): +def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True): if verbose: - print('Cache location: %s' % pkgs_dir) + for pkgs_dir in pkgs_dirs: + print('Cache location: %s' % pkgs_dir) - if not rmlist: + if not any(pkgs_dirs[i] for i in pkgs_dirs): if verbose: print("There are no tarballs to remove") return @@ -133,12 +134,15 @@ def rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=True): print("Will remove the following tarballs:") print() - maxlen = len(max(rmlist, key=lambda x: len(str(x)))) - fmt = "%-40s %10s" - for fn in rmlist: - size = getsize(join(pkgs_dir, fn)) - print(fmt % (fn, human_bytes(size))) - print('-' * (maxlen + 2 + 10)) + for pkgs_dir in pkgs_dirs: + print(pkgs_dir) + print('-'*len(pkgs_dir)) + fmt = "%-40s %10s" + for fn in pkgs_dirs[pkgs_dir]: + size = getsize(join(pkgs_dir, fn)) + print(fmt % (fn, human_bytes(size))) + print() + print('-' * 51) # From 40 + 1 + 10 in fmt print(fmt % ('Total:', human_bytes(totalsize))) print() @@ -147,79 +151,82 @@ def rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=True): if args.json and args.dry_run: return - for fn in rmlist: - if verbose: - print("removing %s" % fn) - os.unlink(os.path.join(pkgs_dir, fn)) + for pkgs_dir in pkgs_dirs: + for fn in pkgs_dirs[pkgs_dir]: + if verbose: + print("removing %s" % fn) + os.unlink(os.path.join(pkgs_dir, fn)) def find_pkgs(): # TODO: This doesn't handle packages that have hard links to files within # themselves, like bin/python3.3 and bin/python3.3m in the Python package - pkgs_dir = config.pkgs_dirs[0] warnings = [] - rmlist = [] - pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and - # Only include actual packages - isdir(join(pkgs_dir, i, 'info'))] - for pkg in pkgs: - breakit = False - for root, dir, files in walk(join(pkgs_dir, pkg)): - if breakit: - break - for fn in files: - try: - stat = lstat(join(root, fn)) - except OSError as e: - warnings.append((fn, e)) - continue - if stat.st_nlink > 1: - # print('%s is installed: %s' % (pkg, join(root, fn))) - breakit = True + pkgs_dirs = defaultdict(list) + for pkgs_dir in config.pkgs_dirs: + pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and + # Only include actual packages + isdir(join(pkgs_dir, i, 'info'))] + for pkg in pkgs: + breakit = False + for root, dir, files in walk(join(pkgs_dir, pkg)): + if breakit: break - else: - 
rmlist.append(pkg) - - if not rmlist: - return pkgs_dir, rmlist, warnings, 0, [] + for fn in files: + try: + stat = lstat(join(root, fn)) + except OSError as e: + warnings.append((fn, e)) + continue + if stat.st_nlink > 1: + # print('%s is installed: %s' % (pkg, join(root, fn))) + breakit = True + break + else: + pkgs_dirs[pkgs_dir].append(pkg) totalsize = 0 - pkgsizes = [] - for pkg in rmlist: - pkgsize = 0 - for root, dir, files in walk(join(pkgs_dir, pkg)): - for fn in files: - # We don't have to worry about counting things twice: by - # definition these files all have a link count of 1! - size = lstat(join(root, fn)).st_size - totalsize += size - pkgsize += size - pkgsizes.append(pkgsize) - - return pkgs_dir, rmlist, warnings, totalsize, pkgsizes - - -def rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes, + pkgsizes = defaultdict(list) + for pkgs_dir in pkgs_dirs: + for pkg in pkgs_dirs[pkgs_dir]: + pkgsize = 0 + for root, dir, files in walk(join(pkgs_dir, pkg)): + for fn in files: + # We don't have to worry about counting things twice: by + # definition these files all have a link count of 1! + size = lstat(join(root, fn)).st_size + totalsize += size + pkgsize += size + pkgsizes[pkgs_dir].append(pkgsize) + + return pkgs_dirs, warnings, totalsize, pkgsizes + + +def rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes, verbose=True): if verbose: - print('Cache location: %s' % pkgs_dir) - for fn, exception in warnings: - print(exception) + for pkgs_dir in pkgs_dirs: + print('Cache location: %s' % pkgs_dir) + for fn, exception in warnings: + print(exception) - if not rmlist: + if not any(pkgs_dirs[i] for i in pkgs_dirs): if verbose: print("There are no unused packages to remove") return if verbose: print("Will remove the following packages:") - print() - maxlen = len(max(rmlist, key=lambda x: len(str(x)))) - fmt = "%-40s %10s" - for pkg, pkgsize in zip(rmlist, pkgsizes): - print(fmt % (pkg, human_bytes(pkgsize))) - print('-' * (maxlen + 2 + 10)) + for pkgs_dir in pkgs_dirs: + print(pkgs_dir) + print('-' * len(pkgs_dir)) + print() + fmt = "%-40s %10s" + for pkg, pkgsize in zip(pkgs_dirs[pkgs_dir], pkgsizes[pkgs_dir]): + print(fmt % (pkg, human_bytes(pkgsize))) + print() + print('-' * 51) # 40 + 1 + 10 in fmt print(fmt % ('Total:', human_bytes(totalsize))) print() @@ -228,10 +235,11 @@ def rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes, if args.json and args.dry_run: return - for pkg in rmlist: - if verbose: - print("removing %s" % pkg) - rm_rf(join(pkgs_dir, pkg)) + for pkgs_dir in pkgs_dirs: + for pkg in pkgs_dirs[pkgs_dir]: + if verbose: + print("removing %s" % pkg) + rm_rf(join(pkgs_dir, pkg)) def rm_index_cache(): @@ -314,13 +322,15 @@ def execute(args, parser): rm_lock(locks, verbose=not args.json) if args.tarballs: - pkgs_dir, rmlist, totalsize = find_tarballs() + pkgs_dirs, totalsize = find_tarballs() + first = sorted(pkgs_dirs)[0] if pkgs_dirs else '' json_result['tarballs'] = { - 'pkgs_dir': pkgs_dir, - 'files': rmlist, + 'pkgs_dir': first, # Backwards compabitility + 'pkgs_dirs': dict(pkgs_dirs), + 'files': pkgs_dirs[first], # Backwards compatibility 'total_size': totalsize } - rm_tarballs(args, pkgs_dir, rmlist, totalsize, verbose=not args.json) + rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json) if args.index_cache: json_result['index_cache'] = { @@ -329,15 +339,17 @@ def execute(args, parser): rm_index_cache() if args.packages: - pkgs_dir, rmlist, warnings, totalsize, pkgsizes = find_pkgs() + pkgs_dirs, warnings, totalsize, pkgsizes = 
find_pkgs() + first = sorted(pkgs_dirs)[0] if pkgs_dirs else '' json_result['packages'] = { - 'pkgs_dir': pkgs_dir, - 'files': rmlist, + 'pkgs_dir': first, # Backwards compatibility + 'pkgs_dirs': dict(pkgs_dirs), + 'files': pkgs_dirs[first], # Backwards compatibility 'total_size': totalsize, 'warnings': warnings, - 'pkg_sizes': dict(zip(rmlist, pkgsizes)) + 'pkg_sizes': {i: dict(zip(pkgs_dirs[i], pkgsizes[i])) for i in pkgs_dirs}, } - rm_pkgs(args, pkgs_dir, rmlist, warnings, totalsize, pkgsizes, + rm_pkgs(args, pkgs_dirs, warnings, totalsize, pkgsizes, verbose=not args.json) if args.source_cache:
conda clean -t fails with FileNotFoundError ``` [root@localhost conda-recipes]# conda clean -t An unexpected error has occurred, please consider sending the following traceback to the conda GitHub issue tracker at: https://github.com/conda/conda/issues Include the output of the command 'conda info' in your report. Traceback (most recent call last): File "/root/anaconda/bin/conda", line 5, in <module> sys.exit(main()) File "/home/aaronmeurer/conda/conda/cli/main.py", line 202, in main args_func(args, p) File "/home/aaronmeurer/conda/conda/cli/main.py", line 207, in args_func args.func(args, p) File "/home/aaronmeurer/conda/conda/cli/main_clean.py", line 317, in execute pkgs_dir, rmlist, totalsize = find_tarballs() File "/home/aaronmeurer/conda/conda/cli/main_clean.py", line 108, in find_tarballs for fn in os.listdir(pkgs_dir): FileNotFoundError: [Errno 2] No such file or directory: '/root/.conda/envs/.pkgs' ```
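The crash comes from calling `os.listdir` on a package cache directory that does not exist. A sketch of the guard the fix above adds, iterating every configured cache and skipping missing ones:

``` python
import os
from os.path import isdir

def find_tarballs(pkgs_dirs):
    found = {}
    for pkgs_dir in pkgs_dirs:
        if not isdir(pkgs_dir):  # e.g. ~/.conda/envs/.pkgs may never exist
            continue
        found[pkgs_dir] = [fn for fn in os.listdir(pkgs_dir)
                           if fn.endswith(('.tar.bz2', '.tar.bz2.part'))]
    return found
```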
2015-03-30T19:30:39
conda/conda
1,318
conda__conda-1318
[ "1317" ]
d6704ec38705aa3181aabff5759d0365cd0e59b0
diff --git a/conda/cli/main_remove.py b/conda/cli/main_remove.py --- a/conda/cli/main_remove.py +++ b/conda/cli/main_remove.py @@ -190,6 +190,9 @@ def execute(args, parser): return + if not args.json: + common.confirm_yn(args) + if args.json and not args.quiet: with json_progress_bars(): plan.execute_actions(actions, index, verbose=not args.quiet)
conda remove --dry-run actually removes the package Is there anything set around here: https://github.com/conda/conda/blob/ded940c3fa845bbb86b3492e4a7c883c1bcec10b/conda/cli/main_remove.py#L196 to actually exit before removing the package if --dry-run is set but --json is not? I'm just running 3.11.0 and haven't grabbed and functionally tested master to see if this is still a problem.
2015-05-04T20:08:26
conda/conda
1,463
conda__conda-1463
[ "1452" ]
3ea1dc2f9a91b05509b80e6fd8d6ee08299967dd
diff --git a/conda/fetch.py b/conda/fetch.py --- a/conda/fetch.py +++ b/conda/fetch.py @@ -185,6 +185,10 @@ def handle_proxy_407(url, session): # We could also use HTTPProxyAuth, but this does not work with https # proxies (see https://github.com/kennethreitz/requests/issues/2061). scheme = requests.packages.urllib3.util.url.parse_url(url).scheme + if scheme not in session.proxies: + sys.exit("""Could not find a proxy for %r. See +http://conda.pydata.org/docs/config.html#configure-conda-for-use-behind-a-proxy-server +for more information on how to configure proxies.""" % scheme) username, passwd = get_proxy_username_and_pass(scheme) session.proxies[scheme] = add_username_and_pass_to_url( session.proxies[scheme], username, passwd)
https proxy username/password I installed Anaconda with "bash Anaconda-2.3.0-Linux-x86_64.sh" and let the installer prepend the bin path to my .bashrc. After closing and reopening the terminal I ran: conda create -n dato-env python=2.7, which prompts for the https proxy username and password while fetching metadata. The following error occurred:

```
Password:
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:

    https://github.com/conda/conda/issues

Include the output of the command 'conda info' in your report.

Traceback (most recent call last):
  File "/home/mayank/anaconda/bin/conda", line 5, in <module>
    sys.exit(main())
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 201, in main
    args_func(args, p)
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 208, in args_func
    args.func(args, p)
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/common.py", line 612, in inner
    return func(args, parser)
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/main_create.py", line 50, in execute
    install.install(args, parser, 'create')
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/install.py", line 255, in install
    offline=args.offline)
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/cli/common.py", line 549, in get_index_trap
    return get_index(*args, **kwargs)
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/api.py", line 42, in get_index
    unknown=unknown)
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/utils.py", line 119, in __call__
    value = self.func(*args, **kw)
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 255, in fetch_index
    reversed(channel_urls))
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 254, in <lambda>
    use_cache=use_cache, session=session)),
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 65, in func
    res = f(*args, **kwargs)
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 154, in fetch_repodata
    handle_proxy_407(url, session)
  File "/home/mayank/anaconda/lib/python2.7/site-packages/conda/fetch.py", line 184, in handle_proxy_407
    session.proxies[scheme], username, passwd)
KeyError: 'https'
```

Can you please help in this regard.
Can you paste the output of `conda info` here? Sure. Output of conda info:

```
Current conda install:

             platform : linux-64
        conda version : 3.14.1
  conda-build version : 1.14.1
       python version : 2.7.10.final.0
     requests version : 2.7.0
     root environment : /home/mayank/anaconda (writable)
  default environment : /home/mayank/anaconda
     envs directories : /home/mayank/anaconda/envs
        package cache : /home/mayank/anaconda/pkgs
         channel URLs : https://repo.continuum.io/pkgs/free/linux-64/
                        https://repo.continuum.io/pkgs/free/noarch/
                        https://repo.continuum.io/pkgs/pro/linux-64/
                        https://repo.continuum.io/pkgs/pro/noarch/
          config file : None
    is foreign system : False
```
2015-07-27T18:35:38
conda/conda
1,496
conda__conda-1496
[ "1495" ]
8d095e771947121c32edef476efc333e2cfeb60b
diff --git a/conda/connection.py b/conda/connection.py --- a/conda/connection.py +++ b/conda/connection.py @@ -19,7 +19,7 @@ import conda from conda.compat import urlparse, StringIO -from conda.config import get_proxy_servers +from conda.config import get_proxy_servers, ssl_verify import requests @@ -82,6 +82,7 @@ def __init__(self, *args, **kwargs): self.headers['User-Agent'] = "conda/%s %s" % ( conda.__version__, self.headers['User-Agent']) + self.verify = ssl_verify class S3Adapter(requests.adapters.BaseAdapter): diff --git a/conda/fetch.py b/conda/fetch.py --- a/conda/fetch.py +++ b/conda/fetch.py @@ -97,8 +97,7 @@ def fetch_repodata(url, cache_dir=None, use_cache=False, session=None): try: resp = session.get(url + 'repodata.json.bz2', - headers=headers, proxies=session.proxies, - verify=config.ssl_verify) + headers=headers, proxies=session.proxies) resp.raise_for_status() if resp.status_code != 304: cache = json.loads(bz2.decompress(resp.content).decode('utf-8')) @@ -323,8 +322,7 @@ def download(url, dst_path, session=None, md5=None, urlstxt=False, retries = RETRIES with Locked(dst_dir): try: - resp = session.get(url, stream=True, proxies=session.proxies, - verify=config.ssl_verify) + resp = session.get(url, stream=True, proxies=session.proxies) resp.raise_for_status() except requests.exceptions.HTTPError as e: if e.response.status_code == 407: # Proxy Authentication Required
Set the CondaSession.verify default from the config setting Currently, the .condarc config setting must be explicitly used when making a request with a CondaSession object. This means that anyone who wants to use CondaSession must check the .condarc ssl_verify value and interpret it appropriately, etc. (For example, see https://github.com/conda/conda/blob/47e300b0e2cd5aad1dfe18d26eada5995b058004/conda/fetch.py#L101) I think it would be much cleaner for CondaSession itself to set the default verify value in its `__init__`: `self.verify = <code to get the .condarc ssl_verify setting>`. See https://github.com/kennethreitz/requests/blob/8b5e457b756b2ab4c02473f7a42c2e0201ecc7e9/requests/sessions.py#L314 to see that this is correct. This change would mean that we don't need the verify argument here: https://github.com/conda/conda/blob/47e300b0e2cd5aad1dfe18d26eada5995b058004/conda/fetch.py#L101, and would also solve this issue: https://github.com/conda/conda-build/issues/523
Sounds good to me.
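A minimal sketch of the suggested change, with the `ssl_verify` value hard-coded where the real code would read it from `.condarc`:

``` python
import requests

ssl_verify = True  # stand-in for the .condarc 'ssl_verify' setting

class CondaSession(requests.Session):
    def __init__(self, *args, **kwargs):
        super(CondaSession, self).__init__(*args, **kwargs)
        # requests uses self.verify as the default for every request made
        # through this session, so callers no longer pass verify= themselves
        self.verify = ssl_verify
```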
2015-08-05T17:57:52
conda/conda
1,541
conda__conda-1541
[ "1535" ]
a003d2809ec13f826f0692c9515a2f1291fae56f
diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -272,6 +272,7 @@ def update_prefix(path, new_prefix, placeholder=prefix_placeholder, if new_data == data: return st = os.lstat(path) + os.remove(path) # Remove file before rewriting to avoid destroying hard-linked cache. with open(path, 'wb') as fo: fo.write(new_data) os.chmod(path, stat.S_IMODE(st.st_mode))
`conda.install.update_prefix` is modifying cached pkgs in Windows Since files are hardlinks, it looks like conda's prefix replacement mechanism is breaking its own files. File contents inside package: ``` python x = r'/opt/anaconda1anaconda2anaconda3\Scripts', ``` File contents after installing in 'env1': ``` python x = r'C:\Miniconda\envs\env1\Scripts', ``` File contents after installing in 'NOTENV1': ``` python x = r'C:\Miniconda\envs\env1\Scripts', ``` Note that the second install fails, because the first one modified the cached files in `C:\Miniconda\pkgs`. Reading @asmeurer comments in #679, I agree that the correct behavior in this case would be to delete the file being modified and re-create it to avoid this issue.
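A sketch of the hardlink-safe rewrite the patch performs: unlink first so the rewritten file gets a fresh inode, leaving the hard-linked copy in the package cache untouched (path handling simplified):

``` python
import os
import stat

def rewrite_in_place(path, new_data):
    st = os.lstat(path)
    os.remove(path)  # break the hard link to the cached package file
    with open(path, 'wb') as fo:  # creates a brand-new file
        fo.write(new_data)
    os.chmod(path, stat.S_IMODE(st.st_mode))  # restore the original mode
```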
2015-08-20T12:16:12
conda/conda
1,562
conda__conda-1562
[ "1561" ]
113e9451512b87d0bf0ef3ecdd19ca78fb25c047
diff --git a/conda/resolve.py b/conda/resolve.py --- a/conda/resolve.py +++ b/conda/resolve.py @@ -309,7 +309,7 @@ def add_dependents(fn1, max_only=False): if not found: raise NoPackagesFound("Could not find some dependencies " - "for %s: %s" % (ms, ', '.join(notfound)), notfound) + "for %s: %s" % (ms, ', '.join(notfound)), [ms.spec] + notfound) add_dependents(root_fn, max_only=max_only) return res @@ -426,7 +426,7 @@ def get_dists(self, specs, max_only=False): dists[pkg.fn] = pkg found = True if not found: - raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), notfound) + raise NoPackagesFound("Could not find some dependencies for %s: %s" % (spec, ', '.join(notfound)), [spec] + notfound) return dists
recursion problem or infinite loop in dependency solver I am executing this command:

``` bash
/opt/wakari/miniconda/bin/conda update --dry-run -c https://conda.anaconda.org/wakari/channel/release:0.8.0 -p /opt/wakari/wakari-server --all
```

And it appears to loop "forever" (well, for a few minutes, at least), printing again and again:

``` bash
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
Solving package specifications:
```

(you get the point), and then when I finally `CTRL-C` to abort, the stack-trace suggests a recursion problem:

``` python
Solving package specifications:
Warning: Could not find some dependencies for wakari-enterprise-server-conf: wakari-server >=1.8.0, skipping
^CTraceback (most recent call last):
  File "/opt/wakari/miniconda/bin/conda", line 5, in <module>
    sys.exit(main())
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 194, in main
    args_func(args, p)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/main.py", line 201, in args_func
    args.func(args, p)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/main_update.py", line 38, in execute
    install.install(args, parser, 'update')
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 358, in install
    return install(args, parser, command=command)
  ... REPEATED MANY TIMES ...
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 358, in install
    return install(args, parser, command=command)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 358, in install
    return install(args, parser, command=command)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/cli/install.py", line 337, in install
    minimal_hint=args.alt_hint)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/plan.py", line 402, in install_actions
    config.track_features, minimal_hint=minimal_hint):
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 726, in solve
    for pkg in self.get_pkgs(ms, max_only=max_only):
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/utils.py", line 142, in __call__
    res = cache[key] = self.func(*args, **kw)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 261, in get_pkgs
    pkgs = [Package(fn, self.index[fn]) for fn in self.find_matches(ms)]
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 168, in __init__
    self.norm_version = normalized_version(self.version)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/resolve.py", line 28, in normalized_version
    return verlib.NormalizedVersion(version)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/verlib.py", line 93, in __init__
    self._parse(s, error_on_huge_major_num)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/verlib.py", line 110, in _parse
    block = self._parse_numdots(groups['version'], s, False, 2)
  File "/opt/wakari/miniconda/lib/python2.7/site-packages/conda/verlib.py", line 159, in _parse_numdots
    if len(n) > 1 and n[0] == '0':
KeyboardInterrupt
```
This is with conda 3.16.0:

``` bash
/opt/wakari/miniconda/bin/conda -V
conda 3.16.0
```

I see the problem. It's trying to ignore `wakari-server` but it really should be ignoring `wakari-enterprise-server-conf`.
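As a hedged illustration of the fix in the patch above (simplified names, not conda's actual solver): the exception must carry the *requesting* spec, not just its missing dependency, so that the caller's skip-and-retry loop drops the right package and terminates:

``` python
class NoPackagesFound(RuntimeError):
    def __init__(self, msg, pkgs):
        super(NoPackagesFound, self).__init__(msg)
        self.pkgs = pkgs  # specs the caller should drop before retrying

def resolve(specs, index):
    for spec in specs:
        missing = [dep for dep in index.get(spec, []) if dep not in index]
        if missing:
            # The patch prepends the spec itself ([ms.spec] + notfound);
            # without it, retries kept skipping only 'wakari-server'.
            raise NoPackagesFound("Could not find some dependencies for %s: %s"
                                  % (spec, ", ".join(missing)), [spec] + missing)
    return specs

specs = ["wakari-enterprise-server-conf"]
index = {"wakari-enterprise-server-conf": ["wakari-server >=1.8.0"]}
while specs:
    try:
        print(resolve(specs, index))
        break
    except NoPackagesFound as e:
        specs = [s for s in specs if s not in e.pkgs]  # now makes progress
```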
2015-08-31T16:25:35
conda/conda
1563
conda__conda-1563
[ "1557" ]
9e9becd30eeb6e56c1601521f5edb7b16a29abd7
diff --git a/conda/cli/install.py b/conda/cli/install.py --- a/conda/cli/install.py +++ b/conda/cli/install.py @@ -169,6 +169,8 @@ def install(args, parser, command='install'): if any(pkg.split('=')[0] == default_pkg for pkg in args.packages): default_packages.remove(default_pkg) args.packages.extend(default_packages) + else: + default_packages = [] common.ensure_override_channels_requires_channel(args) channel_urls = args.channel or () @@ -244,7 +246,7 @@ def install(args, parser, command='install'): offline=args.offline) if newenv and args.clone: - if args.packages: + if set(args.packages) - set(default_packages): common.error_and_exit('did not expect any arguments for --clone', json=args.json, error_type="ValueError")
create_default_packages rules out --clone

Hi all,

When the .condarc file defines the 'create_default_packages' option, the conda --clone command gives a confusing error message:

```
alain@alain-K53E:~$ conda create --name flowersqq --clone snowflakes
Fetching package metadata: ....
Error: did not expect any arguments for --clone
alain@alain-K53E:~$
```

Here are my settings:

```
alain@alain-K53E:~$ conda info
Current conda install:

             platform : linux-32
        conda version : 3.16.0
  conda-build version : 1.16.0
       python version : 2.7.10.final.0
     requests version : 2.7.0
     root environment : /home/alain/miniconda  (writable)
  default environment : /home/alain/miniconda
     envs directories : /home/alain/test/conda-envs
        package cache : /home/alain/test/conda-envs/.pkgs
         channel URLs : https://repo.continuum.io/pkgs/free/linux-32/
                        https://repo.continuum.io/pkgs/free/noarch/
                        https://repo.continuum.io/pkgs/pro/linux-32/
                        https://repo.continuum.io/pkgs/pro/noarch/
          config file : /home/alain/.condarc_env
    is foreign system : False

alain@alain-K53E:~$ cat /home/alain/.condarc_env
channels:
  - defaults

# Directories in which environments are located.
envs_dirs:
  - ~/test/conda-envs

create_default_packages:
  - python
  - pip

binstar_upload: False
binstar_personal: True
alain@alain-K53E:~$
```

And here is the workaround for me (luckily I was able to browse the source code, https://github.com/algorete/apkg/blob/master/conda/cli/install.py, which gave me a hint, but I do not really understand it):

```
alain@alain-K53E:~$ conda create --name flowersqq --clone snowflakes --no-default-packages
Fetching package metadata: ....
src_prefix: '/home/alain/test/conda-envs/snowflakes'
dst_prefix: '/home/alain/test/conda-envs/flowersqq'
Packages: 14
Files: 0
Linking packages ...
[      COMPLETE      ]|#################################################################################################| 100%
alain@alain-K53E:~$
```

Kind Regards
Alain

P.S.: I could understand that changing the .condarc file to include default packages _after_ having created an env which did not define create_default_packages could confuse '--clone', but the problem seems to be always present.
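A minimal sketch of the patched check (mirroring the diff above): `--clone` should only be rejected when the user asked for packages beyond the ones `create_default_packages` injected:

``` python
def check_clone_args(packages, default_packages):
    # args.packages may have been padded with create_default_packages from
    # .condarc, so compare against that set instead of testing emptiness.
    if set(packages) - set(default_packages):
        raise ValueError("did not expect any arguments for --clone")

check_clone_args(["python", "pip"], ["python", "pip"])  # OK: defaults only
try:
    check_clone_args(["numpy"], ["python", "pip"])      # a real extra package
except ValueError as e:
    print(e)
```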
2015-08-31T19:55:34
conda/conda
1613
conda__conda-1613
[ "1118" ]
acf392df41d6c4ecf311733b38869d83fa19239f
diff --git a/conda/cli/install.py b/conda/cli/install.py --- a/conda/cli/install.py +++ b/conda/cli/install.py @@ -196,12 +196,14 @@ def install(args, parser, command='install'): common.check_specs(prefix, specs, json=args.json, create=(command == 'create')) - # handle tar file containing conda packages + num_cp = sum(s.endswith('.tar.bz2') for s in args.packages) if num_cp: if num_cp == len(args.packages): depends = misc.install_local_packages(prefix, args.packages, verbose=not args.quiet) + if args.no_deps: + depends = [] specs = list(set(depends)) args.unknown = True else: @@ -209,10 +211,14 @@ def install(args, parser, command='install'): "cannot mix specifications with conda package filenames", json=args.json, error_type="ValueError") + + # handle tar file containing conda packages if len(args.packages) == 1: tar_path = args.packages[0] if tar_path.endswith('.tar'): depends = install_tar(prefix, tar_path, verbose=not args.quiet) + if args.no_deps: + depends = [] specs = list(set(depends)) args.unknown = True
conda install --no-deps still installing deps when installing tarball

When running the following command:

`conda install --no-deps ./matplotlib-1.4.0-np18py27_0.tar.bz2`

you would assume that no dependencies are installed, but conda seems to install the dependencies anyway.
fwiw: this seemed to work with conda 3.7.0.

That's because conda never used to install the dependencies of a tarball. This feature was added, but the `--no-deps` flag was neglected.

@ilanschnell this is similar to the issue I was describing with `--offline` installations breaking because of the dependency resolution happening after tarball installs. Didn't you say you had a fix in mind for how to deal with that? Would that also solve this problem?

Yes, I have described the fix I had in mind here: https://github.com/conda/conda/issues/1075

Just fixing this issue should be much simpler than fixing that, though. We just need to make it respect the command-line flag.

What is the status here?
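A rough sketch of the fix pattern from the patch above, with a stub standing in for conda's real tarball installer: any dependencies reported by a local tarball install are simply discarded when `--no-deps` is passed:

``` python
def install_local_packages(prefix, tarballs):
    # stub for conda.misc.install_local_packages: installs the tarballs
    # and returns the dependency specs they declare
    return ["numpy 1.8*", "python 2.7*"]

def install_tarballs(prefix, tarballs, no_deps):
    depends = install_local_packages(prefix, tarballs)
    if no_deps:
        depends = []  # previously the flag was ignored for tarball installs
    return sorted(set(depends))

print(install_tarballs("/envs/x", ["matplotlib-1.4.0-np18py27_0.tar.bz2"],
                       no_deps=True))   # [] -- nothing else gets resolved
```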
2015-09-14T17:37:43
conda/conda
1618
conda__conda-1618
[ "1594" ]
1b100cde07dac3769e1dbffcb05e11574b9a416b
diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -27,19 +27,19 @@ from __future__ import print_function, division, absolute_import -import time -import os -import json import errno +import json +import logging +import os +import shlex import shutil import stat -import sys import subprocess +import sys import tarfile +import time import traceback -import logging -import shlex -from os.path import abspath, basename, dirname, isdir, isfile, islink, join +from os.path import abspath, basename, dirname, isdir, isfile, islink, join, relpath try: from conda.lock import Locked @@ -143,13 +143,14 @@ def _remove_readonly(func, path, excinfo): func(path) -def rm_rf(path, max_retries=5): +def rm_rf(path, max_retries=5, trash=True): """ Completely delete path max_retries is the number of times to retry on failure. The default is 5. This only applies to deleting a directory. + If removing path fails and trash is True, files will be moved to the trash directory. """ if islink(path) or isfile(path): # Note that we have to check if the destination is a link because @@ -180,6 +181,15 @@ def rm_rf(path, max_retries=5): if not isdir(path): return + if trash: + try: + move_path_to_trash(path) + if not isdir(path): + return + except OSError as e2: + raise + msg += "Retry with onerror failed (%s)\n" % e2 + log.debug(msg + "Retrying after %s seconds..." % i) time.sleep(i) # Final time. pass exceptions to caller. @@ -497,14 +507,14 @@ def is_linked(prefix, dist): except IOError: return None -def delete_trash(prefix): +def delete_trash(prefix=None): from conda import config for pkg_dir in config.pkgs_dirs: trash_dir = join(pkg_dir, '.trash') try: log.debug("Trying to delete the trash dir %s" % trash_dir) - rm_rf(trash_dir, max_retries=1) + rm_rf(trash_dir, max_retries=1, trash=False) except OSError as e: log.debug("Could not delete the trash dir %s (%s)" % (trash_dir, e)) @@ -512,11 +522,23 @@ def move_to_trash(prefix, f, tempdir=None): """ Move a file f from prefix to the trash - tempdir should be the name of the directory in the trash + tempdir is a deprecated parameter, and will be ignored. + + This function is deprecated in favor of `move_path_to_trash`. + """ + return move_path_to_trash(join(prefix, f)) + +def move_path_to_trash(path): + """ + Move a path to the trash """ + # Try deleting the trash every time we use it. 
+ delete_trash() + from conda import config for pkg_dir in config.pkgs_dirs: + import tempfile trash_dir = join(pkg_dir, '.trash') try: @@ -525,26 +547,23 @@ def move_to_trash(prefix, f, tempdir=None): if e1.errno != errno.EEXIST: continue - if tempdir is None: - import tempfile - trash_dir = tempfile.mkdtemp(dir=trash_dir) - else: - trash_dir = join(trash_dir, tempdir) + trash_dir = tempfile.mkdtemp(dir=trash_dir) + trash_dir = join(trash_dir, relpath(os.path.dirname(path), config.root_dir)) try: - try: - os.makedirs(join(trash_dir, dirname(f))) - except OSError as e1: - if e1.errno != errno.EEXIST: - continue - shutil.move(join(prefix, f), join(trash_dir, f)) + os.makedirs(trash_dir) + except OSError as e2: + if e2.errno != errno.EEXIST: + continue + try: + shutil.move(path, trash_dir) except OSError as e: - log.debug("Could not move %s to %s (%s)" % (f, trash_dir, e)) + log.debug("Could not move %s to %s (%s)" % (path, trash_dir, e)) else: return True - log.debug("Could not move %s to trash" % f) + log.debug("Could not move %s to trash" % path) return False # FIXME This should contain the implementation that loads meta, not is_linked() @@ -556,11 +575,6 @@ def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None): Set up a package in a specified (environment) prefix. We assume that the package has been extracted (using extract() above). ''' - if on_win: - # Try deleting the trash every time we link something. - delete_trash(prefix) - - index = index or {} log.debug('pkgs_dir=%r, prefix=%r, dist=%r, linktype=%r' % (pkgs_dir, prefix, dist, linktype)) @@ -595,7 +609,7 @@ def link(pkgs_dir, prefix, dist, linktype=LINK_HARD, index=None): log.error('failed to unlink: %r' % dst) if on_win: try: - move_to_trash(prefix, f) + move_path_to_trash(dst) except ImportError: # This shouldn't be an issue in the installer anyway pass @@ -685,7 +699,7 @@ def unlink(prefix, dist): if on_win and os.path.exists(join(prefix, f)): try: log.debug("moving to trash") - move_to_trash(prefix, f) + move_path_to_trash(dst) except ImportError: # This shouldn't be an issue in the installer anyway pass
`conda.install.rm_rf` can't delete old environments in Windows + Python 2.7

This issue hit me when trying to use the `--force` option in `conda env create` (I added this in #102), and it revealed a possible problem with how conda handles links in Windows + Python 2.7.

# Symptom

Trying to delete an environment (not the active one) from within Python fails because the original file being linked is locked by Windows.

# Context

- Windows
- Python 2.7 (`os.islink` does not work here, which might affect the `rm_rf` function from `conda.install`)

# Reproducing

This command line is enough to reproduce the problem:

``` bat
$ conda create -n test pyyaml && python -c "import yaml;from conda.install import rm_rf;rm_rf('C:\\Miniconda\\envs\\test')"
Fetching package metadata: ......
#
# To activate this environment, use:
# > activate test
#
Traceback (most recent call last):
  File "<string>", line 1, in <module>
  File "C:\Miniconda\lib\site-packages\conda\install.py", line 204, in rm_rf
    shutil.rmtree(path)
  File "C:\Miniconda\lib\shutil.py", line 247, in rmtree
    rmtree(fullname, ignore_errors, onerror)
  File "C:\Miniconda\lib\shutil.py", line 247, in rmtree
    rmtree(fullname, ignore_errors, onerror)
  File "C:\Miniconda\lib\shutil.py", line 252, in rmtree
    onerror(os.remove, fullname, sys.exc_info())
  File "C:\Miniconda\lib\shutil.py", line 250, in rmtree
    os.remove(fullname)
WindowsError: [Error 5] Acesso negado: 'C:\\Miniconda\\envs\\test\\Lib\\site-packages\\yaml.dll'
```

("Acesso negado" is the localized Windows message for "Access denied".)

Note that when I import `pyyaml`, I'm locking file `C:\\Miniconda\\Lib\\site-packages\\yaml.dll` (root environment), but that also prevents me from deleting `C:\\Miniconda\\envs\\test\\Lib\\site-packages\\yaml.dll` (another hard link).

# Solution?

Maybe we should add some improved support for detecting and unlinking hardlinks in Windows with Python 2.7, and handle those cases individually instead of trying to `shutil.rmtree` everything. Possibly from [jaraco.windows](https://bitbucket.org/jaraco/jaraco.windows/src/default/jaraco/windows/filesystem/__init__.py#cl-76) or [ntfs/fs.py](https://github.com/sid0/ntfs/blob/master/ntfsutils/fs.py#L88)
A possible solution would be to force all conda requirements (python, pycosat, pyyaml, conda, openssl, requests) to be copied in Windows (never linked). This might also be enough to remove some code in `install.py` that already has a different behavior when linking python (we would just need to do the same for the other ones).

I think it should work to put the trash stuff in `rm_rf`.

Curiously, `conda env remove` has no trouble removing the same environment... hmm...

@nicoddemus it might not be going through a path that imports `yaml`.

@asmeurer is moving files to the trash folder allowed when the file is 'in use'?

This problem is already semi-solved, except that it's not been wired up in all parts of conda yet. See #1133, and for conda-build see https://github.com/conda/conda-build/pull/521. I say semi because you could theoretically run out of disk space before the environments actually get removed. But that's certainly an edge case.

@campos-ddc Yes, that's allowed (you can easily test it on your machine).
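A hedged sketch of the move-to-trash idea discussed here (not conda's exact implementation): when deletion fails because a hardlinked file is in use, rename the path into a trash directory on the same volume, since renames of open files generally succeed on Windows, and purge the trash on a later run:

``` python
import os
import shutil
import tempfile

def rm_rf_with_trash(path, trash_root):
    try:
        if os.path.isdir(path):
            shutil.rmtree(path)
        else:
            os.unlink(path)
    except OSError:
        # Deletion failed (e.g. yaml.dll locked by another process):
        # move it aside instead; a later delete_trash() pass can purge it.
        if not os.path.isdir(trash_root):
            os.makedirs(trash_root)
        shutil.move(path, tempfile.mkdtemp(dir=trash_root))
```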
2015-09-15T18:13:42
conda/conda
1668
conda__conda-1668
[ "1667" ]
c13df56c2a6b4e494bfffbb695df63d34e28ad5f
diff --git a/conda/cli/common.py b/conda/cli/common.py --- a/conda/cli/common.py +++ b/conda/cli/common.py @@ -180,6 +180,16 @@ def add_parser_use_index_cache(p): help="Use cache of channel index files.", ) + +def add_parser_no_use_index_cache(p): + p.add_argument( + "--no-use-index-cache", + action="store_false", + default=True, + dest="use_index_cache", + help="Use cache of channel index files.", + ) + def add_parser_copy(p): p.add_argument( '--copy', diff --git a/conda/cli/main_remove.py b/conda/cli/main_remove.py --- a/conda/cli/main_remove.py +++ b/conda/cli/main_remove.py @@ -68,6 +68,8 @@ def configure_parser(sub_parsers, name='remove'): common.add_parser_channels(p) common.add_parser_prefix(p) common.add_parser_quiet(p) + # Putting this one first makes it the default + common.add_parser_no_use_index_cache(p) common.add_parser_use_index_cache(p) common.add_parser_use_local(p) common.add_parser_offline(p)
conda remove shouldn't fetch the package metadata

It only needs the metadata to print the channel location. It should either get that locally, or at least just use the index cache.
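The one-line comment in the patch ("Putting this one first makes it the default") relies on an argparse detail worth spelling out: when two options share a dest, parse_args assigns defaults in registration order and skips dests that are already set, so the first-registered option's default wins. A small demonstration:

``` python
import argparse

p = argparse.ArgumentParser()
p.add_argument("--no-use-index-cache", action="store_false",
               default=True, dest="use_index_cache")      # registered first:
p.add_argument("--use-index-cache", action="store_true",  # its default=True wins
               default=False, dest="use_index_cache")

print(p.parse_args([]).use_index_cache)                         # True
print(p.parse_args(["--no-use-index-cache"]).use_index_cache)   # False
```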
2015-10-01T19:07:14
conda/conda
1735
conda__conda-1735
[ "1734" ]
02c652c00ebad8c17747509185d007057d2b0374
diff --git a/conda/utils.py b/conda/utils.py --- a/conda/utils.py +++ b/conda/utils.py @@ -5,8 +5,10 @@ import hashlib import collections from functools import partial -from os.path import abspath, isdir, join +from os.path import abspath, isdir import os +import tempfile + log = logging.getLogger(__name__) stderrlog = logging.getLogger('stderrlog') @@ -43,13 +45,10 @@ def can_open_all_files_in_prefix(prefix, files): def try_write(dir_path): assert isdir(dir_path) try: - try: - with open(join(dir_path, '.conda-try-write'), mode='wb') as fo: - fo.write(b'This is a test file.\n') - return True - finally: - # XXX: If this raises an exception it will also return False - os.unlink(join(dir_path, '.conda-try-write')) + with tempfile.TemporaryFile(prefix='.conda-try-write', + dir=dir_path) as fo: + fo.write(b'This is a test file.\n') + return True except (IOError, OSError): return False
Race condition for root environment detection

Periodically, when two conda processes are running at the same time, it is possible to see a race condition on determining whether the root environment is writable. Notice how the following produces two different configs from the same setup:

```
$ conda info & conda info

Current conda install:

             platform : osx-64
        conda version : 3.18.3
  conda-build version : 1.18.0
       python version : 2.7.10.final.0
     requests version : 2.8.1
     root environment : /Users/pelson/miniconda  (read only)
  default environment : /Users/pelson/miniconda
     envs directories : /Users/pelson/.conda/envs
                        /Users/pelson/envs
                        /Users/pelson/miniconda/envs
        package cache : /Users/pelson/.conda/envs/.pkgs
                        /Users/pelson/envs/.pkgs
                        /Users/pelson/miniconda/pkgs
...

Current conda install:

             platform : osx-64
        conda version : 3.18.3
  conda-build version : 1.18.0
       python version : 2.7.10.final.0
     requests version : 2.8.1
     root environment : /Users/pelson/miniconda  (writable)
  default environment : /Users/pelson/miniconda
     envs directories : /Users/pelson/miniconda/envs
        package cache : /Users/pelson/miniconda/pkgs
...
```

The offending lines are in https://github.com/conda/conda/blob/master/conda/config.py#L135-L143 and https://github.com/conda/conda/blob/master/conda/utils.py#L43-L54. My assumption is that the `.conda-try-write` file is being removed by the other process, and the exception is being raised in the `finally` block.
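For clarity, here is the fixed try_write in isolation (essentially what the patch above installs): tempfile.TemporaryFile gives every process its own probe file and cleans it up on close, so one conda process can no longer unlink another's `.conda-try-write`:

``` python
import os
import tempfile

def try_write(dir_path):
    assert os.path.isdir(dir_path)
    try:
        # unique, self-deleting probe file: no shared name, no race
        with tempfile.TemporaryFile(prefix=".conda-try-write", dir=dir_path) as fo:
            fo.write(b"This is a test file.\n")
        return True
    except (IOError, OSError):
        return False

print(try_write(tempfile.gettempdir()))  # True
print(try_write("/"))                    # False for non-root users (on POSIX)
```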
2015-10-24T05:46:05
conda/conda
1807
conda__conda-1807
[ "1751" ]
f46c73c3cb6353a409449fdba08fdbd0856bdb35
diff --git a/conda/cli/main_clean.py b/conda/cli/main_clean.py --- a/conda/cli/main_clean.py +++ b/conda/cli/main_clean.py @@ -151,9 +151,13 @@ def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True): for pkgs_dir in pkgs_dirs: for fn in pkgs_dirs[pkgs_dir]: - if verbose: - print("removing %s" % fn) - os.unlink(os.path.join(pkgs_dir, fn)) + if os.access(os.path.join(pkgs_dir, fn), os.W_OK): + if verbose: + print("Removing %s" % fn) + os.unlink(os.path.join(pkgs_dir, fn)) + else: + if verbose: + print("WARNING: cannot remove, file permissions: %s" % fn) def find_pkgs(): diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -138,6 +138,13 @@ def _remove_readonly(func, path, excinfo): os.chmod(path, stat.S_IWRITE) func(path) +def warn_failed_remove(function, path, exc_info): + if exc_info[1].errno == errno.EACCES: + log.warn("Cannot remove, permission denied: {0}".format(path)) + elif exc_info[1].errno == errno.ENOTEMPTY: + log.warn("Cannot remove, not empty: {0}".format(path)) + else: + log.warn("Cannot remove, unknown reason: {0}".format(path)) def rm_rf(path, max_retries=5, trash=True): """ @@ -152,12 +159,15 @@ def rm_rf(path, max_retries=5, trash=True): # Note that we have to check if the destination is a link because # exists('/path/to/dead-link') will return False, although # islink('/path/to/dead-link') is True. - os.unlink(path) + if os.access(path, os.W_OK): + os.unlink(path) + else: + log.warn("Cannot remove, permission denied: {0}".format(path)) elif isdir(path): for i in range(max_retries): try: - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove) return except OSError as e: msg = "Unable to delete %s\n%s\n" % (path, e) @@ -189,7 +199,7 @@ def rm_rf(path, max_retries=5, trash=True): log.debug(msg + "Retrying after %s seconds..." % i) time.sleep(i) # Final time. pass exceptions to caller. - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove) def rm_empty_dir(path): """
diff --git a/tests/test_install.py b/tests/test_install.py --- a/tests/test_install.py +++ b/tests/test_install.py @@ -8,7 +8,7 @@ from conda import install -from conda.install import PaddingError, binary_replace, update_prefix +from conda.install import PaddingError, binary_replace, update_prefix, warn_failed_remove from .decorators import skip_if_no_mock from .helpers import mock @@ -140,6 +140,11 @@ def generate_mock_isfile(self, value): with patch.object(install, 'isfile', return_value=value) as isfile: yield isfile + @contextmanager + def generate_mock_os_access(self, value): + with patch.object(install.os, 'access', return_value=value) as os_access: + yield os_access + @contextmanager def generate_mock_unlink(self): with patch.object(install.os, 'unlink') as unlink: @@ -173,25 +178,27 @@ def generate_mock_check_call(self): yield check_call @contextmanager - def generate_mocks(self, islink=True, isfile=True, isdir=True, on_win=False): + def generate_mocks(self, islink=True, isfile=True, isdir=True, on_win=False, os_access=True): with self.generate_mock_islink(islink) as mock_islink: with self.generate_mock_isfile(isfile) as mock_isfile: - with self.generate_mock_isdir(isdir) as mock_isdir: - with self.generate_mock_unlink() as mock_unlink: - with self.generate_mock_rmtree() as mock_rmtree: - with self.generate_mock_sleep() as mock_sleep: - with self.generate_mock_log() as mock_log: - with self.generate_mock_on_win(on_win): - with self.generate_mock_check_call() as check_call: - yield { - 'islink': mock_islink, - 'isfile': mock_isfile, - 'isdir': mock_isdir, - 'unlink': mock_unlink, - 'rmtree': mock_rmtree, - 'sleep': mock_sleep, - 'log': mock_log, - 'check_call': check_call, + with self.generate_mock_os_access(os_access) as mock_os_access: + with self.generate_mock_isdir(isdir) as mock_isdir: + with self.generate_mock_unlink() as mock_unlink: + with self.generate_mock_rmtree() as mock_rmtree: + with self.generate_mock_sleep() as mock_sleep: + with self.generate_mock_log() as mock_log: + with self.generate_mock_on_win(on_win): + with self.generate_mock_check_call() as check_call: + yield { + 'islink': mock_islink, + 'isfile': mock_isfile, + 'isdir': mock_isdir, + 'os_access': mock_os_access, + 'unlink': mock_unlink, + 'rmtree': mock_rmtree, + 'sleep': mock_sleep, + 'log': mock_log, + 'check_call': check_call, } def generate_directory_mocks(self, on_win=False): @@ -219,6 +226,13 @@ def test_calls_unlink_on_true_islink(self): install.rm_rf(some_path) mocks['unlink'].assert_called_with(some_path) + @skip_if_no_mock + def test_does_not_call_unlink_on_os_access_false(self): + with self.generate_mocks(os_access=False) as mocks: + some_path = self.generate_random_path + install.rm_rf(some_path) + self.assertFalse(mocks['unlink'].called) + @skip_if_no_mock def test_does_not_call_isfile_if_islink_is_true(self): with self.generate_mocks() as mocks: @@ -259,7 +273,8 @@ def test_calls_rmtree_at_least_once_on_isdir_true(self): with self.generate_directory_mocks() as mocks: some_path = self.generate_random_path install.rm_rf(some_path) - mocks['rmtree'].assert_called_with(some_path) + mocks['rmtree'].assert_called_with( + some_path, onerror=warn_failed_remove, ignore_errors=False) @skip_if_no_mock def test_calls_rmtree_only_once_on_success(self): @@ -342,7 +357,7 @@ def test_tries_extra_kwarg_on_windows(self): install.rm_rf(random_path) expected_call_list = [ - mock.call(random_path), + mock.call(random_path, ignore_errors=False, onerror=warn_failed_remove), mock.call(random_path, 
onerror=install._remove_readonly) ] mocks['rmtree'].assert_has_calls(expected_call_list)
conda clean -pt as non-root user with root anaconda install

I have installed root miniconda at /opt/anaconda. When running

```
conda clean -pt
```

as a lesser user than root, I am seeing errors indicating conda is not checking permissions before attempting to delete package dirs:

```
conda clean -pt
Cache location: /opt/anaconda/pkgs
Will remove the following tarballs:

/opt/anaconda/pkgs
------------------
conda-3.18.3-py27_0.tar.bz2                 175 KB
conda-env-2.4.4-py27_0.tar.bz2               24 KB
itsdangerous-0.24-py27_0.tar.bz2             16 KB
markupsafe-0.23-py27_0.tar.bz2               30 KB
flask-0.10.1-py27_1.tar.bz2                 129 KB
jinja2-2.8-py27_0.tar.bz2                   263 KB
anaconda-build-0.12.0-py27_0.tar.bz2         69 KB
flask-wtf-0.8.4-py27_1.tar.bz2               12 KB
flask-ldap-login-0.3.0-py27_1.tar.bz2        13 KB
---------------------------------------------------
Total:  730 KB

removing conda-3.18.3-py27_0.tar.bz2
An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:

    https://github.com/conda/conda/issues

Include the output of the command 'conda info' in your report.

Traceback (most recent call last):
  File "/opt/anaconda/envs/anaconda.org/bin/conda", line 5, in <module>
    sys.exit(main())
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 195, in main
    args_func(args, p)
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 202, in args_func
    args.func(args, p)
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 331, in execute
    rm_tarballs(args, pkgs_dirs, totalsize, verbose=not args.json)
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 156, in rm_tarballs
    os.unlink(os.path.join(pkgs_dir, fn))
OSError: [Errno 13] Permission denied: '/opt/anaconda/pkgs/conda-3.18.3-py27_0.tar.bz2'
```
Hi @csoja - this issue is blocking some key Build System stability fixes. LMK if this can be prioritized (along with #1752 above).

@stephenakearns and @PeterDSteinberg and @csoja this issue is also now preventing us from moving forward with the anaconda-cluster build scripts.

@csoja - I know you're strapped for resources and may have a new person taking a look at this. Can we bring it up in the platform meeting to discuss a possible solution?
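The pattern the patch applies, in a standalone hedged sketch: probe writability with os.access before unlinking, and give shutil.rmtree an onerror callback that warns instead of aborting the whole clean run:

``` python
import errno
import os
import shutil

def warn_failed_remove(func, path, exc_info):
    # shutil.rmtree onerror callback signature: (function, path, excinfo)
    if exc_info[1].errno == errno.EACCES:
        print("WARNING: cannot remove, permission denied: %s" % path)
    else:
        print("WARNING: cannot remove: %s" % path)

def remove_tarball(path):
    if os.access(path, os.W_OK):
        os.unlink(path)
    else:
        print("WARNING: cannot remove, file permissions: %s" % path)

# usage: shutil.rmtree(pkg_dir, ignore_errors=False, onerror=warn_failed_remove)
```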
2015-11-11T20:20:17
conda/conda
1808
conda__conda-1808
[ "1752" ]
f46c73c3cb6353a409449fdba08fdbd0856bdb35
diff --git a/conda/cli/main_clean.py b/conda/cli/main_clean.py --- a/conda/cli/main_clean.py +++ b/conda/cli/main_clean.py @@ -151,9 +151,13 @@ def rm_tarballs(args, pkgs_dirs, totalsize, verbose=True): for pkgs_dir in pkgs_dirs: for fn in pkgs_dirs[pkgs_dir]: - if verbose: - print("removing %s" % fn) - os.unlink(os.path.join(pkgs_dir, fn)) + if os.access(os.path.join(pkgs_dir, fn), os.W_OK): + if verbose: + print("Removing %s" % fn) + os.unlink(os.path.join(pkgs_dir, fn)) + else: + if verbose: + print("WARNING: cannot remove, file permissions: %s" % fn) def find_pkgs(): @@ -163,6 +167,9 @@ def find_pkgs(): pkgs_dirs = defaultdict(list) for pkgs_dir in config.pkgs_dirs: + if not os.path.exists(pkgs_dir): + print("WARNING: {0} does not exist".format(pkgs_dir)) + continue pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and # Only include actual packages isdir(join(pkgs_dir, i, 'info'))] diff --git a/conda/install.py b/conda/install.py --- a/conda/install.py +++ b/conda/install.py @@ -138,6 +138,13 @@ def _remove_readonly(func, path, excinfo): os.chmod(path, stat.S_IWRITE) func(path) +def warn_failed_remove(function, path, exc_info): + if exc_info[1].errno == errno.EACCES: + log.warn( "WARNING: cannot remove, permission denied: %s" % path ) + elif exc_info[1].errno == errno.ENOTEMPTY: + log.warn( "WARNING: cannot remove, not empty: %s" % path ) + else: + log.warn( "WARNING: cannot remove, unknown reason: %s" % path ) def rm_rf(path, max_retries=5, trash=True): """ @@ -152,12 +159,15 @@ def rm_rf(path, max_retries=5, trash=True): # Note that we have to check if the destination is a link because # exists('/path/to/dead-link') will return False, although # islink('/path/to/dead-link') is True. - os.unlink(path) + if os.access(path, os.W_OK): + os.unlink(path) + else: + log.warn("WARNING: cannot remove, permission denied: %s" % path) elif isdir(path): for i in range(max_retries): try: - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove) return except OSError as e: msg = "Unable to delete %s\n%s\n" % (path, e) @@ -189,7 +199,7 @@ def rm_rf(path, max_retries=5, trash=True): log.debug(msg + "Retrying after %s seconds..." % i) time.sleep(i) # Final time. pass exceptions to caller. - shutil.rmtree(path) + shutil.rmtree(path, ignore_errors=False, onerror=warn_failed_remove) def rm_empty_dir(path): """
diff --git a/tests/test_install.py b/tests/test_install.py --- a/tests/test_install.py +++ b/tests/test_install.py @@ -8,7 +8,7 @@ from conda import install -from conda.install import PaddingError, binary_replace, update_prefix +from conda.install import PaddingError, binary_replace, update_prefix, warn_failed_remove from .decorators import skip_if_no_mock from .helpers import mock @@ -140,6 +140,11 @@ def generate_mock_isfile(self, value): with patch.object(install, 'isfile', return_value=value) as isfile: yield isfile + @contextmanager + def generate_mock_os_access(self, value): + with patch.object(install.os, 'access', return_value=value) as os_access: + yield os_access + @contextmanager def generate_mock_unlink(self): with patch.object(install.os, 'unlink') as unlink: @@ -173,25 +178,27 @@ def generate_mock_check_call(self): yield check_call @contextmanager - def generate_mocks(self, islink=True, isfile=True, isdir=True, on_win=False): + def generate_mocks(self, islink=True, isfile=True, isdir=True, on_win=False, os_access=True): with self.generate_mock_islink(islink) as mock_islink: with self.generate_mock_isfile(isfile) as mock_isfile: - with self.generate_mock_isdir(isdir) as mock_isdir: - with self.generate_mock_unlink() as mock_unlink: - with self.generate_mock_rmtree() as mock_rmtree: - with self.generate_mock_sleep() as mock_sleep: - with self.generate_mock_log() as mock_log: - with self.generate_mock_on_win(on_win): - with self.generate_mock_check_call() as check_call: - yield { - 'islink': mock_islink, - 'isfile': mock_isfile, - 'isdir': mock_isdir, - 'unlink': mock_unlink, - 'rmtree': mock_rmtree, - 'sleep': mock_sleep, - 'log': mock_log, - 'check_call': check_call, + with self.generate_mock_os_access(os_access) as mock_os_access: + with self.generate_mock_isdir(isdir) as mock_isdir: + with self.generate_mock_unlink() as mock_unlink: + with self.generate_mock_rmtree() as mock_rmtree: + with self.generate_mock_sleep() as mock_sleep: + with self.generate_mock_log() as mock_log: + with self.generate_mock_on_win(on_win): + with self.generate_mock_check_call() as check_call: + yield { + 'islink': mock_islink, + 'isfile': mock_isfile, + 'isdir': mock_isdir, + 'os_access': mock_os_access, + 'unlink': mock_unlink, + 'rmtree': mock_rmtree, + 'sleep': mock_sleep, + 'log': mock_log, + 'check_call': check_call, } def generate_directory_mocks(self, on_win=False): @@ -219,6 +226,13 @@ def test_calls_unlink_on_true_islink(self): install.rm_rf(some_path) mocks['unlink'].assert_called_with(some_path) + @skip_if_no_mock + def test_does_not_call_unlink_on_os_access_false(self): + with self.generate_mocks(os_access=False) as mocks: + some_path = self.generate_random_path + install.rm_rf(some_path) + self.assertFalse(mocks['unlink'].called) + @skip_if_no_mock def test_does_not_call_isfile_if_islink_is_true(self): with self.generate_mocks() as mocks: @@ -259,7 +273,8 @@ def test_calls_rmtree_at_least_once_on_isdir_true(self): with self.generate_directory_mocks() as mocks: some_path = self.generate_random_path install.rm_rf(some_path) - mocks['rmtree'].assert_called_with(some_path) + mocks['rmtree'].assert_called_with( + some_path, onerror=warn_failed_remove, ignore_errors=False) @skip_if_no_mock def test_calls_rmtree_only_once_on_success(self): @@ -342,7 +357,7 @@ def test_tries_extra_kwarg_on_windows(self): install.rm_rf(random_path) expected_call_list = [ - mock.call(random_path), + mock.call(random_path, ignore_errors=False, onerror=warn_failed_remove), mock.call(random_path, 
onerror=install._remove_readonly) ] mocks['rmtree'].assert_has_calls(expected_call_list)
conda clean -pt with empty package cache and non-root user

I have a root miniconda install at /opt/anaconda. I ran

```
conda clean -pt
```

successfully as root, then immediately tried running the same command as a lesser user. I got this error even though the package cache was empty:

```
Cache location:
There are no tarballs to remove

An unexpected error has occurred, please consider sending the
following traceback to the conda GitHub issue tracker at:

    https://github.com/conda/conda/issues

Include the output of the command 'conda info' in your report.

Traceback (most recent call last):
  File "/opt/anaconda/envs/anaconda.org/bin/conda", line 5, in <module>
    sys.exit(main())
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 195, in main
    args_func(args, p)
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main.py", line 202, in args_func
    args.func(args, p)
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 340, in execute
    pkgs_dirs, warnings, totalsize, pkgsizes = find_pkgs()
  File "/opt/anaconda/lib/python2.7/site-packages/conda/cli/main_clean.py", line 166, in find_pkgs
    pkgs = [i for i in listdir(pkgs_dir) if isdir(join(pkgs_dir, i)) and
OSError: [Errno 2] No such file or directory: '/home/user5/envs/.pkgs'
```
Appears somewhat related to #1751. Fixing #1751 and #1752 is necessary for the future needs of anaconda-build.
2015-11-11T21:17:30
conda/conda
1944
conda__conda-1944
[ "1285" ]
4db6b4e58c6efa70e461dd68a90d812d4a634619
diff --git a/conda/cli/main_clean.py b/conda/cli/main_clean.py --- a/conda/cli/main_clean.py +++ b/conda/cli/main_clean.py @@ -65,6 +65,95 @@ def configure_parser(sub_parsers): p.set_defaults(func=execute) +# work-around for python bug on Windows prior to python 3.2 +# https://bugs.python.org/issue10027 +# Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation +class CrossPlatformStLink(object): + _st_nlink = None + + def __call__(self, path): + return self.st_nlink(path) + + @classmethod + def st_nlink(cls, path): + if cls._st_nlink is None: + cls._initialize() + return cls._st_nlink(path) + + @classmethod + def _standard_st_nlink(cls, path): + return lstat(path).st_nlink + + @classmethod + def _windows_st_nlink(cls, path): + st_nlink = cls._standard_st_nlink(path) + if st_nlink != 0: + return st_nlink + else: + # cannot trust python on Windows when st_nlink == 0 + # get value using windows libraries to be sure of its true value + # Adapted from the ntfsutils package, Copyright (c) 2012, the Mozilla Foundation + GENERIC_READ = 0x80000000 + FILE_SHARE_READ = 0x00000001 + OPEN_EXISTING = 3 + hfile = cls.CreateFile(path, GENERIC_READ, FILE_SHARE_READ, None, + OPEN_EXISTING, 0, None) + if hfile is None: + from ctypes import WinError + raise WinError() + info = cls.BY_HANDLE_FILE_INFORMATION() + rv = cls.GetFileInformationByHandle(hfile, info) + cls.CloseHandle(hfile) + if rv == 0: + from ctypes import WinError + raise WinError() + return info.nNumberOfLinks + + @classmethod + def _initialize(cls): + if os.name != 'nt': + cls._st_nlink = cls._standard_st_nlink + else: + # http://msdn.microsoft.com/en-us/library/windows/desktop/aa363858 + import ctypes + from ctypes import POINTER, WinError + from ctypes.wintypes import DWORD, HANDLE, BOOL + + cls.CreateFile = ctypes.windll.kernel32.CreateFileW + cls.CreateFile.argtypes = [ctypes.c_wchar_p, DWORD, DWORD, ctypes.c_void_p, + DWORD, DWORD, HANDLE] + cls.CreateFile.restype = HANDLE + + # http://msdn.microsoft.com/en-us/library/windows/desktop/ms724211 + cls.CloseHandle = ctypes.windll.kernel32.CloseHandle + cls.CloseHandle.argtypes = [HANDLE] + cls.CloseHandle.restype = BOOL + + class FILETIME(ctypes.Structure): + _fields_ = [("dwLowDateTime", DWORD), + ("dwHighDateTime", DWORD)] + + class BY_HANDLE_FILE_INFORMATION(ctypes.Structure): + _fields_ = [("dwFileAttributes", DWORD), + ("ftCreationTime", FILETIME), + ("ftLastAccessTime", FILETIME), + ("ftLastWriteTime", FILETIME), + ("dwVolumeSerialNumber", DWORD), + ("nFileSizeHigh", DWORD), + ("nFileSizeLow", DWORD), + ("nNumberOfLinks", DWORD), + ("nFileIndexHigh", DWORD), + ("nFileIndexLow", DWORD)] + cls.BY_HANDLE_FILE_INFORMATION = BY_HANDLE_FILE_INFORMATION + + # http://msdn.microsoft.com/en-us/library/windows/desktop/aa364952 + cls.GetFileInformationByHandle = ctypes.windll.kernel32.GetFileInformationByHandle + cls.GetFileInformationByHandle.argtypes = [HANDLE, POINTER(BY_HANDLE_FILE_INFORMATION)] + cls.GetFileInformationByHandle.restype = BOOL + + cls._st_nlink = cls._windows_st_nlink + + def find_lock(): from os.path import join @@ -164,7 +253,8 @@ def find_pkgs(): # TODO: This doesn't handle packages that have hard links to files within # themselves, like bin/python3.3 and bin/python3.3m in the Python package warnings = [] - + + cross_platform_st_nlink = CrossPlatformStLink() pkgs_dirs = defaultdict(list) for pkgs_dir in config.pkgs_dirs: if not os.path.exists(pkgs_dir): @@ -180,11 +270,11 @@ def find_pkgs(): break for fn in files: try: - stat = lstat(join(root, fn)) + 
st_nlink = cross_platform_st_nlink(join(root, fn)) except OSError as e: warnings.append((fn, e)) continue - if stat.st_nlink > 1: + if st_nlink > 1: # print('%s is installed: %s' % (pkg, join(root, fn))) breakit = True break
conda clean -p breaks hard linking of new installs

@asmeurer Following on from conda/conda#68:

On Windows 8 (and probably equally on all other Windows versions), `conda clean -p` removes all extracted packages from the `pkgs` directory even if they are hard-linked into some environments. While all existing environments will continue to function, creation of a new environment with the same packages causes conda to extract the packages again and to hard link to the newly extracted files. This massively increases disk usage and results in the opposite of what a user running `conda clean` was attempting to achieve.

The problem lies in `main_clean.py` and the fact that on Windows `lstat(file).st_nlink` always returns 0, even if `file` is hard linked. (This seems to have been fixed from Python 3.2 onwards: https://bugs.python.org/issue10027)

As a stop-gap measure, `conda clean -p` should be prevented from being run on Windows until a better solution can be found.

```
C:\>conda list
# packages in environment at C:\Anaconda:
#
conda                     3.10.1                   py27_0
conda-env                 2.1.4                    py27_0
menuinst                  1.0.4                    py27_0
psutil                    2.2.1                    py27_0
pycosat                   0.6.1                    py27_0
python                    2.7.9                         1
pyyaml                    3.11                     py27_0
requests                  2.6.0                    py27_0
```
Yep, pretty annoying issue.

> The problem lies in main_clean.py and the fact that on Windows lstat(file).st_nlink always returns 0, even if file is hard linked. (This seems to have been fixed from Python 3.2 onwards: https://bugs.python.org/issue10027)

If that's really the case, then the WinAPI function **GetFileInformationByHandle** should be used. The nNumberOfLinks field of the BY_HANDLE_FILE_INFORMATION structure is exactly what we want. **ntfsutils** has a very nice implementation of this here: http://pydoc.net/Python/ntfsutils/0.1.3/ntfsutils.fs/ ; I guess it can be borrowed. You know, `if getfileinfo(path).nNumberOfLinks > 1: ...`
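The decision `conda clean -p` needs, in a hedged sketch: a package directory may be reclaimed only if none of its files carries more than one link. On POSIX (and Python >= 3.2 on Windows) `os.lstat` is enough; on earlier Windows Pythons it always reports 0, which is exactly why the patch routes the call through its CrossPlatformStLink wrapper:

``` python
import os

def pkg_in_use(pkg_dir, st_nlink=lambda p: os.lstat(p).st_nlink):
    """True if any file is hard-linked into an environment (nlink > 1).
    Pass CrossPlatformStLink() as st_nlink on Windows + Python < 3.2,
    where os.lstat lies and always reports 0."""
    for root, _, files in os.walk(pkg_dir):
        for fn in files:
            if st_nlink(os.path.join(root, fn)) > 1:
                return True
    return False
```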
2016-01-08T15:05:30
conda/conda
2108
conda__conda-2108
[ "2093" ]
d637298bb13b5434a989174ebe9acea3d7a0e2a0
diff --git a/conda/connection.py b/conda/connection.py --- a/conda/connection.py +++ b/conda/connection.py @@ -101,7 +101,9 @@ def send(self, request, stream=None, timeout=None, verify=None, cert=None, import boto except ImportError: stderrlog.info('\nError: boto is required for S3 channels. ' - 'Please install it with: conda install boto\n') + 'Please install it with `conda install boto`\n' + 'Make sure to run `source deactivate` if you ' + 'are in a conda environment.\n') resp.status_code = 404 return resp
confusing error message when boto is missing

If one tries to install from an S3 channel (e.g., `conda install foo -c s3://bar/baz/conda`) while in an environment, this message shows up: `Error: boto is required for S3 channels. Please install it with: conda install boto`.

Installing `boto` in the environment doesn't actually solve the problem, however, because `boto` has to be installed into the main set of packages (e.g., `source deactivate; conda install boto`). Perhaps a simple edit to the error message would address this (e.g., `Error: boto is required for S3 channels. Please install it with: conda install boto. Make sure to run "source deactivate" if you are in an environment.`)
2016-02-19T22:10:29

SWE-Fixer: Training Open-Source LLMs for Effective and Efficient GitHub Issue Resolution

📃 Paper

🚀 GitHub

SWE-Fixer is a simple yet effective solution for addressing real-world GitHub issues by training open-source LLMs. It features a streamlined retrieve-then-edit pipeline with two core components: a code file retriever and a code editor.
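As a purely illustrative sketch (not the authors' code; every name and API here is hypothetical), the two-stage pipeline can be pictured as:

``` python
def swe_fixer(issue_text, repo_files, retriever, editor):
    # Stage 1 -- code file retriever: rank repository files by relevance
    # to the issue text and keep the top candidates.
    candidates = retriever.rank(issue_text, repo_files)[:5]
    # Stage 2 -- code editor: generate a patch conditioned on the issue
    # plus the retrieved file contents.
    return editor.generate_patch(issue_text, candidates)
```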

This repo holds SWE-Fixer-Train-110K, the training data we curated for SWE-Fixer.
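To load the data, something like the following should work (a hedged example: the Hub repo id is assumed from this page, and the split name "train" is an assumption):

``` python
from datasets import load_dataset

ds = load_dataset("internlm/SWE-Fixer-Train-110K", split="train")
print(ds.column_names)                     # inspect the schema
print(ds[0]["problem_statement"][:200])    # peek at one record
```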

For more information, please visit our project page.

📚 Citation

@article{xie2025swefixer,
  title={SWE-Fixer: Training Open-Source LLMs for Effective and Efficient GitHub Issue Resolution}, 
  author={Xie, Chengxing and Li, Bowen and Gao, Chang and Du, He and Lam, Wai and Zou, Difan and Chen, Kai},
  journal={arXiv preprint arXiv:2501.05040},
  year={2025}
}