Merge branch 'release/1.3.2'

fix/49-unicode-decode-on-readline
Isis Lovecruft 2014-10-04 00:53:44 +00:00
commit d1a8fb101f
No known key found for this signature in database
GPG Key ID: 18C16EC5F9F1D673
16 changed files with 174 additions and 3854 deletions

View File

@ -1,12 +0,0 @@
language: python
before_install:
- sudo apt-get install gpg
python:
- "2.6"
- "2.7"
- "3.3"
install:
- make install
# command to run tests, e.g. python setup.py test
script:
- make test

View File

@ -1,4 +1,15 @@
include LICENSE
include requirements.txt
graft docs
recursive-include examples *.py
recursive-include docs *.rst
include docs/_static
include docs/conf.py
include docs/make.bat
include docs/Makefile
prune docs/_build
prune private
global-exclude *.log *~

View File

@ -34,8 +34,11 @@ cleanup-tests-all: cleanup-tests
rm -rf tests/tmp
cleanup-build:
mkdir buildnot
rm -rf build*
-rm MANIFEST
-rm -rf build
cleanup-dist:
-rm -rf dist
# it's not strictly necessary that gnupg2, gpg-agent, pinentry, or pip be
# installed, so ignore error exit statuses for those commands
@ -90,3 +93,8 @@ docs-html:
docs-zipfile: docs-html
cd $(DOC_HTML_DIR) && { find . -name '*' | zip -@ -v ../$(DOC_BUILD_ZIP) ;};
@echo "Built documentation in $(DOC_BUILD_DIR)/$(DOC_BUILD_ZIP)"
upload: cleanup-build
python setup.py bdist_egg upload --sign
#python setup.py bdist_wheel upload --sign
python setup.py sdist --formats=gztar,zip upload --sign

TODO
View File

@ -5,13 +5,6 @@
It would be nice to make the file descriptors for communication with the GnuPG
process configurable, and not the default, hard-coded 0=stdin 1=stdout
2=stderr.
** TODO look into RDBMS/ORM for public key storage :io:db:
see http://stackoverflow.com/q/1235594 and http://elixir.ematia.de/trac/wiki
memcached and pymemcached were the first ones I looked at, then I discovered
redis, which seemed better. At some point we should look into using Elixir,
mentioned in the above SO post, so that the backend DB can be chosen freely
and we're not restricting users to only memcached/cassandra/redis/sqlite/etc.
* Key editing :editkey:
** TODO add '--edit-key' feature :editkey:

View File

@ -135,13 +135,17 @@ class GPGBase(object):
file for secret keys.
"""
self.binary = _util._find_binary(binary)
self.homedir = home if home else _util._conf
self.homedir = os.path.expanduser(home) if home else _util._conf
pub = _parsers._fix_unsafe(keyring) if keyring else 'pubring.gpg'
sec = _parsers._fix_unsafe(secring) if secring else 'secring.gpg'
self.keyring = os.path.join(self._homedir, pub)
self.secring = os.path.join(self._homedir, sec)
self.options = _parsers._sanitise(options) if options else None
#: The version string of our GnuPG binary
self.binary_version = '0.0.0'
self.verbose = False
if default_preference_list:
self._prefs = _check_preferences(default_preference_list, 'all')
else:
@ -169,13 +173,7 @@ class GPGBase(object):
log.error("GPGBase.__init__(): %s" % str(ae))
raise RuntimeError(str(ae))
else:
if verbose is True:
# The caller wants logging, but we need a valid --debug-level
# for gpg. Default to "basic", and warn about the ambiguity.
# (garrettr)
verbose = "basic"
log.warning('GPG(verbose=True) is ambiguous, defaulting to "basic" logging')
self.verbose = verbose
self._set_verbose(verbose)
self.use_agent = use_agent
if hasattr(self, '_agent_proc') \
@ -183,6 +181,9 @@ class GPGBase(object):
if hasattr(self, '__remove_path__'):
self.__remove_path__('pinentry')
# Assign our self.binary_version attribute:
self._check_sane_and_get_gpg_version()
def __remove_path__(self, prog=None, at_exit=True):
"""Remove the directories containing a program from the system's
``$PATH``. If ``GPGBase.binary`` is in a directory being removed, it
@ -436,6 +437,24 @@ class GPGBase(object):
_generated_keys = _util.InheritableProperty(_generated_keys_getter,
_generated_keys_setter)
def _check_sane_and_get_gpg_version(self):
"""Check that everything runs alright, and grab the gpg binary's
version number while we're at it, storing it as :data:`binary_version`.
:raises RuntimeError: if we cannot invoke the gpg binary.
"""
proc = self._open_subprocess(["--list-config", "--with-colons"])
result = self._result_map['list'](self)
self._read_data(proc.stdout, result)
if proc.returncode:
raise RuntimeError("Error invoking gpg: %s" % result.data)
else:
proc.terminate()
version_line = str(result.data).partition(':version:')[2]
self.binary_version = version_line.split('\n')[0]
log.debug("Using GnuPG version %s" % self.binary_version)
def _make_args(self, args, passphrase=False):
"""Make a list of command line elements for GPG.
@ -480,9 +499,14 @@ class GPGBase(object):
if self.verbose:
cmd.append('--debug-all')
if ((isinstance(self.verbose, str) and
self.verbose in ['basic', 'advanced', 'expert', 'guru'])
or (isinstance(self.verbose, int) and (1<=self.verbose<=9))):
if (isinstance(self.verbose, str) or
(isinstance(self.verbose, int) and (self.verbose >= 1))):
# GnuPG<=1.4.18 parses the `--debug-level` command in a way
# that is incompatible with all other GnuPG versions. :'(
if self.binary_version and (self.binary_version <= '1.4.18'):
cmd.append('--debug-level=%s' % self.verbose)
else:
cmd.append('--debug-level %s' % self.verbose)
return cmd
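Note that `self.binary_version <= '1.4.18'` above is a plain string comparison. As an illustration only (not part of this commit) of why string comparisons of versions are fragile, '1.4.9' sorts after '1.4.18' lexicographically, whereas numeric tuples order correctly:

    # Illustrative sketch: compare versions as tuples of ints rather than as
    # raw strings ('1.4.9' > '1.4.18' when compared character by character).
    def version_tuple(version):
        """'1.4.18' -> (1, 4, 18)"""
        return tuple(int(part) for part in version.split('.')[:3])

    def debug_level_flag(binary_version, level):
        """Mirror the branch above: '=' joined for GnuPG <= 1.4.18, a space otherwise."""
        if version_tuple(binary_version) <= (1, 4, 18):
            return '--debug-level=%s' % level
        return '--debug-level %s' % level

    assert debug_level_flag('1.4.18', 'basic') == '--debug-level=basic'
    assert debug_level_flag('2.0.26', 'basic') == '--debug-level basic'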
@ -592,6 +616,36 @@ class GPGBase(object):
log.debug("Finishing reading from stream %r..." % stream.__repr__())
log.debug("Read %4d bytes total" % len(result.data))
def _set_verbose(self, verbose):
"""Check and set our :data:`verbose` attribute.
The debug-level must be a string or an integer. If it is one of
the allowed strings, GnuPG will translate it internally to its
corresponding integer level:
basic = 1-2
advanced = 3-5
expert = 6-8
guru = 9+
If it's not one of the recognised string levels, then the
entire argument is ignored by GnuPG. :(
To fix that stupid behaviour, if they wanted debugging but typo'd
the string level (or specified ``verbose=True``), we'll default to
'basic' logging.
"""
string_levels = ('basic', 'advanced', 'expert', 'guru')
if verbose is True:
# The caller wants logging, but we need a valid --debug-level
# for gpg. Default to "basic", and warn about the ambiguity.
verbose = 'basic'
if (isinstance(verbose, str) and not (verbose in string_levels)):
verbose = 'basic'
self.verbose = verbose
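The same fallback rule, as a standalone sketch (names here are illustrative, not the library's API): anything that is not a recognised string level, including a bare ``verbose=True``, falls back to 'basic', while integers pass through untouched.

    # Sketch of the fallback rule implemented by _set_verbose above.
    STRING_LEVELS = ('basic', 'advanced', 'expert', 'guru')

    def normalise_verbose(verbose):
        if verbose is True:
            return 'basic'
        if isinstance(verbose, str) and verbose not in STRING_LEVELS:
            return 'basic'
        return verbose

    assert normalise_verbose(True) == 'basic'
    assert normalise_verbose('guru') == 'guru'
    assert normalise_verbose('bsaic') == 'basic'   # typo'd level falls back
    assert normalise_verbose(7) == 7               # integer levels pass through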
def _collect_output(self, process, result, writer=None, stdin=None):
"""Drain the subprocesses output streams, writing the collected output
to the result. If a writer thread (writing to the subprocess) is given,

View File

@ -475,6 +475,8 @@ def _get_options_group(group=None):
'--export-secret-subkeys',
'--fingerprint',
'--gen-revoke',
'--hidden-encrypt-to',
'--hidden-recipient',
'--list-key',
'--list-keys',
'--list-public-keys',
@ -566,6 +568,7 @@ def _get_options_group(group=None):
'--quiet',
'--sign',
'--symmetric',
'--throw-keyids',
'--use-agent',
'--verbose',
'--version',
@ -1048,7 +1051,7 @@ class ImportResult(object):
:rtype: bool
:returns: True if we have imported some keys, False otherwise.
"""
if self.counts.not_imported > 0: return False
if self.counts['not_imported'] > 0: return False
if len(self.fingerprints) == 0: return False
return True
__bool__ = __nonzero__
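With the switch to ``self.counts['not_imported']``, truth-testing an import result works again under both ``__nonzero__`` (Python 2) and ``__bool__`` (Python 3). A hedged usage sketch; the homedir and key file path are placeholders:

    # Usage sketch: the result is truthy only if at least one key was imported
    # and none were rejected. Paths below are placeholders, not real files.
    import gnupg

    gpg = gnupg.GPG(homedir='./keys')
    with open('some-key.asc') as keyfile:      # placeholder path
        key_data = keyfile.read()
    result = gpg.import_keys(key_data)
    if result:
        print("imported: %s" % ', '.join(result.fingerprints))
    else:
        print("nothing imported")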
@ -1220,6 +1223,7 @@ class Verify(object):
self.status = 'signature good'
self.key_id, self.username = value.split(None, 1)
elif key == "VALIDSIG":
self.valid = True
(self.fingerprint,
self.creation_date,
self.sig_timestamp,
@ -1245,17 +1249,47 @@ class Verify(object):
self.valid = False
self.key_id = value
self.status = 'no public key'
elif key in ("KEYEXPIRED", "SIGEXPIRED"):
# these are useless in verify, since they are spit out for any
# These are useless in Verify, since they are spit out for any
# pub/subkeys on the key, not just the one doing the signing.
# if we want to check for signatures with expired key,
# the relevant flag is EXPKEYSIG.
# if we want to check for signatures made with an expired key,
# the relevant flags are REVKEYSIG and KEYREVOKED.
elif key in ("KEYEXPIRED", "SIGEXPIRED"):
pass
# The signature has an expiration date which has already passed
# (EXPKEYSIG), or the signature has been revoked (REVKEYSIG):
elif key in ("EXPKEYSIG", "REVKEYSIG"):
# signed with expired or revoked key
self.valid = False
self.key_id = value.split()[0]
self.status = (('%s %s') % (key[:3], key[3:])).lower()
# This is super annoying, and bad design on the part of GnuPG, in my
# opinion.
#
# This flag can get triggered if a valid signature is made, and then
# later the key (or subkey) which created the signature is
# revoked. When this happens, GnuPG will output:
#
# REVKEYSIG 075BFD18B365D34C Test Expired Key <test@python-gnupg.git>
# VALIDSIG DAB69B05F591640B7F4DCBEA075BFD18B365D34C 2014-09-26 1411700539 0 4 0 1 2 00 4BA800F77452A6C29447FF20F4AF76ACBBE22CE2
# KEYREVOKED
#
# Meaning that we have a timestamp for when the signature was created,
# and we know that the signature is valid, but since GnuPG gives us no
# timestamp for when the key was revoked... we have no ability to
# determine if the valid signature was made *before* the signing key
# was revoked or *after*. Meaning that if you are like me and you sign
# all your software releases and git commits, and you also practice
# good opsec by doing regular key rotations, your old signatures made
# by your expired/revoked keys (even though they were created when the
# key was still good) are considered bad because GnuPG is a
# braindamaged piece of shit.
#
# Software engineering, motherfuckers, DO YOU SPEAK IT?
#
# The signing key which created the signature has since been revoked
# (KEYREVOKED), and we're going to ignore it (but add something to the
# status message):
elif key in ("KEYREVOKED"):
self.status = '\n'.join([self.status, "key revoked"])
else:
raise ValueError("Unknown status message: %r" % key)
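A standalone sketch of the new status handling (not the library's parser, just the mapping the lines above implement): EXPKEYSIG/REVKEYSIG mark the signature invalid and record a lowered status string, while KEYREVOKED only appends a note to whatever status is already set.

    # Sketch of the mapping added above, kept separate from the Verify class.
    def classify_status(key, value, current_status=''):
        """Return (valid, key_id, status) for a subset of GnuPG status lines."""
        if key in ("EXPKEYSIG", "REVKEYSIG"):
            key_id = value.split()[0]
            status = ('%s %s' % (key[:3], key[3:])).lower()
            return False, key_id, status
        if key == "KEYREVOKED":
            return None, None, '\n'.join([current_status, "key revoked"])
        return None, None, current_status

    valid, key_id, status = classify_status(
        "REVKEYSIG", "075BFD18B365D34C Test Expired Key <test@python-gnupg.git>")
    assert (valid, key_id, status) == (False, "075BFD18B365D34C", "rev keysig")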

View File

@ -70,6 +70,9 @@ _conf = os.path.join(os.path.join(_user, '.config'), 'python-gnupg')
## Logger is disabled by default
log = _logger.create_logger(0)
#: Compiled regex for determining a GnuPG binary's version:
_VERSION_STRING_REGEX = re.compile('(\d)*(\.)*(\d)*(\.)*(\d)*')
def find_encodings(enc=None, system=False):
"""Find functions for encoding translations for a specific codec.
@ -260,6 +263,8 @@ def _find_binary(binary=None):
"""
found = None
if binary is not None:
if os.path.isabs(binary) and os.path.isfile(binary):
return binary
if not os.path.isabs(binary):
try:
found = _which(binary)
@ -434,8 +439,7 @@ def _match_version_string(version):
:param str version: A version string in the form x.x.x
"""
regex = re.compile('(\d)*(\.)*(\d)*(\.)*(\d)*')
matched = regex.match(version)
matched = _VERSION_STRING_REGEX.match(version)
g = matched.groups()
major, minor, micro = int(g[0]), int(g[2]), int(g[4])
return (major, minor, micro)
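As a sanity check on the hoisted regex, a standalone sketch of the parsing done by `_match_version_string`. Caveat worth noting: because every capture group is starred, only the last repetition of each group is kept, so a multi-digit component such as '18' would come back as 8.

    # Standalone sketch of the parsing above.
    import re

    _VERSION_STRING_REGEX = re.compile(r'(\d)*(\.)*(\d)*(\.)*(\d)*')

    def match_version_string(version):
        g = _VERSION_STRING_REGEX.match(version).groups()
        return (int(g[0]), int(g[2]), int(g[4]))

    assert match_version_string('1.4.9') == (1, 4, 9)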

View File

@ -36,13 +36,7 @@ import os
import re
import textwrap
try:
from io import StringIO
except ImportError:
from cStringIO import StringIO
#: see :pep:`328` http://docs.python.org/2.5/whatsnew/pep-328.html
from . import _parsers
from . import _util
from . import _trust
from ._meta import GPGBase
@ -128,6 +122,7 @@ class GPG(GPGBase):
log.info(textwrap.dedent("""
Initialised settings:
binary: %s
binary version: %s
homedir: %s
keyring: %s
secring: %s
@ -136,9 +131,15 @@ class GPG(GPGBase):
options: %s
verbose: %s
use_agent: %s
""" % (self.binary, self.homedir, self.keyring, self.secring,
self.default_preference_list, self.keyserver, self.options,
str(self.verbose), str(self.use_agent))))
""" % (self.binary,
self.binary_version,
self.homedir,
self.keyring,
self.secring,
self.default_preference_list,
self.keyserver, self.options,
str(self.verbose),
str(self.use_agent))))
self._batch_dir = os.path.join(self.homedir, 'batch-files')
self._key_dir = os.path.join(self.homedir, 'generated-keys')
@ -147,58 +148,46 @@ class GPG(GPGBase):
self.temp_keyring = None
#: The secring used in the most recently created batch file
self.temp_secring = None
#: The version string of our GnuPG binary
self.binary_version = str()
## check that everything runs alright, and grab the gpg binary's
## version number while we're at it:
proc = self._open_subprocess(["--list-config", "--with-colons"])
result = self._result_map['list'](self)
self._read_data(proc.stdout, result)
if proc.returncode:
raise RuntimeError("Error invoking gpg: %s" % result.data)
version_line = str(result.data).partition(':version:')[2]
self.binary_version = version_line.split('\n')[0]
log.debug("Using GnuPG version %s" % self.binary_version)
if _util._is_gpg2:
# Make GnuPG>=2.0.0-only methods public:
self.fix_trustdb = self._fix_trustdb
self.import_ownertrust = self._import_ownertrust
self.export_ownertrust = self._export_ownertrust
# Make sure that the trustdb exists, or else GnuPG will exit with
# a fatal error (at least it does with GnuPG>=2.0.0):
self._create_trustdb()
# Make sure that the trustdb exists, or else GnuPG will exit with a
# fatal error (at least it does with GnuPG>=2.0.0):
self.create_trustdb()
@functools.wraps(_trust._create_trustdb)
def _create_trustdb(self):
def create_trustdb(self):
if self.is_gpg2():
_trust._create_trustdb(self)
else:
log.info("Creating the trustdb is only available with GnuPG>=2.x")
# For backward compatibility with python-gnupg<=1.3.1:
_create_trustdb = create_trustdb
@functools.wraps(_trust.fix_trustdb)
def _fix_trustdb(self, trustdb=None):
def fix_trustdb(self, trustdb=None):
if self.is_gpg2():
_trust.fix_trustdb(self)
else:
log.info("Fixing the trustdb is only available with GnuPG>=2.x")
# For backward compatibility with python-gnupg<=1.3.1:
_fix_trustdb = fix_trustdb
@functools.wraps(_trust.import_ownertrust)
def _import_ownertrust(self, trustdb=None):
def import_ownertrust(self, trustdb=None):
if self.is_gpg2():
_trust.import_ownertrust(self)
else:
log.info("Importing ownertrust is only available with GnuPG>=2.x")
# For backward compatibility with python-gnupg<=1.3.1:
_import_ownertrust = import_ownertrust
@functools.wraps(_trust.export_ownertrust)
def _export_ownertrust(self, trustdb=None):
def export_ownertrust(self, trustdb=None):
if self.is_gpg2():
_trust.export_ownertrust(self)
else:
log.info("Exporting ownertrust is only available with GnuPG>=2.x")
# For backward compatibility with python-gnupg<=1.3.1:
_export_ownertrust = export_ownertrust
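The four trustdb helpers are now public, each gated on ``is_gpg2()``, while the underscore-prefixed names are kept as aliases so callers written against python-gnupg<=1.3.1 keep working. A hedged usage sketch (the homedir is illustrative, and a GnuPG binary must be installed for the constructor to succeed):

    # Usage sketch: old and new spellings resolve to the same bound method.
    import gnupg

    gpg = gnupg.GPG(homedir='./keys')          # illustrative homedir
    assert gpg._create_trustdb == gpg.create_trustdb
    gpg.create_trustdb()   # with GnuPG 1.x this only logs an info message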
def is_gpg1(self):
"""Returns true if using GnuPG <= 1.x."""
@ -284,15 +273,13 @@ class GPG(GPGBase):
signatures. If using detached signatures, the file containing the
detached signature should be specified as the ``sig_file``.
:param file file: A file descriptor object. Its type will be checked
with :func:`_util._is_file`.
:param file file: A file descriptor object.
:param str sig_file: A file containing the GPG signature data for
``file``. If given, ``file`` is verified via this detached
signature.
signature. Its type will be checked with :func:`_util._is_file`.
"""
fn = None
result = self._result_map['verify'](self)
if sig_file is None:
@ -307,19 +294,15 @@ class GPG(GPGBase):
return result
log.debug('verify_file(): Handling detached verification')
sig_fh = None
data_fh = None
try:
sig_fh = open(sig_file, 'rb')
data_fh = open(file, 'rb')
args = ["--verify %s -" % sig_fh.name]
proc = self._open_subprocess(args)
writer = _util._threaded_copy_data(data_fh, proc.stdin)
writer = _util._threaded_copy_data(file, proc.stdin)
self._collect_output(proc, result, writer, stdin=proc.stdin)
finally:
if sig_fh and not sig_fh.closed:
sig_fh.close()
if data_fh and not data_fh.closed:
data_fh.close()
return result
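Detached verification now expects an already-open file object for the data plus the signature's path passed as ``sig_file``, mirroring the updated tests further down. A hedged usage sketch; the file names and homedir are illustrative:

    # Usage sketch for the new verify_file() calling convention.
    import gnupg

    gpg = gnupg.GPG(homedir='./keys')
    with open('cypherpunk_manifesto', 'rb') as datafd:
        verified = gpg.verify_file(datafd, sig_file='cypherpunk_manifesto.sig')
    print("valid: %s, fingerprint: %s" % (verified.valid, verified.fingerprint))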
def import_keys(self, key_data):

View File

@ -33,10 +33,8 @@ from glob import glob
from time import localtime
from time import mktime
import encodings
import doctest
import io
import logging
import os
import shutil
import sys
@ -192,7 +190,9 @@ class GPGTestCase(unittest.TestCase):
print(fixed)
test_file = os.path.join(_files, 'cypherpunk_manifesto')
self.assertTrue(os.path.isfile(test_file))
has_shell = self.gpg.verify_file(test_file, fixed)
datafd = open(test_file, 'rb')
has_shell = self.gpg.verify_file(datafd, sig_file=fixed)
datafd.close()
self.assertFalse(has_shell.valid)
def test_parsers_fix_unsafe_semicolon(self):
@ -564,7 +564,7 @@ class GPGTestCase(unittest.TestCase):
def test_import_only(self):
"""Test that key import works."""
self.test_list_keys_initial_public()
self.gpg.import_keys(KEYS_TO_IMPORT)
self.assertTrue(self.gpg.import_keys(KEYS_TO_IMPORT))
public_keys = self.gpg.list_keys()
self.assertTrue(is_list_with_len(public_keys, 2),
"2-element list expected")
@ -664,6 +664,7 @@ class GPGTestCase(unittest.TestCase):
sig = self.gpg.sign(message, default_key=key.fingerprint,
passphrase='johanborst')
self.assertTrue(sig, "Good passphrase should succeed")
try:
file = _util._make_binary_stream(sig.data, self.gpg._encoding)
verified = self.gpg.verify_file(file)
@ -696,7 +697,7 @@ class GPGTestCase(unittest.TestCase):
datafd.seek(0)
sigfd.seek(0)
verified = self.gpg.verify_file(datafn, sigfn)
verified = self.gpg.verify_file(datafd, sig_file=sigfn)
if key.fingerprint != verified.fingerprint:
log.warn("key fingerprint: %r", key.fingerprint)
@ -707,7 +708,7 @@ class GPGTestCase(unittest.TestCase):
os.unlink(sigfn)
def test_signature_verification_detached_binary(self):
"""Test that detached signature verification in binary mode fails."""
"""Test that detached signature verification in binary mode works."""
key = self.generate_key("Adi Shamir", "rsa.com")
datafn = os.path.join(_files, 'cypherpunk_manifesto')
@ -715,7 +716,6 @@ class GPGTestCase(unittest.TestCase):
datafd = open(datafn, 'rb')
data = datafd.read()
datafd.close()
sig = self.gpg.sign(data, default_key=key.fingerprint,
passphrase='adishamir',
@ -734,11 +734,13 @@ class GPGTestCase(unittest.TestCase):
with self.assertRaises(UnicodeDecodeError):
print("SIG=%s" % sig)
verifysig = open(sigfn, 'rb')
verification = self.gpg.verify_file(data, verifysig)
datafd.seek(0)
verification = self.gpg.verify_file(datafd, sig_file=sigfn)
self.assertTrue(isinstance(verification, gnupg._parsers.Verify))
self.assertFalse(verification.valid)
self.assertTrue(verification.valid)
datafd.close()
if os.path.isfile(sigfn):
os.unlink(sigfn)

View File

@ -1,30 +0,0 @@
From cfcb84db5452b1fbc801ca85f2f70015660f3132 Mon Sep 17 00:00:00 2001
From: Lunar <lunar@anargeek.net>
Date: Wed, 6 Mar 2013 18:39:34 +0100
Subject: [PATCH] Make _open_subprocess argument more explicit in _handle_io
The previous way worked out, but was really harder to understand.
---
gnupg.py | 6 +++++-
1 files changed, 5 insertions(+), 1 deletions(-)
diff --git a/gnupg.py b/gnupg.py
index 4a73164..479e6dd 100644
--- a/gnupg.py
+++ b/gnupg.py
@@ -984,7 +984,11 @@ class GPG(object):
"""
Handle a call to GPG - pass input data, collect output data.
"""
- p = self._open_subprocess(args, passphrase is not None)
+ if passphrase is not None:
+ ask_passphrase = True
+ else:
+ ask_passphrase = False
+ p = self._open_subprocess(args, ask_passphrase)
if not binary:
stdin = codecs.getwriter(self.encoding)(p.stdin)
else:
--
1.7.2.5

View File

@ -1,23 +0,0 @@
This patches folder is managed by quilt, which is a tool for automatic patch
application and removal. To use quilt with the patches in this directory,
navigate to the top level directory of this repository, and do:
$ quilt setup patches/series
To add an externally created patch (in other words, one created with ```diff
--git``` or ```git diff```), place that .patch or .diff file in this directory,
and do:
$ quilt import patches/<patchfile>
Then, to apply the new patch, do:
$ quilt push
Removing patches from the stack can be done with:
$ quilt pop
Please see the quilt(1) man page for more information on adding and importing new
patches. The Debian package maintainer guides also have chapters on quilt
usage.

View File

@ -1 +0,0 @@
0001-Make-_open_subprocess-argument-more-explicit-in-_han.patch

View File

@ -1,558 +0,0 @@
#!/usr/bin/env python
"""Bootstrap distribute installation
If you want to use setuptools in your package's setup.py, just include this
file in the same directory with it, and add this to the top of your setup.py::
from distribute_setup import use_setuptools
use_setuptools()
If you want to require a specific version of setuptools, set a download
mirror, or use an alternate download directory, you can do so by supplying
the appropriate options to ``use_setuptools()``.
This file can also be run as a script to install or upgrade setuptools.
This file was taken from http://nightly.ziade.org/distribute_setup.py
on 2013-05-27.
"""
import os
import shutil
import sys
import time
import fnmatch
import tempfile
import tarfile
import optparse
from distutils import log
try:
from site import USER_SITE
except ImportError:
USER_SITE = None
try:
import subprocess
def _python_cmd(*args):
args = (sys.executable,) + args
return subprocess.call(args) == 0
except ImportError:
# will be used for python 2.3
def _python_cmd(*args):
args = (sys.executable,) + args
# quoting arguments if windows
if sys.platform == 'win32':
def quote(arg):
if ' ' in arg:
return '"%s"' % arg
return arg
args = [quote(arg) for arg in args]
return os.spawnl(os.P_WAIT, sys.executable, *args) == 0
DEFAULT_VERSION = "0.6.44"
DEFAULT_URL = "http://pypi.python.org/packages/source/d/distribute/"
SETUPTOOLS_FAKED_VERSION = "0.6c11"
SETUPTOOLS_PKG_INFO = """\
Metadata-Version: 1.0
Name: setuptools
Version: %s
Summary: xxxx
Home-page: xxx
Author: xxx
Author-email: xxx
License: xxx
Description: xxx
""" % SETUPTOOLS_FAKED_VERSION
def _install(tarball, install_args=()):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# installing
log.warn('Installing Distribute')
if not _python_cmd('setup.py', 'install', *install_args):
log.warn('Something went wrong during the installation.')
log.warn('See the error message above.')
# exitcode will be 2
return 2
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
def _build_egg(egg, tarball, to_dir):
# extracting the tarball
tmpdir = tempfile.mkdtemp()
log.warn('Extracting in %s', tmpdir)
old_wd = os.getcwd()
try:
os.chdir(tmpdir)
tar = tarfile.open(tarball)
_extractall(tar)
tar.close()
# going in the directory
subdir = os.path.join(tmpdir, os.listdir(tmpdir)[0])
os.chdir(subdir)
log.warn('Now working in %s', subdir)
# building an egg
log.warn('Building a Distribute egg in %s', to_dir)
_python_cmd('setup.py', '-q', 'bdist_egg', '--dist-dir', to_dir)
finally:
os.chdir(old_wd)
shutil.rmtree(tmpdir)
# returning the result
log.warn(egg)
if not os.path.exists(egg):
raise IOError('Could not build the egg.')
def _do_download(version, download_base, to_dir, download_delay):
egg = os.path.join(to_dir, 'distribute-%s-py%d.%d.egg'
% (version, sys.version_info[0], sys.version_info[1]))
if not os.path.exists(egg):
tarball = download_setuptools(version, download_base,
to_dir, download_delay)
_build_egg(egg, tarball, to_dir)
sys.path.insert(0, egg)
import setuptools
setuptools.bootstrap_install_from = egg
def use_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, download_delay=15, no_fake=True):
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
was_imported = 'pkg_resources' in sys.modules or \
'setuptools' in sys.modules
try:
try:
import pkg_resources
# Setuptools 0.7b and later is a suitable (and preferable)
# substitute for any Distribute version.
try:
pkg_resources.require("setuptools>=0.7b")
return
except pkg_resources.DistributionNotFound:
pass
if not hasattr(pkg_resources, '_distribute'):
if not no_fake:
_fake_setuptools()
raise ImportError
except ImportError:
return _do_download(version, download_base, to_dir, download_delay)
try:
pkg_resources.require("distribute>=" + version)
return
except pkg_resources.VersionConflict:
e = sys.exc_info()[1]
if was_imported:
sys.stderr.write(
"The required version of distribute (>=%s) is not available,\n"
"and can't be installed while this script is running. Please\n"
"install a more recent version first, using\n"
"'easy_install -U distribute'."
"\n\n(Currently using %r)\n" % (version, e.args[0]))
sys.exit(2)
else:
del pkg_resources, sys.modules['pkg_resources'] # reload ok
return _do_download(version, download_base, to_dir,
download_delay)
except pkg_resources.DistributionNotFound:
return _do_download(version, download_base, to_dir,
download_delay)
finally:
if not no_fake:
_create_fake_setuptools_pkg_info(to_dir)
def download_setuptools(version=DEFAULT_VERSION, download_base=DEFAULT_URL,
to_dir=os.curdir, delay=15):
"""Download distribute from a specified location and return its filename
`version` should be a valid distribute version number that is available
as an egg for download under the `download_base` URL (which should end
with a '/'). `to_dir` is the directory where the egg will be downloaded.
`delay` is the number of seconds to pause before an actual download
attempt.
"""
# making sure we use the absolute path
to_dir = os.path.abspath(to_dir)
try:
from urllib.request import urlopen
except ImportError:
from urllib2 import urlopen
tgz_name = "distribute-%s.tar.gz" % version
url = download_base + tgz_name
saveto = os.path.join(to_dir, tgz_name)
src = dst = None
if not os.path.exists(saveto): # Avoid repeated downloads
try:
log.warn("Downloading %s", url)
src = urlopen(url)
# Read/write all in one block, so we don't create a corrupt file
# if the download is interrupted.
data = src.read()
dst = open(saveto, "wb")
dst.write(data)
finally:
if src:
src.close()
if dst:
dst.close()
return os.path.realpath(saveto)
def _no_sandbox(function):
def __no_sandbox(*args, **kw):
try:
from setuptools.sandbox import DirectorySandbox
if not hasattr(DirectorySandbox, '_old'):
def violation(*args):
pass
DirectorySandbox._old = DirectorySandbox._violation
DirectorySandbox._violation = violation
patched = True
else:
patched = False
except ImportError:
patched = False
try:
return function(*args, **kw)
finally:
if patched:
DirectorySandbox._violation = DirectorySandbox._old
del DirectorySandbox._old
return __no_sandbox
def _patch_file(path, content):
"""Will backup the file then patch it"""
f = open(path)
existing_content = f.read()
f.close()
if existing_content == content:
# already patched
log.warn('Already patched.')
return False
log.warn('Patching...')
_rename_path(path)
f = open(path, 'w')
try:
f.write(content)
finally:
f.close()
return True
_patch_file = _no_sandbox(_patch_file)
def _same_content(path, content):
f = open(path)
existing_content = f.read()
f.close()
return existing_content == content
def _rename_path(path):
new_name = path + '.OLD.%s' % time.time()
log.warn('Renaming %s to %s', path, new_name)
os.rename(path, new_name)
return new_name
def _remove_flat_installation(placeholder):
if not os.path.isdir(placeholder):
log.warn('Unknown installation at %s', placeholder)
return False
found = False
for file in os.listdir(placeholder):
if fnmatch.fnmatch(file, 'setuptools*.egg-info'):
found = True
break
if not found:
log.warn('Could not locate setuptools*.egg-info')
return
log.warn('Moving elements out of the way...')
pkg_info = os.path.join(placeholder, file)
if os.path.isdir(pkg_info):
patched = _patch_egg_dir(pkg_info)
else:
patched = _patch_file(pkg_info, SETUPTOOLS_PKG_INFO)
if not patched:
log.warn('%s already patched.', pkg_info)
return False
# now let's move the files out of the way
for element in ('setuptools', 'pkg_resources.py', 'site.py'):
element = os.path.join(placeholder, element)
if os.path.exists(element):
_rename_path(element)
else:
log.warn('Could not find the %s element of the '
'Setuptools distribution', element)
return True
_remove_flat_installation = _no_sandbox(_remove_flat_installation)
def _after_install(dist):
log.warn('After install bootstrap.')
placeholder = dist.get_command_obj('install').install_purelib
_create_fake_setuptools_pkg_info(placeholder)
def _create_fake_setuptools_pkg_info(placeholder):
if not placeholder or not os.path.exists(placeholder):
log.warn('Could not find the install location')
return
pyver = '%s.%s' % (sys.version_info[0], sys.version_info[1])
setuptools_file = 'setuptools-%s-py%s.egg-info' % \
(SETUPTOOLS_FAKED_VERSION, pyver)
pkg_info = os.path.join(placeholder, setuptools_file)
if os.path.exists(pkg_info):
log.warn('%s already exists', pkg_info)
return
log.warn('Creating %s', pkg_info)
try:
f = open(pkg_info, 'w')
except EnvironmentError:
log.warn("Don't have permissions to write %s, skipping", pkg_info)
return
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
pth_file = os.path.join(placeholder, 'setuptools.pth')
log.warn('Creating %s', pth_file)
f = open(pth_file, 'w')
try:
f.write(os.path.join(os.curdir, setuptools_file))
finally:
f.close()
_create_fake_setuptools_pkg_info = _no_sandbox(
_create_fake_setuptools_pkg_info
)
def _patch_egg_dir(path):
# let's check if it's already patched
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
if os.path.exists(pkg_info):
if _same_content(pkg_info, SETUPTOOLS_PKG_INFO):
log.warn('%s already patched.', pkg_info)
return False
_rename_path(path)
os.mkdir(path)
os.mkdir(os.path.join(path, 'EGG-INFO'))
pkg_info = os.path.join(path, 'EGG-INFO', 'PKG-INFO')
f = open(pkg_info, 'w')
try:
f.write(SETUPTOOLS_PKG_INFO)
finally:
f.close()
return True
_patch_egg_dir = _no_sandbox(_patch_egg_dir)
def _before_install():
log.warn('Before install bootstrap.')
_fake_setuptools()
def _under_prefix(location):
if 'install' not in sys.argv:
return True
args = sys.argv[sys.argv.index('install') + 1:]
for index, arg in enumerate(args):
for option in ('--root', '--prefix'):
if arg.startswith('%s=' % option):
top_dir = arg.split('root=')[-1]
return location.startswith(top_dir)
elif arg == option:
if len(args) > index:
top_dir = args[index + 1]
return location.startswith(top_dir)
if arg == '--user' and USER_SITE is not None:
return location.startswith(USER_SITE)
return True
def _fake_setuptools():
log.warn('Scanning installed packages')
try:
import pkg_resources
except ImportError:
# we're cool
log.warn('Setuptools or Distribute does not seem to be installed.')
return
ws = pkg_resources.working_set
try:
setuptools_dist = ws.find(
pkg_resources.Requirement.parse('setuptools', replacement=False)
)
except TypeError:
# old distribute API
setuptools_dist = ws.find(
pkg_resources.Requirement.parse('setuptools')
)
if setuptools_dist is None:
log.warn('No setuptools distribution found')
return
# detecting if it was already faked
setuptools_location = setuptools_dist.location
log.warn('Setuptools installation detected at %s', setuptools_location)
# if --root or --prefix was provided, and if
# setuptools is not located in them, we don't patch it
if not _under_prefix(setuptools_location):
log.warn('Not patching, --root or --prefix is installing Distribute'
' in another location')
return
# let's see if it's an egg
if not setuptools_location.endswith('.egg'):
log.warn('Non-egg installation')
res = _remove_flat_installation(setuptools_location)
if not res:
return
else:
log.warn('Egg installation')
pkg_info = os.path.join(setuptools_location, 'EGG-INFO', 'PKG-INFO')
if (os.path.exists(pkg_info) and
_same_content(pkg_info, SETUPTOOLS_PKG_INFO)):
log.warn('Already patched.')
return
log.warn('Patching...')
# let's create a fake egg replacing setuptools one
res = _patch_egg_dir(setuptools_location)
if not res:
return
log.warn('Patching complete.')
_relaunch()
def _relaunch():
log.warn('Relaunching...')
# we have to relaunch the process
# pip marker to avoid a relaunch bug
_cmd1 = ['-c', 'install', '--single-version-externally-managed']
_cmd2 = ['-c', 'install', '--record']
if sys.argv[:3] == _cmd1 or sys.argv[:3] == _cmd2:
sys.argv[0] = 'setup.py'
args = [sys.executable] + sys.argv
sys.exit(subprocess.call(args))
def _extractall(self, path=".", members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
import copy
import operator
from tarfile import ExtractError
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 448 # decimal for oct 0700
self.extract(tarinfo, path)
# Reverse sort directories.
if sys.version_info < (2, 4):
def sorter(dir1, dir2):
return cmp(dir1.name, dir2.name)
directories.sort(sorter)
directories.reverse()
else:
directories.sort(key=operator.attrgetter('name'), reverse=True)
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
try:
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError:
e = sys.exc_info()[1]
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def _build_install_args(options):
"""
Build the arguments to 'python setup.py install' on the distribute package
"""
install_args = []
if options.user_install:
if sys.version_info < (2, 6):
log.warn("--user requires Python 2.6 or later")
raise SystemExit(1)
install_args.append('--user')
return install_args
def _parse_args():
"""
Parse the command line for options
"""
parser = optparse.OptionParser()
parser.add_option(
'--user', dest='user_install', action='store_true', default=False,
help='install in user site package (requires Python 2.6 or later)')
parser.add_option(
'--download-base', dest='download_base', metavar="URL",
default=DEFAULT_URL,
help='alternative URL from where to download the distribute package')
options, args = parser.parse_args()
# positional arguments are ignored
return options
def main(version=DEFAULT_VERSION):
"""Install or upgrade setuptools and EasyInstall"""
options = _parse_args()
tarball = download_setuptools(download_base=options.download_base)
return _install(tarball, _build_install_args(options))
if __name__ == '__main__':
sys.exit(main())

File diff suppressed because it is too large.

View File

@ -1,55 +0,0 @@
#!/bin/bash
# -*- coding: utf-8 -*-
#
# This file is part of python-gnupg, a Python interface to GnuPG.
# Copyright © 2013 Isis Lovecruft, <isis@leap.se> 0xA3ADB67A2CDB8B35
# © 2013 Andrej B.
# © 2013 LEAP Encryption Access Project
# © 2008-2012 Vinay Sajip
# © 2005 Steve Traugott
# © 2004 A.M. Kuchling
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the Free
# Software Foundation, either version 3 of the License, or (at your option)
# any later version.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the included LICENSE file for details.
project=python-gnupg
VENV=$(which virtualenv)
WRPR=$(which virtualenvwrapper.sh)
if ! test -n "$VENV" ; then
printf "Couldn't find virtualenv. Are you sure it's installed?"
exit 1
fi
if ! test -n "$WRPR"; then
printf "Couldn't find virtualenvwrapper. Are you sure it's installed?"
exit 1
fi
test -r "$WRPR" && . $WRPR
okay=$?
if test "$okay" -eq 0 ; then
printf "Using %s as WORKON_HOME for the new virtualenv...\n" $PWD
printf "What should the name of the new virtualenv be? (default: '%s')\n" $project
read -p"Name for this virtualenv?: " name
if test -z "$name"; then
name="$project"
fi
printf "Using '$name' as our project's name..."
printf "Creating virtualenv..."
mkvirtualenv -a "$PWD" --no-site-packages --unzip-setuptools \
--distribute --prompt="(gnupg)" "$name"
exit $?
else
printf "Something went wrong..."
printf "Exit code %d from mkvirtualenv." "$okay"
exit $okay
fi

File diff suppressed because it is too large.