Timestamp:
Mar 19, 2014, 11:31:01 PM
Author:
dmik
Message:

python: Merge vendor 2.7.6 to trunk.

Location:
python/trunk
Files:
2 edited

Legend:

' ' Unmodified
'+' Added
'-' Removed
  • python/trunk

  • python/trunk/Lib/test/test_hashlib.py

--- python/trunk/Lib/test/test_hashlib.py (r2)
+++ python/trunk/Lib/test/test_hashlib.py (r391)
@@ -1,14 +1,26 @@
 # Test hashlib module
 #
-# $Id: test_hashlib.py 66496 2008-09-18 01:22:16Z benjamin.peterson $
+# $Id$
 #
-#  Copyright (C) 2005   Gregory P. Smith (greg@krypto.org)
+#  Copyright (C) 2005-2010   Gregory P. Smith (greg@krypto.org)
 #  Licensed to PSF under a Contributor Agreement.
 #
 
+import array
 import hashlib
+import itertools
+import sys
+try:
+    import threading
+except ImportError:
+    threading = None
 import unittest
+import warnings
 from test import test_support
 from test.test_support import _4G, precisionbigmemtest
+
+# Were we compiled --with-pydebug or with #define Py_DEBUG?
+COMPILED_WITH_PYDEBUG = hasattr(sys, 'gettotalrefcount')
+
 
 def hexstr(s):
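
Note on the new COMPILED_WITH_PYDEBUG flag (a standalone aside, not part of the changeset): sys.gettotalrefcount exists only in interpreters built --with-pydebug, so its presence is a reliable build-type probe. A minimal Python 2 sketch:

    import sys

    # Debug builds track a global reference count; release builds do not.
    if hasattr(sys, 'gettotalrefcount'):
        print 'pydebug build; total refcount:', sys.gettotalrefcount()
    else:
        print 'release build'

In the test this flag gates the warning emitted when an optional C extension such as _md5 or _sha256 fails to import.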
@@ -27,17 +39,100 @@
                              'sha384', 'SHA384', 'sha512', 'SHA512' )
 
+    _warn_on_extension_import = COMPILED_WITH_PYDEBUG
+
+    def _conditional_import_module(self, module_name):
+        """Import a module and return a reference to it or None on failure."""
+        try:
+            exec('import '+module_name)
+        except ImportError, error:
+            if self._warn_on_extension_import:
+                warnings.warn('Did a C extension fail to compile? %s' % error)
+        return locals().get(module_name)
+
+    def __init__(self, *args, **kwargs):
+        algorithms = set()
+        for algorithm in self.supported_hash_names:
+            algorithms.add(algorithm.lower())
+        self.constructors_to_test = {}
+        for algorithm in algorithms:
+            self.constructors_to_test[algorithm] = set()
+
+        # For each algorithm, test the direct constructor and the use
+        # of hashlib.new given the algorithm name.
+        for algorithm, constructors in self.constructors_to_test.items():
+            constructors.add(getattr(hashlib, algorithm))
+            def _test_algorithm_via_hashlib_new(data=None, _alg=algorithm):
+                if data is None:
+                    return hashlib.new(_alg)
+                return hashlib.new(_alg, data)
+            constructors.add(_test_algorithm_via_hashlib_new)
+
+        _hashlib = self._conditional_import_module('_hashlib')
+        if _hashlib:
+            # These two algorithms should always be present when this module
+            # is compiled.  If not, something was compiled wrong.
+            assert hasattr(_hashlib, 'openssl_md5')
+            assert hasattr(_hashlib, 'openssl_sha1')
+            for algorithm, constructors in self.constructors_to_test.items():
+                constructor = getattr(_hashlib, 'openssl_'+algorithm, None)
+                if constructor:
+                    constructors.add(constructor)
+
+        _md5 = self._conditional_import_module('_md5')
+        if _md5:
+            self.constructors_to_test['md5'].add(_md5.new)
+        _sha = self._conditional_import_module('_sha')
+        if _sha:
+            self.constructors_to_test['sha1'].add(_sha.new)
+        _sha256 = self._conditional_import_module('_sha256')
+        if _sha256:
+            self.constructors_to_test['sha224'].add(_sha256.sha224)
+            self.constructors_to_test['sha256'].add(_sha256.sha256)
+        _sha512 = self._conditional_import_module('_sha512')
+        if _sha512:
+            self.constructors_to_test['sha384'].add(_sha512.sha384)
+            self.constructors_to_test['sha512'].add(_sha512.sha512)
+
+        super(HashLibTestCase, self).__init__(*args, **kwargs)
+
+    def test_hash_array(self):
+        a = array.array("b", range(10))
+        constructors = self.constructors_to_test.itervalues()
+        for cons in itertools.chain.from_iterable(constructors):
+            c = cons(a)
+            c.hexdigest()
+
+    def test_algorithms_attribute(self):
+        self.assertEqual(hashlib.algorithms,
+            tuple([_algo for _algo in self.supported_hash_names if
+                                                _algo.islower()]))
+
     def test_unknown_hash(self):
+        self.assertRaises(ValueError, hashlib.new, 'spam spam spam spam spam')
+        self.assertRaises(TypeError, hashlib.new, 1)
+
+    def test_get_builtin_constructor(self):
+        get_builtin_constructor = hashlib.__dict__[
+                '__get_builtin_constructor']
+        self.assertRaises(ValueError, get_builtin_constructor, 'test')
         try:
-            hashlib.new('spam spam spam spam spam')
-        except ValueError:
+            import _md5
+        except ImportError:
             pass
-        else:
-            self.assert_(0 == "hashlib didn't reject bogus hash name")
+        # This forces an ImportError for "import _md5" statements
+        sys.modules['_md5'] = None
+        try:
+            self.assertRaises(ValueError, get_builtin_constructor, 'md5')
+        finally:
+            if '_md5' in locals():
+                sys.modules['_md5'] = _md5
+            else:
+                del sys.modules['_md5']
+        self.assertRaises(TypeError, get_builtin_constructor, 3)
 
     def test_hexdigest(self):
         for name in self.supported_hash_names:
             h = hashlib.new(name)
-            self.assert_(hexstr(h.digest()) == h.hexdigest())
-
+            self.assertTrue(hexstr(h.digest()) == h.hexdigest())
 
     def test_large_update(self):
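
Note on the new test_get_builtin_constructor (standalone sketch; the module name below is hypothetical): in Python 2, storing None in sys.modules makes a subsequent import of that name raise ImportError, which is how the test forces hashlib's fallback constructor lookup down its failure path before restoring the real module in the finally block:

    import sys

    sys.modules['_not_a_real_module'] = None   # hypothetical name; blocks the import
    try:
        import _not_a_real_module
    except ImportError:
        pass                                   # raised because of the None entry
    finally:
        del sys.modules['_not_a_real_module']  # clean up, as the test's finally does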
@@ -45,4 +140,5 @@
         bees = 'b' * 127
         cees = 'c' * 126
+        abcs = aas + bees + cees
 
         for name in self.supported_hash_names:
@@ -53,15 +149,52 @@
 
             m2 = hashlib.new(name)
-            m2.update(aas + bees + cees)
-            self.assertEqual(m1.digest(), m2.digest())
+            m2.update(abcs)
+            self.assertEqual(m1.digest(), m2.digest(), name+' update problem.')
+
+            m3 = hashlib.new(name, abcs)
+            self.assertEqual(m1.digest(), m3.digest(), name+' new problem.')
 
     def check(self, name, data, digest):
-        # test the direct constructors
-        computed = getattr(hashlib, name)(data).hexdigest()
-        self.assert_(computed == digest)
-        # test the general new() interface
-        computed = hashlib.new(name, data).hexdigest()
-        self.assert_(computed == digest)
-
+        constructors = self.constructors_to_test[name]
+        # 2 is for hashlib.name(...) and hashlib.new(name, ...)
+        self.assertGreaterEqual(len(constructors), 2)
+        for hash_object_constructor in constructors:
+            computed = hash_object_constructor(data).hexdigest()
+            self.assertEqual(
+                    computed, digest,
+                    "Hash algorithm %s constructed using %s returned hexdigest"
+                    " %r for %d byte input data that should have hashed to %r."
+                    % (name, hash_object_constructor,
+                       computed, len(data), digest))
+
+    def check_update(self, name, data, digest):
+        constructors = self.constructors_to_test[name]
+        # 2 is for hashlib.name(...) and hashlib.new(name, ...)
+        self.assertGreaterEqual(len(constructors), 2)
+        for hash_object_constructor in constructors:
+            h = hash_object_constructor()
+            h.update(data)
+            computed = h.hexdigest()
+            self.assertEqual(
+                    computed, digest,
+                    "Hash algorithm %s using %s when updated returned hexdigest"
+                    " %r for %d byte input data that should have hashed to %r."
+                    % (name, hash_object_constructor,
+                       computed, len(data), digest))
+
+    def check_unicode(self, algorithm_name):
+        # Unicode objects are not allowed as input.
+        expected = hashlib.new(algorithm_name, str(u'spam')).hexdigest()
+        self.check(algorithm_name, u'spam', expected)
+
+    def test_unicode(self):
+        # In python 2.x unicode is auto-encoded to the system default encoding
+        # when passed to hashlib functions.
+        self.check_unicode('md5')
+        self.check_unicode('sha1')
+        self.check_unicode('sha224')
+        self.check_unicode('sha256')
+        self.check_unicode('sha384')
+        self.check_unicode('sha512')
 
     def test_case_md5_0(self):
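
Note: the rewritten check()/check_update() pair no longer hard-codes two code paths; each iterates over every constructor that __init__ registered for the algorithm (the direct hashlib attribute, a hashlib.new wrapper, plus the _hashlib.openssl_* and legacy-module variants when those imported). A standalone sketch of the same idea against the well-known FIPS 180 SHA-1 test vector for 'abc':

    import hashlib

    data = 'abc'
    expected = 'a9993e364706816aba3e25717850c26c9cd0d89d'  # SHA-1('abc')
    constructors = [hashlib.sha1,                      # direct constructor
                    lambda d: hashlib.new('sha1', d)]  # generic new() path
    for cons in constructors:
        assert cons(data).hexdigest() == expected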
@@ -83,4 +216,13 @@
                 pass # 32-bit arch
 
+    @precisionbigmemtest(size=_4G + 5, memuse=1)
+    def test_case_md5_huge_update(self, size):
+        if size == _4G + 5:
+            try:
+                self.check_update('md5', 'A'*size,
+                        'c9af2dff37468ce5dfee8f2cfc0a9c6d')
+            except OverflowError:
+                pass # 32-bit arch
+
     @precisionbigmemtest(size=_4G - 1, memuse=1)
     def test_case_md5_uintmax(self, size):
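
Note on the "pass # 32-bit arch" branches: on a 32-bit build a string repeat count must fit in a C Py_ssize_t, so merely constructing the >4 GiB input raises OverflowError before any hashing happens, and the tests treat that as an implicit skip. A standalone illustration (Python 2):

    import sys

    if sys.maxint == 2**31 - 1:     # 32-bit interpreter
        try:
            'A' * (2**32 + 5)       # the _4G + 5 size the tests use
        except OverflowError:
            pass                    # expected; mirrors the tests' except clause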
@@ -111,4 +253,21 @@
                    "34aa973cd4c4daa4f61eeb2bdbad27316534016f")
 
+    @precisionbigmemtest(size=_4G + 5, memuse=1)
+    def test_case_sha1_huge(self, size):
+        if size == _4G + 5:
+            try:
+                self.check('sha1', 'A'*size,
+                        '87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b')
+            except OverflowError:
+                pass # 32-bit arch
+
+    @precisionbigmemtest(size=_4G + 5, memuse=1)
+    def test_case_sha1_huge_update(self, size):
+        if size == _4G + 5:
+            try:
+                self.check_update('sha1', 'A'*size,
+                        '87d745c50e6b2879ffa0fb2c930e9fbfe0dc9a5b')
+            except OverflowError:
+                pass # 32-bit arch
 
     # use the examples from Federal Information Processing Standards
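
Note: _4G + 5 sits just past 2**32 bytes, which is presumably the point of these additions: if any length in the underlying C update path were held in a 32-bit unsigned integer, it would wrap modulo 2**32 and only a 5-byte tail would be hashed, yielding a wrong digest rather than an error. The boundary arithmetic, as a sketch:

    _4G = 2**32          # matches test.test_support._4G
    size = _4G + 5
    # A 32-bit unsigned length would silently truncate to:
    assert size % 2**32 == 5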
@@ -197,9 +356,44 @@
           "de0ff244877ea60a4cb0432ce577c31beb009c5c2c49aa2e4eadb217ad8cc09b")
 
+    @unittest.skipUnless(threading, 'Threading required for this test.')
+    @test_support.reap_threads
+    def test_threaded_hashing(self):
+        # Updating the same hash object from several threads at once
+        # using data chunk sizes containing the same byte sequences.
+        #
+        # If the internal locks are working to prevent multiple
+        # updates on the same object from running at once, the resulting
+        # hash will be the same as doing it single threaded upfront.
+        hasher = hashlib.sha1()
+        num_threads = 5
+        smallest_data = 'swineflu'
+        data = smallest_data*200000
+        expected_hash = hashlib.sha1(data*num_threads).hexdigest()
+
+        def hash_in_chunks(chunk_size, event):
+            index = 0
+            while index < len(data):
+                hasher.update(data[index:index+chunk_size])
+                index += chunk_size
+            event.set()
+
+        events = []
+        for threadnum in xrange(num_threads):
+            chunk_size = len(data) // (10**threadnum)
+            assert chunk_size > 0
+            assert chunk_size % len(smallest_data) == 0
+            event = threading.Event()
+            events.append(event)
+            threading.Thread(target=hash_in_chunks,
+                             args=(chunk_size, event)).start()
+
+        for event in events:
+            event.wait()
+
+        self.assertEqual(expected_hash, hasher.hexdigest())
 
 def test_main():
     test_support.run_unittest(HashLibTestCase)
 
-
 if __name__ == "__main__":
     test_main()
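
Note on test_threaded_hashing: the assertion chunk_size % len(smallest_data) == 0 is what makes the expected digest independent of thread scheduling. Every chunk is a whole number of 'swineflu' repetitions, so provided each update() call is atomic (the internal lock under test), any interleaving of the chunks concatenates to the same byte stream as the single-threaded data*num_threads. A standalone demonstration:

    import hashlib
    import itertools

    unit = 'swineflu'
    chunks = [unit * 2, unit * 3, unit * 5]    # all whole multiples of the unit
    digests = set(hashlib.sha1(''.join(p)).hexdigest()
                  for p in itertools.permutations(chunks))
    assert len(digests) == 1                   # every ordering hashes identically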